diff --git a/pymc3/distributions/__init__.py b/pymc3/distributions/__init__.py index 3f1c2b6a29..6d7abce67f 100644 --- a/pymc3/distributions/__init__.py +++ b/pymc3/distributions/__init__.py @@ -28,6 +28,7 @@ from .continuous import Logistic from .continuous import LogitNormal from .continuous import Interpolated +from .continuous import Rice from .discrete import Binomial from .discrete import BetaBinomial diff --git a/pymc3/distributions/continuous.py b/pymc3/distributions/continuous.py index f0e63e5e68..9272ad42d3 100644 --- a/pymc3/distributions/continuous.py +++ b/pymc3/distributions/continuous.py @@ -14,19 +14,22 @@ from scipy.interpolate import InterpolatedUnivariateSpline import warnings +from theano.scalar import i1, i0 + from pymc3.theanof import floatX from . import transforms from pymc3.util import get_variable_name from .special import log_i0 from ..math import invlogit, logit -from .dist_math import bound, logpow, gammaln, betaln, std_cdf, alltrue_elemwise, SplineWrapper +from .dist_math import bound, logpow, gammaln, betaln, std_cdf, alltrue_elemwise, SplineWrapper, i0e from .distribution import Continuous, draw_values, generate_samples __all__ = ['Uniform', 'Flat', 'HalfFlat', 'Normal', 'Beta', 'Exponential', 'Laplace', 'StudentT', 'Cauchy', 'HalfCauchy', 'Gamma', 'Weibull', 'HalfStudentT', 'Lognormal', 'ChiSquared', 'HalfNormal', 'Wald', 'Pareto', 'InverseGamma', 'ExGaussian', 'VonMises', 'SkewNormal', - 'Triangular', 'Gumbel', 'Logistic', 'LogitNormal', 'Interpolated'] + 'Triangular', 'Gumbel', 'Logistic', 'LogitNormal', 'Interpolated', + 'Rice'] class PositiveContinuous(Continuous): @@ -68,7 +71,7 @@ def assert_negative_support(var, label, distname, value=-1e-6): def get_tau_sd(tau=None, sd=None): """ - Find precision and standard deviation. The link between the two + Find precision and standard deviation. The link between the two parameterizations is given by the inverse relationship: .. 
math:: @@ -275,14 +278,14 @@ class Normal(Continuous): Standard deviation (sd > 0) (only required if tau is not specified). tau : float Precision (tau > 0) (only required if sd is not specified). - + Examples -------- .. code-block:: python with pm.Model(): x = pm.Normal('x', mu=0, sd=10) - + with pm.Model(): x = pm.Normal('x', mu=0, tau=1/23) """ @@ -887,7 +890,7 @@ class Lognormal(PositiveContinuous): Standard deviation. (sd > 0). (only required if tau is not specified). tau : float Scale parameter (tau > 0). (only required if sd is not specified). - + Example ------- .. code-block:: python @@ -994,14 +997,14 @@ class StudentT(Continuous): Standard deviation (sd > 0) (only required if lam is not specified) lam : float Precision (lam > 0) (only required if sd is not specified) - + Examples -------- .. code-block:: python with pm.Model(): x = pm.StudentT('x', nu=15, mu=0, sd=10) - + with pm.Model(): x = pm.StudentT('x', nu=15, mu=0, lam=1/23) """ @@ -1601,8 +1604,8 @@ def __init__(self, alpha, beta, *args, **kwargs): self.median = beta * tt.exp(gammaln(tt.log(2)))**(1. / alpha) self.variance = (beta**2) * \ tt.exp(gammaln(1 + 2. / alpha - self.mean**2)) - self.mode = tt.switch(alpha >= 1, - beta * ((alpha - 1)/alpha) ** (1 / alpha), + self.mode = tt.switch(alpha >= 1, + beta * ((alpha - 1)/alpha) ** (1 / alpha), 0) # Reference: https://en.wikipedia.org/wiki/Weibull_distribution assert_negative_support(alpha, 'alpha', 'Weibull') @@ -1682,7 +1685,7 @@ class HalfStudentT(PositiveContinuous): lam : float Scale parameter (lam > 0). Converges to the precision as nu increases. (only required if sd is not specified) - + Examples -------- .. code-block:: python @@ -1690,7 +1693,7 @@ class HalfStudentT(PositiveContinuous): # Only pass in one of lam or sd, but not both. 
class Rice(Continuous):
    R"""
    Rice distribution.

    .. math::

       f(x\mid \nu ,\sigma )=
       {\frac {x}{\sigma ^{2}}}\exp
       \left({\frac {-(x^{2}+\nu ^{2})}{2\sigma ^{2}}}\right)I_{0}\left({\frac {x\nu }{\sigma ^{2}}}\right),

    ======== ==============================================================
    Support  :math:`x \in (0, +\infty)`
    Mean     :math:`\sigma {\sqrt {\pi /2}}\,\,L_{1/2}(-\nu ^{2}/2\sigma ^{2})`
    Variance :math:`2\sigma ^{2}+\nu ^{2}-{\frac {\pi \sigma ^{2}}{2}}L_{1/2}^{2}
             \left({\frac {-\nu ^{2}}{2\sigma ^{2}}}\right)`
    ======== ==============================================================

    Parameters
    ----------
    nu : float
        Shape parameter (nu >= 0).  NOTE: ``nu`` is the *dimensionless*
        shape :math:`b = \nu/\sigma` of the formulas above — the same
        convention as ``scipy.stats.rice`` — which is how ``logp`` and
        ``random`` interpret it.
    sd : float
        Standard deviation (scale) parameter (sd > 0).
    """

    def __init__(self, nu=None, sd=None, *args, **kwargs):
        super(Rice, self).__init__(*args, **kwargs)
        self.nu = nu = tt.as_tensor_variable(nu)
        self.sd = sd = tt.as_tensor_variable(sd)

        # Moments of X ~ Rice(b=nu, scale=1), rescaled by sd.  Because nu is
        # the dimensionless shape b (see Parameters), the Laguerre polynomial
        # L_{1/2} is evaluated at -nu**2/2, NOT at -nu**2/(2*sd**2) (that form
        # is only valid when nu carries units of sd, which would contradict
        # logp/random).  L_{1/2}(x) = exp(x/2)*((1-x)*I0(-x/2) - x*I1(-x/2)).
        lag_arg = -nu**2 / 2.
        laguerre = (tt.exp(lag_arg / 2.)
                    * ((1. - lag_arg) * i0(-lag_arg / 2.)
                       - lag_arg * i1(-lag_arg / 2.)))
        self.mean = sd * np.sqrt(np.pi / 2.) * laguerre
        # Var(sd * X) = sd**2 * (2 + nu**2 - (pi/2) * L_{1/2}(-nu**2/2)**2)
        self.variance = (2. * sd**2 + nu**2 * sd**2
                         - (np.pi * sd**2 / 2.) * laguerre**2)

        assert_negative_support(nu, 'nu', 'Rice')
        assert_negative_support(sd, 'sd', 'Rice')

    def random(self, point=None, size=None, repeat=None):
        """
        Draw random samples from the Rice distribution via
        ``scipy.stats.rice`` (same ``b``/``scale`` convention as ``logp``).
        """
        nu, sd = draw_values([self.nu, self.sd], point=point)
        return generate_samples(stats.rice.rvs, b=nu, scale=sd, loc=0,
                                dist_shape=self.shape, size=size)

    def logp(self, value):
        """
        Calculate log-probability of the Rice distribution at ``value``.

        With x = value/sd, the density is (x/sd) * exp(-(x**2 + nu**2)/2)
        * I0(x*nu).  The identity exp(-(x-nu)**2/2) * i0e(x*nu)
        == exp(-(x**2+nu**2)/2) * I0(x*nu) holds for x, nu >= 0; the
        exponentially scaled Bessel function i0e keeps the product finite
        where I0 alone would overflow.
        """
        nu = self.nu
        sd = self.sd
        x = value / sd
        return bound(tt.log(x * tt.exp((-(x - nu) * (x - nu)) / 2) * i0e(x * nu) / sd),
                     sd > 0,   # sd == 0 would divide by zero -> NaN, so exclude it
                     nu >= 0,
                     value > 0,
                     )
Logistic(Continuous): R""" Logistic log-likelihood. diff --git a/pymc3/distributions/dist_math.py b/pymc3/distributions/dist_math.py index e6e6daa1cf..f6725953cc 100644 --- a/pymc3/distributions/dist_math.py +++ b/pymc3/distributions/dist_math.py @@ -9,6 +9,7 @@ import scipy.linalg import theano.tensor as tt import theano +from theano.scalar import UnaryScalarOp, upgrade_to_float from .special import gammaln from pymc3.theanof import floatX @@ -461,3 +462,16 @@ def infer_shape(self, node, shapes): def eigh(a, UPLO='L'): """A copy, remove with Eigh and EighGrad when possible""" return Eigh(UPLO)(a) + + +class I0e(UnaryScalarOp): + """ + Modified Bessel function of the first kind of order 0, exponentially scaled. + """ + nfunc_spec = ('scipy.special.i0e', 1, 1) + + def impl(self, x): + return scipy.special.i0e(x) + + +i0e = I0e(upgrade_to_float, name='i0e') diff --git a/pymc3/tests/test_distributions.py b/pymc3/tests/test_distributions.py index ccee66a69a..8ef4e0afe0 100644 --- a/pymc3/tests/test_distributions.py +++ b/pymc3/tests/test_distributions.py @@ -16,7 +16,7 @@ Flat, LKJCorr, Wald, ChiSquared, HalfNormal, DiscreteUniform, Bound, Uniform, Triangular, Binomial, SkewNormal, DiscreteWeibull, Gumbel, Logistic, OrderedLogistic, LogitNormal, Interpolated, - ZeroInflatedBinomial, HalfFlat, AR1, KroneckerNormal) + ZeroInflatedBinomial, HalfFlat, AR1, KroneckerNormal, Rice) from ..distributions import continuous from pymc3.theanof import floatX @@ -1056,6 +1056,10 @@ def test_multidimensional_beta_construction(self): with Model(): Beta('beta', alpha=1., beta=1., shape=(10, 20)) + def test_rice(self): + self.pymc3_matches_scipy(Rice, Rplus, {'nu': Rplus, 'sd': Rplusbig}, + lambda value, nu, sd: sp.rice.logpdf(value, b=nu, loc=0, scale=sd)) + @pytest.mark.xfail(condition=(theano.config.floatX == "float32"), reason="Fails on float32") def test_interpolated(self): for mu in R.vals: