Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions pymc3/distributions/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@
from .continuous import Logistic
from .continuous import LogitNormal
from .continuous import Interpolated
from .continuous import Rice

from .discrete import Binomial
from .discrete import BetaBinomial
Expand Down
80 changes: 68 additions & 12 deletions pymc3/distributions/continuous.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,19 +14,22 @@
from scipy.interpolate import InterpolatedUnivariateSpline
import warnings

from theano.scalar import i1, i0

from pymc3.theanof import floatX
from . import transforms
from pymc3.util import get_variable_name
from .special import log_i0
from ..math import invlogit, logit
from .dist_math import bound, logpow, gammaln, betaln, std_cdf, alltrue_elemwise, SplineWrapper
from .dist_math import bound, logpow, gammaln, betaln, std_cdf, alltrue_elemwise, SplineWrapper, i0e
from .distribution import Continuous, draw_values, generate_samples

__all__ = ['Uniform', 'Flat', 'HalfFlat', 'Normal', 'Beta', 'Exponential',
'Laplace', 'StudentT', 'Cauchy', 'HalfCauchy', 'Gamma', 'Weibull',
'HalfStudentT', 'Lognormal', 'ChiSquared', 'HalfNormal', 'Wald',
'Pareto', 'InverseGamma', 'ExGaussian', 'VonMises', 'SkewNormal',
'Triangular', 'Gumbel', 'Logistic', 'LogitNormal', 'Interpolated']
'Triangular', 'Gumbel', 'Logistic', 'LogitNormal', 'Interpolated',
'Rice']


class PositiveContinuous(Continuous):
Expand Down Expand Up @@ -68,7 +71,7 @@ def assert_negative_support(var, label, distname, value=-1e-6):

def get_tau_sd(tau=None, sd=None):
"""
Find precision and standard deviation. The link between the two
Find precision and standard deviation. The link between the two
parameterizations is given by the inverse relationship:

.. math::
Expand Down Expand Up @@ -275,14 +278,14 @@ class Normal(Continuous):
Standard deviation (sd > 0) (only required if tau is not specified).
tau : float
Precision (tau > 0) (only required if sd is not specified).

Examples
--------
.. code-block:: python

with pm.Model():
x = pm.Normal('x', mu=0, sd=10)

with pm.Model():
x = pm.Normal('x', mu=0, tau=1/23)
"""
Expand Down Expand Up @@ -887,7 +890,7 @@ class Lognormal(PositiveContinuous):
Standard deviation. (sd > 0). (only required if tau is not specified).
tau : float
Scale parameter (tau > 0). (only required if sd is not specified).

Example
-------
.. code-block:: python
Expand Down Expand Up @@ -994,14 +997,14 @@ class StudentT(Continuous):
Standard deviation (sd > 0) (only required if lam is not specified)
lam : float
Precision (lam > 0) (only required if sd is not specified)

Examples
--------
.. code-block:: python

with pm.Model():
x = pm.StudentT('x', nu=15, mu=0, sd=10)

with pm.Model():
x = pm.StudentT('x', nu=15, mu=0, lam=1/23)
"""
Expand Down Expand Up @@ -1601,8 +1604,8 @@ def __init__(self, alpha, beta, *args, **kwargs):
self.median = beta * tt.exp(gammaln(tt.log(2)))**(1. / alpha)
self.variance = (beta**2) * \
tt.exp(gammaln(1 + 2. / alpha - self.mean**2))
self.mode = tt.switch(alpha >= 1,
beta * ((alpha - 1)/alpha) ** (1 / alpha),
self.mode = tt.switch(alpha >= 1,
beta * ((alpha - 1)/alpha) ** (1 / alpha),
0) # Reference: https://en.wikipedia.org/wiki/Weibull_distribution

assert_negative_support(alpha, 'alpha', 'Weibull')
Expand Down Expand Up @@ -1682,15 +1685,15 @@ class HalfStudentT(PositiveContinuous):
lam : float
Scale parameter (lam > 0). Converges to the precision as nu
increases. (only required if sd is not specified)

Examples
--------
.. code-block:: python

# Only pass in one of lam or sd, but not both.
with pm.Model():
x = pm.HalfStudentT('x', sd=10, nu=10)

with pm.Model():
x = pm.HalfStudentT('x', lam=4, nu=10)
"""
Expand Down Expand Up @@ -2198,6 +2201,59 @@ def _repr_latex_(self, name=None, dist=None):
get_variable_name(beta))


class Rice(Continuous):
    R"""
    Rice distribution.

    .. math::

       f(x\mid \nu ,\sigma )=
       {\frac {x}{\sigma ^{2}}}\exp
       \left({\frac {-(x^{2}+\nu ^{2})}{2\sigma ^{2}}}\right)I_{0}\left({\frac {x\nu }{\sigma ^{2}}}\right),

    ======== ==============================================================
    Support  :math:`x \in (0, +\infty)`
    Mean     :math:`\sigma {\sqrt {\pi /2}}\,\,L_{{1/2}}(-\nu ^{2}/2\sigma ^{2})`
    Variance :math:`2\sigma ^{2}+\nu ^{2}-{\frac {\pi \sigma ^{2}}{2}}L_{{1/2}}^{2}
             \left({\frac {-\nu ^{2}}{2\sigma ^{2}}}\right)`
    ======== ==============================================================

    Parameters
    ----------
    nu : float
        Shape parameter (nu >= 0).

        NOTE(review): ``logp`` and ``random`` use ``nu`` as the
        *dimensionless* shape ``b`` of ``scipy.stats.rice``
        (i.e. ``b = nu_wikipedia / sd``), not the dimensional
        :math:`\nu` of the formula above -- confirm before relying on the
        displayed math.
    sd : float
        Scale parameter (sd > 0).
    """

    def __init__(self, nu=None, sd=None, *args, **kwargs):
        super(Rice, self).__init__(*args, **kwargs)
        self.nu = nu = tt.as_tensor_variable(nu)
        self.sd = sd = tt.as_tensor_variable(sd)
        # Closed-form moments of scipy.stats.rice(b=nu, loc=0, scale=sd),
        # i.e. the same parameterization that logp() and random() use.
        # Both moments go through the Laguerre polynomial of order 1/2
        # evaluated at x = -b**2 / 2:
        #   L_{1/2}(x) = exp(x/2) * ((1 - x) * I0(-x/2) - x * I1(-x/2))
        # (Previously the Laguerre argument was -nu**2 / (2 * sd**2) and the
        # variance used nu**2 instead of nu**2 * sd**2, which matches the
        # Wikipedia formulas only if nu were dimensional -- inconsistent with
        # logp/random, which treat nu as the dimensionless shape b.)
        x = -nu**2 / 2.
        laguerre_half = tt.exp(x / 2.) * ((1. - x) * i0(-x / 2.)
                                          - x * i1(-x / 2.))
        self.mean = sd * np.sqrt(np.pi / 2.) * laguerre_half
        self.variance = (2. * sd**2 + nu**2 * sd**2
                         - (np.pi * sd**2 / 2.) * laguerre_half**2)

    def random(self, point=None, size=None, repeat=None):
        """Draw samples via ``scipy.stats.rice.rvs(b=nu, loc=0, scale=sd)``."""
        nu, sd = draw_values([self.nu, self.sd],
                             point=point)
        return generate_samples(stats.rice.rvs, b=nu, scale=sd, loc=0,
                                dist_shape=self.shape, size=size)

    def logp(self, value):
        """
        Log-density of ``value``; agrees with
        ``scipy.stats.rice.logpdf(value, b=nu, loc=0, scale=sd)``.
        """
        nu = self.nu
        sd = self.sd
        x = value / sd  # standardize to unit scale
        # i0e(z) = exp(-|z|) * I0(z); for x, nu >= 0 the -(x - nu)**2 / 2
        # term combined with the implicit -x*nu reproduces
        # exp(-(x**2 + nu**2) / 2) * I0(x * nu) without overflowing I0.
        return bound(tt.log(x * tt.exp((-(x - nu) * (x - nu)) / 2) * i0e(x * nu) / sd),
                     sd >= 0,
                     nu >= 0,
                     value > 0,
                     )


class Logistic(Continuous):
R"""
Logistic log-likelihood.
Expand Down
14 changes: 14 additions & 0 deletions pymc3/distributions/dist_math.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
import scipy.linalg
import theano.tensor as tt
import theano
from theano.scalar import UnaryScalarOp, upgrade_to_float

from .special import gammaln
from pymc3.theanof import floatX
Expand Down Expand Up @@ -461,3 +462,16 @@ def infer_shape(self, node, shapes):
def eigh(a, UPLO='L'):
"""A copy, remove with Eigh and EighGrad when possible"""
return Eigh(UPLO)(a)


class I1e(UnaryScalarOp):
    """
    Modified Bessel function of the first kind of order 1, exponentially scaled.
    """
    nfunc_spec = ('scipy.special.i1e', 1, 1)

    def impl(self, x):
        return scipy.special.i1e(x)


i1e = I1e(upgrade_to_float, name='i1e')


class I0e(UnaryScalarOp):
    """
    Modified Bessel function of the first kind of order 0, exponentially scaled.
    """
    nfunc_spec = ('scipy.special.i0e', 1, 1)

    def impl(self, x):
        return scipy.special.i0e(x)

    def grad(self, inp, grads):
        # d/dx i0e(x) = i1e(x) - sgn(x) * i0e(x), because
        # i0e(x) = exp(-|x|) * I0(x) and I0'(x) = I1(x).
        # Without a grad, gradient-based samplers differentiating through
        # i0e (e.g. in Rice.logp) would raise.
        x, = inp
        gz, = grads
        return gz * (i1e(x) - theano.scalar.sgn(x) * i0e(x)),


i0e = I0e(upgrade_to_float, name='i0e')
6 changes: 5 additions & 1 deletion pymc3/tests/test_distributions.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@
Flat, LKJCorr, Wald, ChiSquared, HalfNormal, DiscreteUniform,
Bound, Uniform, Triangular, Binomial, SkewNormal, DiscreteWeibull,
Gumbel, Logistic, OrderedLogistic, LogitNormal, Interpolated,
ZeroInflatedBinomial, HalfFlat, AR1, KroneckerNormal)
ZeroInflatedBinomial, HalfFlat, AR1, KroneckerNormal, Rice)

from ..distributions import continuous
from pymc3.theanof import floatX
Expand Down Expand Up @@ -1056,6 +1056,10 @@ def test_multidimensional_beta_construction(self):
with Model():
Beta('beta', alpha=1., beta=1., shape=(10, 20))

def test_rice(self):
    # Reference density: scipy's Rice with shape b=nu, location 0, scale sd.
    def scipy_rice_logpdf(value, nu, sd):
        return sp.rice.logpdf(value, b=nu, loc=0, scale=sd)

    self.pymc3_matches_scipy(Rice, Rplus, {'nu': Rplus, 'sd': Rplusbig},
                             scipy_rice_logpdf)

@pytest.mark.xfail(condition=(theano.config.floatX == "float32"), reason="Fails on float32")
def test_interpolated(self):
for mu in R.vals:
Expand Down