2 changes: 1 addition & 1 deletion pymc3/examples/GHME_2013.py
@@ -58,7 +58,7 @@ def run(n=3000):
     if n == "short":
         n = 150
     with model:
-        trace = sample(n, tune=int(n/2))
+        trace = sample(n, tune=int(n/2), init='advi+adapt_diag')

     for i, country in enumerate(countries):
         plt.subplot(2, 3, i + 1)
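The only change above is the `init` argument to `sample`: `init='advi+adapt_diag'` runs ADVI first and uses its result as the starting point and diagonal mass-matrix estimate for the NUTS warm-up. A minimal sketch of the same call on a throwaway model (the toy data and model below are illustrative, not from this example):

import numpy as np
import pymc3 as pm

data = np.random.randn(100)
with pm.Model():
    mu = pm.Normal('mu', mu=0., sd=10.)
    pm.Normal('obs', mu=mu, sd=1., observed=data)
    # ADVI supplies the start point and diagonal mass matrix,
    # which NUTS then adapts further during the tuning draws
    trace = pm.sample(1000, tune=500, init='advi+adapt_diag')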
15 changes: 9 additions & 6 deletions pymc3/examples/arbitrary_stochastic.py
@@ -3,19 +3,22 @@
 import theano.tensor as tt


+# custom log-liklihood
+def logp(failure, lam, value):
+    return tt.sum(failure * tt.log(lam) - lam * value)
+
+
 def build_model():
     # data
     failure = np.array([0., 1.])
     value = np.array([1., 0.])

-    # custom log-liklihood
-    def logp(failure, value):
-        return tt.sum(failure * tt.log(lam) - lam * value)
-
     # model
     with pm.Model() as model:
         lam = pm.Exponential('lam', 1.)
-        pm.DensityDist('x', logp, observed={'failure': failure, 'value': value})
+        pm.DensityDist('x', logp, observed={'failure': failure,
+                                            'lam': lam,
+                                            'value': value})
     return model

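Because `logp` is now defined at module scope it no longer closes over `lam`, so `lam` has to travel through the `observed` dict: `pm.DensityDist` forwards each key of that dict to the logp callable as a keyword argument. A self-contained sketch of the same pattern (the function name is just an illustrative label):

import numpy as np
import pymc3 as pm
import theano.tensor as tt

def censored_exp_logp(failure, lam, value):
    # exponential log-likelihood with a 0/1 failure indicator
    return tt.sum(failure * tt.log(lam) - lam * value)

with pm.Model():
    lam = pm.Exponential('lam', 1.)
    # every key below arrives in censored_exp_logp as a keyword argument
    pm.DensityDist('x', censored_exp_logp,
                   observed={'failure': np.array([0., 1.]),
                             'lam': lam,
                             'value': np.array([1., 0.])})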
13 changes: 7 additions & 6 deletions pymc3/examples/arma_example.py
@@ -53,8 +53,8 @@
 def build_model():
     y = shared(np.array([15, 10, 16, 11, 9, 11, 10, 18], dtype=np.float32))
     with pm.Model() as arma_model:
-        sigma = pm.HalfCauchy('sigma', 5.)
-        theta = pm.Normal('theta', 0., sd=2.)
+        sigma = pm.HalfNormal('sigma', 5.)
+        theta = pm.Normal('theta', 0., sd=1.)
         phi = pm.Normal('phi', 0., sd=2.)
         mu = pm.Normal('mu', 0., sd=10.)

@@ -76,11 +76,12 @@ def calc_next(last_y, this_y, err, mu, phi, theta):
 def run(n_samples=1000):
     model = build_model()
     with model:
-        trace = pm.sample(draws=n_samples)
+        trace = pm.sample(draws=n_samples,
+                          tune=1000,
+                          nuts_kwargs=dict(target_accept=.99))

-    burn = n_samples // 10
-    pm.plots.traceplot(trace[burn:])
-    pm.plots.forestplot(trace[burn:])
+    pm.plots.traceplot(trace)
+    pm.plots.forestplot(trace)


 if __name__ == '__main__':
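Two things change in how this example samples: the warm-up is now explicit (`tune=1000`; tuning draws are discarded by default), and `nuts_kwargs` forwards extra settings such as `target_accept` to the NUTS step method, so the manual `trace[burn:]` slicing is no longer needed. A rough sketch of the same call on a throwaway model:

import pymc3 as pm

with pm.Model():
    x = pm.Normal('x', mu=0., sd=1.)
    trace = pm.sample(draws=1000,
                      tune=1000,  # warm-up draws, discarded by default
                      nuts_kwargs=dict(target_accept=.99))  # forwarded to NUTS

pm.plots.traceplot(trace)  # no manual burn-in slice needed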
4 changes: 1 addition & 3 deletions pymc3/examples/baseball.py
@@ -28,11 +28,9 @@ def build_model():
 def run(n=2000):
     model = build_model()
     with model:
-        # initialize NUTS() with ADVI under the hood
         trace = pm.sample(n, nuts_kwargs={'target_accept':.99})

-        # drop some first samples as burnin
-        pm.traceplot(trace[1000:])
+        pm.traceplot(trace)

 if __name__ == '__main__':
     run()
17 changes: 8 additions & 9 deletions pymc3/examples/garch_example.py
@@ -1,4 +1,4 @@
-from pymc3 import Normal, sample, Model, Bound, summary
+from pymc3 import Normal, sample, Model, Uniform, summary
 import theano.tensor as tt
 import numpy as np

@@ -32,16 +32,15 @@


 def get_garch_model():
-    r = np.array([28, 8, -3, 7, -1, 1, 18, 12])
-    sigma1 = np.array([15, 10, 16, 11, 9, 11, 10, 18])
-    alpha0 = np.array([10, 10, 16, 8, 9, 11, 12, 18])
+    r = np.array([28, 8, -3, 7, -1, 1, 18, 12], dtype=np.float64)
+    sigma1 = np.array([15, 10, 16, 11, 9, 11, 10, 18], dtype=np.float64)
+    alpha0 = np.array([10, 10, 16, 8, 9, 11, 12, 18], dtype=np.float64)
     shape = r.shape

     with Model() as garch:
-        alpha1 = Normal('alpha1', mu=0., sd=1., shape=shape)
-        BoundedNormal = Bound(Normal, upper=(1 - alpha1))
-        beta1 = BoundedNormal('beta1', mu=0., sd=1e6, shape=shape)
-        mu = Normal('mu', mu=0., sd=1e6, shape=shape)
+        alpha1 = Uniform('alpha1', 0., 1., shape=shape)
+        beta1 = Uniform('beta1', 0., 1 - alpha1, shape=shape)
+        mu = Normal('mu', mu=0., sd=100., shape=shape)
         theta = tt.sqrt(alpha0 + alpha1 * tt.pow(r - mu, 2) +
                         beta1 * tt.pow(sigma1, 2))
         Normal('obs', mu, sd=theta, observed=r)
@@ -52,7 +51,7 @@ def run(n=1000):
     if n == "short":
         n = 50
     with get_garch_model():
-        tr = sample(n, init=None)
+        tr = sample(n, tune=1000)
     return tr

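The prior change replaces the `Bound(Normal, ...)` construction with `Uniform` priors whose support already encodes the GARCH(1,1) stationarity constraint alpha1 + beta1 < 1. A standalone sketch of just that constraint (toy model, no data or shapes):

import pymc3 as pm

with pm.Model():
    alpha1 = pm.Uniform('alpha1', 0., 1.)
    # beta1's upper bound depends on alpha1, so alpha1 + beta1 < 1 by construction
    beta1 = pm.Uniform('beta1', 0., 1. - alpha1)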
8 changes: 3 additions & 5 deletions pymc3/examples/gelman_bioassay.py
@@ -9,23 +9,21 @@
 with pm.Model() as model:

     # Logit-linear model parameters
-    alpha = pm.Normal('alpha', 0, tau=0.01)
-    beta = pm.Normal('beta', 0, tau=0.01)
+    alpha = pm.Normal('alpha', 0, sd=100.)
+    beta = pm.Normal('beta', 0, sd=1.)

     # Calculate probabilities of death
     theta = pm.Deterministic('theta', pm.math.invlogit(alpha + beta * dose))

     # Data likelihood
     deaths = pm.Binomial('deaths', n=n, p=theta, observed=[0, 1, 3, 5])

-    step = pm.NUTS()
-

 def run(n=1000):
     if n == "short":
         n = 50
     with model:
-        pm.sample(n, step)
+        pm.sample(n, tune=1000)

 if __name__ == '__main__':
     run()
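Two notes on this last hunk. `pm.Normal` accepts either a standard deviation `sd` or a precision `tau`, related by tau = 1/sd², so the old `tau=0.01` corresponds to `sd=10.`; the new `sd=100.` and `sd=1.` values therefore also rescale the priors rather than merely reparametrize them. And with the explicit `step = pm.NUTS()` removed, `pm.sample` assigns a NUTS step itself for these continuous parameters. A quick sketch of the parametrization equivalence:

import pymc3 as pm

with pm.Model():
    a = pm.Normal('a', mu=0., tau=0.01)  # precision parametrization
    b = pm.Normal('b', mu=0., sd=10.)    # same prior: sd = 1 / sqrt(tau)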