diff --git a/pymc3/examples/GHME_2013.py b/pymc3/examples/GHME_2013.py
index 67565477bf..bb1e57c7e0 100644
--- a/pymc3/examples/GHME_2013.py
+++ b/pymc3/examples/GHME_2013.py
@@ -58,7 +58,7 @@ def run(n=3000):
     if n == "short":
         n = 150
     with model:
-        trace = sample(n, tune=int(n/2))
+        trace = sample(n, tune=int(n/2), init='advi+adapt_diag')
 
     for i, country in enumerate(countries):
         plt.subplot(2, 3, i + 1)
diff --git a/pymc3/examples/arbitrary_stochastic.py b/pymc3/examples/arbitrary_stochastic.py
index 74383e2762..2d6e4fe1f2 100644
--- a/pymc3/examples/arbitrary_stochastic.py
+++ b/pymc3/examples/arbitrary_stochastic.py
@@ -3,19 +3,22 @@
 import theano.tensor as tt
 
 
+# custom log-likelihood
+def logp(failure, lam, value):
+    return tt.sum(failure * tt.log(lam) - lam * value)
+
+
 def build_model():
     # data
     failure = np.array([0., 1.])
     value = np.array([1., 0.])
-
-    # custom log-liklihood
-    def logp(failure, value):
-        return tt.sum(failure * tt.log(lam) - lam * value)
-
+
     # model
     with pm.Model() as model:
         lam = pm.Exponential('lam', 1.)
-        pm.DensityDist('x', logp, observed={'failure': failure, 'value': value})
+        pm.DensityDist('x', logp, observed={'failure': failure,
+                                            'lam': lam,
+                                            'value': value})
     return model
diff --git a/pymc3/examples/arma_example.py b/pymc3/examples/arma_example.py
index 4c784e1bed..55889bbf97 100644
--- a/pymc3/examples/arma_example.py
+++ b/pymc3/examples/arma_example.py
@@ -53,8 +53,8 @@ def build_model():
     y = shared(np.array([15, 10, 16, 11, 9, 11, 10, 18], dtype=np.float32))
 
     with pm.Model() as arma_model:
-        sigma = pm.HalfCauchy('sigma', 5.)
-        theta = pm.Normal('theta', 0., sd=2.)
+        sigma = pm.HalfNormal('sigma', 5.)
+        theta = pm.Normal('theta', 0., sd=1.)
         phi = pm.Normal('phi', 0., sd=2.)
         mu = pm.Normal('mu', 0., sd=10.)
 
@@ -76,11 +76,12 @@ def calc_next(last_y, this_y, err, mu, phi, theta):
 def run(n_samples=1000):
     model = build_model()
     with model:
-        trace = pm.sample(draws=n_samples)
+        trace = pm.sample(draws=n_samples,
+                          tune=1000,
+                          nuts_kwargs=dict(target_accept=.99))
 
-    burn = n_samples // 10
-    pm.plots.traceplot(trace[burn:])
-    pm.plots.forestplot(trace[burn:])
+    pm.plots.traceplot(trace)
+    pm.plots.forestplot(trace)
 
 
 if __name__ == '__main__':
diff --git a/pymc3/examples/baseball.py b/pymc3/examples/baseball.py
index c924b7f308..89eefa9ce8 100644
--- a/pymc3/examples/baseball.py
+++ b/pymc3/examples/baseball.py
@@ -28,11 +28,9 @@ def build_model():
 def run(n=2000):
     model = build_model()
     with model:
-        # initialize NUTS() with ADVI under the hood
         trace = pm.sample(n, nuts_kwargs={'target_accept':.99})
-        # drop some first samples as burnin
-        pm.traceplot(trace[1000:])
+        pm.traceplot(trace)
 
 
 if __name__ == '__main__':
     run()
diff --git a/pymc3/examples/garch_example.py b/pymc3/examples/garch_example.py
index f5576af71a..d530e76011 100644
--- a/pymc3/examples/garch_example.py
+++ b/pymc3/examples/garch_example.py
@@ -1,4 +1,4 @@
-from pymc3 import Normal, sample, Model, Bound, summary
+from pymc3 import Normal, sample, Model, Uniform, summary
 import theano.tensor as tt
 import numpy as np
 
@@ -32,16 +32,15 @@
 def get_garch_model():
-    r = np.array([28, 8, -3, 7, -1, 1, 18, 12])
-    sigma1 = np.array([15, 10, 16, 11, 9, 11, 10, 18])
-    alpha0 = np.array([10, 10, 16, 8, 9, 11, 12, 18])
+    r = np.array([28, 8, -3, 7, -1, 1, 18, 12], dtype=np.float64)
+    sigma1 = np.array([15, 10, 16, 11, 9, 11, 10, 18], dtype=np.float64)
+    alpha0 = np.array([10, 10, 16, 8, 9, 11, 12, 18], dtype=np.float64)
     shape = r.shape
     with Model() as garch:
-        alpha1 = Normal('alpha1', mu=0., sd=1., shape=shape)
-        BoundedNormal = Bound(Normal, upper=(1 - alpha1))
-        beta1 = BoundedNormal('beta1', mu=0., sd=1e6, shape=shape)
-        mu = Normal('mu', mu=0., sd=1e6, shape=shape)
+        alpha1 = Uniform('alpha1', 0., 1., shape=shape)
+        beta1 = Uniform('beta1', 0., 1 - alpha1, shape=shape)
+        mu = Normal('mu', mu=0., sd=100., shape=shape)
         theta = tt.sqrt(alpha0 + alpha1 * tt.pow(r - mu, 2) +
                         beta1 * tt.pow(sigma1, 2))
         Normal('obs', mu, sd=theta, observed=r)
@@ -52,7 +51,7 @@ def run(n=1000):
     if n == "short":
         n = 50
     with get_garch_model():
-        tr = sample(n, init=None)
+        tr = sample(n, tune=1000)
     return tr
diff --git a/pymc3/examples/gelman_bioassay.py b/pymc3/examples/gelman_bioassay.py
index c3ef2a4009..69d7ca1118 100644
--- a/pymc3/examples/gelman_bioassay.py
+++ b/pymc3/examples/gelman_bioassay.py
@@ -9,8 +9,8 @@
 with pm.Model() as model:
     # Logit-linear model parameters
-    alpha = pm.Normal('alpha', 0, tau=0.01)
-    beta = pm.Normal('beta', 0, tau=0.01)
+    alpha = pm.Normal('alpha', 0, sd=100.)
+    beta = pm.Normal('beta', 0, sd=1.)
 
     # Calculate probabilities of death
     theta = pm.Deterministic('theta', pm.math.invlogit(alpha + beta * dose))
@@ -18,14 +18,12 @@
     # Data likelihood
     deaths = pm.Binomial('deaths', n=n, p=theta, observed=[0, 1, 3, 5])
 
-    step = pm.NUTS()
-
 
 def run(n=1000):
     if n == "short":
         n = 50
     with model:
-        pm.sample(n, step)
+        pm.sample(n, tune=1000)
 
 if __name__ == '__main__':
     run()
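Side note on the `arbitrary_stochastic.py` refactor: once `logp` lives at module level it can no longer close over `lam`, so the patch routes the random variable through the `observed` dict, which `DensityDist` forwards to `logp` as keyword arguments (a module-level function is also picklable, which is likely the motivation, since multiprocess sampling needs to pickle the model). A minimal self-contained sketch of the resulting pattern; the draw/tune counts are illustrative, not part of the patch:

```python
import numpy as np
import pymc3 as pm
import theano.tensor as tt


# Censored-exponential log-likelihood. DensityDist passes every key of
# the `observed` dict as a keyword argument, so the free RV `lam`
# arrives here explicitly instead of via closure.
def logp(failure, lam, value):
    return tt.sum(failure * tt.log(lam) - lam * value)


failure = np.array([0., 1.])  # 1 = event observed, 0 = censored
value = np.array([1., 0.])    # observed times

with pm.Model() as model:
    lam = pm.Exponential('lam', 1.)
    pm.DensityDist('x', logp, observed={'failure': failure,
                                        'lam': lam,
                                        'value': value})
    trace = pm.sample(1000, tune=1000)  # counts are illustrative
```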