
Commit

Reset tuned Metropolis parameters in sequential sampling of chains (#3796)

* add regression test for #3733

* implement reset_tuning on Metropolis
closes #3733

* mention fix of #3733
michaelosthege authored Feb 6, 2020
1 parent bb574a7 commit 812e60e
Showing 3 changed files with 35 additions and 0 deletions.
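
For context: with `cores=1`, PyMC3 samples the requested chains sequentially and reuses the same `Metropolis` instance, so the `scaling` tuned during one chain used to leak into the start of the next. Below is a minimal sketch of the behaviour this commit restores, assuming a PyMC3 build that contains the fix and assuming the sequential sampling path calls the new `reset_tuning()` (that call site is not part of this diff); it mirrors the regression test added further down.

import pymc3 as pm

with pm.Model():
    pm.Normal("n", 0, 2, shape=3)
    trace = pm.sample(
        tune=600,
        draws=500,
        step=pm.Metropolis(tune=True, scaling=0.1),
        cores=1,                      # forces sequential sampling of the chains
        chains=3,
        discard_tuned_samples=False,  # keep the tuning phase in the trace
    )

# With the fix, every chain starts from the untuned scaling of 0.1 again
# instead of inheriting the tuned value from the previous chain.
for c in range(trace.nchains):
    print(trace.get_sampler_stats("scaling", chains=c)[0])
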
1 change: 1 addition & 0 deletions RELEASE-NOTES.md
@@ -10,6 +10,7 @@

### Maintenance
- Remove `sample_ppc` and `sample_ppc_w` that were deprecated in 3.6.
- Tuning results no longer leak into sequentially sampled `Metropolis` chains (see #3733 and #3796).

## PyMC3 3.8 (November 29 2019)

13 changes: 13 additions & 0 deletions pymc3/step_methods/metropolis.py
@@ -146,12 +146,25 @@ def __init__(self, vars=None, S=None, proposal_dist=None, scaling=1.,
self.any_discrete = self.discrete.any()
self.all_discrete = self.discrete.all()

# remember initial settings before tuning so they can be reset
self._untuned_settings = dict(
scaling=self.scaling,
steps_until_tune=tune_interval,
accepted=self.accepted
)

self.mode = mode

shared = pm.make_shared_replacements(vars, model)
self.delta_logp = delta_logp(model.logpt, vars, shared)
super().__init__(vars, shared)

def reset_tuning(self):
"""Resets the tuned sampler parameters to their initial values."""
for attr, initial_value in self._untuned_settings.items():
setattr(self, attr, initial_value)
return

def astep(self, q0):
if not self.steps_until_tune and self.tune:
# Tune scaling parameter
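
A small, illustrative demonstration of the new method in isolation. Attribute names follow the diff above; the exact internal representation of `scaling` may differ (e.g. it may be stored as a length-1 array), so treat the printed values as approximate. This is a sketch, not part of the commit.

import pymc3 as pm

with pm.Model():
    pm.Normal("x", 0.0, 1.0)
    step = pm.Metropolis(scaling=0.1, tune_interval=100)

# Pretend a tuning phase ran by overwriting the attributes that tuning adjusts.
step.scaling = step.scaling * 25
step.steps_until_tune = 0
step.accepted = 42

step.reset_tuning()

print(step.scaling)           # back to the initial 0.1
print(step.steps_until_tune)  # back to tune_interval (100)
print(step.accepted)          # back to 0
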
21 changes: 21 additions & 0 deletions pymc3/tests/test_step.py
@@ -779,6 +779,27 @@ def test_parallelized_chains_are_random(self):
pass


class TestMetropolis:
def test_tuning_reset(self):
"""Re-use of the step method instance with cores=1 must not leak tuning information between chains."""
with Model() as pmodel:
D = 3
Normal('n', 0, 2, shape=(D,))
trace = sample(
tune=600,
draws=500,
step=Metropolis(tune=True, scaling=0.1),
cores=1,
chains=3,
discard_tuned_samples=False
)
for c in range(trace.nchains):
# check that the tuned settings changed and were reset
assert trace.get_sampler_stats('scaling', chains=c)[0] == 0.1
assert trace.get_sampler_stats('scaling', chains=c)[-1] != 0.1
pass


class TestDEMetropolisZ:
def test_tuning_lambda_sequential(self):
with Model() as pmodel:
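
One way to run just the new regression test locally (assuming pytest is available and the repository root is the working directory); a convenience sketch, not part of the commit:

import pytest

# Select only the new test via its node id (file::class::test).
pytest.main(["pymc3/tests/test_step.py::TestMetropolis::test_tuning_reset", "-v"])
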
