[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
pre-commit-ci[bot] committed Jul 26, 2024
1 parent 1ab4a49 commit 69c4307
Showing 52 changed files with 284 additions and 244 deletions.
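Nearly every hunk below is one of two mechanical autofixes: a formatter-added trailing comma on the last item of a multi-line call, or a pyupgrade-style typing cleanup (builtin generics per PEP 585, `X | None` unions per PEP 604, and `typing` imports moved to `collections.abc`). A minimal before/after sketch of the pattern — the `summarize` function is illustrative only, not taken from this repository:

# Before: typing aliases deprecated since Python 3.9/3.10
from typing import Dict, Optional, Tuple

def summarize(values: Tuple[float, ...], weights: Optional[Dict[str, float]] = None) -> float:
    return sum(values)

# After: builtin generics (PEP 585) and `| None` unions (PEP 604);
# the formatter also adds a trailing comma to the last argument of
# any parameter list it keeps exploded across lines.
def summarize(
    values: tuple[float, ...],
    weights: dict[str, float] | None = None,
) -> float:
    return sum(values)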
4 changes: 2 additions & 2 deletions notebooks/SARMA Example.ipynb
@@ -1554,7 +1554,7 @@
 " hdi_forecast.coords[\"time\"].values,\n",
 " *hdi_forecast.isel(observed_state=0).values.T,\n",
 " alpha=0.25,\n",
-" color=\"tab:blue\"\n",
+" color=\"tab:blue\",\n",
 " )\n",
 "ax.set_title(\"Porcupine Graph of 10-Period Forecasts (parameters estimated on all data)\")\n",
 "plt.show()"
@@ -2692,7 +2692,7 @@
 " *forecast_hdi.values.T,\n",
 " label=\"Forecast 94% HDI\",\n",
 " color=\"tab:orange\",\n",
-" alpha=0.25\n",
+" alpha=0.25,\n",
 ")\n",
 "ax.legend()\n",
 "plt.show()"
6 changes: 3 additions & 3 deletions notebooks/Structural Timeseries Modeling.ipynb
@@ -1657,7 +1657,7 @@
 " nile.index,\n",
 " *component_hdi.smoothed_posterior.sel(state=state).values.T,\n",
 " color=\"tab:blue\",\n",
-" alpha=0.15\n",
+" alpha=0.15,\n",
 " )\n",
 " axis.set_title(state.title())"
 ]
@@ -1706,7 +1706,7 @@
 " *hdi.smoothed_posterior.sum(dim=\"state\").values.T,\n",
 " color=\"tab:blue\",\n",
 " alpha=0.15,\n",
-" label=\"HDI 94%\"\n",
+" label=\"HDI 94%\",\n",
 ")\n",
 "ax.legend()\n",
 "plt.show()"
@@ -2750,7 +2750,7 @@
 "ax.fill_between(\n",
 " blossom_data.index,\n",
 " *hdi_post.predicted_posterior_observed.isel(observed_state=0).values.T,\n",
-" alpha=0.25\n",
+" alpha=0.25,\n",
 ")\n",
 "blossom_data.plot(ax=ax)"
 ]
9 changes: 4 additions & 5 deletions pymc_experimental/distributions/continuous.py
@@ -19,10 +19,9 @@
 The imports from pymc are not fully replicated here: add imports as necessary.
 """
 
-from typing import Tuple, Union
-
 import numpy as np
 import pytensor.tensor as pt
+
 from pymc import ChiSquared, CustomDist
 from pymc.distributions import transforms
 from pymc.distributions.dist_math import check_parameters
@@ -39,19 +38,19 @@ class GenExtremeRV(RandomVariable):
     name: str = "Generalized Extreme Value"
     signature = "(),(),()->()"
     dtype: str = "floatX"
-    _print_name: Tuple[str, str] = ("Generalized Extreme Value", "\\operatorname{GEV}")
+    _print_name: tuple[str, str] = ("Generalized Extreme Value", "\\operatorname{GEV}")
 
     def __call__(self, mu=0.0, sigma=1.0, xi=0.0, size=None, **kwargs) -> TensorVariable:
         return super().__call__(mu, sigma, xi, size=size, **kwargs)
 
     @classmethod
     def rng_fn(
         cls,
-        rng: Union[np.random.RandomState, np.random.Generator],
+        rng: np.random.RandomState | np.random.Generator,
         mu: np.ndarray,
         sigma: np.ndarray,
         xi: np.ndarray,
-        size: Tuple[int, ...],
+        size: tuple[int, ...],
     ) -> np.ndarray:
         # Notice negative here, since remainder of GenExtreme is based on Coles parametrization
         return stats.genextreme.rvs(c=-xi, loc=mu, scale=sigma, random_state=rng, size=size)
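One caveat about the `Union[...]` → `X | Y` rewrites in this file and the ones below: the `|` operator on classes only exists on Python 3.10+ (`type.__or__`, PEP 604), so such annotations fail at import time on older interpreters unless annotation evaluation is deferred. A small sketch of the caveat — the `standardize` helper is hypothetical, not from this repository:

from __future__ import annotations  # PEP 563: annotations stay strings, not evaluated at def time

import numpy as np

# Without the __future__ import, the union below is evaluated when the
# function is defined and raises TypeError on Python < 3.10.
def standardize(rng: np.random.RandomState | np.random.Generator, n: int) -> np.ndarray:
    x = rng.standard_normal(n)  # method exists on both RandomState and Generator
    return (x - x.mean()) / x.std()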
1 change: 1 addition & 0 deletions pymc_experimental/distributions/discrete.py
@@ -14,6 +14,7 @@
 
 import numpy as np
 import pymc as pm
+
 from pymc.distributions.dist_math import betaln, check_parameters, factln, logpow
 from pymc.distributions.shape_utils import rv_size_is_none
 from pytensor import tensor as pt
7 changes: 3 additions & 4 deletions pymc_experimental/distributions/histogram_utils.py
@@ -13,18 +13,17 @@
 # limitations under the License.
 
 
-from typing import Dict
-
 import numpy as np
 import pymc as pm
+
 from numpy.typing import ArrayLike
 
 __all__ = ["quantile_histogram", "discrete_histogram", "histogram_approximation"]
 
 
 def quantile_histogram(
     data: ArrayLike, n_quantiles=1000, zero_inflation=False
-) -> Dict[str, ArrayLike]:
+) -> dict[str, ArrayLike]:
     try:
         import xhistogram.core
     except ImportError as e:
@@ -67,7 +66,7 @@ def quantile_histogram(
     return result
 
 
-def discrete_histogram(data: ArrayLike, min_count=None) -> Dict[str, ArrayLike]:
+def discrete_histogram(data: ArrayLike, min_count=None) -> dict[str, ArrayLike]:
     try:
         import xhistogram.core
     except ImportError as e:
34 changes: 17 additions & 17 deletions pymc_experimental/distributions/multivariate/r2d2m2cp.py
@@ -14,7 +14,7 @@
 
 
 from collections import namedtuple
-from typing import Sequence, Tuple, Union
+from collections.abc import Sequence
 
 import numpy as np
 import pymc as pm
@@ -26,8 +26,8 @@
 def _psivar2musigma(
     psi: pt.TensorVariable,
     explained_var: pt.TensorVariable,
-    psi_mask: Union[pt.TensorLike, None],
-) -> Tuple[pt.TensorVariable, pt.TensorVariable]:
+    psi_mask: pt.TensorLike | None,
+) -> tuple[pt.TensorVariable, pt.TensorVariable]:
     sign = pt.sign(psi - 0.5)
     if psi_mask is not None:
         # any computation might be ignored for ~psi_mask
@@ -55,7 +55,7 @@ def _R2D2M2CP_beta(
     psi: pt.TensorVariable,
     *,
     psi_mask,
-    dims: Union[str, Sequence[str]],
+    dims: str | Sequence[str],
     centered=False,
 ) -> pt.TensorVariable:
     """R2D2M2CP beta prior.
@@ -120,7 +120,7 @@ def _R2D2M2CP_beta(
 def _broadcast_as_dims(
     *values: np.ndarray,
     dims: Sequence[str],
-) -> Union[Tuple[np.ndarray, ...], np.ndarray]:
+) -> tuple[np.ndarray, ...] | np.ndarray:
     model = pm.modelcontext(None)
     shape = [len(model.coords[d]) for d in dims]
     ret = tuple(np.broadcast_to(v, shape) for v in values)
@@ -135,7 +135,7 @@ def _psi_masked(
     positive_probs_std: pt.TensorLike,
     *,
     dims: Sequence[str],
-) -> Tuple[Union[pt.TensorLike, None], pt.TensorVariable]:
+) -> tuple[pt.TensorLike | None, pt.TensorVariable]:
     if not (
         isinstance(positive_probs, pt.Constant) and isinstance(positive_probs_std, pt.Constant)
     ):
@@ -172,10 +172,10 @@ def _psi_masked(
 
 def _psi(
     positive_probs: pt.TensorLike,
-    positive_probs_std: Union[pt.TensorLike, None],
+    positive_probs_std: pt.TensorLike | None,
     *,
     dims: Sequence[str],
-) -> Tuple[Union[pt.TensorLike, None], pt.TensorVariable]:
+) -> tuple[pt.TensorLike | None, pt.TensorVariable]:
     if positive_probs_std is not None:
         mask, psi = _psi_masked(
             positive_probs=pt.as_tensor(positive_probs),
@@ -194,9 +194,9 @@ def _psi(
 
 
 def _phi(
-    variables_importance: Union[pt.TensorLike, None],
-    variance_explained: Union[pt.TensorLike, None],
-    importance_concentration: Union[pt.TensorLike, None],
+    variables_importance: pt.TensorLike | None,
+    variance_explained: pt.TensorLike | None,
+    importance_concentration: pt.TensorLike | None,
     *,
     dims: Sequence[str],
 ) -> pt.TensorVariable:
@@ -233,12 +233,12 @@ def R2D2M2CP(
     *,
     dims: Sequence[str],
     r2: pt.TensorLike,
-    variables_importance: Union[pt.TensorLike, None] = None,
-    variance_explained: Union[pt.TensorLike, None] = None,
-    importance_concentration: Union[pt.TensorLike, None] = None,
-    r2_std: Union[pt.TensorLike, None] = None,
-    positive_probs: Union[pt.TensorLike, None] = 0.5,
-    positive_probs_std: Union[pt.TensorLike, None] = None,
+    variables_importance: pt.TensorLike | None = None,
+    variance_explained: pt.TensorLike | None = None,
+    importance_concentration: pt.TensorLike | None = None,
+    r2_std: pt.TensorLike | None = None,
+    positive_probs: pt.TensorLike | None = 0.5,
+    positive_probs_std: pt.TensorLike | None = None,
     centered: bool = False,
 ) -> R2D2M2CPOut:
     """R2D2M2CP Prior.
4 changes: 2 additions & 2 deletions pymc_experimental/distributions/timeseries.py
@@ -1,10 +1,10 @@
 import warnings
-from typing import List, Union
 
 import numpy as np
 import pymc as pm
 import pytensor
 import pytensor.tensor as pt
+
 from pymc.distributions.dist_math import check_parameters
 from pymc.distributions.distribution import (
     Distribution,
@@ -26,7 +26,7 @@
 from pytensor.tensor.random.op import RandomVariable
 
 
-def _make_outputs_info(n_lags: int, init_dist: Distribution) -> List[Union[Distribution, dict]]:
+def _make_outputs_info(n_lags: int, init_dist: Distribution) -> list[Distribution | dict]:
     """
     Two cases are needed for outputs_info in the scans used by DiscreteMarkovRv. If n_lags = 1, we need to throw away
     the first dimension of init_dist_ or else markov_chain will have shape (steps, 1, *batch_size) instead of
12 changes: 7 additions & 5 deletions pymc_experimental/gp/latent_approx.py
@@ -12,11 +12,11 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 from functools import partial
-from typing import Optional
 
 import numpy as np
 import pymc as pm
 import pytensor.tensor as pt
+
 from pymc.gp.util import JITTER_DEFAULT, stabilize
 from pytensor.tensor.linalg import cholesky, solve_triangular
 
@@ -33,7 +33,7 @@ class ProjectedProcess(pm.gp.Latent):
     ## AKA: DTC
     def __init__(
         self,
-        n_inducing: Optional[int] = None,
+        n_inducing: int | None = None,
         *,
         mean_func=pm.gp.mean.Zero(),
         cov_func=pm.gp.cov.Constant(0.0),
@@ -59,20 +59,22 @@ def prior(
         self,
         name: str,
         X: np.ndarray,
-        X_inducing: Optional[np.ndarray] = None,
+        X_inducing: np.ndarray | None = None,
         jitter: float = JITTER_DEFAULT,
         **kwargs,
     ) -> np.ndarray:
         """
         Builds the GP prior with optional inducing points locations.
 
-        Parameters:
+        Parameters
+        ----------
         - name: Name for the GP variable.
         - X: Input data.
         - X_inducing: Optional. Inducing points for the GP.
         - jitter: Jitter to ensure numerical stability.
 
-        Returns:
+        Returns
+        -------
         - GP function
         """
         # Check if X is a numpy array
1 change: 0 additions & 1 deletion pymc_experimental/inference/fit.py
@@ -40,7 +40,6 @@ def fit(method, **kwargs):
         return fit_pathfinder(**kwargs)
 
     if method == "laplace":
-
         from pymc_experimental.inference.laplace import laplace
 
         return laplace(**kwargs)
8 changes: 4 additions & 4 deletions pymc_experimental/inference/laplace.py
@@ -13,13 +13,14 @@
 # limitations under the License.
 
 import warnings
+
 from collections.abc import Sequence
-from typing import Optional
 
 import arviz as az
 import numpy as np
 import pymc as pm
 import xarray as xr
+
 from arviz import dict_to_dataset
 from pymc.backends.arviz import (
     coords_and_dims_for_inferencedata,
@@ -33,9 +34,9 @@
 
 def laplace(
     vars: Sequence[Variable],
-    draws: Optional[int] = 1000,
+    draws: int | None = 1000,
     model=None,
-    random_seed: Optional[RandomSeed] = None,
+    random_seed: RandomSeed | None = None,
     progressbar=True,
 ):
     """
@@ -72,7 +73,6 @@ def laplace(
 
     Examples
     --------
-
     >>> import numpy as np
     >>> import pymc as pm
     >>> import arviz as az
4 changes: 2 additions & 2 deletions pymc_experimental/inference/pathfinder.py
@@ -14,13 +14,13 @@
 
 import collections
 import sys
-from typing import Optional
 
 import arviz as az
 import blackjax
 import jax
 import numpy as np
 import pymc as pm
+
 from packaging import version
 from pymc.backends.arviz import coords_and_dims_for_inferencedata
 from pymc.blocking import DictToArrayBijection, RaveledVars
@@ -63,7 +63,7 @@ def convert_flat_trace_to_idata(
 
 def fit_pathfinder(
     samples=1000,
-    random_seed: Optional[RandomSeed] = None,
+    random_seed: RandomSeed | None = None,
     postprocessing_backend="cpu",
     model=None,
     **pathfinder_kwargs,
12 changes: 8 additions & 4 deletions pymc_experimental/inference/smc/sampling.py
@@ -15,13 +15,16 @@
 import logging
 import time
 import warnings
-from typing import Callable, Dict, NamedTuple, Optional, cast
+
+from collections.abc import Callable
+from typing import NamedTuple, cast
 
 import arviz as az
 import blackjax
 import jax
 import jax.numpy as jnp
 import numpy as np
+
 from blackjax.smc.resampling import systematic
 from pymc import draw, modelcontext, to_inference_data
 from pymc.backends import NDArray
@@ -39,7 +42,7 @@ def sample_smc_blackjax(
     kernel: str = "HMC",
     target_essn: float = 0.5,
     num_mcmc_steps: int = 10,
-    inner_kernel_params: Optional[dict] = None,
+    inner_kernel_params: dict | None = None,
     model=None,
     iterations_to_diagnose: int = 100,
 ):
@@ -319,6 +322,7 @@ def add_to_inference_data(
 ):
     """
     Adds several SMC parameters into the az.InferenceData result
+
     Parameters
     ----------
     inference_data: arviz object to add attributes to.
@@ -389,7 +393,7 @@ def logp_fn_wrap(particles):
     return logp_fn_wrap
 
 
-def initialize_population(model, draws, random_seed) -> Dict[str, np.ndarray]:
+def initialize_population(model, draws, random_seed) -> dict[str, np.ndarray]:
     with warnings.catch_warnings():
         warnings.filterwarnings("ignore", category=UserWarning, message="The effect of Potentials")
 
@@ -405,7 +409,7 @@ def initialize_population(model, draws, random_seed) -> Dict[str, np.ndarray]:
     names = [model.rvs_to_values[rv].name for rv in model.free_RVs]
     dict_prior = {k: np.stack(v) for k, v in zip(names, prior_values)}
 
-    return cast(Dict[str, np.ndarray], dict_prior)
+    return cast(dict[str, np.ndarray], dict_prior)
 
 
 def var_map_from_model(model, initial_point) -> dict:
