Skip to content

Update pre-commit #368

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 5 commits into from
Jul 28, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
37 changes: 9 additions & 28 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.5.0
rev: v4.6.0
hooks:
- id: check-merge-conflict
- id: check-toml
Expand All @@ -10,37 +10,18 @@ repos:
- id: no-commit-to-branch
args: [--branch, main]
- id: trailing-whitespace
- repo: https://github.com/PyCQA/isort
rev: 5.13.2

- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.5.5
hooks:
- id: isort
name: isort
- repo: https://github.com/asottile/pyupgrade
rev: v3.15.0
hooks:
- id: pyupgrade
args: [--py37-plus]
- repo: https://github.com/psf/black
rev: 24.1.1
hooks:
- id: black
- id: black-jupyter
- repo: https://github.com/PyCQA/pylint
rev: v3.0.3
hooks:
- id: pylint
args: [--rcfile=.pylintrc]
files: ^pymc_experimental/
- id: ruff
args: [ --fix, --unsafe-fixes, --exit-non-zero-on-fix ]
- id: ruff-format
types_or: [ python, pyi, jupyter ]

- repo: https://github.com/MarcoGorelli/madforhooks
rev: 0.4.1
hooks:
- id: no-print-statements
exclude: _version.py
files: ^pymc_experimental/
- repo: local
hooks:
- id: no-relative-imports
name: No relative imports
entry: from \.[\.\w]* import
types: [python]
language: pygrep
4 changes: 2 additions & 2 deletions notebooks/SARMA Example.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -1554,7 +1554,7 @@
" hdi_forecast.coords[\"time\"].values,\n",
" *hdi_forecast.isel(observed_state=0).values.T,\n",
" alpha=0.25,\n",
" color=\"tab:blue\"\n",
" color=\"tab:blue\",\n",
" )\n",
"ax.set_title(\"Porcupine Graph of 10-Period Forecasts (parameters estimated on all data)\")\n",
"plt.show()"
Expand Down Expand Up @@ -2692,7 +2692,7 @@
" *forecast_hdi.values.T,\n",
" label=\"Forecast 94% HDI\",\n",
" color=\"tab:orange\",\n",
" alpha=0.25\n",
" alpha=0.25,\n",
")\n",
"ax.legend()\n",
"plt.show()"
Expand Down
6 changes: 3 additions & 3 deletions notebooks/Structural Timeseries Modeling.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -1657,7 +1657,7 @@
" nile.index,\n",
" *component_hdi.smoothed_posterior.sel(state=state).values.T,\n",
" color=\"tab:blue\",\n",
" alpha=0.15\n",
" alpha=0.15,\n",
" )\n",
" axis.set_title(state.title())"
]
Expand Down Expand Up @@ -1706,7 +1706,7 @@
" *hdi.smoothed_posterior.sum(dim=\"state\").values.T,\n",
" color=\"tab:blue\",\n",
" alpha=0.15,\n",
" label=\"HDI 94%\"\n",
" label=\"HDI 94%\",\n",
")\n",
"ax.legend()\n",
"plt.show()"
Expand Down Expand Up @@ -2750,7 +2750,7 @@
"ax.fill_between(\n",
" blossom_data.index,\n",
" *hdi_post.predicted_posterior_observed.isel(observed_state=0).values.T,\n",
" alpha=0.25\n",
" alpha=0.25,\n",
")\n",
"blossom_data.plot(ax=ax)"
]
Expand Down
19 changes: 15 additions & 4 deletions pymc_experimental/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,10 @@
# limitations under the License.
import logging

from pymc_experimental import distributions, gp, statespace, utils
from pymc_experimental.inference.fit import fit
from pymc_experimental.model.marginal_model import MarginalModel
from pymc_experimental.model.model_api import as_model
from pymc_experimental.version import __version__

_log = logging.getLogger("pmx")
Expand All @@ -23,7 +27,14 @@
handler = logging.StreamHandler()
_log.addHandler(handler)

from pymc_experimental import distributions, gp, statespace, utils
from pymc_experimental.inference.fit import fit
from pymc_experimental.model.marginal_model import MarginalModel
from pymc_experimental.model.model_api import as_model

__all__ = [
"distributions",
"gp",
"statespace",
"utils",
"fit",
"MarginalModel",
"as_model",
"__version__",
]
9 changes: 4 additions & 5 deletions pymc_experimental/distributions/continuous.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,10 +19,9 @@
The imports from pymc are not fully replicated here: add imports as necessary.
"""

from typing import Tuple, Union

import numpy as np
import pytensor.tensor as pt

from pymc import ChiSquared, CustomDist
from pymc.distributions import transforms
from pymc.distributions.dist_math import check_parameters
Expand All @@ -39,19 +38,19 @@ class GenExtremeRV(RandomVariable):
name: str = "Generalized Extreme Value"
signature = "(),(),()->()"
dtype: str = "floatX"
_print_name: Tuple[str, str] = ("Generalized Extreme Value", "\\operatorname{GEV}")
_print_name: tuple[str, str] = ("Generalized Extreme Value", "\\operatorname{GEV}")

def __call__(self, mu=0.0, sigma=1.0, xi=0.0, size=None, **kwargs) -> TensorVariable:
return super().__call__(mu, sigma, xi, size=size, **kwargs)

@classmethod
def rng_fn(
cls,
rng: Union[np.random.RandomState, np.random.Generator],
rng: np.random.RandomState | np.random.Generator,
mu: np.ndarray,
sigma: np.ndarray,
xi: np.ndarray,
size: Tuple[int, ...],
size: tuple[int, ...],
) -> np.ndarray:
# Notice negative here, since remainder of GenExtreme is based on Coles parametrization
return stats.genextreme.rvs(c=-xi, loc=mu, scale=sigma, random_state=rng, size=size)
Expand Down
1 change: 1 addition & 0 deletions pymc_experimental/distributions/discrete.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@

import numpy as np
import pymc as pm

from pymc.distributions.dist_math import betaln, check_parameters, factln, logpow
from pymc.distributions.shape_utils import rv_size_is_none
from pytensor import tensor as pt
Expand Down
13 changes: 6 additions & 7 deletions pymc_experimental/distributions/histogram_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,18 +13,17 @@
# limitations under the License.


from typing import Dict

import numpy as np
import pymc as pm

from numpy.typing import ArrayLike

__all__ = ["quantile_histogram", "discrete_histogram", "histogram_approximation"]


def quantile_histogram(
data: ArrayLike, n_quantiles=1000, zero_inflation=False
) -> Dict[str, ArrayLike]:
) -> dict[str, ArrayLike]:
try:
import xhistogram.core
except ImportError as e:
Expand All @@ -34,7 +33,7 @@ def quantile_histogram(
import dask.dataframe
except ImportError:
dask = None
if dask and isinstance(data, (dask.dataframe.Series, dask.dataframe.DataFrame)):
if dask and isinstance(data, dask.dataframe.Series | dask.dataframe.DataFrame):
data = data.to_dask_array(lengths=True)
if zero_inflation:
zeros = (data == 0).sum(0)
Expand Down Expand Up @@ -67,7 +66,7 @@ def quantile_histogram(
return result


def discrete_histogram(data: ArrayLike, min_count=None) -> Dict[str, ArrayLike]:
def discrete_histogram(data: ArrayLike, min_count=None) -> dict[str, ArrayLike]:
try:
import xhistogram.core
except ImportError as e:
Expand All @@ -78,7 +77,7 @@ def discrete_histogram(data: ArrayLike, min_count=None) -> Dict[str, ArrayLike]:
except ImportError:
dask = None

if dask and isinstance(data, (dask.dataframe.Series, dask.dataframe.DataFrame)):
if dask and isinstance(data, dask.dataframe.Series | dask.dataframe.DataFrame):
data = data.to_dask_array(lengths=True)
mid, count_uniq = np.unique(data, return_counts=True)
if min_count is not None:
Expand Down Expand Up @@ -153,7 +152,7 @@ def histogram_approximation(name, dist, *, observed, **h_kwargs):
import dask.dataframe
except ImportError:
dask = None
if dask and isinstance(observed, (dask.dataframe.Series, dask.dataframe.DataFrame)):
if dask and isinstance(observed, dask.dataframe.Series | dask.dataframe.DataFrame):
observed = observed.to_dask_array(lengths=True)
if np.issubdtype(observed.dtype, np.integer):
histogram = discrete_histogram(observed, **h_kwargs)
Expand Down
2 changes: 2 additions & 0 deletions pymc_experimental/distributions/multivariate/__init__.py
Original file line number Diff line number Diff line change
@@ -1 +1,3 @@
from pymc_experimental.distributions.multivariate.r2d2m2cp import R2D2M2CP

__all__ = ["R2D2M2CP"]
42 changes: 21 additions & 21 deletions pymc_experimental/distributions/multivariate/r2d2m2cp.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@


from collections import namedtuple
from typing import Sequence, Tuple, Union
from collections.abc import Sequence

import numpy as np
import pymc as pm
Expand All @@ -26,8 +26,8 @@
def _psivar2musigma(
psi: pt.TensorVariable,
explained_var: pt.TensorVariable,
psi_mask: Union[pt.TensorLike, None],
) -> Tuple[pt.TensorVariable, pt.TensorVariable]:
psi_mask: pt.TensorLike | None,
) -> tuple[pt.TensorVariable, pt.TensorVariable]:
sign = pt.sign(psi - 0.5)
if psi_mask is not None:
# any computation might be ignored for ~psi_mask
Expand Down Expand Up @@ -55,7 +55,7 @@ def _R2D2M2CP_beta(
psi: pt.TensorVariable,
*,
psi_mask,
dims: Union[str, Sequence[str]],
dims: str | Sequence[str],
centered=False,
) -> pt.TensorVariable:
"""R2D2M2CP beta prior.
Expand Down Expand Up @@ -120,7 +120,7 @@ def _R2D2M2CP_beta(
def _broadcast_as_dims(
*values: np.ndarray,
dims: Sequence[str],
) -> Union[Tuple[np.ndarray, ...], np.ndarray]:
) -> tuple[np.ndarray, ...] | np.ndarray:
model = pm.modelcontext(None)
shape = [len(model.coords[d]) for d in dims]
ret = tuple(np.broadcast_to(v, shape) for v in values)
Expand All @@ -135,7 +135,7 @@ def _psi_masked(
positive_probs_std: pt.TensorLike,
*,
dims: Sequence[str],
) -> Tuple[Union[pt.TensorLike, None], pt.TensorVariable]:
) -> tuple[pt.TensorLike | None, pt.TensorVariable]:
if not (
isinstance(positive_probs, pt.Constant) and isinstance(positive_probs_std, pt.Constant)
):
Expand Down Expand Up @@ -172,10 +172,10 @@ def _psi_masked(

def _psi(
positive_probs: pt.TensorLike,
positive_probs_std: Union[pt.TensorLike, None],
positive_probs_std: pt.TensorLike | None,
*,
dims: Sequence[str],
) -> Tuple[Union[pt.TensorLike, None], pt.TensorVariable]:
) -> tuple[pt.TensorLike | None, pt.TensorVariable]:
if positive_probs_std is not None:
mask, psi = _psi_masked(
positive_probs=pt.as_tensor(positive_probs),
Expand All @@ -194,9 +194,9 @@ def _psi(


def _phi(
variables_importance: Union[pt.TensorLike, None],
variance_explained: Union[pt.TensorLike, None],
importance_concentration: Union[pt.TensorLike, None],
variables_importance: pt.TensorLike | None,
variance_explained: pt.TensorLike | None,
importance_concentration: pt.TensorLike | None,
*,
dims: Sequence[str],
) -> pt.TensorVariable:
Expand All @@ -210,15 +210,15 @@ def _phi(
variables_importance = pt.as_tensor(variables_importance)
if importance_concentration is not None:
variables_importance *= importance_concentration
return pm.Dirichlet("phi", variables_importance, dims=broadcast_dims + [dim])
return pm.Dirichlet("phi", variables_importance, dims=[*broadcast_dims, dim])
elif variance_explained is not None:
if len(model.coords[dim]) <= 1:
raise TypeError("Can't use variance explained with less than two variables")
phi = pt.as_tensor(variance_explained)
else:
phi = _broadcast_as_dims(1.0, dims=dims)
if importance_concentration is not None:
return pm.Dirichlet("phi", importance_concentration * phi, dims=broadcast_dims + [dim])
return pm.Dirichlet("phi", importance_concentration * phi, dims=[*broadcast_dims, dim])
else:
return phi

Expand All @@ -233,12 +233,12 @@ def R2D2M2CP(
*,
dims: Sequence[str],
r2: pt.TensorLike,
variables_importance: Union[pt.TensorLike, None] = None,
variance_explained: Union[pt.TensorLike, None] = None,
importance_concentration: Union[pt.TensorLike, None] = None,
r2_std: Union[pt.TensorLike, None] = None,
positive_probs: Union[pt.TensorLike, None] = 0.5,
positive_probs_std: Union[pt.TensorLike, None] = None,
variables_importance: pt.TensorLike | None = None,
variance_explained: pt.TensorLike | None = None,
importance_concentration: pt.TensorLike | None = None,
r2_std: pt.TensorLike | None = None,
positive_probs: pt.TensorLike | None = 0.5,
positive_probs_std: pt.TensorLike | None = None,
centered: bool = False,
) -> R2D2M2CPOut:
"""R2D2M2CP Prior.
Expand Down Expand Up @@ -413,7 +413,7 @@ def R2D2M2CP(
year = {2023}
}
"""
if not isinstance(dims, (list, tuple)):
if not isinstance(dims, list | tuple):
dims = (dims,)
*broadcast_dims, dim = dims
input_sigma = pt.as_tensor(input_sigma)
Expand All @@ -438,7 +438,7 @@ def R2D2M2CP(
r2,
phi,
psi,
dims=broadcast_dims + [dim],
dims=[*broadcast_dims, dim],
centered=centered,
psi_mask=mask,
)
Expand Down
6 changes: 3 additions & 3 deletions pymc_experimental/distributions/timeseries.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,10 @@
import warnings
from typing import List, Union

import numpy as np
import pymc as pm
import pytensor
import pytensor.tensor as pt

from pymc.distributions.dist_math import check_parameters
from pymc.distributions.distribution import (
Distribution,
Expand All @@ -26,7 +26,7 @@
from pytensor.tensor.random.op import RandomVariable


def _make_outputs_info(n_lags: int, init_dist: Distribution) -> List[Union[Distribution, dict]]:
def _make_outputs_info(n_lags: int, init_dist: Distribution) -> list[Distribution | dict]:
"""
Two cases are needed for outputs_info in the scans used by DiscreteMarkovRv. If n_lags = 1, we need to throw away
the first dimension of init_dist_ or else markov_chain will have shape (steps, 1, *batch_size) instead of
Expand Down Expand Up @@ -142,7 +142,7 @@ def dist(cls, P=None, logit_P=None, steps=None, init_dist=None, n_lags=1, **kwar

if init_dist is not None:
if not isinstance(init_dist, TensorVariable) or not isinstance(
init_dist.owner.op, (RandomVariable, SymbolicRandomVariable)
init_dist.owner.op, RandomVariable | SymbolicRandomVariable
):
raise ValueError(
f"Init dist must be a distribution created via the `.dist()` API, "
Expand Down
2 changes: 2 additions & 0 deletions pymc_experimental/gp/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,3 +14,5 @@


from pymc_experimental.gp.latent_approx import KarhunenLoeveExpansion, ProjectedProcess

__all__ = ["KarhunenLoeveExpansion", "ProjectedProcess"]
Loading
Loading