Skip to content

Commit 5cbcff6

Browse files
authored
Merge branch 'main' into grw
2 parents a659e56 + 144b0ba commit 5cbcff6

Some content is hidden

Large commits have some content hidden by default. Use the search box below for content that may be hidden.

42 files changed

+517
-913
lines changed

.github/workflows/arviz_compat.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -82,7 +82,7 @@ jobs:
8282
run: |
8383
conda activate pymc-test-py39
8484
pip uninstall arviz -y
85-
pip install git+git://github.com/arviz-devs/arviz.git
85+
pip install git+https://github.com/arviz-devs/arviz
8686
- name: Run tests
8787
run: |
8888
python -m pytest -vv --cov=pymc --cov-report=xml --cov-report term --durations=50 $TEST_SUBSET

.pre-commit-config.yaml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,7 @@ repos:
1414
exclude: ^requirements-dev\.txt$
1515
- id: trailing-whitespace
1616
- repo: https://github.com/pre-commit/mirrors-mypy
17-
rev: v0.931
17+
rev: v0.940
1818
hooks:
1919
- id: mypy
2020
name: Run static type checks
@@ -37,7 +37,7 @@ repos:
3737
- id: isort
3838
name: isort
3939
- repo: https://github.com/asottile/pyupgrade
40-
rev: v2.31.0
40+
rev: v2.31.1
4141
hooks:
4242
- id: pyupgrade
4343
args: [--py37-plus]

RELEASE-NOTES.md

Lines changed: 7 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -7,8 +7,8 @@ Instead update the vNext section until 4.0.0 is out.
77
⚠⚠⚠⚠⚠⚠⚠⚠⚠⚠⚠⚠⚠⚠⚠⚠⚠⚠⚠⚠⚠⚠⚠⚠⚠⚠⚠⚠⚠⚠⚠⚠⚠⚠⚠⚠
88
-->
99

10-
## PyMC vNext (4.0.0b1 → 4.0.0b2 → 4.0.0b3 → 4.0.0b4 → 4.0.0)
11-
⚠ The changes below are the delta between the upcoming releases `v3.11.5` →...→ `v4.0.0`.
10+
## PyMC vNext (4.0.0b1 → ... → 4.0.0b4 → 4.0.0)
11+
⚠ The changes below are the delta between the releases `v3.11.5` →...→ `v4.0.0`.
1212

1313
### Not-yet working features
1414
We plan to get these working again, but at this point their inner workings have not been refactored.
@@ -74,6 +74,7 @@ All of the above apply to:
7474
- The function `replace_with_values` function has been added to `gp.utils`.
7575
- `MarginalSparse` has been renamed `MarginalApprox`.
7676
- Removed `MixtureSameFamily`. `Mixture` is now capable of handling batched multivariate components (see [#5438](https://github.com/pymc-devs/pymc/pull/5438)).
77+
- `ZeroInflatedPoisson` `theta` parameter was renamed to `mu` (see [#5584](https://github.com/pymc-devs/pymc/pull/5584)).
7778
- ...
7879

7980
### Expected breaks
@@ -96,6 +97,7 @@ All of the above apply to:
9697
This includes API changes we did not warn about since at least `3.11.0` (2021-01).
9798

9899
- Setting initial values through `pm.Distribution(testval=...)` is now `pm.Distribution(initval=...)`.
100+
- Alternative `sd` keyword argument has been removed from all distributions. `sigma` should be used instead (see [#5583](https://github.com/pymc-devs/pymc/pull/5583)).
99101

100102

101103
### New features
@@ -127,6 +129,9 @@ This includes API changes we did not warn about since at least `3.11.0` (2021-01
127129
- Univariate censored distributions are now available via `pm.Censored`. [#5169](https://github.com/pymc-devs/pymc/pull/5169)
128130
- Nested models now inherit the parent model's coordinates. [#5344](https://github.com/pymc-devs/pymc/pull/5344)
129131
- `softmax` and `log_softmax` functions added to `math` module (see [#5279](https://github.com/pymc-devs/pymc/pull/5279)).
132+
- Adding support for blackjax's NUTS sampler `pymc.sampling_jax` (see [#5477](https://github.com/pymc-devs/pymc/pull/5477))
133+
- `pymc.sampling_jax` samplers support `log_likelihood`, `observed_data`, and `sample_stats` in returned InferenceData object (see [#5189](https://github.com/pymc-devs/pymc/pull/5189))
134+
- Adding support for `pm.Deterministic` in `pymc.sampling_jax` (see [#5182](https://github.com/pymc-devs/pymc/pull/5182))
130135
- ...
131136

132137

benchmarks/benchmarks/benchmarks.py

Lines changed: 10 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -32,17 +32,17 @@ def glm_hierarchical_model(random_seed=123):
3232

3333
n_counties = len(data.county.unique())
3434
with pm.Model() as model:
35-
mu_a = pm.Normal("mu_a", mu=0.0, sd=100**2)
35+
mu_a = pm.Normal("mu_a", mu=0.0, sigma=100**2)
3636
sigma_a = pm.HalfCauchy("sigma_a", 5)
37-
mu_b = pm.Normal("mu_b", mu=0.0, sd=100**2)
37+
mu_b = pm.Normal("mu_b", mu=0.0, sigma=100**2)
3838
sigma_b = pm.HalfCauchy("sigma_b", 5)
39-
a = pm.Normal("a", mu=0, sd=1, shape=n_counties)
40-
b = pm.Normal("b", mu=0, sd=1, shape=n_counties)
39+
a = pm.Normal("a", mu=0, sigma=1, shape=n_counties)
40+
b = pm.Normal("b", mu=0, sigma=1, shape=n_counties)
4141
a = mu_a + sigma_a * a
4242
b = mu_b + sigma_b * b
4343
eps = pm.HalfCauchy("eps", 5)
4444
radon_est = a[county_idx] + b[county_idx] * data.floor.values
45-
pm.Normal("radon_like", mu=radon_est, sd=eps, observed=data.log_radon)
45+
pm.Normal("radon_like", mu=radon_est, sigma=eps, observed=data.log_radon)
4646
return model
4747

4848

@@ -58,7 +58,7 @@ def mixture_model(random_seed=1234):
5858

5959
with pm.Model() as model:
6060
w = pm.Dirichlet("w", a=np.ones_like(w_true))
61-
mu = pm.Normal("mu", mu=0.0, sd=10.0, shape=w_true.shape)
61+
mu = pm.Normal("mu", mu=0.0, sigma=10.0, shape=w_true.shape)
6262
enforce_order = pm.Potential(
6363
"enforce_order",
6464
at.switch(mu[0] - mu[1] <= 0, 0.0, -np.inf)
@@ -88,7 +88,7 @@ class OverheadSuite:
8888
def setup(self, step):
8989
self.n_steps = 10000
9090
with pm.Model() as self.model:
91-
pm.Normal("x", mu=0, sd=1)
91+
pm.Normal("x", mu=0, sigma=1)
9292

9393
def time_overhead_sample(self, step):
9494
with self.model:
@@ -133,8 +133,8 @@ def time_drug_evaluation(self):
133133
sigma_low = 1
134134
sigma_high = 10
135135
with pm.Model():
136-
group1_mean = pm.Normal("group1_mean", y_mean, sd=y_std)
137-
group2_mean = pm.Normal("group2_mean", y_mean, sd=y_std)
136+
group1_mean = pm.Normal("group1_mean", y_mean, sigma=y_std)
137+
group2_mean = pm.Normal("group2_mean", y_mean, sigma=y_std)
138138
group1_std = pm.Uniform("group1_std", lower=sigma_low, upper=sigma_high)
139139
group2_std = pm.Uniform("group2_std", lower=sigma_low, upper=sigma_high)
140140
lambda_1 = group1_std**-2
@@ -301,7 +301,7 @@ def freefall(y, t, p):
301301
# If we know one of the parameter values, we can simply pass the value.
302302
ode_solution = ode_model(y0=[0], theta=[gamma, 9.8])
303303
# The ode_solution has a shape of (n_times, n_states)
304-
Y = pm.Normal("Y", mu=ode_solution, sd=sigma, observed=y)
304+
Y = pm.Normal("Y", mu=ode_solution, sigma=sigma, observed=y)
305305

306306
t0 = time.time()
307307
idata = pm.sample(500, tune=1000, chains=2, cores=2, random_seed=0)

docs/source/PyMC_and_Aesara.rst

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -188,8 +188,8 @@ example::
188188

189189
with pm.Model() as model:
190190
mu = pm.Normal('mu', 0, 1)
191-
sd = pm.HalfNormal('sd', 1)
192-
y = pm.Normal('y', mu=mu, sigma=sd, observed=data)
191+
sigma = pm.HalfNormal('sigma', 1)
192+
y = pm.Normal('y', mu=mu, sigma=sigma, observed=data)
193193

194194
is roughly equivalent to this::
195195

@@ -203,10 +203,10 @@ is roughly equivalent to this::
203203
model.add_free_variable(sd_log__)
204204
model.add_logp_term(corrected_logp_half_normal(sd_log__))
205205

206-
sd = at.exp(sd_log__)
207-
model.add_deterministic_variable(sd)
206+
sigma = at.exp(sd_log__)
207+
model.add_deterministic_variable(sigma)
208208

209-
model.add_logp_term(pm.Normal.dist(mu, sd).logp(data))
209+
model.add_logp_term(pm.Normal.dist(mu, sigma).logp(data))
210210

211211
The return values of the variable constructors are subclasses
212212
of Aesara variables, so when we define a variable we can use any
@@ -217,5 +217,5 @@ Aesara operation on them::
217217
# beta is a at.dvector
218218
beta = pm.Normal('beta', 0, 1, shape=len(design_matrix))
219219
predict = at.dot(design_matrix, beta)
220-
sd = pm.HalfCauchy('sd', beta=2.5)
221-
pm.Normal('y', mu=predict, sigma=sd, observed=data)
220+
sigma = pm.HalfCauchy('sigma', beta=2.5)
221+
pm.Normal('y', mu=predict, sigma=sigma, observed=data)

docs/source/contributing/developer_guide.rst

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -888,8 +888,8 @@ others. The challenge and some summary of the solution could be found in Luciano
888888
889889
with pm.Model() as m:
890890
mu = pm.Normal('mu', 0., 1., shape=(5, 1))
891-
sd = pm.HalfNormal('sd', 5., shape=(1, 10))
892-
pm.Normal('x', mu=mu, sigma=sd, observed=np.random.randn(2, 5, 10))
891+
sigma = pm.HalfNormal('sigma', 5., shape=(1, 10))
892+
pm.Normal('x', mu=mu, sigma=sigma, observed=np.random.randn(2, 5, 10))
893893
trace = pm.sample_prior_predictive(100)
894894
895895
trace['x'].shape # ==> should be (100, 2, 5, 10)

0 commit comments

Comments
 (0)