
Commit 255e8fc

Merge pull request #2225 from junpenglao/bugfixed_get_variable_name
fixed_ get_variable_name
2 parents: edddb2c + 93e87c1

6 files changed (+65, -47 lines)

pymc3/distributions/continuous.py

4 additions, 6 deletions

@@ -376,7 +376,7 @@ def __init__(self, mu=None, lam=None, phi=None, alpha=0., *args, **kwargs):
         self.alpha = alpha = tt.as_tensor_variable(alpha)
         self.mu = mu = tt.as_tensor_variable(mu)
         self.lam = lam = tt.as_tensor_variable(lam)
-        self.phi = phi =tt.as_tensor_variable(phi)
+        self.phi = phi = tt.as_tensor_variable(phi)

         self.mean = self.mu + self.alpha
         self.mode = self.mu * (tt.sqrt(1. + (1.5 * self.mu / self.lam)**2)
@@ -1501,11 +1501,9 @@ def __init__(self, lower=0, upper=1, c=0.5,
                  *args, **kwargs):
         super(Triangular, self).__init__(*args, **kwargs)

-        self.c = c
-        self.lower = lower
-        self.upper = upper
-        self.mean = c
-        self.median = self.mean
+        self.median = self.mean = self.c = c = tt.as_tensor_variable(c)
+        self.lower = lower = tt.as_tensor_variable(lower)
+        self.upper = upper = tt.as_tensor_variable(upper)

     def random(self, point=None, size=None):
         c, lower, upper = draw_values([self.c, self.lower, self.upper],
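The pattern used throughout this commit, self.x = x = tt.as_tensor_variable(x), wraps plain Python numbers in Theano constants so every stored parameter is a symbolic variable that helpers such as get_variable_name can inspect. A minimal sketch of the wrapping itself (illustrative only, not part of the commit):

    import theano.tensor as tt

    phi = 2.5                            # plain Python float
    phi_t = tt.as_tensor_variable(phi)   # wrapped as a Theano constant
    print(type(phi_t).__name__)          # 'TensorConstant'
    print(phi_t.eval())                  # 2.5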

pymc3/distributions/discrete.py

3 additions, 2 deletions

@@ -4,6 +4,7 @@
 import theano.tensor as tt
 from scipy import stats

+from pymc3.util import get_variable_name
 from .dist_math import bound, factln, binomln, betaln, logpow
 from .distribution import Discrete, draw_values, generate_samples, reshape_sampled
 from pymc3.math import tround
@@ -205,8 +206,8 @@ class DiscreteWeibull(Discrete):
     def __init__(self, q, beta, *args, **kwargs):
         super(DiscreteWeibull, self).__init__(*args, defaults=['median'], **kwargs)

-        self.q = q
-        self.beta = beta
+        self.q = q = tt.as_tensor_variable(q)
+        self.beta = beta = tt.as_tensor_variable(beta)

         self.median = self._ppf(0.5)
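The new module-level import presumably feeds the _repr_latex_ methods in discrete.py. A minimal usage sketch, with the expected string taken from the test added at the bottom of this commit (the variable name 'x' is illustrative):

    import pymc3 as pm

    with pm.Model():
        x = pm.Binomial('x', n=10, p=0.5)
    # Plain numeric parameters are rendered by value:
    print(x._repr_latex_())   # '$x \sim \text{Binomial}(\mathit{n}=10, \mathit{p}=0.5)$'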

pymc3/distributions/mixture.py

6 additions, 4 deletions

@@ -1,6 +1,7 @@
 import numpy as np
 import theano.tensor as tt

+from pymc3.util import get_variable_name
 from ..math import logsumexp
 from .dist_math import bound
 from .distribution import Discrete, Distribution, draw_values, generate_samples
@@ -41,7 +42,7 @@ class Mixture(Distribution):
     def __init__(self, w, comp_dists, *args, **kwargs):
         shape = kwargs.pop('shape', ())

-        self.w = w
+        self.w = w = tt.as_tensor_variable(w)
         self.comp_dists = comp_dists

         defaults = kwargs.pop('defaults', [])
@@ -167,7 +168,8 @@ class NormalMixture(Mixture):
     def __init__(self, w, mu, *args, **kwargs):
         _, sd = get_tau_sd(tau=kwargs.pop('tau', None),
                            sd=kwargs.pop('sd', None))
-
+        self.mu = mu = tt.as_tensor_variable(mu)
+        self.sd = sd = tt.as_tensor_variable(sd)
         super(NormalMixture, self).__init__(w, Normal.dist(mu, sd=sd),
                                             *args, **kwargs)

@@ -176,8 +178,8 @@ def _repr_latex_(self, name=None, dist=None):
             dist = self
         mu = dist.mu
         w = dist.w
-        sigma = dist.sigma
+        sd = dist.sd
         return r'${} \sim \text{{NormalMixture}}(\mathit{{w}}={}, \mathit{{mu}}={}, \mathit{{sigma}}={})$'.format(name,
                                                  get_variable_name(w),
                                                  get_variable_name(mu),
-                                                 get_variable_name(sigma))
+                                                 get_variable_name(sd))
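NormalMixture now stores mu and sd as tensors in __init__, and its _repr_latex_ reads dist.sd (which is now set) instead of dist.sigma. A minimal sketch of the usage this supports (weights, locations and the variable name are illustrative):

    import numpy as np
    import pymc3 as pm

    with pm.Model():
        m = pm.NormalMixture('m', w=np.array([0.5, 0.5]),
                             mu=np.array([-1., 1.]), sd=1.)
    # get_variable_name labels array-valued parameters as 'array'
    # (see the new test below for the exact output format).
    print(m._repr_latex_())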

pymc3/distributions/multivariate.py

6 additions, 5 deletions

@@ -16,6 +16,7 @@
 from pymc3.math import tround
 from pymc3.theanof import floatX
 from . import transforms
+from pymc3.util import get_variable_name
 from .distribution import Continuous, Discrete, draw_values, generate_samples
 from ..model import Deterministic
 from .continuous import ChiSquared, Normal
@@ -291,7 +292,7 @@ class MvStudentT(Continuous):
     def __init__(self, nu, Sigma, mu=None, *args, **kwargs):
         super(MvStudentT, self).__init__(*args, **kwargs)
         self.nu = nu = tt.as_tensor_variable(nu)
-        self.mu = tt.zeros(Sigma.shape[0]) if mu is None else tt.as_tensor_variable(mu)
+        mu = tt.zeros(Sigma.shape[0]) if mu is None else tt.as_tensor_variable(mu)
         self.Sigma = Sigma = tt.as_tensor_variable(Sigma)

         self.mean = self.median = self.mode = self.mu = mu
@@ -593,9 +594,9 @@ def __init__(self, nu, V, *args, **kwargs):
                       'on the issues surrounding the Wishart see here: '
                       'https://github.com/pymc-devs/pymc3/issues/538.',
                       UserWarning)
-        self.nu = nu
-        self.p = p = V.shape[0]
-        self.V = V
+        self.nu = nu = tt.as_tensor_variable(nu)
+        self.p = p = tt.as_tensor_variable(V.shape[0])
+        self.V = V = tt.as_tensor_variable(V)
         self.mean = nu * V
         self.mode = tt.switch(1 * (nu >= p + 1),
                               (nu - p - 1) * V,
@@ -695,7 +696,7 @@ def WishartBartlett(name, S, nu, is_cholesky=False, return_cholesky=False, testv
     c = tt.sqrt(ChiSquared('c', nu - np.arange(2, 2 + n_diag), shape=n_diag,
                            testval=diag_testval))
     pm._log.info('Added new variable c to model diagonal of Wishart.')
-    z = Normal('z', 0, 1, shape=n_tril, testval=tril_testval)
+    z = Normal('z', 0., 1., shape=n_tril, testval=tril_testval)
     pm._log.info('Added new variable z to model off-diagonals of Wishart.')
     # Construct A matrix
     A = tt.zeros(S.shape, dtype=np.float32)
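With nu, mu, Sigma (and the Wishart's nu, p, V) stored as tensors, get_variable_name can report a parent variable by name in the LaTeX repr. A minimal sketch mirroring the MvStudentT case in the new test (variable names are illustrative):

    import numpy as np
    import pymc3 as pm

    with pm.Model():
        mu = pm.Normal('mu', mu=0., sd=1., shape=2)
        y = pm.MvStudentT('y', nu=5, mu=mu, Sigma=np.diag(np.ones(2)), shape=2)
    print(y._repr_latex_())   # 'mu' is reported by name, Sigma as 'array'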

pymc3/distributions/timeseries.py

30 additions, 28 deletions

@@ -1,8 +1,9 @@
 import theano.tensor as tt
 from theano import scan

+from pymc3.util import get_variable_name
+from .continuous import get_tau_sd, Normal, Flat
 from . import multivariate
-from . import continuous
 from . import distribution

 __all__ = [
@@ -29,20 +30,20 @@ class AR1(distribution.Continuous):

     def __init__(self, k, tau_e, *args, **kwargs):
         super(AR1, self).__init__(*args, **kwargs)
-        self.k = k
-        self.tau_e = tau_e
+        self.k = k = tt.as_tensor_variable(k)
+        self.tau_e = tau_e = tt.as_tensor_variable(tau_e)
         self.tau = tau_e * (1 - k ** 2)
-        self.mode = 0.
+        self.mode = tt.as_tensor_variable(0.)

     def logp(self, x):
         k = self.k
         tau_e = self.tau_e

         x_im1 = x[:-1]
         x_i = x[1:]
-        boundary = continuous.Normal.dist(0, tau_e).logp
+        boundary = Normal.dist(0., tau=tau_e).logp

-        innov_like = continuous.Normal.dist(k * x_im1, tau_e).logp(x_i)
+        innov_like = Normal.dist(k * x_im1, tau=tau_e).logp(x_i)
         return boundary(x[0]) + tt.sum(innov_like) + boundary(x[-1])

     def _repr_latex_(self, name=None, dist=None):
@@ -71,14 +72,15 @@ class GaussianRandomWalk(distribution.Continuous):
         distribution for initial value (Defaults to Flat())
     """

-    def __init__(self, tau=None, init=continuous.Flat.dist(), sd=None, mu=0.,
+    def __init__(self, tau=None, init=Flat.dist(), sd=None, mu=0.,
                  *args, **kwargs):
         super(GaussianRandomWalk, self).__init__(*args, **kwargs)
-        self.tau = tau
-        self.sd = sd
-        self.mu = mu
+        tau, sd = get_tau_sd(tau=tau, sd=sd)
+        self.tau = tau = tt.as_tensor_variable(tau)
+        self.sd = sd = tt.as_tensor_variable(sd)
+        self.mu = mu = tt.as_tensor_variable(mu)
         self.init = init
-        self.mean = 0.
+        self.mean = tt.as_tensor_variable(0.)

     def logp(self, x):
         tau = self.tau
@@ -89,7 +91,7 @@ def logp(self, x):
         x_im1 = x[:-1]
         x_i = x[1:]

-        innov_like = continuous.Normal.dist(mu=x_im1 + mu, tau=tau, sd=sd).logp(x_i)
+        innov_like = Normal.dist(mu=x_im1 + mu, sd=sd).logp(x_i)
         return init.logp(x[0]) + tt.sum(innov_like)

     def _repr_latex_(self, name=None, dist=None):
@@ -124,15 +126,15 @@ class GARCH11(distribution.Continuous):
     initial_vol >= 0, distribution for initial volatility, sigma_0
     """

-    def __init__(self, omega=None, alpha_1=None, beta_1=None,
-                 initial_vol=None, *args, **kwargs):
+    def __init__(self, omega, alpha_1, beta_1,
+                 initial_vol, *args, **kwargs):
         super(GARCH11, self).__init__(*args, **kwargs)

-        self.omega = omega
-        self.alpha_1 = alpha_1
-        self.beta_1 = beta_1
+        self.omega = omega = tt.as_tensor_variable(omega)
+        self.alpha_1 = alpha_1 = tt.as_tensor_variable(alpha_1)
+        self.beta_1 = beta_1 = tt.as_tensor_variable(beta_1)
         self.initial_vol = initial_vol
-        self.mean = 0
+        self.mean = tt.as_tensor_variable(0.)

     def get_volatility(self, x):
         x = x[:-1]
@@ -149,7 +151,7 @@ def volatility_update(x, vol, w, a, b):

     def logp(self, x):
         vol = self.get_volatility(x)
-        return tt.sum(continuous.Normal.dist(0, sd=vol).logp(x))
+        return tt.sum(Normal.dist(0., sd=vol).logp(x))

     def _repr_latex_(self, name=None, dist=None):
         if dist is None:
@@ -178,7 +180,7 @@ class EulerMaruyama(distribution.Continuous):
     """
     def __init__(self, dt, sde_fn, sde_pars, *args, **kwds):
         super(EulerMaruyama, self).__init__(*args, **kwds)
-        self.dt = dt
+        self.dt = dt = tt.as_tensor_variable(dt)
         self.sde_fn = sde_fn
         self.sde_pars = sde_pars

@@ -187,7 +189,7 @@ def logp(self, x):
         f, g = self.sde_fn(x[:-1], *self.sde_pars)
         mu = xt + self.dt * f
         sd = tt.sqrt(self.dt) * g
-        return tt.sum(continuous.Normal.dist(mu=mu, sd=sd).logp(x[1:]))
+        return tt.sum(Normal.dist(mu=mu, sd=sd).logp(x[1:]))

     def _repr_latex_(self, name=None, dist=None):
         if dist is None:
@@ -210,7 +212,7 @@ class MvGaussianRandomWalk(distribution.Continuous):
     init : distribution
         distribution for initial value (Defaults to Flat())
     """
-    def __init__(self, mu=0., cov=None, init=continuous.Flat.dist(),
+    def __init__(self, mu=0., cov=None, init=Flat.dist(),
                  *args, **kwargs):
         super(MvGaussianRandomWalk, self).__init__(*args, **kwargs)
         if cov is None:
@@ -220,9 +222,9 @@ def __init__(self, mu=0., cov=None, init=continuous.Flat.dist(),
         if cov.ndim != 2:
             raise ValueError('cov must be two dimensional.')
         self.cov = cov
-        self.mu = mu
+        self.mu = mu = tt.as_tensor_variable(mu)
         self.init = init
-        self.mean = 0.
+        self.mean = tt.as_tensor_variable(0.)

     def logp(self, x):
         cov = self.cov
@@ -259,13 +261,13 @@ class MvStudentTRandomWalk(distribution.Continuous):
     init : distribution
         distribution for initial value (Defaults to Flat())
     """
-    def __init__(self, nu, mu=0., cov=None, init=continuous.Flat.dist(),
+    def __init__(self, nu, mu=0., cov=None, init=Flat.dist(),
                  *args, **kwargs):
         super(MvStudentTRandomWalk, self).__init__(*args, **kwargs)
-        self.mu = mu
-        self.nu = nu
+        self.mu = mu = tt.as_tensor_variable(mu)
+        self.nu = nu = tt.as_tensor_variable(nu)
         self.init = init
-        self.mean = 0.
+        self.mean = tt.as_tensor_variable(0.)

         if cov is None:
             raise ValueError('A covariance matrix must be provided as cov argument.')
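GaussianRandomWalk now runs its parameters through get_tau_sd before wrapping them, so either the tau or the sd parametrization can be supplied and both attributes end up as tensors. A minimal sketch (shape and values are illustrative):

    import pymc3 as pm

    with pm.Model():
        grw = pm.GaussianRandomWalk('grw', mu=0., sd=1., shape=10)
    dist = grw.distribution
    # Both parametrizations are stored symbolically; with sd = 1. the
    # derived precision also evaluates to 1.0.
    print(dist.sd.eval(), dist.tau.eval())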

pymc3/tests/test_distributions.py

16 additions, 2 deletions

@@ -7,9 +7,9 @@
 from ..model import Model, Point, Potential
 from ..blocking import DictToVarBijection, DictToArrayBijection, ArrayOrdering
 from ..distributions import (DensityDist, Categorical, Multinomial, VonMises, Dirichlet,
-                             MvStudentT, MvNormal, ZeroInflatedPoisson,
+                             MvStudentT, MvNormal, ZeroInflatedPoisson, GaussianRandomWalk,
                              ZeroInflatedNegativeBinomial, Constant, Poisson, Bernoulli, Beta,
-                             BetaBinomial, HalfStudentT, StudentT, Weibull, Pareto,
+                             BetaBinomial, HalfStudentT, StudentT, Weibull, Pareto, NormalMixture,
                              InverseGamma, Gamma, Cauchy, HalfCauchy, Lognormal, Laplace,
                              NegativeBinomial, Geometric, Exponential, ExGaussian, Normal,
                              Flat, LKJCorr, Wald, ChiSquared, HalfNormal, DiscreteUniform,
@@ -822,3 +822,17 @@ def ref_pdf(value):
         )

         self.pymc3_matches_scipy(TestedInterpolated, R, {}, ref_pdf)
+
+
+def test_repr_latex_():
+    with Model():
+        x0 = Binomial('Discrete', p=.5, n=10)
+        x1 = Normal('Continuous', mu=0., sd=1.)
+        x2 = GaussianRandomWalk('Timeseries', mu=x1, sd=1., shape=2)
+        x3 = MvStudentT('Multivariate', nu=5, mu=x2, Sigma=np.diag(np.ones(2)), shape=2)
+        x4 = NormalMixture('Mixture', w=np.array([.5, .5]), mu=x3, sd=x0)
+        assert x0._repr_latex_()=='$Discrete \\sim \\text{Binomial}(\\mathit{n}=10, \\mathit{p}=0.5)$'
+        assert x1._repr_latex_()=='$Continuous \\sim \\text{Normal}(\\mathit{mu}=0.0, \\mathit{sd}=1.0)$'
+        assert x2._repr_latex_()=='$Timeseries \\sim \\text{GaussianRandomWalk}(\\mathit{mu}=Continuous, \\mathit{sd}=1.0)$'
+        assert x3._repr_latex_()=='$Multivariate \\sim \\text{MvStudentT}(\\mathit{nu}=5, \\mathit{mu}=Timeseries, \\mathit{Sigma}=array)$'
+        assert x4._repr_latex_()=='$Mixture \\sim \\text{NormalMixture}(\\mathit{w}=array, \\mathit{mu}=Multivariate, \\mathit{sigma}=f(Discrete))$'
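Besides exercising the fix, these asserts document how get_variable_name labels each kind of input: plain numbers by value, named random variables by name, NumPy arrays as 'array', and derived tensors as 'f(<parent name>)'. A quick interactive check of the scalar case (illustrative; the expected value is inferred from the asserts above):

    import theano.tensor as tt
    from pymc3.util import get_variable_name

    print(get_variable_name(tt.as_tensor_variable(1.0)))   # expected: 1.0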
