Skip to content

Fixed `get_variable_name` #2225

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 5 commits into from
May 26, 2017
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 4 additions & 6 deletions pymc3/distributions/continuous.py
Original file line number Diff line number Diff line change
Expand Up @@ -376,7 +376,7 @@ def __init__(self, mu=None, lam=None, phi=None, alpha=0., *args, **kwargs):
self.alpha = alpha = tt.as_tensor_variable(alpha)
self.mu = mu = tt.as_tensor_variable(mu)
self.lam = lam = tt.as_tensor_variable(lam)
self.phi = phi =tt.as_tensor_variable(phi)
self.phi = phi = tt.as_tensor_variable(phi)

self.mean = self.mu + self.alpha
self.mode = self.mu * (tt.sqrt(1. + (1.5 * self.mu / self.lam)**2)
Expand Down Expand Up @@ -1501,11 +1501,9 @@ def __init__(self, lower=0, upper=1, c=0.5,
*args, **kwargs):
super(Triangular, self).__init__(*args, **kwargs)

self.c = c
self.lower = lower
self.upper = upper
self.mean = c
self.median = self.mean
self.median = self.mean = self.c = c = tt.as_tensor_variable(c)
self.lower = lower = tt.as_tensor_variable(lower)
self.upper = upper = tt.as_tensor_variable(upper)

def random(self, point=None, size=None):
c, lower, upper = draw_values([self.c, self.lower, self.upper],
Expand Down
5 changes: 3 additions & 2 deletions pymc3/distributions/discrete.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
import theano.tensor as tt
from scipy import stats

from pymc3.util import get_variable_name
from .dist_math import bound, factln, binomln, betaln, logpow
from .distribution import Discrete, draw_values, generate_samples, reshape_sampled
from pymc3.math import tround
Expand Down Expand Up @@ -205,8 +206,8 @@ class DiscreteWeibull(Discrete):
def __init__(self, q, beta, *args, **kwargs):
super(DiscreteWeibull, self).__init__(*args, defaults=['median'], **kwargs)

self.q = q
self.beta = beta
self.q = q = tt.as_tensor_variable(q)
self.beta = beta = tt.as_tensor_variable(beta)

self.median = self._ppf(0.5)

Expand Down
10 changes: 6 additions & 4 deletions pymc3/distributions/mixture.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import numpy as np
import theano.tensor as tt

from pymc3.util import get_variable_name
from ..math import logsumexp
from .dist_math import bound
from .distribution import Discrete, Distribution, draw_values, generate_samples
Expand Down Expand Up @@ -41,7 +42,7 @@ class Mixture(Distribution):
def __init__(self, w, comp_dists, *args, **kwargs):
shape = kwargs.pop('shape', ())

self.w = w
self.w = w = tt.as_tensor_variable(w)
self.comp_dists = comp_dists

defaults = kwargs.pop('defaults', [])
Expand Down Expand Up @@ -167,7 +168,8 @@ class NormalMixture(Mixture):
def __init__(self, w, mu, *args, **kwargs):
_, sd = get_tau_sd(tau=kwargs.pop('tau', None),
sd=kwargs.pop('sd', None))

self.mu = mu = tt.as_tensor_variable(mu)
self.sd = sd = tt.as_tensor_variable(sd)
super(NormalMixture, self).__init__(w, Normal.dist(mu, sd=sd),
*args, **kwargs)

Expand All @@ -176,8 +178,8 @@ def _repr_latex_(self, name=None, dist=None):
dist = self
mu = dist.mu
w = dist.w
sigma = dist.sigma
sd = dist.sd
return r'${} \sim \text{{NormalMixture}}(\mathit{{w}}={}, \mathit{{mu}}={}, \mathit{{sigma}}={})$'.format(name,
get_variable_name(w),
get_variable_name(mu),
get_variable_name(sigma))
get_variable_name(sd))
11 changes: 6 additions & 5 deletions pymc3/distributions/multivariate.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
from pymc3.math import tround
from pymc3.theanof import floatX
from . import transforms
from pymc3.util import get_variable_name
from .distribution import Continuous, Discrete, draw_values, generate_samples
from ..model import Deterministic
from .continuous import ChiSquared, Normal
Expand Down Expand Up @@ -291,7 +292,7 @@ class MvStudentT(Continuous):
def __init__(self, nu, Sigma, mu=None, *args, **kwargs):
super(MvStudentT, self).__init__(*args, **kwargs)
self.nu = nu = tt.as_tensor_variable(nu)
self.mu = tt.zeros(Sigma.shape[0]) if mu is None else tt.as_tensor_variable(mu)
mu = tt.zeros(Sigma.shape[0]) if mu is None else tt.as_tensor_variable(mu)
self.Sigma = Sigma = tt.as_tensor_variable(Sigma)

self.mean = self.median = self.mode = self.mu = mu
Expand Down Expand Up @@ -593,9 +594,9 @@ def __init__(self, nu, V, *args, **kwargs):
'on the issues surrounding the Wishart see here: '
'https://github.com/pymc-devs/pymc3/issues/538.',
UserWarning)
self.nu = nu
self.p = p = V.shape[0]
self.V = V
self.nu = nu = tt.as_tensor_variable(nu)
self.p = p = tt.as_tensor_variable(V.shape[0])
self.V = V = tt.as_tensor_variable(V)
self.mean = nu * V
self.mode = tt.switch(1 * (nu >= p + 1),
(nu - p - 1) * V,
Expand Down Expand Up @@ -695,7 +696,7 @@ def WishartBartlett(name, S, nu, is_cholesky=False, return_cholesky=False, testv
c = tt.sqrt(ChiSquared('c', nu - np.arange(2, 2 + n_diag), shape=n_diag,
testval=diag_testval))
pm._log.info('Added new variable c to model diagonal of Wishart.')
z = Normal('z', 0, 1, shape=n_tril, testval=tril_testval)
z = Normal('z', 0., 1., shape=n_tril, testval=tril_testval)
pm._log.info('Added new variable z to model off-diagonals of Wishart.')
# Construct A matrix
A = tt.zeros(S.shape, dtype=np.float32)
Expand Down
58 changes: 30 additions & 28 deletions pymc3/distributions/timeseries.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,9 @@
import theano.tensor as tt
from theano import scan

from pymc3.util import get_variable_name
from .continuous import get_tau_sd, Normal, Flat
from . import multivariate
from . import continuous
from . import distribution

__all__ = [
Expand All @@ -29,20 +30,20 @@ class AR1(distribution.Continuous):

def __init__(self, k, tau_e, *args, **kwargs):
super(AR1, self).__init__(*args, **kwargs)
self.k = k
self.tau_e = tau_e
self.k = k = tt.as_tensor_variable(k)
self.tau_e = tau_e = tt.as_tensor_variable(tau_e)
self.tau = tau_e * (1 - k ** 2)
self.mode = 0.
self.mode = tt.as_tensor_variable(0.)

def logp(self, x):
k = self.k
tau_e = self.tau_e

x_im1 = x[:-1]
x_i = x[1:]
boundary = continuous.Normal.dist(0, tau_e).logp
boundary = Normal.dist(0., tau=tau_e).logp

innov_like = continuous.Normal.dist(k * x_im1, tau_e).logp(x_i)
innov_like = Normal.dist(k * x_im1, tau=tau_e).logp(x_i)
return boundary(x[0]) + tt.sum(innov_like) + boundary(x[-1])

def _repr_latex_(self, name=None, dist=None):
Expand Down Expand Up @@ -71,14 +72,15 @@ class GaussianRandomWalk(distribution.Continuous):
distribution for initial value (Defaults to Flat())
"""

def __init__(self, tau=None, init=continuous.Flat.dist(), sd=None, mu=0.,
def __init__(self, tau=None, init=Flat.dist(), sd=None, mu=0.,
*args, **kwargs):
super(GaussianRandomWalk, self).__init__(*args, **kwargs)
self.tau = tau
self.sd = sd
self.mu = mu
tau, sd = get_tau_sd(tau=tau, sd=sd)
self.tau = tau = tt.as_tensor_variable(tau)
self.sd = sd = tt.as_tensor_variable(sd)
self.mu = mu = tt.as_tensor_variable(mu)
self.init = init
self.mean = 0.
self.mean = tt.as_tensor_variable(0.)

def logp(self, x):
tau = self.tau
Expand All @@ -89,7 +91,7 @@ def logp(self, x):
x_im1 = x[:-1]
x_i = x[1:]

innov_like = continuous.Normal.dist(mu=x_im1 + mu, tau=tau, sd=sd).logp(x_i)
innov_like = Normal.dist(mu=x_im1 + mu, sd=sd).logp(x_i)
return init.logp(x[0]) + tt.sum(innov_like)

def _repr_latex_(self, name=None, dist=None):
Expand Down Expand Up @@ -124,15 +126,15 @@ class GARCH11(distribution.Continuous):
initial_vol >= 0, distribution for initial volatility, sigma_0
"""

def __init__(self, omega=None, alpha_1=None, beta_1=None,
initial_vol=None, *args, **kwargs):
def __init__(self, omega, alpha_1, beta_1,
initial_vol, *args, **kwargs):
super(GARCH11, self).__init__(*args, **kwargs)

self.omega = omega
self.alpha_1 = alpha_1
self.beta_1 = beta_1
self.omega = omega = tt.as_tensor_variable(omega)
self.alpha_1 = alpha_1 = tt.as_tensor_variable(alpha_1)
self.beta_1 = beta_1 = tt.as_tensor_variable(beta_1)
self.initial_vol = initial_vol
self.mean = 0
self.mean = tt.as_tensor_variable(0.)

def get_volatility(self, x):
x = x[:-1]
Expand All @@ -149,7 +151,7 @@ def volatility_update(x, vol, w, a, b):

def logp(self, x):
vol = self.get_volatility(x)
return tt.sum(continuous.Normal.dist(0, sd=vol).logp(x))
return tt.sum(Normal.dist(0., sd=vol).logp(x))

def _repr_latex_(self, name=None, dist=None):
if dist is None:
Expand Down Expand Up @@ -178,7 +180,7 @@ class EulerMaruyama(distribution.Continuous):
"""
def __init__(self, dt, sde_fn, sde_pars, *args, **kwds):
super(EulerMaruyama, self).__init__(*args, **kwds)
self.dt = dt
self.dt = dt = tt.as_tensor_variable(dt)
self.sde_fn = sde_fn
self.sde_pars = sde_pars

Expand All @@ -187,7 +189,7 @@ def logp(self, x):
f, g = self.sde_fn(x[:-1], *self.sde_pars)
mu = xt + self.dt * f
sd = tt.sqrt(self.dt) * g
return tt.sum(continuous.Normal.dist(mu=mu, sd=sd).logp(x[1:]))
return tt.sum(Normal.dist(mu=mu, sd=sd).logp(x[1:]))

def _repr_latex_(self, name=None, dist=None):
if dist is None:
Expand All @@ -210,7 +212,7 @@ class MvGaussianRandomWalk(distribution.Continuous):
init : distribution
distribution for initial value (Defaults to Flat())
"""
def __init__(self, mu=0., cov=None, init=continuous.Flat.dist(),
def __init__(self, mu=0., cov=None, init=Flat.dist(),
*args, **kwargs):
super(MvGaussianRandomWalk, self).__init__(*args, **kwargs)
if cov is None:
Expand All @@ -220,9 +222,9 @@ def __init__(self, mu=0., cov=None, init=continuous.Flat.dist(),
if cov.ndim != 2:
raise ValueError('cov must be two dimensional.')
self.cov = cov
self.mu = mu
self.mu = mu = tt.as_tensor_variable(mu)
self.init = init
self.mean = 0.
self.mean = tt.as_tensor_variable(0.)

def logp(self, x):
cov = self.cov
Expand Down Expand Up @@ -259,13 +261,13 @@ class MvStudentTRandomWalk(distribution.Continuous):
init : distribution
distribution for initial value (Defaults to Flat())
"""
def __init__(self, nu, mu=0., cov=None, init=continuous.Flat.dist(),
def __init__(self, nu, mu=0., cov=None, init=Flat.dist(),
*args, **kwargs):
super(MvStudentTRandomWalk, self).__init__(*args, **kwargs)
self.mu = mu
self.nu = nu
self.mu = mu = tt.as_tensor_variable(mu)
self.nu = nu = tt.as_tensor_variable(nu)
self.init = init
self.mean = 0.
self.mean = tt.as_tensor_variable(0.)

if cov is None:
raise ValueError('A covariance matrix must be provided as cov argument.')
Expand Down
18 changes: 16 additions & 2 deletions pymc3/tests/test_distributions.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,9 +7,9 @@
from ..model import Model, Point, Potential
from ..blocking import DictToVarBijection, DictToArrayBijection, ArrayOrdering
from ..distributions import (DensityDist, Categorical, Multinomial, VonMises, Dirichlet,
MvStudentT, MvNormal, ZeroInflatedPoisson,
MvStudentT, MvNormal, ZeroInflatedPoisson, GaussianRandomWalk,
ZeroInflatedNegativeBinomial, Constant, Poisson, Bernoulli, Beta,
BetaBinomial, HalfStudentT, StudentT, Weibull, Pareto,
BetaBinomial, HalfStudentT, StudentT, Weibull, Pareto, NormalMixture,
InverseGamma, Gamma, Cauchy, HalfCauchy, Lognormal, Laplace,
NegativeBinomial, Geometric, Exponential, ExGaussian, Normal,
Flat, LKJCorr, Wald, ChiSquared, HalfNormal, DiscreteUniform,
Expand Down Expand Up @@ -822,3 +822,17 @@ def ref_pdf(value):
)

self.pymc3_matches_scipy(TestedInterpolated, R, {}, ref_pdf)


def test_repr_latex_():
    """Check the exact _repr_latex_ output for one distribution of each
    family touched by this change (discrete, continuous, timeseries,
    multivariate, mixture), chained so each uses the previous as a parameter."""
    with Model():
        discrete = Binomial('Discrete', p=.5, n=10)
        cont = Normal('Continuous', mu=0., sd=1.)
        series = GaussianRandomWalk('Timeseries', mu=cont, sd=1., shape=2)
        multi = MvStudentT('Multivariate', nu=5, mu=series,
                           Sigma=np.diag(np.ones(2)), shape=2)
        mix = NormalMixture('Mixture', w=np.array([.5, .5]), mu=multi, sd=discrete)

        # Expected LaTeX strings reproduced byte-for-byte from the contract
        # under test; pair each variable with its expectation and assert in turn.
        expectations = [
            (discrete, '$Discrete \\sim \\text{Binomial}(\\mathit{n}=10, \\mathit{p}=0.5)$'),
            (cont, '$Continuous \\sim \\text{Normal}(\\mathit{mu}=0.0, \\mathit{sd}=1.0)$'),
            (series, '$Timeseries \\sim \\text{GaussianRandomWalk}(\\mathit{mu}=Continuous, \\mathit{sd}=1.0)$'),
            (multi, '$Multivariate \\sim \\text{MvStudentT}(\\mathit{nu}=5, \\mathit{mu}=Timeseries, \\mathit{Sigma}=array)$'),
            (mix, '$Mixture \\sim \\text{NormalMixture}(\\mathit{w}=array, \\mathit{mu}=Multivariate, \\mathit{sigma}=f(Discrete))$'),
        ]
        for rv, expected in expectations:
            assert rv._repr_latex_() == expected