Updated Pareto distribution for v4 #4593

Merged: 1 commit, Apr 6, 2021
37 changes: 19 additions & 18 deletions pymc3/distributions/continuous.py
@@ -34,6 +34,7 @@
    halfnormal,
    invgamma,
    normal,
    pareto,
    uniform,
)
from aesara.tensor.random.op import RandomVariable
@@ -2029,23 +2030,19 @@ class Pareto(Continuous):
    m: float
        Scale parameter (m > 0).
    """
    rv_op = pareto

    def __init__(self, alpha, m, transform="lowerbound", *args, **kwargs):
        self.alpha = alpha = at.as_tensor_variable(floatX(alpha))
        self.m = m = at.as_tensor_variable(floatX(m))

        self.mean = at.switch(at.gt(alpha, 1), alpha * m / (alpha - 1.0), np.inf)
        self.median = m * 2.0 ** (1.0 / alpha)
        self.variance = at.switch(
            at.gt(alpha, 2), (alpha * m ** 2) / ((alpha - 2.0) * (alpha - 1.0) ** 2), np.inf
        )
    @classmethod
    def dist(
        cls, alpha: float = None, m: float = None, no_assert: bool = False, **kwargs
    ) -> RandomVariable:
        alpha = at.as_tensor_variable(floatX(alpha))
        m = at.as_tensor_variable(floatX(m))

        assert_negative_support(alpha, "alpha", "Pareto")
        assert_negative_support(m, "m", "Pareto")

        if transform == "lowerbound":
            transform = transforms.lowerbound(self.m)
        super().__init__(transform=transform, *args, **kwargs)
        return super().dist([alpha, m], **kwargs)

    def _random(self, alpha, m, size=None):
        u = np.random.uniform(size=size)
@@ -2071,7 +2068,11 @@ def random(self, point=None, size=None):
        # alpha, m = draw_values([self.alpha, self.m], point=point, size=size)
        # return generate_samples(self._random, alpha, m, dist_shape=self.shape, size=size)

    def logp(self, value):
    def logp(
        value: Union[float, np.ndarray, TensorVariable],
        alpha: Union[float, np.ndarray, TensorVariable],
        m: Union[float, np.ndarray, TensorVariable],
    ):
        """
        Calculate log-probability of Pareto distribution at specified value.

@@ -2085,8 +2086,6 @@ def logp(self, value):
        -------
        TensorVariable
        """
        alpha = self.alpha
        m = self.m
        return bound(
            at.log(alpha) + logpow(m, alpha) - logpow(value, alpha + 1),
            value >= m,
@@ -2097,7 +2096,11 @@ def logp(self, value):
    def _distr_parameters_for_repr(self):
        return ["alpha", "m"]

    def logcdf(self, value):
    def logcdf(
        value: Union[float, np.ndarray, TensorVariable],
        alpha: Union[float, np.ndarray, TensorVariable],
        m: Union[float, np.ndarray, TensorVariable],
    ):
        """
        Compute the log of the cumulative distribution function for Pareto distribution
        at the specified value.
@@ -2112,8 +2115,6 @@ def logcdf(self, value):
        -------
        TensorVariable
        """
        m = self.m
        alpha = self.alpha
        arg = (m / value) ** alpha
        return bound(
            at.switch(
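For reference, these are the closed-form expressions behind the new logp and logcdf above, written as a plain NumPy sketch (not part of the diff; in the actual implementation, bound sends anything outside the support value >= m, alpha > 0, m > 0 to -inf):

```python
import numpy as np

def pareto_logp(value, alpha, m):
    # log p(x) = log(alpha) + alpha * log(m) - (alpha + 1) * log(x), for x >= m
    return np.log(alpha) + alpha * np.log(m) - (alpha + 1) * np.log(value)

def pareto_logcdf(value, alpha, m):
    # log F(x) = log(1 - (m / x) ** alpha); log1p keeps the result accurate
    # when (m / x) ** alpha is close to 1
    return np.log1p(-((m / value) ** alpha))

print(pareto_logp(2.5, 3.0, 1.0))    # approx. -2.567
print(pareto_logcdf(2.5, 3.0, 1.0))  # approx. -0.0661
```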
1 change: 0 additions & 1 deletion pymc3/tests/test_distributions.py
@@ -1402,7 +1402,6 @@ def test_fun(value, mu, sigma):
            decimal=select_by_precision(float64=4, float32=3),
        )

    @pytest.mark.xfail(reason="Distribution not refactored yet")
    def test_pareto(self):
        self.check_logp(
            Pareto,
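With the xfail marker dropped, test_pareto runs again and compares the refactored logp against the SciPy reference. A rough standalone sketch of that comparison (hypothetical, assuming logp can be called directly with explicit parameters as its new signature suggests; the real test goes through self.check_logp and sweeps domains such as Rplusbig):

```python
import numpy as np
import scipy.stats as st
import pymc3 as pm

alpha, m = 2.5, 1.5
values = np.array([1.5, 2.0, 5.0, 25.0])

# The new-style logp takes value, alpha and m explicitly instead of reading self.*
pymc3_logp = pm.Pareto.logp(values, alpha, m).eval()
scipy_logp = st.pareto.logpdf(values, alpha, scale=m)

np.testing.assert_allclose(pymc3_logp, scipy_logp, rtol=1e-6)
```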
13 changes: 0 additions & 13 deletions pymc3/tests/test_distributions_random.py
@@ -342,12 +342,6 @@ class TestStudentT(BaseTestCases.BaseTestCase):
params = {"nu": 5.0, "mu": 0.0, "lam": 1.0}


@pytest.mark.xfail(reason="This distribution has not been refactored for v4")
class TestPareto(BaseTestCases.BaseTestCase):
distribution = pm.Pareto
params = {"alpha": 0.5, "m": 1.0}


@pytest.mark.skip(reason="This test is covered by Aesara")
class TestCauchy(BaseTestCases.BaseTestCase):
distribution = pm.Cauchy
@@ -681,13 +675,6 @@ def ref_rand(size, alpha, beta):

        pymc3_random(pm.InverseGamma, {"alpha": Rplus, "beta": Rplus}, ref_rand=ref_rand)

    @pytest.mark.xfail(reason="This distribution has not been refactored for v4")
    def test_pareto(self):
        def ref_rand(size, alpha, m):
            return st.pareto.rvs(alpha, scale=m, size=size)

        pymc3_random(pm.Pareto, {"alpha": Rplusbig, "m": Rplusbig}, ref_rand=ref_rand)

    @pytest.mark.xfail(reason="This distribution has not been refactored for v4")
    def test_ex_gaussian(self):
        def ref_rand(size, mu, sigma, nu):
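The deleted sampling tests drew reference values from st.pareto.rvs(alpha, scale=m, size=size); with rv_op = pareto, that path is now exercised on the Aesara side. For context, a small NumPy sketch of the inverse-CDF draw the old _random helper was built on, checked against the same SciPy reference (illustrative only, not part of this PR):

```python
import numpy as np
import scipy.stats as st

rng = np.random.default_rng(42)
alpha, m, size = 3.0, 1.0, 100_000

# Inverse-CDF sampling: if u ~ Uniform(0, 1), then m * (1 - u) ** (-1 / alpha)
# follows a Pareto(alpha, m) distribution.
u = rng.uniform(size=size)
samples = m * (1.0 - u) ** (-1.0 / alpha)

# Compare the empirical distribution with the SciPy reference used by the old test.
ks = st.kstest(samples, st.pareto(alpha, scale=m).cdf)
print(ks.statistic, ks.pvalue)
```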