15
15
16
16
from pymc3 .theanof import floatX
17
17
from . import transforms
18
+ from pymc3 .util import get_variable_name
18
19
19
20
from .dist_math import bound , logpow , gammaln , betaln , std_cdf , i0 , i1 , alltrue_elemwise , DifferentiableSplineWrapper
20
21
from .distribution import Continuous , draw_values , generate_samples , Bound
@@ -152,6 +153,15 @@ def logp(self, value):
152
153
return bound (- tt .log (upper - lower ),
153
154
value >= lower , value <= upper )
154
155
156
def _repr_latex_(self, name=None, dist=None):
    """LaTeX display string for a Uniform variable (IPython rich repr)."""
    if dist is None:
        dist = self
    return (r'${} \sim \text{{Uniform}}(\mathit{{lower}}={}, \mathit{{upper}}={})$'
            .format(name,
                    get_variable_name(dist.lower),
                    get_variable_name(dist.upper)))
155
165
156
166
class Flat (Continuous ):
157
167
"""
@@ -169,6 +179,11 @@ def random(self, point=None, size=None, repeat=None):
169
179
def logp(self, value):
    """Improper flat log-density: zero everywhere, shaped like `value`."""
    log_prob = tt.zeros_like(value)
    return log_prob
171
181
182
+ def _repr_latex_ (self , name = None , dist = None ):
183
+ if dist is None :
184
+ dist = self
185
+ return r'${} \sim \text{{Flat}()$'
186
+
172
187
173
188
class Normal (Continuous ):
174
189
R"""
@@ -232,6 +247,15 @@ def logp(self, value):
232
247
return bound ((- tau * (value - mu )** 2 + tt .log (tau / np .pi / 2. )) / 2. ,
233
248
sd > 0 )
234
249
250
def _repr_latex_(self, name=None, dist=None):
    """LaTeX display string for a Normal variable (IPython rich repr)."""
    dist = self if dist is None else dist
    template = r'${} \sim \text{{Normal}}(\mathit{{mu}}={}, \mathit{{sd}}={})$'
    return template.format(name,
                           get_variable_name(dist.mu),
                           get_variable_name(dist.sd))
235
259
236
260
class HalfNormal (PositiveContinuous ):
237
261
R"""
@@ -283,6 +307,12 @@ def logp(self, value):
283
307
value >= 0 ,
284
308
tau > 0 , sd > 0 )
285
309
310
def _repr_latex_(self, name=None, dist=None):
    """LaTeX display string for a HalfNormal variable (IPython rich repr)."""
    if dist is None:
        dist = self
    return (r'${} \sim \text{{HalfNormal}}(\mathit{{sd}}={})$'
            .format(name, get_variable_name(dist.sd)))
286
316
287
317
class Wald (PositiveContinuous ):
288
318
R"""
@@ -404,6 +434,17 @@ def logp(self, value):
404
434
value > 0 , value - alpha > 0 ,
405
435
mu > 0 , lam > 0 , alpha >= 0 )
406
436
437
def _repr_latex_(self, name=None, dist=None):
    """LaTeX display string for a Wald variable (IPython rich repr)."""
    dist = self if dist is None else dist
    template = (r'${} \sim \text{{Wald}}'
                r'(\mathit{{mu}}={}, \mathit{{lam}}={}, \mathit{{alpha}}={})$')
    return template.format(name,
                           get_variable_name(dist.mu),
                           get_variable_name(dist.lam),
                           get_variable_name(dist.alpha))
407
448
408
449
class Beta (UnitContinuous ):
409
450
R"""
@@ -492,6 +533,15 @@ def logp(self, value):
492
533
value >= 0 , value <= 1 ,
493
534
alpha > 0 , beta > 0 )
494
535
536
def _repr_latex_(self, name=None, dist=None):
    """LaTeX display string for a Beta variable (IPython rich repr).

    Fix: the second parameter was mislabeled ``\mathit{{alpha}}`` even
    though it receives ``beta``; label it ``\mathit{{beta}}``.
    """
    if dist is None:
        dist = self
    alpha = dist.alpha
    beta = dist.beta
    return (r'${} \sim \text{{Beta}}(\mathit{{alpha}}={}, \mathit{{beta}}={})$'
            .format(name,
                    get_variable_name(alpha),
                    get_variable_name(beta)))
495
545
496
546
class Exponential (PositiveContinuous ):
497
547
R"""
@@ -534,6 +584,12 @@ def logp(self, value):
534
584
lam = self .lam
535
585
return bound (tt .log (lam ) - lam * value , value > 0 , lam > 0 )
536
586
587
def _repr_latex_(self, name=None, dist=None):
    """LaTeX display string for an Exponential variable (IPython rich repr)."""
    dist = self if dist is None else dist
    return (r'${} \sim \text{{Exponential}}(\mathit{{lam}}={})$'
            .format(name, get_variable_name(dist.lam)))
537
593
538
594
class Laplace (Continuous ):
539
595
R"""
@@ -579,6 +635,15 @@ def logp(self, value):
579
635
580
636
return - tt .log (2 * b ) - abs (value - mu ) / b
581
637
638
def _repr_latex_(self, name=None, dist=None):
    """LaTeX display string for a Laplace variable (IPython rich repr)."""
    if dist is None:
        dist = self
    template = r'${} \sim \text{{Laplace}}(\mathit{{mu}}={}, \mathit{{b}}={})$'
    return template.format(name,
                           get_variable_name(dist.mu),
                           get_variable_name(dist.b))
582
647
583
648
class Lognormal (PositiveContinuous ):
584
649
R"""
@@ -643,6 +708,15 @@ def logp(self, value):
643
708
- tt .log (value ),
644
709
tau > 0 )
645
710
711
def _repr_latex_(self, name=None, dist=None):
    """LaTeX display string for a Lognormal variable (IPython rich repr)."""
    dist = self if dist is None else dist
    return (r'${} \sim \text{{Lognormal}}(\mathit{{mu}}={}, \mathit{{tau}}={})$'
            .format(name,
                    get_variable_name(dist.mu),
                    get_variable_name(dist.tau)))
646
720
647
721
class StudentT (Continuous ):
648
722
R"""
@@ -707,6 +781,17 @@ def logp(self, value):
707
781
- (nu + 1.0 ) / 2.0 * tt .log1p (lam * (value - mu )** 2 / nu ),
708
782
lam > 0 , nu > 0 , sd > 0 )
709
783
784
def _repr_latex_(self, name=None, dist=None):
    """LaTeX display string for a StudentT variable (IPython rich repr)."""
    if dist is None:
        dist = self
    template = (r'${} \sim \text{{StudentT}}'
                r'(\mathit{{nu}}={}, \mathit{{mu}}={}, \mathit{{lam}}={})$')
    return template.format(name,
                           get_variable_name(dist.nu),
                           get_variable_name(dist.mu),
                           get_variable_name(dist.lam))
710
795
711
796
class Pareto (PositiveContinuous ):
712
797
R"""
@@ -769,6 +854,15 @@ def logp(self, value):
769
854
- logpow (value , alpha + 1 ),
770
855
value >= m , alpha > 0 , m > 0 )
771
856
857
def _repr_latex_(self, name=None, dist=None):
    """LaTeX display string for a Pareto variable (IPython rich repr)."""
    dist = self if dist is None else dist
    return (r'${} \sim \text{{Pareto}}(\mathit{{alpha}}={}, \mathit{{m}}={})$'
            .format(name,
                    get_variable_name(dist.alpha),
                    get_variable_name(dist.m)))
772
866
773
867
class Cauchy (Continuous ):
774
868
R"""
@@ -821,6 +915,15 @@ def logp(self, value):
821
915
- tt .log1p (((value - alpha ) / beta )** 2 ),
822
916
beta > 0 )
823
917
918
def _repr_latex_(self, name=None, dist=None):
    """LaTeX display string for a Cauchy variable (IPython rich repr)."""
    if dist is None:
        dist = self
    template = r'${} \sim \text{{Cauchy}}(\mathit{{alpha}}={}, \mathit{{beta}}={})$'
    return template.format(name,
                           get_variable_name(dist.alpha),
                           get_variable_name(dist.beta))
824
927
825
928
class HalfCauchy (PositiveContinuous ):
826
929
R"""
@@ -867,6 +970,12 @@ def logp(self, value):
867
970
- tt .log1p ((value / beta )** 2 ),
868
971
value >= 0 , beta > 0 )
869
972
973
def _repr_latex_(self, name=None, dist=None):
    """LaTeX display string for a HalfCauchy variable (IPython rich repr)."""
    dist = self if dist is None else dist
    return (r'${} \sim \text{{HalfCauchy}}(\mathit{{beta}}={})$'
            .format(name, get_variable_name(dist.beta)))
870
979
871
980
class Gamma (PositiveContinuous ):
872
981
R"""
@@ -950,6 +1059,15 @@ def logp(self, value):
950
1059
alpha > 0 ,
951
1060
beta > 0 )
952
1061
1062
def _repr_latex_(self, name=None, dist=None):
    """LaTeX display string for a Gamma variable (IPython rich repr)."""
    if dist is None:
        dist = self
    return (r'${} \sim \text{{Gamma}}(\mathit{{alpha}}={}, \mathit{{beta}}={})$'
            .format(name,
                    get_variable_name(dist.alpha),
                    get_variable_name(dist.beta)))
953
1071
954
1072
class InverseGamma (PositiveContinuous ):
955
1073
R"""
@@ -1011,6 +1129,15 @@ def logp(self, value):
1011
1129
+ logpow (value , - alpha - 1 ),
1012
1130
value > 0 , alpha > 0 , beta > 0 )
1013
1131
1132
def _repr_latex_(self, name=None, dist=None):
    """LaTeX display string for an InverseGamma variable (IPython rich repr)."""
    dist = self if dist is None else dist
    template = r'${} \sim \text{{InverseGamma}}(\mathit{{alpha}}={}, \mathit{{beta}}={})$'
    return template.format(name,
                           get_variable_name(dist.alpha),
                           get_variable_name(dist.beta))
1014
1141
1015
1142
class ChiSquared (Gamma ):
1016
1143
R"""
@@ -1037,6 +1164,13 @@ def __init__(self, nu, *args, **kwargs):
1037
1164
super (ChiSquared , self ).__init__ (alpha = nu / 2. , beta = 0.5 ,
1038
1165
* args , ** kwargs )
1039
1166
1167
def _repr_latex_(self, name=None, dist=None):
    """LaTeX display string for a ChiSquared variable (IPython rich repr).

    Fix: ``\Chi`` is not a LaTeX/MathJax command (only lowercase
    ``\chi`` exists), so the original string failed to render.
    """
    if dist is None:
        dist = self
    nu = dist.nu
    return r'${} \sim \chi^2(\mathit{{nu}}={})$'.format(name,
                                                        get_variable_name(nu))
1040
1174
1041
1175
class Weibull (PositiveContinuous ):
1042
1176
R"""
@@ -1093,6 +1227,15 @@ def logp(self, value):
1093
1227
- (value / beta )** alpha ,
1094
1228
value >= 0 , alpha > 0 , beta > 0 )
1095
1229
1230
def _repr_latex_(self, name=None, dist=None):
    """LaTeX display string for a Weibull variable (IPython rich repr)."""
    if dist is None:
        dist = self
    return (r'${} \sim \text{{Weibull}}(\mathit{{alpha}}={}, \mathit{{beta}}={})$'
            .format(name,
                    get_variable_name(dist.alpha),
                    get_variable_name(dist.beta)))
1096
1239
1097
1240
def StudentTpos (* args , ** kwargs ):
1098
1241
warnings .warn ("StudentTpos has been deprecated. In future, use HalfStudentT instead." ,
@@ -1183,6 +1326,17 @@ def logp(self, value):
1183
1326
- 0.5 * ((value - mu ) / sigma )** 2 )
1184
1327
return bound (lp , sigma > 0. , nu > 0. )
1185
1328
1329
def _repr_latex_(self, name=None, dist=None):
    """LaTeX display string for an ExGaussian variable (IPython rich repr)."""
    dist = self if dist is None else dist
    template = (r'${} \sim \text{{ExGaussian}}'
                r'(\mathit{{mu}}={}, \mathit{{sigma}}={}, \mathit{{nu}}={})$')
    return template.format(name,
                           get_variable_name(dist.mu),
                           get_variable_name(dist.sigma),
                           get_variable_name(dist.nu))
1186
1340
1187
1341
class VonMises (Continuous ):
1188
1342
R"""
@@ -1231,6 +1385,16 @@ def logp(self, value):
1231
1385
kappa = self .kappa
1232
1386
return bound (kappa * tt .cos (mu - value ) - tt .log (2 * np .pi * i0 (kappa )), value >= - np .pi , value <= np .pi , kappa >= 0 )
1233
1387
1388
def _repr_latex_(self, name=None, dist=None):
    """LaTeX display string for a VonMises variable (IPython rich repr)."""
    if dist is None:
        dist = self
    return (r'${} \sim \text{{VonMises}}(\mathit{{mu}}={}, \mathit{{kappa}}={})$'
            .format(name,
                    get_variable_name(dist.mu),
                    get_variable_name(dist.kappa)))
1234
1398
1235
1399
class SkewNormal (Continuous ):
1236
1400
R"""
@@ -1306,6 +1470,17 @@ def logp(self, value):
1306
1470
+ tt .log (tau / np .pi / 2. )) / 2. ,
1307
1471
tau > 0 , sd > 0 )
1308
1472
1473
def _repr_latex_(self, name=None, dist=None):
    """LaTeX display string for a SkewNormal variable (IPython rich repr)."""
    dist = self if dist is None else dist
    template = (r'${} \sim \text{{Skew-Normal}}'
                r'(\mathit{{mu}}={}, \mathit{{sd}}={}, \mathit{{alpha}}={})$')
    return template.format(name,
                           get_variable_name(dist.mu),
                           get_variable_name(dist.sd),
                           get_variable_name(dist.alpha))
1309
1484
1310
1485
class Triangular (Continuous ):
1311
1486
"""
@@ -1348,6 +1523,18 @@ def logp(self, value):
1348
1523
tt .switch (alltrue_elemwise ([c < value , value <= upper ]),
1349
1524
tt .log (2 * (upper - value ) / ((upper - lower ) * (upper - c ))),np .inf )))
1350
1525
1526
def _repr_latex_(self, name=None, dist=None):
    """LaTeX display string for a Triangular variable (IPython rich repr)."""
    if dist is None:
        dist = self
    template = (r'${} \sim \text{{Triangular}}'
                r'(\mathit{{c}}={}, \mathit{{lower}}={}, \mathit{{upper}}={})$')
    return template.format(name,
                           get_variable_name(dist.c),
                           get_variable_name(dist.lower),
                           get_variable_name(dist.upper))
1351
1538
class Gumbel (Continuous ):
1352
1539
R"""
1353
1540
Univariate Gumbel log-likelihood
@@ -1391,6 +1578,16 @@ def logp(self, value):
1391
1578
scaled = (value - self .mu ) / self .beta
1392
1579
return bound (- scaled - tt .exp (- scaled ) - tt .log (self .beta ), self .beta > 0 )
1393
1580
1581
def _repr_latex_(self, name=None, dist=None):
    """LaTeX display string for a Gumbel variable (IPython rich repr)."""
    dist = self if dist is None else dist
    return (r'${} \sim \text{{Gumbel}}(\mathit{{mu}}={}, \mathit{{beta}}={})$'
            .format(name,
                    get_variable_name(dist.mu),
                    get_variable_name(dist.beta)))
1394
1591
class Interpolated (Continuous ):
1395
1592
R"""
1396
1593
Univariate probability distribution defined as a linear interpolation
0 commit comments