
Commit 87f603b

Formatted Next 15 Files (#4160)
1 parent 6b765e4 commit 87f603b

Showing 15 changed files with 292 additions and 299 deletions.

pymc3/distributions/discrete.py

Lines changed: 4 additions & 12 deletions
@@ -644,25 +644,17 @@ def get_mu_alpha(self, mu=None, alpha=None, p=None, n=None):
             if n is not None:
                 alpha = n
             else:
-                raise ValueError(
-                    "Incompatible parametrization. Must specify either alpha or n."
-                )
+                raise ValueError("Incompatible parametrization. Must specify either alpha or n.")
         elif n is not None:
-            raise ValueError(
-                "Incompatible parametrization. Can't specify both alpha and n."
-            )
+            raise ValueError("Incompatible parametrization. Can't specify both alpha and n.")
 
         if mu is None:
             if p is not None:
                 mu = alpha * (1 - p) / p
             else:
-                raise ValueError(
-                    "Incompatible parametrization. Must specify either mu or p."
-                )
+                raise ValueError("Incompatible parametrization. Must specify either mu or p.")
         elif p is not None:
-            raise ValueError(
-                "Incompatible parametrization. Can't specify both mu and p."
-            )
+            raise ValueError("Incompatible parametrization. Can't specify both mu and p.")
 
         return mu, alpha
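
For context, get_mu_alpha accepts exactly one of the two NegativeBinomial parametrizations, (mu, alpha) or (n, p), related by mu = alpha * (1 - p) / p. A minimal sketch of the behavior these ValueErrors guard (the numeric values are illustrative):

import pymc3 as pm

# Either parametrization alone is accepted.
x = pm.NegativeBinomial.dist(mu=2.0, alpha=5.0)
y = pm.NegativeBinomial.dist(n=5.0, p=0.7)

# Mixing the two raises one of the errors above, e.g.:
# ValueError: Incompatible parametrization. Can't specify both mu and p.
pm.NegativeBinomial.dist(mu=2.0, alpha=5.0, p=0.7)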

pymc3/distributions/distribution.py

Lines changed: 2 additions & 8 deletions
@@ -179,21 +179,15 @@ def _str_repr(self, name=None, dist=None, formatting="plain"):
 
         if formatting == "latex":
             param_string = ",~".join(
-                [
-                    fr"\mathit{{{name}}}={value}"
-                    for name, value in zip(param_names, param_values)
-                ]
+                [fr"\mathit{{{name}}}={value}" for name, value in zip(param_names, param_values)]
             )
             return r"$\text{{{var_name}}} \sim \text{{{distr_name}}}({params})$".format(
                 var_name=name, distr_name=dist._distr_name_for_repr(), params=param_string
             )
         else:
             # 'plain' is default option
             param_string = ", ".join(
-                [
-                    f"{name}={value}"
-                    for name, value in zip(param_names, param_values)
-                ]
+                [f"{name}={value}" for name, value in zip(param_names, param_values)]
             )
             return "{var_name} ~ {distr_name}({params})".format(
                 var_name=name, distr_name=dist._distr_name_for_repr(), params=param_string
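
The joined param_string then fills a simple template. A standalone sketch of what the 'plain' branch produces (names and values are illustrative):

param_names = ["mu", "sigma"]
param_values = ["0.0", "1.0"]
param_string = ", ".join([f"{name}={value}" for name, value in zip(param_names, param_values)])
print("{var_name} ~ {distr_name}({params})".format(var_name="x", distr_name="Normal", params=param_string))
# x ~ Normal(mu=0.0, sigma=1.0)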

pymc3/distributions/transforms.py

Lines changed: 5 additions & 4 deletions
@@ -440,8 +440,9 @@ class StickBreaking(Transform):
 
     def __init__(self, eps=None):
         if eps is not None:
-            warnings.warn("The argument `eps` is deprecated and will not be used.",
-                          DeprecationWarning)
+            warnings.warn(
+                "The argument `eps` is deprecated and will not be used.", DeprecationWarning
+            )
 
     def forward(self, x_):
         x = x_.T
@@ -471,9 +472,9 @@ def jacobian_det(self, y_):
         y = y_.T
         Km1 = y.shape[0] + 1
         sy = tt.sum(y, 0, keepdims=True)
-        r = tt.concatenate([y+sy, tt.zeros(sy.shape)])
+        r = tt.concatenate([y + sy, tt.zeros(sy.shape)])
         sr = logsumexp(r, 0, keepdims=True)
-        d = tt.log(Km1) + (Km1*sy) - (Km1*sr)
+        d = tt.log(Km1) + (Km1 * sy) - (Km1 * sr)
         return tt.sum(d, 0).T
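
jacobian_det leans on logsumexp for numerical stability. A NumPy sketch of the standard log-sum-exp trick (the Theano helper used above is assumed to behave the same way):

import numpy as np

def logsumexp(x, axis=0, keepdims=True):
    # Stable log(sum(exp(x))): shift by the max so exp() cannot overflow.
    m = np.max(x, axis=axis, keepdims=True)
    out = m + np.log(np.sum(np.exp(x - m), axis=axis, keepdims=True))
    return out if keepdims else np.squeeze(out, axis=axis)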

pymc3/gp/cov.py

Lines changed: 22 additions & 39 deletions
@@ -87,10 +87,13 @@ def full(self, X, Xs):
 
     def _slice(self, X, Xs):
         if self.input_dim != X.shape[-1]:
-            warnings.warn(f"Only {self.input_dim} column(s) out of {X.shape[-1]} are"
-                          " being used to compute the covariance function. If this"
-                          " is not intended, increase 'input_dim' parameter to"
-                          " the number of columns to use. Ignore otherwise.", UserWarning)
+            warnings.warn(
+                f"Only {self.input_dim} column(s) out of {X.shape[-1]} are"
+                " being used to compute the covariance function. If this"
+                " is not intended, increase 'input_dim' parameter to"
+                " the number of columns to use. Ignore otherwise.",
+                UserWarning,
+            )
         X = tt.as_tensor_variable(X[:, self.active_dims])
         if Xs is not None:
             Xs = tt.as_tensor_variable(Xs[:, self.active_dims])
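
A quick illustration of when this warning fires (kernel choice and shapes are illustrative):

import numpy as np
import pymc3 as pm

X = np.random.randn(10, 3)
cov = pm.gp.cov.ExpQuad(1, ls=1.0)  # input_dim=1, so active_dims defaults to column 0
cov(X)  # X has 3 columns -> emits the UserWarning above; only column 0 is used
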
@@ -109,9 +112,9 @@ def __rmul__(self, other):
         return self.__mul__(other)
 
     def __pow__(self, other):
-        if(
-            isinstance(other, theano.compile.SharedVariable) and
-            other.get_value().squeeze().shape == ()
+        if (
+            isinstance(other, theano.compile.SharedVariable)
+            and other.get_value().squeeze().shape == ()
         ):
             other = tt.squeeze(other)
             return Exponentiated(self, other)
@@ -123,7 +126,6 @@ def __pow__(self, other):
 
         raise ValueError("A covariance function can only be exponentiated by a scalar value")
 
-
     def __array_wrap__(self, result):
         """
         Required to allow radd/rmul by numpy arrays.
@@ -132,7 +134,9 @@ def __array_wrap__(self, result):
         if len(result.shape) <= 1:
             result = result.reshape(1, 1)
         elif len(result.shape) > 2:
-            raise ValueError(f"cannot combine a covariance function with array of shape {result.shape}")
+            raise ValueError(
+                f"cannot combine a covariance function with array of shape {result.shape}"
+            )
         r, c = result.shape
         A = np.zeros((r, c))
         for i in range(r):
@@ -149,11 +153,7 @@
 class Combination(Covariance):
     def __init__(self, factor_list):
         input_dim = max(
-            [
-                factor.input_dim
-                for factor in factor_list
-                if isinstance(factor, Covariance)
-            ]
+            [factor.input_dim for factor in factor_list if isinstance(factor, Covariance)]
         )
         super().__init__(input_dim=input_dim)
         self.factor_list = []
@@ -205,10 +205,7 @@ class Exponentiated(Covariance):
     def __init__(self, kernel, power):
         self.kernel = kernel
         self.power = power
-        super().__init__(
-            input_dim=self.kernel.input_dim,
-            active_dims=self.kernel.active_dims
-        )
+        super().__init__(input_dim=self.kernel.input_dim, active_dims=self.kernel.active_dims)
 
     def __call__(self, X, Xs=None, diag=False):
         return self.kernel(X, Xs, diag=diag) ** self.power
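
Together, __pow__ and Exponentiated let a covariance function be raised to a scalar power. A short usage sketch (kernel choice and values are illustrative):

import numpy as np
import pymc3 as pm

cov = pm.gp.cov.ExpQuad(1, ls=0.1) ** 2        # returns an Exponentiated kernel
K = cov(np.linspace(0, 1, 5)[:, None]).eval()  # evaluates the 5x5 covariance matrix

# A non-scalar exponent raises:
# ValueError: A covariance function can only be exponentiated by a scalar value
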
@@ -247,9 +244,7 @@ def _split(self, X, Xs):
 
     def __call__(self, X, Xs=None, diag=False):
         X_split, Xs_split = self._split(X, Xs)
-        covs = [
-            cov(x, xs, diag) for cov, x, xs in zip(self.factor_list, X_split, Xs_split)
-        ]
+        covs = [cov(x, xs, diag) for cov, x, xs in zip(self.factor_list, X_split, Xs_split)]
         return reduce(mul, covs)
 
 
@@ -431,9 +426,7 @@ class Matern52(Stationary):
     def full(self, X, Xs=None):
         X, Xs = self._slice(X, Xs)
         r = self.euclidean_dist(X, Xs)
-        return (1.0 + np.sqrt(5.0) * r + 5.0 / 3.0 * tt.square(r)) * tt.exp(
-            -1.0 * np.sqrt(5.0) * r
-        )
+        return (1.0 + np.sqrt(5.0) * r + 5.0 / 3.0 * tt.square(r)) * tt.exp(-1.0 * np.sqrt(5.0) * r)
 
 
 class Matern32(Stationary):
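
For reference, the single-line return in Matern52.full is the standard Matérn ν = 5/2 kernel, with r the lengthscale-scaled Euclidean distance:

k_{5/2}(r) = \left(1 + \sqrt{5}\,r + \tfrac{5}{3}\,r^{2}\right) \exp\left(-\sqrt{5}\,r\right)
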
@@ -605,14 +598,10 @@ def __init__(self, input_dim, lengthscale_func, args=None, active_dims=None):
         super().__init__(input_dim, active_dims)
         if active_dims is not None:
             if len(active_dims) > 1:
-                raise NotImplementedError(
-                    ("Higher dimensional inputs ", "are untested")
-                )
+                raise NotImplementedError(("Higher dimensional inputs ", "are untested"))
         else:
             if input_dim != 1:
-                raise NotImplementedError(
-                    ("Higher dimensional inputs ", "are untested")
-                )
+                raise NotImplementedError(("Higher dimensional inputs ", "are untested"))
         if not callable(lengthscale_func):
             raise TypeError("lengthscale_func must be callable")
         self.lfunc = handle_args(lengthscale_func, args)
@@ -642,9 +631,7 @@ def full(self, X, Xs=None):
         r2 = self.square_dist(X, Xs)
         rx2 = tt.reshape(tt.square(rx), (-1, 1))
         rz2 = tt.reshape(tt.square(rz), (1, -1))
-        return tt.sqrt((2.0 * tt.outer(rx, rz)) / (rx2 + rz2)) * tt.exp(
-            -1.0 * r2 / (rx2 + rz2)
-        )
+        return tt.sqrt((2.0 * tt.outer(rx, rz)) / (rx2 + rz2)) * tt.exp(-1.0 * r2 / (rx2 + rz2))
 
     def diag(self, X):
         return tt.alloc(1.0, X.shape[0])
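
This full method appears to be the Gibbs non-stationary kernel, with rx = ℓ(x) and rz = ℓ(x′) supplied by lengthscale_func; in one dimension it computes:

k(x, x') = \sqrt{\frac{2\,\ell(x)\,\ell(x')}{\ell(x)^{2} + \ell(x')^{2}}} \exp\left(-\frac{(x - x')^{2}}{\ell(x)^{2} + \ell(x')^{2}}\right)
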
@@ -734,19 +721,15 @@ def __init__(self, input_dim, W=None, kappa=None, B=None, active_dims=None):
             raise ValueError("Coregion requires exactly one dimension to be active")
         make_B = W is not None or kappa is not None
         if make_B and B is not None:
-            raise ValueError(
-                "Exactly one of (W, kappa) and B must be provided to Coregion"
-            )
+            raise ValueError("Exactly one of (W, kappa) and B must be provided to Coregion")
         if make_B:
             self.W = tt.as_tensor_variable(W)
             self.kappa = tt.as_tensor_variable(kappa)
             self.B = tt.dot(self.W, self.W.T) + tt.diag(self.kappa)
         elif B is not None:
             self.B = tt.as_tensor_variable(B)
         else:
-            raise ValueError(
-                "Exactly one of (W, kappa) and B must be provided to Coregion"
-            )
+            raise ValueError("Exactly one of (W, kappa) and B must be provided to Coregion")
 
     def full(self, X, Xs=None):
         X, Xs = self._slice(X, Xs)
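
The coregionalization matrix built here is B = W Wᵀ + diag(κ). A NumPy sketch with illustrative shapes (two outputs, rank-1 W):

import numpy as np

W = np.array([[1.0], [0.5]])  # (num_outputs, rank)
kappa = np.array([0.1, 0.2])  # per-output variance added on the diagonal
B = W @ W.T + np.diag(kappa)  # mirrors tt.dot(W, W.T) + tt.diag(kappa) above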
