Commit 0541aaa

jessegrabowski authored and ricardoV94 committed
Rename optimized->optimize to match numpy/jax signature
1 parent 1399c2f commit 0541aaa
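For context, both numpy and jax spell this flag optimize in their einsum signatures, which is what the rename matches. A minimal NumPy illustration (operand values are arbitrary):

    import numpy as np

    a = np.ones((2, 3))
    b = np.ones((3, 4))

    # NumPy's keyword is optimize, not optimized; passing True lets
    # opt_einsum choose the contraction order.
    c = np.einsum("ij,jk->ik", a, b, optimize=True)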

File tree: 2 files changed (+8 -8 lines changed)

pytensor/tensor/einsum.py

Lines changed: 7 additions & 7 deletions
@@ -36,12 +36,12 @@ class Einsum(OpFromGraph):
     Wrapper Op for Einsum graphs
     """

-    __props__ = ("subscripts", "path", "optimized")
+    __props__ = ("subscripts", "path", "optimize")

-    def __init__(self, *args, subscripts: str, path: str, optimized: bool, **kwargs):
+    def __init__(self, *args, subscripts: str, path: str, optimize: bool, **kwargs):
         self.subscripts = subscripts
         self.path = path
-        self.optimized = optimized
+        self.optimize = optimize
         super().__init__(*args, **kwargs, strict=True)

@@ -224,7 +224,7 @@ def einsum(subscripts: str, *operands: "TensorLike") -> TensorVariable:
     shapes = [operand.type.shape for operand in operands]

     if None in itertools.chain.from_iterable(shapes):
-        # We mark optimized = False, even in cases where there is no ordering optimization to be done
+        # We mark optimize = False, even in cases where there is no ordering optimization to be done
         # because the inner graph may have to accommodate dynamic shapes.
         # If those shapes become known later we will likely want to rebuild the Op (unless we inline it)
         if len(operands) == 1:
@@ -235,7 +235,7 @@ def einsum(subscripts: str, *operands: "TensorLike") -> TensorVariable:
         # We use (1,0) and not (0,1) because that's what opt_einsum tends to prefer, and so the Op signatures will match more often
         path = [(1, 0) for i in range(len(operands) - 1)]
         contraction_list = contraction_list_from_path(subscripts, operands, path)
-        optimized = (
+        optimize = (
             len(operands) <= 2
         )  # If there are only 1 or 2 operands, there is no optimization to be done?
     else:
@@ -248,7 +248,7 @@ def einsum(subscripts: str, *operands: "TensorLike") -> TensorVariable:
             shapes=True,
         )
         path = [contraction[0] for contraction in contraction_list]
-        optimized = True
+        optimize = True

     def sum_uniques(
         operand: TensorVariable, names: str, uniques: list[str]
@@ -413,6 +413,6 @@ def sum_repeats(
         inputs=list(operands),
         outputs=[einsum_result],
         path=tuple(path),
-        optimized=optimized,
+        optimize=optimize,
     )(*operands)
     return cast(TensorVariable, out)
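The hunks above define the renamed flag's semantics: when all static shapes are known, opt_einsum computes a real contraction order and optimize is True; when any dimension is unknown, a fixed (1, 0) path is used and optimize is True only for one or two operands, where no ordering choice exists. A small sketch of how this surfaces on the built Op, assuming this commit's pt.einsum (variable names and shapes are illustrative):

    import pytensor.tensor as pt

    # All shapes static: opt_einsum picks a real contraction order.
    x = pt.tensor("x", shape=(2, 3))
    y = pt.tensor("y", shape=(3, 4))
    z = pt.tensor("z", shape=(4, 5))
    assert pt.einsum("ij,jk,kl->il", x, y, z).owner.op.optimize

    # One unknown dimension: the fallback (1, 0) path is used, and with
    # three operands an ordering choice existed but could not be made,
    # so the renamed attribute reads False.
    w = pt.tensor("w", shape=(None, 3))
    assert not pt.einsum("ij,jk,kl->il", w, y, z).owner.op.optimize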

tests/tensor/test_einsum.py

Lines changed: 1 addition & 1 deletion
@@ -127,7 +127,7 @@ def test_einsum_signatures(static_shape_known, signature):
         for name, static_shape in zip(ascii_lowercase, static_shapes)
     ]
     out = pt.einsum(signature, *operands)
-    assert out.owner.op.optimized == static_shape_known or len(operands) <= 2
+    assert out.owner.op.optimize == static_shape_known or len(operands) <= 2

     rng = np.random.default_rng(37)
     test_values = [rng.normal(size=shape) for shape in shapes]
