
Commit e42f0fd

remove debug flag
1 parent c1bea19 commit e42f0fd

1 file changed: 3 additions & 15 deletions


pytensor/tensor/optimize.py

Lines changed: 3 additions & 15 deletions
@@ -88,7 +88,7 @@ def make_node(self, *inputs):
 
 
 class MinimizeOp(ScipyWrapperOp):
-    __props__ = ("method", "jac", "hess", "hessp", "debug")
+    __props__ = ("method", "jac", "hess", "hessp")
 
     def __init__(
         self,
@@ -100,7 +100,6 @@ def __init__(
         hess: bool = False,
         hessp: bool = False,
         optimizer_kwargs: dict | None = None,
-        debug: bool = False,
     ):
         self.fgraph = FunctionGraph([x, *args], [objective])
 
@@ -116,7 +115,6 @@ def __init__(
 
         self.method = method
         self.optimizer_kwargs = optimizer_kwargs if optimizer_kwargs is not None else {}
-        self.debug = debug
         self._fn = None
         self._fn_wrapped = None
 
@@ -187,7 +185,6 @@ def minimize(
     x: TensorVariable,
     method: str = "BFGS",
     jac: bool = True,
-    debug: bool = False,
     optimizer_kwargs: dict | None = None,
 ):
     """
@@ -209,9 +206,6 @@ def minimize(
         Whether to compute and use the gradient of the objective function with respect to x for optimization.
         Default is True.
 
-    debug : bool, optional
-        If True, prints raw scipy result after optimization. Default is False.
-
     optimizer_kwargs
         Additional keyword arguments to pass to scipy.optimize.minimize
 
@@ -233,15 +227,14 @@ def minimize(
         objective=objective,
         method=method,
         jac=jac,
-        debug=debug,
         optimizer_kwargs=optimizer_kwargs,
     )
 
     return minimize_op(x, *args)
 
 
 class RootOp(ScipyWrapperOp):
-    __props__ = ("method", "jac", "debug")
+    __props__ = ("method", "jac")
 
     def __init__(
         self,
@@ -251,7 +244,6 @@ def __init__(
         method: str = "hybr",
         jac: bool = True,
         optimizer_kwargs: dict | None = None,
-        debug: bool = False,
     ):
         self.fgraph = FunctionGraph([variables, *args], [equations])
 
@@ -263,7 +255,6 @@ def __init__(
 
         self.method = method
         self.optimizer_kwargs = optimizer_kwargs if optimizer_kwargs is not None else {}
-        self.debug = debug
         self._fn = None
         self._fn_wrapped = None
 
@@ -312,7 +303,6 @@ def root(
     variables: TensorVariable,
     method: str = "hybr",
     jac: bool = True,
-    debug: bool = False,
 ):
     """Find roots of a system of equations using scipy.optimize.root."""
 
@@ -322,9 +312,7 @@ def root(
         if (arg is not variables and not isinstance(arg, Constant))
     ]
 
-    root_op = RootOp(
-        variables, *args, equations=equations, method=method, jac=jac, debug=debug
-    )
+    root_op = RootOp(variables, *args, equations=equations, method=method, jac=jac)
 
     return root_op(variables, *args)
 
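For orientation, here is a minimal, hedged sketch of what call sites look like once the debug flag is gone. It sticks to the arguments that remain visible in the hunks above; the assumption that the objective/equations expression is the first positional argument of minimize and root comes from surrounding context rather than these hunks, the variable names are illustrative, and any scipy-level verbosity the old flag provided would presumably be requested through optimizer_kwargs instead (e.g. scipy's disp option), which this commit does not itself add.

import pytensor.tensor as pt
from pytensor.tensor.optimize import minimize, root

x = pt.dscalar("x")
a = pt.dscalar("a")

# Symbolic objective (x - a)**2; minimize wraps scipy.optimize.minimize.
objective = (x - a) ** 2
x_star = minimize(
    objective,  # assumed first positional argument (that line is outside the hunks above)
    x,
    method="BFGS",
    jac=True,
    # Verbose scipy output would now go through optimizer_kwargs,
    # e.g. optimizer_kwargs={"options": {"disp": True}}, rather than debug=True.
)

# Symbolic system x**3 - a = 0; root wraps scipy.optimize.root the same way.
equations = x ** 3 - a
x_root = root(equations, x, method="hybr", jac=True)  # argument order assumed as above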