
Some pylint and pyupgrade cleanups #29


Merged
17 commits, merged on Nov 29, 2022
2 changes: 0 additions & 2 deletions doc/conf.py
@@ -1,5 +1,3 @@
# -*- coding: utf-8 -*-
#
# pytensor documentation build configuration file, created by
# sphinx-quickstart on Tue Oct 7 16:34:06 2008.
#
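Side note on this change: the `# -*- coding: utf-8 -*-` cookie is a Python 2 leftover. Since PEP 3120, Python 3 source files are UTF-8 by default, which is why pyupgrade deletes the declaration. A minimal standalone illustration (not from this repo):

```python
# No coding cookie needed: Python 3 parses source as UTF-8 by default.
greeting = "héllo, wörld"  # non-ASCII literals work without a declaration
print(greeting)
```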
1 change: 0 additions & 1 deletion doc/extending/extending_pytensor_solution_1.py
@@ -73,7 +73,6 @@ def grad(self, inputs, output_grads):
import numpy as np

from tests import unittest_tools as utt
from pytensor import function, printing
from pytensor import tensor as at
from pytensor.graph.basic import Apply
from pytensor.graph.op import Op
2 changes: 0 additions & 2 deletions doc/generate_dtype_tensor_table.py
@@ -1,5 +1,3 @@


letters = [
('b', 'int8'),
('w', 'int16'),
6 changes: 1 addition & 5 deletions doc/scripts/docgen.py
@@ -1,8 +1,6 @@

import sys
import os
import shutil
import inspect
import getopt
from collections import defaultdict

@@ -16,7 +14,7 @@
sys.argv[1:],
'o:f:',
['rst', 'help', 'nopdf', 'cache', 'check', 'test'])
options.update(dict([x, y or True] for x, y in opts))
options.update({x: y or True for x, y in opts})
if options['--help']:
print(f'Usage: {sys.argv[0]} [OPTIONS] [files...]')
print(' -o <dir>: output the html files in the specified dir')
@@ -100,8 +98,6 @@ def call_sphinx(builder, workdir):
shutil.rmtree(workdir)
except OSError as e:
print('OSError:', e)
except IOError as e:
print('IOError:', e)

if options['--test']:
mkdir("doc")
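Two of the cleanups in this file, shown as a standalone sketch with made-up option values (not the script's real ones):

```python
opts = [("-o", "html"), ("--cache", "")]

# Build the dict directly with a comprehension instead of feeding
# dict() a generator of [key, value] pairs.
old = dict([x, y or True] for x, y in opts)
new = {x: y or True for x, y in opts}
assert old == new == {"-o": "html", "--cache": True}

# Since Python 3.3, IOError is an alias of OSError, so the deleted
# `except IOError` clause was unreachable after `except OSError`.
assert IOError is OSError
try:
    open("/no/such/path")
except OSError as e:  # also catches what IOError used to name
    print("OSError:", e)
```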
6 changes: 3 additions & 3 deletions pytensor/_version.py
@@ -105,7 +105,7 @@ def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=
return None, None
else:
if verbose:
print("unable to find command, tried %s" % (commands,))
print(f"unable to find command, tried {commands}")
return None, None
stdout = process.communicate()[0].strip().decode()
if process.returncode != 0:
@@ -155,7 +155,7 @@ def git_get_keywords(versionfile_abs):
# _version.py.
keywords = {}
try:
with open(versionfile_abs, "r") as fobj:
with open(versionfile_abs) as fobj:
for line in fobj:
if line.strip().startswith("git_refnames ="):
mo = re.search(r'=\s*"(.*)"', line)
@@ -351,7 +351,7 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command):
if verbose:
fmt = "tag '%s' doesn't start with prefix '%s'"
print(fmt % (full_tag, tag_prefix))
pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % (
pieces["error"] = "tag '{}' doesn't start with prefix '{}'".format(
full_tag,
tag_prefix,
)
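For context, the three pyupgrade rewrites in this file side by side, with toy values:

```python
commands = ["git", "git.cmd"]

# 1. %-interpolation becomes an f-string with identical output.
assert ("unable to find command, tried %s" % (commands,)
        == f"unable to find command, tried {commands}")

# 2. "r" is open()'s default mode, so the explicit argument is noise.
with open(__file__) as fobj:  # same as open(__file__, "r")
    fobj.readline()

# 3. Positional %s pairs become empty-brace str.format placeholders.
full_tag, tag_prefix = "v1.0", "rel-"
msg = "tag '{}' doesn't start with prefix '{}'".format(full_tag, tag_prefix)
assert msg == "tag 'v1.0' doesn't start with prefix 'rel-'"
```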
12 changes: 6 additions & 6 deletions pytensor/compile/debugmode.py
@@ -1604,7 +1604,7 @@ def f():
# storage will be None
if thunk_py:
_logger.debug(
f"{i} - running thunk_py with None as " "output storage"
f"{i} - running thunk_py with None as output storage"
)
try:
thunk_py()
@@ -2063,15 +2063,15 @@ def __init__(
infolog = StringIO()
print("Optimization process is unstable...", file=infolog)
print(
" (HINT: Ops that the nodes point to must compare " "equal)",
" (HINT: Ops that the nodes point to must compare equal)",
file=infolog,
)
print(
"(event index) (one event trace) (other event " "trace)",
"(event index) (one event trace) (other event trace)",
file=infolog,
)
print(
"-------------------------------------------------" "----",
"-----------------------------------------------------",
file=infolog,
)
for j in range(max(len(li), len(l0))):
@@ -2292,7 +2292,7 @@ def __init__(

if not isinstance(linker, _DummyLinker):
raise Exception(
"DebugMode can only use its own linker! You " "should not provide one.",
"DebugMode can only use its own linker! You should not provide one.",
linker,
)

@@ -2318,7 +2318,7 @@ def __init__(
self.require_matching_strides = require_matching_strides

if not (self.check_c_code or self.check_py_code):
raise ValueError("DebugMode has to check at least one of c and py " "code")
raise ValueError("DebugMode has to check at least one of c and py code")

def __str__(self):
return "DebugMode(linker={}, optimizer={})".format(
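Nearly every hunk in this file (and the similar one-line changes in the next several files) collapses implicit string concatenation: adjacent literals that black left behind when re-wrapping long lines. A quick standalone check of why the join is behavior-preserving:

```python
# Adjacent string literals are concatenated at compile time, so the
# split form and the joined form are the same constant.
split = "DebugMode has to check at least one of c and py " "code"
joined = "DebugMode has to check at least one of c and py code"
assert split == joined
```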
4 changes: 1 addition & 3 deletions pytensor/compile/function/__init__.py
@@ -300,9 +300,7 @@ def opt_log1p(node):
if uses_tuple:
# we must use old semantics in this case.
if profile:
raise NotImplementedError(
"profiling not supported in old-style " "function"
)
raise NotImplementedError("profiling not supported in old-style function")
if uses_updates or uses_givens:
raise NotImplementedError(
"In() instances and tuple inputs trigger the old "
2 changes: 1 addition & 1 deletion pytensor/compile/function/pfunc.py
@@ -181,7 +181,7 @@ def clone_inputs(i):
raise TypeError("update target must be a SharedVariable", store_into)
if store_into in update_d:
raise ValueError(
"this shared variable already has an update " "expression",
"this shared variable already has an update expression",
(store_into, update_d[store_into]),
)

2 changes: 1 addition & 1 deletion pytensor/compile/nanguardmode.py
@@ -225,7 +225,7 @@ def do_check_on(value, nd, var=None):
print(pytensor.printing.debugprint(nd, file="str"), file=sio)
else:
print(
"NanGuardMode found an error in an input of the " "graph.",
"NanGuardMode found an error in an input of the graph.",
file=sio,
)
# Add the stack trace
2 changes: 1 addition & 1 deletion pytensor/compile/profiling.py
@@ -1308,7 +1308,7 @@ def compute_max_stats(running_memory, stats):

if len(fct_memory) > 1:
print(
"Memory Profile (the max between all functions in " "that profile)",
"Memory Profile (the max between all functions in that profile)",
file=file,
)
else:
2 changes: 1 addition & 1 deletion pytensor/compile/sharedvalue.py
@@ -72,7 +72,7 @@ def __init__(
self.container = container
if (value is not None) or (strict is not None):
raise TypeError(
"value and strict are ignored if you pass " "a container here"
"value and strict are ignored if you pass a container here"
)
else:
self.container = Container(
8 changes: 4 additions & 4 deletions pytensor/configdefaults.py
@@ -585,7 +585,7 @@ def add_compile_configvars():

config.add(
"cmodule__age_thresh_use",
"In seconds. The time after which " "PyTensor won't reuse a compile c module.",
"In seconds. The time after which PyTensor won't reuse a compile c module.",
# 24 days
IntParam(60 * 60 * 24 * 24, mutable=False),
in_c_key=False,
@@ -1004,7 +1004,7 @@ def add_testvalue_and_checking_configvars():

config.add(
"on_shape_error",
"warn: print a warning and use the default" " value. raise: raise an error",
"warn: print a warning and use the default value. raise: raise an error",
EnumStr("warn", ["raise"]),
in_c_key=False,
)
@@ -1149,14 +1149,14 @@ def add_metaopt_configvars():

config.add(
"metaopt__optimizer_excluding",
("exclude optimizers with these tags. " "Separate tags with ':'."),
("exclude optimizers with these tags. Separate tags with ':'."),
StrParam(""),
in_c_key=False,
)

config.add(
"metaopt__optimizer_including",
("include optimizers with these tags. " "Separate tags with ':'."),
("include optimizers with these tags. Separate tags with ':'."),
StrParam(""),
in_c_key=False,
)
5 changes: 1 addition & 4 deletions pytensor/configparser.py
@@ -125,10 +125,7 @@ def get_config_hash(self):
)
return hash_from_code(
"\n".join(
[
"{} = {}".format(cv.name, cv.__get__(self, self.__class__))
for cv in all_opts
]
[f"{cv.name} = {cv.__get__(self, self.__class__)}" for cv in all_opts]
)
)

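The comprehension above is the usual str.format-to-f-string rewrite. An equivalent toy version, using a hypothetical `ConfigVar` stand-in rather than the real descriptor-based config entries:

```python
class ConfigVar:
    """Hypothetical stand-in; the real entries are descriptors."""
    def __init__(self, name, value):
        self.name, self.value = name, value

all_opts = [ConfigVar("mode", "FAST_RUN"), ConfigVar("device", "cpu")]
old = "\n".join(["{} = {}".format(cv.name, cv.value) for cv in all_opts])
new = "\n".join([f"{cv.name} = {cv.value}" for cv in all_opts])
assert old == new == "mode = FAST_RUN\ndevice = cpu"
```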
38 changes: 14 additions & 24 deletions pytensor/gradient.py
@@ -91,10 +91,8 @@ def grad_not_implemented(op, x_pos, x, comment=""):

return (
NullType(
(
"This variable is Null because the grad method for "
f"input {x_pos} ({x}) of the {op} op is not implemented. {comment}"
)
"This variable is Null because the grad method for "
f"input {x_pos} ({x}) of the {op} op is not implemented. {comment}"
)
)()

@@ -114,10 +112,8 @@ def grad_undefined(op, x_pos, x, comment=""):

return (
NullType(
(
"This variable is Null because the grad method for "
f"input {x_pos} ({x}) of the {op} op is not implemented. {comment}"
)
"This variable is Null because the grad method for "
f"input {x_pos} ({x}) of the {op} op is not implemented. {comment}"
)
)()

@@ -1275,14 +1271,12 @@ def try_to_copy_if_needed(var):
# We therefore don't allow it because its usage has become
# so muddied.
raise TypeError(
(
f"{node.op}.grad returned None for a gradient term, "
"this is prohibited. Instead of None,"
"return zeros_like(input), disconnected_type(),"
" or a NullType variable such as those made with "
"the grad_undefined or grad_unimplemented helper "
"functions."
)
f"{node.op}.grad returned None for a gradient term, "
"this is prohibited. Instead of None,"
"return zeros_like(input), disconnected_type(),"
" or a NullType variable such as those made with "
"the grad_undefined or grad_unimplemented helper "
"functions."
)

# Check that the gradient term for this input
@@ -1402,10 +1396,8 @@ def access_grad_cache(var):

if hasattr(var, "ndim") and term.ndim != var.ndim:
raise ValueError(
(
f"{node.op}.grad returned a term with"
f" {int(term.ndim)} dimensions, but {int(var.ndim)} are required."
)
f"{node.op}.grad returned a term with"
f" {int(term.ndim)} dimensions, but {int(var.ndim)} are required."
)

terms.append(term)
@@ -1767,10 +1759,8 @@ def verify_grad(
for i, p in enumerate(pt):
if p.dtype not in ("float16", "float32", "float64"):
raise TypeError(
(
"verify_grad can work only with floating point "
f'inputs, but input {i} has dtype "{p.dtype}".'
)
"verify_grad can work only with floating point "
f'inputs, but input {i} has dtype "{p.dtype}".'
)

_type_tol = dict( # relative error tolerances for different types
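All the gradient.py hunks are one pattern: dropping a redundant pair of parentheses around a multi-line string argument. The call's own parentheses already allow the wrap, so the inner pair adds nothing. Sketch with made-up values:

```python
op, x_pos, x = "Sum", 0, "x0"

# Before: an extra grouping pair around the concatenated literals.
before = (
    (
        "This variable is Null because the grad method for "
        f"input {x_pos} ({x}) of the {op} op is not implemented."
    )
)
# After: the literals concatenate identically without it.
after = (
    "This variable is Null because the grad method for "
    f"input {x_pos} ({x}) of the {op} op is not implemented."
)
assert before == after
```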
8 changes: 3 additions & 5 deletions pytensor/graph/basic.py
Original file line number Diff line number Diff line change
@@ -1184,11 +1184,9 @@ def clone_replace(
items = []
else:
raise ValueError(
(
"replace is neither a dictionary, list, "
f"tuple or None ! The value provided is {replace},"
f"of type {type(replace)}"
)
"replace is neither a dictionary, list, "
f"tuple or None ! The value provided is {replace},"
f"of type {type(replace)}"
)
tmp_replace = [(x, x.type()) for x, y in items]
new_replace = [(x, y) for ((_, x), (_, y)) in zip(tmp_replace, items)]
2 changes: 1 addition & 1 deletion pytensor/graph/features.py
Original file line number Diff line number Diff line change
@@ -679,7 +679,7 @@ def on_detach(self, fgraph):
"""
if self.fgraph is not fgraph:
raise Exception(
"This NodeFinder instance was not attached to the" " provided fgraph."
"This NodeFinder instance was not attached to the provided fgraph."
)
self.fgraph = None
del fgraph.get_nodes
7 changes: 3 additions & 4 deletions pytensor/graph/op.py
Original file line number Diff line number Diff line change
@@ -10,7 +10,6 @@
List,
Optional,
Sequence,
Text,
Tuple,
TypeVar,
Union,
@@ -496,7 +495,7 @@ def prepare_node(
node: Apply,
storage_map: Optional[StorageMapType],
compute_map: Optional[ComputeMapType],
impl: Optional[Text],
impl: Optional[str],
) -> None:
"""Make any special modifications that the `Op` needs before doing :meth:`Op.make_thunk`.

@@ -573,7 +572,7 @@ def make_thunk(
storage_map: StorageMapType,
compute_map: ComputeMapType,
no_recycling: List[Variable],
impl: Optional[Text] = None,
impl: Optional[str] = None,
) -> ThunkType:
r"""Create a thunk.

@@ -676,7 +675,7 @@ def get_test_value(v: Any) -> Any:
return v.get_test_value()


def missing_test_message(msg: Text) -> None:
def missing_test_message(msg: str) -> None:
"""Display a message saying that some test_value is missing.

This uses the appropriate form based on ``config.compute_test_value``:
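`typing.Text` exists only as a Python 2 str/unicode compatibility alias; in Python 3 it is literally `str`, so pyupgrade substitutes the builtin. Standalone check:

```python
import typing

# Text is the same object as str, not a distinct type.
assert typing.Text is str

# Annotating with the builtin is equivalent and clearer:
def missing_test_message(msg: str) -> None:
    print(msg)

missing_test_message("no test value")
```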
2 changes: 1 addition & 1 deletion pytensor/graph/rewriting/utils.py
Original file line number Diff line number Diff line change
@@ -114,7 +114,7 @@ def is_same_graph_with_merge(var1, var2, givens=None):
# We also need to make sure we replace a Variable if it is present in
# `givens`.
vars_replaced = [givens.get(v, v) for v in fgraph.outputs]
o1, o2 = [v.owner for v in vars_replaced]
o1, o2 = (v.owner for v in vars_replaced)
if o1 is None and o2 is None:
# Comparing two single-Variable graphs: they are equal if they are
# the same Variable.
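The one-liner above swaps a list comprehension for a generator expression on the right-hand side of an unpacking assignment. Unpacking consumes the iterable either way, so the intermediate list was wasted work. Toy version:

```python
vars_replaced = ["apple", "banana"]

# The generator form skips building a throwaway two-element list.
o1, o2 = (v.upper() for v in vars_replaced)
assert (o1, o2) == ("APPLE", "BANANA")
```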
8 changes: 4 additions & 4 deletions pytensor/graph/type.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
from abc import abstractmethod
from typing import Any, Generic, Optional, Text, Tuple, TypeVar, Union
from typing import Any, Generic, Optional, Tuple, TypeVar, Union

from typing_extensions import TypeAlias

@@ -188,7 +188,7 @@ def is_valid_value(self, data: D, strict: bool = True) -> bool:
except (TypeError, ValueError):
return False

def make_variable(self, name: Optional[Text] = None) -> variable_type:
def make_variable(self, name: Optional[str] = None) -> variable_type:
"""Return a new `Variable` instance of this `Type`.

Parameters
@@ -199,7 +199,7 @@ def make_variable(self, name: Optional[Text] = None) -> variable_type:
"""
return self.variable_type(self, None, name=name)

def make_constant(self, value: D, name: Optional[Text] = None) -> constant_type:
def make_constant(self, value: D, name: Optional[str] = None) -> constant_type:
"""Return a new `Constant` instance of this `Type`.

Parameters
@@ -216,7 +216,7 @@ def clone(self, *args, **kwargs) -> "Type":
"""Clone a copy of this type with the given arguments/keyword values, if any."""
return type(self)(*args, **kwargs)

def __call__(self, name: Optional[Text] = None) -> variable_type:
def __call__(self, name: Optional[str] = None) -> variable_type:
"""Return a new `Variable` instance of Type `self`.

Parameters
4 changes: 1 addition & 3 deletions pytensor/graph/utils.py
Original file line number Diff line number Diff line change
@@ -245,9 +245,7 @@ def __str__(self):
def __str__(self):
return "{}{{{}}}".format(
self.__class__.__name__,
", ".join(
"{}={!r}".format(p, getattr(self, p)) for p in props
),
", ".join(f"{p}={getattr(self, p)!r}" for p in props),
)

dct["__str__"] = __str__
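One last pattern: `"{}={!r}".format(...)` becomes an f-string, and conversion flags like `!r` carry over unchanged. Standalone check with toy props:

```python
props = {"ndim": 2, "name": "x"}
old = ", ".join("{}={!r}".format(p, v) for p, v in props.items())
new = ", ".join(f"{p}={v!r}" for p, v in props.items())
assert old == new == "ndim=2, name='x'"
```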