Skip to content

Commit 3c1a762

Browse files
authored
mypy: use more f-strings (#12714)
Done largely using https://github.com/ikamensh/flynt. I went over this pretty closely since I wasn't familiar with the tool; I made a couple of changes and left out a couple of instances which were harder to parse.
1 parent fc335cb commit 3c1a762

Some content is hidden

Large commits have some content hidden by default. Use the search box below for content that may be hidden.

47 files changed

+190
-236
lines changed

mypy/build.py

Lines changed: 20 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -665,7 +665,7 @@ def dump_stats(self) -> None:
665665
if self.options.dump_build_stats:
666666
print("Stats:")
667667
for key, value in sorted(self.stats_summary().items()):
668-
print("{:24}{}".format(key + ":", value))
668+
print(f"{key + ':':24}{value}")
669669

670670
def use_fine_grained_cache(self) -> bool:
671671
return self.cache_enabled and self.options.use_fine_grained_cache
@@ -1083,7 +1083,7 @@ def read_deps_cache(manager: BuildManager,
10831083
except FileNotFoundError:
10841084
matched = False
10851085
if not matched:
1086-
manager.log('Invalid or missing fine-grained deps cache: {}'.format(meta['path']))
1086+
manager.log(f"Invalid or missing fine-grained deps cache: {meta['path']}")
10871087
return None
10881088

10891089
return module_deps_metas
@@ -1485,8 +1485,7 @@ def write_cache(id: str, path: str, tree: MypyFile,
14851485

14861486
# Obtain file paths.
14871487
meta_json, data_json, _ = get_cache_names(id, path, manager.options)
1488-
manager.log('Writing {} {} {} {}'.format(
1489-
id, path, meta_json, data_json))
1488+
manager.log(f'Writing {id} {path} {meta_json} {data_json}')
14901489

14911490
# Update tree.path so that in bazel mode it's made relative (since
14921491
# sometimes paths leak out).
@@ -1590,7 +1589,7 @@ def delete_cache(id: str, path: str, manager: BuildManager) -> None:
15901589
# tracked separately.
15911590
meta_path, data_path, _ = get_cache_names(id, path, manager.options)
15921591
cache_paths = [meta_path, data_path]
1593-
manager.log('Deleting {} {} {}'.format(id, path, " ".join(x for x in cache_paths if x)))
1592+
manager.log(f"Deleting {id} {path} {' '.join(x for x in cache_paths if x)}")
15941593

15951594
for filename in cache_paths:
15961595
try:
@@ -2490,7 +2489,7 @@ def find_module_and_diagnose(manager: BuildManager,
24902489
and not options.custom_typeshed_dir):
24912490
raise CompileError([
24922491
f'mypy: "{os.path.relpath(result)}" shadows library module "{id}"',
2493-
'note: A user-defined top-level module with name "%s" is not supported' % id
2492+
f'note: A user-defined top-level module with name "{id}" is not supported'
24942493
])
24952494
return (result, follow_imports)
24962495
else:
@@ -2523,7 +2522,7 @@ def find_module_and_diagnose(manager: BuildManager,
25232522
# If we can't find a root source it's always fatal.
25242523
# TODO: This might hide non-fatal errors from
25252524
# root sources processed earlier.
2526-
raise CompileError(["mypy: can't find module '%s'" % id])
2525+
raise CompileError([f"mypy: can't find module '{id}'"])
25272526
else:
25282527
raise ModuleNotFound
25292528

@@ -2670,21 +2669,21 @@ def log_configuration(manager: BuildManager, sources: List[BuildSource]) -> None
26702669
]
26712670

26722671
for conf_name, conf_value in configuration_vars:
2673-
manager.log("{:24}{}".format(conf_name + ":", conf_value))
2672+
manager.log(f"{conf_name + ':':24}{conf_value}")
26742673

26752674
for source in sources:
2676-
manager.log("{:24}{}".format("Found source:", source))
2675+
manager.log(f"{'Found source:':24}{source}")
26772676

26782677
# Complete list of searched paths can get very long, put them under TRACE
26792678
for path_type, paths in manager.search_paths._asdict().items():
26802679
if not paths:
2681-
manager.trace("No %s" % path_type)
2680+
manager.trace(f"No {path_type}")
26822681
continue
26832682

2684-
manager.trace("%s:" % path_type)
2683+
manager.trace(f"{path_type}:")
26852684

26862685
for pth in paths:
2687-
manager.trace(" %s" % pth)
2686+
manager.trace(f" {pth}")
26882687

26892688

26902689
# The driver
@@ -2720,7 +2719,7 @@ def dispatch(sources: List[BuildSource],
27202719
if not graph:
27212720
print("Nothing to do?!", file=stdout)
27222721
return graph
2723-
manager.log("Loaded graph with %d nodes (%.3f sec)" % (len(graph), t1 - t0))
2722+
manager.log(f"Loaded graph with {len(graph)} nodes ({t1 - t0:.3f} sec)")
27242723
if manager.options.dump_graph:
27252724
dump_graph(graph, stdout)
27262725
return graph
@@ -3009,7 +3008,7 @@ def process_graph(graph: Graph, manager: BuildManager) -> None:
30093008
scc.append('builtins')
30103009
if manager.options.verbosity >= 2:
30113010
for id in scc:
3012-
manager.trace("Priorities for %s:" % id,
3011+
manager.trace(f"Priorities for {id}:",
30133012
" ".join("%s:%d" % (x, graph[id].priorities[x])
30143013
for x in graph[id].dependencies
30153014
if x in ascc and x in graph[id].priorities))
@@ -3059,19 +3058,19 @@ def process_graph(graph: Graph, manager: BuildManager) -> None:
30593058
# (on some platforms).
30603059
if oldest_in_scc < newest_in_deps:
30613060
fresh = False
3062-
fresh_msg = "out of date by %.0f seconds" % (newest_in_deps - oldest_in_scc)
3061+
fresh_msg = f"out of date by {newest_in_deps - oldest_in_scc:.0f} seconds"
30633062
else:
30643063
fresh_msg = "fresh"
30653064
elif undeps:
3066-
fresh_msg = "stale due to changed suppression (%s)" % " ".join(sorted(undeps))
3065+
fresh_msg = f"stale due to changed suppression ({' '.join(sorted(undeps))})"
30673066
elif stale_scc:
30683067
fresh_msg = "inherently stale"
30693068
if stale_scc != ascc:
3070-
fresh_msg += " (%s)" % " ".join(sorted(stale_scc))
3069+
fresh_msg += f" ({' '.join(sorted(stale_scc))})"
30713070
if stale_deps:
3072-
fresh_msg += " with stale deps (%s)" % " ".join(sorted(stale_deps))
3071+
fresh_msg += f" with stale deps ({' '.join(sorted(stale_deps))})"
30733072
else:
3074-
fresh_msg = "stale due to deps (%s)" % " ".join(sorted(stale_deps))
3073+
fresh_msg = f"stale due to deps ({' '.join(sorted(stale_deps))})"
30753074

30763075
# Initialize transitive_error for all SCC members from union
30773076
# of transitive_error of dependencies.
@@ -3371,7 +3370,7 @@ def topsort(data: Dict[T, Set[T]]) -> Iterable[Set[T]]:
33713370
data = {item: (dep - ready)
33723371
for item, dep in data.items()
33733372
if item not in ready}
3374-
assert not data, "A cyclic dependency exists amongst %r" % data
3373+
assert not data, f"A cyclic dependency exists amongst {data!r}"
33753374

33763375

33773376
def missing_stubs_file(cache_dir: str) -> str:
@@ -3388,7 +3387,7 @@ def record_missing_stub_packages(cache_dir: str, missing_stub_packages: Set[str]
33883387
if missing_stub_packages:
33893388
with open(fnam, 'w') as f:
33903389
for pkg in sorted(missing_stub_packages):
3391-
f.write('%s\n' % pkg)
3390+
f.write(f'{pkg}\n')
33923391
else:
33933392
if os.path.isfile(fnam):
33943393
os.remove(fnam)

mypy/checker.py

Lines changed: 3 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -886,7 +886,7 @@ def check_func_def(self, defn: FuncItem, typ: CallableType, name: Optional[str])
886886
self.msg.unimported_type_becomes_any("Return type", ret_type, fdef)
887887
for idx, arg_type in enumerate(fdef.type.arg_types):
888888
if has_any_from_unimported_type(arg_type):
889-
prefix = f"Argument {idx + 1} to \"{fdef.name}\""
889+
prefix = f'Argument {idx + 1} to "{fdef.name}"'
890890
self.msg.unimported_type_becomes_any(prefix, arg_type, fdef)
891891
check_for_explicit_any(fdef.type, self.options, self.is_typeshed_stub,
892892
self.msg, context=fdef)
@@ -1918,9 +1918,7 @@ def check_final_enum(self, defn: ClassDef, base: TypeInfo) -> None:
19181918
for sym in base.names.values():
19191919
if self.is_final_enum_value(sym):
19201920
self.fail(
1921-
'Cannot extend enum with existing members: "{}"'.format(
1922-
base.name,
1923-
),
1921+
f'Cannot extend enum with existing members: "{base.name}"',
19241922
defn,
19251923
)
19261924
break
@@ -2571,7 +2569,7 @@ def check_compatibility_super(self, lvalue: RefExpr, lvalue_type: Optional[Type]
25712569
return self.check_subtype(compare_type, base_type, rvalue,
25722570
message_registry.INCOMPATIBLE_TYPES_IN_ASSIGNMENT,
25732571
'expression has type',
2574-
'base class "%s" defined the type as' % base.name,
2572+
f'base class "{base.name}" defined the type as',
25752573
code=codes.ASSIGNMENT)
25762574
return True
25772575

mypy/checkexpr.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -263,7 +263,7 @@ def analyze_ref_expr(self, e: RefExpr, lvalue: bool = False) -> Type:
263263
result = self.object_type()
264264
else:
265265
if isinstance(node, PlaceholderNode):
266-
assert False, 'PlaceholderNode %r leaked to checker' % node.fullname
266+
assert False, f'PlaceholderNode {node.fullname!r} leaked to checker'
267267
# Unknown reference; use any type implicitly to avoid
268268
# generating extra type errors.
269269
result = AnyType(TypeOfAny.from_error)

mypy/checkstrformat.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -718,7 +718,7 @@ def check_mapping_str_interpolation(self, specifiers: List[ConversionSpecifier],
718718
self.chk.check_subtype(rep_type, expected_type, replacements,
719719
message_registry.INCOMPATIBLE_TYPES_IN_STR_INTERPOLATION,
720720
'expression has type',
721-
'placeholder with key \'%s\' has type' % specifier.key,
721+
f'placeholder with key \'{specifier.key}\' has type',
722722
code=codes.STRING_FORMATTING)
723723
if specifier.conv_type == 's':
724724
self.check_s_special_cases(expr, rep_type, expr)

mypy/config_parser.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -211,10 +211,10 @@ def parse_config_file(options: Options, set_strict_flags: Callable[[], None],
211211

212212
if 'mypy' not in parser:
213213
if filename or file_read not in defaults.SHARED_CONFIG_FILES:
214-
print("%s: No [mypy] section in config file" % file_read, file=stderr)
214+
print(f"{file_read}: No [mypy] section in config file", file=stderr)
215215
else:
216216
section = parser['mypy']
217-
prefix = '{}: [{}]: '.format(file_read, 'mypy')
217+
prefix = f"{file_read}: [mypy]: "
218218
updates, report_dirs = parse_section(
219219
prefix, options, set_strict_flags, section, config_types, stderr)
220220
for k, v in updates.items():
@@ -322,7 +322,7 @@ def destructure_overrides(toml_data: Dict[str, Any]) -> Dict[str, Any]:
322322
for module in modules:
323323
module_overrides = override.copy()
324324
del module_overrides['module']
325-
old_config_name = 'mypy-%s' % module
325+
old_config_name = f'mypy-{module}'
326326
if old_config_name not in result:
327327
result[old_config_name] = module_overrides
328328
else:
@@ -447,7 +447,7 @@ def convert_to_boolean(value: Optional[Any]) -> bool:
447447
if not isinstance(value, str):
448448
value = str(value)
449449
if value.lower() not in configparser.RawConfigParser.BOOLEAN_STATES:
450-
raise ValueError('Not a boolean: %s' % value)
450+
raise ValueError(f'Not a boolean: {value}')
451451
return configparser.RawConfigParser.BOOLEAN_STATES[value.lower()]
452452

453453

@@ -552,7 +552,7 @@ def get_config_module_names(filename: Optional[str], modules: List[str]) -> str:
552552
return ''
553553

554554
if not is_toml(filename):
555-
return ", ".join("[mypy-%s]" % module for module in modules)
555+
return ", ".join(f"[mypy-{module}]" for module in modules)
556556

557557
return "module = ['%s']" % ("', '".join(sorted(modules)))
558558

mypy/dmypy/client.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -273,7 +273,7 @@ def do_run(args: argparse.Namespace) -> None:
273273
response = request(args.status_file, 'run', version=__version__, args=args.flags)
274274
# If the daemon signals that a restart is necessary, do it
275275
if 'restart' in response:
276-
print('Restarting: {}'.format(response['restart']))
276+
print(f"Restarting: {response['restart']}")
277277
restart_server(args, allow_sources=True)
278278
response = request(args.status_file, 'run', version=__version__, args=args.flags)
279279

@@ -300,7 +300,7 @@ def do_status(args: argparse.Namespace) -> None:
300300
if args.verbose or 'error' in response:
301301
show_stats(response)
302302
if 'error' in response:
303-
fail("Daemon is stuck; consider %s kill" % sys.argv[0])
303+
fail(f"Daemon is stuck; consider {sys.argv[0]} kill")
304304
print("Daemon is up and running")
305305

306306

@@ -311,7 +311,7 @@ def do_stop(args: argparse.Namespace) -> None:
311311
response = request(args.status_file, 'stop', timeout=5)
312312
if 'error' in response:
313313
show_stats(response)
314-
fail("Daemon is stuck; consider %s kill" % sys.argv[0])
314+
fail(f"Daemon is stuck; consider {sys.argv[0]} kill")
315315
else:
316316
print("Daemon stopped")
317317

@@ -389,7 +389,7 @@ def check_output(response: Dict[str, Any], verbose: bool,
389389
try:
390390
out, err, status_code = response['out'], response['err'], response['status']
391391
except KeyError:
392-
fail("Response: %s" % str(response))
392+
fail(f"Response: {str(response)}")
393393
sys.stdout.write(out)
394394
sys.stdout.flush()
395395
sys.stderr.write(err)

mypy/dmypy_server.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -264,7 +264,7 @@ def run_command(self, command: str, data: Dict[str, object]) -> Dict[str, object
264264
key = 'cmd_' + command
265265
method = getattr(self.__class__, key, None)
266266
if method is None:
267-
return {'error': "Unrecognized command '%s'" % command}
267+
return {'error': f"Unrecognized command '{command}'"}
268268
else:
269269
if command not in {'check', 'recheck', 'run'}:
270270
# Only the above commands use some error formatting.

mypy/dmypy_util.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -27,5 +27,5 @@ def receive(connection: IPCBase) -> Any:
2727
except Exception as e:
2828
raise OSError("Data received is not valid JSON") from e
2929
if not isinstance(data, dict):
30-
raise OSError("Data received is not a dict (%s)" % str(type(data)))
30+
raise OSError(f"Data received is not a dict ({type(data)})")
3131
return data

mypy/fastparse.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -56,7 +56,7 @@
5656
if sys.version_info >= (3, 8):
5757
import ast as ast3
5858
assert 'kind' in ast3.Constant._fields, \
59-
"This 3.8.0 alpha (%s) is too old; 3.8.0a3 required" % sys.version.split()[0]
59+
f"This 3.8.0 alpha ({sys.version.split()[0]}) is too old; 3.8.0a3 required"
6060
# TODO: Num, Str, Bytes, NameConstant, Ellipsis are deprecated in 3.8.
6161
# TODO: Index, ExtSlice are deprecated in 3.9.
6262
from ast import (

mypy/main.py

Lines changed: 10 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -791,9 +791,9 @@ def add_invertible_flag(flag: str,
791791
description='Generate a report in the specified format.')
792792
for report_type in sorted(defaults.REPORTER_NAMES):
793793
if report_type not in {'memory-xml'}:
794-
report_group.add_argument('--%s-report' % report_type.replace('_', '-'),
794+
report_group.add_argument(f"--{report_type.replace('_', '-')}-report",
795795
metavar='DIR',
796-
dest='special-opts:%s_report' % report_type)
796+
dest=f'special-opts:{report_type}_report')
797797

798798
other_group = parser.add_argument_group(
799799
title='Miscellaneous')
@@ -918,7 +918,7 @@ def add_invertible_flag(flag: str,
918918
# Don't explicitly test if "config_file is not None" for this check.
919919
# This lets `--config-file=` (an empty string) be used to disable all config files.
920920
if config_file and not os.path.exists(config_file):
921-
parser.error("Cannot find config file '%s'" % config_file)
921+
parser.error(f"Cannot find config file '{config_file}'")
922922

923923
options = Options()
924924

@@ -989,8 +989,7 @@ def set_strict_flags() -> None:
989989

990990
invalid_codes = (enabled_codes | disabled_codes) - valid_error_codes
991991
if invalid_codes:
992-
parser.error("Invalid error code(s): %s" %
993-
', '.join(sorted(invalid_codes)))
992+
parser.error(f"Invalid error code(s): {', '.join(sorted(invalid_codes))}")
994993

995994
options.disabled_error_codes |= {error_codes[code] for code in disabled_codes}
996995
options.enabled_error_codes |= {error_codes[code] for code in enabled_codes}
@@ -1090,17 +1089,17 @@ def process_package_roots(fscache: Optional[FileSystemCache],
10901089
package_root = []
10911090
for root in options.package_root:
10921091
if os.path.isabs(root):
1093-
parser.error("Package root cannot be absolute: %r" % root)
1092+
parser.error(f"Package root cannot be absolute: {root!r}")
10941093
drive, root = os.path.splitdrive(root)
10951094
if drive and drive != current_drive:
1096-
parser.error("Package root must be on current drive: %r" % (drive + root))
1095+
parser.error(f"Package root must be on current drive: {drive + root!r}")
10971096
# Empty package root is always okay.
10981097
if root:
10991098
root = os.path.relpath(root) # Normalize the heck out of it.
11001099
if not root.endswith(os.sep):
11011100
root = root + os.sep
11021101
if root.startswith(dotdotslash):
1103-
parser.error("Package root cannot be above current directory: %r" % root)
1102+
parser.error(f"Package root cannot be above current directory: {root!r}")
11041103
if root in trivial_paths:
11051104
root = ''
11061105
package_root.append(root)
@@ -1119,9 +1118,9 @@ def process_cache_map(parser: argparse.ArgumentParser,
11191118
for i in range(0, n, 3):
11201119
source, meta_file, data_file = special_opts.cache_map[i:i + 3]
11211120
if source in options.cache_map:
1122-
parser.error("Duplicate --cache-map source %s)" % source)
1121+
parser.error(f"Duplicate --cache-map source {source})")
11231122
if not source.endswith('.py') and not source.endswith('.pyi'):
1124-
parser.error("Invalid --cache-map source %s (triple[0] must be *.py[i])" % source)
1123+
parser.error(f"Invalid --cache-map source {source} (triple[0] must be *.py[i])")
11251124
if not meta_file.endswith('.meta.json'):
11261125
parser.error("Invalid --cache-map meta_file %s (triple[1] must be *.meta.json)" %
11271126
meta_file)
@@ -1140,7 +1139,7 @@ def maybe_write_junit_xml(td: float, serious: bool, messages: List[str], options
11401139

11411140
def fail(msg: str, stderr: TextIO, options: Options) -> NoReturn:
11421141
"""Fail with a serious error."""
1143-
stderr.write('%s\n' % msg)
1142+
stderr.write(f'{msg}\n')
11441143
maybe_write_junit_xml(0.0, serious=True, messages=[msg], options=options)
11451144
sys.exit(2)
11461145

mypy/memprofile.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -33,23 +33,23 @@ def collect_memory_stats() -> Tuple[Dict[str, int],
3333
n = type(obj).__name__
3434
if hasattr(obj, '__dict__'):
3535
# Keep track of which class a particular __dict__ is associated with.
36-
inferred[id(obj.__dict__)] = '%s (__dict__)' % n
36+
inferred[id(obj.__dict__)] = f'{n} (__dict__)'
3737
if isinstance(obj, (Node, Type)): # type: ignore
3838
if hasattr(obj, '__dict__'):
3939
for x in obj.__dict__.values():
4040
if isinstance(x, list):
4141
# Keep track of which node a list is associated with.
42-
inferred[id(x)] = '%s (list)' % n
42+
inferred[id(x)] = f'{n} (list)'
4343
if isinstance(x, tuple):
4444
# Keep track of which node a list is associated with.
45-
inferred[id(x)] = '%s (tuple)' % n
45+
inferred[id(x)] = f'{n} (tuple)'
4646

4747
for k in get_class_descriptors(type(obj)):
4848
x = getattr(obj, k, None)
4949
if isinstance(x, list):
50-
inferred[id(x)] = '%s (list)' % n
50+
inferred[id(x)] = f'{n} (list)'
5151
if isinstance(x, tuple):
52-
inferred[id(x)] = '%s (tuple)' % n
52+
inferred[id(x)] = f'{n} (tuple)'
5353

5454
freqs: Dict[str, int] = {}
5555
memuse: Dict[str, int] = {}

0 commit comments

Comments
 (0)