
Use builtin generics and PEP 604 for type annotations wherever possible #13427

Merged: 5 commits on Aug 18, 2022
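The whole diff applies one pattern; here is a minimal before/after sketch of it (the function and variable names are invented for illustration and do not appear in the changed files; the new spellings assume Python 3.9+, or `from __future__ import annotations` for the signature annotations on older versions):

    # Before: typing aliases and type comments
    from typing import Dict, List, Optional, Tuple

    def find(names: List[str], limit: Optional[int] = None) -> Tuple[str, int]:
        cache = {}  # type: Dict[str, int]
        ...

    # After: PEP 585 builtin generics, PEP 604 unions, and variable annotations
    def find(names: list[str], limit: int | None = None) -> tuple[str, int]:
        cache: dict[str, int] = {}
        ...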
6 changes: 3 additions & 3 deletions misc/actions_stubs.py
@@ -4,7 +4,7 @@

import os
import shutil
from typing import Any, Tuple
from typing import Any

try:
import click
@@ -20,7 +20,7 @@ def apply_all(
directory: str,
extension: str,
to_extension: str = "",
exclude: Tuple[str] = ("",),
exclude: tuple[str] = ("",),
Review comment (Member):
Not related to your change but pretty sure this should be tuple[str, ...].

Reply (AlexWaygood, Member and PR author, Aug 15, 2022):
That looks highly plausible :) I'd rather not make that change in this PR, though
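For context, a minimal sketch of the distinction the reviewer is pointing at, assuming Python 3.9+ (illustrative only, not part of the diff):

    one_str: tuple[str] = ("a",)             # a tuple of exactly one str
    many_strs: tuple[str, ...] = ("a", "b")  # a tuple of any length, all str

    # The default value ("",) satisfies both spellings, but a caller passing
    # something like ("py", "pyi") would only match tuple[str, ...], which is
    # why that spelling is suggested above.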

recursive: bool = True,
debug: bool = False,
) -> None:
@@ -100,7 +100,7 @@ def main(
directory: str,
extension: str,
to_extension: str,
exclude: Tuple[str],
exclude: tuple[str],
not_recursive: bool,
) -> None:
"""
10 changes: 5 additions & 5 deletions misc/analyze_cache.py
@@ -6,7 +6,7 @@
import os
import os.path
from collections import Counter
from typing import Any, Dict, Iterable, List, Optional
from typing import Any, Dict, Iterable
from typing_extensions import Final, TypeAlias as _TypeAlias

ROOT: Final = ".mypy_cache/3.5"
@@ -75,18 +75,18 @@ def pluck(name: str, chunks: Iterable[JsonDict]) -> Iterable[JsonDict]:
return (chunk for chunk in chunks if chunk[".class"] == name)


def report_counter(counter: Counter, amount: Optional[int] = None) -> None:
def report_counter(counter: Counter, amount: int | None = None) -> None:
for name, count in counter.most_common(amount):
print(f" {count: <8} {name}")
print()


def report_most_common(chunks: List[JsonDict], amount: Optional[int] = None) -> None:
def report_most_common(chunks: list[JsonDict], amount: int | None = None) -> None:
report_counter(Counter(str(chunk) for chunk in chunks), amount)


def compress(chunk: JsonDict) -> JsonDict:
cache = {} # type: Dict[int, JsonDict]
cache: dict[int, JsonDict] = {}
counter = 0

def helper(chunk: Any) -> Any:
@@ -119,7 +119,7 @@ def helper(chunk: Any) -> Any:


def decompress(chunk: JsonDict) -> JsonDict:
cache = {} # type: Dict[int, JsonDict]
cache: dict[int, JsonDict] = {}

def helper(chunk: Any) -> Any:
if not isinstance(chunk, dict):
14 changes: 7 additions & 7 deletions misc/diff-cache.py
@@ -12,7 +12,7 @@
import os
import sys
from collections import defaultdict
from typing import Any, Dict, Optional, Set
from typing import Any

sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

@@ -26,7 +26,7 @@ def make_cache(input_dir: str, sqlite: bool) -> MetadataStore:
return FilesystemMetadataStore(input_dir)


def merge_deps(all: Dict[str, Set[str]], new: Dict[str, Set[str]]) -> None:
def merge_deps(all: dict[str, set[str]], new: dict[str, set[str]]) -> None:
for k, v in new.items():
all.setdefault(k, set()).update(v)

@@ -70,13 +70,13 @@ def main() -> None:
cache1 = make_cache(args.input_dir1, args.sqlite)
cache2 = make_cache(args.input_dir2, args.sqlite)

type_misses: Dict[str, int] = defaultdict(int)
type_hits: Dict[str, int] = defaultdict(int)
type_misses: dict[str, int] = defaultdict(int)
type_hits: dict[str, int] = defaultdict(int)

updates: Dict[str, Optional[str]] = {}
updates: dict[str, str | None] = {}

deps1: Dict[str, Set[str]] = {}
deps2: Dict[str, Set[str]] = {}
deps1: dict[str, set[str]] = {}
deps2: dict[str, set[str]] = {}

misses = hits = 0
cache1_all = list(cache1.list_all())
3 changes: 1 addition & 2 deletions misc/dump-ast.py
@@ -7,15 +7,14 @@

import argparse
import sys
from typing import Tuple

from mypy import defaults
from mypy.errors import CompileError
from mypy.options import Options
from mypy.parse import parse


def dump(fname: str, python_version: Tuple[int, int], quiet: bool = False) -> None:
def dump(fname: str, python_version: tuple[int, int], quiet: bool = False) -> None:
options = Options()
options.python_version = python_version
with open(fname, "rb") as f:
50 changes: 24 additions & 26 deletions misc/incremental_checker.py
@@ -44,7 +44,7 @@
import textwrap
import time
from argparse import ArgumentParser, Namespace, RawDescriptionHelpFormatter
from typing import Any, Dict, List, Optional, Tuple
from typing import Any, Dict, Tuple
from typing_extensions import TypeAlias as _TypeAlias

CACHE_PATH: Final = ".incremental_checker_cache.json"
@@ -66,7 +66,7 @@ def delete_folder(folder_path: str) -> None:
shutil.rmtree(folder_path)


def execute(command: List[str], fail_on_error: bool = True) -> Tuple[str, str, int]:
def execute(command: list[str], fail_on_error: bool = True) -> tuple[str, str, int]:
proc = subprocess.Popen(
" ".join(command), stderr=subprocess.PIPE, stdout=subprocess.PIPE, shell=True
)
@@ -98,7 +98,7 @@ def initialize_repo(repo_url: str, temp_repo_path: str, branch: str) -> None:
execute(["git", "-C", temp_repo_path, "checkout", branch])


def get_commits(repo_folder_path: str, commit_range: str) -> List[Tuple[str, str]]:
def get_commits(repo_folder_path: str, commit_range: str) -> list[tuple[str, str]]:
raw_data, _stderr, _errcode = execute(
["git", "-C", repo_folder_path, "log", "--reverse", "--oneline", commit_range]
)
@@ -109,25 +109,25 @@ def get_commits_starting_at(repo_folder_path: str, start_commit: str) -> List[Tuple[str, str
return output


def get_commits_starting_at(repo_folder_path: str, start_commit: str) -> List[Tuple[str, str]]:
def get_commits_starting_at(repo_folder_path: str, start_commit: str) -> list[tuple[str, str]]:
print(f"Fetching commits starting at {start_commit}")
return get_commits(repo_folder_path, f"{start_commit}^..HEAD")


def get_nth_commit(repo_folder_path: str, n: int) -> Tuple[str, str]:
def get_nth_commit(repo_folder_path: str, n: int) -> tuple[str, str]:
print(f"Fetching last {n} commits (or all, if there are fewer commits than n)")
return get_commits(repo_folder_path, f"-{n}")[0]


def run_mypy(
target_file_path: Optional[str],
target_file_path: str | None,
mypy_cache_path: str,
mypy_script: Optional[str],
mypy_script: str | None,
*,
incremental: bool = False,
daemon: bool = False,
verbose: bool = False,
) -> Tuple[float, str, Dict[str, Any]]:
) -> tuple[float, str, dict[str, Any]]:
"""Runs mypy against `target_file_path` and returns what mypy prints to stdout as a string.

If `incremental` is set to True, this function will use store and retrieve all caching data
@@ -136,7 +136,7 @@ def run_mypy(

If `daemon` is True, we use daemon mode; the daemon must be started and stopped by the caller.
"""
stats = {} # type: Dict[str, Any]
stats: dict[str, Any] = {}
if daemon:
command = DAEMON_CMD + ["check", "-v"]
else:
@@ -162,8 +162,8 @@ def run_mypy(
return runtime, output, stats


def filter_daemon_stats(output: str) -> Tuple[str, Dict[str, Any]]:
stats = {} # type: Dict[str, Any]
def filter_daemon_stats(output: str) -> tuple[str, dict[str, Any]]:
stats: dict[str, Any] = {}
lines = output.splitlines()
output_lines = []
for line in lines:
@@ -208,12 +208,12 @@ def save_cache(cache: JsonDict, incremental_cache_path: str = CACHE_PATH) -> Non


def set_expected(
commits: List[Tuple[str, str]],
commits: list[tuple[str, str]],
cache: JsonDict,
temp_repo_path: str,
target_file_path: Optional[str],
target_file_path: str | None,
mypy_cache_path: str,
mypy_script: Optional[str],
mypy_script: str | None,
) -> None:
"""Populates the given `cache` with the expected results for all of the given `commits`.

@@ -241,13 +241,13 @@ def set_expected(


def test_incremental(
commits: List[Tuple[str, str]],
commits: list[tuple[str, str]],
cache: JsonDict,
temp_repo_path: str,
target_file_path: Optional[str],
target_file_path: str | None,
mypy_cache_path: str,
*,
mypy_script: Optional[str] = None,
mypy_script: str | None = None,
daemon: bool = False,
exit_on_error: bool = False,
) -> None:
@@ -258,16 +258,16 @@ def test_incremental(
"""
print("Note: first commit is evaluated twice to warm up cache")
commits = [commits[0]] + commits
overall_stats = {} # type: Dict[str, float]
overall_stats: dict[str, float] = {}
for commit_id, message in commits:
print(f'Now testing commit {commit_id}: "{message}"')
execute(["git", "-C", temp_repo_path, "checkout", commit_id])
runtime, output, stats = run_mypy(
target_file_path, mypy_cache_path, mypy_script, incremental=True, daemon=daemon
)
relevant_stats = combine_stats(overall_stats, stats)
expected_runtime = cache[commit_id]["runtime"] # type: float
expected_output = cache[commit_id]["output"] # type: str
expected_runtime: float = cache[commit_id]["runtime"]
expected_output: str = cache[commit_id]["output"]
if output != expected_output:
print(" Output does not match expected result!")
print(f" Expected output ({expected_runtime:.3f} sec):")
@@ -286,10 +286,10 @@ def test_incremental(
print("Overall stats:", overall_stats)


def combine_stats(overall_stats: Dict[str, float], new_stats: Dict[str, Any]) -> Dict[str, float]:
def combine_stats(overall_stats: dict[str, float], new_stats: dict[str, Any]) -> dict[str, float]:
INTERESTING_KEYS = ["build_time", "gc_time"]
# For now, we only support float keys
relevant_stats = {} # type: Dict[str, float]
relevant_stats: dict[str, float] = {}
for key in INTERESTING_KEYS:
if key in new_stats:
value = float(new_stats[key])
@@ -306,7 +306,7 @@ def cleanup(temp_repo_path: str, mypy_cache_path: str) -> None:
def test_repo(
target_repo_url: str,
temp_repo_path: str,
target_file_path: Optional[str],
target_file_path: str | None,
mypy_path: str,
incremental_cache_path: str,
mypy_cache_path: str,
@@ -391,9 +391,7 @@ def test_repo(


def main() -> None:
help_factory = lambda prog: RawDescriptionHelpFormatter(
prog=prog, max_help_position=32
) # type: Any
help_factory: Any = lambda prog: RawDescriptionHelpFormatter(prog=prog, max_help_position=32)
parser = ArgumentParser(
prog="incremental_checker", description=__doc__, formatter_class=help_factory
)
8 changes: 4 additions & 4 deletions misc/perf_checker.py
@@ -8,7 +8,7 @@
import subprocess
import textwrap
import time
from typing import Callable, List, Tuple
from typing import Callable, Tuple


class Command:
@@ -28,7 +28,7 @@ def delete_folder(folder_path: str) -> None:
shutil.rmtree(folder_path)


def execute(command: List[str]) -> None:
def execute(command: list[str]) -> None:
proc = subprocess.Popen(
" ".join(command), stderr=subprocess.PIPE, stdout=subprocess.PIPE, shell=True
)
@@ -45,7 +45,7 @@ def execute(command: List[str]) -> None:
raise RuntimeError("Unexpected error from external tool.")


def trial(num_trials: int, command: Command) -> List[float]:
def trial(num_trials: int, command: Command) -> list[float]:
trials = []
for i in range(num_trials):
command.setup()
@@ -56,7 +56,7 @@ def trial(num_trials: int, command: Command) -> List[float]:
return trials


def report(name: str, times: List[float]) -> None:
def report(name: str, times: list[float]) -> None:
print(f"{name}:")
print(f" Times: {times}")
print(f" Mean: {statistics.mean(times)}")
4 changes: 2 additions & 2 deletions misc/proper_plugin.py
@@ -1,6 +1,6 @@
from __future__ import annotations

from typing import Callable, Optional, Type as typing_Type
from typing import Callable, Type as typing_Type

from mypy.nodes import TypeInfo
from mypy.plugin import FunctionContext, Plugin
@@ -33,7 +33,7 @@ class ProperTypePlugin(Plugin):
all these became dangerous because typ may be e.g. an alias to union.
"""

def get_function_hook(self, fullname: str) -> Optional[Callable[[FunctionContext], Type]]:
def get_function_hook(self, fullname: str) -> Callable[[FunctionContext], Type] | None:
if fullname == "builtins.isinstance":
return isinstance_proper_hook
if fullname == "mypy.types.get_proper_type":
3 changes: 1 addition & 2 deletions misc/sync-typeshed.py
@@ -16,7 +16,6 @@
import sys
import tempfile
import textwrap
from typing import Optional


def check_state() -> None:
@@ -28,7 +27,7 @@ def check_state() -> None:
sys.exit('error: Output of "git status -s mypy/typeshed" must be empty')


def update_typeshed(typeshed_dir: str, commit: Optional[str]) -> str:
def update_typeshed(typeshed_dir: str, commit: str | None) -> str:
"""Update contents of local typeshed copy.

Return the normalized typeshed commit hash.
8 changes: 4 additions & 4 deletions misc/test_case_to_actual.py
@@ -3,14 +3,14 @@
import os
import os.path
import sys
from typing import Iterator, List
from typing import Iterator


class Chunk:
def __init__(self, header_type: str, args: str) -> None:
self.header_type = header_type
self.args = args
self.lines = [] # type: List[str]
self.lines: list[str] = []


def is_header(line: str) -> bool:
@@ -22,7 +22,7 @@ def normalize(lines: Iterator[str]) -> Iterator[str]:


def produce_chunks(lines: Iterator[str]) -> Iterator[Chunk]:
current_chunk = None # type: Chunk
current_chunk: Chunk = None
for line in normalize(lines):
if is_header(line):
if current_chunk is not None:
@@ -36,7 +36,7 @@ def produce_chunks(lines: Iterator[str]) -> Iterator[Chunk]:
yield current_chunk


def write_out(filename: str, lines: List[str]) -> None:
def write_out(filename: str, lines: list[str]) -> None:
os.makedirs(os.path.dirname(filename), exist_ok=True)
with open(filename, "w") as stream:
stream.write("\n".join(lines))