From 1d9ca648bf86c051139acc70e06d6be2a5126d57 Mon Sep 17 00:00:00 2001 From: Alex Waygood Date: Mon, 15 Aug 2022 09:45:31 +0200 Subject: [PATCH] Add more `from __future__ import annotations`; remove more quotes (#13412) --- conftest.py | 2 + docs/source/conf.py | 2 + misc/actions_stubs.py | 3 + misc/analyze_cache.py | 2 + misc/apply-cache-diff.py | 2 + misc/async_matrix.py | 4 +- misc/cherry-pick-typeshed.py | 2 + misc/convert-cache.py | 2 + misc/diff-cache.py | 2 + misc/dump-ast.py | 2 + misc/fix_annotate.py | 1 + misc/incremental_checker.py | 2 + misc/perf_checker.py | 2 + misc/proper_plugin.py | 2 + misc/sync-typeshed.py | 2 + misc/test_case_to_actual.py | 2 + misc/touch_checker.py | 2 + misc/upload-pypi.py | 2 + misc/variadics.py | 2 + mypy/__main__.py | 3 + mypy/api.py | 2 + mypy/applytype.py | 2 + mypy/argmap.py | 4 +- mypy/binder.py | 4 +- mypy/bogus_type.py | 2 + mypy/build.py | 18 +- mypy/checker.py | 6 +- mypy/checkexpr.py | 10 +- mypy/checkmember.py | 8 +- mypy/checkpattern.py | 10 +- mypy/checkstrformat.py | 10 +- mypy/config_parser.py | 2 + mypy/constraints.py | 4 +- mypy/copytype.py | 2 + mypy/defaults.py | 2 + mypy/dmypy_os.py | 2 + mypy/dmypy_server.py | 2 + mypy/dmypy_util.py | 2 + mypy/erasetype.py | 2 + mypy/errorcodes.py | 4 +- mypy/errors.py | 6 +- mypy/expandtype.py | 2 + mypy/exprtotype.py | 2 + mypy/fastparse.py | 2 + mypy/find_sources.py | 2 + mypy/fixup.py | 2 + mypy/freetree.py | 2 + mypy/fscache.py | 2 + mypy/fswatcher.py | 2 + mypy/gclogger.py | 4 +- mypy/git.py | 2 + mypy/indirection.py | 2 + mypy/infer.py | 2 + mypy/inspections.py | 2 + mypy/ipc.py | 10 +- mypy/join.py | 2 + mypy/literals.py | 2 + mypy/lookup.py | 2 + mypy/main.py | 2 + mypy/maptype.py | 2 + mypy/meet.py | 2 + mypy/memprofile.py | 2 + mypy/message_registry.py | 4 +- mypy/messages.py | 3 + mypy/metastore.py | 4 +- mypy/mixedtraverser.py | 2 + mypy/modulefinder.py | 2 + mypy/moduleinspect.py | 6 +- mypy/mro.py | 2 + mypy/nodes.py | 243 +++++++++++++------------- mypy/operators.py | 2 + mypy/options.py | 6 +- mypy/parse.py | 2 + mypy/patterns.py | 3 + mypy/plugin.py | 2 + mypy/reachability.py | 2 + mypy/renaming.py | 2 + mypy/report.py | 6 +- mypy/scope.py | 2 + mypy/semanal.py | 2 + mypy/semanal_classprop.py | 2 + mypy/semanal_enum.py | 2 + mypy/semanal_infer.py | 2 + mypy/semanal_main.py | 31 ++-- mypy/semanal_namedtuple.py | 2 + mypy/semanal_newtype.py | 2 + mypy/semanal_pass1.py | 2 + mypy/semanal_shared.py | 2 + mypy/semanal_typeargs.py | 2 + mypy/semanal_typeddict.py | 2 + mypy/server/target.py | 3 + mypy/sharedparse.py | 2 + mypy/solve.py | 2 + mypy/split_namespace.py | 2 + mypy/state.py | 2 + mypy/stats.py | 2 + mypy/strconv.py | 170 +++++++++--------- mypy/stubdoc.py | 3 + mypy/stubgen.py | 6 +- mypy/stubgenc.py | 2 + mypy/stubinfo.py | 3 + mypy/stubtest.py | 8 +- mypy/stubutil.py | 2 + mypy/subtypes.py | 2 + mypy/suggestions.py | 2 + mypy/traverser.py | 2 + mypy/treetransform.py | 2 + mypy/tvar_scope.py | 12 +- mypy/type_visitor.py | 2 + mypy/typeanal.py | 8 +- mypy/typeops.py | 2 + mypy/types.py | 182 ++++++++++---------- mypy/typestate.py | 2 + mypy/typetraverser.py | 2 + mypy/typevars.py | 2 + mypy/typevartuples.py | 2 + mypy/util.py | 4 +- mypy/version.py | 2 + mypy/visitor.py | 322 ++++++++++++++++++----------------- mypyc/__main__.py | 2 + mypyc/build.py | 10 +- mypyc/common.py | 2 + mypyc/crash.py | 4 +- mypyc/errors.py | 2 + mypyc/namegen.py | 2 + mypyc/options.py | 2 + mypyc/rt_subtype.py | 2 + mypyc/sametype.py | 2 + mypyc/subtype.py | 2 + runtests.py | 3 + 
scripts/find_type.py | 2 + setup.py | 2 + 132 files changed, 797 insertions(+), 540 deletions(-) diff --git a/conftest.py b/conftest.py index b40d4675c854..0bd7b6a38031 100644 --- a/conftest.py +++ b/conftest.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import os.path pytest_plugins = ["mypy.test.data"] diff --git a/docs/source/conf.py b/docs/source/conf.py index 18602dacbbcd..5faefdc92ed1 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -12,6 +12,8 @@ # All configuration values have a default; values that are commented out # serve to show the default. +from __future__ import annotations + import os import sys diff --git a/misc/actions_stubs.py b/misc/actions_stubs.py index d7613cb06a5f..f0902f3d974f 100644 --- a/misc/actions_stubs.py +++ b/misc/actions_stubs.py @@ -1,4 +1,7 @@ #!/usr/bin/env python3 + +from __future__ import annotations + import os import shutil from typing import Any, Tuple diff --git a/misc/analyze_cache.py b/misc/analyze_cache.py index 333a188971b6..a7abc45db94c 100644 --- a/misc/analyze_cache.py +++ b/misc/analyze_cache.py @@ -1,5 +1,7 @@ #!/usr/bin/env python +from __future__ import annotations + import json import os import os.path diff --git a/misc/apply-cache-diff.py b/misc/apply-cache-diff.py index 53fd7e52b066..29c55247de92 100644 --- a/misc/apply-cache-diff.py +++ b/misc/apply-cache-diff.py @@ -5,6 +5,8 @@ many cases instead of full cache artifacts. """ +from __future__ import annotations + import argparse import json import os diff --git a/misc/async_matrix.py b/misc/async_matrix.py index 33d194c29116..ba04fc390069 100644 --- a/misc/async_matrix.py +++ b/misc/async_matrix.py @@ -5,6 +5,8 @@ testFullCoroutineMatrix in test-data/unit/check-async-await.test. """ +from __future__ import annotations + import sys from types import coroutine from typing import Any, Awaitable, Generator, Iterator @@ -35,7 +37,7 @@ async def decorated_coroutine() -> int: class It(Iterator[str]): stop = False - def __iter__(self) -> "It": + def __iter__(self) -> It: return self def __next__(self) -> str: diff --git a/misc/cherry-pick-typeshed.py b/misc/cherry-pick-typeshed.py index ae8ca3ac517a..3cf826533a94 100644 --- a/misc/cherry-pick-typeshed.py +++ b/misc/cherry-pick-typeshed.py @@ -5,6 +5,8 @@ python3 misc/cherry-pick-typeshed.py --typeshed-dir dir hash """ +from __future__ import annotations + import argparse import os.path import re diff --git a/misc/convert-cache.py b/misc/convert-cache.py index a83eddf1bcd7..92a313c6f2a0 100755 --- a/misc/convert-cache.py +++ b/misc/convert-cache.py @@ -5,6 +5,8 @@ See mypy/metastore.py for details. """ +from __future__ import annotations + import os import sys diff --git a/misc/diff-cache.py b/misc/diff-cache.py index 50dd54e12b3d..39be8023a2d5 100644 --- a/misc/diff-cache.py +++ b/misc/diff-cache.py @@ -5,6 +5,8 @@ many cases instead of full cache artifacts. """ +from __future__ import annotations + import argparse import json import os diff --git a/misc/dump-ast.py b/misc/dump-ast.py index 60e4c926103e..55e6941e5d70 100755 --- a/misc/dump-ast.py +++ b/misc/dump-ast.py @@ -3,6 +3,8 @@ Parse source files and print the abstract syntax trees. """ +from __future__ import annotations + import argparse import sys from typing import Tuple diff --git a/misc/fix_annotate.py b/misc/fix_annotate.py index 3815dd1c26f1..7148b69259be 100644 --- a/misc/fix_annotate.py +++ b/misc/fix_annotate.py @@ -27,6 +27,7 @@ def foo(self, bar, baz=12): Finally, it knows that __init__() is supposed to return None. 
""" +from __future__ import annotations import os import re diff --git a/misc/incremental_checker.py b/misc/incremental_checker.py index 8a441d6dc401..3b53cbb82502 100755 --- a/misc/incremental_checker.py +++ b/misc/incremental_checker.py @@ -31,6 +31,8 @@ python3 misc/incremental_checker.py commit 2a432b """ +from __future__ import annotations + import base64 import json import os diff --git a/misc/perf_checker.py b/misc/perf_checker.py index 5cf03d4b86f5..f97f5596fb64 100644 --- a/misc/perf_checker.py +++ b/misc/perf_checker.py @@ -1,5 +1,7 @@ #!/usr/bin/env python3 +from __future__ import annotations + import os import shutil import statistics diff --git a/misc/proper_plugin.py b/misc/proper_plugin.py index 4f8af8d301a3..afa9185136f9 100644 --- a/misc/proper_plugin.py +++ b/misc/proper_plugin.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import Callable, Optional, Type as typing_Type from mypy.nodes import TypeInfo diff --git a/misc/sync-typeshed.py b/misc/sync-typeshed.py index e74c3723ef07..01702b090e93 100644 --- a/misc/sync-typeshed.py +++ b/misc/sync-typeshed.py @@ -7,6 +7,8 @@ By default, sync to the latest typeshed commit. """ +from __future__ import annotations + import argparse import os import shutil diff --git a/misc/test_case_to_actual.py b/misc/test_case_to_actual.py index dd8a8a293c3c..ead453ef3126 100644 --- a/misc/test_case_to_actual.py +++ b/misc/test_case_to_actual.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import os import os.path import sys diff --git a/misc/touch_checker.py b/misc/touch_checker.py index 0cb9d7e5cf80..a36a3a6de76b 100644 --- a/misc/touch_checker.py +++ b/misc/touch_checker.py @@ -1,5 +1,7 @@ #!/usr/bin/env python3 +from __future__ import annotations + import glob import os import shutil diff --git a/misc/upload-pypi.py b/misc/upload-pypi.py index ad244a547ddb..ffe60214f86f 100644 --- a/misc/upload-pypi.py +++ b/misc/upload-pypi.py @@ -5,6 +5,8 @@ """ +from __future__ import annotations + import argparse import contextlib import json diff --git a/misc/variadics.py b/misc/variadics.py index a216543a29c8..c54e3fd8e30e 100644 --- a/misc/variadics.py +++ b/misc/variadics.py @@ -3,6 +3,8 @@ See https://github.com/python/typing/issues/193#issuecomment-236383893 """ +from __future__ import annotations + LIMIT = 5 BOUND = "object" diff --git a/mypy/__main__.py b/mypy/__main__.py index b1263c734730..049553cd1b44 100644 --- a/mypy/__main__.py +++ b/mypy/__main__.py @@ -1,4 +1,7 @@ """Mypy type checker command line tool.""" + +from __future__ import annotations + import os import sys import traceback diff --git a/mypy/api.py b/mypy/api.py index 9ea6eb34ee5a..e98bf7982524 100644 --- a/mypy/api.py +++ b/mypy/api.py @@ -43,6 +43,8 @@ """ +from __future__ import annotations + import sys from io import StringIO from typing import Callable, List, TextIO, Tuple diff --git a/mypy/applytype.py b/mypy/applytype.py index 2f01b6fcc2a4..8ad0a7b95b4e 100644 --- a/mypy/applytype.py +++ b/mypy/applytype.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import Callable, Dict, Optional, Sequence import mypy.subtypes diff --git a/mypy/argmap.py b/mypy/argmap.py index 5df924a24386..9e2e20c004eb 100644 --- a/mypy/argmap.py +++ b/mypy/argmap.py @@ -1,5 +1,7 @@ """Utilities for mapping between actual and formal arguments (and their types).""" +from __future__ import annotations + from typing import TYPE_CHECKING, Callable, List, Optional, Sequence, Set from mypy import nodes @@ -158,7 +160,7 @@ def f(x: int, *args: str) -> None: 
... needs a separate instance since instances have per-call state. """ - def __init__(self, context: "ArgumentInferContext") -> None: + def __init__(self, context: ArgumentInferContext) -> None: # Next tuple *args index to use. self.tuple_index = 0 # Keyword arguments in TypedDict **kwargs used. diff --git a/mypy/binder.py b/mypy/binder.py index 88c52a027107..ba46a91a8793 100644 --- a/mypy/binder.py +++ b/mypy/binder.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from collections import defaultdict from contextlib import contextmanager from typing import DefaultDict, Dict, Iterator, List, Optional, Set, Tuple, Union, cast @@ -234,7 +236,7 @@ def pop_frame(self, can_skip: bool, fall_through: int) -> Frame: return result @contextmanager - def accumulate_type_assignments(self) -> "Iterator[Assigns]": + def accumulate_type_assignments(self) -> Iterator[Assigns]: """Push a new map to collect assigned types in multiassign from union. If this map is not None, actual binding is deferred until all items in diff --git a/mypy/bogus_type.py b/mypy/bogus_type.py index 2193a986c57c..1a61abac9732 100644 --- a/mypy/bogus_type.py +++ b/mypy/bogus_type.py @@ -10,6 +10,8 @@ For those cases some other technique should be used. """ +from __future__ import annotations + from typing import Any, TypeVar from mypy_extensions import FlexibleAlias diff --git a/mypy/build.py b/mypy/build.py index c9fbe917a99e..84676d2fc308 100644 --- a/mypy/build.py +++ b/mypy/build.py @@ -10,6 +10,8 @@ """ # TODO: More consistent terminology, e.g. path/fnam, module/id, state/file +from __future__ import annotations + import contextlib import errno import gc @@ -132,7 +134,7 @@ class BuildResult: errors: List of error messages. """ - def __init__(self, manager: "BuildManager", graph: Graph) -> None: + def __init__(self, manager: BuildManager, graph: Graph) -> None: self.manager = manager self.graph = graph self.files = manager.modules @@ -600,7 +602,7 @@ def __init__( search_paths: SearchPaths, ignore_prefix: str, source_set: BuildSourceSet, - reports: "Optional[Reports]", + reports: Optional[Reports], options: Options, version_id: str, plugin: Plugin, @@ -1857,7 +1859,7 @@ class State: import_context: List[Tuple[str, int]] # The State from which this module was imported, if any - caller_state: Optional["State"] = None + caller_state: Optional[State] = None # If caller_state is set, the line number in the caller where the import occurred caller_line = 0 @@ -1896,9 +1898,9 @@ def __init__( path: Optional[str], source: Optional[str], manager: BuildManager, - caller_state: "Optional[State]" = None, + caller_state: Optional[State] = None, caller_line: int = 0, - ancestor_for: "Optional[State]" = None, + ancestor_for: Optional[State] = None, root_source: bool = False, # If `temporary` is True, this State is being created to just # quickly parse/load the tree, without an intention to further @@ -2545,9 +2547,9 @@ def find_module_and_diagnose( manager: BuildManager, id: str, options: Options, - caller_state: "Optional[State]" = None, + caller_state: Optional[State] = None, caller_line: int = 0, - ancestor_for: "Optional[State]" = None, + ancestor_for: Optional[State] = None, root_source: bool = False, skip_diagnose: bool = False, ) -> Tuple[str, str]: @@ -2765,7 +2767,7 @@ def skipping_module( manager.errors.set_import_context(save_import_context) -def skipping_ancestor(manager: BuildManager, id: str, path: str, ancestor_for: "State") -> None: +def skipping_ancestor(manager: BuildManager, id: str, path: str, ancestor_for: State) 
-> None: """Produce an error for an ancestor ignored due to --follow_imports=error""" # TODO: Read the path (the __init__.py file) and return # immediately if it's empty or only contains comments. diff --git a/mypy/checker.py b/mypy/checker.py index ab938e8a7b3c..38301f89c815 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -1,5 +1,7 @@ """Mypy type checker.""" +from __future__ import annotations + import fnmatch import itertools from collections import defaultdict @@ -301,7 +303,7 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface): pattern_checker: PatternChecker tscope: Scope - scope: "CheckerScope" + scope: CheckerScope # Stack of function return types return_types: List[Type] # Flags; true for dynamically typed functions @@ -5966,7 +5968,7 @@ def iterable_item_type(self, instance: Instance) -> Type: def function_type(self, func: FuncBase) -> FunctionLike: return function_type(func, self.named_type("builtins.function")) - def push_type_map(self, type_map: "TypeMap") -> None: + def push_type_map(self, type_map: TypeMap) -> None: if type_map is None: self.binder.unreachable() else: diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 7f72a21ebc18..8cdc8282a1e5 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -1,5 +1,7 @@ """Expression type checker. This file is conceptually part of TypeChecker.""" +from __future__ import annotations + import itertools from contextlib import contextmanager from typing import ( @@ -253,7 +255,7 @@ class ExpressionChecker(ExpressionVisitor[Type]): """ # Some services are provided by a TypeChecker instance. - chk: "mypy.checker.TypeChecker" + chk: mypy.checker.TypeChecker # This is shared with TypeChecker, but stored also here for convenience. msg: MessageBuilder # Type context for type inference @@ -265,9 +267,7 @@ class ExpressionChecker(ExpressionVisitor[Type]): strfrm_checker: StringFormatterChecker plugin: Plugin - def __init__( - self, chk: "mypy.checker.TypeChecker", msg: MessageBuilder, plugin: Plugin - ) -> None: + def __init__(self, chk: mypy.checker.TypeChecker, msg: MessageBuilder, plugin: Plugin) -> None: """Construct an expression type checker.""" self.chk = chk self.msg = msg @@ -654,7 +654,7 @@ def check_typeddict_call( self.chk.fail(message_registry.INVALID_TYPEDDICT_ARGS, context) return AnyType(TypeOfAny.from_error) - def validate_typeddict_kwargs(self, kwargs: DictExpr) -> "Optional[Dict[str, Expression]]": + def validate_typeddict_kwargs(self, kwargs: DictExpr) -> Optional[Dict[str, Expression]]: item_args = [item[1] for item in kwargs.items] item_names = [] # List[str] diff --git a/mypy/checkmember.py b/mypy/checkmember.py index 1ee2d64e25f0..75101b3359ea 100644 --- a/mypy/checkmember.py +++ b/mypy/checkmember.py @@ -1,5 +1,7 @@ """Type checking of attribute access""" +from __future__ import annotations + from typing import TYPE_CHECKING, Callable, Optional, Sequence, Union, cast from mypy import meet, message_registry, subtypes @@ -85,7 +87,7 @@ def __init__( original_type: Type, context: Context, msg: MessageBuilder, - chk: "mypy.checker.TypeChecker", + chk: mypy.checker.TypeChecker, self_type: Optional[Type], module_symbol_table: Optional[SymbolTable] = None, ) -> None: @@ -111,7 +113,7 @@ def copy_modified( messages: Optional[MessageBuilder] = None, self_type: Optional[Type] = None, is_lvalue: Optional[bool] = None, - ) -> "MemberContext": + ) -> MemberContext: mx = MemberContext( self.is_lvalue, self.is_super, @@ -142,7 +144,7 @@ def analyze_member_access( msg: MessageBuilder, *, original_type: Type, - 
chk: "mypy.checker.TypeChecker", + chk: mypy.checker.TypeChecker, override_info: Optional[TypeInfo] = None, in_literal_context: bool = False, self_type: Optional[Type] = None, diff --git a/mypy/checkpattern.py b/mypy/checkpattern.py index 69d33a3f0b16..782d5c1304d9 100644 --- a/mypy/checkpattern.py +++ b/mypy/checkpattern.py @@ -1,5 +1,7 @@ """Pattern checker. This file is conceptually part of TypeChecker.""" +from __future__ import annotations + from collections import defaultdict from typing import Dict, List, NamedTuple, Optional, Set, Tuple, Union from typing_extensions import Final @@ -83,7 +85,7 @@ class PatternChecker(PatternVisitor[PatternType]): """ # Some services are provided by a TypeChecker instance. - chk: "mypy.checker.TypeChecker" + chk: mypy.checker.TypeChecker # This is shared with TypeChecker, but stored also here for convenience. msg: MessageBuilder # Currently unused @@ -101,9 +103,7 @@ class PatternChecker(PatternVisitor[PatternType]): # non_sequence_match_type_names non_sequence_match_types: List[Type] - def __init__( - self, chk: "mypy.checker.TypeChecker", msg: MessageBuilder, plugin: Plugin - ) -> None: + def __init__(self, chk: mypy.checker.TypeChecker, msg: MessageBuilder, plugin: Plugin) -> None: self.chk = chk self.msg = msg self.plugin = plugin @@ -690,7 +690,7 @@ def get_var(expr: Expression) -> Var: return node -def get_type_range(typ: Type) -> "mypy.checker.TypeRange": +def get_type_range(typ: Type) -> mypy.checker.TypeRange: typ = get_proper_type(typ) if ( isinstance(typ, Instance) diff --git a/mypy/checkstrformat.py b/mypy/checkstrformat.py index 645a01ed1220..855e09f6b0a1 100644 --- a/mypy/checkstrformat.py +++ b/mypy/checkstrformat.py @@ -10,6 +10,8 @@ implementation simple. """ +from __future__ import annotations + import re from typing import ( TYPE_CHECKING, @@ -306,16 +308,16 @@ class StringFormatterChecker: """ # Some services are provided by a TypeChecker instance. - chk: "mypy.checker.TypeChecker" + chk: mypy.checker.TypeChecker # This is shared with TypeChecker, but stored also here for convenience. msg: MessageBuilder # Some services are provided by a ExpressionChecker instance. - exprchk: "mypy.checkexpr.ExpressionChecker" + exprchk: mypy.checkexpr.ExpressionChecker def __init__( self, - exprchk: "mypy.checkexpr.ExpressionChecker", - chk: "mypy.checker.TypeChecker", + exprchk: mypy.checkexpr.ExpressionChecker, + chk: mypy.checker.TypeChecker, msg: MessageBuilder, ) -> None: """Construct an expression type checker.""" diff --git a/mypy/config_parser.py b/mypy/config_parser.py index 613f127afd08..bc9f75f419e1 100644 --- a/mypy/config_parser.py +++ b/mypy/config_parser.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import argparse import configparser import glob as fileglob diff --git a/mypy/constraints.py b/mypy/constraints.py index d483fa1aeb40..d005eeaeef8a 100644 --- a/mypy/constraints.py +++ b/mypy/constraints.py @@ -1,5 +1,7 @@ """Type inference constraints.""" +from __future__ import annotations + from typing import TYPE_CHECKING, Iterable, List, Optional, Sequence from typing_extensions import Final @@ -96,7 +98,7 @@ def infer_constraints_for_callable( arg_types: Sequence[Optional[Type]], arg_kinds: List[ArgKind], formal_to_actual: List[List[int]], - context: "ArgumentInferContext", + context: ArgumentInferContext, ) -> List[Constraint]: """Infer type variable constraints for a callable and actual arguments. 
diff --git a/mypy/copytype.py b/mypy/copytype.py index e5a02d811d8b..baa1ba34cbac 100644 --- a/mypy/copytype.py +++ b/mypy/copytype.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import Any, cast from mypy.types import ( diff --git a/mypy/defaults.py b/mypy/defaults.py index 4fae1870749a..02562b5f0963 100644 --- a/mypy/defaults.py +++ b/mypy/defaults.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import os from typing_extensions import Final diff --git a/mypy/dmypy_os.py b/mypy/dmypy_os.py index 0b823b6f4132..63c3e4c88979 100644 --- a/mypy/dmypy_os.py +++ b/mypy/dmypy_os.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import sys from typing import Any, Callable diff --git a/mypy/dmypy_server.py b/mypy/dmypy_server.py index d5909569dcd9..81e72d4643c4 100644 --- a/mypy/dmypy_server.py +++ b/mypy/dmypy_server.py @@ -4,6 +4,8 @@ to enable fine-grained incremental reprocessing of changes. """ +from __future__ import annotations + import argparse import base64 import io diff --git a/mypy/dmypy_util.py b/mypy/dmypy_util.py index 5c1a4cd348dd..a1b419617f73 100644 --- a/mypy/dmypy_util.py +++ b/mypy/dmypy_util.py @@ -3,6 +3,8 @@ This should be pretty lightweight and not depend on other mypy code (other than ipc). """ +from __future__ import annotations + import json from typing import Any from typing_extensions import Final diff --git a/mypy/erasetype.py b/mypy/erasetype.py index d9e878600247..e8fdb72ab502 100644 --- a/mypy/erasetype.py +++ b/mypy/erasetype.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import Callable, Container, Dict, List, Optional, cast from mypy.nodes import ARG_STAR, ARG_STAR2 diff --git a/mypy/errorcodes.py b/mypy/errorcodes.py index da616299758b..cfb83492e93f 100644 --- a/mypy/errorcodes.py +++ b/mypy/errorcodes.py @@ -3,10 +3,12 @@ These can be used for filtering specific errors. 
""" +from __future__ import annotations + from typing import Dict from typing_extensions import Final -error_codes: Dict[str, "ErrorCode"] = {} +error_codes: Dict[str, ErrorCode] = {} class ErrorCode: diff --git a/mypy/errors.py b/mypy/errors.py index 273cfbc834fc..13254652ecf0 100644 --- a/mypy/errors.py +++ b/mypy/errors.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import os.path import sys import traceback @@ -137,7 +139,7 @@ class ErrorWatcher: def __init__( self, - errors: "Errors", + errors: Errors, *, filter_errors: Union[bool, Callable[[str, ErrorInfo], bool]] = False, save_filtered_errors: bool = False, @@ -147,7 +149,7 @@ def __init__( self._filter = filter_errors self._filtered: Optional[List[ErrorInfo]] = [] if save_filtered_errors else None - def __enter__(self) -> "ErrorWatcher": + def __enter__(self) -> ErrorWatcher: self.errors._watchers.append(self) return self diff --git a/mypy/expandtype.py b/mypy/expandtype.py index 9a948ca2f115..5bd15c8a2646 100644 --- a/mypy/expandtype.py +++ b/mypy/expandtype.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import Dict, Iterable, List, Mapping, Optional, Sequence, TypeVar, Union, cast from mypy.types import ( diff --git a/mypy/exprtotype.py b/mypy/exprtotype.py index 76fe2d511762..957adb9610cb 100644 --- a/mypy/exprtotype.py +++ b/mypy/exprtotype.py @@ -1,5 +1,7 @@ """Translate an Expression to a Type value.""" +from __future__ import annotations + from typing import Optional from mypy.fastparse import parse_type_string diff --git a/mypy/fastparse.py b/mypy/fastparse.py index f213dfb22ff0..3cba8509a4c8 100644 --- a/mypy/fastparse.py +++ b/mypy/fastparse.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import copy import re import sys diff --git a/mypy/find_sources.py b/mypy/find_sources.py index e2ed1109a2cb..9b7147eda7d5 100644 --- a/mypy/find_sources.py +++ b/mypy/find_sources.py @@ -1,5 +1,7 @@ """Routines for finding the sources that mypy will check""" +from __future__ import annotations + import functools import os from typing import List, Optional, Sequence, Set, Tuple diff --git a/mypy/fixup.py b/mypy/fixup.py index 885239b648aa..87c6258ff2d7 100644 --- a/mypy/fixup.py +++ b/mypy/fixup.py @@ -1,5 +1,7 @@ """Fix up various things after deserialization.""" +from __future__ import annotations + from typing import Any, Dict, Optional from typing_extensions import Final diff --git a/mypy/freetree.py b/mypy/freetree.py index 07eb4cf0ceb6..75b89e2623ae 100644 --- a/mypy/freetree.py +++ b/mypy/freetree.py @@ -1,5 +1,7 @@ """Generic node traverser visitor""" +from __future__ import annotations + from mypy.nodes import Block, MypyFile from mypy.traverser import TraverserVisitor diff --git a/mypy/fscache.py b/mypy/fscache.py index 365ca80d334e..9ce0942d3e30 100644 --- a/mypy/fscache.py +++ b/mypy/fscache.py @@ -28,6 +28,8 @@ advantage of the benefits. 
""" +from __future__ import annotations + import os import stat from typing import Dict, List, Set diff --git a/mypy/fswatcher.py b/mypy/fswatcher.py index 8144a7f43caa..5db3f61ffb8d 100644 --- a/mypy/fswatcher.py +++ b/mypy/fswatcher.py @@ -1,5 +1,7 @@ """Watch parts of the file system for changes.""" +from __future__ import annotations + from typing import AbstractSet, Dict, Iterable, List, NamedTuple, Optional, Set, Tuple from mypy.fscache import FileSystemCache diff --git a/mypy/gclogger.py b/mypy/gclogger.py index 65508d2fda7a..98e0a880b3f3 100644 --- a/mypy/gclogger.py +++ b/mypy/gclogger.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import gc import time from typing import Mapping, Optional @@ -6,7 +8,7 @@ class GcLogger: """Context manager to log GC stats and overall time.""" - def __enter__(self) -> "GcLogger": + def __enter__(self) -> GcLogger: self.gc_start_time: Optional[float] = None self.gc_time = 0.0 self.gc_calls = 0 diff --git a/mypy/git.py b/mypy/git.py index 8e73b1eeb9c5..1c63bf6471dc 100644 --- a/mypy/git.py +++ b/mypy/git.py @@ -1,6 +1,8 @@ """Git utilities.""" # Used also from setup.py, so don't pull in anything additional here (like mypy or typing): +from __future__ import annotations + import os import subprocess diff --git a/mypy/indirection.py b/mypy/indirection.py index c241e55698ff..8e960bfaa7c3 100644 --- a/mypy/indirection.py +++ b/mypy/indirection.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import Dict, Iterable, List, Optional, Set, Union import mypy.types as types diff --git a/mypy/infer.py b/mypy/infer.py index d3ad0bc19f9b..bba85edc1345 100644 --- a/mypy/infer.py +++ b/mypy/infer.py @@ -1,5 +1,7 @@ """Utilities for type argument inference.""" +from __future__ import annotations + from typing import List, NamedTuple, Optional, Sequence from mypy.constraints import ( diff --git a/mypy/inspections.py b/mypy/inspections.py index 97ce315497fa..8a0b03dc1dd2 100644 --- a/mypy/inspections.py +++ b/mypy/inspections.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import os from collections import defaultdict from functools import cmp_to_key diff --git a/mypy/ipc.py b/mypy/ipc.py index 7f6926b18b9f..d1022ffd1220 100644 --- a/mypy/ipc.py +++ b/mypy/ipc.py @@ -4,6 +4,8 @@ On Windows, this uses NamedPipes. 
""" +from __future__ import annotations + import base64 import os import shutil @@ -159,12 +161,12 @@ def __init__(self, name: str, timeout: Optional[float]) -> None: self.connection.settimeout(timeout) self.connection.connect(name) - def __enter__(self) -> "IPCClient": + def __enter__(self) -> IPCClient: return self def __exit__( self, - exc_ty: "Optional[Type[BaseException]]" = None, + exc_ty: Optional[Type[BaseException]] = None, exc_val: Optional[BaseException] = None, exc_tb: Optional[TracebackType] = None, ) -> None: @@ -211,7 +213,7 @@ def __init__(self, name: str, timeout: Optional[float] = None) -> None: if timeout is not None: self.sock.settimeout(timeout) - def __enter__(self) -> "IPCServer": + def __enter__(self) -> IPCServer: if sys.platform == "win32": # NOTE: It is theoretically possible that this will hang forever if the # client never connects, though this can be "solved" by killing the server @@ -243,7 +245,7 @@ def __enter__(self) -> "IPCServer": def __exit__( self, - exc_ty: "Optional[Type[BaseException]]" = None, + exc_ty: Optional[Type[BaseException]] = None, exc_val: Optional[BaseException] = None, exc_tb: Optional[TracebackType] = None, ) -> None: diff --git a/mypy/join.py b/mypy/join.py index 6a60cb0720e1..e65fca8da0b4 100644 --- a/mypy/join.py +++ b/mypy/join.py @@ -1,5 +1,7 @@ """Calculation of the least upper bound types (joins).""" +from __future__ import annotations + from typing import List, Optional, Tuple import mypy.typeops diff --git a/mypy/literals.py b/mypy/literals.py index e325be0ff5fb..59bfbf649cf7 100644 --- a/mypy/literals.py +++ b/mypy/literals.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import Any, Iterable, Optional, Tuple, Union from typing_extensions import Final diff --git a/mypy/lookup.py b/mypy/lookup.py index aa555ad11323..634703364d74 100644 --- a/mypy/lookup.py +++ b/mypy/lookup.py @@ -3,6 +3,8 @@ functions that will find a semantic node by its name. """ +from __future__ import annotations + from typing import Dict, Optional from mypy.nodes import MypyFile, SymbolTableNode, TypeInfo diff --git a/mypy/main.py b/mypy/main.py index 903d2d0d9b8f..bb24be7b177b 100644 --- a/mypy/main.py +++ b/mypy/main.py @@ -1,5 +1,7 @@ """Mypy type checker command line tool.""" +from __future__ import annotations + import argparse import os import subprocess diff --git a/mypy/maptype.py b/mypy/maptype.py index 59d86d9f79b8..aa6169d9cadb 100644 --- a/mypy/maptype.py +++ b/mypy/maptype.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import Dict, List from mypy.expandtype import expand_type diff --git a/mypy/meet.py b/mypy/meet.py index 1ea5c49c0680..62fc7db146e6 100644 --- a/mypy/meet.py +++ b/mypy/meet.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import Callable, List, Optional, Tuple from mypy import join diff --git a/mypy/memprofile.py b/mypy/memprofile.py index b49bf8048e3b..54bdefc2f798 100644 --- a/mypy/memprofile.py +++ b/mypy/memprofile.py @@ -4,6 +4,8 @@ owned by particular AST nodes, etc. """ +from __future__ import annotations + import gc import sys from collections import defaultdict diff --git a/mypy/message_registry.py b/mypy/message_registry.py index 963d8858753f..12f12a392a1d 100644 --- a/mypy/message_registry.py +++ b/mypy/message_registry.py @@ -6,6 +6,8 @@ add a method to MessageBuilder and call this instead. 
""" +from __future__ import annotations + from typing import NamedTuple, Optional from typing_extensions import Final @@ -16,7 +18,7 @@ class ErrorMessage(NamedTuple): value: str code: Optional[codes.ErrorCode] = None - def format(self, *args: object, **kwargs: object) -> "ErrorMessage": + def format(self, *args: object, **kwargs: object) -> ErrorMessage: return ErrorMessage(self.value.format(*args, **kwargs), code=self.code) diff --git a/mypy/messages.py b/mypy/messages.py index 32093c7ba253..8ea928bc5dcb 100644 --- a/mypy/messages.py +++ b/mypy/messages.py @@ -8,6 +8,9 @@ Historically we tried to avoid all message string literals in the type checker but we are moving away from this convention. """ + +from __future__ import annotations + import difflib import re from contextlib import contextmanager diff --git a/mypy/metastore.py b/mypy/metastore.py index 3cc4dd804896..d1c5da8284aa 100644 --- a/mypy/metastore.py +++ b/mypy/metastore.py @@ -8,6 +8,8 @@ on OS X. """ +from __future__ import annotations + import binascii import os import time @@ -149,7 +151,7 @@ def list_all(self) -> Iterable[str]: MIGRATIONS: List[str] = [] -def connect_db(db_file: str) -> "sqlite3.Connection": +def connect_db(db_file: str) -> sqlite3.Connection: import sqlite3.dbapi2 db = sqlite3.dbapi2.connect(db_file) diff --git a/mypy/mixedtraverser.py b/mypy/mixedtraverser.py index 425752c1c129..a9c05966fc03 100644 --- a/mypy/mixedtraverser.py +++ b/mypy/mixedtraverser.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import Optional from mypy.nodes import ( diff --git a/mypy/modulefinder.py b/mypy/modulefinder.py index f3173a9e81eb..f57c1b47aa1d 100644 --- a/mypy/modulefinder.py +++ b/mypy/modulefinder.py @@ -3,6 +3,8 @@ This builds on fscache.py; find_sources.py builds on top of this. """ +from __future__ import annotations + import ast import collections import functools diff --git a/mypy/moduleinspect.py b/mypy/moduleinspect.py index ec2e964f7ffc..794b2adb53c2 100644 --- a/mypy/moduleinspect.py +++ b/mypy/moduleinspect.py @@ -1,5 +1,7 @@ """Basic introspection of modules.""" +from __future__ import annotations + import importlib import inspect import os @@ -86,7 +88,7 @@ def get_package_properties(package_id: str) -> ModuleProperties: def worker( - tasks: "Queue[str]", results: "Queue[Union[str, ModuleProperties]]", sys_path: List[str] + tasks: Queue[str], results: Queue[Union[str, ModuleProperties]], sys_path: List[str] ) -> None: """The main loop of a worker introspection process.""" sys.path = sys_path @@ -170,7 +172,7 @@ def _get_from_queue(self) -> Union[ModuleProperties, str, None]: return None n += 1 - def __enter__(self) -> "ModuleInspect": + def __enter__(self) -> ModuleInspect: return self def __exit__(self, *args: object) -> None: diff --git a/mypy/mro.py b/mypy/mro.py index e4e8eecfb97e..b971a2c3885e 100644 --- a/mypy/mro.py +++ b/mypy/mro.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import Callable, List, Optional from mypy.nodes import TypeInfo diff --git a/mypy/nodes.py b/mypy/nodes.py index 4eb5f2c0e4e5..20236b97fba9 100644 --- a/mypy/nodes.py +++ b/mypy/nodes.py @@ -1,5 +1,7 @@ """Abstract syntax tree node classes (i.e. 
parse tree).""" +from __future__ import annotations + import os from abc import abstractmethod from collections import defaultdict @@ -46,7 +48,7 @@ def __init__(self, line: int = -1, column: int = -1) -> None: def set_line( self, - target: Union["Context", int], + target: Union[Context, int], column: Optional[int] = None, end_line: Optional[int] = None, end_column: Optional[int] = None, @@ -268,7 +270,7 @@ def serialize(self) -> JsonDict: pass @classmethod - def deserialize(cls, data: JsonDict) -> "SymbolNode": + def deserialize(cls, data: JsonDict) -> SymbolNode: classname = data[".class"] method = deserialize_map.get(classname) if method is not None: @@ -309,9 +311,9 @@ class MypyFile(SymbolNode): alias_deps: DefaultDict[str, Set[str]] # Is there a UTF-8 BOM at the start? is_bom: bool - names: "SymbolTable" + names: SymbolTable # All import nodes within the file (also ones within functions etc.) - imports: List["ImportBase"] + imports: List[ImportBase] # Lines on which to ignore certain errors when checking. # If the value is empty, ignore all errors; otherwise, the list contains all # error codes to ignore. @@ -332,7 +334,7 @@ class MypyFile(SymbolNode): def __init__( self, defs: List[Statement], - imports: List["ImportBase"], + imports: List[ImportBase], is_bom: bool = False, ignored_lines: Optional[Dict[int, List[str]]] = None, ) -> None: @@ -391,7 +393,7 @@ def serialize(self) -> JsonDict: } @classmethod - def deserialize(cls, data: JsonDict) -> "MypyFile": + def deserialize(cls, data: JsonDict) -> MypyFile: assert data[".class"] == "MypyFile", data tree = MypyFile([], []) tree._fullname = data["_fullname"] @@ -419,7 +421,7 @@ class ImportBase(Statement): # # x = 1 # from m import x <-- add assignment representing "x = m.x" - assignments: List["AssignmentStmt"] + assignments: List[AssignmentStmt] def __init__(self) -> None: super().__init__() @@ -513,7 +515,7 @@ def serialize(self) -> JsonDict: assert False, "ImportedName leaked from semantic analysis" @classmethod - def deserialize(cls, data: JsonDict) -> "ImportedName": + def deserialize(cls, data: JsonDict) -> ImportedName: assert False, "ImportedName should never be serialized" def __str__(self) -> str: @@ -595,7 +597,7 @@ class OverloadedFuncDef(FuncBase, SymbolNode, Statement): unanalyzed_items: List[OverloadPart] impl: Optional[OverloadPart] - def __init__(self, items: List["OverloadPart"]) -> None: + def __init__(self, items: List[OverloadPart]) -> None: super().__init__() self.items = items self.unanalyzed_items = items.copy() @@ -628,7 +630,7 @@ def serialize(self) -> JsonDict: } @classmethod - def deserialize(cls, data: JsonDict) -> "OverloadedFuncDef": + def deserialize(cls, data: JsonDict) -> OverloadedFuncDef: assert data[".class"] == "OverloadedFuncDef" res = OverloadedFuncDef( [cast(OverloadPart, SymbolNode.deserialize(d)) for d in data["items"]] @@ -655,10 +657,10 @@ class Argument(Node): def __init__( self, - variable: "Var", - type_annotation: "Optional[mypy.types.Type]", + variable: Var, + type_annotation: Optional[mypy.types.Type], initializer: Optional[Expression], - kind: "ArgKind", + kind: ArgKind, pos_only: bool = False, ) -> None: super().__init__() @@ -717,15 +719,15 @@ class FuncItem(FuncBase): def __init__( self, arguments: Optional[List[Argument]] = None, - body: Optional["Block"] = None, - typ: "Optional[mypy.types.FunctionLike]" = None, + body: Optional[Block] = None, + typ: Optional[mypy.types.FunctionLike] = None, ) -> None: super().__init__() self.arguments = arguments or [] self.arg_names = [None 
if arg.pos_only else arg.variable.name for arg in self.arguments] self.arg_kinds: List[ArgKind] = [arg.kind for arg in self.arguments] self.max_pos: int = self.arg_kinds.count(ARG_POS) + self.arg_kinds.count(ARG_OPT) - self.body: "Block" = body or Block([]) + self.body: Block = body or Block([]) self.type = typ self.unanalyzed_type = typ self.is_overload: bool = False @@ -789,8 +791,8 @@ def __init__( self, name: str = "", # Function name arguments: Optional[List[Argument]] = None, - body: Optional["Block"] = None, - typ: "Optional[mypy.types.FunctionLike]" = None, + body: Optional[Block] = None, + typ: Optional[mypy.types.FunctionLike] = None, ) -> None: super().__init__(arguments, body, typ) self._name = name @@ -830,7 +832,7 @@ def serialize(self) -> JsonDict: } @classmethod - def deserialize(cls, data: JsonDict) -> "FuncDef": + def deserialize(cls, data: JsonDict) -> FuncDef: assert data[".class"] == "FuncDef" body = Block([]) ret = FuncDef( @@ -874,10 +876,10 @@ class Decorator(SymbolNode, Statement): # Some decorators are removed by semanal, keep the original here. original_decorators: List[Expression] # TODO: This is mostly used for the type; consider replacing with a 'type' attribute - var: "Var" # Represents the decorated function obj + var: Var # Represents the decorated function obj is_overload: bool - def __init__(self, func: FuncDef, decorators: List[Expression], var: "Var") -> None: + def __init__(self, func: FuncDef, decorators: List[Expression], var: Var) -> None: super().__init__() self.func = func self.decorators = decorators @@ -898,11 +900,11 @@ def is_final(self) -> bool: return self.func.is_final @property - def info(self) -> "TypeInfo": + def info(self) -> TypeInfo: return self.func.info @property - def type(self) -> "Optional[mypy.types.Type]": + def type(self) -> Optional[mypy.types.Type]: return self.var.type def accept(self, visitor: StatementVisitor[T]) -> T: @@ -917,7 +919,7 @@ def serialize(self) -> JsonDict: } @classmethod - def deserialize(cls, data: JsonDict) -> "Decorator": + def deserialize(cls, data: JsonDict) -> Decorator: assert data[".class"] == "Decorator" dec = Decorator(FuncDef.deserialize(data["func"]), [], Var.deserialize(data["var"])) dec.is_overload = data["is_overload"] @@ -978,7 +980,7 @@ class Var(SymbolNode): "allow_incompatible_override", ) - def __init__(self, name: str, type: "Optional[mypy.types.Type]" = None) -> None: + def __init__(self, name: str, type: Optional[mypy.types.Type] = None) -> None: super().__init__() self._name = name # Name without module prefix # TODO: Should be Optional[str] @@ -1052,7 +1054,7 @@ def serialize(self) -> JsonDict: return data @classmethod - def deserialize(cls, data: JsonDict) -> "Var": + def deserialize(cls, data: JsonDict) -> Var: assert data[".class"] == "Var" name = data["name"] type = None if data["type"] is None else mypy.types.deserialize_type(data["type"]) @@ -1085,13 +1087,13 @@ class ClassDef(Statement): name: str # Name of the class without module prefix fullname: Bogus[str] # Fully qualified name of the class - defs: "Block" - type_vars: List["mypy.types.TypeVarLikeType"] + defs: Block + type_vars: List[mypy.types.TypeVarLikeType] # Base class expressions (not semantically analyzed -- can be arbitrary expressions) base_type_exprs: List[Expression] # Special base classes like Generic[...] 
get moved here during semantic analysis removed_base_type_exprs: List[Expression] - info: "TypeInfo" # Related TypeInfo + info: TypeInfo # Related TypeInfo metaclass: Optional[Expression] decorators: List[Expression] keywords: Dict[str, Expression] @@ -1101,8 +1103,8 @@ class ClassDef(Statement): def __init__( self, name: str, - defs: "Block", - type_vars: Optional[List["mypy.types.TypeVarLikeType"]] = None, + defs: Block, + type_vars: Optional[List[mypy.types.TypeVarLikeType]] = None, base_type_exprs: Optional[List[Expression]] = None, metaclass: Optional[Expression] = None, keywords: Optional[List[Tuple[str, Expression]]] = None, @@ -1140,7 +1142,7 @@ def serialize(self) -> JsonDict: } @classmethod - def deserialize(self, data: JsonDict) -> "ClassDef": + def deserialize(self, data: JsonDict) -> ClassDef: assert data[".class"] == "ClassDef" res = ClassDef( data["name"], @@ -1246,9 +1248,9 @@ class AssignmentStmt(Statement): # This is a TempNode if and only if no rvalue (x: t). rvalue: Expression # Declared type in a comment, may be None. - type: Optional["mypy.types.Type"] + type: Optional[mypy.types.Type] # Original, not semantically analyzed type in annotation (used for reprocessing) - unanalyzed_type: Optional["mypy.types.Type"] + unanalyzed_type: Optional[mypy.types.Type] # This indicates usage of PEP 526 type annotation syntax in assignment. new_syntax: bool # Does this assignment define a type alias? @@ -1268,7 +1270,7 @@ def __init__( self, lvalues: List[Lvalue], rvalue: Expression, - type: "Optional[mypy.types.Type]" = None, + type: Optional[mypy.types.Type] = None, new_syntax: bool = False, ) -> None: super().__init__() @@ -1337,13 +1339,13 @@ class ForStmt(Statement): # Index variables index: Lvalue # Type given by type comments for index, can be None - index_type: Optional["mypy.types.Type"] + index_type: Optional[mypy.types.Type] # Original, not semantically analyzed type in annotation (used for reprocessing) - unanalyzed_index_type: Optional["mypy.types.Type"] + unanalyzed_index_type: Optional[mypy.types.Type] # Inferred iterable item type - inferred_item_type: Optional["mypy.types.Type"] + inferred_item_type: Optional[mypy.types.Type] # Inferred iterator type - inferred_iterator_type: Optional["mypy.types.Type"] + inferred_iterator_type: Optional[mypy.types.Type] # Expression to iterate expr: Expression body: Block @@ -1356,7 +1358,7 @@ def __init__( expr: Expression, body: Block, else_body: Optional[Block], - index_type: "Optional[mypy.types.Type]" = None, + index_type: Optional[mypy.types.Type] = None, ) -> None: super().__init__() self.index = index @@ -1476,7 +1478,7 @@ class TryStmt(Statement): body: Block # Try body # Plain 'except:' also possible types: List[Optional[Expression]] # Except type expressions - vars: List[Optional["NameExpr"]] # Except variable names + vars: List[Optional[NameExpr]] # Except variable names handlers: List[Block] # Except bodies else_body: Optional[Block] finally_body: Optional[Block] @@ -1484,7 +1486,7 @@ class TryStmt(Statement): def __init__( self, body: Block, - vars: List["Optional[NameExpr]"], + vars: List[Optional[NameExpr]], types: List[Optional[Expression]], handlers: List[Block], else_body: Optional[Block], @@ -1508,9 +1510,9 @@ class WithStmt(Statement): expr: List[Expression] target: List[Optional[Lvalue]] # Type given by type comments for target, can be None - unanalyzed_type: Optional["mypy.types.Type"] + unanalyzed_type: Optional[mypy.types.Type] # Semantically analyzed types from type comment (TypeList type expanded) - 
analyzed_types: List["mypy.types.Type"] + analyzed_types: List[mypy.types.Type] body: Block is_async: bool # True if `async with ...` (PEP 492, Python 3.5) @@ -1519,7 +1521,7 @@ def __init__( expr: List[Expression], target: List[Optional[Lvalue]], body: Block, - target_type: "Optional[mypy.types.Type]" = None, + target_type: Optional[mypy.types.Type] = None, ) -> None: super().__init__() self.expr = expr @@ -1535,14 +1537,14 @@ def accept(self, visitor: StatementVisitor[T]) -> T: class MatchStmt(Statement): subject: Expression - patterns: List["Pattern"] + patterns: List[Pattern] guards: List[Optional[Expression]] bodies: List[Block] def __init__( self, subject: Expression, - patterns: List["Pattern"], + patterns: List[Pattern], guards: List[Optional[Expression]], bodies: List[Block], ) -> None: @@ -1709,7 +1711,7 @@ def __init__(self) -> None: # Is this expression appears as an rvalue of a valid type alias definition? self.is_alias_rvalue = False # Cache type guard from callable_type.type_guard - self.type_guard: Optional["mypy.types.Type"] = None + self.type_guard: Optional[mypy.types.Type] = None class NameExpr(RefExpr): @@ -1861,10 +1863,10 @@ class IndexExpr(Expression): base: Expression index: Expression # Inferred __getitem__ method type - method_type: Optional["mypy.types.Type"] + method_type: Optional[mypy.types.Type] # If not None, this is actually semantically a type application # Class[type, ...] or a type alias initializer. - analyzed: Union["TypeApplication", "TypeAliasExpr", None] + analyzed: Union[TypeApplication, TypeAliasExpr, None] def __init__(self, base: Expression, index: Expression) -> None: super().__init__() @@ -1885,7 +1887,7 @@ class UnaryExpr(Expression): op: str # TODO: Enum? expr: Expression # Inferred operator method type - method_type: Optional["mypy.types.Type"] + method_type: Optional[mypy.types.Type] def __init__(self, op: str, expr: Expression) -> None: super().__init__() @@ -1921,7 +1923,7 @@ class OpExpr(Expression): left: Expression right: Expression # Inferred type for the operator method type (when relevant). - method_type: Optional["mypy.types.Type"] + method_type: Optional[mypy.types.Type] # Per static analysis only: Is the right side going to be evaluated every time? right_always: bool # Per static analysis only: Is the right side unreachable? @@ -1948,7 +1950,7 @@ class ComparisonExpr(Expression): operators: List[str] operands: List[Expression] # Inferred type for the operator methods (when relevant; None for 'is'). 
- method_types: List[Optional["mypy.types.Type"]] + method_types: List[Optional[mypy.types.Type]] def __init__(self, operators: List[str], operands: List[Expression]) -> None: super().__init__() @@ -2000,9 +2002,9 @@ class CastExpr(Expression): __slots__ = ("expr", "type") expr: Expression - type: "mypy.types.Type" + type: mypy.types.Type - def __init__(self, expr: Expression, typ: "mypy.types.Type") -> None: + def __init__(self, expr: Expression, typ: mypy.types.Type) -> None: super().__init__() self.expr = expr self.type = typ @@ -2017,9 +2019,9 @@ class AssertTypeExpr(Expression): __slots__ = ("expr", "type") expr: Expression - type: "mypy.types.Type" + type: mypy.types.Type - def __init__(self, expr: Expression, typ: "mypy.types.Type") -> None: + def __init__(self, expr: Expression, typ: mypy.types.Type) -> None: super().__init__() self.expr = expr self.type = typ @@ -2038,10 +2040,7 @@ class RevealExpr(Expression): local_nodes: Optional[List[Var]] def __init__( - self, - kind: int, - expr: Optional[Expression] = None, - local_nodes: "Optional[List[Var]]" = None, + self, kind: int, expr: Optional[Expression] = None, local_nodes: Optional[List[Var]] = None ) -> None: super().__init__() self.expr = expr @@ -2058,7 +2057,7 @@ class SuperExpr(Expression): __slots__ = ("name", "info", "call") name: str - info: Optional["TypeInfo"] # Type that contains this super expression + info: Optional[TypeInfo] # Type that contains this super expression call: CallExpr # The expression super(...) def __init__(self, name: str, call: CallExpr) -> None: @@ -2272,9 +2271,9 @@ class TypeApplication(Expression): __slots__ = ("expr", "types") expr: Expression - types: List["mypy.types.Type"] + types: List[mypy.types.Type] - def __init__(self, expr: Expression, types: List["mypy.types.Type"]) -> None: + def __init__(self, expr: Expression, types: List[mypy.types.Type]) -> None: super().__init__() self.expr = expr self.types = types @@ -2309,7 +2308,7 @@ class TypeVarLikeExpr(SymbolNode, Expression): _fullname: str # Upper bound: only subtypes of upper_bound are valid as values. By default # this is 'object', meaning no restriction. - upper_bound: "mypy.types.Type" + upper_bound: mypy.types.Type # Variance of the type variable. Invariant is the default. # TypeVar(..., covariant=True) defines a covariant type variable. # TypeVar(..., contravariant=True) defines a contravariant type @@ -2317,7 +2316,7 @@ class TypeVarLikeExpr(SymbolNode, Expression): variance: int def __init__( - self, name: str, fullname: str, upper_bound: "mypy.types.Type", variance: int = INVARIANT + self, name: str, fullname: str, upper_bound: mypy.types.Type, variance: int = INVARIANT ) -> None: super().__init__() self._name = name @@ -2350,14 +2349,14 @@ class TypeVarExpr(TypeVarLikeExpr): # Value restriction: only types in the list are valid as values. If the # list is empty, there is no restriction. 
- values: List["mypy.types.Type"] + values: List[mypy.types.Type] def __init__( self, name: str, fullname: str, - values: List["mypy.types.Type"], - upper_bound: "mypy.types.Type", + values: List[mypy.types.Type], + upper_bound: mypy.types.Type, variance: int = INVARIANT, ) -> None: super().__init__(name, fullname, upper_bound, variance) @@ -2377,7 +2376,7 @@ def serialize(self) -> JsonDict: } @classmethod - def deserialize(cls, data: JsonDict) -> "TypeVarExpr": + def deserialize(cls, data: JsonDict) -> TypeVarExpr: assert data[".class"] == "TypeVarExpr" return TypeVarExpr( data["name"], @@ -2404,7 +2403,7 @@ def serialize(self) -> JsonDict: } @classmethod - def deserialize(cls, data: JsonDict) -> "ParamSpecExpr": + def deserialize(cls, data: JsonDict) -> ParamSpecExpr: assert data[".class"] == "ParamSpecExpr" return ParamSpecExpr( data["name"], @@ -2432,7 +2431,7 @@ def serialize(self) -> JsonDict: } @classmethod - def deserialize(cls, data: JsonDict) -> "TypeVarTupleExpr": + def deserialize(cls, data: JsonDict) -> TypeVarTupleExpr: assert data[".class"] == "TypeVarTupleExpr" return TypeVarTupleExpr( data["name"], @@ -2448,7 +2447,7 @@ class TypeAliasExpr(Expression): __slots__ = ("type", "tvars", "no_args", "node") # The target type. - type: "mypy.types.Type" + type: mypy.types.Type # Names of unbound type variables used to define the alias tvars: List[str] # Whether this alias was defined in bare form. Used to distinguish @@ -2457,9 +2456,9 @@ class TypeAliasExpr(Expression): # and # A = List[Any] no_args: bool - node: "TypeAlias" + node: TypeAlias - def __init__(self, node: "TypeAlias") -> None: + def __init__(self, node: TypeAlias) -> None: super().__init__() self.type = node.target self.tvars = node.alias_tvars @@ -2477,10 +2476,10 @@ class NamedTupleExpr(Expression): # The class representation of this named tuple (its tuple_type attribute contains # the tuple item types) - info: "TypeInfo" + info: TypeInfo is_typed: bool # whether this class was created with typing(_extensions).NamedTuple - def __init__(self, info: "TypeInfo", is_typed: bool = False) -> None: + def __init__(self, info: TypeInfo, is_typed: bool = False) -> None: super().__init__() self.info = info self.is_typed = is_typed @@ -2495,9 +2494,9 @@ class TypedDictExpr(Expression): __slots__ = ("info",) # The class representation of this typed dict - info: "TypeInfo" + info: TypeInfo - def __init__(self, info: "TypeInfo") -> None: + def __init__(self, info: TypeInfo) -> None: super().__init__() self.info = info @@ -2511,13 +2510,13 @@ class EnumCallExpr(Expression): __slots__ = ("info", "items", "values") # The class representation of this enumerated type - info: "TypeInfo" + info: TypeInfo # The item names (for debugging) items: List[str] values: List[Optional[Expression]] def __init__( - self, info: "TypeInfo", items: List[str], values: List[Optional[Expression]] + self, info: TypeInfo, items: List[str], values: List[Optional[Expression]] ) -> None: super().__init__() self.info = info @@ -2533,9 +2532,9 @@ class PromoteExpr(Expression): __slots__ = ("type",) - type: "mypy.types.Type" + type: mypy.types.Type - def __init__(self, type: "mypy.types.Type") -> None: + def __init__(self, type: mypy.types.Type) -> None: super().__init__() self.type = type @@ -2550,12 +2549,12 @@ class NewTypeExpr(Expression): name: str # The base type (the second argument to NewType) - old_type: Optional["mypy.types.Type"] + old_type: Optional[mypy.types.Type] # The synthesized class representing the new type (inherits old_type) - info: 
Optional["TypeInfo"] + info: Optional[TypeInfo] def __init__( - self, name: str, old_type: "Optional[mypy.types.Type]", line: int, column: int + self, name: str, old_type: Optional[mypy.types.Type], line: int, column: int ) -> None: super().__init__(line=line, column=column) self.name = name @@ -2594,13 +2593,13 @@ class TempNode(Expression): __slots__ = ("type", "no_rhs") - type: "mypy.types.Type" + type: mypy.types.Type # Is this TempNode used to indicate absence of a right hand side in an annotated assignment? # (e.g. for 'x: int' the rvalue is TempNode(AnyType(TypeOfAny.special_form), no_rhs=True)) no_rhs: bool def __init__( - self, typ: "mypy.types.Type", no_rhs: bool = False, *, context: Optional[Context] = None + self, typ: mypy.types.Type, no_rhs: bool = False, *, context: Optional[Context] = None ) -> None: """Construct a dummy node; optionally borrow line/column from context object.""" super().__init__() @@ -2677,17 +2676,17 @@ class is generic then it will be a type constructor of higher kind. defn: ClassDef # Corresponding ClassDef # Method Resolution Order: the order of looking up attributes. The first # value always to refers to this class. - mro: List["TypeInfo"] + mro: List[TypeInfo] # Used to stash the names of the mro classes temporarily between # deserialization and fixup. See deserialize() for why. _mro_refs: Optional[List[str]] bad_mro: bool # Could not construct full MRO is_final: bool - declared_metaclass: Optional["mypy.types.Instance"] - metaclass_type: Optional["mypy.types.Instance"] + declared_metaclass: Optional[mypy.types.Instance] + metaclass_type: Optional[mypy.types.Instance] - names: "SymbolTable" # Names defined directly in this type + names: SymbolTable # Names defined directly in this type is_abstract: bool # Does the class have any abstract attributes? is_protocol: bool # Is this a protocol class? runtime_protocol: bool # Does this protocol support isinstance checks? @@ -2720,8 +2719,8 @@ class is generic then it will be a type constructor of higher kind. # If concurrent/parallel type checking will be added in future, # then there should be one matrix per thread/process to avoid false negatives # during the type checking phase. - assuming: List[Tuple["mypy.types.Instance", "mypy.types.Instance"]] - assuming_proper: List[Tuple["mypy.types.Instance", "mypy.types.Instance"]] + assuming: List[Tuple[mypy.types.Instance, mypy.types.Instance]] + assuming_proper: List[Tuple[mypy.types.Instance, mypy.types.Instance]] # Ditto for temporary 'inferring' stack of recursive constraint inference. # It contains Instances of protocol types that appeared as an argument to # constraints.infer_constraints(). We need 'inferring' to avoid infinite recursion for @@ -2731,7 +2730,7 @@ class is generic then it will be a type constructor of higher kind. # since this would require to pass them in many dozens of calls. In particular, # there is a dependency infer_constraint -> is_subtype -> is_callable_subtype -> # -> infer_constraints. - inferring: List["mypy.types.Instance"] + inferring: List[mypy.types.Instance] # 'inferring' and 'assuming' can't be made sets, since we need to use # is_same_type to correctly treat unions. @@ -2754,13 +2753,13 @@ class is generic then it will be a type constructor of higher kind. has_param_spec_type: bool # Direct base classes. - bases: List["mypy.types.Instance"] + bases: List[mypy.types.Instance] # Another type which this type will be treated as a subtype of, # even though it's not a subclass in Python. 
The non-standard # `@_promote` decorator introduces this, and there are also # several builtin examples, in particular `int` -> `float`. - _promote: List["mypy.types.Type"] + _promote: List[mypy.types.Type] # This is used for promoting native integer types such as 'i64' to # 'int'. (_promote is used for the other direction.) This only @@ -2770,21 +2769,21 @@ class is generic then it will be a type constructor of higher kind. # This results in some unintuitive results, such as that even # though i64 is compatible with int and int is compatible with # float, i64 is *not* compatible with float. - alt_promote: Optional["TypeInfo"] + alt_promote: Optional[TypeInfo] # Representation of a Tuple[...] base class, if the class has any # (e.g., for named tuples). If this is not None, the actual Type # object used for this class is not an Instance but a TupleType; # the corresponding Instance is set as the fallback type of the # tuple type. - tuple_type: Optional["mypy.types.TupleType"] + tuple_type: Optional[mypy.types.TupleType] # Is this a named tuple type? is_named_tuple: bool # If this class is defined by the TypedDict type constructor, # then this is not None. - typeddict_type: Optional["mypy.types.TypedDictType"] + typeddict_type: Optional[mypy.types.TypedDictType] # Is this a newtype type? is_newtype: bool @@ -2805,7 +2804,7 @@ class is generic then it will be a type constructor of higher kind. # To overcome this, we create a TypeAlias node, that will point to these types. # We store this node in the `special_alias` attribute, because it must be the same node # in case we are doing multiple semantic analysis passes. - special_alias: Optional["TypeAlias"] + special_alias: Optional[TypeAlias] FLAGS: Final = [ "is_abstract", @@ -2819,7 +2818,7 @@ class is generic then it will be a type constructor of higher kind. "is_intersection", ] - def __init__(self, names: "SymbolTable", defn: ClassDef, module_name: str) -> None: + def __init__(self, names: SymbolTable, defn: ClassDef, module_name: str) -> None: """Initialize a TypeInfo.""" super().__init__() self._fullname = defn.fullname @@ -2888,14 +2887,14 @@ def is_generic(self) -> bool: """Is the type generic (i.e. does it have type variables)?""" return len(self.type_vars) > 0 - def get(self, name: str) -> "Optional[SymbolTableNode]": + def get(self, name: str) -> Optional[SymbolTableNode]: for cls in self.mro: n = cls.names.get(name) if n: return n return None - def get_containing_type_info(self, name: str) -> "Optional[TypeInfo]": + def get_containing_type_info(self, name: str) -> Optional[TypeInfo]: for cls in self.mro: if name in cls.names: return cls @@ -2913,7 +2912,7 @@ def protocol_members(self) -> List[str]: members.add(name) return sorted(list(members)) - def __getitem__(self, name: str) -> "SymbolTableNode": + def __getitem__(self, name: str) -> SymbolTableNode: n = self.get(name) if n: return n @@ -2944,7 +2943,7 @@ def get_method(self, name: str) -> Union[FuncBase, Decorator, None]: return None return None - def calculate_metaclass_type(self) -> "Optional[mypy.types.Instance]": + def calculate_metaclass_type(self) -> Optional[mypy.types.Instance]: declared = self.declared_metaclass if declared is not None and not declared.type.has_base("builtins.type"): return declared @@ -2977,14 +2976,14 @@ def has_base(self, fullname: str) -> bool: return True return False - def direct_base_classes(self) -> "List[TypeInfo]": + def direct_base_classes(self) -> List[TypeInfo]: """Return a direct base classes. Omit base classes of other base classes. 
""" return [base.type for base in self.bases] - def update_tuple_type(self, typ: "mypy.types.TupleType") -> None: + def update_tuple_type(self, typ: mypy.types.TupleType) -> None: """Update tuple_type and special_alias as needed.""" self.tuple_type = typ alias = TypeAlias.from_tuple_type(self) @@ -2993,7 +2992,7 @@ def update_tuple_type(self, typ: "mypy.types.TupleType") -> None: else: self.special_alias.target = alias.target - def update_typeddict_type(self, typ: "mypy.types.TypedDictType") -> None: + def update_typeddict_type(self, typ: mypy.types.TypedDictType) -> None: """Update typeddict_type and special_alias as needed.""" self.typeddict_type = typ alias = TypeAlias.from_typeddict_type(self) @@ -3011,15 +3010,15 @@ def __str__(self) -> str: def dump( self, - str_conv: "Optional[mypy.strconv.StrConv]" = None, - type_str_conv: "Optional[mypy.types.TypeStrVisitor]" = None, + str_conv: Optional[mypy.strconv.StrConv] = None, + type_str_conv: Optional[mypy.types.TypeStrVisitor] = None, ) -> str: """Return a string dump of the contents of the TypeInfo.""" if not str_conv: str_conv = mypy.strconv.StrConv() base: str = "" - def type_str(typ: "mypy.types.Type") -> str: + def type_str(typ: mypy.types.Type) -> str: if type_str_conv: return typ.accept(type_str_conv) return str(typ) @@ -3076,7 +3075,7 @@ def serialize(self) -> JsonDict: return data @classmethod - def deserialize(cls, data: JsonDict) -> "TypeInfo": + def deserialize(cls, data: JsonDict) -> TypeInfo: names = SymbolTable.deserialize(data["names"]) defn = ClassDef.deserialize(data["defn"]) module_name = data["module_name"] @@ -3267,7 +3266,7 @@ def f(x: B[T]) -> T: ... # without T, Any would be used here def __init__( self, - target: "mypy.types.Type", + target: mypy.types.Type, fullname: str, line: int, column: int, @@ -3291,7 +3290,7 @@ def __init__( super().__init__(line, column) @classmethod - def from_tuple_type(cls, info: TypeInfo) -> "TypeAlias": + def from_tuple_type(cls, info: TypeInfo) -> TypeAlias: """Generate an alias to the tuple type described by a given TypeInfo.""" assert info.tuple_type return TypeAlias( @@ -3302,7 +3301,7 @@ def from_tuple_type(cls, info: TypeInfo) -> "TypeAlias": ) @classmethod - def from_typeddict_type(cls, info: TypeInfo) -> "TypeAlias": + def from_typeddict_type(cls, info: TypeInfo) -> TypeAlias: """Generate an alias to the TypedDict type described by a given TypeInfo.""" assert info.typeddict_type return TypeAlias( @@ -3337,7 +3336,7 @@ def accept(self, visitor: NodeVisitor[T]) -> T: return visitor.visit_type_alias(self) @classmethod - def deserialize(cls, data: JsonDict) -> "TypeAlias": + def deserialize(cls, data: JsonDict) -> TypeAlias: assert data[".class"] == "TypeAlias" fullname = data["fullname"] alias_tvars = data["alias_tvars"] @@ -3530,7 +3529,7 @@ def fullname(self) -> Optional[str]: return None @property - def type(self) -> "Optional[mypy.types.Type]": + def type(self) -> Optional[mypy.types.Type]: node = self.node if isinstance(node, (Var, SYMBOL_FUNCBASE_TYPES)) and node.type is not None: return node.type @@ -3539,7 +3538,7 @@ def type(self) -> "Optional[mypy.types.Type]": else: return None - def copy(self) -> "SymbolTableNode": + def copy(self) -> SymbolTableNode: new = SymbolTableNode( self.kind, self.node, self.module_public, self.implicit, self.module_hidden ) @@ -3592,7 +3591,7 @@ def serialize(self, prefix: str, name: str) -> JsonDict: return data @classmethod - def deserialize(cls, data: JsonDict) -> "SymbolTableNode": + def deserialize(cls, data: JsonDict) -> 
SymbolTableNode: assert data[".class"] == "SymbolTableNode" kind = inverse_node_kinds[data["kind"]] if "cross_ref" in data: @@ -3639,7 +3638,7 @@ def __str__(self) -> str: a[-1] += ")" return "\n".join(a) - def copy(self) -> "SymbolTable": + def copy(self) -> SymbolTable: return SymbolTable([(key, node.copy()) for key, node in self.items()]) def serialize(self, fullname: str) -> JsonDict: @@ -3655,7 +3654,7 @@ def serialize(self, fullname: str) -> JsonDict: return data @classmethod - def deserialize(cls, data: JsonDict) -> "SymbolTable": + def deserialize(cls, data: JsonDict) -> SymbolTable: assert data[".class"] == "SymbolTable" st = SymbolTable() for key, value in data.items(): diff --git a/mypy/operators.py b/mypy/operators.py index b546ede36d06..2b383ef199bb 100644 --- a/mypy/operators.py +++ b/mypy/operators.py @@ -1,5 +1,7 @@ """Information about Python operators""" +from __future__ import annotations + from typing_extensions import Final # Map from binary operator id to related method name (in Python 3). diff --git a/mypy/options.py b/mypy/options.py index d55272c538bc..fd574feb5de3 100644 --- a/mypy/options.py +++ b/mypy/options.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import pprint import re import sys @@ -335,7 +337,7 @@ def snapshot(self) -> object: def __repr__(self) -> str: return f"Options({pprint.pformat(self.snapshot())})" - def apply_changes(self, changes: Dict[str, object]) -> "Options": + def apply_changes(self, changes: Dict[str, object]) -> Options: new_options = Options() # Under mypyc, we don't have a __dict__, so we need to do worse things. replace_object_state(new_options, self, copy_dict=True) @@ -390,7 +392,7 @@ def build_per_module_cache(self) -> None: # they only count as used if actually used by a real module. self.unused_configs.update(structured_keys) - def clone_for_module(self, module: str) -> "Options": + def clone_for_module(self, module: str) -> Options: """Create an Options object that incorporates per-module options. NOTE: Once this method is called all Options objects should be diff --git a/mypy/parse.py b/mypy/parse.py index d078a742562d..4738222c2312 100644 --- a/mypy/parse.py +++ b/mypy/parse.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import Optional, Union from mypy.errors import Errors diff --git a/mypy/patterns.py b/mypy/patterns.py index 11aec70655c6..03e88b977330 100644 --- a/mypy/patterns.py +++ b/mypy/patterns.py @@ -1,4 +1,7 @@ """Classes for representing match statement patterns.""" + +from __future__ import annotations + from typing import List, Optional, TypeVar, Union from mypy_extensions import trait diff --git a/mypy/plugin.py b/mypy/plugin.py index 948d2b1e829a..fbdf6a6ae110 100644 --- a/mypy/plugin.py +++ b/mypy/plugin.py @@ -119,6 +119,8 @@ class C: pass semantic analyzer is enabled (it's always true in mypy 0.730 and later). 
""" +from __future__ import annotations + from abc import abstractmethod from typing import Any, Callable, Dict, List, NamedTuple, Optional, Tuple, TypeVar, Union diff --git a/mypy/reachability.py b/mypy/reachability.py index b43092c424fb..e667555cdb33 100644 --- a/mypy/reachability.py +++ b/mypy/reachability.py @@ -1,5 +1,7 @@ """Utilities related to determining the reachability of code (in semantic analysis).""" +from __future__ import annotations + from typing import Optional, Tuple, TypeVar, Union from typing_extensions import Final diff --git a/mypy/renaming.py b/mypy/renaming.py index abb3cd4aa8a1..f05f07cb29e5 100644 --- a/mypy/renaming.py +++ b/mypy/renaming.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from contextlib import contextmanager from typing import Dict, Iterator, List, Set from typing_extensions import Final diff --git a/mypy/report.py b/mypy/report.py index 841139180f28..375f63b1d463 100644 --- a/mypy/report.py +++ b/mypy/report.py @@ -1,5 +1,7 @@ """Classes for producing HTML reports about imprecision.""" +from __future__ import annotations + import collections import itertools import json @@ -30,7 +32,7 @@ except ImportError: LXML_INSTALLED = False -type_of_any_name_map: Final["collections.OrderedDict[int, str]"] = collections.OrderedDict( +type_of_any_name_map: Final[collections.OrderedDict[int, str]] = collections.OrderedDict( [ (TypeOfAny.unannotated, "Unannotated"), (TypeOfAny.explicit, "Explicit"), @@ -58,7 +60,7 @@ def __init__(self, data_dir: str, report_dirs: Dict[str, str]) -> None: for report_type, report_dir in sorted(report_dirs.items()): self.add_report(report_type, report_dir) - def add_report(self, report_type: str, report_dir: str) -> "AbstractReporter": + def add_report(self, report_type: str, report_dir: str) -> AbstractReporter: try: return self.named_reporters[report_type] except KeyError: diff --git a/mypy/scope.py b/mypy/scope.py index c627b9d48ba1..f082c0b24b42 100644 --- a/mypy/scope.py +++ b/mypy/scope.py @@ -3,6 +3,8 @@ TODO: Use everywhere where we track targets, including in mypy.errors. """ +from __future__ import annotations + from contextlib import contextmanager, nullcontext from typing import Iterator, List, Optional, Tuple from typing_extensions import TypeAlias as _TypeAlias diff --git a/mypy/semanal.py b/mypy/semanal.py index 43fe3b163485..ae892bcf0111 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -48,6 +48,8 @@ reduce memory use). """ +from __future__ import annotations + from contextlib import contextmanager from typing import ( Any, diff --git a/mypy/semanal_classprop.py b/mypy/semanal_classprop.py index 478430aaea1c..97de6bc3cbd0 100644 --- a/mypy/semanal_classprop.py +++ b/mypy/semanal_classprop.py @@ -3,6 +3,8 @@ These happen after semantic analysis and before type checking. """ +from __future__ import annotations + from typing import List, Optional, Set, Tuple from typing_extensions import Final diff --git a/mypy/semanal_enum.py b/mypy/semanal_enum.py index f1c999995704..d83f85995ea0 100644 --- a/mypy/semanal_enum.py +++ b/mypy/semanal_enum.py @@ -3,6 +3,8 @@ This is conceptually part of mypy.semanal (semantic analyzer pass 2). 
""" +from __future__ import annotations + from typing import List, Optional, Tuple, cast from typing_extensions import Final diff --git a/mypy/semanal_infer.py b/mypy/semanal_infer.py index 56b504645160..fbbbc7812091 100644 --- a/mypy/semanal_infer.py +++ b/mypy/semanal_infer.py @@ -1,5 +1,7 @@ """Simple type inference for decorated functions during semantic analysis.""" +from __future__ import annotations + from typing import Optional from mypy.nodes import ARG_POS, CallExpr, Decorator, Expression, FuncDef, RefExpr, Var diff --git a/mypy/semanal_main.py b/mypy/semanal_main.py index a2f9cf3c7e98..8ecb99344a69 100644 --- a/mypy/semanal_main.py +++ b/mypy/semanal_main.py @@ -24,6 +24,8 @@ will be incomplete. """ +from __future__ import annotations + from contextlib import nullcontext from typing import TYPE_CHECKING, Callable, List, Optional, Tuple, Union from typing_extensions import Final, TypeAlias as _TypeAlias @@ -67,7 +69,7 @@ core_modules: Final = ["typing", "builtins", "abc", "collections"] -def semantic_analysis_for_scc(graph: "Graph", scc: List[str], errors: Errors) -> None: +def semantic_analysis_for_scc(graph: Graph, scc: List[str], errors: Errors) -> None: """Perform semantic analysis for all modules in a SCC (import cycle). Assume that reachability analysis has already been performed. @@ -95,7 +97,7 @@ def semantic_analysis_for_scc(graph: "Graph", scc: List[str], errors: Errors) -> cleanup_builtin_scc(graph["builtins"]) -def cleanup_builtin_scc(state: "State") -> None: +def cleanup_builtin_scc(state: State) -> None: """Remove imported names from builtins namespace. This way names imported from typing in builtins.pyi aren't available @@ -108,10 +110,7 @@ def cleanup_builtin_scc(state: "State") -> None: def semantic_analysis_for_targets( - state: "State", - nodes: List[FineGrainedDeferredNode], - graph: "Graph", - saved_attrs: SavedAttributes, + state: State, nodes: List[FineGrainedDeferredNode], graph: Graph, saved_attrs: SavedAttributes ) -> None: """Semantically analyze only selected nodes in a given module. @@ -166,7 +165,7 @@ def restore_saved_attrs(saved_attrs: SavedAttributes) -> None: info.names[name] = sym -def process_top_levels(graph: "Graph", scc: List[str], patches: Patches) -> None: +def process_top_levels(graph: Graph, scc: List[str], patches: Patches) -> None: # Process top levels until everything has been bound. # Reverse order of the scc so the first modules in the original list will be @@ -224,7 +223,7 @@ def process_top_levels(graph: "Graph", scc: List[str], patches: Patches) -> None final_iteration = not any_progress -def process_functions(graph: "Graph", scc: List[str], patches: Patches) -> None: +def process_functions(graph: Graph, scc: List[str], patches: Patches) -> None: # Process functions. 
for module in scc: tree = graph[module].tree @@ -247,8 +246,8 @@ def process_functions(graph: "Graph", scc: List[str], patches: Patches) -> None: def process_top_level_function( - analyzer: "SemanticAnalyzer", - state: "State", + analyzer: SemanticAnalyzer, + state: State, module: str, target: str, node: Union[FuncDef, OverloadedFuncDef, Decorator], @@ -308,7 +307,7 @@ def get_all_leaf_targets(file: MypyFile) -> List[TargetInfo]: def semantic_analyze_target( target: str, - state: "State", + state: State, node: Union[MypyFile, FuncDef, OverloadedFuncDef, Decorator], active_type: Optional[TypeInfo], final_iteration: bool, @@ -362,7 +361,7 @@ def semantic_analyze_target( return [], analyzer.incomplete, analyzer.progress -def check_type_arguments(graph: "Graph", scc: List[str], errors: Errors) -> None: +def check_type_arguments(graph: Graph, scc: List[str], errors: Errors) -> None: for module in scc: state = graph[module] assert state.tree @@ -373,7 +372,7 @@ def check_type_arguments(graph: "Graph", scc: List[str], errors: Errors) -> None def check_type_arguments_in_targets( - targets: List[FineGrainedDeferredNode], state: "State", errors: Errors + targets: List[FineGrainedDeferredNode], state: State, errors: Errors ) -> None: """Check type arguments against type variable bounds and restrictions. @@ -393,7 +392,7 @@ def check_type_arguments_in_targets( target.node.accept(analyzer) -def apply_class_plugin_hooks(graph: "Graph", scc: List[str], errors: Errors) -> None: +def apply_class_plugin_hooks(graph: Graph, scc: List[str], errors: Errors) -> None: """Apply class plugin hooks within a SCC. We run these after to the main semantic analysis so that the hooks @@ -449,7 +448,7 @@ def apply_hooks_to_class( return ok -def calculate_class_properties(graph: "Graph", scc: List[str], errors: Errors) -> None: +def calculate_class_properties(graph: Graph, scc: List[str], errors: Errors) -> None: builtins = graph["builtins"].tree assert builtins for module in scc: @@ -467,6 +466,6 @@ def calculate_class_properties(graph: "Graph", scc: List[str], errors: Errors) - ) -def check_blockers(graph: "Graph", scc: List[str]) -> None: +def check_blockers(graph: Graph, scc: List[str]) -> None: for module in scc: graph[module].check_blockers() diff --git a/mypy/semanal_namedtuple.py b/mypy/semanal_namedtuple.py index 3903c52ab0e7..87557d9320fd 100644 --- a/mypy/semanal_namedtuple.py +++ b/mypy/semanal_namedtuple.py @@ -3,6 +3,8 @@ This is conceptually part of mypy.semanal. """ +from __future__ import annotations + from contextlib import contextmanager from typing import Dict, Iterator, List, Mapping, Optional, Tuple, cast from typing_extensions import Final diff --git a/mypy/semanal_newtype.py b/mypy/semanal_newtype.py index c70329816421..93ab95136ea7 100644 --- a/mypy/semanal_newtype.py +++ b/mypy/semanal_newtype.py @@ -3,6 +3,8 @@ This is conceptually part of mypy.semanal (semantic analyzer pass 2). 
""" +from __future__ import annotations + from typing import Optional, Tuple from mypy import errorcodes as codes diff --git a/mypy/semanal_pass1.py b/mypy/semanal_pass1.py index f636a7777cd2..55430be00a1e 100644 --- a/mypy/semanal_pass1.py +++ b/mypy/semanal_pass1.py @@ -1,5 +1,7 @@ """Block/import reachability analysis.""" +from __future__ import annotations + from mypy.nodes import ( AssertStmt, AssignmentStmt, diff --git a/mypy/semanal_shared.py b/mypy/semanal_shared.py index 2c1d843f4c7a..8f7ef1a4355d 100644 --- a/mypy/semanal_shared.py +++ b/mypy/semanal_shared.py @@ -1,5 +1,7 @@ """Shared definitions used by different parts of semantic analysis.""" +from __future__ import annotations + from abc import abstractmethod from typing import Callable, List, Optional, Union from typing_extensions import Final, Protocol diff --git a/mypy/semanal_typeargs.py b/mypy/semanal_typeargs.py index 2897e1805cbb..575911225e90 100644 --- a/mypy/semanal_typeargs.py +++ b/mypy/semanal_typeargs.py @@ -5,6 +5,8 @@ operations, including subtype checks. """ +from __future__ import annotations + from typing import List, Optional, Set from mypy import errorcodes as codes, message_registry diff --git a/mypy/semanal_typeddict.py b/mypy/semanal_typeddict.py index 2261df76acb3..71c8b04be73c 100644 --- a/mypy/semanal_typeddict.py +++ b/mypy/semanal_typeddict.py @@ -1,5 +1,7 @@ """Semantic analysis of TypedDict definitions.""" +from __future__ import annotations + from typing import List, Optional, Set, Tuple from typing_extensions import Final diff --git a/mypy/server/target.py b/mypy/server/target.py index 06987b551d6b..c06eeeb923f9 100644 --- a/mypy/server/target.py +++ b/mypy/server/target.py @@ -1,3 +1,6 @@ +from __future__ import annotations + + def trigger_to_target(s: str) -> str: assert s[0] == "<" # Strip off the angle brackets diff --git a/mypy/sharedparse.py b/mypy/sharedparse.py index 000a1442d6b4..e1f11efd14df 100644 --- a/mypy/sharedparse.py +++ b/mypy/sharedparse.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import Optional from typing_extensions import Final diff --git a/mypy/solve.py b/mypy/solve.py index 90bbd5b9d3b5..b50607e054aa 100644 --- a/mypy/solve.py +++ b/mypy/solve.py @@ -1,5 +1,7 @@ """Type inference constraint solving""" +from __future__ import annotations + from collections import defaultdict from typing import Dict, List, Optional diff --git a/mypy/split_namespace.py b/mypy/split_namespace.py index e5cadb65de40..24c4e28286fc 100644 --- a/mypy/split_namespace.py +++ b/mypy/split_namespace.py @@ -7,6 +7,8 @@ # In its own file largely because mypyc doesn't support its use of # __getattr__/__setattr__ and has some issues with __dict__ +from __future__ import annotations + import argparse from typing import Any, Tuple diff --git a/mypy/state.py b/mypy/state.py index f21023ff3fff..db0f06dc1824 100644 --- a/mypy/state.py +++ b/mypy/state.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from contextlib import contextmanager from typing import Iterator, Optional, Tuple from typing_extensions import Final diff --git a/mypy/stats.py b/mypy/stats.py index a40bc445d85f..e348c83d4e3f 100644 --- a/mypy/stats.py +++ b/mypy/stats.py @@ -1,5 +1,7 @@ """Utilities for calculating and reporting statistics about types.""" +from __future__ import annotations + import os import typing from collections import Counter diff --git a/mypy/strconv.py b/mypy/strconv.py index f5126b1a91be..3875e54962aa 100644 --- a/mypy/strconv.py +++ b/mypy/strconv.py @@ -1,5 +1,7 @@ """Conversion of 
parse tree nodes to strings.""" +from __future__ import annotations + import os import re from typing import TYPE_CHECKING, Any, List, Optional, Sequence, Tuple, Union @@ -41,7 +43,7 @@ def format_id(self, o: object) -> str: else: return "" - def dump(self, nodes: Sequence[object], obj: "mypy.nodes.Context") -> str: + def dump(self, nodes: Sequence[object], obj: mypy.nodes.Context) -> str: """Convert a list of items to a multiline pretty-printed string. The tag is produced from the type name of obj and its line @@ -54,7 +56,7 @@ def dump(self, nodes: Sequence[object], obj: "mypy.nodes.Context") -> str: tag += f"<{self.get_id(obj)}>" return dump_tagged(nodes, tag, self) - def func_helper(self, o: "mypy.nodes.FuncItem") -> List[object]: + def func_helper(self, o: mypy.nodes.FuncItem) -> List[object]: """Return a list in a format suitable for dump() that represents the arguments and the body of a function. The caller can then decorate the array with information specific to methods, global functions or @@ -86,7 +88,7 @@ def func_helper(self, o: "mypy.nodes.FuncItem") -> List[object]: # Top-level structures - def visit_mypy_file(self, o: "mypy.nodes.MypyFile") -> str: + def visit_mypy_file(self, o: mypy.nodes.MypyFile) -> str: # Skip implicit definitions. a: List[Any] = [o.defs] if o.is_bom: @@ -102,7 +104,7 @@ def visit_mypy_file(self, o: "mypy.nodes.MypyFile") -> str: a.append("IgnoredLines(%s)" % ", ".join(str(line) for line in sorted(o.ignored_lines))) return self.dump(a, o) - def visit_import(self, o: "mypy.nodes.Import") -> str: + def visit_import(self, o: mypy.nodes.Import) -> str: a = [] for id, as_id in o.ids: if as_id is not None: @@ -111,7 +113,7 @@ def visit_import(self, o: "mypy.nodes.Import") -> str: a.append(id) return f"Import:{o.line}({', '.join(a)})" - def visit_import_from(self, o: "mypy.nodes.ImportFrom") -> str: + def visit_import_from(self, o: mypy.nodes.ImportFrom) -> str: a = [] for name, as_name in o.names: if as_name is not None: @@ -120,12 +122,12 @@ def visit_import_from(self, o: "mypy.nodes.ImportFrom") -> str: a.append(name) return f"ImportFrom:{o.line}({'.' * o.relative + o.id}, [{', '.join(a)}])" - def visit_import_all(self, o: "mypy.nodes.ImportAll") -> str: + def visit_import_all(self, o: mypy.nodes.ImportAll) -> str: return f"ImportAll:{o.line}({'.' * o.relative + o.id})" # Definitions - def visit_func_def(self, o: "mypy.nodes.FuncDef") -> str: + def visit_func_def(self, o: mypy.nodes.FuncDef) -> str: a = self.func_helper(o) a.insert(0, o.name) arg_kinds = {arg.kind for arg in o.arguments} @@ -141,7 +143,7 @@ def visit_func_def(self, o: "mypy.nodes.FuncDef") -> str: a.insert(-1, "Property") return self.dump(a, o) - def visit_overloaded_func_def(self, o: "mypy.nodes.OverloadedFuncDef") -> str: + def visit_overloaded_func_def(self, o: mypy.nodes.OverloadedFuncDef) -> str: a: Any = o.items[:] if o.type: a.insert(0, o.type) @@ -153,7 +155,7 @@ def visit_overloaded_func_def(self, o: "mypy.nodes.OverloadedFuncDef") -> str: a.insert(-1, "Class") return self.dump(a, o) - def visit_class_def(self, o: "mypy.nodes.ClassDef") -> str: + def visit_class_def(self, o: mypy.nodes.ClassDef) -> str: a = [o.name, o.defs.body] # Display base types unless they are implicitly just builtins.object # (in this case base_type_exprs is empty). 
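The hunks above and below repeat one mechanical pattern: once a module begins with `from __future__ import annotations`, annotation expressions are kept as strings and never evaluated at import time, so names that may be undefined at runtime (forward references, or modules imported only for type checking) no longer need to be quoted. A minimal sketch of that pattern, assuming a hypothetical ExampleVisitor class and a TYPE_CHECKING-only import (neither is part of this patch):

    from __future__ import annotations

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        # Imported only for the type checker; it is fine if this module is
        # absent at runtime, because the annotation below is never evaluated.
        import mypy.nodes

    class ExampleVisitor:
        # Before such a change, this annotation had to be written as the
        # string "mypy.nodes.ClassDef".
        def visit_class_def(self, o: mypy.nodes.ClassDef) -> str:
            return o.name
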
@@ -177,7 +179,7 @@ def visit_class_def(self, o: "mypy.nodes.ClassDef") -> str: a.insert(1, "FallbackToAny") return self.dump(a, o) - def visit_var(self, o: "mypy.nodes.Var") -> str: + def visit_var(self, o: mypy.nodes.Var) -> str: lst = "" # Add :nil line number tag if no line number is specified to remain # compatible with old test case descriptions that assume this. @@ -185,24 +187,24 @@ def visit_var(self, o: "mypy.nodes.Var") -> str: lst = ":nil" return "Var" + lst + "(" + o.name + ")" - def visit_global_decl(self, o: "mypy.nodes.GlobalDecl") -> str: + def visit_global_decl(self, o: mypy.nodes.GlobalDecl) -> str: return self.dump([o.names], o) - def visit_nonlocal_decl(self, o: "mypy.nodes.NonlocalDecl") -> str: + def visit_nonlocal_decl(self, o: mypy.nodes.NonlocalDecl) -> str: return self.dump([o.names], o) - def visit_decorator(self, o: "mypy.nodes.Decorator") -> str: + def visit_decorator(self, o: mypy.nodes.Decorator) -> str: return self.dump([o.var, o.decorators, o.func], o) # Statements - def visit_block(self, o: "mypy.nodes.Block") -> str: + def visit_block(self, o: mypy.nodes.Block) -> str: return self.dump(o.body, o) - def visit_expression_stmt(self, o: "mypy.nodes.ExpressionStmt") -> str: + def visit_expression_stmt(self, o: mypy.nodes.ExpressionStmt) -> str: return self.dump([o.expr], o) - def visit_assignment_stmt(self, o: "mypy.nodes.AssignmentStmt") -> str: + def visit_assignment_stmt(self, o: mypy.nodes.AssignmentStmt) -> str: a: List[Any] = [] if len(o.lvalues) > 1: a = [("Lvalues", o.lvalues)] @@ -213,16 +215,16 @@ def visit_assignment_stmt(self, o: "mypy.nodes.AssignmentStmt") -> str: a.append(o.type) return self.dump(a, o) - def visit_operator_assignment_stmt(self, o: "mypy.nodes.OperatorAssignmentStmt") -> str: + def visit_operator_assignment_stmt(self, o: mypy.nodes.OperatorAssignmentStmt) -> str: return self.dump([o.op, o.lvalue, o.rvalue], o) - def visit_while_stmt(self, o: "mypy.nodes.WhileStmt") -> str: + def visit_while_stmt(self, o: mypy.nodes.WhileStmt) -> str: a: List[Any] = [o.expr, o.body] if o.else_body: a.append(("Else", o.else_body.body)) return self.dump(a, o) - def visit_for_stmt(self, o: "mypy.nodes.ForStmt") -> str: + def visit_for_stmt(self, o: mypy.nodes.ForStmt) -> str: a: List[Any] = [] if o.is_async: a.append(("Async", "")) @@ -234,10 +236,10 @@ def visit_for_stmt(self, o: "mypy.nodes.ForStmt") -> str: a.append(("Else", o.else_body.body)) return self.dump(a, o) - def visit_return_stmt(self, o: "mypy.nodes.ReturnStmt") -> str: + def visit_return_stmt(self, o: mypy.nodes.ReturnStmt) -> str: return self.dump([o.expr], o) - def visit_if_stmt(self, o: "mypy.nodes.IfStmt") -> str: + def visit_if_stmt(self, o: mypy.nodes.IfStmt) -> str: a: List[Any] = [] for i in range(len(o.expr)): a.append(("If", [o.expr[i]])) @@ -248,31 +250,31 @@ def visit_if_stmt(self, o: "mypy.nodes.IfStmt") -> str: else: return self.dump([a, ("Else", o.else_body.body)], o) - def visit_break_stmt(self, o: "mypy.nodes.BreakStmt") -> str: + def visit_break_stmt(self, o: mypy.nodes.BreakStmt) -> str: return self.dump([], o) - def visit_continue_stmt(self, o: "mypy.nodes.ContinueStmt") -> str: + def visit_continue_stmt(self, o: mypy.nodes.ContinueStmt) -> str: return self.dump([], o) - def visit_pass_stmt(self, o: "mypy.nodes.PassStmt") -> str: + def visit_pass_stmt(self, o: mypy.nodes.PassStmt) -> str: return self.dump([], o) - def visit_raise_stmt(self, o: "mypy.nodes.RaiseStmt") -> str: + def visit_raise_stmt(self, o: mypy.nodes.RaiseStmt) -> str: return self.dump([o.expr, 
o.from_expr], o) - def visit_assert_stmt(self, o: "mypy.nodes.AssertStmt") -> str: + def visit_assert_stmt(self, o: mypy.nodes.AssertStmt) -> str: if o.msg is not None: return self.dump([o.expr, o.msg], o) else: return self.dump([o.expr], o) - def visit_await_expr(self, o: "mypy.nodes.AwaitExpr") -> str: + def visit_await_expr(self, o: mypy.nodes.AwaitExpr) -> str: return self.dump([o.expr], o) - def visit_del_stmt(self, o: "mypy.nodes.DelStmt") -> str: + def visit_del_stmt(self, o: mypy.nodes.DelStmt) -> str: return self.dump([o.expr], o) - def visit_try_stmt(self, o: "mypy.nodes.TryStmt") -> str: + def visit_try_stmt(self, o: mypy.nodes.TryStmt) -> str: a: List[Any] = [o.body] for i in range(len(o.vars)): @@ -288,7 +290,7 @@ def visit_try_stmt(self, o: "mypy.nodes.TryStmt") -> str: return self.dump(a, o) - def visit_with_stmt(self, o: "mypy.nodes.WithStmt") -> str: + def visit_with_stmt(self, o: mypy.nodes.WithStmt) -> str: a: List[Any] = [] if o.is_async: a.append(("Async", "")) @@ -300,7 +302,7 @@ def visit_with_stmt(self, o: "mypy.nodes.WithStmt") -> str: a.append(o.unanalyzed_type) return self.dump(a + [o.body], o) - def visit_match_stmt(self, o: "mypy.nodes.MatchStmt") -> str: + def visit_match_stmt(self, o: mypy.nodes.MatchStmt) -> str: a: List[Any] = [o.subject] for i in range(len(o.patterns)): a.append(("Pattern", [o.patterns[i]])) @@ -313,32 +315,32 @@ def visit_match_stmt(self, o: "mypy.nodes.MatchStmt") -> str: # Simple expressions - def visit_int_expr(self, o: "mypy.nodes.IntExpr") -> str: + def visit_int_expr(self, o: mypy.nodes.IntExpr) -> str: return f"IntExpr({o.value})" - def visit_str_expr(self, o: "mypy.nodes.StrExpr") -> str: + def visit_str_expr(self, o: mypy.nodes.StrExpr) -> str: return f"StrExpr({self.str_repr(o.value)})" - def visit_bytes_expr(self, o: "mypy.nodes.BytesExpr") -> str: + def visit_bytes_expr(self, o: mypy.nodes.BytesExpr) -> str: return f"BytesExpr({self.str_repr(o.value)})" def str_repr(self, s: str) -> str: s = re.sub(r"\\u[0-9a-fA-F]{4}", lambda m: "\\" + m.group(0), s) return re.sub("[^\\x20-\\x7e]", lambda m: r"\u%.4x" % ord(m.group(0)), s) - def visit_float_expr(self, o: "mypy.nodes.FloatExpr") -> str: + def visit_float_expr(self, o: mypy.nodes.FloatExpr) -> str: return f"FloatExpr({o.value})" - def visit_complex_expr(self, o: "mypy.nodes.ComplexExpr") -> str: + def visit_complex_expr(self, o: mypy.nodes.ComplexExpr) -> str: return f"ComplexExpr({o.value})" - def visit_ellipsis(self, o: "mypy.nodes.EllipsisExpr") -> str: + def visit_ellipsis(self, o: mypy.nodes.EllipsisExpr) -> str: return "Ellipsis" - def visit_star_expr(self, o: "mypy.nodes.StarExpr") -> str: + def visit_star_expr(self, o: mypy.nodes.StarExpr) -> str: return self.dump([o.expr], o) - def visit_name_expr(self, o: "mypy.nodes.NameExpr") -> str: + def visit_name_expr(self, o: mypy.nodes.NameExpr) -> str: pretty = self.pretty_name( o.name, o.kind, o.fullname, o.is_inferred_def or o.is_special_form, o.node ) @@ -352,7 +354,7 @@ def pretty_name( kind: Optional[int], fullname: Optional[str], is_inferred_def: bool, - target_node: "Optional[mypy.nodes.Node]" = None, + target_node: Optional[mypy.nodes.Node] = None, ) -> str: n = name if is_inferred_def: @@ -376,20 +378,20 @@ def pretty_name( n += id return n - def visit_member_expr(self, o: "mypy.nodes.MemberExpr") -> str: + def visit_member_expr(self, o: mypy.nodes.MemberExpr) -> str: pretty = self.pretty_name(o.name, o.kind, o.fullname, o.is_inferred_def, o.node) return self.dump([o.expr, pretty], o) - def visit_yield_expr(self, o: 
"mypy.nodes.YieldExpr") -> str: + def visit_yield_expr(self, o: mypy.nodes.YieldExpr) -> str: return self.dump([o.expr], o) - def visit_yield_from_expr(self, o: "mypy.nodes.YieldFromExpr") -> str: + def visit_yield_from_expr(self, o: mypy.nodes.YieldFromExpr) -> str: if o.expr: return self.dump([o.expr.accept(self)], o) else: return self.dump([], o) - def visit_call_expr(self, o: "mypy.nodes.CallExpr") -> str: + def visit_call_expr(self, o: mypy.nodes.CallExpr) -> str: if o.analyzed: return o.analyzed.accept(self) args: List[mypy.nodes.Expression] = [] @@ -408,55 +410,55 @@ def visit_call_expr(self, o: "mypy.nodes.CallExpr") -> str: a: List[Any] = [o.callee, ("Args", args)] return self.dump(a + extra, o) - def visit_op_expr(self, o: "mypy.nodes.OpExpr") -> str: + def visit_op_expr(self, o: mypy.nodes.OpExpr) -> str: return self.dump([o.op, o.left, o.right], o) - def visit_comparison_expr(self, o: "mypy.nodes.ComparisonExpr") -> str: + def visit_comparison_expr(self, o: mypy.nodes.ComparisonExpr) -> str: return self.dump([o.operators, o.operands], o) - def visit_cast_expr(self, o: "mypy.nodes.CastExpr") -> str: + def visit_cast_expr(self, o: mypy.nodes.CastExpr) -> str: return self.dump([o.expr, o.type], o) - def visit_assert_type_expr(self, o: "mypy.nodes.AssertTypeExpr") -> str: + def visit_assert_type_expr(self, o: mypy.nodes.AssertTypeExpr) -> str: return self.dump([o.expr, o.type], o) - def visit_reveal_expr(self, o: "mypy.nodes.RevealExpr") -> str: + def visit_reveal_expr(self, o: mypy.nodes.RevealExpr) -> str: if o.kind == mypy.nodes.REVEAL_TYPE: return self.dump([o.expr], o) else: # REVEAL_LOCALS return self.dump([o.local_nodes], o) - def visit_assignment_expr(self, o: "mypy.nodes.AssignmentExpr") -> str: + def visit_assignment_expr(self, o: mypy.nodes.AssignmentExpr) -> str: return self.dump([o.target, o.value], o) - def visit_unary_expr(self, o: "mypy.nodes.UnaryExpr") -> str: + def visit_unary_expr(self, o: mypy.nodes.UnaryExpr) -> str: return self.dump([o.op, o.expr], o) - def visit_list_expr(self, o: "mypy.nodes.ListExpr") -> str: + def visit_list_expr(self, o: mypy.nodes.ListExpr) -> str: return self.dump(o.items, o) - def visit_dict_expr(self, o: "mypy.nodes.DictExpr") -> str: + def visit_dict_expr(self, o: mypy.nodes.DictExpr) -> str: return self.dump([[k, v] for k, v in o.items], o) - def visit_set_expr(self, o: "mypy.nodes.SetExpr") -> str: + def visit_set_expr(self, o: mypy.nodes.SetExpr) -> str: return self.dump(o.items, o) - def visit_tuple_expr(self, o: "mypy.nodes.TupleExpr") -> str: + def visit_tuple_expr(self, o: mypy.nodes.TupleExpr) -> str: return self.dump(o.items, o) - def visit_index_expr(self, o: "mypy.nodes.IndexExpr") -> str: + def visit_index_expr(self, o: mypy.nodes.IndexExpr) -> str: if o.analyzed: return o.analyzed.accept(self) return self.dump([o.base, o.index], o) - def visit_super_expr(self, o: "mypy.nodes.SuperExpr") -> str: + def visit_super_expr(self, o: mypy.nodes.SuperExpr) -> str: return self.dump([o.name, o.call], o) - def visit_type_application(self, o: "mypy.nodes.TypeApplication") -> str: + def visit_type_application(self, o: mypy.nodes.TypeApplication) -> str: return self.dump([o.expr, ("Types", o.types)], o) - def visit_type_var_expr(self, o: "mypy.nodes.TypeVarExpr") -> str: + def visit_type_var_expr(self, o: mypy.nodes.TypeVarExpr) -> str: import mypy.types a: List[Any] = [] @@ -470,7 +472,7 @@ def visit_type_var_expr(self, o: "mypy.nodes.TypeVarExpr") -> str: a += [f"UpperBound({o.upper_bound})"] return self.dump(a, o) - def 
visit_paramspec_expr(self, o: "mypy.nodes.ParamSpecExpr") -> str: + def visit_paramspec_expr(self, o: mypy.nodes.ParamSpecExpr) -> str: import mypy.types a: List[Any] = [] @@ -482,7 +484,7 @@ def visit_paramspec_expr(self, o: "mypy.nodes.ParamSpecExpr") -> str: a += [f"UpperBound({o.upper_bound})"] return self.dump(a, o) - def visit_type_var_tuple_expr(self, o: "mypy.nodes.TypeVarTupleExpr") -> str: + def visit_type_var_tuple_expr(self, o: mypy.nodes.TypeVarTupleExpr) -> str: import mypy.types a: List[Any] = [] @@ -494,46 +496,46 @@ def visit_type_var_tuple_expr(self, o: "mypy.nodes.TypeVarTupleExpr") -> str: a += [f"UpperBound({o.upper_bound})"] return self.dump(a, o) - def visit_type_alias_expr(self, o: "mypy.nodes.TypeAliasExpr") -> str: + def visit_type_alias_expr(self, o: mypy.nodes.TypeAliasExpr) -> str: return f"TypeAliasExpr({o.type})" - def visit_namedtuple_expr(self, o: "mypy.nodes.NamedTupleExpr") -> str: + def visit_namedtuple_expr(self, o: mypy.nodes.NamedTupleExpr) -> str: return f"NamedTupleExpr:{o.line}({o.info.name}, {o.info.tuple_type})" - def visit_enum_call_expr(self, o: "mypy.nodes.EnumCallExpr") -> str: + def visit_enum_call_expr(self, o: mypy.nodes.EnumCallExpr) -> str: return f"EnumCallExpr:{o.line}({o.info.name}, {o.items})" - def visit_typeddict_expr(self, o: "mypy.nodes.TypedDictExpr") -> str: + def visit_typeddict_expr(self, o: mypy.nodes.TypedDictExpr) -> str: return f"TypedDictExpr:{o.line}({o.info.name})" - def visit__promote_expr(self, o: "mypy.nodes.PromoteExpr") -> str: + def visit__promote_expr(self, o: mypy.nodes.PromoteExpr) -> str: return f"PromoteExpr:{o.line}({o.type})" - def visit_newtype_expr(self, o: "mypy.nodes.NewTypeExpr") -> str: + def visit_newtype_expr(self, o: mypy.nodes.NewTypeExpr) -> str: return f"NewTypeExpr:{o.line}({o.name}, {self.dump([o.old_type], o)})" - def visit_lambda_expr(self, o: "mypy.nodes.LambdaExpr") -> str: + def visit_lambda_expr(self, o: mypy.nodes.LambdaExpr) -> str: a = self.func_helper(o) return self.dump(a, o) - def visit_generator_expr(self, o: "mypy.nodes.GeneratorExpr") -> str: + def visit_generator_expr(self, o: mypy.nodes.GeneratorExpr) -> str: condlists = o.condlists if any(o.condlists) else None return self.dump([o.left_expr, o.indices, o.sequences, condlists], o) - def visit_list_comprehension(self, o: "mypy.nodes.ListComprehension") -> str: + def visit_list_comprehension(self, o: mypy.nodes.ListComprehension) -> str: return self.dump([o.generator], o) - def visit_set_comprehension(self, o: "mypy.nodes.SetComprehension") -> str: + def visit_set_comprehension(self, o: mypy.nodes.SetComprehension) -> str: return self.dump([o.generator], o) - def visit_dictionary_comprehension(self, o: "mypy.nodes.DictionaryComprehension") -> str: + def visit_dictionary_comprehension(self, o: mypy.nodes.DictionaryComprehension) -> str: condlists = o.condlists if any(o.condlists) else None return self.dump([o.key, o.value, o.indices, o.sequences, condlists], o) - def visit_conditional_expr(self, o: "mypy.nodes.ConditionalExpr") -> str: + def visit_conditional_expr(self, o: mypy.nodes.ConditionalExpr) -> str: return self.dump([("Condition", [o.cond]), o.if_expr, o.else_expr], o) - def visit_slice_expr(self, o: "mypy.nodes.SliceExpr") -> str: + def visit_slice_expr(self, o: mypy.nodes.SliceExpr) -> str: a: List[Any] = [o.begin_index, o.end_index, o.stride] if not a[0]: a[0] = "" @@ -541,28 +543,28 @@ def visit_slice_expr(self, o: "mypy.nodes.SliceExpr") -> str: a[1] = "" return self.dump(a, o) - def visit_temp_node(self, o: 
"mypy.nodes.TempNode") -> str: + def visit_temp_node(self, o: mypy.nodes.TempNode) -> str: return self.dump([o.type], o) - def visit_as_pattern(self, o: "mypy.patterns.AsPattern") -> str: + def visit_as_pattern(self, o: mypy.patterns.AsPattern) -> str: return self.dump([o.pattern, o.name], o) - def visit_or_pattern(self, o: "mypy.patterns.OrPattern") -> str: + def visit_or_pattern(self, o: mypy.patterns.OrPattern) -> str: return self.dump(o.patterns, o) - def visit_value_pattern(self, o: "mypy.patterns.ValuePattern") -> str: + def visit_value_pattern(self, o: mypy.patterns.ValuePattern) -> str: return self.dump([o.expr], o) - def visit_singleton_pattern(self, o: "mypy.patterns.SingletonPattern") -> str: + def visit_singleton_pattern(self, o: mypy.patterns.SingletonPattern) -> str: return self.dump([o.value], o) - def visit_sequence_pattern(self, o: "mypy.patterns.SequencePattern") -> str: + def visit_sequence_pattern(self, o: mypy.patterns.SequencePattern) -> str: return self.dump(o.patterns, o) - def visit_starred_pattern(self, o: "mypy.patterns.StarredPattern") -> str: + def visit_starred_pattern(self, o: mypy.patterns.StarredPattern) -> str: return self.dump([o.capture], o) - def visit_mapping_pattern(self, o: "mypy.patterns.MappingPattern") -> str: + def visit_mapping_pattern(self, o: mypy.patterns.MappingPattern) -> str: a: List[Any] = [] for i in range(len(o.keys)): a.append(("Key", [o.keys[i]])) @@ -571,7 +573,7 @@ def visit_mapping_pattern(self, o: "mypy.patterns.MappingPattern") -> str: a.append(("Rest", [o.rest])) return self.dump(a, o) - def visit_class_pattern(self, o: "mypy.patterns.ClassPattern") -> str: + def visit_class_pattern(self, o: mypy.patterns.ClassPattern) -> str: a: List[Any] = [o.class_ref] if len(o.positionals) > 0: a.append(("Positionals", o.positionals)) @@ -581,7 +583,7 @@ def visit_class_pattern(self, o: "mypy.patterns.ClassPattern") -> str: return self.dump(a, o) -def dump_tagged(nodes: Sequence[object], tag: Optional[str], str_conv: "StrConv") -> str: +def dump_tagged(nodes: Sequence[object], tag: Optional[str], str_conv: StrConv) -> str: """Convert an array into a pretty-printed multiline string representation. The format is diff --git a/mypy/stubdoc.py b/mypy/stubdoc.py index 608cdb375d2e..3af1cf957633 100644 --- a/mypy/stubdoc.py +++ b/mypy/stubdoc.py @@ -3,6 +3,9 @@ This module provides several functions to generate better stubs using docstrings and Sphinx docs (.rst files). """ + +from __future__ import annotations + import contextlib import io import re diff --git a/mypy/stubgen.py b/mypy/stubgen.py index fc4a7e0fcd9d..b3dd97b85e37 100755 --- a/mypy/stubgen.py +++ b/mypy/stubgen.py @@ -39,6 +39,8 @@ - we don't seem to always detect properties ('closed' in 'io', for example) """ +from __future__ import annotations + import argparse import glob import os @@ -279,7 +281,7 @@ class AnnotationPrinter(TypeStrVisitor): # TODO: Generate valid string representation for callable types. # TODO: Use short names for Instances. - def __init__(self, stubgen: "StubGenerator") -> None: + def __init__(self, stubgen: StubGenerator) -> None: super().__init__() self.stubgen = stubgen @@ -324,7 +326,7 @@ class AliasPrinter(NodeVisitor[str]): Visit r.h.s of the definition to get the string representation of type alias. 
""" - def __init__(self, stubgen: "StubGenerator") -> None: + def __init__(self, stubgen: StubGenerator) -> None: self.stubgen = stubgen super().__init__() diff --git a/mypy/stubgenc.py b/mypy/stubgenc.py index 50a38b7aa916..e90ebbb51c90 100755 --- a/mypy/stubgenc.py +++ b/mypy/stubgenc.py @@ -4,6 +4,8 @@ The public interface is via the mypy.stubgen module. """ +from __future__ import annotations + import importlib import inspect import os.path diff --git a/mypy/stubinfo.py b/mypy/stubinfo.py index b9e777e9d157..ef025e1caa0f 100644 --- a/mypy/stubinfo.py +++ b/mypy/stubinfo.py @@ -1,3 +1,6 @@ +from __future__ import annotations + + def is_legacy_bundled_package(prefix: str) -> bool: return prefix in legacy_bundled_packages diff --git a/mypy/stubtest.py b/mypy/stubtest.py index 86d843c03c61..c8c3af29a893 100644 --- a/mypy/stubtest.py +++ b/mypy/stubtest.py @@ -4,6 +4,8 @@ """ +from __future__ import annotations + import argparse import collections.abc import copy @@ -604,7 +606,7 @@ def get_desc(arg: Any) -> str: return ret @staticmethod - def from_funcitem(stub: nodes.FuncItem) -> "Signature[nodes.Argument]": + def from_funcitem(stub: nodes.FuncItem) -> Signature[nodes.Argument]: stub_sig: Signature[nodes.Argument] = Signature() stub_args = maybe_strip_cls(stub.name, stub.arguments) for stub_arg in stub_args: @@ -621,7 +623,7 @@ def from_funcitem(stub: nodes.FuncItem) -> "Signature[nodes.Argument]": return stub_sig @staticmethod - def from_inspect_signature(signature: inspect.Signature) -> "Signature[inspect.Parameter]": + def from_inspect_signature(signature: inspect.Signature) -> Signature[inspect.Parameter]: runtime_sig: Signature[inspect.Parameter] = Signature() for runtime_arg in signature.parameters.values(): if runtime_arg.kind in ( @@ -640,7 +642,7 @@ def from_inspect_signature(signature: inspect.Signature) -> "Signature[inspect.P return runtime_sig @staticmethod - def from_overloadedfuncdef(stub: nodes.OverloadedFuncDef) -> "Signature[nodes.Argument]": + def from_overloadedfuncdef(stub: nodes.OverloadedFuncDef) -> Signature[nodes.Argument]: """Returns a Signature from an OverloadedFuncDef. 
If life were simple, to verify_overloadedfuncdef, we'd just verify_funcitem for each of its diff --git a/mypy/stubutil.py b/mypy/stubutil.py index 87d27ac6fd65..4c142a92ef15 100644 --- a/mypy/stubutil.py +++ b/mypy/stubutil.py @@ -1,5 +1,7 @@ """Utilities for mypy.stubgen, mypy.stubgenc, and mypy.stubdoc modules.""" +from __future__ import annotations + import os.path import re import sys diff --git a/mypy/subtypes.py b/mypy/subtypes.py index e8bb3bffa858..7bc616d8f462 100644 --- a/mypy/subtypes.py +++ b/mypy/subtypes.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from contextlib import contextmanager from typing import Any, Callable, Iterator, List, Optional, Set, Tuple, TypeVar, Union, cast from typing_extensions import Final, TypeAlias as _TypeAlias diff --git a/mypy/suggestions.py b/mypy/suggestions.py index 7bd1bed77763..c23ada8e7af4 100644 --- a/mypy/suggestions.py +++ b/mypy/suggestions.py @@ -22,6 +22,8 @@ * No understanding of type variables at *all* """ +from __future__ import annotations + import itertools import json import os diff --git a/mypy/traverser.py b/mypy/traverser.py index 8e3a6486ace7..0c100a2cc988 100644 --- a/mypy/traverser.py +++ b/mypy/traverser.py @@ -1,5 +1,7 @@ """Generic node traverser visitor""" +from __future__ import annotations + from typing import List, Tuple from mypy_extensions import mypyc_attr diff --git a/mypy/treetransform.py b/mypy/treetransform.py index c9270223f6de..91fa3d3a6836 100644 --- a/mypy/treetransform.py +++ b/mypy/treetransform.py @@ -3,6 +3,8 @@ Subclass TransformVisitor to perform non-trivial transformations. """ +from __future__ import annotations + from typing import Dict, Iterable, List, Optional, cast from mypy.nodes import ( diff --git a/mypy/tvar_scope.py b/mypy/tvar_scope.py index 8464bb58b336..44a7c2cf9e31 100644 --- a/mypy/tvar_scope.py +++ b/mypy/tvar_scope.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import Dict, Optional, Union from mypy.nodes import ( @@ -25,9 +27,9 @@ class TypeVarLikeScope: def __init__( self, - parent: "Optional[TypeVarLikeScope]" = None, + parent: Optional[TypeVarLikeScope] = None, is_class_scope: bool = False, - prohibited: "Optional[TypeVarLikeScope]" = None, + prohibited: Optional[TypeVarLikeScope] = None, namespace: str = "", ) -> None: """Initializer for TypeVarLikeScope @@ -49,7 +51,7 @@ def __init__( self.func_id = parent.func_id self.class_id = parent.class_id - def get_function_scope(self) -> "Optional[TypeVarLikeScope]": + def get_function_scope(self) -> Optional[TypeVarLikeScope]: """Get the nearest parent that's a function scope, not a class scope""" it: Optional[TypeVarLikeScope] = self while it is not None and it.is_class_scope: @@ -65,11 +67,11 @@ def allow_binding(self, fullname: str) -> bool: return False return True - def method_frame(self) -> "TypeVarLikeScope": + def method_frame(self) -> TypeVarLikeScope: """A new scope frame for binding a method""" return TypeVarLikeScope(self, False, None) - def class_frame(self, namespace: str) -> "TypeVarLikeScope": + def class_frame(self, namespace: str) -> TypeVarLikeScope: """A new scope frame for binding a class. Prohibits *this* class's tvars""" return TypeVarLikeScope(self.get_function_scope(), True, self, namespace=namespace) diff --git a/mypy/type_visitor.py b/mypy/type_visitor.py index 2a83abfbd0bd..5efcd195da38 100644 --- a/mypy/type_visitor.py +++ b/mypy/type_visitor.py @@ -11,6 +11,8 @@ other modules refer to them. 
""" +from __future__ import annotations + from abc import abstractmethod from typing import Any, Callable, Generic, Iterable, List, Optional, Sequence, Set, TypeVar, cast diff --git a/mypy/typeanal.py b/mypy/typeanal.py index 0e1bc045f216..84ade0c6554e 100644 --- a/mypy/typeanal.py +++ b/mypy/typeanal.py @@ -1,5 +1,7 @@ """Semantic analysis of types""" +from __future__ import annotations + import itertools from contextlib import contextmanager from itertools import chain @@ -1610,7 +1612,7 @@ class TypeVarLikeQuery(TypeQuery[TypeVarLikeList]): def __init__( self, lookup: Callable[[str, Context], Optional[SymbolTableNode]], - scope: "TypeVarLikeScope", + scope: TypeVarLikeScope, *, include_callables: bool = True, include_bound_tvars: bool = False, @@ -1677,7 +1679,7 @@ def __init__( self, seen_nodes: Set[TypeAlias], lookup: Callable[[str, Context], Optional[SymbolTableNode]], - scope: "TypeVarLikeScope", + scope: TypeVarLikeScope, ) -> None: self.seen_nodes = seen_nodes self.lookup = lookup @@ -1720,7 +1722,7 @@ def detect_diverging_alias( node: TypeAlias, target: Type, lookup: Callable[[str, Context], Optional[SymbolTableNode]], - scope: "TypeVarLikeScope", + scope: TypeVarLikeScope, ) -> bool: """This detects type aliases that will diverge during type checking. diff --git a/mypy/typeops.py b/mypy/typeops.py index 91654dd654d2..061aae91b173 100644 --- a/mypy/typeops.py +++ b/mypy/typeops.py @@ -5,6 +5,8 @@ since these may assume that MROs are ready. """ +from __future__ import annotations + import itertools from typing import ( Any, diff --git a/mypy/types.py b/mypy/types.py index 9276b0b9a706..64a28a25924d 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -1,5 +1,7 @@ """Classes for representing mypy types.""" +from __future__ import annotations + import sys from abc import abstractmethod from typing import ( @@ -181,7 +183,7 @@ class TypeOfAny: suggestion_engine: Final = 9 -def deserialize_type(data: Union[JsonDict, str]) -> "Type": +def deserialize_type(data: Union[JsonDict, str]) -> Type: if isinstance(data, str): return Instance.deserialize(data) classname = data[".class"] @@ -216,7 +218,7 @@ def can_be_true_default(self) -> bool: def can_be_false_default(self) -> bool: return True - def accept(self, visitor: "TypeVisitor[T]") -> T: + def accept(self, visitor: TypeVisitor[T]) -> T: raise RuntimeError("Not implemented") def __repr__(self) -> str: @@ -226,7 +228,7 @@ def serialize(self) -> Union[JsonDict, str]: raise NotImplementedError(f"Cannot serialize {self.__class__.__name__} instance") @classmethod - def deserialize(cls, data: JsonDict) -> "Type": + def deserialize(cls, data: JsonDict) -> Type: raise NotImplementedError(f"Cannot deserialize {cls.__name__} instance") def is_singleton_type(self) -> bool: @@ -279,14 +281,14 @@ def _expand_once(self) -> Type: self.alias.target, self.alias.alias_tvars, self.args, self.line, self.column ) - def _partial_expansion(self) -> Tuple["ProperType", bool]: + def _partial_expansion(self) -> Tuple[ProperType, bool]: # Private method mostly for debugging and testing. unroller = UnrollAliasVisitor(set()) unrolled = self.accept(unroller) assert isinstance(unrolled, ProperType) return unrolled, unroller.recursed - def expand_all_if_possible(self) -> Optional["ProperType"]: + def expand_all_if_possible(self) -> Optional[ProperType]: """Attempt a full expansion of the type alias (including nested aliases). If the expansion is not possible, i.e. 
the alias is (mutually-)recursive, @@ -318,7 +320,7 @@ def can_be_false_default(self) -> bool: return self.alias.target.can_be_false return super().can_be_false_default() - def accept(self, visitor: "TypeVisitor[T]") -> T: + def accept(self, visitor: TypeVisitor[T]) -> T: return visitor.visit_type_alias_type(self) def __hash__(self) -> int: @@ -340,7 +342,7 @@ def serialize(self) -> JsonDict: return data @classmethod - def deserialize(cls, data: JsonDict) -> "TypeAliasType": + def deserialize(cls, data: JsonDict) -> TypeAliasType: assert data[".class"] == "TypeAliasType" args: List[Type] = [] if "args" in data: @@ -351,7 +353,7 @@ def deserialize(cls, data: JsonDict) -> "TypeAliasType": alias.type_ref = data["type_ref"] return alias - def copy_modified(self, *, args: Optional[List[Type]] = None) -> "TypeAliasType": + def copy_modified(self, *, args: Optional[List[Type]] = None) -> TypeAliasType: return TypeAliasType( self.alias, args if args is not None else self.args.copy(), self.line, self.column ) @@ -384,7 +386,7 @@ def __repr__(self) -> str: else: return f"NotRequired[{self.item}]" - def accept(self, visitor: "TypeVisitor[T]") -> T: + def accept(self, visitor: TypeVisitor[T]) -> T: return self.item.accept(visitor) @@ -429,7 +431,7 @@ def __init__(self, raw_id: int, meta_level: int = 0, *, namespace: str = "") -> self.namespace = namespace @staticmethod - def new(meta_level: int) -> "TypeVarId": + def new(meta_level: int) -> TypeVarId: raw_id = TypeVarId.next_raw_id TypeVarId.next_raw_id += 1 return TypeVarId(raw_id, meta_level) @@ -487,7 +489,7 @@ def serialize(self) -> JsonDict: raise NotImplementedError @classmethod - def deserialize(cls, data: JsonDict) -> "TypeVarLikeType": + def deserialize(cls, data: JsonDict) -> TypeVarLikeType: raise NotImplementedError @@ -516,7 +518,7 @@ def __init__( self.variance = variance @staticmethod - def new_unification_variable(old: "TypeVarType") -> "TypeVarType": + def new_unification_variable(old: TypeVarType) -> TypeVarType: new_id = TypeVarId.new(meta_level=1) return TypeVarType( old.name, @@ -529,7 +531,7 @@ def new_unification_variable(old: "TypeVarType") -> "TypeVarType": old.column, ) - def accept(self, visitor: "TypeVisitor[T]") -> T: + def accept(self, visitor: TypeVisitor[T]) -> T: return visitor.visit_type_var(self) def __hash__(self) -> int: @@ -554,7 +556,7 @@ def serialize(self) -> JsonDict: } @classmethod - def deserialize(cls, data: JsonDict) -> "TypeVarType": + def deserialize(cls, data: JsonDict) -> TypeVarType: assert data[".class"] == "TypeVarType" return TypeVarType( data["name"], @@ -596,7 +598,7 @@ class ParamSpecType(TypeVarLikeType): __slots__ = ("flavor", "prefix") flavor: int - prefix: "Parameters" + prefix: Parameters def __init__( self, @@ -608,14 +610,14 @@ def __init__( *, line: int = -1, column: int = -1, - prefix: Optional["Parameters"] = None, + prefix: Optional[Parameters] = None, ) -> None: super().__init__(name, fullname, id, upper_bound, line=line, column=column) self.flavor = flavor self.prefix = prefix or Parameters([], [], []) @staticmethod - def new_unification_variable(old: "ParamSpecType") -> "ParamSpecType": + def new_unification_variable(old: ParamSpecType) -> ParamSpecType: new_id = TypeVarId.new(meta_level=1) return ParamSpecType( old.name, @@ -628,7 +630,7 @@ def new_unification_variable(old: "ParamSpecType") -> "ParamSpecType": prefix=old.prefix, ) - def with_flavor(self, flavor: int) -> "ParamSpecType": + def with_flavor(self, flavor: int) -> ParamSpecType: return ParamSpecType( self.name, 
self.fullname, @@ -643,8 +645,8 @@ def copy_modified( *, id: Bogus[Union[TypeVarId, int]] = _dummy, flavor: Bogus[int] = _dummy, - prefix: Bogus["Parameters"] = _dummy, - ) -> "ParamSpecType": + prefix: Bogus[Parameters] = _dummy, + ) -> ParamSpecType: return ParamSpecType( self.name, self.fullname, @@ -656,7 +658,7 @@ def copy_modified( prefix=prefix if prefix is not _dummy else self.prefix, ) - def accept(self, visitor: "TypeVisitor[T]") -> T: + def accept(self, visitor: TypeVisitor[T]) -> T: return visitor.visit_param_spec(self) def name_with_suffix(self) -> str: @@ -689,7 +691,7 @@ def serialize(self) -> JsonDict: } @classmethod - def deserialize(cls, data: JsonDict) -> "ParamSpecType": + def deserialize(cls, data: JsonDict) -> ParamSpecType: assert data[".class"] == "ParamSpecType" return ParamSpecType( data["name"], @@ -718,13 +720,13 @@ def serialize(self) -> JsonDict: } @classmethod - def deserialize(cls, data: JsonDict) -> "TypeVarTupleType": + def deserialize(cls, data: JsonDict) -> TypeVarTupleType: assert data[".class"] == "TypeVarTupleType" return TypeVarTupleType( data["name"], data["fullname"], data["id"], deserialize_type(data["upper_bound"]) ) - def accept(self, visitor: "TypeVisitor[T]") -> T: + def accept(self, visitor: TypeVisitor[T]) -> T: return visitor.visit_type_var_tuple(self) def __hash__(self) -> int: @@ -736,7 +738,7 @@ def __eq__(self, other: object) -> bool: return self.id == other.id @staticmethod - def new_unification_variable(old: "TypeVarTupleType") -> "TypeVarTupleType": + def new_unification_variable(old: TypeVarTupleType) -> TypeVarTupleType: new_id = TypeVarId.new(meta_level=1) return TypeVarTupleType( old.name, old.fullname, new_id, old.upper_bound, line=old.line, column=old.column @@ -792,7 +794,7 @@ def __init__( self.original_str_expr = original_str_expr self.original_str_fallback = original_str_fallback - def copy_modified(self, args: Bogus[Optional[Sequence[Type]]] = _dummy) -> "UnboundType": + def copy_modified(self, args: Bogus[Optional[Sequence[Type]]] = _dummy) -> UnboundType: if args is _dummy: args = self.args return UnboundType( @@ -806,7 +808,7 @@ def copy_modified(self, args: Bogus[Optional[Sequence[Type]]] = _dummy) -> "Unbo original_str_fallback=self.original_str_fallback, ) - def accept(self, visitor: "TypeVisitor[T]") -> T: + def accept(self, visitor: TypeVisitor[T]) -> T: return visitor.visit_unbound_type(self) def __hash__(self) -> int: @@ -833,7 +835,7 @@ def serialize(self) -> JsonDict: } @classmethod - def deserialize(cls, data: JsonDict) -> "UnboundType": + def deserialize(cls, data: JsonDict) -> UnboundType: assert data[".class"] == "UnboundType" return UnboundType( data["name"], @@ -868,7 +870,7 @@ def __init__( self.name = name self.constructor = constructor - def accept(self, visitor: "TypeVisitor[T]") -> T: + def accept(self, visitor: TypeVisitor[T]) -> T: assert isinstance(visitor, SyntheticTypeVisitor) return visitor.visit_callable_argument(self) @@ -893,7 +895,7 @@ def __init__(self, items: List[Type], line: int = -1, column: int = -1) -> None: super().__init__(line, column) self.items = items - def accept(self, visitor: "TypeVisitor[T]") -> T: + def accept(self, visitor: TypeVisitor[T]) -> T: assert isinstance(visitor, SyntheticTypeVisitor) return visitor.visit_type_list(self) @@ -923,14 +925,14 @@ def __init__(self, typ: Type, line: int = -1, column: int = -1) -> None: super().__init__(line, column) self.type = typ - def accept(self, visitor: "TypeVisitor[T]") -> T: + def accept(self, visitor: TypeVisitor[T]) -> T: 
return visitor.visit_unpack_type(self) def serialize(self) -> JsonDict: return {".class": "UnpackType", "type": self.type.serialize()} @classmethod - def deserialize(cls, data: JsonDict) -> "UnpackType": + def deserialize(cls, data: JsonDict) -> UnpackType: assert data[".class"] == "UnpackType" typ = data["type"] return UnpackType(deserialize_type(typ)) @@ -944,7 +946,7 @@ class AnyType(ProperType): def __init__( self, type_of_any: int, - source_any: Optional["AnyType"] = None, + source_any: Optional[AnyType] = None, missing_import_name: Optional[str] = None, line: int = -1, column: int = -1, @@ -976,15 +978,15 @@ def __init__( def is_from_error(self) -> bool: return self.type_of_any == TypeOfAny.from_error - def accept(self, visitor: "TypeVisitor[T]") -> T: + def accept(self, visitor: TypeVisitor[T]) -> T: return visitor.visit_any(self) def copy_modified( self, # Mark with Bogus because _dummy is just an object (with type Any) type_of_any: Bogus[int] = _dummy, - original_any: Bogus[Optional["AnyType"]] = _dummy, - ) -> "AnyType": + original_any: Bogus[Optional[AnyType]] = _dummy, + ) -> AnyType: if type_of_any is _dummy: type_of_any = self.type_of_any if original_any is _dummy: @@ -1012,7 +1014,7 @@ def serialize(self) -> JsonDict: } @classmethod - def deserialize(cls, data: JsonDict) -> "AnyType": + def deserialize(cls, data: JsonDict) -> AnyType: assert data[".class"] == "AnyType" source = data["source_any"] return AnyType( @@ -1055,7 +1057,7 @@ def can_be_true_default(self) -> bool: def can_be_false_default(self) -> bool: return False - def accept(self, visitor: "TypeVisitor[T]") -> T: + def accept(self, visitor: TypeVisitor[T]) -> T: return visitor.visit_uninhabited_type(self) def __hash__(self) -> int: @@ -1068,7 +1070,7 @@ def serialize(self) -> JsonDict: return {".class": "UninhabitedType", "is_noreturn": self.is_noreturn} @classmethod - def deserialize(cls, data: JsonDict) -> "UninhabitedType": + def deserialize(cls, data: JsonDict) -> UninhabitedType: assert data[".class"] == "UninhabitedType" return UninhabitedType(is_noreturn=data["is_noreturn"]) @@ -1093,14 +1095,14 @@ def __hash__(self) -> int: def __eq__(self, other: object) -> bool: return isinstance(other, NoneType) - def accept(self, visitor: "TypeVisitor[T]") -> T: + def accept(self, visitor: TypeVisitor[T]) -> T: return visitor.visit_none_type(self) def serialize(self) -> JsonDict: return {".class": "NoneType"} @classmethod - def deserialize(cls, data: JsonDict) -> "NoneType": + def deserialize(cls, data: JsonDict) -> NoneType: assert data[".class"] == "NoneType" return NoneType() @@ -1122,7 +1124,7 @@ class ErasedType(ProperType): __slots__ = () - def accept(self, visitor: "TypeVisitor[T]") -> T: + def accept(self, visitor: TypeVisitor[T]) -> T: return visitor.visit_erased_type(self) @@ -1140,14 +1142,14 @@ def __init__(self, source: Optional[str] = None, line: int = -1, column: int = - super().__init__(line, column) self.source = source - def accept(self, visitor: "TypeVisitor[T]") -> T: + def accept(self, visitor: TypeVisitor[T]) -> T: return visitor.visit_deleted_type(self) def serialize(self) -> JsonDict: return {".class": "DeletedType", "source": self.source} @classmethod - def deserialize(cls, data: JsonDict) -> "DeletedType": + def deserialize(cls, data: JsonDict) -> DeletedType: assert data[".class"] == "DeletedType" return DeletedType(data["source"]) @@ -1196,7 +1198,7 @@ def __init__( line: int = -1, column: int = -1, *, - last_known_value: Optional["LiteralType"] = None, + last_known_value: 
Optional[LiteralType] = None, ) -> None: super().__init__(line, column) self.type = typ @@ -1254,7 +1256,7 @@ def __init__( # Cached hash value self._hash = -1 - def accept(self, visitor: "TypeVisitor[T]") -> T: + def accept(self, visitor: TypeVisitor[T]) -> T: return visitor.visit_instance(self) def __hash__(self) -> int: @@ -1284,7 +1286,7 @@ def serialize(self) -> Union[JsonDict, str]: return data @classmethod - def deserialize(cls, data: Union[JsonDict, str]) -> "Instance": + def deserialize(cls, data: Union[JsonDict, str]) -> Instance: if isinstance(data, str): inst = Instance(NOT_READY, []) inst.type_ref = data @@ -1305,8 +1307,8 @@ def copy_modified( self, *, args: Bogus[List[Type]] = _dummy, - last_known_value: Bogus[Optional["LiteralType"]] = _dummy, - ) -> "Instance": + last_known_value: Bogus[Optional[LiteralType]] = _dummy, + ) -> Instance: new = Instance( self.type, args if args is not _dummy else self.args, @@ -1360,11 +1362,11 @@ def type_object(self) -> mypy.nodes.TypeInfo: @property @abstractmethod - def items(self) -> List["CallableType"]: + def items(self) -> List[CallableType]: pass @abstractmethod - def with_name(self, name: str) -> "FunctionLike": + def with_name(self, name: str) -> FunctionLike: pass @abstractmethod @@ -1424,7 +1426,7 @@ def copy_modified( *, variables: Bogus[Sequence[TypeVarLikeType]] = _dummy, is_ellipsis_args: Bogus[bool] = _dummy, - ) -> "Parameters": + ) -> Parameters: return Parameters( arg_types=arg_types if arg_types is not _dummy else self.arg_types, arg_kinds=arg_kinds if arg_kinds is not _dummy else self.arg_kinds, @@ -1521,7 +1523,7 @@ def try_synthesizing_arg_from_vararg( else: return None - def accept(self, visitor: "TypeVisitor[T]") -> T: + def accept(self, visitor: TypeVisitor[T]) -> T: return visitor.visit_parameters(self) def serialize(self) -> JsonDict: @@ -1534,7 +1536,7 @@ def serialize(self) -> JsonDict: } @classmethod - def deserialize(cls, data: JsonDict) -> "Parameters": + def deserialize(cls, data: JsonDict) -> Parameters: assert data[".class"] == "Parameters" return Parameters( [deserialize_type(t) for t in data["arg_types"]], @@ -1678,7 +1680,7 @@ def copy_modified( def_extras: Bogus[Dict[str, Any]] = _dummy, type_guard: Bogus[Optional[Type]] = _dummy, from_concatenate: Bogus[bool] = _dummy, - ) -> "CallableType": + ) -> CallableType: return CallableType( arg_types=arg_types if arg_types is not _dummy else self.arg_types, arg_kinds=arg_kinds if arg_kinds is not _dummy else self.arg_kinds, @@ -1741,10 +1743,10 @@ def type_object(self) -> mypy.nodes.TypeInfo: assert isinstance(ret, Instance) return ret.type - def accept(self, visitor: "TypeVisitor[T]") -> T: + def accept(self, visitor: TypeVisitor[T]) -> T: return visitor.visit_callable_type(self) - def with_name(self, name: str) -> "CallableType": + def with_name(self, name: str) -> CallableType: """Return a copy of this type with the specified name.""" return self.copy_modified(ret_type=self.ret_type, name=name) @@ -1831,7 +1833,7 @@ def try_synthesizing_arg_from_vararg( return None @property - def items(self) -> List["CallableType"]: + def items(self) -> List[CallableType]: return [self] def is_generic(self) -> bool: @@ -1872,8 +1874,8 @@ def param_spec(self) -> Optional[ParamSpecType]: ) def expand_param_spec( - self, c: Union["CallableType", Parameters], no_prefix: bool = False - ) -> "CallableType": + self, c: Union[CallableType, Parameters], no_prefix: bool = False + ) -> CallableType: variables = c.variables if no_prefix: @@ -1947,7 +1949,7 @@ def serialize(self) -> 
JsonDict: } @classmethod - def deserialize(cls, data: JsonDict) -> "CallableType": + def deserialize(cls, data: JsonDict) -> CallableType: assert data[".class"] == "CallableType" # TODO: Set definition to the containing SymbolNode? return CallableType( @@ -2004,7 +2006,7 @@ def type_object(self) -> mypy.nodes.TypeInfo: # query only (any) one of them. return self._items[0].type_object() - def with_name(self, name: str) -> "Overloaded": + def with_name(self, name: str) -> Overloaded: ni: List[CallableType] = [] for it in self._items: ni.append(it.with_name(name)) @@ -2013,7 +2015,7 @@ def with_name(self, name: str) -> "Overloaded": def get_name(self) -> Optional[str]: return self._items[0].name - def accept(self, visitor: "TypeVisitor[T]") -> T: + def accept(self, visitor: TypeVisitor[T]) -> T: return visitor.visit_overloaded(self) def __hash__(self) -> int: @@ -2028,7 +2030,7 @@ def serialize(self) -> JsonDict: return {".class": "Overloaded", "items": [t.serialize() for t in self.items]} @classmethod - def deserialize(cls, data: JsonDict) -> "Overloaded": + def deserialize(cls, data: JsonDict) -> Overloaded: assert data[".class"] == "Overloaded" return Overloaded([CallableType.deserialize(t) for t in data["items"]]) @@ -2089,7 +2091,7 @@ def can_be_any_bool(self) -> bool: def length(self) -> int: return len(self.items) - def accept(self, visitor: "TypeVisitor[T]") -> T: + def accept(self, visitor: TypeVisitor[T]) -> T: return visitor.visit_tuple_type(self) def __hash__(self) -> int: @@ -2109,7 +2111,7 @@ def serialize(self) -> JsonDict: } @classmethod - def deserialize(cls, data: JsonDict) -> "TupleType": + def deserialize(cls, data: JsonDict) -> TupleType: assert data[".class"] == "TupleType" return TupleType( [deserialize_type(t) for t in data["items"]], @@ -2119,16 +2121,14 @@ def deserialize(cls, data: JsonDict) -> "TupleType": def copy_modified( self, *, fallback: Optional[Instance] = None, items: Optional[List[Type]] = None - ) -> "TupleType": + ) -> TupleType: if fallback is None: fallback = self.partial_fallback if items is None: items = self.items return TupleType(items, fallback, self.line, self.column) - def slice( - self, begin: Optional[int], end: Optional[int], stride: Optional[int] - ) -> "TupleType": + def slice(self, begin: Optional[int], end: Optional[int], stride: Optional[int]) -> TupleType: return TupleType( self.items[begin:end:stride], self.partial_fallback, @@ -2179,7 +2179,7 @@ def __init__( self.can_be_true = len(self.items) > 0 self.can_be_false = len(self.required_keys) == 0 - def accept(self, visitor: "TypeVisitor[T]") -> T: + def accept(self, visitor: TypeVisitor[T]) -> T: return visitor.visit_typeddict_type(self) def __hash__(self) -> int: @@ -2205,7 +2205,7 @@ def serialize(self) -> JsonDict: } @classmethod - def deserialize(cls, data: JsonDict) -> "TypedDictType": + def deserialize(cls, data: JsonDict) -> TypedDictType: assert data[".class"] == "TypedDictType" return TypedDictType( {n: deserialize_type(t) for (n, t) in data["items"]}, @@ -2216,7 +2216,7 @@ def deserialize(cls, data: JsonDict) -> "TypedDictType": def is_anonymous(self) -> bool: return self.fallback.type.fullname in TPDICT_FB_NAMES - def as_anonymous(self) -> "TypedDictType": + def as_anonymous(self) -> TypedDictType: if self.is_anonymous(): return self assert self.fallback.type.typeddict_type is not None @@ -2228,7 +2228,7 @@ def copy_modified( fallback: Optional[Instance] = None, item_types: Optional[List[Type]] = None, required_keys: Optional[Set[str]] = None, - ) -> "TypedDictType": + ) -> 
TypedDictType: if fallback is None: fallback = self.fallback if item_types is None: @@ -2243,19 +2243,17 @@ def create_anonymous_fallback(self) -> Instance: anonymous = self.as_anonymous() return anonymous.fallback - def names_are_wider_than(self, other: "TypedDictType") -> bool: + def names_are_wider_than(self, other: TypedDictType) -> bool: return len(other.items.keys() - self.items.keys()) == 0 - def zip(self, right: "TypedDictType") -> Iterable[Tuple[str, Type, Type]]: + def zip(self, right: TypedDictType) -> Iterable[Tuple[str, Type, Type]]: left = self for (item_name, left_item_type) in left.items.items(): right_item_type = right.items.get(item_name) if right_item_type is not None: yield (item_name, left_item_type, right_item_type) - def zipall( - self, right: "TypedDictType" - ) -> Iterable[Tuple[str, Optional[Type], Optional[Type]]]: + def zipall(self, right: TypedDictType) -> Iterable[Tuple[str, Optional[Type], Optional[Type]]]: left = self for (item_name, left_item_type) in left.items.items(): right_item_type = right.items.get(item_name) @@ -2328,7 +2326,7 @@ def __init__( def simple_name(self) -> str: return self.base_type_name.replace("builtins.", "") - def accept(self, visitor: "TypeVisitor[T]") -> T: + def accept(self, visitor: TypeVisitor[T]) -> T: assert isinstance(visitor, SyntheticTypeVisitor) return visitor.visit_raw_expression_type(self) @@ -2380,7 +2378,7 @@ def can_be_false_default(self) -> bool: def can_be_true_default(self) -> bool: return bool(self.value) - def accept(self, visitor: "TypeVisitor[T]") -> T: + def accept(self, visitor: TypeVisitor[T]) -> T: return visitor.visit_literal_type(self) def __hash__(self) -> int: @@ -2429,7 +2427,7 @@ def serialize(self) -> Union[JsonDict, str]: } @classmethod - def deserialize(cls, data: JsonDict) -> "LiteralType": + def deserialize(cls, data: JsonDict) -> LiteralType: assert data[".class"] == "LiteralType" return LiteralType(value=data["value"], fallback=Instance.deserialize(data["fallback"])) @@ -2451,7 +2449,7 @@ def __init__(self, type: Type, line: int = -1, column: int = -1) -> None: super().__init__(line, column) self.type = type - def accept(self, visitor: "TypeVisitor[T]") -> T: + def accept(self, visitor: TypeVisitor[T]) -> T: assert isinstance(visitor, SyntheticTypeVisitor) return visitor.visit_star_type(self) @@ -2513,7 +2511,7 @@ def make_union(items: Sequence[Type], line: int = -1, column: int = -1) -> Type: def length(self) -> int: return len(self.items) - def accept(self, visitor: "TypeVisitor[T]") -> T: + def accept(self, visitor: TypeVisitor[T]) -> T: return visitor.visit_union_type(self) def has_readable_member(self, name: str) -> bool: @@ -2539,7 +2537,7 @@ def serialize(self) -> JsonDict: return {".class": "UnionType", "items": [t.serialize() for t in self.items]} @classmethod - def deserialize(cls, data: JsonDict) -> "UnionType": + def deserialize(cls, data: JsonDict) -> UnionType: assert data[".class"] == "UnionType" return UnionType([deserialize_type(t) for t in data["items"]]) @@ -2570,16 +2568,16 @@ class PartialType(ProperType): def __init__( self, - type: "Optional[mypy.nodes.TypeInfo]", - var: "mypy.nodes.Var", - value_type: "Optional[Instance]" = None, + type: Optional[mypy.nodes.TypeInfo], + var: mypy.nodes.Var, + value_type: Optional[Instance] = None, ) -> None: super().__init__() self.type = type self.var = var self.value_type = value_type - def accept(self, visitor: "TypeVisitor[T]") -> T: + def accept(self, visitor: TypeVisitor[T]) -> T: return visitor.visit_partial_type(self) @@ -2593,7 
+2591,7 @@ class EllipsisType(ProperType): __slots__ = () - def accept(self, visitor: "TypeVisitor[T]") -> T: + def accept(self, visitor: TypeVisitor[T]) -> T: assert isinstance(visitor, SyntheticTypeVisitor) return visitor.visit_ellipsis_type(self) @@ -2659,7 +2657,7 @@ def make_normalized(item: Type, *, line: int = -1, column: int = -1) -> ProperTy ) return TypeType(item, line=line, column=column) # type: ignore[arg-type] - def accept(self, visitor: "TypeVisitor[T]") -> T: + def accept(self, visitor: TypeVisitor[T]) -> T: return visitor.visit_type_type(self) def __hash__(self) -> int: @@ -2702,7 +2700,7 @@ def __init__(self, fullname: Optional[str], args: List[Type], line: int) -> None self.fullname = fullname # Must be a valid full name of an actual node (or None). self.args = args - def accept(self, visitor: "TypeVisitor[T]") -> T: + def accept(self, visitor: TypeVisitor[T]) -> T: assert isinstance(visitor, SyntheticTypeVisitor) return visitor.visit_placeholder_type(self) diff --git a/mypy/typestate.py b/mypy/typestate.py index ea69671edba9..d298f7f659ea 100644 --- a/mypy/typestate.py +++ b/mypy/typestate.py @@ -3,6 +3,8 @@ and potentially other mutable TypeInfo state. This module contains mutable global state. """ +from __future__ import annotations + from typing import ClassVar, Dict, List, Optional, Set, Tuple from typing_extensions import Final, TypeAlias as _TypeAlias diff --git a/mypy/typetraverser.py b/mypy/typetraverser.py index b2591afbc5d3..afe77efff78d 100644 --- a/mypy/typetraverser.py +++ b/mypy/typetraverser.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import Iterable from mypy_extensions import trait diff --git a/mypy/typevars.py b/mypy/typevars.py index aefdf339587c..2323d7e6aacc 100644 --- a/mypy/typevars.py +++ b/mypy/typevars.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import List, Union from mypy.erasetype import erase_typevars diff --git a/mypy/typevartuples.py b/mypy/typevartuples.py index a4b71da3f5f9..689a7fdb647d 100644 --- a/mypy/typevartuples.py +++ b/mypy/typevartuples.py @@ -1,5 +1,7 @@ """Helpers for interacting with type var tuples.""" +from __future__ import annotations + from typing import Optional, Sequence, Tuple, TypeVar from mypy.types import Instance, ProperType, Type, UnpackType, get_proper_type diff --git a/mypy/util.py b/mypy/util.py index 9277fb99ebeb..990acd2edfab 100644 --- a/mypy/util.py +++ b/mypy/util.py @@ -1,5 +1,7 @@ """Utility functions with no non-trivial dependencies.""" +from __future__ import annotations + import hashlib import io import os @@ -346,7 +348,7 @@ def correct_relative_import( fields_cache: Final[Dict[Type[object], List[str]]] = {} -def get_class_descriptors(cls: "Type[object]") -> Sequence[str]: +def get_class_descriptors(cls: Type[object]) -> Sequence[str]: import inspect # Lazy import for minor startup speed win # Maintain a cache of type -> attributes defined by descriptors in the class diff --git a/mypy/version.py b/mypy/version.py index 71536d51b83b..e0dc42b478f8 100644 --- a/mypy/version.py +++ b/mypy/version.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import os from mypy import git diff --git a/mypy/visitor.py b/mypy/visitor.py index d5398cec9bf6..62e7b4f90c8e 100644 --- a/mypy/visitor.py +++ b/mypy/visitor.py @@ -1,5 +1,7 @@ """Generic abstract syntax tree node visitor""" +from __future__ import annotations + from abc import abstractmethod from typing import TYPE_CHECKING, Generic, TypeVar @@ -18,179 +20,179 @@ 
@mypyc_attr(allow_interpreted_subclasses=True) class ExpressionVisitor(Generic[T]): @abstractmethod - def visit_int_expr(self, o: "mypy.nodes.IntExpr") -> T: + def visit_int_expr(self, o: mypy.nodes.IntExpr) -> T: pass @abstractmethod - def visit_str_expr(self, o: "mypy.nodes.StrExpr") -> T: + def visit_str_expr(self, o: mypy.nodes.StrExpr) -> T: pass @abstractmethod - def visit_bytes_expr(self, o: "mypy.nodes.BytesExpr") -> T: + def visit_bytes_expr(self, o: mypy.nodes.BytesExpr) -> T: pass @abstractmethod - def visit_float_expr(self, o: "mypy.nodes.FloatExpr") -> T: + def visit_float_expr(self, o: mypy.nodes.FloatExpr) -> T: pass @abstractmethod - def visit_complex_expr(self, o: "mypy.nodes.ComplexExpr") -> T: + def visit_complex_expr(self, o: mypy.nodes.ComplexExpr) -> T: pass @abstractmethod - def visit_ellipsis(self, o: "mypy.nodes.EllipsisExpr") -> T: + def visit_ellipsis(self, o: mypy.nodes.EllipsisExpr) -> T: pass @abstractmethod - def visit_star_expr(self, o: "mypy.nodes.StarExpr") -> T: + def visit_star_expr(self, o: mypy.nodes.StarExpr) -> T: pass @abstractmethod - def visit_name_expr(self, o: "mypy.nodes.NameExpr") -> T: + def visit_name_expr(self, o: mypy.nodes.NameExpr) -> T: pass @abstractmethod - def visit_member_expr(self, o: "mypy.nodes.MemberExpr") -> T: + def visit_member_expr(self, o: mypy.nodes.MemberExpr) -> T: pass @abstractmethod - def visit_yield_from_expr(self, o: "mypy.nodes.YieldFromExpr") -> T: + def visit_yield_from_expr(self, o: mypy.nodes.YieldFromExpr) -> T: pass @abstractmethod - def visit_yield_expr(self, o: "mypy.nodes.YieldExpr") -> T: + def visit_yield_expr(self, o: mypy.nodes.YieldExpr) -> T: pass @abstractmethod - def visit_call_expr(self, o: "mypy.nodes.CallExpr") -> T: + def visit_call_expr(self, o: mypy.nodes.CallExpr) -> T: pass @abstractmethod - def visit_op_expr(self, o: "mypy.nodes.OpExpr") -> T: + def visit_op_expr(self, o: mypy.nodes.OpExpr) -> T: pass @abstractmethod - def visit_comparison_expr(self, o: "mypy.nodes.ComparisonExpr") -> T: + def visit_comparison_expr(self, o: mypy.nodes.ComparisonExpr) -> T: pass @abstractmethod - def visit_cast_expr(self, o: "mypy.nodes.CastExpr") -> T: + def visit_cast_expr(self, o: mypy.nodes.CastExpr) -> T: pass @abstractmethod - def visit_assert_type_expr(self, o: "mypy.nodes.AssertTypeExpr") -> T: + def visit_assert_type_expr(self, o: mypy.nodes.AssertTypeExpr) -> T: pass @abstractmethod - def visit_reveal_expr(self, o: "mypy.nodes.RevealExpr") -> T: + def visit_reveal_expr(self, o: mypy.nodes.RevealExpr) -> T: pass @abstractmethod - def visit_super_expr(self, o: "mypy.nodes.SuperExpr") -> T: + def visit_super_expr(self, o: mypy.nodes.SuperExpr) -> T: pass @abstractmethod - def visit_unary_expr(self, o: "mypy.nodes.UnaryExpr") -> T: + def visit_unary_expr(self, o: mypy.nodes.UnaryExpr) -> T: pass @abstractmethod - def visit_assignment_expr(self, o: "mypy.nodes.AssignmentExpr") -> T: + def visit_assignment_expr(self, o: mypy.nodes.AssignmentExpr) -> T: pass @abstractmethod - def visit_list_expr(self, o: "mypy.nodes.ListExpr") -> T: + def visit_list_expr(self, o: mypy.nodes.ListExpr) -> T: pass @abstractmethod - def visit_dict_expr(self, o: "mypy.nodes.DictExpr") -> T: + def visit_dict_expr(self, o: mypy.nodes.DictExpr) -> T: pass @abstractmethod - def visit_tuple_expr(self, o: "mypy.nodes.TupleExpr") -> T: + def visit_tuple_expr(self, o: mypy.nodes.TupleExpr) -> T: pass @abstractmethod - def visit_set_expr(self, o: "mypy.nodes.SetExpr") -> T: + def visit_set_expr(self, o: mypy.nodes.SetExpr) -> T: pass 
@abstractmethod - def visit_index_expr(self, o: "mypy.nodes.IndexExpr") -> T: + def visit_index_expr(self, o: mypy.nodes.IndexExpr) -> T: pass @abstractmethod - def visit_type_application(self, o: "mypy.nodes.TypeApplication") -> T: + def visit_type_application(self, o: mypy.nodes.TypeApplication) -> T: pass @abstractmethod - def visit_lambda_expr(self, o: "mypy.nodes.LambdaExpr") -> T: + def visit_lambda_expr(self, o: mypy.nodes.LambdaExpr) -> T: pass @abstractmethod - def visit_list_comprehension(self, o: "mypy.nodes.ListComprehension") -> T: + def visit_list_comprehension(self, o: mypy.nodes.ListComprehension) -> T: pass @abstractmethod - def visit_set_comprehension(self, o: "mypy.nodes.SetComprehension") -> T: + def visit_set_comprehension(self, o: mypy.nodes.SetComprehension) -> T: pass @abstractmethod - def visit_dictionary_comprehension(self, o: "mypy.nodes.DictionaryComprehension") -> T: + def visit_dictionary_comprehension(self, o: mypy.nodes.DictionaryComprehension) -> T: pass @abstractmethod - def visit_generator_expr(self, o: "mypy.nodes.GeneratorExpr") -> T: + def visit_generator_expr(self, o: mypy.nodes.GeneratorExpr) -> T: pass @abstractmethod - def visit_slice_expr(self, o: "mypy.nodes.SliceExpr") -> T: + def visit_slice_expr(self, o: mypy.nodes.SliceExpr) -> T: pass @abstractmethod - def visit_conditional_expr(self, o: "mypy.nodes.ConditionalExpr") -> T: + def visit_conditional_expr(self, o: mypy.nodes.ConditionalExpr) -> T: pass @abstractmethod - def visit_type_var_expr(self, o: "mypy.nodes.TypeVarExpr") -> T: + def visit_type_var_expr(self, o: mypy.nodes.TypeVarExpr) -> T: pass @abstractmethod - def visit_paramspec_expr(self, o: "mypy.nodes.ParamSpecExpr") -> T: + def visit_paramspec_expr(self, o: mypy.nodes.ParamSpecExpr) -> T: pass @abstractmethod - def visit_type_var_tuple_expr(self, o: "mypy.nodes.TypeVarTupleExpr") -> T: + def visit_type_var_tuple_expr(self, o: mypy.nodes.TypeVarTupleExpr) -> T: pass @abstractmethod - def visit_type_alias_expr(self, o: "mypy.nodes.TypeAliasExpr") -> T: + def visit_type_alias_expr(self, o: mypy.nodes.TypeAliasExpr) -> T: pass @abstractmethod - def visit_namedtuple_expr(self, o: "mypy.nodes.NamedTupleExpr") -> T: + def visit_namedtuple_expr(self, o: mypy.nodes.NamedTupleExpr) -> T: pass @abstractmethod - def visit_enum_call_expr(self, o: "mypy.nodes.EnumCallExpr") -> T: + def visit_enum_call_expr(self, o: mypy.nodes.EnumCallExpr) -> T: pass @abstractmethod - def visit_typeddict_expr(self, o: "mypy.nodes.TypedDictExpr") -> T: + def visit_typeddict_expr(self, o: mypy.nodes.TypedDictExpr) -> T: pass @abstractmethod - def visit_newtype_expr(self, o: "mypy.nodes.NewTypeExpr") -> T: + def visit_newtype_expr(self, o: mypy.nodes.NewTypeExpr) -> T: pass @abstractmethod - def visit__promote_expr(self, o: "mypy.nodes.PromoteExpr") -> T: + def visit__promote_expr(self, o: mypy.nodes.PromoteExpr) -> T: pass @abstractmethod - def visit_await_expr(self, o: "mypy.nodes.AwaitExpr") -> T: + def visit_await_expr(self, o: mypy.nodes.AwaitExpr) -> T: pass @abstractmethod - def visit_temp_node(self, o: "mypy.nodes.TempNode") -> T: + def visit_temp_node(self, o: mypy.nodes.TempNode) -> T: pass @@ -200,111 +202,111 @@ class StatementVisitor(Generic[T]): # Definitions @abstractmethod - def visit_assignment_stmt(self, o: "mypy.nodes.AssignmentStmt") -> T: + def visit_assignment_stmt(self, o: mypy.nodes.AssignmentStmt) -> T: pass @abstractmethod - def visit_for_stmt(self, o: "mypy.nodes.ForStmt") -> T: + def visit_for_stmt(self, o: mypy.nodes.ForStmt) -> T: 
pass @abstractmethod - def visit_with_stmt(self, o: "mypy.nodes.WithStmt") -> T: + def visit_with_stmt(self, o: mypy.nodes.WithStmt) -> T: pass @abstractmethod - def visit_del_stmt(self, o: "mypy.nodes.DelStmt") -> T: + def visit_del_stmt(self, o: mypy.nodes.DelStmt) -> T: pass @abstractmethod - def visit_func_def(self, o: "mypy.nodes.FuncDef") -> T: + def visit_func_def(self, o: mypy.nodes.FuncDef) -> T: pass @abstractmethod - def visit_overloaded_func_def(self, o: "mypy.nodes.OverloadedFuncDef") -> T: + def visit_overloaded_func_def(self, o: mypy.nodes.OverloadedFuncDef) -> T: pass @abstractmethod - def visit_class_def(self, o: "mypy.nodes.ClassDef") -> T: + def visit_class_def(self, o: mypy.nodes.ClassDef) -> T: pass @abstractmethod - def visit_global_decl(self, o: "mypy.nodes.GlobalDecl") -> T: + def visit_global_decl(self, o: mypy.nodes.GlobalDecl) -> T: pass @abstractmethod - def visit_nonlocal_decl(self, o: "mypy.nodes.NonlocalDecl") -> T: + def visit_nonlocal_decl(self, o: mypy.nodes.NonlocalDecl) -> T: pass @abstractmethod - def visit_decorator(self, o: "mypy.nodes.Decorator") -> T: + def visit_decorator(self, o: mypy.nodes.Decorator) -> T: pass # Module structure @abstractmethod - def visit_import(self, o: "mypy.nodes.Import") -> T: + def visit_import(self, o: mypy.nodes.Import) -> T: pass @abstractmethod - def visit_import_from(self, o: "mypy.nodes.ImportFrom") -> T: + def visit_import_from(self, o: mypy.nodes.ImportFrom) -> T: pass @abstractmethod - def visit_import_all(self, o: "mypy.nodes.ImportAll") -> T: + def visit_import_all(self, o: mypy.nodes.ImportAll) -> T: pass # Statements @abstractmethod - def visit_block(self, o: "mypy.nodes.Block") -> T: + def visit_block(self, o: mypy.nodes.Block) -> T: pass @abstractmethod - def visit_expression_stmt(self, o: "mypy.nodes.ExpressionStmt") -> T: + def visit_expression_stmt(self, o: mypy.nodes.ExpressionStmt) -> T: pass @abstractmethod - def visit_operator_assignment_stmt(self, o: "mypy.nodes.OperatorAssignmentStmt") -> T: + def visit_operator_assignment_stmt(self, o: mypy.nodes.OperatorAssignmentStmt) -> T: pass @abstractmethod - def visit_while_stmt(self, o: "mypy.nodes.WhileStmt") -> T: + def visit_while_stmt(self, o: mypy.nodes.WhileStmt) -> T: pass @abstractmethod - def visit_return_stmt(self, o: "mypy.nodes.ReturnStmt") -> T: + def visit_return_stmt(self, o: mypy.nodes.ReturnStmt) -> T: pass @abstractmethod - def visit_assert_stmt(self, o: "mypy.nodes.AssertStmt") -> T: + def visit_assert_stmt(self, o: mypy.nodes.AssertStmt) -> T: pass @abstractmethod - def visit_if_stmt(self, o: "mypy.nodes.IfStmt") -> T: + def visit_if_stmt(self, o: mypy.nodes.IfStmt) -> T: pass @abstractmethod - def visit_break_stmt(self, o: "mypy.nodes.BreakStmt") -> T: + def visit_break_stmt(self, o: mypy.nodes.BreakStmt) -> T: pass @abstractmethod - def visit_continue_stmt(self, o: "mypy.nodes.ContinueStmt") -> T: + def visit_continue_stmt(self, o: mypy.nodes.ContinueStmt) -> T: pass @abstractmethod - def visit_pass_stmt(self, o: "mypy.nodes.PassStmt") -> T: + def visit_pass_stmt(self, o: mypy.nodes.PassStmt) -> T: pass @abstractmethod - def visit_raise_stmt(self, o: "mypy.nodes.RaiseStmt") -> T: + def visit_raise_stmt(self, o: mypy.nodes.RaiseStmt) -> T: pass @abstractmethod - def visit_try_stmt(self, o: "mypy.nodes.TryStmt") -> T: + def visit_try_stmt(self, o: mypy.nodes.TryStmt) -> T: pass @abstractmethod - def visit_match_stmt(self, o: "mypy.nodes.MatchStmt") -> T: + def visit_match_stmt(self, o: mypy.nodes.MatchStmt) -> T: pass @@ -312,35 +314,35 
@@ def visit_match_stmt(self, o: "mypy.nodes.MatchStmt") -> T: @mypyc_attr(allow_interpreted_subclasses=True) class PatternVisitor(Generic[T]): @abstractmethod - def visit_as_pattern(self, o: "mypy.patterns.AsPattern") -> T: + def visit_as_pattern(self, o: mypy.patterns.AsPattern) -> T: pass @abstractmethod - def visit_or_pattern(self, o: "mypy.patterns.OrPattern") -> T: + def visit_or_pattern(self, o: mypy.patterns.OrPattern) -> T: pass @abstractmethod - def visit_value_pattern(self, o: "mypy.patterns.ValuePattern") -> T: + def visit_value_pattern(self, o: mypy.patterns.ValuePattern) -> T: pass @abstractmethod - def visit_singleton_pattern(self, o: "mypy.patterns.SingletonPattern") -> T: + def visit_singleton_pattern(self, o: mypy.patterns.SingletonPattern) -> T: pass @abstractmethod - def visit_sequence_pattern(self, o: "mypy.patterns.SequencePattern") -> T: + def visit_sequence_pattern(self, o: mypy.patterns.SequencePattern) -> T: pass @abstractmethod - def visit_starred_pattern(self, o: "mypy.patterns.StarredPattern") -> T: + def visit_starred_pattern(self, o: mypy.patterns.StarredPattern) -> T: pass @abstractmethod - def visit_mapping_pattern(self, o: "mypy.patterns.MappingPattern") -> T: + def visit_mapping_pattern(self, o: mypy.patterns.MappingPattern) -> T: pass @abstractmethod - def visit_class_pattern(self, o: "mypy.patterns.ClassPattern") -> T: + def visit_class_pattern(self, o: mypy.patterns.ClassPattern) -> T: pass @@ -358,261 +360,261 @@ class NodeVisitor(Generic[T], ExpressionVisitor[T], StatementVisitor[T], Pattern # Not in superclasses: - def visit_mypy_file(self, o: "mypy.nodes.MypyFile") -> T: + def visit_mypy_file(self, o: mypy.nodes.MypyFile) -> T: pass # TODO: We have a visit_var method, but no visit_typeinfo or any # other non-Statement SymbolNode (accepting those will raise a # runtime error). Maybe this should be resolved in some direction. 
- def visit_var(self, o: "mypy.nodes.Var") -> T: + def visit_var(self, o: mypy.nodes.Var) -> T: pass # Module structure - def visit_import(self, o: "mypy.nodes.Import") -> T: + def visit_import(self, o: mypy.nodes.Import) -> T: pass - def visit_import_from(self, o: "mypy.nodes.ImportFrom") -> T: + def visit_import_from(self, o: mypy.nodes.ImportFrom) -> T: pass - def visit_import_all(self, o: "mypy.nodes.ImportAll") -> T: + def visit_import_all(self, o: mypy.nodes.ImportAll) -> T: pass # Definitions - def visit_func_def(self, o: "mypy.nodes.FuncDef") -> T: + def visit_func_def(self, o: mypy.nodes.FuncDef) -> T: pass - def visit_overloaded_func_def(self, o: "mypy.nodes.OverloadedFuncDef") -> T: + def visit_overloaded_func_def(self, o: mypy.nodes.OverloadedFuncDef) -> T: pass - def visit_class_def(self, o: "mypy.nodes.ClassDef") -> T: + def visit_class_def(self, o: mypy.nodes.ClassDef) -> T: pass - def visit_global_decl(self, o: "mypy.nodes.GlobalDecl") -> T: + def visit_global_decl(self, o: mypy.nodes.GlobalDecl) -> T: pass - def visit_nonlocal_decl(self, o: "mypy.nodes.NonlocalDecl") -> T: + def visit_nonlocal_decl(self, o: mypy.nodes.NonlocalDecl) -> T: pass - def visit_decorator(self, o: "mypy.nodes.Decorator") -> T: + def visit_decorator(self, o: mypy.nodes.Decorator) -> T: pass - def visit_type_alias(self, o: "mypy.nodes.TypeAlias") -> T: + def visit_type_alias(self, o: mypy.nodes.TypeAlias) -> T: pass - def visit_placeholder_node(self, o: "mypy.nodes.PlaceholderNode") -> T: + def visit_placeholder_node(self, o: mypy.nodes.PlaceholderNode) -> T: pass # Statements - def visit_block(self, o: "mypy.nodes.Block") -> T: + def visit_block(self, o: mypy.nodes.Block) -> T: pass - def visit_expression_stmt(self, o: "mypy.nodes.ExpressionStmt") -> T: + def visit_expression_stmt(self, o: mypy.nodes.ExpressionStmt) -> T: pass - def visit_assignment_stmt(self, o: "mypy.nodes.AssignmentStmt") -> T: + def visit_assignment_stmt(self, o: mypy.nodes.AssignmentStmt) -> T: pass - def visit_operator_assignment_stmt(self, o: "mypy.nodes.OperatorAssignmentStmt") -> T: + def visit_operator_assignment_stmt(self, o: mypy.nodes.OperatorAssignmentStmt) -> T: pass - def visit_while_stmt(self, o: "mypy.nodes.WhileStmt") -> T: + def visit_while_stmt(self, o: mypy.nodes.WhileStmt) -> T: pass - def visit_for_stmt(self, o: "mypy.nodes.ForStmt") -> T: + def visit_for_stmt(self, o: mypy.nodes.ForStmt) -> T: pass - def visit_return_stmt(self, o: "mypy.nodes.ReturnStmt") -> T: + def visit_return_stmt(self, o: mypy.nodes.ReturnStmt) -> T: pass - def visit_assert_stmt(self, o: "mypy.nodes.AssertStmt") -> T: + def visit_assert_stmt(self, o: mypy.nodes.AssertStmt) -> T: pass - def visit_del_stmt(self, o: "mypy.nodes.DelStmt") -> T: + def visit_del_stmt(self, o: mypy.nodes.DelStmt) -> T: pass - def visit_if_stmt(self, o: "mypy.nodes.IfStmt") -> T: + def visit_if_stmt(self, o: mypy.nodes.IfStmt) -> T: pass - def visit_break_stmt(self, o: "mypy.nodes.BreakStmt") -> T: + def visit_break_stmt(self, o: mypy.nodes.BreakStmt) -> T: pass - def visit_continue_stmt(self, o: "mypy.nodes.ContinueStmt") -> T: + def visit_continue_stmt(self, o: mypy.nodes.ContinueStmt) -> T: pass - def visit_pass_stmt(self, o: "mypy.nodes.PassStmt") -> T: + def visit_pass_stmt(self, o: mypy.nodes.PassStmt) -> T: pass - def visit_raise_stmt(self, o: "mypy.nodes.RaiseStmt") -> T: + def visit_raise_stmt(self, o: mypy.nodes.RaiseStmt) -> T: pass - def visit_try_stmt(self, o: "mypy.nodes.TryStmt") -> T: + def visit_try_stmt(self, o: mypy.nodes.TryStmt) -> T: pass 
- def visit_with_stmt(self, o: "mypy.nodes.WithStmt") -> T: + def visit_with_stmt(self, o: mypy.nodes.WithStmt) -> T: pass - def visit_match_stmt(self, o: "mypy.nodes.MatchStmt") -> T: + def visit_match_stmt(self, o: mypy.nodes.MatchStmt) -> T: pass # Expressions (default no-op implementation) - def visit_int_expr(self, o: "mypy.nodes.IntExpr") -> T: + def visit_int_expr(self, o: mypy.nodes.IntExpr) -> T: pass - def visit_str_expr(self, o: "mypy.nodes.StrExpr") -> T: + def visit_str_expr(self, o: mypy.nodes.StrExpr) -> T: pass - def visit_bytes_expr(self, o: "mypy.nodes.BytesExpr") -> T: + def visit_bytes_expr(self, o: mypy.nodes.BytesExpr) -> T: pass - def visit_float_expr(self, o: "mypy.nodes.FloatExpr") -> T: + def visit_float_expr(self, o: mypy.nodes.FloatExpr) -> T: pass - def visit_complex_expr(self, o: "mypy.nodes.ComplexExpr") -> T: + def visit_complex_expr(self, o: mypy.nodes.ComplexExpr) -> T: pass - def visit_ellipsis(self, o: "mypy.nodes.EllipsisExpr") -> T: + def visit_ellipsis(self, o: mypy.nodes.EllipsisExpr) -> T: pass - def visit_star_expr(self, o: "mypy.nodes.StarExpr") -> T: + def visit_star_expr(self, o: mypy.nodes.StarExpr) -> T: pass - def visit_name_expr(self, o: "mypy.nodes.NameExpr") -> T: + def visit_name_expr(self, o: mypy.nodes.NameExpr) -> T: pass - def visit_member_expr(self, o: "mypy.nodes.MemberExpr") -> T: + def visit_member_expr(self, o: mypy.nodes.MemberExpr) -> T: pass - def visit_yield_from_expr(self, o: "mypy.nodes.YieldFromExpr") -> T: + def visit_yield_from_expr(self, o: mypy.nodes.YieldFromExpr) -> T: pass - def visit_yield_expr(self, o: "mypy.nodes.YieldExpr") -> T: + def visit_yield_expr(self, o: mypy.nodes.YieldExpr) -> T: pass - def visit_call_expr(self, o: "mypy.nodes.CallExpr") -> T: + def visit_call_expr(self, o: mypy.nodes.CallExpr) -> T: pass - def visit_op_expr(self, o: "mypy.nodes.OpExpr") -> T: + def visit_op_expr(self, o: mypy.nodes.OpExpr) -> T: pass - def visit_comparison_expr(self, o: "mypy.nodes.ComparisonExpr") -> T: + def visit_comparison_expr(self, o: mypy.nodes.ComparisonExpr) -> T: pass - def visit_cast_expr(self, o: "mypy.nodes.CastExpr") -> T: + def visit_cast_expr(self, o: mypy.nodes.CastExpr) -> T: pass - def visit_assert_type_expr(self, o: "mypy.nodes.AssertTypeExpr") -> T: + def visit_assert_type_expr(self, o: mypy.nodes.AssertTypeExpr) -> T: pass - def visit_reveal_expr(self, o: "mypy.nodes.RevealExpr") -> T: + def visit_reveal_expr(self, o: mypy.nodes.RevealExpr) -> T: pass - def visit_super_expr(self, o: "mypy.nodes.SuperExpr") -> T: + def visit_super_expr(self, o: mypy.nodes.SuperExpr) -> T: pass - def visit_assignment_expr(self, o: "mypy.nodes.AssignmentExpr") -> T: + def visit_assignment_expr(self, o: mypy.nodes.AssignmentExpr) -> T: pass - def visit_unary_expr(self, o: "mypy.nodes.UnaryExpr") -> T: + def visit_unary_expr(self, o: mypy.nodes.UnaryExpr) -> T: pass - def visit_list_expr(self, o: "mypy.nodes.ListExpr") -> T: + def visit_list_expr(self, o: mypy.nodes.ListExpr) -> T: pass - def visit_dict_expr(self, o: "mypy.nodes.DictExpr") -> T: + def visit_dict_expr(self, o: mypy.nodes.DictExpr) -> T: pass - def visit_tuple_expr(self, o: "mypy.nodes.TupleExpr") -> T: + def visit_tuple_expr(self, o: mypy.nodes.TupleExpr) -> T: pass - def visit_set_expr(self, o: "mypy.nodes.SetExpr") -> T: + def visit_set_expr(self, o: mypy.nodes.SetExpr) -> T: pass - def visit_index_expr(self, o: "mypy.nodes.IndexExpr") -> T: + def visit_index_expr(self, o: mypy.nodes.IndexExpr) -> T: pass - def visit_type_application(self, o: 
"mypy.nodes.TypeApplication") -> T: + def visit_type_application(self, o: mypy.nodes.TypeApplication) -> T: pass - def visit_lambda_expr(self, o: "mypy.nodes.LambdaExpr") -> T: + def visit_lambda_expr(self, o: mypy.nodes.LambdaExpr) -> T: pass - def visit_list_comprehension(self, o: "mypy.nodes.ListComprehension") -> T: + def visit_list_comprehension(self, o: mypy.nodes.ListComprehension) -> T: pass - def visit_set_comprehension(self, o: "mypy.nodes.SetComprehension") -> T: + def visit_set_comprehension(self, o: mypy.nodes.SetComprehension) -> T: pass - def visit_dictionary_comprehension(self, o: "mypy.nodes.DictionaryComprehension") -> T: + def visit_dictionary_comprehension(self, o: mypy.nodes.DictionaryComprehension) -> T: pass - def visit_generator_expr(self, o: "mypy.nodes.GeneratorExpr") -> T: + def visit_generator_expr(self, o: mypy.nodes.GeneratorExpr) -> T: pass - def visit_slice_expr(self, o: "mypy.nodes.SliceExpr") -> T: + def visit_slice_expr(self, o: mypy.nodes.SliceExpr) -> T: pass - def visit_conditional_expr(self, o: "mypy.nodes.ConditionalExpr") -> T: + def visit_conditional_expr(self, o: mypy.nodes.ConditionalExpr) -> T: pass - def visit_type_var_expr(self, o: "mypy.nodes.TypeVarExpr") -> T: + def visit_type_var_expr(self, o: mypy.nodes.TypeVarExpr) -> T: pass - def visit_paramspec_expr(self, o: "mypy.nodes.ParamSpecExpr") -> T: + def visit_paramspec_expr(self, o: mypy.nodes.ParamSpecExpr) -> T: pass - def visit_type_var_tuple_expr(self, o: "mypy.nodes.TypeVarTupleExpr") -> T: + def visit_type_var_tuple_expr(self, o: mypy.nodes.TypeVarTupleExpr) -> T: pass - def visit_type_alias_expr(self, o: "mypy.nodes.TypeAliasExpr") -> T: + def visit_type_alias_expr(self, o: mypy.nodes.TypeAliasExpr) -> T: pass - def visit_namedtuple_expr(self, o: "mypy.nodes.NamedTupleExpr") -> T: + def visit_namedtuple_expr(self, o: mypy.nodes.NamedTupleExpr) -> T: pass - def visit_enum_call_expr(self, o: "mypy.nodes.EnumCallExpr") -> T: + def visit_enum_call_expr(self, o: mypy.nodes.EnumCallExpr) -> T: pass - def visit_typeddict_expr(self, o: "mypy.nodes.TypedDictExpr") -> T: + def visit_typeddict_expr(self, o: mypy.nodes.TypedDictExpr) -> T: pass - def visit_newtype_expr(self, o: "mypy.nodes.NewTypeExpr") -> T: + def visit_newtype_expr(self, o: mypy.nodes.NewTypeExpr) -> T: pass - def visit__promote_expr(self, o: "mypy.nodes.PromoteExpr") -> T: + def visit__promote_expr(self, o: mypy.nodes.PromoteExpr) -> T: pass - def visit_await_expr(self, o: "mypy.nodes.AwaitExpr") -> T: + def visit_await_expr(self, o: mypy.nodes.AwaitExpr) -> T: pass - def visit_temp_node(self, o: "mypy.nodes.TempNode") -> T: + def visit_temp_node(self, o: mypy.nodes.TempNode) -> T: pass # Patterns - def visit_as_pattern(self, o: "mypy.patterns.AsPattern") -> T: + def visit_as_pattern(self, o: mypy.patterns.AsPattern) -> T: pass - def visit_or_pattern(self, o: "mypy.patterns.OrPattern") -> T: + def visit_or_pattern(self, o: mypy.patterns.OrPattern) -> T: pass - def visit_value_pattern(self, o: "mypy.patterns.ValuePattern") -> T: + def visit_value_pattern(self, o: mypy.patterns.ValuePattern) -> T: pass - def visit_singleton_pattern(self, o: "mypy.patterns.SingletonPattern") -> T: + def visit_singleton_pattern(self, o: mypy.patterns.SingletonPattern) -> T: pass - def visit_sequence_pattern(self, o: "mypy.patterns.SequencePattern") -> T: + def visit_sequence_pattern(self, o: mypy.patterns.SequencePattern) -> T: pass - def visit_starred_pattern(self, o: "mypy.patterns.StarredPattern") -> T: + def visit_starred_pattern(self, o: 
mypy.patterns.StarredPattern) -> T: pass - def visit_mapping_pattern(self, o: "mypy.patterns.MappingPattern") -> T: + def visit_mapping_pattern(self, o: mypy.patterns.MappingPattern) -> T: pass - def visit_class_pattern(self, o: "mypy.patterns.ClassPattern") -> T: + def visit_class_pattern(self, o: mypy.patterns.ClassPattern) -> T: pass diff --git a/mypyc/__main__.py b/mypyc/__main__.py index a37b500fae74..a3b9d21bc65a 100644 --- a/mypyc/__main__.py +++ b/mypyc/__main__.py @@ -10,6 +10,8 @@ mypycify, suitable for prototyping and testing. """ +from __future__ import annotations + import os import os.path import subprocess diff --git a/mypyc/build.py b/mypyc/build.py index b61325f8a232..a3a91fd97873 100644 --- a/mypyc/build.py +++ b/mypyc/build.py @@ -18,6 +18,8 @@ hackily decide based on whether setuptools has been imported already. """ +from __future__ import annotations + import hashlib import os.path import re @@ -64,7 +66,7 @@ from distutils import ccompiler, sysconfig -def get_extension() -> Type["Extension"]: +def get_extension() -> Type[Extension]: # We can work with either setuptools or distutils, and pick setuptools # if it has been imported. use_setuptools = "setuptools" in sys.modules @@ -253,7 +255,7 @@ def build_using_shared_lib( deps: List[str], build_dir: str, extra_compile_args: List[str], -) -> List["Extension"]: +) -> List[Extension]: """Produce the list of extension modules when a shared library is needed. This creates one shared library extension module that all of the @@ -296,7 +298,7 @@ def build_using_shared_lib( def build_single_module( sources: List[BuildSource], cfiles: List[str], extra_compile_args: List[str] -) -> List["Extension"]: +) -> List[Extension]: """Produce the list of extension modules for a standalone extension. This contains just one module, since there is no need for a shared module. @@ -461,7 +463,7 @@ def mypycify( skip_cgen_input: Optional[Any] = None, target_dir: Optional[str] = None, include_runtime_files: Optional[bool] = None, -) -> List["Extension"]: +) -> List[Extension]: """Main entry point to building using mypyc. 
This produces a list of Extension objects that should be passed as the diff --git a/mypyc/common.py b/mypyc/common.py index bd22f5e43a07..b631dff207ad 100644 --- a/mypyc/common.py +++ b/mypyc/common.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import sys from typing import Any, Dict, Optional, Tuple from typing_extensions import Final diff --git a/mypyc/crash.py b/mypyc/crash.py index 394e6a7b6fc5..19136ea2f1de 100644 --- a/mypyc/crash.py +++ b/mypyc/crash.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import sys import traceback from contextlib import contextmanager @@ -12,7 +14,7 @@ def catch_errors(module_path: str, line: int) -> Iterator[None]: crash_report(module_path, line) -def crash_report(module_path: str, line: int) -> "NoReturn": +def crash_report(module_path: str, line: int) -> NoReturn: # Adapted from report_internal_error in mypy err = sys.exc_info()[1] tb = traceback.extract_stack()[:-4] diff --git a/mypyc/errors.py b/mypyc/errors.py index dd0c5dcbc4cc..d93a108c1725 100644 --- a/mypyc/errors.py +++ b/mypyc/errors.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import List import mypy.errors diff --git a/mypyc/namegen.py b/mypyc/namegen.py index 9df9be82d3a7..5872de5db0a9 100644 --- a/mypyc/namegen.py +++ b/mypyc/namegen.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import Dict, Iterable, List, Optional, Set, Tuple diff --git a/mypyc/options.py b/mypyc/options.py index bf8bacba9117..334e03390797 100644 --- a/mypyc/options.py +++ b/mypyc/options.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import sys from typing import Optional, Tuple diff --git a/mypyc/rt_subtype.py b/mypyc/rt_subtype.py index 4e4191406333..f3fe1a442d22 100644 --- a/mypyc/rt_subtype.py +++ b/mypyc/rt_subtype.py @@ -13,6 +13,8 @@ coercion is necessary first. """ +from __future__ import annotations + from mypyc.ir.rtypes import ( RArray, RInstance, diff --git a/mypyc/sametype.py b/mypyc/sametype.py index c16b2e658d58..a3cfd5c08059 100644 --- a/mypyc/sametype.py +++ b/mypyc/sametype.py @@ -1,5 +1,7 @@ """Same type check for RTypes.""" +from __future__ import annotations + from mypyc.ir.func_ir import FuncSignature from mypyc.ir.rtypes import ( RArray, diff --git a/mypyc/subtype.py b/mypyc/subtype.py index 26ceb9e308f1..726a48d7a01d 100644 --- a/mypyc/subtype.py +++ b/mypyc/subtype.py @@ -1,5 +1,7 @@ """Subtype check for RTypes.""" +from __future__ import annotations + from mypyc.ir.rtypes import ( RArray, RInstance, diff --git a/runtests.py b/runtests.py index bd991d2ca250..c41f1db7e40f 100755 --- a/runtests.py +++ b/runtests.py @@ -1,4 +1,7 @@ #!/usr/bin/env python3 + +from __future__ import annotations + import subprocess from subprocess import Popen from sys import argv, executable, exit diff --git a/scripts/find_type.py b/scripts/find_type.py index d52424952a33..a04368905451 100755 --- a/scripts/find_type.py +++ b/scripts/find_type.py @@ -23,6 +23,8 @@ # # For an Emacs example, see misc/macs.el. +from __future__ import annotations + import os.path import re import subprocess diff --git a/setup.py b/setup.py index 6b9ed2f578ae..a8c86ff663a3 100644 --- a/setup.py +++ b/setup.py @@ -1,5 +1,7 @@ #!/usr/bin/env python +from __future__ import annotations + import glob import os import os.path
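Note (illustrative, not part of the patch): the quotes around annotations can be dropped in every file touched above because `from __future__ import annotations` (PEP 563) makes all annotations lazily evaluated strings, so forward references to names defined later, or to the enclosing class itself, no longer need manual quoting. A minimal sketch of the before/after pattern this patch applies, using a hypothetical class name chosen only for illustration:

from __future__ import annotations


class Node:
    # Without the future import, returning the enclosing class from a method
    # annotation would require the quoted form "Node"; with it, the bare name
    # works because annotations are never evaluated at class-definition time.
    def copy(self) -> Node:
        return Node()

    @classmethod
    def deserialize(cls, data: dict) -> Node:
        return cls()

The future import only affects annotations; strings used as types outside annotations (for example in cast() calls or type aliases evaluated at runtime) still need quoting where the referenced name is not yet defined.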