From f98f78216ba9d6ab68c8e69c19e9f3c7926c5efe Mon Sep 17 00:00:00 2001
From: Shantanu <12621235+hauntsaninja@users.noreply.github.com>
Date: Mon, 28 Jun 2021 08:05:36 -0700
Subject: [PATCH] Use variable annotations (#10723)

* Run com2ann
* manual fixes (mainly None defaults)
* manual fixes (mainly stringifying types)
* manual fixes (more default types)
* and some type ignores
* run darker to appease flake8
* fix tests part 1 of ???
* fix tests part 2 of ???
* fix tests part 3 of ???
* fix tests part 4 of ???

Co-authored-by: hauntsaninja <>
---
 mypy/applytype.py | 2 +-
 mypy/argmap.py | 8 +-
 mypy/binder.py | 16 +-
 mypy/build.py | 155 ++++----
 mypy/checker.py | 173 ++++-----
 mypy/checkexpr.py | 149 ++++----
 mypy/checkmember.py | 8 +-
 mypy/checkstrformat.py | 68 ++--
 mypy/config_parser.py | 20 +-
 mypy/constraints.py | 24 +-
 mypy/defaults.py | 58 +--
 mypy/dmypy/client.py | 2 +-
 mypy/dmypy_os.py | 4 +-
 mypy/dmypy_server.py | 16 +-
 mypy/dmypy_util.py | 2 +-
 mypy/errorcodes.py | 190 +++++-----
 mypy/errors.py | 64 ++--
 mypy/expandtype.py | 10 +-
 mypy/exprtotype.py | 4 +-
 mypy/fastparse.py | 80 ++---
 mypy/fastparse2.py | 74 ++--
 mypy/find_sources.py | 4 +-
 mypy/fixup.py | 4 +-
 mypy/fscache.py | 24 +-
 mypy/fswatcher.py | 4 +-
 mypy/gclogger.py | 2 +-
 mypy/indirection.py | 6 +-
 mypy/ipc.py | 8 +-
 mypy/join.py | 21 +-
 mypy/literals.py | 13 +-
 mypy/main.py | 26 +-
 mypy/maptype.py | 8 +-
 mypy/meet.py | 11 +-
 mypy/memprofile.py | 4 +-
 mypy/message_registry.py | 249 +++++------
 mypy/messages.py | 46 +--
 mypy/metastore.py | 3 +-
 mypy/modulefinder.py | 24 +-
 mypy/moduleinspect.py | 10 +-
 mypy/mro.py | 2 +-
 mypy/nodes.py | 517 ++++++++++++++-------------
 mypy/operators.py | 50 +--
 mypy/options.py | 74 ++--
 mypy/plugin.py | 20 +-
 mypy/plugins/attrs.py | 33 +-
 mypy/plugins/ctypes.py | 4 +-
 mypy/plugins/dataclasses.py | 14 +-
 mypy/plugins/default.py | 2 +-
 mypy/plugins/enums.py | 16 +-
 mypy/plugins/functools.py | 4 +-
 mypy/reachability.py | 31 +-
 mypy/renaming.py | 18 +-
 mypy/report.py | 67 ++--
 mypy/scope.py | 6 +-
 mypy/semanal.py | 121 +++---
 mypy/semanal_classprop.py | 18 +-
 mypy/semanal_enum.py | 2 +-
 mypy/semanal_main.py | 10 +-
 mypy/semanal_namedtuple.py | 39 +-
 mypy/semanal_shared.py | 2 +-
 mypy/semanal_typeargs.py | 2 +-
 mypy/semanal_typeddict.py | 19 +-
 mypy/server/astdiff.py | 2 +-
 mypy/server/astmerge.py | 2 +-
 mypy/server/aststrip.py | 2 +-
 mypy/server/deps.py | 12 +-
 mypy/server/mergecheck.py | 4 +-
 mypy/server/objgraph.py | 40 ++-
 mypy/server/subexpr.py | 2 +-
 mypy/server/trigger.py | 2 +-
 mypy/server/update.py | 52 +--
 mypy/sharedparse.py | 16 +-
 mypy/solve.py | 10 +-
 mypy/state.py | 2 +-
 mypy/stats.py | 22 +-
 mypy/strconv.py | 46 +--
 mypy/stubdoc.py | 30 +-
 mypy/stubgen.py | 92 ++---
 mypy/stubgenc.py | 42 ++-
 mypy/stubtest.py | 22 +-
 mypy/subtypes.py | 15 +-
 mypy/suggestions.py | 24 +-
 mypy/test/data.py | 72 ++--
 mypy/test/helpers.py | 2 +-
 mypy/test/testdaemon.py | 2 +-
 mypy/test/testdeps.py | 2 +-
 mypy/test/testerrorstream.py | 2 +-
 mypy/test/testfinegrained.py | 8 +-
 mypy/test/testgraph.py | 9 +-
 mypy/test/testinfer.py | 12 +-
 mypy/test/testipc.py | 4 +-
 mypy/test/testmerge.py | 2 +-
 mypy/test/testsamples.py | 4 +-
 mypy/test/testsemanal.py | 2 +-
 mypy/test/testsolve.py | 2 +-
 mypy/test/teststubgen.py | 89 +++--
 mypy/test/testtypes.py | 2 +-
 mypy/test/typefixture.py | 2 +-
 mypy/test/visitors.py | 2 +-
 mypy/traverser.py | 2 +-
 mypy/treetransform.py | 6 +-
 mypy/tvar_scope.py | 8 +-
 mypy/type_visitor.py | 8 +-
 mypy/typeanal.py | 44 +--
 mypy/typeops.py | 24 +-
 mypy/types.py | 133 +++---
mypy/typestate.py | 18 +- mypy/typevars.py | 2 +- mypy/util.py | 39 +- mypyc/analysis/dataflow.py | 18 +- mypyc/build.py | 14 +- mypyc/codegen/cstring.py | 2 +- mypyc/codegen/emit.py | 6 +- mypyc/codegen/emitclass.py | 34 +- mypyc/codegen/emitfunc.py | 6 +- mypyc/codegen/emitmodule.py | 16 +- mypyc/codegen/emitwrapper.py | 8 +- mypyc/codegen/literals.py | 20 +- mypyc/common.py | 47 ++- mypyc/ir/class_ir.py | 32 +- mypyc/ir/func_ir.py | 14 +- mypyc/ir/ops.py | 92 ++--- mypyc/ir/pprint.py | 8 +- mypyc/ir/rtypes.py | 83 +++-- mypyc/irbuild/builder.py | 30 +- mypyc/irbuild/classdef.py | 10 +- mypyc/irbuild/context.py | 24 +- mypyc/irbuild/expression.py | 8 +- mypyc/irbuild/for_helpers.py | 23 +- mypyc/irbuild/function.py | 6 +- mypyc/irbuild/ll_builder.py | 22 +- mypyc/irbuild/main.py | 2 +- mypyc/irbuild/mapper.py | 4 +- mypyc/irbuild/nonlocalcontrol.py | 2 +- mypyc/irbuild/prebuildvisitor.py | 14 +- mypyc/irbuild/specialize.py | 2 +- mypyc/irbuild/targets.py | 4 +- mypyc/irbuild/util.py | 2 +- mypyc/irbuild/visitor.py | 2 +- mypyc/namegen.py | 8 +- mypyc/primitives/int_ops.py | 4 +- mypyc/primitives/registry.py | 12 +- mypyc/primitives/str_ops.py | 13 +- mypyc/test-data/fixtures/ir.py | 2 +- mypyc/test-data/fixtures/testutil.py | 2 +- mypyc/test/test_analysis.py | 2 +- mypyc/test/test_emit.py | 2 +- mypyc/test/test_emitfunc.py | 2 +- mypyc/test/test_external.py | 2 +- mypyc/test/testutil.py | 2 +- mypyc/transform/exceptions.py | 4 +- mypyc/transform/refcount.py | 8 +- mypyc/transform/uninit.py | 4 +- 153 files changed, 2173 insertions(+), 2055 deletions(-) diff --git a/mypy/applytype.py b/mypy/applytype.py index 2bc2fa92f7dc..d034c8f37969 100644 --- a/mypy/applytype.py +++ b/mypy/applytype.py @@ -75,7 +75,7 @@ def apply_generic_arguments( types = get_proper_types(orig_types) # Create a map from type variable id to target type. - id_to_type = {} # type: Dict[TypeVarId, Type] + id_to_type: Dict[TypeVarId, Type] = {} for tvar, type in zip(tvars, types): assert not isinstance(type, PartialType), "Internal error: must never apply partial type" diff --git a/mypy/argmap.py b/mypy/argmap.py index ff7e94e93cbe..216406dc02b6 100644 --- a/mypy/argmap.py +++ b/mypy/argmap.py @@ -23,8 +23,8 @@ def map_actuals_to_formals(actual_kinds: List[int], argument type with the given index. """ nformals = len(formal_kinds) - formal_to_actual = [[] for i in range(nformals)] # type: List[List[int]] - ambiguous_actual_kwargs = [] # type: List[int] + formal_to_actual: List[List[int]] = [[] for i in range(nformals)] + ambiguous_actual_kwargs: List[int] = [] fi = 0 for ai, actual_kind in enumerate(actual_kinds): if actual_kind == nodes.ARG_POS: @@ -112,7 +112,7 @@ def map_formals_to_actuals(actual_kinds: List[int], formal_names, actual_arg_type) # Now reverse the mapping. - actual_to_formal = [[] for _ in actual_kinds] # type: List[List[int]] + actual_to_formal: List[List[int]] = [[] for _ in actual_kinds] for formal, actuals in enumerate(formal_to_actual): for actual in actuals: actual_to_formal[actual].append(formal) @@ -145,7 +145,7 @@ def __init__(self) -> None: # Next tuple *args index to use. self.tuple_index = 0 # Keyword arguments in TypedDict **kwargs used. 
- self.kwargs_used = set() # type: Set[str] + self.kwargs_used: Set[str] = set() def expand_actual_type(self, actual_type: Type, diff --git a/mypy/binder.py b/mypy/binder.py index 394c5ccf987c..1c711ce9c631 100644 --- a/mypy/binder.py +++ b/mypy/binder.py @@ -32,7 +32,7 @@ class Frame: """ def __init__(self) -> None: - self.types = {} # type: Dict[Key, Type] + self.types: Dict[Key, Type] = {} self.unreachable = False # Should be set only if we're entering a frame where it's not @@ -69,7 +69,7 @@ class A: """ # Stored assignments for situations with tuple/list lvalue and rvalue of union type. # This maps an expression to a list of bound types for every item in the union type. - type_assignments = None # type: Optional[Assigns] + type_assignments: Optional[Assigns] = None def __init__(self) -> None: # The stack of frames currently used. These map @@ -85,21 +85,21 @@ def __init__(self) -> None: # the end of the frame or by a loop control construct # or raised exception. The last element of self.frames # has no corresponding element in this list. - self.options_on_return = [] # type: List[List[Frame]] + self.options_on_return: List[List[Frame]] = [] # Maps literal_hash(expr) to get_declaration(expr) # for every expr stored in the binder - self.declarations = {} # type: Dict[Key, Optional[Type]] + self.declarations: Dict[Key, Optional[Type]] = {} # Set of other keys to invalidate if a key is changed, e.g. x -> {x.a, x[0]} # Whenever a new key (e.g. x.a.b) is added, we update this - self.dependencies = {} # type: Dict[Key, Set[Key]] + self.dependencies: Dict[Key, Set[Key]] = {} # Whether the last pop changed the newly top frame on exit self.last_pop_changed = False - self.try_frames = set() # type: Set[int] - self.break_frames = [] # type: List[int] - self.continue_frames = [] # type: List[int] + self.try_frames: Set[int] = set() + self.break_frames: List[int] = [] + self.continue_frames: List[int] = [] def _add_dependencies(self, key: Key, value: Optional[Key] = None) -> None: if value is None: diff --git a/mypy/build.py b/mypy/build.py index 14bf1e68a51e..bafdf6b86ba3 100644 --- a/mypy/build.py +++ b/mypy/build.py @@ -66,7 +66,7 @@ # mode only that is useful during development. This produces only a subset of # output compared to --verbose output. We use a global flag to enable this so # that it's easy to enable this when running tests. -DEBUG_FINE_GRAINED = False # type: Final +DEBUG_FINE_GRAINED: Final = False # These modules are special and should always come from typeshed. 
CORE_BUILTIN_MODULES = { @@ -102,7 +102,7 @@ def __init__(self, manager: 'BuildManager', graph: Graph) -> None: self.files = manager.modules self.types = manager.all_types # Non-empty if export_types True in options self.used_cache = manager.cache_enabled - self.errors = [] # type: List[str] # Filled in by build if desired + self.errors: List[str] = [] # Filled in by build if desired class BuildSourceSet: @@ -110,8 +110,8 @@ class BuildSourceSet: def __init__(self, sources: List[BuildSource]) -> None: self.source_text_present = False - self.source_modules = set() # type: Set[str] - self.source_paths = set() # type: Set[str] + self.source_modules: Set[str] = set() + self.source_paths: Set[str] = set() for source in sources: if source.text is not None: @@ -329,7 +329,7 @@ def cache_meta_from_dict(meta: Dict[str, Any], data_json: str) -> CacheMeta: meta: JSON metadata read from the metadata cache file data_json: Path to the .data.json file containing the AST trees """ - sentinel = None # type: Any # Values to be validated by the caller + sentinel: Any = None # Values to be validated by the caller return CacheMeta( meta.get('id', sentinel), meta.get('path', sentinel), @@ -353,12 +353,12 @@ def cache_meta_from_dict(meta: Dict[str, Any], data_json: str) -> CacheMeta: # Priorities used for imports. (Here, top-level includes inside a class.) # These are used to determine a more predictable order in which the # nodes in an import cycle are processed. -PRI_HIGH = 5 # type: Final # top-level "from X import blah" -PRI_MED = 10 # type: Final # top-level "import X" -PRI_LOW = 20 # type: Final # either form inside a function -PRI_MYPY = 25 # type: Final # inside "if MYPY" or "if TYPE_CHECKING" -PRI_INDIRECT = 30 # type: Final # an indirect dependency -PRI_ALL = 99 # type: Final # include all priorities +PRI_HIGH: Final = 5 # top-level "from X import blah" +PRI_MED: Final = 10 # top-level "import X" +PRI_LOW: Final = 20 # either form inside a function +PRI_MYPY: Final = 25 # inside "if MYPY" or "if TYPE_CHECKING" +PRI_INDIRECT: Final = 30 # an indirect dependency +PRI_ALL: Final = 99 # include all priorities def import_priority(imp: ImportBase, toplevel_priority: int) -> int: @@ -383,7 +383,8 @@ def load_plugins_from_config( plugins (for cache validation). 
""" import importlib - snapshot = {} # type: Dict[str, str] + + snapshot: Dict[str, str] = {} if not options.config_file: return [], snapshot @@ -396,11 +397,11 @@ def plugin_error(message: str) -> None: errors.report(line, 0, message) errors.raise_error(use_stdout=False) - custom_plugins = [] # type: List[Plugin] + custom_plugins: List[Plugin] = [] errors.set_file(options.config_file, None) for plugin_path in options.plugins: func_name = 'plugin' - plugin_dir = None # type: Optional[str] + plugin_dir: Optional[str] = None if ':' in os.path.basename(plugin_path): plugin_path, func_name = plugin_path.rsplit(':', 1) if plugin_path.endswith('.py'): @@ -476,7 +477,7 @@ def load_plugins(options: Options, custom_plugins += extra_plugins - default_plugin = DefaultPlugin(options) # type: Plugin + default_plugin: Plugin = DefaultPlugin(options) if not custom_plugins: return default_plugin, snapshot @@ -581,7 +582,7 @@ def __init__(self, data_dir: str, stdout: TextIO, stderr: TextIO, ) -> None: - self.stats = {} # type: Dict[str, Any] # Values are ints or floats + self.stats: Dict[str, Any] = {} # Values are ints or floats self.stdout = stdout self.stderr = stderr self.start_time = time.time() @@ -593,31 +594,31 @@ def __init__(self, data_dir: str, self.reports = reports self.options = options self.version_id = version_id - self.modules = {} # type: Dict[str, MypyFile] - self.missing_modules = set() # type: Set[str] - self.fg_deps_meta = {} # type: Dict[str, FgDepMeta] + self.modules: Dict[str, MypyFile] = {} + self.missing_modules: Set[str] = set() + self.fg_deps_meta: Dict[str, FgDepMeta] = {} # fg_deps holds the dependencies of every module that has been # processed. We store this in BuildManager so that we can compute # dependencies as we go, which allows us to free ASTs and type information, # saving a ton of memory on net. - self.fg_deps = {} # type: Dict[str, Set[str]] + self.fg_deps: Dict[str, Set[str]] = {} # Always convert the plugin to a ChainedPlugin so that it can be manipulated if needed if not isinstance(plugin, ChainedPlugin): plugin = ChainedPlugin(options, [plugin]) self.plugin = plugin # Set of namespaces (module or class) that are being populated during semantic # analysis and may have missing definitions. 
- self.incomplete_namespaces = set() # type: Set[str] + self.incomplete_namespaces: Set[str] = set() self.semantic_analyzer = SemanticAnalyzer( self.modules, self.missing_modules, self.incomplete_namespaces, self.errors, self.plugin) - self.all_types = {} # type: Dict[Expression, Type] # Enabled by export_types + self.all_types: Dict[Expression, Type] = {} # Enabled by export_types self.indirection_detector = TypeIndirectionVisitor() - self.stale_modules = set() # type: Set[str] - self.rechecked_modules = set() # type: Set[str] + self.stale_modules: Set[str] = set() + self.rechecked_modules: Set[str] = set() self.flush_errors = flush_errors has_reporters = reports is not None and reports.reporters self.cache_enabled = (options.incremental @@ -630,13 +631,13 @@ def __init__(self, data_dir: str, # a mapping from source files to their corresponding shadow files # for efficient lookup - self.shadow_map = {} # type: Dict[str, str] + self.shadow_map: Dict[str, str] = {} if self.options.shadow_file is not None: self.shadow_map = {source_file: shadow_file for (source_file, shadow_file) in self.options.shadow_file} # a mapping from each file being typechecked to its possible shadow file - self.shadow_equivalence_map = {} # type: Dict[str, Optional[str]] + self.shadow_equivalence_map: Dict[str, Optional[str]] = {} self.plugin = plugin self.plugins_snapshot = plugins_snapshot self.old_plugins_snapshot = read_plugins_snapshot(self) @@ -644,9 +645,9 @@ def __init__(self, data_dir: str, # Fine grained targets (module top levels and top level functions) processed by # the semantic analyzer, used only for testing. Currently used only by the new # semantic analyzer. - self.processed_targets = [] # type: List[str] + self.processed_targets: List[str] = [] # Missing stub packages encountered. - self.missing_stub_packages = set() # type: Set[str] + self.missing_stub_packages: Set[str] = set() # Cache for mypy ASTs that have completed semantic analysis # pass 1. When multiple files are added to the build in a # single daemon increment, only one of the files gets added # to the build while the others are passed and returned # until all the files have been added. This means that a # new file can be processed O(n**2) times. This cache # avoids most of this redundant work. - self.ast_cache = {} # type: Dict[str, Tuple[MypyFile, List[ErrorInfo]]] + self.ast_cache: Dict[str, Tuple[MypyFile, List[ErrorInfo]]] = {} def dump_stats(self) -> None: if self.options.dump_build_stats: @@ -727,7 +728,7 @@ def correct_rel_imp(imp: Union[ImportFrom, ImportAll]) -> str: return new_id - res = [] # type: List[Tuple[int, str, int]] + res: List[Tuple[int, str, int]] = [] for imp in file.imports: if not imp.is_unreachable: if isinstance(imp, Import): @@ -861,13 +862,13 @@ def deps_to_json(x: Dict[str, Set[str]]) -> str: # File for storing metadata about all the fine-grained dependency caches -DEPS_META_FILE = '@deps.meta.json' # type: Final +DEPS_META_FILE: Final = "@deps.meta.json" # File for storing fine-grained dependencies that didn't have a parent in the build -DEPS_ROOT_FILE = '@root.deps.json' # type: Final +DEPS_ROOT_FILE: Final = "@root.deps.json" # The name of the fake module used to store fine-grained dependencies that # have no other place to go.
-FAKE_ROOT_MODULE = '@root' # type: Final +FAKE_ROOT_MODULE: Final = "@root" def write_deps_cache(rdeps: Dict[str, Dict[str, Set[str]]], @@ -910,7 +911,7 @@ def write_deps_cache(rdeps: Dict[str, Dict[str, Set[str]]], else: fg_deps_meta[id] = {'path': deps_json, 'mtime': manager.getmtime(deps_json)} - meta_snapshot = {} # type: Dict[str, str] + meta_snapshot: Dict[str, str] = {} for id, st in graph.items(): # If we didn't parse a file (so it doesn't have a # source_hash), then it must be a module with a fresh cache, @@ -949,7 +950,7 @@ def invert_deps(deps: Dict[str, Set[str]], # Prepopulate the map for all the modules that have been processed, # so that we always generate files for processed modules (even if # there aren't any dependencies to them.) - rdeps = {id: {} for id, st in graph.items() if st.tree} # type: Dict[str, Dict[str, Set[str]]] + rdeps: Dict[str, Dict[str, Set[str]]] = {id: {} for id, st in graph.items() if st.tree} for trigger, targets in deps.items(): module = module_prefix(graph, trigger_to_target(trigger)) if not module or not graph[module].tree: @@ -990,7 +991,7 @@ def generate_deps_for_cache(manager: BuildManager, return rdeps -PLUGIN_SNAPSHOT_FILE = '@plugins_snapshot.json' # type: Final +PLUGIN_SNAPSHOT_FILE: Final = "@plugins_snapshot.json" def write_plugins_snapshot(manager: BuildManager) -> None: @@ -1018,11 +1019,11 @@ def read_plugins_snapshot(manager: BuildManager) -> Optional[Dict[str, str]]: def read_quickstart_file(options: Options, stdout: TextIO, ) -> Optional[Dict[str, Tuple[float, int, str]]]: - quickstart = None # type: Optional[Dict[str, Tuple[float, int, str]]] + quickstart: Optional[Dict[str, Tuple[float, int, str]]] = None if options.quickstart_file: # This is very "best effort". If the file is missing or malformed, # just ignore it. - raw_quickstart = {} # type: Dict[str, Any] + raw_quickstart: Dict[str, Any] = {} try: with open(options.quickstart_file, "r") as f: raw_quickstart = json.load(f) @@ -1150,7 +1151,7 @@ def exclude_from_backups(target_dir: str) -> None: def create_metastore(options: Options) -> MetadataStore: """Create the appropriate metadata store.""" if options.sqlite_cache: - mds = SqliteMetadataStore(_cache_dir_prefix(options)) # type: MetadataStore + mds: MetadataStore = SqliteMetadataStore(_cache_dir_prefix(options)) else: mds = FilesystemMetadataStore(_cache_dir_prefix(options)) return mds @@ -1724,40 +1725,40 @@ class State: case path is None. Otherwise source is None and path isn't. 
""" - manager = None # type: BuildManager - order_counter = 0 # type: ClassVar[int] - order = None # type: int # Order in which modules were encountered - id = None # type: str # Fully qualified module name - path = None # type: Optional[str] # Path to module source - abspath = None # type: Optional[str] # Absolute path to module source - xpath = None # type: str # Path or '' - source = None # type: Optional[str] # Module source code - source_hash = None # type: Optional[str] # Hash calculated based on the source code - meta_source_hash = None # type: Optional[str] # Hash of the source given in the meta, if any - meta = None # type: Optional[CacheMeta] - data = None # type: Optional[str] - tree = None # type: Optional[MypyFile] + manager: BuildManager + order_counter: ClassVar[int] = 0 + order: int # Order in which modules were encountered + id: str # Fully qualified module name + path: Optional[str] = None # Path to module source + abspath: Optional[str] = None # Absolute path to module source + xpath: str # Path or '' + source: Optional[str] = None # Module source code + source_hash: Optional[str] = None # Hash calculated based on the source code + meta_source_hash: Optional[str] = None # Hash of the source given in the meta, if any + meta: Optional[CacheMeta] = None + data: Optional[str] = None + tree: Optional[MypyFile] = None # We keep both a list and set of dependencies. A set because it makes it efficient to # prevent duplicates and the list because I am afraid of changing the order of # iteration over dependencies. # They should be managed with add_dependency and suppress_dependency. - dependencies = None # type: List[str] # Modules directly imported by the module - dependencies_set = None # type: Set[str] # The same but as a set for deduplication purposes - suppressed = None # type: List[str] # Suppressed/missing dependencies - suppressed_set = None # type: Set[str] # Suppressed/missing dependencies - priorities = None # type: Dict[str, int] + dependencies: List[str] # Modules directly imported by the module + dependencies_set: Set[str] # The same but as a set for deduplication purposes + suppressed: List[str] # Suppressed/missing dependencies + suppressed_set: Set[str] # Suppressed/missing dependencies + priorities: Dict[str, int] # Map each dependency to the line number where it is first imported - dep_line_map = None # type: Dict[str, int] + dep_line_map: Dict[str, int] # Parent package, its parent, etc. - ancestors = None # type: Optional[List[str]] + ancestors: Optional[List[str]] = None # List of (path, line number) tuples giving context for import - import_context = None # type: List[Tuple[str, int]] + import_context: List[Tuple[str, int]] # The State from which this module was imported, if any - caller_state = None # type: Optional[State] + caller_state: Optional["State"] = None # If caller_state is set, the line number in the caller where the import occurred caller_line = 0 @@ -1766,10 +1767,10 @@ class State: externally_same = True # Contains a hash of the public interface in incremental mode - interface_hash = "" # type: str + interface_hash: str = "" # Options, specialized for this file - options = None # type: Options + options: Options # Whether to ignore all errors ignore_all = False @@ -1779,11 +1780,11 @@ class State: # Errors reported before semantic analysis, to allow fine-grained # mode to keep reporting them. - early_errors = None # type: List[ErrorInfo] + early_errors: List[ErrorInfo] # Type checker used for checking this file. 
Use type_checker() for # access and to construct this on demand. - _type_checker = None # type: Optional[TypeChecker] + _type_checker: Optional[TypeChecker] = None fine_grained_deps_loaded = False @@ -2521,7 +2522,7 @@ def in_partial_package(id: str, manager: BuildManager) -> bool: while '.' in id: parent, _ = id.rsplit('.', 1) if parent in manager.modules: - parent_mod = manager.modules[parent] # type: Optional[MypyFile] + parent_mod: Optional[MypyFile] = manager.modules[parent] else: # Parent is not in build, try quickly if we can find it. try: @@ -2722,8 +2723,8 @@ class NodeInfo: def __init__(self, index: int, scc: List[str]) -> None: self.node_id = "n%d" % index self.scc = scc - self.sizes = {} # type: Dict[str, int] # mod -> size in bytes - self.deps = {} # type: Dict[str, int] # node_id -> pri + self.sizes: Dict[str, int] = {} # mod -> size in bytes + self.deps: Dict[str, int] = {} # node_id -> pri def dumps(self) -> str: """Convert to JSON string.""" @@ -2789,13 +2790,13 @@ def load_graph(sources: List[BuildSource], manager: BuildManager, there are syntax errors. """ - graph = old_graph if old_graph is not None else {} # type: Graph + graph: Graph = old_graph if old_graph is not None else {} # The deque is used to implement breadth-first traversal. # TODO: Consider whether to go depth-first instead. This may # affect the order in which we process files within import cycles. new = new_modules if new_modules is not None else [] - entry_points = set() # type: Set[str] + entry_points: Set[str] = set() # Seed the graph with the initial root sources. for bs in sources: try: @@ -2900,7 +2901,7 @@ def process_graph(graph: Graph, manager: BuildManager) -> None: manager.log("Found %d SCCs; largest has %d nodes" % (len(sccs), max(len(scc) for scc in sccs))) - fresh_scc_queue = [] # type: List[List[str]] + fresh_scc_queue: List[List[str]] = [] # We're processing SCCs from leaves (those without further # dependencies) to roots (those from which everything else can be @@ -3157,9 +3158,9 @@ def sorted_components(graph: Graph, sccs = list(strongly_connected_components(vertices, edges)) # Topsort. sccsmap = {id: frozenset(scc) for scc in sccs for id in scc} - data = {} # type: Dict[AbstractSet[str], Set[AbstractSet[str]]] + data: Dict[AbstractSet[str], Set[AbstractSet[str]]] = {} for scc in sccs: - deps = set() # type: Set[AbstractSet[str]] + deps: Set[AbstractSet[str]] = set() for id in scc: deps.update(sccsmap[x] for x in deps_filtered(graph, vertices, id, pri_max)) data[frozenset(scc)] = deps @@ -3204,10 +3205,10 @@ def strongly_connected_components(vertices: AbstractSet[str], From http://code.activestate.com/recipes/578507/. """ - identified = set() # type: Set[str] - stack = [] # type: List[str] - index = {} # type: Dict[str, int] - boundaries = [] # type: List[int] + identified: Set[str] = set() + stack: List[str] = [] + index: Dict[str, int] = {} + boundaries: List[int] = [] def dfs(v: str) -> Iterator[Set[str]]: index[v] = len(stack) diff --git a/mypy/checker.py b/mypy/checker.py index 9332e6dc60fe..81ca24900aa4 100644 --- a/mypy/checker.py +++ b/mypy/checker.py @@ -83,7 +83,7 @@ T = TypeVar('T') -DEFAULT_LAST_PASS = 1 # type: Final # Pass numbers start at 0 +DEFAULT_LAST_PASS: Final = 1 # Pass numbers start at 0 DeferredNodeType = Union[FuncDef, LambdaExpr, OverloadedFuncDef, Decorator] FineGrainedDeferredNodeType = Union[FuncDef, MypyFile, OverloadedFuncDef] @@ -154,33 +154,33 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface): # Are we type checking a stub? 
is_stub = False # Error message reporter - errors = None # type: Errors + errors: Errors # Utility for generating messages - msg = None # type: MessageBuilder + msg: MessageBuilder # Types of type checked nodes - type_map = None # type: Dict[Expression, Type] + type_map: Dict[Expression, Type] # Helper for managing conditional types - binder = None # type: ConditionalTypeBinder + binder: ConditionalTypeBinder # Helper for type checking expressions - expr_checker = None # type: mypy.checkexpr.ExpressionChecker + expr_checker: mypy.checkexpr.ExpressionChecker - tscope = None # type: Scope - scope = None # type: CheckerScope + tscope: Scope + scope: "CheckerScope" # Stack of function return types - return_types = None # type: List[Type] + return_types: List[Type] # Flags; true for dynamically typed functions - dynamic_funcs = None # type: List[bool] + dynamic_funcs: List[bool] # Stack of collections of variables with partial types - partial_types = None # type: List[PartialTypeScope] + partial_types: List[PartialTypeScope] # Vars for which partial type errors are already reported # (to avoid logically duplicate errors with different error context). - partial_reported = None # type: Set[Var] - globals = None # type: SymbolTable - modules = None # type: Dict[str, MypyFile] + partial_reported: Set[Var] + globals: SymbolTable + modules: Dict[str, MypyFile] # Nodes that couldn't be checked because some types weren't available. We'll run # another pass and try these again. - deferred_nodes = None # type: List[DeferredNode] + deferred_nodes: List[DeferredNode] # Type checking pass number (0 = first pass) pass_num = 0 # Last pass number to take @@ -192,20 +192,20 @@ class TypeChecker(NodeVisitor[None], CheckerPluginInterface): is_typeshed_stub = False # Should strict Optional-related errors be suppressed in this file? suppress_none_errors = False # TODO: Get it from options instead - options = None # type: Options + options: Options # Used for collecting inferred attribute types so that they can be checked # for consistency. - inferred_attribute_types = None # type: Optional[Dict[Var, Type]] + inferred_attribute_types: Optional[Dict[Var, Type]] = None # Don't infer partial None types if we are processing assignment from Union - no_partial_types = False # type: bool + no_partial_types: bool = False # The set of all dependencies (suppressed or not) that this module accesses, either # directly or indirectly. - module_refs = None # type: Set[str] + module_refs: Set[str] # Plugin that provides special type checking rules for specific library # functions such as open(), etc. 
- plugin = None # type: Plugin + plugin: Plugin def __init__(self, errors: Errors, modules: Dict[str, MypyFile], options: Options, tree: MypyFile, path: str, plugin: Plugin) -> None: @@ -332,7 +332,7 @@ def check_second_pass(self, else: assert not self.deferred_nodes self.deferred_nodes = [] - done = set() # type: Set[Union[DeferredNodeType, FineGrainedDeferredNodeType]] + done: Set[Union[DeferredNodeType, FineGrainedDeferredNodeType]] = set() for node, active_typeinfo in todo: if node in done: continue @@ -467,10 +467,10 @@ def check_overlapping_overloads(self, defn: OverloadedFuncDef) -> None: # items are decorators # Compute some info about the implementation (if it exists) for use below - impl_type = None # type: Optional[CallableType] + impl_type: Optional[CallableType] = None if defn.impl: if isinstance(defn.impl, FuncDef): - inner_type = defn.impl.type # type: Optional[Type] + inner_type: Optional[Type] = defn.impl.type elif isinstance(defn.impl, Decorator): inner_type = defn.impl.var.type else: @@ -905,7 +905,7 @@ def check_func_def(self, defn: FuncItem, typ: CallableType, name: Optional[str]) with self.scope.push_function(defn): # We temporary push the definition to get the self type as # visible from *inside* of this function/method. - ref_type = self.scope.active_self_type() # type: Optional[Type] + ref_type: Optional[Type] = self.scope.active_self_type() if (isinstance(defn, FuncDef) and ref_type is not None and i == 0 and not defn.is_static and typ.arg_kinds[0] not in [nodes.ARG_STAR, nodes.ARG_STAR2]): @@ -944,7 +944,7 @@ def check_func_def(self, defn: FuncItem, typ: CallableType, name: Optional[str]) arg_type.variance == COVARIANT and defn.name not in ('__init__', '__new__') ): - ctx = arg_type # type: Context + ctx: Context = arg_type if ctx.line < 0: ctx = typ self.fail(message_registry.FUNCTION_PARAMETER_CANNOT_BE_COVARIANT, ctx) @@ -1389,7 +1389,7 @@ def check_setattr_method(self, typ: Type, context: Context) -> None: def expand_typevars(self, defn: FuncItem, typ: CallableType) -> List[Tuple[FuncItem, CallableType]]: # TODO use generator - subst = [] # type: List[List[Tuple[TypeVarId, Type]]] + subst: List[List[Tuple[TypeVarId, Type]]] = [] tvars = list(typ.variables) or [] if defn.info: # Class type variables @@ -1402,7 +1402,7 @@ def expand_typevars(self, defn: FuncItem, # value restricted type variables. (Except when running mypyc, # where we need one canonical version of the function.) if subst and not self.options.mypyc: - result = [] # type: List[Tuple[FuncItem, CallableType]] + result: List[Tuple[FuncItem, CallableType]] = [] for substitutions in itertools.product(*subst): mapping = dict(substitutions) expanded = cast(CallableType, expand_type(typ, mapping)) @@ -1479,7 +1479,7 @@ def check_method_override_for_base_with_name( # Construct the type of the overriding method. if isinstance(defn, (FuncDef, OverloadedFuncDef)): - typ = self.function_type(defn) # type: Type + typ: Type = self.function_type(defn) override_class_or_static = defn.is_class or defn.is_static override_class = defn.is_class else: @@ -1735,7 +1735,7 @@ def visit_class_def(self, defn: ClassDef) -> None: self.check_final_deletable(typ) if defn.decorators: - sig = type_object_type(defn.info, self.named_type) # type: Type + sig: Type = type_object_type(defn.info, self.named_type) # Decorators are applied in reverse order. 
for decorator in reversed(defn.decorators): if (isinstance(decorator, CallExpr) @@ -1796,7 +1796,7 @@ def check_init_subclass(self, defn: ClassDef) -> None: name_expr.node = base callee = MemberExpr(name_expr, '__init_subclass__') args = list(defn.keywords.values()) - arg_names = list(defn.keywords.keys()) # type: List[Optional[str]] + arg_names: List[Optional[str]] = list(defn.keywords.keys()) # 'metaclass' keyword is consumed by the rest of the type machinery, # and is never passed to __init_subclass__ implementations if 'metaclass' in arg_names: @@ -1827,10 +1827,14 @@ def check_protocol_variance(self, defn: ClassDef) -> None: object_type = Instance(info.mro[-1], []) tvars = info.defn.type_vars for i, tvar in enumerate(tvars): - up_args = [object_type if i == j else AnyType(TypeOfAny.special_form) - for j, _ in enumerate(tvars)] # type: List[Type] - down_args = [UninhabitedType() if i == j else AnyType(TypeOfAny.special_form) - for j, _ in enumerate(tvars)] # type: List[Type] + up_args: List[Type] = [ + object_type if i == j else AnyType(TypeOfAny.special_form) + for j, _ in enumerate(tvars) + ] + down_args: List[Type] = [ + UninhabitedType() if i == j else AnyType(TypeOfAny.special_form) + for j, _ in enumerate(tvars) + ] up, down = Instance(info, up_args), Instance(info, down_args) # TODO: add advanced variance checks for recursive protocols if is_subtype(down, up, ignore_declared_variance=True): @@ -2162,10 +2166,10 @@ def check_assignment(self, lvalue: Lvalue, rvalue: Expression, infer_lvalue_type self.infer_variable_type(inferred, lvalue, rvalue_type, rvalue) # (type, operator) tuples for augmented assignments supported with partial types - partial_type_augmented_ops = { + partial_type_augmented_ops: Final = { ('builtins.list', '+'), ('builtins.set', '|'), - } # type: Final + } def try_infer_partial_generic_type_from_assignment(self, lvalue: Lvalue, @@ -2497,9 +2501,9 @@ def check_assignment_to_multiple_lvalues(self, lvalues: List[Lvalue], rvalue: Ex # using the type of rhs, because this allowed more fine grained # control in cases like: a, b = [int, str] where rhs would get # type List[object] - rvalues = [] # type: List[Expression] - iterable_type = None # type: Optional[Type] - last_idx = None # type: Optional[int] + rvalues: List[Expression] = [] + iterable_type: Optional[Type] = None + last_idx: Optional[int] = None for idx_rval, rval in enumerate(rvalue.items): if isinstance(rval, StarExpr): typs = get_proper_type(self.expr_checker.visit_star_expr(rval).type) @@ -2521,8 +2525,8 @@ def check_assignment_to_multiple_lvalues(self, lvalues: List[Lvalue], rvalue: Ex context) else: rvalues.append(rval) - iterable_start = None # type: Optional[int] - iterable_end = None # type: Optional[int] + iterable_start: Optional[int] = None + iterable_end: Optional[int] = None for i, rval in enumerate(rvalues): if isinstance(rval, StarExpr): typs = get_proper_type(self.expr_checker.visit_star_expr(rval).type) @@ -2629,8 +2633,7 @@ def check_multi_assignment_from_union(self, lvalues: List[Expression], rvalue: E for binder. """ self.no_partial_types = True - transposed = tuple([] for _ in - self.flatten_lvalues(lvalues)) # type: Tuple[List[Type], ...] + transposed: Tuple[List[Type], ...] = tuple([] for _ in self.flatten_lvalues(lvalues)) # Notify binder that we want to defer bindings and instead collect types. 
with self.binder.accumulate_type_assignments() as assignments: for item in rvalue_type.items: @@ -2649,7 +2652,7 @@ def check_multi_assignment_from_union(self, lvalues: List[Expression], rvalue: E # TODO: See todo in binder.py, ConditionalTypeBinder.assign_type # It's unclear why the 'declared_type' param is sometimes 'None' - clean_items = [] # type: List[Tuple[Type, Type]] + clean_items: List[Tuple[Type, Type]] = [] for type, declared_type in items: assert declared_type is not None clean_items.append((type, declared_type)) @@ -2670,7 +2673,7 @@ def check_multi_assignment_from_union(self, lvalues: List[Expression], rvalue: E self.no_partial_types = False def flatten_lvalues(self, lvalues: List[Expression]) -> List[Expression]: - res = [] # type: List[Expression] + res: List[Expression] = [] for lv in lvalues: if isinstance(lv, (TupleExpr, ListExpr)): res.extend(self.flatten_lvalues(lv.items)) @@ -2741,7 +2744,7 @@ def lvalue_type_for_inference(self, lvalues: List[Lvalue], rvalue_type: TupleTyp left_rv_types, star_rv_types, right_rv_types = self.split_around_star( rvalue_type.items, star_index, len(lvalues)) - type_parameters = [] # type: List[Type] + type_parameters: List[Type] = [] def append_types_for_inference(lvs: List[Expression], rv_types: List[Type]) -> None: for lv, rv_type in zip(lvs, rv_types): @@ -3122,13 +3125,13 @@ def check_indexed_assignment(self, lvalue: IndexExpr, assert isinstance(upper_bound_type, TypedDictType) typed_dict_type = upper_bound_type item_type = self.expr_checker.visit_typeddict_index_expr(typed_dict_type, lvalue.index) - method_type = CallableType( + method_type: Type = CallableType( arg_types=[self.named_type('builtins.str'), item_type], arg_kinds=[ARG_POS, ARG_POS], arg_names=[None, None], ret_type=NoneType(), fallback=self.named_type('builtins.function') - ) # type: Type + ) else: method_type = self.expr_checker.analyze_external_member_access( '__setitem__', basetype, context) @@ -3449,7 +3452,7 @@ def check_except_handler_test(self, n: Expression) -> Type: """Type check an exception handler test clause.""" typ = self.expr_checker.accept(n) - all_types = [] # type: List[Type] + all_types: List[Type] = [] test_types = self.get_types_from_except_handler(typ, n) for ttype in get_proper_types(test_types): @@ -3522,7 +3525,7 @@ def analyze_iterable_item_type(self, expr: Expression) -> Tuple[Type, Type]: iterator = echk.check_method_call_by_name('__iter__', iterable, [], [], expr)[0] if isinstance(iterable, TupleType): - joined = UninhabitedType() # type: Type + joined: Type = UninhabitedType() for item in iterable.items: joined = join_types(joined, item) return iterator, joined @@ -3541,7 +3544,7 @@ def analyze_container_item_type(self, typ: Type) -> Optional[Type]: """ typ = get_proper_type(typ) if isinstance(typ, UnionType): - types = [] # type: List[Type] + types: List[Type] = [] for item in typ.items: c_type = self.analyze_container_item_type(item) if c_type: @@ -3592,7 +3595,7 @@ def visit_decorator(self, e: Decorator) -> None: # Process decorators from the inside out to determine decorated signature, which # may be different from the declared signature. 
- sig = self.function_type(e.func) # type: Type + sig: Type = self.function_type(e.func) for d in reversed(e.decorators): if refers_to_fullname(d, 'typing.overload'): self.fail(message_registry.MULTIPLE_OVERLOADS_REQUIRED, e) @@ -3604,7 +3607,7 @@ def visit_decorator(self, e: Decorator) -> None: fullname = d.fullname # if this is a expression like @b.a where b is an object, get the type of b # so we can pass it the method hook in the plugins - object_type = None # type: Optional[Type] + object_type: Optional[Type] = None if fullname is None and isinstance(d, MemberExpr) and d.expr in self.type_map: object_type = self.type_map[d.expr] fullname = self.expr_checker.method_fullname(object_type, d.name) @@ -4000,9 +4003,9 @@ def is_type_call(expr: CallExpr) -> bool: return (refers_to_fullname(expr.callee, 'builtins.type') and len(expr.args) == 1) # exprs that are being passed into type - exprs_in_type_calls = [] # type: List[Expression] + exprs_in_type_calls: List[Expression] = [] # type that is being compared to type(expr) - type_being_compared = None # type: Optional[List[TypeRange]] + type_being_compared: Optional[List[TypeRange]] = None # whether the type being compared to is final is_final = False @@ -4031,8 +4034,8 @@ def is_type_call(expr: CallExpr) -> bool: if not exprs_in_type_calls: return {}, {} - if_maps = [] # type: List[TypeMap] - else_maps = [] # type: List[TypeMap] + if_maps: List[TypeMap] = [] + else_maps: List[TypeMap] = [] for expr in exprs_in_type_calls: current_if_map, current_else_map = self.conditional_type_map_with_intersection( expr, @@ -4184,7 +4187,7 @@ def find_isinstance_check_helper(self, node: Expression) -> Tuple[TypeMap, TypeM # Set to 'false' only if the user defines custom __eq__ or __ne__ methods # that could cause identity-based narrowing to produce invalid results. if operator in {'is', 'is not'}: - is_valid_target = is_singleton_type # type: Callable[[Type], bool] + is_valid_target: Callable[[Type], bool] = is_singleton_type coerce_only_in_literal_context = False should_narrow_by_identity = True else: @@ -4201,8 +4204,8 @@ def has_no_custom_eq_checks(t: Type) -> bool: expr_types = [operand_types[i] for i in expr_indices] should_narrow_by_identity = all(map(has_no_custom_eq_checks, expr_types)) - if_map = {} # type: TypeMap - else_map = {} # type: TypeMap + if_map: TypeMap = {} + else_map: TypeMap = {} if should_narrow_by_identity: if_map, else_map = self.refine_identity_comparison_expression( operands, @@ -4268,9 +4271,9 @@ def has_no_custom_eq_checks(t: Type) -> bool: # Restrict the type of the variable to True-ish/False-ish in the if and else branches # respectively vartype = type_map[node] - if_type = true_only(vartype) # type: Type - else_type = false_only(vartype) # type: Type - ref = node # type: Expression + if_type: Type = true_only(vartype) + else_type: Type = false_only(vartype) + ref: Expression = node if_map = ({ref: if_type} if not isinstance(get_proper_type(if_type), UninhabitedType) else None) else_map = ({ref: else_type} if not isinstance(get_proper_type(else_type), @@ -4358,7 +4361,7 @@ def refine_parent_types(self, For more details about what a 'lookup operation' is and how we use the expr_type to refine the parent types of lookup_expr, see the docstring in 'propagate_up_typemap_info'. """ - output = {} # type: Dict[Expression, Type] + output: Dict[Expression, Type] = {} # Note: parent_expr and parent_type are progressively refined as we crawl up the # parent lookup chain. 
@@ -4506,7 +4509,7 @@ def refine_identity_comparison_expression(self, if coerce_only_in_literal_context: should_coerce = any(is_literal_type_like(operand_types[i]) for i in chain_indices) - target = None # type: Optional[Type] + target: Optional[Type] = None possible_target_indices = [] for i in chain_indices: expr_type = operand_types[i] @@ -4651,9 +4654,9 @@ def check_subtype(self, return False if self.should_suppress_optional_error([subtype]): return False - extra_info = [] # type: List[str] + extra_info: List[str] = [] note_msg = '' - notes = [] # type: List[str] + notes: List[str] = [] if subtype_label is not None or supertype_label is not None: subtype_str, supertype_str = format_type_distinctly(subtype, supertype) if subtype_label is not None: @@ -5020,8 +5023,8 @@ def conditional_type_map_with_intersection(self, # For some reason, doing "yes_map, no_map = conditional_type_maps(...)" # doesn't work: mypyc will decide that 'yes_map' is of type None if we try. initial_maps = conditional_type_map(expr, expr_type, type_ranges) - yes_map = initial_maps[0] # type: TypeMap - no_map = initial_maps[1] # type: TypeMap + yes_map: TypeMap = initial_maps[0] + no_map: TypeMap = initial_maps[1] if yes_map is not None or type_ranges is None: return yes_map, no_map @@ -5252,7 +5255,7 @@ def or_conditional_maps(m1: TypeMap, m2: TypeMap) -> TypeMap: # expressions whose type is refined by both conditions. (We do not # learn anything about expressions whose type is refined by only # one condition.) - result = {} # type: Dict[Expression, Type] + result: Dict[Expression, Type] = {} for n1 in m1: for n2 in m2: if literal_hash(n1) == literal_hash(n2): @@ -5299,7 +5302,7 @@ def reduce_conditional_maps(type_maps: List[Tuple[TypeMap, TypeMap]], def convert_to_typetype(type_map: TypeMap) -> TypeMap: - converted_type_map = {} # type: Dict[Expression, Type] + converted_type_map: Dict[Expression, Type] = {} if type_map is None: return None for expr, typ in type_map.items(): @@ -5336,7 +5339,7 @@ def flatten_types(t: Type) -> List[Type]: def get_isinstance_type(expr: Expression, type_map: Dict[Expression, Type]) -> Optional[List[TypeRange]]: all_types = get_proper_types(flatten_types(type_map[expr])) - types = [] # type: List[TypeRange] + types: List[TypeRange] = [] for typ in all_types: if isinstance(typ, FunctionLike) and typ.is_type_obj(): # Type variables may be present -- erase them, which is the best @@ -5446,7 +5449,7 @@ def detach_callable(typ: CallableType) -> CallableType: from a class or not.""" type_list = typ.arg_types + [typ.ret_type] - appear_map = {} # type: Dict[str, List[int]] + appear_map: Dict[str, List[int]] = {} for i, inner_type in enumerate(type_list): typevars_available = get_type_vars(inner_type) for var in typevars_available: @@ -5600,7 +5603,7 @@ def is_node_static(node: Optional[Node]) -> Optional[bool]: class CheckerScope: # We keep two stacks combined, to maintain the relative order - stack = None # type: List[Union[TypeInfo, FuncItem, MypyFile]] + stack: List[Union[TypeInfo, FuncItem, MypyFile]] def __init__(self, module: MypyFile) -> None: self.stack = [module] @@ -5696,15 +5699,15 @@ class DisjointDict(Generic[TKey, TValue]): """ def __init__(self) -> None: # Each key maps to a unique ID - self._key_to_id = {} # type: Dict[TKey, int] + self._key_to_id: Dict[TKey, int] = {} # Each id points to the parent id, forming a forest of upwards-pointing trees. If the # current id already is the root, it points to itself. 
We gradually flatten these trees # as we perform root lookups: eventually all nodes point directly to its root. - self._id_to_parent_id = {} # type: Dict[int, int] + self._id_to_parent_id: Dict[int, int] = {} # Each root id in turn maps to the set of values. - self._root_id_to_values = {} # type: Dict[int, Set[TValue]] + self._root_id_to_values: Dict[int, Set[TValue]] = {} def add_mapping(self, keys: Set[TKey], values: Set[TValue]) -> None: """Adds a 'Set[TKey] -> Set[TValue]' mapping. If there already exists a mapping @@ -5728,7 +5731,7 @@ def add_mapping(self, keys: Set[TKey], values: Set[TValue]) -> None: def items(self) -> List[Tuple[Set[TKey], Set[TValue]]]: """Returns all disjoint mappings in key-value pairs.""" - root_id_to_keys = {} # type: Dict[int, Set[TKey]] + root_id_to_keys: Dict[int, Set[TKey]] = {} for key in self._key_to_id: root_id = self._lookup_root_id(key) if root_id not in root_id_to_keys: @@ -5809,14 +5812,12 @@ def group_comparison_operands(pairwise_comparisons: Iterable[Tuple[str, Expressi This function is currently only used to assist with type-narrowing refinements and is extracted out to a helper function so we can unit test it. """ - groups = { - op: DisjointDict() for op in operators_to_group - } # type: Dict[str, DisjointDict[Key, int]] - - simplified_operator_list = [] # type: List[Tuple[str, List[int]]] - last_operator = None # type: Optional[str] - current_indices = set() # type: Set[int] - current_hashes = set() # type: Set[Key] + groups: Dict[str, DisjointDict[Key, int]] = {op: DisjointDict() for op in operators_to_group} + + simplified_operator_list: List[Tuple[str, List[int]]] = [] + last_operator: Optional[str] = None + current_indices: Set[int] = set() + current_hashes: Set[Key] = set() for i, (operator, left_expr, right_expr) in enumerate(pairwise_comparisons): if last_operator is None: last_operator = operator diff --git a/mypy/checkexpr.py b/mypy/checkexpr.py index 6b37f3afd293..46ac5c3e35a0 100644 --- a/mypy/checkexpr.py +++ b/mypy/checkexpr.py @@ -88,13 +88,17 @@ # may cause performance issues. The reason is that although union math algorithm we use # nicely captures most corner cases, its worst case complexity is exponential, # see https://github.com/python/mypy/pull/5255#discussion_r196896335 for discussion. -MAX_UNIONS = 5 # type: Final +MAX_UNIONS: Final = 5 # Types considered safe for comparisons with --strict-equality due to known behaviour of __eq__. # NOTE: All these types are subtypes of AbstractSet. -OVERLAPPING_TYPES_WHITELIST = ['builtins.set', 'builtins.frozenset', - 'typing.KeysView', 'typing.ItemsView'] # type: Final +OVERLAPPING_TYPES_WHITELIST: Final = [ + "builtins.set", + "builtins.frozenset", + "typing.KeysView", + "typing.ItemsView", +] class TooManyUnions(Exception): @@ -108,7 +112,7 @@ def extract_refexpr_names(expr: RefExpr) -> Set[str]: Note that currently, the only two subclasses of RefExpr are NameExpr and MemberExpr.""" - output = set() # type: Set[str] + output: Set[str] = set() while isinstance(expr.node, MypyFile) or expr.fullname is not None: if isinstance(expr.node, MypyFile) and expr.fullname is not None: # If it's None, something's wrong (perhaps due to an @@ -146,14 +150,14 @@ class ExpressionChecker(ExpressionVisitor[Type]): """ # Some services are provided by a TypeChecker instance. - chk = None # type: mypy.checker.TypeChecker + chk: "mypy.checker.TypeChecker" # This is shared with TypeChecker, but stored also here for convenience. 
- msg = None # type: MessageBuilder + msg: MessageBuilder # Type context for type inference - type_context = None # type: List[Optional[Type]] + type_context: List[Optional[Type]] - strfrm_checker = None # type: StringFormatterChecker - plugin = None # type: Plugin + strfrm_checker: StringFormatterChecker + plugin: Plugin def __init__(self, chk: 'mypy.checker.TypeChecker', @@ -169,7 +173,7 @@ def __init__(self, # used by the union math in overloads. # TODO: refactor this to use a pattern similar to one in # multiassign_from_union, or maybe even combine the two? - self.type_overrides = {} # type: Dict[Expression, Type] + self.type_overrides: Dict[Expression, Type] = {} self.strfrm_checker = StringFormatterChecker(self, self.chk, self.msg) def visit_name_expr(self, e: NameExpr) -> Type: @@ -182,7 +186,7 @@ def visit_name_expr(self, e: NameExpr) -> Type: return self.narrow_type_from_binder(e, result) def analyze_ref_expr(self, e: RefExpr, lvalue: bool = False) -> Type: - result = None # type: Optional[Type] + result: Optional[Type] = None node = e.node if isinstance(e, NameExpr) and e.is_special_form: @@ -590,14 +594,16 @@ def get_partial_self_var(self, expr: MemberExpr) -> Optional[Var]: return None # Types and methods that can be used to infer partial types. - item_args = {'builtins.list': ['append'], - 'builtins.set': ['add', 'discard'], - } # type: ClassVar[Dict[str, List[str]]] - container_args = {'builtins.list': {'extend': ['builtins.list']}, - 'builtins.dict': {'update': ['builtins.dict']}, - 'collections.OrderedDict': {'update': ['builtins.dict']}, - 'builtins.set': {'update': ['builtins.set', 'builtins.list']}, - } # type: ClassVar[Dict[str, Dict[str, List[str]]]] + item_args: ClassVar[Dict[str, List[str]]] = { + "builtins.list": ["append"], + "builtins.set": ["add", "discard"], + } + container_args: ClassVar[Dict[str, Dict[str, List[str]]]] = { + "builtins.list": {"extend": ["builtins.list"]}, + "builtins.dict": {"update": ["builtins.dict"]}, + "collections.OrderedDict": {"update": ["builtins.dict"]}, + "builtins.set": {"update": ["builtins.set", "builtins.list"]}, + } def try_infer_partial_type(self, e: CallExpr) -> None: """Try to make partial type precise from a call.""" @@ -711,10 +717,10 @@ def apply_function_plugin(self, Return the inferred return type. 
""" num_formals = len(callee.arg_types) - formal_arg_types = [[] for _ in range(num_formals)] # type: List[List[Type]] - formal_arg_exprs = [[] for _ in range(num_formals)] # type: List[List[Expression]] - formal_arg_names = [[] for _ in range(num_formals)] # type: List[List[Optional[str]]] - formal_arg_kinds = [[] for _ in range(num_formals)] # type: List[List[int]] + formal_arg_types: List[List[Type]] = [[] for _ in range(num_formals)] + formal_arg_exprs: List[List[Expression]] = [[] for _ in range(num_formals)] + formal_arg_names: List[List[Optional[str]]] = [[] for _ in range(num_formals)] + formal_arg_kinds: List[List[int]] = [[] for _ in range(num_formals)] for formal, actuals in enumerate(formal_to_actual): for actual in actuals: formal_arg_types[formal].append(arg_types[actual]) @@ -756,7 +762,7 @@ def apply_signature_hook( arg_kinds, arg_names, callee.arg_kinds, callee.arg_names, lambda i: self.accept(args[i])) - formal_arg_exprs = [[] for _ in range(num_formals)] # type: List[List[Expression]] + formal_arg_exprs: List[List[Expression]] = [[] for _ in range(num_formals)] for formal, actuals in enumerate(formal_to_actual): for actual in actuals: formal_arg_exprs[formal].append(args[actual]) @@ -863,7 +869,7 @@ def check_call_expr_with_callee_type(self, def check_union_call_expr(self, e: CallExpr, object_type: UnionType, member: str) -> Type: """"Type check calling a member expression where the base type is a union.""" - res = [] # type: List[Type] + res: List[Type] = [] for typ in object_type.relevant_items(): # Member access errors are already reported when visiting the member expression. with self.msg.disable_errors(): @@ -1081,7 +1087,7 @@ def infer_arg_types_in_empty_context(self, args: List[Expression]) -> List[Type] In short, we basically recurse on each argument without considering in what context the argument was called. """ - res = [] # type: List[Type] + res: List[Type] = [] for arg in args: arg_type = self.accept(arg) @@ -1101,7 +1107,7 @@ def infer_arg_types_in_context( Returns the inferred types of *actual arguments*. """ - res = [None] * len(args) # type: List[Optional[Type]] + res: List[Optional[Type]] = [None] * len(args) for i, actuals in enumerate(formal_to_actual): for ai in actuals: @@ -1178,7 +1184,7 @@ def infer_function_type_arguments_using_context( return callable.copy_modified() args = infer_type_arguments(callable.type_var_ids(), ret_type, erased_ctx) # Only substitute non-Uninhabited and non-erased types. - new_args = [] # type: List[Optional[Type]] + new_args: List[Optional[Type]] = [] for arg in args: if has_uninhabited_component(arg) or has_erased_component(arg): new_args.append(None) @@ -1212,7 +1218,7 @@ def infer_function_type_arguments(self, callee_type: CallableType, arg_pass_nums = self.get_arg_infer_passes( callee_type.arg_types, formal_to_actual, len(args)) - pass1_args = [] # type: List[Optional[Type]] + pass1_args: List[Optional[Type]] = [] for i, arg in enumerate(arg_types): if arg_pass_nums[i] > 1: pass1_args.append(None) @@ -1349,7 +1355,7 @@ def check_argument_count(self, # TODO(jukka): We could return as soon as we find an error if messages is None. # Collect list of all actual arguments matched to formal arguments. 
- all_actuals = [] # type: List[int] + all_actuals: List[int] = [] for actuals in formal_to_actual: all_actuals.extend(actuals) @@ -1545,8 +1551,8 @@ def check_overload_call(self, # This is because picking the first overload often ends up being too greedy: # for example, when we have a fallback alternative that accepts an unrestricted # typevar. See https://github.com/python/mypy/issues/4063 for related discussion. - erased_targets = None # type: Optional[List[CallableType]] - unioned_result = None # type: Optional[Tuple[Type, Type]] + erased_targets: Optional[List[CallableType]] = None + unioned_result: Optional[Tuple[Type, Type]] = None union_interrupted = False # did we try all union combinations? if any(self.real_union(arg) for arg in arg_types): unioned_errors = arg_messages.clean_copy() @@ -1610,7 +1616,7 @@ def check_overload_call(self, # a note with whatever error message 'self.check_call' will generate. # In particular, the note's line and column numbers need to be the same # as the error's. - target = erased_targets[0] # type: Type + target: Type = erased_targets[0] else: # There was no plausible match: give up target = AnyType(TypeOfAny.from_error) @@ -1653,8 +1659,8 @@ def has_shape(typ: Type) -> bool: return (isinstance(typ, TupleType) or isinstance(typ, TypedDictType) or (isinstance(typ, Instance) and typ.type.is_named_tuple)) - matches = [] # type: List[CallableType] - star_matches = [] # type: List[CallableType] + matches: List[CallableType] = [] + star_matches: List[CallableType] = [] args_have_var_arg = False args_have_kw_arg = False @@ -1702,9 +1708,9 @@ def infer_overload_return_type(self, """ arg_messages = self.msg if arg_messages is None else arg_messages - matches = [] # type: List[CallableType] - return_types = [] # type: List[Type] - inferred_types = [] # type: List[Type] + matches: List[CallableType] = [] + return_types: List[Type] = [] + inferred_types: List[Type] = [] args_contain_any = any(map(has_any_type, arg_types)) for typ in plausible_targets: @@ -1774,7 +1780,7 @@ def overload_erased_call_targets(self, Assumes all of the given targets have argument counts compatible with the caller. """ - matches = [] # type: List[CallableType] + matches: List[CallableType] = [] for typ in plausible_targets: if self.erased_signature_similarity(arg_types, arg_kinds, arg_names, args, typ, context): @@ -1850,7 +1856,7 @@ def union_overload_result(self, return None # Step 5: If splitting succeeded, then filter out duplicate items before returning. - seen = set() # type: Set[Tuple[Type, Type]] + seen: Set[Tuple[Type, Type]] = set() result = [] for pair in res_items: if pair not in seen: @@ -1903,9 +1909,9 @@ def combine_function_signatures(self, types: Sequence[Type]) -> Union[AnyType, C # confusing and ought to be re-written anyways.) 
callables, variables = merge_typevars_in_callables_by_name(callables) - new_args = [[] for _ in range(len(callables[0].arg_types))] # type: List[List[Type]] + new_args: List[List[Type]] = [[] for _ in range(len(callables[0].arg_types))] new_kinds = list(callables[0].arg_kinds) - new_returns = [] # type: List[Type] + new_returns: List[Type] = [] too_complex = False for target in callables: @@ -2185,14 +2191,14 @@ def visit_comparison_expr(self, e: ComparisonExpr) -> Type: Comparison expressions are type checked consecutive-pair-wise That is, 'a < b > c == d' is check as 'a < b and b > c and c == d' """ - result = None # type: Optional[Type] - sub_result = None # type: Optional[Type] + result: Optional[Type] = None + sub_result: Optional[Type] = None # Check each consecutive operand pair and their operator for left, right, operator in zip(e.operands, e.operands[1:], e.operators): left_type = self.accept(left) - method_type = None # type: Optional[mypy.types.Type] + method_type: Optional[mypy.types.Type] = None if operator == 'in' or operator == 'not in': # If the right operand has partial type, look it up without triggering @@ -2410,8 +2416,8 @@ def check_union_method_call_by_name(self, union item and unions the result. We do this to allow plugins to act on individual union items. """ - res = [] # type: List[Type] - meth_res = [] # type: List[Type] + res: List[Type] = [] + meth_res: List[Type] = [] for typ in base_type.relevant_items(): # Format error messages consistently with # mypy.checkmember.analyze_union_member_access(). @@ -2864,7 +2870,7 @@ def visit_unary_expr(self, e: UnaryExpr) -> Type: operand_type = self.accept(e.expr) op = e.op if op == 'not': - result = self.bool_type() # type: Type + result: Type = self.bool_type() else: method = operators.unary_op_methods[op] result, method_type = self.check_method_call_by_name(method, operand_type, [], [], e) @@ -2942,9 +2948,9 @@ def visit_index_with_type(self, left_type: Type, e: IndexExpr, return result def visit_tuple_slice_helper(self, left_type: TupleType, slic: SliceExpr) -> Type: - begin = [None] # type: Sequence[Optional[int]] - end = [None] # type: Sequence[Optional[int]] - stride = [None] # type: Sequence[Optional[int]] + begin: Sequence[Optional[int]] = [None] + end: Sequence[Optional[int]] = [None] + stride: Sequence[Optional[int]] = [None] if slic.begin_index: begin_raw = self.try_getting_int_literals(slic.begin_index) @@ -2964,7 +2970,7 @@ def visit_tuple_slice_helper(self, left_type: TupleType, slic: SliceExpr) -> Typ return self.nonliteral_tuple_index_helper(left_type, slic) stride = stride_raw - items = [] # type: List[Type] + items: List[Type] = [] for b, e, s in itertools.product(begin, end, stride): items.append(left_type.slice(b, e, s)) return make_simplified_union(items) @@ -3024,7 +3030,7 @@ def visit_typeddict_index_expr(self, td_type: TypedDictType, index: Expression) else: typ = get_proper_type(self.accept(index)) if isinstance(typ, UnionType): - key_types = list(typ.items) # type: List[Type] + key_types: List[Type] = list(typ.items) else: key_types = [typ] @@ -3051,9 +3057,10 @@ def visit_typeddict_index_expr(self, td_type: TypedDictType, index: Expression) value_types.append(value_type) return make_simplified_union(value_types) - def visit_enum_index_expr(self, enum_type: TypeInfo, index: Expression, - context: Context) -> Type: - string_type = self.named_type('builtins.str') # type: Type + def visit_enum_index_expr( + self, enum_type: TypeInfo, index: Expression, context: Context + ) -> Type: + string_type: 
Type = self.named_type("builtins.str") if self.chk.options.python_version[0] < 3: string_type = UnionType.make_union([string_type, self.named_type('builtins.unicode')]) @@ -3243,7 +3250,7 @@ def fast_container_type( ctx = self.type_context[-1] if ctx: return None - values = [] # type: List[Type] + values: List[Type] = [] for item in items: if isinstance(item, StarExpr): # fallback to slow path @@ -3314,7 +3321,7 @@ def visit_tuple_expr(self, e: TupleExpr) -> Type: # Infer item types. Give up if there's a star expression # that's not a Tuple. - items = [] # type: List[Type] + items: List[Type] = [] j = 0 # Index into type_context_items; irrelevant if type_context_items is none for i in range(len(e.items)): item = e.items[i] @@ -3358,9 +3365,9 @@ def fast_dict_type(self, e: DictExpr) -> Optional[Type]: ctx = self.type_context[-1] if ctx: return None - keys = [] # type: List[Type] - values = [] # type: List[Type] - stargs = None # type: Optional[Tuple[Type, Type]] + keys: List[Type] = [] + values: List[Type] = [] + stargs: Optional[Tuple[Type, Type]] = None for key, value in e.items: if key is None: st = get_proper_type(self.accept(value)) @@ -3410,8 +3417,8 @@ def visit_dict_expr(self, e: DictExpr) -> Type: return dt # Collect function arguments, watching out for **expr. - args = [] # type: List[Expression] # Regular "key: value" - stargs = [] # type: List[Expression] # For "**expr" + args: List[Expression] = [] # Regular "key: value" + stargs: List[Expression] = [] # For "**expr" for key, value in e.items: if key is None: stargs.append(value) @@ -3645,13 +3652,13 @@ def _super_arg_types(self, e: SuperExpr) -> Union[Type, Tuple[Type, Type]]: # Zero-argument super() is like super(, ) current_type = fill_typevars(e.info) - type_type = TypeType(current_type) # type: ProperType + type_type: ProperType = TypeType(current_type) # Use the type of the self argument, in case it was annotated method = self.chk.scope.top_function() assert method is not None if method.arguments: - instance_type = method.arguments[0].variable.type or current_type # type: Type + instance_type: Type = method.arguments[0].variable.type or current_type else: self.chk.fail(message_registry.SUPER_ENCLOSING_POSITIONAL_ARGS_REQUIRED, e) return AnyType(TypeOfAny.from_error) @@ -3730,7 +3737,7 @@ def visit_generator_expr(self, e: GeneratorExpr) -> Type: if any(e.is_async): typ = 'typing.AsyncGenerator' # received type is always None in async generator expressions - additional_args = [NoneType()] # type: List[Type] + additional_args: List[Type] = [NoneType()] else: typ = 'typing.Generator' # received type and returned type are None @@ -3750,7 +3757,7 @@ def check_generator_or_comprehension(self, gen: GeneratorExpr, # Infer the type of the list comprehension by using a synthetic generic # callable type. tvdef = TypeVarDef('T', 'T', -1, [], self.object_type()) - tv_list = [TypeVarType(tvdef)] # type: List[Type] + tv_list: List[Type] = [TypeVarType(tvdef)] constructor = CallableType( tv_list, [nodes.ARG_POS], @@ -4058,7 +4065,7 @@ def visit_yield_from_expr(self, e: YieldFromExpr, allow_none_return: bool = Fals # Check that the expr is an instance of Iterable and get the type of the iterator produced # by __iter__. 
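A recurring subtlety in hunks like visit_unary_expr and visit_yield_from_expr above: when a variable is assigned on several paths, only the first assignment carries the annotation, and later assignments are left bare, since the name is already declared. A sketch with a hypothetical function:

    from typing import Union

    def parse_flag(raw: str) -> Union[bool, int]:
        if raw in ("yes", "no"):
            value: Union[bool, int] = (raw == "yes")  # annotated at first assignment
        else:
            value = int(raw)  # no annotation needed on later assignments
        return value
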
if isinstance(subexpr_type, AnyType): - iter_type = AnyType(TypeOfAny.from_another_any, source_any=subexpr_type) # type: Type + iter_type: Type = AnyType(TypeOfAny.from_another_any, source_any=subexpr_type) elif self.chk.type_is_iterable(subexpr_type): if is_async_def(subexpr_type) and not has_coroutine_decorator(return_type): self.chk.msg.yield_from_invalid_operand_type(subexpr_type, e) @@ -4449,9 +4456,9 @@ def merge_typevars_in_callables_by_name( Returns both the new list of callables and a list of all distinct TypeVarDef objects used. """ - output = [] # type: List[CallableType] - unique_typevars = {} # type: Dict[str, TypeVarType] - variables = [] # type: List[TypeVarDef] + output: List[CallableType] = [] + unique_typevars: Dict[str, TypeVarType] = {} + variables: List[TypeVarDef] = [] for target in callables: if target.is_generic(): diff --git a/mypy/checkmember.py b/mypy/checkmember.py index 1f8cceddab64..e0fa1b94dc89 100644 --- a/mypy/checkmember.py +++ b/mypy/checkmember.py @@ -522,7 +522,7 @@ def instance_alias_type(alias: TypeAlias, As usual, we first erase any unbound type variables to Any. """ - target = get_proper_type(alias.target) # type: Type + target: Type = get_proper_type(alias.target) assert isinstance(get_proper_type(target), Instance), "Must be called only with aliases to classes" target = get_proper_type(set_any_tvars(alias, alias.line, alias.column)) @@ -568,7 +568,7 @@ def analyze_var(name: str, if mx.is_lvalue and var.is_classvar: mx.msg.cant_assign_to_classvar(name, mx.context) t = get_proper_type(expand_type_by_instance(typ, itype)) - result = t # type: Type + result: Type = t typ = get_proper_type(typ) if ( var.is_initialized_in_class @@ -746,7 +746,7 @@ def analyze_class_attribute_access(itype: Instance, # Find the class where method/variable was defined. if isinstance(node.node, Decorator): - super_info = node.node.var.info # type: Optional[TypeInfo] + super_info: Optional[TypeInfo] = node.node.var.info elif isinstance(node.node, (Var, SYMBOL_FUNCBASE_TYPES)): super_info = node.node.info else: @@ -947,7 +947,7 @@ def type_object_type(info: TypeInfo, builtin_type: Callable[[str], Instance]) -> fallback = info.metaclass_type or builtin_type('builtins.type') if init_index < new_index: - method = init_method.node # type: Union[FuncBase, Decorator] + method: Union[FuncBase, Decorator] = init_method.node is_new = False elif init_index > new_index: method = new_method.node diff --git a/mypy/checkstrformat.py b/mypy/checkstrformat.py index b09ff496de4d..78628dc34ba0 100644 --- a/mypy/checkstrformat.py +++ b/mypy/checkstrformat.py @@ -92,27 +92,40 @@ def compile_new_format_re(custom_spec: bool) -> Pattern[str]: return re.compile(field + conversion + format_spec) -FORMAT_RE = compile_format_re() # type: Final -FORMAT_RE_NEW = compile_new_format_re(False) # type: Final -FORMAT_RE_NEW_CUSTOM = compile_new_format_re(True) # type: Final -DUMMY_FIELD_NAME = '__dummy_name__' # type: Final +FORMAT_RE: Final = compile_format_re() +FORMAT_RE_NEW: Final = compile_new_format_re(False) +FORMAT_RE_NEW_CUSTOM: Final = compile_new_format_re(True) +DUMMY_FIELD_NAME: Final = "__dummy_name__" # Format types supported by str.format() for builtin classes. -SUPPORTED_TYPES_NEW = {'b', 'c', 'd', 'e', 'E', 'f', 'F', - 'g', 'G', 'n', 'o', 's', 'x', 'X', '%'} # type: Final +SUPPORTED_TYPES_NEW: Final = { + "b", + "c", + "d", + "e", + "E", + "f", + "F", + "g", + "G", + "n", + "o", + "s", + "x", + "X", + "%", +} # Types that require either int or float. 
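The checkstrformat constants above switch from "NAME = value  # type: Final" to "NAME: Final = value". With a bare Final, mypy takes the type from the initializer, so no explicit type argument has to be written. A minimal sketch with illustrative constants:

    from typing_extensions import Final

    MAX_FIELDS: Final = 16               # type inferred from the int literal
    DEFAULT_SPEC: Final = "{}"           # inferred from the str literal
    ALLOWED_FLAGS: Final = {"-", "+"}    # inferred as a set of str
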
-NUMERIC_TYPES_OLD = {'d', 'i', 'o', 'u', 'x', 'X', - 'e', 'E', 'f', 'F', 'g', 'G'} # type: Final -NUMERIC_TYPES_NEW = {'b', 'd', 'o', 'e', 'E', 'f', 'F', - 'g', 'G', 'n', 'x', 'X', '%'} # type: Final +NUMERIC_TYPES_OLD: Final = {"d", "i", "o", "u", "x", "X", "e", "E", "f", "F", "g", "G"} +NUMERIC_TYPES_NEW: Final = {"b", "d", "o", "e", "E", "f", "F", "g", "G", "n", "x", "X", "%"} # These types accept _only_ int. -REQUIRE_INT_OLD = {'o', 'x', 'X'} # type: Final -REQUIRE_INT_NEW = {'b', 'd', 'o', 'x', 'X'} # type: Final +REQUIRE_INT_OLD: Final = {"o", "x", "X"} +REQUIRE_INT_NEW: Final = {"b", "d", "o", "x", "X"} # These types fall back to SupportsFloat with % (other fall back to SupportsInt) -FLOAT_TYPES = {'e', 'E', 'f', 'F', 'g', 'G'} # type: Final +FLOAT_TYPES: Final = {"e", "E", "f", "F", "g", "G"} class ConversionSpecifier: @@ -170,11 +183,11 @@ class StringFormatterChecker: """ # Some services are provided by a TypeChecker instance. - chk = None # type: mypy.checker.TypeChecker + chk: "mypy.checker.TypeChecker" # This is shared with TypeChecker, but stored also here for convenience. - msg = None # type: MessageBuilder + msg: MessageBuilder # Some services are provided by a ExpressionChecker instance. - exprchk = None # type: mypy.checkexpr.ExpressionChecker + exprchk: "mypy.checkexpr.ExpressionChecker" def __init__(self, exprchk: 'mypy.checkexpr.ExpressionChecker', @@ -227,7 +240,7 @@ def parse_format_value(self, format_value: str, ctx: Context, if top_targets is None: return None - result = [] # type: List[ConversionSpecifier] + result: List[ConversionSpecifier] = [] for target in top_targets: match = FORMAT_RE_NEW.fullmatch(target) if match: @@ -337,7 +350,7 @@ def check_specs_in_format_call(self, call: CallExpr, continue # Adjust expected and actual types. if not spec.type: - expected_type = AnyType(TypeOfAny.special_form) # type: Optional[Type] + expected_type: Optional[Type] = AnyType(TypeOfAny.special_form) else: assert isinstance(call.callee, MemberExpr) if isinstance(call.callee.expr, (StrExpr, UnicodeExpr)): @@ -403,8 +416,8 @@ def find_replacements_in_call(self, call: CallExpr, In case of an error use TempNode(AnyType). """ - result = [] # type: List[Expression] - used = set() # type: Set[Expression] + result: List[Expression] = [] + used: Set[Expression] = set() for key in keys: if key.isdecimal(): expr = self.get_expr_by_position(int(key), call) @@ -519,8 +532,9 @@ def apply_field_accessors(self, spec: ConversionSpecifier, repl: Expression, # This is a bit of a dirty trick, but it looks like this is the simplest way. 
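In StringFormatterChecker above, the class-level "chk = None  # type: mypy.checker.TypeChecker" becomes a bare declaration, chk: "mypy.checker.TypeChecker": the misleading None placeholder disappears, and the type stays quoted because naming it directly would require an import cycle between the checker modules. A sketch of the shape with hypothetical classes:

    class Client:
        # Declared in the class body, assigned in __init__; no None placeholder.
        # Quoted because Transport is defined later in the module.
        transport: "Transport"

        def __init__(self, transport: "Transport") -> None:
            self.transport = transport

    class Transport:
        pass
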
temp_errors = self.msg.clean_copy().errors dummy = DUMMY_FIELD_NAME + spec.field[len(spec.key):] - temp_ast = parse(dummy, fnam='', module=None, - options=self.chk.options, errors=temp_errors) # type: Node + temp_ast: Node = parse( + dummy, fnam="", module=None, options=self.chk.options, errors=temp_errors + ) if temp_errors.is_errors(): self.msg.fail('Syntax error in format specifier "{}"'.format(spec.field), ctx, code=codes.STRING_FORMATTING) @@ -622,7 +636,7 @@ def check_str_interpolation(self, assert False def parse_conversion_specifiers(self, format: str) -> List[ConversionSpecifier]: - specifiers = [] # type: List[ConversionSpecifier] + specifiers: List[ConversionSpecifier] = [] for parens_key, key, flags, width, precision, type in FORMAT_RE.findall(format): if parens_key == '': key = None @@ -653,7 +667,7 @@ def check_simple_str_interpolation(self, specifiers: List[ConversionSpecifier], return rhs_type = get_proper_type(self.accept(replacements)) - rep_types = [] # type: List[Type] + rep_types: List[Type] = [] if isinstance(rhs_type, TupleType): rep_types = rhs_type.items elif isinstance(rhs_type, AnyType): @@ -698,7 +712,7 @@ def check_mapping_str_interpolation(self, specifiers: List[ConversionSpecifier], if (isinstance(replacements, DictExpr) and all(isinstance(k, (StrExpr, BytesExpr, UnicodeExpr)) for k, v in replacements.items)): - mapping = {} # type: Dict[str, Type] + mapping: Dict[str, Type] = {} for k, v in replacements.items: if self.chk.options.python_version >= (3, 0) and isinstance(expr, BytesExpr): # Special case: for bytes formatting keys must be bytes. @@ -761,7 +775,7 @@ def build_dict_type(self, expr: FormatStringExpr) -> Type: def build_replacement_checkers(self, specifiers: List[ConversionSpecifier], context: Context, expr: FormatStringExpr ) -> Optional[List[Checkers]]: - checkers = [] # type: List[Checkers] + checkers: List[Checkers] = [] for specifier in specifiers: checker = self.replacement_checkers(specifier, context, expr) if checker is None: @@ -775,7 +789,7 @@ def replacement_checkers(self, specifier: ConversionSpecifier, context: Context, of the right type for the specifier. The first functions take a node and checks its type in the right type context. The second function just checks a type. """ - checkers = [] # type: List[Checkers] + checkers: List[Checkers] = [] if specifier.width == '*': checkers.append(self.checkers_for_star(context)) diff --git a/mypy/config_parser.py b/mypy/config_parser.py index c60097174234..dc135f1c4a6c 100644 --- a/mypy/config_parser.py +++ b/mypy/config_parser.py @@ -102,7 +102,7 @@ def check_follow_imports(choice: str) -> str: # sufficient, and we don't have to do anything here. This table # exists to specify types for values initialized to None or container # types. 
-ini_config_types = { +ini_config_types: Final[Dict[str, _INI_PARSER_CALLABLE]] = { 'python_version': parse_version, 'strict_optional_whitelist': lambda s: s.split(), 'custom_typing_module': str, @@ -125,10 +125,10 @@ def check_follow_imports(choice: str) -> str: 'cache_dir': expand_path, 'python_executable': expand_path, 'strict': bool, -} # type: Final[Dict[str, _INI_PARSER_CALLABLE]] +} # Reuse the ini_config_types and overwrite the diff -toml_config_types = ini_config_types.copy() # type: Final[Dict[str, _INI_PARSER_CALLABLE]] +toml_config_types: Final[Dict[str, _INI_PARSER_CALLABLE]] = ini_config_types.copy() toml_config_types.update({ 'python_version': lambda s: parse_version(str(s)), 'strict_optional_whitelist': try_split, @@ -158,7 +158,7 @@ def parse_config_file(options: Options, set_strict_flags: Callable[[], None], stderr = stderr or sys.stderr if filename is not None: - config_files = (filename,) # type: Tuple[str, ...] + config_files: Tuple[str, ...] = (filename,) else: config_files = tuple(map(os.path.expanduser, defaults.CONFIG_FILES)) @@ -176,7 +176,7 @@ def parse_config_file(options: Options, set_strict_flags: Callable[[], None], if 'mypy' not in toml_data: continue toml_data = OrderedDict({'mypy': toml_data['mypy']}) - parser = destructure_overrides(toml_data) # type: MutableMapping[str, Any] + parser: MutableMapping[str, Any] = destructure_overrides(toml_data) config_types = toml_config_types else: config_parser.read(config_file) @@ -336,8 +336,8 @@ def parse_section(prefix: str, template: Options, Returns a dict of option values encountered, and a dict of report directories. """ - results = {} # type: Dict[str, object] - report_dirs = {} # type: Dict[str, str] + results: Dict[str, object] = {} + report_dirs: Dict[str, str] = {} for key in section: invert = False options_key = key @@ -380,7 +380,7 @@ def parse_section(prefix: str, template: Options, else: continue ct = type(dv) - v = None # type: Any + v: Any = None try: if ct is bool: if isinstance(section, dict): @@ -443,7 +443,7 @@ def split_directive(s: str) -> Tuple[List[str], List[str]]: Returns the parts and a list of error messages.""" parts = [] - cur = [] # type: List[str] + cur: List[str] = [] errors = [] i = 0 while i < len(s): @@ -499,7 +499,7 @@ def parse_mypy_comments( generated. """ - errors = [] # type: List[Tuple[int, str]] + errors: List[Tuple[int, str]] = [] sections = {} for lineno, line in args: diff --git a/mypy/constraints.py b/mypy/constraints.py index 074f038a30bc..7cc1c20fb6c8 100644 --- a/mypy/constraints.py +++ b/mypy/constraints.py @@ -18,8 +18,8 @@ from mypy.argmap import ArgTypeExpander from mypy.typestate import TypeState -SUBTYPE_OF = 0 # type: Final -SUPERTYPE_OF = 1 # type: Final +SUBTYPE_OF: Final = 0 +SUPERTYPE_OF: Final = 1 class Constraint: @@ -28,9 +28,9 @@ class Constraint: It can be either T <: type or T :> type (T is a type variable). """ - type_var = None # type: TypeVarId + type_var: TypeVarId op = 0 # SUBTYPE_OF or SUPERTYPE_OF - target = None # type: Type + target: Type def __init__(self, type_var: TypeVarId, op: int, target: Type) -> None: self.type_var = type_var @@ -51,7 +51,7 @@ def infer_constraints_for_callable( Return a list of constraints. """ - constraints = [] # type: List[Constraint] + constraints: List[Constraint] = [] mapper = ArgTypeExpander() for i, actuals in enumerate(formal_to_actual): @@ -268,7 +268,7 @@ class ConstraintBuilderVisitor(TypeVisitor[List[Constraint]]): # The type that is compared against a template # TODO: The value may be None. 
Is that actually correct? - actual = None # type: ProperType + actual: ProperType def __init__(self, actual: ProperType, direction: int) -> None: # Direction must be SUBTYPE_OF or SUPERTYPE_OF. @@ -314,7 +314,7 @@ def visit_type_var(self, template: TypeVarType) -> List[Constraint]: def visit_instance(self, template: Instance) -> List[Constraint]: original_actual = actual = self.actual - res = [] # type: List[Constraint] + res: List[Constraint] = [] if isinstance(actual, (CallableType, Overloaded)) and template.type.is_protocol: if template.type.protocol_members == ['__call__']: # Special case: a generic callback protocol @@ -448,7 +448,7 @@ def visit_callable_type(self, template: CallableType) -> List[Constraint]: cactual = self.actual # FIX verify argument counts # FIX what if one of the functions is generic - res = [] # type: List[Constraint] + res: List[Constraint] = [] # We can't infer constraints from arguments if the template is Callable[..., T] (with # literal '...'). @@ -500,7 +500,7 @@ def infer_against_overloaded(self, overloaded: Overloaded, def visit_tuple_type(self, template: TupleType) -> List[Constraint]: actual = self.actual if isinstance(actual, TupleType) and len(actual.items) == len(template.items): - res = [] # type: List[Constraint] + res: List[Constraint] = [] for i in range(len(template.items)): res.extend(infer_constraints(template.items[i], actual.items[i], @@ -514,7 +514,7 @@ def visit_tuple_type(self, template: TupleType) -> List[Constraint]: def visit_typeddict_type(self, template: TypedDictType) -> List[Constraint]: actual = self.actual if isinstance(actual, TypedDictType): - res = [] # type: List[Constraint] + res: List[Constraint] = [] # NOTE: Non-matching keys are ignored. Compatibility is checked # elsewhere so this shouldn't be unsafe. 
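Bare Final is not always enough: in the config_parser hunks above, ini_config_types and toml_config_types spell out Final[Dict[str, _INI_PARSER_CALLABLE]] because the intended value type is wider than what mypy would infer from the dict literal. A sketch of that situation with hypothetical names:

    from typing import Callable, Dict
    from typing_extensions import Final

    Parser = Callable[[str], object]

    # The explicit Final[...] argument pins the wider value type; inference
    # from the literal alone would produce something narrower and messier.
    parsers: Final[Dict[str, Parser]] = {
        "int": lambda s: int(s),
        "words": lambda s: s.split(),
    }
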
for (item_name, template_item_type, actual_item_type) in template.zip(actual): @@ -538,13 +538,13 @@ def visit_type_guard_type(self, template: TypeGuardType) -> List[Constraint]: assert False, "This should be never called, got {}".format(template) def infer_against_any(self, types: Iterable[Type], any_type: AnyType) -> List[Constraint]: - res = [] # type: List[Constraint] + res: List[Constraint] = [] for t in types: res.extend(infer_constraints(t, any_type, self.direction)) return res def visit_overloaded(self, template: Overloaded) -> List[Constraint]: - res = [] # type: List[Constraint] + res: List[Constraint] = [] for t in template.items(): res.extend(infer_constraints(t, self.actual, self.direction)) return res diff --git a/mypy/defaults.py b/mypy/defaults.py index 49543cfcecaa..dc9e49c2e9c6 100644 --- a/mypy/defaults.py +++ b/mypy/defaults.py @@ -2,35 +2,45 @@ from typing_extensions import Final -PYTHON2_VERSION = (2, 7) # type: Final -PYTHON3_VERSION = (3, 6) # type: Final -PYTHON3_VERSION_MIN = (3, 4) # type: Final -CACHE_DIR = '.mypy_cache' # type: Final -CONFIG_FILE = ['mypy.ini', '.mypy.ini'] # type: Final -PYPROJECT_CONFIG_FILES = ['pyproject.toml', ] # type: Final -SHARED_CONFIG_FILES = ['setup.cfg', ] # type: Final -USER_CONFIG_FILES = ['~/.config/mypy/config', '~/.mypy.ini', ] # type: Final -if os.environ.get('XDG_CONFIG_HOME'): - USER_CONFIG_FILES.insert(0, os.path.join(os.environ['XDG_CONFIG_HOME'], 'mypy/config')) +PYTHON2_VERSION: Final = (2, 7) +PYTHON3_VERSION: Final = (3, 6) +PYTHON3_VERSION_MIN: Final = (3, 4) +CACHE_DIR: Final = ".mypy_cache" +CONFIG_FILE: Final = ["mypy.ini", ".mypy.ini"] +PYPROJECT_CONFIG_FILES: Final = [ + "pyproject.toml", +] +SHARED_CONFIG_FILES: Final = [ + "setup.cfg", +] +USER_CONFIG_FILES: Final = [ + "~/.config/mypy/config", + "~/.mypy.ini", +] +if os.environ.get("XDG_CONFIG_HOME"): + USER_CONFIG_FILES.insert(0, os.path.join(os.environ["XDG_CONFIG_HOME"], "mypy/config")) -CONFIG_FILES = (CONFIG_FILE + PYPROJECT_CONFIG_FILES + SHARED_CONFIG_FILES + - USER_CONFIG_FILES) # type: Final +CONFIG_FILES: Final = ( + CONFIG_FILE + PYPROJECT_CONFIG_FILES + SHARED_CONFIG_FILES + USER_CONFIG_FILES +) # This must include all reporters defined in mypy.report. This is defined here # to make reporter names available without importing mypy.report -- this speeds # up startup. -REPORTER_NAMES = ['linecount', - 'any-exprs', - 'linecoverage', - 'memory-xml', - 'cobertura-xml', - 'xml', - 'xslt-html', - 'xslt-txt', - 'html', - 'txt', - 'lineprecision'] # type: Final +REPORTER_NAMES: Final = [ + "linecount", + "any-exprs", + "linecoverage", + "memory-xml", + "cobertura-xml", + "xml", + "xslt-html", + "xslt-txt", + "html", + "txt", + "lineprecision", +] # Threshold after which we sometimes filter out most errors to avoid very # verbose output -MANY_ERRORS_THRESHOLD = 200 # type: Final +MANY_ERRORS_THRESHOLD: Final = 200 diff --git a/mypy/dmypy/client.py b/mypy/dmypy/client.py index 141c18993fcc..3629372d1a85 100644 --- a/mypy/dmypy/client.py +++ b/mypy/dmypy/client.py @@ -469,7 +469,7 @@ def request(status_file: str, command: str, *, timeout: Optional[int] = None, raised OSError. This covers cases such as connection refused or closed prematurely as well as invalid JSON received. 
""" - response = {} # type: Dict[str, str] + response: Dict[str, str] = {} args = dict(kwds) args['command'] = command # Tell the server whether this request was initiated from a human-facing terminal, diff --git a/mypy/dmypy_os.py b/mypy/dmypy_os.py index 77cf963ad612..3168f7566a27 100644 --- a/mypy/dmypy_os.py +++ b/mypy/dmypy_os.py @@ -10,8 +10,8 @@ PROCESS_QUERY_LIMITED_INFORMATION = ctypes.c_ulong(0x1000) kernel32 = ctypes.windll.kernel32 - OpenProcess = kernel32.OpenProcess # type: Callable[[DWORD, int, int], HANDLE] - GetExitCodeProcess = kernel32.GetExitCodeProcess # type: Callable[[HANDLE, Any], int] + OpenProcess: Callable[[DWORD, int, int], HANDLE] = kernel32.OpenProcess + GetExitCodeProcess: Callable[[HANDLE, Any], int] = kernel32.GetExitCodeProcess else: import os import signal diff --git a/mypy/dmypy_server.py b/mypy/dmypy_server.py index c6a23a47b49e..2f8fc7f119ba 100644 --- a/mypy/dmypy_server.py +++ b/mypy/dmypy_server.py @@ -35,7 +35,7 @@ from mypy.version import __version__ from mypy.util import FancyFormatter, count_stats -MEM_PROFILE = False # type: Final # If True, dump memory profile after initialization +MEM_PROFILE: Final = False # If True, dump memory profile after initialization if sys.platform == 'win32': from subprocess import STARTUPINFO @@ -127,7 +127,7 @@ def daemonize(options: Options, # Server code. -CONNECTION_NAME = 'dmypy' # type: Final +CONNECTION_NAME: Final = "dmypy" def process_start_options(flags: List[str], allow_sources: bool) -> Options: @@ -172,7 +172,7 @@ def __init__(self, options: Options, # Snapshot the options info before we muck with it, to detect changes self.options_snapshot = options.snapshot() self.timeout = timeout - self.fine_grained_manager = None # type: Optional[FineGrainedBuildManager] + self.fine_grained_manager: Optional[FineGrainedBuildManager] = None if os.path.isfile(status_file): os.unlink(status_file) @@ -216,7 +216,7 @@ def serve(self) -> None: while True: with server: data = receive(server) - resp = {} # type: Dict[str, Any] + resp: Dict[str, Any] = {} if 'command' not in data: resp = {'error': "No command found in request"} else: @@ -275,7 +275,7 @@ def run_command(self, command: str, data: Dict[str, object]) -> Dict[str, object def cmd_status(self, fswatcher_dump_file: Optional[str] = None) -> Dict[str, object]: """Return daemon status.""" - res = {} # type: Dict[str, object] + res: Dict[str, object] = {} res.update(get_meminfo()) if fswatcher_dump_file: data = self.fswatcher.dump_file_data() if hasattr(self, 'fswatcher') else {} @@ -771,7 +771,7 @@ def pretty_messages(self, messages: List[str], n_sources: int, messages = self.formatter.fit_in_terminal(messages, fixed_terminal_width=terminal_width) if self.options.error_summary: - summary = None # type: Optional[str] + summary: Optional[str] = None if messages: n_errors, n_files = count_stats(messages) if n_errors: @@ -868,11 +868,11 @@ def cmd_hang(self) -> Dict[str, object]: # Misc utilities. 
-MiB = 2**20 # type: Final +MiB: Final = 2 ** 20 def get_meminfo() -> Dict[str, Any]: - res = {} # type: Dict[str, Any] + res: Dict[str, Any] = {} try: import psutil # type: ignore # It's not in typeshed yet except ImportError: diff --git a/mypy/dmypy_util.py b/mypy/dmypy_util.py index f598742d2474..8a527afe5762 100644 --- a/mypy/dmypy_util.py +++ b/mypy/dmypy_util.py @@ -10,7 +10,7 @@ from mypy.ipc import IPCBase -DEFAULT_STATUS_FILE = '.dmypy.json' # type: Final +DEFAULT_STATUS_FILE: Final = ".dmypy.json" def receive(connection: IPCBase) -> Any: diff --git a/mypy/errorcodes.py b/mypy/errorcodes.py index 01b946c46747..22c592959530 100644 --- a/mypy/errorcodes.py +++ b/mypy/errorcodes.py @@ -8,9 +8,9 @@ # All created error codes are implicitly stored in this list. -all_error_codes = [] # type: List[ErrorCode] +all_error_codes: List["ErrorCode"] = [] -error_codes = {} # type: Dict[str, ErrorCode] +error_codes: Dict[str, "ErrorCode"] = {} class ErrorCode: @@ -28,103 +28,107 @@ def __str__(self) -> str: return ''.format(self.code) -ATTR_DEFINED = ErrorCode( - 'attr-defined', "Check that attribute exists", 'General') # type: Final -NAME_DEFINED = ErrorCode( - 'name-defined', "Check that name is defined", 'General') # type: Final -CALL_ARG = ErrorCode( - 'call-arg', "Check number, names and kinds of arguments in calls", 'General') # type: Final -ARG_TYPE = ErrorCode( - 'arg-type', "Check argument types in calls", 'General') # type: Final -CALL_OVERLOAD = ErrorCode( - 'call-overload', "Check that an overload variant matches arguments", 'General') # type: Final -VALID_TYPE = ErrorCode( - 'valid-type', "Check that type (annotation) is valid", 'General') # type: Final -VAR_ANNOTATED = ErrorCode( - 'var-annotated', "Require variable annotation if type can't be inferred", - 'General') # type: Final -OVERRIDE = ErrorCode( - 'override', "Check that method override is compatible with base class", - 'General') # type: Final -RETURN = ErrorCode( - 'return', "Check that function always returns a value", 'General') # type: Final -RETURN_VALUE = ErrorCode( - 'return-value', "Check that return value is compatible with signature", - 'General') # type: Final -ASSIGNMENT = ErrorCode( - 'assignment', "Check that assigned value is compatible with target", 'General') # type: Final -TYPE_ARG = ErrorCode( - 'type-arg', "Check that generic type arguments are present", 'General') # type: Final -TYPE_VAR = ErrorCode( - 'type-var', "Check that type variable values are valid", 'General') # type: Final -UNION_ATTR = ErrorCode( - 'union-attr', "Check that attribute exists in each item of a union", 'General') # type: Final -INDEX = ErrorCode( - 'index', "Check indexing operations", 'General') # type: Final -OPERATOR = ErrorCode( - 'operator', "Check that operator is valid for operands", 'General') # type: Final -LIST_ITEM = ErrorCode( - 'list-item', "Check list items in a list expression [item, ...]", 'General') # type: Final -DICT_ITEM = ErrorCode( - 'dict-item', - "Check dict items in a dict expression {key: value, ...}", 'General') # type: Final -TYPEDDICT_ITEM = ErrorCode( - 'typeddict-item', "Check items when constructing TypedDict", 'General') # type: Final -HAS_TYPE = ErrorCode( - 'has-type', "Check that type of reference can be determined", 'General') # type: Final -IMPORT = ErrorCode( - 'import', "Require that imported module can be found or has stubs", 'General') # type: Final -NO_REDEF = ErrorCode( - 'no-redef', "Check that each name is defined once", 'General') # type: Final -FUNC_RETURNS_VALUE = ErrorCode( - 
'func-returns-value', "Check that called function returns a value in value context", - 'General') # type: Final -ABSTRACT = ErrorCode( - 'abstract', "Prevent instantiation of classes with abstract attributes", - 'General') # type: Final -VALID_NEWTYPE = ErrorCode( - 'valid-newtype', "Check that argument 2 to NewType is valid", 'General') # type: Final -STRING_FORMATTING = ErrorCode( - 'str-format', "Check that string formatting/interpolation is type-safe", - 'General') # type: Final -STR_BYTES_PY3 = ErrorCode( - 'str-bytes-safe', "Warn about dangerous coercions related to bytes and string types", - 'General') # type: Final -EXIT_RETURN = ErrorCode( - 'exit-return', "Warn about too general return type for '__exit__'", 'General') # type: Final +ATTR_DEFINED: Final = ErrorCode("attr-defined", "Check that attribute exists", "General") +NAME_DEFINED: Final = ErrorCode("name-defined", "Check that name is defined", "General") +CALL_ARG: Final = ErrorCode( + "call-arg", "Check number, names and kinds of arguments in calls", "General" +) +ARG_TYPE: Final = ErrorCode("arg-type", "Check argument types in calls", "General") +CALL_OVERLOAD: Final = ErrorCode( + "call-overload", "Check that an overload variant matches arguments", "General" +) +VALID_TYPE: Final = ErrorCode("valid-type", "Check that type (annotation) is valid", "General") +VAR_ANNOTATED: Final = ErrorCode( + "var-annotated", "Require variable annotation if type can't be inferred", "General" +) +OVERRIDE: Final = ErrorCode( + "override", "Check that method override is compatible with base class", "General" +) +RETURN: Final = ErrorCode("return", "Check that function always returns a value", "General") +RETURN_VALUE: Final = ErrorCode( + "return-value", "Check that return value is compatible with signature", "General" +) +ASSIGNMENT: Final = ErrorCode( + "assignment", "Check that assigned value is compatible with target", "General" +) +TYPE_ARG: Final = ErrorCode("type-arg", "Check that generic type arguments are present", "General") +TYPE_VAR: Final = ErrorCode("type-var", "Check that type variable values are valid", "General") +UNION_ATTR: Final = ErrorCode( + "union-attr", "Check that attribute exists in each item of a union", "General" +) +INDEX: Final = ErrorCode("index", "Check indexing operations", "General") +OPERATOR: Final = ErrorCode("operator", "Check that operator is valid for operands", "General") +LIST_ITEM: Final = ErrorCode( + "list-item", "Check list items in a list expression [item, ...]", "General" +) +DICT_ITEM: Final = ErrorCode( + "dict-item", "Check dict items in a dict expression {key: value, ...}", "General" +) +TYPEDDICT_ITEM: Final = ErrorCode( + "typeddict-item", "Check items when constructing TypedDict", "General" +) +HAS_TYPE: Final = ErrorCode( + "has-type", "Check that type of reference can be determined", "General" +) +IMPORT: Final = ErrorCode( + "import", "Require that imported module can be found or has stubs", "General" +) +NO_REDEF: Final = ErrorCode("no-redef", "Check that each name is defined once", "General") +FUNC_RETURNS_VALUE: Final = ErrorCode( + "func-returns-value", "Check that called function returns a value in value context", "General" +) +ABSTRACT: Final = ErrorCode( + "abstract", "Prevent instantiation of classes with abstract attributes", "General" +) +VALID_NEWTYPE: Final = ErrorCode( + "valid-newtype", "Check that argument 2 to NewType is valid", "General" +) +STRING_FORMATTING: Final = ErrorCode( + "str-format", "Check that string formatting/interpolation is type-safe", "General" +) 
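errorcodes.py shows the rewrite at scale: each "NAME = ErrorCode(...)  # type: Final" becomes "NAME: Final = ErrorCode(...)", and the registries near the top of the file quote "ErrorCode" because the class body appears further down the module. A minimal sketch of that forward-reference pattern, with a hypothetical registry:

    from typing import Dict, List

    # The class is defined below, so its name must be quoted in annotations
    # that are stored before the class exists.
    all_codes: List["Code"] = []
    codes_by_name: Dict[str, "Code"] = {}

    class Code:
        def __init__(self, name: str) -> None:
            self.name = name
            all_codes.append(self)
            codes_by_name[name] = self
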
+STR_BYTES_PY3: Final = ErrorCode( + "str-bytes-safe", "Warn about dangerous coercions related to bytes and string types", "General" +) +EXIT_RETURN: Final = ErrorCode( + "exit-return", "Warn about too general return type for '__exit__'", "General" +) # These error codes aren't enabled by default. -NO_UNTYPED_DEF = ErrorCode( - 'no-untyped-def', "Check that every function has an annotation", 'General') # type: Final -NO_UNTYPED_CALL = ErrorCode( +NO_UNTYPED_DEF: Final = ErrorCode( + "no-untyped-def", "Check that every function has an annotation", "General" +) +NO_UNTYPED_CALL: Final = ErrorCode( 'no-untyped-call', "Disallow calling functions without type annotations from annotated functions", - 'General') # type: Final -REDUNDANT_CAST = ErrorCode( - 'redundant-cast', "Check that cast changes type of expression", 'General') # type: Final -COMPARISON_OVERLAP = ErrorCode( - 'comparison-overlap', - "Check that types in comparisons and 'in' expressions overlap", 'General') # type: Final -NO_ANY_UNIMPORTED = ErrorCode( - 'no-any-unimported', 'Reject "Any" types from unfollowed imports', 'General') # type: Final -NO_ANY_RETURN = ErrorCode( - 'no-any-return', 'Reject returning value with "Any" type if return type is not "Any"', - 'General') # type: Final -UNREACHABLE = ErrorCode( - 'unreachable', "Warn about unreachable statements or expressions", 'General') # type: Final -REDUNDANT_EXPR = ErrorCode( - 'redundant-expr', - "Warn about redundant expressions", - 'General', - default_enabled=False) # type: Final -NAME_MATCH = ErrorCode( - 'name-match', "Check that type definition has consistent naming", 'General') # type: Final + "General", +) +REDUNDANT_CAST: Final = ErrorCode( + "redundant-cast", "Check that cast changes type of expression", "General" +) +COMPARISON_OVERLAP: Final = ErrorCode( + "comparison-overlap", "Check that types in comparisons and 'in' expressions overlap", "General" +) +NO_ANY_UNIMPORTED: Final = ErrorCode( + "no-any-unimported", 'Reject "Any" types from unfollowed imports', "General" +) +NO_ANY_RETURN: Final = ErrorCode( + "no-any-return", + 'Reject returning value with "Any" type if return type is not "Any"', + "General", +) +UNREACHABLE: Final = ErrorCode( + "unreachable", "Warn about unreachable statements or expressions", "General" +) +REDUNDANT_EXPR: Final = ErrorCode( + "redundant-expr", "Warn about redundant expressions", "General", default_enabled=False +) +NAME_MATCH: Final = ErrorCode( + "name-match", "Check that type definition has consistent naming", "General" +) # Syntax errors are often blocking. -SYNTAX = ErrorCode( - 'syntax', "Report syntax errors", 'General') # type: Final +SYNTAX: Final = ErrorCode("syntax", "Report syntax errors", "General") # This is a catch-all for remaining uncategorized errors. -MISC = ErrorCode( - 'misc', "Miscellaneous other checks", 'General') # type: Final +MISC: Final = ErrorCode("misc", "Miscellaneous other checks", "General") diff --git a/mypy/errors.py b/mypy/errors.py index d4cde65ebe68..ab797a770f83 100644 --- a/mypy/errors.py +++ b/mypy/errors.py @@ -14,8 +14,8 @@ from mypy import errorcodes as codes from mypy.util import DEFAULT_SOURCE_OFFSET, is_typeshed_file -T = TypeVar('T') -allowed_duplicates = ['@overload', 'Got:', 'Expected:'] # type: Final +T = TypeVar("T") +allowed_duplicates: Final = ["@overload", "Got:", "Expected:"] class ErrorInfo: @@ -23,19 +23,19 @@ class ErrorInfo: # Description of a sequence of imports that refer to the source file # related to this error. Each item is a (path, line number) tuple. 
- import_ctx = None # type: List[Tuple[str, int]] + import_ctx: List[Tuple[str, int]] # The path to source file that was the source of this error. file = '' # The fully-qualified id of the source module for this error. - module = None # type: Optional[str] + module: Optional[str] = None # The name of the type in which this error is located at. - type = '' # type: Optional[str] # Unqualified, may be None + type: Optional[str] = "" # Unqualified, may be None # The name of the function or member in which this error is located at. - function_or_member = '' # type: Optional[str] # Unqualified, may be None + function_or_member: Optional[str] = "" # Unqualified, may be None # The line number related to this error within file. line = 0 # -1 if unknown @@ -50,7 +50,7 @@ class ErrorInfo: message = '' # The error code. - code = None # type: Optional[ErrorCode] + code: Optional[ErrorCode] = None # If True, we should halt build after the file that generated this error. blocker = False @@ -60,10 +60,10 @@ class ErrorInfo: # Actual origin of the error message as tuple (path, line number, end line number) # If end line number is unknown, use line number. - origin = None # type: Tuple[str, int, int] + origin: Tuple[str, int, int] # Fine-grained incremental target where this was reported - target = None # type: Optional[str] + target: Optional[str] = None # If True, don't show this message in output, but still record the error (needed # by mypy daemon) @@ -120,47 +120,47 @@ class Errors: # Map from files to generated error messages. Is an OrderedDict so # that it can be used to order messages based on the order the # files were processed. - error_info_map = None # type: Dict[str, List[ErrorInfo]] + error_info_map: Dict[str, List[ErrorInfo]] # Files that we have reported the errors for - flushed_files = None # type: Set[str] + flushed_files: Set[str] # Current error context: nested import context/stack, as a list of (path, line) pairs. - import_ctx = None # type: List[Tuple[str, int]] + import_ctx: List[Tuple[str, int]] # Path name prefix that is removed from all paths, if set. - ignore_prefix = None # type: Optional[str] + ignore_prefix: Optional[str] = None # Path to current file. - file = '' # type: str + file: str = "" # Ignore some errors on these lines of each file # (path -> line -> error-codes) - ignored_lines = None # type: Dict[str, Dict[int, List[str]]] + ignored_lines: Dict[str, Dict[int, List[str]]] # Lines on which an error was actually ignored. - used_ignored_lines = None # type: Dict[str, Set[int]] + used_ignored_lines: Dict[str, Set[int]] # Files where all errors should be ignored. - ignored_files = None # type: Set[str] + ignored_files: Set[str] # Collection of reported only_once messages. - only_once_messages = None # type: Set[str] + only_once_messages: Set[str] # Set to True to show "In function "foo":" messages. - show_error_context = False # type: bool + show_error_context: bool = False # Set to True to show column numbers in error messages. - show_column_numbers = False # type: bool + show_column_numbers: bool = False # Set to True to show absolute file paths in error messages. - show_absolute_path = False # type: bool + show_absolute_path: bool = False # State for keeping track of the current fine-grained incremental mode target. # (See mypy.server.update for more about targets.) # Current module id. 
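The ErrorInfo and Errors attributes above fall into two camps. Where "= None" was only a placeholder for a non-Optional type, the new code keeps a bare annotation with no value, so the attribute must be assigned before use; where None is a legitimate state, the annotation gains Optional and the default survives. A hedged sketch:

    from typing import List, Optional, Tuple

    class ErrorLog:
        # Placeholder None dropped: always assigned in __init__.
        entries: List[Tuple[str, int]]
        # None is a real value here, so the default stays.
        current_target: Optional[str] = None

        def __init__(self) -> None:
            self.entries = []
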
- target_module = None # type: Optional[str] - scope = None # type: Optional[Scope] + target_module: Optional[str] = None + scope: Optional[Scope] = None # Have we seen an import-related error so far? If yes, we filter out other messages # in some cases to avoid reporting huge numbers of errors. @@ -511,7 +511,7 @@ def format_messages(self, error_info: List[ErrorInfo], is True also append a relevant trimmed source code line (only for severity 'error'). """ - a = [] # type: List[str] + a: List[str] = [] error_info = [info for info in error_info if not info.hidden] errors = self.render_messages(self.sort_messages(error_info)) errors = self.remove_duplicates(errors) @@ -595,10 +595,10 @@ def render_messages(self, The path item may be None. If the line item is negative, the line number is not defined for the tuple. """ - result = [] # type: List[ErrorTuple] - prev_import_context = [] # type: List[Tuple[str, int]] - prev_function_or_member = None # type: Optional[str] - prev_type = None # type: Optional[str] + result: List[ErrorTuple] = [] + prev_import_context: List[Tuple[str, int]] = [] + prev_function_or_member: Optional[str] = None + prev_type: Optional[str] = None for e in errors: # Report module import context, if different from previous message. @@ -666,7 +666,7 @@ def sort_messages(self, errors: List[ErrorInfo]) -> List[ErrorInfo]: context by line number, but otherwise retain the general ordering of the messages. """ - result = [] # type: List[ErrorInfo] + result: List[ErrorInfo] = [] i = 0 while i < len(errors): i0 = i @@ -684,7 +684,7 @@ def sort_messages(self, errors: List[ErrorInfo]) -> List[ErrorInfo]: def remove_duplicates(self, errors: List[ErrorTuple]) -> List[ErrorTuple]: """Remove duplicates from a sorted error list.""" - res = [] # type: List[ErrorTuple] + res: List[ErrorTuple] = [] i = 0 while i < len(errors): dup = False @@ -727,10 +727,10 @@ class CompileError(Exception): """ - messages = None # type: List[str] + messages: List[str] use_stdout = False # Can be set in case there was a module with a blocking error - module_with_blocker = None # type: Optional[str] + module_with_blocker: Optional[str] = None def __init__(self, messages: List[str], diff --git a/mypy/expandtype.py b/mypy/expandtype.py index c9a1a2430afb..f6ae0f494bf8 100644 --- a/mypy/expandtype.py +++ b/mypy/expandtype.py @@ -23,7 +23,7 @@ def expand_type_by_instance(typ: Type, instance: Instance) -> Type: if not instance.args: return typ else: - variables = {} # type: Dict[TypeVarId, Type] + variables: Dict[TypeVarId, Type] = {} for binder, arg in zip(instance.type.defn.type_vars, instance.args): variables[binder.id] = arg return expand_type(typ, variables) @@ -38,7 +38,7 @@ def freshen_function_type_vars(callee: F) -> F: if not callee.is_generic(): return cast(F, callee) tvdefs = [] - tvmap = {} # type: Dict[TypeVarId, Type] + tvmap: Dict[TypeVarId, Type] = {} for v in callee.variables: # TODO(shantanu): fix for ParamSpecDef assert isinstance(v, TypeVarDef) @@ -57,7 +57,7 @@ def freshen_function_type_vars(callee: F) -> F: class ExpandTypeVisitor(TypeVisitor[Type]): """Visitor that substitutes type variables with values.""" - variables = None # type: Mapping[TypeVarId, Type] # TypeVar id -> TypeVar value + variables: Mapping[TypeVarId, Type] # TypeVar id -> TypeVar value def __init__(self, variables: Mapping[TypeVarId, Type]) -> None: self.variables = variables @@ -102,7 +102,7 @@ def visit_callable_type(self, t: CallableType) -> Type: if t.type_guard is not None else None)) def visit_overloaded(self, t: 
Overloaded) -> Type: - items = [] # type: List[CallableType] + items: List[CallableType] = [] for item in t.items(): new_item = item.accept(self) assert isinstance(new_item, ProperType) @@ -145,7 +145,7 @@ def visit_type_alias_type(self, t: TypeAliasType) -> Type: return t.copy_modified(args=self.expand_types(t.args)) def expand_types(self, types: Iterable[Type]) -> List[Type]: - a = [] # type: List[Type] + a: List[Type] = [] for t in types: a.append(t.accept(self)) return a diff --git a/mypy/exprtotype.py b/mypy/exprtotype.py index 578080477e0c..e0d2fe783543 100644 --- a/mypy/exprtotype.py +++ b/mypy/exprtotype.py @@ -37,7 +37,7 @@ def expr_to_unanalyzed_type(expr: Expression, _parent: Optional[Expression] = No """ # The `parent` parameter is used in recursive calls to provide context for # understanding whether an CallableArgument is ok. - name = None # type: Optional[str] + name: Optional[str] = None if isinstance(expr, NameExpr): name = expr.name if name == 'True': @@ -96,7 +96,7 @@ def expr_to_unanalyzed_type(expr: Expression, _parent: Optional[Expression] = No # Go through the constructor args to get its name and type. name = None default_type = AnyType(TypeOfAny.unannotated) - typ = default_type # type: Type + typ: Type = default_type for i, arg in enumerate(expr.args): if expr.arg_names[i] is not None: if expr.arg_names[i] == "name": diff --git a/mypy/fastparse.py b/mypy/fastparse.py index ed85c5962424..a493de8cf928 100644 --- a/mypy/fastparse.py +++ b/mypy/fastparse.py @@ -42,7 +42,7 @@ try: # pull this into a final variable to make mypyc be quiet about the # the default argument warning - PY_MINOR_VERSION = sys.version_info[1] # type: Final + PY_MINOR_VERSION: Final = sys.version_info[1] # Check if we can use the stdlib ast module instead of typed_ast. if sys.version_info >= (3, 8): @@ -122,12 +122,12 @@ def ast3_parse(source: Union[str, bytes], filename: str, mode: str, # There is no way to create reasonable fallbacks at this stage, # they must be patched later. -MISSING_FALLBACK = FakeInfo("fallback can't be filled out until semanal") # type: Final -_dummy_fallback = Instance(MISSING_FALLBACK, [], -1) # type: Final +MISSING_FALLBACK: Final = FakeInfo("fallback can't be filled out until semanal") +_dummy_fallback: Final = Instance(MISSING_FALLBACK, [], -1) -TYPE_COMMENT_SYNTAX_ERROR = 'syntax error in type comment' # type: Final +TYPE_COMMENT_SYNTAX_ERROR: Final = "syntax error in type comment" -INVALID_TYPE_IGNORE = 'Invalid "type: ignore" comment' # type: Final +INVALID_TYPE_IGNORE: Final = 'Invalid "type: ignore" comment' TYPE_IGNORE_PATTERN = re.compile(r'[^#]*#\s*type:\s*ignore\s*(.*)') @@ -228,8 +228,8 @@ def parse_type_comment(type_comment: str, extra_ignore = TYPE_IGNORE_PATTERN.match(type_comment) if extra_ignore: # Typeshed has a non-optional return type for group! 
- tag = cast(Any, extra_ignore).group(1) # type: Optional[str] - ignored = parse_type_ignore_tag(tag) # type: Optional[List[str]] + tag: Optional[str] = cast(Any, extra_ignore).group(1) + ignored: Optional[List[str]] = parse_type_ignore_tag(tag) if ignored is None: if errors is not None: errors.report(line, column, INVALID_TYPE_IGNORE, code=codes.SYNTAX) @@ -294,17 +294,17 @@ def __init__(self, is_stub: bool, errors: Errors) -> None: # 'C' for class, 'F' for function - self.class_and_function_stack = [] # type: List[Literal['C', 'F']] - self.imports = [] # type: List[ImportBase] + self.class_and_function_stack: List[Literal["C", "F"]] = [] + self.imports: List[ImportBase] = [] self.options = options self.is_stub = is_stub self.errors = errors - self.type_ignores = {} # type: Dict[int, List[str]] + self.type_ignores: Dict[int, List[str]] = {} # Cache of visit_X methods keyed by type of visited object - self.visitor_cache = {} # type: Dict[type, Callable[[Optional[AST]], Any]] + self.visitor_cache: Dict[type, Callable[[Optional[AST]], Any]] = {} def note(self, msg: str, line: int, column: int) -> None: self.errors.report(line, column, msg, severity='note', code=codes.SYNTAX) @@ -335,7 +335,7 @@ def set_line(self, node: N, n: Union[ast3.expr, ast3.stmt, ast3.ExceptHandler]) return node def translate_opt_expr_list(self, l: Sequence[Optional[AST]]) -> List[Optional[Expression]]: - res = [] # type: List[Optional[Expression]] + res: List[Optional[Expression]] = [] for e in l: exp = self.visit(e) res.append(exp) @@ -362,7 +362,7 @@ def translate_stmt_list(self, mark_block_unreachable(block) return [block] - res = [] # type: List[Statement] + res: List[Statement] = [] for stmt in stmts: node = self.visit(stmt) res.append(node) @@ -384,7 +384,7 @@ def translate_type_comment(self, self.type_ignores[lineno] = extra_ignore return typ - op_map = { + op_map: Final[Dict[typing.Type[AST], str]] = { ast3.Add: '+', ast3.Sub: '-', ast3.Mult: '*', @@ -398,7 +398,7 @@ def translate_type_comment(self, ast3.BitXor: '^', ast3.BitAnd: '&', ast3.FloorDiv: '//' - } # type: Final[Dict[typing.Type[AST], str]] + } def from_operator(self, op: ast3.operator) -> str: op_name = ASTConverter.op_map.get(type(op)) @@ -407,7 +407,7 @@ def from_operator(self, op: ast3.operator) -> str: else: return op_name - comp_op_map = { + comp_op_map: Final[Dict[typing.Type[AST], str]] = { ast3.Gt: '>', ast3.Lt: '<', ast3.Eq: '==', @@ -418,7 +418,7 @@ def from_operator(self, op: ast3.operator) -> str: ast3.IsNot: 'is not', ast3.In: 'in', ast3.NotIn: 'not in' - } # type: Final[Dict[typing.Type[AST], str]] + } def from_comp_operator(self, op: ast3.cmpop) -> str: op_name = ASTConverter.comp_op_map.get(type(op)) @@ -441,9 +441,9 @@ def as_required_block(self, stmts: List[ast3.stmt], lineno: int) -> Block: return b def fix_function_overloads(self, stmts: List[Statement]) -> List[Statement]: - ret = [] # type: List[Statement] - current_overload = [] # type: List[OverloadPart] - current_overload_name = None # type: Optional[str] + ret: List[Statement] = [] + current_overload: List[OverloadPart] = [] + current_overload_name: Optional[str] = None for stmt in stmts: if (current_overload_name is not None and isinstance(stmt, (Decorator, FuncDef)) @@ -525,12 +525,12 @@ def do_func_def(self, n: Union[ast3.FunctionDef, ast3.AsyncFunctionDef], posonlyargs = [arg.arg for arg in getattr(n.args, "posonlyargs", [])] arg_kinds = [arg.kind for arg in args] - arg_names = [arg.variable.name for arg in args] # type: List[Optional[str]] + arg_names: 
List[Optional[str]] = [arg.variable.name for arg in args] arg_names = [None if argument_elide_name(name) or name in posonlyargs else name for name in arg_names] if special_function_elide_names(n.name): arg_names = [None] * len(arg_names) - arg_types = [] # type: List[Optional[Type]] + arg_types: List[Optional[Type]] = [] if no_type_check: arg_types = [None] * len(args) return_type = None @@ -620,7 +620,7 @@ def do_func_def(self, n: Union[ast3.FunctionDef, ast3.AsyncFunctionDef], # Before 3.8, [typed_]ast the line number points to the first decorator. # In 3.8, it points to the 'def' line, where we want it. lineno += len(n.decorator_list) - end_lineno = None # type: Optional[int] + end_lineno: Optional[int] = None else: # Set end_lineno to the old pre-3.8 lineno, in order to keep # existing "# type: ignore" comments working: @@ -637,7 +637,7 @@ def do_func_def(self, n: Union[ast3.FunctionDef, ast3.AsyncFunctionDef], deco = Decorator(func_def, self.translate_expr_list(n.decorator_list), var) first = n.decorator_list[0] deco.set_line(first.lineno, first.col_offset) - retval = deco # type: Union[FuncDef, Decorator] + retval: Union[FuncDef, Decorator] = deco else: # FuncDef overrides set_line -- can't use self.set_line func_def.set_line(lineno, n.col_offset) @@ -659,7 +659,7 @@ def transform_args(self, no_type_check: bool = False, ) -> List[Argument]: new_args = [] - names = [] # type: List[ast3.arg] + names: List[ast3.arg] = [] args_args = getattr(args, "posonlyargs", cast(List[ast3.arg], [])) + args.args args_defaults = args.defaults num_no_defaults = len(args_args) - len(args_defaults) @@ -771,7 +771,7 @@ def visit_Assign(self, n: ast3.Assign) -> AssignmentStmt: def visit_AnnAssign(self, n: ast3.AnnAssign) -> AssignmentStmt: line = n.lineno if n.value is None: # always allow 'x: int' - rvalue = TempNode(AnyType(TypeOfAny.special_form), no_rhs=True) # type: Expression + rvalue: Expression = TempNode(AnyType(TypeOfAny.special_form), no_rhs=True) rvalue.line = line rvalue.column = n.col_offset else: @@ -872,7 +872,7 @@ def visit_Assert(self, n: ast3.Assert) -> AssertStmt: # Import(alias* names) def visit_Import(self, n: ast3.Import) -> Import: - names = [] # type: List[Tuple[str, Optional[str]]] + names: List[Tuple[str, Optional[str]]] = [] for alias in n.names: name = self.translate_module_id(alias.name) asname = alias.asname @@ -891,7 +891,7 @@ def visit_ImportFrom(self, n: ast3.ImportFrom) -> ImportBase: assert n.level is not None if len(n.names) == 1 and n.names[0].name == '*': mod = n.module if n.module is not None else '' - i = ImportAll(mod, n.level) # type: ImportBase + i: ImportBase = ImportAll(mod, n.level) else: i = ImportFrom(self.translate_module_id(n.module) if n.module is not None else '', n.level, @@ -1095,7 +1095,7 @@ def visit_Call(self, n: Call) -> CallExpr: # Constant(object value) -- a constant, in Python 3.8. def visit_Constant(self, n: Constant) -> Any: val = n.value - e = None # type: Any + e: Any = None if val is None: e = NameExpr('None') elif isinstance(val, str): @@ -1122,9 +1122,9 @@ def visit_Num(self, n: ast3.Num) -> Union[IntExpr, FloatExpr, ComplexExpr]: # a parent of int and float, and this causes isinstance below # to think that the complex branch is always picked. Avoid # this by throwing away the type. 
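visit_ImportFrom above annotates "i: ImportBase = ImportAll(...)" even though the initializer is the narrower ImportAll: naming the common supertype up front lets the else branch store an ImportFrom in the same variable. A sketch with hypothetical classes:

    class Shape:
        pass

    class Circle(Shape):
        pass

    class Square(Shape):
        pass

    def pick(round_one: bool) -> Shape:
        if round_one:
            shape: Shape = Circle()  # widened so the other branch also fits
        else:
            shape = Square()
        return shape
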
- val = n.n # type: object + val: object = n.n if isinstance(val, int): - e = IntExpr(val) # type: Union[IntExpr, FloatExpr, ComplexExpr] + e: Union[IntExpr, FloatExpr, ComplexExpr] = IntExpr(val) elif isinstance(val, float): e = FloatExpr(val) elif isinstance(val, complex): @@ -1206,7 +1206,7 @@ def visit_Attribute(self, n: Attribute) -> Union[MemberExpr, SuperExpr]: if (isinstance(obj, CallExpr) and isinstance(obj.callee, NameExpr) and obj.callee.name == 'super'): - e = SuperExpr(member_expr.name, obj) # type: Union[MemberExpr, SuperExpr] + e: Union[MemberExpr, SuperExpr] = SuperExpr(member_expr.name, obj) else: e = member_expr return self.set_line(e, n) @@ -1241,10 +1241,10 @@ def visit_Name(self, n: Name) -> NameExpr: # List(expr* elts, expr_context ctx) def visit_List(self, n: ast3.List) -> Union[ListExpr, TupleExpr]: - expr_list = [self.visit(e) for e in n.elts] # type: List[Expression] + expr_list: List[Expression] = [self.visit(e) for e in n.elts] if isinstance(n.ctx, ast3.Store): # [x, y] = z and (x, y) = z means exactly the same thing - e = TupleExpr(expr_list) # type: Union[ListExpr, TupleExpr] + e: Union[ListExpr, TupleExpr] = TupleExpr(expr_list) else: e = ListExpr(expr_list) return self.set_line(e, n) @@ -1284,7 +1284,7 @@ def __init__(self, self.errors = errors self.line = line self.override_column = override_column - self.node_stack = [] # type: List[AST] + self.node_stack: List[AST] = [] self.assume_str_is_unicode = assume_str_is_unicode self.is_evaluated = is_evaluated @@ -1379,9 +1379,9 @@ def visit_Call(self, e: Call) -> Type: if not constructor: self.fail("Expected arg constructor name", e.lineno, e.col_offset) - name = None # type: Optional[str] + name: Optional[str] = None default_type = AnyType(TypeOfAny.special_form) - typ = default_type # type: Type + typ: Type = default_type for i, arg in enumerate(e.args): if i == 0: converted = self.visit(arg) @@ -1490,7 +1490,7 @@ def numeric_type(self, value: object, n: AST) -> Type: # to think that the complex branch is always picked. Avoid # this by throwing away the type. if isinstance(value, int): - numeric_value = value # type: Optional[int] + numeric_value: Optional[int] = value type_name = 'builtins.int' else: # Other kinds of numbers (floats, complex) are not valid parameters for @@ -1525,7 +1525,7 @@ def visit_Str(self, n: Str) -> Type: # this method doesn't actually ever run.) We can't just do # an attribute access with a `# type: ignore` because it would be # unused on < 3.8. - kind = getattr(n, 'kind') # type: str # noqa + kind: str = getattr(n, "kind") # noqa if 'u' in kind or self.assume_str_is_unicode: return parse_type_string(n.s, 'builtins.unicode', self.line, n.col_offset, @@ -1543,7 +1543,7 @@ def visit_Bytes(self, n: Bytes) -> Type: # Subscript(expr value, expr slice, expr_context ctx) # Python 3.9 and later def visit_Subscript(self, n: ast3.Subscript) -> Type: if sys.version_info >= (3, 9): # Really 3.9a5 or later - sliceval = n.slice # type: Any + sliceval: Any = n.slice if (isinstance(sliceval, ast3.Slice) or (isinstance(sliceval, ast3.Tuple) and any(isinstance(x, ast3.Slice) for x in sliceval.elts))): diff --git a/mypy/fastparse2.py b/mypy/fastparse2.py index 3473253a8aaa..687343c58a80 100644 --- a/mypy/fastparse2.py +++ b/mypy/fastparse2.py @@ -82,11 +82,11 @@ # There is no way to create reasonable fallbacks at this stage, # they must be patched later. 
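visit_Num above keeps the deliberate widening "val: object = n.n": as the surrounding comment explains, typeshed's numeric tower would otherwise make the complex branch of the isinstance ladder look always true, so the annotation throws the precise type away and lets isinstance re-narrow it. The same trick in isolation, with an illustrative function:

    def classify(number: complex) -> str:
        # Widen on purpose; the isinstance checks below narrow from object.
        val: object = number
        if isinstance(val, int):
            return "int"
        elif isinstance(val, float):
            return "float"
        else:
            return "complex"
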
-MISSING_FALLBACK = FakeInfo("fallback can't be filled out until semanal") # type: Final -_dummy_fallback = Instance(MISSING_FALLBACK, [], -1) # type: Final +MISSING_FALLBACK: Final = FakeInfo("fallback can't be filled out until semanal") +_dummy_fallback: Final = Instance(MISSING_FALLBACK, [], -1) -TYPE_COMMENT_SYNTAX_ERROR = 'syntax error in type comment' # type: Final -TYPE_COMMENT_AST_ERROR = 'invalid type comment' # type: Final +TYPE_COMMENT_SYNTAX_ERROR: Final = "syntax error in type comment" +TYPE_COMMENT_AST_ERROR: Final = "invalid type comment" def parse(source: Union[str, bytes], @@ -144,8 +144,8 @@ def __init__(self, options: Options, errors: Errors) -> None: # 'C' for class, 'F' for function - self.class_and_function_stack = [] # type: List[Literal['C', 'F']] - self.imports = [] # type: List[ImportBase] + self.class_and_function_stack: List[Literal["C", "F"]] = [] + self.imports: List[ImportBase] = [] self.options = options self.errors = errors @@ -170,9 +170,9 @@ def __init__(self, self.unicode_literals = False # Cache of visit_X methods keyed by type of visited object - self.visitor_cache = {} # type: Dict[type, Callable[[Optional[AST]], Any]] + self.visitor_cache: Dict[type, Callable[[Optional[AST]], Any]] = {} - self.type_ignores = {} # type: Dict[int, List[str]] + self.type_ignores: Dict[int, List[str]] = {} def fail(self, msg: str, line: int, column: int, blocker: bool = True) -> None: if blocker or not self.options.ignore_errors: @@ -195,7 +195,7 @@ def set_line(self, node: N, n: Union[ast27.expr, ast27.stmt, ast27.ExceptHandler return node def translate_expr_list(self, l: Sequence[AST]) -> List[Expression]: - res = [] # type: List[Expression] + res: List[Expression] = [] for e in l: exp = self.visit(e) assert isinstance(exp, Expression) @@ -219,7 +219,7 @@ def translate_stmt_list(self, mark_block_unreachable(block) return [block] - res = [] # type: List[Statement] + res: List[Statement] = [] for stmt in stmts: node = self.visit(stmt) assert isinstance(node, Statement) @@ -241,7 +241,7 @@ def translate_type_comment(self, n: ast27.stmt, self.type_ignores[lineno] = extra_ignore return typ - op_map = { + op_map: Final[Dict[typing.Type[AST], str]] = { ast27.Add: '+', ast27.Sub: '-', ast27.Mult: '*', @@ -254,7 +254,7 @@ def translate_type_comment(self, n: ast27.stmt, ast27.BitXor: '^', ast27.BitAnd: '&', ast27.FloorDiv: '//' - } # type: Final[Dict[typing.Type[AST], str]] + } def from_operator(self, op: ast27.operator) -> str: op_name = ASTConverter.op_map.get(type(op)) @@ -265,7 +265,7 @@ def from_operator(self, op: ast27.operator) -> str: else: return op_name - comp_op_map = { + comp_op_map: Final[Dict[typing.Type[AST], str]] = { ast27.Gt: '>', ast27.Lt: '<', ast27.Eq: '==', @@ -276,7 +276,7 @@ def from_operator(self, op: ast27.operator) -> str: ast27.IsNot: 'is not', ast27.In: 'in', ast27.NotIn: 'not in' - } # type: Final[Dict[typing.Type[AST], str]] + } def from_comp_operator(self, op: ast27.cmpop) -> str: op_name = ASTConverter.comp_op_map.get(type(op)) @@ -299,9 +299,9 @@ def as_required_block(self, stmts: List[ast27.stmt], lineno: int) -> Block: return b def fix_function_overloads(self, stmts: List[Statement]) -> List[Statement]: - ret = [] # type: List[Statement] - current_overload = [] # type: List[OverloadPart] - current_overload_name = None # type: Optional[str] + ret: List[Statement] = [] + current_overload: List[OverloadPart] = [] + current_overload_name: Optional[str] = None for stmt in stmts: if (current_overload_name is not None and isinstance(stmt, (Decorator, 
FuncDef)) @@ -371,12 +371,12 @@ def visit_FunctionDef(self, n: ast27.FunctionDef) -> Statement: args, decompose_stmts = self.transform_args(n.args, lineno) arg_kinds = [arg.kind for arg in args] - arg_names = [arg.variable.name for arg in args] # type: List[Optional[str]] + arg_names: List[Optional[str]] = [arg.variable.name for arg in args] arg_names = [None if argument_elide_name(name) else name for name in arg_names] if special_function_elide_names(n.name): arg_names = [None] * len(arg_names) - arg_types = [] # type: List[Optional[Type]] + arg_types: List[Optional[Type]] = [] type_comment = n.type_comment if (n.decorator_list and any(is_no_type_check_decorator(d) for d in n.decorator_list)): arg_types = [None] * len(args) @@ -460,7 +460,7 @@ def visit_FunctionDef(self, n: ast27.FunctionDef) -> Statement: func_def.body.set_line(func_def.get_line()) dec = Decorator(func_def, self.translate_expr_list(n.decorator_list), var) dec.set_line(lineno, n.col_offset) - retval = dec # type: Statement + retval: Statement = dec else: # Overrides set_line -- can't use self.set_line func_def.set_line(lineno, n.col_offset) @@ -480,19 +480,19 @@ def transform_args(self, n: ast27.arguments, line: int, ) -> Tuple[List[Argument], List[Statement]]: - type_comments = n.type_comments # type: Sequence[Optional[str]] + type_comments: Sequence[Optional[str]] = n.type_comments converter = TypeConverter(self.errors, line=line, assume_str_is_unicode=self.unicode_literals) - decompose_stmts = [] # type: List[Statement] + decompose_stmts: List[Statement] = [] n_args = n.args args = [(self.convert_arg(i, arg, line, decompose_stmts), self.get_type(i, type_comments, converter)) for i, arg in enumerate(n_args)] defaults = self.translate_expr_list(n.defaults) - names = [name for arg in n_args for name in self.extract_names(arg)] # type: List[str] + names: List[str] = [name for arg in n_args for name in self.extract_names(arg)] - new_args = [] # type: List[Argument] + new_args: List[Argument] = [] num_no_defaults = len(args) - len(defaults) # positional arguments without defaults for a, annotation in args[:num_no_defaults]: @@ -558,7 +558,7 @@ def get_type(self, typ = converter.visit_raw_str(comment) extra_ignore = TYPE_IGNORE_PATTERN.match(comment) if extra_ignore: - tag = cast(Any, extra_ignore).group(1) # type: Optional[str] + tag: Optional[str] = cast(Any, extra_ignore).group(1) ignored = parse_type_ignore_tag(tag) if ignored is None: self.fail(INVALID_TYPE_IGNORE, converter.line, -1) @@ -691,7 +691,7 @@ def try_handler(self, orelse: List[ast27.stmt], finalbody: List[ast27.stmt], lineno: int) -> TryStmt: - vs = [] # type: List[Optional[NameExpr]] + vs: List[Optional[NameExpr]] = [] for item in handlers: if item.name is None: vs.append(None) @@ -732,7 +732,7 @@ def visit_Assert(self, n: ast27.Assert) -> AssertStmt: # Import(alias* names) def visit_Import(self, n: ast27.Import) -> Import: - names = [] # type: List[Tuple[str, Optional[str]]] + names: List[Tuple[str, Optional[str]]] = [] for alias in n.names: name = self.translate_module_id(alias.name) asname = alias.asname @@ -751,7 +751,7 @@ def visit_ImportFrom(self, n: ast27.ImportFrom) -> ImportBase: assert n.level is not None if len(n.names) == 1 and n.names[0].name == '*': mod = n.module if n.module is not None else '' - i = ImportAll(mod, n.level) # type: ImportBase + i: ImportBase = ImportAll(mod, n.level) else: module_id = self.translate_module_id(n.module) if n.module is not None else '' i = ImportFrom(module_id, n.level, [(a.name, a.asname) for a in n.names]) @@ 
-922,9 +922,9 @@ def visit_Compare(self, n: ast27.Compare) -> ComparisonExpr: # Call(expr func, expr* args, keyword* keywords) # keyword = (identifier? arg, expr value) def visit_Call(self, n: Call) -> CallExpr: - arg_types = [] # type: List[ast27.expr] - arg_kinds = [] # type: List[int] - signature = [] # type: List[Optional[str]] + arg_types: List[ast27.expr] = [] + arg_kinds: List[int] = [] + signature: List[Optional[str]] = [] args = n.args arg_types.extend(args) @@ -958,14 +958,14 @@ def visit_Num(self, n: ast27.Num) -> Expression: # a parent of int and float, and this causes isinstance below # to think that the complex branch is always picked. Avoid # this by throwing away the type. - value = n.n # type: object + value: object = n.n is_inverse = False if str(n.n).startswith('-'): # Hackish because of complex. value = -n.n is_inverse = True if isinstance(value, int): - expr = IntExpr(value) # type: Expression + expr: Expression = IntExpr(value) elif isinstance(value, float): expr = FloatExpr(value) elif isinstance(value, complex): @@ -990,7 +990,7 @@ def visit_Str(self, n: ast27.Str) -> Expression: # to be unicode. if isinstance(n.s, bytes): contents = bytes_to_human_readable_repr(n.s) - e = StrExpr(contents, from_python_3=False) # type: Union[StrExpr, UnicodeExpr] + e: Union[StrExpr, UnicodeExpr] = StrExpr(contents, from_python_3=False) return self.set_line(e, n) else: e = UnicodeExpr(n.s) @@ -1011,7 +1011,7 @@ def visit_Attribute(self, n: Attribute) -> Expression: if (isinstance(obj, CallExpr) and isinstance(obj.callee, NameExpr) and obj.callee.name == 'super'): - e = SuperExpr(member_expr.name, obj) # type: Expression + e: Expression = SuperExpr(member_expr.name, obj) else: e = member_expr return self.set_line(e, n) @@ -1033,10 +1033,10 @@ def visit_Name(self, n: Name) -> NameExpr: # List(expr* elts, expr_context ctx) def visit_List(self, n: ast27.List) -> Union[ListExpr, TupleExpr]: - expr_list = [self.visit(e) for e in n.elts] # type: List[Expression] + expr_list: List[Expression] = [self.visit(e) for e in n.elts] if isinstance(n.ctx, ast27.Store): # [x, y] = z and (x, y) = z means exactly the same thing - e = TupleExpr(expr_list) # type: Union[ListExpr, TupleExpr] + e: Union[ListExpr, TupleExpr] = TupleExpr(expr_list) else: e = ListExpr(expr_list) return self.set_line(e, n) diff --git a/mypy/find_sources.py b/mypy/find_sources.py index 4f50d8ff52b2..a44648f261ed 100644 --- a/mypy/find_sources.py +++ b/mypy/find_sources.py @@ -10,7 +10,7 @@ from mypy.fscache import FileSystemCache from mypy.options import Options -PY_EXTENSIONS = tuple(PYTHON_EXTENSIONS) # type: Final +PY_EXTENSIONS: Final = tuple(PYTHON_EXTENSIONS) class InvalidSourceList(Exception): @@ -101,7 +101,7 @@ def is_explicit_package_base(self, path: str) -> bool: def find_sources_in_dir(self, path: str) -> List[BuildSource]: sources = [] - seen = set() # type: Set[str] + seen: Set[str] = set() names = sorted(self.fscache.listdir(path), key=keyfunc) for name in names: # Skip certain names altogether diff --git a/mypy/fixup.py b/mypy/fixup.py index f995ad36f0f6..6bfdd474a368 100644 --- a/mypy/fixup.py +++ b/mypy/fixup.py @@ -28,7 +28,7 @@ def fixup_module(tree: MypyFile, modules: Dict[str, MypyFile], # TODO: Fix up .info when deserializing, i.e. much earlier. 
class NodeFixer(NodeVisitor[None]): - current_info = None # type: Optional[TypeInfo] + current_info: Optional[TypeInfo] = None def __init__(self, modules: Dict[str, MypyFile], allow_missing: bool) -> None: self.modules = modules @@ -306,7 +306,7 @@ def lookup_qualified_stnode(modules: Dict[str, MypyFile], name: str, return lookup_fully_qualified(name, modules, raise_on_missing=not allow_missing) -_SUGGESTION = "" # type: Final +_SUGGESTION: Final = "" def missing_info(modules: Dict[str, MypyFile]) -> TypeInfo: diff --git a/mypy/fscache.py b/mypy/fscache.py index aa688868cc71..37f50f6228a4 100644 --- a/mypy/fscache.py +++ b/mypy/fscache.py @@ -41,7 +41,7 @@ class FileSystemCache: def __init__(self) -> None: # The package root is not flushed with the caches. # It is set by set_package_root() below. - self.package_root = [] # type: List[str] + self.package_root: List[str] = [] self.flush() def set_package_root(self, package_root: List[str]) -> None: @@ -49,16 +49,16 @@ def set_package_root(self, package_root: List[str]) -> None: def flush(self) -> None: """Start another transaction and empty all caches.""" - self.stat_cache = {} # type: Dict[str, os.stat_result] - self.stat_error_cache = {} # type: Dict[str, OSError] - self.listdir_cache = {} # type: Dict[str, List[str]] - self.listdir_error_cache = {} # type: Dict[str, OSError] - self.isfile_case_cache = {} # type: Dict[str, bool] - self.exists_case_cache = {} # type: Dict[str, bool] - self.read_cache = {} # type: Dict[str, bytes] - self.read_error_cache = {} # type: Dict[str, Exception] - self.hash_cache = {} # type: Dict[str, str] - self.fake_package_cache = set() # type: Set[str] + self.stat_cache: Dict[str, os.stat_result] = {} + self.stat_error_cache: Dict[str, OSError] = {} + self.listdir_cache: Dict[str, List[str]] = {} + self.listdir_error_cache: Dict[str, OSError] = {} + self.isfile_case_cache: Dict[str, bool] = {} + self.exists_case_cache: Dict[str, bool] = {} + self.read_cache: Dict[str, bytes] = {} + self.read_error_cache: Dict[str, Exception] = {} + self.hash_cache: Dict[str, str] = {} + self.fake_package_cache: Set[str] = set() def stat(self, path: str) -> os.stat_result: if path in self.stat_cache: @@ -144,7 +144,7 @@ def _fake_init(self, path: str) -> os.stat_result: # Get stat result as a sequence so we can modify it. # (Alas, typeshed's os.stat_result is not a sequence yet.) 
tpl = tuple(st) # type: ignore[arg-type, var-annotated] - seq = list(tpl) # type: List[float] + seq: List[float] = list(tpl) seq[stat.ST_MODE] = stat.S_IFREG | 0o444 seq[stat.ST_INO] = 1 seq[stat.ST_NLINK] = 1 diff --git a/mypy/fswatcher.py b/mypy/fswatcher.py index 7ab78b2c4ed3..80af313e8227 100644 --- a/mypy/fswatcher.py +++ b/mypy/fswatcher.py @@ -29,8 +29,8 @@ class FileSystemWatcher: def __init__(self, fs: FileSystemCache) -> None: self.fs = fs - self._paths = set() # type: Set[str] - self._file_data = {} # type: Dict[str, Optional[FileData]] + self._paths: Set[str] = set() + self._file_data: Dict[str, Optional[FileData]] = {} def dump_file_data(self) -> Dict[str, Tuple[float, int, str]]: return {k: v for k, v in self._file_data.items() if v is not None} diff --git a/mypy/gclogger.py b/mypy/gclogger.py index 650ef2f04930..1f36225461de 100644 --- a/mypy/gclogger.py +++ b/mypy/gclogger.py @@ -8,7 +8,7 @@ class GcLogger: """Context manager to log GC stats and overall time.""" def __enter__(self) -> 'GcLogger': - self.gc_start_time = None # type: Optional[float] + self.gc_start_time: Optional[float] = None self.gc_time = 0.0 self.gc_calls = 0 self.gc_collected = 0 diff --git a/mypy/indirection.py b/mypy/indirection.py index aff942ce9393..952bccc36c06 100644 --- a/mypy/indirection.py +++ b/mypy/indirection.py @@ -19,8 +19,8 @@ class TypeIndirectionVisitor(TypeVisitor[Set[str]]): """Returns all module references within a particular type.""" def __init__(self) -> None: - self.cache = {} # type: Dict[types.Type, Set[str]] - self.seen_aliases = set() # type: Set[types.TypeAliasType] + self.cache: Dict[types.Type, Set[str]] = {} + self.seen_aliases: Set[types.TypeAliasType] = set() def find_modules(self, typs: Iterable[types.Type]) -> Set[str]: self.seen_aliases.clear() @@ -28,7 +28,7 @@ def find_modules(self, typs: Iterable[types.Type]) -> Set[str]: def _visit(self, typ_or_typs: Union[types.Type, Iterable[types.Type]]) -> Set[str]: typs = [typ_or_typs] if isinstance(typ_or_typs, types.Type) else typ_or_typs - output = set() # type: Set[str] + output: Set[str] = set() for typ in typs: if isinstance(typ, types.TypeAliasType): # Avoid infinite recursion for recursive type aliases. diff --git a/mypy/ipc.py b/mypy/ipc.py index 83d3ca787329..8a6a310d7ff8 100644 --- a/mypy/ipc.py +++ b/mypy/ipc.py @@ -23,8 +23,8 @@ _IPCHandle = int kernel32 = ctypes.windll.kernel32 - DisconnectNamedPipe = kernel32.DisconnectNamedPipe # type: Callable[[_IPCHandle], int] - FlushFileBuffers = kernel32.FlushFileBuffers # type: Callable[[_IPCHandle], int] + DisconnectNamedPipe: Callable[[_IPCHandle], int] = kernel32.DisconnectNamedPipe + FlushFileBuffers: Callable[[_IPCHandle], int] = kernel32.FlushFileBuffers else: import socket _IPCHandle = socket.socket @@ -42,7 +42,7 @@ class IPCBase: and writing. 
""" - connection = None # type: _IPCHandle + connection: _IPCHandle def __init__(self, name: str, timeout: Optional[float]) -> None: self.name = name @@ -175,7 +175,7 @@ def __exit__(self, class IPCServer(IPCBase): - BUFFER_SIZE = 2**16 # type: Final + BUFFER_SIZE: Final = 2 ** 16 def __init__(self, name: str, timeout: Optional[float] = None) -> None: if sys.platform == 'win32': diff --git a/mypy/join.py b/mypy/join.py index 23838ffad5ed..bad256ccf11c 100644 --- a/mypy/join.py +++ b/mypy/join.py @@ -22,7 +22,7 @@ class InstanceJoiner: def __init__(self) -> None: - self.seen_instances = [] # type: List[Tuple[Instance, Instance]] + self.seen_instances: List[Tuple[Instance, Instance]] = [] def join_instances(self, t: Instance, s: Instance) -> ProperType: if (t, s) in self.seen_instances or (s, t) in self.seen_instances: @@ -36,13 +36,13 @@ def join_instances(self, t: Instance, s: Instance) -> ProperType: # potentially different arguments). # Combine type arguments. - args = [] # type: List[Type] + args: List[Type] = [] # N.B: We use zip instead of indexing because the lengths might have # mismatches during daemon reprocessing. for ta, sa, type_var in zip(t.args, s.args, t.type.defn.type_vars): ta_proper = get_proper_type(ta) sa_proper = get_proper_type(sa) - new_type = None # type: Optional[Type] + new_type: Optional[Type] = None if isinstance(ta_proper, AnyType): new_type = AnyType(TypeOfAny.from_another_any, ta_proper) elif isinstance(sa_proper, AnyType): @@ -69,7 +69,7 @@ def join_instances(self, t: Instance, s: Instance) -> ProperType: return object_from_instance(t) assert new_type is not None args.append(new_type) - result = Instance(t.type, args) # type: ProperType + result: ProperType = Instance(t.type, args) elif t.type.bases and is_subtype_ignoring_tvars(t, s): result = self.join_instances_via_supertype(t, s) else: @@ -91,7 +91,7 @@ def join_instances_via_supertype(self, t: Instance, s: Instance) -> ProperType: # Compute the "best" supertype of t when joined with s. # The definition of "best" may evolve; for now it is the one with # the longest MRO. Ties are broken by using the earlier base. - best = None # type: Optional[ProperType] + best: Optional[ProperType] = None for base in t.type.bases: mapped = map_instance_to_supertype(t, base.type) res = self.join_instances(mapped, s) @@ -254,7 +254,7 @@ def visit_instance(self, t: Instance) -> ProperType: if self.instance_joiner is None: self.instance_joiner = InstanceJoiner() nominal = self.instance_joiner.join_instances(t, self.s) - structural = None # type: Optional[Instance] + structural: Optional[Instance] = None if t.type.is_protocol and is_protocol_implementation(self.s, t): structural = t elif self.s.type.is_protocol and is_protocol_implementation(t, self.s): @@ -335,7 +335,7 @@ def visit_overloaded(self, t: Overloaded) -> ProperType: # Ov([Any, int] -> Any, [Any, int] -> Any) # # TODO: Consider more cases of callable subtyping. - result = [] # type: List[CallableType] + result: List[CallableType] = [] s = self.s if isinstance(s, FunctionLike): # The interesting case where both types are function types. 
@@ -378,7 +378,7 @@ def visit_tuple_type(self, t: TupleType) -> ProperType: mypy.typeops.tuple_fallback(t)) assert isinstance(fallback, Instance) if self.s.length() == t.length(): - items = [] # type: List[Type] + items: List[Type] = [] for i in range(t.length()): items.append(self.join(t.items[i], self.s.items[i])) return TupleType(items, fallback) @@ -481,7 +481,8 @@ def is_similar_callables(t: CallableType, s: CallableType) -> bool: def join_similar_callables(t: CallableType, s: CallableType) -> CallableType: from mypy.meet import meet_types - arg_types = [] # type: List[Type] + + arg_types: List[Type] = [] for i in range(len(t.arg_types)): arg_types.append(meet_types(t.arg_types[i], s.arg_types[i])) # TODO in combine_similar_callables also applies here (names and kinds) @@ -499,7 +500,7 @@ def join_similar_callables(t: CallableType, s: CallableType) -> CallableType: def combine_similar_callables(t: CallableType, s: CallableType) -> CallableType: - arg_types = [] # type: List[Type] + arg_types: List[Type] = [] for i in range(len(t.arg_types)): arg_types.append(join_types(t.arg_types[i], s.arg_types[i])) # TODO kinds and argument names diff --git a/mypy/literals.py b/mypy/literals.py index 95872cbd9fca..16288433b460 100644 --- a/mypy/literals.py +++ b/mypy/literals.py @@ -125,7 +125,7 @@ def visit_op_expr(self, e: OpExpr) -> Key: return ('Binary', e.op, literal_hash(e.left), literal_hash(e.right)) def visit_comparison_expr(self, e: ComparisonExpr) -> Key: - rest = tuple(e.operators) # type: Any + rest: Any = tuple(e.operators) rest += tuple(literal_hash(o) for o in e.operands) return ('Comparison',) + rest @@ -134,7 +134,7 @@ def visit_unary_expr(self, e: UnaryExpr) -> Key: def seq_expr(self, e: Union[ListExpr, TupleExpr, SetExpr], name: str) -> Optional[Key]: if all(literal(x) == LITERAL_YES for x in e.items): - rest = tuple(literal_hash(x) for x in e.items) # type: Any + rest: Any = tuple(literal_hash(x) for x in e.items) return (name,) + rest return None @@ -143,9 +143,10 @@ def visit_list_expr(self, e: ListExpr) -> Optional[Key]: def visit_dict_expr(self, e: DictExpr) -> Optional[Key]: if all(a and literal(a) == literal(b) == LITERAL_YES for a, b in e.items): - rest = tuple((literal_hash(a) if a else None, literal_hash(b)) - for a, b in e.items) # type: Any - return ('Dict',) + rest + rest: Any = tuple( + (literal_hash(a) if a else None, literal_hash(b)) for a, b in e.items + ) + return ("Dict",) + rest return None def visit_tuple_expr(self, e: TupleExpr) -> Optional[Key]: @@ -241,4 +242,4 @@ def visit_temp_node(self, e: TempNode) -> None: return None -_hasher = _Hasher() # type: Final +_hasher: Final = _Hasher() diff --git a/mypy/main.py b/mypy/main.py index ac71d035c75b..2931b9848bbb 100644 --- a/mypy/main.py +++ b/mypy/main.py @@ -28,8 +28,8 @@ from mypy.version import __version__ -orig_stat = os.stat # type: Final -MEM_PROFILE = False # type: Final # If True, dump memory profile +orig_stat: Final = os.stat +MEM_PROFILE: Final = False # If True, dump memory profile def stat_proxy(path: str) -> os.stat_result: @@ -208,11 +208,11 @@ def _fill_text(self, text: str, width: int, indent: str) -> str: # Define pairs of flag prefixes with inverse meaning. 
-flag_prefix_pairs = [ +flag_prefix_pairs: Final = [ ('allow', 'disallow'), ('show', 'hide'), -] # type: Final -flag_prefix_map = {} # type: Final[Dict[str, str]] +] +flag_prefix_map: Final[Dict[str, str]] = {} for a, b in flag_prefix_pairs: flag_prefix_map[a] = b flag_prefix_map[b] = a @@ -282,11 +282,11 @@ def infer_python_executable(options: Options, options.python_executable = python_executable -HEADER = """%(prog)s [-h] [-v] [-V] [more options; see below] - [-m MODULE] [-p PACKAGE] [-c PROGRAM_TEXT] [files ...]""" # type: Final +HEADER: Final = """%(prog)s [-h] [-v] [-V] [more options; see below] + [-m MODULE] [-p PACKAGE] [-c PROGRAM_TEXT] [files ...]""" -DESCRIPTION = """ +DESCRIPTION: Final = """ Mypy is a program that will type check your Python code. Pass in any files or folders you want to type check. Mypy will @@ -307,11 +307,11 @@ def infer_python_executable(options: Options, command line flags. For more details, see: - https://mypy.readthedocs.io/en/stable/config_file.html -""" # type: Final +""" -FOOTER = """Environment variables: +FOOTER: Final = """Environment variables: Define MYPYPATH for additional module search path entries. - Define MYPY_CACHE_DIR to override configuration cache_dir path.""" # type: Final + Define MYPY_CACHE_DIR to override configuration cache_dir path.""" class CapturableArgumentParser(argparse.ArgumentParser): @@ -434,8 +434,8 @@ def process_options(args: List[str], stdout=stdout, stderr=stderr) - strict_flag_names = [] # type: List[str] - strict_flag_assignments = [] # type: List[Tuple[str, bool]] + strict_flag_names: List[str] = [] + strict_flag_assignments: List[Tuple[str, bool]] = [] def add_invertible_flag(flag: str, *, diff --git a/mypy/maptype.py b/mypy/maptype.py index 5e58754655ef..1216c6015378 100644 --- a/mypy/maptype.py +++ b/mypy/maptype.py @@ -28,11 +28,11 @@ def map_instance_to_supertypes(instance: Instance, supertype: TypeInfo) -> List[Instance]: # FIX: Currently we should only have one supertype per interface, so no # need to return an array - result = [] # type: List[Instance] + result: List[Instance] = [] for path in class_derivation_paths(instance.type, supertype): types = [instance] for sup in path: - a = [] # type: List[Instance] + a: List[Instance] = [] for t in types: a.extend(map_instance_to_direct_supertypes(t, sup)) types = a @@ -56,7 +56,7 @@ def class_derivation_paths(typ: TypeInfo, """ # FIX: Currently we might only ever have a single path, so this could be # simplified - result = [] # type: List[List[TypeInfo]] + result: List[List[TypeInfo]] = [] for base in typ.bases: btype = base.type @@ -74,7 +74,7 @@ def map_instance_to_direct_supertypes(instance: Instance, supertype: TypeInfo) -> List[Instance]: # FIX: There should only be one supertypes, always. typ = instance.type - result = [] # type: List[Instance] + result: List[Instance] = [] for b in typ.bases: if b.type == supertype: diff --git a/mypy/meet.py b/mypy/meet.py index 943219d97e24..032f38f49fa8 100644 --- a/mypy/meet.py +++ b/mypy/meet.py @@ -444,7 +444,7 @@ def visit_any(self, t: AnyType) -> ProperType: def visit_union_type(self, t: UnionType) -> ProperType: if isinstance(self.s, UnionType): - meets = [] # type: List[Type] + meets: List[Type] = [] for x in t.items: for y in self.s.items: meets.append(meet_types(x, y)) @@ -493,7 +493,7 @@ def visit_instance(self, t: Instance) -> ProperType: if is_subtype(t, self.s) or is_subtype(self.s, t): # Combine type arguments. We could have used join below # equivalently. 
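# An illustrative, standalone sketch (names are hypothetical, not part
# of mypy) of the local-variable pattern this patch converts throughout:
# a ``# type:`` comment on an initializer becomes a PEP 526 annotated
# assignment with the same meaning to the type checker.
from typing import List, Optional

args_demo: List[int] = []        # was: args_demo = []    # type: List[int]
best_demo: Optional[str] = None  # was: best_demo = None  # type: Optional[str]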
- args = [] # type: List[Type] + args: List[Type] = [] # N.B: We use zip instead of indexing because the lengths might have # mismatches during daemon reprocessing. for ta, sia in zip(t.args, si.args): @@ -580,7 +580,7 @@ def visit_overloaded(self, t: Overloaded) -> ProperType: def visit_tuple_type(self, t: TupleType) -> ProperType: if isinstance(self.s, TupleType) and self.s.length() == t.length(): - items = [] # type: List[Type] + items: List[Type] = [] for i in range(t.length()): items.append(self.meet(t.items[i], self.s.items[i])) # TODO: What if the fallbacks are different? @@ -600,7 +600,7 @@ def visit_typeddict_type(self, t: TypedDictType) -> ProperType: if (not is_equivalent(l, r) or (name in t.required_keys) != (name in self.s.required_keys)): return self.default(self.s) - item_list = [] # type: List[Tuple[str, Type]] + item_list: List[Tuple[str, Type]] = [] for (item_name, s_item_type, t_item_type) in self.s.zipall(t): if s_item_type is not None: item_list.append((item_name, s_item_type)) @@ -664,7 +664,8 @@ def default(self, typ: Type) -> ProperType: def meet_similar_callables(t: CallableType, s: CallableType) -> CallableType: from mypy.join import join_types - arg_types = [] # type: List[Type] + + arg_types: List[Type] = [] for i in range(len(t.arg_types)): arg_types.append(join_types(t.arg_types[i], s.arg_types[i])) # TODO in combine_similar_callables also applies here (names and kinds) diff --git a/mypy/memprofile.py b/mypy/memprofile.py index 9ed2c4afee06..5052d0418994 100644 --- a/mypy/memprofile.py +++ b/mypy/memprofile.py @@ -51,8 +51,8 @@ def collect_memory_stats() -> Tuple[Dict[str, int], if isinstance(x, tuple): inferred[id(x)] = '%s (tuple)' % n - freqs = {} # type: Dict[str, int] - memuse = {} # type: Dict[str, int] + freqs: Dict[str, int] = {} + memuse: Dict[str, int] = {} for obj in objs: if id(obj) in inferred: name = inferred[id(obj)] diff --git a/mypy/message_registry.py b/mypy/message_registry.py index 187046519f9a..48b53336f15d 100644 --- a/mypy/message_registry.py +++ b/mypy/message_registry.py @@ -9,138 +9,151 @@ from typing_extensions import Final # Invalid types -INVALID_TYPE_RAW_ENUM_VALUE = "Invalid type: try using Literal[{}.{}] instead?" # type: Final +INVALID_TYPE_RAW_ENUM_VALUE: Final = "Invalid type: try using Literal[{}.{}] instead?" 
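# An illustrative aside (constants here are hypothetical, not part of
# this module): under PEP 591 a bare ``Final`` annotation takes its type
# from the initializer, so the old ``# type: Final`` comments convert
# without spelling out ``Final[str]``.
from typing_extensions import Final

GREETING_EXAMPLE: Final = "hello"  # inferred as str; reassignment is an error
COUNT_EXAMPLE: Final[int] = 2      # an explicit type argument is still allowed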
# Type checker error message constants -NO_RETURN_VALUE_EXPECTED = 'No return value expected' # type: Final -MISSING_RETURN_STATEMENT = 'Missing return statement' # type: Final -INVALID_IMPLICIT_RETURN = 'Implicit return in function which does not return' # type: Final -INCOMPATIBLE_RETURN_VALUE_TYPE = 'Incompatible return value type' # type: Final -RETURN_VALUE_EXPECTED = 'Return value expected' # type: Final -NO_RETURN_EXPECTED = 'Return statement in function which does not return' # type: Final -INVALID_EXCEPTION = 'Exception must be derived from BaseException' # type: Final -INVALID_EXCEPTION_TYPE = 'Exception type must be derived from BaseException' # type: Final -RETURN_IN_ASYNC_GENERATOR = '"return" with value in async generator is not allowed' # type: Final -INVALID_RETURN_TYPE_FOR_GENERATOR = \ - 'The return type of a generator function should be "Generator"' \ - ' or one of its supertypes' # type: Final -INVALID_RETURN_TYPE_FOR_ASYNC_GENERATOR = \ - 'The return type of an async generator function should be "AsyncGenerator" or one of its ' \ - 'supertypes' # type: Final -INVALID_GENERATOR_RETURN_ITEM_TYPE = \ - 'The return type of a generator function must be None in' \ - ' its third type parameter in Python 2' # type: Final -YIELD_VALUE_EXPECTED = 'Yield value expected' # type: Final -INCOMPATIBLE_TYPES = 'Incompatible types' # type: Final -INCOMPATIBLE_TYPES_IN_ASSIGNMENT = 'Incompatible types in assignment' # type: Final -INCOMPATIBLE_REDEFINITION = 'Incompatible redefinition' # type: Final -INCOMPATIBLE_TYPES_IN_AWAIT = 'Incompatible types in "await"' # type: Final -INCOMPATIBLE_TYPES_IN_ASYNC_WITH_AENTER = \ - 'Incompatible types in "async with" for "__aenter__"' # type: Final -INCOMPATIBLE_TYPES_IN_ASYNC_WITH_AEXIT = \ - 'Incompatible types in "async with" for "__aexit__"' # type: Final -INCOMPATIBLE_TYPES_IN_ASYNC_FOR = 'Incompatible types in "async for"' # type: Final +NO_RETURN_VALUE_EXPECTED: Final = "No return value expected" +MISSING_RETURN_STATEMENT: Final = "Missing return statement" +INVALID_IMPLICIT_RETURN: Final = "Implicit return in function which does not return" +INCOMPATIBLE_RETURN_VALUE_TYPE: Final = "Incompatible return value type" +RETURN_VALUE_EXPECTED: Final = "Return value expected" +NO_RETURN_EXPECTED: Final = "Return statement in function which does not return" +INVALID_EXCEPTION: Final = "Exception must be derived from BaseException" +INVALID_EXCEPTION_TYPE: Final = "Exception type must be derived from BaseException" +RETURN_IN_ASYNC_GENERATOR: Final = '"return" with value in async generator is not allowed' +INVALID_RETURN_TYPE_FOR_GENERATOR: Final = ( + 'The return type of a generator function should be "Generator"' " or one of its supertypes" +) +INVALID_RETURN_TYPE_FOR_ASYNC_GENERATOR: Final = ( + 'The return type of an async generator function should be "AsyncGenerator" or one of its ' + "supertypes" +) +INVALID_GENERATOR_RETURN_ITEM_TYPE: Final = ( + "The return type of a generator function must be None in" + " its third type parameter in Python 2" +) +YIELD_VALUE_EXPECTED: Final = "Yield value expected" +INCOMPATIBLE_TYPES: Final = "Incompatible types" +INCOMPATIBLE_TYPES_IN_ASSIGNMENT: Final = "Incompatible types in assignment" +INCOMPATIBLE_REDEFINITION: Final = "Incompatible redefinition" +INCOMPATIBLE_TYPES_IN_AWAIT: Final = 'Incompatible types in "await"' +INCOMPATIBLE_TYPES_IN_ASYNC_WITH_AENTER: Final = ( + 'Incompatible types in "async with" for "__aenter__"' +) +INCOMPATIBLE_TYPES_IN_ASYNC_WITH_AEXIT: Final = ( + 'Incompatible types in 
"async with" for "__aexit__"' +) +INCOMPATIBLE_TYPES_IN_ASYNC_FOR: Final = 'Incompatible types in "async for"' -INCOMPATIBLE_TYPES_IN_YIELD = 'Incompatible types in "yield"' # type: Final -INCOMPATIBLE_TYPES_IN_YIELD_FROM = 'Incompatible types in "yield from"' # type: Final -INCOMPATIBLE_TYPES_IN_STR_INTERPOLATION = \ - 'Incompatible types in string interpolation' # type: Final -MUST_HAVE_NONE_RETURN_TYPE = 'The return type of "{}" must be None' # type: Final -INVALID_TUPLE_INDEX_TYPE = 'Invalid tuple index type' # type: Final -TUPLE_INDEX_OUT_OF_RANGE = 'Tuple index out of range' # type: Final -INVALID_SLICE_INDEX = 'Slice index must be an integer or None' # type: Final -CANNOT_INFER_LAMBDA_TYPE = 'Cannot infer type of lambda' # type: Final -CANNOT_ACCESS_INIT = 'Cannot access "__init__" directly' # type: Final -NON_INSTANCE_NEW_TYPE = '"__new__" must return a class instance (got {})' # type: Final -INVALID_NEW_TYPE = 'Incompatible return type for "__new__"' # type: Final -BAD_CONSTRUCTOR_TYPE = 'Unsupported decorated constructor type' # type: Final -CANNOT_ASSIGN_TO_METHOD = 'Cannot assign to a method' # type: Final -CANNOT_ASSIGN_TO_TYPE = 'Cannot assign to a type' # type: Final -INCONSISTENT_ABSTRACT_OVERLOAD = \ - 'Overloaded method has both abstract and non-abstract variants' # type: Final -MULTIPLE_OVERLOADS_REQUIRED = 'Single overload definition, multiple required' # type: Final -READ_ONLY_PROPERTY_OVERRIDES_READ_WRITE = \ - 'Read-only property cannot override read-write property' # type: Final -FORMAT_REQUIRES_MAPPING = 'Format requires a mapping' # type: Final -RETURN_TYPE_CANNOT_BE_CONTRAVARIANT = \ - "Cannot use a contravariant type variable as return type" # type: Final -FUNCTION_PARAMETER_CANNOT_BE_COVARIANT = \ - "Cannot use a covariant type variable as a parameter" # type: Final -INCOMPATIBLE_IMPORT_OF = "Incompatible import of" # type: Final -FUNCTION_TYPE_EXPECTED = "Function is missing a type annotation" # type: Final -ONLY_CLASS_APPLICATION = "Type application is only supported for generic classes" # type: Final -RETURN_TYPE_EXPECTED = "Function is missing a return type annotation" # type: Final -ARGUMENT_TYPE_EXPECTED = \ - "Function is missing a type annotation for one or more arguments" # type: Final -KEYWORD_ARGUMENT_REQUIRES_STR_KEY_TYPE = \ - 'Keyword argument only valid with "str" key type in call to "dict"' # type: Final -ALL_MUST_BE_SEQ_STR = 'Type of __all__ must be {}, not {}' # type: Final -INVALID_TYPEDDICT_ARGS = \ - 'Expected keyword arguments, {...}, or dict(...) in TypedDict constructor' # type: Final -TYPEDDICT_KEY_MUST_BE_STRING_LITERAL = \ - 'Expected TypedDict key to be string literal' # type: Final -MALFORMED_ASSERT = 'Assertion is always true, perhaps remove parentheses?' 
# type: Final -DUPLICATE_TYPE_SIGNATURES = 'Function has duplicate type signatures' # type: Final -DESCRIPTOR_SET_NOT_CALLABLE = "{}.__set__ is not callable" # type: Final -DESCRIPTOR_GET_NOT_CALLABLE = "{}.__get__ is not callable" # type: Final -MODULE_LEVEL_GETATTRIBUTE = '__getattribute__ is not valid at the module level' # type: Final +INCOMPATIBLE_TYPES_IN_YIELD: Final = 'Incompatible types in "yield"' +INCOMPATIBLE_TYPES_IN_YIELD_FROM: Final = 'Incompatible types in "yield from"' +INCOMPATIBLE_TYPES_IN_STR_INTERPOLATION: Final = "Incompatible types in string interpolation" +MUST_HAVE_NONE_RETURN_TYPE: Final = 'The return type of "{}" must be None' +INVALID_TUPLE_INDEX_TYPE: Final = "Invalid tuple index type" +TUPLE_INDEX_OUT_OF_RANGE: Final = "Tuple index out of range" +INVALID_SLICE_INDEX: Final = "Slice index must be an integer or None" +CANNOT_INFER_LAMBDA_TYPE: Final = "Cannot infer type of lambda" +CANNOT_ACCESS_INIT: Final = 'Cannot access "__init__" directly' +NON_INSTANCE_NEW_TYPE: Final = '"__new__" must return a class instance (got {})' +INVALID_NEW_TYPE: Final = 'Incompatible return type for "__new__"' +BAD_CONSTRUCTOR_TYPE: Final = "Unsupported decorated constructor type" +CANNOT_ASSIGN_TO_METHOD: Final = "Cannot assign to a method" +CANNOT_ASSIGN_TO_TYPE: Final = "Cannot assign to a type" +INCONSISTENT_ABSTRACT_OVERLOAD: Final = ( + "Overloaded method has both abstract and non-abstract variants" +) +MULTIPLE_OVERLOADS_REQUIRED: Final = "Single overload definition, multiple required" +READ_ONLY_PROPERTY_OVERRIDES_READ_WRITE: Final = ( + "Read-only property cannot override read-write property" +) +FORMAT_REQUIRES_MAPPING: Final = "Format requires a mapping" +RETURN_TYPE_CANNOT_BE_CONTRAVARIANT: Final = ( + "Cannot use a contravariant type variable as return type" +) +FUNCTION_PARAMETER_CANNOT_BE_COVARIANT: Final = ( + "Cannot use a covariant type variable as a parameter" +) +INCOMPATIBLE_IMPORT_OF: Final = "Incompatible import of" +FUNCTION_TYPE_EXPECTED: Final = "Function is missing a type annotation" +ONLY_CLASS_APPLICATION: Final = "Type application is only supported for generic classes" +RETURN_TYPE_EXPECTED: Final = "Function is missing a return type annotation" +ARGUMENT_TYPE_EXPECTED: Final = "Function is missing a type annotation for one or more arguments" +KEYWORD_ARGUMENT_REQUIRES_STR_KEY_TYPE: Final = ( + 'Keyword argument only valid with "str" key type in call to "dict"' +) +ALL_MUST_BE_SEQ_STR: Final = "Type of __all__ must be {}, not {}" +INVALID_TYPEDDICT_ARGS: Final = ( + "Expected keyword arguments, {...}, or dict(...) in TypedDict constructor" +) +TYPEDDICT_KEY_MUST_BE_STRING_LITERAL: Final = "Expected TypedDict key to be string literal" +MALFORMED_ASSERT: Final = "Assertion is always true, perhaps remove parentheses?" +DUPLICATE_TYPE_SIGNATURES: Final = "Function has duplicate type signatures" +DESCRIPTOR_SET_NOT_CALLABLE: Final = "{}.__set__ is not callable" +DESCRIPTOR_GET_NOT_CALLABLE: Final = "{}.__get__ is not callable" +MODULE_LEVEL_GETATTRIBUTE: Final = "__getattribute__ is not valid at the module level" # Generic -GENERIC_INSTANCE_VAR_CLASS_ACCESS = \ - 'Access to generic instance variables via class is ambiguous' # type: Final -GENERIC_CLASS_VAR_ACCESS = \ - 'Access to generic class variables is ambiguous' # type: Final -BARE_GENERIC = 'Missing type parameters for generic type {}' # type: Final -IMPLICIT_GENERIC_ANY_BUILTIN = \ - 'Implicit generic "Any". 
Use "{}" and specify generic parameters' # type: Final +GENERIC_INSTANCE_VAR_CLASS_ACCESS: Final = ( + "Access to generic instance variables via class is ambiguous" +) +GENERIC_CLASS_VAR_ACCESS: Final = "Access to generic class variables is ambiguous" +BARE_GENERIC: Final = "Missing type parameters for generic type {}" +IMPLICIT_GENERIC_ANY_BUILTIN: Final = ( + 'Implicit generic "Any". Use "{}" and specify generic parameters' +) # TypeVar -INCOMPATIBLE_TYPEVAR_VALUE = 'Value of type variable "{}" of {} cannot be {}' # type: Final -CANNOT_USE_TYPEVAR_AS_EXPRESSION = \ - 'Type variable "{}.{}" cannot be used as an expression' # type: Final +INCOMPATIBLE_TYPEVAR_VALUE: Final = 'Value of type variable "{}" of {} cannot be {}' +CANNOT_USE_TYPEVAR_AS_EXPRESSION: Final = 'Type variable "{}.{}" cannot be used as an expression' # Super -TOO_MANY_ARGS_FOR_SUPER = 'Too many arguments for "super"' # type: Final -TOO_FEW_ARGS_FOR_SUPER = 'Too few arguments for "super"' # type: Final -SUPER_WITH_SINGLE_ARG_NOT_SUPPORTED = '"super" with a single argument not supported' # type: Final -UNSUPPORTED_ARG_1_FOR_SUPER = 'Unsupported argument 1 for "super"' # type: Final -UNSUPPORTED_ARG_2_FOR_SUPER = 'Unsupported argument 2 for "super"' # type: Final -SUPER_VARARGS_NOT_SUPPORTED = 'Varargs not supported with "super"' # type: Final -SUPER_POSITIONAL_ARGS_REQUIRED = '"super" only accepts positional arguments' # type: Final -SUPER_ARG_2_NOT_INSTANCE_OF_ARG_1 = \ - 'Argument 2 for "super" not an instance of argument 1' # type: Final -TARGET_CLASS_HAS_NO_BASE_CLASS = 'Target class has no base class' # type: Final -SUPER_OUTSIDE_OF_METHOD_NOT_SUPPORTED = \ - 'super() outside of a method is not supported' # type: Final -SUPER_ENCLOSING_POSITIONAL_ARGS_REQUIRED = \ - 'super() requires one or more positional arguments in enclosing function' # type: Final +TOO_MANY_ARGS_FOR_SUPER: Final = 'Too many arguments for "super"' +TOO_FEW_ARGS_FOR_SUPER: Final = 'Too few arguments for "super"' +SUPER_WITH_SINGLE_ARG_NOT_SUPPORTED: Final = '"super" with a single argument not supported' +UNSUPPORTED_ARG_1_FOR_SUPER: Final = 'Unsupported argument 1 for "super"' +UNSUPPORTED_ARG_2_FOR_SUPER: Final = 'Unsupported argument 2 for "super"' +SUPER_VARARGS_NOT_SUPPORTED: Final = 'Varargs not supported with "super"' +SUPER_POSITIONAL_ARGS_REQUIRED: Final = '"super" only accepts positional arguments' +SUPER_ARG_2_NOT_INSTANCE_OF_ARG_1: Final = 'Argument 2 for "super" not an instance of argument 1' +TARGET_CLASS_HAS_NO_BASE_CLASS: Final = "Target class has no base class" +SUPER_OUTSIDE_OF_METHOD_NOT_SUPPORTED: Final = "super() outside of a method is not supported" +SUPER_ENCLOSING_POSITIONAL_ARGS_REQUIRED: Final = ( + "super() requires one or more positional arguments in enclosing function" +) # Self-type -MISSING_OR_INVALID_SELF_TYPE = \ - "Self argument missing for a non-static method (or an invalid type for self)" # type: Final -ERASED_SELF_TYPE_NOT_SUPERTYPE = \ - 'The erased type of self "{}" is not a supertype of its class "{}"' # type: Final -INVALID_SELF_TYPE_OR_EXTRA_ARG = \ - "Invalid type for self, or extra argument type in function annotation" # type: Final +MISSING_OR_INVALID_SELF_TYPE: Final = ( + "Self argument missing for a non-static method (or an invalid type for self)" +) +ERASED_SELF_TYPE_NOT_SUPERTYPE: Final = ( + 'The erased type of self "{}" is not a supertype of its class "{}"' +) +INVALID_SELF_TYPE_OR_EXTRA_ARG: Final = ( + "Invalid type for self, or extra argument type in function annotation" +) # Final 
-CANNOT_INHERIT_FROM_FINAL = 'Cannot inherit from final class "{}"' # type: Final -DEPENDENT_FINAL_IN_CLASS_BODY = \ - "Final name declared in class body cannot depend on type variables" # type: Final -CANNOT_ACCESS_FINAL_INSTANCE_ATTR = \ - 'Cannot access final instance attribute "{}" on class object' # type: Final -CANNOT_MAKE_DELETABLE_FINAL = \ - "Deletable attribute cannot be final" # type: Final +CANNOT_INHERIT_FROM_FINAL: Final = 'Cannot inherit from final class "{}"' +DEPENDENT_FINAL_IN_CLASS_BODY: Final = ( + "Final name declared in class body cannot depend on type variables" +) +CANNOT_ACCESS_FINAL_INSTANCE_ATTR: Final = ( + 'Cannot access final instance attribute "{}" on class object' +) +CANNOT_MAKE_DELETABLE_FINAL: Final = "Deletable attribute cannot be final" # ClassVar -CANNOT_OVERRIDE_INSTANCE_VAR = \ - 'Cannot override instance variable (previously declared on base class "{}") with class ' \ - 'variable' # type: Final -CANNOT_OVERRIDE_CLASS_VAR = \ - 'Cannot override class variable (previously declared on base class "{}") with instance ' \ - 'variable' # type: Final +CANNOT_OVERRIDE_INSTANCE_VAR: Final = ( + 'Cannot override instance variable (previously declared on base class "{}") with class ' + "variable" +) +CANNOT_OVERRIDE_CLASS_VAR: Final = ( + 'Cannot override class variable (previously declared on base class "{}") with instance ' + "variable" +) # Protocol -RUNTIME_PROTOCOL_EXPECTED = \ - 'Only @runtime_checkable protocols can be used with instance and class checks' # type: Final -CANNOT_INSTANTIATE_PROTOCOL = 'Cannot instantiate protocol class "{}"' # type: Final +RUNTIME_PROTOCOL_EXPECTED: Final = ( + "Only @runtime_checkable protocols can be used with instance and class checks" +) +CANNOT_INSTANTIATE_PROTOCOL: Final = 'Cannot instantiate protocol class "{}"' diff --git a/mypy/messages.py b/mypy/messages.py index 940747096774..5278cbcee7e2 100644 --- a/mypy/messages.py +++ b/mypy/messages.py @@ -43,7 +43,7 @@ from mypy.errorcodes import ErrorCode from mypy import message_registry, errorcodes as codes -TYPES_FOR_UNIMPORTED_HINTS = { +TYPES_FOR_UNIMPORTED_HINTS: Final = { 'typing.Any', 'typing.Callable', 'typing.Dict', @@ -56,23 +56,23 @@ 'typing.TypeVar', 'typing.Union', 'typing.cast', -} # type: Final +} -ARG_CONSTRUCTOR_NAMES = { +ARG_CONSTRUCTOR_NAMES: Final = { ARG_POS: "Arg", ARG_OPT: "DefaultArg", ARG_NAMED: "NamedArg", ARG_NAMED_OPT: "DefaultNamedArg", ARG_STAR: "VarArg", ARG_STAR2: "KwArg", -} # type: Final +} # Map from the full name of a missing definition to the test fixture (under # test-data/unit/fixtures/) that provides the definition. This is used for # generating better error messages when running mypy tests only. -SUGGESTED_TEST_FIXTURES = { +SUGGESTED_TEST_FIXTURES: Final = { 'builtins.list': 'list.pyi', 'builtins.dict': 'dict.pyi', 'builtins.set': 'set.pyi', @@ -83,7 +83,7 @@ 'builtins.isinstance': 'isinstancelist.pyi', 'builtins.property': 'property.pyi', 'builtins.classmethod': 'classmethod.pyi', -} # type: Final +} class MessageBuilder: @@ -99,9 +99,9 @@ class MessageBuilder: # Report errors using this instance. It knows about the current file and # import context. - errors = None # type: Errors + errors: Errors - modules = None # type: Dict[str, MypyFile] + modules: Dict[str, MypyFile] # Number of times errors have been disabled. 
disable_count = 0 @@ -463,7 +463,7 @@ def incompatible_argument(self, msg = '' code = codes.MISC - notes = [] # type: List[str] + notes: List[str] = [] if callee_name == '': name = callee_name[1:-1] n -= 1 @@ -691,7 +691,7 @@ def duplicate_argument_value(self, callee: CallableType, index: int, def does_not_return_value(self, callee_type: Optional[Type], context: Context) -> None: """Report an error about use of an unusable type.""" - name = None # type: Optional[str] + name: Optional[str] = None callee_type = get_proper_type(callee_type) if isinstance(callee_type, FunctionLike): name = callable_name(callee_type) @@ -1350,9 +1350,11 @@ def report_protocol_problems(self, # note: method, attr MAX_ITEMS = 2 # Maximum number of conflicts, missing members, and overloads shown # List of special situations where we don't want to report additional problems - exclusions = {TypedDictType: ['typing.Mapping'], - TupleType: ['typing.Iterable', 'typing.Sequence'], - Instance: []} # type: Dict[type, List[str]] + exclusions: Dict[type, List[str]] = { + TypedDictType: ["typing.Mapping"], + TupleType: ["typing.Iterable", "typing.Sequence"], + Instance: [], + } if supertype.type.fullname in exclusions[type(subtype)]: return if any(isinstance(tp, UninhabitedType) for tp in get_proper_types(supertype.args)): @@ -1633,7 +1635,7 @@ def format(typ: Type) -> str: return '{}[{}]'.format(alias, ', '.join(items)) else: # There are type arguments. Convert the arguments to strings. - a = [] # type: List[str] + a: List[str] = [] for arg in itype.args: a.append(format(arg)) s = ', '.join(a) @@ -1752,7 +1754,7 @@ def collect_all_instances(t: Type) -> List[Instance]: class CollectAllInstancesQuery(TypeTraverserVisitor): def __init__(self) -> None: - self.instances = [] # type: List[Instance] + self.instances: List[Instance] = [] def visit_instance(self, t: Instance) -> None: self.instances.append(t) @@ -1765,7 +1767,7 @@ def find_type_overlaps(*types: Type) -> Set[str]: This is used to ensure that distinct types with the same short name are printed with their fullname. """ - d = {} # type: Dict[str, Set[str]] + d: Dict[str, Set[str]] = {} for type in types: for inst in collect_all_instances(type): d.setdefault(inst.type.name, set()).add(inst.type.fullname) @@ -1773,7 +1775,7 @@ def find_type_overlaps(*types: Type) -> Set[str]: if 'typing.{}'.format(shortname) in TYPES_FOR_UNIMPORTED_HINTS: d[shortname].add('typing.{}'.format(shortname)) - overlaps = set() # type: Set[str] + overlaps: Set[str] = set() for fullnames in d.values(): if len(fullnames) > 1: overlaps.update(fullnames) @@ -1917,7 +1919,7 @@ def get_missing_protocol_members(left: Instance, right: Instance) -> List[str]: (i.e. completely missing) in 'left'. """ assert right.type.is_protocol - missing = [] # type: List[str] + missing: List[str] = [] for member in right.type.protocol_members: if not find_member(member, left, left): missing.append(member) @@ -1929,7 +1931,7 @@ def get_conflict_protocol_types(left: Instance, right: Instance) -> List[Tuple[s Return them as a list of ('member', 'got', 'expected'). """ assert right.type.is_protocol - conflicts = [] # type: List[Tuple[str, Type, Type]] + conflicts: List[Tuple[str, Type, Type]] = [] for member in right.type.protocol_members: if member in ('__init__', '__new__'): continue @@ -1952,7 +1954,7 @@ def get_bad_protocol_flags(left: Instance, right: Instance 'left' and 'right'. 
""" assert right.type.is_protocol - all_flags = [] # type: List[Tuple[str, Set[int], Set[int]]] + all_flags: List[Tuple[str, Set[int], Set[int]]] = [] for member in right.type.protocol_members: if find_member(member, left, left): item = (member, @@ -2055,9 +2057,9 @@ def temp_message_builder() -> MessageBuilder: # For hard-coding suggested missing member alternatives. -COMMON_MISTAKES = { +COMMON_MISTAKES: Final[Dict[str, Sequence[str]]] = { 'add': ('append', 'extend'), -} # type: Final[Dict[str, Sequence[str]]] +} def best_matches(current: str, options: Iterable[str]) -> List[str]: diff --git a/mypy/metastore.py b/mypy/metastore.py index a75d6b2ffdba..3d4cdeff3400 100644 --- a/mypy/metastore.py +++ b/mypy/metastore.py @@ -146,8 +146,7 @@ def list_all(self) -> Iterable[str]: CREATE INDEX IF NOT EXISTS path_idx on files(path); ''' # No migrations yet -MIGRATIONS = [ -] # type: List[str] +MIGRATIONS: List[str] = [] def connect_db(db_file: str) -> 'sqlite3.Connection': diff --git a/mypy/modulefinder.py b/mypy/modulefinder.py index eb763652175e..df5c556409a3 100644 --- a/mypy/modulefinder.py +++ b/mypy/modulefinder.py @@ -33,9 +33,9 @@ OnePackageDir = Tuple[str, bool] PackageDirs = List[OnePackageDir] -PYTHON_EXTENSIONS = ['.pyi', '.py'] # type: Final +PYTHON_EXTENSIONS: Final = [".pyi", ".py"] -PYTHON2_STUB_DIR = '@python2' # type: Final +PYTHON2_STUB_DIR: Final = "@python2" # TODO: Consider adding more reasons here? @@ -127,10 +127,10 @@ def __init__(self, self.fscache = fscache or FileSystemCache() # Cache for get_toplevel_possibilities: # search_paths -> (toplevel_id -> list(package_dirs)) - self.initial_components = {} # type: Dict[Tuple[str, ...], Dict[str, List[str]]] + self.initial_components: Dict[Tuple[str, ...], Dict[str, List[str]]] = {} # Cache find_module: id -> result - self.results = {} # type: Dict[str, ModuleSearchResult] - self.ns_ancestors = {} # type: Dict[str, str] + self.results: Dict[str, ModuleSearchResult] = {} + self.ns_ancestors: Dict[str, str] = {} self.options = options custom_typeshed_dir = None if options: @@ -174,7 +174,7 @@ def get_toplevel_possibilities(self, lib_path: Tuple[str, ...], id: str) -> List return self.initial_components[lib_path].get(id, []) # Enumerate all the files in the directories on lib_path and produce the map - components = {} # type: Dict[str, List[str]] + components: Dict[str, List[str]] = {} for dir in lib_path: try: contents = self.fscache.listdir(dir) @@ -276,8 +276,8 @@ def _find_module(self, id: str, use_typeshed: bool) -> ModuleSearchResult: # We have two sets of folders so that we collect *all* stubs folders and # put them in the front of the search path - third_party_inline_dirs = [] # type: PackageDirs - third_party_stubs_dirs = [] # type: PackageDirs + third_party_inline_dirs: PackageDirs = [] + third_party_stubs_dirs: PackageDirs = [] found_possible_third_party_missing_type_hints = False need_installed_stubs = False # Third-party stub/typed packages @@ -461,7 +461,7 @@ def find_modules_recursive(self, module: str) -> List[BuildSource]: # calls find_module, which will handle the preference between packages, pyi and py. # Another difference is it doesn't handle nested search paths / package roots. 
- seen = set() # type: Set[str] + seen: Set[str] = set() names = sorted(self.fscache.listdir(package_path)) for name in names: # Skip certain names altogether @@ -545,7 +545,7 @@ def default_lib_path(data_dir: str, pyversion: Tuple[int, int], custom_typeshed_dir: Optional[str]) -> List[str]: """Return default standard library search paths.""" - path = [] # type: List[str] + path: List[str] = [] if custom_typeshed_dir: typeshed_dir = os.path.join(custom_typeshed_dir, "stdlib") @@ -607,7 +607,7 @@ def get_site_packages_dirs(python_executable: Optional[str]) -> Tuple[List[str], def expand_site_packages(site_packages: List[str]) -> Tuple[List[str], List[str]]: """Expands .pth imports in site-packages directories""" - egg_dirs = [] # type: List[str] + egg_dirs: List[str] = [] for dir in site_packages: if not os.path.isdir(dir): continue @@ -697,7 +697,7 @@ def compute_search_paths(sources: List[BuildSource], lib_path.appendleft(os.path.join(root_dir, 'test-data', 'unit', 'lib-stub')) # alt_lib_path is used by some tests to bypass the normal lib_path mechanics. # If we don't have one, grab directories of source files. - python_path = [] # type: List[str] + python_path: List[str] = [] if not alt_lib_path: for source in sources: # Include directory of the program file in the module search path. diff --git a/mypy/moduleinspect.py b/mypy/moduleinspect.py index 94491de4d804..ebcbb25ea5e5 100644 --- a/mypy/moduleinspect.py +++ b/mypy/moduleinspect.py @@ -45,9 +45,9 @@ def get_package_properties(package_id: str) -> ModuleProperties: package = importlib.import_module(package_id) except BaseException as e: raise InspectError(str(e)) from e - name = getattr(package, '__name__', package_id) - file = getattr(package, '__file__', None) - path = getattr(package, '__path__', None) # type: Optional[List[str]] + name = getattr(package, "__name__", package_id) + file = getattr(package, "__file__", None) + path: Optional[List[str]] = getattr(package, "__path__", None) if not isinstance(path, list): path = None pkg_all = getattr(package, '__all__', None) @@ -118,8 +118,8 @@ def __init__(self) -> None: self._start() def _start(self) -> None: - self.tasks = Queue() # type: Queue[str] - self.results = Queue() # type: Queue[Union[ModuleProperties, str]] + self.tasks: Queue[str] = Queue() + self.results: Queue[Union[ModuleProperties, str]] = Queue() self.proc = Process(target=worker, args=(self.tasks, self.results, sys.path)) self.proc.start() self.counter = 0 # Number of successful roundtrips diff --git a/mypy/mro.py b/mypy/mro.py index 59c53996e628..2aeb96e4e756 100644 --- a/mypy/mro.py +++ b/mypy/mro.py @@ -44,7 +44,7 @@ def linearize_hierarchy(info: TypeInfo, def merge(seqs: List[List[TypeInfo]]) -> List[TypeInfo]: seqs = [s[:] for s in seqs] - result = [] # type: List[TypeInfo] + result: List[TypeInfo] = [] while True: seqs = [s for s in seqs if s] if not seqs: diff --git a/mypy/nodes.py b/mypy/nodes.py index 9bf27f5b43a6..81432c12424e 100644 --- a/mypy/nodes.py +++ b/mypy/nodes.py @@ -24,7 +24,7 @@ class Context: def __init__(self, line: int = -1, column: int = -1) -> None: self.line = line self.column = column - self.end_line = None # type: Optional[int] + self.end_line: Optional[int] = None def set_line(self, target: Union['Context', int], @@ -70,42 +70,44 @@ def get_column(self) -> int: # # TODO rename to use more descriptive names -LDEF = 0 # type: Final -GDEF = 1 # type: Final -MDEF = 2 # type: Final +LDEF: Final = 0 +GDEF: Final = 1 +MDEF: Final = 2 # Placeholder for a name imported via 'from ... import'. 
Second phase of # semantic will replace this the actual imported reference. This is # needed so that we can detect whether a name has been imported during # XXX what? -UNBOUND_IMPORTED = 3 # type: Final +UNBOUND_IMPORTED: Final = 3 # RevealExpr node kinds -REVEAL_TYPE = 0 # type: Final -REVEAL_LOCALS = 1 # type: Final +REVEAL_TYPE: Final = 0 +REVEAL_LOCALS: Final = 1 -LITERAL_YES = 2 # type: Final -LITERAL_TYPE = 1 # type: Final -LITERAL_NO = 0 # type: Final +LITERAL_YES: Final = 2 +LITERAL_TYPE: Final = 1 +LITERAL_NO: Final = 0 -node_kinds = { +node_kinds: Final = { LDEF: 'Ldef', GDEF: 'Gdef', MDEF: 'Mdef', UNBOUND_IMPORTED: 'UnboundImported', -} # type: Final -inverse_node_kinds = {_kind: _name for _name, _kind in node_kinds.items()} # type: Final +} +inverse_node_kinds: Final = {_kind: _name for _name, _kind in node_kinds.items()} -implicit_module_attrs = {'__name__': '__builtins__.str', - '__doc__': None, # depends on Python version, see semanal.py - '__file__': '__builtins__.str', - '__package__': '__builtins__.str'} # type: Final +implicit_module_attrs: Final = { + "__name__": "__builtins__.str", + "__doc__": None, # depends on Python version, see semanal.py + "__file__": "__builtins__.str", + "__package__": "__builtins__.str", +} # These aliases exist because built-in class objects are not subscriptable. # For example `list[int]` fails at runtime. Instead List[int] should be used. -type_aliases = { +type_aliases: Final = { 'typing.List': 'builtins.list', 'typing.Dict': 'builtins.dict', 'typing.Set': 'builtins.set', @@ -115,11 +117,11 @@ def get_column(self) -> int: 'typing.DefaultDict': 'collections.defaultdict', 'typing.Deque': 'collections.deque', 'typing.OrderedDict': 'collections.OrderedDict', -} # type: Final +} # This keeps track of the oldest supported Python version where the corresponding # alias source is available. 
-type_aliases_source_versions = { +type_aliases_source_versions: Final = { 'typing.List': (2, 7), 'typing.Dict': (2, 7), 'typing.Set': (2, 7), @@ -129,17 +131,16 @@ def get_column(self) -> int: 'typing.DefaultDict': (2, 7), 'typing.Deque': (2, 7), 'typing.OrderedDict': (3, 7), -} # type: Final +} -reverse_builtin_aliases = { +reverse_builtin_aliases: Final = { 'builtins.list': 'typing.List', 'builtins.dict': 'typing.Dict', 'builtins.set': 'typing.Set', 'builtins.frozenset': 'typing.FrozenSet', -} # type: Final +} -_nongen_builtins = {'builtins.tuple': 'typing.Tuple', - 'builtins.enumerate': ''} # type: Final +_nongen_builtins: Final = {"builtins.tuple": "typing.Tuple", "builtins.enumerate": ""} _nongen_builtins.update((name, alias) for alias, name in type_aliases.items()) # Drop OrderedDict from this for backward compatibility del _nongen_builtins['collections.OrderedDict'] @@ -150,9 +151,11 @@ def get_nongen_builtins(python_version: Tuple[int, int]) -> Dict[str, str]: return _nongen_builtins if python_version < (3, 9) else {} -RUNTIME_PROTOCOL_DECOS = ('typing.runtime_checkable', - 'typing_extensions.runtime', - 'typing_extensions.runtime_checkable') # type: Final +RUNTIME_PROTOCOL_DECOS: Final = ( + "typing.runtime_checkable", + "typing_extensions.runtime", + "typing_extensions.runtime_checkable", +) class Node(Context): @@ -242,22 +245,22 @@ class MypyFile(SymbolNode): """The abstract syntax tree of a single source file.""" # Fully qualified module name - _fullname = None # type: Bogus[str] + _fullname: Bogus[str] # Path to the file (empty string if not known) path = '' # Top-level definitions and statements - defs = None # type: List[Statement] + defs: List[Statement] # Type alias dependencies as mapping from target to set of alias full names - alias_deps = None # type: DefaultDict[str, Set[str]] + alias_deps: DefaultDict[str, Set[str]] # Is there a UTF-8 BOM at the start? is_bom = False - names = None # type: SymbolTable + names: "SymbolTable" # All import nodes within the file (also ones within functions etc.) - imports = None # type: List[ImportBase] + imports: List["ImportBase"] # Lines on which to ignore certain errors when checking. # If the value is empty, ignore all errors; otherwise, the list contains all # error codes to ignore. - ignored_lines = None # type: Dict[int, List[str]] + ignored_lines: Dict[int, List[str]] # Is this file represented by a stub file (.pyi)? is_stub = False # Is this loaded from the cache and thus missing the actual body of the file? @@ -267,7 +270,7 @@ class MypyFile(SymbolNode): # module errors in addition to missing attribute errors. 
is_partial_stub_package = False # Plugin-created dependencies - plugin_deps = None # type: Dict[str, Set[str]] + plugin_deps: Dict[str, Set[str]] def __init__(self, defs: List[Statement], @@ -342,7 +345,7 @@ class ImportBase(Statement): # # x = 1 # from m import x <-- add assignment representing "x = m.x" - assignments = None # type: List[AssignmentStmt] + assignments: List["AssignmentStmt"] def __init__(self) -> None: super().__init__() @@ -352,7 +355,7 @@ def __init__(self) -> None: class Import(ImportBase): """import m [as n]""" - ids = None # type: List[Tuple[str, Optional[str]]] # (module id, as id) + ids: List[Tuple[str, Optional[str]]] # (module id, as id) def __init__(self, ids: List[Tuple[str, Optional[str]]]) -> None: super().__init__() @@ -365,9 +368,9 @@ def accept(self, visitor: StatementVisitor[T]) -> T: class ImportFrom(ImportBase): """from m import x [as y], ...""" - id = None # type: str - relative = None # type: int - names = None # type: List[Tuple[str, Optional[str]]] # Tuples (name, as name) + id: str + relative: int + names: List[Tuple[str, Optional[str]]] # Tuples (name, as name) def __init__(self, id: str, relative: int, names: List[Tuple[str, Optional[str]]]) -> None: super().__init__() @@ -381,10 +384,11 @@ def accept(self, visitor: StatementVisitor[T]) -> T: class ImportAll(ImportBase): """from m import *""" - id = None # type: str - relative = None # type: int + + id: str + relative: int # NOTE: Only filled and used by old semantic analyzer. - imported_names = None # type: List[str] + imported_names: List[str] def __init__(self, id: str, relative: int) -> None: super().__init__() @@ -431,9 +435,7 @@ def __str__(self) -> str: return 'ImportedName(%s)' % self.target_fullname -FUNCBASE_FLAGS = [ - 'is_property', 'is_class', 'is_static', 'is_final' -] # type: Final +FUNCBASE_FLAGS: Final = ["is_property", "is_class", "is_static", "is_final"] class FuncBase(Node): @@ -464,9 +466,9 @@ def __init__(self) -> None: super().__init__() # Type signature. This is usually CallableType or Overloaded, but it can be # something else for decorated functions. - self.type = None # type: Optional[mypy.types.ProperType] + self.type: Optional[mypy.types.ProperType] = None # Original, not semantically analyzed type (used for reprocessing) - self.unanalyzed_type = None # type: Optional[mypy.types.ProperType] + self.unanalyzed_type: Optional[mypy.types.ProperType] = None # If method, reference to TypeInfo # TODO: Type should be Optional[TypeInfo] self.info = FUNC_NO_INFO @@ -500,9 +502,9 @@ class OverloadedFuncDef(FuncBase, SymbolNode, Statement): Overloaded variants must be consecutive in the source file. 
""" - items = None # type: List[OverloadPart] - unanalyzed_items = None # type: List[OverloadPart] - impl = None # type: Optional[OverloadPart] + items: List[OverloadPart] + unanalyzed_items: List[OverloadPart] + impl: Optional[OverloadPart] = None def __init__(self, items: List['OverloadPart']) -> None: super().__init__() @@ -583,10 +585,10 @@ def set_line(self, self.variable.set_line(self.line, self.column, self.end_line) -FUNCITEM_FLAGS = FUNCBASE_FLAGS + [ +FUNCITEM_FLAGS: Final = FUNCBASE_FLAGS + [ 'is_overload', 'is_generator', 'is_coroutine', 'is_async_generator', 'is_awaitable_coroutine', -] # type: Final +] class FuncItem(FuncBase): @@ -617,7 +619,7 @@ def __init__(self, super().__init__() self.arguments = arguments self.arg_names = [arg.variable.name for arg in self.arguments] - self.arg_kinds = [arg.kind for arg in self.arguments] # type: List[int] + self.arg_kinds: List[int] = [arg.kind for arg in self.arguments] self.max_pos = self.arg_kinds.count(ARG_POS) + self.arg_kinds.count(ARG_OPT) self.body = body self.type = typ @@ -627,7 +629,7 @@ def __init__(self, self.is_coroutine = False self.is_async_generator = False self.is_awaitable_coroutine = False - self.expanded = [] # type: List[FuncItem] + self.expanded: List[FuncItem] = [] self.min_args = 0 for i in range(len(self.arguments)): @@ -649,9 +651,9 @@ def is_dynamic(self) -> bool: return self.type is None -FUNCDEF_FLAGS = FUNCITEM_FLAGS + [ +FUNCDEF_FLAGS: Final = FUNCITEM_FLAGS + [ 'is_decorated', 'is_conditional', 'is_abstract', -] # type: Final +] class FuncDef(FuncItem, SymbolNode, Statement): @@ -679,7 +681,7 @@ def __init__(self, self.is_abstract = False self.is_final = False # Original conditional definition - self.original_def = None # type: Union[None, FuncDef, Var, Decorator] + self.original_def: Union[None, FuncDef, Var, Decorator] = None @property def name(self) -> str: @@ -738,12 +740,12 @@ class Decorator(SymbolNode, Statement): A single Decorator object can include any number of function decorators. """ - func = None # type: FuncDef # Decorated function - decorators = None # type: List[Expression] # Decorators (may be empty) + func: FuncDef # Decorated function + decorators: List[Expression] # Decorators (may be empty) # Some decorators are removed by semanal, keep the original here. 
- original_decorators = None # type: List[Expression] + original_decorators: List[Expression] # TODO: This is mostly used for the type; consider replacing with a 'type' attribute - var = None # type: Var # Represents the decorated function obj + var: "Var" # Represents the decorated function obj is_overload = False def __init__(self, func: FuncDef, decorators: List[Expression], @@ -795,12 +797,12 @@ def deserialize(cls, data: JsonDict) -> 'Decorator': return dec -VAR_FLAGS = [ +VAR_FLAGS: Final = [ 'is_self', 'is_initialized_in_class', 'is_staticmethod', 'is_classmethod', 'is_property', 'is_settable_property', 'is_suppressed_import', 'is_classvar', 'is_abstract_var', 'is_final', 'final_unset_in_class', 'final_set_in_init', 'explicit_self_type', 'is_ready', 'from_module_getattr', -] # type: Final +] class Var(SymbolNode): @@ -839,7 +841,7 @@ def __init__(self, name: str, type: 'Optional[mypy.types.Type]' = None) -> None: self._fullname = cast('Bogus[str]', None) # Name with module prefix # TODO: Should be Optional[TypeInfo] self.info = VAR_NO_INFO - self.type = type # type: Optional[mypy.types.Type] # Declared or inferred type, or None + self.type: Optional[mypy.types.Type] = type # Declared or inferred type, or None # Is this the first argument to an ordinary method (usually "self")? self.is_self = False self.is_ready = True # If inferred, is the inferred type available? @@ -860,7 +862,7 @@ def __init__(self, name: str, type: 'Optional[mypy.types.Type]' = None) -> None: # If constant value is a simple literal, # store the literal value (unboxed) for the benefit of # tools like mypyc. - self.final_value = None # type: Optional[Union[int, float, bool, str]] + self.final_value: Optional[Union[int, float, bool, str]] = None # Where the value was set (only for class attributes) self.final_unset_in_class = False self.final_set_in_init = False @@ -889,12 +891,13 @@ def accept(self, visitor: NodeVisitor[T]) -> T: def serialize(self) -> JsonDict: # TODO: Leave default values out? # NOTE: Sometimes self.is_ready is False here, but we don't care. - data = {'.class': 'Var', - 'name': self._name, - 'fullname': self._fullname, - 'type': None if self.type is None else self.type.serialize(), - 'flags': get_flags(self, VAR_FLAGS), - } # type: JsonDict + data: JsonDict = { + ".class": "Var", + "name": self._name, + "fullname": self._fullname, + "type": None if self.type is None else self.type.serialize(), + "flags": get_flags(self, VAR_FLAGS), + } if self.final_value is not None: data['final_value'] = self.final_value return data @@ -915,19 +918,19 @@ def deserialize(cls, data: JsonDict) -> 'Var': class ClassDef(Statement): """Class definition""" - name = None # type: str # Name of the class without module prefix - fullname = None # type: Bogus[str] # Fully qualified name of the class - defs = None # type: Block - type_vars = None # type: List[mypy.types.TypeVarDef] + name: str # Name of the class without module prefix + fullname: Bogus[str] = None # type: ignore # Fully qualified name of the class + defs: "Block" + type_vars: List["mypy.types.TypeVarDef"] # Base class expressions (not semantically analyzed -- can be arbitrary expressions) - base_type_exprs = None # type: List[Expression] + base_type_exprs: List[Expression] # Special base classes like Generic[...] 
get moved here during semantic analysis - removed_base_type_exprs = None # type: List[Expression] - info = None # type: TypeInfo # Related TypeInfo - metaclass = None # type: Optional[Expression] - decorators = None # type: List[Expression] - keywords = None # type: OrderedDict[str, Expression] - analyzed = None # type: Optional[Expression] + removed_base_type_exprs: List[Expression] + info: "TypeInfo" # Related TypeInfo + metaclass: Optional[Expression] = None + decorators: List[Expression] + keywords: "OrderedDict[str, Expression]" + analyzed: Optional[Expression] = None has_incompatible_baseclass = False def __init__(self, @@ -977,7 +980,7 @@ def deserialize(self, data: JsonDict) -> 'ClassDef': class GlobalDecl(Statement): """Declaration global x, y, ...""" - names = None # type: List[str] + names: List[str] def __init__(self, names: List[str]) -> None: super().__init__() @@ -990,7 +993,7 @@ def accept(self, visitor: StatementVisitor[T]) -> T: class NonlocalDecl(Statement): """Declaration nonlocal x, y, ...""" - names = None # type: List[str] + names: List[str] def __init__(self, names: List[str]) -> None: super().__init__() @@ -1022,7 +1025,8 @@ def accept(self, visitor: StatementVisitor[T]) -> T: class ExpressionStmt(Statement): """An expression as a statement, such as print(s).""" - expr = None # type: Expression + + expr: Expression def __init__(self, expr: Expression) -> None: super().__init__() @@ -1043,15 +1047,15 @@ class AssignmentStmt(Statement): An lvalue can be NameExpr, TupleExpr, ListExpr, MemberExpr, or IndexExpr. """ - lvalues = None # type: List[Lvalue] + lvalues: List[Lvalue] # This is a TempNode if and only if no rvalue (x: t). - rvalue = None # type: Expression + rvalue: Expression # Declared type in a comment, may be None. - type = None # type: Optional[mypy.types.Type] + type: Optional["mypy.types.Type"] = None # Original, not semantically analyzed type in annotation (used for reprocessing) - unanalyzed_type = None # type: Optional[mypy.types.Type] + unanalyzed_type: Optional["mypy.types.Type"] = None # This indicates usage of PEP 526 type annotation syntax in assignment. - new_syntax = False # type: bool + new_syntax: bool = False # Does this assignment define a type alias? is_alias_def = False # Is this a final definition? 
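A note on all the quoting in these hunks ('Optional["mypy.types.Type"]', 'keywords: "OrderedDict[str, Expression]"' and so on): unlike a type comment, an annotation in a class body is evaluated when that body executes, and this codebase does not use PEP 563 deferred evaluation. Any name not yet bound at that moment, a class defined later in the file or a module caught in an import cycle, therefore has to be written as a string literal. A hypothetical sketch:

    from typing import List, Optional

    class WhileNode:
        body: "BlockNode"                        # defined below: quotes required
        else_body: Optional["BlockNode"] = None  # strings work inside subscripts too

    class BlockNode:
        stmts: List[WhileNode]                   # already bound: no quotes needed

mypy resolves the strings exactly as it resolved the old comments, so nothing changes for type checking.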
@@ -1079,8 +1083,8 @@ class OperatorAssignmentStmt(Statement): """Operator assignment statement such as x += 1""" op = '' - lvalue = None # type: Lvalue - rvalue = None # type: Expression + lvalue: Lvalue + rvalue: Expression def __init__(self, op: str, lvalue: Lvalue, rvalue: Expression) -> None: super().__init__() @@ -1093,9 +1097,9 @@ def accept(self, visitor: StatementVisitor[T]) -> T: class WhileStmt(Statement): - expr = None # type: Expression - body = None # type: Block - else_body = None # type: Optional[Block] + expr: Expression + body: Block + else_body: Optional[Block] = None def __init__(self, expr: Expression, body: Block, else_body: Optional[Block]) -> None: super().__init__() @@ -1109,19 +1113,19 @@ def accept(self, visitor: StatementVisitor[T]) -> T: class ForStmt(Statement): # Index variables - index = None # type: Lvalue + index: Lvalue # Type given by type comments for index, can be None - index_type = None # type: Optional[mypy.types.Type] + index_type: Optional["mypy.types.Type"] = None # Original, not semantically analyzed type in annotation (used for reprocessing) - unanalyzed_index_type = None # type: Optional[mypy.types.Type] + unanalyzed_index_type: Optional["mypy.types.Type"] = None # Inferred iterable item type - inferred_item_type = None # type: Optional[mypy.types.Type] + inferred_item_type: Optional["mypy.types.Type"] = None # Inferred iterator type - inferred_iterator_type = None # type: Optional[mypy.types.Type] + inferred_iterator_type: Optional["mypy.types.Type"] = None # Expression to iterate - expr = None # type: Expression - body = None # type: Block - else_body = None # type: Optional[Block] + expr: Expression + body: Block + else_body: Optional[Block] = None is_async = False # True if `async for ...` (PEP 492, Python 3.5) def __init__(self, @@ -1143,7 +1147,7 @@ def accept(self, visitor: StatementVisitor[T]) -> T: class ReturnStmt(Statement): - expr = None # type: Optional[Expression] + expr: Optional[Expression] = None def __init__(self, expr: Optional[Expression]) -> None: super().__init__() @@ -1154,8 +1158,8 @@ def accept(self, visitor: StatementVisitor[T]) -> T: class AssertStmt(Statement): - expr = None # type: Expression - msg = None # type: Optional[Expression] + expr: Expression + msg: Optional[Expression] = None def __init__(self, expr: Expression, msg: Optional[Expression] = None) -> None: super().__init__() @@ -1167,7 +1171,7 @@ def accept(self, visitor: StatementVisitor[T]) -> T: class DelStmt(Statement): - expr = None # type: Lvalue + expr: Lvalue def __init__(self, expr: Lvalue) -> None: super().__init__() @@ -1193,9 +1197,9 @@ def accept(self, visitor: StatementVisitor[T]) -> T: class IfStmt(Statement): - expr = None # type: List[Expression] - body = None # type: List[Block] - else_body = None # type: Optional[Block] + expr: List[Expression] + body: List[Block] + else_body: Optional[Block] = None def __init__(self, expr: List[Expression], body: List[Block], else_body: Optional[Block]) -> None: @@ -1210,8 +1214,8 @@ def accept(self, visitor: StatementVisitor[T]) -> T: class RaiseStmt(Statement): # Plain 'raise' is a valid statement. 
- expr = None # type: Optional[Expression] - from_expr = None # type: Optional[Expression] + expr: Optional[Expression] = None + from_expr: Optional[Expression] = None def __init__(self, expr: Optional[Expression], from_expr: Optional[Expression]) -> None: super().__init__() @@ -1223,13 +1227,13 @@ def accept(self, visitor: StatementVisitor[T]) -> T: class TryStmt(Statement): - body = None # type: Block # Try body + body: Block # Try body # Plain 'except:' also possible - types = None # type: List[Optional[Expression]] # Except type expressions - vars = None # type: List[Optional[NameExpr]] # Except variable names - handlers = None # type: List[Block] # Except bodies - else_body = None # type: Optional[Block] - finally_body = None # type: Optional[Block] + types: List[Optional[Expression]] # Except type expressions + vars: List[Optional["NameExpr"]] # Except variable names + handlers: List[Block] # Except bodies + else_body: Optional[Block] = None + finally_body: Optional[Block] = None def __init__(self, body: Block, vars: List['Optional[NameExpr]'], types: List[Optional[Expression]], @@ -1248,13 +1252,13 @@ def accept(self, visitor: StatementVisitor[T]) -> T: class WithStmt(Statement): - expr = None # type: List[Expression] - target = None # type: List[Optional[Lvalue]] + expr: List[Expression] + target: List[Optional[Lvalue]] # Type given by type comments for target, can be None - unanalyzed_type = None # type: Optional[mypy.types.Type] + unanalyzed_type: Optional["mypy.types.Type"] = None # Semantically analyzed types from type comment (TypeList type expanded) - analyzed_types = None # type: List[mypy.types.Type] - body = None # type: Block + analyzed_types: List["mypy.types.Type"] + body: Block is_async = False # True if `async with ...` (PEP 492, Python 3.5) def __init__(self, expr: List[Expression], target: List[Optional[Lvalue]], @@ -1273,10 +1277,10 @@ def accept(self, visitor: StatementVisitor[T]) -> T: class PrintStmt(Statement): """Python 2 print statement""" - args = None # type: List[Expression] + args: List[Expression] newline = False # The file-like target object (given using >>). - target = None # type: Optional[Expression] + target: Optional[Expression] = None def __init__(self, args: List[Expression], @@ -1294,9 +1298,9 @@ def accept(self, visitor: StatementVisitor[T]) -> T: class ExecStmt(Statement): """Python 2 exec statement""" - expr = None # type: Expression - globals = None # type: Optional[Expression] - locals = None # type: Optional[Expression] + expr: Expression + globals: Optional[Expression] = None + locals: Optional[Expression] = None def __init__(self, expr: Expression, globals: Optional[Expression], @@ -1434,7 +1438,7 @@ def accept(self, visitor: ExpressionVisitor[T]) -> T: class StarExpr(Expression): """Star expression""" - expr = None # type: Expression + expr: Expression def __init__(self, expr: Expression) -> None: super().__init__() @@ -1456,11 +1460,11 @@ class RefExpr(Expression): def __init__(self) -> None: super().__init__() # LDEF/GDEF/MDEF/... (None if not available) - self.kind = None # type: Optional[int] + self.kind: Optional[int] = None # Var, FuncDef or TypeInfo that describes this - self.node = None # type: Optional[SymbolNode] + self.node: Optional[SymbolNode] = None # Fully qualified name (or name if not global) - self.fullname = None # type: Optional[str] + self.fullname: Optional[str] = None # Does this define a new name? self.is_new_def = False # Does this define a new name with inferred type? 
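Notice that none of the List-typed fields in these statement nodes gained an '= []' default in the conversion: a class-level '= []' would be a single list object shared by every instance, so the annotation-only form, with the list created in __init__, is the only faithful translation. A hypothetical sketch of the trap being avoided:

    from typing import List

    class Bad:
        items: List[int] = []        # one list shared by all instances

    class Good:
        items: List[int]             # declaration only
        def __init__(self) -> None:
            self.items = []          # per-instance list

    a, b = Bad(), Bad()
    a.items.append(1)
    assert b.items == [1]            # b observes a's append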
@@ -1471,7 +1475,7 @@ def __init__(self) -> None: # Is this expression appears as an rvalue of a valid type alias definition? self.is_alias_rvalue = False # Cache type guard from callable_type.type_guard - self.type_guard = None # type: Optional[mypy.types.Type] + self.type_guard: Optional["mypy.types.Type"] = None class NameExpr(RefExpr): @@ -1506,7 +1510,7 @@ def __init__(self, expr: Expression, name: str) -> None: self.name = name # The variable node related to a definition through 'self.x = '. # The nodes of other kinds of member expressions are resolved during type checking. - self.def_var = None # type: Optional[Var] + self.def_var: Optional[Var] = None def accept(self, visitor: ExpressionVisitor[T]) -> T: return visitor.visit_member_expr(self) @@ -1515,17 +1519,17 @@ def accept(self, visitor: ExpressionVisitor[T]) -> T: # Kinds of arguments # Positional argument -ARG_POS = 0 # type: Final +ARG_POS: Final = 0 # Positional, optional argument (functions only, not calls) -ARG_OPT = 1 # type: Final +ARG_OPT: Final = 1 # *arg argument -ARG_STAR = 2 # type: Final +ARG_STAR: Final = 2 # Keyword argument x=y in call, or keyword-only function arg -ARG_NAMED = 3 # type: Final +ARG_NAMED: Final = 3 # **arg argument -ARG_STAR2 = 4 # type: Final +ARG_STAR2: Final = 4 # In an argument list, keyword-only and also optional -ARG_NAMED_OPT = 5 # type: Final +ARG_NAMED_OPT: Final = 5 class CallExpr(Expression): @@ -1551,7 +1555,7 @@ def __init__(self, self.args = args self.arg_kinds = arg_kinds # ARG_ constants # Each name can be None if not a keyword argument. - self.arg_names = arg_names # type: List[Optional[str]] + self.arg_names: List[Optional[str]] = arg_names # If not None, the node that represents the meaning of the CallExpr. For # cast(...) this is a CastExpr. self.analyzed = analyzed @@ -1561,7 +1565,7 @@ def accept(self, visitor: ExpressionVisitor[T]) -> T: class YieldFromExpr(Expression): - expr = None # type: Expression + expr: Expression def __init__(self, expr: Expression) -> None: super().__init__() @@ -1572,7 +1576,7 @@ def accept(self, visitor: ExpressionVisitor[T]) -> T: class YieldExpr(Expression): - expr = None # type: Optional[Expression] + expr: Optional[Expression] = None def __init__(self, expr: Optional[Expression]) -> None: super().__init__() @@ -1588,13 +1592,13 @@ class IndexExpr(Expression): Also wraps type application such as List[int] as a special form. """ - base = None # type: Expression - index = None # type: Expression + base: Expression + index: Expression # Inferred __getitem__ method type - method_type = None # type: Optional[mypy.types.Type] + method_type: Optional["mypy.types.Type"] = None # If not None, this is actually semantically a type application # Class[type, ...] or a type alias initializer. 
- analyzed = None # type: Union[TypeApplication, TypeAliasExpr, None] + analyzed: Union["TypeApplication", "TypeAliasExpr", None] def __init__(self, base: Expression, index: Expression) -> None: super().__init__() @@ -1610,9 +1614,9 @@ class UnaryExpr(Expression): """Unary operation""" op = '' - expr = None # type: Expression + expr: Expression # Inferred operator method type - method_type = None # type: Optional[mypy.types.Type] + method_type: Optional["mypy.types.Type"] = None def __init__(self, op: str, expr: Expression) -> None: super().__init__() @@ -1639,10 +1643,10 @@ class OpExpr(Expression): which have specific nodes).""" op = '' - left = None # type: Expression - right = None # type: Expression + left: Expression + right: Expression # Inferred type for the operator method type (when relevant). - method_type = None # type: Optional[mypy.types.Type] + method_type: Optional["mypy.types.Type"] = None # Per static analysis only: Is the right side going to be evaluated every time? right_always = False # Per static analysis only: Is the right side unreachable? @@ -1661,10 +1665,10 @@ def accept(self, visitor: ExpressionVisitor[T]) -> T: class ComparisonExpr(Expression): """Comparison expression (e.g. a < b > c < d).""" - operators = None # type: List[str] - operands = None # type: List[Expression] + operators: List[str] + operands: List[Expression] # Inferred type for the operator methods (when relevant; None for 'is'). - method_types = None # type: List[Optional[mypy.types.Type]] + method_types: List[Optional["mypy.types.Type"]] def __init__(self, operators: List[str], operands: List[Expression]) -> None: super().__init__() @@ -1689,9 +1693,9 @@ class SliceExpr(Expression): This is only valid as index in index expressions. """ - begin_index = None # type: Optional[Expression] - end_index = None # type: Optional[Expression] - stride = None # type: Optional[Expression] + begin_index: Optional[Expression] = None + end_index: Optional[Expression] = None + stride: Optional[Expression] = None def __init__(self, begin_index: Optional[Expression], end_index: Optional[Expression], @@ -1708,8 +1712,8 @@ def accept(self, visitor: ExpressionVisitor[T]) -> T: class CastExpr(Expression): """Cast expression cast(type, expr).""" - expr = None # type: Expression - type = None # type: mypy.types.Type + expr: Expression + type: "mypy.types.Type" def __init__(self, expr: Expression, typ: 'mypy.types.Type') -> None: super().__init__() @@ -1723,9 +1727,9 @@ def accept(self, visitor: ExpressionVisitor[T]) -> T: class RevealExpr(Expression): """Reveal type expression reveal_type(expr) or reveal_locals() expression.""" - expr = None # type: Optional[Expression] - kind = 0 # type: int - local_nodes = None # type: Optional[List[Var]] + expr: Optional[Expression] = None + kind: int = 0 + local_nodes: Optional[List[Var]] = None def __init__( self, kind: int, @@ -1744,8 +1748,8 @@ class SuperExpr(Expression): """Expression super().name""" name = '' - info = None # type: Optional[TypeInfo] # Type that contains this super expression - call = None # type: CallExpr # The expression super(...) + info: Optional["TypeInfo"] = None # Type that contains this super expression + call: CallExpr # The expression super(...) 
def __init__(self, name: str, call: CallExpr) -> None: super().__init__() @@ -1780,7 +1784,7 @@ def is_dynamic(self) -> bool: class ListExpr(Expression): """List literal expression [...].""" - items = None # type: List[Expression] + items: List[Expression] def __init__(self, items: List[Expression]) -> None: super().__init__() @@ -1793,7 +1797,7 @@ def accept(self, visitor: ExpressionVisitor[T]) -> T: class DictExpr(Expression): """Dictionary literal expression {key: value, ...}.""" - items = None # type: List[Tuple[Optional[Expression], Expression]] + items: List[Tuple[Optional[Expression], Expression]] def __init__(self, items: List[Tuple[Optional[Expression], Expression]]) -> None: super().__init__() @@ -1808,7 +1812,7 @@ class TupleExpr(Expression): Also lvalue sequences (..., ...) and [..., ...]""" - items = None # type: List[Expression] + items: List[Expression] def __init__(self, items: List[Expression]) -> None: super().__init__() @@ -1821,7 +1825,7 @@ def accept(self, visitor: ExpressionVisitor[T]) -> T: class SetExpr(Expression): """Set literal expression {value, ...}.""" - items = None # type: List[Expression] + items: List[Expression] def __init__(self, items: List[Expression]) -> None: super().__init__() @@ -1834,11 +1838,11 @@ def accept(self, visitor: ExpressionVisitor[T]) -> T: class GeneratorExpr(Expression): """Generator expression ... for ... in ... [ for ... in ... ] [ if ... ].""" - left_expr = None # type: Expression - sequences = None # type: List[Expression] - condlists = None # type: List[List[Expression]] - is_async = None # type: List[bool] - indices = None # type: List[Lvalue] + left_expr: Expression + sequences: List[Expression] + condlists: List[List[Expression]] + is_async: List[bool] + indices: List[Lvalue] def __init__(self, left_expr: Expression, indices: List[Lvalue], sequences: List[Expression], condlists: List[List[Expression]], @@ -1857,7 +1861,7 @@ def accept(self, visitor: ExpressionVisitor[T]) -> T: class ListComprehension(Expression): """List comprehension (e.g. [x + 1 for x in a])""" - generator = None # type: GeneratorExpr + generator: GeneratorExpr def __init__(self, generator: GeneratorExpr) -> None: super().__init__() @@ -1870,7 +1874,7 @@ def accept(self, visitor: ExpressionVisitor[T]) -> T: class SetComprehension(Expression): """Set comprehension (e.g. {x + 1 for x in a})""" - generator = None # type: GeneratorExpr + generator: GeneratorExpr def __init__(self, generator: GeneratorExpr) -> None: super().__init__() @@ -1883,12 +1887,12 @@ def accept(self, visitor: ExpressionVisitor[T]) -> T: class DictionaryComprehension(Expression): """Dictionary comprehension (e.g. {k: v for k, v in a}""" - key = None # type: Expression - value = None # type: Expression - sequences = None # type: List[Expression] - condlists = None # type: List[List[Expression]] - is_async = None # type: List[bool] - indices = None # type: List[Lvalue] + key: Expression + value: Expression + sequences: List[Expression] + condlists: List[List[Expression]] + is_async: List[bool] + indices: List[Lvalue] def __init__(self, key: Expression, value: Expression, indices: List[Lvalue], sequences: List[Expression], condlists: List[List[Expression]], @@ -1908,9 +1912,9 @@ def accept(self, visitor: ExpressionVisitor[T]) -> T: class ConditionalExpr(Expression): """Conditional expression (e.g. 
x if y else z)""" - cond = None # type: Expression - if_expr = None # type: Expression - else_expr = None # type: Expression + cond: Expression + if_expr: Expression + else_expr: Expression def __init__(self, cond: Expression, if_expr: Expression, else_expr: Expression) -> None: super().__init__() @@ -1925,7 +1929,7 @@ def accept(self, visitor: ExpressionVisitor[T]) -> T: class BackquoteExpr(Expression): """Python 2 expression `...`.""" - expr = None # type: Expression + expr: Expression def __init__(self, expr: Expression) -> None: super().__init__() @@ -1938,8 +1942,8 @@ def accept(self, visitor: ExpressionVisitor[T]) -> T: class TypeApplication(Expression): """Type application expr[type, ...]""" - expr = None # type: Expression - types = None # type: List[mypy.types.Type] + expr: Expression + types: List["mypy.types.Type"] def __init__(self, expr: Expression, types: List['mypy.types.Type']) -> None: super().__init__() @@ -1959,9 +1963,9 @@ def accept(self, visitor: ExpressionVisitor[T]) -> T: # # If T is contravariant in Foo[T], Foo[object] is a subtype of # Foo[int], but not vice versa. -INVARIANT = 0 # type: Final -COVARIANT = 1 # type: Final -CONTRAVARIANT = 2 # type: Final +INVARIANT: Final = 0 +COVARIANT: Final = 1 +CONTRAVARIANT: Final = 2 class TypeVarLikeExpr(SymbolNode, Expression): @@ -1970,7 +1974,7 @@ class TypeVarLikeExpr(SymbolNode, Expression): _fullname = '' # Upper bound: only subtypes of upper_bound are valid as values. By default # this is 'object', meaning no restriction. - upper_bound = None # type: mypy.types.Type + upper_bound: "mypy.types.Type" # Variance of the type variable. Invariant is the default. # TypeVar(..., covariant=True) defines a covariant type variable. # TypeVar(..., contravariant=True) defines a contravariant type @@ -2008,7 +2012,7 @@ class TypeVarExpr(TypeVarLikeExpr): """ # Value restriction: only types in the list are valid as values. If the # list is empty, there is no restriction. - values = None # type: List[mypy.types.Type] + values: List["mypy.types.Type"] def __init__(self, name: str, fullname: str, values: List['mypy.types.Type'], @@ -2067,15 +2071,15 @@ class TypeAliasExpr(Expression): """Type alias expression (rvalue).""" # The target type. - type = None # type: mypy.types.Type + type: "mypy.types.Type" # Names of unbound type variables used to define the alias - tvars = None # type: List[str] + tvars: List[str] # Whether this alias was defined in bare form. 
Used to distinguish # between # A = List # and # A = List[Any] - no_args = False # type: bool + no_args: bool = False def __init__(self, node: 'TypeAlias') -> None: super().__init__() @@ -2093,7 +2097,7 @@ class NamedTupleExpr(Expression): # The class representation of this named tuple (its tuple_type attribute contains # the tuple item types) - info = None # type: TypeInfo + info: "TypeInfo" is_typed = False # whether this class was created with typing.NamedTuple def __init__(self, info: 'TypeInfo', is_typed: bool = False) -> None: @@ -2109,7 +2113,7 @@ class TypedDictExpr(Expression): """Typed dict expression TypedDict(...).""" # The class representation of this typed dict - info = None # type: TypeInfo + info: "TypeInfo" def __init__(self, info: 'TypeInfo') -> None: super().__init__() @@ -2123,10 +2127,10 @@ class EnumCallExpr(Expression): """Named tuple expression Enum('name', 'val1 val2 ...').""" # The class representation of this enumerated type - info = None # type: TypeInfo + info: "TypeInfo" # The item names (for debugging) - items = None # type: List[str] - values = None # type: List[Optional[Expression]] + items: List[str] + values: List[Optional[Expression]] def __init__(self, info: 'TypeInfo', items: List[str], values: List[Optional[Expression]]) -> None: @@ -2142,7 +2146,7 @@ def accept(self, visitor: ExpressionVisitor[T]) -> T: class PromoteExpr(Expression): """Ducktype class decorator expression _promote(...).""" - type = None # type: mypy.types.Type + type: "mypy.types.Type" def __init__(self, type: 'mypy.types.Type') -> None: super().__init__() @@ -2154,11 +2158,12 @@ def accept(self, visitor: ExpressionVisitor[T]) -> T: class NewTypeExpr(Expression): """NewType expression NewType(...).""" - name = None # type: str + + name: str # The base type (the second argument to NewType) - old_type = None # type: Optional[mypy.types.Type] + old_type: Optional["mypy.types.Type"] = None # The synthesized class representing the new type (inherits old_type) - info = None # type: Optional[TypeInfo] + info: Optional["TypeInfo"] = None def __init__(self, name: str, old_type: 'Optional[mypy.types.Type]', line: int, column: int) -> None: @@ -2175,7 +2180,7 @@ def accept(self, visitor: ExpressionVisitor[T]) -> T: class AwaitExpr(Expression): """Await expression (await ...).""" - expr = None # type: Expression + expr: Expression def __init__(self, expr: Expression) -> None: super().__init__() @@ -2196,10 +2201,10 @@ class TempNode(Expression): some fixed type. """ - type = None # type: mypy.types.Type + type: "mypy.types.Type" # Is this TempNode used to indicate absence of a right hand side in an annotated assignment? # (e.g. for 'x: int' the rvalue is TempNode(AnyType(TypeOfAny.special_form), no_rhs=True)) - no_rhs = False # type: bool + no_rhs: bool = False def __init__(self, typ: 'mypy.types.Type', @@ -2234,29 +2239,29 @@ class is generic then it will be a type constructor of higher kind. the appropriate number of arguments. """ - _fullname = None # type: Bogus[str] # Fully qualified name + _fullname: Bogus[str] = None # type: ignore # Fully qualified name # Fully qualified name for the module this type was defined in. This # information is also in the fullname, but is harder to extract in the # case of nested class definitions. - module_name = None # type: str - defn = None # type: ClassDef # Corresponding ClassDef + module_name: str + defn: ClassDef # Corresponding ClassDef # Method Resolution Order: the order of looking up attributes. The first # value always to refers to this class. 
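The '_fullname: Bogus[str] = None # type: ignore' above is one of the few places where the new syntax is noisier than the old: an annotated assignment is checked like any other assignment, so keeping the dummy None default under a non-Optional declaration now needs an explicit suppression (Bogus[T], roughly, behaves as Any in normal runs but as a real T when mypy self-checks for mypyc compilation). A hypothetical reduction of the pattern:

    class Info:
        # Assigned for real during semantic analysis; until then it holds a
        # dummy None the declared type does not admit, hence the ignore.
        fullname: str = None  # type: ignore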
- mro = None # type: List[TypeInfo] + mro: List["TypeInfo"] # Used to stash the names of the mro classes temporarily between # deserialization and fixup. See deserialize() for why. - _mro_refs = None # type: Optional[List[str]] + _mro_refs: Optional[List[str]] = None bad_mro = False # Could not construct full MRO - declared_metaclass = None # type: Optional[mypy.types.Instance] - metaclass_type = None # type: Optional[mypy.types.Instance] + declared_metaclass: Optional["mypy.types.Instance"] = None + metaclass_type: Optional["mypy.types.Instance"] = None - names = None # type: SymbolTable # Names defined directly in this type + names: "SymbolTable" # Names defined directly in this type is_abstract = False # Does the class have any abstract attributes? is_protocol = False # Is this a protocol class? runtime_protocol = False # Does this protocol support isinstance checks? - abstract_attributes = None # type: List[str] - deletable_attributes = None # type: List[str] # Used by mypyc only + abstract_attributes: List[str] + deletable_attributes: List[str] # Used by mypyc only # The attributes 'assuming' and 'assuming_proper' represent structural subtype matrices. # @@ -2278,8 +2283,8 @@ class is generic then it will be a type constructor of higher kind. # If concurrent/parallel type checking will be added in future, # then there should be one matrix per thread/process to avoid false negatives # during the type checking phase. - assuming = None # type: List[Tuple[mypy.types.Instance, mypy.types.Instance]] - assuming_proper = None # type: List[Tuple[mypy.types.Instance, mypy.types.Instance]] + assuming: List[Tuple["mypy.types.Instance", "mypy.types.Instance"]] + assuming_proper: List[Tuple["mypy.types.Instance", "mypy.types.Instance"]] # Ditto for temporary 'inferring' stack of recursive constraint inference. # It contains Instance's of protocol types that appeared as an argument to # constraints.infer_constraints(). We need 'inferring' to avoid infinite recursion for @@ -2289,7 +2294,7 @@ class is generic then it will be a type constructor of higher kind. # since this would require to pass them in many dozens of calls. In particular, # there is a dependency infer_constraint -> is_subtype -> is_callable_subtype -> # -> infer_constraints. - inferring = None # type: List[mypy.types.Instance] + inferring: List["mypy.types.Instance"] # 'inferring' and 'assuming' can't be made sets, since we need to use # is_same_type to correctly treat unions. @@ -2306,30 +2311,30 @@ class is generic then it will be a type constructor of higher kind. # Information related to type annotations. # Generic type variable names (full names) - type_vars = None # type: List[str] + type_vars: List[str] # Direct base classes. - bases = None # type: List[mypy.types.Instance] + bases: List["mypy.types.Instance"] # Another type which this type will be treated as a subtype of, # even though it's not a subclass in Python. The non-standard # `@_promote` decorator introduces this, and there are also # several builtin examples, in particular `int` -> `float`. - _promote = None # type: Optional[mypy.types.Type] + _promote: Optional["mypy.types.Type"] = None # Representation of a Tuple[...] base class, if the class has any # (e.g., for named tuples). If this is not None, the actual Type # object used for this class is not an Instance but a TupleType; # the corresponding Instance is set as the fallback type of the # tuple type. 
- tuple_type = None # type: Optional[mypy.types.TupleType] + tuple_type: Optional["mypy.types.TupleType"] = None # Is this a named tuple type? is_named_tuple = False # If this class is defined by the TypedDict type constructor, # then this is not None. - typeddict_type = None # type: Optional[mypy.types.TypedDictType] + typeddict_type: Optional["mypy.types.TypedDictType"] = None # Is this a newtype type? is_newtype = False @@ -2339,13 +2344,13 @@ class is generic then it will be a type constructor of higher kind. # This is a dictionary that will be serialized and un-serialized as is. # It is useful for plugins to add their data to save in the cache. - metadata = None # type: Dict[str, JsonDict] + metadata: Dict[str, JsonDict] - FLAGS = [ + FLAGS: Final = [ 'is_abstract', 'is_enum', 'fallback_to_any', 'is_named_tuple', 'is_newtype', 'is_protocol', 'runtime_protocol', 'is_final', 'is_intersection', - ] # type: Final + ] def __init__(self, names: 'SymbolTable', defn: ClassDef, module_name: str) -> None: """Initialize a TypeInfo.""" @@ -2402,7 +2407,7 @@ def get_containing_type_info(self, name: str) -> 'Optional[TypeInfo]': def protocol_members(self) -> List[str]: # Protocol members are names of all attributes/methods defined in a protocol # and in all its supertypes (except for 'object'). - members = set() # type: Set[str] + members: Set[str] = set() assert self.mro, "This property can be only accessed after MRO is (re-)calculated" for base in self.mro[:-1]: # we skip "object" since everyone implements it if base.is_protocol: @@ -2488,7 +2493,7 @@ def dump(self, """Return a string dump of the contents of the TypeInfo.""" if not str_conv: str_conv = mypy.strconv.StrConv() - base = '' # type: str + base: str = "" def type_str(typ: 'mypy.types.Type') -> str: if type_str_conv: @@ -2615,9 +2620,9 @@ def __getattribute__(self, attr: str) -> None: raise AssertionError(object.__getattribute__(self, 'msg')) -VAR_NO_INFO = FakeInfo('Var is lacking info') # type: Final[TypeInfo] -CLASSDEF_NO_INFO = FakeInfo('ClassDef is lacking info') # type: Final[TypeInfo] -FUNC_NO_INFO = FakeInfo('FuncBase for non-methods lack info') # type: Final[TypeInfo] +VAR_NO_INFO: Final[TypeInfo] = FakeInfo("Var is lacking info") +CLASSDEF_NO_INFO: Final[TypeInfo] = FakeInfo("ClassDef is lacking info") +FUNC_NO_INFO: Final[TypeInfo] = FakeInfo("FuncBase for non-methods lack info") class TypeAlias(SymbolNode): @@ -2727,7 +2732,7 @@ def __init__(self, target: 'mypy.types.Type', fullname: str, line: int, column: self.normalized = normalized # This attribute is manipulated by TypeAliasType. If non-None, # it is the cached value. 
- self._is_recursive = None # type: Optional[bool] + self._is_recursive: Optional[bool] = None self.eager = eager super().__init__(line, column) @@ -2740,15 +2745,16 @@ def fullname(self) -> str: return self._fullname def serialize(self) -> JsonDict: - data = {'.class': 'TypeAlias', - 'fullname': self._fullname, - 'target': self.target.serialize(), - 'alias_tvars': self.alias_tvars, - 'no_args': self.no_args, - 'normalized': self.normalized, - 'line': self.line, - 'column': self.column - } # type: JsonDict + data: JsonDict = { + ".class": "TypeAlias", + "fullname": self._fullname, + "target": self.target.serialize(), + "alias_tvars": self.alias_tvars, + "no_args": self.no_args, + "normalized": self.normalized, + "line": self.line, + "column": self.column, + } return data def accept(self, visitor: NodeVisitor[T]) -> T: @@ -2923,7 +2929,7 @@ def __init__(self, self.module_public = module_public self.implicit = implicit self.module_hidden = module_hidden - self.cross_ref = None # type: Optional[str] + self.cross_ref: Optional[str] = None self.plugin_generated = plugin_generated self.no_serialize = no_serialize @@ -2969,9 +2975,10 @@ def serialize(self, prefix: str, name: str) -> JsonDict: prefix: full name of the containing module or class; or None name: name of this object relative to the containing object """ - data = {'.class': 'SymbolTableNode', - 'kind': node_kinds[self.kind], - } # type: JsonDict + data: JsonDict = { + ".class": "SymbolTableNode", + "kind": node_kinds[self.kind], + } if self.module_hidden: data['module_hidden'] = True if not self.module_public: @@ -3028,7 +3035,7 @@ class SymbolTable(Dict[str, SymbolTableNode]): """ def __str__(self) -> str: - a = [] # type: List[str] + a: List[str] = [] for key, value in self.items(): # Filter out the implicit import of builtins. if isinstance(value, SymbolTableNode): @@ -3048,7 +3055,7 @@ def copy(self) -> 'SymbolTable': for key, node in self.items()]) def serialize(self, fullname: str) -> JsonDict: - data = {'.class': 'SymbolTable'} # type: JsonDict + data: JsonDict = {".class": "SymbolTable"} for key, value in self.items(): # Skip __builtins__: it's a reference to the builtins # module that gets added to every module by @@ -3084,7 +3091,7 @@ def get_member_expr_fullname(expr: MemberExpr) -> Optional[str]: Return a string of form foo.bar, foo.bar.baz, or similar, or None if the argument cannot be represented in this form. 
""" - initial = None # type: Optional[str] + initial: Optional[str] = None if isinstance(expr.expr, NameExpr): initial = expr.expr.name elif isinstance(expr.expr, MemberExpr): @@ -3094,12 +3101,12 @@ def get_member_expr_fullname(expr: MemberExpr) -> Optional[str]: return '{}.{}'.format(initial, expr.name) -deserialize_map = { +deserialize_map: Final = { key: obj.deserialize for key, obj in globals().items() if type(obj) is not FakeInfo and isinstance(obj, type) and issubclass(obj, SymbolNode) and obj is not SymbolNode -} # type: Final +} def check_arg_kinds(arg_kinds: List[int], nodes: List[T], fail: Callable[[str, T], None]) -> None: @@ -3138,7 +3145,7 @@ def check_arg_kinds(arg_kinds: List[int], nodes: List[T], fail: Callable[[str, T def check_arg_names(names: Sequence[Optional[str]], nodes: List[T], fail: Callable[[str, T], None], description: str = 'function definition') -> None: - seen_names = set() # type: Set[Optional[str]] + seen_names: Set[Optional[str]] = set() for name, node in zip(names, nodes): if name is not None and name in seen_names: fail('Duplicate argument "{}" in {}'.format(name, description), node) diff --git a/mypy/operators.py b/mypy/operators.py index 253131dbc459..aa26cb2ec6e9 100644 --- a/mypy/operators.py +++ b/mypy/operators.py @@ -4,7 +4,7 @@ # Map from binary operator id to related method name (in Python 3). -op_methods = { +op_methods: Final = { '+': '__add__', '-': '__sub__', '*': '__mul__', @@ -26,24 +26,34 @@ '>': '__gt__', '<=': '__le__', 'in': '__contains__', -} # type: Final +} -op_methods_to_symbols = {v: k for (k, v) in op_methods.items()} # type: Final +op_methods_to_symbols: Final = {v: k for (k, v) in op_methods.items()} op_methods_to_symbols['__div__'] = '/' -comparison_fallback_method = '__cmp__' # type: Final -ops_falling_back_to_cmp = {'__ne__', '__eq__', - '__lt__', '__le__', - '__gt__', '__ge__'} # type: Final +comparison_fallback_method: Final = "__cmp__" +ops_falling_back_to_cmp: Final = {"__ne__", "__eq__", "__lt__", "__le__", "__gt__", "__ge__"} -ops_with_inplace_method = { - '+', '-', '*', '/', '%', '//', '**', '@', '&', '|', '^', '<<', '>>'} # type: Final +ops_with_inplace_method: Final = { + "+", + "-", + "*", + "/", + "%", + "//", + "**", + "@", + "&", + "|", + "^", + "<<", + ">>", +} -inplace_operator_methods = set( - '__i' + op_methods[op][2:] for op in ops_with_inplace_method) # type: Final +inplace_operator_methods: Final = set("__i" + op_methods[op][2:] for op in ops_with_inplace_method) -reverse_op_methods = { +reverse_op_methods: Final = { '__add__': '__radd__', '__sub__': '__rsub__', '__mul__': '__rmul__', @@ -64,14 +74,14 @@ '__ge__': '__le__', '__gt__': '__lt__', '__le__': '__ge__', -} # type: Final +} -reverse_op_method_names = set(reverse_op_methods.values()) # type: Final +reverse_op_method_names: Final = set(reverse_op_methods.values()) # Suppose we have some class A. When we do A() + A(), Python will only check # the output of A().__add__(A()) and skip calling the __radd__ method entirely. 
# This shortcut is used only for the following methods: -op_methods_that_shortcut = { +op_methods_that_shortcut: Final = { '__add__', '__sub__', '__mul__', @@ -87,13 +97,13 @@ '__xor__', '__lshift__', '__rshift__', -} # type: Final +} -normal_from_reverse_op = dict((m, n) for n, m in reverse_op_methods.items()) # type: Final -reverse_op_method_set = set(reverse_op_methods.values()) # type: Final +normal_from_reverse_op: Final = dict((m, n) for n, m in reverse_op_methods.items()) +reverse_op_method_set: Final = set(reverse_op_methods.values()) -unary_op_methods = { +unary_op_methods: Final = { '-': '__neg__', '+': '__pos__', '~': '__invert__', -} # type: Final +} diff --git a/mypy/options.py b/mypy/options.py index 3eb4809344cf..310fff5dbfe5 100644 --- a/mypy/options.py +++ b/mypy/options.py @@ -14,12 +14,12 @@ class BuildType: - STANDARD = 0 # type: Final - MODULE = 1 # type: Final - PROGRAM_TEXT = 2 # type: Final + STANDARD: Final = 0 + MODULE: Final = 1 + PROGRAM_TEXT: Final = 2 -PER_MODULE_OPTIONS = { +PER_MODULE_OPTIONS: Final = { # Please keep this list sorted "allow_redefinition", "allow_untyped_globals", @@ -53,11 +53,11 @@ class BuildType: "warn_return_any", "warn_unreachable", "warn_unused_ignores", -} # type: Final +} -OPTIONS_AFFECTING_CACHE = ((PER_MODULE_OPTIONS | - {"platform", "bazel", "plugins"}) - - {"debug_cache"}) # type: Final +OPTIONS_AFFECTING_CACHE: Final = (PER_MODULE_OPTIONS | {"platform", "bazel", "plugins"}) - { + "debug_cache" +} class Options: @@ -65,19 +65,19 @@ class Options: def __init__(self) -> None: # Cache for clone_for_module() - self._per_module_cache = None # type: Optional[Dict[str, Options]] + self._per_module_cache: Optional[Dict[str, Options]] = None # -- build options -- self.build_type = BuildType.STANDARD - self.python_version = sys.version_info[:2] # type: Tuple[int, int] + self.python_version: Tuple[int, int] = sys.version_info[:2] # The executable used to search for PEP 561 packages. If this is None, # then mypy does not search for PEP 561 packages. - self.python_executable = sys.executable # type: Optional[str] + self.python_executable: Optional[str] = sys.executable self.platform = sys.platform - self.custom_typing_module = None # type: Optional[str] - self.custom_typeshed_dir = None # type: Optional[str] - self.mypy_path = [] # type: List[str] - self.report_dirs = {} # type: Dict[str, str] + self.custom_typing_module: Optional[str] = None + self.custom_typeshed_dir: Optional[str] = None + self.mypy_path: List[str] = [] + self.report_dirs: Dict[str, str] = {} # Show errors in PEP 561 packages/site-packages modules self.no_silence_site_packages = False self.no_site_packages = False @@ -100,7 +100,7 @@ def __init__(self) -> None: # top-level __init__.py to your packages. 
self.explicit_package_bases = False # File names, directory names or subpaths to avoid checking - self.exclude = "" # type: str + self.exclude: str = "" # disallow_any options self.disallow_any_generics = False @@ -161,7 +161,7 @@ def __init__(self) -> None: # Files in which to allow strict-Optional related errors # TODO: Kill this in favor of show_none_errors - self.strict_optional_whitelist = None # type: Optional[List[str]] + self.strict_optional_whitelist: Optional[List[str]] = None # Alternate way to show/hide strict-None-checking related errors self.show_none_errors = True @@ -188,36 +188,36 @@ def __init__(self) -> None: self.warn_unreachable = False # Variable names considered True - self.always_true = [] # type: List[str] + self.always_true: List[str] = [] # Variable names considered False - self.always_false = [] # type: List[str] + self.always_false: List[str] = [] # Error codes to disable - self.disable_error_code = [] # type: List[str] - self.disabled_error_codes = set() # type: Set[ErrorCode] + self.disable_error_code: List[str] = [] + self.disabled_error_codes: Set[ErrorCode] = set() # Error codes to enable - self.enable_error_code = [] # type: List[str] - self.enabled_error_codes = set() # type: Set[ErrorCode] + self.enable_error_code: List[str] = [] + self.enabled_error_codes: Set[ErrorCode] = set() # Use script name instead of __main__ self.scripts_are_modules = False # Config file name - self.config_file = None # type: Optional[str] + self.config_file: Optional[str] = None # A filename containing a JSON mapping from filenames to # mtime/size/hash arrays, used to avoid having to recalculate # source hashes as often. - self.quickstart_file = None # type: Optional[str] + self.quickstart_file: Optional[str] = None # A comma-separated list of files/directories for mypy to type check; # supports globbing - self.files = None # type: Optional[List[str]] + self.files: Optional[List[str]] = None # Write junit.xml to given file - self.junit_xml = None # type: Optional[str] + self.junit_xml: Optional[str] = None # Caching and incremental checking options self.incremental = True @@ -246,12 +246,12 @@ def __init__(self) -> None: self.wip_pep_612 = False # Paths of user plugins - self.plugins = [] # type: List[str] + self.plugins: List[str] = [] # Per-module options (raw) - self.per_module_options = OrderedDict() # type: OrderedDict[str, Dict[str, object]] - self._glob_options = [] # type: List[Tuple[str, Pattern[str]]] - self.unused_configs = set() # type: Set[str] + self.per_module_options: OrderedDict[str, Dict[str, object]] = OrderedDict() + self._glob_options: List[Tuple[str, Pattern[str]]] = [] + self.unused_configs: Set[str] = set() # -- development options -- self.verbosity = 0 # More verbose messages (for troubleshooting) @@ -270,8 +270,8 @@ def __init__(self) -> None: self.use_builtins_fixtures = False # -- experimental options -- - self.shadow_file = None # type: Optional[List[List[str]]] - self.show_column_numbers = False # type: bool + self.shadow_file: Optional[List[List[str]]] = None + self.show_column_numbers: bool = False self.show_error_codes = False # Use soft word wrap and show trimmed source snippets with error location markers. self.pretty = False @@ -286,15 +286,15 @@ def __init__(self) -> None: self.export_types = False # List of package roots -- directories under these are packages even # if they don't have __init__.py. 
- self.package_root = [] # type: List[str] - self.cache_map = {} # type: Dict[str, Tuple[str, str]] + self.package_root: List[str] = [] + self.cache_map: Dict[str, Tuple[str, str]] = {} # Don't properly free objects on exit, just kill the current process. self.fast_exit = False # Used to transform source code before parsing if not None # TODO: Make the type precise (AnyStr -> AnyStr) - self.transform_source = None # type: Optional[Callable[[Any], Any]] + self.transform_source: Optional[Callable[[Any], Any]] = None # Print full path to each file in the report. - self.show_absolute_path = False # type: bool + self.show_absolute_path: bool = False # Install missing stub packages if True self.install_types = False # Install missing stub packages in non-interactive mode (don't prompt for diff --git a/mypy/plugin.py b/mypy/plugin.py index 8dceb050c436..0f38bb32eeea 100644 --- a/mypy/plugin.py +++ b/mypy/plugin.py @@ -148,7 +148,7 @@ class TypeAnalyzerPluginInterface: # This might be different from Plugin.options (that contains default/global options) # if there are per-file options in the config. This applies to all other interfaces # in this file. - options = None # type: Options + options: Options @abstractmethod def fail(self, msg: str, ctx: Context, *, code: Optional[ErrorCode] = None) -> None: @@ -191,7 +191,7 @@ class CommonPluginApi: # Global mypy options. # Per-file options can be only accessed on various # XxxPluginInterface classes. - options = None # type: Options + options: Options @abstractmethod def lookup_fully_qualified(self, fullname: str) -> Optional[SymbolTableNode]: @@ -211,9 +211,9 @@ class CheckerPluginInterface: docstrings in checker.py for more details. """ - msg = None # type: MessageBuilder - options = None # type: Options - path = None # type: str + msg: MessageBuilder + options: Options + path: str # Type context for type inference @property @@ -243,11 +243,11 @@ class SemanticAnalyzerPluginInterface: # TODO: clean-up lookup functions. """ - modules = None # type: Dict[str, MypyFile] + modules: Dict[str, MypyFile] # Options for current file. - options = None # type: Options - cur_mod_id = None # type: str - msg = None # type: MessageBuilder + options: Options + cur_mod_id: str + msg: MessageBuilder @abstractmethod def named_type(self, qualified_name: str, args: Optional[List[Type]] = None) -> Instance: @@ -477,7 +477,7 @@ def __init__(self, options: Options) -> None: # This can't be set in __init__ because it is executed too soon in build.py. # Therefore, build.py *must* set it later before graph processing starts # by calling set_modules(). - self._modules = None # type: Optional[Dict[str, MypyFile]] + self._modules: Optional[Dict[str, MypyFile]] = None def set_modules(self, modules: Dict[str, MypyFile]) -> None: self._modules = modules diff --git a/mypy/plugins/attrs.py b/mypy/plugins/attrs.py index 8e3f77527d94..c1340e6eaf64 100644 --- a/mypy/plugins/attrs.py +++ b/mypy/plugins/attrs.py @@ -32,29 +32,24 @@ KW_ONLY_PYTHON_2_UNSUPPORTED = "kw_only is not supported in Python 2" # The names of the different functions that create classes or arguments. 
-attr_class_makers = { +attr_class_makers: Final = { 'attr.s', 'attr.attrs', 'attr.attributes', -} # type: Final -attr_dataclass_makers = { +} +attr_dataclass_makers: Final = { 'attr.dataclass', -} # type: Final -attr_frozen_makers = { - 'attr.frozen' -} # type: Final -attr_define_makers = { - 'attr.define', - 'attr.mutable' -} # type: Final -attr_attrib_makers = { +} +attr_frozen_makers: Final = {"attr.frozen"} +attr_define_makers: Final = {"attr.define", "attr.mutable"} +attr_attrib_makers: Final = { 'attr.ib', 'attr.attrib', 'attr.attr', 'attr.field', -} # type: Final +} -SELF_TVAR_NAME = '_AT' # type: Final +SELF_TVAR_NAME: Final = "_AT" class Converter: @@ -98,7 +93,7 @@ def argument(self, ctx: 'mypy.plugin.ClassDefContext') -> Argument: converter = ctx.api.lookup_qualified(self.converter.name, self.info, True) # Get the type of the converter. - converter_type = None # type: Optional[Type] + converter_type: Optional[Type] = None if converter and isinstance(converter.node, TypeInfo): from mypy.checkmember import type_object_type # To avoid import cycle. converter_type = type_object_type(converter.node, ctx.api.builtin_type) @@ -112,7 +107,7 @@ def argument(self, ctx: 'mypy.plugin.ClassDefContext') -> Argument: if isinstance(converter_type, CallableType) and converter_type.arg_types: init_type = ctx.api.anal_type(converter_type.arg_types[0]) elif isinstance(converter_type, Overloaded): - types = [] # type: List[Type] + types: List[Type] = [] for item in converter_type.items(): # Walk the overloads looking for methods that can accept one argument. num_arg_types = len(item.arg_types) @@ -340,7 +335,7 @@ def _analyze_class(ctx: 'mypy.plugin.ClassDefContext', auto_attribs=None means we'll detect which mode to use. kw_only=True means that all attributes created here will be keyword only args in __init__. 
""" - own_attrs = OrderedDict() # type: OrderedDict[str, Attribute] + own_attrs: OrderedDict[str, Attribute] = OrderedDict() if auto_attribs is None: auto_attribs = _detect_auto_attribs(ctx) @@ -627,8 +622,8 @@ def _parse_assignments( lvalue: Expression, stmt: AssignmentStmt) -> Tuple[List[NameExpr], List[Expression]]: """Convert a possibly complex assignment expression into lists of lvalues and rvalues.""" - lvalues = [] # type: List[NameExpr] - rvalues = [] # type: List[Expression] + lvalues: List[NameExpr] = [] + rvalues: List[Expression] = [] if isinstance(lvalue, (TupleExpr, ListExpr)): if all(isinstance(item, NameExpr) for item in lvalue.items): lvalues = cast(List[NameExpr], lvalue.items) diff --git a/mypy/plugins/ctypes.py b/mypy/plugins/ctypes.py index d2b69e423d4b..87ffcdfe3339 100644 --- a/mypy/plugins/ctypes.py +++ b/mypy/plugins/ctypes.py @@ -194,7 +194,7 @@ def array_value_callback(ctx: 'mypy.plugin.AttributeContext') -> Type: """Callback to provide an accurate type for ctypes.Array.value.""" et = _get_array_element_type(ctx.type) if et is not None: - types = [] # type: List[Type] + types: List[Type] = [] for tp in union_items(et): if isinstance(tp, AnyType): types.append(AnyType(TypeOfAny.from_another_any, source_any=tp)) @@ -215,7 +215,7 @@ def array_raw_callback(ctx: 'mypy.plugin.AttributeContext') -> Type: """Callback to provide an accurate type for ctypes.Array.raw.""" et = _get_array_element_type(ctx.type) if et is not None: - types = [] # type: List[Type] + types: List[Type] = [] for tp in union_items(et): if (isinstance(tp, AnyType) or isinstance(tp, Instance) and tp.type.fullname == 'ctypes.c_char'): diff --git a/mypy/plugins/dataclasses.py b/mypy/plugins/dataclasses.py index 5d96ad90c4e7..dc9cdaaa33df 100644 --- a/mypy/plugins/dataclasses.py +++ b/mypy/plugins/dataclasses.py @@ -20,12 +20,12 @@ from mypy.server.trigger import make_wildcard_trigger # The set of decorators that generate dataclasses. -dataclass_makers = { +dataclass_makers: Final = { 'dataclass', 'dataclasses.dataclass', -} # type: Final +} -SELF_TVAR_NAME = '_DT' # type: Final +SELF_TVAR_NAME: Final = "_DT" class DataclassAttribute: @@ -213,8 +213,8 @@ def collect_attributes(self) -> Optional[List[DataclassAttribute]]: # First, collect attributes belonging to the current class. ctx = self._ctx cls = self._ctx.cls - attrs = [] # type: List[DataclassAttribute] - known_attrs = set() # type: Set[str] + attrs: List[DataclassAttribute] = [] + known_attrs: Set[str] = set() for stmt in cls.defs.body: # Any assignment that doesn't use the new type declaration # syntax can be ignored out of hand. @@ -300,8 +300,8 @@ def collect_attributes(self) -> Optional[List[DataclassAttribute]]: # Each class depends on the set of attributes in its dataclass ancestors. 
ctx.api.add_plugin_dependency(make_wildcard_trigger(info.fullname)) - for data in info.metadata['dataclass']['attributes']: - name = data['name'] # type: str + for data in info.metadata["dataclass"]["attributes"]: + name: str = data["name"] if name not in known_attrs: attr = DataclassAttribute.deserialize(info, data, ctx.api) attr.expand_typevar_from_subtype(ctx.cls.info) diff --git a/mypy/plugins/default.py b/mypy/plugins/default.py index 552a52c5c860..c05d610adcb1 100644 --- a/mypy/plugins/default.py +++ b/mypy/plugins/default.py @@ -237,7 +237,7 @@ def typed_dict_get_callback(ctx: MethodContext) -> Type: if keys is None: return ctx.default_return_type - output_types = [] # type: List[Type] + output_types: List[Type] = [] for key in keys: value_type = get_proper_type(ctx.type.items.get(key)) if value_type is None: diff --git a/mypy/plugins/enums.py b/mypy/plugins/enums.py index ade32998ed14..1b22c09fe7bb 100644 --- a/mypy/plugins/enums.py +++ b/mypy/plugins/enums.py @@ -19,15 +19,13 @@ # Note: 'enum.EnumMeta' is deliberately excluded from this list. Classes that directly use # enum.EnumMeta do not necessarily automatically have the 'name' and 'value' attributes. -ENUM_PREFIXES = {'enum.Enum', 'enum.IntEnum', 'enum.Flag', 'enum.IntFlag'} # type: Final -ENUM_NAME_ACCESS = ( - {'{}.name'.format(prefix) for prefix in ENUM_PREFIXES} - | {'{}._name_'.format(prefix) for prefix in ENUM_PREFIXES} -) # type: Final -ENUM_VALUE_ACCESS = ( - {'{}.value'.format(prefix) for prefix in ENUM_PREFIXES} - | {'{}._value_'.format(prefix) for prefix in ENUM_PREFIXES} -) # type: Final +ENUM_PREFIXES: Final = {"enum.Enum", "enum.IntEnum", "enum.Flag", "enum.IntFlag"} +ENUM_NAME_ACCESS: Final = {"{}.name".format(prefix) for prefix in ENUM_PREFIXES} | { + "{}._name_".format(prefix) for prefix in ENUM_PREFIXES +} +ENUM_VALUE_ACCESS: Final = {"{}.value".format(prefix) for prefix in ENUM_PREFIXES} | { + "{}._value_".format(prefix) for prefix in ENUM_PREFIXES +} def enum_name_callback(ctx: 'mypy.plugin.AttributeContext') -> Type: diff --git a/mypy/plugins/functools.py b/mypy/plugins/functools.py index 09bf9992476e..bf71465e1003 100644 --- a/mypy/plugins/functools.py +++ b/mypy/plugins/functools.py @@ -45,7 +45,7 @@ def functools_total_ordering_maker_callback(ctx: mypy.plugin.ClassDefContext, other_type = _find_other_type(root_method) bool_type = ctx.api.named_type('__builtins__.bool') - ret_type = bool_type # type: Type + ret_type: Type = bool_type if root_method.type.ret_type != ctx.api.named_type('__builtins__.bool'): proper_ret_type = get_proper_type(root_method.type.ret_type) if not (isinstance(proper_ret_type, UnboundType) @@ -84,7 +84,7 @@ def _find_other_type(method: _MethodInfo) -> Type: def _analyze_class(ctx: mypy.plugin.ClassDefContext) -> Dict[str, Optional[_MethodInfo]]: """Analyze the class body, its parents, and return the comparison methods found.""" # Traverse the MRO and collect ordering methods. - comparison_methods = {} # type: Dict[str, Optional[_MethodInfo]] + comparison_methods: Dict[str, Optional[_MethodInfo]] = {} # Skip object because total_ordering does not use methods from object for cls in ctx.cls.info.mro[:-1]: for name in _ORDERING_METHODS: diff --git a/mypy/reachability.py b/mypy/reachability.py index 0a29c9b3bba7..44a21b993cfc 100644 --- a/mypy/reachability.py +++ b/mypy/reachability.py @@ -13,27 +13,28 @@ from mypy.literals import literal # Inferred truth value of an expression. 
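# Aside (not part of the patch): the conversion pattern this change applies
# throughout, shown on a tiny standalone example. With a PEP 526 variable
# annotation, Final no longer needs the spelled-out type: mypy infers it from
# the initializer. The name below is illustrative, not part of mypy.
from typing_extensions import Final

# Old style, what com2ann rewrites:
#     makers = {'dataclass', 'dataclasses.dataclass'}  # type: Final
# New style, as used in this patch:
makers_example: Final = {"dataclass", "dataclasses.dataclass"}
assert "dataclass" in makers_example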
-ALWAYS_TRUE = 1 # type: Final -MYPY_TRUE = 2 # type: Final # True in mypy, False at runtime -ALWAYS_FALSE = 3 # type: Final -MYPY_FALSE = 4 # type: Final # False in mypy, True at runtime -TRUTH_VALUE_UNKNOWN = 5 # type: Final +ALWAYS_TRUE: Final = 1 +MYPY_TRUE: Final = 2 # True in mypy, False at runtime +ALWAYS_FALSE: Final = 3 +MYPY_FALSE: Final = 4 # False in mypy, True at runtime +TRUTH_VALUE_UNKNOWN: Final = 5 -inverted_truth_mapping = { +inverted_truth_mapping: Final = { ALWAYS_TRUE: ALWAYS_FALSE, ALWAYS_FALSE: ALWAYS_TRUE, TRUTH_VALUE_UNKNOWN: TRUTH_VALUE_UNKNOWN, MYPY_TRUE: MYPY_FALSE, MYPY_FALSE: MYPY_TRUE, -} # type: Final - -reverse_op = {"==": "==", - "!=": "!=", - "<": ">", - ">": "<", - "<=": ">=", - ">=": "<=", - } # type: Final +} + +reverse_op: Final = { + "==": "==", + "!=": "!=", + "<": ">", + ">": "<", + "<=": ">=", + ">=": "<=", +} def infer_reachability_of_if_statement(s: IfStmt, options: Options) -> None: diff --git a/mypy/renaming.py b/mypy/renaming.py index 574a36bff2a8..a43abb13c688 100644 --- a/mypy/renaming.py +++ b/mypy/renaming.py @@ -10,9 +10,9 @@ from mypy.traverser import TraverserVisitor # Scope kinds -FILE = 0 # type: Final -FUNCTION = 1 # type: Final -CLASS = 2 # type: Final +FILE: Final = 0 +FUNCTION: Final = 1 +CLASS: Final = 2 class VariableRenameVisitor(TraverserVisitor): @@ -54,20 +54,20 @@ def __init__(self) -> None: # Number of surrounding loop statements self.loop_depth = 0 # Map block id to loop depth. - self.block_loop_depth = {} # type: Dict[int, int] + self.block_loop_depth: Dict[int, int] = {} # Stack of block ids being processed. - self.blocks = [] # type: List[int] + self.blocks: List[int] = [] # List of scopes; each scope maps short (unqualified) name to block id. - self.var_blocks = [] # type: List[Dict[str, int]] + self.var_blocks: List[Dict[str, int]] = [] # References to variables that we may need to rename. List of # scopes; each scope is a mapping from name to list of collections # of names that refer to the same logical variable. - self.refs = [] # type: List[Dict[str, List[List[NameExpr]]]] + self.refs: List[Dict[str, List[List[NameExpr]]]] = [] # Number of reads of the most recent definition of a variable (per scope) - self.num_reads = [] # type: List[Dict[str, int]] + self.num_reads: List[Dict[str, int]] = [] # Kinds of nested scopes (FILE, FUNCTION or CLASS) - self.scope_kinds = [] # type: List[int] + self.scope_kinds: List[int] = [] def visit_mypy_file(self, file_node: MypyFile) -> None: """Rename variables within a file. 
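# Aside (not part of the patch): what reverse_op in mypy/reachability.py above
# encodes. Swapping the operands of a comparison requires mirroring the
# operator; a hedged illustration using the same mapping:
reverse_op_example = {"==": "==", "!=": "!=", "<": ">", ">": "<", "<=": ">=", ">=": "<="}

def flip(left: str, op: str, right: str) -> str:
    # Rewrite "left op right" as the equivalent "right op' left".
    return "{} {} {}".format(right, reverse_op_example[op], left)

assert flip("sys.version_info", ">=", "(3, 6)") == "(3, 6) <= sys.version_info"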
diff --git a/mypy/report.py b/mypy/report.py index 1ae9fd30c819..18cbe13138e9 100644 --- a/mypy/report.py +++ b/mypy/report.py @@ -33,26 +33,28 @@ except ImportError: LXML_INSTALLED = False -type_of_any_name_map = collections.OrderedDict([ - (TypeOfAny.unannotated, "Unannotated"), - (TypeOfAny.explicit, "Explicit"), - (TypeOfAny.from_unimported_type, "Unimported"), - (TypeOfAny.from_omitted_generics, "Omitted Generics"), - (TypeOfAny.from_error, "Error"), - (TypeOfAny.special_form, "Special Form"), - (TypeOfAny.implementation_artifact, "Implementation Artifact"), -]) # type: Final[collections.OrderedDict[int, str]] +type_of_any_name_map: Final["collections.OrderedDict[int, str]"] = collections.OrderedDict( + [ + (TypeOfAny.unannotated, "Unannotated"), + (TypeOfAny.explicit, "Explicit"), + (TypeOfAny.from_unimported_type, "Unimported"), + (TypeOfAny.from_omitted_generics, "Omitted Generics"), + (TypeOfAny.from_error, "Error"), + (TypeOfAny.special_form, "Special Form"), + (TypeOfAny.implementation_artifact, "Implementation Artifact"), + ] +) ReporterClasses = Dict[str, Tuple[Callable[['Reports', str], 'AbstractReporter'], bool]] -reporter_classes = {} # type: Final[ReporterClasses] +reporter_classes: Final[ReporterClasses] = {} class Reports: def __init__(self, data_dir: str, report_dirs: Dict[str, str]) -> None: self.data_dir = data_dir - self.reporters = [] # type: List[AbstractReporter] - self.named_reporters = {} # type: Dict[str, AbstractReporter] + self.reporters: List[AbstractReporter] = [] + self.named_reporters: Dict[str, AbstractReporter] = {} for report_type, report_dir in sorted(report_dirs.items()): self.add_report(report_type, report_dir) @@ -145,7 +147,7 @@ def visit_func_def(self, defn: FuncDef) -> None: class LineCountReporter(AbstractReporter): def __init__(self, reports: Reports, output_dir: str) -> None: super().__init__(reports, output_dir) - self.counts = {} # type: Dict[str, Tuple[int, int, int, int]] + self.counts: Dict[str, Tuple[int, int, int, int]] = {} def on_file(self, tree: MypyFile, @@ -173,12 +175,12 @@ def on_file(self, annotated_funcs, total_funcs) def on_finish(self) -> None: - counts = sorted(((c, p) for p, c in self.counts.items()), - reverse=True) # type: List[Tuple[Tuple[int, int, int, int], str]] - total_counts = tuple(sum(c[i] for c, p in counts) - for i in range(4)) - with open(os.path.join(self.output_dir, 'linecount.txt'), 'w') as f: - f.write('{:7} {:7} {:6} {:6} total\n'.format(*total_counts)) + counts: List[Tuple[Tuple[int, int, int, int], str]] = sorted( + ((c, p) for p, c in self.counts.items()), reverse=True + ) + total_counts = tuple(sum(c[i] for c, p in counts) for i in range(4)) + with open(os.path.join(self.output_dir, "linecount.txt"), "w") as f: + f.write("{:7} {:7} {:6} {:6} total\n".format(*total_counts)) for c, p in counts: f.write('{:7} {:7} {:6} {:6} {}\n'.format( c[0], c[1], c[2], c[3], p)) @@ -192,8 +194,8 @@ class AnyExpressionsReporter(AbstractReporter): def __init__(self, reports: Reports, output_dir: str) -> None: super().__init__(reports, output_dir) - self.counts = {} # type: Dict[str, Tuple[int, int]] - self.any_types_counter = {} # type: Dict[str, typing.Counter[int]] + self.counts: Dict[str, Tuple[int, int]] = {} + self.any_types_counter: Dict[str, typing.Counter[int]] = {} def on_file(self, tree: MypyFile, @@ -256,7 +258,7 @@ def _report_any_exprs(self) -> None: total_coverage = (float(total_expr - total_any) / float(total_expr)) * 100 column_names = ["Name", "Anys", "Exprs", "Coverage"] - rows = [] # type: 
List[List[str]] + rows: List[List[str]] = [] for filename in sorted(self.counts): (num_any, num_total) = self.counts[filename] coverage = (float(num_total - num_any) / float(num_total)) * 100 @@ -267,14 +269,14 @@ def _report_any_exprs(self) -> None: self._write_out_report('any-exprs.txt', column_names, rows, total_row) def _report_types_of_anys(self) -> None: - total_counter = collections.Counter() # type: typing.Counter[int] + total_counter: typing.Counter[int] = collections.Counter() for counter in self.any_types_counter.values(): for any_type, value in counter.items(): total_counter[any_type] += value file_column_name = "Name" total_row_name = "Total" column_names = [file_column_name] + list(type_of_any_name_map.values()) - rows = [] # type: List[List[str]] + rows: List[List[str]] = [] for filename, counter in self.any_types_counter.items(): rows.append([filename] + [str(counter[typ]) for typ in type_of_any_name_map]) rows.sort(key=lambda x: x[0]) @@ -389,7 +391,7 @@ class LineCoverageReporter(AbstractReporter): def __init__(self, reports: Reports, output_dir: str) -> None: super().__init__(reports, output_dir) - self.lines_covered = {} # type: Dict[str, List[int]] + self.lines_covered: Dict[str, List[int]] = {} def on_file(self, tree: MypyFile, @@ -444,14 +446,13 @@ def __init__(self, reports: Reports, output_dir: str) -> None: self.css_html_path = os.path.join(reports.data_dir, 'xml', 'mypy-html.css') xsd_path = os.path.join(reports.data_dir, 'xml', 'mypy.xsd') self.schema = etree.XMLSchema(etree.parse(xsd_path)) - self.last_xml = None # type: Optional[Any] - self.files = [] # type: List[FileInfo] + self.last_xml: Optional[Any] = None + self.files: List[FileInfo] = [] # XML doesn't like control characters, but they are sometimes # legal in source code (e.g. comments, string literals). # Tabs (#x09) are allowed in XML content. - control_fixer = str.maketrans( - ''.join(chr(i) for i in range(32) if i != 9), '?' * 31) # type: Final + control_fixer: Final = str.maketrans("".join(chr(i) for i in range(32) if i != 9), "?" 
* 31) def on_file(self, tree: MypyFile, @@ -501,7 +502,7 @@ def on_file(self, def _get_any_info_for_line(visitor: stats.StatisticsVisitor, lineno: int) -> str: if lineno in visitor.any_line_map: result = "Any Types on this line: " - counter = collections.Counter() # type: typing.Counter[int] + counter: typing.Counter[int] = collections.Counter() for typ in visitor.any_line_map[lineno]: counter[typ.type_of_any] += 1 for any_type, occurrences in counter.items(): @@ -548,8 +549,8 @@ class CoberturaPackage(object): def __init__(self, name: str) -> None: self.name = name - self.classes = {} # type: Dict[str, Any] - self.packages = {} # type: Dict[str, CoberturaPackage] + self.classes: Dict[str, Any] = {} + self.packages: Dict[str, CoberturaPackage] = {} self.total_lines = 0 self.covered_lines = 0 @@ -811,7 +812,7 @@ class LinePrecisionReporter(AbstractReporter): def __init__(self, reports: Reports, output_dir: str) -> None: super().__init__(reports, output_dir) - self.files = [] # type: List[FileInfo] + self.files: List[FileInfo] = [] def on_file(self, tree: MypyFile, diff --git a/mypy/scope.py b/mypy/scope.py index 22608ef3a0fe..4b1d89a1cf01 100644 --- a/mypy/scope.py +++ b/mypy/scope.py @@ -16,9 +16,9 @@ class Scope: """Track which target we are processing at any given time.""" def __init__(self) -> None: - self.module = None # type: Optional[str] - self.classes = [] # type: List[TypeInfo] - self.function = None # type: Optional[FuncBase] + self.module: Optional[str] = None + self.classes: List[TypeInfo] = [] + self.function: Optional[FuncBase] = None # Number of nested scopes ignored (that don't get their own separate targets) self.ignored = 0 diff --git a/mypy/semanal.py b/mypy/semanal.py index 7c5f8fd7bc30..ae3d176aa154 100644 --- a/mypy/semanal.py +++ b/mypy/semanal.py @@ -124,7 +124,7 @@ T = TypeVar('T') -FUTURE_IMPORTS = { +FUTURE_IMPORTS: Final = { '__future__.nested_scopes': 'nested_scopes', '__future__.generators': 'generators', '__future__.division': 'division', @@ -135,12 +135,12 @@ '__future__.barry_as_FLUFL': 'barry_as_FLUFL', '__future__.generator_stop': 'generator_stop', '__future__.annotations': 'annotations', -} # type: Final +} # Special cased built-in classes that are needed for basic functionality and need to be # available very early on. -CORE_BUILTIN_CLASSES = ['object', 'bool', 'function'] # type: Final +CORE_BUILTIN_CLASSES: Final = ["object", "bool", "function"] # Used for tracking incomplete references @@ -159,30 +159,30 @@ class SemanticAnalyzer(NodeVisitor[None], __deletable__ = ['patches', 'options', 'cur_mod_node'] # Module name space - modules = None # type: Dict[str, MypyFile] + modules: Dict[str, MypyFile] # Global name space for current module - globals = None # type: SymbolTable + globals: SymbolTable # Names declared using "global" (separate set for each scope) - global_decls = None # type: List[Set[str]] + global_decls: List[Set[str]] # Names declated using "nonlocal" (separate set for each scope) - nonlocal_decls = None # type: List[Set[str]] + nonlocal_decls: List[Set[str]] # Local names of function scopes; None for non-function scopes. - locals = None # type: List[Optional[SymbolTable]] + locals: List[Optional[SymbolTable]] # Whether each scope is a comprehension scope. 
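# Aside (not part of the patch): why some attributes above become bare
# annotations while others keep an "= None" default. A bare class-level
# annotation only declares the name for the type checker; it creates no class
# attribute, so reading it before assignment fails at runtime. Illustrative
# names only:
from typing import Dict, Optional

class AnalyzerSketch:
    modules: Dict[str, object]          # declared only, no class attribute
    statement: Optional[object] = None  # declared and given a real default

a = AnalyzerSketch()
assert a.statement is None
try:
    a.modules
except AttributeError:
    pass  # must be assigned (e.g. in __init__) before first use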
- is_comprehension_stack = None # type: List[bool] + is_comprehension_stack: List[bool] # Nested block depths of scopes - block_depth = None # type: List[int] + block_depth: List[int] # TypeInfo of directly enclosing class (or None) - type = None # type: Optional[TypeInfo] + type: Optional[TypeInfo] = None # Stack of outer classes (the second tuple item contains tvars). - type_stack = None # type: List[Optional[TypeInfo]] + type_stack: List[Optional[TypeInfo]] # Type variables bound by the current scope, be it class or function - tvar_scope = None # type: TypeVarLikeScope + tvar_scope: TypeVarLikeScope # Per-module options - options = None # type: Options + options: Options # Stack of functions being analyzed - function_stack = None # type: List[FuncItem] + function_stack: List[FuncItem] # Set to True if semantic analysis defines a name, or replaces a # placeholder definition. If some iteration makes no progress, @@ -203,26 +203,26 @@ class SemanticAnalyzer(NodeVisitor[None], # # Note that a star import adds a special name '*' to the set, this blocks # adding _any_ names in the current file. - missing_names = None # type: List[Set[str]] + missing_names: List[Set[str]] # Callbacks that will be called after semantic analysis to tweak things. - patches = None # type: List[Tuple[int, Callable[[], None]]] + patches: List[Tuple[int, Callable[[], None]]] loop_depth = 0 # Depth of breakable loops cur_mod_id = '' # Current module id (or None) (phase 2) _is_stub_file = False # Are we analyzing a stub file? _is_typeshed_stub_file = False # Are we analyzing a typeshed stub file? - imports = None # type: Set[str] # Imported modules (during phase 2 analysis) + imports: Set[str] # Imported modules (during phase 2 analysis) # Note: some imports (and therefore dependencies) might # not be found in phase 1, for example due to * imports. - errors = None # type: Errors # Keeps track of generated errors - plugin = None # type: Plugin # Mypy plugin for special casing of library features - statement = None # type: Optional[Statement] # Statement/definition being analyzed - future_import_flags = None # type: Set[str] + errors: Errors # Keeps track of generated errors + plugin: Plugin # Mypy plugin for special casing of library features + statement: Optional[Statement] = None # Statement/definition being analyzed + future_import_flags: Set[str] # Mapping from 'async def' function definitions to their return type wrapped as a # 'Coroutine[Any, Any, T]'. Used to keep track of whether a function definition's # return type has already been wrapped, by checking if the function definition's # type is stored in this mapping and that it still matches. - wrapped_coro_return_types = {} # type: Dict[FuncDef, Type] + wrapped_coro_return_types: Dict[FuncDef, Type] = {} def __init__(self, modules: Dict[str, MypyFile], @@ -247,13 +247,14 @@ def __init__(self, # analyzed in several iterations until all names are resolved. We need to save # the local namespaces for the top level function and all nested functions between # these iterations. See also semanal_main.process_top_level_function(). - self.saved_locals = {} \ - # type: Dict[Union[FuncItem, GeneratorExpr, DictionaryComprehension], SymbolTable] + self.saved_locals: Dict[ + Union[FuncItem, GeneratorExpr, DictionaryComprehension], SymbolTable + ] = {} self.imports = set() self.type = None self.type_stack = [] # Are the namespaces of classes being processed complete? 
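# Aside (not part of the patch): a toy version of the fixed-point scheme the
# comments above describe: targets are re-analyzed until an iteration makes no
# progress, and names whose dependencies are still missing wait for a later
# pass. All names here are illustrative assumptions, not mypy's API.
from typing import Dict, Optional, Set

def resolve_all(defs: Dict[str, Optional[str]]) -> Set[str]:
    resolved: Set[str] = set()
    progress = True
    while progress:                     # iterate until a full pass adds nothing
        progress = False
        for name, dep in defs.items():
            if name not in resolved and (dep is None or dep in resolved):
                resolved.add(name)
                progress = True
    return resolved

assert resolve_all({"b": "a", "a": None}) == {"a", "b"}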
- self.incomplete_type_stack = [] # type: List[bool] + self.incomplete_type_stack: List[bool] = [] self.tvar_scope = TypeVarLikeScope() self.function_stack = [] self.block_depth = [0] @@ -267,9 +268,9 @@ def __init__(self, # missing name in these namespaces, we need to defer the current analysis target, # since it's possible that the name will be there once the namespace is complete. self.incomplete_namespaces = incomplete_namespaces - self.all_exports = [] # type: List[str] + self.all_exports: List[str] = [] # Map from module id to list of explicitly exported names (i.e. names in __all__). - self.export_map = {} # type: Dict[str, List[str]] + self.export_map: Dict[str, List[str]] = {} self.plugin = plugin # If True, process function definitions. If False, don't. This is used # for processing module top levels in fine-grained incremental mode. @@ -278,9 +279,9 @@ def __init__(self, # Trace line numbers for every file where deferral happened during analysis of # current SCC or top-level function. - self.deferral_debug_context = [] # type: List[Tuple[str, int]] + self.deferral_debug_context: List[Tuple[str, int]] = [] - self.future_import_flags = set() # type: Set[str] + self.future_import_flags: Set[str] = set() # mypyc doesn't properly handle implementing an abstractproperty # with a regular attribute so we make them properties @@ -357,7 +358,7 @@ def prepare_builtins_namespace(self, file_node: MypyFile) -> None: assert isinstance(bool_info, TypeInfo) bool_type = Instance(bool_info, []) - special_var_types = [ + special_var_types: List[Tuple[str, Type]] = [ ('None', NoneType()), # reveal_type is a mypy-only function that gives an error with # the type of its arg. @@ -368,7 +369,7 @@ def prepare_builtins_namespace(self, file_node: MypyFile) -> None: ('True', bool_type), ('False', bool_type), ('__debug__', bool_type), - ] # type: List[Tuple[str, Type]] + ] for name, typ in special_var_types: v = Var(name, typ) @@ -419,7 +420,7 @@ def add_implicit_module_attrs(self, file_node: MypyFile) -> None: # unicode docstrings should be accepted in Python 2 if name == '__doc__': if self.options.python_version >= (3, 0): - typ = UnboundType('__builtins__.str') # type: Type + typ: Type = UnboundType("__builtins__.str") else: typ = UnionType([UnboundType('__builtins__.str'), UnboundType('__builtins__.unicode')]) @@ -648,7 +649,7 @@ def prepare_method_signature(self, func: FuncDef, info: TypeInfo) -> None: elif isinstance(functype, CallableType): self_type = get_proper_type(functype.arg_types[0]) if isinstance(self_type, AnyType): - leading_type = fill_typevars(info) # type: Type + leading_type: Type = fill_typevars(info) if func.is_class or func.name == '__new__': leading_type = self.class_type(leading_type) func.type = replace_implicit_first_type(functype, leading_type) @@ -761,7 +762,7 @@ def analyze_overload_sigs_and_impl( """ types = [] non_overload_indexes = [] - impl = None # type: Optional[OverloadPart] + impl: Optional[OverloadPart] = None for i, item in enumerate(defn.items): if i != 0: # Assume that the first item was already visited @@ -982,7 +983,7 @@ def visit_decorator(self, dec: Decorator) -> None: dec.func._fullname = self.qualified_name(dec.name) for d in dec.decorators: d.accept(self) - removed = [] # type: List[int] + removed: List[int] = [] no_type_check = False for i, d in enumerate(dec.decorators): # A bunch of decorators are special cased here. @@ -1258,8 +1259,8 @@ class Foo(Bar, Generic[T]): ... Returns (remaining base expressions, inferred type variables, is protocol). 
""" - removed = [] # type: List[int] - declared_tvars = [] # type: TypeVarLikeList + removed: List[int] = [] + declared_tvars: TypeVarLikeList = [] is_protocol = False for i, base_expr in enumerate(base_type_exprs): self.analyze_type_expr(base_expr) @@ -1304,7 +1305,7 @@ class Foo(Bar, Generic[T]): ... # grained incremental mode. defn.removed_base_type_exprs.append(defn.base_type_exprs[i]) del base_type_exprs[i] - tvar_defs = [] # type: List[TypeVarDef] + tvar_defs: List[TypeVarDef] = [] for name, tvar_expr in declared_tvars: tvar_def = self.tvar_scope.bind_new(name, tvar_expr) assert isinstance(tvar_def, TypeVarDef), ( @@ -1335,7 +1336,7 @@ def analyze_class_typevar_declaration( sym.node.fullname == 'typing.Protocol' and base.args or sym.node.fullname == 'typing_extensions.Protocol' and base.args): is_proto = sym.node.fullname != 'typing.Generic' - tvars = [] # type: TypeVarLikeList + tvars: TypeVarLikeList = [] for arg in unbound.args: tag = self.track_incomplete_refs() tvar = self.analyze_unbound_tvar(arg) @@ -1367,7 +1368,7 @@ def get_all_bases_tvars(self, base_type_exprs: List[Expression], removed: List[int]) -> TypeVarLikeList: """Return all type variable references in bases.""" - tvars = [] # type: TypeVarLikeList + tvars: TypeVarLikeList = [] for i, base_expr in enumerate(base_type_exprs): if i not in removed: try: @@ -1491,7 +1492,7 @@ def configure_base_classes(self, related to the base classes: defn.info.bases, defn.info.mro, and miscellaneous others (at least tuple_type, fallback_to_any, and is_enum.) """ - base_types = [] # type: List[Instance] + base_types: List[Instance] = [] info = defn.info info.tuple_type = None @@ -1600,7 +1601,7 @@ def update_metaclass(self, defn: ClassDef) -> None: """ # Look for "__metaclass__ = " in Python 2 - python2_meta_expr = None # type: Optional[Expression] + python2_meta_expr: Optional[Expression] = None if self.options.python_version[0] == 2: for body_node in defn.defs.body: if isinstance(body_node, ClassDef) and body_node.name == "__metaclass__": @@ -1612,7 +1613,7 @@ def update_metaclass(self, defn: ClassDef) -> None: python2_meta_expr = body_node.rvalue # Look for six.with_metaclass(M, B1, B2, ...) 
- with_meta_expr = None # type: Optional[Expression] + with_meta_expr: Optional[Expression] = None if len(defn.base_type_exprs) == 1: base_expr = defn.base_type_exprs[0] if isinstance(base_expr, CallExpr) and isinstance(base_expr.callee, RefExpr): @@ -1626,7 +1627,7 @@ def update_metaclass(self, defn: ClassDef) -> None: defn.base_type_exprs = base_expr.args[1:] # Look for @six.add_metaclass(M) - add_meta_expr = None # type: Optional[Expression] + add_meta_expr: Optional[Expression] = None for dec_expr in defn.decorators: if isinstance(dec_expr, CallExpr) and isinstance(dec_expr.callee, RefExpr): dec_expr.callee.accept(self) @@ -2382,7 +2383,7 @@ def store_final_status(self, s: AssignmentStmt) -> None: s.is_final_def = True def flatten_lvalues(self, lvalues: List[Expression]) -> List[Expression]: - res = [] # type: List[Expression] + res: List[Expression] = [] for lv in lvalues: if isinstance(lv, (TupleExpr, ListExpr)): res.extend(self.flatten_lvalues(lv.items)) @@ -2452,8 +2453,8 @@ def analyze_simple_literal_type(self, rvalue: Expression, is_final: bool) -> Opt if isinstance(rvalue, FloatExpr): return self.named_type_or_none('builtins.float') - value = None # type: Optional[LiteralValue] - type_name = None # type: Optional[str] + value: Optional[LiteralValue] = None + type_name: Optional[str] = None if isinstance(rvalue, IntExpr): value, type_name = rvalue.value, 'builtins.int' if isinstance(rvalue, StrExpr): @@ -2502,7 +2503,7 @@ def analyze_alias(self, rvalue: Expression, allow_placeholder=allow_placeholder, in_dynamic_func=dynamic, global_scope=global_scope) - typ = None # type: Optional[Type] + typ: Optional[Type] = None if res: typ, depends_on = res found_type_vars = typ.accept(TypeVarLikeQuery(self.lookup_qualified, self.tvar_scope)) @@ -2573,7 +2574,7 @@ def check_and_set_up_type_alias(self, s: AssignmentStmt) -> bool: # Cannot redefine existing node as type alias. 
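# Aside (not part of the patch): flatten_lvalues above recursively unnests
# tuple/list targets such as "(a, (b, c)) = ...". The same shape on plain
# lists, as illustrative stand-ins for mypy's expression nodes:
from typing import List, Union

Nested = Union[str, list]

def flatten_sketch(lvalues: List[Nested]) -> List[str]:
    res: List[str] = []
    for lv in lvalues:
        if isinstance(lv, list):
            res.extend(flatten_sketch(lv))  # recurse into nested targets
        else:
            res.append(lv)
    return res

assert flatten_sketch(["a", ["b", ["c"]]]) == ["a", "b", "c"]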
return False - res = None # type: Optional[Type] + res: Optional[Type] = None if self.is_none_alias(rvalue): res = NoneType() alias_tvars, depends_on, qualified_tvars = \ @@ -3051,7 +3052,7 @@ def process_typevar_parameters(self, args: List[Expression], has_values = (num_values > 0) covariant = False contravariant = False - upper_bound = self.object_type() # type: Type + upper_bound: Type = self.object_type() for param_value, param_name, param_kind in zip(args, names, kinds): if not param_kind == ARG_NAMED: self.fail("Unexpected argument to TypeVar()", context) @@ -3190,7 +3191,7 @@ def basic_new_typeinfo(self, name: str, def analyze_value_types(self, items: List[Expression]) -> List[Type]: """Analyze types from values expressions in type variable definition.""" - result = [] # type: List[Type] + result: List[Type] = [] for node in items: try: analyzed = self.anal_type(expr_to_unanalyzed_type(node), @@ -3452,7 +3453,7 @@ def analyze_try_stmt(self, s: TryStmt, visitor: NodeVisitor[None]) -> None: def visit_with_stmt(self, s: WithStmt) -> None: self.statement = s - types = [] # type: List[Type] + types: List[Type] = [] if s.unanalyzed_type: assert isinstance(s.unanalyzed_type, ProperType) @@ -3474,7 +3475,7 @@ def visit_with_stmt(self, s: WithStmt) -> None: # We have multiple targets and one type self.fail('Multiple types expected for multiple "with" targets', s) - new_types = [] # type: List[Type] + new_types: List[Type] = [] for e, n in zip(s.expr, s.target): e.accept(self) if n: @@ -3656,7 +3657,7 @@ def visit_call_expr(self, expr: CallExpr) -> None: elif refers_to_fullname(expr.callee, 'builtins.reveal_locals'): # Store the local variable names into the RevealExpr for use in the # type checking pass - local_nodes = [] # type: List[Var] + local_nodes: List[Var] = [] if self.is_module_scope(): # try to determine just the variable declarations in module scope # self.globals.values() contains SymbolTableNode's @@ -3879,7 +3880,7 @@ def analyze_type_application_args(self, expr: IndexExpr) -> Optional[List[Type]] self.analyze_type_expr(index) if self.found_incomplete_ref(tag): return None - types = [] # type: List[Type] + types: List[Type] = [] if isinstance(index, TupleExpr): items = index.items else: @@ -4825,7 +4826,7 @@ def already_defined(self, original_ctx: Optional[Union[SymbolTableNode, SymbolNode]], noun: str) -> None: if isinstance(original_ctx, SymbolTableNode): - node = original_ctx.node # type: Optional[SymbolNode] + node: Optional[SymbolNode] = original_ctx.node elif isinstance(original_ctx, SymbolNode): node = original_ctx else: @@ -5111,7 +5112,7 @@ def find_duplicate(list: List[T]) -> Optional[T]: def remove_imported_names_from_symtable(names: SymbolTable, module: str) -> None: """Remove all imported names from the symbol table of a module.""" - removed = [] # type: List[str] + removed: List[str] = [] for name, node in names.items(): if node.node is None: continue @@ -5150,7 +5151,7 @@ def apply_semantic_analyzer_patches(patches: List[Tuple[int, Callable[[], None]] def names_modified_by_assignment(s: AssignmentStmt) -> List[NameExpr]: """Return all unqualified (short) names assigned to in an assignment statement.""" - result = [] # type: List[NameExpr] + result: List[NameExpr] = [] for lvalue in s.lvalues: result += names_modified_in_lvalue(lvalue) return result @@ -5163,7 +5164,7 @@ def names_modified_in_lvalue(lvalue: Lvalue) -> List[NameExpr]: elif isinstance(lvalue, StarExpr): return names_modified_in_lvalue(lvalue.expr) elif isinstance(lvalue, (ListExpr, TupleExpr)): - 
result = [] # type: List[NameExpr] + result: List[NameExpr] = [] for item in lvalue.items: result += names_modified_in_lvalue(item) return result diff --git a/mypy/semanal_classprop.py b/mypy/semanal_classprop.py index 8dc518662445..4be57b64342e 100644 --- a/mypy/semanal_classprop.py +++ b/mypy/semanal_classprop.py @@ -17,17 +17,17 @@ # These add extra ad-hoc edges to the subtyping relation. For example, # int is considered a subtype of float, even though there is no # subclass relationship. -TYPE_PROMOTIONS = { +TYPE_PROMOTIONS: Final = { 'builtins.int': 'float', 'builtins.float': 'complex', -} # type: Final +} # Hard coded type promotions for Python 3. # # Note that the bytearray -> bytes promotion is a little unsafe # as some functions only accept bytes objects. Here convenience # trumps safety. -TYPE_PROMOTIONS_PYTHON3 = TYPE_PROMOTIONS.copy() # type: Final +TYPE_PROMOTIONS_PYTHON3: Final = TYPE_PROMOTIONS.copy() TYPE_PROMOTIONS_PYTHON3.update({ 'builtins.bytearray': 'bytes', 'builtins.memoryview': 'bytes', @@ -38,7 +38,7 @@ # These promotions are unsafe, but we are doing them anyway # for convenience and also for Python 3 compatibility # (bytearray -> str). -TYPE_PROMOTIONS_PYTHON2 = TYPE_PROMOTIONS.copy() # type: Final +TYPE_PROMOTIONS_PYTHON2: Final = TYPE_PROMOTIONS.copy() TYPE_PROMOTIONS_PYTHON2.update({ 'builtins.str': 'unicode', 'builtins.bytearray': 'str', @@ -55,9 +55,9 @@ def calculate_class_abstract_status(typ: TypeInfo, is_stub_file: bool, errors: E """ if typ.typeddict_type: return # TypedDict can't be abstract - concrete = set() # type: Set[str] - abstract = [] # type: List[str] - abstract_in_this_class = [] # type: List[str] + concrete: Set[str] = set() + abstract: List[str] = [] + abstract_in_this_class: List[str] = [] if typ.is_newtype: # Special case: NewTypes are considered as always non-abstract, so they can be used as: # Config = NewType('Config', Mapping[str, str]) @@ -73,7 +73,7 @@ def calculate_class_abstract_status(typ: TypeInfo, is_stub_file: bool, errors: E # different items have a different abstract status, there # should be an error reported elsewhere. if node.items: # can be empty for invalid overloads - func = node.items[0] # type: Optional[Node] + func: Optional[Node] = node.items[0] else: func = None else: @@ -152,7 +152,7 @@ def add_type_promotion(info: TypeInfo, module_names: SymbolTable, options: Optio This includes things like 'int' being compatible with 'float'. """ defn = info.defn - promote_target = None # type: Optional[Type] + promote_target: Optional[Type] = None for decorator in defn.decorators: if isinstance(decorator, CallExpr): analyzed = decorator.analyzed diff --git a/mypy/semanal_enum.py b/mypy/semanal_enum.py index 295f142d90bf..07e8e048decd 100644 --- a/mypy/semanal_enum.py +++ b/mypy/semanal_enum.py @@ -129,7 +129,7 @@ def parse_enum_call_args(self, call: CallExpr, return self.fail_enum_call_arg( "%s() expects a string literal as the first argument" % class_name, call) items = [] - values = [] # type: List[Optional[Expression]] + values: List[Optional[Expression]] = [] if isinstance(names, (StrExpr, UnicodeExpr)): fields = names.value for field in fields.replace(',', ' ').split(): diff --git a/mypy/semanal_main.py b/mypy/semanal_main.py index 58cfa261d69a..3e7a27f80a2f 100644 --- a/mypy/semanal_main.py +++ b/mypy/semanal_main.py @@ -71,7 +71,7 @@ def semantic_analysis_for_scc(graph: 'Graph', scc: List[str], errors: Errors) -> The scc will be processed roughly in the order the modules are included in the list. 
""" - patches = [] # type: Patches + patches: Patches = [] # Note that functions can't define new module-level attributes # using 'global x', since module top levels are fully processed # before functions. This limitation is unlikely to go away soon. @@ -116,7 +116,7 @@ def semantic_analysis_for_targets( defined on self) removed by AST stripper that may need to be reintroduced here. They must be added before any methods are analyzed. """ - patches = [] # type: Patches + patches: Patches = [] if any(isinstance(n.node, MypyFile) for n in nodes): # Process module top level first (if needed). process_top_levels(graph, [state.id], patches) @@ -190,7 +190,7 @@ def process_top_levels(graph: 'Graph', scc: List[str], patches: Patches) -> None if final_iteration: # Give up. It's impossible to bind all names. state.manager.incomplete_namespaces.clear() - all_deferred = [] # type: List[str] + all_deferred: List[str] = [] any_progress = False while worklist: next_id = worklist.pop() @@ -289,7 +289,7 @@ def process_top_level_function(analyzer: 'SemanticAnalyzer', def get_all_leaf_targets(file: MypyFile) -> List[TargetInfo]: """Return all leaf targets in a symbol table (module-level and methods).""" - result = [] # type: List[TargetInfo] + result: List[TargetInfo] = [] for fullname, node, active_type in file.local_definitions(): if isinstance(node.node, (FuncDef, OverloadedFuncDef, Decorator)): result.append((fullname, node.node, active_type)) @@ -373,7 +373,7 @@ def check_type_arguments_in_targets(targets: List[FineGrainedDeferredNode], stat with state.wrap_context(): with strict_optional_set(state.options.strict_optional): for target in targets: - func = None # type: Optional[Union[FuncDef, OverloadedFuncDef]] + func: Optional[Union[FuncDef, OverloadedFuncDef]] = None if isinstance(target.node, (FuncDef, OverloadedFuncDef)): func = target.node saved = (state.id, target.active_typeinfo, func) # module, class, function diff --git a/mypy/semanal_namedtuple.py b/mypy/semanal_namedtuple.py index 4adeb56273c6..227a038df5f6 100644 --- a/mypy/semanal_namedtuple.py +++ b/mypy/semanal_namedtuple.py @@ -26,15 +26,26 @@ # Matches "_prohibited" in typing.py, but adds __annotations__, which works at runtime but can't # easily be supported in a static checker. 
-NAMEDTUPLE_PROHIBITED_NAMES = ('__new__', '__init__', '__slots__', '__getnewargs__', - '_fields', '_field_defaults', '_field_types', - '_make', '_replace', '_asdict', '_source', - '__annotations__') # type: Final +NAMEDTUPLE_PROHIBITED_NAMES: Final = ( + "__new__", + "__init__", + "__slots__", + "__getnewargs__", + "_fields", + "_field_defaults", + "_field_types", + "_make", + "_replace", + "_asdict", + "_source", + "__annotations__", +) -NAMEDTUP_CLASS_ERROR = ('Invalid statement in NamedTuple definition; ' - 'expected "field_name: field_type [= default]"') # type: Final +NAMEDTUP_CLASS_ERROR: Final = ( + "Invalid statement in NamedTuple definition; " 'expected "field_name: field_type [= default]"' +) -SELF_TVAR_NAME = '_NT' # type: Final +SELF_TVAR_NAME: Final = "_NT" class NamedTupleAnalyzer: @@ -87,9 +98,9 @@ def check_namedtuple_classdef(self, defn: ClassDef, is_stub_file: bool return [], [], {} if len(defn.base_type_exprs) > 1: self.fail('NamedTuple should be a single base', defn) - items = [] # type: List[str] - types = [] # type: List[Type] - default_items = {} # type: Dict[str, Expression] + items: List[str] = [] + types: List[Type] = [] + default_items: Dict[str, Expression] = {} for stmt in defn.defs.body: if not isinstance(stmt, AssignmentStmt): # Still allow pass or ... (for empty namedtuples). @@ -256,7 +267,7 @@ def parse_namedtuple_args(self, call: CallExpr, fullname: str if len(args) < 2: self.fail("Too few arguments for namedtuple()", call) return None - defaults = [] # type: List[Expression] + defaults: List[Expression] = [] if len(args) > 2: # Typed namedtuple doesn't support additional arguments. if fullname == 'typing.NamedTuple': @@ -284,7 +295,7 @@ def parse_namedtuple_args(self, call: CallExpr, fullname: str "namedtuple() expects a string literal as the first argument", call) return None typename = cast(Union[StrExpr, BytesExpr, UnicodeExpr], call.args[0]).value - types = [] # type: List[Type] + types: List[Type] = [] if not isinstance(args[1], (ListExpr, TupleExpr)): if (fullname == 'collections.namedtuple' and isinstance(args[1], (StrExpr, BytesExpr, UnicodeExpr))): @@ -331,8 +342,8 @@ def parse_namedtuple_fields_with_types(self, nodes: List[Expression], context: C Return (names, types, defaults, whether types are all ready), or None if error occurred. """ - items = [] # type: List[str] - types = [] # type: List[Type] + items: List[str] = [] + types: List[Type] = [] for item in nodes: if isinstance(item, TupleExpr): if len(item.items) != 2: diff --git a/mypy/semanal_shared.py b/mypy/semanal_shared.py index 60bcdb2cb928..6b0fe1251693 100644 --- a/mypy/semanal_shared.py +++ b/mypy/semanal_shared.py @@ -22,7 +22,7 @@ # (after the main pass): # Fix fallbacks (does joins) -PRIORITY_FALLBACKS = 1 # type: Final +PRIORITY_FALLBACKS: Final = 1 @trait diff --git a/mypy/semanal_typeargs.py b/mypy/semanal_typeargs.py index 38a13c12b468..73eaa18ef490 100644 --- a/mypy/semanal_typeargs.py +++ b/mypy/semanal_typeargs.py @@ -31,7 +31,7 @@ def __init__(self, errors: Errors, options: Options, is_typeshed_file: bool) -> self.recurse_into_functions = True # Keep track of the type aliases already visited. This is needed to avoid # infinite recursion on types like A = Union[int, List[A]]. 
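# Aside (not part of the patch): the seen-set guard described above, reduced
# to a toy graph walk that survives the cycle in "A = Union[int, List[A]]".
# Names are illustrative assumptions.
from typing import Dict, List, Set

def walk(name: str, graph: Dict[str, List[str]], seen: Set[str]) -> None:
    if name in seen:
        return                        # break the cycle instead of recursing
    seen.add(name)
    for dep in graph.get(name, []):
        walk(dep, graph, seen)

seen: Set[str] = set()
walk("A", {"A": ["int", "List[A]"], "List[A]": ["A"]}, seen)  # terminates
assert "A" in seen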
- self.seen_aliases = set() # type: Set[TypeAliasType] + self.seen_aliases: Set[TypeAliasType] = set() def visit_mypy_file(self, o: MypyFile) -> None: self.errors.set_file(o.path, o.fullname, scope=self.scope) diff --git a/mypy/semanal_typeddict.py b/mypy/semanal_typeddict.py index 14e62b5929bd..d954529c1b14 100644 --- a/mypy/semanal_typeddict.py +++ b/mypy/semanal_typeddict.py @@ -18,8 +18,9 @@ from mypy.errorcodes import ErrorCode from mypy import errorcodes as codes -TPDICT_CLASS_ERROR = ('Invalid statement in TypedDict definition; ' - 'expected "field_name: field_type"') # type: Final +TPDICT_CLASS_ERROR: Final = ( + "Invalid statement in TypedDict definition; " 'expected "field_name: field_type"' +) class TypedDictAnalyzer: @@ -72,7 +73,7 @@ def analyze_typeddict_classdef(self, defn: ClassDef) -> Tuple[bool, Optional[Typ not self.is_typeddict(expr) for expr in defn.base_type_exprs): self.fail("All bases of a new TypedDict must be TypedDict types", defn) typeddict_bases = list(filter(self.is_typeddict, defn.base_type_exprs)) - keys = [] # type: List[str] + keys: List[str] = [] types = [] required_keys = set() @@ -121,8 +122,8 @@ def analyze_typeddict_classdef_fields( * List of types for each key * Set of required keys """ - fields = [] # type: List[str] - types = [] # type: List[Type] + fields: List[str] = [] + types: List[Type] = [] for stmt in defn.defs.body: if not isinstance(stmt, AssignmentStmt): # Still allow pass or ... (for empty TypedDict's). @@ -156,7 +157,7 @@ def analyze_typeddict_classdef_fields( elif not isinstance(stmt.rvalue, TempNode): # x: int assigns rvalue to TempNode(AnyType()) self.fail('Right hand side values are not supported in TypedDict', stmt) - total = True # type: Optional[bool] + total: Optional[bool] = True if 'total' in defn.keywords: total = self.api.parse_bool(defn.keywords['total']) if total is None: @@ -243,7 +244,7 @@ def parse_typeddict_args( if not isinstance(args[1], DictExpr): return self.fail_typeddict_arg( "TypedDict() expects a dictionary literal as the second argument", call) - total = True # type: Optional[bool] + total: Optional[bool] = True if len(args) == 3: total = self.api.parse_bool(call.args[2]) if total is None: @@ -275,8 +276,8 @@ def parse_typeddict_fields_with_types( Return names, types, was there an error. If some type is not ready, return None. """ seen_keys = set() - items = [] # type: List[str] - types = [] # type: List[Type] + items: List[str] = [] + types: List[Type] = [] for (field_name_expr, field_type_expr) in dict_items: if isinstance(field_name_expr, (StrExpr, BytesExpr, UnicodeExpr)): key = field_name_expr.value diff --git a/mypy/server/astdiff.py b/mypy/server/astdiff.py index f74f3f35c7e1..a7250181de82 100644 --- a/mypy/server/astdiff.py +++ b/mypy/server/astdiff.py @@ -128,7 +128,7 @@ def snapshot_symbol_table(name_prefix: str, table: SymbolTable) -> Dict[str, Sna things defined in other modules are represented just by the names of the targets. """ - result = {} # type: Dict[str, SnapshotItem] + result: Dict[str, SnapshotItem] = {} for name, symbol in table.items(): node = symbol.node # TODO: cross_ref? diff --git a/mypy/server/astmerge.py b/mypy/server/astmerge.py index 8b9726019224..f8fa2713a757 100644 --- a/mypy/server/astmerge.py +++ b/mypy/server/astmerge.py @@ -103,7 +103,7 @@ def replacement_map_from_symbol_table( the given module prefix. Don't recurse into other modules accessible through the symbol table. 
""" - replacements = {} # type: Dict[SymbolNode, SymbolNode] + replacements: Dict[SymbolNode, SymbolNode] = {} for name, node in old.items(): if (name in new and (node.kind == MDEF or node.node and get_prefix(node.node.fullname) == prefix)): diff --git a/mypy/server/aststrip.py b/mypy/server/aststrip.py index 8572314fc75a..3af7efe7e142 100644 --- a/mypy/server/aststrip.py +++ b/mypy/server/aststrip.py @@ -71,7 +71,7 @@ def strip_target(node: Union[MypyFile, FuncDef, OverloadedFuncDef], class NodeStripVisitor(TraverserVisitor): def __init__(self, saved_class_attrs: SavedAttributes) -> None: # The current active class. - self.type = None # type: Optional[TypeInfo] + self.type: Optional[TypeInfo] = None # This is True at class scope, but not in methods. self.is_class_body = False # By default, process function definitions. If False, don't -- this is used for diff --git a/mypy/server/deps.py b/mypy/server/deps.py index 0d04575d1dd6..f67fda425ecd 100644 --- a/mypy/server/deps.py +++ b/mypy/server/deps.py @@ -165,7 +165,7 @@ def __init__(self, # are preserved at alias expansion points in `semanal.py`, stored as an attribute # on MypyFile, and then passed here. self.alias_deps = alias_deps - self.map = {} # type: Dict[str, Set[str]] + self.map: Dict[str, Set[str]] = {} self.is_class = False self.is_package_init_file = False self.options = options @@ -184,7 +184,7 @@ def visit_func_def(self, o: FuncDef) -> None: target = self.scope.current_target() if o.type: if self.is_class and isinstance(o.type, FunctionLike): - signature = bind_self(o.type) # type: Type + signature: Type = bind_self(o.type) else: signature = o.type for trigger in self.get_type_triggers(signature): @@ -219,7 +219,7 @@ def visit_decorator(self, o: Decorator) -> None: # then if `dec` is unannotated, then it will "spoil" `func` and consequently # all call sites, making them all `Any`. for d in o.decorators: - tname = None # type: Optional[str] + tname: Optional[str] = None if isinstance(d, RefExpr) and d.fullname is not None: tname = d.fullname if (isinstance(d, CallExpr) and isinstance(d.callee, RefExpr) and @@ -419,7 +419,7 @@ def visit_assignment_stmt(self, o: AssignmentStmt) -> None: # then it will make all points of use of `x` unchecked. 
if (isinstance(rvalue, CallExpr) and isinstance(rvalue.callee, RefExpr) and rvalue.callee.fullname is not None): - fname = None # type: Optional[str] + fname: Optional[str] = None if isinstance(rvalue.callee.node, TypeInfo): # use actual __init__ as a dependency source init = rvalue.callee.node.get('__init__') @@ -871,7 +871,7 @@ def get_type_triggers(typ: Type, use_logical_deps: bool) -> List[str]: class TypeTriggersVisitor(TypeVisitor[List[str]]): def __init__(self, use_logical_deps: bool) -> None: - self.deps = [] # type: List[str] + self.deps: List[str] = [] self.use_logical_deps = use_logical_deps def get_type_triggers(self, typ: Type) -> List[str]: @@ -1003,7 +1003,7 @@ def dump_all_dependencies(modules: Dict[str, MypyFile], python_version: Tuple[int, int], options: Options) -> None: """Generate dependencies for all interesting modules and print them to stdout.""" - all_deps = {} # type: Dict[str, Set[str]] + all_deps: Dict[str, Set[str]] = {} for id, node in modules.items(): # Uncomment for debugging: # print('processing', id) diff --git a/mypy/server/mergecheck.py b/mypy/server/mergecheck.py index afa450fb5a75..476d1cc809f7 100644 --- a/mypy/server/mergecheck.py +++ b/mypy/server/mergecheck.py @@ -7,7 +7,7 @@ from mypy.server.objgraph import get_reachable_graph, get_path # If True, print more verbose output on failure. -DUMP_MISMATCH_NODES = False # type: Final +DUMP_MISMATCH_NODES: Final = False def check_consistency(o: object) -> None: @@ -19,7 +19,7 @@ def check_consistency(o: object) -> None: reachable = list(seen.values()) syms = [x for x in reachable if isinstance(x, SymbolNode)] - m = {} # type: Dict[str, SymbolNode] + m: Dict[str, SymbolNode] = {} for sym in syms: if isinstance(sym, FakeInfo): continue diff --git a/mypy/server/objgraph.py b/mypy/server/objgraph.py index a7b45f5ec81f..89f0ba79388b 100644 --- a/mypy/server/objgraph.py +++ b/mypy/server/objgraph.py @@ -7,46 +7,48 @@ from typing import List, Dict, Iterator, Tuple, Mapping from typing_extensions import Final -method_descriptor_type = type(object.__dir__) # type: Final -method_wrapper_type = type(object().__ne__) # type: Final -wrapper_descriptor_type = type(object.__ne__) # type: Final - -FUNCTION_TYPES = (types.BuiltinFunctionType, - types.FunctionType, - types.MethodType, - method_descriptor_type, - wrapper_descriptor_type, - method_wrapper_type) # type: Final - -ATTR_BLACKLIST = { +method_descriptor_type: Final = type(object.__dir__) +method_wrapper_type: Final = type(object().__ne__) +wrapper_descriptor_type: Final = type(object.__ne__) + +FUNCTION_TYPES: Final = ( + types.BuiltinFunctionType, + types.FunctionType, + types.MethodType, + method_descriptor_type, + wrapper_descriptor_type, + method_wrapper_type, +) + +ATTR_BLACKLIST: Final = { '__doc__', '__name__', '__class__', '__dict__', -} # type: Final +} # Instances of these types can't have references to other objects -ATOMIC_TYPE_BLACKLIST = { +ATOMIC_TYPE_BLACKLIST: Final = { bool, int, float, str, type(None), object, -} # type: Final +} # Don't look at most attributes of these types -COLLECTION_TYPE_BLACKLIST = { +COLLECTION_TYPE_BLACKLIST: Final = { list, set, dict, tuple, -} # type: Final +} # Don't return these objects -TYPE_BLACKLIST = { +TYPE_BLACKLIST: Final = { weakref.ReferenceType, -} # type: Final +} def isproperty(o: object, attr: str) -> bool: diff --git a/mypy/server/subexpr.py b/mypy/server/subexpr.py index cc645332d9d4..2fb0ef4ffaf1 100644 --- a/mypy/server/subexpr.py +++ b/mypy/server/subexpr.py @@ -20,7 +20,7 @@ def 
get_subexpressions(node: Node) -> List[Expression]: class SubexpressionFinder(TraverserVisitor): def __init__(self) -> None: - self.expressions = [] # type: List[Expression] + self.expressions: List[Expression] = [] def visit_int_expr(self, o: Expression) -> None: self.add(o) diff --git a/mypy/server/trigger.py b/mypy/server/trigger.py index c9f206d66a6d..c10264766ae6 100644 --- a/mypy/server/trigger.py +++ b/mypy/server/trigger.py @@ -5,7 +5,7 @@ # Used as a suffix for triggers to handle "from m import *" dependencies (see also # make_wildcard_trigger) -WILDCARD_TAG = '[wildcard]' # type: Final +WILDCARD_TAG: Final = "[wildcard]" def make_trigger(name: str) -> str: diff --git a/mypy/server/update.py b/mypy/server/update.py index 085c143fadd1..03d79b8af7fb 100644 --- a/mypy/server/update.py +++ b/mypy/server/update.py @@ -148,7 +148,7 @@ from mypy.util import module_prefix, split_target from mypy.typestate import TypeState -MAX_ITER = 1000 # type: Final +MAX_ITER: Final = 1000 SENSITIVE_INTERNAL_MODULES = tuple(core_modules) + ("mypy_extensions", "typing_extensions") @@ -173,22 +173,22 @@ def __init__(self, result: BuildResult) -> None: self.previous_targets_with_errors = manager.errors.targets() self.previous_messages = result.errors[:] # Module, if any, that had blocking errors in the last run as (id, path) tuple. - self.blocking_error = None # type: Optional[Tuple[str, str]] + self.blocking_error: Optional[Tuple[str, str]] = None # Module that we haven't processed yet but that are known to be stale. - self.stale = [] # type: List[Tuple[str, str]] + self.stale: List[Tuple[str, str]] = [] # Disable the cache so that load_graph doesn't try going back to disk # for the cache. self.manager.cache_enabled = False # Some hints to the test suite about what is going on: # Active triggers during the last update - self.triggered = [] # type: List[str] + self.triggered: List[str] = [] # Modules passed to update during the last update - self.changed_modules = [] # type: List[Tuple[str, str]] + self.changed_modules: List[Tuple[str, str]] = [] # Modules processed during the last update - self.updated_modules = [] # type: List[str] + self.updated_modules: List[str] = [] # Targets processed during last update (for testing only). - self.processed_targets = [] # type: List[str] + self.processed_targets: List[str] = [] def update(self, changed_modules: List[Tuple[str, str]], @@ -383,7 +383,7 @@ def update_module(self, t0 = time.time() # Record symbol table snapshot of old version the changed module. - old_snapshots = {} # type: Dict[str, Dict[str, SnapshotItem]] + old_snapshots: Dict[str, Dict[str, SnapshotItem]] = {} if module in manager.modules: snapshot = snapshot_symbol_table(module, manager.modules[module].names) old_snapshots[module] = snapshot @@ -439,7 +439,7 @@ def find_unloaded_deps(manager: BuildManager, graph: Dict[str, State], dependencies.) """ worklist = list(initial) - seen = set() # type: Set[str] + seen: Set[str] = set() unloaded = [] while worklist: node = worklist.pop() @@ -566,7 +566,7 @@ def restore(ids: List[str]) -> None: elif id in graph: del graph[id] - new_modules = [] # type: List[State] + new_modules: List[State] = [] try: if module in graph: del graph[module] @@ -613,7 +613,7 @@ def restore(ids: List[str]) -> None: return BlockedUpdate(module, path, remaining_modules, err.messages) # Merge old and new ASTs. 
- new_modules_dict = {module: state.tree} # type: Dict[str, Optional[MypyFile]] + new_modules_dict: Dict[str, Optional[MypyFile]] = {module: state.tree} replace_modules_with_new_variants(manager, graph, {orig_module: orig_tree}, new_modules_dict) t1 = time.time() @@ -687,7 +687,7 @@ def delete_module(module_id: str, def dedupe_modules(modules: List[Tuple[str, str]]) -> List[Tuple[str, str]]: - seen = set() # type: Set[str] + seen: Set[str] = set() result = [] for id, path in modules: if id not in seen: @@ -719,7 +719,7 @@ def calculate_active_triggers(manager: BuildManager, For example, if only the signature of function m.f is different in the new symbol table, return {''}. """ - names = set() # type: Set[str] + names: Set[str] = set() for id in new_modules: snapshot1 = old_snapshots.get(id) if snapshot1 is None: @@ -798,7 +798,7 @@ def propagate_changes_using_dependencies( """ num_iter = 0 - remaining_modules = [] # type: List[Tuple[str, str]] + remaining_modules: List[Tuple[str, str]] = [] # Propagate changes until nothing visible has changed during the last # iteration. @@ -856,11 +856,11 @@ def find_targets_recursive( * Dictionary from module id to a set of stale targets. * A set of module ids for unparsed modules with stale targets. """ - result = {} # type: Dict[str, Set[FineGrainedDeferredNode]] + result: Dict[str, Set[FineGrainedDeferredNode]] = {} worklist = triggers - processed = set() # type: Set[str] - stale_protos = set() # type: Set[TypeInfo] - unloaded_files = set() # type: Set[str] + processed: Set[str] = set() + stale_protos: Set[TypeInfo] = set() + unloaded_files: Set[str] = set() # Find AST nodes corresponding to each target. # @@ -950,7 +950,7 @@ def key(node: FineGrainedDeferredNode) -> int: manager.errors.add_error_info(info) # Strip semantic analysis information. 
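# Aside (not part of the patch): the order-preserving dedupe idiom used by
# dedupe_modules above, extracted into a standalone sketch:
from typing import List, Set, Tuple

def dedupe(modules: List[Tuple[str, str]]) -> List[Tuple[str, str]]:
    seen: Set[str] = set()
    result = []
    for id, path in modules:
        if id not in seen:           # keep the first (id, path) per module id
            seen.add(id)
            result.append((id, path))
    return result

assert dedupe([("m", "a.py"), ("m", "b.py"), ("n", "c.py")]) == [("m", "a.py"), ("n", "c.py")]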
- saved_attrs = {} # type: SavedAttributes + saved_attrs: SavedAttributes = {} for deferred in nodes: processed_targets.append(deferred.node.fullname) strip_target(deferred.node, saved_attrs) @@ -1055,8 +1055,8 @@ def not_found() -> None: components = rest.split('.') else: components = [] - node = modules[module] # type: Optional[SymbolNode] - file = None # type: Optional[MypyFile] + node: Optional[SymbolNode] = modules[module] + file: Optional[MypyFile] = None active_class = None for c in components: if isinstance(node, TypeInfo): @@ -1086,7 +1086,7 @@ def not_found() -> None: not_found() return [], None result = [FineGrainedDeferredNode(file, None)] - stale_info = None # type: Optional[TypeInfo] + stale_info: Optional[TypeInfo] = None if node.is_protocol: stale_info = node for name, symnode in node.names.items(): @@ -1140,15 +1140,15 @@ def target_from_node(module: str, return '%s.%s' % (module, node.name) -if sys.platform != 'win32': - INIT_SUFFIXES = ('/__init__.py', '/__init__.pyi') # type: Final +if sys.platform != "win32": + INIT_SUFFIXES: Final = ("/__init__.py", "/__init__.pyi") else: - INIT_SUFFIXES = ( + INIT_SUFFIXES: Final = ( os.sep + '__init__.py', os.sep + '__init__.pyi', os.altsep + '__init__.py', os.altsep + '__init__.pyi', - ) # type: Final + ) def refresh_suppressed_submodules( diff --git a/mypy/sharedparse.py b/mypy/sharedparse.py index 88e77ecd0dc2..7dbaffcbee97 100644 --- a/mypy/sharedparse.py +++ b/mypy/sharedparse.py @@ -4,7 +4,7 @@ """Shared logic between our three mypy parser files.""" -_NON_BINARY_MAGIC_METHODS = { +_NON_BINARY_MAGIC_METHODS: Final = { "__abs__", "__call__", "__complex__", @@ -37,16 +37,16 @@ "__setitem__", "__str__", "__unicode__", -} # type: Final +} -MAGIC_METHODS_ALLOWING_KWARGS = { +MAGIC_METHODS_ALLOWING_KWARGS: Final = { "__init__", "__init_subclass__", "__new__", "__call__", -} # type: Final +} -BINARY_MAGIC_METHODS = { +BINARY_MAGIC_METHODS: Final = { "__add__", "__and__", "__cmp__", @@ -97,13 +97,13 @@ "__sub__", "__truediv__", "__xor__", -} # type: Final +} assert not (_NON_BINARY_MAGIC_METHODS & BINARY_MAGIC_METHODS) -MAGIC_METHODS = _NON_BINARY_MAGIC_METHODS | BINARY_MAGIC_METHODS # type: Final +MAGIC_METHODS: Final = _NON_BINARY_MAGIC_METHODS | BINARY_MAGIC_METHODS -MAGIC_METHODS_POS_ARGS_ONLY = MAGIC_METHODS - MAGIC_METHODS_ALLOWING_KWARGS # type: Final +MAGIC_METHODS_POS_ARGS_ONLY: Final = MAGIC_METHODS - MAGIC_METHODS_ALLOWING_KWARGS def special_function_elide_names(name: str) -> bool: diff --git a/mypy/solve.py b/mypy/solve.py index b89c8f35f350..8a3280e33c0b 100644 --- a/mypy/solve.py +++ b/mypy/solve.py @@ -22,17 +22,17 @@ def solve_constraints(vars: List[TypeVarId], constraints: List[Constraint], pick AnyType. """ # Collect a list of constraints for each type variable. - cmap = defaultdict(list) # type: Dict[TypeVarId, List[Constraint]] + cmap: Dict[TypeVarId, List[Constraint]] = defaultdict(list) for con in constraints: cmap[con.type_var].append(con) - res = [] # type: List[Optional[Type]] + res: List[Optional[Type]] = [] # Solve each type variable separately. for tvar in vars: - bottom = None # type: Optional[Type] - top = None # type: Optional[Type] - candidate = None # type: Optional[Type] + bottom: Optional[Type] = None + top: Optional[Type] = None + candidate: Optional[Type] = None # Process each constraint separately, and calculate the lower and upper # bounds based on constraints. 
Note that we assume that the constraint diff --git a/mypy/state.py b/mypy/state.py index 16f5c3ea2985..475559899da6 100644 --- a/mypy/state.py +++ b/mypy/state.py @@ -6,7 +6,7 @@ # Value varies by file being processed strict_optional = False -find_occurrences = None # type: Optional[Tuple[str, str]] +find_occurrences: Optional[Tuple[str, str]] = None @contextmanager diff --git a/mypy/stats.py b/mypy/stats.py index 17725ac86bdc..a9769b55e20d 100644 --- a/mypy/stats.py +++ b/mypy/stats.py @@ -24,19 +24,19 @@ from mypy.util import correct_relative_import from mypy.argmap import map_formals_to_actuals -TYPE_EMPTY = 0 # type: Final -TYPE_UNANALYZED = 1 # type: Final # type of non-typechecked code -TYPE_PRECISE = 2 # type: Final -TYPE_IMPRECISE = 3 # type: Final -TYPE_ANY = 4 # type: Final +TYPE_EMPTY: Final = 0 +TYPE_UNANALYZED: Final = 1 # type of non-typechecked code +TYPE_PRECISE: Final = 2 +TYPE_IMPRECISE: Final = 3 +TYPE_ANY: Final = 4 -precision_names = [ +precision_names: Final = [ 'empty', 'unanalyzed', 'precise', 'imprecise', 'any', -] # type: Final +] class StatisticsVisitor(TraverserVisitor): @@ -68,10 +68,10 @@ def __init__(self, self.line = -1 - self.line_map = {} # type: Dict[int, int] + self.line_map: Dict[int, int] = {} - self.type_of_any_counter = Counter() # type: typing.Counter[int] - self.any_line_map = {} # type: Dict[int, List[AnyType]] + self.type_of_any_counter: typing.Counter[int] = Counter() + self.any_line_map: Dict[int, List[AnyType]] = {} # For each scope (top level/function), whether the scope was type checked # (annotated function). @@ -79,7 +79,7 @@ def __init__(self, # TODO: Handle --check-untyped-defs self.checked_scopes = [True] - self.output = [] # type: List[str] + self.output: List[str] = [] TraverserVisitor.__init__(self) diff --git a/mypy/strconv.py b/mypy/strconv.py index 5cc890bd91dc..4227eabb2a06 100644 --- a/mypy/strconv.py +++ b/mypy/strconv.py @@ -24,7 +24,7 @@ class StrConv(NodeVisitor[str]): def __init__(self, show_ids: bool = False) -> None: self.show_ids = show_ids - self.id_mapper = None # type: Optional[IdMapper] + self.id_mapper: Optional[IdMapper] = None if show_ids: self.id_mapper = IdMapper() @@ -58,10 +58,10 @@ def func_helper(self, o: 'mypy.nodes.FuncItem') -> List[object]: array with information specific to methods, global functions or anonymous functions. """ - args = [] # type: List[Union[mypy.nodes.Var, Tuple[str, List[mypy.nodes.Node]]]] - extra = [] # type: List[Tuple[str, List[mypy.nodes.Var]]] + args: List[Union[mypy.nodes.Var, Tuple[str, List[mypy.nodes.Node]]]] = [] + extra: List[Tuple[str, List[mypy.nodes.Var]]] = [] for arg in o.arguments: - kind = arg.kind # type: int + kind: int = arg.kind if kind in (mypy.nodes.ARG_POS, mypy.nodes.ARG_NAMED): args.append(arg.variable) elif kind in (mypy.nodes.ARG_OPT, mypy.nodes.ARG_NAMED_OPT): @@ -71,7 +71,7 @@ def func_helper(self, o: 'mypy.nodes.FuncItem') -> List[object]: extra.append(('VarArg', [arg.variable])) elif kind == mypy.nodes.ARG_STAR2: extra.append(('DictVarArg', [arg.variable])) - a = [] # type: List[Any] + a: List[Any] = [] if args: a.append(('Args', args)) if o.type: @@ -86,7 +86,7 @@ def func_helper(self, o: 'mypy.nodes.FuncItem') -> List[object]: def visit_mypy_file(self, o: 'mypy.nodes.MypyFile') -> str: # Skip implicit definitions. - a = [o.defs] # type: List[Any] + a: List[Any] = [o.defs] if o.is_bom: a.insert(0, 'BOM') # Omit path to special file with name "main". 
This is used to simplify @@ -141,7 +141,7 @@ def visit_func_def(self, o: 'mypy.nodes.FuncDef') -> str: return self.dump(a, o) def visit_overloaded_func_def(self, o: 'mypy.nodes.OverloadedFuncDef') -> str: - a = o.items[:] # type: Any + a: Any = o.items[:] if o.type: a.insert(0, o.type) if o.impl: @@ -203,7 +203,7 @@ def visit_expression_stmt(self, o: 'mypy.nodes.ExpressionStmt') -> str: return self.dump([o.expr], o) def visit_assignment_stmt(self, o: 'mypy.nodes.AssignmentStmt') -> str: - a = [] # type: List[Any] + a: List[Any] = [] if len(o.lvalues) > 1: a = [('Lvalues', o.lvalues)] else: @@ -217,13 +217,13 @@ def visit_operator_assignment_stmt(self, o: 'mypy.nodes.OperatorAssignmentStmt') return self.dump([o.op, o.lvalue, o.rvalue], o) def visit_while_stmt(self, o: 'mypy.nodes.WhileStmt') -> str: - a = [o.expr, o.body] # type: List[Any] + a: List[Any] = [o.expr, o.body] if o.else_body: a.append(('Else', o.else_body.body)) return self.dump(a, o) def visit_for_stmt(self, o: 'mypy.nodes.ForStmt') -> str: - a = [] # type: List[Any] + a: List[Any] = [] if o.is_async: a.append(('Async', '')) a.append(o.index) @@ -238,7 +238,7 @@ def visit_return_stmt(self, o: 'mypy.nodes.ReturnStmt') -> str: return self.dump([o.expr], o) def visit_if_stmt(self, o: 'mypy.nodes.IfStmt') -> str: - a = [] # type: List[Any] + a: List[Any] = [] for i in range(len(o.expr)): a.append(('If', [o.expr[i]])) a.append(('Then', o.body[i].body)) @@ -273,7 +273,7 @@ def visit_del_stmt(self, o: 'mypy.nodes.DelStmt') -> str: return self.dump([o.expr], o) def visit_try_stmt(self, o: 'mypy.nodes.TryStmt') -> str: - a = [o.body] # type: List[Any] + a: List[Any] = [o.body] for i in range(len(o.vars)): a.append(o.types[i]) @@ -289,7 +289,7 @@ def visit_try_stmt(self, o: 'mypy.nodes.TryStmt') -> str: return self.dump(a, o) def visit_with_stmt(self, o: 'mypy.nodes.WithStmt') -> str: - a = [] # type: List[Any] + a: List[Any] = [] if o.is_async: a.append(('Async', '')) for i in range(len(o.expr)): @@ -301,7 +301,7 @@ def visit_with_stmt(self, o: 'mypy.nodes.WithStmt') -> str: return self.dump(a + [o.body], o) def visit_print_stmt(self, o: 'mypy.nodes.PrintStmt') -> str: - a = o.args[:] # type: List[Any] + a: List[Any] = o.args[:] if o.target: a.append(('Target', [o.target])) if o.newline: @@ -393,8 +393,8 @@ def visit_yield_from_expr(self, o: 'mypy.nodes.YieldFromExpr') -> str: def visit_call_expr(self, o: 'mypy.nodes.CallExpr') -> str: if o.analyzed: return o.analyzed.accept(self) - args = [] # type: List[mypy.nodes.Expression] - extra = [] # type: List[Union[str, Tuple[str, List[Any]]]] + args: List[mypy.nodes.Expression] = [] + extra: List[Union[str, Tuple[str, List[Any]]]] = [] for i, kind in enumerate(o.arg_kinds): if kind in [mypy.nodes.ARG_POS, mypy.nodes.ARG_STAR]: args.append(o.args[i]) @@ -405,8 +405,8 @@ def visit_call_expr(self, o: 'mypy.nodes.CallExpr') -> str: elif kind == mypy.nodes.ARG_STAR2: extra.append(('DictVarArg', [o.args[i]])) else: - raise RuntimeError('unknown kind %d' % kind) - a = [o.callee, ('Args', args)] # type: List[Any] + raise RuntimeError("unknown kind %d" % kind) + a: List[Any] = [o.callee, ("Args", args)] return self.dump(a + extra, o) def visit_op_expr(self, o: 'mypy.nodes.OpExpr') -> str: @@ -456,7 +456,8 @@ def visit_type_application(self, o: 'mypy.nodes.TypeApplication') -> str: def visit_type_var_expr(self, o: 'mypy.nodes.TypeVarExpr') -> str: import mypy.types - a = [] # type: List[Any] + + a: List[Any] = [] if o.variance == mypy.nodes.COVARIANT: a += ['Variance(COVARIANT)'] if o.variance == 
mypy.nodes.CONTRAVARIANT: @@ -469,7 +470,8 @@ def visit_type_var_expr(self, o: 'mypy.nodes.TypeVarExpr') -> str: def visit_paramspec_expr(self, o: 'mypy.nodes.ParamSpecExpr') -> str: import mypy.types - a = [] # type: List[Any] + + a: List[Any] = [] if o.variance == mypy.nodes.COVARIANT: a += ['Variance(COVARIANT)'] if o.variance == mypy.nodes.CONTRAVARIANT: @@ -522,7 +524,7 @@ def visit_conditional_expr(self, o: 'mypy.nodes.ConditionalExpr') -> str: return self.dump([('Condition', [o.cond]), o.if_expr, o.else_expr], o) def visit_slice_expr(self, o: 'mypy.nodes.SliceExpr') -> str: - a = [o.begin_index, o.end_index, o.stride] # type: List[Any] + a: List[Any] = [o.begin_index, o.end_index, o.stride] if not a[0]: a[0] = '' if not a[1]: @@ -550,7 +552,7 @@ def dump_tagged(nodes: Sequence[object], tag: Optional[str], str_conv: 'StrConv' """ from mypy.types import Type, TypeStrVisitor - a = [] # type: List[str] + a: List[str] = [] if tag: a.append(tag + '(') for n in nodes: diff --git a/mypy/stubdoc.py b/mypy/stubdoc.py index 801e661440d2..16336b2da330 100644 --- a/mypy/stubdoc.py +++ b/mypy/stubdoc.py @@ -17,8 +17,8 @@ Sig = Tuple[str, str] -_TYPE_RE = re.compile(r'^[a-zA-Z_][\w\[\], ]*(\.[a-zA-Z_][\w\[\], ]*)*$') # type: Final -_ARG_NAME_RE = re.compile(r'\**[A-Za-z_][A-Za-z0-9_]*$') # type: Final +_TYPE_RE: Final = re.compile(r"^[a-zA-Z_][\w\[\], ]*(\.[a-zA-Z_][\w\[\], ]*)*$") +_ARG_NAME_RE: Final = re.compile(r"\**[A-Za-z_][A-Za-z0-9_]*$") def is_valid_type(s: str) -> bool: @@ -60,13 +60,13 @@ def __eq__(self, other: Any) -> bool: # States of the docstring parser. -STATE_INIT = 1 # type: Final -STATE_FUNCTION_NAME = 2 # type: Final -STATE_ARGUMENT_LIST = 3 # type: Final -STATE_ARGUMENT_TYPE = 4 # type: Final -STATE_ARGUMENT_DEFAULT = 5 # type: Final -STATE_RETURN_VALUE = 6 # type: Final -STATE_OPEN_BRACKET = 7 # type: Final # For generic types. +STATE_INIT: Final = 1 +STATE_FUNCTION_NAME: Final = 2 +STATE_ARGUMENT_LIST: Final = 3 +STATE_ARGUMENT_TYPE: Final = 4 +STATE_ARGUMENT_DEFAULT: Final = 5 +STATE_RETURN_VALUE: Final = 6 +STATE_OPEN_BRACKET: Final = 7 # For generic types. class DocStringParser: @@ -77,14 +77,14 @@ def __init__(self, function_name: str) -> None: self.function_name = function_name self.state = [STATE_INIT] self.accumulator = "" - self.arg_type = None # type: Optional[str] + self.arg_type: Optional[str] = None self.arg_name = "" - self.arg_default = None # type: Optional[str] + self.arg_default: Optional[str] = None self.ret_type = "Any" self.found = False - self.args = [] # type: List[ArgSig] + self.args: List[ArgSig] = [] # Valid signatures found so far. 
- self.signatures = [] # type: List[FunctionSig] + self.signatures: List[FunctionSig] = [] def add_token(self, token: tokenize.TokenInfo) -> None: """Process next token from the token stream.""" @@ -303,7 +303,7 @@ def parse_signature(sig: str) -> Optional[Tuple[str, def build_signature(positional: Sequence[str], optional: Sequence[str]) -> str: """Build function signature from lists of positional and optional argument names.""" - args = [] # type: MutableSequence[str] + args: MutableSequence[str] = [] args.extend(positional) for arg in optional: if arg.startswith('*'): @@ -342,7 +342,7 @@ def parse_all_signatures(lines: Sequence[str]) -> Tuple[List[Sig], def find_unique_signatures(sigs: Sequence[Sig]) -> List[Sig]: """Remove names with duplicate found signatures.""" - sig_map = {} # type: MutableMapping[str, List[str]] + sig_map: MutableMapping[str, List[str]] = {} for name, sig in sigs: sig_map.setdefault(name, []).append(sig) diff --git a/mypy/stubgen.py b/mypy/stubgen.py index 7028b8da04b6..91f461b84c15 100755 --- a/mypy/stubgen.py +++ b/mypy/stubgen.py @@ -96,32 +96,32 @@ # Common ways of naming package containing vendored modules. -VENDOR_PACKAGES = [ +VENDOR_PACKAGES: Final = [ 'packages', 'vendor', 'vendored', '_vendor', '_vendored_packages', -] # type: Final +] # Avoid some file names that are unnecessary or likely to cause trouble (\n for end of path). -BLACKLIST = [ +BLACKLIST: Final = [ '/six.py\n', # Likely vendored six; too dynamic for us to handle '/vendored/', # Vendored packages '/vendor/', # Vendored packages '/_vendor/', '/_vendored_packages/', -] # type: Final +] # Special-cased names that are implicitly exported from the stub (from m import y as y). -EXTRA_EXPORTED = { +EXTRA_EXPORTED: Final = { 'pyasn1_modules.rfc2437.univ', 'pyasn1_modules.rfc2459.char', 'pyasn1_modules.rfc2459.univ', -} # type: Final +} # These names should be omitted from generated stubs. -IGNORED_DUNDERS = { +IGNORED_DUNDERS: Final = { '__all__', '__author__', '__version__', @@ -137,10 +137,10 @@ '__getstate__', '__setstate__', '__slots__', -} # type: Final +} # These methods are expected to always return a non-trivial value. -METHODS_WITH_RETURN_VALUE = { +METHODS_WITH_RETURN_VALUE: Final = { '__ne__', '__eq__', '__lt__', @@ -149,7 +149,7 @@ '__ge__', '__hash__', '__iter__', -} # type: Final +} class Options: @@ -202,7 +202,7 @@ def __init__(self, module: str, path: Optional[str] = None, runtime_all: Optional[List[str]] = None) -> None: self.source = BuildSource(path, module, None) self.runtime_all = runtime_all - self.ast = None # type: Optional[MypyFile] + self.ast: Optional[MypyFile] = None @property def module(self) -> str: @@ -215,17 +215,17 @@ def path(self) -> Optional[str]: # What was generated previously in the stub file. We keep track of these to generate # nicely formatted output (add empty line between non-empty classes, for example). -EMPTY = 'EMPTY' # type: Final -FUNC = 'FUNC' # type: Final -CLASS = 'CLASS' # type: Final -EMPTY_CLASS = 'EMPTY_CLASS' # type: Final -VAR = 'VAR' # type: Final -NOT_IN_ALL = 'NOT_IN_ALL' # type: Final +EMPTY: Final = "EMPTY" +FUNC: Final = "FUNC" +CLASS: Final = "CLASS" +EMPTY_CLASS: Final = "EMPTY_CLASS" +VAR: Final = "VAR" +NOT_IN_ALL: Final = "NOT_IN_ALL" # Indicates that we failed to generate a reasonable output # for a given node. These should be manually replaced by a user. 
-ERROR_MARKER = '<ERROR>' # type: Final +ERROR_MARKER: Final = "<ERROR>" class AnnotationPrinter(TypeStrVisitor): @@ -298,7 +298,7 @@ def visit_name_expr(self, node: NameExpr) -> str: return node.name def visit_member_expr(self, o: MemberExpr) -> str: - node = o # type: Expression + node: Expression = o trailer = '' while isinstance(node, MemberExpr): trailer = '.' + node.name + trailer @@ -337,27 +337,27 @@ def __init__(self) -> None: # 'import m' ==> module_for['m'] == None # 'import pkg.m' ==> module_for['pkg.m'] == None # ==> module_for['pkg'] == None - self.module_for = {} # type: Dict[str, Optional[str]] + self.module_for: Dict[str, Optional[str]] = {} # direct_imports['foo'] is the module path used when the name 'foo' was added to the # namespace. # import foo.bar.baz ==> direct_imports['foo'] == 'foo.bar.baz' # ==> direct_imports['foo.bar'] == 'foo.bar.baz' # ==> direct_imports['foo.bar.baz'] == 'foo.bar.baz' - self.direct_imports = {} # type: Dict[str, str] + self.direct_imports: Dict[str, str] = {} # reverse_alias['foo'] is the name that 'foo' had originally when imported with an # alias; examples # 'import numpy as np' ==> reverse_alias['np'] == 'numpy' # 'import foo.bar as bar' ==> reverse_alias['bar'] == 'foo.bar' # 'from decimal import Decimal as D' ==> reverse_alias['D'] == 'Decimal' - self.reverse_alias = {} # type: Dict[str, str] + self.reverse_alias: Dict[str, str] = {} # required_names is the set of names that are actually used in a type annotation - self.required_names = set() # type: Set[str] + self.required_names: Set[str] = set() # Names that should be reexported if they come from another module - self.reexports = set() # type: Set[str] + self.reexports: Set[str] = set() def add_import_from(self, module: str, names: List[Tuple[str, Optional[str]]]) -> None: for name, alias in names: @@ -405,7 +405,7 @@ def import_lines(self) -> List[str]: # To summarize multiple names imported from a same module, we collect those # in the `module_map` dictionary, mapping a module path to the list of names that should # be imported from it. the names can also be alias in the form 'original as alias' - module_map = defaultdict(list) # type: Mapping[str, List[str]] + module_map: Mapping[str, List[str]] = defaultdict(list) for name in sorted(self.required_names): # If we haven't seen this name in an import statement, ignore it @@ -452,7 +452,7 @@ class DefinitionFinder(mypy.traverser.TraverserVisitor): def __init__(self) -> None: # Short names of things defined at the top level. - self.names = set() # type: Set[str] + self.names: Set[str] = set() def visit_class_def(self, o: ClassDef) -> None: # Don't recurse into classes, as we only keep track of top-level definitions. @@ -476,7 +476,7 @@ class ReferenceFinder(mypy.mixedtraverser.MixedTraverserVisitor): def __init__(self) -> None: # Short names of things defined at the top level. - self.refs = set() # type: Set[str] + self.refs: Set[str] = set() def visit_block(self, block: Block) -> None: if not block.is_unreachable: @@ -518,16 +518,16 @@ def __init__(self, export_less: bool = False) -> None: # Best known value of __all__. self._all_ = _all_ - self._output = [] # type: List[str] - self._decorators = [] # type: List[str] - self._import_lines = [] # type: List[str] + self._output: List[str] = [] + self._decorators: List[str] = [] + self._import_lines: List[str] = [] # Current indent level (indent is hardcoded to 4 spaces). self._indent = '' # Stack of defined variables (per scope).
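For readers unfamiliar with the bare `Final` form used in the stubgen constants above, here is a minimal standalone sketch, not part of the patch (hypothetical constant name; assumes Python 3.8+, where `Final` is importable from `typing`; mypy itself falls back to `typing_extensions` on older interpreters):

    from typing import Final

    # Before the conversion (comment syntax):
    #   EMPTY = 'EMPTY'  # type: Final
    # After (PEP 526 annotation, as in the stubgen constants above):
    EMPTY: Final = 'EMPTY'  # mypy infers the literal type and rejects reassignment

Leaving the type argument off `Final` lets mypy keep the inferred type, which is why the conversion never spells out `Final[str]`. A related case in the same hunk: `module_map: Mapping[str, List[str]] = defaultdict(list)` annotates with an abstract type while assigning a concrete one, exactly as the old comment syntax did.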
- self._vars = [[]] # type: List[List[str]] + self._vars: List[List[str]] = [[]] # What was generated previously in the stub file. self._state = EMPTY - self._toplevel_names = [] # type: List[str] + self._toplevel_names: List[str] = [] self._pyversion = pyversion self._include_private = include_private self.import_tracker = ImportTracker() @@ -541,9 +541,9 @@ def __init__(self, for name in _all_ or (): if name not in IGNORED_DUNDERS: self.import_tracker.reexport(name) - self.defined_names = set() # type: Set[str] + self.defined_names: Set[str] = set() # Short names of methods defined in the body of the current class - self.method_names = set() # type: Set[str] + self.method_names: Set[str] = set() def visit_mypy_file(self, o: MypyFile) -> None: self.module = o.fullname # Current module being processed @@ -613,7 +613,7 @@ def visit_func_def(self, o: FuncDef, is_abstract: bool = False, self.clear_decorators() self.add("%s%sdef %s(" % (self._indent, 'async ' if o.is_coroutine else '', o.name)) self.record_name(o.name) - args = [] # type: List[str] + args: List[str] = [] for i, arg_ in enumerate(o.arguments): var = arg_.variable kind = arg_.kind @@ -794,7 +794,7 @@ def process_member_expr_decorator(self, expr: MemberExpr, context: Decorator) -> def visit_class_def(self, o: ClassDef) -> None: self.method_names = find_method_names(o.defs.body) - sep = None # type: Optional[int] + sep: Optional[int] = None if not self._indent and self._state != EMPTY: sep = len(self._output) self.add('\n') @@ -832,7 +832,7 @@ def visit_class_def(self, o: ClassDef) -> None: def get_base_types(self, cdef: ClassDef) -> List[str]: """Get list of base classes for a class.""" - base_types = [] # type: List[str] + base_types: List[str] = [] for base in cdef.base_type_exprs: if isinstance(base, NameExpr): if base.name != 'object': @@ -868,7 +868,7 @@ def visit_assignment_stmt(self, o: AssignmentStmt) -> None: if isinstance(lvalue, TupleExpr) or isinstance(lvalue, ListExpr): items = lvalue.items if isinstance(o.unanalyzed_type, TupleType): # type: ignore - annotations = o.unanalyzed_type.items # type: Iterable[Optional[Type]] + annotations: Iterable[Optional[Type]] = o.unanalyzed_type.items else: annotations = [None] * len(items) else: @@ -986,7 +986,7 @@ def visit_import_all(self, o: ImportAll) -> None: self.add_import_line('from %s%s import *\n' % ('.' * o.relative, o.id)) def visit_import_from(self, o: ImportFrom) -> None: - exported_names = set() # type: Set[str] + exported_names: Set[str] = set() import_names = [] module, relative = translate_module_name(o.id, o.relative) if self.module: @@ -1211,7 +1211,7 @@ def find_method_names(defs: List[Statement]) -> Set[str]: class SelfTraverser(mypy.traverser.TraverserVisitor): def __init__(self) -> None: - self.results = [] # type: List[Tuple[str, Expression]] + self.results: List[Tuple[str, Expression]] = [] def visit_assignment_stmt(self, o: AssignmentStmt) -> None: lvalue = o.lvalues[0] @@ -1268,7 +1268,7 @@ def collect_build_targets(options: Options, mypy_opts: MypyOptions) -> Tuple[Lis options.packages, options.search_path, options.pyversion) - c_modules = [] # type: List[StubSource] + c_modules: List[StubSource] = [] else: # Using imports is the default, since we can also find C modules. py_modules, c_modules = find_module_paths_using_imports(options.modules, @@ -1303,8 +1303,8 @@ def find_module_paths_using_imports(modules: List[str], This function uses runtime Python imports to get the information. 
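The `annotations: Iterable[Optional[Type]] = o.unanalyzed_type.items` change above shows a pattern com2ann has to preserve: the annotation deliberately declares a wider type than the right-hand side would infer, so a later branch can rebind the name. A minimal sketch under that assumption (hypothetical function, not from the patch):

    from typing import Iterable, Optional

    def first(flag: bool) -> Optional[int]:
        # The list literal alone would infer List[int]; the wider annotation
        # lets the other branch bind a tuple to the same name.
        items: Iterable[Optional[int]] = [1, 2, 3]
        if flag:
            items = (None, 4)
        for x in items:
            return x
        return None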
""" with ModuleInspect() as inspect: - py_modules = [] # type: List[StubSource] - c_modules = [] # type: List[StubSource] + py_modules: List[StubSource] = [] + c_modules: List[StubSource] = [] found = list(walk_packages(inspect, packages, verbose)) modules = modules + found modules = [mod @@ -1378,7 +1378,7 @@ def find_module_paths_using_search(modules: List[str], packages: List[str], This is used if user passes --no-import, and will not find C modules. Exit if some of the modules or packages can't be found. """ - result = [] # type: List[StubSource] + result: List[StubSource] = [] typeshed_path = default_lib_path(mypy.build.default_data_dir(), pyversion, None) search_paths = SearchPaths(('.',) + tuple(search_path), (), (), tuple(typeshed_path)) cache = FindModuleCache(search_paths, fscache=None, options=None) @@ -1495,8 +1495,8 @@ def collect_docs_signatures(doc_dir: str) -> Tuple[Dict[str, str], Dict[str, str Return a tuple (function signatures, class signatures). Currently only used for C modules. """ - all_sigs = [] # type: List[Sig] - all_class_sigs = [] # type: List[Sig] + all_sigs: List[Sig] = [] + all_class_sigs: List[Sig] = [] for path in glob.glob('%s/*.rst' % doc_dir): with open(path) as f: loc_sigs, loc_class_sigs = parse_all_signatures(f.readlines()) diff --git a/mypy/stubgenc.py b/mypy/stubgenc.py index 279deb19c96f..bc90acfd7e0d 100755 --- a/mypy/stubgenc.py +++ b/mypy/stubgenc.py @@ -50,15 +50,15 @@ def generate_stub_for_c_module(module_name: str, subdir = os.path.dirname(target) if subdir and not os.path.isdir(subdir): os.makedirs(subdir) - imports = [] # type: List[str] - functions = [] # type: List[str] + imports: List[str] = [] + functions: List[str] = [] done = set() items = sorted(module.__dict__.items(), key=lambda x: x[0]) for name, obj in items: if is_c_function(obj): generate_c_function_stub(module, name, obj, functions, imports=imports, sigs=sigs) done.add(name) - types = [] # type: List[str] + types: List[str] = [] for name, obj in items: if name.startswith('__') and name.endswith('__'): continue @@ -159,11 +159,19 @@ def generate_c_function_stub(module: ModuleType, ret_type = 'None' if name == '__init__' and class_name else 'Any' - if (name in ('__new__', '__init__') and name not in sigs and class_name and - class_name in class_sigs): - inferred = [FunctionSig(name=name, - args=infer_arg_sig_from_anon_docstring(class_sigs[class_name]), - ret_type=ret_type)] # type: Optional[List[FunctionSig]] + if ( + name in ("__new__", "__init__") + and name not in sigs + and class_name + and class_name in class_sigs + ): + inferred: Optional[List[FunctionSig]] = [ + FunctionSig( + name=name, + args=infer_arg_sig_from_anon_docstring(class_sigs[class_name]), + ret_type=ret_type, + ) + ] else: docstr = getattr(obj, '__doc__', None) inferred = infer_sig_from_docstring(docstr, name) @@ -310,14 +318,14 @@ def generate_c_type_stub(module: ModuleType, """ # typeshed gives obj.__dict__ the not quite correct type Dict[str, Any] # (it could be a mappingproxy!), which makes mypyc mad, so obfuscate it. 
- obj_dict = getattr(obj, '__dict__') # type: Mapping[str, Any] # noqa + obj_dict: Mapping[str, Any] = getattr(obj, "__dict__") # noqa items = sorted(obj_dict.items(), key=lambda x: method_name_sort_key(x[0])) - methods = [] # type: List[str] - types = [] # type: List[str] - static_properties = [] # type: List[str] - rw_properties = [] # type: List[str] - ro_properties = [] # type: List[str] - done = set() # type: Set[str] + methods: List[str] = [] + types: List[str] = [] + static_properties: List[str] = [] + rw_properties: List[str] = [] + ro_properties: List[str] = [] + done: Set[str] = set() for attr, value in items: if is_c_method(value) or is_c_classmethod(value): done.add(attr) @@ -365,7 +373,7 @@ def generate_c_type_stub(module: ModuleType, # remove the class itself all_bases = all_bases[1:] # Remove base classes of other bases as redundant. - bases = [] # type: List[type] + bases: List[type] = [] for base in all_bases: if not any(issubclass(b, base) for b in bases): bases.append(base) @@ -431,7 +439,7 @@ def is_skipped_attribute(attr: str) -> bool: def infer_method_sig(name: str) -> List[ArgSig]: - args = None # type: Optional[List[ArgSig]] + args: Optional[List[ArgSig]] = None if name.startswith('__') and name.endswith('__'): name = name[2:-2] if name in ('hash', 'iter', 'next', 'sizeof', 'copy', 'deepcopy', 'reduce', 'getinitargs', diff --git a/mypy/stubtest.py b/mypy/stubtest.py index 0a5c826ea5ed..9c77785138ed 100644 --- a/mypy/stubtest.py +++ b/mypy/stubtest.py @@ -97,7 +97,7 @@ def get_description(self, concise: bool = False) -> str: return _style(self.object_desc, bold=True) + " " + self.message stub_line = None - stub_file = None # type: None + stub_file: None = None if not isinstance(self.stub_object, Missing): stub_line = self.stub_object.line # TODO: Find a way of getting the stub file @@ -382,10 +382,10 @@ def maybe_strip_cls(name: str, args: List[nodes.Argument]) -> List[nodes.Argumen class Signature(Generic[T]): def __init__(self) -> None: - self.pos = [] # type: List[T] - self.kwonly = {} # type: Dict[str, T] - self.varpos = None # type: Optional[T] - self.varkw = None # type: Optional[T] + self.pos: List[T] = [] + self.kwonly: Dict[str, T] = {} + self.varpos: Optional[T] = None + self.varkw: Optional[T] = None def __str__(self) -> str: def get_name(arg: Any) -> str: @@ -430,7 +430,7 @@ def get_desc(arg: Any) -> str: @staticmethod def from_funcitem(stub: nodes.FuncItem) -> "Signature[nodes.Argument]": - stub_sig = Signature() # type: Signature[nodes.Argument] + stub_sig: Signature[nodes.Argument] = Signature() stub_args = maybe_strip_cls(stub.name, stub.arguments) for stub_arg in stub_args: if stub_arg.kind in (nodes.ARG_POS, nodes.ARG_OPT): @@ -447,7 +447,7 @@ def from_funcitem(stub: nodes.FuncItem) -> "Signature[nodes.Argument]": @staticmethod def from_inspect_signature(signature: inspect.Signature) -> "Signature[inspect.Parameter]": - runtime_sig = Signature() # type: Signature[inspect.Parameter] + runtime_sig: Signature[inspect.Parameter] = Signature() for runtime_arg in signature.parameters.values(): if runtime_arg.kind in ( inspect.Parameter.POSITIONAL_ONLY, @@ -477,7 +477,7 @@ def from_overloadedfuncdef(stub: nodes.OverloadedFuncDef) -> "Signature[nodes.Ar # For most dunder methods, just assume all args are positional-only assume_positional_only = is_dunder(stub.name, exclude_special=True) - all_args = {} # type: Dict[str, List[Tuple[nodes.Argument, int]]] + all_args: Dict[str, List[Tuple[nodes.Argument, int]]] = {} for func in 
map(_resolve_funcitem_from_decorator, stub.items): assert func is not None args = maybe_strip_cls(stub.name, func.arguments) @@ -521,7 +521,7 @@ def get_kind(arg_name: str) -> int: return nodes.ARG_OPT if is_pos else nodes.ARG_NAMED_OPT return nodes.ARG_POS if is_pos else nodes.ARG_NAMED - sig = Signature() # type: Signature[nodes.Argument] + sig: Signature[nodes.Argument] = Signature() for arg_name in sorted(all_args, key=get_position): # example_arg_name gives us a real name (in case we had a fake index-based name) example_arg_name = all_args[arg_name][0][0].variable.name @@ -833,7 +833,7 @@ def apply_decorator_to_funcitem( # anything else when running on typeshed's stdlib. return None - func = dec.func # type: nodes.FuncItem + func: nodes.FuncItem = dec.func for decorator in dec.original_decorators: resulting_func = apply_decorator_to_funcitem(decorator, func) if resulting_func is None: @@ -993,7 +993,7 @@ def anytype() -> mypy.types.AnyType: return fallback -_all_stubs = {} # type: Dict[str, nodes.MypyFile] +_all_stubs: Dict[str, nodes.MypyFile] = {} def build_stubs(modules: List[str], options: Options, find_submodules: bool = False) -> List[str]: diff --git a/mypy/subtypes.py b/mypy/subtypes.py index ffcaf8f2bc92..7b54e0f83c79 100644 --- a/mypy/subtypes.py +++ b/mypy/subtypes.py @@ -26,9 +26,9 @@ from mypy import state # Flags for detected protocol members -IS_SETTABLE = 1 # type: Final -IS_CLASSVAR = 2 # type: Final -IS_CLASS_OR_STATIC = 3 # type: Final +IS_SETTABLE: Final = 1 +IS_CLASSVAR: Final = 2 +IS_CLASS_OR_STATIC: Final = 3 TypeParameterChecker = Callable[[Type, Type, int], bool] @@ -700,8 +700,9 @@ def find_node_type(node: Union[Var, FuncBase], itype: Instance, subtype: Type) - from mypy.typeops import bind_self if isinstance(node, FuncBase): - typ = mypy.typeops.function_type( - node, fallback=Instance(itype.type.mro[-1], [])) # type: Optional[Type] + typ: Optional[Type] = mypy.typeops.function_type( + node, fallback=Instance(itype.type.mro[-1], []) + ) else: typ = node.type typ = get_proper_type(typ) @@ -728,7 +729,7 @@ def non_method_protocol_members(tp: TypeInfo) -> List[str]: """Find all non-callable members of a protocol.""" assert tp.is_protocol - result = [] # type: List[str] + result: List[str] = [] anytype = AnyType(TypeOfAny.special_form) instance = Instance(tp, [anytype] * len(tp.defn.type_vars)) @@ -1081,7 +1082,7 @@ def unify_generic_callable(type: CallableType, target: CallableType, if return_constraint_direction is None: return_constraint_direction = mypy.constraints.SUBTYPE_OF - constraints = [] # type: List[mypy.constraints.Constraint] + constraints: List[mypy.constraints.Constraint] = [] for arg_type, target_arg_type in zip(type.arg_types, target.arg_types): c = mypy.constraints.infer_constraints( arg_type, target_arg_type, mypy.constraints.SUPERTYPE_OF) diff --git a/mypy/suggestions.py b/mypy/suggestions.py index 8df180d825b4..3aa26e83f318 100644 --- a/mypy/suggestions.py +++ b/mypy/suggestions.py @@ -86,7 +86,7 @@ def __init__(self, target: str) -> None: self.target = target # List of call sites found by dmypy suggest: # (path, line, <arg kinds>, <arg names>, <arg types>) - self.mystery_hits = [] # type: List[Callsite] + self.mystery_hits: List[Callsite] = [] def get_function_hook(self, fullname: str ) -> Optional[Callable[[FunctionContext], Type]]: @@ -119,7 +119,7 @@ class ReturnFinder(TraverserVisitor): """Visitor for finding all types returned from a function.""" def __init__(self, typemap: Dict[Expression, Type]) -> None: self.typemap = typemap - self.return_types = [] # type:
List[Type] + self.return_types: List[Type] = [] def visit_return_stmt(self, o: ReturnStmt) -> None: if o.expr is not None and o.expr in self.typemap: @@ -144,9 +144,7 @@ class ArgUseFinder(TraverserVisitor): """ def __init__(self, func: FuncDef, typemap: Dict[Expression, Type]) -> None: self.typemap = typemap - self.arg_types = { - arg.variable: [] for arg in func.arguments - } # type: Dict[SymbolNode, List[Type]] + self.arg_types: Dict[SymbolNode, List[Type]] = {arg.variable: [] for arg in func.arguments} def visit_call_expr(self, o: CallExpr) -> None: if not any(isinstance(e, RefExpr) and e.node in self.arg_types for e in o.args): @@ -303,7 +301,7 @@ def get_args(self, is_method: bool, callsites: List[Callsite], uses: List[List[Type]]) -> List[List[Type]]: """Produce a list of type suggestions for each argument type.""" - types = [] # type: List[List[Type]] + types: List[List[Type]] = [] for i in range(len(base.arg_kinds)): # Make self args Any but this will get overridden somewhere in the checker if i == 0 and is_method: @@ -473,7 +471,7 @@ def format_args(self, arg_kinds: List[List[int]], arg_names: List[List[Optional[str]]], arg_types: List[List[Type]]) -> str: - args = [] # type: List[str] + args: List[str] = [] for i in range(len(arg_types)): for kind, name, typ in zip(arg_kinds[i], arg_names[i], arg_types[i]): arg = self.format_type(None, typ) @@ -496,7 +494,7 @@ def find_node(self, key: str) -> Tuple[str, str, FuncDef]: e.g., path/to/file.py:42 """ # TODO: Also return OverloadedFuncDef -- currently these are ignored. - node = None # type: Optional[SymbolNode] + node: Optional[SymbolNode] = None if ':' in key: if key.count(':') > 1: raise SuggestionFailure( @@ -535,7 +533,7 @@ def find_node_by_module_and_name(self, modname: str, tail: str) -> Optional[Symb # N.B. This is reimplemented from update's lookup_target # basically just to produce better error messages. - names = tree.names # type: SymbolTable + names: SymbolTable = tree.names # Look through any classes components = tail.split('.') @@ -543,7 +541,7 @@ def find_node_by_module_and_name(self, modname: str, tail: str) -> Optional[Symb if component not in names: raise SuggestionFailure("Unknown class %s.%s" % (modname, '.'.join(components[:i + 1]))) - node = names[component].node # type: Optional[SymbolNode] + node: Optional[SymbolNode] = names[component].node if not isinstance(node, TypeInfo): raise SuggestionFailure("Object %s.%s is not a class" % (modname, '.'.join(components[:i + 1]))) @@ -574,8 +572,8 @@ def find_node_by_file_and_line(self, file: str, line: int) -> Tuple[str, SymbolN raise SuggestionFailure('Unknown module: ' + modname) # We must be sure about any edits in this file as this might affect the line numbers. tree = self.ensure_loaded(self.fgmanager.graph[modname], force=True) - node = None # type: Optional[SymbolNode] - closest_line = None # type: Optional[int] + node: Optional[SymbolNode] = None + closest_line: Optional[int] = None # TODO: Handle nested functions. 
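The `sig: Signature[nodes.Argument] = Signature()` lines in the stubtest.py hunks above illustrate a case where the annotation carries real information: a bare constructor call gives mypy no way to solve the class's type parameter. A self-contained sketch (hypothetical `Bag` class, not from the patch):

    from typing import Generic, List, TypeVar

    T = TypeVar('T')

    class Bag(Generic[T]):
        def __init__(self) -> None:
            self.items: List[T] = []

    # Without the annotation, `Bag()` leaves T undetermined; the annotation
    # pins it, just as `Signature[nodes.Argument]` does above.
    ints: Bag[int] = Bag()
    ints.items.append(3)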
for _, sym, _ in tree.local_definitions(): if isinstance(sym.node, (FuncDef, Decorator)): @@ -1020,7 +1018,7 @@ def refine_callable(t: CallableType, s: CallableType) -> CallableType: def dedup(old: List[T]) -> List[T]: - new = [] # type: List[T] + new: List[T] = [] for x in old: if x not in new: new.append(x) diff --git a/mypy/test/data.py b/mypy/test/data.py index 9ff1eb1373e0..26136ef6a375 100644 --- a/mypy/test/data.py +++ b/mypy/test/data.py @@ -43,15 +43,15 @@ def parse_test_case(case: 'DataDrivenTestCase') -> None: out_section_missing = case.suite.required_out_section normalize_output = True - files = [] # type: List[Tuple[str, str]] # path and contents - output_files = [] # type: List[Tuple[str, str]] # path and contents for output files - output = [] # type: List[str] # Regular output errors - output2 = {} # type: Dict[int, List[str]] # Output errors for incremental, runs 2+ - deleted_paths = {} # type: Dict[int, Set[str]] # from run number of paths - stale_modules = {} # type: Dict[int, Set[str]] # from run number to module names - rechecked_modules = {} # type: Dict[ int, Set[str]] # from run number module names - triggered = [] # type: List[str] # Active triggers (one line per incremental step) - targets = {} # type: Dict[int, List[str]] # Fine-grained targets (per fine-grained update) + files: List[Tuple[str, str]] = [] # path and contents + output_files: List[Tuple[str, str]] = [] # path and contents for output files + output: List[str] = [] # Regular output errors + output2: Dict[int, List[str]] = {} # Output errors for incremental, runs 2+ + deleted_paths: Dict[int, Set[str]] = {} # from run number of paths + stale_modules: Dict[int, Set[str]] = {} # from run number to module names + rechecked_modules: Dict[int, Set[str]] = {} # from run number module names + triggered: List[str] = [] # Active triggers (one line per incremental step) + targets: Dict[int, List[str]] = {} # Fine-grained targets (per fine-grained update) # Process the parsed items. Each item has a header of form [id args], # optionally followed by lines of text. @@ -188,31 +188,31 @@ class DataDrivenTestCase(pytest.Item): """Holds parsed data-driven test cases, and handles directory setup and teardown.""" # Override parent member type - parent = None # type: DataSuiteCollector + parent: "DataSuiteCollector" - input = None # type: List[str] - output = None # type: List[str] # Output for the first pass - output2 = None # type: Dict[int, List[str]] # Output for runs 2+, indexed by run number + input: List[str] + output: List[str] # Output for the first pass + output2: Dict[int, List[str]] # Output for runs 2+, indexed by run number # full path of test suite file = '' line = 0 # (file path, file content) tuples - files = None # type: List[Tuple[str, str]] - expected_stale_modules = None # type: Dict[int, Set[str]] - expected_rechecked_modules = None # type: Dict[int, Set[str]] - expected_fine_grained_targets = None # type: Dict[int, List[str]] + files: List[Tuple[str, str]] + expected_stale_modules: Dict[int, Set[str]] + expected_rechecked_modules: Dict[int, Set[str]] + expected_fine_grained_targets: Dict[int, List[str]] # Whether or not we should normalize the output to standardize things like # forward vs backward slashes in file paths for Windows vs Linux. normalize_output = True # Extra attributes used by some tests. 
- last_line = None # type: int - output_files = None # type: List[Tuple[str, str]] # Path and contents for output files - deleted_paths = None # type: Dict[int, Set[str]] # Mapping run number -> paths - triggered = None # type: List[str] # Active triggers (one line per incremental step) + last_line: int + output_files: List[Tuple[str, str]] # Path and contents for output files + deleted_paths: Dict[int, Set[str]] # Mapping run number -> paths + triggered: List[str] # Active triggers (one line per incremental step) def __init__(self, parent: 'DataSuiteCollector', @@ -238,8 +238,8 @@ def __init__(self, self.xfail = xfail self.data = data self.line = line - self.old_cwd = None # type: Optional[str] - self.tmpdir = None # type: Optional[tempfile.TemporaryDirectory[str]] + self.old_cwd: Optional[str] = None + self.tmpdir: Optional[tempfile.TemporaryDirectory[str]] = None def runtest(self) -> None: if self.skip: @@ -253,7 +253,7 @@ def runtest(self) -> None: suite.run_case(self) except Exception: # As a debugging aid, support copying the contents of the tmp directory somewhere - save_dir = self.config.getoption('--save-failures-to', None) # type: Optional[str] + save_dir: Optional[str] = self.config.getoption("--save-failures-to", None) if save_dir: assert self.tmpdir is not None target_dir = os.path.join(save_dir, os.path.basename(self.tmpdir.name)) @@ -312,7 +312,7 @@ def find_steps(self) -> List[List[FileOperation]]: Defaults to having two steps if there aern't any operations. """ - steps = {} # type: Dict[int, List[FileOperation]] + steps: Dict[int, List[FileOperation]] = {} for path, _ in self.files: m = re.match(r'.*\.([0-9]+)$', path) if m: @@ -350,11 +350,11 @@ class TestItem: .. data .. """ - id = '' - arg = '' # type: Optional[str] + id = "" + arg: Optional[str] = "" # Text data, array of 8-bit strings - data = None # type: List[str] + data: List[str] file = '' line = 0 # Line number in file @@ -371,11 +371,11 @@ def parse_test_data(raw_data: str, name: str) -> List[TestItem]: """Parse a list of lines that represent a sequence of test items.""" lines = ['', '[case ' + name + ']'] + raw_data.split('\n') - ret = [] # type: List[TestItem] - data = [] # type: List[str] + ret: List[TestItem] = [] + data: List[str] = [] - id = None # type: Optional[str] - arg = None # type: Optional[str] + id: Optional[str] = None + arg: Optional[str] = None i = 0 i0 = 0 @@ -419,7 +419,7 @@ def strip_list(l: List[str]) -> List[str]: lines from the end of the array. """ - r = [] # type: List[str] + r: List[str] = [] for s in l: # Strip spaces at end of line r.append(re.sub(r'\s+$', '', s)) @@ -431,7 +431,7 @@ def strip_list(l: List[str]) -> List[str]: def collapse_line_continuation(l: List[str]) -> List[str]: - r = [] # type: List[str] + r: List[str] = [] cont = False for s in l: ss = re.sub(r'\\$', '', s) @@ -593,7 +593,7 @@ def collect(self) -> Iterator[pytest.Item]: """Called by pytest on each of the object returned from pytest_pycollect_makeitem""" # obj is the object for which pytest_pycollect_makeitem returned self. 
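The DataDrivenTestCase hunks above replace `attr = None  # type: X` class-level placeholders with bare annotations. Under PEP 526 a `None` default would force the type to become `Optional[X]`, so attributes that are always assigned before use are now declared without a value. Roughly, as a hedged sketch (hypothetical class):

    from typing import List, Optional

    class CaseData:
        # Declared here, assigned by the test harness before first use.
        input: List[str]
        last_line: int
        # Attributes that may legitimately stay unset keep Optional plus a default.
        old_cwd: Optional[str] = None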
- suite = self.obj # type: DataSuite + suite: DataSuite = self.obj for f in suite.files: yield from split_test_cases(self, suite, os.path.join(suite.data_prefix, f)) @@ -626,7 +626,7 @@ def has_stable_flags(testcase: DataDrivenTestCase) -> bool: class DataSuite: # option fields - class variables - files = None # type: List[str] + files: List[str] base_path = test_temp_dir diff --git a/mypy/test/helpers.py b/mypy/test/helpers.py index 6f4252da9fe4..ef9834ed4fa4 100644 --- a/mypy/test/helpers.py +++ b/mypy/test/helpers.py @@ -152,7 +152,7 @@ def update_testcase_output(testcase: DataDrivenTestCase, output: List[str]) -> N data_lines = f.read().splitlines() test = '\n'.join(data_lines[testcase.line:testcase.last_line]) - mapping = {} # type: Dict[str, List[str]] + mapping: Dict[str, List[str]] = {} for old, new in zip(testcase.output, output): PREFIX = 'error:' ind = old.find(PREFIX) diff --git a/mypy/test/testdaemon.py b/mypy/test/testdaemon.py index 641bd8a70372..804a562e71f1 100644 --- a/mypy/test/testdaemon.py +++ b/mypy/test/testdaemon.py @@ -64,7 +64,7 @@ def parse_script(input: List[str]) -> List[List[str]]: The remaining lines are expected output. """ steps = [] - step = [] # type: List[str] + step: List[str] = [] for line in input: if line.startswith('$'): if step: diff --git a/mypy/test/testdeps.py b/mypy/test/testdeps.py index ea58d49533fc..0b6f4958db75 100644 --- a/mypy/test/testdeps.py +++ b/mypy/test/testdeps.py @@ -49,7 +49,7 @@ def run_case(self, testcase: DataDrivenTestCase) -> None: if not a: a = ['Unknown compile error (likely syntax error in test case or fixture)'] else: - deps = defaultdict(set) # type: DefaultDict[str, Set[str]] + deps: DefaultDict[str, Set[str]] = defaultdict(set) for module in files: if module in dumped_modules or dump_all and module not in ('abc', 'typing', diff --git a/mypy/test/testerrorstream.py b/mypy/test/testerrorstream.py index a9fbb95a7643..278fc1152504 100644 --- a/mypy/test/testerrorstream.py +++ b/mypy/test/testerrorstream.py @@ -26,7 +26,7 @@ def test_error_stream(testcase: DataDrivenTestCase) -> None: options = Options() options.show_traceback = True - logged_messages = [] # type: List[str] + logged_messages: List[str] = [] def flush_errors(msgs: List[str], serious: bool) -> None: if msgs: diff --git a/mypy/test/testfinegrained.py b/mypy/test/testfinegrained.py index f95748db0a24..11c77afe218b 100644 --- a/mypy/test/testfinegrained.py +++ b/mypy/test/testfinegrained.py @@ -230,9 +230,9 @@ def perform_step(self, else: new_messages = self.run_check(server, sources) - updated = [] # type: List[str] - changed = [] # type: List[str] - targets = [] # type: List[str] + updated: List[str] = [] + changed: List[str] = [] + targets: List[str] = [] triggered = [] if server.fine_grained_manager: if CHECK_CONSISTENCY: @@ -309,7 +309,7 @@ def parse_sources(self, program_text: str, allow_empty_dir=True) def maybe_suggest(self, step: int, server: Server, src: str, tmp_dir: str) -> List[str]: - output = [] # type: List[str] + output: List[str] = [] targets = self.get_suggest(src, step) for flags, target in targets: json = '--json' in flags diff --git a/mypy/test/testgraph.py b/mypy/test/testgraph.py index 3a6a8f70899a..d713828ca44c 100644 --- a/mypy/test/testgraph.py +++ b/mypy/test/testgraph.py @@ -22,16 +22,13 @@ def test_topsort(self) -> None: b = frozenset({'B'}) c = frozenset({'C'}) d = frozenset({'D'}) - data = {a: {b, c}, b: {d}, c: {d}} # type: Dict[AbstractSet[str], Set[AbstractSet[str]]] + data: Dict[AbstractSet[str], Set[AbstractSet[str]]] = {a: 
{b, c}, b: {d}, c: {d}} res = list(topsort(data)) assert_equal(res, [{d}, {b, c}, {a}]) def test_scc(self) -> None: - vertices = {'A', 'B', 'C', 'D'} - edges = {'A': ['B', 'C'], - 'B': ['C'], - 'C': ['B', 'D'], - 'D': []} # type: Dict[str, List[str]] + vertices = {"A", "B", "C", "D"} + edges: Dict[str, List[str]] = {"A": ["B", "C"], "B": ["C"], "C": ["B", "D"], "D": []} sccs = set(frozenset(x) for x in strongly_connected_components(vertices, edges)) assert_equal(sccs, {frozenset({'A'}), diff --git a/mypy/test/testinfer.py b/mypy/test/testinfer.py index 0c2f55bc69ad..1e7f176bc1bb 100644 --- a/mypy/test/testinfer.py +++ b/mypy/test/testinfer.py @@ -202,7 +202,7 @@ def assert_vararg_map(self, def expand_caller_kinds(kinds_or_names: List[Union[int, str]] ) -> Tuple[List[int], List[Optional[str]]]: kinds = [] - names = [] # type: List[Optional[str]] + names: List[Optional[str]] = [] for k in kinds_or_names: if isinstance(k, str): kinds.append(ARG_NAMED) @@ -216,7 +216,7 @@ def expand_caller_kinds(kinds_or_names: List[Union[int, str]] def expand_callee_kinds(kinds_and_names: List[Union[int, Tuple[int, str]]] ) -> Tuple[List[int], List[Optional[str]]]: kinds = [] - names = [] # type: List[Optional[str]] + names: List[Optional[str]] = [] for v in kinds_and_names: if isinstance(v, tuple): kinds.append(v[0]) @@ -287,7 +287,7 @@ def test_merge_with_multiple_overlaps(self) -> None: class OperandComparisonGroupingSuite(Suite): """Test cases for checker.group_comparison_operands.""" def literal_keymap(self, assignable_operands: Dict[int, NameExpr]) -> Dict[int, Key]: - output = {} # type: Dict[int, Key] + output: Dict[int, Key] = {} for index, expr in assignable_operands.items(): output[index] = ('FakeExpr', expr.name) return output @@ -437,10 +437,10 @@ def test_single_pair(self) -> None: single_comparison = [('==', x0, x1)] expected_output = [('==', [0, 1])] - assignable_combinations = [ + assignable_combinations: List[Dict[int, NameExpr]] = [ {}, {0: x0}, {1: x1}, {0: x0, 1: x1}, - ] # type: List[Dict[int, NameExpr]] - to_group_by = [set(), {'=='}, {'is'}] # type: List[Set[str]] + ] + to_group_by: List[Set[str]] = [set(), {"=="}, {"is"}] for combo in assignable_combinations: for operators in to_group_by: diff --git a/mypy/test/testipc.py b/mypy/test/testipc.py index 7dd829a59079..462fd44c8800 100644 --- a/mypy/test/testipc.py +++ b/mypy/test/testipc.py @@ -23,7 +23,7 @@ def server(msg: str, q: 'Queue[str]') -> None: class IPCTests(TestCase): def test_transaction_large(self) -> None: - queue = Queue() # type: Queue[str] + queue: Queue[str] = Queue() msg = 't' * 200000 # longer than the max read size of 100_000 p = Process(target=server, args=(msg, queue), daemon=True) p.start() @@ -36,7 +36,7 @@ def test_transaction_large(self) -> None: p.join() def test_connect_twice(self) -> None: - queue = Queue() # type: Queue[str] + queue: Queue[str] = Queue() msg = 'this is a test message' p = Process(target=server, args=(msg, queue), daemon=True) p.start() diff --git a/mypy/test/testmerge.py b/mypy/test/testmerge.py index c7fcbda01c04..92562d10134d 100644 --- a/mypy/test/testmerge.py +++ b/mypy/test/testmerge.py @@ -49,7 +49,7 @@ def setup(self) -> None: super().setup() self.str_conv = StrConv(show_ids=True) assert self.str_conv.id_mapper is not None - self.id_mapper = self.str_conv.id_mapper # type: IdMapper + self.id_mapper: IdMapper = self.str_conv.id_mapper self.type_str_conv = TypeStrVisitor(self.id_mapper) def run_case(self, testcase: DataDrivenTestCase) -> None: diff --git a/mypy/test/testsamples.py 
b/mypy/test/testsamples.py index 24c7702a274b..27b26af16f36 100644 --- a/mypy/test/testsamples.py +++ b/mypy/test/testsamples.py @@ -16,9 +16,9 @@ def test_samples(self) -> None: run_mypy(mypy_args + [f]) def test_stdlibsamples(self) -> None: - seen = set() # type: Set[str] + seen: Set[str] = set() stdlibsamples_dir = os.path.join('test-data', 'stdlib-samples', '3.2', 'test') - modules = [] # type: List[str] + modules: List[str] = [] for f in find_files(stdlibsamples_dir, prefix='test_', suffix='.py'): if f not in seen: seen.add(f) diff --git a/mypy/test/testsemanal.py b/mypy/test/testsemanal.py index e42a84e8365b..8ef702741784 100644 --- a/mypy/test/testsemanal.py +++ b/mypy/test/testsemanal.py @@ -195,7 +195,7 @@ def run_case(self, testcase: DataDrivenTestCase) -> None: class TypeInfoMap(Dict[str, TypeInfo]): def __str__(self) -> str: - a = ['TypeInfoMap('] # type: List[str] + a: List[str] = ["TypeInfoMap("] for x, y in sorted(self.items()): if isinstance(x, str) and (not x.startswith('builtins.') and not x.startswith('typing.') and diff --git a/mypy/test/testsolve.py b/mypy/test/testsolve.py index 172e4e4743c4..fd4189277907 100644 --- a/mypy/test/testsolve.py +++ b/mypy/test/testsolve.py @@ -115,7 +115,7 @@ def assert_solve(self, constraints: List[Constraint], results: List[Union[None, Type, Tuple[Type, Type]]], ) -> None: - res = [] # type: List[Optional[Type]] + res: List[Optional[Type]] = [] for r in results: if isinstance(r, tuple): res.append(r[0]) diff --git a/mypy/test/teststubgen.py b/mypy/test/teststubgen.py index 62feb0784a42..41de33d52e17 100644 --- a/mypy/test/teststubgen.py +++ b/mypy/test/teststubgen.py @@ -577,7 +577,7 @@ def run_case_inner(self, testcase: DataDrivenTestCase) -> None: if not testcase.name.endswith('_semanal'): options.parse_only = True generate_stubs(options) - a = [] # type: List[str] + a: List[str] = [] for module in modules: fnam = module_to_path(out_dir, module) self.add_file(fnam, a, header=len(modules) > 1) @@ -659,9 +659,9 @@ def test_infer_unary_op_sig(self) -> None: assert_equal(infer_method_sig('__%s__' % op), [self_arg]) def test_generate_c_type_stub_no_crash_for_object(self) -> None: - output = [] # type: List[str] + output: List[str] = [] mod = ModuleType('module', '') # any module is fine - imports = [] # type: List[str] + imports: List[str] = [] generate_c_type_stub(mod, 'alias', object, output, imports) assert_equal(imports, []) assert_equal(output[0], 'class alias:') @@ -671,8 +671,8 @@ def test_generate_c_type_stub_variable_type_annotation(self) -> None: class TestClassVariableCls: x = 1 - output = [] # type: List[str] - imports = [] # type: List[str] + output: List[str] = [] + imports: List[str] = [] mod = ModuleType('module', '') # any module is fine generate_c_type_stub(mod, 'C', TestClassVariableCls, output, imports) assert_equal(imports, []) @@ -682,16 +682,16 @@ def test_generate_c_type_inheritance(self) -> None: class TestClass(KeyError): pass - output = [] # type: List[str] - imports = [] # type: List[str] + output: List[str] = [] + imports: List[str] = [] mod = ModuleType('module, ') generate_c_type_stub(mod, 'C', TestClass, output, imports) assert_equal(output, ['class C(KeyError): ...', ]) assert_equal(imports, []) def test_generate_c_type_inheritance_same_module(self) -> None: - output = [] # type: List[str] - imports = [] # type: List[str] + output: List[str] = [] + imports: List[str] = [] mod = ModuleType(TestBaseClass.__module__, '') generate_c_type_stub(mod, 'C', TestClass, output, imports) assert_equal(output, ['class 
C(TestBaseClass): ...', ]) @@ -703,8 +703,8 @@ def test_generate_c_type_inheritance_other_module(self) -> None: class TestClass(argparse.Action): pass - output = [] # type: List[str] - imports = [] # type: List[str] + output: List[str] = [] + imports: List[str] = [] mod = ModuleType('module', '') generate_c_type_stub(mod, 'C', TestClass, output, imports) assert_equal(output, ['class C(argparse.Action): ...', ]) @@ -713,8 +713,9 @@ class TestClass(argparse.Action): def test_generate_c_type_inheritance_builtin_type(self) -> None: class TestClass(type): pass - output = [] # type: List[str] - imports = [] # type: List[str] + + output: List[str] = [] + imports: List[str] = [] mod = ModuleType('module', '') generate_c_type_stub(mod, 'C', TestClass, output, imports) assert_equal(output, ['class C(type): ...', ]) @@ -727,8 +728,9 @@ def test(self, arg0: str) -> None: test(self: TestClass, arg0: int) """ pass - output = [] # type: List[str] - imports = [] # type: List[str] + + output: List[str] = [] + imports: List[str] = [] mod = ModuleType(TestClass.__module__, '') generate_c_function_stub(mod, 'test', TestClass.test, output, imports, self_var='self', class_name='TestClass') @@ -742,8 +744,9 @@ def test(self, arg0: str = "") -> None: test(self: TestClass, arg0: str = "") """ pass - output = [] # type: List[str] - imports = [] # type: List[str] + + output: List[str] = [] + imports: List[str] = [] mod = ModuleType(TestClass.__module__, '') generate_c_function_stub(mod, 'test', TestClass.test, output, imports, self_var='self', class_name='TestClass') @@ -759,8 +762,9 @@ def test(arg0: str) -> None: test(arg0: argparse.Action) """ pass - output = [] # type: List[str] - imports = [] # type: List[str] + + output: List[str] = [] + imports: List[str] = [] mod = ModuleType(self.__module__, '') generate_c_function_stub(mod, 'test', test, output, imports) assert_equal(output, ['def test(arg0: argparse.Action) -> Any: ...']) @@ -777,8 +781,9 @@ def test(arg0: str) -> None: test(arg0: argparse.Action) """ pass - output = [] # type: List[str] - imports = [] # type: List[str] + + output: List[str] = [] + imports: List[str] = [] mod = ModuleType('argparse', '') generate_c_function_stub(mod, 'test', test, output, imports) assert_equal(output, ['def test(arg0: Action) -> Any: ...']) @@ -791,8 +796,9 @@ def test(arg0: str) -> None: test(arg0: str) -> argparse.Action """ pass - output = [] # type: List[str] - imports = [] # type: List[str] + + output: List[str] = [] + imports: List[str] = [] mod = ModuleType(self.__module__, '') generate_c_function_stub(mod, 'test', test, output, imports) assert_equal(output, ['def test(arg0: str) -> argparse.Action: ...']) @@ -807,8 +813,9 @@ def test(arg0: str) -> None: test(arg0: str) -> argparse.Action """ pass - output = [] # type: List[str] - imports = [] # type: List[str] + + output: List[str] = [] + imports: List[str] = [] mod = ModuleType('argparse', '') generate_c_function_stub(mod, 'test', test, output, imports) assert_equal(output, ['def test(arg0: str) -> Action: ...']) @@ -824,7 +831,7 @@ def get_attribute(self) -> None: pass attribute = property(get_attribute, doc="") - output = [] # type: List[str] + output: List[str] = [] generate_c_property_stub('attribute', TestClass.attribute, [], [], output, readonly=True) assert_equal(output, ['@property', 'def attribute(self) -> str: ...']) @@ -835,8 +842,9 @@ def test(self, arg0: str) -> None: test(self: TestClass, arg0: List[int]) """ pass - output = [] # type: List[str] - imports = [] # type: List[str] + + output: List[str] 
= [] + imports: List[str] = [] mod = ModuleType(TestClass.__module__, '') generate_c_function_stub(mod, 'test', TestClass.test, output, imports, self_var='self', class_name='TestClass') @@ -850,8 +858,9 @@ def test(self, arg0: str) -> None: test(self: TestClass, arg0: Dict[str, int]) """ pass - output = [] # type: List[str] - imports = [] # type: List[str] + + output: List[str] = [] + imports: List[str] = [] mod = ModuleType(TestClass.__module__, '') generate_c_function_stub(mod, 'test', TestClass.test, output, imports, self_var='self', class_name='TestClass') @@ -865,8 +874,9 @@ def test(self, arg0: str) -> None: test(self: TestClass, arg0: Dict[str, List[int]]) """ pass - output = [] # type: List[str] - imports = [] # type: List[str] + + output: List[str] = [] + imports: List[str] = [] mod = ModuleType(TestClass.__module__, '') generate_c_function_stub(mod, 'test', TestClass.test, output, imports, self_var='self', class_name='TestClass') @@ -880,8 +890,9 @@ def test(self, arg0: str) -> None: test(self: TestClass, arg0: Dict[argparse.Action, int]) """ pass - output = [] # type: List[str] - imports = [] # type: List[str] + + output: List[str] = [] + imports: List[str] = [] mod = ModuleType(TestClass.__module__, '') generate_c_function_stub(mod, 'test', TestClass.test, output, imports, self_var='self', class_name='TestClass') @@ -895,8 +906,9 @@ def test(self, arg0: str) -> None: test(self: TestClass, arg0: Dict[str, argparse.Action]) """ pass - output = [] # type: List[str] - imports = [] # type: List[str] + + output: List[str] = [] + imports: List[str] = [] mod = ModuleType(TestClass.__module__, '') generate_c_function_stub(mod, 'test', TestClass.test, output, imports, self_var='self', class_name='TestClass') @@ -915,8 +927,9 @@ def __init__(self, arg0: str) -> None: 2. __init__(self: TestClass, arg0: str, arg1: str) -> None """ pass - output = [] # type: List[str] - imports = [] # type: List[str] + + output: List[str] = [] + imports: List[str] = [] mod = ModuleType(TestClass.__module__, '') generate_c_function_stub(mod, '__init__', TestClass.__init__, output, imports, self_var='self', class_name='TestClass') diff --git a/mypy/test/testtypes.py b/mypy/test/testtypes.py index 5fc9a730db91..1115d54b8e2e 100644 --- a/mypy/test/testtypes.py +++ b/mypy/test/testtypes.py @@ -494,7 +494,7 @@ def callable(self, vars: List[str], *a: Type) -> CallableType: argument types a1, ... an and return type r and type arguments vars. 
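A recurring shape in the teststubgen.py hunks is `output: List[str] = []`: an empty literal has no inferable element type, so these locals must be annotated one way or another, and the patch simply swaps the comment spelling for the annotation spelling. A minimal sketch (hypothetical function):

    from typing import List

    def render_stub() -> List[str]:
        # [] alone gives mypy nothing to infer an element type from.
        output: List[str] = []
        output.append('def f() -> None: ...')
        return output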
""" - tv = [] # type: List[TypeVarDef] + tv: List[TypeVarDef] = [] n = -1 for v in vars: tv.append(TypeVarDef(v, v, n, [], self.fx.o)) diff --git a/mypy/test/typefixture.py b/mypy/test/typefixture.py index 3debe85aa22c..41e4105c20f0 100644 --- a/mypy/test/typefixture.py +++ b/mypy/test/typefixture.py @@ -223,7 +223,7 @@ def make_type_info(self, name: str, module_name = '__main__' if typevars: - v = [] # type: List[TypeVarDef] + v: List[TypeVarDef] = [] for id, n in enumerate(typevars, 1): if variances: variance = variances[id - 1] diff --git a/mypy/test/visitors.py b/mypy/test/visitors.py index 2ba4ab52d135..b1a84e3529e1 100644 --- a/mypy/test/visitors.py +++ b/mypy/test/visitors.py @@ -20,7 +20,7 @@ # from testtypegen class SkippedNodeSearcher(TraverserVisitor): def __init__(self) -> None: - self.nodes = set() # type: Set[Expression] + self.nodes: Set[Expression] = set() self.is_typing = False def visit_mypy_file(self, f: MypyFile) -> None: diff --git a/mypy/traverser.py b/mypy/traverser.py index c4834c9acb6b..d0b656c7a77f 100644 --- a/mypy/traverser.py +++ b/mypy/traverser.py @@ -321,7 +321,7 @@ def has_return_statement(fdef: FuncBase) -> bool: class ReturnCollector(TraverserVisitor): def __init__(self) -> None: - self.return_statements = [] # type: List[ReturnStmt] + self.return_statements: List[ReturnStmt] = [] self.inside_func = False def visit_func_def(self, defn: FuncDef) -> None: diff --git a/mypy/treetransform.py b/mypy/treetransform.py index bd8a623455f7..cdd4f604be86 100644 --- a/mypy/treetransform.py +++ b/mypy/treetransform.py @@ -55,12 +55,12 @@ def __init__(self) -> None: self.test_only = False # There may be multiple references to a Var node. Keep track of # Var translations using a dictionary. - self.var_map = {} # type: Dict[Var, Var] + self.var_map: Dict[Var, Var] = {} # These are uninitialized placeholder nodes used temporarily for nested # functions while we are transforming a top-level function. This maps an # untransformed node to a placeholder (which will later become the # transformed node). - self.func_placeholder_map = {} # type: Dict[FuncDef, FuncDef] + self.func_placeholder_map: Dict[FuncDef, FuncDef] = {} def visit_mypy_file(self, node: MypyFile) -> MypyFile: assert self.test_only, "This visitor should not be used for whole files." @@ -596,7 +596,7 @@ def names(self, names: List[NameExpr]) -> List[NameExpr]: return [self.duplicate_name(name) for name in names] def optional_names(self, names: Iterable[Optional[NameExpr]]) -> List[Optional[NameExpr]]: - result = [] # type: List[Optional[NameExpr]] + result: List[Optional[NameExpr]] = [] for name in names: if name: result.append(self.duplicate_name(name)) diff --git a/mypy/tvar_scope.py b/mypy/tvar_scope.py index 4c7a165036a2..57b9a965b39c 100644 --- a/mypy/tvar_scope.py +++ b/mypy/tvar_scope.py @@ -21,7 +21,7 @@ def __init__(self, prohibited: Type variables that aren't strictly in scope exactly, but can't be bound because they're part of an outer class's scope. 
""" - self.scope = {} # type: Dict[str, TypeVarLikeDef] + self.scope: Dict[str, TypeVarLikeDef] = {} self.parent = parent self.func_id = 0 self.class_id = 0 @@ -33,7 +33,7 @@ def __init__(self, def get_function_scope(self) -> 'Optional[TypeVarLikeScope]': """Get the nearest parent that's a function scope, not a class scope""" - it = self # type: Optional[TypeVarLikeScope] + it: Optional[TypeVarLikeScope] = self while it is not None and it.is_class_scope: it = it.parent return it @@ -63,7 +63,7 @@ def bind_new(self, name: str, tvar_expr: TypeVarLikeExpr) -> TypeVarLikeDef: self.func_id -= 1 i = self.func_id if isinstance(tvar_expr, TypeVarExpr): - tvar_def = TypeVarDef( + tvar_def: TypeVarLikeDef = TypeVarDef( name, tvar_expr.fullname, i, @@ -72,7 +72,7 @@ def bind_new(self, name: str, tvar_expr: TypeVarLikeExpr) -> TypeVarLikeDef: variance=tvar_expr.variance, line=tvar_expr.line, column=tvar_expr.column - ) # type: TypeVarLikeDef + ) elif isinstance(tvar_expr, ParamSpecExpr): tvar_def = ParamSpecDef( name, diff --git a/mypy/type_visitor.py b/mypy/type_visitor.py index aa61e58d4c67..5656b926bf7d 100644 --- a/mypy/type_visitor.py +++ b/mypy/type_visitor.py @@ -167,7 +167,7 @@ def visit_deleted_type(self, t: DeletedType) -> Type: return t def visit_instance(self, t: Instance) -> Type: - last_known_value = None # type: Optional[LiteralType] + last_known_value: Optional[LiteralType] = None if t.last_known_value is not None: raw_last_known_value = t.last_known_value.accept(self) assert isinstance(raw_last_known_value, LiteralType) # type: ignore @@ -232,7 +232,7 @@ def translate_variables(self, return variables def visit_overloaded(self, t: Overloaded) -> Type: - items = [] # type: List[CallableType] + items: List[CallableType] = [] for item in t.items(): new = item.accept(self) assert isinstance(new, CallableType) # type: ignore @@ -269,7 +269,7 @@ def __init__(self, strategy: Callable[[Iterable[T]], T]) -> None: self.strategy = strategy # Keep track of the type aliases already visited. This is needed to avoid # infinite recursion on types like A = Union[int, List[A]]. - self.seen_aliases = set() # type: Set[TypeAliasType] + self.seen_aliases: Set[TypeAliasType] = set() def visit_unbound_type(self, t: UnboundType) -> T: return self.query_types(t.args) @@ -350,7 +350,7 @@ def query_types(self, types: Iterable[Type]) -> T: Use the strategy to combine the results. Skip type aliases already visited types to avoid infinite recursion. """ - res = [] # type: List[T] + res: List[T] = [] for t in types: if isinstance(t, TypeAliasType): # Avoid infinite recursion for recursive type aliases. 
diff --git a/mypy/typeanal.py b/mypy/typeanal.py index 084d955f7c08..bcec18bd925f 100644 --- a/mypy/typeanal.py +++ b/mypy/typeanal.py @@ -35,7 +35,7 @@ T = TypeVar('T') -type_constructors = { +type_constructors: Final = { 'typing.Callable', 'typing.Optional', 'typing.Tuple', @@ -45,22 +45,22 @@ 'typing_extensions.Literal', 'typing.Annotated', 'typing_extensions.Annotated', -} # type: Final +} -ARG_KINDS_BY_CONSTRUCTOR = { +ARG_KINDS_BY_CONSTRUCTOR: Final = { 'mypy_extensions.Arg': ARG_POS, 'mypy_extensions.DefaultArg': ARG_OPT, 'mypy_extensions.NamedArg': ARG_NAMED, 'mypy_extensions.DefaultNamedArg': ARG_NAMED_OPT, 'mypy_extensions.VarArg': ARG_STAR, 'mypy_extensions.KwArg': ARG_STAR2, -} # type: Final +} -GENERIC_STUB_NOT_AT_RUNTIME_TYPES = { +GENERIC_STUB_NOT_AT_RUNTIME_TYPES: Final = { 'queue.Queue', 'builtins._PathLike', 'asyncio.futures.Future', -} # type: Final +} def analyze_type_alias(node: Expression, @@ -114,9 +114,9 @@ class TypeAnalyser(SyntheticTypeVisitor[Type], TypeAnalyzerPluginInterface): """ # Is this called from an untyped function definition? - in_dynamic_func = False # type: bool + in_dynamic_func: bool = False # Is this called from global scope? - global_scope = True # type: bool + global_scope: bool = True def __init__(self, api: SemanticAnalyzerCoreInterface, @@ -157,7 +157,7 @@ def __init__(self, self.options = options self.is_typeshed_stub = is_typeshed_stub # Names of type aliases encountered while analysing a type will be collected here. - self.aliases_used = set() # type: Set[str] + self.aliases_used: Set[str] = set() def visit_unbound_type(self, t: UnboundType, defining_literal: bool = False) -> Type: typ = self.visit_unbound_type_nonoptional(t, defining_literal) @@ -458,7 +458,7 @@ def analyze_unbound_type_without_type_info(self, t: UnboundType, sym: SymbolTabl # to make sure there are no remaining semanal-only types, then give up. t = t.copy_modified(args=self.anal_array(t.args)) # TODO: Move this message building logic to messages.py. - notes = [] # type: List[str] + notes: List[str] = [] if isinstance(sym.node, Var): notes.append('See https://mypy.readthedocs.io/en/' 'stable/common_issues.html#variables-vs-type-aliases') @@ -756,9 +756,9 @@ def analyze_callable_type(self, t: UnboundType) -> Type: def analyze_callable_args(self, arglist: TypeList) -> Optional[Tuple[List[Type], List[int], List[Optional[str]]]]: - args = [] # type: List[Type] - kinds = [] # type: List[int] - names = [] # type: List[Optional[str]] + args: List[Type] = [] + kinds: List[int] = [] + names: List[Optional[str]] = [] for arg in arglist.items: if isinstance(arg, CallableArgument): args.append(arg.typ) @@ -795,7 +795,7 @@ def analyze_literal_type(self, t: UnboundType) -> Type: self.fail('Literal[...] 
must have at least one parameter', t) return AnyType(TypeOfAny.from_error) - output = [] # type: List[Type] + output: List[Type] = [] for i, arg in enumerate(t.args): analyzed_types = self.analyze_literal_param(i + 1, arg, t) if analyzed_types is None: @@ -897,8 +897,8 @@ def tvar_scope_frame(self) -> Iterator[None]: def infer_type_variables(self, type: CallableType) -> List[Tuple[str, TypeVarLikeExpr]]: """Return list of unique type variables referred to in a callable.""" - names = [] # type: List[str] - tvars = [] # type: List[TypeVarLikeExpr] + names: List[str] = [] + tvars: List[TypeVarLikeExpr] = [] for arg in type.arg_types: for name, tvar_expr in arg.accept( TypeVarLikeQuery(self.lookup_qualified, self.tvar_scope) @@ -934,7 +934,7 @@ def bind_function_type_variables( # Do not define a new type variable if already defined in scope. typevars = [(name, tvar) for name, tvar in typevars if not self.is_defined_type_var(name, defn)] - defs = [] # type: List[TypeVarLikeDef] + defs: List[TypeVarLikeDef] = [] for name, tvar in typevars: if not self.tvar_scope.allow_binding(tvar.fullname): self.fail('Type variable "{}" is bound by an outer class'.format(name), defn) @@ -952,7 +952,7 @@ def is_defined_type_var(self, tvar: str, context: Context) -> bool: return self.tvar_scope.get_binding(tvar_node) is not None def anal_array(self, a: Iterable[Type], nested: bool = True) -> List[Type]: - res = [] # type: List[Type] + res: List[Type] = [] for t in a: res.append(self.anal_type(t, nested)) return res @@ -1069,7 +1069,7 @@ def fix_instance(t: Instance, fail: MsgCallback, note: MsgCallback, """ if len(t.args) == 0: if use_generic_error: - fullname = None # type: Optional[str] + fullname: Optional[str] = None else: fullname = t.type.fullname any_type = get_omitted_any(disallow_any, fail, note, t, python_version, fullname, @@ -1168,8 +1168,8 @@ def set_any_tvars(node: TypeAlias, def remove_dups(tvars: Iterable[T]) -> List[T]: # Get unique elements in order of appearance - all_tvars = set() # type: Set[T] - new_tvars = [] # type: List[T] + all_tvars: Set[T] = set() + new_tvars: List[T] = [] for t in tvars: if t not in all_tvars: new_tvars.append(t) @@ -1289,7 +1289,7 @@ def visit_any(self, t: AnyType) -> List[AnyType]: @classmethod def combine_lists_strategy(cls, it: Iterable[List[AnyType]]) -> List[AnyType]: - result = [] # type: List[AnyType] + result: List[AnyType] = [] for l in it: result.extend(l) return result diff --git a/mypy/typeops.py b/mypy/typeops.py index ec9d58d7a126..718800967b44 100644 --- a/mypy/typeops.py +++ b/mypy/typeops.py @@ -94,7 +94,7 @@ def type_object_type_from_function(signature: FunctionLike, signature = bind_self(signature, original_type=default_self, is_classmethod=is_new) signature = cast(FunctionLike, map_type_from_supertype(signature, info, def_info)) - special_sig = None # type: Optional[str] + special_sig: Optional[str] = None if def_info.fullname == 'builtins.dict': # Special signature! special_sig = 'dict' @@ -104,7 +104,7 @@ def type_object_type_from_function(signature: FunctionLike, else: # Overloaded __init__/__new__. 
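# Wrap each overload item into a constructor callable and reassemble # them into a single Overloaded type.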
assert isinstance(signature, Overloaded) - items = [] # type: List[CallableType] + items: List[CallableType] = [] for item, orig_self in zip(signature.items(), orig_self_types): items.append(class_callable(item, info, fallback, special_sig, is_new, orig_self)) return Overloaded(items) @@ -114,7 +114,7 @@ def class_callable(init_type: CallableType, info: TypeInfo, type_type: Instance, special_sig: Optional[str], is_new: bool, orig_self_type: Optional[Type] = None) -> CallableType: """Create a type object type based on the signature of __init__.""" - variables = [] # type: List[TypeVarLikeDef] + variables: List[TypeVarLikeDef] = [] variables.extend(info.defn.type_vars) variables.extend(init_type.variables) @@ -130,7 +130,7 @@ def class_callable(init_type: CallableType, info: TypeInfo, type_type: Instance, # if it is actually returning a subtype of what we would return otherwise. and is_subtype(explicit_type, default_ret_type, ignore_type_params=True) ): - ret_type = explicit_type # type: Type + ret_type: Type = explicit_type else: ret_type = default_ret_type @@ -229,7 +229,7 @@ class B(A): pass return cast(F, func) self_param_type = get_proper_type(func.arg_types[0]) - variables = [] # type: Sequence[TypeVarLikeDef] + variables: Sequence[TypeVarLikeDef] = [] if func.variables and supported_self_type(self_param_type): if original_type is None: # TODO: type check method override (see #7861). @@ -337,7 +337,7 @@ def make_simplified_union(items: Sequence[Type], """ items = get_proper_types(items) while any(isinstance(typ, UnionType) for typ in items): - all_items = [] # type: List[ProperType] + all_items: List[ProperType] = [] for typ in items: if isinstance(typ, UnionType): all_items.extend(get_proper_types(typ.items)) @@ -347,13 +347,13 @@ def make_simplified_union(items: Sequence[Type], from mypy.subtypes import is_proper_subtype - removed = set() # type: Set[int] + removed: Set[int] = set() # Avoid slow nested for loop for Union of Literal of strings (issue #9169) if all((isinstance(item, LiteralType) and item.fallback.type.fullname == 'builtins.str') for item in items): - seen = set() # type: Set[str] + seen: Set[str] = set() for index, item in enumerate(items): assert isinstance(item, LiteralType) assert isinstance(item.value, str) @@ -525,7 +525,7 @@ def callable_type(fdef: FuncItem, fallback: Instance, ret_type: Optional[Type] = None) -> CallableType: # TODO: somewhat unfortunate duplication with prepare_method_signature in semanal if fdef.info and not fdef.is_static and fdef.arg_names: - self_type = fill_typevars(fdef.info) # type: Type + self_type: Type = fill_typevars(fdef.info) if fdef.is_class or fdef.name == '__new__': self_type = TypeType.make_normalized(self_type) args = [self_type] + [AnyType(TypeOfAny.unannotated)] * (len(fdef.arg_names)-1) @@ -600,13 +600,13 @@ def try_getting_literals_from_type(typ: Type, typ = get_proper_type(typ) if isinstance(typ, Instance) and typ.last_known_value is not None: - possible_literals = [typ.last_known_value] # type: List[Type] + possible_literals: List[Type] = [typ.last_known_value] elif isinstance(typ, UnionType): possible_literals = list(typ.items) else: possible_literals = [typ] - literals = [] # type: List[T] + literals: List[T] = [] for lit in get_proper_types(possible_literals): if isinstance(lit, LiteralType) and lit.fallback.type.fullname == target_fullname: val = lit.value @@ -725,7 +725,7 @@ def try_contracting_literals_in_union(types: Sequence[Type]) -> List[ProperType] this function will return Color. 
""" proper_types = [get_proper_type(typ) for typ in types] - sum_types = {} # type: Dict[str, Tuple[Set[Any], List[int]]] + sum_types: Dict[str, Tuple[Set[Any], List[int]]] = {} marked_for_deletion = set() for idx, typ in enumerate(proper_types): if isinstance(typ, LiteralType): diff --git a/mypy/types.py b/mypy/types.py index 7aa83a75f431..c285df7ecb75 100644 --- a/mypy/types.py +++ b/mypy/types.py @@ -68,17 +68,21 @@ ) # Supported names of TypedDict type constructors. -TPDICT_NAMES = ('typing.TypedDict', - 'typing_extensions.TypedDict', - 'mypy_extensions.TypedDict') # type: Final +TPDICT_NAMES: Final = ( + "typing.TypedDict", + "typing_extensions.TypedDict", + "mypy_extensions.TypedDict", +) # Supported fallback instance type names for TypedDict types. -TPDICT_FB_NAMES = ('typing._TypedDict', - 'typing_extensions._TypedDict', - 'mypy_extensions._TypedDict') # type: Final +TPDICT_FB_NAMES: Final = ( + "typing._TypedDict", + "typing_extensions._TypedDict", + "mypy_extensions._TypedDict", +) # A placeholder used for Bogus[...] parameters -_dummy = object() # type: Final[Any] +_dummy: Final[Any] = object() class TypeOfAny: @@ -86,27 +90,27 @@ class TypeOfAny: This class describes different types of Any. Each 'Any' can be of only one type at a time. """ # Was this Any type inferred without a type annotation? - unannotated = 1 # type: Final + unannotated: Final = 1 # Does this Any come from an explicit type annotation? - explicit = 2 # type: Final + explicit: Final = 2 # Does this come from an unfollowed import? See --disallow-any-unimported option - from_unimported_type = 3 # type: Final + from_unimported_type: Final = 3 # Does this Any type come from omitted generics? - from_omitted_generics = 4 # type: Final + from_omitted_generics: Final = 4 # Does this Any come from an error? - from_error = 5 # type: Final + from_error: Final = 5 # Is this a type that can't be represented in mypy's type system? For instance, type of # call to NewType...). Even though these types aren't real Anys, we treat them as such. # Also used for variables named '_'. - special_form = 6 # type: Final + special_form: Final = 6 # Does this Any come from interaction with another Any? - from_another_any = 7 # type: Final + from_another_any: Final = 7 # Does this Any come from an implementation limitation/bug? - implementation_artifact = 8 # type: Final + implementation_artifact: Final = 8 # Does this Any come from use in the suggestion engine? This is # used to ignore Anys inserted by the suggestion engine when # generating constraints. 
- suggestion_engine = 9 # type: Final + suggestion_engine: Final = 9 def deserialize_type(data: Union[JsonDict, str]) -> 'Type': @@ -172,7 +176,7 @@ def __init__(self, alias: Optional[mypy.nodes.TypeAlias], args: List[Type], line: int = -1, column: int = -1) -> None: self.alias = alias self.args = args - self.type_ref = None # type: Optional[str] + self.type_ref: Optional[str] = None super().__init__(line, column) def _expand_once(self) -> Type: @@ -245,15 +249,17 @@ def __eq__(self, other: object) -> bool: def serialize(self) -> JsonDict: assert self.alias is not None - data = {'.class': 'TypeAliasType', - 'type_ref': self.alias.fullname, - 'args': [arg.serialize() for arg in self.args]} # type: JsonDict + data: JsonDict = { + ".class": "TypeAliasType", + "type_ref": self.alias.fullname, + "args": [arg.serialize() for arg in self.args], + } return data @classmethod def deserialize(cls, data: JsonDict) -> 'TypeAliasType': assert data['.class'] == 'TypeAliasType' - args = [] # type: List[Type] + args: List[Type] = [] if 'args' in data: args_list = data['args'] assert isinstance(args_list, list) @@ -302,14 +308,14 @@ class TypeVarId: # function type variables. # Metavariables are allocated unique ids starting from 1. - raw_id = 0 # type: int + raw_id: int = 0 # Level of the variable in type inference. Currently either 0 for # declared types, or 1 for type inference metavariables. - meta_level = 0 # type: int + meta_level: int = 0 # Class variable used for allocating fresh ids for metavariables. - next_raw_id = 1 # type: ClassVar[int] + next_raw_id: ClassVar[int] = 1 def __init__(self, raw_id: int, meta_level: int = 0) -> None: self.raw_id = raw_id @@ -344,7 +350,7 @@ def is_meta_var(self) -> bool: class TypeVarLikeDef(mypy.nodes.Context): name = '' # Name (may be qualified) fullname = '' # Fully qualified name - id = None # type: TypeVarId + id: TypeVarId def __init__( self, name: str, fullname: str, id: Union[TypeVarId, int], line: int = -1, column: int = -1 @@ -369,9 +375,10 @@ def deserialize(cls, data: JsonDict) -> 'TypeVarLikeDef': class TypeVarDef(TypeVarLikeDef): """Definition of a single type variable.""" - values = None # type: List[Type] # Value restriction, empty list if no restriction - upper_bound = None # type: Type - variance = INVARIANT # type: int + + values: List[Type] # Value restriction, empty list if no restriction + upper_bound: Type + variance: int = INVARIANT def __init__(self, name: str, fullname: str, id: Union[TypeVarId, int], values: List[Type], upper_bound: Type, variance: int = INVARIANT, line: int = -1, @@ -535,9 +542,10 @@ class CallableArgument(ProperType): Note that this is a synthetic type for helping parse ASTs, not a real type. """ - typ = None # type: Type - name = None # type: Optional[str] - constructor = None # type: Optional[str] + + typ: Type + name: Optional[str] = None + constructor: Optional[str] = None def __init__(self, typ: Type, name: Optional[str], constructor: Optional[str], line: int = -1, column: int = -1) -> None: @@ -563,7 +571,7 @@ class TypeList(ProperType): types before they are processed into Callable types. """ - items = None # type: List[Type] + items: List[Type] def __init__(self, items: List[Type], line: int = -1, column: int = -1) -> None: super().__init__(line, column) @@ -752,7 +760,7 @@ class DeletedType(ProperType): These can be used as lvalues but not rvalues. 
""" - source = '' # type: Optional[str] # May be None; name that generated this value + source: Optional[str] = "" # May be None; name that generated this value def __init__(self, source: Optional[str] = None, line: int = -1, column: int = -1) -> None: super().__init__(line, column) @@ -772,7 +780,7 @@ def deserialize(cls, data: JsonDict) -> 'DeletedType': # Fake TypeInfo to be used as a placeholder during Instance de-serialization. -NOT_READY = mypy.nodes.FakeInfo('De-serialization failure: TypeInfo not fixed') # type: Final +NOT_READY: Final = mypy.nodes.FakeInfo("De-serialization failure: TypeInfo not fixed") class Instance(ProperType): @@ -789,7 +797,7 @@ def __init__(self, typ: mypy.nodes.TypeInfo, args: Sequence[Type], super().__init__(line, column) self.type = typ self.args = tuple(args) - self.type_ref = None # type: Optional[str] + self.type_ref: Optional[str] = None # True if result of type variable substitution self.erased = erased @@ -860,10 +868,11 @@ def serialize(self) -> Union[JsonDict, str]: type_ref = self.type.fullname if not self.args and not self.last_known_value: return type_ref - data = {'.class': 'Instance', - } # type: JsonDict - data['type_ref'] = type_ref - data['args'] = [arg.serialize() for arg in self.args] + data: JsonDict = { + ".class": "Instance", + } + data["type_ref"] = type_ref + data["args"] = [arg.serialize() for arg in self.args] if self.last_known_value is not None: data['last_known_value'] = self.last_known_value.serialize() return data @@ -875,7 +884,7 @@ def deserialize(cls, data: Union[JsonDict, str]) -> 'Instance': inst.type_ref = data return inst assert data['.class'] == 'Instance' - args = [] # type: List[Type] + args: List[Type] = [] if 'args' in data: args_list = data['args'] assert isinstance(args_list, list) @@ -915,14 +924,14 @@ class TypeVarType(ProperType): def __init__(self, binder: TypeVarDef, line: int = -1, column: int = -1) -> None: super().__init__(line, column) self.name = binder.name # Name of the type variable (for messages and debugging) - self.fullname = binder.fullname # type: str - self.id = binder.id # type: TypeVarId + self.fullname: str = binder.fullname + self.id: TypeVarId = binder.id # Value restriction, empty list if no restriction - self.values = binder.values # type: List[Type] + self.values: List[Type] = binder.values # Upper bound for values - self.upper_bound = binder.upper_bound # type: Type + self.upper_bound: Type = binder.upper_bound # See comments in TypeVarDef for more about variance. - self.variance = binder.variance # type: int + self.variance: int = binder.variance def accept(self, visitor: 'TypeVisitor[T]') -> T: return visitor.visit_type_var(self) @@ -1249,7 +1258,7 @@ def is_generic(self) -> bool: return bool(self.variables) def type_var_ids(self) -> List[TypeVarId]: - a = [] # type: List[TypeVarId] + a: List[TypeVarId] = [] for tv in self.variables: a.append(tv.id) return a @@ -1321,7 +1330,7 @@ class Overloaded(FunctionLike): implementation. 
""" - _items = None # type: List[CallableType] # Must not be empty + _items: List[CallableType] # Must not be empty def __init__(self, items: List[CallableType]) -> None: super().__init__(items[0].line, items[0].column) @@ -1345,7 +1354,7 @@ def type_object(self) -> mypy.nodes.TypeInfo: return self._items[0].type_object() def with_name(self, name: str) -> 'Overloaded': - ni = [] # type: List[CallableType] + ni: List[CallableType] = [] for it in self._items: ni.append(it.with_name(name)) return Overloaded(ni) @@ -1388,8 +1397,8 @@ class TupleType(ProperType): implicit: If True, derived from a tuple expression (t,....) instead of Tuple[t, ...] """ - items = None # type: List[Type] - partial_fallback = None # type: Instance + items: List[Type] + partial_fallback: Instance implicit = False def __init__(self, items: List[Type], fallback: Instance, line: int = -1, @@ -1463,9 +1472,9 @@ class TypedDictType(ProperType): TODO: The fallback structure is perhaps overly complicated. """ - items = None # type: OrderedDict[str, Type] # item_name -> item_type - required_keys = None # type: Set[str] - fallback = None # type: Instance + items: "OrderedDict[str, Type]" # item_name -> item_type + required_keys: Set[str] + fallback: Instance def __init__(self, items: 'OrderedDict[str, Type]', required_keys: Set[str], fallback: Instance, line: int = -1, column: int = -1) -> None: @@ -1727,7 +1736,7 @@ class StarType(ProperType): This is not a real type but a syntactic AST construct. """ - type = None # type: Type + type: Type def __init__(self, type: Type, line: int = -1, column: int = -1) -> None: super().__init__(line, column) @@ -1833,11 +1842,11 @@ class PartialType(ProperType): """ # None for the 'None' partial type; otherwise a generic class - type = None # type: Optional[mypy.nodes.TypeInfo] - var = None # type: mypy.nodes.Var + type: Optional[mypy.nodes.TypeInfo] = None + var: mypy.nodes.Var # For partial defaultdict[K, V], the type V (K is unknown). If V is generic, # the type argument is Any and will be replaced later. - value_type = None # type: Optional[Instance] + value_type: Optional[Instance] = None def __init__(self, type: 'Optional[mypy.nodes.TypeInfo]', @@ -1898,7 +1907,7 @@ class TypeType(ProperType): # This can't be everything, but it can be a class reference, # a generic class instance, a union, Any, a type variable... - item = None # type: ProperType + item: ProperType def __init__(self, item: Bogus[Union[Instance, AnyType, TypeVarType, TupleType, NoneType, CallableType]], *, @@ -2324,7 +2333,7 @@ def flatten_nested_unions(types: Iterable[Type], # This and similar functions on unions can cause infinite recursion # if passed a "pathological" alias like A = Union[int, A] or similar. # TODO: ban such aliases in semantic analyzer. 
- flat_items = [] # type: List[Type] + flat_items: List[Type] = [] if handle_type_alias_type: types = get_proper_types(types) for tp in types: @@ -2382,10 +2391,10 @@ def is_literal_type(typ: ProperType, fallback_fullname: str, value: LiteralValue return typ.value == value -names = globals().copy() # type: Final +names: Final = globals().copy() names.pop('NOT_READY', None) -deserialize_map = { +deserialize_map: Final = { key: obj.deserialize for key, obj in names.items() if isinstance(obj, type) and issubclass(obj, Type) and obj is not Type -} # type: Final +} diff --git a/mypy/typestate.py b/mypy/typestate.py index 39eca3e318ef..73376ee7157a 100644 --- a/mypy/typestate.py +++ b/mypy/typestate.py @@ -38,7 +38,7 @@ class TypeState: # was done in strict optional mode and of the specific *kind* of subtyping relationship, # which we represent as an arbitrary hashable tuple. # We need the caches, since subtype checks for structural types are very slow. - _subtype_caches = {} # type: Final[SubtypeCache] + _subtype_caches: Final[SubtypeCache] = {} # This contains protocol dependencies generated after running a full build, # or after an update. These dependencies are special because: @@ -51,7 +51,7 @@ class TypeState: # A blocking error will be generated in this case, since we can't proceed safely. # For the description of kinds of protocol dependencies and corresponding examples, # see _snapshot_protocol_deps. - proto_deps = {} # type: ClassVar[Optional[Dict[str, Set[str]]]] + proto_deps: ClassVar[Optional[Dict[str, Set[str]]]] = {} # Protocols (full names) a given class attempted to implement. # Used to calculate fine grained protocol dependencies and optimize protocol # checks. For example, if we pass a value # of type a.A to a function expecting something compatible with protocol p.P, # we'd have 'a.A' -> {'p.P', ...} in the map. This map is flushed after every incremental # update. - _attempted_protocols = {} # type: Final[Dict[str, Set[str]]] + _attempted_protocols: Final[Dict[str, Set[str]]] = {} # We also snapshot protocol members of the above protocols. For example, if we pass # a value of type a.A to a function expecting something compatible with Iterable, we'd have # 'a.A' -> {'__iter__', ...} in the map. This map is also flushed after every incremental # update. This map is needed to only generate dependencies like -> # instead of a wildcard to avoid unnecessarily invalidating classes. - _checked_against_members = {} # type: Final[Dict[str, Set[str]]] + _checked_against_members: Final[Dict[str, Set[str]]] = {} # TypeInfos that appeared as a left type (subtype) in a subtype check since latest # dependency snapshot update. This is an optimisation for fine grained mode; during a full # run we only take a dependency snapshot at the very end, so this set will contain all # dependencies generated from (typically) few TypeInfos that were subtype-checked # (i.e. appeared as r.h.s. in an assignment or an argument in a function call in # a re-checked target) during the update. - _rechecked_types = set() # type: Final[Set[TypeInfo]] + _rechecked_types: Final[Set[TypeInfo]] = set() # The two attributes below are assumption stacks for subtyping relationships between # recursive type aliases. Normally, one would pass type assumptions as an additional # argument to is_subtype(), but this would mean updating dozens of related functions # threading this through all callsites (see also comment for TypeInfo.assuming).
- _assuming = [] # type: Final[List[Tuple[TypeAliasType, TypeAliasType]]] - _assuming_proper = [] # type: Final[List[Tuple[TypeAliasType, TypeAliasType]]] + _assuming: Final[List[Tuple[TypeAliasType, TypeAliasType]]] = [] + _assuming_proper: Final[List[Tuple[TypeAliasType, TypeAliasType]]] = [] # Ditto for inference of generic constraints against recursive type aliases. - _inferring = [] # type: Final[List[TypeAliasType]] + _inferring: Final[List[TypeAliasType]] = [] # N.B: We do all of the accesses to these properties through # TypeState, instead of making these classmethods and accessing @@ -187,7 +187,7 @@ def __iter__(self) -> Iterator[int]: proper subtype checks, and calculating meets and joins, if this involves calling 'subtypes.is_protocol_implementation'). """ - deps = {} # type: Dict[str, Set[str]] + deps: Dict[str, Set[str]] = {} for info in TypeState._rechecked_types: for attr in TypeState._checked_against_members[info.fullname]: # The need for full MRO here is subtle, during an update, base classes of diff --git a/mypy/typevars.py b/mypy/typevars.py index 113569874ceb..513343acb867 100644 --- a/mypy/typevars.py +++ b/mypy/typevars.py @@ -11,7 +11,7 @@ def fill_typevars(typ: TypeInfo) -> Union[Instance, TupleType]: For a generic G type with parameters T1, .., Tn, return G[T1, ..., Tn]. """ - tv = [] # type: List[Type] + tv: List[Type] = [] # TODO: why do we need to keep both typ.type_vars and typ.defn.type_vars? for i in range(len(typ.defn.type_vars)): tv.append(TypeVarType(typ.defn.type_vars[i])) diff --git a/mypy/util.py b/mypy/util.py index 2c1ffbce43cf..bae4821cd535 100644 --- a/mypy/util.py +++ b/mypy/util.py @@ -23,11 +23,10 @@ T = TypeVar('T') -ENCODING_RE = \ - re.compile(br'([ \t\v]*#.*(\r\n?|\n))??[ \t\v]*#.*coding[:=][ \t]*([-\w.]+)') # type: Final +ENCODING_RE: Final = re.compile(br"([ \t\v]*#.*(\r\n?|\n))??[ \t\v]*#.*coding[:=][ \t]*([-\w.]+)") -DEFAULT_SOURCE_OFFSET = 4 # type: Final -DEFAULT_COLUMNS = 80 # type: Final +DEFAULT_SOURCE_OFFSET: Final = 4 +DEFAULT_COLUMNS: Final = 80 # At least this number of columns will be shown on each side of # error location when printing source code snippet. 
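The typestate.py and util.py hunks above all apply the same conversion for constants: the trailing "# type: Final" comment becomes a Final annotation on the name, with the value's type inferred unless it is pinned explicitly. A minimal sketch of the spellings involved (the constant names here are illustrative, not from this patch):

    from typing import Any, Dict
    from typing_extensions import Final

    DEFAULT_WIDTH: Final = 80            # value type inferred as int
    _sentinel: Final[Any] = object()     # value type pinned explicitly
    _cache: Final[Dict[str, int]] = {}   # name is final; contents stay mutable

Note that Final only forbids rebinding the name, which is why mutable caches such as TypeState._subtype_caches can still be updated in place.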
@@ -40,8 +39,12 @@ MINIMUM_WINDOWS_MAJOR_VT100 = 10 MINIMUM_WINDOWS_BUILD_VT100 = 10586 -default_python2_interpreter = \ - ['python2', 'python', '/usr/bin/python', 'C:\\Python27\\python.exe'] # type: Final +default_python2_interpreter: Final = [ + "python2", + "python", + "/usr/bin/python", + "C:\\Python27\\python.exe", +] def split_module_names(mod_name: str) -> List[str]: @@ -65,7 +68,7 @@ def module_prefix(modules: Iterable[str], target: str) -> Optional[str]: def split_target(modules: Iterable[str], target: str) -> Optional[Tuple[str, str]]: - remaining = [] # type: List[str] + remaining: List[str] = [] while True: if target in modules: return target, '.'.join(remaining) @@ -193,7 +196,7 @@ def get_mypy_comments(source: str) -> List[Tuple[int, str]]: return results -_python2_interpreter = None # type: Optional[str] +_python2_interpreter: Optional[str] = None def try_find_python2_interpreter() -> Optional[str]: @@ -214,28 +217,28 @@ def try_find_python2_interpreter() -> Optional[str]: return None -PASS_TEMPLATE = """ +PASS_TEMPLATE: Final = """ -""" # type: Final +""" -FAIL_TEMPLATE = """ +FAIL_TEMPLATE: Final = """ {text} -""" # type: Final +""" -ERROR_TEMPLATE = """ +ERROR_TEMPLATE: Final = """ {text} -""" # type: Final +""" def write_junit_xml(dt: float, serious: bool, messages: List[str], path: str, @@ -269,7 +272,7 @@ class IdMapper: """ def __init__(self) -> None: - self.id_map = {} # type: Dict[object, int] + self.id_map: Dict[object, int] = {} self.next_id = 0 def id(self, o: object) -> int: @@ -311,7 +314,7 @@ def correct_relative_import(cur_mod_id: str, return cur_mod_id + (("." + target) if target else ""), ok -fields_cache = {} # type: Final[Dict[Type[object], List[str]]] +fields_cache: Final[Dict[Type[object], List[str]]] = {} def get_class_descriptors(cls: 'Type[object]') -> Sequence[str]: @@ -408,7 +411,7 @@ def count_stats(errors: List[str]) -> Tuple[int, int]: def split_words(msg: str) -> List[str]: """Split line of text into words (but not within quoted groups).""" next_word = '' - res = [] # type: List[str] + res: List[str] = [] allow_break = True for c in msg: if c == ' ' and allow_break: @@ -451,7 +454,7 @@ def soft_wrap(msg: str, max_len: int, first_offset: int, """ words = split_words(msg) next_line = words.pop(0) - lines = [] # type: List[str] + lines: List[str] = [] while words: next_word = words.pop(0) max_line_len = max_len - num_indent if lines else max_len - first_offset diff --git a/mypyc/analysis/dataflow.py b/mypyc/analysis/dataflow.py index 431976fae21f..a44ad029d1f0 100644 --- a/mypyc/analysis/dataflow.py +++ b/mypyc/analysis/dataflow.py @@ -46,7 +46,7 @@ def get_cfg(blocks: List[BasicBlock]) -> CFG: basic block index -> (successor blocks, predecessor blocks) """ succ_map = {} - pred_map = {} # type: Dict[BasicBlock, List[BasicBlock]] + pred_map: Dict[BasicBlock, List[BasicBlock]] = {} exits = set() for block in blocks: @@ -480,8 +480,8 @@ def run_analysis(blocks: List[BasicBlock], # Calculate kill and gen sets for entire basic blocks.
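# Compose the per-op (gen, kill) pairs in order: a later kill removes # earlier gens from the block summary, and a later gen cancels earlier # kills.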
for block in blocks: - gen = set() # type: Set[T] - kill = set() # type: Set[T] + gen: Set[T] = set() + kill: Set[T] = set() ops = block.ops if backward: ops = list(reversed(ops)) @@ -497,8 +497,8 @@ def run_analysis(blocks: List[BasicBlock], if not backward: worklist = worklist[::-1] # Reverse for a small performance improvement workset = set(worklist) - before = {} # type: Dict[BasicBlock, Set[T]] - after = {} # type: Dict[BasicBlock, Set[T]] + before: Dict[BasicBlock, Set[T]] = {} + after: Dict[BasicBlock, Set[T]] = {} for block in blocks: if kind == MAYBE_ANALYSIS: before[block] = set() @@ -520,7 +520,7 @@ def run_analysis(blocks: List[BasicBlock], label = worklist.pop() workset.remove(label) if pred_map[label]: - new_before = None # type: Union[Set[T], None] + new_before: Union[Set[T], None] = None for pred in pred_map[label]: if new_before is None: new_before = set(after[pred]) @@ -541,12 +541,12 @@ def run_analysis(blocks: List[BasicBlock], after[label] = new_after # Run algorithm for each basic block to generate opcode-level sets. - op_before = {} # type: Dict[Tuple[BasicBlock, int], Set[T]] - op_after = {} # type: Dict[Tuple[BasicBlock, int], Set[T]] + op_before: Dict[Tuple[BasicBlock, int], Set[T]] = {} + op_after: Dict[Tuple[BasicBlock, int], Set[T]] = {} for block in blocks: label = block cur = before[label] - ops_enum = enumerate(block.ops) # type: Iterator[Tuple[int, Op]] + ops_enum: Iterator[Tuple[int, Op]] = enumerate(block.ops) if backward: ops_enum = reversed(list(ops_enum)) for idx, op in ops_enum: diff --git a/mypyc/build.py b/mypyc/build.py index 088e481fc241..285ecc66b572 100644 --- a/mypyc/build.py +++ b/mypyc/build.py @@ -296,7 +296,7 @@ def write_file(path: str, contents: str) -> None: encoded_contents = contents.encode('utf-8') try: with open(path, 'rb') as f: - old_contents = f.read() # type: Optional[bytes] + old_contents: Optional[bytes] = f.read() except IOError: old_contents = None if old_contents != encoded_contents: @@ -328,9 +328,7 @@ def construct_groups( """ if separate is True: - groups = [ - ([source], None) for source in sources - ] # type: emitmodule.Groups + groups: emitmodule.Groups = [([source], None) for source in sources] elif isinstance(separate, list): groups = [] used_sources = set() @@ -362,7 +360,7 @@ def get_header_deps(cfiles: List[Tuple[str, str]]) -> List[str]: Arguments: cfiles: A list of (file name, file contents) pairs. """ - headers = set() # type: Set[str] + headers: Set[str] = set() for _, contents in cfiles: headers.update(re.findall(r'#include "(.*)"', contents)) @@ -406,7 +404,7 @@ def mypyc_build( # Write out the generated C and collect the files for each group # Should this be here?? - group_cfilenames = [] # type: List[Tuple[List[str], List[str]]] + group_cfilenames: List[Tuple[List[str], List[str]]] = [] for cfiles in group_cfiles: cfilenames = [] for cfile, ctext in cfiles: @@ -498,12 +496,12 @@ def mypycify( # Create a compiler object so we can make decisions based on what # compiler is being used. 
typeshed is missing some attributes on the # compiler object so we give it type Any - compiler = ccompiler.new_compiler() # type: Any + compiler: Any = ccompiler.new_compiler() sysconfig.customize_compiler(compiler) build_dir = compiler_options.target_dir - cflags = [] # type: List[str] + cflags: List[str] = [] if compiler.compiler_type == 'unix': cflags += [ '-O{}'.format(opt_level), '-Werror', '-Wno-unused-function', '-Wno-unused-label', diff --git a/mypyc/codegen/cstring.py b/mypyc/codegen/cstring.py index 424a66e6f5ac..3626d2625e84 100644 --- a/mypyc/codegen/cstring.py +++ b/mypyc/codegen/cstring.py @@ -23,7 +23,7 @@ from typing_extensions import Final -CHAR_MAP = ['\\{:03o}'.format(i) for i in range(256)] # type: Final +CHAR_MAP: Final = ["\\{:03o}".format(i) for i in range(256)] # It is safe to use string.printable as it always uses the C locale. for c in string.printable: diff --git a/mypyc/codegen/emit.py b/mypyc/codegen/emit.py index 76dc641edd6c..23e87e2be55a 100644 --- a/mypyc/codegen/emit.py +++ b/mypyc/codegen/emit.py @@ -75,7 +75,7 @@ def __init__(self, self.group_name = group_name self.group_map = group_map or {} # Groups that this group depends on - self.group_deps = set() # type: Set[str] + self.group_deps: Set[str] = set() # The map below is used for generating declarations and # definitions at the top of the C file. The main idea is that they can # A map of a C identifier to whatever the C identifier declares. Currently this is # used for declaring structs and the key corresponds to the name of the struct. # The declaration contains the body of the struct. - self.declarations = OrderedDict() # type: Dict[str, HeaderDeclaration] + self.declarations: Dict[str, HeaderDeclaration] = OrderedDict() self.literals = Literals() @@ -123,7 +123,7 @@ def __init__(self, self.capi_version = capi_version or sys.version_info[:2] self.names = context.names self.value_names = value_names or {} - self.fragments = [] # type: List[str] + self.fragments: List[str] = [] self._indent = 0 # Low-level operations diff --git a/mypyc/codegen/emitclass.py b/mypyc/codegen/emitclass.py index 2cc8d3ca58a3..755f5c0b3e8e 100644 --- a/mypyc/codegen/emitclass.py +++ b/mypyc/codegen/emitclass.py @@ -34,7 +34,7 @@ def wrapper_slot(cl: ClassIR, fn: FuncIR, emitter: Emitter) -> str: SlotGenerator = Callable[[ClassIR, FuncIR, Emitter], str] SlotTable = Mapping[str, Tuple[str, SlotGenerator]] -SLOT_DEFS = { +SLOT_DEFS: SlotTable = { '__init__': ('tp_init', lambda c, t, e: generate_init_for_class(c, t, e)), '__call__': ('tp_call', lambda c, t, e: generate_call_wrapper(c, t, e)), '__str__': ('tp_str', native_slot), @@ -43,20 +43,20 @@ def wrapper_slot(cl: ClassIR, fn: FuncIR, emitter: Emitter) -> str: '__iter__': ('tp_iter', native_slot), '__hash__': ('tp_hash', generate_hash_wrapper), '__get__': ('tp_descr_get', generate_get_wrapper), -} # type: SlotTable +} -AS_MAPPING_SLOT_DEFS = { +AS_MAPPING_SLOT_DEFS: SlotTable = { '__getitem__': ('mp_subscript', generate_dunder_wrapper), '__setitem__': ('mp_ass_subscript', generate_set_del_item_wrapper), '__delitem__': ('mp_ass_subscript', generate_set_del_item_wrapper), '__len__': ('mp_length', generate_len_wrapper), -} # type: SlotTable +} -AS_SEQUENCE_SLOT_DEFS = { +AS_SEQUENCE_SLOT_DEFS: SlotTable = { '__contains__': ('sq_contains', generate_contains_wrapper), -} # type: SlotTable +} -AS_NUMBER_SLOT_DEFS = { +AS_NUMBER_SLOT_DEFS: SlotTable = { '__bool__': ('nb_bool', generate_bool_wrapper), '__neg__': ('nb_negative',
generate_dunder_wrapper), '__invert__': ('nb_invert', generate_dunder_wrapper), @@ -98,13 +98,13 @@ def wrapper_slot(cl: ClassIR, fn: FuncIR, emitter: Emitter) -> str: '__ior__': ('nb_inplace_or', generate_dunder_wrapper), '__ixor__': ('nb_inplace_xor', generate_dunder_wrapper), '__imatmul__': ('nb_inplace_matrix_multiply', generate_dunder_wrapper), -} # type: SlotTable +} -AS_ASYNC_SLOT_DEFS = { +AS_ASYNC_SLOT_DEFS: SlotTable = { '__await__': ('am_await', native_slot), '__aiter__': ('am_aiter', native_slot), '__anext__': ('am_anext', native_slot), -} # type: SlotTable +} SIDE_TABLES = [ ('as_mapping', 'PyMappingMethods', AS_MAPPING_SLOT_DEFS), @@ -140,8 +140,8 @@ def slot_key(attr: str) -> str: def generate_slots(cl: ClassIR, table: SlotTable, emitter: Emitter) -> Dict[str, str]: - fields = OrderedDict() # type: Dict[str, str] - generated = {} # type: Dict[str, str] + fields: Dict[str, str] = OrderedDict() + generated: Dict[str, str] = {} # Sort for determinism on Python 3.5 for name, (slot, generator) in sorted(table.items(), key=lambda x: slot_key(x[0])): method_cls = cl.get_method_and_class(name) @@ -199,7 +199,7 @@ def generate_class(cl: ClassIR, module: str, emitter: Emitter) -> None: methods_name = '{}_methods'.format(name_prefix) vtable_setup_name = '{}_trait_vtable_setup'.format(name_prefix) - fields = OrderedDict() # type: Dict[str, str] + fields: Dict[str, str] = OrderedDict() fields['tp_name'] = '"{}"'.format(name) generate_full = not cl.is_trait and not cl.builtin_base @@ -294,9 +294,9 @@ def emit_line() -> None: emit_line() if cl.allow_interpreted_subclasses: - shadow_vtable_name = generate_vtables( + shadow_vtable_name: Optional[str] = generate_vtables( cl, vtable_setup_name + "_shadow", vtable_name + "_shadow", emitter, shadow=True - ) # type: Optional[str] + ) emit_line() else: shadow_vtable_name = None @@ -352,8 +352,8 @@ def setter_name(cl: ClassIR, attribute: str, names: NameGenerator) -> str: def generate_object_struct(cl: ClassIR, emitter: Emitter) -> None: - seen_attrs = set() # type: Set[Tuple[str, RType]] - lines = [] # type: List[str] + seen_attrs: Set[Tuple[str, RType]] = set() + lines: List[str] = [] lines += ['typedef struct {', 'PyObject_HEAD', 'CPyVTableItem *vtable;'] diff --git a/mypyc/codegen/emitfunc.py b/mypyc/codegen/emitfunc.py index dbdc445f5627..15f17a3b0754 100644 --- a/mypyc/codegen/emitfunc.py +++ b/mypyc/codegen/emitfunc.py @@ -105,7 +105,7 @@ def __init__(self, self.source_path = source_path self.module_name = module_name self.literals = emitter.context.literals - self.next_block = None # type: Optional[BasicBlock] + self.next_block: Optional[BasicBlock] = None def temp_name(self) -> str: return self.emitter.temp_name() @@ -327,11 +327,11 @@ def visit_set_attr(self, op: SetAttr) -> None: '{} = 1;'.format(dest), ) - PREFIX_MAP = { + PREFIX_MAP: Final = { NAMESPACE_STATIC: STATIC_PREFIX, NAMESPACE_TYPE: TYPE_PREFIX, NAMESPACE_MODULE: MODULE_PREFIX, - } # type: Final + } def visit_load_static(self, op: LoadStatic) -> None: dest = self.reg(op) diff --git a/mypyc/codegen/emitmodule.py b/mypyc/codegen/emitmodule.py index 11662cb8666a..c42b084062b6 100644 --- a/mypyc/codegen/emitmodule.py +++ b/mypyc/codegen/emitmodule.py @@ -97,7 +97,7 @@ class MypycPlugin(Plugin): def __init__( self, options: Options, compiler_options: CompilerOptions, groups: Groups) -> None: super().__init__(options) - self.group_map = {} # type: Dict[str, Tuple[Optional[str], List[str]]] + self.group_map: Dict[str, Tuple[Optional[str], List[str]]] = {} for sources, name in groups: 
modules = sorted(source.module for source in sources) for id in modules: @@ -280,7 +280,7 @@ def compile_ir_to_c( # Generate C code for each compilation group. Each group will be # compiled into a separate extension module. - ctext = {} # type: Dict[Optional[str], List[Tuple[str, str]]] + ctext: Dict[Optional[str], List[Tuple[str, str]]] = {} for group_sources, group_name in groups: group_modules = [(source.module, modules[source.module]) for source in group_sources if source.module in modules] @@ -478,7 +478,7 @@ def __init__(self, self.names = names # Initializations of globals to simple values that we can't # do statically because the windows loader is bad. - self.simple_inits = [] # type: List[Tuple[str, str]] + self.simple_inits: List[Tuple[str, str]] = [] self.group_name = group_name self.use_shared_lib = group_name is not None self.compiler_options = compiler_options @@ -904,7 +904,7 @@ def generate_module_def(self, emitter: Emitter, module_name: str, module: Module ' goto fail;') # HACK: Manually instantiate generated classes here - type_structs = [] # type: List[str] + type_structs: List[str] = [] for cl in module.classes: type_struct = emitter.type_struct_name(cl) type_structs.append(type_struct) @@ -961,7 +961,7 @@ def toposort_declarations(self) -> List[HeaderDeclaration]: This runs in O(V + E). """ result = [] - marked_declarations = OrderedDict() # type: Dict[str, MarkedDeclaration] + marked_declarations: Dict[str, MarkedDeclaration] = OrderedDict() for k, v in self.context.declarations.items(): marked_declarations[k] = MarkedDeclaration(v, False) @@ -1050,7 +1050,7 @@ def declare_static_pyobject(self, identifier: str, emitter: Emitter) -> None: def sort_classes(classes: List[Tuple[str, ClassIR]]) -> List[Tuple[str, ClassIR]]: mod_name = {ir: name for name, ir in classes} irs = [ir for _, ir in classes] - deps = OrderedDict() # type: Dict[ClassIR, Set[ClassIR]] + deps: Dict[ClassIR, Set[ClassIR]] = OrderedDict() for ir in irs: if ir not in deps: deps[ir] = set() @@ -1070,7 +1070,7 @@ def toposort(deps: Dict[T, Set[T]]) -> List[T]: This runs in O(V + E). """ result = [] - visited = set() # type: Set[T] + visited: Set[T] = set() def visit(item: T) -> None: if item in visited: @@ -1123,7 +1123,7 @@ def c_array_initializer(components: List[str]) -> str: If the result is long, split it into multiple lines. 
""" res = [] - current = [] # type: List[str] + current: List[str] = [] cur_len = 0 for c in components: if not current or cur_len + 2 + len(c) < 70: diff --git a/mypyc/codegen/emitwrapper.py b/mypyc/codegen/emitwrapper.py index 32284cc3f6ed..1a4811fb2bff 100644 --- a/mypyc/codegen/emitwrapper.py +++ b/mypyc/codegen/emitwrapper.py @@ -155,7 +155,7 @@ def generate_wrapper_function(fn: FuncIR, cleanups = ['CPy_DECREF(obj_{});'.format(arg.name) for arg in groups[ARG_STAR] + groups[ARG_STAR2]] - arg_ptrs = [] # type: List[str] + arg_ptrs: List[str] = [] if groups[ARG_STAR] or groups[ARG_STAR2]: arg_ptrs += ['&obj_{}'.format(groups[ARG_STAR][0].name) if groups[ARG_STAR] else 'NULL'] arg_ptrs += ['&obj_{}'.format(groups[ARG_STAR2][0].name) if groups[ARG_STAR2] else 'NULL'] @@ -234,7 +234,7 @@ def generate_legacy_wrapper_function(fn: FuncIR, cleanups = ['CPy_DECREF(obj_{});'.format(arg.name) for arg in groups[ARG_STAR] + groups[ARG_STAR2]] - arg_ptrs = [] # type: List[str] + arg_ptrs: List[str] = [] if groups[ARG_STAR] or groups[ARG_STAR2]: arg_ptrs += ['&obj_{}'.format(groups[ARG_STAR][0].name) if groups[ARG_STAR] else 'NULL'] arg_ptrs += ['&obj_{}'.format(groups[ARG_STAR2][0].name) if groups[ARG_STAR2] else 'NULL'] @@ -742,8 +742,8 @@ class WrapperGenerator: def __init__(self, cl: ClassIR, emitter: Emitter) -> None: self.cl = cl self.emitter = emitter - self.cleanups = [] # type: List[str] - self.optional_args = [] # type: List[RuntimeArg] + self.cleanups: List[str] = [] + self.optional_args: List[RuntimeArg] = [] self.traceback_code = '' def set_target(self, fn: FuncIR) -> None: diff --git a/mypyc/codegen/literals.py b/mypyc/codegen/literals.py index f890c06a0713..2bbc5e6f585c 100644 --- a/mypyc/codegen/literals.py +++ b/mypyc/codegen/literals.py @@ -9,7 +9,7 @@ # Some literals are singletons and handled specially (None, False and True) -NUM_SINGLETONS = 3 # type: Final +NUM_SINGLETONS: Final = 3 class Literals: @@ -17,12 +17,12 @@ class Literals: def __init__(self) -> None: # Each dict maps value to literal index (0, 1, ...) 
- self.str_literals = {} # type: Dict[str, int] - self.bytes_literals = {} # type: Dict[bytes, int] - self.int_literals = {} # type: Dict[int, int] - self.float_literals = {} # type: Dict[float, int] - self.complex_literals = {} # type: Dict[complex, int] - self.tuple_literals = {} # type: Dict[Tuple[object, ...], int] + self.str_literals: Dict[str, int] = {} + self.bytes_literals: Dict[bytes, int] = {} + self.int_literals: Dict[int, int] = {} + self.float_literals: Dict[float, int] = {} + self.complex_literals: Dict[complex, int] = {} + self.tuple_literals: Dict[Tuple[object, ...], int] = {} def record_literal(self, value: LiteralValue) -> None: """Ensure that the literal value is available in generated code.""" @@ -146,7 +146,7 @@ def _encode_str_values(values: Dict[str, int]) -> List[bytes]: for value, index in values.items(): value_by_index[index] = value result = [] - line = [] # type: List[bytes] + line: List[bytes] = [] line_len = 0 for i in range(len(values)): value = value_by_index[i] @@ -169,7 +169,7 @@ def _encode_bytes_values(values: Dict[bytes, int]) -> List[bytes]: for value, index in values.items(): value_by_index[index] = value result = [] - line = [] # type: List[bytes] + line: List[bytes] = [] line_len = 0 for i in range(len(values)): value = value_by_index[i] @@ -216,7 +216,7 @@ def _encode_int_values(values: Dict[int, int]) -> List[bytes]: for value, index in values.items(): value_by_index[index] = value result = [] - line = [] # type: List[bytes] + line: List[bytes] = [] line_len = 0 for i in range(len(values)): value = value_by_index[i] diff --git a/mypyc/common.py b/mypyc/common.py index f0adfe019669..428a2db63781 100644 --- a/mypyc/common.py +++ b/mypyc/common.py @@ -4,31 +4,31 @@ from typing_extensions import Final -PREFIX = 'CPyPy_' # type: Final # Python wrappers -NATIVE_PREFIX = 'CPyDef_' # type: Final # Native functions etc. -DUNDER_PREFIX = 'CPyDunder_' # type: Final # Wrappers for exposing dunder methods to the API -REG_PREFIX = 'cpy_r_' # type: Final # Registers -STATIC_PREFIX = 'CPyStatic_' # type: Final # Static variables (for literals etc.) -TYPE_PREFIX = 'CPyType_' # type: Final # Type object struct -MODULE_PREFIX = 'CPyModule_' # type: Final # Cached modules -ATTR_PREFIX = '_' # type: Final # Attributes - -ENV_ATTR_NAME = '__mypyc_env__' # type: Final -NEXT_LABEL_ATTR_NAME = '__mypyc_next_label__' # type: Final -TEMP_ATTR_NAME = '__mypyc_temp__' # type: Final -LAMBDA_NAME = '__mypyc_lambda__' # type: Final -PROPSET_PREFIX = '__mypyc_setter__' # type: Final -SELF_NAME = '__mypyc_self__' # type: Final +PREFIX: Final = "CPyPy_" # Python wrappers +NATIVE_PREFIX: Final = "CPyDef_" # Native functions etc. +DUNDER_PREFIX: Final = "CPyDunder_" # Wrappers for exposing dunder methods to the API +REG_PREFIX: Final = "cpy_r_" # Registers +STATIC_PREFIX: Final = "CPyStatic_" # Static variables (for literals etc.) +TYPE_PREFIX: Final = "CPyType_" # Type object struct +MODULE_PREFIX: Final = "CPyModule_" # Cached modules +ATTR_PREFIX: Final = "_" # Attributes + +ENV_ATTR_NAME: Final = "__mypyc_env__" +NEXT_LABEL_ATTR_NAME: Final = "__mypyc_next_label__" +TEMP_ATTR_NAME: Final = "__mypyc_temp__" +LAMBDA_NAME: Final = "__mypyc_lambda__" +PROPSET_PREFIX: Final = "__mypyc_setter__" +SELF_NAME: Final = "__mypyc_self__" # Max short int we accept as a literal is based on 32-bit platforms, # so that we can just always emit the same code. 
-TOP_LEVEL_NAME = '__top_level__' # type: Final # Special function representing module top level +TOP_LEVEL_NAME: Final = "__top_level__" # Special function representing module top level # Maximal number of subclasses for a class to trigger fast path in isinstance() checks. -FAST_ISINSTANCE_MAX_SUBCLASSES = 2 # type: Final +FAST_ISINSTANCE_MAX_SUBCLASSES: Final = 2 -IS_32_BIT_PLATFORM = sys.maxsize < (1 << 31) # type: Final +IS_32_BIT_PLATFORM: Final = sys.maxsize < (1 << 31) PLATFORM_SIZE = 4 if IS_32_BIT_PLATFORM else 8 @@ -37,20 +37,19 @@ # wheels (for an unknown reason). # # Note that we use "in ['darwin']" because of https://github.com/mypyc/mypyc/issues/761. -IS_MIXED_32_64_BIT_BUILD = sys.platform in ['darwin'] and sys.version_info < (3, 6) # type: Final +IS_MIXED_32_64_BIT_BUILD: Final = sys.platform in ["darwin"] and sys.version_info < (3, 6) # Maximum value for a short tagged integer. -MAX_SHORT_INT = sys.maxsize >> 1 # type: Final +MAX_SHORT_INT: Final = sys.maxsize >> 1 # Maximum value for a short tagged integer represented as a C integer literal. # # Note: Assume that the compiled code uses the same bit width as mypyc, except for # Python 3.5 on macOS. -MAX_LITERAL_SHORT_INT = (sys.maxsize >> 1 if not IS_MIXED_32_64_BIT_BUILD - else 2**30 - 1) # type: Final +MAX_LITERAL_SHORT_INT: Final = sys.maxsize >> 1 if not IS_MIXED_32_64_BIT_BUILD else 2 ** 30 - 1 # Runtime C library files -RUNTIME_C_FILES = [ +RUNTIME_C_FILES: Final = [ 'init.c', 'getargs.c', 'getargsfast.c', @@ -63,7 +62,7 @@ 'exc_ops.c', 'misc_ops.c', 'generic_ops.c', -] # type: Final +] JsonDict = Dict[str, Any] diff --git a/mypyc/ir/class_ir.py b/mypyc/ir/class_ir.py index ec8bc09d7aef..506e32402e69 100644 --- a/mypyc/ir/class_ir.py +++ b/mypyc/ir/class_ir.py @@ -105,49 +105,49 @@ def __init__(self, name: str, module_name: str, is_trait: bool = False, # If this is a subclass of some built-in python class, the name # of the object for that class. We currently only support this # in a few ad-hoc cases. - self.builtin_base = None # type: Optional[str] + self.builtin_base: Optional[str] = None # Default empty constructor self.ctor = FuncDecl(name, None, module_name, FuncSignature([], RInstance(self))) - self.attributes = OrderedDict() # type: OrderedDict[str, RType] + self.attributes: OrderedDict[str, RType] = OrderedDict() # Deletable attributes - self.deletable = [] # type: List[str] + self.deletable: List[str] = [] # We populate method_types with the signatures of every method before # we generate methods, and we rely on this information being present. - self.method_decls = OrderedDict() # type: OrderedDict[str, FuncDecl] + self.method_decls: OrderedDict[str, FuncDecl] = OrderedDict() # Map of methods that are actually present in an extension class - self.methods = OrderedDict() # type: OrderedDict[str, FuncIR] + self.methods: OrderedDict[str, FuncIR] = OrderedDict() # Glue methods for boxing/unboxing when a class changes the type # while overriding a method. Maps from (parent class overridden, method) # to IR of glue method. - self.glue_methods = OrderedDict() # type: Dict[Tuple[ClassIR, str], FuncIR] + self.glue_methods: Dict[Tuple[ClassIR, str], FuncIR] = OrderedDict() # Properties are accessed like attributes, but have behavior like method calls. # They don't belong in the methods dictionary, since we don't want to expose them to # Python's method API. But we want to put them into our own vtable as methods, so that # they are properly handled and overridden. The property dictionary values are a tuple
The property dictionary values are a tuple # containing a property getter and an optional property setter. - self.properties = OrderedDict() # type: OrderedDict[str, Tuple[FuncIR, Optional[FuncIR]]] + self.properties: OrderedDict[str, Tuple[FuncIR, Optional[FuncIR]]] = OrderedDict() # We generate these in prepare_class_def so that we have access to them when generating # other methods and properties that rely on these types. - self.property_types = OrderedDict() # type: OrderedDict[str, RType] + self.property_types: OrderedDict[str, RType] = OrderedDict() - self.vtable = None # type: Optional[Dict[str, int]] - self.vtable_entries = [] # type: VTableEntries - self.trait_vtables = OrderedDict() # type: OrderedDict[ClassIR, VTableEntries] + self.vtable: Optional[Dict[str, int]] = None + self.vtable_entries: VTableEntries = [] + self.trait_vtables: OrderedDict[ClassIR, VTableEntries] = OrderedDict() # N.B: base might not actually quite be the direct base. # It is the nearest concrete base, but we allow a trait in between. - self.base = None # type: Optional[ClassIR] - self.traits = [] # type: List[ClassIR] + self.base: Optional[ClassIR] = None + self.traits: List[ClassIR] = [] # Supply a working mro for most generated classes. Real classes will need to # fix it up. - self.mro = [self] # type: List[ClassIR] + self.mro: List[ClassIR] = [self] # base_mro is the chain of concrete (non-trait) ancestors - self.base_mro = [self] # type: List[ClassIR] + self.base_mro: List[ClassIR] = [self] # Direct subclasses of this class (use subclasses() to also incude non-direct ones) # None if separate compilation prevents this from working - self.children = [] # type: Optional[List[ClassIR]] + self.children: Optional[List[ClassIR]] = [] @property def fullname(self) -> str: diff --git a/mypyc/ir/func_ir.py b/mypyc/ir/func_ir.py index 8b4731f88d34..8eddba8e28b0 100644 --- a/mypyc/ir/func_ir.py +++ b/mypyc/ir/func_ir.py @@ -66,9 +66,9 @@ def deserialize(cls, data: JsonDict, ctx: DeserMaps) -> 'FuncSignature': ) -FUNC_NORMAL = 0 # type: Final -FUNC_STATICMETHOD = 1 # type: Final -FUNC_CLASSMETHOD = 2 # type: Final +FUNC_NORMAL: Final = 0 +FUNC_STATICMETHOD: Final = 1 +FUNC_CLASSMETHOD: Final = 2 class FuncDecl: @@ -94,7 +94,7 @@ def __init__(self, self.is_prop_setter = is_prop_setter self.is_prop_getter = is_prop_getter if class_name is None: - self.bound_sig = None # type: Optional[FuncSignature] + self.bound_sig: Optional[FuncSignature] = None else: if kind == FUNC_STATICMETHOD: self.bound_sig = sig @@ -220,7 +220,7 @@ def deserialize(cls, data: JsonDict, ctx: DeserMaps) -> 'FuncIR': ) -INVALID_FUNC_DEF = FuncDef('', [], Block([])) # type: Final +INVALID_FUNC_DEF: Final = FuncDef("", [], Block([])) def all_values(args: List[Register], blocks: List[BasicBlock]) -> List[Value]: @@ -228,7 +228,7 @@ def all_values(args: List[Register], blocks: List[BasicBlock]) -> List[Value]: This omits registers that are only read. 
""" - values = list(args) # type: List[Value] + values: List[Value] = list(args) seen_registers = set(args) for block in blocks: @@ -254,7 +254,7 @@ def all_values(args: List[Register], blocks: List[BasicBlock]) -> List[Value]: def all_values_full(args: List[Register], blocks: List[BasicBlock]) -> List[Value]: """Return set of all values that are initialized or accessed.""" - values = list(args) # type: List[Value] + values: List[Value] = list(args) seen_registers = set(args) for block in blocks: diff --git a/mypyc/ir/ops.py b/mypyc/ir/ops.py index 33e6875aa939..d7dea48cdeb4 100644 --- a/mypyc/ir/ops.py +++ b/mypyc/ir/ops.py @@ -63,8 +63,8 @@ class BasicBlock: def __init__(self, label: int = -1) -> None: self.label = label - self.ops = [] # type: List[Op] - self.error_handler = None # type: Optional[BasicBlock] + self.ops: List[Op] = [] + self.error_handler: Optional[BasicBlock] = None @property def terminated(self) -> bool: @@ -77,13 +77,13 @@ def terminated(self) -> bool: # Never generates an exception -ERR_NEVER = 0 # type: Final +ERR_NEVER: Final = 0 # Generates magic value (c_error_value) based on target RType on exception -ERR_MAGIC = 1 # type: Final +ERR_MAGIC: Final = 1 # Generates false (bool) on exception -ERR_FALSE = 2 # type: Final +ERR_FALSE: Final = 2 # Always fails -ERR_ALWAYS = 3 # type: Final +ERR_ALWAYS: Final = 3 # Hack: using this line number for an op will suppress it in tracebacks NO_TRACEBACK_LINE_NO = -10000 @@ -108,7 +108,7 @@ class Value: # Source line number (-1 for no/unknown line) line = -1 # Type of the value or the result of the operation - type = void_rtype # type: RType + type: RType = void_rtype is_borrowed = False @property @@ -197,7 +197,7 @@ def stolen(self) -> List[Value]: return [] def unique_sources(self) -> List[Value]: - result = [] # type: List[Value] + result: List[Value] = [] for reg in self.sources(): if reg not in result: result.append(reg) @@ -300,8 +300,8 @@ class Branch(ControlOp): # Branch ops never raise an exception. 
error_kind = ERR_NEVER - BOOL = 100 # type: Final - IS_ERROR = 101 # type: Final + BOOL: Final = 100 + IS_ERROR: Final = 101 def __init__(self, value: Value, @@ -323,7 +323,7 @@ def __init__(self, # If True, the condition is negated self.negated = False # If not None, the true label should generate a traceback entry (func name, line number) - self.traceback_entry = None # type: Optional[Tuple[str, int]] + self.traceback_entry: Optional[Tuple[str, int]] = None # If True, the condition is expected to be usually False (for optimization purposes) self.rare = rare @@ -399,7 +399,7 @@ class RegisterOp(Op): error_kind = -1 # Can this raise exception and how is it signalled; one of ERR_* - _type = None # type: Optional[RType] + _type: Optional[RType] = None def __init__(self, line: int) -> None: super().__init__(line) @@ -609,13 +609,13 @@ def accept(self, visitor: 'OpVisitor[T]') -> T: # Default name space for statics, variables -NAMESPACE_STATIC = 'static' # type: Final +NAMESPACE_STATIC: Final = "static" # Static namespace for pointers to native type objects -NAMESPACE_TYPE = 'type' # type: Final +NAMESPACE_TYPE: Final = "type" # Namespace for modules -NAMESPACE_MODULE = 'module' # type: Final +NAMESPACE_MODULE: Final = "module" class LoadStatic(RegisterOp): @@ -809,12 +809,12 @@ class RaiseStandardError(RegisterOp): error_kind = ERR_FALSE - VALUE_ERROR = 'ValueError' # type: Final - ASSERTION_ERROR = 'AssertionError' # type: Final - STOP_ITERATION = 'StopIteration' # type: Final - UNBOUND_LOCAL_ERROR = 'UnboundLocalError' # type: Final - RUNTIME_ERROR = 'RuntimeError' # type: Final - NAME_ERROR = 'NameError' # type: Final + VALUE_ERROR: Final = "ValueError" + ASSERTION_ERROR: Final = "AssertionError" + STOP_ITERATION: Final = "StopIteration" + UNBOUND_LOCAL_ERROR: Final = "UnboundLocalError" + RUNTIME_ERROR: Final = "RuntimeError" + NAME_ERROR: Final = "NameError" def __init__(self, class_name: str, value: Optional[Union[str, Value]], line: int) -> None: super().__init__(line) @@ -950,20 +950,20 @@ class IntOp(RegisterOp): error_kind = ERR_NEVER # Arithmetic ops - ADD = 0 # type: Final - SUB = 1 # type: Final - MUL = 2 # type: Final - DIV = 3 # type: Final - MOD = 4 # type: Final + ADD: Final = 0 + SUB: Final = 1 + MUL: Final = 2 + DIV: Final = 3 + MOD: Final = 4 # Bitwise ops - AND = 200 # type: Final - OR = 201 # type: Final - XOR = 202 # type: Final - LEFT_SHIFT = 203 # type: Final - RIGHT_SHIFT = 204 # type: Final + AND: Final = 200 + OR: Final = 201 + XOR: Final = 202 + LEFT_SHIFT: Final = 203 + RIGHT_SHIFT: Final = 204 - op_str = { + op_str: Final = { ADD: '+', SUB: '-', MUL: '*', @@ -974,7 +974,7 @@ class IntOp(RegisterOp): XOR: '^', LEFT_SHIFT: '<<', RIGHT_SHIFT: '>>', - } # type: Final + } def __init__(self, type: RType, lhs: Value, rhs: Value, op: int, line: int = -1) -> None: super().__init__(line) @@ -1007,18 +1007,18 @@ class ComparisonOp(RegisterOp): error_kind = ERR_NEVER # S for signed and U for unsigned - EQ = 100 # type: Final - NEQ = 101 # type: Final - SLT = 102 # type: Final - SGT = 103 # type: Final - SLE = 104 # type: Final - SGE = 105 # type: Final - ULT = 106 # type: Final - UGT = 107 # type: Final - ULE = 108 # type: Final - UGE = 109 # type: Final - - op_str = { + EQ: Final = 100 + NEQ: Final = 101 + SLT: Final = 102 + SGT: Final = 103 + SLE: Final = 104 + SGE: Final = 105 + ULT: Final = 106 + UGT: Final = 107 + ULE: Final = 108 + UGE: Final = 109 + + op_str: Final = { EQ: '==', NEQ: '!=', SLT: '<', @@ -1029,7 +1029,7 @@ class ComparisonOp(RegisterOp): UGT: '>', ULE: 
diff --git a/mypyc/ir/pprint.py b/mypyc/ir/pprint.py
index ab4155007657..af21328a33e0 100644
--- a/mypyc/ir/pprint.py
+++ b/mypyc/ir/pprint.py
@@ -29,10 +29,10 @@ def __init__(self, names: Dict[Value, str]) -> None:
     def visit_goto(self, op: Goto) -> str:
         return self.format('goto %l', op.label)
 
-    branch_op_names = {
+    branch_op_names: Final = {
         Branch.BOOL: ('%r', 'bool'),
         Branch.IS_ERROR: ('is_error(%r)', ''),
-    }  # type: Final
+    }
 
     def visit_branch(self, op: Branch) -> str:
         fmt, typ = self.branch_op_names[op.op]
@@ -275,7 +275,7 @@ def format_blocks(blocks: List[BasicBlock],
     for i, block in enumerate(blocks):
         block.label = i
 
-    handler_map = {}  # type: Dict[BasicBlock, List[BasicBlock]]
+    handler_map: Dict[BasicBlock, List[BasicBlock]] = {}
     for b in blocks:
         if b.error_handler:
             handler_map.setdefault(b.error_handler, []).append(b)
@@ -333,7 +333,7 @@ def generate_names_for_ir(args: List[Register], blocks: List[BasicBlock]) -> Dic
     Give names such as 'r5' to temp values in IR which are useful when
     pretty-printing or generating C. Ensure generated names are unique.
     """
-    names = {}  # type: Dict[Value, str]
+    names: Dict[Value, str] = {}
     used_names = set()
     temp_index = 0
diff --git a/mypyc/ir/rtypes.py b/mypyc/ir/rtypes.py
index d0148090b1e3..f18a0040820a 100644
--- a/mypyc/ir/rtypes.py
+++ b/mypyc/ir/rtypes.py
@@ -38,18 +38,18 @@ class RType:
     """Abstract base class for runtime types (erased, only concrete; no generics)."""
 
-    name = None  # type: str
+    name: str
     # If True, the type has a special unboxed representation. If False, the
     # type is represented as PyObject *. Even if True, the representation
     # may contain pointers.
     is_unboxed = False
     # This is the C undefined value for this type. It's used for initialization
     # if there's no value yet, and for function return value on error/exception.
-    c_undefined = None  # type: str
+    c_undefined: str
    # If unboxed: does the unboxed version use reference counting?
     is_refcounted = True
     # C type; use Emitter.ctype() to access
-    _ctype = None  # type: str
+    _ctype: str
 
     @abstractmethod
     def accept(self, visitor: 'RTypeVisitor[T]') -> T:
@@ -151,7 +151,7 @@ def __hash__(self) -> int:
 
 
 # Singleton instance of RVoid
-void_rtype = RVoid()  # type: Final
+void_rtype: Final = RVoid()
 
 
 class RPrimitive(RType):
@@ -169,7 +169,7 @@ class RPrimitive(RType):
     """
 
     # Map from primitive names to primitive types and is used by deserialization
-    primitive_map = {}  # type: ClassVar[Dict[str, RPrimitive]]
+    primitive_map: ClassVar[Dict[str, "RPrimitive"]] = {}
 
     def __init__(self,
                  name: str,
@@ -232,12 +232,12 @@ def __hash__(self) -> int:
 # little as possible, as generic ops are typically slow. Other types,
 # including other primitive types and RInstance, are usually much
 # faster.
-object_rprimitive = RPrimitive('builtins.object', is_unboxed=False,
-                               is_refcounted=True)  # type: Final
+object_rprimitive: Final = RPrimitive("builtins.object", is_unboxed=False, is_refcounted=True)
 
 # represents a low level pointer of an object
-object_pointer_rprimitive = RPrimitive('object_ptr', is_unboxed=False,
-                                       is_refcounted=False, ctype='PyObject **')  # type: Final
+object_pointer_rprimitive: Final = RPrimitive(
+    "object_ptr", is_unboxed=False, is_refcounted=False, ctype="PyObject **"
+)
 
 # Arbitrary-precision integer (corresponds to Python 'int'). Small
 # enough values are stored unboxed, while large integers are
@@ -251,25 +251,31 @@ def __hash__(self) -> int:
 #
 # This cannot represent a subclass of int. An instance of a subclass
 # of int is coerced to the corresponding 'int' value.
-int_rprimitive = RPrimitive('builtins.int', is_unboxed=True, is_refcounted=True,
-                            ctype='CPyTagged')  # type: Final
+int_rprimitive: Final = RPrimitive(
+    "builtins.int", is_unboxed=True, is_refcounted=True, ctype="CPyTagged"
+)
 
 # An unboxed integer. The representation is the same as for unboxed
 # int_rprimitive (shifted left by one). These can be used when an
 # integer is known to be small enough to fit size_t (CPyTagged).
-short_int_rprimitive = RPrimitive('short_int', is_unboxed=True, is_refcounted=False,
-                                  ctype='CPyTagged')  # type: Final
+short_int_rprimitive: Final = RPrimitive(
+    "short_int", is_unboxed=True, is_refcounted=False, ctype="CPyTagged"
+)
 
 # Low level integer types (correspond to C integer types)
-int32_rprimitive = RPrimitive('int32', is_unboxed=True, is_refcounted=False,
-                              ctype='int32_t', size=4)  # type: Final
-int64_rprimitive = RPrimitive('int64', is_unboxed=True, is_refcounted=False,
-                              ctype='int64_t', size=8)  # type: Final
-uint32_rprimitive = RPrimitive('uint32', is_unboxed=True, is_refcounted=False,
-                               ctype='uint32_t', size=4)  # type: Final
-uint64_rprimitive = RPrimitive('uint64', is_unboxed=True, is_refcounted=False,
-                               ctype='uint64_t', size=8)  # type: Final
+int32_rprimitive: Final = RPrimitive(
+    "int32", is_unboxed=True, is_refcounted=False, ctype="int32_t", size=4
+)
+int64_rprimitive: Final = RPrimitive(
+    "int64", is_unboxed=True, is_refcounted=False, ctype="int64_t", size=8
+)
+uint32_rprimitive: Final = RPrimitive(
+    "uint32", is_unboxed=True, is_refcounted=False, ctype="uint32_t", size=4
+)
+uint64_rprimitive: Final = RPrimitive(
+    "uint64", is_unboxed=True, is_refcounted=False, ctype="uint64_t", size=8
+)
 
 # The C 'int' type
 c_int_rprimitive = int32_rprimitive
@@ -284,51 +290,50 @@ def __hash__(self) -> int:
                                ctype='int64_t', size=8)
 
 # Low level pointer, represented as integer in C backends
-pointer_rprimitive = RPrimitive('ptr', is_unboxed=True, is_refcounted=False,
-                                ctype='CPyPtr')  # type: Final
+pointer_rprimitive: Final = RPrimitive("ptr", is_unboxed=True, is_refcounted=False, ctype="CPyPtr")
 
 # Floats are represent as 'float' PyObject * values. (In the future
 # we'll likely switch to a more efficient, unboxed representation.)
-float_rprimitive = RPrimitive('builtins.float', is_unboxed=False,
-                              is_refcounted=True)  # type: Final
+float_rprimitive: Final = RPrimitive("builtins.float", is_unboxed=False, is_refcounted=True)
 
 # An unboxed Python bool value. This actually has three possible values
 # (0 -> False, 1 -> True, 2 -> error). If you only need True/False, use
 # bit_rprimitive instead.
-bool_rprimitive = RPrimitive('builtins.bool', is_unboxed=True, is_refcounted=False,
-                             ctype='char', size=1)  # type: Final
+bool_rprimitive: Final = RPrimitive(
+    "builtins.bool", is_unboxed=True, is_refcounted=False, ctype="char", size=1
+)
 
 # A low-level boolean value with two possible values: 0 and 1. Any
 # other value results in undefined behavior. Undefined or error values
 # are not supported.
-bit_rprimitive = RPrimitive('bit', is_unboxed=True, is_refcounted=False,
-                            ctype='char', size=1)  # type: Final
+bit_rprimitive: Final = RPrimitive(
+    "bit", is_unboxed=True, is_refcounted=False, ctype="char", size=1
+)
 
 # The 'None' value. The possible values are 0 -> None and 2 -> error.
-none_rprimitive = RPrimitive('builtins.None', is_unboxed=True, is_refcounted=False,
-                             ctype='char', size=1)  # type: Final
+none_rprimitive: Final = RPrimitive(
+    "builtins.None", is_unboxed=True, is_refcounted=False, ctype="char", size=1
+)
 
 # Python list object (or an instance of a subclass of list).
-list_rprimitive = RPrimitive('builtins.list', is_unboxed=False, is_refcounted=True)  # type: Final
+list_rprimitive: Final = RPrimitive("builtins.list", is_unboxed=False, is_refcounted=True)
 
 # Python dict object (or an instance of a subclass of dict).
-dict_rprimitive = RPrimitive('builtins.dict', is_unboxed=False, is_refcounted=True)  # type: Final
+dict_rprimitive: Final = RPrimitive("builtins.dict", is_unboxed=False, is_refcounted=True)
 
 # Python set object (or an instance of a subclass of set).
-set_rprimitive = RPrimitive('builtins.set', is_unboxed=False, is_refcounted=True)  # type: Final
+set_rprimitive: Final = RPrimitive("builtins.set", is_unboxed=False, is_refcounted=True)
 
 # Python str object. At the C layer, str is referred to as unicode
 # (PyUnicode).
-str_rprimitive = RPrimitive('builtins.str', is_unboxed=False, is_refcounted=True)  # type: Final
+str_rprimitive: Final = RPrimitive("builtins.str", is_unboxed=False, is_refcounted=True)
 
 # Tuple of an arbitrary length (corresponds to Tuple[t, ...], with
 # explicit '...').
-tuple_rprimitive = RPrimitive('builtins.tuple', is_unboxed=False,
-                              is_refcounted=True)  # type: Final
+tuple_rprimitive: Final = RPrimitive("builtins.tuple", is_unboxed=False, is_refcounted=True)
 
 # Python range object.
-range_rprimitive = RPrimitive('builtins.range', is_unboxed=False,
-                              is_refcounted=True)  # type: Final
+range_rprimitive: Final = RPrimitive("builtins.range", is_unboxed=False, is_refcounted=True)
 
 
 def is_tagged(rtype: RType) -> bool:
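Note the shape of the RType change: name = None  # type: str stored a None that contradicted the declared type, while the bare annotation name: str declares the attribute for the type checker without assigning any value at all. A trimmed sketch of the difference (RVoid's body here is illustrative, not the full class):

    class RType:
        # Declared for the type checker only: no class-level value exists,
        # so a subclass that forgets to assign gets AttributeError rather
        # than a silent None.
        name: str
        _ctype: str

    class RVoid(RType):
        name = 'void'
        _ctype = 'void'

    print(RVoid().name)            # -> 'void'
    print(hasattr(RType, 'name'))  # -> False: nothing was assigned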
diff --git a/mypyc/irbuild/builder.py b/mypyc/irbuild/builder.py
index 075fd29e359c..e71d00bfcbcf 100644
--- a/mypyc/irbuild/builder.py
+++ b/mypyc/irbuild/builder.py
@@ -87,20 +87,20 @@ def __init__(self, options: CompilerOptions) -> None:
         self.builder = LowLevelIRBuilder(current_module, mapper, options)
         self.builders = [self.builder]
-        self.symtables = [OrderedDict()]  # type: List[OrderedDict[SymbolNode, SymbolTarget]]
-        self.runtime_args = [[]]  # type: List[List[RuntimeArg]]
-        self.function_name_stack = []  # type: List[str]
-        self.class_ir_stack = []  # type: List[ClassIR]
+        self.symtables: List[OrderedDict[SymbolNode, SymbolTarget]] = [OrderedDict()]
+        self.runtime_args: List[List[RuntimeArg]] = [[]]
+        self.function_name_stack: List[str] = []
+        self.class_ir_stack: List[ClassIR] = []
 
         self.current_module = current_module
         self.mapper = mapper
         self.types = types
         self.graph = graph
-        self.ret_types = []  # type: List[RType]
-        self.functions = []  # type: List[FuncIR]
-        self.classes = []  # type: List[ClassIR]
-        self.final_names = []  # type: List[Tuple[str, RType]]
-        self.callable_class_names = set()  # type: Set[str]
+        self.ret_types: List[RType] = []
+        self.functions: List[FuncIR] = []
+        self.classes: List[ClassIR] = []
+        self.final_names: List[Tuple[str, RType]] = []
+        self.callable_class_names: Set[str] = set()
         self.options = options
 
         # These variables keep track of the number of lambdas, implicit indices, and implicit
@@ -124,17 +124,17 @@ def __init__(self,
         # be generated) is stored in that FuncInfo instance. When the function is done being
         # generated, its corresponding FuncInfo is popped off the stack.
         self.fn_info = FuncInfo(INVALID_FUNC_DEF, '', '')
-        self.fn_infos = [self.fn_info]  # type: List[FuncInfo]
+        self.fn_infos: List[FuncInfo] = [self.fn_info]
 
         # This list operates as a stack of constructs that modify the
         # behavior of nonlocal control flow constructs.
-        self.nonlocal_control = []  # type: List[NonlocalControl]
+        self.nonlocal_control: List[NonlocalControl] = []
 
         self.errors = errors
 
         # Notionally a list of all of the modules imported by the
         # module being compiled, but stored as an OrderedDict so we
         # can also do quick lookups.
-        self.imports = OrderedDict()  # type: OrderedDict[str, None]
+        self.imports: OrderedDict[str, None] = OrderedDict()
 
     # High-level control
 
@@ -486,7 +486,7 @@ def get_assignment_target(self, lvalue: Lvalue,
             return AssignmentTargetAttr(obj, lvalue.name)
         elif isinstance(lvalue, TupleExpr):
             # Multiple assignment a, ..., b = e
-            star_idx = None  # type: Optional[int]
+            star_idx: Optional[int] = None
             lvalues = []
             for idx, item in enumerate(lvalue.items):
                 targ = self.get_assignment_target(item)
@@ -757,7 +757,7 @@ def _analyze_iterable_item_type(self, expr: Expression) -> Type:
         from mypy.join import join_types
         if isinstance(iterable, TupleType):
-            joined = UninhabitedType()  # type: Type
+            joined: Type = UninhabitedType()
             for item in iterable.items:
                 joined = join_types(joined, item)
             return joined
@@ -942,7 +942,7 @@ def flatten_classes(self, arg: Union[RefExpr, TupleExpr]) -> Optional[List[Class
                 return [ir]
             return None
         else:
-            res = []  # type: List[ClassIR]
+            res: List[ClassIR] = []
             for item in arg.items:
                 if isinstance(item, (RefExpr, TupleExpr)):
                     item_part = self.flatten_classes(item)
diff --git a/mypyc/irbuild/classdef.py b/mypyc/irbuild/classdef.py
index 5c478a012c50..5c3da4aaba54 100644
--- a/mypyc/irbuild/classdef.py
+++ b/mypyc/irbuild/classdef.py
@@ -62,8 +62,8 @@ def transform_class_def(builder: IRBuilder, cdef: ClassDef) -> None:
     # apply here, and are handled in a different way.
     if ir.is_ext_class:
         # If the class is not decorated, generate an extension class for it.
-        type_obj = allocate_class(builder, cdef)  # type: Optional[Value]
-        non_ext = None  # type: Optional[NonExtClassInfo]
+        type_obj: Optional[Value] = allocate_class(builder, cdef)
+        non_ext: Optional[NonExtClassInfo] = None
         dataclass_non_ext = dataclass_non_ext_info(builder, cdef)
     else:
         non_ext_bases = populate_non_ext_bases(builder, cdef)
@@ -77,7 +77,7 @@ def transform_class_def(builder: IRBuilder, cdef: ClassDef) -> None:
         dataclass_non_ext = None
         type_obj = None
 
-    attrs_to_cache = []  # type: List[Tuple[Lvalue, RType]]
+    attrs_to_cache: List[Tuple[Lvalue, RType]] = []
 
     for stmt in cdef.defs.body:
         if isinstance(stmt, OverloadedFuncDef) and stmt.is_property:
@@ -202,10 +202,10 @@ def allocate_class(builder: IRBuilder, cdef: ClassDef) -> Value:
 
 # Mypy uses these internally as base classes of TypedDict classes. These are
 # lies and don't have any runtime equivalent.
-MAGIC_TYPED_DICT_CLASSES = (
+MAGIC_TYPED_DICT_CLASSES: Final[Tuple[str, ...]] = (
     'typing._TypedDict',
     'typing_extensions._TypedDict',
-)  # type: Final[Tuple[str, ...]]
+)
 
 
 def populate_non_ext_bases(builder: IRBuilder, cdef: ClassDef) -> Value:
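MAGIC_TYPED_DICT_CLASSES shows the other Final spelling, with an explicit type argument. A bare Final would make mypy infer the fixed-length Tuple[str, str] from the two-element literal; Final[Tuple[str, ...]] keeps the intended variable-length tuple type. For example (PAIR and NAMES are illustrative names, not from the patch):

    from typing import Tuple
    from typing_extensions import Final

    # Inferred as Tuple[str, str] -- the exact two-element shape.
    PAIR: Final = ('typing._TypedDict', 'typing_extensions._TypedDict')

    # The explicit argument keeps the wider "any number of strings" type.
    NAMES: Final[Tuple[str, ...]] = ('typing._TypedDict', 'typing_extensions._TypedDict')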
diff --git a/mypyc/irbuild/context.py b/mypyc/irbuild/context.py
index 9424913591c4..c76d5535950f 100644
--- a/mypyc/irbuild/context.py
+++ b/mypyc/irbuild/context.py
@@ -29,18 +29,18 @@ def __init__(self,
         self.ns = namespace
         # Callable classes implement the '__call__' method, and are used to represent functions
         # that are nested inside of other functions.
-        self._callable_class = None  # type: Optional[ImplicitClass]
+        self._callable_class: Optional[ImplicitClass] = None
         # Environment classes are ClassIR instances that contain attributes representing the
         # variables in the environment of the function they correspond to. Environment classes are
         # generated for functions that contain nested functions.
-        self._env_class = None  # type: Optional[ClassIR]
+        self._env_class: Optional[ClassIR] = None
         # Generator classes implement the '__next__' method, and are used to represent generators
         # returned by generator functions.
-        self._generator_class = None  # type: Optional[GeneratorClass]
+        self._generator_class: Optional[GeneratorClass] = None
         # Environment class registers are the local registers associated with instances of an
         # environment class, used for getting and setting attributes. curr_env_reg is the register
         # associated with the current environment.
-        self._curr_env_reg = None  # type: Optional[Value]
+        self._curr_env_reg: Optional[Value] = None
         # These are flags denoting whether a given function is nested, contains a nested function,
         # is decorated, or is within a non-extension class.
         self.is_nested = is_nested
@@ -107,13 +107,13 @@ def __init__(self, ir: ClassIR) -> None:
         # The ClassIR instance associated with this class.
         self.ir = ir
         # The register associated with the 'self' instance for this generator class.
-        self._self_reg = None  # type: Optional[Value]
+        self._self_reg: Optional[Value] = None
         # Environment class registers are the local registers associated with instances of an
         # environment class, used for getting and setting attributes. curr_env_reg is the register
         # associated with the current environment. prev_env_reg is the self.__mypyc_env__ field
         # associated with the previous environment.
-        self._curr_env_reg = None  # type: Optional[Value]
-        self._prev_env_reg = None  # type: Optional[Value]
+        self._curr_env_reg: Optional[Value] = None
+        self._prev_env_reg: Optional[Value] = None
 
     @property
     def self_reg(self) -> Value:
@@ -150,20 +150,20 @@ def __init__(self, ir: ClassIR) -> None:
         super().__init__(ir)
         # This register holds the label number that the '__next__' function should go to the next
         # time it is called.
-        self._next_label_reg = None  # type: Optional[Value]
-        self._next_label_target = None  # type: Optional[AssignmentTarget]
+        self._next_label_reg: Optional[Value] = None
+        self._next_label_target: Optional[AssignmentTarget] = None
         # These registers hold the error values for the generator object for the case that the
         # 'throw' function is called.
-        self.exc_regs = None  # type: Optional[Tuple[Value, Value, Value]]
+        self.exc_regs: Optional[Tuple[Value, Value, Value]] = None
         # Holds the arg passed to send
-        self.send_arg_reg = None  # type: Optional[Value]
+        self.send_arg_reg: Optional[Value] = None
         # The switch block is used to decide which instruction to go using the value held in the
         # next-label register.
         self.switch_block = BasicBlock()
-        self.continuation_blocks = []  # type: List[BasicBlock]
+        self.continuation_blocks: List[BasicBlock] = []
 
     @property
     def next_label_reg(self) -> Value:
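The Optional[...] = None attributes here pair a None default with properties that assert before returning, so callers never have to handle the Optional themselves. A trimmed sketch of the pattern (the property body matches the style used in this file, not its exact code):

    from typing import Optional

    class ImplicitClass:
        ...

    class FuncInfo:
        def __init__(self) -> None:
            # None until the callable class has been set up.
            self._callable_class: Optional[ImplicitClass] = None

        @property
        def callable_class(self) -> ImplicitClass:
            # The assert narrows Optional[ImplicitClass] to ImplicitClass,
            # so the property's return type needs no Optional.
            assert self._callable_class is not None
            return self._callable_class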
diff --git a/mypyc/irbuild/expression.py b/mypyc/irbuild/expression.py
index 645b2eeb0e0d..9e835f5a37e6 100644
--- a/mypyc/irbuild/expression.py
+++ b/mypyc/irbuild/expression.py
@@ -147,7 +147,7 @@ def transform_super_expr(builder: IRBuilder, o: SuperExpr) -> Value:
     ir = builder.mapper.type_to_ir[o.info]
     iter_env = iter(builder.builder.args)
     # Grab first argument
-    vself = next(iter_env)  # type: Value
+    vself: Value = next(iter_env)
     if builder.fn_info.is_generator:
         # grab sixth argument (see comment in translate_super_method_call)
         self_targ = list(builder.symtables[-1].values())[6]
@@ -313,7 +313,7 @@ def translate_super_method_call(builder: IRBuilder, expr: CallExpr, callee: Supe
 
     if decl.kind != FUNC_STATICMETHOD:
         # Grab first argument
-        vself = builder.self()  # type: Value
+        vself: Value = builder.self()
         if decl.kind == FUNC_CLASSMETHOD:
             vself = builder.call_c(type_op, [vself], expr.line)
         elif builder.fn_info.is_generator:
@@ -460,7 +460,7 @@ def transform_comparison_expr(builder: IRBuilder, e: ComparisonExpr) -> Value:
             builder.types[expr] = bool_type
             exprs.append(expr)
 
-        or_expr = exprs.pop(0)  # type: Expression
+        or_expr: Expression = exprs.pop(0)
         for expr in exprs:
             or_expr = OpExpr(bin_op, or_expr, expr)
             builder.types[or_expr] = bool_type
@@ -637,7 +637,7 @@ def _visit_display(builder: IRBuilder,
         else:
             accepted_items.append((False, builder.accept(item)))
 
-    result = None  # type: Union[Value, None]
+    result: Union[Value, None] = None
     initial_items = []
     for starred, value in accepted_items:
         if result is None and not starred and is_list:
diff --git a/mypyc/irbuild/for_helpers.py b/mypyc/irbuild/for_helpers.py
index 2e44ec3afaf5..2bf5feefeb0d 100644
--- a/mypyc/irbuild/for_helpers.py
+++ b/mypyc/irbuild/for_helpers.py
@@ -318,7 +318,7 @@ def make_for_loop_generator(builder: IRBuilder,
             # seem worth the hassle of supporting dynamically determining which
             # direction of comparison to do.
             if len(expr.args) == 1:
-                start_reg = Integer(0)  # type: Value
+                start_reg: Value = Integer(0)
                 end_reg = builder.accept(expr.args[0])
             else:
                 start_reg = builder.accept(expr.args[0])
@@ -377,7 +377,7 @@ def make_for_loop_generator(builder: IRBuilder,
         if (is_dict_rprimitive(rtype)
                 and expr.callee.name in ('keys', 'values', 'items')):
             expr_reg = builder.accept(expr.callee.expr)
-            for_dict_type = None  # type: Optional[Type[ForGenerator]]
+            for_dict_type: Optional[Type[ForGenerator]] = None
             if expr.callee.name == 'keys':
                 target_type = builder.get_dict_key_type(expr.callee.expr)
                 for_dict_type = ForDictionaryKeys
@@ -530,7 +530,7 @@ def init(self, expr_reg: Value, target_type: RType, reverse: bool) -> None:
         # environment class.
         self.expr_target = builder.maybe_spill(expr_reg)
         if not reverse:
-            index_reg = Integer(0)  # type: Value
+            index_reg: Value = Integer(0)
         else:
             index_reg = builder.binary_op(self.load_len(self.expr_target),
                                           Integer(1), '-', self.line)
@@ -600,8 +600,9 @@ class ForDictionaryCommon(ForGenerator):
     since they may override some iteration methods in subtly incompatible manner.
     The fallback logic is implemented in CPy.h via dynamic type check.
     """
-    dict_next_op = None  # type: ClassVar[CFunctionDescription]
-    dict_iter_op = None  # type: ClassVar[CFunctionDescription]
+
+    dict_next_op: ClassVar[CFunctionDescription]
+    dict_iter_op: ClassVar[CFunctionDescription]
 
     def need_cleanup(self) -> bool:
         # Technically, a dict subclass can raise an unrelated exception
@@ -732,8 +733,9 @@ def init(self, start_reg: Value, end_reg: Value, step: int) -> None:
         builder.assign(index_reg, start_reg, -1)
         self.index_reg = builder.maybe_spill_assignable(index_reg)
         # Initialize loop index to 0. Assert that the index target is assignable.
-        self.index_target = builder.get_assignment_target(
-            self.index)  # type: Union[Register, AssignmentTarget]
+        self.index_target: Union[Register, AssignmentTarget] = builder.get_assignment_target(
+            self.index
+        )
         builder.assign(self.index_target, builder.read(self.index_reg, self.line), self.line)
 
     def gen_condition(self) -> None:
@@ -773,8 +775,9 @@ def init(self) -> None:
         # initialize this register along with the loop index to 0.
         zero = Integer(0)
         self.index_reg = builder.maybe_spill_assignable(zero)
-        self.index_target = builder.get_assignment_target(
-            self.index)  # type: Union[Register, AssignmentTarget]
+        self.index_target: Union[Register, AssignmentTarget] = builder.get_assignment_target(
+            self.index
+        )
         builder.assign(self.index_target, zero, self.line)
 
     def gen_step(self) -> None:
@@ -846,7 +849,7 @@ def init(self, indexes: List[Lvalue], exprs: List[Expression]) -> None:
         # Condition check will require multiple basic blocks, since there will be
         # multiple conditions to check.
         self.cond_blocks = [BasicBlock() for _ in range(len(indexes) - 1)] + [self.body_block]
-        self.gens = []  # type: List[ForGenerator]
+        self.gens: List[ForGenerator] = []
         for index, expr, next_block in zip(indexes, exprs, self.cond_blocks):
             gen = make_for_loop_generator(
                 self.builder,
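Two patterns above are worth calling out. start_reg: Value = Integer(0) annotates only the first assignment: without it, mypy would infer the narrower type of the first branch and reject the other branch's assignment. And dict_next_op: ClassVar[CFunctionDescription] declares a class variable that concrete subclasses must fill in, with no placeholder None. A sketch of the branch case, using stand-ins for the IR classes:

    class Value: ...
    class Integer(Value): ...
    class Register(Value): ...

    def pick(fast_path: bool) -> Value:
        if fast_path:
            # Annotated as the base class; a bare `start_reg = Integer()`
            # would be inferred as Integer, making the else branch an error.
            start_reg: Value = Integer()
        else:
            start_reg = Register()
        return start_reg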
""" - dict_next_op = None # type: ClassVar[CFunctionDescription] - dict_iter_op = None # type: ClassVar[CFunctionDescription] + + dict_next_op: ClassVar[CFunctionDescription] + dict_iter_op: ClassVar[CFunctionDescription] def need_cleanup(self) -> bool: # Technically, a dict subclass can raise an unrelated exception @@ -732,8 +733,9 @@ def init(self, start_reg: Value, end_reg: Value, step: int) -> None: builder.assign(index_reg, start_reg, -1) self.index_reg = builder.maybe_spill_assignable(index_reg) # Initialize loop index to 0. Assert that the index target is assignable. - self.index_target = builder.get_assignment_target( - self.index) # type: Union[Register, AssignmentTarget] + self.index_target: Union[Register, AssignmentTarget] = builder.get_assignment_target( + self.index + ) builder.assign(self.index_target, builder.read(self.index_reg, self.line), self.line) def gen_condition(self) -> None: @@ -773,8 +775,9 @@ def init(self) -> None: # initialize this register along with the loop index to 0. zero = Integer(0) self.index_reg = builder.maybe_spill_assignable(zero) - self.index_target = builder.get_assignment_target( - self.index) # type: Union[Register, AssignmentTarget] + self.index_target: Union[Register, AssignmentTarget] = builder.get_assignment_target( + self.index + ) builder.assign(self.index_target, zero, self.line) def gen_step(self) -> None: @@ -846,7 +849,7 @@ def init(self, indexes: List[Lvalue], exprs: List[Expression]) -> None: # Condition check will require multiple basic blocks, since there will be # multiple conditions to check. self.cond_blocks = [BasicBlock() for _ in range(len(indexes) - 1)] + [self.body_block] - self.gens = [] # type: List[ForGenerator] + self.gens: List[ForGenerator] = [] for index, expr, next_block in zip(indexes, exprs, self.cond_blocks): gen = make_for_loop_generator( self.builder, diff --git a/mypyc/irbuild/function.py b/mypyc/irbuild/function.py index dcfc71c9cc28..c849b526f2f3 100644 --- a/mypyc/irbuild/function.py +++ b/mypyc/irbuild/function.py @@ -198,7 +198,7 @@ def c() -> None: # TODO: do something about abstract methods. - func_reg = None # type: Optional[Value] + func_reg: Optional[Value] = None # We treat lambdas as always being nested because we always generate # a class for lambdas, no matter where they are. (It would probably also @@ -255,7 +255,7 @@ def c() -> None: # them even if they are declared after the nested function's definition. # Note that this is done before visiting the body of this function. - env_for_func = builder.fn_info # type: Union[FuncInfo, ImplicitClass] + env_for_func: Union[FuncInfo, ImplicitClass] = builder.fn_info if builder.fn_info.is_generator: env_for_func = builder.fn_info.generator_class elif builder.fn_info.is_nested or builder.fn_info.in_non_ext: @@ -319,7 +319,7 @@ def gen_func_ir(builder: IRBuilder, also returns the register containing the instance of the corresponding callable class. 
""" - func_reg = None # type: Optional[Value] + func_reg: Optional[Value] = None if fn_info.is_nested or fn_info.in_non_ext: func_ir = add_call_to_callable_class(builder, args, blocks, sig, fn_info) add_get_to_callable_class(builder, fn_info) diff --git a/mypyc/irbuild/ll_builder.py b/mypyc/irbuild/ll_builder.py index 27fdcbd10728..29da0861507f 100644 --- a/mypyc/irbuild/ll_builder.py +++ b/mypyc/irbuild/ll_builder.py @@ -77,7 +77,7 @@ # From CPython -PY_VECTORCALL_ARGUMENTS_OFFSET = 1 << (PLATFORM_SIZE * 8 - 1) # type: Final +PY_VECTORCALL_ARGUMENTS_OFFSET: Final = 1 << (PLATFORM_SIZE * 8 - 1) class LowLevelIRBuilder: @@ -90,10 +90,10 @@ def __init__( self.current_module = current_module self.mapper = mapper self.options = options - self.args = [] # type: List[Register] - self.blocks = [] # type: List[BasicBlock] + self.args: List[Register] = [] + self.blocks: List[BasicBlock] = [] # Stack of except handler entry blocks - self.error_handlers = [None] # type: List[Optional[BasicBlock]] + self.error_handlers: List[Optional[BasicBlock]] = [None] # Basic operations @@ -277,7 +277,7 @@ def py_call(self, assert arg_names is not None pos_arg_values = [] - kw_arg_key_value_pairs = [] # type: List[DictEntry] + kw_arg_key_value_pairs: List[DictEntry] = [] star_arg_values = [] for value, kind, name in zip(arg_values, arg_kinds, arg_names): if kind == ARG_POS: @@ -879,9 +879,9 @@ def unary_op(self, return target def make_dict(self, key_value_pairs: Sequence[DictEntry], line: int) -> Value: - result = None # type: Union[Value, None] - keys = [] # type: List[Value] - values = [] # type: List[Value] + result: Union[Value, None] = None + keys: List[Value] = [] + values: List[Value] = [] for key, value in key_value_pairs: if key is not None: # key:value @@ -1094,7 +1094,7 @@ def matching_call_c(self, args: List[Value], line: int, result_type: Optional[RType] = None) -> Optional[Value]: - matching = None # type: Optional[CFunctionDescription] + matching: Optional[CFunctionDescription] = None for desc in candidates: if len(desc.arg_types) != len(args): continue @@ -1172,7 +1172,7 @@ def builtin_len(self, val: Value, line: int, use_pyssize_t: bool = False) -> Val return self.call_c(generic_len_op, [val], line) def new_tuple(self, items: List[Value], line: int) -> Value: - size = Integer(len(items), c_pyssize_t_rprimitive) # type: Value + size: Value = Integer(len(items), c_pyssize_t_rprimitive) return self.call_c(new_tuple_op, [size] + items, line) def new_tuple_with_length(self, length: Value, line: int) -> Value: @@ -1338,7 +1338,7 @@ def _create_dict(self, # keys and values should have the same number of items size = len(keys) if size > 0: - size_value = Integer(size, c_pyssize_t_rprimitive) # type: Value + size_value: Value = Integer(size, c_pyssize_t_rprimitive) # merge keys and values items = [i for t in list(zip(keys, values)) for i in t] return self.call_c(dict_build_op, [size_value] + items, line) diff --git a/mypyc/irbuild/main.py b/mypyc/irbuild/main.py index c11ff967f357..0c42c83d6478 100644 --- a/mypyc/irbuild/main.py +++ b/mypyc/irbuild/main.py @@ -59,7 +59,7 @@ def build_ir(modules: List[MypyFile], build_type_map(mapper, modules, graph, types, options, errors) - result = OrderedDict() # type: ModuleIRs + result: ModuleIRs = OrderedDict() # Generate IR for all modules. 
diff --git a/mypyc/irbuild/specialize.py b/mypyc/irbuild/specialize.py
index cf524fe96fd8..10f3b4555080 100644
--- a/mypyc/irbuild/specialize.py
+++ b/mypyc/irbuild/specialize.py
@@ -52,7 +52,7 @@
 #
 # Specializers can operate on methods as well, and are keyed on the
 # name and RType in that case.
-specializers = {}  # type: Dict[Tuple[str, Optional[RType]], List[Specializer]]
+specializers: Dict[Tuple[str, Optional[RType]], List[Specializer]] = {}
 
 
 def specialize_function(
diff --git a/mypyc/irbuild/targets.py b/mypyc/irbuild/targets.py
index 67369126af9d..f6346d4fa7e7 100644
--- a/mypyc/irbuild/targets.py
+++ b/mypyc/irbuild/targets.py
@@ -7,7 +7,7 @@
 class AssignmentTarget:
     """Abstract base class for assignment targets during IR building."""
 
-    type = object_rprimitive  # type: RType
+    type: RType = object_rprimitive
 
 
 class AssignmentTargetRegister(AssignmentTarget):
@@ -40,7 +40,7 @@ def __init__(self, obj: Value, attr: str) -> None:
         self.attr = attr
         if isinstance(obj.type, RInstance) and obj.type.class_ir.has_attr(attr):
             # Native attribute reference
-            self.obj_type = obj.type  # type: RType
+            self.obj_type: RType = obj.type
             self.type = obj.type.attr_type(attr)
         else:
             # Python attribute reference
diff --git a/mypyc/irbuild/util.py b/mypyc/irbuild/util.py
index 93bc4e5d3f00..eb2d42606674 100644
--- a/mypyc/irbuild/util.py
+++ b/mypyc/irbuild/util.py
@@ -60,7 +60,7 @@ def get_mypyc_attr_call(d: Expression) -> Optional[CallExpr]:
 
 def get_mypyc_attrs(stmt: Union[ClassDef, Decorator]) -> Dict[str, Any]:
     """Collect all the mypyc_attr attributes on a class definition or a function."""
-    attrs = {}  # type: Dict[str, Any]
+    attrs: Dict[str, Any] = {}
     for dec in stmt.decorators:
         d = get_mypyc_attr_call(dec)
         if d:
diff --git a/mypyc/irbuild/visitor.py b/mypyc/irbuild/visitor.py
index 67b8f04a7dc2..1a6a84809707 100644
--- a/mypyc/irbuild/visitor.py
+++ b/mypyc/irbuild/visitor.py
@@ -95,7 +95,7 @@ class IRBuilderVisitor(IRVisitor):
     # This gets passed to all the implementations and contains all the
     # state and many helpers. The attribute is initialized outside
     # this class since this class and IRBuilder form a reference loop.
-    builder = None  # type: IRBuilder
+    builder: IRBuilder
 
     def visit_mypy_file(self, mypyfile: MypyFile) -> None:
         assert False, "use transform_mypy_file instead"
diff --git a/mypyc/namegen.py b/mypyc/namegen.py
index a6c0c24dd85c..acf901caf93c 100644
--- a/mypyc/namegen.py
+++ b/mypyc/namegen.py
@@ -43,11 +43,11 @@ def __init__(self, groups: Iterable[List[str]]) -> None:
         names are supported for generated names, but uncompiled modules
         will use long names.
         """
-        self.module_map = {}  # type: Dict[str, str]
+        self.module_map: Dict[str, str] = {}
         for names in groups:
             self.module_map.update(make_module_translation_map(names))
-        self.translations = {}  # type: Dict[Tuple[str, str], str]
-        self.used_names = set()  # type: Set[str]
+        self.translations: Dict[Tuple[str, str], str] = {}
+        self.used_names: Set[str] = set()
 
     def private_name(self, module: str, partial_name: Optional[str] = None) -> str:
         """Return a C name usable for a static definition.
@@ -90,7 +90,7 @@ def exported_name(fullname: str) -> str:
 
 
 def make_module_translation_map(names: List[str]) -> Dict[str, str]:
-    num_instances = {}  # type: Dict[str, int]
+    num_instances: Dict[str, int] = {}
     for name in names:
         for suffix in candidate_suffixes(name):
             num_instances[suffix] = num_instances.get(suffix, 0) + 1
diff --git a/mypyc/primitives/int_ops.py b/mypyc/primitives/int_ops.py
index ce10b8b9c66e..e06831cf4a78 100644
--- a/mypyc/primitives/int_ops.py
+++ b/mypyc/primitives/int_ops.py
@@ -155,11 +155,11 @@ def int_unary_op(name: str, c_function_name: str) -> CFunctionDescription:
 
 # Provide mapping from textual op to short int's op variant and boxed int's description.
 # Note that these are not complete implementations and require extra IR.
-int_comparison_op_mapping = {
+int_comparison_op_mapping: Dict[str, IntComparisonOpDescription] = {
     '==': IntComparisonOpDescription(ComparisonOp.EQ, int_equal_, False, False),
     '!=': IntComparisonOpDescription(ComparisonOp.NEQ, int_equal_, True, False),
     '<': IntComparisonOpDescription(ComparisonOp.SLT, int_less_than_, False, False),
     '<=': IntComparisonOpDescription(ComparisonOp.SLE, int_less_than_, True, True),
     '>': IntComparisonOpDescription(ComparisonOp.SGT, int_less_than_, False, True),
     '>=': IntComparisonOpDescription(ComparisonOp.SGE, int_less_than_, True, False),
-}  # type: Dict[str, IntComparisonOpDescription]
+}
diff --git a/mypyc/primitives/registry.py b/mypyc/primitives/registry.py
index cefbda71cfdf..dbfdd2fe0bc6 100644
--- a/mypyc/primitives/registry.py
+++ b/mypyc/primitives/registry.py
@@ -43,7 +43,7 @@
 
 # Error kind for functions that return negative integer on exception. This
 # is only used for primitives. We translate it away during IR building.
-ERR_NEG_INT = 10  # type: Final
+ERR_NEG_INT: Final = 10
 
 
 CFunctionDescription = NamedTuple(
@@ -68,18 +68,18 @@
 
 
 # CallC op for method call(such as 'str.join')
-method_call_ops = {}  # type: Dict[str, List[CFunctionDescription]]
+method_call_ops: Dict[str, List[CFunctionDescription]] = {}
 
 # CallC op for top level function call(such as 'builtins.list')
-function_ops = {}  # type: Dict[str, List[CFunctionDescription]]
+function_ops: Dict[str, List[CFunctionDescription]] = {}
 
 # CallC op for binary ops
-binary_ops = {}  # type: Dict[str, List[CFunctionDescription]]
+binary_ops: Dict[str, List[CFunctionDescription]] = {}
 
 # CallC op for unary ops
-unary_ops = {}  # type: Dict[str, List[CFunctionDescription]]
+unary_ops: Dict[str, List[CFunctionDescription]] = {}
 
-builtin_names = {}  # type: Dict[str, Tuple[RType, str]]
+builtin_names: Dict[str, Tuple[RType, str]] = {}
 
 
 def method_op(name: str,
diff --git a/mypyc/primitives/str_ops.py b/mypyc/primitives/str_ops.py
index 4e56d885528b..b87710f1a377 100644
--- a/mypyc/primitives/str_ops.py
+++ b/mypyc/primitives/str_ops.py
@@ -71,12 +71,13 @@
 )
 
 # str.split(...)
-str_split_types = [str_rprimitive, str_rprimitive, int_rprimitive]  # type: List[RType]
-str_split_functions = ['PyUnicode_Split', 'PyUnicode_Split', 'CPyStr_Split']
-str_split_constants = [[(0, pointer_rprimitive), (-1, c_int_rprimitive)],
-                       [(-1, c_int_rprimitive)],
-                       []] \
-    # type: List[List[Tuple[int, RType]]]
+str_split_types: List[RType] = [str_rprimitive, str_rprimitive, int_rprimitive]
+str_split_functions = ["PyUnicode_Split", "PyUnicode_Split", "CPyStr_Split"]
+str_split_constants: List[List[Tuple[int, RType]]] = [
+    [(0, pointer_rprimitive), (-1, c_int_rprimitive)],
+    [(-1, c_int_rprimitive)],
+    [],
+]
 for i in range(len(str_split_types)):
     method_op(
         name='split',
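The registry conversions move each mapping's type from a trailing comment onto the name itself, which also means every later registration is checked against the declared value type at the point of the call. A simplified sketch (the real method_op and CFunctionDescription take many more fields; this trimmed version is only illustrative):

    from typing import Dict, List, NamedTuple

    class CFunctionDescription(NamedTuple):
        name: str
        c_function_name: str

    # Annotating the empty registry up front: every setdefault/append
    # below is checked against List[CFunctionDescription].
    method_call_ops: Dict[str, List[CFunctionDescription]] = {}

    def method_op(name: str, c_function_name: str) -> None:
        desc = CFunctionDescription(name, c_function_name)
        method_call_ops.setdefault(name, []).append(desc)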
diff --git a/mypyc/test-data/fixtures/ir.py b/mypyc/test-data/fixtures/ir.py
index 91c17e727cf7..45e43da5828d 100644
--- a/mypyc/test-data/fixtures/ir.py
+++ b/mypyc/test-data/fixtures/ir.py
@@ -275,4 +275,4 @@ def exit() -> None: ...
 class classmethod: pass
 class staticmethod: pass
 
-NotImplemented = ...  # type: Any
+NotImplemented: Any = ...
diff --git a/mypyc/test-data/fixtures/testutil.py b/mypyc/test-data/fixtures/testutil.py
index ad53e474c8bf..8f36e8d13521 100644
--- a/mypyc/test-data/fixtures/testutil.py
+++ b/mypyc/test-data/fixtures/testutil.py
@@ -20,7 +20,7 @@ def assertRaises(typ: type, msg: str = '') -> Iterator[None]:
 def run_generator(gen: Generator[T, V, U],
                   inputs: Optional[List[V]] = None,
                   p: bool = False) -> Tuple[Sequence[T], Union[U, str]]:
-    res = []  # type: List[T]
+    res: List[T] = []
     i = -1
     while True:
         try:
diff --git a/mypyc/test/test_analysis.py b/mypyc/test/test_analysis.py
index 82317ef34765..b71983705b65 100644
--- a/mypyc/test/test_analysis.py
+++ b/mypyc/test/test_analysis.py
@@ -45,7 +45,7 @@ def run_case(self, testcase: DataDrivenTestCase) -> None:
                 exceptions.insert_exception_handling(fn)
                 actual.extend(format_func(fn))
                 cfg = dataflow.get_cfg(fn.blocks)
-                args = set(fn.arg_regs)  # type: Set[Value]
+                args: Set[Value] = set(fn.arg_regs)
 
                 name = testcase.name
                 if name.endswith('_MaybeDefined'):
                     # Forward, maybe
diff --git a/mypyc/test/test_emit.py b/mypyc/test/test_emit.py
index 45227fd0524e..1721a6876984 100644
--- a/mypyc/test/test_emit.py
+++ b/mypyc/test/test_emit.py
@@ -17,7 +17,7 @@ def test_label(self) -> None:
         assert emitter.label(BasicBlock(4)) == 'CPyL4'
 
     def test_reg(self) -> None:
-        names = {self.n: 'n'}  # type: Dict[Value, str]
+        names: Dict[Value, str] = {self.n: "n"}
         emitter = Emitter(self.context, names)
         assert emitter.reg(self.n) == 'cpy_r_n'
diff --git a/mypyc/test/test_emitfunc.py b/mypyc/test/test_emitfunc.py
index 81974789daa3..139923aa57c6 100644
--- a/mypyc/test/test_emitfunc.py
+++ b/mypyc/test/test_emitfunc.py
@@ -37,7 +37,7 @@ class TestFunctionEmitterVisitor(unittest.TestCase):
     """Test generation of fragments of C from individual IR ops."""
 
     def setUp(self) -> None:
-        self.registers = []  # type: List[Register]
+        self.registers: List[Register] = []
 
         def add_local(name: str, rtype: RType) -> Register:
             reg = Register(rtype, name)
diff --git a/mypyc/test/test_external.py b/mypyc/test/test_external.py
index f7f5463b9e91..5e8e5b54dd8a 100644
--- a/mypyc/test/test_external.py
+++ b/mypyc/test/test_external.py
@@ -19,7 +19,7 @@ def test_c_unit_test(self) -> None:
         """Run C unit tests in a subprocess."""
         # Build Google Test, the C++ framework we use for testing C code.
         # The source code for Google Test is copied to this repository.
-        cppflags = []  # type: List[str]
+        cppflags: List[str] = []
         env = os.environ.copy()
         if sys.platform == 'darwin':
             cppflags += ['-mmacosx-version-min=10.10', '-stdlib=libc++']
diff --git a/mypyc/test/testutil.py b/mypyc/test/testutil.py
index 8e5d830b8f6a..dc359bee5835 100644
--- a/mypyc/test/testutil.py
+++ b/mypyc/test/testutil.py
@@ -30,7 +30,7 @@ class MypycDataSuite(DataSuite):
     # Need to list no files, since this will be picked up as a suite of tests
-    files = []  # type: List[str]
+    files: List[str] = []
     data_prefix = test_data_prefix
diff --git a/mypyc/transform/exceptions.py b/mypyc/transform/exceptions.py
index 6501286a55ae..52b25aceffe3 100644
--- a/mypyc/transform/exceptions.py
+++ b/mypyc/transform/exceptions.py
@@ -45,7 +45,7 @@ def add_handler_block(ir: FuncIR) -> BasicBlock:
 def split_blocks_at_errors(blocks: List[BasicBlock],
                            default_error_handler: BasicBlock,
                            func_name: Optional[str]) -> List[BasicBlock]:
-    new_blocks = []  # type: List[BasicBlock]
+    new_blocks: List[BasicBlock] = []
 
     # First split blocks on ops that may raise.
     for block in blocks:
@@ -60,7 +60,7 @@ def split_blocks_at_errors(blocks: List[BasicBlock],
         block.error_handler = None
 
         for op in ops:
-            target = op  # type: Value
+            target: Value = op
             cur_block.ops.append(op)
             if isinstance(op, RegisterOp) and op.error_kind != ERR_NEVER:
                 # Split
diff --git a/mypyc/transform/refcount.py b/mypyc/transform/refcount.py
index 3927c969260b..3b78cee07b2d 100644
--- a/mypyc/transform/refcount.py
+++ b/mypyc/transform/refcount.py
@@ -50,12 +50,12 @@ def insert_ref_count_opcodes(ir: FuncIR) -> None:
     values = all_values(ir.arg_regs, ir.blocks)
 
     borrowed = {value for value in values if value.is_borrowed}
-    args = set(ir.arg_regs)  # type: Set[Value]
+    args: Set[Value] = set(ir.arg_regs)
     live = analyze_live_regs(ir.blocks, cfg)
     borrow = analyze_borrowed_arguments(ir.blocks, cfg, borrowed)
     defined = analyze_must_defined_regs(ir.blocks, cfg, args, values)
     ordering = make_value_ordering(ir)
-    cache = {}  # type: BlockCache
+    cache: BlockCache = {}
     for block in ir.blocks[:]:
         if isinstance(block.ops[-1], (Branch, Goto)):
             insert_branch_inc_and_decrefs(block,
@@ -92,7 +92,7 @@ def transform_block(block: BasicBlock,
                     pre_borrow: 'AnalysisDict[Value]',
                     post_must_defined: 'AnalysisDict[Value]') -> None:
     old_ops = block.ops
-    ops = []  # type: List[Op]
+    ops: List[Op] = []
     for i, op in enumerate(old_ops):
         key = (block, i)
 
@@ -249,7 +249,7 @@ def make_value_ordering(ir: FuncIR) -> Dict[Value, int]:
     This omits registers that are only ever read.
     """
     # TODO: Never initialized values??
-    result = {}  # type: Dict[Value, int]
+    result: Dict[Value, int] = {}
     n = 0
 
     for arg in ir.arg_regs:
diff --git a/mypyc/transform/uninit.py b/mypyc/transform/uninit.py
index 8df92c36a7de..47544e511734 100644
--- a/mypyc/transform/uninit.py
+++ b/mypyc/transform/uninit.py
@@ -32,7 +32,7 @@ def insert_uninit_checks(ir: FuncIR) -> None:
 
 def split_blocks_at_uninits(blocks: List[BasicBlock],
                             pre_must_defined: 'AnalysisDict[Value]') -> List[BasicBlock]:
-    new_blocks = []  # type: List[BasicBlock]
+    new_blocks: List[BasicBlock] = []
 
     init_registers = []
     init_registers_set = set()
@@ -80,7 +80,7 @@ def split_blocks_at_uninits(blocks: List[BasicBlock],
             cur_block.ops.append(op)
 
     if init_registers:
-        new_ops = []  # type: List[Op]
+        new_ops: List[Op] = []
        for reg in init_registers:
             err = LoadErrorValue(reg.type, undefines=True)
             new_ops.append(err)