diff --git a/mypy/build.py b/mypy/build.py
index 51e82e6a8543..682fa1e00398 100644
--- a/mypy/build.py
+++ b/mypy/build.py
@@ -65,8 +65,9 @@ class BuildResult:
errors: List of error messages.
"""
- def __init__(self, manager: 'BuildManager') -> None:
+ def __init__(self, manager: 'BuildManager', graph: Graph) -> None:
self.manager = manager
+ self.graph = graph
self.files = manager.modules
self.types = manager.all_types
self.errors = manager.errors.messages()
@@ -184,8 +185,8 @@ def build(sources: List[BuildSource],
)
try:
- dispatch(sources, manager)
- return BuildResult(manager)
+ graph = dispatch(sources, manager)
+ return BuildResult(manager, graph)
finally:
manager.log("Build finished in %.3f seconds with %d modules, %d types, and %d errors" %
(time.time() - manager.start_time,
@@ -474,7 +475,7 @@ def parse_file(self, id: str, path: str, source: str, ignore_errors: bool) -> My
return tree
def module_not_found(self, path: str, line: int, id: str) -> None:
- self.errors.set_file(path)
+ self.errors.set_file(path, id)
stub_msg = "(Stub files are from https://github.com/python/typeshed)"
if ((self.options.python_version[0] == 2 and moduleinfo.is_py2_std_lib_module(id)) or
(self.options.python_version[0] >= 3 and moduleinfo.is_py3_std_lib_module(id))):
@@ -1230,7 +1231,7 @@ def skipping_ancestor(self, id: str, path: str, ancestor_for: 'State') -> None:
# so we'd need to cache the decision.
manager = self.manager
manager.errors.set_import_context([])
- manager.errors.set_file(ancestor_for.xpath)
+ manager.errors.set_file(ancestor_for.xpath, ancestor_for.id)
manager.errors.report(-1, -1, "Ancestor package '%s' ignored" % (id,),
severity='note', only_once=True)
manager.errors.report(-1, -1,
@@ -1242,7 +1243,7 @@ def skipping_module(self, id: str, path: str) -> None:
manager = self.manager
save_import_context = manager.errors.import_context()
manager.errors.set_import_context(self.caller_state.import_context)
- manager.errors.set_file(self.caller_state.xpath)
+ manager.errors.set_file(self.caller_state.xpath, self.caller_state.id)
line = self.caller_line
manager.errors.report(line, 0,
"Import of '%s' ignored" % (id,),
@@ -1429,7 +1430,7 @@ def parse_file(self) -> None:
continue
if id == '':
# Must be from a relative import.
- manager.errors.set_file(self.xpath)
+ manager.errors.set_file(self.xpath, self.id)
manager.errors.report(line, 0,
"No parent module -- cannot perform relative import",
blocker=True)
@@ -1545,20 +1546,21 @@ def write_cache(self) -> None:
self.interface_hash = new_interface_hash
-def dispatch(sources: List[BuildSource], manager: BuildManager) -> None:
+def dispatch(sources: List[BuildSource], manager: BuildManager) -> Graph:
manager.log("Mypy version %s" % __version__)
graph = load_graph(sources, manager)
if not graph:
print("Nothing to do?!")
- return
+ return graph
manager.log("Loaded graph with %d nodes" % len(graph))
if manager.options.dump_graph:
dump_graph(graph)
- return
+ return graph
process_graph(graph, manager)
if manager.options.warn_unused_ignores:
# TODO: This could also be a per-module option.
manager.errors.generate_unused_ignore_notes()
+ return graph
class NodeInfo:
@@ -1633,7 +1635,7 @@ def load_graph(sources: List[BuildSource], manager: BuildManager) -> Graph:
except ModuleNotFound:
continue
if st.id in graph:
- manager.errors.set_file(st.xpath)
+ manager.errors.set_file(st.xpath, st.id)
manager.errors.report(-1, -1, "Duplicate module named '%s'" % st.id)
manager.errors.raise_error()
graph[st.id] = st
diff --git a/mypy/checker.py b/mypy/checker.py
index 6433763e2de8..9a36af56ef5e 100644
--- a/mypy/checker.py
+++ b/mypy/checker.py
@@ -65,13 +65,16 @@
LAST_PASS = 1 # Pass numbers start at 0
-# A node which is postponed to be type checked during the next pass.
+# A node which is postponed to be processed during the next pass.
+# This is used for both batch mode and fine-grained incremental mode.
DeferredNode = NamedTuple(
'DeferredNode',
[
- ('node', FuncItem),
+ # In batch mode only FuncDef and LambdaExpr are supported
+ ('node', Union[FuncDef, LambdaExpr, MypyFile]),
('context_type_name', Optional[str]), # Name of the surrounding class (for error messages)
- ('active_class', Optional[Type]), # And its type (for selftype handling)
+ ('active_typeinfo', Optional[TypeInfo]), # And its TypeInfo (for semantic analysis
+ # self type handling)
])
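+# A sketch (hypothetical names): a deferred method 'f' of class 'C' in module
+# 'm' would be stored as DeferredNode(<FuncDef f>, 'C', <TypeInfo for m.C>).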
@@ -167,7 +170,7 @@ def check_first_pass(self) -> None:
Deferred functions will be processed by check_second_pass().
"""
- self.errors.set_file(self.path)
+ self.errors.set_file(self.path, self.tree.fullname())
with self.enter_partial_types():
with self.binder.top_frame_context():
for d in self.tree.defs:
@@ -187,19 +190,22 @@ def check_first_pass(self) -> None:
self.fail(messages.ALL_MUST_BE_SEQ_STR.format(str_seq_s, all_s),
all_.node)
- def check_second_pass(self) -> bool:
+ def check_second_pass(self, todo: List[DeferredNode] = None) -> bool:
"""Run second or following pass of type checking.
This goes through deferred nodes, returning True if there were any.
"""
- if not self.deferred_nodes:
+ if not todo and not self.deferred_nodes:
return False
- self.errors.set_file(self.path)
+ self.errors.set_file(self.path, self.tree.fullname())
self.pass_num += 1
- todo = self.deferred_nodes
+ if not todo:
+ todo = self.deferred_nodes
+ else:
+ assert not self.deferred_nodes
self.deferred_nodes = []
- done = set() # type: Set[FuncItem]
- for node, type_name, active_class in todo:
+ done = set() # type: Set[Union[FuncDef, LambdaExpr, MypyFile]]
+ for node, type_name, active_typeinfo in todo:
if node in done:
continue
# This is useful for debugging:
@@ -207,18 +213,34 @@ def check_second_pass(self) -> bool:
# (self.pass_num, type_name, node.fullname() or node.name()))
done.add(node)
with self.errors.enter_type(type_name) if type_name else nothing():
- with self.scope.push_class(active_class) if active_class else nothing():
- if isinstance(node, Statement):
- self.accept(node)
- elif isinstance(node, Expression):
- self.expr_checker.accept(node)
- else:
- assert False
+ with self.scope.push_class(active_typeinfo) if active_typeinfo else nothing():
+ self.check_partial(node)
return True
+ def check_partial(self, node: Union[FuncDef, LambdaExpr, MypyFile]) -> None:
+ if isinstance(node, MypyFile):
+ self.check_top_level(node)
+ elif isinstance(node, LambdaExpr):
+ self.expr_checker.accept(node)
+ else:
+ self.accept(node)
+
+ def check_top_level(self, node: MypyFile) -> None:
+ """Check only the top-level of a module, skipping function definitions."""
+ with self.enter_partial_types():
+ with self.binder.top_frame_context():
+ for d in node.defs:
+ # TODO: Type check class bodies.
+ if not isinstance(d, (FuncDef, ClassDef)):
+ d.accept(self)
+
+ assert not self.current_node_deferred
+ # TODO: Handle __all__
+
def handle_cannot_determine_type(self, name: str, context: Context) -> None:
node = self.scope.top_function()
- if self.pass_num < LAST_PASS and node is not None:
+ if (self.pass_num < LAST_PASS and node is not None
+ and isinstance(node, (FuncDef, LambdaExpr))):
# Don't report an error yet. Just defer.
if self.errors.type_name:
type_name = self.errors.type_name[-1]
@@ -635,7 +657,7 @@ def is_implicit_any(t: Type) -> bool:
for i in range(len(typ.arg_types)):
arg_type = typ.arg_types[i]
- ref_type = self.scope.active_class()
+ ref_type = self.scope.active_self_type() # type: Optional[Type]
if (isinstance(defn, FuncDef) and ref_type is not None and i == 0
and not defn.is_static
and typ.arg_kinds[0] not in [nodes.ARG_STAR, nodes.ARG_STAR2]):
@@ -946,7 +968,7 @@ def check_method_override_for_base_with_name(
# The name of the method is defined in the base class.
# Construct the type of the overriding method.
- typ = bind_self(self.function_type(defn), self.scope.active_class())
+ typ = bind_self(self.function_type(defn), self.scope.active_self_type())
# Map the overridden method type to subtype context so that
# it can be checked for compatibility.
original_type = base_attr.type
@@ -959,7 +981,7 @@ def check_method_override_for_base_with_name(
assert False, str(base_attr.node)
if isinstance(original_type, FunctionLike):
original = map_type_from_supertype(
- bind_self(original_type, self.scope.active_class()),
+ bind_self(original_type, self.scope.active_self_type()),
defn.info, base)
# Check that the types are compatible.
# TODO overloaded signatures
@@ -1051,7 +1073,7 @@ def visit_class_def(self, defn: ClassDef) -> None:
old_binder = self.binder
self.binder = ConditionalTypeBinder()
with self.binder.top_frame_context():
- with self.scope.push_class(fill_typevars(defn.info)):
+ with self.scope.push_class(defn.info):
self.accept(defn.defs)
self.binder = old_binder
if not defn.has_incompatible_baseclass:
@@ -1317,8 +1339,8 @@ def check_compatibility_super(self, lvalue: NameExpr, lvalue_type: Type, rvalue:
# Class-level function objects and classmethods become bound
# methods: the former to the instance, the latter to the
# class
- base_type = bind_self(base_type, self.scope.active_class())
- compare_type = bind_self(compare_type, self.scope.active_class())
+ base_type = bind_self(base_type, self.scope.active_self_type())
+ compare_type = bind_self(compare_type, self.scope.active_self_type())
# If we are a static method, ensure to also tell the
# lvalue it now contains a static method
@@ -1347,7 +1369,8 @@ def lvalue_type_from_base(self, expr_node: Var,
if base_type:
if not has_no_typevars(base_type):
- instance = cast(Instance, self.scope.active_class())
+ # TODO: Handle TupleType, don't cast
+ instance = cast(Instance, self.scope.active_self_type())
itype = map_instance_to_supertype(instance, base)
base_type = expand_type_by_instance(base_type, itype)
@@ -2996,7 +3019,7 @@ def is_node_static(node: Node) -> Optional[bool]:
class Scope:
# We keep two stacks combined, to maintain the relative order
- stack = None # type: List[Union[Type, FuncItem, MypyFile]]
+ stack = None # type: List[Union[TypeInfo, FuncItem, MypyFile]]
def __init__(self, module: MypyFile) -> None:
self.stack = [module]
@@ -3007,11 +3030,17 @@ def top_function(self) -> Optional[FuncItem]:
return e
return None
- def active_class(self) -> Optional[Type]:
- if isinstance(self.stack[-1], Type):
+ def active_class(self) -> Optional[TypeInfo]:
+ if isinstance(self.stack[-1], TypeInfo):
return self.stack[-1]
return None
+ def active_self_type(self) -> Optional[Union[Instance, TupleType]]:
+ info = self.active_class()
+ if info:
+ return fill_typevars(info)
+ return None
+
@contextmanager
def push_function(self, item: FuncItem) -> Iterator[None]:
self.stack.append(item)
@@ -3019,8 +3048,8 @@ def push_function(self, item: FuncItem) -> Iterator[None]:
self.stack.pop()
@contextmanager
- def push_class(self, t: Type) -> Iterator[None]:
- self.stack.append(t)
+ def push_class(self, info: TypeInfo) -> Iterator[None]:
+ self.stack.append(info)
yield
self.stack.pop()
diff --git a/mypy/errors.py b/mypy/errors.py
index 3e66e29e6f5c..f0fadaeed035 100644
--- a/mypy/errors.py
+++ b/mypy/errors.py
@@ -4,7 +4,7 @@
from collections import OrderedDict, defaultdict
from contextlib import contextmanager
-from typing import Tuple, List, TypeVar, Set, Dict, Iterator
+from typing import Tuple, List, TypeVar, Set, Dict, Iterator, Optional
from mypy.options import Options
@@ -22,6 +22,9 @@ class ErrorInfo:
# The source file that was the source of this error.
file = ''
+ # The fully-qualified id of the source module for this error.
+ module = None # type: Optional[str]
+
# The name of the type in which this error is located at.
type = '' # Unqualified, may be None
@@ -46,12 +49,26 @@ class ErrorInfo:
    # Only report this particular message once per program.
only_once = False
- def __init__(self, import_ctx: List[Tuple[str, int]], file: str, typ: str,
- function_or_member: str, line: int, column: int, severity: str,
- message: str, blocker: bool, only_once: bool,
- origin: Tuple[str, int] = None) -> None:
+ # Fine-grained incremental target where this was reported
+ target = None # type: Optional[str]
+
+ def __init__(self,
+ import_ctx: List[Tuple[str, int]],
+ file: str,
+ module: Optional[str],
+ typ: str,
+ function_or_member: str,
+ line: int,
+ column: int,
+ severity: str,
+ message: str,
+ blocker: bool,
+ only_once: bool,
+ origin: Tuple[str, int] = None,
+ target: str = None) -> None:
self.import_ctx = import_ctx
self.file = file
+ self.module = module
self.type = typ
self.function_or_member = function_or_member
self.line = line
@@ -61,6 +78,7 @@ def __init__(self, import_ctx: List[Tuple[str, int]], file: str, typ: str,
self.blocker = blocker
self.only_once = only_once
self.origin = origin or (file, line)
+ self.target = target
class Errors:
@@ -106,11 +124,21 @@ class Errors:
# Set to True to show "In function "foo":" messages.
show_error_context = False # type: bool
- # Set to True to show column numbers in error messages
+ # Set to True to show column numbers in error messages.
show_column_numbers = False # type: bool
+ # Stack of active fine-grained incremental checking targets within
+ # a module. The first item is always the current module id.
+ # (See mypy.server.update for more about targets.)
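+    # For example (hypothetical names): while checking method 'f' of class
+    # 'C' in module 'm', the stack is ['m', 'm.C', 'm.C.f'].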
+ target = None # type: List[str]
+
def __init__(self, show_error_context: bool = False,
show_column_numbers: bool = False) -> None:
+ self.show_error_context = show_error_context
+ self.show_column_numbers = show_column_numbers
+ self.initialize()
+
+ def initialize(self) -> None:
self.error_info = []
self.import_ctx = []
self.error_files = set()
@@ -120,8 +148,10 @@ def __init__(self, show_error_context: bool = False,
self.used_ignored_lines = defaultdict(set)
self.ignored_files = set()
self.only_once_messages = set()
- self.show_error_context = show_error_context
- self.show_column_numbers = show_column_numbers
+ self.target = []
+
+ def reset(self) -> None:
+ self.initialize()
def copy(self) -> 'Errors':
new = Errors(self.show_error_context, self.show_column_numbers)
@@ -129,6 +159,7 @@ def copy(self) -> 'Errors':
new.import_ctx = self.import_ctx[:]
new.type_name = self.type_name[:]
new.function_or_member = self.function_or_member[:]
+ new.target = self.target[:]
return new
def set_ignore_prefix(self, prefix: str) -> None:
@@ -143,8 +174,8 @@ def simplify_path(self, file: str) -> str:
file = os.path.normpath(file)
return remove_path_prefix(file, self.ignore_prefix)
- def set_file(self, file: str, ignored_lines: Set[int] = None) -> None:
- """Set the path of the current file."""
+ def set_file(self, file: str, module: Optional[str], ignored_lines: Set[int] = None) -> None:
+ """Set the path and module id of the current file."""
# The path will be simplified later, in render_messages. That way
# * 'file' is always a key that uniquely identifies a source file
# that mypy read (simplified paths might not be unique); and
@@ -152,6 +183,8 @@ def set_file(self, file: str, ignored_lines: Set[int] = None) -> None:
# reporting errors for files other than the one currently being
# processed.
self.file = file
+ if module:
+ self.target = [module]
def set_file_ignored_lines(self, file: str,
ignored_lines: Set[int] = None,
@@ -162,10 +195,12 @@ def set_file_ignored_lines(self, file: str,
def push_function(self, name: str) -> None:
"""Set the current function or member short name (it can be None)."""
+ self.push_target_component(name)
self.function_or_member.append(name)
def pop_function(self) -> None:
self.function_or_member.pop()
+ self.pop_target_component()
@contextmanager
def enter_function(self, name: str) -> Iterator[None]:
@@ -175,10 +210,30 @@ def enter_function(self, name: str) -> Iterator[None]:
def push_type(self, name: str) -> None:
"""Set the short name of the current type (it can be None)."""
+ self.push_target_component(name)
self.type_name.append(name)
def pop_type(self) -> None:
self.type_name.pop()
+ self.pop_target_component()
+
+ def push_target_component(self, name: str) -> None:
+ if self.target and not self.function_or_member[-1]:
+ self.target.append('{}.{}'.format(self.target[-1], name))
+
+ def pop_target_component(self) -> None:
+ if self.target and not self.function_or_member[-1]:
+ self.target.pop()
+
+ def current_target(self) -> Optional[str]:
+ if self.target:
+ return self.target[-1]
+ return None
+
+ def current_module(self) -> Optional[str]:
+ if self.target:
+ return self.target[0]
+ return None
@contextmanager
def enter_type(self, name: str) -> Iterator[None]:
@@ -214,10 +269,11 @@ def report(self, line: int, column: int, message: str, blocker: bool = False,
type = None # Omit type context if nested function
if file is None:
file = self.file
- info = ErrorInfo(self.import_context(), file, type,
+ info = ErrorInfo(self.import_context(), file, self.current_module(), type,
self.function_or_member[-1], line, column, severity, message,
blocker, only_once,
- origin=(self.file, origin_line) if origin_line else None)
+ origin=(self.file, origin_line) if origin_line else None,
+ target=self.current_target())
self.add_error_info(info)
def add_error_info(self, info: ErrorInfo) -> None:
@@ -241,9 +297,9 @@ def generate_unused_ignore_notes(self) -> None:
if not self.is_typeshed_file(file):
for line in ignored_lines - self.used_ignored_lines[file]:
# Don't use report since add_error_info will ignore the error!
- info = ErrorInfo(self.import_context(), file, None, None,
- line, -1, 'note', "unused 'type: ignore' comment",
- False, False)
+ info = ErrorInfo(self.import_context(), file, self.current_module(), None,
+ None, line, -1, 'note', "unused 'type: ignore' comment",
+ False, False)
self.error_info.append(info)
def is_typeshed_file(self, file: str) -> bool:
@@ -297,6 +353,14 @@ def messages(self) -> List[str]:
a.append(s)
return a
+ def targets(self) -> Set[str]:
+ """Return a set of all targets that contain errors."""
+ # TODO: Make sure that either target is always defined or that not being defined
+ # is okay for fine-grained incremental checking.
+ return set(info.target
+ for info in self.error_info
+ if info.target)
+
def render_messages(self, errors: List[ErrorInfo]) -> List[Tuple[str, int, int,
str, str]]:
"""Translate the messages into a sequence of tuples.
diff --git a/mypy/fastparse.py b/mypy/fastparse.py
index 1699f351f4f0..19619cf58c6b 100644
--- a/mypy/fastparse.py
+++ b/mypy/fastparse.py
@@ -71,7 +71,7 @@ def parse(source: Union[str, bytes], fnam: str = None, errors: Errors = None,
if errors is None:
errors = Errors()
raise_on_error = True
- errors.set_file('' if fnam is None else fnam)
+ errors.set_file('' if fnam is None else fnam, None)
is_stub_file = bool(fnam) and fnam.endswith('.pyi')
try:
assert pyversion[0] >= 3 or is_stub_file
diff --git a/mypy/fastparse2.py b/mypy/fastparse2.py
index a8039766384d..aca04187e57c 100644
--- a/mypy/fastparse2.py
+++ b/mypy/fastparse2.py
@@ -87,7 +87,7 @@ def parse(source: Union[str, bytes], fnam: str = None, errors: Errors = None,
if errors is None:
errors = Errors()
raise_on_error = True
- errors.set_file('' if fnam is None else fnam)
+ errors.set_file('' if fnam is None else fnam, None)
is_stub_file = bool(fnam) and fnam.endswith('.pyi')
try:
assert pyversion[0] < 3 and not is_stub_file
diff --git a/mypy/nodes.py b/mypy/nodes.py
index 4584245b9904..c1e3fc8e5f1c 100644
--- a/mypy/nodes.py
+++ b/mypy/nodes.py
@@ -9,7 +9,7 @@
import mypy.strconv
from mypy.visitor import NodeVisitor, StatementVisitor, ExpressionVisitor
-from mypy.util import dump_tagged, short_type
+from mypy.util import short_type, IdMapper
class Context:
@@ -368,6 +368,8 @@ class FuncBase(Node):
# Type signature. This is usually CallableType or Overloaded, but it can be something else for
    # decorated functions.
type = None # type: mypy.types.Type
+ # Original, not semantically analyzed type (used for reprocessing)
+ unanalyzed_type = None # type: mypy.types.Type
# If method, reference to TypeInfo
info = None # type: TypeInfo
is_property = False
@@ -512,6 +514,7 @@ def __init__(self, arguments: List[Argument], body: 'Block',
self.max_pos = self.arg_kinds.count(ARG_POS) + self.arg_kinds.count(ARG_OPT)
self.body = body
self.type = typ
+ self.unanalyzed_type = typ
self.expanded = []
self.min_args = 0
@@ -835,6 +838,8 @@ class AssignmentStmt(Statement):
rvalue = None # type: Expression
# Declared type in a comment, may be None.
type = None # type: mypy.types.Type
+ # Original, not semantically analyzed type in annotation (used for reprocessing)
+ unanalyzed_type = None # type: Optional[mypy.types.Type]
# This indicates usage of PEP 526 type annotation syntax in assignment.
new_syntax = False # type: bool
@@ -843,6 +848,7 @@ def __init__(self, lvalues: List[Lvalue], rvalue: Expression,
self.lvalues = lvalues
self.rvalue = rvalue
self.type = type
+ self.unanalyzed_type = type
self.new_syntax = new_syntax
def accept(self, visitor: StatementVisitor[T]) -> T:
@@ -2003,8 +2009,11 @@ def __init__(self, names: 'SymbolTable', defn: ClassDef, module_name: str) -> No
self._fullname = defn.fullname
self.is_abstract = False
self.abstract_attributes = []
- if defn.type_vars:
- for vd in defn.type_vars:
+ self.add_type_vars()
+
+ def add_type_vars(self) -> None:
+ if self.defn.type_vars:
+ for vd in self.defn.type_vars:
self.type_vars.append(vd.name)
def name(self) -> str:
@@ -2121,14 +2130,41 @@ def __str__(self) -> str:
This includes the most important information about the type.
"""
+ return self.dump()
+
+ def dump(self,
+ str_conv: 'mypy.strconv.StrConv' = None,
+ type_str_conv: 'mypy.types.TypeStrVisitor' = None) -> str:
+ """Return a string dump of the contents of the TypeInfo."""
+ if not str_conv:
+ str_conv = mypy.strconv.StrConv()
base = None # type: str
+
+ def type_str(typ: 'mypy.types.Type') -> str:
+ if type_str_conv:
+ return typ.accept(type_str_conv)
+ return str(typ)
+
+ head = 'TypeInfo' + str_conv.format_id(self)
if self.bases:
- base = 'Bases({})'.format(', '.join(str(base)
+ base = 'Bases({})'.format(', '.join(type_str(base)
for base in self.bases))
- return dump_tagged(['Name({})'.format(self.fullname()),
- base,
- ('Names', sorted(self.names.keys()))],
- 'TypeInfo')
+ mro = 'Mro({})'.format(', '.join(item.fullname() + str_conv.format_id(item)
+ for item in self.mro))
+ names = []
+ for name in sorted(self.names):
+ description = name + str_conv.format_id(self.names[name].node)
+ node = self.names[name].node
+ if isinstance(node, Var) and node.type:
+ description += ' ({})'.format(type_str(node.type))
+ names.append(description)
+ return mypy.strconv.dump_tagged(
+ ['Name({})'.format(self.fullname()),
+ base,
+ mro,
+ ('Names', names)],
+ head,
+ str_conv=str_conv)
def serialize(self) -> JsonDict:
# NOTE: This is where all ClassDefs originate, so there shouldn't be duplicates.
diff --git a/mypy/semanal.py b/mypy/semanal.py
index 8f285c135242..e08c806f0656 100644
--- a/mypy/semanal.py
+++ b/mypy/semanal.py
@@ -44,8 +44,9 @@
"""
from collections import OrderedDict
+from contextlib import contextmanager
from typing import (
- List, Dict, Set, Tuple, cast, TypeVar, Union, Optional, Callable
+ List, Dict, Set, Tuple, cast, TypeVar, Union, Optional, Callable, Iterator
)
from mypy.nodes import (
@@ -235,7 +236,7 @@ def __init__(self,
def visit_file(self, file_node: MypyFile, fnam: str, options: Options) -> None:
self.options = options
- self.errors.set_file(fnam)
+ self.errors.set_file(fnam, file_node.fullname())
self.cur_mod_node = file_node
self.cur_mod_id = file_node.fullname()
self.is_stub_file = fnam.lower().endswith('.pyi')
@@ -267,6 +268,52 @@ def visit_file(self, file_node: MypyFile, fnam: str, options: Options) -> None:
del self.options
+ def refresh_partial(self, node: Union[MypyFile, FuncItem]) -> None:
+ """Refresh a stale target in fine-grained incremental mode."""
+ if isinstance(node, MypyFile):
+ self.refresh_top_level(node)
+ else:
+ self.accept(node)
+
+ def refresh_top_level(self, file_node: MypyFile) -> None:
+ """Reanalyze a stale module top-level in fine-grained incremental mode."""
+ for d in file_node.defs:
+ if isinstance(d, ClassDef):
+ self.refresh_class_def(d)
+ elif not isinstance(d, FuncItem):
+ self.accept(d)
+
+ def refresh_class_def(self, defn: ClassDef) -> None:
+ with self.analyze_class_body(defn) as should_continue:
+ if should_continue:
+ for d in defn.defs.body:
+ # TODO: Make sure refreshing class bodies works.
+ if isinstance(d, ClassDef):
+ self.refresh_class_def(d)
+ elif not isinstance(d, FuncItem):
+ self.accept(d)
+
+ @contextmanager
+ def file_context(self, file_node: MypyFile, fnam: str, options: Options,
+ active_type: Optional[TypeInfo]) -> Iterator[None]:
+ # TODO: Use this above in visit_file
+ self.options = options
+ self.errors.set_file(fnam, file_node.fullname())
+ self.cur_mod_node = file_node
+ self.cur_mod_id = file_node.fullname()
+ self.is_stub_file = fnam.lower().endswith('.pyi')
+ self.globals = file_node.names
+ if active_type:
+ self.enter_class(active_type.defn)
+ # TODO: Bind class type vars
+
+ yield
+
+ if active_type:
+ self.leave_class()
+ self.type = None
+ del self.options
+
def visit_func_def(self, defn: FuncDef) -> None:
phase_info = self.postpone_nested_functions_stack[-1]
if phase_info != FUNCTION_SECOND_PHASE:
@@ -289,7 +336,8 @@ def visit_func_def(self, defn: FuncDef) -> None:
# Method definition
defn.info = self.type
if not defn.is_decorated and not defn.is_overload:
- if defn.name() in self.type.names:
+ if (defn.name() in self.type.names and
+ self.type.names[defn.name()].node != defn):
# Redefinition. Conditional redefinition is okay.
n = self.type.names[defn.name()].node
if not self.set_original_def(n, defn):
@@ -420,8 +468,12 @@ def find_type_variables_in_type(self, type: Type) -> List[Tuple[str, TypeVarExpr
result.extend(self.find_type_variables_in_type(item))
elif isinstance(type, AnyType):
pass
- elif isinstance(type, EllipsisType) or isinstance(type, TupleType):
+ elif isinstance(type, (EllipsisType, TupleType)):
+ # TODO: Need to process tuple items?
pass
+ elif isinstance(type, Instance):
+ for arg in type.args:
+ result.extend(self.find_type_variables_in_type(arg))
else:
assert False, 'Unsupported type %s' % type
return result
@@ -652,13 +704,21 @@ def check_function_signature(self, fdef: FuncItem) -> None:
self.fail('Type signature has too many arguments', fdef, blocker=True)
def visit_class_def(self, defn: ClassDef) -> None:
+ with self.analyze_class_body(defn) as should_continue:
+ if should_continue:
+ # Analyze class body.
+ defn.defs.accept(self)
+
+ @contextmanager
+ def analyze_class_body(self, defn: ClassDef) -> Iterator[bool]:
self.clean_up_bases_and_infer_type_variables(defn)
if self.analyze_typeddict_classdef(defn):
+ yield False
return
if self.analyze_namedtuple_classdef(defn):
# just analyze the class body so we catch type errors in default values
self.enter_class(defn)
- defn.defs.accept(self)
+ yield True
self.leave_class()
else:
self.setup_class_def_analysis(defn)
@@ -673,8 +733,7 @@ def visit_class_def(self, defn: ClassDef) -> None:
self.enter_class(defn)
- # Analyze class body.
- defn.defs.accept(self)
+ yield True
self.calculate_abstract_status(defn.info)
self.setup_type_promotion(defn)
@@ -3359,7 +3418,7 @@ def visit_file(self, file: MypyFile, fnam: str, mod_id: str, options: Options) -
self.pyversion = options.python_version
self.platform = options.platform
sem.cur_mod_id = mod_id
- sem.errors.set_file(fnam)
+ sem.errors.set_file(fnam, mod_id)
sem.globals = SymbolTable()
sem.global_decls = [set()]
sem.nonlocal_decls = [set()]
@@ -3604,10 +3663,23 @@ def __init__(self, modules: Dict[str, MypyFile], errors: Errors) -> None:
self.errors = errors
def visit_file(self, file_node: MypyFile, fnam: str, options: Options) -> None:
- self.errors.set_file(fnam)
+ self.errors.set_file(fnam, file_node.fullname())
self.options = options
self.accept(file_node)
+ def refresh_partial(self, node: Union[MypyFile, FuncItem]) -> None:
+ """Refresh a stale target in fine-grained incremental mode."""
+ if isinstance(node, MypyFile):
+ self.refresh_top_level(node)
+ else:
+ self.accept(node)
+
+ def refresh_top_level(self, file_node: MypyFile) -> None:
+ """Reanalyze a stale module top-level in fine-grained incremental mode."""
+ for d in file_node.defs:
+ if not isinstance(d, (FuncItem, ClassDef)):
+ self.accept(d)
+
def accept(self, node: Node) -> None:
try:
node.accept(self)
diff --git a/mypy/server/__init__.py b/mypy/server/__init__.py
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/mypy/server/astdiff.py b/mypy/server/astdiff.py
new file mode 100644
index 000000000000..9b9659d2f4ea
--- /dev/null
+++ b/mypy/server/astdiff.py
@@ -0,0 +1,213 @@
+"""Compare two versions of a module symbol table.
+
+The goal is to find which AST nodes have externally visible changes, so
+that we can fire triggers and re-type-check other parts of the program
+that are stale because of the changes.
+
+Only look in detail at definitions in the current module.
+"""
+
+from typing import Set, List, TypeVar
+
+from mypy.nodes import SymbolTable, SymbolTableNode, FuncBase, TypeInfo, Var
+from mypy.types import (
+ Type, TypeVisitor, UnboundType, TypeList, AnyType, NoneTyp, UninhabitedType,
+ ErasedType, DeletedType, Instance, TypeVarType, CallableType, TupleType, TypedDictType,
+ UnionType, Overloaded, PartialType, TypeType
+)
+
+
+def compare_symbol_tables(name_prefix: str, table1: SymbolTable, table2: SymbolTable) -> Set[str]:
+ """Return names that are different in two versions of a symbol table.
+
+ Return a set of fully-qualified names (e.g., 'mod.func' or 'mod.Class.method').
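+
+    A sketch of the semantics (hypothetical module 'm'): if version 1 defines
+    'f' and 'g', and version 2 defines an unchanged 'g' plus a new 'h', the
+    result is {'m.f', 'm.h'}.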
+ """
+    # Find names defined in only one version.
+ names1 = {'%s.%s' % (name_prefix, name) for name in table1}
+ names2 = {'%s.%s' % (name_prefix, name) for name in table2}
+ triggers = names1 ^ names2
+
+ # Look for names defined in both versions that are different.
+ for name in set(table1.keys()) & set(table2.keys()):
+ if not is_similar_node_shallow(table1[name], table2[name]):
+ triggers.add('%s.%s' % (name_prefix, name))
+ else:
+ # Nodes are the same when using shallow comparison. Now look into contents of
+ # classes to find changed items.
+ node1 = table1[name].node
+ node2 = table2[name].node
+
+ if node1.fullname() and get_prefix(node1.fullname()) != name_prefix:
+ # Only look inside things defined in the current module.
+ # TODO: This probably doesn't work generally...
+ continue
+
+ if isinstance(node1, TypeInfo) and isinstance(node2, TypeInfo):
+                # TODO: Only do this if the class is defined in this module.
+ prefix = '%s.%s' % (name_prefix, node1.name())
+ triggers |= compare_symbol_tables(prefix, node1.names, node2.names)
+
+ return triggers
+
+
+def is_similar_node_shallow(n: SymbolTableNode, m: SymbolTableNode) -> bool:
+ # TODO:
+ # cross_ref
+ # tvar_def
+ # type_override
+ if (n.kind != m.kind
+ or n.mod_id != m.mod_id
+ or n.module_public != m.module_public):
+ return False
+ if type(n.node) != type(m.node): # noqa
+ return False
+ if n.node.fullname() != m.node.fullname():
+ return False
+ if isinstance(n.node, FuncBase) and isinstance(m.node, FuncBase):
+ # TODO: info
+ return (n.node.is_property == m.node.is_property and
+ is_identical_type(n.node.type, m.node.type))
+ if isinstance(n.node, TypeInfo) and isinstance(m.node, TypeInfo):
+ # TODO:
+ # type_vars
+ # bases
+ # _promote
+ # tuple_type
+ # typeddict_type
+ nn = n.node
+ mn = m.node
+ return (nn.is_abstract == mn.is_abstract and
+ nn.is_enum == mn.is_enum and
+ nn.fallback_to_any == mn.fallback_to_any and
+ nn.is_named_tuple == mn.is_named_tuple and
+ nn.is_newtype == mn.is_newtype and
+ nn.alt_fullname == mn.alt_fullname and
+ is_same_mro(nn.mro, mn.mro))
+ if isinstance(n.node, Var) and isinstance(m.node, Var):
+ return is_identical_type(n.node.type, m.node.type)
+ return True
+
+
+def is_same_mro(mro1: List[TypeInfo], mro2: List[TypeInfo]) -> bool:
+ return (len(mro1) == len(mro2)
+ and all(x.fullname() == y.fullname() for x, y in zip(mro1, mro2)))
+
+
+def get_prefix(id: str) -> str:
+    """Drop the final component of a qualified name (e.g. 'x.y' -> 'x')."""
+ return id.rsplit('.', 1)[0]
+
+
+def is_identical_type(t: Type, s: Type) -> bool:
+ return t.accept(IdenticalTypeVisitor(s))
+
+
+TT = TypeVar('TT', bound=Type)
+
+
+def is_identical_types(a: List[TT], b: List[TT]) -> bool:
+ return len(a) == len(b) and all(is_identical_type(t, s) for t, s in zip(a, b))
+
+
+class IdenticalTypeVisitor(TypeVisitor[bool]):
+ """Visitor for checking whether two types are identical.
+
+ This may be conservative -- it's okay for two types to be considered
+ different even if they are actually the same. The results are only
+ used to improve performance, not relied on for correctness.
+
+ Differences from mypy.sametypes:
+
+ * Types with the same name but different AST nodes are considered
+ identical.
+
+ * If one of the types is not valid for whatever reason, they are
+ considered different.
+
+    * Sometimes types are required to be structurally identical, even if
+      they are semantically the same type.
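+
+    For example, an UnboundType is never considered identical to anything,
+    and unions are only identical if their items match pairwise in the
+    same order.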
+ """
+
+ def __init__(self, right: Type) -> None:
+ self.right = right
+
+ # visit_x(left) means: is left (which is an instance of X) the same type as
+ # right?
+
+ def visit_unbound_type(self, left: UnboundType) -> bool:
+ return False
+
+ def visit_type_list(self, t: TypeList) -> bool:
+ assert False, 'Not supported'
+
+ def visit_any(self, left: AnyType) -> bool:
+ return isinstance(self.right, AnyType)
+
+ def visit_none_type(self, left: NoneTyp) -> bool:
+ return isinstance(self.right, NoneTyp)
+
+ def visit_uninhabited_type(self, t: UninhabitedType) -> bool:
+ return isinstance(self.right, UninhabitedType)
+
+ def visit_erased_type(self, left: ErasedType) -> bool:
+ return False
+
+ def visit_deleted_type(self, left: DeletedType) -> bool:
+ return isinstance(self.right, DeletedType)
+
+ def visit_instance(self, left: Instance) -> bool:
+ return (isinstance(self.right, Instance) and
+ left.type.fullname() == self.right.type.fullname() and
+ is_identical_types(left.args, self.right.args))
+
+ def visit_type_var(self, left: TypeVarType) -> bool:
+ return (isinstance(self.right, TypeVarType) and
+ left.id == self.right.id)
+
+ def visit_callable_type(self, left: CallableType) -> bool:
+ # FIX generics
+ if isinstance(self.right, CallableType):
+ cright = self.right
+ return (is_identical_type(left.ret_type, cright.ret_type) and
+ is_identical_types(left.arg_types, cright.arg_types) and
+ left.arg_names == cright.arg_names and
+ left.arg_kinds == cright.arg_kinds and
+ left.is_type_obj() == cright.is_type_obj() and
+ left.is_ellipsis_args == cright.is_ellipsis_args)
+ return False
+
+ def visit_tuple_type(self, left: TupleType) -> bool:
+ if isinstance(self.right, TupleType):
+ return is_identical_types(left.items, self.right.items)
+ return False
+
+ def visit_typeddict_type(self, left: TypedDictType) -> bool:
+ if isinstance(self.right, TypedDictType):
+ if left.items.keys() != self.right.items.keys():
+ return False
+ for (_, left_item_type, right_item_type) in left.zip(self.right):
+ if not is_identical_type(left_item_type, right_item_type):
+ return False
+ return True
+ return False
+
+ def visit_union_type(self, left: UnionType) -> bool:
+ if isinstance(self.right, UnionType):
+ # Require structurally identical types.
+ return is_identical_types(left.items, self.right.items)
+ return False
+
+ def visit_overloaded(self, left: Overloaded) -> bool:
+ if isinstance(self.right, Overloaded):
+ return is_identical_types(left.items(), self.right.items())
+ return False
+
+ def visit_partial_type(self, left: PartialType) -> bool:
+ # A partial type is not fully defined, so the result is indeterminate. We shouldn't
+ # get here.
+ raise RuntimeError
+
+ def visit_type_type(self, left: TypeType) -> bool:
+ if isinstance(self.right, TypeType):
+ return is_identical_type(left.item, self.right.item)
+ return False
diff --git a/mypy/server/astmerge.py b/mypy/server/astmerge.py
new file mode 100644
index 000000000000..5591c9ac7862
--- /dev/null
+++ b/mypy/server/astmerge.py
@@ -0,0 +1,222 @@
+"""Merge a new version of a module AST to an old version.
+
+See the main entry point merge_asts for details.
+"""
+
+from typing import Dict, List, cast, TypeVar
+
+from mypy.nodes import (
+ Node, MypyFile, SymbolTable, Block, AssignmentStmt, NameExpr, MemberExpr, RefExpr, TypeInfo,
+ FuncDef, ClassDef, SymbolNode, Var, Statement, MDEF
+)
+from mypy.traverser import TraverserVisitor
+from mypy.types import (
+ Type, TypeVisitor, Instance, AnyType, NoneTyp, CallableType, DeletedType, PartialType,
+ TupleType, TypeType, TypeVarType, TypedDictType, UnboundType, UninhabitedType, UnionType,
+ Overloaded
+)
+
+
+def merge_asts(old: MypyFile, old_symbols: SymbolTable,
+ new: MypyFile, new_symbols: SymbolTable) -> None:
+    """Merge a new version of a module AST into a previous version.
+
+ The main idea is to preserve the identities of externally visible
+ nodes in the old AST (that have a corresponding node in the new AST).
+ All old node state (outside identity) will come from the new AST.
+
+ When this returns, 'old' will refer to the merged AST, but 'new_symbols'
+ will be the new symbol table. 'new' and 'old_symbols' will no longer be
+ valid.
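+
+    A sketch of the effect (hypothetical): if both versions define class 'C',
+    the old TypeInfo object for 'C' keeps its identity (so external references
+    to it stay valid), while its state is taken from the new version.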
+ """
+ assert new.fullname() == old.fullname()
+ replacement_map = replacement_map_from_symbol_table(
+ old_symbols, new_symbols, prefix=old.fullname())
+ replacement_map[new] = old
+ node = replace_nodes_in_ast(new, replacement_map)
+ assert node is old
+ replace_nodes_in_symbol_table(new_symbols, replacement_map)
+
+
+def replacement_map_from_symbol_table(
+ old: SymbolTable, new: SymbolTable, prefix: str) -> Dict[SymbolNode, SymbolNode]:
+ replacements = {}
+ for name, node in old.items():
+ if (name in new and (node.kind == MDEF
+ or get_prefix(node.node.fullname()) == prefix)):
+ new_node = new[name]
+ if (type(new_node.node) == type(node.node) # noqa
+ and new_node.node.fullname() == node.node.fullname()
+ and new_node.kind == node.kind):
+ replacements[new_node.node] = node.node
+ if isinstance(node.node, TypeInfo) and isinstance(new_node.node, TypeInfo):
+ type_repl = replacement_map_from_symbol_table(
+ node.node.names,
+ new_node.node.names,
+ prefix)
+ replacements.update(type_repl)
+ return replacements
+
+
+def replace_nodes_in_ast(node: SymbolNode,
+ replacements: Dict[SymbolNode, SymbolNode]) -> SymbolNode:
+ visitor = NodeReplaceVisitor(replacements)
+ node.accept(visitor)
+ return replacements.get(node, node)
+
+
+SN = TypeVar('SN', bound=SymbolNode)
+
+
+class NodeReplaceVisitor(TraverserVisitor):
+ """Transform some nodes to new identities in an AST.
+
+    Only nodes that live in the symbol table may be
+    replaced, which somewhat simplifies the implementation.
+ """
+
+ def __init__(self, replacements: Dict[SymbolNode, SymbolNode]) -> None:
+ self.replacements = replacements
+
+ def visit_mypy_file(self, node: MypyFile) -> None:
+ node = self.fixup(node)
+ node.defs = self.replace_statements(node.defs)
+ super().visit_mypy_file(node)
+
+ def visit_block(self, node: Block) -> None:
+ super().visit_block(node)
+ node.body = self.replace_statements(node.body)
+
+ def visit_func_def(self, node: FuncDef) -> None:
+ node = self.fixup(node)
+ if node.type:
+ self.fixup_type(node.type)
+ super().visit_func_def(node)
+
+ def visit_class_def(self, node: ClassDef) -> None:
+ # TODO additional things like the MRO
+ node.defs.body = self.replace_statements(node.defs.body)
+ replace_nodes_in_symbol_table(node.info.names, self.replacements)
+ info = node.info
+ for i, item in enumerate(info.mro):
+ info.mro[i] = self.fixup(info.mro[i])
+ for i, base in enumerate(info.bases):
+ self.fixup_type(info.bases[i])
+ super().visit_class_def(node)
+
+ def visit_assignment_stmt(self, node: AssignmentStmt) -> None:
+ if node.type:
+ self.fixup_type(node.type)
+ super().visit_assignment_stmt(node)
+
+ # Expressions
+
+ def visit_name_expr(self, node: NameExpr) -> None:
+ self.visit_ref_expr(node)
+
+ def visit_member_expr(self, node: MemberExpr) -> None:
+ self.visit_ref_expr(node)
+ super().visit_member_expr(node)
+
+ def visit_ref_expr(self, node: RefExpr) -> None:
+ node.node = self.fixup(node.node)
+
+ # Helpers
+
+ def fixup(self, node: SN) -> SN:
+ if node in self.replacements:
+ new = self.replacements[node]
+ new.__dict__ = node.__dict__
+ return cast(SN, new)
+ return node
+
+ def fixup_type(self, typ: Type) -> None:
+ typ.accept(TypeReplaceVisitor(self.replacements))
+
+ def replace_statements(self, nodes: List[Statement]) -> List[Statement]:
+ result = []
+ for node in nodes:
+ if isinstance(node, SymbolNode):
+ node = self.fixup(node)
+ result.append(node)
+ return result
+
+
+class TypeReplaceVisitor(TypeVisitor[None]):
+ def __init__(self, replacements: Dict[SymbolNode, SymbolNode]) -> None:
+ self.replacements = replacements
+
+ def visit_instance(self, typ: Instance) -> None:
+ typ.type = self.fixup(typ.type)
+ for arg in typ.args:
+ arg.accept(self)
+
+ def visit_any(self, typ: AnyType) -> None:
+ pass
+
+ def visit_none_type(self, typ: NoneTyp) -> None:
+ pass
+
+ def visit_callable_type(self, typ: CallableType) -> None:
+ for arg in typ.arg_types:
+ arg.accept(self)
+ typ.ret_type.accept(self)
+ # TODO: typ.definition
+ # TODO: typ.fallback
+ assert not typ.variables # TODO
+
+ def visit_overloaded(self, t: Overloaded) -> None:
+ raise NotImplementedError
+
+ def visit_deleted_type(self, typ: DeletedType) -> None:
+ pass
+
+ def visit_partial_type(self, typ: PartialType) -> None:
+ raise RuntimeError
+
+ def visit_tuple_type(self, typ: TupleType) -> None:
+ raise NotImplementedError
+
+ def visit_type_type(self, typ: TypeType) -> None:
+ raise NotImplementedError
+
+ def visit_type_var(self, typ: TypeVarType) -> None:
+ raise NotImplementedError
+
+ def visit_typeddict_type(self, typ: TypedDictType) -> None:
+ raise NotImplementedError
+
+ def visit_unbound_type(self, typ: UnboundType) -> None:
+ raise RuntimeError
+
+ def visit_uninhabited_type(self, typ: UninhabitedType) -> None:
+ pass
+
+ def visit_union_type(self, typ: UnionType) -> None:
+ raise NotImplementedError
+
+ # Helpers
+
+ def fixup(self, node: SN) -> SN:
+ if node in self.replacements:
+ new = self.replacements[node]
+ new.__dict__ = node.__dict__
+ return cast(SN, new)
+ return node
+
+
+def replace_nodes_in_symbol_table(symbols: SymbolTable,
+ replacements: Dict[SymbolNode, SymbolNode]) -> None:
+ for name, node in symbols.items():
+ if node.node in replacements:
+ new = replacements[node.node]
+ new.__dict__ = node.node.__dict__
+ node.node = new
+ if isinstance(node.node, Var) and node.node.type:
+ node.node.type.accept(TypeReplaceVisitor(replacements))
+ node.node.info = cast(TypeInfo, replacements.get(node.node.info, node.node.info))
+
+
+def get_prefix(fullname: str) -> str:
+    """Drop the final component of a qualified name (e.g. 'x.y' -> 'x')."""
+ return fullname.rsplit('.', 1)[0]
diff --git a/mypy/server/aststrip.py b/mypy/server/aststrip.py
new file mode 100644
index 000000000000..dd7ae96f0f92
--- /dev/null
+++ b/mypy/server/aststrip.py
@@ -0,0 +1,99 @@
+"""Strip AST from semantic information."""
+
+import contextlib
+from typing import Union, Iterator
+
+from mypy.nodes import (
+ Node, FuncDef, NameExpr, MemberExpr, RefExpr, MypyFile, FuncItem, ClassDef, AssignmentStmt,
+ TypeInfo, Var
+)
+from mypy.traverser import TraverserVisitor
+
+
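+# A note on the intended round trip (an assumption, not stated here): after
+# strip_target(node), types are reset to their unanalyzed forms and references
+# are unbound, so the target can be re-run through semantic analysis from a
+# clean slate.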
+def strip_target(node: Union[MypyFile, FuncItem]) -> None:
+ NodeStripVisitor().strip_target(node)
+
+
+class NodeStripVisitor(TraverserVisitor):
+ def __init__(self) -> None:
+ self.type = None # type: TypeInfo
+
+ def strip_target(self, node: Union[MypyFile, FuncItem]) -> None:
+ """Strip a fine-grained incremental mode target."""
+ if isinstance(node, MypyFile):
+ self.strip_top_level(node)
+ else:
+ node.accept(self)
+
+ def strip_top_level(self, file_node: MypyFile) -> None:
+        """Strip a module top-level (don't recurse into functions)."""
+ for node in file_node.defs:
+ if not isinstance(node, (FuncItem, ClassDef)):
+ node.accept(self)
+ elif isinstance(node, ClassDef):
+ self.strip_class_body(node)
+
+ def strip_class_body(self, node: ClassDef) -> None:
+ """Strip class body and type info, but don't strip methods."""
+ node.info.type_vars = []
+ node.info.bases = []
+ node.info.abstract_attributes = []
+ node.info.mro = []
+ node.info.add_type_vars()
+
+ def visit_func_def(self, node: FuncDef) -> None:
+ node.expanded = []
+ node.type = node.unanalyzed_type
+ with self.enter_class(node.info) if node.info else nothing():
+ super().visit_func_def(node)
+
+ @contextlib.contextmanager
+ def enter_class(self, info: TypeInfo) -> Iterator[None]:
+ old = self.type
+ self.type = info
+ yield
+ self.type = old
+
+ def visit_assignment_stmt(self, node: AssignmentStmt) -> None:
+ node.type = node.unanalyzed_type
+ super().visit_assignment_stmt(node)
+
+ def visit_name_expr(self, node: NameExpr) -> None:
+ self.strip_ref_expr(node)
+
+ def visit_member_expr(self, node: MemberExpr) -> None:
+ self.strip_ref_expr(node)
+ if self.is_duplicate_attribute_def(node):
+        # This is marked as an instance variable definition but a base class
+ # defines an attribute with the same name, and we can't have
+ # multiple definitions for an attribute. Defer to the base class
+ # definition.
+ del self.type.names[node.name]
+ node.is_def = False
+ node.def_var = None
+
+ def is_duplicate_attribute_def(self, node: MemberExpr) -> bool:
+ if not node.is_def or node.name not in self.type.names:
+ return False
+ return any(info.get(node.name) is not None for info in self.type.mro[1:])
+
+ def strip_ref_expr(self, node: RefExpr) -> None:
+ node.kind = None
+ node.node = None
+ node.fullname = None
+
+ # TODO: handle more node types
+
+
+def is_self_member_ref(memberexpr: MemberExpr) -> bool:
+ """Does memberexpr refer to an attribute of self?"""
+ # TODO: Merge with is_self_member_ref in semanal.py.
+ if not isinstance(memberexpr.expr, NameExpr):
+ return False
+ node = memberexpr.expr.node
+ return isinstance(node, Var) and node.is_self
+
+
+@contextlib.contextmanager
+def nothing() -> Iterator[None]:
+ yield
diff --git a/mypy/server/deps.py b/mypy/server/deps.py
new file mode 100644
index 000000000000..6710cfa49bf0
--- /dev/null
+++ b/mypy/server/deps.py
@@ -0,0 +1,230 @@
+"""Generate fine-grained dependencies for AST nodes."""
+
+from typing import Dict, List, Set
+
+from mypy.checkmember import bind_self
+from mypy.nodes import (
+ Node, Expression, MypyFile, FuncDef, ClassDef, AssignmentStmt, NameExpr, MemberExpr, Import,
+ ImportFrom, CallExpr, TypeInfo, Var, LDEF
+)
+from mypy.traverser import TraverserVisitor
+from mypy.types import (
+ Type, Instance, AnyType, NoneTyp, TypeVisitor, CallableType, DeletedType, PartialType,
+ TupleType, TypeType, TypeVarType, TypedDictType, UnboundType, UninhabitedType, UnionType,
+ FunctionLike
+)
+from mypy.server.trigger import make_trigger
+
+
+def get_dependencies(prefix: str, node: Node,
+ type_map: Dict[Expression, Type]) -> Dict[str, Set[str]]:
+ """Get all dependencies of a node, recursively."""
+ visitor = DependencyVisitor(prefix, type_map)
+ node.accept(visitor)
+ return visitor.map
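+
+# A sketch of the output (hypothetical names): for 'def f(a: A) -> None' at
+# the top level of module 'm', get_dependencies('m', ...) returns
+# {'<m.A>': {'m.f', '<m.f>'}} -- when trigger '<m.A>' fires, target 'm.f'
+# is reprocessed and trigger '<m.f>' fires in turn.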
+
+
+def get_dependencies_of_target(prefix: str, node: Node,
+ type_map: Dict[Expression, Type]) -> Dict[str, Set[str]]:
+    """Get dependencies of a target -- don't recurse into nested targets."""
+ visitor = DependencyVisitor(prefix, type_map)
+ if isinstance(node, MypyFile):
+ for defn in node.defs:
+ if not isinstance(defn, (ClassDef, FuncDef)):
+ defn.accept(visitor)
+ else:
+ node.accept(visitor)
+ return visitor.map
+
+
+class DependencyVisitor(TraverserVisitor):
+ def __init__(self, prefix: str, type_map: Dict[Expression, Type]) -> None:
+ self.stack = [prefix]
+ self.type_map = type_map
+ self.map = {} # type: Dict[str, Set[str]]
+ self.is_class = False
+
+ # TODO
+ # decorated functions
+ # overloads
+ # from m import *
+
+ def visit_mypy_file(self, o: MypyFile) -> None:
+        # TODO: Do we need to do anything here?
+ super().visit_mypy_file(o)
+
+ def visit_func_def(self, o: FuncDef) -> None:
+ target = self.push(o.name())
+ if o.type:
+ if self.is_class and isinstance(o.type, FunctionLike):
+ signature = bind_self(o.type) # type: Type
+ else:
+ signature = o.type
+ for trigger in get_type_dependencies(signature):
+ self.add_dependency(trigger)
+ self.add_dependency(trigger, target=make_trigger(target))
+ if o.info:
+ for base in non_trivial_bases(o.info):
+ self.add_dependency(make_trigger(base.fullname() + '.' + o.name()))
+ super().visit_func_def(o)
+ self.pop()
+
+ def visit_class_def(self, o: ClassDef) -> None:
+ target = self.push(o.name)
+ self.add_dependency(make_trigger(target))
+ old_is_class = self.is_class
+ self.is_class = True
+ # TODO: Add dependencies based on MRO and other attributes.
+ super().visit_class_def(o)
+ self.is_class = old_is_class
+ info = o.info
+ for name, node in info.names.items():
+ if isinstance(node.node, Var):
+ for base in non_trivial_bases(info):
+ # If the type of an attribute changes in a base class, we make references
+ # to the attribute in the subclass stale.
+ self.add_dependency(make_trigger(base.fullname() + '.' + name),
+ target=make_trigger(info.fullname() + '.' + name))
+ for base in non_trivial_bases(info):
+ for name, node in base.names.items():
+ self.add_dependency(make_trigger(base.fullname() + '.' + name),
+ target=make_trigger(info.fullname() + '.' + name))
+ self.add_dependency(make_trigger(base.fullname() + '.__init__'),
+ target=make_trigger(info.fullname() + '.__init__'))
+ self.pop()
+
+ def visit_import(self, o: Import) -> None:
+ for id, as_id in o.ids:
+ # TODO: as_id
+ self.add_dependency(make_trigger(id), self.current())
+
+ def visit_import_from(self, o: ImportFrom) -> None:
+ assert o.relative == 0 # Relative imports not supported
+ for name, as_name in o.names:
+ assert as_name is None or as_name == name
+ self.add_dependency(make_trigger(o.id + '.' + name))
+
+ def visit_assignment_stmt(self, o: AssignmentStmt) -> None:
+ super().visit_assignment_stmt(o)
+ if o.type:
+ for trigger in get_type_dependencies(o.type):
+ self.add_dependency(trigger)
+
+ # Expressions
+
+ # TODO
+ # dependency on __init__ (e.g. ClassName())
+ # super()
+
+ def visit_name_expr(self, o: NameExpr) -> None:
+ if o.kind == LDEF:
+            # We don't track dependencies on local variables, since they
+ # aren't externally visible.
+ return
+ trigger = make_trigger(o.fullname)
+ self.add_dependency(trigger)
+
+ def visit_member_expr(self, e: MemberExpr) -> None:
+ super().visit_member_expr(e)
+ if e.kind is not None:
+ # Reference to a module attribute
+ trigger = make_trigger(e.fullname)
+ self.add_dependency(trigger)
+ else:
+ # Reference to a non-module attribute
+ typ = self.type_map[e.expr]
+ if isinstance(typ, Instance):
+ member = '%s.%s' % (typ.type.fullname(), e.name)
+ self.add_dependency(make_trigger(member))
+ elif isinstance(typ, (AnyType, NoneTyp)):
+ pass # No dependency needed
+ elif isinstance(typ, FunctionLike) and typ.is_type_obj():
+ member = '%s.%s' % (typ.type_object().fullname(), e.name)
+ self.add_dependency(make_trigger(member))
+
+ def visit_call_expr(self, e: CallExpr) -> None:
+ super().visit_call_expr(e)
+ callee_type = self.type_map.get(e.callee)
+ if isinstance(callee_type, FunctionLike) and callee_type.is_type_obj():
+ class_name = callee_type.type_object().fullname()
+ self.add_dependency(make_trigger(class_name + '.__init__'))
+
+ # Helpers
+
+ def add_dependency(self, trigger: str, target: str = None) -> None:
+ if target is None:
+ target = self.current()
+ self.map.setdefault(trigger, set()).add(target)
+
+ def push(self, component: str) -> str:
+ target = '%s.%s' % (self.current(), component)
+ self.stack.append(target)
+ return target
+
+ def pop(self) -> None:
+ self.stack.pop()
+
+ def current(self) -> str:
+ return self.stack[-1]
+
+
+def get_type_dependencies(typ: Type) -> List[str]:
+ return typ.accept(TypeDependenciesVisitor())
+
+
+class TypeDependenciesVisitor(TypeVisitor[List[str]]):
+ def __init__(self) -> None:
+ self.deps = [] # type: List[str]
+
+ def visit_instance(self, typ: Instance) -> List[str]:
+ trigger = make_trigger(typ.type.fullname())
+ triggers = [trigger]
+ for arg in typ.args:
+ triggers.extend(get_type_dependencies(arg))
+ return triggers
+
+ def visit_any(self, typ: AnyType) -> List[str]:
+ return []
+
+ def visit_none_type(self, typ: NoneTyp) -> List[str]:
+ return []
+
+ def visit_callable_type(self, typ: CallableType) -> List[str]:
+ # TODO: generic callables
+ triggers = []
+ for arg in typ.arg_types:
+ triggers.extend(get_type_dependencies(arg))
+ triggers.extend(get_type_dependencies(typ.ret_type))
+ return triggers
+
+ def visit_deleted_type(self, typ: DeletedType) -> List[str]:
+ return []
+
+ def visit_partial_type(self, typ: PartialType) -> List[str]:
+ assert False, "Should not see a partial type here"
+
+ def visit_tuple_type(self, typ: TupleType) -> List[str]:
+ raise NotImplementedError
+
+ def visit_type_type(self, typ: TypeType) -> List[str]:
+ raise NotImplementedError
+
+ def visit_type_var(self, typ: TypeVarType) -> List[str]:
+ raise NotImplementedError
+
+ def visit_typeddict_type(self, typ: TypedDictType) -> List[str]:
+ raise NotImplementedError
+
+ def visit_unbound_type(self, typ: UnboundType) -> List[str]:
+ return []
+
+ def visit_uninhabited_type(self, typ: UninhabitedType) -> List[str]:
+ return []
+
+ def visit_union_type(self, typ: UnionType) -> List[str]:
+ raise NotImplementedError
+
+
+def non_trivial_bases(info: TypeInfo) -> List[TypeInfo]:
+ return [base for base in info.mro[1:]
+ if base.fullname() != 'builtins.object']
diff --git a/mypy/server/subexpr.py b/mypy/server/subexpr.py
new file mode 100644
index 000000000000..b20fa4de10b4
--- /dev/null
+++ b/mypy/server/subexpr.py
@@ -0,0 +1,136 @@
+"""Find all subexpressions of an AST node."""
+
+from typing import List
+
+from mypy.nodes import (
+ Expression, Node, MemberExpr, YieldFromExpr, YieldExpr, CallExpr, OpExpr, ComparisonExpr,
+ SliceExpr, CastExpr, RevealTypeExpr, UnaryExpr, ListExpr, TupleExpr, DictExpr, SetExpr,
+ IndexExpr, GeneratorExpr, ListComprehension, ConditionalExpr, TypeApplication, LambdaExpr,
+ StarExpr, BackquoteExpr, AwaitExpr
+)
+from mypy.traverser import TraverserVisitor
+
+
+def get_subexpressions(node: Node) -> List[Expression]:
+ visitor = SubexpressionFinder()
+ node.accept(visitor)
+ return visitor.expressions
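+
+# For example, for the expression 'f(x + 1)' the result contains the CallExpr
+# itself, the NameExpr 'f', the OpExpr 'x + 1', the NameExpr 'x', and the
+# IntExpr '1'.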
+
+
+class SubexpressionFinder(TraverserVisitor):
+ def __init__(self) -> None:
+ self.expressions = [] # type: List[Expression]
+
+ def _visit_leaf(self, o: Expression) -> None:
+ self.add(o)
+
+ visit_int_expr = _visit_leaf
+ visit_name_expr = _visit_leaf
+ visit_float_expr = _visit_leaf
+ visit_str_expr = _visit_leaf
+ visit_bytes_expr = _visit_leaf
+ visit_unicode_expr = _visit_leaf
+ visit_complex_expr = _visit_leaf
+ visit_ellipsis = _visit_leaf
+ visit_super_expr = _visit_leaf
+ visit_type_var_expr = _visit_leaf
+ visit_type_alias_expr = _visit_leaf
+ visit_namedtuple_expr = _visit_leaf
+ visit_typeddict_expr = _visit_leaf
+ visit__promote_expr = _visit_leaf
+ visit_newtype_expr = _visit_leaf
+
+ def visit_member_expr(self, e: MemberExpr) -> None:
+ self.add(e)
+ super().visit_member_expr(e)
+
+ def visit_yield_from_expr(self, e: YieldFromExpr) -> None:
+ self.add(e)
+ super().visit_yield_from_expr(e)
+
+ def visit_yield_expr(self, e: YieldExpr) -> None:
+ self.add(e)
+ super().visit_yield_expr(e)
+
+ def visit_call_expr(self, e: CallExpr) -> None:
+ self.add(e)
+ super().visit_call_expr(e)
+
+ def visit_op_expr(self, e: OpExpr) -> None:
+ self.add(e)
+ super().visit_op_expr(e)
+
+ def visit_comparison_expr(self, e: ComparisonExpr) -> None:
+ self.add(e)
+ super().visit_comparison_expr(e)
+
+ def visit_slice_expr(self, e: SliceExpr) -> None:
+ self.add(e)
+ super().visit_slice_expr(e)
+
+ def visit_cast_expr(self, e: CastExpr) -> None:
+ self.add(e)
+ super().visit_cast_expr(e)
+
+ def visit_reveal_type_expr(self, e: RevealTypeExpr) -> None:
+ self.add(e)
+ super().visit_reveal_type_expr(e)
+
+ def visit_unary_expr(self, e: UnaryExpr) -> None:
+ self.add(e)
+ super().visit_unary_expr(e)
+
+ def visit_list_expr(self, e: ListExpr) -> None:
+ self.add(e)
+ super().visit_list_expr(e)
+
+ def visit_tuple_expr(self, e: TupleExpr) -> None:
+ self.add(e)
+ super().visit_tuple_expr(e)
+
+ def visit_dict_expr(self, e: DictExpr) -> None:
+ self.add(e)
+ super().visit_dict_expr(e)
+
+ def visit_set_expr(self, e: SetExpr) -> None:
+ self.add(e)
+ super().visit_set_expr(e)
+
+ def visit_index_expr(self, e: IndexExpr) -> None:
+ self.add(e)
+ super().visit_index_expr(e)
+
+ def visit_generator_expr(self, e: GeneratorExpr) -> None:
+ self.add(e)
+ super().visit_generator_expr(e)
+
+ def visit_list_comprehension(self, e: ListComprehension) -> None:
+ self.add(e)
+ super().visit_list_comprehension(e)
+
+ def visit_conditional_expr(self, e: ConditionalExpr) -> None:
+ self.add(e)
+ super().visit_conditional_expr(e)
+
+ def visit_type_application(self, e: TypeApplication) -> None:
+ self.add(e)
+ super().visit_type_application(e)
+
+ def visit_lambda_expr(self, e: LambdaExpr) -> None:
+ self.add(e)
+ super().visit_lambda_expr(e)
+
+ def visit_star_expr(self, e: StarExpr) -> None:
+ self.add(e)
+ super().visit_star_expr(e)
+
+ def visit_backquote_expr(self, e: BackquoteExpr) -> None:
+ self.add(e)
+ super().visit_backquote_expr(e)
+
+ def visit_await_expr(self, e: AwaitExpr) -> None:
+ self.add(e)
+ super().visit_await_expr(e)
+
+ def add(self, e: Expression) -> None:
+ self.expressions.append(e)
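
A small usage sketch of the visitor above, assuming the 2017-era constructor
signatures in mypy.nodes (hand-building the fragment 'x + 1'):

    from mypy.nodes import IntExpr, NameExpr, OpExpr
    from mypy.server.subexpr import get_subexpressions

    expr = OpExpr('+', NameExpr('x'), IntExpr(1))
    # The outer expression is collected first, then its leaves.
    assert [type(e).__name__ for e in get_subexpressions(expr)] == \
        ['OpExpr', 'NameExpr', 'IntExpr']
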
diff --git a/mypy/server/target.py b/mypy/server/target.py
new file mode 100644
index 000000000000..0b4636b0542f
--- /dev/null
+++ b/mypy/server/target.py
@@ -0,0 +1,17 @@
+from typing import Iterable, Tuple, List
+
+
+def module_prefix(modules: Iterable[str], target: str) -> str:
+ return split_target(modules, target)[0]
+
+
+def split_target(modules: Iterable[str], target: str) -> Tuple[str, str]:
+ remaining = [] # type: List[str]
+ while True:
+ if target in modules:
+ return target, '.'.join(remaining)
+ components = target.rsplit('.', 1)
+ if len(components) == 1:
+ assert False, 'Cannot find module prefix for {}'.format(target)
+ target = components[0]
+ remaining.insert(0, components[1])
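
split_target peels components off the right end of a fully qualified name until it
hits a known module id; everything peeled off comes back as the remainder. A quick
sketch with made-up module names:

    from mypy.server.target import module_prefix, split_target

    modules = {'pkg', 'pkg.mod'}
    assert split_target(modules, 'pkg.mod.Cls.method') == ('pkg.mod', 'Cls.method')
    assert module_prefix(modules, 'pkg.mod.Cls.method') == 'pkg.mod'
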
diff --git a/mypy/server/trigger.py b/mypy/server/trigger.py
new file mode 100644
index 000000000000..2c161f57c57e
--- /dev/null
+++ b/mypy/server/trigger.py
@@ -0,0 +1,5 @@
+"""AST triggers that are used for fine-grained dependency handling."""
+
+
+def make_trigger(name: str) -> str:
+ return '<%s>' % name
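
Triggers are just fully qualified names wrapped in angle brackets, which keeps them
distinct from plain target names in the dependency map:

    from mypy.server.trigger import make_trigger

    assert make_trigger('mod.f') == '<mod.f>'
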
diff --git a/mypy/server/update.py b/mypy/server/update.py
new file mode 100644
index 000000000000..f303a99d0efa
--- /dev/null
+++ b/mypy/server/update.py
@@ -0,0 +1,424 @@
+"""Update build result by incrementally processing changed modules.
+
+Use fine-grained dependencies to update targets in other modules that
+may be affected by externally-visible changes in the changed modules.
+
+Terms:
+
+* A 'target' is a function definition or the top level of a module. We
+ refer to targets using their fully qualified name (e.g. 'mod.Cls.attr').
+ Targets are the smallest units of processing during fine-grained
+ incremental checking.
+* A 'trigger' represents the properties of a part of a program, and it
+ gets triggered/activated when these properties change. For example,
+ '<m.f>' refers to a module-level function, and it gets triggered
+ if the signature of the function changes, or if the function is
+ removed.
+
+Some program state is maintained across multiple build increments:
+
+* The full ASTs of all modules in memory all the time (+ type map).
+* Maintain a fine-grained dependency map, which is from triggers to
+ targets/triggers. The latter determine what other parts of a program
+ need to be processed again due to an externally visible change to a
+ module.
+
+We perform a fine-grained incremental program update like this:
+
+* Determine which modules have changes in their source code since the
+ previous build.
+* Fully process these modules, creating new ASTs and symbol tables
+ for them. Retain the existing ASTs and symbol tables of modules that
+ have no changes in their source code.
+* Determine which parts of the changed modules have changed. The result
+ is a set of triggered triggers.
+* Using the dependency map, decide which other targets have become
+ stale and need to be reprocessed.
+* Replace old ASTs of the modules that we reprocessed earlier with
+ the new ones, but try to retain the identities of original externally
+ visible AST nodes so that we don't (always) need to patch references
+ in the rest of the program.
+* Semantically analyze and type check the stale targets.
+* Repeat the previous steps until nothing externally visible has changed.
+
+Major todo items:
+
+- Support multiple type checking passes
+"""
+
+from typing import Dict, List, Set, Tuple, Iterable, Union
+
+from mypy.build import BuildManager, State
+from mypy.checker import DeferredNode
+from mypy.errors import Errors
+from mypy.nodes import (
+ MypyFile, FuncDef, TypeInfo, Expression, SymbolNode, Var, FuncBase, ClassDef
+)
+from mypy.types import Type
+from mypy.server.astdiff import compare_symbol_tables, is_identical_type
+from mypy.server.astmerge import merge_asts
+from mypy.server.aststrip import strip_target
+from mypy.server.deps import get_dependencies, get_dependencies_of_target
+from mypy.server.target import module_prefix, split_target
+from mypy.server.trigger import make_trigger
+
+
+# If True, print out debug logging output.
+DEBUG = False
+
+
+class FineGrainedBuildManager:
+ def __init__(self,
+ manager: BuildManager,
+ graph: Dict[str, State]) -> None:
+ self.manager = manager
+ self.graph = graph
+ self.deps = get_all_dependencies(manager, graph)
+ self.previous_targets_with_errors = manager.errors.targets()
+
+ def update(self, changed_modules: List[str]) -> List[str]:
+ """Update previous build result by processing changed modules.
+
+ Also propagate changes to other modules as needed, but only process
+ those parts of other modules that are affected by the changes. Retain
+ the existing ASTs and symbol tables of unaffected modules.
+
+ TODO: What about blocking errors?
+
+ Args:
+ manager: State of the build
+ graph: Additional state of the build
+ deps: Fine-grained dependency map for the build (mutated by this function)
+ changed_modules: Modules changed since the previous update/build (assume
+ this is correct; not validated here)
+
+ Returns:
+ A list of errors.
+ """
+ if DEBUG:
+ print('==== update ====')
+ manager = self.manager
+ graph = self.graph
+ old_modules = dict(manager.modules)
+ manager.errors.reset()
+ new_modules, new_type_maps = build_incremental_step(manager, changed_modules)
+ # TODO: What to do with stale dependencies?
+ triggered = calculate_active_triggers(manager, old_modules, new_modules)
+ if DEBUG:
+ print('triggered:', sorted(triggered))
+ replace_modules_with_new_variants(manager, graph, old_modules, new_modules, new_type_maps)
+ update_dependencies(new_modules, self.deps, graph)
+ propagate_changes_using_dependencies(manager, graph, self.deps, triggered,
+ set(changed_modules),
+ self.previous_targets_with_errors,
+ graph)
+ self.previous_targets_with_errors = manager.errors.targets()
+ return manager.errors.messages()
+
+
+def get_all_dependencies(manager: BuildManager, graph: Dict[str, State]) -> Dict[str, Set[str]]:
+ """Return the fine-grained dependency map for an entire build."""
+ deps = {} # type: Dict[str, Set[str]]
+ update_dependencies(manager.modules, deps, graph)
+ return deps
+
+
+def build_incremental_step(manager: BuildManager,
+ changed_modules: List[str]) -> Tuple[Dict[str, MypyFile],
+ Dict[str, Dict[Expression, Type]]]:
+ """Build new versions of changed modules only.
+
+ Return the new ASTs for the changed modules. They will be totally
+ separate from the existing ASTs and need to be merged afterwards.
+ """
+ assert len(changed_modules) == 1
+ id = changed_modules[0]
+ path = manager.modules[id].path
+
+ # TODO: what if file is missing?
+ with open(path) as f:
+ source = f.read()
+
+ state = State(id=id,
+ path=path,
+ source=source,
+ manager=manager) # TODO: more args?
+ state.parse_file()
+ # TODO: state.fix_suppressed_dependencies()?
+ state.semantic_analysis()
+ state.semantic_analysis_pass_three()
+ state.type_check_first_pass()
+ # TODO: state.type_check_second_pass()?
+ state.finish_passes()
+ # TODO: state.write_cache()?
+ # TODO: state.mark_as_rechecked()?
+
+ return {id: state.tree}, {id: state.type_checker.type_map}
+
+
+def update_dependencies(new_modules: Dict[str, MypyFile],
+ deps: Dict[str, Set[str]],
+ graph: Dict[str, State]) -> None:
+ for id, node in new_modules.items():
+ module_deps = get_dependencies(prefix=id,
+ node=node,
+ type_map=graph[id].type_checker.type_map)
+ for trigger, targets in module_deps.items():
+ deps.setdefault(trigger, set()).update(targets)
+
+
+def calculate_active_triggers(manager: BuildManager,
+ old_modules: Dict[str, MypyFile],
+ new_modules: Dict[str, MypyFile]) -> Set[str]:
+ """Determine activated triggers by comparing old and new symbol tables.
+
+ For example, if only the signature of function m.f is different in the new
+ symbol table, return {'<m.f>'}.
+ """
+ names = set() # type: Set[str]
+ for id in new_modules:
+ names |= compare_symbol_tables(id, old_modules[id].names, new_modules[id].names)
+ return {make_trigger(name) for name in names}
+
+
+def replace_modules_with_new_variants(
+ manager: BuildManager,
+ graph: Dict[str, State],
+ old_modules: Dict[str, MypyFile],
+ new_modules: Dict[str, MypyFile],
+ new_type_maps: Dict[str, Dict[Expression, Type]]) -> None:
+ """Replace modules with newly builds versions.
+
+ Retain the identities of externally visible AST nodes in the
+ old ASTs so that references to the affected modules from other
+ modules will still be valid (unless something was deleted or
+ replaced with an incompatible definition, in which case there
+ will be dangling references that will be handled by
+ propagate_changes_using_dependencies).
+ """
+ for id in new_modules:
+ merge_asts(old_modules[id], old_modules[id].names,
+ new_modules[id], new_modules[id].names)
+ manager.modules[id] = old_modules[id]
+ graph[id].type_checker.type_map = new_type_maps[id]
+
+
+def propagate_changes_using_dependencies(
+ manager: BuildManager,
+ graph: Dict[str, State],
+ deps: Dict[str, Set[str]],
+ triggered: Set[str],
+ up_to_date_modules: Set[str],
+ targets_with_errors: Set[str],
+ modules: Iterable[str]) -> None:
+ # TODO: Multiple type checking passes
+ # TODO: Restrict the number of iterations to some maximum to avoid infinite loops
+
+ # Propagate changes until nothing visible has changed during the last
+ # iteration.
+ while triggered or targets_with_errors:
+ todo = find_targets_recursive(triggered, deps, manager.modules, up_to_date_modules)
+ # Also process targets that used to have errors, as otherwise some
+ # errors might be lost.
+ for target in targets_with_errors:
+ id = module_prefix(modules, target)
+ if id not in up_to_date_modules:
+ if id not in todo:
+ todo[id] = set()
+ if DEBUG:
+ print('process', target)
+ todo[id].update(lookup_target(manager.modules, target))
+ triggered = set()
+ # TODO: Preserve order (set is not optimal)
+ for id, nodes in sorted(todo.items(), key=lambda x: x[0]):
+ assert id not in up_to_date_modules
+ triggered |= reprocess_nodes(manager, graph, id, nodes, deps)
+ # Changes elsewhere may require us to reprocess modules that were
+ # previously considered up to date. For example, there may be a
+ # dependency loop that loops back to an originally processed module.
+ up_to_date_modules = set()
+ targets_with_errors = set()
+ if DEBUG:
+ print('triggered:', list(triggered))
+
+
+def find_targets_recursive(
+ triggers: Set[str],
+ deps: Dict[str, Set[str]],
+ modules: Dict[str, MypyFile],
+ up_to_date_modules: Set[str]) -> Dict[str, Set[DeferredNode]]:
+ """Find names of all targets that need to reprocessed, given some triggers.
+
+ Returns: Dictionary from module id to a set of stale targets.
+ """
+ result = {} # type: Dict[str, Set[DeferredNode]]
+ worklist = triggers
+ processed = set() # type: Set[str]
+
+ # Find AST nodes corresponding to each target.
+ #
+ # TODO: Don't rely on a set, since the items are in an unpredictable order.
+ while worklist:
+ processed |= worklist
+ current = worklist
+ worklist = set()
+ for target in current:
+ if target.startswith('<'):
+ worklist |= deps.get(target, set()) - processed
+ else:
+ module_id = module_prefix(modules, target)
+ if module_id in up_to_date_modules:
+ # Already processed.
+ continue
+ if module_id not in result:
+ result[module_id] = set()
+ if DEBUG:
+ print('process', target)
+ deferred = lookup_target(modules, target)
+ result[module_id].update(deferred)
+
+ return result
+
+
+def reprocess_nodes(manager: BuildManager,
+ graph: Dict[str, State],
+ module_id: str,
+ nodeset: Set[DeferredNode],
+ deps: Dict[str, Set[str]]) -> Set[str]:
+ """Reprocess a set of nodes within a single module.
+
+ Return fired triggers.
+ """
+ file_node = manager.modules[module_id]
+
+ def key(node: DeferredNode) -> str:
+ fullname = node.node.fullname()
+ if isinstance(node.node, FuncDef) and fullname is None:
+ assert node.node.info is not None
+ fullname = '%s.%s' % (node.node.info.fullname(), node.node.name())
+ return fullname
+
+ # Sort nodes by full name so that the order of processing is deterministic.
+ nodes = sorted(nodeset, key=key)
+
+ # Strip semantic analysis information.
+ for deferred in nodes:
+ strip_target(deferred.node)
+ semantic_analyzer = manager.semantic_analyzer
+
+ # Second pass of semantic analysis. We don't redo the first pass, because it only
+ # does local things that won't go stale.
+ for deferred in nodes:
+ with semantic_analyzer.file_context(
+ file_node=file_node,
+ fnam=file_node.path,
+ options=manager.options,
+ active_type=deferred.active_typeinfo):
+ manager.semantic_analyzer.refresh_partial(deferred.node)
+
+ # Third pass of semantic analysis.
+ for deferred in nodes:
+ with semantic_analyzer.file_context(
+ file_node=file_node,
+ fnam=file_node.path,
+ options=manager.options,
+ active_type=deferred.active_typeinfo):
+ manager.semantic_analyzer_pass3.refresh_partial(deferred.node)
+
+ # Keep track of potentially affected attribute types before type checking.
+ old_types_map = get_enclosing_namespace_types(nodes)
+
+ # Type check.
+ graph[module_id].type_checker.check_second_pass(nodes) # TODO: check return value
+
+ # Check if any attribute types were changed and need to be propagated further.
+ new_triggered = get_triggered_namespace_items(old_types_map)
+
+ # Dependencies may have changed.
+ update_deps(module_id, nodes, graph, deps)
+
+ return new_triggered
+
+
+NamespaceNode = Union[TypeInfo, MypyFile]
+
+
+def get_enclosing_namespace_types(nodes: List[DeferredNode]) -> Dict[NamespaceNode,
+ Dict[str, Type]]:
+ types = {} # type: Dict[NamespaceNode, Dict[str, Type]]
+ for deferred in nodes:
+ info = deferred.active_typeinfo
+ if info:
+ target = info # type: NamespaceNode
+ elif isinstance(deferred.node, MypyFile):
+ target = deferred.node
+ else:
+ target = None
+ if target and target not in types:
+ local_types = {name: node.node.type
+ for name, node in target.names.items()
+ if isinstance(node.node, Var)}
+ types[target] = local_types
+ return types
+
+
+def get_triggered_namespace_items(old_types_map: Dict[NamespaceNode, Dict[str, Type]]) -> Set[str]:
+ new_triggered = set()
+ for namespace_node, old_types in old_types_map.items():
+ for name, node in namespace_node.names.items():
+ if (name in old_types and
+ (not isinstance(node.node, Var) or
+ not is_identical_type(node.node.type, old_types[name]))):
+ # Type checking a method changed an attribute type.
+ new_triggered.add(make_trigger('{}.{}'.format(namespace_node.fullname(), name)))
+ return new_triggered
+
+
+def update_deps(module_id: str,
+ nodes: List[DeferredNode],
+ graph: Dict[str, State],
+ deps: Dict[str, Set[str]]) -> None:
+ for deferred in nodes:
+ node = deferred.node
+ prefix = module_id
+ if isinstance(node, FuncBase) and node.info:
+ prefix += '.{}'.format(node.info.name())
+ type_map = graph[module_id].type_checker.type_map
+ new_deps = get_dependencies_of_target(prefix, node, type_map)
+ for trigger, targets in new_deps.items():
+ deps.setdefault(trigger, set()).update(targets)
+
+
+def lookup_target(modules: Dict[str, MypyFile], target: str) -> List[DeferredNode]:
+ """Look up a target by fully-qualified name."""
+ module, rest = split_target(modules, target)
+ if rest:
+ components = rest.split('.')
+ else:
+ components = []
+ node = modules[module] # type: SymbolNode
+ file = None # type: MypyFile
+ active_class = None
+ active_class_name = None
+ for c in components:
+ if isinstance(node, TypeInfo):
+ active_class = node
+ active_class_name = node.name()
+ # TODO: Is it possible for the assertion to fail?
+ if isinstance(node, MypyFile):
+ file = node
+ assert isinstance(node, (MypyFile, TypeInfo))
+ node = node.names[c].node
+ if isinstance(node, TypeInfo):
+ # A ClassDef target covers the body of the class and everything defined
+ # within it. To get the body we include the entire surrounding target,
+ # typically a module top-level, since we don't support processing class
+ # bodies as separate entities for simplicity.
+ result = [DeferredNode(file, None, None)]
+ for name, symnode in node.names.items():
+ node = symnode.node
+ if isinstance(node, FuncDef):
+ result.extend(lookup_target(modules, target + '.' + name))
+ return result
+ assert isinstance(node, (FuncDef, MypyFile))
+ return [DeferredNode(node, active_class_name, active_class)]
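
A minimal driver sketch for the module above, mirroring how
mypy/test/testfinegrained.py wires things together (the program source and module
name 'm' are illustrative, and m.py must exist on disk for the update step to
re-read it):

    from mypy import build
    from mypy.build import BuildSource
    from mypy.options import Options
    from mypy.server.update import FineGrainedBuildManager

    result = build.build(sources=[BuildSource('main', None, 'import m\nm.f()')],
                         options=Options())
    fine_grained = FineGrainedBuildManager(result.manager, result.graph)
    # After editing m.py on disk, reprocess it; note that the current
    # build_incremental_step asserts exactly one changed module per call.
    messages = fine_grained.update(['m'])
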
diff --git a/mypy/strconv.py b/mypy/strconv.py
index b8bda6d0224c..169d44bdf9aa 100644
--- a/mypy/strconv.py
+++ b/mypy/strconv.py
@@ -3,9 +3,9 @@
import re
import os
-from typing import Any, List, Tuple, Optional, Union, Sequence
+from typing import Any, List, Tuple, Optional, Union, Sequence, Dict
-from mypy.util import dump_tagged, short_type
+from mypy.util import short_type, IdMapper
import mypy.nodes
from mypy.visitor import NodeVisitor
@@ -21,6 +21,23 @@ class StrConv(NodeVisitor[str]):
ExpressionStmt:1(
IntExpr(1)))
"""
+
+ def __init__(self, show_ids: bool = False) -> None:
+ self.show_ids = show_ids
+ if show_ids:
+ self.id_mapper = IdMapper()
+ else:
+ self.id_mapper = None
+
+ def get_id(self, o: object) -> int:
+ return self.id_mapper.id(o)
+
+ def format_id(self, o: object) -> str:
+ if self.id_mapper:
+ return '<{}>'.format(self.get_id(o))
+ else:
+ return ''
+
def dump(self, nodes: Sequence[object], obj: 'mypy.nodes.Context') -> str:
"""Convert a list of items to a multiline pretty-printed string.
@@ -28,7 +45,10 @@ def dump(self, nodes: Sequence[object], obj: 'mypy.nodes.Context') -> str:
number. See mypy.util.dump_tagged for a description of the nodes
argument.
"""
- return dump_tagged(nodes, short_type(obj) + ':' + str(obj.get_line()))
+ tag = short_type(obj) + ':' + str(obj.get_line())
+ if self.show_ids:
+ tag += '<{}>'.format(self.get_id(obj))
+ return dump_tagged(nodes, tag, self)
def func_helper(self, o: 'mypy.nodes.FuncItem') -> List[object]:
"""Return a list in a format suitable for dump() that represents the
@@ -320,29 +340,35 @@ def visit_star_expr(self, o: 'mypy.nodes.StarExpr') -> str:
return self.dump([o.expr], o)
def visit_name_expr(self, o: 'mypy.nodes.NameExpr') -> str:
- return (short_type(o) + '(' + self.pretty_name(o.name, o.kind,
- o.fullname, o.is_def)
- + ')')
+ pretty = self.pretty_name(o.name, o.kind, o.fullname, o.is_def, o.node)
+ return short_type(o) + '(' + pretty + ')'
- def pretty_name(self, name: str, kind: int, fullname: str, is_def: bool) -> str:
+ def pretty_name(self, name: str, kind: int, fullname: str, is_def: bool,
+ target_node: 'mypy.nodes.Node' = None) -> str:
n = name
if is_def:
n += '*'
+ if target_node:
+ id = self.format_id(target_node)
+ else:
+ id = ''
if kind == mypy.nodes.GDEF or (fullname != name and
fullname is not None):
# Append fully qualified name for global references.
- n += ' [{}]'.format(fullname)
+ n += ' [{}{}]'.format(fullname, id)
elif kind == mypy.nodes.LDEF:
# Add tag to signify a local reference.
- n += ' [l]'
+ n += ' [l{}]'.format(id)
elif kind == mypy.nodes.MDEF:
# Add tag to signify a member reference.
- n += ' [m]'
+ n += ' [m{}]'.format(id)
+ else:
+ n += id
return n
def visit_member_expr(self, o: 'mypy.nodes.MemberExpr') -> str:
- return self.dump([o.expr, self.pretty_name(o.name, o.kind, o.fullname,
- o.is_def)], o)
+ pretty = self.pretty_name(o.name, o.kind, o.fullname, o.is_def, o.node)
+ return self.dump([o.expr, pretty], o)
def visit_yield_expr(self, o: 'mypy.nodes.YieldExpr') -> str:
return self.dump([o.expr], o)
@@ -476,3 +502,45 @@ def visit_slice_expr(self, o: 'mypy.nodes.SliceExpr') -> str:
def visit_backquote_expr(self, o: 'mypy.nodes.BackquoteExpr') -> str:
return self.dump([o.expr], o)
+
+
+def dump_tagged(nodes: Sequence[object], tag: str, str_conv: 'StrConv') -> str:
+ """Convert an array into a pretty-printed multiline string representation.
+
+ The format is
+ tag(
+ item1..
+ itemN)
+ Individual items are formatted like this:
+ - arrays are flattened
+ - pairs (str, array) are converted recursively, so that str is the tag
+ - other items are converted to strings and indented
+ """
+ from mypy.types import Type, TypeStrVisitor
+
+ a = [] # type: List[str]
+ if tag:
+ a.append(tag + '(')
+ for n in nodes:
+ if isinstance(n, list):
+ if n:
+ a.append(dump_tagged(n, None, str_conv))
+ elif isinstance(n, tuple):
+ s = dump_tagged(n[1], n[0], str_conv)
+ a.append(indent(s, 2))
+ elif isinstance(n, mypy.nodes.Node):
+ a.append(indent(n.accept(str_conv), 2))
+ elif isinstance(n, Type):
+ a.append(indent(n.accept(TypeStrVisitor(str_conv.id_mapper)), 2))
+ elif n:
+ a.append(indent(str(n), 2))
+ if tag:
+ a[-1] += ')'
+ return '\n'.join(a)
+
+
+def indent(s: str, n: int) -> str:
+ """Indent all the lines in s (separated by newlines) by n spaces."""
+ s = ' ' * n + s
+ s = s.replace('\n', '\n' + ' ' * n)
+ return s
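
indent is the helper dump_tagged leans on to nest children two extra spaces per
level; it prefixes every line of the string, not just the first:

    assert indent('f(\n  x)', 2) == '  f(\n    x)'
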
diff --git a/mypy/test/testdeps.py b/mypy/test/testdeps.py
new file mode 100644
index 000000000000..e1ee2b97e3e5
--- /dev/null
+++ b/mypy/test/testdeps.py
@@ -0,0 +1,64 @@
+"""Test cases for generating node-level dependencies (for fine-grained incremental checking)"""
+
+import os.path
+from typing import List, Tuple, Dict
+
+from mypy import build
+from mypy.build import BuildSource
+from mypy.errors import CompileError
+from mypy.nodes import MypyFile, Expression
+from mypy.options import Options
+from mypy.server.deps import get_dependencies
+from mypy.test.config import test_temp_dir, test_data_prefix
+from mypy.test.data import parse_test_cases, DataDrivenTestCase, DataSuite
+from mypy.test.helpers import assert_string_arrays_equal
+from mypy.types import Type
+
+files = [
+ 'deps.test'
+]
+
+
+class GetDependenciesSuite(DataSuite):
+ def __init__(self, *, update_data: bool) -> None:
+ pass
+
+ @classmethod
+ def cases(cls) -> List[DataDrivenTestCase]:
+ c = [] # type: List[DataDrivenTestCase]
+ for f in files:
+ c += parse_test_cases(os.path.join(test_data_prefix, f),
+ None, test_temp_dir, True)
+ return c
+
+ def run_case(self, testcase: DataDrivenTestCase) -> None:
+ src = '\n'.join(testcase.input)
+ messages, files, type_map = self.build(src)
+ a = messages
+ deps = get_dependencies('__main__', files['__main__'], type_map)
+
+ for source, targets in sorted(deps.items()):
+ line = '%s -> %s' % (source, ', '.join(sorted(targets)))
+ # Clean up output a bit
+ line = line.replace('__main__', 'm')
+ a.append(line)
+
+ assert_string_arrays_equal(
+ testcase.output, a,
+ 'Invalid output ({}, line {})'.format(testcase.file,
+ testcase.line))
+
+ def build(self, source: str) -> Tuple[List[str],
+ Dict[str, MypyFile],
+ Dict[Expression, Type]]:
+ options = Options()
+ options.use_builtins_fixtures = True
+ options.show_traceback = True
+ try:
+ result = build.build(sources=[BuildSource('main', None, source)],
+ options=options,
+ alt_lib_path=test_temp_dir)
+ except CompileError as e:
+ # TODO: Should perhaps not return None here.
+ return e.messages, None, None
+ return result.errors, result.files, result.types
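
The expected output in deps.test is exactly this dump format: one line per trigger,
sorted, with '__main__' shortened to 'm'. For instance, a dependency map
{'<__main__.g>': {'__main__.f'}} is rendered as the single line '<m.g> -> m.f'.
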
diff --git a/mypy/test/testdiff.py b/mypy/test/testdiff.py
new file mode 100644
index 000000000000..f379a3735ce7
--- /dev/null
+++ b/mypy/test/testdiff.py
@@ -0,0 +1,72 @@
+"""Test cases for AST diff (used for fine-grained incremental checking)"""
+
+import os.path
+from typing import List, Tuple, Dict
+
+from mypy import build
+from mypy.build import BuildSource
+from mypy.errors import CompileError
+from mypy.nodes import MypyFile
+from mypy.options import Options
+from mypy.server.astdiff import compare_symbol_tables
+from mypy.test.config import test_temp_dir, test_data_prefix
+from mypy.test.data import parse_test_cases, DataDrivenTestCase, DataSuite
+from mypy.test.helpers import assert_string_arrays_equal
+
+
+files = [
+ 'diff.test'
+]
+
+
+class ASTDiffSuite(DataSuite):
+ def __init__(self, *, update_data: bool) -> None:
+ pass
+
+ @classmethod
+ def cases(cls) -> List[DataDrivenTestCase]:
+ c = [] # type: List[DataDrivenTestCase]
+ for f in files:
+ c += parse_test_cases(os.path.join(test_data_prefix, f),
+ None, test_temp_dir, True)
+ return c
+
+ def run_case(self, testcase: DataDrivenTestCase) -> None:
+ first_src = '\n'.join(testcase.input)
+ files_dict = dict(testcase.files)
+ second_src = files_dict['tmp/next.py']
+
+ messages1, files1 = self.build(first_src)
+ messages2, files2 = self.build(second_src)
+
+ a = []
+ if messages1:
+ a.extend(messages1)
+ if messages2:
+ a.append('== next ==')
+ a.extend(messages2)
+
+ diff = compare_symbol_tables(
+ '__main__',
+ files1['__main__'].names,
+ files2['__main__'].names)
+ for trigger in sorted(diff):
+ a.append(trigger)
+
+ assert_string_arrays_equal(
+ testcase.output, a,
+ 'Invalid output ({}, line {})'.format(testcase.file,
+ testcase.line))
+
+ def build(self, source: str) -> Tuple[List[str], Dict[str, MypyFile]]:
+ options = Options()
+ options.use_builtins_fixtures = True
+ options.show_traceback = True
+ try:
+ result = build.build(sources=[BuildSource('main', None, source)],
+ options=options,
+ alt_lib_path=test_temp_dir)
+ except CompileError as e:
+ # TODO: Is it okay to return None?
+ return e.messages, None
+ return result.errors, result.files
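
compare_symbol_tables returns the set of fully qualified names whose externally
visible properties differ, which this suite prints one per line; e.g. changing a
module-level 'x = 1' to "x = ''" yields {'__main__.x'} (see
testChangeTypeOfModuleAttribute in test-data/unit/diff.test below).
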
diff --git a/mypy/test/testfinegrained.py b/mypy/test/testfinegrained.py
new file mode 100644
index 000000000000..21afc8572b44
--- /dev/null
+++ b/mypy/test/testfinegrained.py
@@ -0,0 +1,119 @@
+"""Test cases for fine-grained incremental checking.
+
+Each test cases runs a batch build followed by one or more fine-grained
+incremental steps. We verify that each step produces the expected output.
+
+See the comment at the top of test-data/unit/fine-grained.test for more
+information.
+"""
+
+import os.path
+import re
+import shutil
+from typing import List, Tuple, Dict
+
+from mypy import build
+from mypy.build import BuildManager, BuildSource, Graph
+from mypy.errors import Errors, CompileError
+from mypy.nodes import Node, MypyFile, SymbolTable, SymbolTableNode, TypeInfo, Expression
+from mypy.options import Options
+from mypy.server.astmerge import merge_asts
+from mypy.server.subexpr import get_subexpressions
+from mypy.server.update import FineGrainedBuildManager
+from mypy.strconv import StrConv, indent
+from mypy.test.config import test_temp_dir, test_data_prefix
+from mypy.test.data import parse_test_cases, DataDrivenTestCase, DataSuite
+from mypy.test.helpers import assert_string_arrays_equal
+from mypy.test.testtypegen import ignore_node
+from mypy.types import TypeStrVisitor, Type
+from mypy.util import short_type
+
+
+files = [
+ 'fine-grained.test'
+]
+
+
+class FineGrainedSuite(DataSuite):
+ def __init__(self, *, update_data: bool) -> None:
+ pass
+
+ @classmethod
+ def cases(cls) -> List[DataDrivenTestCase]:
+ c = [] # type: List[DataDrivenTestCase]
+ for f in files:
+ c += parse_test_cases(os.path.join(test_data_prefix, f),
+ None, test_temp_dir, True)
+ return c
+
+ def run_case(self, testcase: DataDrivenTestCase) -> None:
+ main_src = '\n'.join(testcase.input)
+ messages, manager, graph = self.build(main_src)
+
+ a = []
+ if messages:
+ a.extend(messages)
+
+ fine_grained_manager = FineGrainedBuildManager(manager, graph)
+
+ steps = find_steps()
+ for changed_paths in steps:
+ modules = []
+ for module, path in changed_paths:
+ new_path = re.sub(r'\.[0-9]+$', '', path)
+ shutil.copy(path, new_path)
+ modules.append(module)
+
+ new_messages = fine_grained_manager.update(modules)
+ new_messages = [re.sub('^tmp' + re.escape(os.sep), '', message)
+ for message in new_messages]
+
+ a.append('==')
+ a.extend(new_messages)
+
+ # Normalize paths in test output (for Windows).
+ a = [line.replace('\\', '/') for line in a]
+
+ assert_string_arrays_equal(
+ testcase.output, a,
+ 'Invalid output ({}, line {})'.format(testcase.file,
+ testcase.line))
+
+ def build(self, source: str) -> Tuple[List[str], BuildManager, Graph]:
+ options = Options()
+ options.use_builtins_fixtures = True
+ options.show_traceback = True
+ try:
+ result = build.build(sources=[BuildSource('main', None, source)],
+ options=options,
+ alt_lib_path=test_temp_dir)
+ except CompileError as e:
+ # TODO: We need a manager and a graph in this case as well
+ assert False, str('\n'.join(e.messages))
+ return e.messages, None, None
+ return result.errors, result.manager, result.graph
+
+
+def find_steps() -> List[List[Tuple[str, str]]]:
+ """Return a list of build step representations.
+
+ Each build step is a list of (module id, path) tuples, and each
+ path is of form 'dir/mod.py.2' (where 2 is the step number).
+ """
+ steps = {} # type: Dict[int, List[Tuple[str, str]]]
+ for dn, dirs, files in os.walk(test_temp_dir):
+ dnparts = dn.split(os.sep)
+ assert dnparts[0] == test_temp_dir
+ del dnparts[0]
+ for filename in files:
+ m = re.match(r'.*\.([0-9]+)$', filename)
+ if m:
+ num = int(m.group(1))
+ assert num >= 2
+ name = re.sub(r'\.py.*', '', filename)
+ module = '.'.join(dnparts + [name])
+ module = re.sub(r'\.__init__$', '', module)
+ path = os.path.join(dn, filename)
+ steps.setdefault(num, []).append((module, path))
+ max_step = max(steps)
+ return [steps[num] for num in range(2, max_step + 1)]
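
As a concrete reading of find_steps: a test directory containing m.py, m.py.2,
m.py.3 and n.py.2 would produce two steps along the lines of
[[('m', 'tmp/m.py.2'), ('n', 'tmp/n.py.2')], [('m', 'tmp/m.py.3')]], so step 2
rewrites both modules and step 3 only m (module order within a step follows the
directory listing).
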
diff --git a/mypy/test/testmerge.py b/mypy/test/testmerge.py
new file mode 100644
index 000000000000..6802e3431091
--- /dev/null
+++ b/mypy/test/testmerge.py
@@ -0,0 +1,205 @@
+"""Test cases for AST merge (used for fine-grained incremental checking)"""
+
+import os.path
+import shutil
+from typing import List, Tuple, Dict
+
+from mypy import build
+from mypy.build import BuildManager, BuildSource, State
+from mypy.errors import Errors, CompileError
+from mypy.nodes import (
+ Node, MypyFile, SymbolTable, SymbolTableNode, TypeInfo, Expression
+)
+from mypy.options import Options
+from mypy.server.astmerge import merge_asts
+from mypy.server.subexpr import get_subexpressions
+from mypy.server.update import build_incremental_step, replace_modules_with_new_variants
+from mypy.strconv import StrConv, indent
+from mypy.test.config import test_temp_dir, test_data_prefix
+from mypy.test.data import parse_test_cases, DataDrivenTestCase, DataSuite
+from mypy.test.helpers import assert_string_arrays_equal
+from mypy.test.testtypegen import ignore_node
+from mypy.types import TypeStrVisitor, Type
+from mypy.util import short_type
+
+
+files = [
+ 'merge.test'
+]
+
+
+# Which data structures to dump in a test case?
+SYMTABLE = 'SYMTABLE'
+TYPEINFO = ' TYPEINFO'
+TYPES = 'TYPES'
+AST = 'AST'
+
+
+class ASTMergeSuite(DataSuite):
+ def __init__(self, *, update_data: bool) -> None:
+ self.str_conv = StrConv(show_ids=True)
+ self.id_mapper = self.str_conv.id_mapper
+ self.type_str_conv = TypeStrVisitor(self.id_mapper)
+
+ @classmethod
+ def cases(cls) -> List[DataDrivenTestCase]:
+ c = [] # type: List[DataDrivenTestCase]
+ for f in files:
+ c += parse_test_cases(os.path.join(test_data_prefix, f),
+ None, test_temp_dir, True)
+ return c
+
+ def run_case(self, testcase: DataDrivenTestCase) -> None:
+ name = testcase.name
+ # We use the test case name to decide which data structures to dump.
+ # Dumping everything would result in very verbose test cases.
+ if name.endswith('_symtable'):
+ kind = SYMTABLE
+ elif name.endswith('_typeinfo'):
+ kind = TYPEINFO
+ elif name.endswith('_types'):
+ kind = TYPES
+ else:
+ kind = AST
+
+ main_src = '\n'.join(testcase.input)
+ messages, manager, graph = self.build(main_src)
+
+ a = []
+ if messages:
+ a.extend(messages)
+
+ shutil.copy(os.path.join(test_temp_dir, 'target.py.next'),
+ os.path.join(test_temp_dir, 'target.py'))
+
+ a.extend(self.dump(manager.modules, graph, kind))
+
+ old_modules = dict(manager.modules)
+ old_subexpr = get_subexpressions(old_modules['target'])
+
+ new_file, new_types = self.build_increment(manager, 'target')
+ replace_modules_with_new_variants(manager,
+ graph,
+ old_modules,
+ {'target': new_file},
+ {'target': new_types})
+
+ a.append('==>')
+ a.extend(self.dump(manager.modules, graph, kind))
+
+ for expr in old_subexpr:
+ # Verify that old AST nodes are removed from the expression type map.
+ assert expr not in new_types
+
+ assert_string_arrays_equal(
+ testcase.output, a,
+ 'Invalid output ({}, line {})'.format(testcase.file,
+ testcase.line))
+
+ def build(self, source: str) -> Tuple[List[str], BuildManager, Dict[str, State]]:
+ options = Options()
+ options.use_builtins_fixtures = True
+ options.show_traceback = True
+ try:
+ result = build.build(sources=[BuildSource('main', None, source)],
+ options=options,
+ alt_lib_path=test_temp_dir)
+ except CompileError as e:
+ # TODO: Is it okay to return None?
+ return e.messages, None, {}
+ return result.errors, result.manager, result.graph
+
+ def build_increment(self, manager: BuildManager,
+ module_id: str) -> Tuple[MypyFile,
+ Dict[Expression, Type]]:
+ module_dict, type_maps = build_incremental_step(manager, [module_id])
+ return module_dict[module_id], type_maps[module_id]
+
+ def dump(self,
+ modules: Dict[str, MypyFile],
+ graph: Dict[str, State],
+ kind: str) -> List[str]:
+ if kind == AST:
+ return self.dump_asts(modules)
+ elif kind == TYPEINFO:
+ return self.dump_typeinfos(modules)
+ elif kind == SYMTABLE:
+ return self.dump_symbol_tables(modules)
+ elif kind == TYPES:
+ return self.dump_types(graph)
+ assert False, 'Invalid kind %s' % kind
+
+ def dump_asts(self, modules: Dict[str, MypyFile]) -> List[str]:
+ a = []
+ for m in sorted(modules):
+ if m == 'builtins':
+ # We don't support incremental checking of changes to builtins.
+ continue
+ s = modules[m].accept(self.str_conv)
+ a.extend(s.splitlines())
+ return a
+
+ def dump_symbol_tables(self, modules: Dict[str, MypyFile]) -> List[str]:
+ a = []
+ for id in sorted(modules):
+ if id == 'builtins':
+ # We don't support incremental checking of changes to builtins.
+ continue
+ a.extend(self.dump_symbol_table(id, modules[id].names))
+ return a
+
+ def dump_symbol_table(self, module_id: str, symtable: SymbolTable) -> List[str]:
+ a = ['{}:'.format(module_id)]
+ for name in sorted(symtable):
+ if name.startswith('__'):
+ continue
+ a.append(' {}: {}'.format(name, self.format_symbol_table_node(symtable[name])))
+ return a
+
+ def format_symbol_table_node(self, node: SymbolTableNode) -> str:
+ if node is None:
+ return 'None'
+ if isinstance(node.node, Node):
+ return '{}<{}>'.format(str(type(node.node).__name__),
+ self.id_mapper.id(node.node))
+ # TODO: type_override?
+ return '?'
+
+ def dump_typeinfos(self, modules: Dict[str, MypyFile]) -> List[str]:
+ a = []
+ for id in sorted(modules):
+ if id == 'builtins':
+ continue
+ a.extend(self.dump_typeinfos_recursive(modules[id].names))
+ return a
+
+ def dump_typeinfos_recursive(self, names: SymbolTable) -> List[str]:
+ a = []
+ for name, node in sorted(names.items(), key=lambda x: x[0]):
+ if isinstance(node.node, TypeInfo):
+ a.extend(self.dump_typeinfo(node.node))
+ a.extend(self.dump_typeinfos_recursive(node.node.names))
+ return a
+
+ def dump_typeinfo(self, info: TypeInfo) -> List[str]:
+ s = info.dump(str_conv=self.str_conv,
+ type_str_conv=self.type_str_conv)
+ return s.splitlines()
+
+ def dump_types(self, graph: Dict[str, State]) -> List[str]:
+ a = []
+ # To make the results repeatable, we try to generate unique and
+ # deterministic sort keys.
+ for module_id in sorted(graph):
+ if module_id == 'builtins':
+ continue
+ type_map = graph[module_id].type_checker.type_map
+ if type_map:
+ a.append('## {}'.format(module_id))
+ for expr in sorted(type_map, key=lambda n: (n.line, short_type(n),
+ str(n) + str(type_map[n]))):
+ typ = type_map[expr]
+ a.append('{}:{}: {}'.format(short_type(expr),
+ expr.line,
+ typ.accept(self.type_str_conv)))
+ return a
diff --git a/mypy/traverser.py b/mypy/traverser.py
index 8f3cffbca642..689bbc721a13 100644
--- a/mypy/traverser.py
+++ b/mypy/traverser.py
@@ -9,7 +9,7 @@
UnaryExpr, ListExpr, TupleExpr, DictExpr, SetExpr, IndexExpr,
GeneratorExpr, ListComprehension, ConditionalExpr, TypeApplication,
LambdaExpr, ComparisonExpr, OverloadedFuncDef, YieldFromExpr,
- YieldExpr
+ YieldExpr, StarExpr, BackquoteExpr, AwaitExpr
)
@@ -224,3 +224,12 @@ def visit_type_application(self, o: TypeApplication) -> None:
def visit_lambda_expr(self, o: LambdaExpr) -> None:
self.visit_func(o)
+
+ def visit_star_expr(self, o: StarExpr) -> None:
+ o.expr.accept(self)
+
+ def visit_backquote_expr(self, o: BackquoteExpr) -> None:
+ o.expr.accept(self)
+
+ def visit_await_expr(self, o: AwaitExpr) -> None:
+ o.expr.accept(self)
diff --git a/mypy/types.py b/mypy/types.py
index 19b4a3a795a8..1bba37724ced 100644
--- a/mypy/types.py
+++ b/mypy/types.py
@@ -13,8 +13,8 @@
INVARIANT, SymbolNode,
ARG_POS, ARG_OPT, ARG_STAR, ARG_STAR2, ARG_NAMED, ARG_NAMED_OPT,
)
-
from mypy.sharedparse import argument_elide_name
+from mypy.util import IdMapper
T = TypeVar('T')
@@ -1353,6 +1353,9 @@ class TypeStrVisitor(TypeVisitor[str]):
- Represent the NoneTyp type as None.
"""
+ def __init__(self, id_mapper: IdMapper = None) -> None:
+ self.id_mapper = id_mapper
+
def visit_unbound_type(self, t: UnboundType)-> str:
s = t.name + '?'
if t.args != []:
@@ -1390,6 +1393,8 @@ def visit_instance(self, t: Instance) -> str:
s += '*'
if t.args != []:
s += '[{}]'.format(self.list_str(t.args))
+ if self.id_mapper:
+ s += '<{}>'.format(self.id_mapper.id(t.type))
return s
def visit_type_var(self, t: TypeVarType) -> str:
@@ -1415,14 +1420,14 @@ def visit_callable_type(self, t: CallableType) -> str:
s += '**'
if t.arg_names[i]:
s += t.arg_names[i] + ': '
- s += str(t.arg_types[i])
+ s += t.arg_types[i].accept(self)
if t.arg_kinds[i] in (ARG_OPT, ARG_NAMED_OPT):
s += ' ='
s = '({})'.format(s)
if not isinstance(t.ret_type, NoneTyp):
- s += ' -> {}'.format(t.ret_type)
+ s += ' -> {}'.format(t.ret_type.accept(self))
if t.variables:
s = '{} {}'.format(t.variables, s)
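
With an id mapper attached, visit_instance appends the mapped id of the TypeInfo
after the usual type string, so an instance type might render as
'builtins.list[builtins.int<4>]<3>' (the numbers depend on id assignment order);
without a mapper the output is unchanged.
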
diff --git a/mypy/util.py b/mypy/util.py
index e5c9e5e0275f..1e8e31898d23 100644
--- a/mypy/util.py
+++ b/mypy/util.py
@@ -3,7 +3,7 @@
import re
import subprocess
from xml.sax.saxutils import escape
-from typing import TypeVar, List, Tuple, Optional, Sequence
+from typing import TypeVar, List, Tuple, Optional, Sequence, Dict
T = TypeVar('T')
@@ -37,13 +37,6 @@ def short_type(obj: object) -> str:
return t.split('.')[-1].rstrip("'>")
-def indent(s: str, n: int) -> str:
- """Indent all the lines in s (separated by Newlines) by n spaces."""
- s = ' ' * n + s
- s = s.replace('\n', '\n' + ' ' * n)
- return s
-
-
def array_repr(a: List[T]) -> List[str]:
"""Return the items of an array converted to strings using Repr."""
aa = [] # type: List[str]
@@ -52,35 +45,6 @@ def array_repr(a: List[T]) -> List[str]:
return aa
-def dump_tagged(nodes: Sequence[object], tag: str) -> str:
- """Convert an array into a pretty-printed multiline string representation.
-
- The format is
- tag(
- item1..
- itemN)
- Individual items are formatted like this:
- - arrays are flattened
- - pairs (str : array) are converted recursively, so that str is the tag
- - other items are converted to strings and indented
- """
- a = [] # type: List[str]
- if tag:
- a.append(tag + '(')
- for n in nodes:
- if isinstance(n, list):
- if n:
- a.append(dump_tagged(n, None))
- elif isinstance(n, tuple):
- s = dump_tagged(n[1], n[0])
- a.append(indent(s, 2))
- elif n:
- a.append(indent(str(n), 2))
- if tag:
- a[-1] += ')'
- return '\n'.join(a)
-
-
def find_python_encoding(text: bytes, pyversion: Tuple[int, int]) -> Tuple[str, int]:
"""PEP-263 for detecting Python file encoding"""
result = ENCODING_RE.match(text)
@@ -150,3 +114,23 @@ def write_junit_xml(dt: float, serious: bool, messages: List[str], path: str) ->
xml = ERROR_TEMPLATE.format(text=escape('\n'.join(messages)), time=dt)
with open(path, 'wb') as f:
f.write(xml.encode('utf-8'))
+
+
+class IdMapper:
+ """Generate integer ids for objects.
+
+ Unlike id(), these start from 0 and increment by 1, and ids won't
+ get reused across the life-time of IdMapper.
+
+ Assume objects don't redefine __eq__ or __hash__.
+ """
+
+ def __init__(self) -> None:
+ self.id_map = {} # type: Dict[object, int]
+ self.next_id = 0
+
+ def id(self, o: object) -> int:
+ if o not in self.id_map:
+ self.id_map[o] = self.next_id
+ self.next_id += 1
+ return self.id_map[o]
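
IdMapper in action; ids are dense, start at zero, and are stable for the lifetime
of the mapper:

    from mypy.util import IdMapper

    mapper = IdMapper()
    a, b = object(), object()
    assert mapper.id(a) == 0
    assert mapper.id(b) == 1
    assert mapper.id(a) == 0  # repeated lookups return the same id
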
diff --git a/runtests.py b/runtests.py
index 4be285b19cf9..83a6ffa0d3da 100755
--- a/runtests.py
+++ b/runtests.py
@@ -209,7 +209,12 @@ def add_imports(driver: Driver) -> None:
PYTEST_FILES = [os.path.join('mypy', 'test', '{}.py'.format(name)) for name in [
- 'testcheck', 'testextensions',
+ 'testcheck',
+ 'testextensions',
+ 'testdeps',
+ 'testdiff',
+ 'testfinegrained',
+ 'testmerge',
]]
diff --git a/test-data/unit/check-inference.test b/test-data/unit/check-inference.test
index 0457e49127d8..6eb38093f61a 100644
--- a/test-data/unit/check-inference.test
+++ b/test-data/unit/check-inference.test
@@ -1130,6 +1130,20 @@ def f(a: Callable[[int, int, int], int] = lambda *a, **k: 1):
pass
[builtins fixtures/dict.pyi]
+[case testLambdaDeferredSpecialCase]
+from typing import Callable
+
+class A:
+ def f(self) -> None:
+ h(lambda: self.x)
+
+ def g(self) -> None:
+ self.x = 1
+
+def h(x: Callable[[], int]) -> None:
+ pass
+
+
-- Boolean operators
-- -----------------
diff --git a/test-data/unit/deps.test b/test-data/unit/deps.test
new file mode 100644
index 000000000000..a3d65330330b
--- /dev/null
+++ b/test-data/unit/deps.test
@@ -0,0 +1,281 @@
+-- Test cases for generating dependencies between AST nodes.
+--
+-- The dependencies are used for fine-grained incremental checking.
+
+
+[case testCallFunction]
+def f() -> None:
+ g()
+def g() -> None:
+ pass
+[out]
+<m.g> -> m.f
+
+[case testCallMethod]
+def f(a: A) -> None:
+ a.g()
+class A:
+ def g(self) -> None: pass
+[out]
+<m.A.g> -> m.f
+<m.A> -> <m.f>, m.A, m.f
+
+[case testAccessAttribute]
+def f(a: A) -> None:
+ a.x
+class A:
+ def g(self) -> None:
+ self.x = 1
+[out]
+<m.A.x> -> m.A.g, m.f
+<m.A> -> <m.f>, m.A, m.f
+
+[case testConstructInstance]
+def f() -> None:
+ A()
+class A: pass
+[out]
+<m.A.__init__> -> m.f
+<m.A> -> m.A, m.f
+
+[case testAccessModuleAttribute]
+x = 1
+def f() -> None:
+ x
+[out]
+<m.x> -> m, m.f
+<builtins.int> -> m
+
+[case testAccessModuleAttribute2]
+import n
+def f() -> None:
+ n.x
+[file n.py]
+x = 1
+[out]
+<n.x> -> m.f
+<n> -> m, m.f
+
+[case testImport]
+import n
+[file n.py]
+x = 1
+[out]
+<n> -> m
+
+[case testCallImportedFunction]
+import n
+n.f()
+[file n.py]
+def f() -> None: pass
+[out]
+<n.f> -> m
+<n> -> m
+
+[case testCallImportedFunctionInFunction]
+import n
+def g() -> None:
+ n.f()
+[file n.py]
+def f() -> None: pass
+[out]
+<n.f> -> m.g
+<n> -> m, m.g
+
+[case testInheritanceSimple]
+class A:
+ pass
+class B(A):
+ pass
+[out]
+<m.A.__init__> -> <m.B.__init__>
+<m.A> -> m.A, m.B
+<m.B> -> m.B
+
+[case testInheritanceWithMethodAndAttribute]
+class A:
+ pass
+class B(A):
+ def f(self) -> None:
+ self.x = 1
+[out]
+<m.A.__init__> -> <m.B.__init__>
+<m.A.f> -> m.B.f
+<m.A.x> -> <m.B.x>
+<m.A> -> m.A, m.B
+<m.B.x> -> m.B.f
+<m.B> -> m.B
+
+[case testInheritanceWithMethodAndAttributeAndDeepHierarchy]
+class A:
+ pass
+class B(A):
+ pass
+class C(B):
+ def f(self) -> None:
+ self.x = 1
+[out]
+<m.A.__init__> -> <m.B.__init__>, <m.C.__init__>
+<m.A.f> -> m.C.f
+<m.A.x> -> <m.C.x>
+<m.A> -> m.A, m.B
+<m.B.__init__> -> <m.C.__init__>
+<m.B.f> -> m.C.f
+<m.B.x> -> <m.C.x>
+<m.B> -> m.B, m.C
+<m.C.x> -> m.C.f
+<m.C> -> m.C
+
+[case testInheritAttribute]
+import n
+class B(n.A):
+ def f(self) -> None:
+ a = 1
+ a = self.x
+[file n.py]
+class A:
+ def g(self) -> None:
+ self.x = 1
+[out]
+<m.B.x> -> m.B.f
+<m.B> -> m.B
+<n.A.__init__> -> <m.B.__init__>
+<n.A.f> -> m.B.f
+<n.A.g> -> <m.B.g>
+<n.A.x> -> <m.B.x>
+<n.A> -> m.B
+<n> -> m, m.B
+
+[case testInheritMethod]
+class A:
+ def g(self) -> None: pass
+class B(A):
+ def f(self) -> None:
+ self.g()
+[out]
+<m.A.__init__> -> <m.B.__init__>
+<m.A.f> -> m.B.f
+<m.A.g> -> <m.B.g>
+<m.A> -> m.A, m.B
+<m.B.g> -> m.B.f
+<m.B> -> m.B
+
+[case testPackage]
+import a.b
+def f() -> None:
+ a.b.g()
+[file a/__init__.py]
+[file a/b.py]
+def g() -> None: pass
+[out]
+<a.b.g> -> m.f
+<a.b> -> m, m.f
+<a> -> m.f
+
+[case testClassInPackage]
+import a.b
+def f(x: a.b.A) -> None:
+ x.g()
+ x.y
+[file a/__init__.py]
+[file a/b.py]
+class A:
+ def g(self) -> None:
+ self.y = 1
+[out]
+<a.b.A.g> -> m.f
+<a.b.A.y> -> m.f
+<a.b.A> -> <m.f>, m.f
+<a.b> -> m
+
+[case testPackage__init__]
+import a
+def f() -> None:
+ a.g()
+[file a/__init__.py]
+def g() -> None: pass
+[out]
+<a.g> -> m.f
+<a> -> m, m.f
+
+[case testClassInPackage__init__]
+import a
+def f(x: a.A) -> None:
+ x.g()
+ x.y
+[file a/__init__.py]
+class A:
+ def g(self) -> None:
+ self.y = 1
+[out]
+<a.A.g> -> m.f
+<a.A.y> -> m.f
+<a.A> -> <m.f>, m.f
+<a> -> m
+
+[case testConstructor]
+class A:
+ def __init__(self, x: int) -> None: pass
+def f() -> None:
+ A(1)
+[out]
+<m.A.__init__> -> m.f
+<m.A> -> m.A, m.f
+<builtins.int> -> <m.A.__init__>, m.A.__init__
+
+[case testImportFrom]
+from n import f
+
+def g() -> None:
+ f()
+[file n.py]
+def f() -> None: pass
+[out]
+<n.f> -> m, m.g
+
+[case testNestedClass]
+def f() -> None:
+ b = A.B()
+ b.f()
+class A:
+ class B:
+ def f(self) -> None: pass
+[out]
+<m.A.B.__init__> -> m.f
+<m.A.B.f> -> m.f
+<m.A.B> -> m.A.B, m.f
+<m.A> -> m.A, m.f
+
+[case testNestedClassAttribute]
+def f() -> None:
+ b = A.B()
+ b.x
+class A:
+ class B:
+ def f(self) -> None:
+ self.x = 1
+[out]
+<m.A.B.__init__> -> m.f
+<m.A.B.x> -> m.A.B.f, m.f
+<m.A.B> -> m.A.B, m.f
+<m.A> -> m.A, m.f
+
+[case testNestedClassInAnnotation]
+def f(x: A.B) -> None:
+ pass
+class A:
+ class B: pass
+[out]
+<m.A.B> -> <m.f>, m.A.B, m.f
+<m.A> -> m.A
+
+[case testNestedClassInAnnotation2]
+def f(x: A.B) -> None:
+ x.f()
+class A:
+ class B:
+ def f(self) -> None: pass
+[out]
+<m.A.B.f> -> m.f
+<m.A.B> -> <m.f>, m.A.B, m.f
+<m.A> -> m.A
diff --git a/test-data/unit/diff.test b/test-data/unit/diff.test
new file mode 100644
index 000000000000..638948c40312
--- /dev/null
+++ b/test-data/unit/diff.test
@@ -0,0 +1,267 @@
+-- Test cases for taking a diff of two module ASTs/symbol tables.
+-- The diffs are used for fine-grained incremental checking.
+
+--
+-- Module top-levels
+--
+
+[case testChangeTypeOfModuleAttribute]
+x = 1
+y = 1
+[file next.py]
+x = ''
+y = 1
+[out]
+__main__.x
+
+[case testChangeSignatureOfModuleFunction]
+def f(x: int) -> None:
+ pass
+def g(y: str) -> None:
+ pass
+[file next.py]
+def f(x: str) -> None:
+ x = ''
+def g(y: str) -> None:
+ y = ''
+[out]
+__main__.f
+
+[case testAddModuleAttribute]
+x = 1
+[file next.py]
+x = 1
+y = 1
+[out]
+__main__.y
+
+[case testRemoveModuleAttribute]
+x = 1
+y = 1
+[file next.py]
+x = 1
+[out]
+__main__.y
+
+--
+-- Classes
+--
+
+[case testChangeMethodSignature]
+class A:
+ def f(self) -> None: pass
+ def g(self) -> None: pass
+[file next.py]
+class A:
+ def f(self, x: int) -> None: pass
+ def g(self) -> None: pass
+[out]
+__main__.A.f
+
+[case testChangeAttributeType]
+class A:
+ def f(self) -> None:
+ self.x = 1
+ self.y = 1
+[file next.py]
+class A:
+ def f(self) -> None:
+ self.x = 1
+ self.y = ''
+[out]
+__main__.A.y
+
+[case testAddAttribute]
+class A: pass
+[file next.py]
+class A:
+ def f(self) -> None:
+ self.x = 1
+[out]
+__main__.A.f
+__main__.A.x
+
+[case testAddAttribute2]
+class A:
+ def f(self) -> None: pass
+[file next.py]
+class A:
+ def f(self) -> None:
+ self.x = 1
+[out]
+__main__.A.x
+
+[case testRemoveAttribute]
+class A:
+ def f(self) -> None:
+ self.x = 1
+[file next.py]
+class A: pass
+[out]
+__main__.A.f
+__main__.A.x
+
+[case testAddMethod]
+class A:
+ def f(self) -> None: pass
+[file next.py]
+class A:
+ def f(self) -> None: pass
+ def g(self) -> None: pass
+[out]
+__main__.A.g
+
+[case testRemoveMethod]
+class A:
+ def f(self) -> None: pass
+ def g(self) -> None: pass
+[file next.py]
+class A:
+ def f(self) -> None: pass
+[out]
+__main__.A.g
+
+[case testAddImport]
+import nn
+[file next.py]
+import n
+import nn
+[file n.py]
+x = 1
+[file nn.py]
+y = 1
+[out]
+__main__.n
+
+[case testRemoveImport]
+import n
+[file next.py]
+[file n.py]
+x = 1
+[out]
+__main__.n
+
+[case testChangeClassIntoFunction]
+class A: pass
+[file next.py]
+def A() -> None: pass
+[out]
+__main__.A
+
+[case testDeleteClass]
+class A: pass
+[file next.py]
+[out]
+__main__.A
+
+[case testAddBaseClass]
+class A: pass
+[file next.py]
+class B: pass
+class A(B): pass
+[out]
+__main__.A
+__main__.B
+
+[case testChangeBaseClass]
+class A: pass
+class B: pass
+class C(A): pass
+[file next.py]
+class A: pass
+class B: pass
+class C(B): pass
+[out]
+__main__.C
+
+[case testRemoveBaseClass]
+class A: pass
+class B(A): pass
+[file next.py]
+class A: pass
+class B: pass
+[out]
+__main__.B
+
+[case testRemoveClassFromMiddleOfMro]
+class A: pass
+class B(A): pass
+class C(B): pass
+[file next.py]
+class A: pass
+class B: pass
+class C(B): pass
+[out]
+__main__.B
+__main__.C
+
+[case testDifferenceInConstructor]
+class A:
+ def __init__(self) -> None: pass
+[file next.py]
+class A:
+ def __init__(self, x: int) -> None: pass
+[out]
+__main__.A.__init__
+
+[case testChangeSignatureOfMethodInNestedClass]
+class A:
+ class B:
+ def f(self) -> int: pass
+[file next.py]
+class A:
+ class B:
+ def f(self) -> str: pass
+[out]
+__main__.A.B.f
+
+[case testChangeTypeOfAttributeInNestedClass]
+class A:
+ class B:
+ def f(self) -> None:
+ self.x = 1
+[file next.py]
+class A:
+ class B:
+ def f(self) -> None:
+ self.x = ''
+[out]
+__main__.A.B.x
+
+[case testAddMethodToNestedClass]
+class A:
+ class B: pass
+[file next.py]
+class A:
+ class B:
+ def f(self) -> str: pass
+[out]
+__main__.A.B.f
+
+[case testAddNestedClass]
+class A: pass
+[file next.py]
+class A:
+ class B:
+ def f(self) -> None: pass
+[out]
+__main__.A.B
+
+[case testRemoveNestedClass]
+class A:
+ class B:
+ def f(self) -> None: pass
+[file next.py]
+class A: pass
+[out]
+__main__.A.B
+
+[case testChangeNestedClassToMethod]
+class A:
+ class B: pass
+[file next.py]
+class A:
+ def B(self) -> None: pass
+
+[out]
+__main__.A.B
diff --git a/test-data/unit/fine-grained.test b/test-data/unit/fine-grained.test
new file mode 100644
index 000000000000..881315998c4c
--- /dev/null
+++ b/test-data/unit/fine-grained.test
@@ -0,0 +1,872 @@
+-- Test cases for fine-grained incremental checking
+--
+-- Test cases may define multiple versions of a file
+-- (e.g. m.py, m.py.2). There is always an initial batch
+-- pass that processes all files present initially, followed
+-- by one or more fine-grained incremental passes that use
+-- alternative versions of files, if available. If a file
+-- just has a single .py version, it is used for all passes.
+
+-- TODO: what if version for some passes but not all
+
+-- Output is laid out like this:
+--
+-- [out]
+-- <optional output from batch pass>
+-- ==
+-- <output from first incremental pass>
+
+[case testReprocessFunction]
+import m
+def g() -> int:
+ return m.f()
+[file m.py]
+def f() -> int:
+ pass
+[file m.py.2]
+def f() -> str:
+ pass
+[out]
+==
+main:3: error: Incompatible return value type (got "str", expected "int")
+
+[case testReprocessTopLevel]
+import m
+m.f(1)
+def g() -> None: pass
+[file m.py]
+def f(x: int) -> None: pass
+[file m.py.2]
+def f(x: str) -> None: pass
+[out]
+==
+main:2: error: Argument 1 to "f" has incompatible type "int"; expected "str"
+
+[case testReprocessMethod]
+import m
+class B:
+ def f(self, a: m.A) -> None:
+ a.g() # E
+[file m.py]
+class A:
+ def g(self) -> None: pass
+[file m.py.2]
+class A:
+ def g(self, a: A) -> None: pass
+[out]
+==
+main:4: error: Too few arguments for "g" of "A"
+
+[case testFunctionMissingModuleAttribute]
+import m
+def h() -> None:
+ m.f(1)
+[file m.py]
+def f(x: int) -> None: pass
+[file m.py.2]
+def g(x: str) -> None: pass
+[builtins fixtures/fine_grained.pyi]
+[out]
+==
+main:3: error: "module" has no attribute "f"
+
+[case testTopLevelMissingModuleAttribute]
+import m
+m.f(1)
+def g() -> None: pass
+[file m.py]
+def f(x: int) -> None: pass
+[file m.py.2]
+def g(x: int) -> None: pass
+[builtins fixtures/fine_grained.pyi]
+[out]
+==
+main:2: error: "module" has no attribute "f"
+
+[case testClassChangedIntoFunction]
+import m
+def f(a: m.A) -> None:
+ pass
+[file m.py]
+class A: pass
+[file m.py.2]
+def A() -> None: pass
+[out]
+==
+main:2: error: Invalid type "m.A"
+
+[case testClassChangedIntoFunction2]
+import m
+class B:
+ def f(self, a: m.A) -> None: pass
+[file m.py]
+class A: pass
+[file m.py.2]
+def A() -> None: pass
+[out]
+==
+main:3: error: Invalid type "m.A"
+
+[case testAttributeTypeChanged]
+import m
+def f(a: m.A) -> int:
+ return a.x
+[file m.py]
+class A:
+ def f(self) -> None:
+ self.x = 1
+[file m.py.2]
+class A:
+ def f(self) -> None:
+ self.x = 'x'
+[out]
+==
+main:3: error: Incompatible return value type (got "str", expected "int")
+
+[case testAttributeRemoved]
+import m
+def f(a: m.A) -> int:
+ return a.x
+[file m.py]
+class A:
+ def f(self) -> None:
+ self.x = 1
+[file m.py.2]
+class A:
+ def f(self) -> None: pass
+[out]
+==
+main:3: error: "A" has no attribute "x"
+
+[case testVariableTypeBecomesInvalid]
+import m
+def f() -> None:
+ a = None # type: m.A
+[file m.py]
+class A: pass
+[file m.py.2]
+[out]
+==
+main:3: error: Name 'm.A' is not defined
+
+[case testTwoIncrementalSteps]
+import m
+import n
+[file m.py]
+def f() -> None: pass
+[file n.py]
+import m
+def g() -> None:
+ m.f() # E
+[file m.py.2]
+import n
+def f(x: int) -> None:
+ n.g() # E
+[file n.py.3]
+import m
+def g(a: str) -> None:
+ m.f('') # E
+[out]
+==
+n.py:3: error: Too few arguments for "f"
+==
+n.py:3: error: Argument 1 to "f" has incompatible type "str"; expected "int"
+m.py:3: error: Too few arguments for "g"
+
+[case testTwoRounds]
+import m
+def h(a: m.A) -> int:
+ return a.x
+[file m.py]
+import n
+class A:
+ def g(self, b: n.B) -> None:
+ self.x = b.f()
+[file n.py]
+class B:
+ def f(self) -> int: pass
+[file n.py.2]
+class B:
+ def f(self) -> str: pass
+[out]
+==
+main:3: error: Incompatible return value type (got "str", expected "int")
+
+[case testFixTypeError]
+import m
+def f(a: m.A) -> None:
+ a.f(a)
+[file m.py]
+class A:
+ def f(self, a: 'A') -> None: pass
+[file m.py.2]
+class A:
+ def f(self) -> None: pass
+[file m.py.3]
+class A:
+ def f(self, a: 'A') -> None: pass
+[out]
+==
+main:3: error: Too many arguments for "f" of "A"
+==
+
+[case testFixTypeError2]
+import m
+def f(a: m.A) -> None:
+ a.f()
+[file m.py]
+class A:
+ def f(self) -> None: pass
+[file m.py.2]
+class A:
+ def g(self) -> None: pass
+[file m.py.3]
+class A:
+ def f(self) -> None: pass
+[out]
+==
+main:3: error: "A" has no attribute "f"
+==
+
+[case testFixSemanticAnalysisError]
+import m
+def f() -> None:
+ m.A()
+[file m.py]
+class A: pass
+[file m.py.2]
+class B: pass
+[file m.py.3]
+class A: pass
+[builtins fixtures/fine_grained.pyi]
+[out]
+==
+main:3: error: "module" has no attribute "A"
+==
+
+[case testContinueToReportTypeCheckError]
+import m
+def f(a: m.A) -> None:
+ a.f()
+def g(a: m.A) -> None:
+ a.g()
+[file m.py]
+class A:
+ def f(self) -> None: pass
+ def g(self) -> None: pass
+[file m.py.2]
+class A: pass
+[file m.py.3]
+class A:
+ def f(self) -> None: pass
+[out]
+==
+main:3: error: "A" has no attribute "f"
+main:5: error: "A" has no attribute "g"
+==
+main:5: error: "A" has no attribute "g"
+
+[case testContinueToReportSemanticAnalysisError]
+import m
+def f() -> None:
+ m.A()
+def g() -> None:
+ m.B()
+[file m.py]
+class A: pass
+class B: pass
+[file m.py.2]
+[file m.py.3]
+class A: pass
+[builtins fixtures/fine_grained.pyi]
+[out]
+==
+main:3: error: "module" has no attribute "A"
+main:5: error: "module" has no attribute "B"
+==
+main:5: error: "module" has no attribute "B"
+
+[case testContinueToReportErrorAtTopLevel]
+import n
+import m
+m.A().f()
+[file n.py]
+import m
+m.A().g()
+[file m.py]
+class A:
+ def f(self) -> None: pass
+ def g(self) -> None: pass
+[file m.py.2]
+class A: pass
+[file m.py.3]
+class A:
+ def f(self) -> None: pass
+[out]
+==
+main:3: error: "A" has no attribute "f"
+n.py:2: error: "A" has no attribute "g"
+==
+n.py:2: error: "A" has no attribute "g"
+
+[case testContinueToReportErrorInMethod]
+import m
+class C:
+ def f(self, a: m.A) -> None:
+ a.f()
+ def g(self, a: m.A) -> None:
+ a.g()
+[file m.py]
+class A:
+ def f(self) -> None: pass
+ def g(self) -> None: pass
+[file m.py.2]
+class A: pass
+[file m.py.3]
+class A:
+ def f(self) -> None: pass
+[out]
+==
+main:4: error: "A" has no attribute "f"
+main:6: error: "A" has no attribute "g"
+==
+main:6: error: "A" has no attribute "g"
+
+[case testInitialBatchGeneratedError]
+import m
+def g() -> None:
+ m.f()
+def h() -> None:
+ m.g()
+[file m.py]
+def f(x: object) -> None: pass
+[file m.py.2]
+def f() -> None: pass
+[file m.py.3]
+def f() -> None: pass
+def g() -> None: pass
+[builtins fixtures/fine_grained.pyi]
+[out]
+main:3: error: Too few arguments for "f"
+main:5: error: "module" has no attribute "g"
+==
+main:5: error: "module" has no attribute "g"
+==
+
+[case testKeepReportingErrorIfNoChanges]
+import m
+def h() -> None:
+ m.g()
+[file m.py]
+[file m.py.2]
+[builtins fixtures/fine_grained.pyi]
+[out]
+main:3: error: "module" has no attribute "g"
+==
+main:3: error: "module" has no attribute "g"
+
+[case testFixErrorAndReintroduce]
+import m
+def h() -> None:
+ m.g()
+[file m.py]
+[file m.py.2]
+def g() -> None: pass
+[file m.py.3]
+[builtins fixtures/fine_grained.pyi]
+[out]
+main:3: error: "module" has no attribute "g"
+==
+==
+main:3: error: "module" has no attribute "g"
+
+[case testAddBaseClassMethodCausingInvalidOverride]
+import m
+class B(m.A):
+ def f(self) -> str: pass
+[file m.py]
+class A: pass
+[file m.py.2]
+class A:
+ def f(self) -> int: pass
+[out]
+==
+main:3: error: Return type of "f" incompatible with supertype "A"
+
+[case testModifyBaseClassMethodCausingInvalidOverride]
+import m
+class B(m.A):
+ def f(self) -> str: pass
+[file m.py]
+class A:
+ def f(self) -> str: pass
+[file m.py.2]
+class A:
+ def f(self) -> int: pass
+[out]
+==
+main:3: error: Return type of "f" incompatible with supertype "A"
+
+[case testAddBaseClassAttributeCausingErrorInSubclass]
+import m
+class B(m.A):
+ def a(self) -> None:
+ x = 1
+ x = self.x
+
+ def f(self) -> None:
+ self.x = 1
+
+ def z(self) -> None:
+ x = 1
+ x = self.x
+[file m.py]
+class A: pass
+[file m.py.2]
+class A:
+ def g(self) -> None:
+ self.x = 'a'
+[out]
+==
+main:5: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+main:8: error: Incompatible types in assignment (expression has type "int", variable has type "str")
+main:12: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[case testChangeBaseClassAttributeType]
+import m
+class B(m.A):
+ def f(self) -> None:
+ self.x = 1
+[file m.py]
+class A:
+ def g(self) -> None:
+ self.x = 1
+[file m.py.2]
+class A:
+ def g(self) -> None:
+ self.x = 'a'
+[out]
+==
+main:4: error: Incompatible types in assignment (expression has type "int", variable has type "str")
+
+[case testRemoveAttributeInBaseClass]
+import m
+class B(m.A):
+ def f(self) -> None:
+ a = 1
+ a = self.x
+[file m.py]
+class A:
+ def g(self) -> None:
+ self.x = 1
+[file m.py.2]
+class A: pass
+[out]
+==
+main:5: error: "B" has no attribute "x"
+
+[case testSignatureOfInheritedMethod]
+import m
+class B(m.A):
+ def f(self) -> None:
+ self.g()
+[file m.py]
+class A:
+ def g(self) -> None: pass
+[file m.py.2]
+class A:
+ def g(self, a: 'A') -> None: pass
+[out]
+==
+main:4: error: Too few arguments for "g" of "A"
+
+[case testRemoveBaseClass]
+import m
+class A(m.B):
+ def f(self) -> None:
+ self.g()
+ self.x
+ self.y = 1
+[file m.py]
+class C:
+ def g(self) -> None:
+ self.x = 1
+class B(C): pass
+[file m.py.2]
+class C: pass
+class B: pass
+[out]
+==
+main:4: error: "A" has no attribute "g"
+main:5: error: "A" has no attribute "x"
+
+[case testRemoveBaseClass2]
+import m
+class A(m.B):
+ def f(self) -> None:
+ self.g()
+ self.x
+ self.y = 1
+[file m.py]
+class C:
+ def g(self) -> None:
+ self.x = 1
+class B(C): pass
+[file m.py.2]
+class C:
+ def g(self) -> None:
+ self.x = 1
+class B: pass
+[out]
+==
+main:4: error: "A" has no attribute "g"
+main:5: error: "A" has no attribute "x"
+
+[case testChangeInPackage]
+import m.n
+def f() -> None:
+ m.n.g()
+[file m/__init__.py]
+[file m/n.py]
+def g() -> None: pass
+[file m/n.py.2]
+def g(x: int) -> None: pass
+[out]
+==
+main:3: error: Too few arguments for "g"
+
+[case testTriggerTargetInPackage]
+import m.n
+[file m/__init__.py]
+[file m/n.py]
+import a
+def f() -> None:
+ a.g()
+[file a.py]
+def g() -> None: pass
+[file a.py.2]
+def g(x: int) -> None: pass
+[out]
+==
+m/n.py:3: error: Too few arguments for "g"
+
+[case testChangeInPackage__init__]
+import m
+import m.n
+def f() -> None:
+ m.g()
+[file m/__init__.py]
+def g() -> None: pass
+[file m/__init__.py.2]
+def g(x: int) -> None: pass
+[file m/n.py]
+[out]
+==
+main:4: error: Too few arguments for "g"
+
+[case testTriggerTargetInPackage__init__]
+import m
+import m.n
+[file m/__init__.py]
+import a
+def f() -> None:
+ a.g()
+[file a.py]
+def g() -> None: pass
+[file a.py.2]
+def g(x: int) -> None: pass
+[file m/n.py]
+[out]
+==
+m/__init__.py:3: error: Too few arguments for "g"
+
+[case testModuleAttributeTypeChanges]
+import m
+def f() -> None:
+ x = 1
+ x = m.x
+[file m.py]
+x = 1
+[file m.py.2]
+x = ''
+[out]
+==
+main:4: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[case testTwoStepsDueToModuleAttribute]
+import m
+x = m.f()
+
+def g() -> None:
+ y = 1
+ y = x # E
+[file m.py]
+def f() -> int: pass
+[file m.py.2]
+def f() -> str: pass
+[out]
+==
+main:6: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[case testTwoStepsDueToMultipleNamespaces]
+import m
+
+x = m.f()
+
+def g() -> None:
+ xx = 1
+ xx = x
+
+class A:
+ def a(self) -> None:
+ self.y = m.f()
+ def b(self) -> None:
+ yy = 1
+ yy = self.y
+
+class B:
+ def c(self) -> None:
+ self.z = m.f()
+ def b(self) -> None:
+ zz = 1
+ zz = self.z
+[file m.py]
+def f() -> int: pass
+[file m.py.2]
+def f() -> str: pass
+[out]
+==
+main:7: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+main:14: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+main:21: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[case testConstructorSignatureChanged]
+import m
+
+def f() -> None:
+ m.A()
+[file m.py]
+class A:
+ def __init__(self) -> None: pass
+[file m.py.2]
+class A:
+ def __init__(self, x: int) -> None: pass
+[out]
+==
+main:4: error: Too few arguments for "A"
+
+[case testConstructorAdded]
+import m
+
+def f() -> None:
+ m.A()
+[file m.py]
+class A: pass
+[file m.py.2]
+class A:
+ def __init__(self, x: int) -> None: pass
+[out]
+==
+main:4: error: Too few arguments for "A"
+
+[case testConstructorDeleted]
+import m
+
+def f() -> None:
+ m.A(1)
+[file m.py]
+class A:
+ def __init__(self, x: int) -> None: pass
+[file m.py.2]
+class A: pass
+[out]
+==
+main:4: error: Too many arguments for "A"
+
+[case testBaseClassConstructorChanged]
+import m
+
+def f() -> None:
+ m.B()
+[file m.py]
+class A:
+ def __init__(self) -> None: pass
+class B(A): pass
+[file m.py.2]
+class A:
+ def __init__(self, x: int) -> None: pass
+class B(A): pass
+[out]
+==
+main:4: error: Too few arguments for "B"
+
+[case testImportFrom]
+from m import f
+
+def g() -> None:
+ f()
+[file m.py]
+def f() -> None: pass
+[file m.py.2]
+def f(x: int) -> None: pass
+[builtins fixtures/fine_grained.pyi]
+[out]
+==
+main:4: error: Too few arguments for "f"
+
+[case testImportFrom2]
+from m import f
+f()
+[file m.py]
+def f() -> None: pass
+[file m.py.2]
+def f(x: int) -> None: pass
+[out]
+==
+main:2: error: Too few arguments for "f"
+
+[case testImportFromTargetsClass]
+from m import C
+
+def f(c: C) -> None:
+ c.g()
+[file m.py]
+class C:
+ def g(self) -> None: pass
+[file m.py.2]
+class C:
+ def g(self, x: int) -> None: pass
+[out]
+==
+main:4: error: Too few arguments for "g" of "C"
+
+[case testImportFromTargetsVariable]
+from m import x
+
+def f() -> None:
+ y = 1
+ y = x
+[file m.py]
+x = 1
+[file m.py.2]
+x = ''
+[out]
+==
+main:5: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[case testImportFromSubmoduleOfPackage]
+from m import n
+
+def f() -> None:
+ n.g()
+[file m/__init__.py]
+[file m/n.py]
+def g() -> None: pass
+[file m/n.py.2]
+def g(x: int) -> None: pass
+[out]
+==
+main:4: error: Too few arguments for "g"
+
+[case testImportedFunctionGetsImported]
+from m import f
+
+def g() -> None:
+ f()
+[file m.py]
+from n import f
+[file n.py]
+def f() -> None: pass
+[file n.py.2]
+def f(x: int) -> None: pass
+[out]
+==
+main:4: error: Too few arguments for "f"
+
+[case testNestedClassMethodSignatureChanges]
+from m import A
+
+def f(x: A.B) -> None:
+ x.g()
+[file m.py]
+class A:
+ class B:
+ def g(self) -> None: pass
+[file m.py.2]
+class A:
+ class B:
+ def g(self, x: int) -> None: pass
+[out]
+==
+main:4: error: Too few arguments for "g" of "B"
+
+[case testNestedClassAttributeTypeChanges]
+from m import A
+
+def f(x: A.B) -> None:
+ z = 1
+ z = x.y
+[file m.py]
+class A:
+ class B:
+ def g(self) -> None:
+ self.y = 1
+[file m.py.2]
+class A:
+ class B:
+ def g(self) -> None:
+ self.y = ''
+[out]
+==
+main:5: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[case testReprocessMethodInNestedClass]
+from m import f
+
+class A:
+ class B:
+ def g(self) -> None:
+ x = 1
+ x = f()
+[file m.py]
+def f() -> int: pass
+[file m.py.2]
+def f() -> str: pass
+[out]
+==
+main:7: error: Incompatible types in assignment (expression has type "str", variable has type "int")
+
+[case testBaseClassDeleted]
+import m
+
+class A(m.C):
+ def f(self) -> None:
+ self.g() # No error here because m.C becomes an Any base class
+ def g(self) -> None:
+ self.x
+[file m.py]
+class C:
+ def g(self) -> None: pass
+[file m.py.2]
+[out]
+main:7: error: "A" has no attribute "x"
+==
+main:3: error: Name 'm.C' is not defined
+
+[case testBaseClassOfNestedClassDeleted]
+import m
+
+class A:
+ class B(m.C):
+ def f(self) -> None:
+ self.g() # No error here because m.C becomes an Any base class
+ def g(self) -> None:
+ self.x
+[file m.py]
+class C:
+ def g(self) -> None: pass
+[file m.py.2]
+[out]
+main:8: error: "B" has no attribute "x"
+==
+main:4: error: Name 'm.C' is not defined
diff --git a/test-data/unit/fixtures/fine_grained.pyi b/test-data/unit/fixtures/fine_grained.pyi
new file mode 100644
index 000000000000..5959df68835b
--- /dev/null
+++ b/test-data/unit/fixtures/fine_grained.pyi
@@ -0,0 +1,24 @@
+# Small stub for fine-grained incremental checking test cases
+#
+# TODO: Migrate to regular stubs once fine-grained incremental checking is
+# robust enough to handle them.
+
+class Any: pass
+
+class object:
+ def __init__(self) -> None: pass
+
+class type:
+ def __init__(self, x: Any) -> None: pass
+
+class int:
+ def __add__(self, other: 'int') -> 'int': pass
+class str:
+ def __add__(self, other: 'str') -> 'str': pass
+
+class float: pass
+class bytes: pass
+class tuple: pass
+class function: pass
+class ellipsis: pass
+class module: pass
diff --git a/test-data/unit/merge.test b/test-data/unit/merge.test
new file mode 100644
index 000000000000..a6d2a424f975
--- /dev/null
+++ b/test-data/unit/merge.test
@@ -0,0 +1,608 @@
+-- Test cases for AST merge (used for fine-grained incremental checking)
+
+[case testFunction]
+import target
+[file target.py]
+def f() -> int:
+ pass
+[file target.py.next]
+def f() -> int:
+ pass
+[out]
+MypyFile:1<0>(
+ Import:1(target))
+MypyFile:1<1>(
+ tmp/target.py
+ FuncDef:1<2>(
+ f
+ def () -> builtins.int<3>
+ Block:1<4>(
+ PassStmt:2<5>())))
+==>
+MypyFile:1<0>(
+ Import:1(target))
+MypyFile:1<1>(
+ tmp/target.py
+ FuncDef:1<2>(
+ f
+ def () -> builtins.int<3>
+ Block:1<6>(
+ PassStmt:2<7>())))
+
+[case testClass]
+import target
+[file target.py]
+class A:
+ def f(self, x: str) -> int:
+ pass
+[file target.py.next]
+class A:
+ def f(self, x: int) -> str:
+ pass
+[out]
+MypyFile:1<0>(
+ Import:1(target))
+MypyFile:1<1>(
+ tmp/target.py
+ ClassDef:1<2>(
+ A
+ FuncDef:2<3>(
+ f
+ Args(
+ Var(self)
+ Var(x))
+ def (self: target.A<4>, x: builtins.str<5>) -> builtins.int<6>
+ Block:2<7>(
+ PassStmt:3<8>()))))
+==>
+MypyFile:1<0>(
+ Import:1(target))
+MypyFile:1<1>(
+ tmp/target.py
+ ClassDef:1<9>(
+ A
+ FuncDef:2<3>(
+ f
+ Args(
+ Var(self)
+ Var(x))
+ def (self: target.A<4>, x: builtins.int<6>) -> builtins.str<5>
+ Block:2<10>(
+ PassStmt:3<11>()))))
+
+[case testClass_typeinfo]
+import target
+[file target.py]
+class A:
+ def f(self, x: str) -> int: pass
+ def g(self, x: str) -> int: pass
+[file target.py.next]
+class A:
+ def f(self, x: int) -> str: pass
+ def h(self, x: int) -> str: pass
+[out]
+TypeInfo<0>(
+ Name(target.A)
+ Bases(builtins.object<1>)
+ Mro(target.A<0>, builtins.object<1>)
+ Names(
+ f<2>
+ g<3>))
+==>
+TypeInfo<0>(
+ Name(target.A)
+ Bases(builtins.object<1>)
+ Mro(target.A<0>, builtins.object<1>)
+ Names(
+ f<2>
+ h<4>))
+
+[case testConstructInstance]
+import target
+[file target.py]
+class A:
+ def f(self) -> B:
+ return B()
+class B: pass
+[file target.py.next]
+class B: pass
+class A:
+ def f(self) -> B:
+ 1
+ return B()
+[out]
+MypyFile:1<0>(
+ Import:1(target))
+MypyFile:1<1>(
+ tmp/target.py
+ ClassDef:1<2>(
+ A
+ FuncDef:2<3>(
+ f
+ Args(
+ Var(self))
+ def (self: target.A<4>) -> target.B<5>
+ Block:2<6>(
+ ReturnStmt:3<7>(
+ CallExpr:3<8>(
+ NameExpr(B [target.B<5>])
+ Args())))))
+ ClassDef:4<9>(
+ B
+ PassStmt:4<10>()))
+==>
+MypyFile:1<0>(
+ Import:1(target))
+MypyFile:1<1>(
+ tmp/target.py
+ ClassDef:1<11>(
+ B
+ PassStmt:1<12>())
+ ClassDef:2<13>(
+ A
+ FuncDef:3<3>(
+ f
+ Args(
+ Var(self))
+ def (self: target.A<4>) -> target.B<5>
+ Block:3<14>(
+ ExpressionStmt:4<15>(
+ IntExpr(1))
+ ReturnStmt:5<16>(
+ CallExpr:5<17>(
+ NameExpr(B [target.B<5>])
+ Args()))))))
+
+[case testCallMethod]
+import target
+[file target.py]
+class A:
+ def f(self) -> None:
+ self.f()
+[file target.py.next]
+class A:
+ def f(self) -> None:
+ self.f()
+[out]
+MypyFile:1<0>(
+ Import:1(target))
+MypyFile:1<1>(
+ tmp/target.py
+ ClassDef:1<2>(
+ A
+ FuncDef:2<3>(
+ f
+ Args(
+ Var(self))
+ def (self: target.A<4>)
+ Block:2<5>(
+ ExpressionStmt:3<6>(
+ CallExpr:3<7>(
+ MemberExpr:3<8>(
+ NameExpr(self [l<9>])
+ f)
+ Args()))))))
+==>
+MypyFile:1<0>(
+ Import:1(target))
+MypyFile:1<1>(
+ tmp/target.py
+ ClassDef:1<10>(
+ A
+ FuncDef:2<3>(
+ f
+ Args(
+ Var(self))
+ def (self: target.A<4>)
+ Block:2<11>(
+ ExpressionStmt:3<12>(
+ CallExpr:3<13>(
+ MemberExpr:3<14>(
+ NameExpr(self [l<15>])
+ f)
+ Args()))))))
+
+[case testClassAttribute]
+import target
+[file target.py]
+class A:
+ def f(self) -> None:
+ self.x = 1
+ self.x
+[file target.py.next]
+class A:
+ def f(self) -> None:
+ self.x = 1
+ self.x
+[out]
+MypyFile:1<0>(
+ Import:1(target))
+MypyFile:1<1>(
+ tmp/target.py
+ ClassDef:1<2>(
+ A
+ FuncDef:2<3>(
+ f
+ Args(
+ Var(self))
+ def (self: target.A<4>)
+ Block:2<5>(
+ AssignmentStmt:3<6>(
+ MemberExpr:3<8>(
+ NameExpr(self [l<9>])
+ x*<7>)
+ IntExpr(1))
+ ExpressionStmt:4<10>(
+ MemberExpr:4<11>(
+ NameExpr(self [l<9>])
+ x))))))
+==>
+MypyFile:1<0>(
+ Import:1(target))
+MypyFile:1<1>(
+ tmp/target.py
+ ClassDef:1<12>(
+ A
+ FuncDef:2<3>(
+ f
+ Args(
+ Var(self))
+ def (self: target.A<4>)
+ Block:2<13>(
+ AssignmentStmt:3<14>(
+ MemberExpr:3<15>(
+ NameExpr(self [l<16>])
+ x*<7>)
+ IntExpr(1))
+ ExpressionStmt:4<17>(
+ MemberExpr:4<18>(
+ NameExpr(self [l<16>])
+ x))))))
+
+[case testClassAttribute_typeinfo]
+import target
+[file target.py]
+class A:
+ def f(self) -> None:
+ self.x = 1
+ self.x
+ self.y = A()
+[file target.py.next]
+class A:
+ def f(self) -> None:
+ self.x = 1
+ self.x
+ self.y = A()
+[out]
+TypeInfo<0>(
+ Name(target.A)
+ Bases(builtins.object<1>)
+ Mro(target.A<0>, builtins.object<1>)
+ Names(
+ f<2>
+ x<3> (builtins.int<4>)
+ y<5> (target.A<0>)))
+==>
+TypeInfo<0>(
+ Name(target.A)
+ Bases(builtins.object<1>)
+ Mro(target.A<0>, builtins.object<1>)
+ Names(
+ f<2>
+ x<3> (builtins.int<4>)
+ y<5> (target.A<0>)))
+
+[case testFunction_symtable]
+import target
+[file target.py]
+def f() -> int:
+ pass
+[file target.py.next]
+def f() -> int:
+ pass
+[out]
+__main__:
+ target: MypyFile<0>
+target:
+ f: FuncDef<1>
+==>
+__main__:
+ target: MypyFile<0>
+target:
+ f: FuncDef<1>
+
+[case testClass_symtable]
+import target
+[file target.py]
+class A: pass
+class B: pass
+[file target.py.next]
+class A: pass
+class C: pass
+[out]
+__main__:
+ target: MypyFile<0>
+target:
+ A: TypeInfo<1>
+ B: TypeInfo<2>
+==>
+__main__:
+ target: MypyFile<0>
+target:
+ A: TypeInfo<1>
+ C: TypeInfo<3>
+
+[case testTopLevelExpression]
+import target
+[file target.py]
+class A: pass
+A()
+[file target.py.next]
+class A: pass
+class B: pass
+A()
+B()
+[out]
+MypyFile:1<0>(
+ Import:1(target))
+MypyFile:1<1>(
+ tmp/target.py
+ ClassDef:1<2>(
+ A
+ PassStmt:1<3>())
+ ExpressionStmt:2<4>(
+ CallExpr:2<5>(
+ NameExpr(A [target.A<6>])
+ Args())))
+==>
+MypyFile:1<0>(
+ Import:1(target))
+MypyFile:1<1>(
+ tmp/target.py
+ ClassDef:1<7>(
+ A
+ PassStmt:1<8>())
+ ClassDef:2<9>(
+ B
+ PassStmt:2<10>())
+ ExpressionStmt:3<11>(
+ CallExpr:3<12>(
+ NameExpr(A [target.A<6>])
+ Args()))
+ ExpressionStmt:4<13>(
+ CallExpr:4<14>(
+ NameExpr(B [target.B<15>])
+ Args())))
+
+[case testExpression_types]
+import target
+[file target.py]
+class A: pass
+def f(a: A) -> None:
+ 1
+ a
+[file target.py.next]
+class A: pass
+def f(a: A) -> None:
+ a
+ 1
+[out]
+## target
+IntExpr:3: builtins.int<0>
+NameExpr:4: target.A<1>
+==>
+## target
+NameExpr:3: target.A<1>
+IntExpr:4: builtins.int<0>
+
+[case testClassAttribute_types]
+import target
+[file target.py]
+class A:
+ def f(self) -> None:
+ self.x = A()
+ self.x
+ self.y = 1
+ self.y
+[file target.py.next]
+class A:
+ def f(self) -> None:
+ self.y = 1
+ self.y
+ self.x = A()
+ self.x
+[out]
+## target
+CallExpr:3: target.A<0>
+MemberExpr:3: target.A<0>
+NameExpr:3: def () -> target.A<0>
+NameExpr:3: target.A<0>
+MemberExpr:4: target.A<0>
+NameExpr:4: target.A<0>
+IntExpr:5: builtins.int<1>
+MemberExpr:5: builtins.int<1>
+NameExpr:5: target.A<0>
+MemberExpr:6: builtins.int<1>
+NameExpr:6: target.A<0>
+==>
+## target
+IntExpr:3: builtins.int<1>
+MemberExpr:3: builtins.int<1>
+NameExpr:3: target.A<0>
+MemberExpr:4: builtins.int<1>
+NameExpr:4: target.A<0>
+CallExpr:5: target.A<0>
+MemberExpr:5: target.A<0>
+NameExpr:5: def () -> target.A<0>
+NameExpr:5: target.A<0>
+MemberExpr:6: target.A<0>
+NameExpr:6: target.A<0>
+
+[case testMethod_types]
+import target
+[file target.py]
+class A:
+ def f(self) -> A:
+ return self.f()
+[file target.py.next]
+class A:
+ # Extra line to change line numbers
+ def f(self) -> A:
+ return self.f()
+[out]
+## target
+CallExpr:3: target.A<0>
+MemberExpr:3: def () -> target.A<0>
+NameExpr:3: target.A<0>
+==>
+## target
+CallExpr:4: target.A<0>
+MemberExpr:4: def () -> target.A<0>
+NameExpr:4: target.A<0>
+
+[case testRenameFunction]
+import target
+[file target.py]
+def f() -> int: pass
+[file target.py.next]
+def g() -> int: pass
+[out]
+MypyFile:1<0>(
+ Import:1(target))
+MypyFile:1<1>(
+ tmp/target.py
+ FuncDef:1<2>(
+ f
+ def () -> builtins.int<3>
+ Block:1<4>(
+ PassStmt:1<5>())))
+==>
+MypyFile:1<0>(
+ Import:1(target))
+MypyFile:1<1>(
+ tmp/target.py
+ FuncDef:1<6>(
+ g
+ def () -> builtins.int<3>
+ Block:1<7>(
+ PassStmt:1<8>())))
+
+[case testRenameFunction_symtable]
+import target
+[file target.py]
+def f() -> int: pass
+[file target.py.next]
+def g() -> int: pass
+[out]
+__main__:
+ target: MypyFile<0>
+target:
+ f: FuncDef<1>
+==>
+__main__:
+ target: MypyFile<0>
+target:
+ g: FuncDef<2>
+
+[case testMergeWithBaseClass_typeinfo]
+import target
+[file target.py]
+class A: pass
+class B(A):
+ def f(self) -> None: pass
+[file target.py.next]
+class C: pass
+class A: pass
+class B(A):
+ def f(self) -> None: pass
+[out]
+TypeInfo<0>(
+ Name(target.A)
+ Bases(builtins.object<1>)
+ Mro(target.A<0>, builtins.object<1>)
+ Names())
+TypeInfo<2>(
+ Name(target.B)
+ Bases(target.A<0>)
+ Mro(target.B<2>, target.A<0>, builtins.object<1>)
+ Names(
+ f<3>))
+==>
+TypeInfo<0>(
+ Name(target.A)
+ Bases(builtins.object<1>)
+ Mro(target.A<0>, builtins.object<1>)
+ Names())
+TypeInfo<2>(
+ Name(target.B)
+ Bases(target.A<0>)
+ Mro(target.B<2>, target.A<0>, builtins.object<1>)
+ Names(
+ f<3>))
+TypeInfo<4>(
+ Name(target.C)
+ Bases(builtins.object<1>)
+ Mro(target.C<4>, builtins.object<1>)
+ Names())
+
+[case testModuleAttribute]
+import target
+[file target.py]
+x = 1
+[file target.py.next]
+x = 2
+[out]
+MypyFile:1<0>(
+ Import:1(target))
+MypyFile:1<1>(
+ tmp/target.py
+ AssignmentStmt:1<2>(
+ NameExpr(x [target.x<3>])
+ IntExpr(1)
+ builtins.int<4>))
+==>
+MypyFile:1<0>(
+ Import:1(target))
+MypyFile:1<1>(
+ tmp/target.py
+ AssignmentStmt:1<5>(
+ NameExpr(x [target.x<3>])
+ IntExpr(2)
+ builtins.int<4>))
+
+[case testNestedClassMethod_typeinfo]
+import target
+[file target.py]
+class A:
+ class B:
+ def f(self) -> None: pass
+[file target.py.next]
+class A:
+ class B:
+ def f(self) -> None: pass
+[out]
+TypeInfo<0>(
+ Name(target.A)
+ Bases(builtins.object<1>)
+ Mro(target.A<0>, builtins.object<1>)
+ Names(
+ B<2>))
+TypeInfo<2>(
+ Name(target.A.B)
+ Bases(builtins.object<1>)
+ Mro(target.A.B<2>, builtins.object<1>)
+ Names(
+ f<3>))
+==>
+TypeInfo<0>(
+ Name(target.A)
+ Bases(builtins.object<1>)
+ Mro(target.A<0>, builtins.object<1>)
+ Names(
+ B<2>))
+TypeInfo<2>(
+ Name(target.A.B)
+ Bases(builtins.object<1>)
+ Mro(target.A.B<2>, builtins.object<1>)
+ Names(
+ f<3>))
diff --git a/test-data/unit/semanal-typeinfo.test b/test-data/unit/semanal-typeinfo.test
index 6bb62e1c57ce..098ce0b114ad 100644
--- a/test-data/unit/semanal-typeinfo.test
+++ b/test-data/unit/semanal-typeinfo.test
@@ -9,6 +9,7 @@ TypeInfoMap(
__main__.c : TypeInfo(
Name(__main__.c)
Bases(builtins.object)
+ Mro(__main__.c, builtins.object)
Names()))
[case testClassWithMethod]
@@ -19,6 +20,7 @@ TypeInfoMap(
__main__.c : TypeInfo(
Name(__main__.c)
Bases(builtins.object)
+ Mro(__main__.c, builtins.object)
Names(
f)))
@@ -32,6 +34,7 @@ TypeInfoMap(
__main__.c : TypeInfo(
Name(__main__.c)
Bases(builtins.object)
+ Mro(__main__.c, builtins.object)
Names(
__init__
y
@@ -45,10 +48,12 @@ TypeInfoMap(
__main__.base : TypeInfo(
Name(__main__.base)
Bases(builtins.object)
+ Mro(__main__.base, builtins.object)
Names())
__main__.c : TypeInfo(
Name(__main__.c)
Bases(__main__.base)
+ Mro(__main__.c, __main__.base, builtins.object)
Names()))
[case testClassAndAbstractClass]
@@ -62,10 +67,12 @@ TypeInfoMap(
__main__.c : TypeInfo(
Name(__main__.c)
Bases(__main__.i)
+ Mro(__main__.c, __main__.i, builtins.object)
Names())
__main__.i : TypeInfo(
Name(__main__.i)
Bases(builtins.object)
+ Mro(__main__.i, builtins.object)
Names()))
[case testAttributeWithoutType]
@@ -76,5 +83,6 @@ TypeInfoMap(
__main__.A : TypeInfo(
Name(__main__.A)
Bases(builtins.object)
+ Mro(__main__.A, builtins.object)
Names(
a)))