Foundation for fine-grained incremental checking (python#2838)
See python#2838 for details.
JukkaL authored and gvanrossum committed Apr 5, 2017
1 parent 72565f0 commit 737434b
Showing 32 changed files with 4,283 additions and 129 deletions.
24 changes: 13 additions & 11 deletions mypy/build.py
@@ -65,8 +65,9 @@ class BuildResult:
errors: List of error messages.
"""

def __init__(self, manager: 'BuildManager') -> None:
def __init__(self, manager: 'BuildManager', graph: Graph) -> None:
self.manager = manager
self.graph = graph
self.files = manager.modules
self.types = manager.all_types
self.errors = manager.errors.messages()
@@ -184,8 +185,8 @@ def build(sources: List[BuildSource],
)

try:
dispatch(sources, manager)
return BuildResult(manager)
graph = dispatch(sources, manager)
return BuildResult(manager, graph)
finally:
manager.log("Build finished in %.3f seconds with %d modules, %d types, and %d errors" %
(time.time() - manager.start_time,
@@ -474,7 +475,7 @@ def parse_file(self, id: str, path: str, source: str, ignore_errors: bool) -> My
return tree

def module_not_found(self, path: str, line: int, id: str) -> None:
self.errors.set_file(path)
self.errors.set_file(path, id)
stub_msg = "(Stub files are from https://github.com/python/typeshed)"
if ((self.options.python_version[0] == 2 and moduleinfo.is_py2_std_lib_module(id)) or
(self.options.python_version[0] >= 3 and moduleinfo.is_py3_std_lib_module(id))):
@@ -1230,7 +1231,7 @@ def skipping_ancestor(self, id: str, path: str, ancestor_for: 'State') -> None:
# so we'd need to cache the decision.
manager = self.manager
manager.errors.set_import_context([])
manager.errors.set_file(ancestor_for.xpath)
manager.errors.set_file(ancestor_for.xpath, ancestor_for.id)
manager.errors.report(-1, -1, "Ancestor package '%s' ignored" % (id,),
severity='note', only_once=True)
manager.errors.report(-1, -1,
@@ -1242,7 +1243,7 @@ def skipping_module(self, id: str, path: str) -> None:
manager = self.manager
save_import_context = manager.errors.import_context()
manager.errors.set_import_context(self.caller_state.import_context)
manager.errors.set_file(self.caller_state.xpath)
manager.errors.set_file(self.caller_state.xpath, self.caller_state.id)
line = self.caller_line
manager.errors.report(line, 0,
"Import of '%s' ignored" % (id,),
@@ -1429,7 +1430,7 @@ def parse_file(self) -> None:
continue
if id == '':
# Must be from a relative import.
manager.errors.set_file(self.xpath)
manager.errors.set_file(self.xpath, self.id)
manager.errors.report(line, 0,
"No parent module -- cannot perform relative import",
blocker=True)
@@ -1545,20 +1546,21 @@ def write_cache(self) -> None:
self.interface_hash = new_interface_hash


def dispatch(sources: List[BuildSource], manager: BuildManager) -> None:
def dispatch(sources: List[BuildSource], manager: BuildManager) -> Graph:
manager.log("Mypy version %s" % __version__)
graph = load_graph(sources, manager)
if not graph:
print("Nothing to do?!")
return
return graph
manager.log("Loaded graph with %d nodes" % len(graph))
if manager.options.dump_graph:
dump_graph(graph)
return
return graph
process_graph(graph, manager)
if manager.options.warn_unused_ignores:
# TODO: This could also be a per-module option.
manager.errors.generate_unused_ignore_notes()
return graph


class NodeInfo:
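
Taken together, dispatch() now returns the module graph and BuildResult carries it, so a caller of the public build() entry point can walk per-module State after a run. A minimal usage sketch, assuming the build() signature of this vintage; the file name and variable names are illustrative:

from mypy.build import build, BuildSource
from mypy.options import Options

result = build([BuildSource('prog.py', None, None)], Options())
# Graph is a Dict[str, State]; each State records its path, parse tree and dependencies.
for module_id, state in result.graph.items():
    print(module_id, state.path)
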
@@ -1633,7 +1635,7 @@ def load_graph(sources: List[BuildSource], manager: BuildManager) -> Graph:
except ModuleNotFound:
continue
if st.id in graph:
manager.errors.set_file(st.xpath)
manager.errors.set_file(st.xpath, st.id)
manager.errors.report(-1, -1, "Duplicate module named '%s'" % st.id)
manager.errors.raise_error()
graph[st.id] = st
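
The recurring edit in this file: Errors.set_file() now receives the fully qualified module id along with the file path, presumably so that reported errors can be attributed to modules, which fine-grained incremental mode needs. A sketch of the pattern with assumed local names (errors, xpath, module_id, line):

errors.set_file(xpath, module_id)   # was: errors.set_file(xpath)
errors.report(line, 0, "Import of '%s' ignored" % module_id, severity='note')
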
89 changes: 59 additions & 30 deletions mypy/checker.py
@@ -65,13 +65,16 @@
LAST_PASS = 1 # Pass numbers start at 0


# A node which is postponed to be type checked during the next pass.
# A node which is postponed to be processed during the next pass.
# This is used for both batch mode and fine-grained incremental mode.
DeferredNode = NamedTuple(
'DeferredNode',
[
('node', FuncItem),
# In batch mode only FuncDef and LambdaExpr are supported
('node', Union[FuncDef, LambdaExpr, MypyFile]),
('context_type_name', Optional[str]), # Name of the surrounding class (for error messages)
('active_class', Optional[Type]), # And its type (for selftype handling)
('active_typeinfo', Optional[TypeInfo]), # And its TypeInfo (for semantic analysis
# self type handling)
])
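
For illustration, the widened tuple now admits whole modules as deferred targets alongside functions; a short sketch with assumed names (tree: a MypyFile, func: a FuncDef defined in class C, info: C's TypeInfo):

DeferredNode(func, 'C', info)    # batch mode: a function plus its class name and TypeInfo
DeferredNode(tree, None, None)   # fine-grained mode: re-process a module's top level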


Expand Down Expand Up @@ -167,7 +170,7 @@ def check_first_pass(self) -> None:
Deferred functions will be processed by check_second_pass().
"""
self.errors.set_file(self.path)
self.errors.set_file(self.path, self.tree.fullname())
with self.enter_partial_types():
with self.binder.top_frame_context():
for d in self.tree.defs:
@@ -187,38 +190,57 @@ def check_first_pass(self) -> None:
self.fail(messages.ALL_MUST_BE_SEQ_STR.format(str_seq_s, all_s),
all_.node)

def check_second_pass(self) -> bool:
def check_second_pass(self, todo: List[DeferredNode] = None) -> bool:
"""Run second or following pass of type checking.
This goes through deferred nodes, returning True if there were any.
"""
if not self.deferred_nodes:
if not todo and not self.deferred_nodes:
return False
self.errors.set_file(self.path)
self.errors.set_file(self.path, self.tree.fullname())
self.pass_num += 1
todo = self.deferred_nodes
if not todo:
todo = self.deferred_nodes
else:
assert not self.deferred_nodes
self.deferred_nodes = []
done = set() # type: Set[FuncItem]
for node, type_name, active_class in todo:
done = set() # type: Set[Union[FuncDef, LambdaExpr, MypyFile]]
for node, type_name, active_typeinfo in todo:
if node in done:
continue
# This is useful for debugging:
# print("XXX in pass %d, class %s, function %s" %
# (self.pass_num, type_name, node.fullname() or node.name()))
done.add(node)
with self.errors.enter_type(type_name) if type_name else nothing():
with self.scope.push_class(active_class) if active_class else nothing():
if isinstance(node, Statement):
self.accept(node)
elif isinstance(node, Expression):
self.expr_checker.accept(node)
else:
assert False
with self.scope.push_class(active_typeinfo) if active_typeinfo else nothing():
self.check_partial(node)
return True

def check_partial(self, node: Union[FuncDef, LambdaExpr, MypyFile]) -> None:
if isinstance(node, MypyFile):
self.check_top_level(node)
elif isinstance(node, LambdaExpr):
self.expr_checker.accept(node)
else:
self.accept(node)

def check_top_level(self, node: MypyFile) -> None:
"""Check only the top-level of a module, skipping function definitions."""
with self.enter_partial_types():
with self.binder.top_frame_context():
for d in node.defs:
# TODO: Type check class bodies.
if not isinstance(d, (FuncDef, ClassDef)):
d.accept(self)

assert not self.current_node_deferred
# TODO: Handle __all__

def handle_cannot_determine_type(self, name: str, context: Context) -> None:
node = self.scope.top_function()
if self.pass_num < LAST_PASS and node is not None:
if (self.pass_num < LAST_PASS and node is not None
and isinstance(node, (FuncDef, LambdaExpr))):
# Don't report an error yet. Just defer.
if self.errors.type_name:
type_name = self.errors.type_name[-1]
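
A sketch of how fine-grained incremental mode could drive this entry point with an explicit work list instead of the checker's own deferred nodes; checker, tree, func and info are assumed to come from an earlier build and are illustrative names:

todo = [
    DeferredNode(tree, None, None),   # re-check a module's top level via check_top_level()
    DeferredNode(func, 'C', info),    # re-check one method of class C via accept()
]
# With an explicit todo list, check_second_pass() asserts that no batch-mode
# deferrals are pending, then routes every node through check_partial().
checker.check_second_pass(todo)
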
@@ -635,7 +657,7 @@ def is_implicit_any(t: Type) -> bool:
for i in range(len(typ.arg_types)):
arg_type = typ.arg_types[i]

ref_type = self.scope.active_class()
ref_type = self.scope.active_self_type() # type: Optional[Type]
if (isinstance(defn, FuncDef) and ref_type is not None and i == 0
and not defn.is_static
and typ.arg_kinds[0] not in [nodes.ARG_STAR, nodes.ARG_STAR2]):
@@ -946,7 +968,7 @@ def check_method_override_for_base_with_name(
# The name of the method is defined in the base class.

# Construct the type of the overriding method.
typ = bind_self(self.function_type(defn), self.scope.active_class())
typ = bind_self(self.function_type(defn), self.scope.active_self_type())
# Map the overridden method type to subtype context so that
# it can be checked for compatibility.
original_type = base_attr.type
@@ -959,7 +981,7 @@ def check_method_override_for_base_with_name(
assert False, str(base_attr.node)
if isinstance(original_type, FunctionLike):
original = map_type_from_supertype(
bind_self(original_type, self.scope.active_class()),
bind_self(original_type, self.scope.active_self_type()),
defn.info, base)
# Check that the types are compatible.
# TODO overloaded signatures
@@ -1051,7 +1073,7 @@ def visit_class_def(self, defn: ClassDef) -> None:
old_binder = self.binder
self.binder = ConditionalTypeBinder()
with self.binder.top_frame_context():
with self.scope.push_class(fill_typevars(defn.info)):
with self.scope.push_class(defn.info):
self.accept(defn.defs)
self.binder = old_binder
if not defn.has_incompatible_baseclass:
@@ -1317,8 +1339,8 @@ def check_compatibility_super(self, lvalue: NameExpr, lvalue_type: Type, rvalue:
# Class-level function objects and classmethods become bound
# methods: the former to the instance, the latter to the
# class
base_type = bind_self(base_type, self.scope.active_class())
compare_type = bind_self(compare_type, self.scope.active_class())
base_type = bind_self(base_type, self.scope.active_self_type())
compare_type = bind_self(compare_type, self.scope.active_self_type())

# If we are a static method, ensure to also tell the
# lvalue it now contains a static method
@@ -1347,7 +1369,8 @@ def lvalue_type_from_base(self, expr_node: Var,

if base_type:
if not has_no_typevars(base_type):
instance = cast(Instance, self.scope.active_class())
# TODO: Handle TupleType, don't cast
instance = cast(Instance, self.scope.active_self_type())
itype = map_instance_to_supertype(instance, base)
base_type = expand_type_by_instance(base_type, itype)

@@ -2996,7 +3019,7 @@ def is_node_static(node: Node) -> Optional[bool]:

class Scope:
# We keep two stacks combined, to maintain the relative order
stack = None # type: List[Union[Type, FuncItem, MypyFile]]
stack = None # type: List[Union[TypeInfo, FuncItem, MypyFile]]

def __init__(self, module: MypyFile) -> None:
self.stack = [module]
@@ -3007,20 +3030,26 @@ def top_function(self) -> Optional[FuncItem]:
return e
return None

def active_class(self) -> Optional[Type]:
if isinstance(self.stack[-1], Type):
def active_class(self) -> Optional[TypeInfo]:
if isinstance(self.stack[-1], TypeInfo):
return self.stack[-1]
return None

def active_self_type(self) -> Optional[Union[Instance, TupleType]]:
info = self.active_class()
if info:
return fill_typevars(info)
return None

@contextmanager
def push_function(self, item: FuncItem) -> Iterator[None]:
self.stack.append(item)
yield
self.stack.pop()

@contextmanager
def push_class(self, t: Type) -> Iterator[None]:
self.stack.append(t)
def push_class(self, info: TypeInfo) -> Iterator[None]:
self.stack.append(info)
yield
self.stack.pop()
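
A sketch of the revised Scope contract: the stack stores TypeInfo objects rather than types, and active_self_type() derives the self type on demand. Here info is assumed to be the TypeInfo of a generic class C[T] and module_file a MypyFile:

scope = Scope(module_file)
with scope.push_class(info):               # push the TypeInfo itself, not fill_typevars(info)
    assert scope.active_class() is info
    self_type = scope.active_self_type()   # an Instance C[T], built via fill_typevars(info)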
