Merge branch 'master' into import-cycle
* master: (32 commits)
  Fix some fine-grained cache/fswatcher problems (python#4560)
  Sync typeshed (python#4559)
  Add _cached suffix to test cases in fine-grained tests with cache (python#4558)
  Add back support for simplified fine-grained logging (python#4557)
  Type checking of class decorators (python#4544)
  Sync typeshed (python#4556)
  When loading from a fine-grained cache, use the real path, not the cached (python#4555)
  Switch all of the fine-grained debug logging to use manager.log (python#4550)
  Caching for fine-grained incremental mode (python#4483)
  Fix --warn-return-any for NotImplemented (python#4545)
  Remove myunit (python#4369)
  Store line numbers of imports in the cache metadata (python#4533)
  README.md: Fix a typo (python#4529)
  Enable generation and caching of fine-grained dependencies from normal runs (python#4526)
  Move argument parsing for the fine-grained flag into the main arg parsing code (python#4524)
  Don't warn about unrecognized options starting with 'x_' (python#4522)
  stubgen: don't append star arg when args list already has varargs appended (python#4518)
  Handle TypedDict in diff and deps (python#4510)
  Fix Options.__repr__ to not infinite recurse (python#4514)
  Fix some fine-grained incremental bugs with newly imported files (python#4502)
  ...
carljm committed Feb 9, 2018
2 parents 2255e1c + f0cd049 commit bc57349
Showing 81 changed files with 1,838 additions and 900 deletions.
2 changes: 1 addition & 1 deletion README.md
@@ -14,7 +14,7 @@ sure you've found a bug please search our issue trackers for a
duplicate before filing a new issue:

- [mypy tracker](https://github.com/python/mypy/issues)
-for mypy isues
+for mypy issues
- [typeshed tracker](https://github.com/python/typeshed/issues)
for issues with specific modules
- [typing tracker](https://github.com/python/typing/issues)
86 changes: 80 additions & 6 deletions mypy/build.py
@@ -52,6 +52,14 @@
from mypy.version import __version__
from mypy.plugin import Plugin, DefaultPlugin, ChainedPlugin
from mypy.defaults import PYTHON3_VERSION_MIN
+from mypy.server.deps import get_dependencies
+
+
+# Switch to True to produce debug output related to fine-grained incremental
+# mode only that is useful during development. This produces only a subset of
+# output compared to --verbose output. We use a global flag to enable this so
+# that it's easy to enable this when running tests.
+DEBUG_FINE_GRAINED = False


PYTHON_EXTENSIONS = ['.pyi', '.py']
@@ -392,6 +400,7 @@ def default_lib_path(data_dir: str,
('child_modules', List[str]), # all submodules of the given module
('options', Optional[Dict[str, object]]), # build options
('dep_prios', List[int]),
+('dep_lines', List[int]),
('interface_hash', str), # hash representing the public interface
('version_id', str), # mypy version for cache invalidation
('ignore_all', bool), # if errors were ignored
@@ -417,6 +426,7 @@ def cache_meta_from_dict(meta: Dict[str, Any], data_json: str) -> CacheMeta:
meta.get('child_modules', []),
meta.get('options'),
meta.get('dep_prios', []),
+meta.get('dep_lines', []),
meta.get('interface_hash', ''),
meta.get('version_id', sentinel),
meta.get('ignore_all', True),
@@ -731,6 +741,17 @@ def log(self, *message: str) -> None:
print(file=sys.stderr)
sys.stderr.flush()

+def log_fine_grained(self, *message: str) -> None:
+    if self.options.verbosity >= 1:
+        self.log('fine-grained:', *message)
+    elif DEBUG_FINE_GRAINED:
+        # Output log in a simplified format that is quick to browse.
+        if message:
+            print(*message, file=sys.stderr)
+        else:
+            print(file=sys.stderr)
+        sys.stderr.flush()
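As a quick illustration of the two output paths, here is a standalone sketch that mirrors this method (not mypy API; the call site and trigger name are invented): messages ride on the normal verbose log when verbosity is at least 1, while flipping the module-level DEBUG_FINE_GRAINED flag alone yields the terse form that is quick to scan in test runs.

import sys

DEBUG_FINE_GRAINED = True  # mirrors the module-level switch introduced above

def log_fine_grained_demo(verbosity: int, *message: str) -> None:
    # Same gating as BuildManager.log_fine_grained: verbose logging wins,
    # otherwise the debug flag prints the bare message to stderr.
    if verbosity >= 1:
        print('LOG:  fine-grained:', *message, file=sys.stderr)
    elif DEBUG_FINE_GRAINED:
        print(*message, file=sys.stderr)
        sys.stderr.flush()

log_fine_grained_demo(0, 'triggered:', '<m.f>')  # prints just: triggered: <m.f>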

def trace(self, *message: str) -> None:
if self.options.verbosity >= 2:
print('TRACE:', *message, file=sys.stderr)
@@ -1039,7 +1060,8 @@ def find_cache_meta(id: str, path: str, manager: BuildManager) -> Optional[Cache
# Ignore cache if generated by an older mypy version.
if ((m.version_id != manager.version_id and not manager.options.skip_version_check)
or m.options is None
-        or len(m.dependencies) != len(m.dep_prios)):
+        or len(m.dependencies) != len(m.dep_prios)
+        or len(m.dependencies) != len(m.dep_lines)):
manager.log('Metadata abandoned for {}: new attributes are missing'.format(id))
return None

@@ -1127,6 +1149,17 @@ def validate_meta(meta: Optional[CacheMeta], id: str, path: Optional[str],
if not stat.S_ISREG(st.st_mode):
manager.log('Metadata abandoned for {}: file {} does not exist'.format(id, path))
return None

+# When we are using a fine-grained cache, we want our initial
+# build() to load all of the cache information and then do a
+# fine-grained incremental update to catch anything that has
+# changed since the cache was generated. We *don't* want to do a
+# coarse-grained incremental rebuild, so we accept the cache
+# metadata even if it doesn't match the source file.
+if manager.options.use_fine_grained_cache:
+    manager.log('Using potentially stale metadata for {}'.format(id))
+    return meta

size = st.st_size
if size != meta.size:
manager.log('Metadata abandoned for {}: file {} has different size'.format(id, path))
@@ -1156,6 +1189,7 @@ def validate_meta(meta: Optional[CacheMeta], id: str, path: Optional[str],
'options': (manager.options.clone_for_module(id)
.select_options_affecting_cache()),
'dep_prios': meta.dep_prios,
+'dep_lines': meta.dep_lines,
'interface_hash': meta.interface_hash,
'version_id': manager.version_id,
'ignore_all': meta.ignore_all,
@@ -1183,8 +1217,9 @@ def compute_hash(text: str) -> str:


def write_cache(id: str, path: str, tree: MypyFile,
+serialized_fine_grained_deps: Dict[str, List[str]],
dependencies: List[str], suppressed: List[str],
-child_modules: List[str], dep_prios: List[int],
+child_modules: List[str], dep_prios: List[int], dep_lines: List[int],
old_interface_hash: str, source_hash: str,
ignore_all: bool, manager: BuildManager) -> Tuple[str, Optional[CacheMeta]]:
"""Write cache files for a module.
@@ -1201,6 +1236,7 @@ def write_cache(id: str, path: str, tree: MypyFile,
suppressed: module IDs which were suppressed as dependencies
child_modules: module IDs which are this package's direct submodules
dep_prios: priorities (parallel array to dependencies)
+dep_lines: import line locations (parallel array to dependencies)
old_interface_hash: the hash from the previous version of the data cache file
source_hash: the hash of the source code
ignore_all: the ignore_all flag for this module
@@ -1221,7 +1257,9 @@ def write_cache(id: str, path: str, tree: MypyFile,
assert os.path.dirname(meta_json) == parent

# Serialize data and analyze interface
-data = tree.serialize()
+data = {'tree': tree.serialize(),
+        'fine_grained_deps': serialized_fine_grained_deps,
+        }
if manager.options.debug_cache:
data_str = json.dumps(data, indent=2, sort_keys=True)
else:
@@ -1282,6 +1320,7 @@ def write_cache(id: str, path: str, tree: MypyFile,
'child_modules': child_modules,
'options': options.select_options_affecting_cache(),
'dep_prios': dep_prios,
+'dep_lines': dep_lines,
'interface_hash': interface_hash,
'version_id': manager.version_id,
'ignore_all': ignore_all,
@@ -1523,6 +1562,8 @@ class State:
# Whether the module has an error or any of its dependencies have one.
transitive_error = False

+fine_grained_deps = None  # type: Dict[str, Set[str]]

# Type checker used for checking this file. Use type_checker() for
# access and to construct this on demand.
_type_checker = None # type: Optional[TypeChecker]
@@ -1551,6 +1592,7 @@ def __init__(self,
self.id = id or '__main__'
self.options = manager.options.clone_for_module(self.id)
self._type_checker = None
+self.fine_grained_deps = {}
if not path and source is None:
assert id is not None
file_id = id
@@ -1626,8 +1668,10 @@ def __init__(self,
assert len(self.meta.dependencies) == len(self.meta.dep_prios)
self.priorities = {id: pri
for id, pri in zip(self.meta.dependencies, self.meta.dep_prios)}
+assert len(self.meta.dependencies) == len(self.meta.dep_lines)
+self.dep_line_map = {id: line
+                     for id, line in zip(self.meta.dependencies, self.meta.dep_lines)}
self.child_modules = set(self.meta.child_modules)
-self.dep_line_map = {}
else:
# Parse the file (and then some) to get the dependencies.
self.parse_file()
@@ -1734,7 +1778,9 @@ def load_tree(self) -> None:
with open(self.meta.data_json) as f:
data = json.load(f)
# TODO: Assert data file wasn't changed.
-self.tree = MypyFile.deserialize(data)
+self.tree = MypyFile.deserialize(data['tree'])
+self.fine_grained_deps = {k: set(v) for k, v in data['fine_grained_deps'].items()}

self.manager.modules[self.id] = self.tree
self.manager.add_stats(fresh_trees=1)

@@ -1977,6 +2023,19 @@ def _patch_indirect_dependencies(self,
elif dep not in self.suppressed and dep in self.manager.missing_modules:
self.suppressed.append(dep)

+def compute_fine_grained_deps(self) -> None:
+    assert self.tree is not None
+    if '/typeshed/' in self.xpath or self.xpath.startswith('typeshed/'):
+        # We don't track changes to typeshed -- the assumption is that they are only changed
+        # as part of mypy updates, which will invalidate everything anyway.
+        #
+        # TODO: Not a reliable test, as we could have a package named typeshed.
+        # TODO: Consider relaxing this -- maybe allow some typeshed changes to be tracked.
+        return
+    self.fine_grained_deps = get_dependencies(target=self.tree,
+                                              type_map=self.type_map(),
+                                              python_version=self.options.python_version)
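Roughly speaking, the resulting map goes from a trigger naming a changed definition to the set of targets that must be reprocessed when it fires. The example below is an illustrative guess at the shape for a trivial module, not captured mypy output:

# For a module m containing:
#     def f() -> int: ...
#     def g() -> int: return f()
# the dependency map would relate f's trigger to its dependents, roughly:
fine_grained_deps = {'<m.f>': {'m.g'}}  # if f's interface changes, re-check g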

def valid_references(self) -> Set[str]:
assert self.ancestors is not None
valid_refs = set(self.dependencies + self.suppressed + self.ancestors)
@@ -2001,10 +2060,12 @@ def write_cache(self) -> None:
self.mark_interface_stale(on_errors=True)
return
dep_prios = self.dependency_priorities()
+dep_lines = self.dependency_lines()
new_interface_hash, self.meta = write_cache(
self.id, self.path, self.tree,
+{k: list(v) for k, v in self.fine_grained_deps.items()},
list(self.dependencies), list(self.suppressed), list(self.child_modules),
-dep_prios, self.interface_hash, self.source_hash, self.ignore_all,
+dep_prios, dep_lines, self.interface_hash, self.source_hash, self.ignore_all,
self.manager)
if new_interface_hash == self.interface_hash:
self.manager.log("Cached module {} has same interface".format(self.id))
@@ -2016,6 +2077,9 @@ def write_cache(self) -> None:
def dependency_priorities(self) -> List[int]:
return [self.priorities.get(dep, PRI_HIGH) for dep in self.dependencies]

+def dependency_lines(self) -> List[int]:
+    return [self.dep_line_map.get(dep, 1) for dep in self.dependencies]
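A worked example of the parallel-array convention used throughout the cache metadata (module names and line numbers invented for illustration): dep_prios and dep_lines line up index-by-index with dependencies; State.__init__ zips them back into the priorities and dep_line_map dicts, and dependency_lines() falls back to line 1 for any dependency without a recorded import line.

dependencies = ['os', 'typing', 'mypy.nodes']
dep_prios = [10, 5, 10]   # parallel to dependencies
dep_lines = [3, 1, 17]    # import line of each dependency

# Cache load, as in State.__init__:
priorities = {dep: pri for dep, pri in zip(dependencies, dep_prios)}
dep_line_map = {dep: line for dep, line in zip(dependencies, dep_lines)}

# Cache write, as in dependency_lines() above:
assert [dep_line_map.get(dep, 1) for dep in dependencies] == dep_lines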

def generate_unused_ignore_notes(self) -> None:
if self.options.warn_unused_ignores:
self.manager.errors.generate_unused_ignore_notes(self.xpath)
@@ -2348,6 +2412,14 @@ def process_graph(graph: Graph, manager: BuildManager) -> None:
manager.log("Processing SCC of size %d (%s) as %s" % (size, scc_str, fresh_msg))
process_stale_scc(graph, scc, manager)

+# If we are running in fine-grained incremental mode with caching,
+# we always process fresh SCCs so that we have all of the symbol
+# tables and fine-grained dependencies available.
+if manager.options.use_fine_grained_cache:
+    for prev_scc in fresh_scc_queue:
+        process_fresh_scc(graph, prev_scc, manager)
+    fresh_scc_queue = []

sccs_left = len(fresh_scc_queue)
nodes_left = sum(len(scc) for scc in fresh_scc_queue)
manager.add_stats(sccs_left=sccs_left, nodes_left=nodes_left)
@@ -2534,6 +2606,8 @@ def process_stale_scc(graph: Graph, scc: List[str], manager: BuildManager) -> No
graph[id].transitive_error = True
for id in stale:
graph[id].finish_passes()
+if manager.options.cache_fine_grained or manager.options.fine_grained_incremental:
+    graph[id].compute_fine_grained_deps()
graph[id].generate_unused_ignore_notes()
manager.flush_errors(manager.errors.file_messages(graph[id].xpath), False)
graph[id].write_cache()
43 changes: 37 additions & 6 deletions mypy/checker.py
@@ -40,7 +40,7 @@
from mypy.sametypes import is_same_type, is_same_types
from mypy.messages import MessageBuilder, make_inferred_type_note
import mypy.checkexpr
-from mypy.checkmember import map_type_from_supertype, bind_self, erase_to_bound
+from mypy.checkmember import map_type_from_supertype, bind_self, erase_to_bound, type_object_type
from mypy import messages
from mypy.subtypes import (
is_subtype, is_equivalent, is_proper_subtype, is_more_precise,
@@ -59,6 +59,7 @@
from mypy.meet import is_overlapping_types
from mypy.options import Options
from mypy.plugin import Plugin, CheckerPluginInterface
+from mypy.sharedparse import BINARY_MAGIC_METHODS

from mypy import experiments

@@ -1254,6 +1255,29 @@ def visit_class_def(self, defn: ClassDef) -> None:
# Otherwise we've already found errors; more errors are not useful
self.check_multiple_inheritance(typ)

+if defn.decorators:
+    sig = type_object_type(defn.info, self.named_type)
+    # Decorators are applied in reverse order.
+    for decorator in reversed(defn.decorators):
+        if (isinstance(decorator, CallExpr)
+                and isinstance(decorator.analyzed, PromoteExpr)):
+            # _promote is a special type checking related construct.
+            continue
+
+        dec = self.expr_checker.accept(decorator)
+        temp = self.temp_node(sig)
+        fullname = None
+        if isinstance(decorator, RefExpr):
+            fullname = decorator.fullname
+
+        # TODO: Figure out how to have clearer error messages.
+        # (e.g. "class decorator must be a function that accepts a type."
+        sig, _ = self.expr_checker.check_call(dec, [temp],
+                                              [nodes.ARG_POS], defn,
+                                              callable_name=fullname)
+        # TODO: Apply the sig to the actual TypeInfo so we can handle decorators
+        # that completely swap out the type. (e.g. Callable[[Type[A]], Type[B]])
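A small hypothetical example of what this new check covers: each decorator is called with the class's type object, so a decorator whose parameter type cannot accept it is now reported.

from typing import Type, TypeVar

T = TypeVar('T')

def register(cls: Type[T]) -> Type[T]:
    # A well-typed class decorator: accepts and returns a type object.
    return cls

def takes_int(x: int) -> int:
    return x

@register      # accepted: Type[A] matches Type[T]
class A: ...

@takes_int     # flagged by this check: a class is not an int
class B: ...

Per the TODO above, the resulting signature is not yet applied back to the TypeInfo, so decorators that swap out the class's type entirely are still not modeled.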

def check_protocol_variance(self, defn: ClassDef) -> None:
"""Check that protocol definition is compatible with declared
variances of type variables.
@@ -1940,13 +1964,13 @@ def infer_variable_type(self, name: Var, lvalue: Lvalue,
# partial type which will be made more specific later. A partial type
# gets generated in assignment like 'x = []' where item type is not known.
if not self.infer_partial_type(name, lvalue, init_type):
-self.fail(messages.NEED_ANNOTATION_FOR_VAR, context)
+self.msg.need_annotation_for_var(name, context)
self.set_inference_error_fallback_type(name, lvalue, init_type, context)
elif (isinstance(lvalue, MemberExpr) and self.inferred_attribute_types is not None
and lvalue.def_var and lvalue.def_var in self.inferred_attribute_types
and not is_same_type(self.inferred_attribute_types[lvalue.def_var], init_type)):
# Multiple, inconsistent types inferred for an attribute.
-self.fail(messages.NEED_ANNOTATION_FOR_VAR, context)
+self.msg.need_annotation_for_var(name, context)
name.type = AnyType(TypeOfAny.from_error)
else:
# Infer type of the target.
@@ -2179,8 +2203,11 @@ def check_return_stmt(self, s: ReturnStmt) -> None:
if isinstance(typ, AnyType):
# (Unless you asked to be warned in that case, and the
# function is not declared to return Any)
-if (self.options.warn_return_any and not self.current_node_deferred and
-        not is_proper_subtype(AnyType(TypeOfAny.special_form), return_type)):
+if (self.options.warn_return_any
+        and not self.current_node_deferred
+        and not is_proper_subtype(AnyType(TypeOfAny.special_form), return_type)
+        and not (defn.name() in BINARY_MAGIC_METHODS and
+                 is_literal_not_implemented(s.expr))):
self.msg.incorrectly_returning_any(return_type, s)
return

@@ -3101,7 +3128,7 @@ def enter_partial_types(self) -> Iterator[None]:
var.type = NoneTyp()
else:
if var not in self.partial_reported:
-self.msg.fail(messages.NEED_ANNOTATION_FOR_VAR, context)
+self.msg.need_annotation_for_var(var, context)
self.partial_reported.add(var)
var.type = AnyType(TypeOfAny.from_error)

@@ -3232,6 +3259,10 @@ def remove_optional(typ: Type) -> Type:
return typ


+def is_literal_not_implemented(n: Expression) -> bool:
+    return isinstance(n, NameExpr) and n.fullname == 'builtins.NotImplemented'
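A minimal illustration of the behavior this helper enables in check_return_stmt above: returning the NotImplemented literal from a binary magic method no longer triggers --warn-return-any, matching the standard dunder protocol.

class C:
    def __eq__(self, other: object) -> bool:
        if not isinstance(other, C):
            # __eq__ is in BINARY_MAGIC_METHODS and this is the literal
            # NotImplemented, so --warn-return-any stays quiet here.
            return NotImplemented
        return True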


def builtin_item_type(tp: Type) -> Optional[Type]:
"""Get the item type of a builtin container.
2 changes: 1 addition & 1 deletion mypy/checkexpr.py
@@ -145,7 +145,7 @@ def analyze_ref_expr(self, e: RefExpr, lvalue: bool = False) -> Type:
partial_types = self.chk.find_partial_types(node)
if partial_types is not None and not self.chk.current_node_deferred:
context = partial_types[node]
-self.msg.fail(messages.NEED_ANNOTATION_FOR_VAR, context)
+self.msg.need_annotation_for_var(node, context)
result = AnyType(TypeOfAny.special_form)
elif isinstance(node, FuncDef):
# Reference to a global function.
4 changes: 2 additions & 2 deletions mypy/checkmember.py
@@ -366,7 +366,7 @@ def freeze_type_vars(member_type: Type) -> None:


def handle_partial_attribute_type(typ: PartialType, is_lvalue: bool, msg: MessageBuilder,
-context: Context) -> Type:
+node: SymbolNode) -> Type:
if typ.type is None:
# 'None' partial type. It has a well-defined type -- 'None'.
# In an lvalue context we want to preserver the knowledge of
Expand All @@ -375,7 +375,7 @@ def handle_partial_attribute_type(typ: PartialType, is_lvalue: bool, msg: Messag
return NoneTyp()
return typ
else:
-msg.fail(messages.NEED_ANNOTATION_FOR_VAR, context)
+msg.need_annotation_for_var(node, node)
return AnyType(TypeOfAny.from_error)

