Let AstroidManager.clear_cache act on other caches (#1521)
Co-authored-by: Pierre Sassoulas <[email protected]>
jacobtylerwalls and Pierre-Sassoulas authored May 6, 2022
1 parent c2d2057 commit e7103e6
Showing 7 changed files with 83 additions and 36 deletions.
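
For orientation, here is a minimal sketch (not part of the commit) of the behaviour this change enables; it mirrors the new test added below and assumes the public `astroid.MANAGER` singleton plus the standard `functools.lru_cache` interface (`cache_info()` / `cache_clear()`):

import astroid
from astroid.interpreter.objectmodel import ObjectModel
from astroid.modutils import _cache_normalize_path_
from astroid.nodes.node_classes import LookupMixIn

caches = (LookupMixIn.lookup, _cache_normalize_path_, ObjectModel.attributes)

# Do a little inference work so the lru_caches pick up some entries.
call = astroid.extract_node("def answer(): return 42\nanswer()  #@")
list(call.infer())
before = [c.cache_info().currsize for c in caches]

# clear_cache() now clears these functools caches too, then re-bootstraps
# the builtins module (which may repopulate them slightly).
astroid.MANAGER.clear_cache()
after = [c.cache_info().currsize for c in caches]
print(before, after)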
38 changes: 13 additions & 25 deletions astroid/inference.py
@@ -23,8 +23,6 @@
Union,
)

import wrapt

from astroid import bases, decorators, helpers, nodes, protocols, util
from astroid.context import (
CallContext,
@@ -1037,42 +1035,32 @@ def infer_ifexp(self, context=None):
nodes.IfExp._infer = infer_ifexp # type: ignore[assignment]


# pylint: disable=dangerous-default-value
@wrapt.decorator
def _cached_generator(
func, instance: _FunctionDefT, args, kwargs, _cache={} # noqa: B006
):
node = instance
try:
return iter(_cache[func, id(node)])
except KeyError:
result = func(*args, **kwargs)
# Need to keep an iterator around
original, copy = itertools.tee(result)
_cache[func, id(node)] = list(copy)
return original


# When inferring a property, we instantiate a new `objects.Property` object,
# which in turn, because it inherits from `FunctionDef`, sets itself in the locals
# of the wrapping frame. This means that every time we infer a property, the locals
# are mutated with a new instance of the property. This is why we cache the result
# of the function's inference.
@_cached_generator
def infer_functiondef(
self: _FunctionDefT, context: Optional[InferenceContext] = None
) -> Generator[Union["Property", _FunctionDefT], None, InferenceErrorInfo]:
if not self.decorators or not bases._is_property(self):
yield self
return InferenceErrorInfo(node=self, context=context)

# When inferring a property, we instantiate a new `objects.Property` object,
# which in turn, because it inherits from `FunctionDef`, sets itself in the locals
# of the wrapping frame. This means that every time we infer a property, the locals
# are mutated with a new instance of the property. To avoid this, we detect this
# scenario and avoid passing the `parent` argument to the constructor.
parent_frame = self.parent.frame(future=True)
property_already_in_parent_locals = self.name in parent_frame.locals and any(
isinstance(val, objects.Property) for val in parent_frame.locals[self.name]
)

prop_func = objects.Property(
function=self,
name=self.name,
lineno=self.lineno,
parent=self.parent,
parent=self.parent if not property_already_in_parent_locals else None,
col_offset=self.col_offset,
)
if property_already_in_parent_locals:
prop_func.parent = self.parent
prop_func.postinit(body=[], args=self.args, doc_node=self.doc_node)
yield prop_func
return InferenceErrorInfo(node=self, context=context)
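As an aside, a rough illustration (not part of the commit, and simplified) of the locals mutation the comments above describe; it assumes `astroid.extract_node` and repeated `infer()` calls on the property's `FunctionDef`:

import astroid

prop = astroid.extract_node(
    """
    class A:
        @property
        def value(self):  #@
            return 42
    """
)
class_def = prop.parent
for _ in range(3):
    list(prop.infer())  # each call goes through infer_functiondef

# Instantiating objects.Property with a parent adds it to A's locals. The old
# _cached_generator wrapper hid this by caching the inference result; the new
# code instead detects an existing Property in the parent frame's locals, so
# repeated inference no longer appends new entries and this length stays stable.
print(len(class_def.locals["value"]))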
2 changes: 2 additions & 0 deletions astroid/interpreter/objectmodel.py
@@ -25,6 +25,7 @@
import os
import pprint
import types
from functools import lru_cache
from typing import TYPE_CHECKING, List, Optional

import astroid
@@ -100,6 +101,7 @@ def __get__(self, instance, cls=None):
def __contains__(self, name):
return name in self.attributes()

@lru_cache() # noqa
def attributes(self) -> List[str]:
"""Get the attributes which are exported by this object model."""
return [o[LEN_OF_IMPL_PREFIX:] for o in dir(self) if o.startswith(IMPL_PREFIX)]
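For readers unfamiliar with the pattern: an `lru_cache`-decorated method keeps one cache shared by all instances on the class attribute, keyed by `(self, *args)`, and that shared wrapper is what `AstroidManager.clear_cache` resets below via `ObjectModel.attributes.cache_clear()`. A stand-alone sketch with a made-up class:

from functools import lru_cache

class Model:
    @lru_cache()  # one cache shared by all instances, keyed on (self, ...)
    def attributes(self):
        return [name for name in dir(self) if not name.startswith("_")]

m = Model()
m.attributes()                        # miss: computed and stored
m.attributes()                        # hit: served from the cache
print(Model.attributes.cache_info())  # CacheInfo(hits=1, misses=1, ...)
Model.attributes.cache_clear()        # what clear_cache() does for ObjectModel.attributes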
17 changes: 17 additions & 0 deletions astroid/manager.py
@@ -18,6 +18,7 @@
from astroid.interpreter._import import spec
from astroid.modutils import (
NoSourceFile,
_cache_normalize_path_,
file_info_from_modpath,
get_source_file,
is_module_name_part_of_extension_package_whitelist,
@@ -365,8 +366,24 @@ def bootstrap(self):
def clear_cache(self) -> None:
"""Clear the underlying cache, bootstrap the builtins module and
re-register transforms."""
# import here because of cyclic imports
# pylint: disable=import-outside-toplevel
from astroid.inference_tip import clear_inference_tip_cache
from astroid.interpreter.objectmodel import ObjectModel
from astroid.nodes.node_classes import LookupMixIn

clear_inference_tip_cache()

self.astroid_cache.clear()
AstroidManager.brain["_transform"] = TransformVisitor()

for lru_cache in (
LookupMixIn.lookup,
_cache_normalize_path_,
ObjectModel.attributes,
):
lru_cache.cache_clear()

self.bootstrap()

# Reload brain plugins. During initialisation this is done in astroid.__init__.py
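The in-function imports above are the usual way to break an import cycle: the names are resolved at call time, after both modules have finished loading. A generic sketch of the same technique (module and function names are illustrative, not astroid's):

# a.py
def clear_everything():
    # Deferred import: b is imported when the function runs, not while a.py
    # is loading, which avoids the a -> b -> a cycle at import time.
    from b import clear_b_cache
    clear_b_cache()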
17 changes: 8 additions & 9 deletions astroid/modutils.py
@@ -22,8 +22,9 @@
import sys
import sysconfig
import types
from functools import lru_cache
from pathlib import Path
from typing import Dict, Set
from typing import Set

from astroid.const import IS_JYTHON, IS_PYPY
from astroid.interpreter._import import spec, util
@@ -138,21 +139,19 @@ def _handle_blacklist(blacklist, dirnames, filenames):
filenames.remove(norecurs)


_NORM_PATH_CACHE: Dict[str, str] = {}
@lru_cache()
def _cache_normalize_path_(path: str) -> str:
return _normalize_path(path)


def _cache_normalize_path(path: str) -> str:
"""Normalize path with caching."""
# _module_file calls abspath on every path in sys.path every time it's
# called; on a larger codebase this easily adds up to half a second just
# assembling path components. This cache alleviates that.
try:
return _NORM_PATH_CACHE[path]
except KeyError:
if not path: # don't cache result for ''
return _normalize_path(path)
result = _NORM_PATH_CACHE[path] = _normalize_path(path)
return result
if not path: # don't cache result for ''
return _normalize_path(path)
return _cache_normalize_path_(path)


def load_module_from_name(dotted_name: str) -> types.ModuleType:
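The split into `_cache_normalize_path_` (cached) and `_cache_normalize_path` (thin wrapper) keeps the uncacheable case out of the cache: "" resolves relative to the current working directory, which can change between calls. A generic restatement of the pattern (names and the normalization body are illustrative, not astroid's):

import os
from functools import lru_cache

def _normalize(path: str) -> str:
    # stand-in for astroid's _normalize_path
    return os.path.normcase(os.path.abspath(path))

@lru_cache()
def _cached_normalize(path: str) -> str:
    return _normalize(path)

def normalize(path: str) -> str:
    """Normalize with caching, except for "" whose result depends on the cwd."""
    if not path:
        return _normalize(path)
    return _cached_normalize(path)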
2 changes: 1 addition & 1 deletion astroid/nodes/node_classes.py
@@ -367,7 +367,7 @@ def get_children(self):
class LookupMixIn:
"""Mixin to look up a name in the right scope."""

@lru_cache(maxsize=None) # pylint: disable=cache-max-size-none # noqa
@lru_cache() # noqa
def lookup(self, name: str) -> typing.Tuple[str, typing.List[NodeNG]]:
"""Lookup where the given variable is assigned.
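One side effect of dropping `maxsize=None`: `lru_cache()` defaults to `maxsize=128`, so the lookup cache is now bounded and evicts least-recently-used entries instead of growing without limit. A quick plain-Python check of that difference:

from functools import lru_cache

@lru_cache(maxsize=None)
def unbounded(x):
    return x

@lru_cache()
def bounded(x):
    return x

print(unbounded.cache_info().maxsize)  # None -> never evicts
print(bounded.cache_info().maxsize)    # 128  -> LRU eviction kicks in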
39 changes: 39 additions & 0 deletions tests/unittest_manager.py
@@ -16,7 +16,9 @@
from astroid import manager, test_utils
from astroid.const import IS_JYTHON
from astroid.exceptions import AstroidBuildingError, AstroidImportError
from astroid.modutils import is_standard_module
from astroid.nodes import Const
from astroid.nodes.scoped_nodes import ClassDef

from . import resources

@@ -317,6 +319,43 @@ def test_borg(self) -> None:


class ClearCacheTest(unittest.TestCase, resources.AstroidCacheSetupMixin):
def test_clear_cache_clears_other_lru_caches(self) -> None:
lrus = (
astroid.nodes.node_classes.LookupMixIn.lookup,
astroid.modutils._cache_normalize_path_,
astroid.interpreter.objectmodel.ObjectModel.attributes,
)

# Get a baseline for the size of the cache after simply calling bootstrap()
baseline_cache_infos = [lru.cache_info() for lru in lrus]

# Generate some hits and misses
ClassDef().lookup("garbage")
is_standard_module("unittest", std_path=["garbage_path"])
astroid.interpreter.objectmodel.ObjectModel().attributes()

# Did the hits or misses actually happen?
incremented_cache_infos = [lru.cache_info() for lru in lrus]
for incremented_cache, baseline_cache in zip(
incremented_cache_infos, baseline_cache_infos
):
with self.subTest(incremented_cache=incremented_cache):
self.assertGreater(
incremented_cache.hits + incremented_cache.misses,
baseline_cache.hits + baseline_cache.misses,
)

astroid.MANAGER.clear_cache() # also calls bootstrap()

# The cache sizes are now as low or lower than the original baseline
cleared_cache_infos = [lru.cache_info() for lru in lrus]
for cleared_cache, baseline_cache in zip(
cleared_cache_infos, baseline_cache_infos
):
with self.subTest(cleared_cache=cleared_cache):
# less equal because the "baseline" might have had multiple calls to bootstrap()
self.assertLessEqual(cleared_cache.currsize, baseline_cache.currsize)

def test_brain_plugins_reloaded_after_clearing_cache(self) -> None:
astroid.MANAGER.clear_cache()
format_call = astroid.extract_node("''.format()")
4 changes: 3 additions & 1 deletion tests/unittest_modutils.py
@@ -179,7 +179,7 @@ def test_load_packages_without_init(self) -> None:
https://github.com/PyCQA/astroid/issues/1327
"""
tmp_dir = Path(tempfile.gettempdir())
self.addCleanup(os.chdir, os.curdir)
self.addCleanup(os.chdir, os.getcwd())
os.chdir(tmp_dir)

self.addCleanup(shutil.rmtree, tmp_dir / "src")
@@ -288,6 +288,8 @@ def test_custom_path(self) -> None:
self.assertTrue(
modutils.is_standard_module("data.module", (os.path.abspath(datadir),))
)
# "" will evaluate to cwd
self.assertTrue(modutils.is_standard_module("data.module", ("",)))

def test_failing_edge_cases(self) -> None:
# using a subpackage/submodule path as std_path argument