Enable Python warnings during tests and fix all in the code #1233

Merged (1 commit) on Nov 26, 2020
2 changes: 2 additions & 0 deletions piptools/repositories/base.py

@@ -12,6 +12,8 @@ class BaseRepository(object):
     def clear_caches(self):
         """Should clear any caches used by the implementation."""

+    @abstractmethod
+    @contextmanager
     def freshen_build_caches(self):
         """Should start with fresh build/source caches."""

4 changes: 3 additions & 1 deletion piptools/repositories/local.py

@@ -56,8 +56,10 @@ def DEFAULT_INDEX_URL(self):
     def clear_caches(self):
         self.repository.clear_caches()

+    @contextmanager
     def freshen_build_caches(self):
-        self.repository.freshen_build_caches()
+        with self.repository.freshen_build_caches():
+            yield

     def find_best_match(self, ireq, prereleases=None):
         key = key_from_ireq(ireq)
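Because freshen_build_caches on the underlying repository is now a context manager, the local-repository wrapper re-enters it and yields, so the caller sees a single nested context. A minimal sketch of that delegation pattern, with hypothetical class names (InnerRepo and OuterRepo are stand-ins, not pip-tools classes):

    from contextlib import contextmanager


    class InnerRepo:
        @contextmanager
        def freshen_build_caches(self):
            # Stand-in for creating fresh temporary build/source dirs.
            print("inner: create fresh caches")
            try:
                yield
            finally:
                print("inner: remove caches")


    class OuterRepo:
        def __init__(self, repository):
            self.repository = repository

        @contextmanager
        def freshen_build_caches(self):
            # Keep the wrapped repository's context open for the caller's block.
            with self.repository.freshen_build_caches():
                yield


    with OuterRepo(InnerRepo()).freshen_build_caches():
        print("resolution round runs here")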
15 changes: 12 additions & 3 deletions piptools/repositories/pypi.py

@@ -87,29 +87,38 @@ def __init__(self, pip_args, cache_dir):
         self._dependencies_cache = {}

         # Setup file paths
-        self.freshen_build_caches()
+        self._build_dir = None
+        self._source_dir = None
         self._cache_dir = normalize_path(cache_dir)
         self._download_dir = fs_str(os.path.join(self._cache_dir, "pkgs"))
         if PIP_VERSION[:2] <= (20, 2):
             self._wheel_download_dir = fs_str(os.path.join(self._cache_dir, "wheels"))

         self._setup_logging()

+    @contextmanager
     def freshen_build_caches(self):
         """
         Start with fresh build/source caches. Will remove any old build
         caches from disk automatically.
         """
         self._build_dir = TemporaryDirectory(fs_str("build"))
         self._source_dir = TemporaryDirectory(fs_str("source"))
+        try:
+            yield
+        finally:
+            self._build_dir.cleanup()
+            self._build_dir = None
+            self._source_dir.cleanup()
+            self._source_dir = None

     @property
     def build_dir(self):
-        return self._build_dir.name
+        return self._build_dir.name if self._build_dir else None

     @property
     def source_dir(self):
-        return self._source_dir.name
+        return self._source_dir.name if self._source_dir else None

     def clear_caches(self):
         rmtree(self._download_dir, ignore_errors=True)
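The core of the change: rather than creating new TemporaryDirectory objects on every call and leaving the old ones to be cleaned up implicitly (which CPython can report as a ResourceWarning once warnings are shown), the build and source directories now live exactly as long as the with block and are removed in the finally clause. A rough stdlib-only sketch of that lifecycle; the class below is an illustration, not the real PyPIRepository:

    from contextlib import contextmanager
    from tempfile import TemporaryDirectory


    class Repo:
        def __init__(self):
            self._build_dir = None
            self._source_dir = None

        @contextmanager
        def freshen_build_caches(self):
            self._build_dir = TemporaryDirectory(prefix="build")
            self._source_dir = TemporaryDirectory(prefix="source")
            try:
                yield
            finally:
                # Explicit cleanup instead of relying on finalizers, which is
                # what used to surface as a ResourceWarning.
                self._build_dir.cleanup()
                self._build_dir = None
                self._source_dir.cleanup()
                self._source_dir = None

        @property
        def build_dir(self):
            return self._build_dir.name if self._build_dir else None


    repo = Repo()
    assert repo.build_dir is None
    with repo.freshen_build_caches():
        print("building inside", repo.build_dir)
    assert repo.build_dir is None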
15 changes: 7 additions & 8 deletions piptools/resolver.py

@@ -170,7 +170,13 @@ def resolve(self, max_rounds=10):

             log.debug("")
             log.debug(magenta("{:^60}".format("ROUND {}".format(current_round))))
-            has_changed, best_matches = self._resolve_one_round()
+            # If a package version (foo==2.0) was built in a previous round,
+            # and in this round a different version of foo needs to be built
+            # (i.e. foo==1.0), the directory will exist already, which will
+            # cause a pip build failure. The trick is to start with a new
+            # build cache dir for every round, so this can never happen.
+            with self.repository.freshen_build_caches():
+                has_changed, best_matches = self._resolve_one_round()
             log.debug("-" * 60)
             log.debug(
                 "Result of round {}: {}".format(
@@ -180,13 +186,6 @@
             if not has_changed:
                 break

-            # If a package version (foo==2.0) was built in a previous round,
-            # and in this round a different version of foo needs to be built
-            # (i.e. foo==1.0), the directory will exist already, which will
-            # cause a pip build failure. The trick is to start with a new
-            # build cache dir for every round, so this can never happen.
-            self.repository.freshen_build_caches()
-
         del os.environ["PIP_EXISTS_ACTION"]

         # Only include hard requirements and not pip constraints
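With the context manager in place, the resolver scopes each round's build cache to the round itself, so a package built at one version in an earlier round cannot collide with a different version of the same package in a later one. A simplified, self-contained sketch of the loop shape; FakeRepo and _resolve_one_round are illustrative stand-ins, not the real resolver internals:

    from contextlib import contextmanager


    class FakeRepo:
        @contextmanager
        def freshen_build_caches(self):
            yield  # a real repository creates and removes temp dirs here


    def _resolve_one_round(round_number):
        # Pretend the pin set stops changing after the second round.
        return round_number < 2, {"foo": "1.0"}


    def resolve(repository, max_rounds=10):
        for current_round in range(1, max_rounds + 1):
            # A fresh build cache per round: if foo==2.0 was built earlier and
            # foo==1.0 must be built now, the old build directory is already
            # gone, so pip cannot fail on a pre-existing path.
            with repository.freshen_build_caches():
                has_changed, best_matches = _resolve_one_round(current_round)
            if not has_changed:
                return best_matches
        raise RuntimeError("No stable pins after {} rounds".format(max_rounds))


    print(resolve(FakeRepo()))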
1 change: 1 addition & 0 deletions setup.cfg

@@ -56,6 +56,7 @@ universal = 1
 norecursedirs = .* build dist venv test_data piptools/_compat/*
 testpaths = tests piptools
 filterwarnings =
+    always
     ignore::PendingDeprecationWarning:pip\._vendor.+
     ignore::DeprecationWarning:pip\._vendor.+
 markers =
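The first filterwarnings entry, always, is what the PR title refers to: every warning raised during the test run is now displayed, while the two later ignore entries (which take precedence) keep suppressing known noise from pip's vendored packages. The effect is roughly the following stdlib filter chain; this is a sketch of the semantics, not pytest's internal mechanics:

    import warnings

    # Each call inserts its filter at the front of the filter list, so the two
    # ignore rules are consulted before the catch-all "always".
    warnings.simplefilter("always")
    warnings.filterwarnings(
        "ignore", category=PendingDeprecationWarning, module=r"pip\._vendor.+"
    )
    warnings.filterwarnings(
        "ignore", category=DeprecationWarning, module=r"pip\._vendor.+"
    )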
4 changes: 4 additions & 0 deletions tests/conftest.py

@@ -41,6 +41,10 @@ def __init__(self):
         with open("tests/test_data/fake-editables.json", "r") as f:
             self.editables = json.load(f)

+    @contextmanager
+    def freshen_build_caches(self):
+        yield
+
     def get_hashes(self, ireq):
         # Some fake hashes
         return {
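For the fake repository used in tests there is nothing to set up or tear down, so a bare yield under @contextmanager is enough to satisfy the new interface (on Python 3.7+, contextlib.nullcontext() behaves much the same way). A tiny sketch with an assumed class name:

    from contextlib import contextmanager


    class FakeRepository:
        @contextmanager
        def freshen_build_caches(self):
            yield  # no caches to create or remove in the fake


    with FakeRepository().freshen_build_caches():
        print("test body runs; no real caches are touched")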
2 changes: 1 addition & 1 deletion tests/test_cli_compile.py

@@ -196,7 +196,7 @@ def test_redacted_urls_in_verbose_output(runner, option):
         cli,
         [
             "--no-header",
-            "--no-index",
+            "--no-emit-index-url",
             "--no-emit-find-links",
             "--verbose",
             option,
25 changes: 16 additions & 9 deletions tests/test_repository_pypi.py

@@ -109,23 +109,30 @@ def test_open_local_or_remote_file__remote_file(
     response_file_path.write_bytes(content)

     mock_response = mock.Mock()
-    mock_response.raw = response_file_path.open("rb")
-    mock_response.headers = {"content-length": content_length}
+    with response_file_path.open("rb") as fp:
+        mock_response.raw = fp
+        mock_response.headers = {"content-length": content_length}

-    with mock.patch.object(session, "get", return_value=mock_response):
-        with open_local_or_remote_file(link, session) as file_stream:
-            assert file_stream.stream.read() == content
-            assert file_stream.size == expected_content_length
+        with mock.patch.object(session, "get", return_value=mock_response):
+            with open_local_or_remote_file(link, session) as file_stream:
+                assert file_stream.stream.read() == content
+                assert file_stream.size == expected_content_length

-    mock_response.close.assert_called_once()
+        mock_response.close.assert_called_once()


 def test_pypirepo_build_dir_is_str(pypi_repository):
-    assert isinstance(pypi_repository.build_dir, str)
+    assert pypi_repository.build_dir is None
+    with pypi_repository.freshen_build_caches():
+        assert isinstance(pypi_repository.build_dir, str)
+    assert pypi_repository.build_dir is None


 def test_pypirepo_source_dir_is_str(pypi_repository):
-    assert isinstance(pypi_repository.source_dir, str)
+    assert pypi_repository.source_dir is None
+    with pypi_repository.freshen_build_caches():
+        assert isinstance(pypi_repository.source_dir, str)
+    assert pypi_repository.source_dir is None


 def test_relative_path_cache_dir_is_normalized(from_line):
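The test previously opened the response file and never closed the handle, which shows up as a ResourceWarning once warnings are displayed; wrapping the open() call in a with block closes the handle deterministically and keeps it alive only while the mocked session needs it. A small, self-contained illustration of the difference on CPython (the temporary file used here is arbitrary):

    import gc
    import os
    import tempfile
    import warnings

    fd, path = tempfile.mkstemp()
    os.close(fd)

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        open(path, "rb")  # handle is dropped without close()
        gc.collect()      # belt and braces; CPython already finalized the object
        assert any(issubclass(w.category, ResourceWarning) for w in caught)

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        with open(path, "rb"):  # the with block closes the handle
            pass
        gc.collect()
        assert not any(issubclass(w.category, ResourceWarning) for w in caught)

    os.remove(path)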