fix pypa#11847 for sdists
cosmicexplorer committed Aug 2, 2023
1 parent 3dc5ac5 commit e860f2d
Showing 1 changed file with 17 additions and 31 deletions.
src/pip/_internal/operations/prepare.py
@@ -20,7 +20,6 @@
InstallationError,
MetadataInconsistent,
NetworkConnectionError,
PreviousBuildDirError,
VcsHashUnsupported,
)
from pip._internal.index.package_finder import PackageFinder
@@ -47,7 +46,6 @@
display_path,
hash_file,
hide_url,
is_installable_dir,
)
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.unpacking import unpack_file
@@ -305,7 +303,6 @@ def _ensure_link_req_src_dir(
# We don't need to unpack wheels, so no need for a source
# directory.
return
assert req.source_dir is None
if req.link.is_existing_dir():
# build local directories in-tree
req.source_dir = req.link.file_path
@@ -318,21 +315,6 @@
parallel_builds=parallel_builds,
)

# If a checkout exists, it's unwise to keep going. version
# inconsistencies are logged later, but do not fail the
# installation.
# FIXME: this won't upgrade when there's an existing
# package unpacked in `req.source_dir`
# TODO: this check is now probably dead code
if is_installable_dir(req.source_dir):
raise PreviousBuildDirError(
"pip can't proceed with requirements '{}' due to a"
"pre-existing build directory ({}). This is likely "
"due to a previous installation that failed . pip is "
"being responsible and not assuming it can delete this. "
"Please delete it and try again.".format(req, req.source_dir)
)

def _get_linked_req_hashes(self, req: InstallRequirement) -> Hashes:
# By the time this is called, the requirement's link should have
# been checked so we can tell what kind of requirements req is
@@ -479,20 +461,24 @@ def _complete_partial_requirements(
for link, (filepath, _) in batch_download:
logger.debug("Downloading link %s to %s", link, filepath)
req = links_to_fully_download[link]
# Record the downloaded file path so wheel reqs can extract a Distribution
# in .get_dist().
req.local_file_path = filepath
# TODO: This needs fixing for sdists
# This is an emergency fix for #11847, which reports that
# distributions get downloaded twice when metadata is loaded
# from a PEP 658 standalone metadata file. Setting _downloaded
# fixes this for wheels, but breaks the sdist case (tests
# test_download_metadata). As PyPI is currently only serving
# metadata for wheels, this is not an immediate issue.
# Fixing the problem properly looks like it will require a
# complete refactoring of the `prepare_linked_requirements_more`
# logic, and I haven't a clue where to start on that, so for now
# I have fixed the issue *just* for wheels.
if req.is_wheel:
self._downloaded[req.link.url] = filepath
# Record that the file is downloaded so we don't do it again in
# _prepare_linked_requirement().
self._downloaded[req.link.url] = filepath

# If this is an sdist, we need to unpack it and set the .source_dir
# immediately after downloading, as _prepare_linked_requirement() assumes
# the req is either not downloaded at all, or both downloaded and
# unpacked. The downloading and unpacking are typically done with
# unpack_url(), but we separate the downloading and unpacking steps here in
# order to use the BatchDownloader.
if not req.is_wheel:
hashes = self._get_linked_req_hashes(req)
assert filepath == _check_download_dir(req.link, temp_dir, hashes)
self._ensure_link_req_src_dir(req, parallel_builds)
unpack_file(filepath, req.source_dir)

# This step is necessary to ensure all lazy wheels are processed
# successfully by the 'download', 'wheel', and 'install' commands.
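
The comment block removed in the last hunk documents the original emergency fix for pypa#11847: when metadata is served from a PEP 658 standalone metadata file, the distribution ended up being downloaded a second time, and recording the file in self._downloaded prevented the repeat fetch, but only for wheels. With this commit, every batch-downloaded file is recorded in that cache. The following is a minimal, self-contained sketch of that record-once/reuse-later pattern; Downloader, batch_download, prepare_one, and _fetch are hypothetical stand-ins for illustration, not pip's implementation.

from typing import Dict, Iterable, Iterator, Tuple


class Downloader:
    """Toy downloader showing the URL -> local-path cache."""

    def __init__(self) -> None:
        # Maps a link URL to the local file it has already been downloaded to.
        self._downloaded: Dict[str, str] = {}

    def batch_download(self, urls: Iterable[str]) -> Iterator[Tuple[str, str]]:
        for url in urls:
            path = self._fetch(url)
            # Record the result so a later preparation step never fetches it again.
            self._downloaded[url] = path
            yield url, path

    def prepare_one(self, url: str) -> str:
        # The later, per-requirement step: reuse the cached file when present.
        cached = self._downloaded.get(url)
        if cached is not None:
            return cached
        return self._fetch(url)

    def _fetch(self, url: str) -> str:
        # Stand-in for the real network download.
        return "/tmp/" + url.rsplit("/", 1)[-1]

Calling prepare_one() on a URL that already went through batch_download() returns the cached path without a second fetch, which is the double download reported in pypa#11847 being avoided.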
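For sdists, the new branch separates the two halves of what unpack_url() otherwise does in one call: the archive is fetched by the BatchDownloader, then the source directory is created and the archive unpacked immediately, so the later preparation step sees a requirement that is both downloaded and unpacked. Below is a simplified standard-library sketch of that idea; Requirement, complete_download, and the directory layout are illustrative stand-ins, not pip's types.

import os
import shutil
from dataclasses import dataclass
from typing import Optional


@dataclass
class Requirement:
    name: str
    is_wheel: bool
    local_file_path: Optional[str] = None
    source_dir: Optional[str] = None


def complete_download(req: Requirement, archive_path: str, build_root: str) -> None:
    # Record where the archive landed (what the diff stores in req.local_file_path).
    req.local_file_path = archive_path
    if req.is_wheel:
        # Wheels are consumed directly from the archive; no source directory needed.
        return
    # sdists: create the source directory and unpack right away, mirroring the
    # unpack half of a combined download-and-unpack helper.
    req.source_dir = os.path.join(build_root, req.name)
    os.makedirs(req.source_dir, exist_ok=True)
    shutil.unpack_archive(archive_path, req.source_dir)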
