feat: export cross-platform requirements (#2418)
frostming committed Dec 1, 2023
1 parent 5b259c0 commit 99cfa33
Showing 24 changed files with 359 additions and 297 deletions.
1 change: 1 addition & 0 deletions news/2418.feature.md
@@ -0,0 +1 @@
When exporting requirements, record the environment markers from all parents for each requirement. This allows the exported requirements to work on different platforms and Python versions.
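
For illustration, an exported requirements file can now carry lines such as the following (package names and markers are hypothetical, shown only to indicate the format):

    colorama==0.4.6; sys_platform == "win32"
    tomli==2.0.1; python_version < "3.11"

Each marker is the combination recorded from the requirement's parents, so pip can skip an entry on platforms where it does not apply.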
59 changes: 14 additions & 45 deletions pdm.lock

Some generated files are not rendered by default.

1 change: 1 addition & 0 deletions pyproject.toml
@@ -19,6 +19,7 @@ dependencies = [
"pyproject-hooks",
"requests-toolbelt",
"unearth>=0.12.1",
"dep-logic>=0.0.2,<1.0",
"findpython>=0.4.0,<1.0.0a0",
"tomlkit>=0.11.1,<1",
"shellingham>=1.3.2",
7 changes: 6 additions & 1 deletion src/pdm/cli/actions.py
@@ -128,7 +128,9 @@ def do_lock(
return mapping


def resolve_candidates_from_lockfile(project: Project, requirements: Iterable[Requirement]) -> dict[str, Candidate]:
def resolve_candidates_from_lockfile(
project: Project, requirements: Iterable[Requirement], cross_platform: bool = False
) -> dict[str, Candidate]:
ui = project.core.ui
resolve_max_rounds = int(project.config["strategy.resolve_max_rounds"])
reqs = [
@@ -138,12 +140,15 @@ def resolve_candidates_from_lockfile(project: Project, requirements: Iterable[Re
with ui.open_spinner("Resolving packages from lockfile...") as spinner:
reporter = BaseReporter()
provider = project.get_provider(for_install=True)
if cross_platform:
provider.repository.ignore_compatibility = True
resolver: Resolver = project.core.resolver_class(provider, reporter)
mapping, *_ = resolve(
resolver,
reqs,
project.environment.python_requires,
resolve_max_rounds,
record_markers=cross_platform,
)
spinner.update("Fetching hashes for resolved packages...")
fetch_hashes(provider.repository, mapping)
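
The new `cross_platform` flag makes the repository stop filtering candidates for the running interpreter (`ignore_compatibility = True`) and asks the resolver to record markers (`record_markers=cross_platform`). A minimal sketch of how a caller might opt in; the `project` object and its `get_dependencies()` accessor are assumed here and may differ from the real API:

    from pdm.cli.actions import resolve_candidates_from_lockfile

    # resolve every candidate recorded in the lockfile, regardless of the current platform
    requirements = list(project.get_dependencies().values())  # assumed accessor
    candidates = resolve_candidates_from_lockfile(project, requirements, cross_platform=True)
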
7 changes: 2 additions & 5 deletions src/pdm/cli/commands/export.py
@@ -60,11 +60,8 @@ def handle(self, project: Project, options: argparse.Namespace) -> None:
else:
if not project.lockfile.exists():
raise PdmUsageError("No lockfile found, please run `pdm lock` first.")
project.core.ui.warn(
"The exported requirements file is no longer cross-platform. "
"Using it on other platforms may cause unexpected result.",
)
candidates = resolve_candidates_from_lockfile(project, requirements.values())

candidates = resolve_candidates_from_lockfile(project, requirements.values(), cross_platform=True)
# Remove candidates with [extras] because the bare candidates are already
# included
packages = (candidate for candidate in candidates.values() if not candidate.req.extras)
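
With the warning gone, exporting from a lockfile is intended to produce a file that works across platforms. A hedged usage sketch (standard `pdm` CLI; `-o` writes the output file):

    pdm lock
    pdm export -o requirements.txt
    # exported lines keep their markers, e.g. `; sys_platform == "win32"`
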
2 changes: 1 addition & 1 deletion src/pdm/cli/commands/run.py
@@ -65,7 +65,7 @@ def _interpolate_pdm(script: str) -> str:
executable_path = Path(sys.executable)

def replace(m: re.Match[str]) -> str:
return sh_join([executable_path.as_posix(), "-m", "pdm"])
return shlex.join([executable_path.as_posix(), "-m", "pdm"])

interpolated = RE_PDM_PLACEHOLDER.sub(replace, script)
return interpolated
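
For reference, `shlex.join` from the standard library builds a shell-safe command string, quoting arguments that need it:

    import shlex

    shlex.join(["/usr/bin/python3.11", "-m", "pdm"])        # '/usr/bin/python3.11 -m pdm'
    shlex.join(["/path with spaces/python", "-m", "pdm"])   # "'/path with spaces/python' -m pdm"
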
7 changes: 1 addition & 6 deletions src/pdm/cli/utils.py
@@ -200,12 +200,7 @@ def add_package(key: str, dist: Distribution | None) -> Package:
if dist:
requirements = (
parse_requirement(r)
for r in filter_requirements_with_extras(
cast(str, dist.metadata["Name"]),
dist.requires or [],
extras,
include_default=True,
)
for r in filter_requirements_with_extras(dist.requires or [], extras, include_default=True)
)
for req in requirements:
if not req.marker or req.marker.evaluate(marker_env):
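
A hedged illustration of what the helper is expected to do, inferred from its call sites rather than its implementation: given raw `Requires-Dist` strings and the requested extras, it keeps entries whose `extra == "..."` marker matches, plus unconditional ones when `include_default=True`:

    requires = ['idna>=2.8', 'brotli>=1.0; extra == "speedups"']
    # filter_requirements_with_extras(requires, ("speedups",), include_default=True)
    #   -> roughly both entries (exact returned strings may differ)
    # filter_requirements_with_extras(requires, (), include_default=True)
    #   -> roughly ['idna>=2.8'] only
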
6 changes: 3 additions & 3 deletions src/pdm/formats/pipfile.py
@@ -9,7 +9,7 @@

from pdm.compat import tomllib
from pdm.formats.base import make_array
from pdm.models.markers import Marker
from pdm.models.markers import Marker, get_marker
from pdm.models.requirements import FileRequirement, Requirement

if TYPE_CHECKING:
@@ -27,10 +27,10 @@ def convert_pipfile_requirement(name: str, req: RequirementDict, backend: BuildB
if isinstance(req, dict):
markers: list[Marker] = []
if "markers" in req:
markers.append(Marker(req["markers"])) # type: ignore[arg-type]
markers.append(get_marker(req["markers"])) # type: ignore[arg-type]
for key in MARKER_KEYS:
if key in req:
marker = Marker(f"{key}{req[key]}")
marker = get_marker(f"{key}{req[key]}")
markers.append(marker)
del req[key]

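
For context, a hedged example of the kind of Pipfile entry this function handles (values are illustrative):

    # requests = {version = ">=2.28", markers = "python_version >= '3.8'", sys_platform = "== 'win32'"}
    # the `markers` string and each recognized marker key are parsed with get_marker()
    # and combined into a single marker on the converted requirement
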
6 changes: 3 additions & 3 deletions src/pdm/formats/poetry.py
@@ -16,7 +16,7 @@
make_inline_table,
parse_name_email,
)
from pdm.models.markers import Marker
from pdm.models.markers import Marker, get_marker
from pdm.models.requirements import Requirement
from pdm.models.specifiers import PySpecSet
from pdm.utils import cd
@@ -80,9 +80,9 @@ def _convert_req(name: str, req_dict: RequirementDict | list[RequirementDict]) -
req_dict["version"] = _convert_specifier(str(req_dict["version"]))
markers: list[Marker] = []
if "markers" in req_dict:
markers.append(Marker(req_dict.pop("markers"))) # type: ignore[arg-type]
markers.append(get_marker(req_dict.pop("markers"))) # type: ignore[arg-type]
if "python" in req_dict:
markers.append(Marker(_convert_python(str(req_dict.pop("python"))).as_marker_string()))
markers.append(get_marker(_convert_python(str(req_dict.pop("python"))).as_marker_string()))
if markers:
req_dict["marker"] = str(functools.reduce(operator.and_, markers)).replace('"', "'")
if "rev" in req_dict or "branch" in req_dict or "tag" in req_dict:
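
Loosely, the conversion parses each constraint into a marker and ANDs them together, as the `functools.reduce(operator.and_, markers)` line above shows. A small sketch using the same imports (output formatting is approximate):

    import functools
    import operator

    from pdm.models.markers import get_marker

    markers = [get_marker('sys_platform == "win32"'), get_marker('python_version >= "3.8"')]
    combined = functools.reduce(operator.and_, markers)
    # str(combined) is roughly: sys_platform == "win32" and python_version >= "3.8"
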
4 changes: 1 addition & 3 deletions src/pdm/formats/requirements.py
@@ -1,7 +1,6 @@
from __future__ import annotations

import argparse
import dataclasses
import hashlib
import shlex
import urllib.parse
@@ -10,7 +9,6 @@

from pdm.formats.base import make_array
from pdm.models.requirements import FileRequirement, Requirement, parse_requirement
from pdm.models.specifiers import get_specifier
from pdm.utils import expand_env_vars_in_auth

if TYPE_CHECKING:
@@ -189,7 +187,7 @@ def export(
collected_req: set[str] = set()
for candidate in sorted(candidates, key=lambda x: x.identify()): # type: ignore[attr-defined]
if isinstance(candidate, Candidate):
req = dataclasses.replace(candidate.req, specifier=get_specifier(f"=={candidate.version}"), marker=None)
req = candidate.req.as_pinned_version(candidate.version)
else:
assert isinstance(candidate, Requirement)
req = candidate
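
The old line pinned the version but threw the marker away (`marker=None`); `as_pinned_version` keeps the rest of the requirement, which is what lets markers appear in the exported file. A rough equivalent of the new behavior, kept only for comparison (not the actual implementation):

    # req = dataclasses.replace(candidate.req, specifier=get_specifier(f"=={candidate.version}"))
    # i.e. pin the version, keep name, extras and marker
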
6 changes: 1 addition & 5 deletions src/pdm/models/candidates.py
@@ -611,11 +611,7 @@ def metadata(self) -> im.Distribution:
def get_dependencies_from_metadata(self) -> list[str]:
"""Get the dependencies of a candidate from metadata."""
extras = self.req.extras or ()
return filter_requirements_with_extras(
self.req.project_name, # type: ignore[arg-type]
self.metadata.requires or [],
extras,
)
return filter_requirements_with_extras(self.metadata.requires or [], extras)

def should_cache(self) -> bool:
"""Determine whether to cache the dependencies and built wheel."""
(The remaining changed files were not loaded on this page.)
