diff --git a/docs/hooks.md b/docs/hooks.md
index ab505d6..13239c0 100644
--- a/docs/hooks.md
+++ b/docs/hooks.md
@@ -137,7 +137,7 @@ pdm-build-mypyc
version = "0.1.0"
description = "A pdm build hook to compile Python code with mypyc"
authors = [{name = "...", email = "..."}]
- license = {text = "MIT"}
+ license = "MIT"
readme = "README.md"
[project.entry-points."pdm.build.hook"]
@@ -156,7 +156,6 @@ pdm-build-mypyc
mypyc_build(context.build_dir)
```
-
The plugin must be distributed with an entry point under `pdm.build.hook` group. The entry point value can be any of the following:
- A module containing hook functions
diff --git a/docs/index.md b/docs/index.md
index 6aaf78e..1af8467 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -40,7 +40,7 @@ authors = [{name = "John Doe", email="me@johndoe.org"}]
dependencies = ["requests"]
requires-python = ">=3.8"
readme = "README.md"
-license = {text = "MIT"}
+license = "MIT"
```
Then run the build command to build the project as wheel and sdist:
diff --git a/pyproject.toml b/pyproject.toml
index ee75219..21cd507 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -6,7 +6,7 @@ description = "The build backend used by PDM that supports latest packaging stan
authors = [
{ name = "Frost Ming", email = "me@frostming.com" }
]
-license = {text = "MIT"}
+license = "MIT"
requires-python = ">=3.8"
readme = "README.md"
keywords = ["packaging", "PEP 517", "build"]
diff --git a/scripts/patches/pyproject_metadata.patch b/scripts/patches/pyproject_metadata.patch
index b4c6c89..00629e6 100644
--- a/scripts/patches/pyproject_metadata.patch
+++ b/scripts/patches/pyproject_metadata.patch
@@ -1,8 +1,16 @@
diff --git a/src/pdm/backend/_vendor/pyproject_metadata/__init__.py b/src/pdm/backend/_vendor/pyproject_metadata/__init__.py
-index df826f8..7f69f3a 100644
+index 52289dc..7ef9fc9 100644
--- a/src/pdm/backend/_vendor/pyproject_metadata/__init__.py
+++ b/src/pdm/backend/_vendor/pyproject_metadata/__init__.py
-@@ -24,11 +24,11 @@ if typing.TYPE_CHECKING:
+@@ -20,18 +20,18 @@ if typing.TYPE_CHECKING:
+ from collections.abc import Generator, Iterable, Mapping
+ from typing import Any
+
+- from packaging.requirements import Requirement
++ from pdm.backend._vendor.packaging.requirements import Requirement
+
+ if sys.version_info < (3, 11):
+ from typing_extensions import Self
else:
from typing import Self
@@ -18,22 +26,46 @@ index df826f8..7f69f3a 100644
+import pdm.backend._vendor.packaging.version as pkg_version
- __version__ = '0.8.0'
+ __version__ = '0.9.0b3'
+@@ -351,8 +351,8 @@ class ProjectFetcher(DataFetcher):
+ requirements: list[Requirement] = []
+ for req in requirement_strings:
+ try:
+- requirements.append(packaging.requirements.Requirement(req))
+- except packaging.requirements.InvalidRequirement as e:
++ requirements.append(pkg_requirements.Requirement(req))
++ except pkg_requirements.InvalidRequirement as e:
+ msg = (
+ 'Field "project.dependencies" contains an invalid PEP 508 '
+ f'requirement string "{req}" ("{e}")'
+@@ -393,9 +393,9 @@ class ProjectFetcher(DataFetcher):
+ raise ConfigurationError(msg)
+ try:
+ requirements_dict[extra].append(
+- packaging.requirements.Requirement(req)
++ pkg_requirements.Requirement(req)
+ )
+- except packaging.requirements.InvalidRequirement as e:
++ except pkg_requirements.InvalidRequirement as e:
+ msg = (
+ f'Field "project.optional-dependencies.{extra}" contains '
+ f'an invalid PEP 508 requirement string "{req}" ("{e}")'
+@@ -453,12 +453,12 @@ class Readme(typing.NamedTuple):
@dataclasses.dataclass
class StandardMetadata:
name: str
- version: packaging.version.Version | None = None
+ version: pkg_version.Version | None = None
description: str | None = None
- license: License | None = None
+ license: License | str | None = None
+ license_files: list[pathlib.Path] | None = None
readme: Readme | None = None
- requires_python: packaging.specifiers.SpecifierSet | None = None
+ requires_python: pkg_specifiers.SpecifierSet | None = None
dependencies: list[Requirement] = dataclasses.field(default_factory=list)
- optional_dependencies: dict[str, list[Requirement]] = dataclasses.field(default_factory=dict)
- entrypoints: dict[str, dict[str, str]] = dataclasses.field(default_factory=dict)
-@@ -202,7 +202,7 @@ class StandardMetadata:
+ optional_dependencies: dict[str, list[Requirement]] = dataclasses.field(
+ default_factory=dict
+@@ -547,7 +547,7 @@ class StandardMetadata:
@property
def canonical_name(self) -> str:
@@ -42,7 +74,7 @@ index df826f8..7f69f3a 100644
@classmethod
def from_pyproject(
-@@ -235,7 +235,7 @@ class StandardMetadata:
+@@ -590,7 +590,7 @@ class StandardMetadata:
version_string = fetcher.get_str('project.version')
requires_python_string = fetcher.get_str('project.requires-python')
@@ -51,16 +83,16 @@ index df826f8..7f69f3a 100644
if version is None and 'version' not in dynamic:
msg = 'Field "project.version" missing and "version" not specified in "project.dynamic"'
-@@ -256,7 +256,7 @@ class StandardMetadata:
- description,
- cls._get_license(fetcher, project_dir),
- cls._get_readme(fetcher, project_dir),
-- packaging.specifiers.SpecifierSet(requires_python_string) if requires_python_string else None,
-+ pkg_specifiers.SpecifierSet(requires_python_string) if requires_python_string else None,
- cls._get_dependencies(fetcher),
- cls._get_optional_dependencies(fetcher),
- cls._get_entrypoints(fetcher),
-@@ -358,15 +358,15 @@ class StandardMetadata:
+@@ -608,7 +608,7 @@ class StandardMetadata:
+ fetcher.get_license(project_dir),
+ fetcher.get_license_files(project_dir),
+ fetcher.get_readme(project_dir),
+- packaging.specifiers.SpecifierSet(requires_python_string)
++ pkg_specifiers.SpecifierSet(requires_python_string)
+ if requires_python_string
+ else None,
+ fetcher.get_dependencies(),
+@@ -720,15 +720,15 @@ class StandardMetadata:
requirement = copy.copy(requirement)
if requirement.marker:
if 'or' in requirement.marker._markers:
@@ -66,12 +98,12 @@ index df826f8..7f69f3a 100644
pairs in the same requirement'
raise ConfigurationError(msg)
requirement.marker = pkg_markers.Marker(
- f'({requirement.marker}) and extra == "{extra}"'
+ f'({requirement.marker}) and extra == "{extra}"',
)
else:
- requirement.marker = packaging.markers.Marker(f'extra == "{extra}"')
+ requirement.marker = pkg_markers.Marker(f'extra == "{extra}"')
return requirement
- @staticmethod
-@@ -462,8 +462,8 @@ class StandardMetadata:
- requirements: list[Requirement] = []
- for req in requirement_strings:
- try:
-- requirements.append(packaging.requirements.Requirement(req))
-- except packaging.requirements.InvalidRequirement as e:
-+ requirements.append(pkg_requirements.Requirement(req))
-+ except pkg_requirements.InvalidRequirement as e:
- msg = (
- 'Field "project.dependencies" contains an invalid PEP 508 '
- f'requirement string "{req}" ("{e}")'
-@@ -502,8 +502,8 @@ class StandardMetadata:
- )
- raise ConfigurationError(msg)
- try:
-- requirements_dict[extra].append(packaging.requirements.Requirement(req))
-- except packaging.requirements.InvalidRequirement as e:
-+ requirements_dict[extra].append(pkg_requirements.Requirement(req))
-+ except pkg_requirements.InvalidRequirement as e:
- msg = (
- f'Field "project.optional-dependencies.{extra}" contains '
- f'an invalid PEP 508 requirement string "{req}" ("{e}")'
+
diff --git
a/src/pdm/backend/_vendor/packaging/__init__.py b/src/pdm/backend/_vendor/packaging/__init__.py index e7c0aa1..9ba41d8 100644 --- a/src/pdm/backend/_vendor/packaging/__init__.py +++ b/src/pdm/backend/_vendor/packaging/__init__.py @@ -6,7 +6,7 @@ __summary__ = "Core utilities for Python packages" __uri__ = "https://github.com/pypa/packaging" -__version__ = "24.0" +__version__ = "24.1" __author__ = "Donald Stufft and individual contributors" __email__ = "donald@stufft.io" diff --git a/src/pdm/backend/_vendor/packaging/_elffile.py b/src/pdm/backend/_vendor/packaging/_elffile.py index 6fb19b3..f7a0218 100644 --- a/src/pdm/backend/_vendor/packaging/_elffile.py +++ b/src/pdm/backend/_vendor/packaging/_elffile.py @@ -8,10 +8,12 @@ ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html """ +from __future__ import annotations + import enum import os import struct -from typing import IO, Optional, Tuple +from typing import IO class ELFInvalid(ValueError): @@ -87,11 +89,11 @@ def __init__(self, f: IO[bytes]) -> None: except struct.error as e: raise ELFInvalid("unable to parse machine and section information") from e - def _read(self, fmt: str) -> Tuple[int, ...]: + def _read(self, fmt: str) -> tuple[int, ...]: return struct.unpack(fmt, self._f.read(struct.calcsize(fmt))) @property - def interpreter(self) -> Optional[str]: + def interpreter(self) -> str | None: """ The path recorded in the ``PT_INTERP`` section header. """ diff --git a/src/pdm/backend/_vendor/packaging/_manylinux.py b/src/pdm/backend/_vendor/packaging/_manylinux.py index ad62505..08f651f 100644 --- a/src/pdm/backend/_vendor/packaging/_manylinux.py +++ b/src/pdm/backend/_vendor/packaging/_manylinux.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import collections import contextlib import functools @@ -5,7 +7,7 @@ import re import sys import warnings -from typing import Dict, Generator, Iterator, NamedTuple, Optional, Sequence, Tuple +from typing import Generator, Iterator, NamedTuple, Sequence from ._elffile import EIClass, EIData, ELFFile, EMachine @@ -17,7 +19,7 @@ # `os.PathLike` not a generic type until Python 3.9, so sticking with `str` # as the type for `path` until then. @contextlib.contextmanager -def _parse_elf(path: str) -> Generator[Optional[ELFFile], None, None]: +def _parse_elf(path: str) -> Generator[ELFFile | None, None, None]: try: with open(path, "rb") as f: yield ELFFile(f) @@ -72,7 +74,7 @@ def _have_compatible_abi(executable: str, archs: Sequence[str]) -> bool: # For now, guess what the highest minor version might be, assume it will # be 50 for testing. Once this actually happens, update the dictionary # with the actual value. -_LAST_GLIBC_MINOR: Dict[int, int] = collections.defaultdict(lambda: 50) +_LAST_GLIBC_MINOR: dict[int, int] = collections.defaultdict(lambda: 50) class _GLibCVersion(NamedTuple): @@ -80,7 +82,7 @@ class _GLibCVersion(NamedTuple): minor: int -def _glibc_version_string_confstr() -> Optional[str]: +def _glibc_version_string_confstr() -> str | None: """ Primary implementation of glibc_version_string using os.confstr. """ @@ -90,7 +92,7 @@ def _glibc_version_string_confstr() -> Optional[str]: # https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183 try: # Should be a string like "glibc 2.17". 
- version_string: Optional[str] = os.confstr("CS_GNU_LIBC_VERSION") + version_string: str | None = os.confstr("CS_GNU_LIBC_VERSION") assert version_string is not None _, version = version_string.rsplit() except (AssertionError, AttributeError, OSError, ValueError): @@ -99,7 +101,7 @@ def _glibc_version_string_confstr() -> Optional[str]: return version -def _glibc_version_string_ctypes() -> Optional[str]: +def _glibc_version_string_ctypes() -> str | None: """ Fallback implementation of glibc_version_string using ctypes. """ @@ -143,12 +145,12 @@ def _glibc_version_string_ctypes() -> Optional[str]: return version_str -def _glibc_version_string() -> Optional[str]: +def _glibc_version_string() -> str | None: """Returns glibc version string, or None if not using glibc.""" return _glibc_version_string_confstr() or _glibc_version_string_ctypes() -def _parse_glibc_version(version_str: str) -> Tuple[int, int]: +def _parse_glibc_version(version_str: str) -> tuple[int, int]: """Parse glibc version. We use a regexp instead of str.split because we want to discard any @@ -167,8 +169,8 @@ def _parse_glibc_version(version_str: str) -> Tuple[int, int]: return int(m.group("major")), int(m.group("minor")) -@functools.lru_cache() -def _get_glibc_version() -> Tuple[int, int]: +@functools.lru_cache +def _get_glibc_version() -> tuple[int, int]: version_str = _glibc_version_string() if version_str is None: return (-1, -1) diff --git a/src/pdm/backend/_vendor/packaging/_musllinux.py b/src/pdm/backend/_vendor/packaging/_musllinux.py index 86419df..d2bf30b 100644 --- a/src/pdm/backend/_vendor/packaging/_musllinux.py +++ b/src/pdm/backend/_vendor/packaging/_musllinux.py @@ -4,11 +4,13 @@ linked against musl, and what musl version is used. """ +from __future__ import annotations + import functools import re import subprocess import sys -from typing import Iterator, NamedTuple, Optional, Sequence +from typing import Iterator, NamedTuple, Sequence from ._elffile import ELFFile @@ -18,7 +20,7 @@ class _MuslVersion(NamedTuple): minor: int -def _parse_musl_version(output: str) -> Optional[_MuslVersion]: +def _parse_musl_version(output: str) -> _MuslVersion | None: lines = [n for n in (n.strip() for n in output.splitlines()) if n] if len(lines) < 2 or lines[0][:4] != "musl": return None @@ -28,8 +30,8 @@ def _parse_musl_version(output: str) -> Optional[_MuslVersion]: return _MuslVersion(major=int(m.group(1)), minor=int(m.group(2))) -@functools.lru_cache() -def _get_musl_version(executable: str) -> Optional[_MuslVersion]: +@functools.lru_cache +def _get_musl_version(executable: str) -> _MuslVersion | None: """Detect currently-running musl runtime version. This is done by checking the specified executable's dynamic linking diff --git a/src/pdm/backend/_vendor/packaging/_parser.py b/src/pdm/backend/_vendor/packaging/_parser.py index 684df75..c1238c0 100644 --- a/src/pdm/backend/_vendor/packaging/_parser.py +++ b/src/pdm/backend/_vendor/packaging/_parser.py @@ -1,11 +1,13 @@ """Handwritten parser of dependency specifiers. -The docstring for each __parse_* function contains ENBF-inspired grammar representing +The docstring for each __parse_* function contains EBNF-inspired grammar representing the implementation. 
""" +from __future__ import annotations + import ast -from typing import Any, List, NamedTuple, Optional, Tuple, Union +from typing import NamedTuple, Sequence, Tuple, Union from ._tokenizer import DEFAULT_RULES, Tokenizer @@ -41,20 +43,16 @@ def serialize(self) -> str: MarkerVar = Union[Variable, Value] MarkerItem = Tuple[MarkerVar, Op, MarkerVar] -# MarkerAtom = Union[MarkerItem, List["MarkerAtom"]] -# MarkerList = List[Union["MarkerList", MarkerAtom, str]] -# mypy does not support recursive type definition -# https://github.com/python/mypy/issues/731 -MarkerAtom = Any -MarkerList = List[Any] +MarkerAtom = Union[MarkerItem, Sequence["MarkerAtom"]] +MarkerList = Sequence[Union["MarkerList", MarkerAtom, str]] class ParsedRequirement(NamedTuple): name: str url: str - extras: List[str] + extras: list[str] specifier: str - marker: Optional[MarkerList] + marker: MarkerList | None # -------------------------------------------------------------------------------------- @@ -87,7 +85,7 @@ def _parse_requirement(tokenizer: Tokenizer) -> ParsedRequirement: def _parse_requirement_details( tokenizer: Tokenizer, -) -> Tuple[str, str, Optional[MarkerList]]: +) -> tuple[str, str, MarkerList | None]: """ requirement_details = AT URL (WS requirement_marker?)? | specifier WS? (requirement_marker)? @@ -156,7 +154,7 @@ def _parse_requirement_marker( return marker -def _parse_extras(tokenizer: Tokenizer) -> List[str]: +def _parse_extras(tokenizer: Tokenizer) -> list[str]: """ extras = (LEFT_BRACKET wsp* extras_list? wsp* RIGHT_BRACKET)? """ @@ -175,11 +173,11 @@ def _parse_extras(tokenizer: Tokenizer) -> List[str]: return extras -def _parse_extras_list(tokenizer: Tokenizer) -> List[str]: +def _parse_extras_list(tokenizer: Tokenizer) -> list[str]: """ extras_list = identifier (wsp* ',' wsp* identifier)* """ - extras: List[str] = [] + extras: list[str] = [] if not tokenizer.check("IDENTIFIER"): return extras diff --git a/src/pdm/backend/_vendor/packaging/_tokenizer.py b/src/pdm/backend/_vendor/packaging/_tokenizer.py index dd0d648..89d0416 100644 --- a/src/pdm/backend/_vendor/packaging/_tokenizer.py +++ b/src/pdm/backend/_vendor/packaging/_tokenizer.py @@ -1,7 +1,9 @@ +from __future__ import annotations + import contextlib import re from dataclasses import dataclass -from typing import Dict, Iterator, NoReturn, Optional, Tuple, Union +from typing import Iterator, NoReturn from .specifiers import Specifier @@ -21,7 +23,7 @@ def __init__( message: str, *, source: str, - span: Tuple[int, int], + span: tuple[int, int], ) -> None: self.span = span self.message = message @@ -34,7 +36,7 @@ def __str__(self) -> str: return "\n ".join([self.message, self.source, marker]) -DEFAULT_RULES: "Dict[str, Union[str, re.Pattern[str]]]" = { +DEFAULT_RULES: dict[str, str | re.Pattern[str]] = { "LEFT_PARENTHESIS": r"\(", "RIGHT_PARENTHESIS": r"\)", "LEFT_BRACKET": r"\[", @@ -96,13 +98,13 @@ def __init__( self, source: str, *, - rules: "Dict[str, Union[str, re.Pattern[str]]]", + rules: dict[str, str | re.Pattern[str]], ) -> None: self.source = source - self.rules: Dict[str, re.Pattern[str]] = { + self.rules: dict[str, re.Pattern[str]] = { name: re.compile(pattern) for name, pattern in rules.items() } - self.next_token: Optional[Token] = None + self.next_token: Token | None = None self.position = 0 def consume(self, name: str) -> None: @@ -154,8 +156,8 @@ def raise_syntax_error( self, message: str, *, - span_start: Optional[int] = None, - span_end: Optional[int] = None, + span_start: int | None = None, + span_end: int | None = None, ) 
-> NoReturn: """Raise ParserSyntaxError at the given position.""" span = ( diff --git a/src/pdm/backend/_vendor/packaging/markers.py b/src/pdm/backend/_vendor/packaging/markers.py index 8b98fca..7ac7bb6 100644 --- a/src/pdm/backend/_vendor/packaging/markers.py +++ b/src/pdm/backend/_vendor/packaging/markers.py @@ -2,20 +2,16 @@ # 2.0, and the BSD License. See the LICENSE file in the root of this repository # for complete details. +from __future__ import annotations + import operator import os import platform import sys -from typing import Any, Callable, Dict, List, Optional, Tuple, Union - -from ._parser import ( - MarkerAtom, - MarkerList, - Op, - Value, - Variable, - parse_marker as _parse_marker, -) +from typing import Any, Callable, TypedDict, cast + +from ._parser import MarkerAtom, MarkerList, Op, Value, Variable +from ._parser import parse_marker as _parse_marker from ._tokenizer import ParserSyntaxError from .specifiers import InvalidSpecifier, Specifier from .utils import canonicalize_name @@ -50,6 +46,78 @@ class UndefinedEnvironmentName(ValueError): """ +class Environment(TypedDict): + implementation_name: str + """The implementation's identifier, e.g. ``'cpython'``.""" + + implementation_version: str + """ + The implementation's version, e.g. ``'3.13.0a2'`` for CPython 3.13.0a2, or + ``'7.3.13'`` for PyPy3.10 v7.3.13. + """ + + os_name: str + """ + The value of :py:data:`os.name`. The name of the operating system dependent module + imported, e.g. ``'posix'``. + """ + + platform_machine: str + """ + Returns the machine type, e.g. ``'i386'``. + + An empty string if the value cannot be determined. + """ + + platform_release: str + """ + The system's release, e.g. ``'2.2.0'`` or ``'NT'``. + + An empty string if the value cannot be determined. + """ + + platform_system: str + """ + The system/OS name, e.g. ``'Linux'``, ``'Windows'`` or ``'Java'``. + + An empty string if the value cannot be determined. + """ + + platform_version: str + """ + The system's release version, e.g. ``'#3 on degas'``. + + An empty string if the value cannot be determined. + """ + + python_full_version: str + """ + The Python version as string ``'major.minor.patchlevel'``. + + Note that unlike the Python :py:data:`sys.version`, this value will always include + the patchlevel (it defaults to 0). + """ + + platform_python_implementation: str + """ + A string identifying the Python implementation, e.g. ``'CPython'``. + """ + + python_version: str + """The Python version as string ``'major.minor'``.""" + + sys_platform: str + """ + This string contains a platform identifier that can be used to append + platform-specific components to :py:data:`sys.path`, for instance. + + For Unix systems, except on Linux and AIX, this is the lowercased OS name as + returned by ``uname -s`` with the first part of the version as returned by + ``uname -r`` appended, e.g. ``'sunos5'`` or ``'freebsd8'``, at the time when Python + was built. + """ + + def _normalize_extra_values(results: Any) -> Any: """ Normalize extra values. 
@@ -67,9 +135,8 @@ def _normalize_extra_values(results: Any) -> Any: def _format_marker( - marker: Union[List[str], MarkerAtom, str], first: Optional[bool] = True + marker: list[str] | MarkerAtom | str, first: bool | None = True ) -> str: - assert isinstance(marker, (list, tuple, str)) # Sometimes we have a structure like [[...]] which is a single item list @@ -95,7 +162,7 @@ def _format_marker( return marker -_operators: Dict[str, Operator] = { +_operators: dict[str, Operator] = { "in": lambda lhs, rhs: lhs in rhs, "not in": lambda lhs, rhs: lhs not in rhs, "<": operator.lt, @@ -115,14 +182,14 @@ def _eval_op(lhs: str, op: Op, rhs: str) -> bool: else: return spec.contains(lhs, prereleases=True) - oper: Optional[Operator] = _operators.get(op.serialize()) + oper: Operator | None = _operators.get(op.serialize()) if oper is None: raise UndefinedComparison(f"Undefined {op!r} on {lhs!r} and {rhs!r}.") return oper(lhs, rhs) -def _normalize(*values: str, key: str) -> Tuple[str, ...]: +def _normalize(*values: str, key: str) -> tuple[str, ...]: # PEP 685 – Comparison of extra names for optional distribution dependencies # https://peps.python.org/pep-0685/ # > When comparing extra names, tools MUST normalize the names being @@ -134,8 +201,8 @@ def _normalize(*values: str, key: str) -> Tuple[str, ...]: return values -def _evaluate_markers(markers: MarkerList, environment: Dict[str, str]) -> bool: - groups: List[List[bool]] = [[]] +def _evaluate_markers(markers: MarkerList, environment: dict[str, str]) -> bool: + groups: list[list[bool]] = [[]] for marker in markers: assert isinstance(marker, (list, tuple, str)) @@ -164,7 +231,7 @@ def _evaluate_markers(markers: MarkerList, environment: Dict[str, str]) -> bool: return any(all(item) for item in groups) -def format_full_version(info: "sys._version_info") -> str: +def format_full_version(info: sys._version_info) -> str: version = "{0.major}.{0.minor}.{0.micro}".format(info) kind = info.releaselevel if kind != "final": @@ -172,7 +239,7 @@ def format_full_version(info: "sys._version_info") -> str: return version -def default_environment() -> Dict[str, str]: +def default_environment() -> Environment: iver = format_full_version(sys.implementation.version) implementation_name = sys.implementation.name return { @@ -231,7 +298,7 @@ def __eq__(self, other: Any) -> bool: return str(self) == str(other) - def evaluate(self, environment: Optional[Dict[str, str]] = None) -> bool: + def evaluate(self, environment: dict[str, str] | None = None) -> bool: """Evaluate a marker. Return the boolean from evaluating the given marker against the @@ -240,8 +307,14 @@ def evaluate(self, environment: Optional[Dict[str, str]] = None) -> bool: The environment is determined from the current Python process. """ - current_environment = default_environment() + current_environment = cast("dict[str, str]", default_environment()) current_environment["extra"] = "" + # Work around platform.python_version() returning something that is not PEP 440 + # compliant for non-tagged Python builds. We preserve default_environment()'s + # behavior of returning platform.python_version() verbatim, and leave it to the + # caller to provide a syntactically valid version if they want to override it. + if current_environment["python_full_version"].endswith("+"): + current_environment["python_full_version"] += "local" if environment is not None: current_environment.update(environment) # The API used to allow setting extra to None. 
We need to handle this diff --git a/src/pdm/backend/_vendor/packaging/metadata.py b/src/pdm/backend/_vendor/packaging/metadata.py index fb27493..eb8dc84 100644 --- a/src/pdm/backend/_vendor/packaging/metadata.py +++ b/src/pdm/backend/_vendor/packaging/metadata.py @@ -1,50 +1,31 @@ +from __future__ import annotations + import email.feedparser import email.header import email.message import email.parser import email.policy -import sys import typing from typing import ( Any, Callable, - Dict, Generic, - List, - Optional, - Tuple, - Type, - Union, + Literal, + TypedDict, cast, ) -from . import requirements, specifiers, utils, version as version_module +from . import requirements, specifiers, utils +from . import version as version_module T = typing.TypeVar("T") -if sys.version_info[:2] >= (3, 8): # pragma: no cover - from typing import Literal, TypedDict -else: # pragma: no cover - if typing.TYPE_CHECKING: - from typing_extensions import Literal, TypedDict - else: - try: - from typing_extensions import Literal, TypedDict - except ImportError: - - class Literal: - def __init_subclass__(*_args, **_kwargs): - pass - - class TypedDict: - def __init_subclass__(*_args, **_kwargs): - pass try: ExceptionGroup except NameError: # pragma: no cover - class ExceptionGroup(Exception): # noqa: N818 + class ExceptionGroup(Exception): """A minimal implementation of :external:exc:`ExceptionGroup` from Python 3.11. If :external:exc:`ExceptionGroup` is already defined by Python itself, @@ -52,9 +33,9 @@ class ExceptionGroup(Exception): # noqa: N818 """ message: str - exceptions: List[Exception] + exceptions: list[Exception] - def __init__(self, message: str, exceptions: List[Exception]) -> None: + def __init__(self, message: str, exceptions: list[Exception]) -> None: self.message = message self.exceptions = exceptions @@ -100,32 +81,32 @@ class RawMetadata(TypedDict, total=False): metadata_version: str name: str version: str - platforms: List[str] + platforms: list[str] summary: str description: str - keywords: List[str] + keywords: list[str] home_page: str author: str author_email: str license: str # Metadata 1.1 - PEP 314 - supported_platforms: List[str] + supported_platforms: list[str] download_url: str - classifiers: List[str] - requires: List[str] - provides: List[str] - obsoletes: List[str] + classifiers: list[str] + requires: list[str] + provides: list[str] + obsoletes: list[str] # Metadata 1.2 - PEP 345 maintainer: str maintainer_email: str - requires_dist: List[str] - provides_dist: List[str] - obsoletes_dist: List[str] + requires_dist: list[str] + provides_dist: list[str] + obsoletes_dist: list[str] requires_python: str - requires_external: List[str] - project_urls: Dict[str, str] + requires_external: list[str] + project_urls: dict[str, str] # Metadata 2.0 # PEP 426 attempted to completely revamp the metadata format @@ -138,10 +119,10 @@ class RawMetadata(TypedDict, total=False): # Metadata 2.1 - PEP 566 description_content_type: str - provides_extra: List[str] + provides_extra: list[str] # Metadata 2.2 - PEP 643 - dynamic: List[str] + dynamic: list[str] # Metadata 2.3 - PEP 685 # No new fields were added in PEP 685, just some edge case were @@ -185,12 +166,12 @@ class RawMetadata(TypedDict, total=False): } -def _parse_keywords(data: str) -> List[str]: +def _parse_keywords(data: str) -> list[str]: """Split a string of comma-separate keyboards into a list of keywords.""" return [k.strip() for k in data.split(",")] -def _parse_project_urls(data: List[str]) -> Dict[str, str]: +def 
_parse_project_urls(data: list[str]) -> dict[str, str]: """Parse a list of label/URL string pairings separated by a comma.""" urls = {} for pair in data: @@ -230,7 +211,7 @@ def _parse_project_urls(data: List[str]) -> Dict[str, str]: return urls -def _get_payload(msg: email.message.Message, source: Union[bytes, str]) -> str: +def _get_payload(msg: email.message.Message, source: bytes | str) -> str: """Get the body of the message.""" # If our source is a str, then our caller has managed encodings for us, # and we don't need to deal with it. @@ -292,7 +273,7 @@ def _get_payload(msg: email.message.Message, source: Union[bytes, str]) -> str: _RAW_TO_EMAIL_MAPPING = {raw: email for email, raw in _EMAIL_TO_RAW_MAPPING.items()} -def parse_email(data: Union[bytes, str]) -> Tuple[RawMetadata, Dict[str, List[str]]]: +def parse_email(data: bytes | str) -> tuple[RawMetadata, dict[str, list[str]]]: """Parse a distribution's metadata stored as email headers (e.g. from ``METADATA``). This function returns a two-item tuple of dicts. The first dict is of @@ -308,8 +289,8 @@ def parse_email(data: Union[bytes, str]) -> Tuple[RawMetadata, Dict[str, List[st included in this dict. """ - raw: Dict[str, Union[str, List[str], Dict[str, str]]] = {} - unparsed: Dict[str, List[str]] = {} + raw: dict[str, str | list[str] | dict[str, str]] = {} + unparsed: dict[str, list[str]] = {} if isinstance(data, str): parsed = email.parser.Parser(policy=email.policy.compat32).parsestr(data) @@ -357,7 +338,7 @@ def parse_email(data: Union[bytes, str]) -> Tuple[RawMetadata, Dict[str, List[st # The Header object stores it's data as chunks, and each chunk # can be independently encoded, so we'll need to check each # of them. - chunks: List[Tuple[bytes, Optional[str]]] = [] + chunks: list[tuple[bytes, str | None]] = [] for bin, encoding in email.header.decode_header(h): try: bin.decode("utf8", "strict") @@ -499,11 +480,11 @@ def __init__( ) -> None: self.added = added - def __set_name__(self, _owner: "Metadata", name: str) -> None: + def __set_name__(self, _owner: Metadata, name: str) -> None: self.name = name self.raw_name = _RAW_TO_EMAIL_MAPPING[name] - def __get__(self, instance: "Metadata", _owner: Type["Metadata"]) -> T: + def __get__(self, instance: Metadata, _owner: type[Metadata]) -> T: # With Python 3.8, the caching can be replaced with functools.cached_property(). # No need to check the cache as attribute lookup will resolve into the # instance's __dict__ before __get__ is called. 
@@ -531,7 +512,7 @@ def __get__(self, instance: "Metadata", _owner: Type["Metadata"]) -> T: return cast(T, value) def _invalid_metadata( - self, msg: str, cause: Optional[Exception] = None + self, msg: str, cause: Exception | None = None ) -> InvalidMetadata: exc = InvalidMetadata( self.raw_name, msg.format_map({"field": repr(self.raw_name)}) @@ -606,7 +587,7 @@ def _process_description_content_type(self, value: str) -> str: ) return value - def _process_dynamic(self, value: List[str]) -> List[str]: + def _process_dynamic(self, value: list[str]) -> list[str]: for dynamic_field in map(str.lower, value): if dynamic_field in {"name", "version", "metadata-version"}: raise self._invalid_metadata( @@ -618,8 +599,8 @@ def _process_dynamic(self, value: List[str]) -> List[str]: def _process_provides_extra( self, - value: List[str], - ) -> List[utils.NormalizedName]: + value: list[str], + ) -> list[utils.NormalizedName]: normalized_names = [] try: for name in value: @@ -641,8 +622,8 @@ def _process_requires_python(self, value: str) -> specifiers.SpecifierSet: def _process_requires_dist( self, - value: List[str], - ) -> List[requirements.Requirement]: + value: list[str], + ) -> list[requirements.Requirement]: reqs = [] try: for req in value: @@ -665,7 +646,7 @@ class Metadata: _raw: RawMetadata @classmethod - def from_raw(cls, data: RawMetadata, *, validate: bool = True) -> "Metadata": + def from_raw(cls, data: RawMetadata, *, validate: bool = True) -> Metadata: """Create an instance from :class:`RawMetadata`. If *validate* is true, all metadata will be validated. All exceptions @@ -675,7 +656,7 @@ def from_raw(cls, data: RawMetadata, *, validate: bool = True) -> "Metadata": ins._raw = data.copy() # Mutations occur due to caching enriched values. if validate: - exceptions: List[Exception] = [] + exceptions: list[Exception] = [] try: metadata_version = ins.metadata_version metadata_age = _VALID_METADATA_VERSIONS.index(metadata_version) @@ -722,9 +703,7 @@ def from_raw(cls, data: RawMetadata, *, validate: bool = True) -> "Metadata": return ins @classmethod - def from_email( - cls, data: Union[bytes, str], *, validate: bool = True - ) -> "Metadata": + def from_email(cls, data: bytes | str, *, validate: bool = True) -> Metadata: """Parse metadata from email headers. If *validate* is true, the metadata will be validated. 
All exceptions @@ -760,66 +739,66 @@ def from_email( *validate* parameter)""" version: _Validator[version_module.Version] = _Validator() """:external:ref:`core-metadata-version` (required)""" - dynamic: _Validator[Optional[List[str]]] = _Validator( + dynamic: _Validator[list[str] | None] = _Validator( added="2.2", ) """:external:ref:`core-metadata-dynamic` (validated against core metadata field names and lowercased)""" - platforms: _Validator[Optional[List[str]]] = _Validator() + platforms: _Validator[list[str] | None] = _Validator() """:external:ref:`core-metadata-platform`""" - supported_platforms: _Validator[Optional[List[str]]] = _Validator(added="1.1") + supported_platforms: _Validator[list[str] | None] = _Validator(added="1.1") """:external:ref:`core-metadata-supported-platform`""" - summary: _Validator[Optional[str]] = _Validator() + summary: _Validator[str | None] = _Validator() """:external:ref:`core-metadata-summary` (validated to contain no newlines)""" - description: _Validator[Optional[str]] = _Validator() # TODO 2.1: can be in body + description: _Validator[str | None] = _Validator() # TODO 2.1: can be in body """:external:ref:`core-metadata-description`""" - description_content_type: _Validator[Optional[str]] = _Validator(added="2.1") + description_content_type: _Validator[str | None] = _Validator(added="2.1") """:external:ref:`core-metadata-description-content-type` (validated)""" - keywords: _Validator[Optional[List[str]]] = _Validator() + keywords: _Validator[list[str] | None] = _Validator() """:external:ref:`core-metadata-keywords`""" - home_page: _Validator[Optional[str]] = _Validator() + home_page: _Validator[str | None] = _Validator() """:external:ref:`core-metadata-home-page`""" - download_url: _Validator[Optional[str]] = _Validator(added="1.1") + download_url: _Validator[str | None] = _Validator(added="1.1") """:external:ref:`core-metadata-download-url`""" - author: _Validator[Optional[str]] = _Validator() + author: _Validator[str | None] = _Validator() """:external:ref:`core-metadata-author`""" - author_email: _Validator[Optional[str]] = _Validator() + author_email: _Validator[str | None] = _Validator() """:external:ref:`core-metadata-author-email`""" - maintainer: _Validator[Optional[str]] = _Validator(added="1.2") + maintainer: _Validator[str | None] = _Validator(added="1.2") """:external:ref:`core-metadata-maintainer`""" - maintainer_email: _Validator[Optional[str]] = _Validator(added="1.2") + maintainer_email: _Validator[str | None] = _Validator(added="1.2") """:external:ref:`core-metadata-maintainer-email`""" - license: _Validator[Optional[str]] = _Validator() + license: _Validator[str | None] = _Validator() """:external:ref:`core-metadata-license`""" - classifiers: _Validator[Optional[List[str]]] = _Validator(added="1.1") + classifiers: _Validator[list[str] | None] = _Validator(added="1.1") """:external:ref:`core-metadata-classifier`""" - requires_dist: _Validator[Optional[List[requirements.Requirement]]] = _Validator( + requires_dist: _Validator[list[requirements.Requirement] | None] = _Validator( added="1.2" ) """:external:ref:`core-metadata-requires-dist`""" - requires_python: _Validator[Optional[specifiers.SpecifierSet]] = _Validator( + requires_python: _Validator[specifiers.SpecifierSet | None] = _Validator( added="1.2" ) """:external:ref:`core-metadata-requires-python`""" # Because `Requires-External` allows for non-PEP 440 version specifiers, we # don't do any processing on the values. 
- requires_external: _Validator[Optional[List[str]]] = _Validator(added="1.2") + requires_external: _Validator[list[str] | None] = _Validator(added="1.2") """:external:ref:`core-metadata-requires-external`""" - project_urls: _Validator[Optional[Dict[str, str]]] = _Validator(added="1.2") + project_urls: _Validator[dict[str, str] | None] = _Validator(added="1.2") """:external:ref:`core-metadata-project-url`""" # PEP 685 lets us raise an error if an extra doesn't pass `Name` validation # regardless of metadata version. - provides_extra: _Validator[Optional[List[utils.NormalizedName]]] = _Validator( + provides_extra: _Validator[list[utils.NormalizedName] | None] = _Validator( added="2.1", ) """:external:ref:`core-metadata-provides-extra`""" - provides_dist: _Validator[Optional[List[str]]] = _Validator(added="1.2") + provides_dist: _Validator[list[str] | None] = _Validator(added="1.2") """:external:ref:`core-metadata-provides-dist`""" - obsoletes_dist: _Validator[Optional[List[str]]] = _Validator(added="1.2") + obsoletes_dist: _Validator[list[str] | None] = _Validator(added="1.2") """:external:ref:`core-metadata-obsoletes-dist`""" - requires: _Validator[Optional[List[str]]] = _Validator(added="1.1") + requires: _Validator[list[str] | None] = _Validator(added="1.1") """``Requires`` (deprecated)""" - provides: _Validator[Optional[List[str]]] = _Validator(added="1.1") + provides: _Validator[list[str] | None] = _Validator(added="1.1") """``Provides`` (deprecated)""" - obsoletes: _Validator[Optional[List[str]]] = _Validator(added="1.1") + obsoletes: _Validator[list[str] | None] = _Validator(added="1.1") """``Obsoletes`` (deprecated)""" diff --git a/src/pdm/backend/_vendor/packaging/requirements.py b/src/pdm/backend/_vendor/packaging/requirements.py index bdc43a7..4e068c9 100644 --- a/src/pdm/backend/_vendor/packaging/requirements.py +++ b/src/pdm/backend/_vendor/packaging/requirements.py @@ -1,8 +1,9 @@ # This file is dual licensed under the terms of the Apache License, Version # 2.0, and the BSD License. See the LICENSE file in the root of this repository # for complete details. 
+from __future__ import annotations -from typing import Any, Iterator, Optional, Set +from typing import Any, Iterator from ._parser import parse_requirement as _parse_requirement from ._tokenizer import ParserSyntaxError @@ -37,10 +38,10 @@ def __init__(self, requirement_string: str) -> None: raise InvalidRequirement(str(e)) from e self.name: str = parsed.name - self.url: Optional[str] = parsed.url or None - self.extras: Set[str] = set(parsed.extras or []) + self.url: str | None = parsed.url or None + self.extras: set[str] = set(parsed.extras or []) self.specifier: SpecifierSet = SpecifierSet(parsed.specifier) - self.marker: Optional[Marker] = None + self.marker: Marker | None = None if parsed.marker is not None: self.marker = Marker.__new__(Marker) self.marker._markers = _normalize_extra_values(parsed.marker) diff --git a/src/pdm/backend/_vendor/packaging/specifiers.py b/src/pdm/backend/_vendor/packaging/specifiers.py index 5640d68..771c258 100644 --- a/src/pdm/backend/_vendor/packaging/specifiers.py +++ b/src/pdm/backend/_vendor/packaging/specifiers.py @@ -8,10 +8,12 @@ from pdm.backend._vendor.packaging.version import Version """ +from __future__ import annotations + import abc import itertools import re -from typing import Callable, Iterable, Iterator, List, Optional, Tuple, TypeVar, Union +from typing import Callable, Iterable, Iterator, TypeVar, Union from .utils import canonicalize_version from .version import Version @@ -64,7 +66,7 @@ def __eq__(self, other: object) -> bool: @property @abc.abstractmethod - def prereleases(self) -> Optional[bool]: + def prereleases(self) -> bool | None: """Whether or not pre-releases as a whole are allowed. This can be set to either ``True`` or ``False`` to explicitly enable or disable @@ -79,14 +81,14 @@ def prereleases(self, value: bool) -> None: """ @abc.abstractmethod - def contains(self, item: str, prereleases: Optional[bool] = None) -> bool: + def contains(self, item: str, prereleases: bool | None = None) -> bool: """ Determines if the given item is contained within this specifier. """ @abc.abstractmethod def filter( - self, iterable: Iterable[UnparsedVersionVar], prereleases: Optional[bool] = None + self, iterable: Iterable[UnparsedVersionVar], prereleases: bool | None = None ) -> Iterator[UnparsedVersionVar]: """ Takes an iterable of items and filters them so that only items which @@ -217,7 +219,7 @@ class Specifier(BaseSpecifier): "===": "arbitrary", } - def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None: + def __init__(self, spec: str = "", prereleases: bool | None = None) -> None: """Initialize a Specifier instance. :param spec: @@ -234,7 +236,7 @@ def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None: if not match: raise InvalidSpecifier(f"Invalid specifier: '{spec}'") - self._spec: Tuple[str, str] = ( + self._spec: tuple[str, str] = ( match.group("operator").strip(), match.group("version").strip(), ) @@ -318,7 +320,7 @@ def __str__(self) -> str: return "{}{}".format(*self._spec) @property - def _canonical_spec(self) -> Tuple[str, str]: + def _canonical_spec(self) -> tuple[str, str]: canonical_version = canonicalize_version( self._spec[1], strip_trailing_zero=(self._spec[0] != "~="), @@ -364,7 +366,6 @@ def _get_operator(self, op: str) -> CallableOperator: return operator_callable def _compare_compatible(self, prospective: Version, spec: str) -> bool: - # Compatible releases have an equivalent combination of >= and ==. That # is that ~=2.2 is equivalent to >=2.2,==2.*. 
This allows us to # implement this in terms of the other specifiers instead of @@ -385,7 +386,6 @@ def _compare_compatible(self, prospective: Version, spec: str) -> bool: ) def _compare_equal(self, prospective: Version, spec: str) -> bool: - # We need special logic to handle prefix matching if spec.endswith(".*"): # In the case of prefix matching we want to ignore local segment. @@ -429,21 +429,18 @@ def _compare_not_equal(self, prospective: Version, spec: str) -> bool: return not self._compare_equal(prospective, spec) def _compare_less_than_equal(self, prospective: Version, spec: str) -> bool: - # NB: Local version identifiers are NOT permitted in the version # specifier, so local version labels can be universally removed from # the prospective version. return Version(prospective.public) <= Version(spec) def _compare_greater_than_equal(self, prospective: Version, spec: str) -> bool: - # NB: Local version identifiers are NOT permitted in the version # specifier, so local version labels can be universally removed from # the prospective version. return Version(prospective.public) >= Version(spec) def _compare_less_than(self, prospective: Version, spec_str: str) -> bool: - # Convert our spec to a Version instance, since we'll want to work with # it as a version. spec = Version(spec_str) @@ -468,7 +465,6 @@ def _compare_less_than(self, prospective: Version, spec_str: str) -> bool: return True def _compare_greater_than(self, prospective: Version, spec_str: str) -> bool: - # Convert our spec to a Version instance, since we'll want to work with # it as a version. spec = Version(spec_str) @@ -501,7 +497,7 @@ def _compare_greater_than(self, prospective: Version, spec_str: str) -> bool: def _compare_arbitrary(self, prospective: Version, spec: str) -> bool: return str(prospective).lower() == str(spec).lower() - def __contains__(self, item: Union[str, Version]) -> bool: + def __contains__(self, item: str | Version) -> bool: """Return whether or not the item is contained in this specifier. :param item: The item to check for. @@ -522,9 +518,7 @@ def __contains__(self, item: Union[str, Version]) -> bool: """ return self.contains(item) - def contains( - self, item: UnparsedVersion, prereleases: Optional[bool] = None - ) -> bool: + def contains(self, item: UnparsedVersion, prereleases: bool | None = None) -> bool: """Return whether or not the item is contained in this specifier. :param item: @@ -569,7 +563,7 @@ def contains( return operator_callable(normalized_item, self.version) def filter( - self, iterable: Iterable[UnparsedVersionVar], prereleases: Optional[bool] = None + self, iterable: Iterable[UnparsedVersionVar], prereleases: bool | None = None ) -> Iterator[UnparsedVersionVar]: """Filter items in the given iterable, that match the specifier. @@ -633,7 +627,7 @@ def filter( _prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$") -def _version_split(version: str) -> List[str]: +def _version_split(version: str) -> list[str]: """Split version into components. The split components are intended for version comparison. The logic does @@ -641,7 +635,7 @@ def _version_split(version: str) -> List[str]: components back with :func:`_version_join` may not produce the original version string. 
""" - result: List[str] = [] + result: list[str] = [] epoch, _, rest = version.rpartition("!") result.append(epoch or "0") @@ -655,7 +649,7 @@ def _version_split(version: str) -> List[str]: return result -def _version_join(components: List[str]) -> str: +def _version_join(components: list[str]) -> str: """Join split version components into a version string. This function assumes the input came from :func:`_version_split`, where the @@ -672,7 +666,7 @@ def _is_not_suffix(segment: str) -> bool: ) -def _pad_version(left: List[str], right: List[str]) -> Tuple[List[str], List[str]]: +def _pad_version(left: list[str], right: list[str]) -> tuple[list[str], list[str]]: left_split, right_split = [], [] # Get the release segment of our versions @@ -700,9 +694,7 @@ class SpecifierSet(BaseSpecifier): specifiers (``>=3.0,!=3.1``), or no specifier at all. """ - def __init__( - self, specifiers: str = "", prereleases: Optional[bool] = None - ) -> None: + def __init__(self, specifiers: str = "", prereleases: bool | None = None) -> None: """Initialize a SpecifierSet instance. :param specifiers: @@ -730,7 +722,7 @@ def __init__( self._prereleases = prereleases @property - def prereleases(self) -> Optional[bool]: + def prereleases(self) -> bool | None: # If we have been given an explicit prerelease modifier, then we'll # pass that through here. if self._prereleases is not None: @@ -787,7 +779,7 @@ def __str__(self) -> str: def __hash__(self) -> int: return hash(self._specs) - def __and__(self, other: Union["SpecifierSet", str]) -> "SpecifierSet": + def __and__(self, other: SpecifierSet | str) -> SpecifierSet: """Return a SpecifierSet which is a combination of the two sets. :param other: The other object to combine with. @@ -883,8 +875,8 @@ def __contains__(self, item: UnparsedVersion) -> bool: def contains( self, item: UnparsedVersion, - prereleases: Optional[bool] = None, - installed: Optional[bool] = None, + prereleases: bool | None = None, + installed: bool | None = None, ) -> bool: """Return whether or not the item is contained in this SpecifierSet. @@ -938,7 +930,7 @@ def contains( return all(s.contains(item, prereleases=prereleases) for s in self._specs) def filter( - self, iterable: Iterable[UnparsedVersionVar], prereleases: Optional[bool] = None + self, iterable: Iterable[UnparsedVersionVar], prereleases: bool | None = None ) -> Iterator[UnparsedVersionVar]: """Filter items in the given iterable, that match the specifiers in this set. @@ -995,8 +987,8 @@ def filter( # which will filter out any pre-releases, unless there are no final # releases. else: - filtered: List[UnparsedVersionVar] = [] - found_prereleases: List[UnparsedVersionVar] = [] + filtered: list[UnparsedVersionVar] = [] + found_prereleases: list[UnparsedVersionVar] = [] for item in iterable: parsed_version = _coerce_version(item) diff --git a/src/pdm/backend/_vendor/packaging/tags.py b/src/pdm/backend/_vendor/packaging/tags.py index 89f1926..6667d29 100644 --- a/src/pdm/backend/_vendor/packaging/tags.py +++ b/src/pdm/backend/_vendor/packaging/tags.py @@ -2,6 +2,8 @@ # 2.0, and the BSD License. See the LICENSE file in the root of this repository # for complete details. 
+from __future__ import annotations + import logging import platform import re @@ -11,15 +13,10 @@ import sysconfig from importlib.machinery import EXTENSION_SUFFIXES from typing import ( - Dict, - FrozenSet, Iterable, Iterator, - List, - Optional, Sequence, Tuple, - Union, cast, ) @@ -30,7 +27,7 @@ PythonVersion = Sequence[int] MacVersion = Tuple[int, int] -INTERPRETER_SHORT_NAMES: Dict[str, str] = { +INTERPRETER_SHORT_NAMES: dict[str, str] = { "python": "py", # Generic. "cpython": "cp", "pypy": "pp", @@ -96,7 +93,7 @@ def __repr__(self) -> str: return f"<{self} @ {id(self)}>" -def parse_tag(tag: str) -> FrozenSet[Tag]: +def parse_tag(tag: str) -> frozenset[Tag]: """ Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances. @@ -112,8 +109,8 @@ def parse_tag(tag: str) -> FrozenSet[Tag]: return frozenset(tags) -def _get_config_var(name: str, warn: bool = False) -> Union[int, str, None]: - value: Union[int, str, None] = sysconfig.get_config_var(name) +def _get_config_var(name: str, warn: bool = False) -> int | str | None: + value: int | str | None = sysconfig.get_config_var(name) if value is None and warn: logger.debug( "Config variable '%s' is unset, Python ABI tag may be incorrect", name @@ -125,7 +122,7 @@ def _normalize_string(string: str) -> str: return string.replace(".", "_").replace("-", "_").replace(" ", "_") -def _is_threaded_cpython(abis: List[str]) -> bool: +def _is_threaded_cpython(abis: list[str]) -> bool: """ Determine if the ABI corresponds to a threaded (`--disable-gil`) build. @@ -151,7 +148,7 @@ def _abi3_applies(python_version: PythonVersion, threading: bool) -> bool: return len(python_version) > 1 and tuple(python_version) >= (3, 2) and not threading -def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> List[str]: +def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> list[str]: py_version = tuple(py_version) # To allow for version comparison. abis = [] version = _version_nodot(py_version[:2]) @@ -185,9 +182,9 @@ def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> List[str]: def cpython_tags( - python_version: Optional[PythonVersion] = None, - abis: Optional[Iterable[str]] = None, - platforms: Optional[Iterable[str]] = None, + python_version: PythonVersion | None = None, + abis: Iterable[str] | None = None, + platforms: Iterable[str] | None = None, *, warn: bool = False, ) -> Iterator[Tag]: @@ -244,7 +241,7 @@ def cpython_tags( yield Tag(interpreter, "abi3", platform_) -def _generic_abi() -> List[str]: +def _generic_abi() -> list[str]: """ Return the ABI tag based on EXT_SUFFIX. """ @@ -286,9 +283,9 @@ def _generic_abi() -> List[str]: def generic_tags( - interpreter: Optional[str] = None, - abis: Optional[Iterable[str]] = None, - platforms: Optional[Iterable[str]] = None, + interpreter: str | None = None, + abis: Iterable[str] | None = None, + platforms: Iterable[str] | None = None, *, warn: bool = False, ) -> Iterator[Tag]: @@ -332,9 +329,9 @@ def _py_interpreter_range(py_version: PythonVersion) -> Iterator[str]: def compatible_tags( - python_version: Optional[PythonVersion] = None, - interpreter: Optional[str] = None, - platforms: Optional[Iterable[str]] = None, + python_version: PythonVersion | None = None, + interpreter: str | None = None, + platforms: Iterable[str] | None = None, ) -> Iterator[Tag]: """ Yields the sequence of tags that are compatible with a specific version of Python. 
@@ -366,7 +363,7 @@ def _mac_arch(arch: str, is_32bit: bool = _32_BIT_INTERPRETER) -> str: return "i386" -def _mac_binary_formats(version: MacVersion, cpu_arch: str) -> List[str]: +def _mac_binary_formats(version: MacVersion, cpu_arch: str) -> list[str]: formats = [cpu_arch] if cpu_arch == "x86_64": if version < (10, 4): @@ -399,7 +396,7 @@ def _mac_binary_formats(version: MacVersion, cpu_arch: str) -> List[str]: def mac_platforms( - version: Optional[MacVersion] = None, arch: Optional[str] = None + version: MacVersion | None = None, arch: str | None = None ) -> Iterator[str]: """ Yields the platform tags for a macOS system. diff --git a/src/pdm/backend/_vendor/packaging/utils.py b/src/pdm/backend/_vendor/packaging/utils.py index c2c2f75..d33da5b 100644 --- a/src/pdm/backend/_vendor/packaging/utils.py +++ b/src/pdm/backend/_vendor/packaging/utils.py @@ -2,8 +2,10 @@ # 2.0, and the BSD License. See the LICENSE file in the root of this repository # for complete details. +from __future__ import annotations + import re -from typing import FrozenSet, NewType, Tuple, Union, cast +from typing import NewType, Tuple, Union, cast from .tags import Tag, parse_tag from .version import InvalidVersion, Version @@ -53,7 +55,7 @@ def is_normalized_name(name: str) -> bool: def canonicalize_version( - version: Union[Version, str], *, strip_trailing_zero: bool = True + version: Version | str, *, strip_trailing_zero: bool = True ) -> str: """ This is very similar to Version.__str__, but has one subtle difference @@ -102,7 +104,7 @@ def canonicalize_version( def parse_wheel_filename( filename: str, -) -> Tuple[NormalizedName, Version, BuildTag, FrozenSet[Tag]]: +) -> tuple[NormalizedName, Version, BuildTag, frozenset[Tag]]: if not filename.endswith(".whl"): raise InvalidWheelFilename( f"Invalid wheel filename (extension must be '.whl'): {filename}" @@ -143,7 +145,7 @@ def parse_wheel_filename( return (name, version, build, tags) -def parse_sdist_filename(filename: str) -> Tuple[NormalizedName, Version]: +def parse_sdist_filename(filename: str) -> tuple[NormalizedName, Version]: if filename.endswith(".tar.gz"): file_stem = filename[: -len(".tar.gz")] elif filename.endswith(".zip"): diff --git a/src/pdm/backend/_vendor/packaging/version.py b/src/pdm/backend/_vendor/packaging/version.py index 545e5f6..bdd4565 100644 --- a/src/pdm/backend/_vendor/packaging/version.py +++ b/src/pdm/backend/_vendor/packaging/version.py @@ -7,9 +7,11 @@ from pdm.backend._vendor.packaging.version import parse, Version """ +from __future__ import annotations + import itertools import re -from typing import Any, Callable, NamedTuple, Optional, SupportsInt, Tuple, Union +from typing import Any, Callable, NamedTuple, SupportsInt, Tuple, Union from ._structures import Infinity, InfinityType, NegativeInfinity, NegativeInfinityType @@ -35,14 +37,14 @@ class _Version(NamedTuple): epoch: int - release: Tuple[int, ...] - dev: Optional[Tuple[str, int]] - pre: Optional[Tuple[str, int]] - post: Optional[Tuple[str, int]] - local: Optional[LocalType] + release: tuple[int, ...] + dev: tuple[str, int] | None + pre: tuple[str, int] | None + post: tuple[str, int] | None + local: LocalType | None -def parse(version: str) -> "Version": +def parse(version: str) -> Version: """Parse the given version string. >>> parse('1.0.dev1') @@ -65,7 +67,7 @@ class InvalidVersion(ValueError): class _BaseVersion: - _key: Tuple[Any, ...] + _key: tuple[Any, ...] 
def __hash__(self) -> int: return hash(self._key) @@ -73,13 +75,13 @@ def __hash__(self) -> int: # Please keep the duplicated `isinstance` check # in the six comparisons hereunder # unless you find a way to avoid adding overhead function calls. - def __lt__(self, other: "_BaseVersion") -> bool: + def __lt__(self, other: _BaseVersion) -> bool: if not isinstance(other, _BaseVersion): return NotImplemented return self._key < other._key - def __le__(self, other: "_BaseVersion") -> bool: + def __le__(self, other: _BaseVersion) -> bool: if not isinstance(other, _BaseVersion): return NotImplemented @@ -91,13 +93,13 @@ def __eq__(self, other: object) -> bool: return self._key == other._key - def __ge__(self, other: "_BaseVersion") -> bool: + def __ge__(self, other: _BaseVersion) -> bool: if not isinstance(other, _BaseVersion): return NotImplemented return self._key >= other._key - def __gt__(self, other: "_BaseVersion") -> bool: + def __gt__(self, other: _BaseVersion) -> bool: if not isinstance(other, _BaseVersion): return NotImplemented @@ -274,7 +276,7 @@ def epoch(self) -> int: return self._version.epoch @property - def release(self) -> Tuple[int, ...]: + def release(self) -> tuple[int, ...]: """The components of the "release" segment of the version. >>> Version("1.2.3").release @@ -290,7 +292,7 @@ def release(self) -> Tuple[int, ...]: return self._version.release @property - def pre(self) -> Optional[Tuple[str, int]]: + def pre(self) -> tuple[str, int] | None: """The pre-release segment of the version. >>> print(Version("1.2.3").pre) @@ -305,7 +307,7 @@ def pre(self) -> Optional[Tuple[str, int]]: return self._version.pre @property - def post(self) -> Optional[int]: + def post(self) -> int | None: """The post-release number of the version. >>> print(Version("1.2.3").post) @@ -316,7 +318,7 @@ def post(self) -> Optional[int]: return self._version.post[1] if self._version.post else None @property - def dev(self) -> Optional[int]: + def dev(self) -> int | None: """The development number of the version. >>> print(Version("1.2.3").dev) @@ -327,7 +329,7 @@ def dev(self) -> Optional[int]: return self._version.dev[1] if self._version.dev else None @property - def local(self) -> Optional[str]: + def local(self) -> str | None: """The local version segment of the version. >>> print(Version("1.2.3").local) @@ -450,9 +452,8 @@ def micro(self) -> int: def _parse_letter_version( - letter: Optional[str], number: Union[str, bytes, SupportsInt, None] -) -> Optional[Tuple[str, int]]: - + letter: str | None, number: str | bytes | SupportsInt | None +) -> tuple[str, int] | None: if letter: # We consider there to be an implicit 0 in a pre-release if there is # not a numeral associated with it. @@ -488,7 +489,7 @@ def _parse_letter_version( _local_version_separators = re.compile(r"[\._-]") -def _parse_local_version(local: Optional[str]) -> Optional[LocalType]: +def _parse_local_version(local: str | None) -> LocalType | None: """ Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve"). 
""" @@ -502,13 +503,12 @@ def _parse_local_version(local: Optional[str]) -> Optional[LocalType]: def _cmpkey( epoch: int, - release: Tuple[int, ...], - pre: Optional[Tuple[str, int]], - post: Optional[Tuple[str, int]], - dev: Optional[Tuple[str, int]], - local: Optional[LocalType], + release: tuple[int, ...], + pre: tuple[str, int] | None, + post: tuple[str, int] | None, + dev: tuple[str, int] | None, + local: LocalType | None, ) -> CmpKey: - # When we compare a release version, we want to compare it with all of the # trailing zeros removed. So we'll use a reverse the list, drop all the now # leading zeros until we come to something non zero, then take the rest diff --git a/src/pdm/backend/_vendor/pyproject_metadata/__init__.py b/src/pdm/backend/_vendor/pyproject_metadata/__init__.py index 4f831e7..7ef9fc9 100644 --- a/src/pdm/backend/_vendor/pyproject_metadata/__init__.py +++ b/src/pdm/backend/_vendor/pyproject_metadata/__init__.py @@ -2,19 +2,22 @@ from __future__ import annotations -import collections import copy import dataclasses +import email.message +import email.policy import email.utils import os import os.path import pathlib +import re import sys import typing +import warnings if typing.TYPE_CHECKING: - from collections.abc import Mapping + from collections.abc import Generator, Iterable, Mapping from typing import Any from pdm.backend._vendor.packaging.requirements import Requirement @@ -31,13 +34,77 @@ import pdm.backend._vendor.packaging.version as pkg_version -__version__ = '0.8.0' - -KNOWN_METADATA_VERSIONS = {'2.1', '2.2', '2.3'} +__version__ = '0.9.0b3' + +KNOWN_METADATA_VERSIONS = {'2.1', '2.2', '2.3', '2.4'} +PRE_SPDX_METADATA_VERSIONS = {'2.1', '2.2', '2.3'} + +KNOWN_TOPLEVEL_FIELDS = {'build-system', 'project', 'tool'} +KNOWN_BUILD_SYSTEM_FIELDS = {'backend-path', 'build-backend', 'requires'} +KNOWN_PROJECT_FIELDS = { + 'authors', + 'classifiers', + 'dependencies', + 'description', + 'dynamic', + 'entry-points', + 'gui-scripts', + 'keywords', + 'license', + 'license-files', + 'maintainers', + 'name', + 'optional-dependencies', + 'readme', + 'requires-python', + 'scripts', + 'urls', + 'version', +} + + +__all__ = [ + 'ConfigurationError', + 'ConfigurationWarning', + 'License', + 'RFC822Message', + 'RFC822Policy', + 'Readme', + 'StandardMetadata', + 'validate_build_system', + 'validate_project', + 'validate_top_level', +] + + +def __dir__() -> list[str]: + return __all__ + + +def validate_top_level(pyproject: Mapping[str, Any]) -> None: + extra_keys = set(pyproject) - KNOWN_TOPLEVEL_FIELDS + if extra_keys: + msg = f'Extra keys present in pyproject.toml: {extra_keys}' + raise ConfigurationError(msg) + + +def validate_build_system(pyproject: Mapping[str, Any]) -> None: + extra_keys = set(pyproject.get('build-system', [])) - KNOWN_BUILD_SYSTEM_FIELDS + if extra_keys: + msg = f'Extra keys present in "build-system": {extra_keys}' + raise ConfigurationError(msg) + + +def validate_project(pyproject: Mapping[str, Any]) -> None: + extra_keys = set(pyproject.get('project', [])) - KNOWN_PROJECT_FIELDS + if extra_keys: + msg = f'Extra keys present in "project": {extra_keys}' + raise ConfigurationError(msg) class ConfigurationError(Exception): - '''Error in the backend metadata.''' + """Error in the backend metadata.""" + def __init__(self, msg: str, *, key: str | None = None): super().__init__(msg) self._key = key @@ -47,43 +114,65 @@ def key(self) -> str | None: # pragma: no cover return self._key -class RFC822Message: - '''Python-flavored RFC 822 message implementation.''' +class 
ConfigurationWarning(UserWarning): + """Warnings about backend metadata.""" - def __init__(self) -> None: - self.headers: collections.OrderedDict[str, list[str]] = collections.OrderedDict() - self.body: str | None = None + +@dataclasses.dataclass +class _SmartMessageSetter: + """ + This provides a nice internal API for setting values in an Message to + reduce boilerplate. + + If a value is None, do nothing. + If a value contains a newline, indent it (may produce a warning in the future). + """ + + message: email.message.Message def __setitem__(self, name: str, value: str | None) -> None: if not value: return - if name not in self.headers: - self.headers[name] = [] - self.headers[name].append(value) - - def __str__(self) -> str: - text = '' - for name, entries in self.headers.items(): - for entry in entries: - lines = entry.strip('\n').split('\n') - text += f'{name}: {lines[0]}\n' - for line in lines[1:]: - text += ' ' * 8 + line + '\n' - if self.body: - text += '\n' + self.body - return text - - def __bytes__(self) -> bytes: - return str(self).encode() + self.message[name] = value + + +class RFC822Policy(email.policy.EmailPolicy): + """ + This is `email.policy.EmailPolicy`, but with a simple ``header_store_parse`` + implementation that handles multiline values, and some nice defaults. + """ + + utf8 = True + mangle_from_ = False + max_line_length = 0 + + def header_store_parse(self, name: str, value: str) -> tuple[str, str]: + size = len(name) + 2 + value = value.replace('\n', '\n' + ' ' * size) + return (name, value) + + +class RFC822Message(email.message.EmailMessage): + """ + This is `email.message.EmailMessage` with two small changes: it defaults to + our `RFC822Policy`, and it correctly writes unicode when being called + with `bytes()`. + """ + + def __init__(self) -> None: + super().__init__(policy=RFC822Policy()) + + def as_bytes( + self, unixfrom: bool = False, policy: email.policy.Policy | None = None + ) -> bytes: + return self.as_string(unixfrom, policy=policy).encode('utf-8') class DataFetcher: def __init__(self, data: Mapping[str, Any]) -> None: self._data = data - def __contains__(self, key: Any) -> bool: - if not isinstance(key, str): - return False + def __contains__(self, key: str) -> bool: val = self._data try: for part in key.split('.'): @@ -108,7 +197,7 @@ def get_str(self, key: str) -> str | None: except KeyError: return None - def get_list(self, key: str) -> list[str]: + def get_list(self, key: str) -> list[str] | None: try: val = self.get(key) if not isinstance(val, list): @@ -120,7 +209,7 @@ def get_list(self, key: str) -> list[str]: raise ConfigurationError(msg, key=key) return val except KeyError: - return [] + return None def get_dict(self, key: str) -> dict[str, str]: try: @@ -136,7 +225,7 @@ def get_dict(self, key: str) -> dict[str, str]: except KeyError: return {} - def get_people(self, key: str) -> list[tuple[str, str]]: + def get_people(self, key: str) -> list[tuple[str, str | None]]: try: val = self.get(key) if not ( @@ -153,14 +242,203 @@ def get_people(self, key: str) -> list[tuple[str, str]]: f'dictionaries containing the "name" and/or "email" keys (got "{val}")' ) raise ConfigurationError(msg, key=key) - return [ - (entry.get('name', 'Unknown'), entry.get('email')) - for entry in val - ] + return [(entry.get('name', 'Unknown'), entry.get('email')) for entry in val] except KeyError: return [] +class ProjectFetcher(DataFetcher): + def get_license(self, project_dir: pathlib.Path) -> License | str | None: + if 'project.license' not in self: + return None + 
+ val = self.get('project.license') + if isinstance(val, str): + return self.get_str('project.license') + + if isinstance(val, dict): + _license = self.get_dict('project.license') + else: + msg = f'Field "project.license" has an invalid type, expecting a string or dictionary of strings (got "{val}")' + raise ConfigurationError(msg) + + for field in _license: + if field not in ('file', 'text'): + msg = f'Unexpected field "project.license.{field}"' + raise ConfigurationError(msg, key=f'project.license.{field}') + + file: pathlib.Path | None = None + filename = self.get_str('project.license.file') + text = self.get_str('project.license.text') + + if (filename and text) or (not filename and not text): + msg = f'Invalid "project.license" value, expecting either "file" or "text" (got "{_license}")' + raise ConfigurationError(msg, key='project.license') + + if filename: + file = project_dir.joinpath(filename) + if not file.is_file(): + msg = f'License file not found ("{filename}")' + raise ConfigurationError(msg, key='project.license.file') + text = file.read_text(encoding='utf-8') + + assert text is not None + return License(text, file) + + def get_license_files(self, project_dir: pathlib.Path) -> list[pathlib.Path] | None: + license_files = self.get_list('project.license-files') + if license_files is None: + return None + + return list(_get_files_from_globs(project_dir, license_files)) + + def get_readme(self, project_dir: pathlib.Path) -> Readme | None: # noqa: C901 + if 'project.readme' not in self: + return None + + filename: str | None + file: pathlib.Path | None = None + text: str | None + content_type: str | None + + readme = self.get('project.readme') + if isinstance(readme, str): + # readme is a file + text = None + filename = readme + if filename.endswith('.md'): + content_type = 'text/markdown' + elif filename.endswith('.rst'): + content_type = 'text/x-rst' + else: + msg = f'Could not infer content type for readme file "{filename}"' + raise ConfigurationError(msg, key='project.readme') + elif isinstance(readme, dict): + # readme is a dict containing either 'file' or 'text', and content-type + for field in readme: + if field not in ('content-type', 'file', 'text'): + msg = f'Unexpected field "project.readme.{field}"' + raise ConfigurationError(msg, key=f'project.readme.{field}') + content_type = self.get_str('project.readme.content-type') + filename = self.get_str('project.readme.file') + text = self.get_str('project.readme.text') + if (filename and text) or (not filename and not text): + msg = f'Invalid "project.readme" value, expecting either "file" or "text" (got "{readme}")' + raise ConfigurationError(msg, key='project.readme') + if not content_type: + msg = 'Field "project.readme.content-type" missing' + raise ConfigurationError(msg, key='project.readme.content-type') + else: + msg = ( + f'Field "project.readme" has an invalid type, expecting either, ' + f'a string or dictionary of strings (got "{readme}")' + ) + raise ConfigurationError(msg, key='project.readme') + + if filename: + file = project_dir.joinpath(filename) + if not file.is_file(): + msg = f'Readme file not found ("{filename}")' + raise ConfigurationError(msg, key='project.readme.file') + text = file.read_text(encoding='utf-8') + + assert text is not None + return Readme(text, file, content_type) + + def get_dependencies(self) -> list[Requirement]: + requirement_strings = self.get_list('project.dependencies') or [] + + requirements: list[Requirement] = [] + for req in requirement_strings: + try: + 
requirements.append(pkg_requirements.Requirement(req)) + except pkg_requirements.InvalidRequirement as e: + msg = ( + 'Field "project.dependencies" contains an invalid PEP 508 ' + f'requirement string "{req}" ("{e}")' + ) + raise ConfigurationError(msg) from None + return requirements + + def get_optional_dependencies( + self, + ) -> dict[str, list[Requirement]]: + try: + val = self.get('project.optional-dependencies') + except KeyError: + return {} + + requirements_dict: dict[str, list[Requirement]] = {} + if not isinstance(val, dict): + msg = ( + 'Field "project.optional-dependencies" has an invalid type, expecting a ' + f'dictionary of PEP 508 requirement strings (got "{val}")' + ) + raise ConfigurationError(msg) + for extra, requirements in val.copy().items(): + assert isinstance(extra, str) + if not isinstance(requirements, list): + msg = ( + f'Field "project.optional-dependencies.{extra}" has an invalid type, expecting a ' + f'dictionary PEP 508 requirement strings (got "{requirements}")' + ) + raise ConfigurationError(msg) + requirements_dict[extra] = [] + for req in requirements: + if not isinstance(req, str): + msg = ( + f'Field "project.optional-dependencies.{extra}" has an invalid type, ' + f'expecting a PEP 508 requirement string (got "{req}")' + ) + raise ConfigurationError(msg) + try: + requirements_dict[extra].append( + pkg_requirements.Requirement(req) + ) + except pkg_requirements.InvalidRequirement as e: + msg = ( + f'Field "project.optional-dependencies.{extra}" contains ' + f'an invalid PEP 508 requirement string "{req}" ("{e}")' + ) + raise ConfigurationError(msg) from None + return dict(requirements_dict) + + def get_entrypoints(self) -> dict[str, dict[str, str]]: + try: + val = self.get('project.entry-points') + except KeyError: + return {} + if not isinstance(val, dict): + msg = ( + 'Field "project.entry-points" has an invalid type, expecting a ' + f'dictionary of entrypoint sections (got "{val}")' + ) + raise ConfigurationError(msg) + for section, entrypoints in val.items(): + assert isinstance(section, str) + if not re.match(r'^\w+(\.\w+)*$', section): + msg = ( + 'Field "project.entry-points" has an invalid value, expecting a name ' + f'containing only alphanumeric, underscore, or dot characters (got "{section}")' + ) + raise ConfigurationError(msg) + if not isinstance(entrypoints, dict): + msg = ( + f'Field "project.entry-points.{section}" has an invalid type, expecting a ' + f'dictionary of entrypoints (got "{entrypoints}")' + ) + raise ConfigurationError(msg) + for name, entrypoint in entrypoints.items(): + assert isinstance(name, str) + if not isinstance(entrypoint, str): + msg = ( + f'Field "project.entry-points.{section}.{name}" has an invalid type, ' + f'expecting a string (got "{entrypoint}")' + ) + raise ConfigurationError(msg) + return val + + class License(typing.NamedTuple): text: str file: pathlib.Path | None @@ -177,14 +455,17 @@ class StandardMetadata: name: str version: pkg_version.Version | None = None description: str | None = None - license: License | None = None + license: License | str | None = None + license_files: list[pathlib.Path] | None = None readme: Readme | None = None requires_python: pkg_specifiers.SpecifierSet | None = None dependencies: list[Requirement] = dataclasses.field(default_factory=list) - optional_dependencies: dict[str, list[Requirement]] = dataclasses.field(default_factory=dict) + optional_dependencies: dict[str, list[Requirement]] = dataclasses.field( + default_factory=dict + ) entrypoints: dict[str, dict[str, str]] = 
dataclasses.field(default_factory=dict) - authors: list[tuple[str, str]] = dataclasses.field(default_factory=list) - maintainers: list[tuple[str, str]] = dataclasses.field(default_factory=list) + authors: list[tuple[str, str | None]] = dataclasses.field(default_factory=list) + maintainers: list[tuple[str, str | None]] = dataclasses.field(default_factory=list) urls: dict[str, str] = dataclasses.field(default_factory=dict) classifiers: list[str] = dataclasses.field(default_factory=list) keywords: list[str] = dataclasses.field(default_factory=list) @@ -194,10 +475,74 @@ class StandardMetadata: _metadata_version: str | None = None + def __post_init__(self) -> None: + self.validate() + + def validate(self, *, warn: bool = True) -> None: + if ( + self._metadata_version + and self._metadata_version not in KNOWN_METADATA_VERSIONS + ): + msg = f'The metadata_version must be one of {KNOWN_METADATA_VERSIONS} or None (default)' + raise ConfigurationError(msg) + + # See https://packaging.python.org/en/latest/specifications/core-metadata/#name and + # https://packaging.python.org/en/latest/specifications/name-normalization/#name-format + if not re.match( + r'^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$', self.name, re.IGNORECASE + ): + msg = ( + f'Invalid project name "{self.name}". A valid name consists only of ASCII letters and ' + 'numbers, period, underscore and hyphen. It must start and end with a letter or number' + ) + raise ConfigurationError(msg) + + if self.license_files is not None and isinstance(self.license, License): + msg = '"project.license-files" must not be used when "project.license" is not a SPDX license expression' + raise ConfigurationError(msg) + + if isinstance(self.license, str) and any( + c.startswith('License ::') for c in self.classifiers + ): + msg = 'Setting "project.license" to an SPDX license expression is not compatible with "License ::" classifiers' + raise ConfigurationError(msg) + + if warn and self.metadata_version not in PRE_SPDX_METADATA_VERSIONS: + if isinstance(self.license, License): + warnings.warn( + 'Set "project.license" to an SPDX license expression for metadata >= 2.4', + ConfigurationWarning, + stacklevel=2, + ) + elif any(c.startswith('License ::') for c in self.classifiers): + warnings.warn( + '"License ::" classifiers are deprecated for metadata >= 2.4, use a SPDX license expression for "project.license" instead', + ConfigurationWarning, + stacklevel=2, + ) + + if ( + isinstance(self.license, str) + and self._metadata_version in PRE_SPDX_METADATA_VERSIONS + ): + msg = 'Setting "project.license" to an SPDX license expression is supported only when emitting metadata version >= 2.4' + raise ConfigurationError(msg) + + if ( + self.license_files is not None + and self._metadata_version in PRE_SPDX_METADATA_VERSIONS + ): + msg = '"project.license-files" is supported only when emitting metadata version >= 2.4' + raise ConfigurationError(msg) + @property def metadata_version(self) -> str: if self._metadata_version is None: - return '2.2' if self.dynamic else '2.1' + if isinstance(self.license, str) or self.license_files is not None: + return '2.4' + if self.dynamic: + return '2.2' + return '2.1' return self._metadata_version @property @@ -210,15 +555,25 @@ def from_pyproject( data: Mapping[str, Any], project_dir: str | os.PathLike[str] = os.path.curdir, metadata_version: str | None = None, + *, + allow_extra_keys: bool | None = None, ) -> Self: - fetcher = DataFetcher(data) + fetcher = ProjectFetcher(data) project_dir = pathlib.Path(project_dir) if 'project' not 
in fetcher: msg = 'Section "project" missing in pyproject.toml' raise ConfigurationError(msg) - dynamic = fetcher.get_list('project.dynamic') + if allow_extra_keys is None: + try: + validate_project(data) + except ConfigurationError as err: + warnings.warn(str(err), ConfigurationWarning, stacklevel=2) + elif not allow_extra_keys: + validate_project(data) + + dynamic = fetcher.get_list('project.dynamic') or [] if 'name' in dynamic: msg = 'Unsupported field "name" in "project.dynamic"' raise ConfigurationError(msg) @@ -246,25 +601,24 @@ def from_pyproject( # so leave it up to the users for now. description = fetcher.get_str('project.description') - if metadata_version and metadata_version not in KNOWN_METADATA_VERSIONS: - msg = f'The metadata_version must be one of {KNOWN_METADATA_VERSIONS} or None (default)' - raise ConfigurationError(msg) - return cls( name, version, description, - cls._get_license(fetcher, project_dir), - cls._get_readme(fetcher, project_dir), - pkg_specifiers.SpecifierSet(requires_python_string) if requires_python_string else None, - cls._get_dependencies(fetcher), - cls._get_optional_dependencies(fetcher), - cls._get_entrypoints(fetcher), + fetcher.get_license(project_dir), + fetcher.get_license_files(project_dir), + fetcher.get_readme(project_dir), + pkg_specifiers.SpecifierSet(requires_python_string) + if requires_python_string + else None, + fetcher.get_dependencies(), + fetcher.get_optional_dependencies(), + fetcher.get_entrypoints(), fetcher.get_people('project.authors'), fetcher.get_people('project.maintainers'), fetcher.get_dict('project.urls'), - fetcher.get_list('project.classifiers'), - fetcher.get_list('project.keywords'), + fetcher.get_list('project.classifiers') or [], + fetcher.get_list('project.keywords') or [], fetcher.get_dict('project.scripts'), fetcher.get_dict('project.gui-scripts'), dynamic, @@ -286,67 +640,75 @@ def as_rfc822(self) -> RFC822Message: self.write_to_rfc822(message) return message - def write_to_rfc822(self, message: RFC822Message) -> None: # noqa: C901 - message['Metadata-Version'] = self.metadata_version - message['Name'] = self.name + def write_to_rfc822(self, message: email.message.Message) -> None: # noqa: C901 + self.validate(warn=False) + + smart_message = _SmartMessageSetter(message) + + smart_message['Metadata-Version'] = self.metadata_version + smart_message['Name'] = self.name if not self.version: msg = 'Missing version field' raise ConfigurationError(msg) - message['Version'] = str(self.version) + smart_message['Version'] = str(self.version) # skip 'Platform' # skip 'Supported-Platform' if self.description: - message['Summary'] = self.description - message['Keywords'] = ','.join(self.keywords) + smart_message['Summary'] = self.description + smart_message['Keywords'] = ','.join(self.keywords) if 'homepage' in self.urls: - message['Home-page'] = self.urls['homepage'] + smart_message['Home-page'] = self.urls['homepage'] # skip 'Download-URL' - message['Author'] = self._name_list(self.authors) - message['Author-Email'] = self._email_list(self.authors) - message['Maintainer'] = self._name_list(self.maintainers) - message['Maintainer-Email'] = self._email_list(self.maintainers) - if self.license: - message['License'] = self.license.text + smart_message['Author'] = self._name_list(self.authors) + smart_message['Author-Email'] = self._email_list(self.authors) + smart_message['Maintainer'] = self._name_list(self.maintainers) + smart_message['Maintainer-Email'] = self._email_list(self.maintainers) + + if isinstance(self.license, 
License): + smart_message['License'] = self.license.text + elif isinstance(self.license, str): + smart_message['License-Expression'] = self.license + + if self.license_files is not None: + for license_file in sorted(set(self.license_files)): + smart_message['License-File'] = os.fspath(license_file.as_posix()) + for classifier in self.classifiers: - message['Classifier'] = classifier + smart_message['Classifier'] = classifier # skip 'Provides-Dist' # skip 'Obsoletes-Dist' # skip 'Requires-External' for name, url in self.urls.items(): - message['Project-URL'] = f'{name.capitalize()}, {url}' + smart_message['Project-URL'] = f'{name.capitalize()}, {url}' if self.requires_python: - message['Requires-Python'] = str(self.requires_python) + smart_message['Requires-Python'] = str(self.requires_python) for dep in self.dependencies: - message['Requires-Dist'] = str(dep) + smart_message['Requires-Dist'] = str(dep) for extra, requirements in self.optional_dependencies.items(): norm_extra = extra.replace('.', '-').replace('_', '-').lower() - message['Provides-Extra'] = norm_extra + smart_message['Provides-Extra'] = norm_extra for requirement in requirements: - message['Requires-Dist'] = str(self._build_extra_req(norm_extra, requirement)) + smart_message['Requires-Dist'] = str( + self._build_extra_req(norm_extra, requirement) + ) if self.readme: if self.readme.content_type: - message['Description-Content-Type'] = self.readme.content_type - message.body = self.readme.text + smart_message['Description-Content-Type'] = self.readme.content_type + message.set_payload(self.readme.text) # Core Metadata 2.2 if self.metadata_version != '2.1': for field in self.dynamic: if field in ('name', 'version'): msg = f'Field cannot be dynamic: {field}' raise ConfigurationError(msg) - message['Dynamic'] = field + smart_message['Dynamic'] = field - def _name_list(self, people: list[tuple[str, str]]) -> str: - return ', '.join( - name - for name, email_ in people - if not email_ - ) + def _name_list(self, people: list[tuple[str, str | None]]) -> str: + return ', '.join(name for name, email_ in people if not email_) - def _email_list(self, people: list[tuple[str, str]]) -> str: + def _email_list(self, people: list[tuple[str, str | None]]) -> str: return ', '.join( - email.utils.formataddr((name, _email)) - for name, _email in people - if _email + email.utils.formataddr((name, _email)) for name, _email in people if _email ) def _build_extra_req( @@ -369,174 +731,17 @@ def _build_extra_req( requirement.marker = pkg_markers.Marker(f'extra == "{extra}"') return requirement - @staticmethod - def _get_license(fetcher: DataFetcher, project_dir: pathlib.Path) -> License | None: - if 'project.license' not in fetcher: - return None - - _license = fetcher.get_dict('project.license') - for field in _license: - if field not in ('file', 'text'): - msg = f'Unexpected field "project.license.{field}"' - raise ConfigurationError(msg, key=f'project.license.{field}') - - file: pathlib.Path | None = None - filename = fetcher.get_str('project.license.file') - text = fetcher.get_str('project.license.text') - if (filename and text) or (not filename and not text): - msg = f'Invalid "project.license" value, expecting either "file" or "text" (got "{_license}")' - raise ConfigurationError(msg, key='project.license') - - if filename: - file = project_dir.joinpath(filename) - if not file.is_file(): - msg = f'License file not found ("{filename}")' - raise ConfigurationError(msg, key='project.license.file') - text = file.read_text(encoding='utf-8') - - 
assert text is not None - return License(text, file) - - @staticmethod - def _get_readme(fetcher: DataFetcher, project_dir: pathlib.Path) -> Readme | None: # noqa: C901 - if 'project.readme' not in fetcher: - return None - - filename: str | None - file: pathlib.Path | None = None - text: str | None - content_type: str | None - - readme = fetcher.get('project.readme') - if isinstance(readme, str): - # readme is a file - text = None - filename = readme - if filename.endswith('.md'): - content_type = 'text/markdown' - elif filename.endswith('.rst'): - content_type = 'text/x-rst' - else: - msg = f'Could not infer content type for readme file "{filename}"' - raise ConfigurationError(msg, key='project.readme') - elif isinstance(readme, dict): - # readme is a dict containing either 'file' or 'text', and content-type - for field in readme: - if field not in ('content-type', 'file', 'text'): - msg = f'Unexpected field "project.readme.{field}"' - raise ConfigurationError(msg, key=f'project.readme.{field}') - content_type = fetcher.get_str('project.readme.content-type') - filename = fetcher.get_str('project.readme.file') - text = fetcher.get_str('project.readme.text') - if (filename and text) or (not filename and not text): - msg = f'Invalid "project.readme" value, expecting either "file" or "text" (got "{readme}")' - raise ConfigurationError(msg, key='project.readme') - if not content_type: - msg = 'Field "project.readme.content-type" missing' - raise ConfigurationError(msg, key='project.readme.content-type') - else: - msg = ( - f'Field "project.readme" has an invalid type, expecting either, ' - f'a string or dictionary of strings (got "{readme}")' - ) - raise ConfigurationError(msg, key='project.readme') - - if filename: - file = project_dir.joinpath(filename) - if not file.is_file(): - msg = f'Readme file not found ("{filename}")' - raise ConfigurationError(msg, key='project.readme.file') - text = file.read_text(encoding='utf-8') - - assert text is not None - return Readme(text, file, content_type) - - @staticmethod - def _get_dependencies(fetcher: DataFetcher) -> list[Requirement]: - try: - requirement_strings = fetcher.get_list('project.dependencies') - except KeyError: - return [] - - requirements: list[Requirement] = [] - for req in requirement_strings: - try: - requirements.append(pkg_requirements.Requirement(req)) - except pkg_requirements.InvalidRequirement as e: - msg = ( - 'Field "project.dependencies" contains an invalid PEP 508 ' - f'requirement string "{req}" ("{e}")' - ) - raise ConfigurationError(msg) from None - return requirements - - @staticmethod - def _get_optional_dependencies(fetcher: DataFetcher) -> dict[str, list[Requirement]]: - try: - val = fetcher.get('project.optional-dependencies') - except KeyError: - return {} - - requirements_dict: dict[str, list[Requirement]] = {} - if not isinstance(val, dict): - msg = ( - 'Field "project.optional-dependencies" has an invalid type, expecting a ' - f'dictionary of PEP 508 requirement strings (got "{val}")' - ) +def _get_files_from_globs( + project_dir: pathlib.Path, globs: Iterable[str] +) -> Generator[pathlib.Path, None, None]: + for glob in globs: + if glob.startswith(('..', '/')): + msg = f'"{glob}" is an invalid "project.license-files" glob: the pattern must match files within the project directory' raise ConfigurationError(msg) - for extra, requirements in val.copy().items(): - assert isinstance(extra, str) - if not isinstance(requirements, list): - msg = ( - f'Field "project.optional-dependencies.{extra}" has an invalid type, 
expecting a ' - f'dictionary PEP 508 requirement strings (got "{requirements}")' - ) - raise ConfigurationError(msg) - requirements_dict[extra] = [] - for req in requirements: - if not isinstance(req, str): - msg = ( - f'Field "project.optional-dependencies.{extra}" has an invalid type, ' - f'expecting a PEP 508 requirement string (got "{req}")' - ) - raise ConfigurationError(msg) - try: - requirements_dict[extra].append(pkg_requirements.Requirement(req)) - except pkg_requirements.InvalidRequirement as e: - msg = ( - f'Field "project.optional-dependencies.{extra}" contains ' - f'an invalid PEP 508 requirement string "{req}" ("{e}")' - ) - raise ConfigurationError(msg) from None - return dict(requirements_dict) - - @staticmethod - def _get_entrypoints(fetcher: DataFetcher) -> dict[str, dict[str, str]]: - try: - val = fetcher.get('project.entry-points') - except KeyError: - return {} - if not isinstance(val, dict): - msg = ( - 'Field "project.entry-points" has an invalid type, expecting a ' - f'dictionary of entrypoint sections (got "{val}")' - ) + files = [f for f in project_dir.glob(glob) if f.is_file()] + if not files: + msg = f'Every pattern in "project.license-files" must match at least one file: "{glob}" did not match any' raise ConfigurationError(msg) - for section, entrypoints in val.items(): - assert isinstance(section, str) - if not isinstance(entrypoints, dict): - msg = ( - f'Field "project.entry-points.{section}" has an invalid type, expecting a ' - f'dictionary of entrypoints (got "{entrypoints}")' - ) - raise ConfigurationError(msg) - for name, entrypoint in entrypoints.items(): - assert isinstance(name, str) - if not isinstance(entrypoint, str): - msg = ( - f'Field "project.entry-points.{section}.{name}" has an invalid type, ' - f'expecting a string (got "{entrypoint}")' - ) - raise ConfigurationError(msg) - return val + for f in files: + yield f.relative_to(project_dir) diff --git a/src/pdm/backend/_vendor/vendor.txt b/src/pdm/backend/_vendor/vendor.txt index c2749ac..77cae35 100644 --- a/src/pdm/backend/_vendor/vendor.txt +++ b/src/pdm/backend/_vendor/vendor.txt @@ -1,5 +1,5 @@ -packaging==24.0 +packaging==24.1 tomli==2.0.1 tomli_w==1.0.0 -pyproject-metadata==0.8.0 +pyproject-metadata==0.9.0b3 editables==0.5 diff --git a/src/pdm/backend/base.py b/src/pdm/backend/base.py index fa8fa1d..e4a75b8 100644 --- a/src/pdm/backend/base.py +++ b/src/pdm/backend/base.py @@ -4,7 +4,6 @@ import os import shutil import sys -import warnings from fnmatch import fnmatch from pathlib import Path from typing import ( @@ -17,8 +16,8 @@ cast, ) +from pdm.backend._vendor.pyproject_metadata import StandardMetadata from pdm.backend.config import Config -from pdm.backend.exceptions import PDMWarning, ValidationError from pdm.backend.hooks import BuildHookInterface, Context from pdm.backend.hooks.version import DynamicVersionBuildHook from pdm.backend.structures import FileMap @@ -275,34 +274,21 @@ def _collect_build_files(self, context: Context) -> FileMap: files[rel_path] = p return files - def find_license_files(self) -> list[str]: - """Return a list of license files from the PEP 639 metadata.""" - root = self.location - license_files = self.config.metadata.license_files - if "paths" in license_files: - invalid_paths = [ - p for p in license_files["paths"] if not (root / p).is_file() - ] - if invalid_paths: - raise ValidationError( - "license-files", f"License files not found: {invalid_paths}" - ) - return license_files["paths"] - else: - paths = [ - p.relative_to(root).as_posix() - for pattern in 
license_files["globs"] - for p in root.glob(pattern) - if (root / p).is_file() - ] - if license_files["globs"] and not paths: - warnings.warn( - f"No license files are matched with glob patterns " - f"{license_files['globs']}.", - PDMWarning, - stacklevel=2, - ) - return paths + def find_license_files(self, metadata: StandardMetadata) -> list[str]: + result: list[str] = [] + if file := getattr(metadata.license, "file", None): + result.append(file.relative_to(self.location).as_posix()) + if metadata.license_files: + for file in metadata.license_files: + result.append(file.as_posix()) + if ( + not result and metadata.license_files is None + ): # no license files specified, find from default patterns for backward compatibility + for pattern in ["LICEN[CS]E*", "COPYING*", "NOTICE*"]: + for path in self.location.glob(pattern): + if path.is_file(): + result.append(path.relative_to(self.location).as_posix()) + return result def _get_include_and_exclude(self) -> tuple[set[str], set[str]]: includes = set() diff --git a/src/pdm/backend/config.py b/src/pdm/backend/config.py index 5bc13d4..382e5f4 100644 --- a/src/pdm/backend/config.py +++ b/src/pdm/backend/config.py @@ -3,6 +3,7 @@ import glob import os import sys +from functools import cached_property from pathlib import Path from typing import TYPE_CHECKING, Any, TypeVar @@ -41,29 +42,27 @@ class Config: """ def __init__(self, root: Path, data: dict[str, Any]) -> None: - self.validate(data, root) self.root = root self.data = data - self.metadata = Metadata(data["project"]) - self.build_config = BuildConfig( - root, data.setdefault("tool", {}).get("pdm", {}).get("build", {}) - ) - - def to_coremetadata(self) -> str: - """Return the metadata as a Core Metadata string.""" - metadata = StandardMetadata.from_pyproject(self.data, project_dir=self.root) - # Fix the name field to unnormalized form. 
- metadata.name = self.metadata["name"] - return str(metadata.as_rfc822()) + self.validate() - @classmethod - def validate(cls, data: dict[str, Any], root: Path) -> None: + def validate(self) -> StandardMetadata: """Validate the pyproject.toml data.""" try: - StandardMetadata.from_pyproject(data, project_dir=root) + return StandardMetadata.from_pyproject(self.data, project_dir=self.root) except ConfigurationError as e: raise ValidationError(e.args[0], e.key) from e + @property + def metadata(self) -> dict[str, Any]: + return self.data["project"] + + @cached_property + def build_config(self) -> BuildConfig: + return BuildConfig( + self.root, self.data.setdefault("tool", {}).get("pdm", {}).get("build", {}) + ) + @classmethod def from_pyproject(cls, root: str | Path) -> Config: """Load the pyproject.toml file from the given project root.""" @@ -154,69 +153,6 @@ def convert_package_paths(self) -> dict[str, list | dict]: } -class Metadata(Table): - """The project metadata table""" - - @property - def readme_file(self) -> str | None: - """The readme file path, if not exists, returns None""" - readme = self.get("readme") - if not readme: - return None - if isinstance(readme, str): - return readme - if isinstance(readme, dict) and "file" in readme: - return readme["file"] - return None - - @property - def license_files(self) -> dict[str, list[str]]: - """The license files configuration""" - subtable_files = None - if ( - "license" in self - and isinstance(self["license"], dict) - and "files" in self["license"] - ): - subtable_files = self["license"]["files"] - if "license-files" not in self: - if subtable_files is not None: - return {"paths": [self["license"]["file"]]} - return { - "globs": [ - "LICENSES/*", - "LICEN[CS]E*", - "COPYING*", - "NOTICE*", - "AUTHORS*", - ] - } - if subtable_files is not None: - raise ValidationError( - "license-files", - "Can't specify both 'license.files' and 'license-files' fields", - ) - rv = self["license-files"] - valid_keys = {"globs", "paths"} & set(rv) - if len(valid_keys) == 2: - raise ValidationError( - "license-files", "Can't specify both 'paths' and 'globs'" - ) - if not valid_keys: - raise ValidationError("license-files", "Must specify 'paths' or 'globs'") - return rv - - @property - def entry_points(self) -> dict[str, dict[str, str]]: - """The entry points mapping""" - entry_points: dict[str, dict[str, str]] = self.get("entry-points", {}) - if "scripts" in self: - entry_points["console_scripts"] = self["scripts"] - if "gui-scripts" in self: - entry_points["gui_scripts"] = self["gui-scripts"] - return entry_points - - class BuildConfig(Table): """The `[tool.pdm.build]` table""" diff --git a/src/pdm/backend/hooks/setuptools.py b/src/pdm/backend/hooks/setuptools.py index a247c78..21c72ff 100644 --- a/src/pdm/backend/hooks/setuptools.py +++ b/src/pdm/backend/hooks/setuptools.py @@ -127,12 +127,12 @@ def cleanup() -> None: def format_setup_py(self, context: Context) -> str: before, extra, after = [], [], [] - meta = context.config.metadata + meta = context.config.validate() kwargs = { - "name": meta["name"], - "version": meta.get("version", "0.0.0"), - "description": meta.get("description", "UNKNOWN"), - "url": (meta.get("project-urls", {})).get("homepage", ""), + "name": meta.name, + "version": str(meta.version or "0.0.0"), + "description": meta.description or "UNKNOWN", + "url": meta.urls.get("homepage", ""), } # Run the pdm_build_update_setup_kwargs hook to update the kwargs @@ -160,19 +160,20 @@ def format_setup_py(self, context: Context) -> str: ) ) - if 
meta.get("dependencies"): - before.append(f"INSTALL_REQUIRES = {_format_list(meta['dependencies'])}\n") + if meta.dependencies: + before.append(f"INSTALL_REQUIRES = {_format_list(meta.dependencies)}\n") extra.append(" 'install_requires': INSTALL_REQUIRES,\n") - if meta.get("optional-dependencies"): + if meta.optional_dependencies: before.append( - "EXTRAS_REQUIRE = {}\n".format( - _format_dict_list(meta["optional-dependencies"]) - ) + f"EXTRAS_REQUIRE = {_format_dict_list(meta.optional_dependencies)}\n" ) extra.append(" 'extras_require': EXTRAS_REQUIRE,\n") - if meta.get("requires-python"): - extra.append(f" 'python_requires': {meta['requires-python']!r},\n") - entry_points = meta.entry_points + if meta.requires_python is not None: + extra.append(f" 'python_requires': '{meta.requires_python}',\n") + entry_points = meta.entrypoints.copy() + entry_points.update( + {"console_scripts": meta.scripts, "gui_scripts": meta.gui_scripts} + ) if entry_points: entry_points_list = { group: [f"{k} = {v}" for k, v in values.items()] diff --git a/src/pdm/backend/sdist.py b/src/pdm/backend/sdist.py index 0dae4bd..b86be89 100644 --- a/src/pdm/backend/sdist.py +++ b/src/pdm/backend/sdist.py @@ -43,21 +43,22 @@ class SdistBuilder(Builder): def get_files(self, context: Context) -> Iterable[tuple[str, Path]]: collected = dict(super().get_files(context)) - local_hook = self.config.build_config.custom_hook context.ensure_build_dir() context.config.write_to(context.build_dir / "pyproject.toml") collected["pyproject.toml"] = context.build_dir / "pyproject.toml" + metadata = self.config.validate() + + def gen_additional_files() -> Iterable[str]: + if local_hook := self.config.build_config.custom_hook: + yield local_hook + if metadata.readme and metadata.readme.file: + yield metadata.readme.file.relative_to(self.location).as_posix() + yield from self.find_license_files(metadata) - additional_files: Iterable[str] = filter( - lambda f: f is not None and f not in collected, - ( - local_hook, - self.config.metadata.readme_file, - *self.find_license_files(), - ), - ) root = self.location - for file in additional_files: + for file in gen_additional_files(): + if file in collected: + continue if root.joinpath(file).exists(): collected[file] = root / file return collected.items() @@ -82,7 +83,7 @@ def build_artifact( tar.addfile(tar_info) self._show_add_file(relpath, path) - pkg_info = self.config.to_coremetadata().encode("utf-8") + pkg_info = str(self.config.validate().as_rfc822()).encode("utf-8") tar_info = tarfile.TarInfo(pjoin(dist_info, "PKG-INFO")) tar_info.size = len(pkg_info) tar_info = clean_tarinfo(tar_info) diff --git a/src/pdm/backend/wheel.py b/src/pdm/backend/wheel.py index 7fd52fa..6cd743c 100644 --- a/src/pdm/backend/wheel.py +++ b/src/pdm/backend/wheel.py @@ -228,8 +228,11 @@ def _write_dist_info(self, parent: Path) -> Path: """write the dist-info directory and return the path to it""" dist_info = parent / self.dist_info_name dist_info.mkdir(0o700, exist_ok=True) - meta = self.config.metadata - entry_points = meta.entry_points + meta = self.config.validate() + entry_points = meta.entrypoints.copy() + entry_points.update( + {"console_scripts": meta.scripts, "gui_scripts": meta.gui_scripts} + ) if entry_points: with _open_for_write(dist_info / "entry_points.txt") as f: self._write_entry_points(f, entry_points) @@ -238,9 +241,9 @@ def _write_dist_info(self, parent: Path) -> Path: self._write_wheel_file(f, is_purelib=self.config.build_config.is_purelib) with _open_for_write(dist_info / "METADATA") as f: - 
f.write(self.config.to_coremetadata()) + f.write(str(meta.as_rfc822())) - for file in self.find_license_files(): + for file in self.find_license_files(meta): target = dist_info / "licenses" / file target.parent.mkdir(0o700, parents=True, exist_ok=True) shutil.copy2(self.location / file, target) diff --git a/tests/fixtures/projects/demo-cextension-in-src/pyproject.toml b/tests/fixtures/projects/demo-cextension-in-src/pyproject.toml index 52990e2..013439f 100644 --- a/tests/fixtures/projects/demo-cextension-in-src/pyproject.toml +++ b/tests/fixtures/projects/demo-cextension-in-src/pyproject.toml @@ -8,7 +8,7 @@ authors = [ ] dynamic = ["version"] requires-python = ">=3.5" -license = {text = "MIT"} +license = { text = "MIT" } dependencies = [] description = "" name = "demo-package" diff --git a/tests/fixtures/projects/demo-cextension/pyproject.toml b/tests/fixtures/projects/demo-cextension/pyproject.toml index 16d7aaa..7668ef4 100644 --- a/tests/fixtures/projects/demo-cextension/pyproject.toml +++ b/tests/fixtures/projects/demo-cextension/pyproject.toml @@ -8,7 +8,7 @@ authors = [ ] dynamic = ["version"] requires-python = ">=3.5" -license = {text = "MIT"} +license = { text = "MIT" } dependencies = [] description = "" name = "demo-package" diff --git a/tests/fixtures/projects/demo-combined-extras/pyproject.toml b/tests/fixtures/projects/demo-combined-extras/pyproject.toml index 8a7e40f..4d5b6cd 100644 --- a/tests/fixtures/projects/demo-combined-extras/pyproject.toml +++ b/tests/fixtures/projects/demo-combined-extras/pyproject.toml @@ -5,7 +5,7 @@ build-backend = "pdm.backend" [project] authors = [{name = "frostming", email = "mianghong@gmail.com"}] description = "" -license = {text = "MIT"} +license = "MIT" name = "demo-package-extra" requires-python = ">=3.5" version = "0.1.0" diff --git a/tests/fixtures/projects/demo-explicit-package-dir/pyproject.toml b/tests/fixtures/projects/demo-explicit-package-dir/pyproject.toml index 2dbedd4..5c6a20c 100644 --- a/tests/fixtures/projects/demo-explicit-package-dir/pyproject.toml +++ b/tests/fixtures/projects/demo-explicit-package-dir/pyproject.toml @@ -8,7 +8,7 @@ authors = [ ] dynamic = ["version"] requires-python = ">=3.5" -license = {text = "MIT"} +license = "MIT" dependencies = [] description = "" name = "demo-package" diff --git a/tests/fixtures/projects/demo-licenses/LICENSE b/tests/fixtures/projects/demo-licenses/LICENSE new file mode 100644 index 0000000..e69de29 diff --git a/tests/fixtures/projects/demo-no-license/README.md b/tests/fixtures/projects/demo-licenses/README.md similarity index 100% rename from tests/fixtures/projects/demo-no-license/README.md rename to tests/fixtures/projects/demo-licenses/README.md diff --git a/tests/fixtures/projects/demo-licenses/licenses/LICENSE.APACHE.md b/tests/fixtures/projects/demo-licenses/licenses/LICENSE.APACHE.md new file mode 100644 index 0000000..e69de29 diff --git a/tests/fixtures/projects/demo-licenses/licenses/LICENSE.MIT.md b/tests/fixtures/projects/demo-licenses/licenses/LICENSE.MIT.md new file mode 100644 index 0000000..e69de29 diff --git a/tests/fixtures/projects/demo-no-license/pyproject.toml b/tests/fixtures/projects/demo-licenses/pyproject.toml similarity index 88% rename from tests/fixtures/projects/demo-no-license/pyproject.toml rename to tests/fixtures/projects/demo-licenses/pyproject.toml index bbf94b1..50b0349 100644 --- a/tests/fixtures/projects/demo-no-license/pyproject.toml +++ b/tests/fixtures/projects/demo-licenses/pyproject.toml @@ -8,7 +8,7 @@ authors = [ ] dynamic = ["version"] 
requires-python = ">=3.5" -license = {text = "MIT"} +license-files = ["LICENSE", "licenses/LICENSE*"] dependencies = [] description = "" name = "demo-module" diff --git a/tests/fixtures/projects/demo-no-license/src/foo_module.py b/tests/fixtures/projects/demo-licenses/src/foo_module.py similarity index 100% rename from tests/fixtures/projects/demo-no-license/src/foo_module.py rename to tests/fixtures/projects/demo-licenses/src/foo_module.py diff --git a/tests/fixtures/projects/demo-metadata-test/pyproject.toml b/tests/fixtures/projects/demo-metadata-test/pyproject.toml index 5e60a9c..a25c3ac 100644 --- a/tests/fixtures/projects/demo-metadata-test/pyproject.toml +++ b/tests/fixtures/projects/demo-metadata-test/pyproject.toml @@ -9,7 +9,7 @@ authors = [ ] name = "demo-metadata-test" requires-python = ">=3.8" -license = {text = "MIT"} +license = "MIT" dependencies = [] description = "" readme = "README.md" diff --git a/tests/fixtures/projects/demo-module/pyproject.toml b/tests/fixtures/projects/demo-module/pyproject.toml index 3698973..75795fe 100644 --- a/tests/fixtures/projects/demo-module/pyproject.toml +++ b/tests/fixtures/projects/demo-module/pyproject.toml @@ -8,7 +8,7 @@ authors = [ ] dynamic = ["version"] requires-python = ">=3.5" -license = {text = "MIT"} +license = "MIT" dependencies = [] description = "" name = "demo-module" diff --git a/tests/fixtures/projects/demo-no-version/pyproject.toml b/tests/fixtures/projects/demo-no-version/pyproject.toml index 8d162c5..de4a2ef 100644 --- a/tests/fixtures/projects/demo-no-version/pyproject.toml +++ b/tests/fixtures/projects/demo-no-version/pyproject.toml @@ -8,7 +8,7 @@ authors = [ ] name = "demo" requires-python = ">=3.5" -license = {text = "MIT"} +license = "MIT" dependencies = [] description = "" readme = "README.md" diff --git a/tests/fixtures/projects/demo-package-include-error/pyproject.toml b/tests/fixtures/projects/demo-package-include-error/pyproject.toml index c9a7dc8..cb51b64 100644 --- a/tests/fixtures/projects/demo-package-include-error/pyproject.toml +++ b/tests/fixtures/projects/demo-package-include-error/pyproject.toml @@ -8,7 +8,7 @@ authors = [ ] dynamic = ["version"] requires-python = ">=3.5" -license = {text = "MIT"} +license = "MIT" dependencies = ["flask"] description = "" name = "demo-package" diff --git a/tests/fixtures/projects/demo-package-include/pyproject.toml b/tests/fixtures/projects/demo-package-include/pyproject.toml index 2a78d38..865a515 100644 --- a/tests/fixtures/projects/demo-package-include/pyproject.toml +++ b/tests/fixtures/projects/demo-package-include/pyproject.toml @@ -8,7 +8,7 @@ authors = [ ] dynamic = ["version"] requires-python = ">=3.5" -license = {text = "MIT"} +license = "MIT" dependencies = ["flask"] description = "" name = "demo-package" diff --git a/tests/fixtures/projects/demo-package-with-deep-path/pyproject.toml b/tests/fixtures/projects/demo-package-with-deep-path/pyproject.toml index 1a5d2eb..2deba3e 100644 --- a/tests/fixtures/projects/demo-package-with-deep-path/pyproject.toml +++ b/tests/fixtures/projects/demo-package-with-deep-path/pyproject.toml @@ -8,7 +8,7 @@ authors = [ ] dynamic = ["version"] requires-python = ">=3.5" -license = {text = "MIT"} +license = "MIT" dependencies = ["flask"] description = "" name = "demo-package" diff --git a/tests/fixtures/projects/demo-package-with-tests/pyproject.toml b/tests/fixtures/projects/demo-package-with-tests/pyproject.toml index f46db9b..2b7686b 100644 --- a/tests/fixtures/projects/demo-package-with-tests/pyproject.toml +++ 
b/tests/fixtures/projects/demo-package-with-tests/pyproject.toml @@ -8,7 +8,7 @@ authors = [ ] dynamic = ["version"] requires-python = ">=3.5" -license = {text = "MIT"} +license = "MIT" dependencies = ["flask"] description = "" name = "demo-package" diff --git a/tests/fixtures/projects/demo-package/pyproject.toml b/tests/fixtures/projects/demo-package/pyproject.toml index f1fbcd3..48b1417 100644 --- a/tests/fixtures/projects/demo-package/pyproject.toml +++ b/tests/fixtures/projects/demo-package/pyproject.toml @@ -8,7 +8,7 @@ authors = [ ] dynamic = ["version"] requires-python = ">=2.7" -license = {text = "MIT"} +license = "MIT" dependencies = ["flask"] description = "" name = "demo-package" diff --git a/tests/fixtures/projects/demo-pep420-package/pyproject.toml b/tests/fixtures/projects/demo-pep420-package/pyproject.toml index bfec6d4..8897b3c 100644 --- a/tests/fixtures/projects/demo-pep420-package/pyproject.toml +++ b/tests/fixtures/projects/demo-pep420-package/pyproject.toml @@ -8,7 +8,7 @@ authors = [ ] dynamic = ["version"] requires-python = ">=3.5" -license = {text = "MIT"} +license = "MIT" dependencies = [] description = "" name = "demo-package" diff --git a/tests/fixtures/projects/demo-purelib-with-build/pyproject.toml b/tests/fixtures/projects/demo-purelib-with-build/pyproject.toml index 839426a..e0342eb 100644 --- a/tests/fixtures/projects/demo-purelib-with-build/pyproject.toml +++ b/tests/fixtures/projects/demo-purelib-with-build/pyproject.toml @@ -8,7 +8,7 @@ authors = [ ] dynamic = ["version"] requires-python = ">=3.5" -license = {text = "MIT"} +license = "MIT" dependencies = [] description = "" name = "demo-package" diff --git a/tests/fixtures/projects/demo-reuse-spec/pyproject.toml b/tests/fixtures/projects/demo-reuse-spec/pyproject.toml index 0664ed8..52b41e5 100644 --- a/tests/fixtures/projects/demo-reuse-spec/pyproject.toml +++ b/tests/fixtures/projects/demo-reuse-spec/pyproject.toml @@ -8,7 +8,7 @@ authors = [ ] dynamic = ["version"] requires-python = ">=3.5" -license = {text = "MPL-2.0"} +license = "MPL-2.0" dependencies = [] description = "" name = "demo-module" diff --git a/tests/fixtures/projects/demo-src-package-include/pyproject.toml b/tests/fixtures/projects/demo-src-package-include/pyproject.toml index 9891176..a25a099 100644 --- a/tests/fixtures/projects/demo-src-package-include/pyproject.toml +++ b/tests/fixtures/projects/demo-src-package-include/pyproject.toml @@ -8,7 +8,7 @@ authors = [ ] dynamic = ["version"] requires-python = ">=3.5" -license = {text = "MIT"} +license = "MIT" dependencies = [] description = "" name = "demo-package" diff --git a/tests/fixtures/projects/demo-src-package/pyproject.toml b/tests/fixtures/projects/demo-src-package/pyproject.toml index a627934..fa4a41b 100644 --- a/tests/fixtures/projects/demo-src-package/pyproject.toml +++ b/tests/fixtures/projects/demo-src-package/pyproject.toml @@ -8,7 +8,7 @@ authors = [ ] dynamic = ["version"] requires-python = ">=3.5" -license = {text = "MIT"} +license = "MIT" dependencies = [] description = "" name = "demo-package" diff --git a/tests/fixtures/projects/demo-src-pymodule/pyproject.toml b/tests/fixtures/projects/demo-src-pymodule/pyproject.toml index bbf94b1..1b596ed 100644 --- a/tests/fixtures/projects/demo-src-pymodule/pyproject.toml +++ b/tests/fixtures/projects/demo-src-pymodule/pyproject.toml @@ -8,7 +8,7 @@ authors = [ ] dynamic = ["version"] requires-python = ">=3.5" -license = {text = "MIT"} +license = "MIT" dependencies = [] description = "" name = "demo-module" diff --git 
a/tests/fixtures/projects/demo-using-scm/pyproject.toml b/tests/fixtures/projects/demo-using-scm/pyproject.toml index 8b31300..054e66e 100644 --- a/tests/fixtures/projects/demo-using-scm/pyproject.toml +++ b/tests/fixtures/projects/demo-using-scm/pyproject.toml @@ -8,7 +8,7 @@ authors = [ ] dynamic = ["version"] requires-python = ">=3.5" -license = {text = "MIT"} +license = "MIT" dependencies = [] description = "" name = "foo" diff --git a/tests/test_api.py b/tests/test_api.py index b997cc7..dab1704 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -203,8 +203,8 @@ def test_demo_metadata_test__sdist__pkg_info( "Author-Email": '"Corporation, Inc." , Example ' "", "Description-Content-Type": "text/markdown", - "License": "MIT", - "Metadata-Version": "2.1", + "License-Expression": "MIT", + "Metadata-Version": "2.4", "Name": name, "Requires-Python": ">=3.8", "Version": "3.2.1", @@ -537,3 +537,18 @@ def test_clean_not_called_if_config_settings_exist( assert not os.path.exists(test_file) else: assert os.path.exists(test_file) + + +@pytest.mark.parametrize("name", ["demo-licenses"]) +def test_build_wheel_with_license_file(fixture_project: Path, dist: Path) -> None: + wheel_name = api.build_wheel(dist.as_posix()) + sdist_name = api.build_sdist(dist.as_posix()) + + tar_names = get_tarball_names(dist / sdist_name) + licenses = ["LICENSE", "licenses/LICENSE.MIT.md", "licenses/LICENSE.APACHE.md"] + for file in licenses: + assert f"demo_module-0.1.0/{file}" in tar_names + + zip_names = get_wheel_names(dist / wheel_name) + for file in licenses: + assert f"demo_module-0.1.0.dist-info/licenses/{file}" in zip_names diff --git a/tests/test_file_finder.py b/tests/test_file_finder.py index f0bd701..746ef28 100644 --- a/tests/test_file_finder.py +++ b/tests/test_file_finder.py @@ -5,6 +5,7 @@ import pytest from pdm.backend.base import Builder, is_same_or_descendant_path +from pdm.backend.exceptions import ValidationError from pdm.backend.sdist import SdistBuilder from pdm.backend.wheel import WheelBuilder from tests import FIXTURES @@ -104,42 +105,46 @@ def test_merge_includes_and_excludes( assert (data_b in include_files) == data_b_exist -def test_license_file_globs_no_matching() -> None: - builder = WheelBuilder(FIXTURES / "projects/demo-no-license") +def test_license_file_matching() -> None: + builder = WheelBuilder(FIXTURES / "projects/demo-licenses") + builder.config.metadata["license-files"] = ["LICENSE"] with builder: - with pytest.warns(UserWarning) as warns: - license_files = builder.find_license_files() - - assert not license_files - assert len(warns) == 1 - assert str(warns.pop(UserWarning).message).startswith( - "No license files are matched with glob patterns" - ) + license_files = builder.find_license_files(builder.config.validate()) + assert license_files == ["LICENSE"] -def test_license_file_paths_no_matching() -> None: - builder = WheelBuilder(FIXTURES / "projects/demo-no-license") - builder.config.metadata["license-files"] = {"paths": ["LICENSE"]} +def test_license_file_glob_matching() -> None: + builder = WheelBuilder(FIXTURES / "projects/demo-licenses") with builder: - with pytest.raises(ValueError, match="License files not found"): - builder.find_license_files() + license_files = sorted(builder.find_license_files(builder.config.validate())) + assert license_files == [ + "LICENSE", + "licenses/LICENSE.APACHE.md", + "licenses/LICENSE.MIT.md", + ] -@pytest.mark.parametrize("key", ["paths", "globs"]) -def test_license_file_explicit_empty(recwarn, key) -> None: - builder = 
WheelBuilder(FIXTURES / "projects/demo-no-license") - builder.config.metadata["license-files"] = {key: []} +def test_default_license_files() -> None: + builder = WheelBuilder(FIXTURES / "projects/demo-licenses") + del builder.config.metadata["license-files"] with builder: - license_files = builder.find_license_files() - assert not license_files - assert len(recwarn) == 0 + license_files = builder.find_license_files(builder.config.validate()) + assert license_files == ["LICENSE"] -def test_reuse_spec_licenses_dir() -> None: - builder = WheelBuilder(FIXTURES / "projects/demo-reuse-spec") +def test_license_file_paths_no_matching() -> None: + builder = WheelBuilder(FIXTURES / "projects/demo-licenses") + builder.config.metadata["license-files"] = ["LICENSE.md"] + with pytest.raises(ValidationError, match=".*must match at least one file"): + builder.config.validate() + + +def test_license_file_explicit_empty() -> None: + builder = WheelBuilder(FIXTURES / "projects/demo-licenses") + builder.config.metadata["license-files"] = [] with builder: - license_files = builder.find_license_files() - assert license_files == ["LICENSES/MPL-2.0.txt"] + license_files = list(builder.find_license_files(builder.config.validate())) + assert not license_files def test_collect_build_files_with_src_layout(tmp_path) -> None: diff --git a/tests/test_metadata.py b/tests/test_metadata.py index 91b2f65..f12da72 100644 --- a/tests/test_metadata.py +++ b/tests/test_metadata.py @@ -1,6 +1,6 @@ import pytest -from pdm.backend.config import Config, Metadata +from pdm.backend.config import Config from tests import FIXTURES @@ -73,17 +73,3 @@ def test_src_dir_containing_modules(monkeypatch: pytest.MonkeyPatch) -> None: assert paths["package_dir"] == {"": "src"} assert not paths["packages"] assert paths["py_modules"] == ["foo_module"] - - -def test_default_license_files() -> None: - metadata = Metadata( - { - "description": "test package", - "name": "demo", - "version": "0.1.0", - "license": "MIT", - } - ) - assert metadata.license_files == { - "globs": ["LICENSES/*", "LICEN[CS]E*", "COPYING*", "NOTICE*", "AUTHORS*"] - }
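
The changes above replace the legacy license table handling with PEP 639 support: "project.license" may now be an SPDX expression string, "project.license-files" is a list of glob patterns, and the resulting core metadata gains License-Expression / License-File fields at Metadata-Version 2.4. The following is a minimal sketch (not part of the patch) of how the updated vendored API is expected to behave once this diff is applied; the project table, the "demo-module" name, and the LICENSE file assumed to sit next to pyproject.toml are illustrative assumptions, not values taken from the patch.

import pathlib

from pdm.backend._vendor.pyproject_metadata import StandardMetadata

# Hypothetical project table using the new string (SPDX) license form plus
# license-files globs; per the patched validation, every glob must match at
# least one file under the project directory.
pyproject = {
    "project": {
        "name": "demo-module",
        "version": "0.1.0",
        "license": "MIT",
        "license-files": ["LICENSE*"],
    }
}

metadata = StandardMetadata.from_pyproject(pyproject, project_dir=pathlib.Path("."))
message = metadata.as_rfc822()

print(message["Metadata-Version"])      # "2.4" when an SPDX expression or license-files is used
print(message["License-Expression"])    # "MIT"
print(message.get_all("License-File"))  # e.g. ["LICENSE"], paths relative to the project dir

The dict form (license = {text = "MIT"}) still produces a plain License field, and the builders now pass the validated StandardMetadata into find_license_files(), falling back to the default LICEN[CS]E*, COPYING*, NOTICE* patterns only when no license file is specified at all.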