diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index d3a2bbaeeab3..36f6934c301b 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -29,7 +29,7 @@ jobs: with: fetch-depth: 0 - - uses: tj-actions/changed-files@v44 + - uses: tj-actions/changed-files@v45 id: changed with: files_yaml: | diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index c99cd3a2043b..0e879e7e2c18 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -12,7 +12,7 @@ repos: - id: validate-pyproject - repo: https://github.com/crate-ci/typos - rev: v1.23.6 + rev: v1.24.1 hooks: - id: typos @@ -32,7 +32,7 @@ repos: types_or: [yaml, json5] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.6.1 + rev: v0.6.2 hooks: - id: ruff-format - id: ruff diff --git a/CHANGELOG.md b/CHANGELOG.md index 954c174db02f..c85021aecca4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,36 @@ # Changelog +## 0.3.4 + +### CLI + +- Show `--editable` on the `uv add` CLI ([#6608](https://github.com/astral-sh/uv/pull/6608)) +- Add `--refresh` to `tool run` warning for `--with` dependencies ([#6609](https://github.com/astral-sh/uv/pull/6609)) + +### Bug fixes + +- Allow per dependency build isolation for `setup.py`-based projects ([#6517](https://github.com/astral-sh/uv/pull/6517)) +- Avoid un-strict syncing by-default for build isolation ([#6606](https://github.com/astral-sh/uv/pull/6606)) +- Respect `--no-build-isolation-package` in `uv sync` ([#6605](https://github.com/astral-sh/uv/pull/6605)) +- Respect extras and markers on virtual dev dependencies ([#6620](https://github.com/astral-sh/uv/pull/6620)) +- Support PEP 723 scripts in GUI files ([#6611](https://github.com/astral-sh/uv/pull/6611)) +- Update lockfile after setting minimum bounds in `uv add` ([#6618](https://github.com/astral-sh/uv/pull/6618)) +- Use relative paths for `--find-links` and local registries ([#6566](https://github.com/astral-sh/uv/pull/6566)) +- Use separate types to 
represent raw vs. resolver markers ([#6646](https://github.com/astral-sh/uv/pull/6646)) +- Parse wheels `WHEEL` and `METADATA` files as email messages ([#6616](https://github.com/astral-sh/uv/pull/6616)) +- Support unquoted hrefs in `--find-links` and other HTML sources ([#6622](https://github.com/astral-sh/uv/pull/6622)) +- Don't canonicalize paths to user requirements ([#6560](https://github.com/astral-sh/uv/pull/6560)) + +### Documentation + +- Add FastAPI guide to overview ([#6603](https://github.com/astral-sh/uv/pull/6603)) +- Add docs for disabling build isolation with `uv sync` ([#6607](https://github.com/astral-sh/uv/pull/6607)) +- Add example of reading script from stdin using echo ([#6567](https://github.com/astral-sh/uv/pull/6567)) +- Add tip to use intermediate layers in Docker builds ([#6650](https://github.com/astral-sh/uv/pull/6650)) +- Clarify need to include `pyproject.toml` with `--no-install-project` ([#6581](https://github.com/astral-sh/uv/pull/6581)) +- Move `WORKDIR` directive in Docker examples ([#6652](https://github.com/astral-sh/uv/pull/6652)) +- Remove duplicate `WORKDIR` directive in Docker example ([#6651](https://github.com/astral-sh/uv/pull/6651)) + ## 0.3.3 ### Enhancements @@ -2779,3 +2810,4 @@ path ([#1433](https://github.com/astral-sh/uv/pull/1433)) + diff --git a/Cargo.lock b/Cargo.lock index a2f188be3762..d6df45f6f67c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -17,6 +17,12 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" +[[package]] +name = "adler2" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627" + [[package]] name = "ahash" version = "0.7.8" @@ -226,7 +232,7 @@ checksum = "6e0c28dcc82d7c8ead5cb13beb15405b57b8546e93215673ff8ca0349a028107" dependencies = [ "proc-macro2", "quote", - "syn 2.0.75", + 
"syn 2.0.76", ] [[package]] @@ -357,7 +363,7 @@ dependencies = [ "cc", "cfg-if", "libc", - "miniz_oxide", + "miniz_oxide 0.7.4", "object", "rustc-demangle", ] @@ -750,7 +756,7 @@ dependencies = [ "heck 0.5.0", "proc-macro2", "quote", - "syn 2.0.75", + "syn 2.0.76", ] [[package]] @@ -1252,13 +1258,13 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" [[package]] name = "flate2" -version = "1.0.31" +version = "1.0.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f211bbe8e69bbd0cfdea405084f128ae8b4aaa6b0b522fc8f2b009084797920" +checksum = "324a1be68054ef05ad64b861cc9eaf1d623d2d8cb25b4bf2cb9cdd902b4bf253" dependencies = [ "crc32fast", "libz-ng-sys", - "miniz_oxide", + "miniz_oxide 0.8.0", ] [[package]] @@ -1401,7 +1407,7 @@ checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" dependencies = [ "proc-macro2", "quote", - "syn 2.0.75", + "syn 2.0.76", ] [[package]] @@ -1840,6 +1846,7 @@ dependencies = [ "distribution-filename", "fs-err", "indoc", + "mailparse", "pathdiff", "pep440_rs", "platform-info", @@ -1938,13 +1945,13 @@ checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" [[package]] name = "jiff" -version = "0.1.7" +version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94fda28c0e0f7e5ac1e40c34a06393ae6ecf4c9ce6b6421a2217a32680bb8038" +checksum = "8ef8bc400f8312944a9f879db116fed372c4f0859af672eba2a80f79c767dd19" dependencies = [ "jiff-tzdb-platform", "serde", - "windows-sys 0.59.0", + "windows-sys 0.52.0", ] [[package]] @@ -2049,9 +2056,9 @@ dependencies = [ [[package]] name = "libz-ng-sys" -version = "1.1.15" +version = "1.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6409efc61b12687963e602df8ecf70e8ddacf95bc6576bcf16e3ac6328083c5" +checksum = "4436751a01da56f1277f323c80d584ffad94a3d14aecd959dd0dff75aa73a438" dependencies = [ "cmake", "libc", @@ -2207,7 +2214,7 @@ checksum 
= "dcf09caffaac8068c346b6df2a7fc27a177fd20b39421a39ce0a211bde679a6c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.75", + "syn 2.0.76", ] [[package]] @@ -2235,6 +2242,15 @@ dependencies = [ "simd-adler32", ] +[[package]] +name = "miniz_oxide" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2d80299ef12ff69b16a84bb182e3b9df68b5a91574d3d4fa6e41b65deec4df1" +dependencies = [ + "adler2", +] + [[package]] name = "mio" version = "1.0.1" @@ -2454,24 +2470,6 @@ version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" -[[package]] -name = "path-absolutize" -version = "3.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e4af381fe79fa195b4909485d99f73a80792331df0625188e707854f0b3383f5" -dependencies = [ - "path-dedot", -] - -[[package]] -name = "path-dedot" -version = "3.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07ba0ad7e047712414213ff67533e6dd477af0a4e1d14fb52343e53d30ea9397" -dependencies = [ - "once_cell", -] - [[package]] name = "path-slash" version = "0.2.1" @@ -2563,7 +2561,7 @@ dependencies = [ "pest_meta", "proc-macro2", "quote", - "syn 2.0.75", + "syn 2.0.76", ] [[package]] @@ -2610,7 +2608,7 @@ checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" dependencies = [ "proc-macro2", "quote", - "syn 2.0.75", + "syn 2.0.76", ] [[package]] @@ -2667,7 +2665,7 @@ dependencies = [ "crc32fast", "fdeflate", "flate2", - "miniz_oxide", + "miniz_oxide 0.7.4", ] [[package]] @@ -2796,7 +2794,7 @@ dependencies = [ "indoc", "libc", "memoffset 0.9.1", - "parking_lot 0.12.3", + "parking_lot 0.11.2", "portable-atomic", "pyo3-build-config", "pyo3-ffi", @@ -2844,7 +2842,7 @@ dependencies = [ "proc-macro2", "pyo3-macros-backend", "quote", - "syn 2.0.75", + "syn 2.0.76", ] [[package]] @@ -2857,7 +2855,7 @@ dependencies = [ 
"proc-macro2", "pyo3-build-config", "quote", - "syn 2.0.75", + "syn 2.0.76", ] [[package]] @@ -2934,9 +2932,9 @@ dependencies = [ [[package]] name = "quote" -version = "1.0.36" +version = "1.0.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7" +checksum = "b5b9d34b8991d19d98081b46eacdd8eb58c6f2b201139f7c5f643cc155a633af" dependencies = [ "proc-macro2", ] @@ -3143,9 +3141,9 @@ dependencies = [ [[package]] name = "reqwest" -version = "0.12.5" +version = "0.12.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7d6d2a27d57148378eb5e111173f4276ad26340ecc5c49a4a2152167a2d6a37" +checksum = "f8f4955649ef5c38cc7f9e8aa41761d48fb9677197daea9984dc54f56aad5e63" dependencies = [ "async-compression", "base64 0.22.1", @@ -3185,7 +3183,7 @@ dependencies = [ "wasm-streams", "web-sys", "webpki-roots", - "winreg", + "windows-registry", ] [[package]] @@ -3277,9 +3275,9 @@ dependencies = [ [[package]] name = "rkyv" -version = "0.7.44" +version = "0.7.45" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5cba464629b3394fc4dbc6f940ff8f5b4ff5c7aef40f29166fd4ad12acbc99c0" +checksum = "9008cd6385b9e161d8229e1f6549dd23c3d022f132a2ea37ac3a10ac4935779b" dependencies = [ "bitvec", "bytecheck", @@ -3295,9 +3293,9 @@ dependencies = [ [[package]] name = "rkyv_derive" -version = "0.7.44" +version = "0.7.45" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7dddfff8de25e6f62b9d64e6e432bf1c6736c57d20323e15ee10435fbda7c65" +checksum = "503d1d27590a2b0a3a4ca4c94755aa2875657196ecbf401a42eff41d7de532c0" dependencies = [ "proc-macro2", "quote", @@ -3504,7 +3502,7 @@ dependencies = [ "proc-macro2", "quote", "serde_derive_internals", - "syn 2.0.75", + "syn 2.0.76", ] [[package]] @@ -3550,22 +3548,22 @@ checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b" [[package]] name = "serde" -version = "1.0.208" 
+version = "1.0.209" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cff085d2cb684faa248efb494c39b68e522822ac0de72ccf08109abde717cfb2" +checksum = "99fce0ffe7310761ca6bf9faf5115afbc19688edd00171d81b1bb1b116c63e09" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.208" +version = "1.0.209" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24008e81ff7613ed8e5ba0cfaf24e2c2f1e5b8a0495711e44fcd4882fca62bcf" +checksum = "a5831b979fd7b5439637af1752d535ff49f4860c0f341d1baeb6faf0f4242170" dependencies = [ "proc-macro2", "quote", - "syn 2.0.75", + "syn 2.0.76", ] [[package]] @@ -3576,14 +3574,14 @@ checksum = "18d26a20a969b9e3fdf2fc2d9f21eda6c40e2de84c9408bb5d3b05d499aae711" dependencies = [ "proc-macro2", "quote", - "syn 2.0.75", + "syn 2.0.76", ] [[package]] name = "serde_json" -version = "1.0.125" +version = "1.0.127" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "83c8e735a073ccf5be70aa8066aa984eaf2fa000db6c8d0100ae605b366d31ed" +checksum = "8043c06d9f82bd7271361ed64f415fe5e12a77fdb52e573e7f06a516dea329ad" dependencies = [ "itoa", "memchr", @@ -3808,9 +3806,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.75" +version = "2.0.76" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6af063034fc1935ede7be0122941bafa9bacb949334d090b77ca98b5817c7d9" +checksum = "578e081a14e0cefc3279b0472138c513f37b41a08d5a3cca9b6e4e8ceb6cd525" dependencies = [ "proc-macro2", "quote", @@ -3822,6 +3820,9 @@ name = "sync_wrapper" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a7065abeca94b6a8a577f9bd45aa0867a2238b74e8eb67cf10d492bc39351394" +dependencies = [ + "futures-core", +] [[package]] name = "sys-info" @@ -3907,7 +3908,7 @@ dependencies = [ "cfg-if", "proc-macro2", "quote", - "syn 2.0.75", + "syn 2.0.76", ] [[package]] @@ -3918,7 +3919,7 @@ checksum = 
"5c89e72a01ed4c579669add59014b9a524d609c0c88c6a585ce37485879f6ffb" dependencies = [ "proc-macro2", "quote", - "syn 2.0.75", + "syn 2.0.76", "test-case-core", ] @@ -3940,7 +3941,7 @@ checksum = "5999e24eaa32083191ba4e425deb75cdf25efefabe5aaccb7446dd0d4122a3f5" dependencies = [ "proc-macro2", "quote", - "syn 2.0.75", + "syn 2.0.76", ] [[package]] @@ -3980,7 +3981,7 @@ checksum = "a4558b58466b9ad7ca0f102865eccc95938dca1a74a856f2b57b6629050da261" dependencies = [ "proc-macro2", "quote", - "syn 2.0.75", + "syn 2.0.76", ] [[package]] @@ -4066,8 +4067,7 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tl" version = "0.7.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b130bd8a58c163224b44e217b4239ca7b927d82bf6cc2fea1fc561d15056e3f7" +source = "git+https://github.com/charliermarsh/tl.git?rev=6e25b2ee2513d75385101a8ff9f591ef51f314ec#6e25b2ee2513d75385101a8ff9f591ef51f314ec" [[package]] name = "tokio" @@ -4095,7 +4095,7 @@ checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" dependencies = [ "proc-macro2", "quote", - "syn 2.0.75", + "syn 2.0.76", ] [[package]] @@ -4231,7 +4231,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.75", + "syn 2.0.76", ] [[package]] @@ -4492,7 +4492,7 @@ checksum = "81dfa00651efa65069b0b6b651f4aaa31ba9e3c3ce0137aaad053604ee7e0314" [[package]] name = "uv" -version = "0.3.3" +version = "0.3.4" dependencies = [ "anstream", "anyhow", @@ -4892,7 +4892,6 @@ dependencies = [ "fs-err", "fs2", "junction", - "path-absolutize", "path-slash", "serde", "tempfile", @@ -4964,7 +4963,7 @@ version = "0.0.1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.75", + "syn 2.0.76", "textwrap", ] @@ -5243,7 +5242,7 @@ dependencies = [ [[package]] name = "uv-version" -version = "0.3.3" +version = "0.3.4" [[package]] name = "uv-virtualenv" @@ -5366,7 +5365,7 @@ dependencies = [ 
"once_cell", "proc-macro2", "quote", - "syn 2.0.75", + "syn 2.0.76", "wasm-bindgen-shared", ] @@ -5400,7 +5399,7 @@ checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.75", + "syn 2.0.76", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -5505,7 +5504,7 @@ version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" dependencies = [ - "windows-sys 0.59.0", + "windows-sys 0.48.0", ] [[package]] @@ -5566,7 +5565,7 @@ checksum = "12168c33176773b86799be25e2a2ba07c7aab9968b37541f1094dbd7a60c8946" dependencies = [ "proc-macro2", "quote", - "syn 2.0.75", + "syn 2.0.76", ] [[package]] @@ -5577,7 +5576,7 @@ checksum = "2bbd5b46c938e506ecbce286b6628a02171d56153ba733b6c741fc627ec9579b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.75", + "syn 2.0.76", ] [[package]] @@ -5588,7 +5587,7 @@ checksum = "9d8dc32e0095a7eeccebd0e3f09e9509365ecb3fc6ac4d6f5f14a3f6392942d1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.75", + "syn 2.0.76", ] [[package]] @@ -5599,7 +5598,18 @@ checksum = "053c4c462dc91d3b1504c6fe5a726dd15e216ba718e84a0e46a88fbe5ded3515" dependencies = [ "proc-macro2", "quote", - "syn 2.0.75", + "syn 2.0.76", +] + +[[package]] +name = "windows-registry" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e400001bb720a623c1c69032f8e3e4cf09984deec740f007dd2b03ec864804b0" +dependencies = [ + "windows-result", + "windows-strings", + "windows-targets 0.52.6", ] [[package]] @@ -5897,7 +5907,7 @@ checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.75", + "syn 2.0.76", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index 142a15513bec..bf299e6a8e2c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -106,7 +106,6 @@ memchr = { version = "2.7.4" } miette = { version = 
"7.2.0" } nanoid = { version = "0.4.0" } owo-colors = { version = "4.0.0" } -path-absolutize = { version = "3.1.1" } path-slash = { version = "0.2.1" } pathdiff = { version = "0.2.1" } petgraph = { version = "0.6.4" } @@ -140,7 +139,7 @@ target-lexicon = { version = "0.12.14" } tempfile = { version = "3.9.0" } textwrap = { version = "0.16.1" } thiserror = { version = "1.0.56" } -tl = { version = "0.7.7" } +tl = { git = "https://github.com/charliermarsh/tl.git", rev = "6e25b2ee2513d75385101a8ff9f591ef51f314ec" } tokio = { version = "1.35.1", features = ["fs", "io-util", "macros", "process", "signal", "sync"] } tokio-stream = { version = "0.1.14" } tokio-tar = { version = "0.3.1" } diff --git a/crates/bench/benches/uv.rs b/crates/bench/benches/uv.rs index b7d3accee59a..dae7558cf5b2 100644 --- a/crates/bench/benches/uv.rs +++ b/crates/bench/benches/uv.rs @@ -88,6 +88,7 @@ mod resolver { use pep440_rs::Version; use pep508_rs::{MarkerEnvironment, MarkerEnvironmentBuilder}; use platform_tags::{Arch, Os, Platform, Tags}; + use pypi_types::ResolverMarkerEnvironment; use uv_cache::Cache; use uv_client::RegistryClient; use uv_configuration::{ @@ -192,7 +193,7 @@ mod resolver { let markers = if universal { ResolverMarkers::universal(vec![]) } else { - ResolverMarkers::specific_environment(MARKERS.clone()) + ResolverMarkers::specific_environment(ResolverMarkerEnvironment::from(MARKERS.clone())) }; let resolver = Resolver::new( diff --git a/crates/distribution-types/src/file.rs b/crates/distribution-types/src/file.rs index 427e72dc6083..c68ee1d82716 100644 --- a/crates/distribution-types/src/file.rs +++ b/crates/distribution-types/src/file.rs @@ -143,32 +143,17 @@ impl Display for FileLocation { PartialOrd, Ord, Hash, + Serialize, + Deserialize, rkyv::Archive, rkyv::Deserialize, rkyv::Serialize, )] +#[serde(transparent)] #[archive(check_bytes)] #[archive_attr(derive(Debug))] pub struct UrlString(String); -impl serde::Serialize for UrlString { - fn serialize(&self, serializer: 
S) -> Result - where - S: serde::ser::Serializer, - { - String::serialize(&self.0, serializer) - } -} - -impl<'de> serde::de::Deserialize<'de> for UrlString { - fn deserialize(deserializer: D) -> Result - where - D: serde::de::Deserializer<'de>, - { - String::deserialize(deserializer).map(UrlString) - } -} - impl UrlString { /// Converts a [`UrlString`] to a [`Url`]. pub fn to_url(&self) -> Url { diff --git a/crates/install-wheel-rs/Cargo.toml b/crates/install-wheel-rs/Cargo.toml index eec879177f6e..7ca22b8ed5c6 100644 --- a/crates/install-wheel-rs/Cargo.toml +++ b/crates/install-wheel-rs/Cargo.toml @@ -33,6 +33,7 @@ configparser = { workspace = true } csv = { workspace = true } data-encoding = { workspace = true } fs-err = { workspace = true } +mailparse = { workspace = true } pathdiff = { workspace = true } platform-info = { workspace = true } reflink-copy = { workspace = true } diff --git a/crates/install-wheel-rs/src/wheel.rs b/crates/install-wheel-rs/src/wheel.rs index 3698fca56cb5..f2f46d118145 100644 --- a/crates/install-wheel-rs/src/wheel.rs +++ b/crates/install-wheel-rs/src/wheel.rs @@ -1,11 +1,12 @@ use std::collections::HashMap; -use std::io::{BufRead, BufReader, Cursor, Read, Seek, Write}; +use std::io::{BufReader, Cursor, Read, Seek, Write}; use std::path::{Path, PathBuf}; use std::{env, io}; use data_encoding::BASE64URL_NOPAD; use fs_err as fs; use fs_err::{DirEntry, File}; +use mailparse::parse_headers; use rustc_hash::FxHashMap; use sha2::{Digest, Sha256}; use tracing::{instrument, warn}; @@ -353,10 +354,10 @@ pub enum LibKind { /// Parse WHEEL file. 
/// /// > {distribution}-{version}.dist-info/WHEEL is metadata about the archive itself in the same -/// > basic key: value format: +/// > email message format: pub fn parse_wheel_file(wheel_text: &str) -> Result { - // {distribution}-{version}.dist-info/WHEEL is metadata about the archive itself in the same basic key: value format: - let data = parse_key_value_file(&mut wheel_text.as_bytes(), "WHEEL")?; + // {distribution}-{version}.dist-info/WHEEL is metadata about the archive itself in the same email message format: + let data = parse_email_message_file(&mut wheel_text.as_bytes(), "WHEEL")?; // Determine whether Root-Is-Purelib == ‘true’. // If it is, the wheel is pure, and should be installed into purelib. @@ -797,29 +798,39 @@ pub fn read_record_file(record: &mut impl Read) -> Result, Erro .collect() } -/// Parse a file with `Key: value` entries such as WHEEL and METADATA -fn parse_key_value_file( +/// Parse a file with email message format such as WHEEL and METADATA +fn parse_email_message_file( file: impl Read, debug_filename: &str, ) -> Result>, Error> { let mut data: FxHashMap> = FxHashMap::default(); let file = BufReader::new(file); - for (line_no, line) in file.lines().enumerate() { - let line = line?.trim().to_string(); - if line.is_empty() { - continue; + let content = file.bytes().collect::, _>>()?; + + let headers = parse_headers(content.as_slice()) + .map_err(|err| { + Error::InvalidWheel(format!("Failed to parse {debug_filename} file: {err}")) + })? + .0; + + for header in headers { + let mut name = header.get_key(); + let mut value = header.get_value(); + + // Trim the name and value only if needed, avoiding unnecessary allocations with .trim().to_string(). 
+ let trimmed_name = name.trim(); + if name == trimmed_name { + name = trimmed_name.to_string(); } - let (key, value) = line.split_once(':').ok_or_else(|| { - Error::InvalidWheel(format!( - "Line {} of the {debug_filename} file is invalid", - line_no + 1 - )) - })?; - data.entry(key.trim().to_string()) - .or_default() - .push(value.trim().to_string()); + let trimmed_value = value.trim(); + if value == trimmed_value { + value = trimmed_value.to_string(); + } + + data.entry(name).or_default().push(value); } + Ok(data) } @@ -836,11 +847,11 @@ mod test { use crate::Error; use super::{ - get_script_executable, parse_key_value_file, parse_wheel_file, read_record_file, Script, + get_script_executable, parse_email_message_file, parse_wheel_file, read_record_file, Script, }; #[test] - fn test_parse_key_value_file() { + fn test_parse_email_message_file() { let text = indoc! {" Wheel-Version: 1.0 Generator: bdist_wheel (0.37.1) @@ -849,7 +860,21 @@ mod test { Tag: cp38-cp38-manylinux2014_x86_64 "}; - parse_key_value_file(&mut text.as_bytes(), "WHEEL").unwrap(); + parse_email_message_file(&mut text.as_bytes(), "WHEEL").unwrap(); + } + + #[test] + fn test_parse_email_message_file_with_value_starting_with_linesep_and_two_space() { + // Check: https://files.pythonhosted.org/packages/0c/b7/ecfdce6368cc3664d301f7f52db4fe1004aa7da7a12c4a9bf1de534ff6ab/ziglang-0.13.0-py3-none-manylinux_2_12_x86_64.manylinux2010_x86_64.musllinux_1_1_x86_64.whl + let text = indoc! 
{" + Wheel-Version: 1.0 + Generator: ziglang make_wheels.py + Root-Is-Purelib: false + Tag: + py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.musllinux_1_1_aarch64 + "}; + + parse_email_message_file(&mut text.as_bytes(), "WHEEL").unwrap(); } #[test] @@ -996,7 +1021,7 @@ mod test { " }; let reader = Cursor::new(wheel.to_string().into_bytes()); - let wheel_file = parse_key_value_file(reader, "WHEEL")?; + let wheel_file = parse_email_message_file(reader, "WHEEL")?; assert_eq!( wheel_file.get("Wheel-Version"), Some(&["1.0".to_string()].to_vec()) diff --git a/crates/pypi-types/src/lib.rs b/crates/pypi-types/src/lib.rs index 73efeba94888..a38dc248797a 100644 --- a/crates/pypi-types/src/lib.rs +++ b/crates/pypi-types/src/lib.rs @@ -1,6 +1,7 @@ pub use base_url::*; pub use direct_url::*; pub use lenient_requirement::*; +pub use marker_environment::*; pub use metadata::*; pub use parsed_url::*; pub use requirement::*; @@ -10,6 +11,7 @@ pub use simple_json::*; mod base_url; mod direct_url; mod lenient_requirement; +mod marker_environment; mod metadata; mod parsed_url; mod requirement; diff --git a/crates/pypi-types/src/marker_environment.rs b/crates/pypi-types/src/marker_environment.rs new file mode 100644 index 000000000000..063ceb41cd43 --- /dev/null +++ b/crates/pypi-types/src/marker_environment.rs @@ -0,0 +1,53 @@ +use tracing::debug; + +use pep508_rs::MarkerEnvironment; + +/// A wrapper type around [`MarkerEnvironment`] that ensures the Python version markers are +/// release-only, to match the resolver's semantics. +#[derive(Debug, Clone)] +pub struct ResolverMarkerEnvironment(MarkerEnvironment); + +impl ResolverMarkerEnvironment { + /// Returns the underlying [`MarkerEnvironment`]. + pub fn markers(&self) -> &MarkerEnvironment { + &self.0 + } +} + +impl From for ResolverMarkerEnvironment { + fn from(value: MarkerEnvironment) -> Self { + // Strip `python_version`. 
+ let python_version = value.python_version().only_release(); + let value = if python_version == **value.python_version() { + value + } else { + debug!( + "Stripping pre-release from `python_version`: {}", + value.python_version() + ); + value.with_python_version(python_version) + }; + + // Strip `python_full_version`. + let python_full_version = value.python_full_version().only_release(); + let value = if python_full_version == **value.python_full_version() { + value + } else { + debug!( + "Stripping pre-release from `python_full_version`: {}", + value.python_full_version() + ); + value.with_python_full_version(python_full_version) + }; + + Self(value) + } +} + +impl std::ops::Deref for ResolverMarkerEnvironment { + type Target = MarkerEnvironment; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} diff --git a/crates/uv-build/src/lib.rs b/crates/uv-build/src/lib.rs index 3c0f1b469a4b..479a4b35d5f8 100644 --- a/crates/uv-build/src/lib.rs +++ b/crates/uv-build/src/lib.rs @@ -398,6 +398,7 @@ impl SourceBuild { pub async fn setup( source: &Path, subdirectory: Option<&Path>, + fallback_package_name: Option<&PackageName>, interpreter: &Interpreter, build_context: &impl BuildContext, source_build_context: SourceBuildContext, @@ -422,10 +423,10 @@ impl SourceBuild { let (pep517_backend, project) = Self::extract_pep517_backend(&source_tree, &default_backend).map_err(|err| *err)?; - let package_name = project.clone().map(|p| p.name); + let package_name = project.as_ref().map(|p| &p.name).or(fallback_package_name); // Create a virtual environment, or install into the shared environment if requested. - let venv = if let Some(venv) = build_isolation.shared_environment(package_name.as_ref()) { + let venv = if let Some(venv) = build_isolation.shared_environment(package_name) { venv.clone() } else { uv_virtualenv::create_venv( @@ -440,7 +441,7 @@ impl SourceBuild { // Setup the build environment. 
If build isolation is disabled, we assume the build // environment is already setup. - if build_isolation.is_isolated(package_name.as_ref()) { + if build_isolation.is_isolated(package_name) { let resolved_requirements = Self::get_resolved_requirements( build_context, source_build_context, @@ -490,7 +491,7 @@ impl SourceBuild { // Create the PEP 517 build environment. If build isolation is disabled, we assume the build // environment is already setup. let runner = PythonRunner::new(concurrent_builds); - if build_isolation.is_isolated(package_name.as_ref()) { + if build_isolation.is_isolated(package_name) { create_pep517_build_environment( &runner, &source_tree, diff --git a/crates/uv-cli/src/lib.rs b/crates/uv-cli/src/lib.rs index 729a93c09831..593d030f9ada 100644 --- a/crates/uv-cli/src/lib.rs +++ b/crates/uv-cli/src/lib.rs @@ -2261,10 +2261,7 @@ pub struct SyncArgs { /// Do not remove extraneous packages present in the environment. /// /// When enabled, uv will make the minimum necessary changes to satisfy the requirements. - /// - /// By default, syncing will remove any extraneous packages from the environment, unless - /// `--no-build-isolation` is enabled, in which case extra packages are considered necessary for - /// builds. + /// By default, syncing will remove any extraneous packages from the environment #[arg(long, overrides_with("exact"), alias = "no-exact")] pub inexact: bool, @@ -2426,11 +2423,11 @@ pub struct AddArgs { #[arg(long, conflicts_with("dev"))] pub optional: Option, - #[arg(long, overrides_with = "no_editable", hide = true)] + /// Add the requirements as editable. + #[arg(long, overrides_with = "no_editable")] pub editable: bool, - /// Don't add the requirements as editables. - #[arg(long, overrides_with = "editable")] + #[arg(long, overrides_with = "editable", hide = true)] pub no_editable: bool, /// Add source requirements to `project.dependencies`, rather than `tool.uv.sources`. 
diff --git a/crates/uv-dev/src/build.rs b/crates/uv-dev/src/build.rs index 0b7a3584f7fb..8170c2080d93 100644 --- a/crates/uv-dev/src/build.rs +++ b/crates/uv-dev/src/build.rs @@ -98,6 +98,7 @@ pub(crate) async fn build(args: BuildArgs) -> Result { let builder = SourceBuild::setup( &args.sdist, args.subdirectory.as_deref(), + None, python.interpreter(), &build_dispatch, SourceBuildContext::default(), diff --git a/crates/uv-dispatch/src/lib.rs b/crates/uv-dispatch/src/lib.rs index 9b813452303c..b44837b4dd6d 100644 --- a/crates/uv-dispatch/src/lib.rs +++ b/crates/uv-dispatch/src/lib.rs @@ -138,7 +138,7 @@ impl<'a> BuildContext for BuildDispatch<'a> { async fn resolve<'data>(&'data self, requirements: &'data [Requirement]) -> Result { let python_requirement = PythonRequirement::from_interpreter(self.interpreter); - let markers = self.interpreter.markers(); + let markers = self.interpreter.resolver_markers(); let tags = self.interpreter.tags()?; let resolver = Resolver::new( @@ -148,7 +148,7 @@ impl<'a> BuildContext for BuildDispatch<'a> { .index_strategy(self.index_strategy) .build(), &python_requirement, - ResolverMarkers::specific_environment(markers.clone()), + ResolverMarkers::specific_environment(markers), Some(tags), self.flat_index, self.index, @@ -189,6 +189,7 @@ impl<'a> BuildContext for BuildDispatch<'a> { // Determine the current environment markers. let tags = self.interpreter.tags()?; + let markers = self.interpreter.resolver_markers(); // Determine the set of installed packages. 
let site_packages = SitePackages::from_environment(venv)?; @@ -208,6 +209,7 @@ impl<'a> BuildContext for BuildDispatch<'a> { self.index_locations, self.cache(), venv, + &markers, tags, )?; @@ -298,11 +300,12 @@ impl<'a> BuildContext for BuildDispatch<'a> { dist: Option<&'data SourceDist>, build_kind: BuildKind, ) -> Result { + let dist_name = dist.map(distribution_types::Name::name); // Note we can only prevent builds by name for packages with names // unless all builds are disabled. if self .build_options - .no_build_requirement(dist.map(distribution_types::Name::name)) + .no_build_requirement(dist_name) // We always allow editable builds && !matches!(build_kind, BuildKind::Editable) { @@ -318,6 +321,7 @@ impl<'a> BuildContext for BuildDispatch<'a> { let builder = SourceBuild::setup( source, subdirectory, + dist_name, self.interpreter, self, self.source_build_context.clone(), diff --git a/crates/uv-distribution/src/metadata/lowering.rs b/crates/uv-distribution/src/metadata/lowering.rs index c0caf41190ba..3d81a1a6a4b4 100644 --- a/crates/uv-distribution/src/metadata/lowering.rs +++ b/crates/uv-distribution/src/metadata/lowering.rs @@ -9,7 +9,6 @@ use distribution_filename::DistExtension; use pep440_rs::VersionSpecifiers; use pep508_rs::{VerbatimUrl, VersionOrUrl}; use pypi_types::{ParsedUrlError, Requirement, RequirementSource, VerbatimParsedUrl}; -use uv_fs::Simplified; use uv_git::GitReference; use uv_normalize::PackageName; use uv_warnings::warn_user_once; @@ -250,8 +249,6 @@ pub enum LoweringError { InvalidVerbatimUrl(#[from] pep508_rs::VerbatimUrlError), #[error("Can't combine URLs from both `project.dependencies` and `tool.uv.sources`")] ConflictingUrls, - #[error("Could not normalize path: `{}`", _0.user_display())] - Absolutize(PathBuf, #[source] io::Error), #[error("Fragments are not allowed in URLs: `{0}`")] ForbiddenFragment(Url), #[error("`workspace = false` is not yet supported")] diff --git a/crates/uv-fs/Cargo.toml b/crates/uv-fs/Cargo.toml index 
3b2c58a13d43..e95382bbc28d 100644 --- a/crates/uv-fs/Cargo.toml +++ b/crates/uv-fs/Cargo.toml @@ -22,7 +22,6 @@ either = { workspace = true } encoding_rs_io = { workspace = true } fs-err = { workspace = true } fs2 = { workspace = true } -path-absolutize = { workspace = true } path-slash = { workspace = true } serde = { workspace = true, optional = true } tempfile = { workspace = true } diff --git a/crates/uv-fs/src/path.rs b/crates/uv-fs/src/path.rs index 25e435b92a6a..695bc5e42247 100644 --- a/crates/uv-fs/src/path.rs +++ b/crates/uv-fs/src/path.rs @@ -240,16 +240,6 @@ pub fn normalize_path(path: &Path) -> PathBuf { normalized } -/// Convert a path to an absolute path, relative to the current working directory. -/// -/// Unlike [`std::fs::canonicalize`], this function does not resolve symlinks and does not require -/// the path to exist. -pub fn absolutize_path(path: &Path) -> Result, std::io::Error> { - use path_absolutize::Absolutize; - - path.absolutize_from(CWD.simplified()) -} - /// Like `fs_err::canonicalize`, but avoids attempting to resolve symlinks on Windows. pub fn canonicalize_executable(path: impl AsRef) -> std::io::Result { let path = path.as_ref(); diff --git a/crates/uv-installer/src/plan.rs b/crates/uv-installer/src/plan.rs index 8f396c8e08a4..e0340cf25977 100644 --- a/crates/uv-installer/src/plan.rs +++ b/crates/uv-installer/src/plan.rs @@ -12,7 +12,7 @@ use distribution_types::{ PathSourceDist, RemoteSource, Verbatim, }; use platform_tags::Tags; -use pypi_types::{Requirement, RequirementSource}; +use pypi_types::{Requirement, RequirementSource, ResolverMarkerEnvironment}; use uv_cache::{ArchiveTimestamp, Cache, CacheBucket, WheelCache}; use uv_configuration::{BuildOptions, Reinstall}; use uv_distribution::{ @@ -58,6 +58,7 @@ impl<'a> Planner<'a> { index_locations: &IndexLocations, cache: &Cache, venv: &PythonEnvironment, + markers: &ResolverMarkerEnvironment, tags: &Tags, ) -> Result { // Index all the already-downloaded wheels in the cache. 
@@ -72,7 +73,7 @@ impl<'a> Planner<'a> { for requirement in self.requirements { // Filter out incompatible requirements. - if !requirement.evaluate_markers(Some(venv.interpreter().markers()), &[]) { + if !requirement.evaluate_markers(Some(markers), &[]) { continue; } diff --git a/crates/uv-installer/src/site_packages.rs b/crates/uv-installer/src/site_packages.rs index 003c3c5d3891..b6de14853155 100644 --- a/crates/uv-installer/src/site_packages.rs +++ b/crates/uv-installer/src/site_packages.rs @@ -11,7 +11,7 @@ use distribution_types::{ Diagnostic, InstalledDist, Name, UnresolvedRequirement, UnresolvedRequirementSpecification, }; use pep440_rs::{Version, VersionSpecifiers}; -use pypi_types::{Requirement, VerbatimParsedUrl}; +use pypi_types::{Requirement, ResolverMarkerEnvironment, VerbatimParsedUrl}; use uv_fs::Simplified; use uv_normalize::PackageName; use uv_python::{Interpreter, PythonEnvironment}; @@ -181,7 +181,10 @@ impl SitePackages { } /// Validate the installed packages in the virtual environment. - pub fn diagnostics(&self) -> Result> { + pub fn diagnostics( + &self, + markers: &ResolverMarkerEnvironment, + ) -> Result> { let mut diagnostics = Vec::new(); for (package, indexes) in &self.by_name { @@ -220,7 +223,7 @@ impl SitePackages { // Verify that the package is compatible with the current Python version. if let Some(requires_python) = metadata.requires_python.as_ref() { - if !requires_python.contains(self.interpreter.python_version()) { + if !requires_python.contains(markers.python_full_version()) { diagnostics.push(SitePackagesDiagnostic::IncompatiblePythonVersion { package: package.clone(), version: self.interpreter.python_version().clone(), @@ -231,7 +234,7 @@ impl SitePackages { // Verify that the dependencies are installed. 
for dependency in &metadata.requires_dist { - if !dependency.evaluate_markers(self.interpreter.markers(), &[]) { + if !dependency.evaluate_markers(markers, &[]) { continue; } @@ -281,6 +284,7 @@ impl SitePackages { &self, requirements: &[UnresolvedRequirementSpecification], constraints: &[Requirement], + markers: &ResolverMarkerEnvironment, ) -> Result { // Collect the constraints. let constraints: FxHashMap<&PackageName, Vec<&Requirement>> = @@ -299,10 +303,7 @@ impl SitePackages { // Add the direct requirements to the queue. for entry in requirements { - if entry - .requirement - .evaluate_markers(Some(self.interpreter.markers()), &[]) - { + if entry.requirement.evaluate_markers(Some(markers), &[]) { if seen.insert(entry.clone()) { stack.push(entry.clone()); } @@ -353,10 +354,7 @@ impl SitePackages { // Add the dependencies to the queue. for dependency in metadata.requires_dist { - if dependency.evaluate_markers( - self.interpreter.markers(), - entry.requirement.extras(), - ) { + if dependency.evaluate_markers(markers, entry.requirement.extras()) { let dependency = UnresolvedRequirementSpecification { requirement: UnresolvedRequirement::Named(Requirement::from( dependency, diff --git a/crates/uv-python/src/interpreter.rs b/crates/uv-python/src/interpreter.rs index 3e7378f351c1..0f7d4aead09d 100644 --- a/crates/uv-python/src/interpreter.rs +++ b/crates/uv-python/src/interpreter.rs @@ -17,7 +17,7 @@ use pep440_rs::Version; use pep508_rs::{MarkerEnvironment, StringVersion}; use platform_tags::Platform; use platform_tags::{Tags, TagsError}; -use pypi_types::Scheme; +use pypi_types::{ResolverMarkerEnvironment, Scheme}; use uv_cache::{Cache, CacheBucket, CachedByTimestamp, Freshness, Timestamp}; use uv_fs::{write_atomic_sync, PythonExt, Simplified}; @@ -142,6 +142,11 @@ impl Interpreter { &self.markers } + /// Return the [`ResolverMarkerEnvironment`] for this Python executable. 
+ pub fn resolver_markers(&self) -> ResolverMarkerEnvironment { + ResolverMarkerEnvironment::from(self.markers().clone()) + } + /// Returns the [`PythonInstallationKey`] for this interpreter. pub fn key(&self) -> PythonInstallationKey { PythonInstallationKey::new( @@ -686,7 +691,7 @@ impl InterpreterInfo { /// unless the Python executable changes, so we use the executable's last modified /// time as a cache key. pub(crate) fn query_cached(executable: &Path, cache: &Cache) -> Result { - let absolute = uv_fs::absolutize_path(executable)?; + let absolute = std::path::absolute(executable)?; let cache_entry = cache.entry( CacheBucket::Interpreter, diff --git a/crates/uv-python/src/lib.rs b/crates/uv-python/src/lib.rs index 9f8652b2c06f..d51fedeb5da8 100644 --- a/crates/uv-python/src/lib.rs +++ b/crates/uv-python/src/lib.rs @@ -1562,6 +1562,32 @@ mod tests { Ok(()) } + #[test] + fn find_python_venv_symlink() -> Result<()> { + let context = TestContext::new()?; + + let venv = context.tempdir.child("target").child("env"); + TestContext::mock_venv(&venv, "3.10.6")?; + let symlink = context.tempdir.child("proj").child(".venv"); + context.tempdir.child("proj").create_dir_all()?; + symlink.symlink_to_dir(venv)?; + + let python = context.run(|| { + find_python_installation( + &PythonRequest::parse("../proj/.venv"), + EnvironmentPreference::Any, + PythonPreference::OnlySystem, + &context.cache, + ) + })??; + assert_eq!( + python.interpreter().python_full_version().to_string(), + "3.10.6", + "We should find the symlinked venv" + ); + Ok(()) + } + #[test] fn find_python_treats_missing_file_path_as_file() -> Result<()> { let context = TestContext::new()?; diff --git a/crates/uv-python/src/python_version.rs b/crates/uv-python/src/python_version.rs index d261e256444f..07ee3edacf56 100644 --- a/crates/uv-python/src/python_version.rs +++ b/crates/uv-python/src/python_version.rs @@ -84,7 +84,7 @@ impl PythonVersion { /// /// The returned [`MarkerEnvironment`] will preserve the base 
environment's platform markers, /// but override its Python version markers. - pub fn markers(self, base: &MarkerEnvironment) -> MarkerEnvironment { + pub fn markers(&self, base: &MarkerEnvironment) -> MarkerEnvironment { let mut markers = base.clone(); // Ex) `implementation_version == "3.12.0"` diff --git a/crates/uv-resolver/src/candidate_selector.rs b/crates/uv-resolver/src/candidate_selector.rs index f4d17b813fea..2146f7e7e9ee 100644 --- a/crates/uv-resolver/src/candidate_selector.rs +++ b/crates/uv-resolver/src/candidate_selector.rs @@ -6,7 +6,7 @@ use tracing::{debug, trace}; use distribution_types::{CompatibleDist, IncompatibleDist, IncompatibleSource}; use distribution_types::{DistributionMetadata, IncompatibleWheel, Name, PrioritizedDist}; use pep440_rs::Version; -use pep508_rs::{MarkerEnvironment, MarkerTree}; +use pep508_rs::MarkerTree; use uv_configuration::IndexStrategy; use uv_normalize::PackageName; use uv_types::InstalledPackagesProvider; @@ -30,7 +30,7 @@ impl CandidateSelector { pub(crate) fn for_resolution( options: Options, manifest: &Manifest, - markers: Option<&MarkerEnvironment>, + markers: &ResolverMarkers, ) -> Self { Self { resolution_strategy: ResolutionStrategy::from_mode( diff --git a/crates/uv-resolver/src/lock.rs b/crates/uv-resolver/src/lock.rs index 3c78c664a0bd..b3b014ba3367 100644 --- a/crates/uv-resolver/src/lock.rs +++ b/crates/uv-resolver/src/lock.rs @@ -27,7 +27,7 @@ use pep508_rs::{split_scheme, MarkerEnvironment, MarkerTree, VerbatimUrl, Verbat use platform_tags::{TagCompatibility, TagPriority, Tags}; use pypi_types::{ redact_git_credentials, HashDigest, ParsedArchiveUrl, ParsedGitUrl, Requirement, - RequirementSource, + RequirementSource, ResolverMarkerEnvironment, }; use uv_configuration::ExtrasSpecification; use uv_distribution::DistributionDatabase; @@ -419,7 +419,7 @@ impl Lock { pub fn to_resolution( &self, project: &VirtualProject, - marker_env: &MarkerEnvironment, + marker_env: &ResolverMarkerEnvironment, tags: 
&Tags, extras: &ExtrasSpecification, dev: &[GroupName], @@ -457,11 +457,20 @@ impl Lock { // dependencies in virtual workspaces). for group in dev { for dependency in project.group(group) { - let root = self - .find_by_name(dependency) - .expect("found too many packages matching root") - .expect("could not find root"); - queue.push_back((root, None)); + if dependency.marker.evaluate(marker_env, &[]) { + let root = self + .find_by_markers(&dependency.name, marker_env) + .expect("found too many packages matching root") + .expect("could not find root"); + + // Add the base package. + queue.push_back((root, None)); + + // Add any extras. + for extra in &dependency.extras { + queue.push_back((root, Some(extra))); + } + } } } @@ -674,6 +683,39 @@ impl Lock { Ok(found_dist) } + /// Returns the package with the given name. + /// + /// If there are multiple matching packages, returns the package that + /// corresponds to the given marker tree. + /// + /// If there are multiple packages that are relevant to the current + /// markers, then an error is returned. + /// + /// If there are no matching packages, then `Ok(None)` is returned. + fn find_by_markers( + &self, + name: &PackageName, + marker_env: &MarkerEnvironment, + ) -> Result, String> { + let mut found_dist = None; + for dist in &self.packages { + if &dist.id.name == name { + if dist.fork_markers.is_empty() + || dist + .fork_markers + .iter() + .any(|marker| marker.evaluate(marker_env, &[])) + { + if found_dist.is_some() { + return Err(format!("found multiple packages matching `{name}`")); + } + found_dist = Some(dist); + } + } + } + Ok(found_dist) + } + fn find_by_id(&self, id: &PackageId) -> &Package { let index = *self.by_id.get(id).expect("locked package for ID"); let dist = self.packages.get(index).expect("valid index for package"); @@ -3599,7 +3641,7 @@ impl<'env> TreeDisplay<'env> { /// Create a new [`DisplayDependencyGraph`] for the set of installed packages. 
pub fn new( lock: &'env Lock, - markers: Option<&'env MarkerEnvironment>, + markers: Option<&'env ResolverMarkerEnvironment>, depth: usize, prune: Vec, package: Vec, diff --git a/crates/uv-resolver/src/manifest.rs b/crates/uv-resolver/src/manifest.rs index 741ae98d0f13..82de403ed1fa 100644 --- a/crates/uv-resolver/src/manifest.rs +++ b/crates/uv-resolver/src/manifest.rs @@ -1,15 +1,15 @@ -use either::Either; use std::borrow::Cow; use std::collections::BTreeSet; -use pep508_rs::MarkerEnvironment; +use either::Either; + use pypi_types::Requirement; use uv_configuration::{Constraints, Overrides}; use uv_normalize::{GroupName, PackageName}; use uv_types::RequestedRequirements; use crate::preferences::Preferences; -use crate::{DependencyMode, Exclusions}; +use crate::{DependencyMode, Exclusions, ResolverMarkers}; /// A manifest of requirements, constraints, and preferences. #[derive(Clone, Debug)] @@ -109,7 +109,7 @@ impl Manifest { /// - Determining which requirements should allow local version specifiers (e.g., `torch==2.2.0+cpu`). pub fn requirements<'a>( &'a self, - markers: Option<&'a MarkerEnvironment>, + markers: &'a ResolverMarkers, mode: DependencyMode, ) -> impl Iterator> + 'a { self.requirements_no_overrides(markers, mode) @@ -119,39 +119,48 @@ impl Manifest { /// Like [`Self::requirements`], but without the overrides. pub fn requirements_no_overrides<'a>( &'a self, - markers: Option<&'a MarkerEnvironment>, + markers: &'a ResolverMarkers, mode: DependencyMode, ) -> impl Iterator> + 'a { match mode { // Include all direct and transitive requirements, with constraints and overrides applied. 
- DependencyMode::Transitive => Either::Left( - self.lookaheads - .iter() - .flat_map(move |lookahead| { - self.overrides - .apply(lookahead.requirements()) - .filter(move |requirement| { - requirement.evaluate_markers(markers, lookahead.extras()) - }) - }) - .chain( - self.overrides - .apply(&self.requirements) - .filter(move |requirement| requirement.evaluate_markers(markers, &[])), - ) - .chain( - self.constraints - .requirements() - .filter(move |requirement| requirement.evaluate_markers(markers, &[])) - .map(Cow::Borrowed), - ), - ), + DependencyMode::Transitive => { + Either::Left( + self.lookaheads + .iter() + .flat_map(move |lookahead| { + self.overrides.apply(lookahead.requirements()).filter( + move |requirement| { + requirement.evaluate_markers( + markers.marker_environment(), + lookahead.extras(), + ) + }, + ) + }) + .chain(self.overrides.apply(&self.requirements).filter( + move |requirement| { + requirement.evaluate_markers(markers.marker_environment(), &[]) + }, + )) + .chain( + self.constraints + .requirements() + .filter(move |requirement| { + requirement.evaluate_markers(markers.marker_environment(), &[]) + }) + .map(Cow::Borrowed), + ), + ) + } // Include direct requirements, with constraints and overrides applied. DependencyMode::Direct => Either::Right( self.overrides .apply(&self.requirements) .chain(self.constraints.requirements().map(Cow::Borrowed)) - .filter(move |requirement| requirement.evaluate_markers(markers, &[])), + .filter(move |requirement| { + requirement.evaluate_markers(markers.marker_environment(), &[]) + }), ), } } @@ -159,7 +168,7 @@ impl Manifest { /// Only the overrides from [`Self::requirements`]. 
pub fn overrides<'a>( &'a self, - markers: Option<&'a MarkerEnvironment>, + markers: &'a ResolverMarkers, mode: DependencyMode, ) -> impl Iterator> + 'a { match mode { @@ -167,14 +176,18 @@ impl Manifest { DependencyMode::Transitive => Either::Left( self.overrides .requirements() - .filter(move |requirement| requirement.evaluate_markers(markers, &[])) + .filter(move |requirement| { + requirement.evaluate_markers(markers.marker_environment(), &[]) + }) .map(Cow::Borrowed), ), // Include direct requirements, with constraints and overrides applied. DependencyMode::Direct => Either::Right( self.overrides .requirements() - .filter(move |requirement| requirement.evaluate_markers(markers, &[])) + .filter(move |requirement| { + requirement.evaluate_markers(markers.marker_environment(), &[]) + }) .map(Cow::Borrowed), ), } @@ -192,36 +205,43 @@ impl Manifest { /// the `lowest-direct` strategy is in use. pub fn user_requirements<'a>( &'a self, - markers: Option<&'a MarkerEnvironment>, + markers: &'a ResolverMarkers, mode: DependencyMode, ) -> impl Iterator> + 'a { match mode { // Include direct requirements, dependencies of editables, and transitive dependencies // of local packages. 
- DependencyMode::Transitive => Either::Left( - self.lookaheads - .iter() - .filter(|lookahead| lookahead.direct()) - .flat_map(move |lookahead| { - self.overrides - .apply(lookahead.requirements()) - .filter(move |requirement| { - requirement.evaluate_markers(markers, lookahead.extras()) - }) - }) - .chain( - self.overrides - .apply(&self.requirements) - .filter(move |requirement| requirement.evaluate_markers(markers, &[])), - ), - ), + DependencyMode::Transitive => { + Either::Left( + self.lookaheads + .iter() + .filter(|lookahead| lookahead.direct()) + .flat_map(move |lookahead| { + self.overrides.apply(lookahead.requirements()).filter( + move |requirement| { + requirement.evaluate_markers( + markers.marker_environment(), + lookahead.extras(), + ) + }, + ) + }) + .chain(self.overrides.apply(&self.requirements).filter( + move |requirement| { + requirement.evaluate_markers(markers.marker_environment(), &[]) + }, + )), + ) + } // Restrict to the direct requirements. - DependencyMode::Direct => Either::Right( - self.overrides - .apply(self.requirements.iter()) - .filter(move |requirement| requirement.evaluate_markers(markers, &[])), - ), + DependencyMode::Direct => { + Either::Right(self.overrides.apply(self.requirements.iter()).filter( + move |requirement| { + requirement.evaluate_markers(markers.marker_environment(), &[]) + }, + )) + } } } @@ -232,11 +252,13 @@ impl Manifest { /// resolution (assuming the user enabled development dependencies). pub fn direct_requirements<'a>( &'a self, - markers: Option<&'a MarkerEnvironment>, + markers: &'a ResolverMarkers, ) -> impl Iterator> + 'a { self.overrides .apply(self.requirements.iter()) - .filter(move |requirement| requirement.evaluate_markers(markers, &[])) + .filter(move |requirement| { + requirement.evaluate_markers(markers.marker_environment(), &[]) + }) } /// Apply the overrides and constraints to a set of requirements. 
diff --git a/crates/uv-resolver/src/preferences.rs b/crates/uv-resolver/src/preferences.rs index 4631ac595345..b031045fe15e 100644 --- a/crates/uv-resolver/src/preferences.rs +++ b/crates/uv-resolver/src/preferences.rs @@ -5,11 +5,13 @@ use tracing::trace; use distribution_types::{InstalledDist, InstalledMetadata, InstalledVersion, Name}; use pep440_rs::{Operator, Version}; -use pep508_rs::{MarkerEnvironment, MarkerTree, VersionOrUrl}; +use pep508_rs::{MarkerTree, VersionOrUrl}; use pypi_types::{HashDigest, HashError}; use requirements_txt::{RequirementEntry, RequirementsTxtRequirement}; use uv_normalize::PackageName; +use crate::ResolverMarkers; + #[derive(thiserror::Error, Debug)] pub enum PreferenceError { #[error(transparent)] @@ -121,12 +123,12 @@ impl Preferences { /// to an applicable subset. pub fn from_iter>( preferences: PreferenceIterator, - markers: Option<&MarkerEnvironment>, + markers: &ResolverMarkers, ) -> Self { let mut slf = Self::default(); for preference in preferences { // Filter non-matching preferences when resolving for an environment. 
- if let Some(markers) = markers { + if let Some(markers) = markers.marker_environment() { if !preference.marker.evaluate(markers, &[]) { trace!("Excluding {preference} from preferences due to unmatched markers"); continue; diff --git a/crates/uv-resolver/src/prerelease.rs b/crates/uv-resolver/src/prerelease.rs index dc1c216fef16..a0e26fc840ca 100644 --- a/crates/uv-resolver/src/prerelease.rs +++ b/crates/uv-resolver/src/prerelease.rs @@ -1,6 +1,5 @@ use pypi_types::RequirementSource; -use pep508_rs::MarkerEnvironment; use uv_normalize::PackageName; use crate::resolver::ForkSet; @@ -68,7 +67,7 @@ impl PrereleaseStrategy { pub(crate) fn from_mode( mode: PrereleaseMode, manifest: &Manifest, - markers: Option<&MarkerEnvironment>, + markers: &ResolverMarkers, dependencies: DependencyMode, ) -> Self { let mut packages = ForkSet::default(); diff --git a/crates/uv-resolver/src/resolution_mode.rs b/crates/uv-resolver/src/resolution_mode.rs index eb433212fda0..cb0f66bd34a8 100644 --- a/crates/uv-resolver/src/resolution_mode.rs +++ b/crates/uv-resolver/src/resolution_mode.rs @@ -1,9 +1,8 @@ use rustc_hash::FxHashSet; -use pep508_rs::MarkerEnvironment; use uv_normalize::PackageName; -use crate::{DependencyMode, Manifest}; +use crate::{DependencyMode, Manifest, ResolverMarkers}; #[derive(Debug, Default, Clone, Copy, PartialEq, Eq, serde::Deserialize, serde::Serialize)] #[serde(deny_unknown_fields, rename_all = "kebab-case")] @@ -47,7 +46,7 @@ impl ResolutionStrategy { pub(crate) fn from_mode( mode: ResolutionMode, manifest: &Manifest, - markers: Option<&MarkerEnvironment>, + markers: &ResolverMarkers, dependencies: DependencyMode, ) -> Self { match mode { diff --git a/crates/uv-resolver/src/resolver/groups.rs b/crates/uv-resolver/src/resolver/groups.rs index a10314222452..c2a5b77e8753 100644 --- a/crates/uv-resolver/src/resolver/groups.rs +++ b/crates/uv-resolver/src/resolver/groups.rs @@ -1,9 +1,8 @@ use rustc_hash::FxHashMap; -use pep508_rs::MarkerEnvironment; use 
uv_normalize::{GroupName, PackageName}; -use crate::Manifest; +use crate::{Manifest, ResolverMarkers}; /// A map of package names to their activated dependency groups. #[derive(Debug, Default, Clone)] @@ -11,7 +10,7 @@ pub(crate) struct Groups(FxHashMap>); impl Groups { /// Determine the set of enabled dependency groups in the [`Manifest`]. - pub(crate) fn from_manifest(manifest: &Manifest, markers: Option<&MarkerEnvironment>) -> Self { + pub(crate) fn from_manifest(manifest: &Manifest, markers: &ResolverMarkers) -> Self { let mut groups = FxHashMap::default(); // Enable the groups for all direct dependencies. In practice, this tends to mean: when diff --git a/crates/uv-resolver/src/resolver/locals.rs b/crates/uv-resolver/src/resolver/locals.rs index e2e034603535..b4a2a2666752 100644 --- a/crates/uv-resolver/src/resolver/locals.rs +++ b/crates/uv-resolver/src/resolver/locals.rs @@ -3,7 +3,7 @@ use std::str::FromStr; use distribution_filename::{SourceDistFilename, WheelFilename}; use distribution_types::RemoteSource; use pep440_rs::{Operator, Version, VersionSpecifier, VersionSpecifierBuildError}; -use pep508_rs::{MarkerEnvironment, PackageName}; +use pep508_rs::PackageName; use pypi_types::RequirementSource; use crate::resolver::ForkMap; @@ -17,7 +17,7 @@ impl Locals { /// Determine the set of permitted local versions in the [`Manifest`]. 
pub(crate) fn from_manifest( manifest: &Manifest, - markers: Option<&MarkerEnvironment>, + markers: &ResolverMarkers, dependencies: DependencyMode, ) -> Self { let mut locals = ForkMap::default(); diff --git a/crates/uv-resolver/src/resolver/mod.rs b/crates/uv-resolver/src/resolver/mod.rs index af435f8719ff..1d755f466179 100644 --- a/crates/uv-resolver/src/resolver/mod.rs +++ b/crates/uv-resolver/src/resolver/mod.rs @@ -157,11 +157,7 @@ impl<'a, Context: BuildContext, InstalledPackages: InstalledPackagesProvider> python_requirement .target() .and_then(|target| target.as_requires_python()), - AllowedYanks::from_manifest( - &manifest, - markers.marker_environment(), - options.dependency_mode, - ), + AllowedYanks::from_manifest(&manifest, &markers, options.dependency_mode), hasher, options.exclude_newer, build_context.build_options(), @@ -199,24 +195,11 @@ impl let state = ResolverState { index: index.clone(), git: git.clone(), - selector: CandidateSelector::for_resolution( - options, - &manifest, - markers.marker_environment(), - ), + selector: CandidateSelector::for_resolution(options, &manifest, &markers), dependency_mode: options.dependency_mode, - urls: Urls::from_manifest( - &manifest, - markers.marker_environment(), - git, - options.dependency_mode, - )?, - locals: Locals::from_manifest( - &manifest, - markers.marker_environment(), - options.dependency_mode, - ), - groups: Groups::from_manifest(&manifest, markers.marker_environment()), + urls: Urls::from_manifest(&manifest, &markers, git, options.dependency_mode)?, + locals: Locals::from_manifest(&manifest, &markers, options.dependency_mode), + groups: Groups::from_manifest(&manifest, &markers), project: manifest.project, workspace_members: manifest.workspace_members, requirements: manifest.requirements, diff --git a/crates/uv-resolver/src/resolver/resolver_markers.rs b/crates/uv-resolver/src/resolver/resolver_markers.rs index 6832eee92375..da38f0563b39 100644 --- 
a/crates/uv-resolver/src/resolver/resolver_markers.rs +++ b/crates/uv-resolver/src/resolver/resolver_markers.rs @@ -1,7 +1,7 @@ use std::fmt::{Display, Formatter}; -use tracing::debug; use pep508_rs::{MarkerEnvironment, MarkerTree}; +use pypi_types::ResolverMarkerEnvironment; #[derive(Debug, Clone)] /// Whether we're solving for a specific environment, universally or for a specific fork. @@ -20,8 +20,8 @@ pub enum ResolverMarkers { impl ResolverMarkers { /// Set the resolver to perform a resolution for a specific environment. - pub fn specific_environment(markers: MarkerEnvironment) -> Self { - Self::SpecificEnvironment(ResolverMarkerEnvironment::from(markers)) + pub fn specific_environment(markers: ResolverMarkerEnvironment) -> Self { + Self::SpecificEnvironment(markers) } /// Set the resolver to perform a universal resolution. @@ -71,46 +71,3 @@ impl Display for ResolverMarkers { } } } - -/// A wrapper type around [`MarkerEnvironment`] that ensures the Python version markers are -/// release-only, to match the resolver's semantics. -#[derive(Debug, Clone)] -pub struct ResolverMarkerEnvironment(MarkerEnvironment); - -impl From for ResolverMarkerEnvironment { - fn from(value: MarkerEnvironment) -> Self { - // Strip `python_version`. - let python_version = value.python_version().only_release(); - let value = if python_version == **value.python_version() { - value - } else { - debug!( - "Stripping pre-release from `python_version`: {}", - value.python_version() - ); - value.with_python_version(python_version) - }; - - // Strip `python_full_version`. 
- let python_full_version = value.python_full_version().only_release(); - let value = if python_full_version == **value.python_full_version() { - value - } else { - debug!( - "Stripping pre-release from `python_full_version`: {}", - value.python_full_version() - ); - value.with_python_full_version(python_full_version) - }; - - Self(value) - } -} - -impl std::ops::Deref for ResolverMarkerEnvironment { - type Target = MarkerEnvironment; - - fn deref(&self) -> &Self::Target { - &self.0 - } -} diff --git a/crates/uv-resolver/src/resolver/urls.rs b/crates/uv-resolver/src/resolver/urls.rs index 2ddf482c6f81..fe8f09040a19 100644 --- a/crates/uv-resolver/src/resolver/urls.rs +++ b/crates/uv-resolver/src/resolver/urls.rs @@ -6,12 +6,12 @@ use tracing::debug; use cache_key::CanonicalUrl; use distribution_types::Verbatim; -use pep508_rs::{MarkerEnvironment, VerbatimUrl}; +use pep508_rs::VerbatimUrl; use pypi_types::{ParsedDirectoryUrl, ParsedUrl, VerbatimParsedUrl}; use uv_git::GitResolver; use uv_normalize::PackageName; -use crate::{DependencyMode, Manifest, ResolveError}; +use crate::{DependencyMode, Manifest, ResolveError, ResolverMarkers}; /// The URLs that are allowed for packages. 
/// @@ -36,7 +36,7 @@ pub(crate) struct Urls { impl Urls { pub(crate) fn from_manifest( manifest: &Manifest, - markers: Option<&MarkerEnvironment>, + markers: &ResolverMarkers, git: &GitResolver, dependencies: DependencyMode, ) -> Result { diff --git a/crates/uv-resolver/src/yanks.rs b/crates/uv-resolver/src/yanks.rs index 34772e01d6c5..59f27cd18fc1 100644 --- a/crates/uv-resolver/src/yanks.rs +++ b/crates/uv-resolver/src/yanks.rs @@ -1,12 +1,12 @@ -use pypi_types::RequirementSource; -use rustc_hash::{FxHashMap, FxHashSet}; use std::sync::Arc; +use rustc_hash::{FxHashMap, FxHashSet}; + use pep440_rs::Version; -use pep508_rs::MarkerEnvironment; +use pypi_types::RequirementSource; use uv_normalize::PackageName; -use crate::{DependencyMode, Manifest}; +use crate::{DependencyMode, Manifest, ResolverMarkers}; /// A set of package versions that are permitted, even if they're marked as yanked by the /// relevant index. @@ -16,7 +16,7 @@ pub struct AllowedYanks(Arc>>); impl AllowedYanks { pub fn from_manifest( manifest: &Manifest, - markers: Option<&MarkerEnvironment>, + markers: &ResolverMarkers, dependencies: DependencyMode, ) -> Self { let mut allowed_yanks = FxHashMap::>::default(); diff --git a/crates/uv-trampoline/Cargo.lock b/crates/uv-trampoline/Cargo.lock index 7dcf14ef88b5..a51cf1317bce 100644 --- a/crates/uv-trampoline/Cargo.lock +++ b/crates/uv-trampoline/Cargo.lock @@ -421,24 +421,6 @@ version = "4.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "caff54706df99d2a78a5a4e3455ff45448d81ef1bb63c22cd14052ca0e993a3f" -[[package]] -name = "path-absolutize" -version = "3.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e4af381fe79fa195b4909485d99f73a80792331df0625188e707854f0b3383f5" -dependencies = [ - "path-dedot", -] - -[[package]] -name = "path-dedot" -version = "3.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"07ba0ad7e047712414213ff67533e6dd477af0a4e1d14fb52343e53d30ea9397" -dependencies = [ - "once_cell", -] - [[package]] name = "path-slash" version = "0.2.1" @@ -751,7 +733,6 @@ dependencies = [ "fs-err", "fs2", "junction", - "path-absolutize", "path-slash", "tempfile", "tracing", diff --git a/crates/uv-types/src/hash.rs b/crates/uv-types/src/hash.rs index 8a9d80e620d2..1e8faf3783ab 100644 --- a/crates/uv-types/src/hash.rs +++ b/crates/uv-types/src/hash.rs @@ -7,8 +7,9 @@ use distribution_types::{ DistributionMetadata, HashPolicy, Name, Resolution, UnresolvedRequirement, VersionId, }; use pep440_rs::Version; -use pep508_rs::MarkerEnvironment; -use pypi_types::{HashDigest, HashError, Requirement, RequirementSource}; +use pypi_types::{ + HashDigest, HashError, Requirement, RequirementSource, ResolverMarkerEnvironment, +}; use uv_configuration::HashCheckingMode; use uv_normalize::PackageName; @@ -125,7 +126,7 @@ impl HashStrategy { /// to "only evaluate marker expressions that reference an extra name.") pub fn from_requirements<'a>( requirements: impl Iterator, - markers: Option<&MarkerEnvironment>, + marker_env: Option<&ResolverMarkerEnvironment>, mode: HashCheckingMode, ) -> Result { let mut hashes = FxHashMap::>::default(); @@ -133,7 +134,9 @@ impl HashStrategy { // For each requirement, map from name to allowed hashes. We use the last entry for each // package. 
for (requirement, digests) in requirements { - if !requirement.evaluate_markers(markers, &[]) { + if !requirement + .evaluate_markers(marker_env.map(ResolverMarkerEnvironment::markers), &[]) + { continue; } diff --git a/crates/uv-version/Cargo.toml b/crates/uv-version/Cargo.toml index 6bf4fed5482b..1c566f6f8222 100644 --- a/crates/uv-version/Cargo.toml +++ b/crates/uv-version/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "uv-version" -version = "0.3.3" +version = "0.3.4" edition = { workspace = true } rust-version = { workspace = true } homepage = { workspace = true } diff --git a/crates/uv-workspace/src/workspace.rs b/crates/uv-workspace/src/workspace.rs index dce3ddc098a2..9e5c5604401b 100644 --- a/crates/uv-workspace/src/workspace.rs +++ b/crates/uv-workspace/src/workspace.rs @@ -9,8 +9,8 @@ use rustc_hash::FxHashSet; use tracing::{debug, trace, warn}; use pep508_rs::{MarkerTree, RequirementOrigin, VerbatimUrl}; -use pypi_types::{Requirement, RequirementSource}; -use uv_fs::{absolutize_path, Simplified}; +use pypi_types::{Requirement, RequirementSource, VerbatimParsedUrl}; +use uv_fs::Simplified; use uv_normalize::{GroupName, PackageName, DEV_DEPENDENCIES}; use uv_warnings::warn_user; @@ -88,9 +88,9 @@ impl Workspace { path: &Path, options: &DiscoveryOptions<'_>, ) -> Result { - let path = absolutize_path(path) + let path = std::path::absolute(path) .map_err(WorkspaceError::Normalize)? - .to_path_buf(); + .clone(); let project_path = path .ancestors() @@ -527,9 +527,9 @@ impl Workspace { if !seen.insert(member_root.clone()) { continue; } - let member_root = absolutize_path(&member_root) + let member_root = std::path::absolute(&member_root) .map_err(WorkspaceError::Normalize)? - .to_path_buf(); + .clone(); // If the directory is explicitly ignored, skip it. 
if options.ignore.contains(member_root.as_path()) { @@ -869,9 +869,9 @@ impl ProjectWorkspace { project_pyproject_toml: &PyProjectToml, options: &DiscoveryOptions<'_>, ) -> Result { - let project_path = absolutize_path(install_path) + let project_path = std::path::absolute(install_path) .map_err(WorkspaceError::Normalize)? - .to_path_buf(); + .clone(); // Check if workspaces are explicitly disabled for the project. if project_pyproject_toml @@ -1229,9 +1229,9 @@ impl VirtualProject { .and_then(|uv| uv.workspace.as_ref()) { // Otherwise, if it contains a `tool.uv.workspace` table, it's a virtual workspace. - let project_path = absolutize_path(project_root) + let project_path = std::path::absolute(project_root) .map_err(WorkspaceError::Normalize)? - .to_path_buf(); + .clone(); check_nested_workspaces(&project_path, options); @@ -1309,7 +1309,10 @@ impl VirtualProject { /// Returns dependencies that apply to the workspace root, but not any of its members. As such, /// only returns a non-empty iterator for virtual workspaces, which can include dev dependencies /// on the virtual root. - pub fn group(&self, name: &GroupName) -> impl Iterator { + pub fn group( + &self, + name: &GroupName, + ) -> impl Iterator> { match self { VirtualProject::Project(_) => { // For non-virtual projects, dev dependencies are attached to the members. 
@@ -1326,7 +1329,7 @@ impl VirtualProject { .as_ref() .and_then(|tool| tool.uv.as_ref()) .and_then(|uv| uv.dev_dependencies.as_ref()) - .map(|dev| dev.iter().map(|req| &req.name)) + .map(|dev| dev.iter()) .into_iter() .flatten(), ) diff --git a/crates/uv/Cargo.toml b/crates/uv/Cargo.toml index 7bfd96c29d5f..84f54597adb7 100644 --- a/crates/uv/Cargo.toml +++ b/crates/uv/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "uv" -version = "0.3.3" +version = "0.3.4" edition = { workspace = true } rust-version = { workspace = true } homepage = { workspace = true } diff --git a/crates/uv/src/commands/pip/check.rs b/crates/uv/src/commands/pip/check.rs index b8b28bba73e0..28a7edb21ca3 100644 --- a/crates/uv/src/commands/pip/check.rs +++ b/crates/uv/src/commands/pip/check.rs @@ -52,8 +52,12 @@ pub(crate) fn pip_check( .dimmed() )?; + // Determine the markers to use for resolution. + let markers = environment.interpreter().resolver_markers(); + + // Run the diagnostics. let diagnostics: Vec = - site_packages.diagnostics()?.into_iter().collect(); + site_packages.diagnostics(&markers)?.into_iter().collect(); if diagnostics.is_empty() { writeln!( diff --git a/crates/uv/src/commands/pip/compile.rs b/crates/uv/src/commands/pip/compile.rs index 29c870204bbe..89613564131b 100644 --- a/crates/uv/src/commands/pip/compile.rs +++ b/crates/uv/src/commands/pip/compile.rs @@ -248,10 +248,7 @@ pub(crate) async fn pip_compile( } else { let (tags, markers) = resolution_environment(python_version, python_platform, &interpreter)?; - ( - Some(tags), - ResolverMarkers::specific_environment((*markers).clone()), - ) + (Some(tags), ResolverMarkers::specific_environment(markers)) }; // Generate, but don't enforce hashes for the requirements. 
diff --git a/crates/uv/src/commands/pip/freeze.rs b/crates/uv/src/commands/pip/freeze.rs index 5aae401f0fd2..97fce7cc37be 100644 --- a/crates/uv/src/commands/pip/freeze.rs +++ b/crates/uv/src/commands/pip/freeze.rs @@ -68,7 +68,10 @@ pub(crate) fn pip_freeze( // Validate that the environment is consistent. if strict { - for diagnostic in site_packages.diagnostics()? { + // Determine the markers to use for resolution. + let markers = environment.interpreter().resolver_markers(); + + for diagnostic in site_packages.diagnostics(&markers)? { writeln!( printer.stderr(), "{}{} {}", diff --git a/crates/uv/src/commands/pip/install.rs b/crates/uv/src/commands/pip/install.rs index 0ca873e1d416..a52fb7ce3822 100644 --- a/crates/uv/src/commands/pip/install.rs +++ b/crates/uv/src/commands/pip/install.rs @@ -32,7 +32,7 @@ use uv_types::{BuildIsolation, HashStrategy}; use crate::commands::pip::loggers::{DefaultInstallLogger, DefaultResolveLogger, InstallLogger}; use crate::commands::pip::operations::Modifications; -use crate::commands::pip::{operations, resolution_environment}; +use crate::commands::pip::{operations, resolution_markers, resolution_tags}; use crate::commands::{ExitStatus, SharedState}; use crate::printer::Printer; @@ -183,6 +183,14 @@ pub(crate) async fn pip_install( let _lock = environment.lock()?; + // Determine the markers to use for the resolution. + let interpreter = environment.interpreter(); + let markers = resolution_markers( + python_version.as_ref(), + python_platform.as_ref(), + interpreter, + ); + // Determine the set of installed packages. let site_packages = SitePackages::from_environment(&environment)?; @@ -190,7 +198,7 @@ pub(crate) async fn pip_install( // Ideally, the resolver would be fast enough to let us remove this check. But right now, for large environments, // it's an order of magnitude faster to validate the environment than to resolve the requirements. 
if reinstall.is_none() && upgrade.is_none() && source_trees.is_empty() && overrides.is_empty() { - match site_packages.satisfies(&requirements, &constraints)? { + match site_packages.satisfies(&requirements, &constraints, &markers)? { // If the requirements are already satisfied, we're done. SatisfiesResult::Fresh { recursive_requirements, @@ -216,8 +224,6 @@ pub(crate) async fn pip_install( } } - let interpreter = environment.interpreter(); - // Determine the Python requirement, if the user requested a specific version. let python_requirement = if let Some(python_version) = python_version.as_ref() { PythonRequirement::from_python_version(interpreter, python_version) @@ -225,8 +231,12 @@ pub(crate) async fn pip_install( PythonRequirement::from_interpreter(interpreter) }; - // Determine the environment for the resolution. - let (tags, markers) = resolution_environment(python_version, python_platform, interpreter)?; + // Determine the tags to use for the resolution. + let tags = resolution_tags( + python_version.as_ref(), + python_platform.as_ref(), + interpreter, + )?; // Collect the set of required hashes. let hasher = if let Some(hash_checking) = hash_checking { @@ -262,7 +272,7 @@ pub(crate) async fn pip_install( .cache(cache.clone()) .index_urls(index_locations.index_urls()) .index_strategy(index_strategy) - .markers(&markers) + .markers(interpreter.markers()) .platform(interpreter.platform()) .build(); @@ -333,7 +343,7 @@ pub(crate) async fn pip_install( &reinstall, &upgrade, Some(&tags), - ResolverMarkers::specific_environment((*markers).clone()), + ResolverMarkers::specific_environment(markers.clone()), python_requirement, &client, &flat_index, @@ -366,6 +376,7 @@ pub(crate) async fn pip_install( compile, &index_locations, &hasher, + &markers, &tags, &client, &state.in_flight, @@ -384,7 +395,7 @@ pub(crate) async fn pip_install( // Notify the user of any environment diagnostics. 
if strict && !dry_run { - operations::diagnose_environment(&resolution, &environment, printer)?; + operations::diagnose_environment(&resolution, &environment, &markers, printer)?; } Ok(ExitStatus::Success) diff --git a/crates/uv/src/commands/pip/list.rs b/crates/uv/src/commands/pip/list.rs index 1c629b1947f4..e10c47062b42 100644 --- a/crates/uv/src/commands/pip/list.rs +++ b/crates/uv/src/commands/pip/list.rs @@ -119,7 +119,10 @@ pub(crate) fn pip_list( // Validate that the environment is consistent. if strict { - for diagnostic in site_packages.diagnostics()? { + // Determine the markers to use for resolution. + let markers = environment.interpreter().resolver_markers(); + + for diagnostic in site_packages.diagnostics(&markers)? { writeln!( printer.stderr(), "{}{} {}", diff --git a/crates/uv/src/commands/pip/mod.rs b/crates/uv/src/commands/pip/mod.rs index 4821ad54583d..d2a2fce005e1 100644 --- a/crates/uv/src/commands/pip/mod.rs +++ b/crates/uv/src/commands/pip/mod.rs @@ -1,7 +1,7 @@ use std::borrow::Cow; -use pep508_rs::MarkerEnvironment; use platform_tags::{Tags, TagsError}; +use pypi_types::ResolverMarkerEnvironment; use uv_configuration::TargetTriple; use uv_python::{Interpreter, PythonVersion}; @@ -17,12 +17,65 @@ pub(crate) mod sync; pub(crate) mod tree; pub(crate) mod uninstall; +pub(crate) fn resolution_markers( + python_version: Option<&PythonVersion>, + python_platform: Option<&TargetTriple>, + interpreter: &Interpreter, +) -> ResolverMarkerEnvironment { + match (python_platform, python_version) { + (Some(python_platform), Some(python_version)) => ResolverMarkerEnvironment::from( + python_version.markers(&python_platform.markers(interpreter.markers())), + ), + (Some(python_platform), None) => { + ResolverMarkerEnvironment::from(python_platform.markers(interpreter.markers())) + } + (None, Some(python_version)) => { + ResolverMarkerEnvironment::from(python_version.markers(interpreter.markers())) + } + (None, None) => interpreter.resolver_markers(), + } +} 
+ +pub(crate) fn resolution_tags<'env>( + python_version: Option<&PythonVersion>, + python_platform: Option<&TargetTriple>, + interpreter: &'env Interpreter, +) -> Result, TagsError> { + Ok(match (python_platform, python_version.as_ref()) { + (Some(python_platform), Some(python_version)) => Cow::Owned(Tags::from_env( + &python_platform.platform(), + (python_version.major(), python_version.minor()), + interpreter.implementation_name(), + interpreter.implementation_tuple(), + interpreter.manylinux_compatible(), + interpreter.gil_disabled(), + )?), + (Some(python_platform), None) => Cow::Owned(Tags::from_env( + &python_platform.platform(), + interpreter.python_tuple(), + interpreter.implementation_name(), + interpreter.implementation_tuple(), + interpreter.manylinux_compatible(), + interpreter.gil_disabled(), + )?), + (None, Some(python_version)) => Cow::Owned(Tags::from_env( + interpreter.platform(), + (python_version.major(), python_version.minor()), + interpreter.implementation_name(), + interpreter.implementation_tuple(), + interpreter.manylinux_compatible(), + interpreter.gil_disabled(), + )?), + (None, None) => Cow::Borrowed(interpreter.tags()?), + }) +} + /// Determine the tags, markers, and interpreter to use for resolution. pub(crate) fn resolution_environment( python_version: Option, python_platform: Option, interpreter: &Interpreter, -) -> Result<(Cow<'_, Tags>, Cow<'_, MarkerEnvironment>), TagsError> { +) -> Result<(Cow<'_, Tags>, ResolverMarkerEnvironment), TagsError> { let tags = match (python_platform, python_version.as_ref()) { (Some(python_platform), Some(python_version)) => Cow::Owned(Tags::from_env( &python_platform.platform(), @@ -53,12 +106,16 @@ pub(crate) fn resolution_environment( // Apply the platform tags to the markers. 
let markers = match (python_platform, python_version) { - (Some(python_platform), Some(python_version)) => { - Cow::Owned(python_version.markers(&python_platform.markers(interpreter.markers()))) + (Some(python_platform), Some(python_version)) => ResolverMarkerEnvironment::from( + python_version.markers(&python_platform.markers(interpreter.markers())), + ), + (Some(python_platform), None) => { + ResolverMarkerEnvironment::from(python_platform.markers(interpreter.markers())) + } + (None, Some(python_version)) => { + ResolverMarkerEnvironment::from(python_version.markers(interpreter.markers())) } - (Some(python_platform), None) => Cow::Owned(python_platform.markers(interpreter.markers())), - (None, Some(python_version)) => Cow::Owned(python_version.markers(interpreter.markers())), - (None, None) => Cow::Borrowed(interpreter.markers()), + (None, None) => interpreter.resolver_markers(), }; Ok((tags, markers)) diff --git a/crates/uv/src/commands/pip/operations.rs b/crates/uv/src/commands/pip/operations.rs index 7f236d3f1476..4e2bd312b549 100644 --- a/crates/uv/src/commands/pip/operations.rs +++ b/crates/uv/src/commands/pip/operations.rs @@ -17,7 +17,7 @@ use distribution_types::{ }; use install_wheel_rs::linker::LinkMode; use platform_tags::Tags; -use pypi_types::Requirement; +use pypi_types::{Requirement, ResolverMarkerEnvironment}; use uv_cache::Cache; use uv_client::{BaseClientBuilder, RegistryClient}; use uv_configuration::{ @@ -199,7 +199,7 @@ pub(crate) async fn resolve( .chain(upgrade.constraints().cloned()), ); let overrides = Overrides::from_requirements(overrides); - let preferences = Preferences::from_iter(preferences, markers.marker_environment()); + let preferences = Preferences::from_iter(preferences, &markers); // Determine any lookahead requirements. 
let lookaheads = match options.dependency_mode { @@ -349,6 +349,7 @@ pub(crate) async fn install( compile: bool, index_urls: &IndexLocations, hasher: &HashStrategy, + markers: &ResolverMarkerEnvironment, tags: &Tags, client: &RegistryClient, in_flight: &InFlight, @@ -376,6 +377,7 @@ pub(crate) async fn install( index_urls, cache, venv, + markers, tags, ) .context("Failed to determine installation plan")?; @@ -661,10 +663,11 @@ pub(crate) fn diagnose_resolution( pub(crate) fn diagnose_environment( resolution: &Resolution, venv: &PythonEnvironment, + markers: &ResolverMarkerEnvironment, printer: Printer, ) -> Result<(), Error> { let site_packages = SitePackages::from_environment(venv)?; - for diagnostic in site_packages.diagnostics()? { + for diagnostic in site_packages.diagnostics(markers)? { // Only surface diagnostics that are "relevant" to the current resolution. if resolution .packages() diff --git a/crates/uv/src/commands/pip/show.rs b/crates/uv/src/commands/pip/show.rs index 3119f635dc05..f7f62307c2c4 100644 --- a/crates/uv/src/commands/pip/show.rs +++ b/crates/uv/src/commands/pip/show.rs @@ -55,7 +55,7 @@ pub(crate) fn pip_show( let site_packages = SitePackages::from_environment(&environment)?; // Determine the markers to use for resolution. - let markers = environment.interpreter().markers(); + let markers = environment.interpreter().resolver_markers(); // Sort and deduplicate the packages, which are keyed by name. 
packages.sort_unstable(); @@ -101,7 +101,7 @@ pub(crate) fn pip_show( metadata .requires_dist .into_iter() - .filter(|req| req.evaluate_markers(markers, &[])) + .filter(|req| req.evaluate_markers(&markers, &[])) .map(|req| req.name) .sorted_unstable() .dedup() @@ -119,7 +119,7 @@ pub(crate) fn pip_show( let requires = metadata .requires_dist .into_iter() - .filter(|req| req.evaluate_markers(markers, &[])) + .filter(|req| req.evaluate_markers(&markers, &[])) .map(|req| req.name) .collect_vec(); if !requires.is_empty() { @@ -192,7 +192,7 @@ pub(crate) fn pip_show( // Validate that the environment is consistent. if strict { - for diagnostic in site_packages.diagnostics()? { + for diagnostic in site_packages.diagnostics(&markers)? { writeln!( printer.stderr(), "{}{} {}", diff --git a/crates/uv/src/commands/pip/sync.rs b/crates/uv/src/commands/pip/sync.rs index 85f5021876ae..f624610628b1 100644 --- a/crates/uv/src/commands/pip/sync.rs +++ b/crates/uv/src/commands/pip/sync.rs @@ -31,7 +31,7 @@ use uv_types::{BuildIsolation, HashStrategy}; use crate::commands::pip::loggers::{DefaultInstallLogger, DefaultResolveLogger}; use crate::commands::pip::operations::Modifications; -use crate::commands::pip::{operations, resolution_environment}; +use crate::commands::pip::{operations, resolution_markers, resolution_tags}; use crate::commands::{ExitStatus, SharedState}; use crate::printer::Printer; @@ -183,8 +183,17 @@ pub(crate) async fn pip_sync( PythonRequirement::from_interpreter(interpreter) }; - // Determine the environment for the resolution. - let (tags, markers) = resolution_environment(python_version, python_platform, interpreter)?; + // Determine the markers and tags to use for resolution. + let markers = resolution_markers( + python_version.as_ref(), + python_platform.as_ref(), + interpreter, + ); + let tags = resolution_tags( + python_version.as_ref(), + python_platform.as_ref(), + interpreter, + )?; // Collect the set of required hashes. 
let hasher = if let Some(hash_checking) = hash_checking { @@ -213,7 +222,7 @@ pub(crate) async fn pip_sync( .cache(cache.clone()) .index_urls(index_locations.index_urls()) .index_strategy(index_strategy) - .markers(&markers) + .markers(interpreter.markers()) .platform(interpreter.platform()) .build(); @@ -292,7 +301,7 @@ pub(crate) async fn pip_sync( &reinstall, &upgrade, Some(&tags), - ResolverMarkers::specific_environment((*markers).clone()), + ResolverMarkers::specific_environment(markers.clone()), python_requirement, &client, &flat_index, @@ -325,6 +334,7 @@ pub(crate) async fn pip_sync( compile, &index_locations, &hasher, + &markers, &tags, &client, &state.in_flight, @@ -343,7 +353,7 @@ pub(crate) async fn pip_sync( // Notify the user of any environment diagnostics. if strict && !dry_run { - operations::diagnose_environment(&resolution, &environment, printer)?; + operations::diagnose_environment(&resolution, &environment, &markers, printer)?; } Ok(ExitStatus::Success) diff --git a/crates/uv/src/commands/pip/tree.rs b/crates/uv/src/commands/pip/tree.rs index 2e2442e902e7..d106b18ac192 100644 --- a/crates/uv/src/commands/pip/tree.rs +++ b/crates/uv/src/commands/pip/tree.rs @@ -7,8 +7,7 @@ use rustc_hash::{FxHashMap, FxHashSet}; use tracing::debug; use distribution_types::{Diagnostic, Name}; -use pep508_rs::MarkerEnvironment; -use pypi_types::RequirementSource; +use pypi_types::{RequirementSource, ResolverMarkerEnvironment}; use uv_cache::Cache; use uv_distribution::Metadata; use uv_fs::Simplified; @@ -60,6 +59,9 @@ pub(crate) fn pip_tree( .push(metadata); } + // Determine the markers to use for the resolution. + let markers = environment.interpreter().resolver_markers(); + // Render the tree. 
let rendered_tree = DisplayDependencyGraph::new( depth.into(), @@ -68,7 +70,7 @@ pub(crate) fn pip_tree( no_dedupe, invert, show_version_specifiers, - environment.interpreter().markers(), + &markers, packages, ) .render() @@ -87,7 +89,7 @@ pub(crate) fn pip_tree( // Validate that the environment is consistent. if strict { - for diagnostic in site_packages.diagnostics()? { + for diagnostic in site_packages.diagnostics(&markers)? { writeln!( printer.stderr(), "{}{} {}", @@ -129,7 +131,7 @@ impl DisplayDependencyGraph { no_dedupe: bool, invert: bool, show_version_specifiers: bool, - markers: &MarkerEnvironment, + markers: &ResolverMarkerEnvironment, packages: IndexMap>, ) -> Self { let mut requirements: FxHashMap<_, Vec<_>> = FxHashMap::default(); diff --git a/crates/uv/src/commands/project/add.rs b/crates/uv/src/commands/project/add.rs index d8bf9b4ffd52..37f5f83d8306 100644 --- a/crates/uv/src/commands/project/add.rs +++ b/crates/uv/src/commands/project/add.rs @@ -32,7 +32,9 @@ use uv_workspace::pyproject::{DependencyType, Source, SourceError}; use uv_workspace::pyproject_mut::{ArrayEdit, DependencyTarget, PyProjectTomlMut}; use uv_workspace::{DiscoveryOptions, VirtualProject, Workspace}; -use crate::commands::pip::loggers::{DefaultInstallLogger, DefaultResolveLogger}; +use crate::commands::pip::loggers::{ + DefaultInstallLogger, DefaultResolveLogger, SummaryResolveLogger, +}; use crate::commands::pip::operations::Modifications; use crate::commands::pip::resolution_environment; use crate::commands::project::ProjectError; @@ -457,13 +459,13 @@ pub(crate) async fn add( let existing = project.pyproject_toml(); // Update the `pypackage.toml` in-memory. - let project = project + let mut project = project .clone() .with_pyproject_toml(toml::from_str(&content)?) .context("Failed to update `pyproject.toml`")?; // Lock and sync the environment, if necessary. 
- let lock = match project::lock::do_safe_lock( + let mut lock = match project::lock::do_safe_lock( locked, frozen, project.workspace(), @@ -567,7 +569,51 @@ pub(crate) async fn add( // string content, since the above loop _must_ change an empty specifier to a non-empty // specifier. if modified { - fs_err::write(project.root().join("pyproject.toml"), toml.to_string())?; + let content = toml.to_string(); + + // Write the updated `pyproject.toml` to disk. + fs_err::write(project.root().join("pyproject.toml"), &content)?; + + // Update the `pypackage.toml` in-memory. + project = project + .clone() + .with_pyproject_toml(toml::from_str(&content)?) + .context("Failed to update `pyproject.toml`")?; + + // If the file was modified, we have to lock again, though the only expected change is + // the addition of the minimum version specifiers. + lock = match project::lock::do_safe_lock( + locked, + frozen, + project.workspace(), + venv.interpreter(), + settings.as_ref().into(), + Box::new(SummaryResolveLogger), + connectivity, + concurrency, + native_tls, + cache, + printer, + ) + .await + { + Ok(result) => result.into_lock(), + Err(ProjectError::Operation(pip::operations::Error::Resolve( + uv_resolver::ResolveError::NoSolution(err), + ))) => { + let header = err.header(); + let report = miette::Report::new(WithHelp { header, cause: err, help: Some("If this is intentional, run `uv add --frozen` to skip the lock and sync steps.") }); + anstream::eprint!("{report:?}"); + + // Revert the changes to the `pyproject.toml`, if necessary. 
+ if modified { + fs_err::write(project.root().join("pyproject.toml"), existing)?; + } + + return Ok(ExitStatus::Failure); + } + Err(err) => return Err(err.into()), + }; } } diff --git a/crates/uv/src/commands/project/init.rs b/crates/uv/src/commands/project/init.rs index b60d2276ec18..7180d9157b3a 100644 --- a/crates/uv/src/commands/project/init.rs +++ b/crates/uv/src/commands/project/init.rs @@ -8,7 +8,7 @@ use pep508_rs::PackageName; use tracing::{debug, warn}; use uv_cache::Cache; use uv_client::{BaseClientBuilder, Connectivity}; -use uv_fs::{absolutize_path, Simplified, CWD}; +use uv_fs::{Simplified, CWD}; use uv_python::{ EnvironmentPreference, PythonDownloads, PythonInstallation, PythonPreference, PythonRequest, VersionRequest, @@ -41,7 +41,7 @@ pub(crate) async fn init( // Default to the current directory if a path was not provided. let path = match explicit_path { None => CWD.to_path_buf(), - Some(ref path) => absolutize_path(Path::new(path))?.to_path_buf(), + Some(ref path) => std::path::absolute(path)?, }; // Make sure a project does not already exist in the given directory. diff --git a/crates/uv/src/commands/project/mod.rs b/crates/uv/src/commands/project/mod.rs index 4c5258391d31..ed56bd9ec734 100644 --- a/crates/uv/src/commands/project/mod.rs +++ b/crates/uv/src/commands/project/mod.rs @@ -534,7 +534,7 @@ pub(crate) async fn resolve_environment<'a>( // Determine the tags, markers, and interpreter to use for resolution. let tags = interpreter.tags()?; - let markers = interpreter.markers(); + let markers = interpreter.resolver_markers(); let python_requirement = PythonRequirement::from_interpreter(interpreter); // Add all authenticated sources to the cache. 
@@ -549,7 +549,7 @@ pub(crate) async fn resolve_environment<'a>( .index_urls(index_locations.index_urls()) .index_strategy(index_strategy) .keyring(keyring_provider) - .markers(markers) + .markers(interpreter.markers()) .platform(interpreter.platform()) .build(); @@ -629,7 +629,7 @@ pub(crate) async fn resolve_environment<'a>( &reinstall, &upgrade, Some(tags), - ResolverMarkers::specific_environment(markers.clone()), + ResolverMarkers::specific_environment(markers), python_requirement, &client, &flat_index, @@ -662,6 +662,7 @@ pub(crate) async fn sync_environment( keyring_provider, config_setting, no_build_isolation, + no_build_isolation_package, exclude_newer, link_mode, compile_bytecode, @@ -672,10 +673,10 @@ pub(crate) async fn sync_environment( let site_packages = SitePackages::from_environment(&venv)?; - // Determine the tags, markers, and interpreter to use for resolution. + // Determine the markers tags to use for resolution. let interpreter = venv.interpreter(); let tags = venv.interpreter().tags()?; - let markers = venv.interpreter().markers(); + let markers = interpreter.resolver_markers(); // Add all authenticated sources to the cache. for url in index_locations.urls() { @@ -689,15 +690,17 @@ pub(crate) async fn sync_environment( .index_urls(index_locations.index_urls()) .index_strategy(index_strategy) .keyring(keyring_provider) - .markers(markers) + .markers(interpreter.markers()) .platform(interpreter.platform()) .build(); // Determine whether to enable build isolation. let build_isolation = if no_build_isolation { BuildIsolation::Shared(&venv) - } else { + } else if no_build_isolation_package.is_empty() { BuildIsolation::Isolated + } else { + BuildIsolation::SharedPackage(&venv, no_build_isolation_package) }; // TODO(charlie): These are all default values. 
We should consider whether we want to make them @@ -745,6 +748,7 @@ pub(crate) async fn sync_environment( compile_bytecode, index_locations, &hasher, + &markers, tags, &client, &state.in_flight, @@ -824,10 +828,14 @@ pub(crate) async fn update_environment( .. } = spec; + // Determine markers to use for resolution. + let interpreter = venv.interpreter(); + let markers = venv.interpreter().resolver_markers(); + // Check if the current environment satisfies the requirements let site_packages = SitePackages::from_environment(&venv)?; if source_trees.is_empty() && reinstall.is_none() && upgrade.is_none() && overrides.is_empty() { - match site_packages.satisfies(&requirements, &constraints)? { + match site_packages.satisfies(&requirements, &constraints, &markers)? { // If the requirements are already satisfied, we're done. SatisfiesResult::Fresh { recursive_requirements, @@ -851,12 +859,6 @@ pub(crate) async fn update_environment( } } - // Determine the tags, markers, and interpreter to use for resolution. - let interpreter = venv.interpreter(); - let tags = venv.interpreter().tags()?; - let markers = venv.interpreter().markers(); - let python_requirement = PythonRequirement::from_interpreter(interpreter); - // Add all authenticated sources to the cache. for url in index_locations.urls() { store_credentials_from_url(url); @@ -869,7 +871,7 @@ pub(crate) async fn update_environment( .index_urls(index_locations.index_urls()) .index_strategy(*index_strategy) .keyring(*keyring_provider) - .markers(markers) + .markers(interpreter.markers()) .platform(interpreter.platform()) .build(); @@ -898,6 +900,10 @@ pub(crate) async fn update_environment( let hasher = HashStrategy::default(); let preferences = Vec::default(); + // Determine the tags to use for resolution. + let tags = venv.interpreter().tags()?; + let python_requirement = PythonRequirement::from_interpreter(interpreter); + // Resolve the flat indexes from `--find-links`. 
let flat_index = { let client = FlatIndexClient::new(&client, cache); @@ -970,6 +976,7 @@ pub(crate) async fn update_environment( *compile_bytecode, index_locations, &hasher, + &markers, tags, &client, &state.in_flight, diff --git a/crates/uv/src/commands/project/run.rs b/crates/uv/src/commands/project/run.rs index 16c635085167..59d13c203d9e 100644 --- a/crates/uv/src/commands/project/run.rs +++ b/crates/uv/src/commands/project/run.rs @@ -11,7 +11,6 @@ use itertools::Itertools; use owo_colors::OwoColorize; use tokio::process::Command; use tracing::{debug, warn}; - use uv_cache::Cache; use uv_cli::ExternalCommand; use uv_client::{BaseClientBuilder, Connectivity}; @@ -692,7 +691,11 @@ fn can_skip_ephemeral( return false; } - match site_packages.satisfies(&spec.requirements, &spec.constraints) { + match site_packages.satisfies( + &spec.requirements, + &spec.constraints, + &base_interpreter.resolver_markers(), + ) { // If the requirements are already satisfied, we're done. Ok(SatisfiesResult::Fresh { recursive_requirements, diff --git a/crates/uv/src/commands/project/sync.rs b/crates/uv/src/commands/project/sync.rs index c4b2c766e51e..01b5c1fa5578 100644 --- a/crates/uv/src/commands/project/sync.rs +++ b/crates/uv/src/commands/project/sync.rs @@ -154,6 +154,7 @@ pub(super) async fn do_sync( keyring_provider, config_setting, no_build_isolation, + no_build_isolation_package, exclude_newer, link_mode, compile_bytecode, @@ -172,11 +173,13 @@ pub(super) async fn do_sync( } } + // Determine the markers to use for resolution. + let markers = venv.interpreter().resolver_markers(); + // Validate that the platform is supported by the lockfile. 
let environments = lock.supported_environments(); if !environments.is_empty() { - let platform = venv.interpreter().markers(); - if !environments.iter().any(|env| env.evaluate(platform, &[])) { + if !environments.iter().any(|env| env.evaluate(&markers, &[])) { return Err(ProjectError::LockedPlatformIncompatibility( environments .iter() @@ -194,11 +197,11 @@ pub(super) async fn do_sync( vec![] }; - let markers = venv.interpreter().markers(); + // Determine the tags to use for resolution. let tags = venv.interpreter().tags()?; // Read the lockfile. - let resolution = lock.to_resolution(project, markers, tags, extras, &dev)?; + let resolution = lock.to_resolution(project, &markers, tags, extras, &dev)?; // If `--no-install-project` is set, remove the project itself. let resolution = apply_no_install_project(no_install_project, resolution, project); @@ -221,15 +224,17 @@ pub(super) async fn do_sync( .index_urls(index_locations.index_urls()) .index_strategy(index_strategy) .keyring(keyring_provider) - .markers(markers) + .markers(venv.interpreter().markers()) .platform(venv.interpreter().platform()) .build(); // Determine whether to enable build isolation. let build_isolation = if no_build_isolation { BuildIsolation::Shared(venv) - } else { + } else if no_build_isolation_package.is_empty() { BuildIsolation::Isolated + } else { + BuildIsolation::SharedPackage(venv, no_build_isolation_package) }; // TODO(charlie): These are all default values. 
We should consider whether we want to make them @@ -281,6 +286,7 @@ pub(super) async fn do_sync( compile_bytecode, index_locations, &hasher, + &markers, tags, &client, &state.in_flight, diff --git a/crates/uv/src/commands/project/tree.rs b/crates/uv/src/commands/project/tree.rs index effe498c8731..d9e512079e1c 100644 --- a/crates/uv/src/commands/project/tree.rs +++ b/crates/uv/src/commands/project/tree.rs @@ -1,4 +1,3 @@ -use std::borrow::Cow; use std::fmt::Write; use anyhow::Result; @@ -13,6 +12,7 @@ use uv_resolver::TreeDisplay; use uv_workspace::{DiscoveryOptions, Workspace}; use crate::commands::pip::loggers::DefaultResolveLogger; +use crate::commands::pip::resolution_markers; use crate::commands::project::FoundInterpreter; use crate::commands::{project, ExitStatus}; use crate::printer::Printer; @@ -75,20 +75,17 @@ pub(crate) async fn tree( .await? .into_lock(); - // Apply the platform tags to the markers. - let markers = match (python_platform, python_version) { - (Some(python_platform), Some(python_version)) => { - Cow::Owned(python_version.markers(&python_platform.markers(interpreter.markers()))) - } - (Some(python_platform), None) => Cow::Owned(python_platform.markers(interpreter.markers())), - (None, Some(python_version)) => Cow::Owned(python_version.markers(interpreter.markers())), - (None, None) => Cow::Borrowed(interpreter.markers()), - }; + // Determine the markers to use for resolution. + let markers = resolution_markers( + python_version.as_ref(), + python_platform.as_ref(), + &interpreter, + ); // Render the tree. 
let tree = TreeDisplay::new( &lock, - (!universal).then(|| markers.as_ref()), + (!universal).then_some(&markers), depth.into(), prune, package, diff --git a/crates/uv/src/commands/python/find.rs b/crates/uv/src/commands/python/find.rs index e171239389c3..3f9658b64d6c 100644 --- a/crates/uv/src/commands/python/find.rs +++ b/crates/uv/src/commands/python/find.rs @@ -70,7 +70,7 @@ pub(crate) async fn find( println!( "{}", - uv_fs::absolutize_path(python.interpreter().sys_executable())?.simplified_display() + std::path::absolute(python.interpreter().sys_executable())?.simplified_display() ); Ok(ExitStatus::Success) diff --git a/crates/uv/src/commands/tool/run.rs b/crates/uv/src/commands/tool/run.rs index 1a82267fe54c..aa7089bef84c 100644 --- a/crates/uv/src/commands/tool/run.rs +++ b/crates/uv/src/commands/tool/run.rs @@ -514,7 +514,11 @@ async fn get_or_create_environment( let constraints = []; if matches!( - site_packages.satisfies(&requirements, &constraints), + site_packages.satisfies( + &requirements, + &constraints, + &interpreter.resolver_markers() + ), Ok(SatisfiesResult::Fresh { .. }) ) { debug!("Using existing tool `{}`", from.name); diff --git a/crates/uv/src/lib.rs b/crates/uv/src/lib.rs index 5a2db246809a..606e621d06fe 100644 --- a/crates/uv/src/lib.rs +++ b/crates/uv/src/lib.rs @@ -144,7 +144,10 @@ async fn run(cli: Cli) -> Result { // If the target is a PEP 723 script, parse it. let script = if let Commands::Project(command) = &*cli.command { if let ProjectCommand::Run(uv_cli::RunArgs { .. }) = &**command { - if let Some(RunCommand::PythonScript(script, _)) = run_command.as_ref() { + if let Some( + RunCommand::PythonScript(script, _) | RunCommand::PythonGuiScript(script, _), + ) = run_command.as_ref() + { Pep723Script::read(&script).await? 
} else { None diff --git a/crates/uv/src/settings.rs b/crates/uv/src/settings.rs index 3284a3ba19b4..48115c1e4890 100644 --- a/crates/uv/src/settings.rs +++ b/crates/uv/src/settings.rs @@ -296,12 +296,20 @@ impl ToolRunSettings { // If `--upgrade` was passed explicitly, warn. if installer.upgrade || !installer.upgrade_package.is_empty() { - warn_user_once!("Tools cannot be upgraded via `{invocation_source}`; use `uv tool upgrade --all` to upgrade all installed tools, or `{invocation_source} package@latest` to run the latest version of a tool"); + if with.is_empty() && with_requirements.is_empty() { + warn_user_once!("Tools cannot be upgraded via `{invocation_source}`; use `uv tool upgrade --all` to upgrade all installed tools, or `{invocation_source} package@latest` to run the latest version of a tool."); + } else { + warn_user_once!("Tools cannot be upgraded via `{invocation_source}`; use `uv tool upgrade --all` to upgrade all installed tools, `{invocation_source} package@latest` to run the latest version of a tool, or `{invocation_source} --refresh package` to upgrade any `--with` dependencies."); + } } // If `--reinstall` was passed explicitly, warn. 
if installer.reinstall || !installer.reinstall_package.is_empty() { - warn_user_once!("Tools cannot be reinstalled via `{invocation_source}`; use `uv tool upgrade --reinstall` to reinstall all installed tools, or `{invocation_source} package@latest` to run the latest version of a tool"); + if with.is_empty() && with_requirements.is_empty() { + warn_user_once!("Tools cannot be reinstalled via `{invocation_source}`; use `uv tool upgrade --reinstall` to reinstall all installed tools, or `{invocation_source} package@latest` to run the latest version of a tool."); + } else { + warn_user_once!("Tools cannot be reinstalled via `{invocation_source}`; use `uv tool upgrade --reinstall` to reinstall all installed tools, `{invocation_source} package@latest` to run the latest version of a tool, or `{invocation_source} --refresh package` to reinstall any `--with` dependencies."); + } } Self { @@ -613,6 +621,7 @@ impl PythonPinSettings { } } } + /// The resolved settings to use for a `sync` invocation. #[allow(clippy::struct_excessive_bools, dead_code)] #[derive(Debug, Clone)] @@ -660,16 +669,6 @@ impl SyncSettings { filesystem, ); - let exact = flag(exact, inexact).unwrap_or(true); - - // By default, sync with exact semantics, unless the user set `--no-build-isolation`; - // otherwise, we'll end up removing build dependencies. 
- let modifications = if !exact || settings.no_build_isolation { - Modifications::Sufficient - } else { - Modifications::Exact - }; - Self { locked, frozen, @@ -681,7 +680,11 @@ impl SyncSettings { no_install_project, no_install_workspace, no_install_package, - modifications, + modifications: if flag(exact, inexact).unwrap_or(true) { + Modifications::Exact + } else { + Modifications::Sufficient + }, package, python, refresh: Refresh::from(refresh), @@ -1586,6 +1589,7 @@ pub(crate) struct InstallerSettingsRef<'a> { pub(crate) keyring_provider: KeyringProviderType, pub(crate) config_setting: &'a ConfigSettings, pub(crate) no_build_isolation: bool, + pub(crate) no_build_isolation_package: &'a [PackageName], pub(crate) exclude_newer: Option, pub(crate) link_mode: LinkMode, pub(crate) compile_bytecode: bool, @@ -2166,6 +2170,7 @@ impl<'a> From> for InstallerSettingsRef<'a> { keyring_provider: settings.keyring_provider, config_setting: settings.config_setting, no_build_isolation: settings.no_build_isolation, + no_build_isolation_package: settings.no_build_isolation_package, exclude_newer: settings.exclude_newer, link_mode: settings.link_mode, compile_bytecode: settings.compile_bytecode, diff --git a/crates/uv/tests/edit.rs b/crates/uv/tests/edit.rs index 6dc2251909eb..83606f8c0cd9 100644 --- a/crates/uv/tests/edit.rs +++ b/crates/uv/tests/edit.rs @@ -3051,7 +3051,7 @@ fn add_lower_bound_optional() -> Result<()> { ] [package.metadata] - requires-dist = [{ name = "anyio", marker = "extra == 'io'" }] + requires-dist = [{ name = "anyio", marker = "extra == 'io'", specifier = ">=4.3.0" }] [[package]] name = "sniffio" @@ -3142,7 +3142,7 @@ fn add_lower_bound_local() -> Result<()> { ] [package.metadata] - requires-dist = [{ name = "local-simple-a" }] + requires-dist = [{ name = "local-simple-a", specifier = ">=1.2.3" }] "### ); }); @@ -3227,7 +3227,7 @@ fn add_virtual() -> Result<()> { exclude-newer = "2024-03-25T00:00:00Z" [manifest] - requirements = [{ name = "iniconfig" }] + 
requirements = [{ name = "iniconfig", specifier = ">=2.0.0" }] [[package]] name = "iniconfig" diff --git a/crates/uv/tests/lock.rs b/crates/uv/tests/lock.rs index 53fe9f960c49..38080386a855 100644 --- a/crates/uv/tests/lock.rs +++ b/crates/uv/tests/lock.rs @@ -10228,10 +10228,10 @@ fn lock_overlapping_environment() -> Result<()> { Ok(()) } -/// Lock a requirement from PyPI. +/// Lock a virtual project with forked dev dependencies. #[test] -fn lock_virtual() -> Result<()> { - let context = TestContext::new("3.12"); +fn lock_virtual_fork() -> Result<()> { + let context = TestContext::new("3.10"); let pyproject_toml = context.temp_dir.child("pyproject.toml"); pyproject_toml.write_str( @@ -10241,7 +10241,8 @@ fn lock_virtual() -> Result<()> { [tool.uv] dev-dependencies = [ - "anyio" + "anyio < 3 ; python_version >= '3.11'", + "anyio > 3 ; python_version < '3.11'", ] "#, )?; @@ -10252,7 +10253,7 @@ fn lock_virtual() -> Result<()> { ----- stdout ----- ----- stderr ----- - Resolved 3 packages in [TIME] + Resolved 6 packages in [TIME] "###); let lock = fs_err::read_to_string(context.temp_dir.join("uv.lock")).unwrap(); @@ -10263,27 +10264,64 @@ fn lock_virtual() -> Result<()> { assert_snapshot!( lock, @r###" version = 1 - requires-python = ">=3.12" + requires-python = ">=3.10" + resolution-markers = [ + "python_full_version < '3.11'", + "python_full_version >= '3.11'", + ] [options] exclude-newer = "2024-03-25T00:00:00Z" [manifest] - requirements = [{ name = "anyio" }] + requirements = [ + { name = "anyio", marker = "python_full_version < '3.11'", specifier = ">3" }, + { name = "anyio", marker = "python_full_version >= '3.11'", specifier = "<3" }, + ] + + [[package]] + name = "anyio" + version = "2.2.0" + source = { registry = "https://pypi.org/simple" } + resolution-markers = [ + "python_full_version >= '3.11'", + ] + dependencies = [ + { name = "idna", marker = "python_full_version >= '3.11'" }, + { name = "sniffio", marker = "python_full_version >= '3.11'" }, + ] + sdist 
= { url = "https://files.pythonhosted.org/packages/d3/e6/901a94731af20e7109415525666cb3753a2bd1edd19616c2730448dffd0d/anyio-2.2.0.tar.gz", hash = "sha256:4a41c5b3a65ed92e469d51b6fba3779301850ea2e352afcf9e36c46f21ee14a9", size = 97217 } + wheels = [ + { url = "https://files.pythonhosted.org/packages/49/c3/b83a3c02c7d6f66932e9a72621d7f207cbfd2bd72b4c8931567ee386fb55/anyio-2.2.0-py3-none-any.whl", hash = "sha256:aa3da546ed17f097ca876c78024dea380a3b7fa80759abfdda59f12176a3dac8", size = 65320 }, + ] [[package]] name = "anyio" version = "4.3.0" source = { registry = "https://pypi.org/simple" } + resolution-markers = [ + "python_full_version < '3.11'", + ] dependencies = [ - { name = "idna" }, - { name = "sniffio" }, + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "idna", marker = "python_full_version < '3.11'" }, + { name = "sniffio", marker = "python_full_version < '3.11'" }, + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/db/4d/3970183622f0330d3c23d9b8a5f52e365e50381fd484d08e3285104333d3/anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6", size = 159642 } wheels = [ { url = "https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8", size = 85584 }, ] + [[package]] + name = "exceptiongroup" + version = "1.2.0" + source = { registry = "https://pypi.org/simple" } + sdist = { url = "https://files.pythonhosted.org/packages/8e/1c/beef724eaf5b01bb44b6338c8c3494eff7cab376fab4904cfbbc3585dc79/exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68", size = 26264 } + wheels = [ + { url = 
"https://files.pythonhosted.org/packages/b8/9a/5028fd52db10e600f1c4674441b968cf2ea4959085bfb5b99fb1250e5f68/exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14", size = 16210 }, + ] + [[package]] name = "idna" version = "3.6" @@ -10301,6 +10339,15 @@ fn lock_virtual() -> Result<()> { wheels = [ { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235 }, ] + + [[package]] + name = "typing-extensions" + version = "4.10.0" + source = { registry = "https://pypi.org/simple" } + sdist = { url = "https://files.pythonhosted.org/packages/16/3a/0d26ce356c7465a19c9ea8814b960f8a36c3b0d07c323176620b7b483e44/typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb", size = 77558 } + wheels = [ + { url = "https://files.pythonhosted.org/packages/f9/de/dc04a3ea60b22624b51c703a84bbe0184abcd1d0b9bc8074b5d6b7ab90bb/typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475", size = 33926 }, + ] "### ); }); @@ -10312,7 +10359,7 @@ fn lock_virtual() -> Result<()> { ----- stdout ----- ----- stderr ----- - Resolved 3 packages in [TIME] + Resolved 6 packages in [TIME] "###); // Re-run with `--offline`. We shouldn't need a network connection to validate an @@ -10323,7 +10370,7 @@ fn lock_virtual() -> Result<()> { ----- stdout ----- ----- stderr ----- - Resolved 3 packages in [TIME] + Resolved 6 packages in [TIME] "###); // Add `iniconfig`. 
@@ -10334,7 +10381,8 @@ fn lock_virtual() -> Result<()> { [tool.uv] dev-dependencies = [ - "anyio", + "anyio < 3 ; python_version >= '3.11'", + "anyio > 3 ; python_version < '3.11'", "iniconfig" ] "#, @@ -10346,7 +10394,7 @@ fn lock_virtual() -> Result<()> { ----- stdout ----- ----- stderr ----- - Resolved 4 packages in [TIME] + Resolved 7 packages in [TIME] Added iniconfig v2.0.0 "###); @@ -10356,12 +10404,115 @@ fn lock_virtual() -> Result<()> { ----- stdout ----- ----- stderr ----- - Prepared 4 packages in [TIME] - Installed 4 packages in [TIME] + Prepared 6 packages in [TIME] + Installed 6 packages in [TIME] + anyio==4.3.0 + + exceptiongroup==1.2.0 + idna==3.6 + iniconfig==2.0.0 + sniffio==1.3.1 + + typing-extensions==4.10.0 + "###); + + Ok(()) +} + +/// Lock a virtual project with a conditional dependency. +#[test] +fn lock_virtual_conditional() -> Result<()> { + let context = TestContext::new("3.12"); + + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str( + r#" + [tool.uv.workspace] + members = [] + + [tool.uv] + dev-dependencies = [ + "anyio > 3 ; sys_platform == 'linux'", + ] + "#, + )?; + + uv_snapshot!(context.filters(), context.lock(), @r###" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 3 packages in [TIME] + "###); + + let lock = fs_err::read_to_string(context.temp_dir.join("uv.lock")).unwrap(); + + insta::with_settings!({ + filters => context.filters(), + }, { + assert_snapshot!( + lock, @r###" + version = 1 + requires-python = ">=3.12" + + [options] + exclude-newer = "2024-03-25T00:00:00Z" + + [manifest] + requirements = [{ name = "anyio", marker = "sys_platform == 'linux'", specifier = ">3" }] + + [[package]] + name = "anyio" + version = "4.3.0" + source = { registry = "https://pypi.org/simple" } + dependencies = [ + { name = "idna" }, + { name = "sniffio" }, + ] + sdist = { url = 
"https://files.pythonhosted.org/packages/db/4d/3970183622f0330d3c23d9b8a5f52e365e50381fd484d08e3285104333d3/anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6", size = 159642 } + wheels = [ + { url = "https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8", size = 85584 }, + ] + + [[package]] + name = "idna" + version = "3.6" + source = { registry = "https://pypi.org/simple" } + sdist = { url = "https://files.pythonhosted.org/packages/bf/3f/ea4b9117521a1e9c50344b909be7886dd00a519552724809bb1f486986c2/idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca", size = 175426 } + wheels = [ + { url = "https://files.pythonhosted.org/packages/c2/e7/a82b05cf63a603df6e68d59ae6a68bf5064484a0718ea5033660af4b54a9/idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f", size = 61567 }, + ] + + [[package]] + name = "sniffio" + version = "1.3.1" + source = { registry = "https://pypi.org/simple" } + sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372 } + wheels = [ + { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235 }, + ] + "### + ); + }); + + // Re-run with `--locked`. + uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r###" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 3 packages in [TIME] + "###); + + // Re-run with `--offline`. 
We shouldn't need a network connection to validate an + // already-correct lockfile with immutable metadata. + uv_snapshot!(context.filters(), context.lock().arg("--locked").arg("--offline").arg("--no-cache"), @r###" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 3 packages in [TIME] "###); Ok(()) diff --git a/crates/uv/tests/pip_install.rs b/crates/uv/tests/pip_install.rs index ba96362e5016..df5858c1db6c 100644 --- a/crates/uv/tests/pip_install.rs +++ b/crates/uv/tests/pip_install.rs @@ -6338,3 +6338,51 @@ fn no_extension() { ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ "###); } + +/// Regression test for: +#[test] +fn switch_platform() -> Result<()> { + let context = TestContext::new("3.12"); + + let requirements_txt = context.temp_dir.child("requirements.txt"); + requirements_txt.write_str("iniconfig ; python_version == '3.12'")?; + + // Install `iniconfig`. + uv_snapshot!(context.pip_install() + .arg("-r") + .arg("requirements.txt"), @r###" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 1 package in [TIME] + Prepared 1 package in [TIME] + Installed 1 package in [TIME] + + iniconfig==2.0.0 + "###); + + requirements_txt + .write_str("iniconfig ; python_version == '3.12'\nanyio ; python_version < '3.12'")?; + + // Add `anyio`, though it's only installed because of `--python-version`. 
+ uv_snapshot!(context.pip_install() + .arg("-r") + .arg("requirements.txt") + .arg("--python-version") + .arg("3.11"), @r###" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 3 packages in [TIME] + Prepared 3 packages in [TIME] + Installed 3 packages in [TIME] + + anyio==4.3.0 + + idna==3.6 + + sniffio==1.3.1 + "###); + + Ok(()) +} diff --git a/crates/uv/tests/sync.rs b/crates/uv/tests/sync.rs index 2ea551facf0f..28752eead06b 100644 --- a/crates/uv/tests/sync.rs +++ b/crates/uv/tests/sync.rs @@ -357,7 +357,7 @@ fn virtual_workspace_dev_dependencies() -> Result<()> { pyproject_toml.write_str( r#" [tool.uv] - dev-dependencies = ["anyio>3"] + dev-dependencies = ["anyio>3", "requests[socks]", "typing-extensions ; sys_platform == ''"] [tool.uv.workspace] members = ["child"] @@ -390,38 +390,45 @@ fn virtual_workspace_dev_dependencies() -> Result<()> { let init = src.child("__init__.py"); init.touch()?; - // Syncing with `--no-dev` should omit `anyio`. + // Syncing with `--no-dev` should omit all dependencies except `iniconfig`. uv_snapshot!(context.filters(), context.sync().arg("--no-dev"), @r###" success: true exit_code: 0 ----- stdout ----- ----- stderr ----- - Resolved 5 packages in [TIME] + Resolved 11 packages in [TIME] Prepared 2 packages in [TIME] Installed 2 packages in [TIME] + child==0.1.0 (from file://[TEMP_DIR]/child) + iniconfig==2.0.0 "###); - // Syncing without `--no-dev` should include `anyio`. + // Syncing without `--no-dev` should include `anyio`, `requests`, `pysocks`, and their + // dependencies, but not `typing-extensions`. 
uv_snapshot!(context.filters(), context.sync(), @r###" success: true exit_code: 0 ----- stdout ----- ----- stderr ----- - Resolved 5 packages in [TIME] - Prepared 3 packages in [TIME] - Installed 3 packages in [TIME] + Resolved 11 packages in [TIME] + Prepared 8 packages in [TIME] + Installed 8 packages in [TIME] + anyio==4.3.0 + + certifi==2024.2.2 + + charset-normalizer==3.3.2 + idna==3.6 + + pysocks==1.7.1 + + requests==2.31.0 + sniffio==1.3.1 + + urllib3==2.2.1 "###); Ok(()) } +/// Use a `pip install` step to pre-install build dependencies for `--no-build-isolation`. #[test] fn sync_build_isolation() -> Result<()> { let context = TestContext::new("3.12"); @@ -433,50 +440,34 @@ fn sync_build_isolation() -> Result<()> { name = "project" version = "0.1.0" requires-python = ">=3.12" - dependencies = ["iniconfig @ https://files.pythonhosted.org/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz"] + dependencies = ["source-distribution @ https://files.pythonhosted.org/packages/10/1f/57aa4cce1b1abf6b433106676e15f9fa2c92ed2bd4cf77c3b50a9e9ac773/source_distribution-0.0.1.tar.gz"] "#, )?; - // Running `uv sync` should fail. 
- let filters = std::iter::once((r"exit code: 1", "exit status: 1")) - .chain(context.filters()) - .collect::>(); - uv_snapshot!(filters, context.sync().arg("--no-build-isolation"), @r###" - success: false - exit_code: 2 - ----- stdout ----- - - ----- stderr ----- - error: Failed to download and build: `iniconfig @ https://files.pythonhosted.org/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz` - Caused by: Build backend failed to determine metadata through `prepare_metadata_for_build_wheel` with exit status: 1 - --- stdout: - - --- stderr: - Traceback (most recent call last): - File "", line 8, in - ModuleNotFoundError: No module named 'hatchling' - --- - "###); + // Running `uv sync` should fail (but it could fail when building the root project, or when + // building `source-distribution`). + context + .sync() + .arg("--no-build-isolation") + .assert() + .failure(); - // Install `setuptools` (for the root project) plus `hatchling`, `hatch-vcs`, and `wheel` (for `iniconfig`). - uv_snapshot!(context.filters(), context.pip_install().arg("wheel").arg("setuptools").arg("hatchling").arg("hatch-vcs"), @r###" + // Install `setuptools` (for the root project) plus `hatchling` (for `source-distribution`). 
+ uv_snapshot!(context.filters(), context.pip_install().arg("wheel").arg("setuptools").arg("hatchling"), @r###" success: true exit_code: 0 ----- stdout ----- ----- stderr ----- - Resolved 10 packages in [TIME] - Prepared 10 packages in [TIME] - Installed 10 packages in [TIME] - + hatch-vcs==0.4.0 + Resolved 7 packages in [TIME] + Prepared 7 packages in [TIME] + Installed 7 packages in [TIME] + hatchling==1.22.4 + packaging==24.0 + pathspec==0.12.1 + pluggy==1.4.0 + setuptools==69.2.0 - + setuptools-scm==8.0.4 + trove-classifiers==2024.3.3 - + typing-extensions==4.10.0 + wheel==0.43.0 "###); @@ -489,9 +480,17 @@ fn sync_build_isolation() -> Result<()> { ----- stderr ----- Resolved 2 packages in [TIME] Prepared 2 packages in [TIME] + Uninstalled 7 packages in [TIME] Installed 2 packages in [TIME] - + iniconfig==2.0.0 (from https://files.pythonhosted.org/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz) + - hatchling==1.22.4 + - packaging==24.0 + - pathspec==0.12.1 + - pluggy==1.4.0 + project==0.1.0 (from file://[TEMP_DIR]/) + - setuptools==69.2.0 + + source-distribution==0.0.1 (from https://files.pythonhosted.org/packages/10/1f/57aa4cce1b1abf6b433106676e15f9fa2c92ed2bd4cf77c3b50a9e9ac773/source_distribution-0.0.1.tar.gz) + - trove-classifiers==2024.3.3 + - wheel==0.43.0 "###); assert!(context.temp_dir.child("uv.lock").exists()); @@ -499,16 +498,9 @@ fn sync_build_isolation() -> Result<()> { Ok(()) } -/// Avoid using incompatible versions for build dependencies that are also part of the resolved -/// environment. This is a very subtle issue, but: when locking, we don't enforce platform -/// compatibility. So, if we reuse the resolver state to install, and the install itself has to -/// preform a resolution (e.g., for the build dependencies of a source distribution), that -/// resolution may choose incompatible versions. 
-/// -/// The key property here is that there's a shared package between the build dependencies and the -/// project dependencies. +/// Use a `pip install` step to pre-install build dependencies for `--no-build-isolation-package`. #[test] -fn sync_reset_state() -> Result<()> { +fn sync_build_isolation_package() -> Result<()> { let context = TestContext::new("3.12"); let pyproject_toml = context.temp_dir.child("pyproject.toml"); @@ -518,46 +510,74 @@ fn sync_reset_state() -> Result<()> { name = "project" version = "0.1.0" requires-python = ">=3.12" - dependencies = ["pydantic-core"] + dependencies = [ + "source-distribution @ https://files.pythonhosted.org/packages/10/1f/57aa4cce1b1abf6b433106676e15f9fa2c92ed2bd4cf77c3b50a9e9ac773/source_distribution-0.0.1.tar.gz", + ] [build-system] - requires = ["setuptools", "pydantic-core"] - build-backend = "setuptools.build_meta:__legacy__" + requires = ["setuptools >= 40.9.0"] + build-backend = "setuptools.build_meta" "#, )?; - let setup_py = context.temp_dir.child("setup.py"); - setup_py.write_str(indoc::indoc! { r#" - from setuptools import setup - import pydantic_core + // Running `uv sync` should fail for iniconfig. 
+ let filters = std::iter::once((r"exit code: 1", "exit status: 1")) + .chain(context.filters()) + .collect::>(); + uv_snapshot!(filters, context.sync().arg("--no-build-isolation-package").arg("source-distribution"), @r###" + success: false + exit_code: 2 + ----- stdout ----- - setup( - name="project", - version="0.1.0", - packages=["project"], - install_requires=["pydantic-core"], - ) - "# })?; + ----- stderr ----- + Resolved 2 packages in [TIME] + error: Failed to prepare distributions + Caused by: Failed to fetch wheel: source-distribution @ https://files.pythonhosted.org/packages/10/1f/57aa4cce1b1abf6b433106676e15f9fa2c92ed2bd4cf77c3b50a9e9ac773/source_distribution-0.0.1.tar.gz + Caused by: Build backend failed to build wheel through `build_wheel()` with exit status: 1 + --- stdout: - let src = context.temp_dir.child("project"); - src.create_dir_all()?; + --- stderr: + Traceback (most recent call last): + File "", line 8, in + ModuleNotFoundError: No module named 'hatchling' + --- + "###); - let init = src.child("__init__.py"); - init.touch()?; + // Install `hatchling` for `source-distribution`. + uv_snapshot!(context.filters(), context.pip_install().arg("hatchling"), @r###" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 5 packages in [TIME] + Prepared 5 packages in [TIME] + Installed 5 packages in [TIME] + + hatchling==1.22.4 + + packaging==24.0 + + pathspec==0.12.1 + + pluggy==1.4.0 + + trove-classifiers==2024.3.3 + "###); // Running `uv sync` should succeed. 
- uv_snapshot!(context.filters(), context.sync(), @r###" + uv_snapshot!(context.filters(), context.sync().arg("--no-build-isolation-package").arg("source-distribution"), @r###" success: true exit_code: 0 ----- stdout ----- ----- stderr ----- - Resolved 3 packages in [TIME] - Prepared 3 packages in [TIME] - Installed 3 packages in [TIME] + Resolved 2 packages in [TIME] + Prepared 2 packages in [TIME] + Uninstalled 5 packages in [TIME] + Installed 2 packages in [TIME] + - hatchling==1.22.4 + - packaging==24.0 + - pathspec==0.12.1 + - pluggy==1.4.0 + project==0.1.0 (from file://[TEMP_DIR]/) - + pydantic-core==2.17.0 - + typing-extensions==4.10.0 + + source-distribution==0.0.1 (from https://files.pythonhosted.org/packages/10/1f/57aa4cce1b1abf6b433106676e15f9fa2c92ed2bd4cf77c3b50a9e9ac773/source_distribution-0.0.1.tar.gz) + - trove-classifiers==2024.3.3 "###); assert!(context.temp_dir.child("uv.lock").exists()); @@ -565,8 +585,9 @@ fn sync_reset_state() -> Result<()> { Ok(()) } +/// Use dedicated extra groups to install dependencies for `--no-build-isolation-package`. 
#[test] -fn sync_build_isolation_package() -> Result<()> { +fn sync_build_isolation_extra() -> Result<()> { let context = TestContext::new("3.12"); let pyproject_toml = context.temp_dir.child("pyproject.toml"); @@ -576,29 +597,55 @@ fn sync_build_isolation_package() -> Result<()> { name = "project" version = "0.1.0" requires-python = ">=3.12" - dependencies = [ - "iniconfig @ https://files.pythonhosted.org/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz", - ] + dependencies = [] + + [project.optional-dependencies] + build = ["hatchling"] + compile = ["source-distribution @ https://files.pythonhosted.org/packages/10/1f/57aa4cce1b1abf6b433106676e15f9fa2c92ed2bd4cf77c3b50a9e9ac773/source_distribution-0.0.1.tar.gz"] + [build-system] - requires = [ - "setuptools >= 40.9.0", - ] + requires = ["setuptools >= 40.9.0"] build-backend = "setuptools.build_meta" + + [tool.uv] + no-build-isolation-package = ["source-distribution"] "#, )?; - // Running `uv sync` should fail for iniconfig. + // Running `uv sync` should fail for the `compile` extra. 
let filters = std::iter::once((r"exit code: 1", "exit status: 1")) .chain(context.filters()) .collect::>(); - uv_snapshot!(filters, context.sync().arg("--no-build-isolation-package").arg("iniconfig"), @r###" + uv_snapshot!(&filters, context.sync().arg("--extra").arg("compile"), @r###" + success: false + exit_code: 2 + ----- stdout ----- + + ----- stderr ----- + Resolved 7 packages in [TIME] + error: Failed to prepare distributions + Caused by: Failed to fetch wheel: source-distribution @ https://files.pythonhosted.org/packages/10/1f/57aa4cce1b1abf6b433106676e15f9fa2c92ed2bd4cf77c3b50a9e9ac773/source_distribution-0.0.1.tar.gz + Caused by: Build backend failed to build wheel through `build_wheel()` with exit status: 1 + --- stdout: + + --- stderr: + Traceback (most recent call last): + File "", line 8, in + ModuleNotFoundError: No module named 'hatchling' + --- + "###); + + // Running `uv sync` with `--all-extras` should also fail. + uv_snapshot!(&filters, context.sync().arg("--all-extras"), @r###" success: false exit_code: 2 ----- stdout ----- ----- stderr ----- - error: Failed to download and build: `iniconfig @ https://files.pythonhosted.org/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz` - Caused by: Build backend failed to determine metadata through `prepare_metadata_for_build_wheel` with exit status: 1 + Resolved 7 packages in [TIME] + error: Failed to prepare distributions + Caused by: Failed to fetch wheel: source-distribution @ https://files.pythonhosted.org/packages/10/1f/57aa4cce1b1abf6b433106676e15f9fa2c92ed2bd4cf77c3b50a9e9ac773/source_distribution-0.0.1.tar.gz + Caused by: Build backend failed to build wheel through `build_wheel()` with exit status: 1 --- stdout: --- stderr: @@ -608,49 +655,107 @@ fn sync_build_isolation_package() -> Result<()> { --- "###); - // Install `hatchinling`, `hatch-vs` for iniconfig - uv_snapshot!(context.filters(), context.pip_install().arg("hatchling").arg("hatch-vcs"), 
@r###" + // Install the build dependencies. + uv_snapshot!(context.filters(), context.sync().arg("--extra").arg("build"), @r###" success: true exit_code: 0 ----- stdout ----- ----- stderr ----- - Resolved 9 packages in [TIME] - Prepared 9 packages in [TIME] - Installed 9 packages in [TIME] - + hatch-vcs==0.4.0 + Resolved 7 packages in [TIME] + Prepared 6 packages in [TIME] + Installed 6 packages in [TIME] + hatchling==1.22.4 + packaging==24.0 + pathspec==0.12.1 + pluggy==1.4.0 - + setuptools==69.2.0 - + setuptools-scm==8.0.4 + + project==0.1.0 (from file://[TEMP_DIR]/) + trove-classifiers==2024.3.3 - + typing-extensions==4.10.0 "###); - // Running `uv sync` should succeed. - uv_snapshot!(context.filters(), context.sync().arg("--no-build-isolation-package").arg("iniconfig"), @r###" + // Running `uv sync` for the `compile` extra should succeed, and remove the build dependencies. + uv_snapshot!(context.filters(), context.sync().arg("--extra").arg("compile"), @r###" success: true exit_code: 0 ----- stdout ----- ----- stderr ----- - Resolved 2 packages in [TIME] - Prepared 2 packages in [TIME] - Uninstalled 9 packages in [TIME] - Installed 2 packages in [TIME] - - hatch-vcs==0.4.0 + Resolved 7 packages in [TIME] + Prepared 1 package in [TIME] + Uninstalled 5 packages in [TIME] + Installed 1 package in [TIME] - hatchling==1.22.4 - + iniconfig==2.0.0 (from https://files.pythonhosted.org/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz) - packaging==24.0 - pathspec==0.12.1 - pluggy==1.4.0 - + project==0.1.0 (from file://[TEMP_DIR]/) - - setuptools==69.2.0 - - setuptools-scm==8.0.4 + + source-distribution==0.0.1 (from https://files.pythonhosted.org/packages/10/1f/57aa4cce1b1abf6b433106676e15f9fa2c92ed2bd4cf77c3b50a9e9ac773/source_distribution-0.0.1.tar.gz) - trove-classifiers==2024.3.3 - - typing-extensions==4.10.0 + "###); + + assert!(context.temp_dir.child("uv.lock").exists()); + + Ok(()) +} + +/// Avoid using incompatible 
versions for build dependencies that are also part of the resolved +/// environment. This is a very subtle issue, but: when locking, we don't enforce platform +/// compatibility. So, if we reuse the resolver state to install, and the install itself has to +/// perform a resolution (e.g., for the build dependencies of a source distribution), that +/// resolution may choose incompatible versions. +/// +/// The key property here is that there's a shared package between the build dependencies and the +/// project dependencies. +#[test] +fn sync_reset_state() -> Result<()> { + let context = TestContext::new("3.12"); + + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str( + r#" + [project] + name = "project" + version = "0.1.0" + requires-python = ">=3.12" + dependencies = ["pydantic-core"] + + [build-system] + requires = ["setuptools", "pydantic-core"] + build-backend = "setuptools.build_meta:__legacy__" + "#, + )?; + + let setup_py = context.temp_dir.child("setup.py"); + setup_py.write_str(indoc::indoc! { r#" + from setuptools import setup + import pydantic_core + + setup( + name="project", + version="0.1.0", + packages=["project"], + install_requires=["pydantic-core"], + ) + "# })?; + + let src = context.temp_dir.child("project"); + src.create_dir_all()?; + + let init = src.child("__init__.py"); + init.touch()?; + + // Running `uv sync` should succeed.
+ uv_snapshot!(context.filters(), context.sync(), @r###" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 3 packages in [TIME] + Prepared 3 packages in [TIME] + Installed 3 packages in [TIME] + + project==0.1.0 (from file://[TEMP_DIR]/) + + pydantic-core==2.17.0 + + typing-extensions==4.10.0 "###); assert!(context.temp_dir.child("uv.lock").exists()); diff --git a/crates/uv/tests/tool_run.rs b/crates/uv/tests/tool_run.rs index e763585531d7..2012edb9e3cd 100644 --- a/crates/uv/tests/tool_run.rs +++ b/crates/uv/tests/tool_run.rs @@ -870,7 +870,7 @@ fn tool_run_upgrade_warn() { pytest 8.1.1 ----- stderr ----- - warning: Tools cannot be upgraded via `uv tool run`; use `uv tool upgrade --all` to upgrade all installed tools, or `uv tool run package@latest` to run the latest version of a tool + warning: Tools cannot be upgraded via `uv tool run`; use `uv tool upgrade --all` to upgrade all installed tools, or `uv tool run package@latest` to run the latest version of a tool. Resolved [N] packages in [TIME] Prepared [N] packages in [TIME] Installed [N] packages in [TIME] @@ -879,6 +879,31 @@ fn tool_run_upgrade_warn() { + pluggy==1.4.0 + pytest==8.1.1 "###); + + uv_snapshot!(context.filters(), context.tool_run() + .arg("--upgrade") + .arg("--with") + .arg("typing-extensions") + .arg("pytest") + .arg("--version") + .env("UV_TOOL_DIR", tool_dir.as_os_str()) + .env("XDG_BIN_HOME", bin_dir.as_os_str()), @r###" + success: true + exit_code: 0 + ----- stdout ----- + pytest 8.1.1 + + ----- stderr ----- + warning: Tools cannot be upgraded via `uv tool run`; use `uv tool upgrade --all` to upgrade all installed tools, `uv tool run package@latest` to run the latest version of a tool, or `uv tool run --refresh package` to upgrade any `--with` dependencies. 
+ Resolved [N] packages in [TIME] + Prepared [N] packages in [TIME] + Installed [N] packages in [TIME] + + iniconfig==2.0.0 + + packaging==24.0 + + pluggy==1.4.0 + + pytest==8.1.1 + + typing-extensions==4.10.0 + "###); } /// If we fail to resolve the tool, we should include "tool" in the error message. diff --git a/crates/uv/tests/workflow.rs b/crates/uv/tests/workflow.rs index 8c02b06f24bb..fbff72598faa 100644 --- a/crates/uv/tests/workflow.rs +++ b/crates/uv/tests/workflow.rs @@ -69,7 +69,7 @@ fn packse_add_remove_one_package() -> Result<()> { { name = "pyyaml", specifier = ">=6.0.1" }, { name = "setuptools", specifier = ">=69.1.1" }, { name = "twine", specifier = ">=4.0.2" }, - + { name = "tzdata" }, + + { name = "tzdata", specifier = ">=2024.1" }, { name = "watchfiles", marker = "extra == 'serve'", specifier = ">=0.21.0" }, ] @@ -153,7 +153,7 @@ fn packse_add_remove_one_package() -> Result<()> { { name = "pyyaml", specifier = ">=6.0.1" }, { name = "setuptools", specifier = ">=69.1.1" }, { name = "twine", specifier = ">=4.0.2" }, - - { name = "tzdata" }, + - { name = "tzdata", specifier = ">=2024.1" }, { name = "watchfiles", marker = "extra == 'serve'", specifier = ">=0.21.0" }, ] @@ -303,7 +303,7 @@ fn packse_promote_transitive_to_direct_then_remove() -> Result<()> { { name = "pypiserver", marker = "extra == 'index'", specifier = ">=2.0.1" }, { name = "pyyaml", specifier = ">=6.0.1" }, { name = "setuptools", specifier = ">=69.1.1" }, - + { name = "sniffio" }, + + { name = "sniffio", specifier = ">=1.3.1" }, { name = "twine", specifier = ">=4.0.2" }, { name = "watchfiles", marker = "extra == 'serve'", specifier = ">=0.21.0" }, ] @@ -357,7 +357,7 @@ fn packse_promote_transitive_to_direct_then_remove() -> Result<()> { { name = "pypiserver", marker = "extra == 'index'", specifier = ">=2.0.1" }, { name = "pyyaml", specifier = ">=6.0.1" }, { name = "setuptools", specifier = ">=69.1.1" }, - - { name = "sniffio" }, + - { name = "sniffio", specifier = ">=1.3.1" }, { 
name = "twine", specifier = ">=4.0.2" }, { name = "watchfiles", marker = "extra == 'serve'", specifier = ">=0.21.0" }, ] @@ -476,7 +476,7 @@ fn jax_instability() -> Result<()> { -requires-dist = [{ name = "jax", specifier = "==0.4.17" }] +requires-dist = [ + { name = "jax", specifier = "==0.4.17" }, - + { name = "tzdata" }, + + { name = "tzdata", specifier = ">=2024.1" }, +] [[package]] @@ -528,7 +528,7 @@ fn jax_instability() -> Result<()> { [package.metadata] -requires-dist = [ - { name = "jax", specifier = "==0.4.17" }, - - { name = "tzdata" }, + - { name = "tzdata", specifier = ">=2024.1" }, -] +requires-dist = [{ name = "jax", specifier = "==0.4.17" }] diff --git a/docs/concepts/projects.md b/docs/concepts/projects.md index af87043b6fe6..e3bc0220b422 100644 --- a/docs/concepts/projects.md +++ b/docs/concepts/projects.md @@ -233,3 +233,100 @@ dependencies listed. If working in a project composed of many packages, see the [workspaces](./workspaces.md) documentation. + +## Build isolation + +By default, uv builds all packages in isolated virtual environments, as per +[PEP 517](https://peps.python.org/pep-0517/). Some packages are incompatible with build isolation, +be it intentionally (e.g., due to the use of heavy build dependencies, most commonly PyTorch) or +unintentionally (e.g., due to the use of legacy packaging setups). + +To disable build isolation for a specific dependency, add it to the `no-build-isolation-package` +list in your `pyproject.toml`: + +```toml title="pyproject.toml" +[project] +name = "project" +version = "0.1.0" +description = "..." +readme = "README.md" +requires-python = ">=3.12" +dependencies = ["cchardet"] + +[tool.uv] +no-build-isolation-package = ["cchardet"] +``` + +Installing packages without build isolation requires that the package's build dependencies are +installed in the project environment _prior_ to installing the package itself.
This can be achieved +by separating out the build dependencies and the packages that require them into distinct optional +groups. For example: + +```toml title="pyproject.toml" +[project] +name = "project" +version = "0.1.0" +description = "..." +readme = "README.md" +requires-python = ">=3.12" +dependencies = [] + +[project.optional-dependencies] +build = ["setuptools", "cython"] +compile = ["cchardet"] +``` + +Given the above, a user would first sync the `build` dependencies: + +```console +$ uv sync --extra build + + cython==3.0.11 + + foo==0.1.0 (from file:///Users/crmarsh/workspace/uv/foo) + + setuptools==73.0.1 +``` + +Followed by the `compile` dependencies: + +```console +$ uv sync --extra compile + + cchardet==2.1.7 + - cython==3.0.11 + - setuptools==73.0.1 +``` + +Note that `uv sync --extra compile` would, by default, uninstall the `cython` and `setuptools` +packages. To instead retain the build dependencies, include both extras in the second `uv sync` +invocation: + +```console +$ uv sync --extra build +$ uv sync --extra build --extra compile +``` + +Some packages, like `cchardet`, only require build dependencies for the _installation_ phase of +`uv sync`. Others, like `flash-attn`, require their build dependencies to be present even just to +resolve the project's lockfile during the _resolution_ phase. + +In such cases, the build dependencies must be installed prior to running any `uv lock` or `uv sync` +commands, using the lower-level `uv pip` API. For example, given: + +```toml title="pyproject.toml" +[project] +name = "project" +version = "0.1.0" +description = "..."
+readme = "README.md" +requires-python = ">=3.12" +dependencies = ["flash-attn"] + +[tool.uv] +no-build-isolation-package = ["flash-attn"] +``` + +You could run the following sequence of commands: + +```console +$ uv venv +$ uv pip install torch +$ uv sync +``` diff --git a/docs/getting-started/installation.md b/docs/getting-started/installation.md index 059dd51da89a..6f8cefe3ef15 100644 --- a/docs/getting-started/installation.md +++ b/docs/getting-started/installation.md @@ -35,11 +35,11 @@ By default, uv is installed to `~/.cargo/bin`. Request a specific version by including it in the URL: ```console title="macOS and Linux" -$ curl -LsSf https://astral.sh/uv/0.3.3/install.sh | sh +$ curl -LsSf https://astral.sh/uv/0.3.4/install.sh | sh ``` ```console title="Windows" -$ powershell -c "irm https://astral.sh/uv/0.3.3/install.ps1 | iex" +$ powershell -c "irm https://astral.sh/uv/0.3.4/install.ps1 | iex" ``` ### PyPI diff --git a/docs/guides/index.md b/docs/guides/index.md index 4f5ff70ae9ca..07a6f5473b00 100644 --- a/docs/guides/index.md +++ b/docs/guides/index.md @@ -13,6 +13,7 @@ Learn how to integrate uv with other software: - [Using with pre-commit](./integration/pre-commit.md) - [Using in GitHub Actions](./integration/github.md) - [Using with alternative package indexes](./integration/alternative-indexes.md) +- [Building a FastAPI application](./integration/fastapi.md) Or, explore the [concept documentation](../concepts/index.md) for comprehensive breakdown of each feature. diff --git a/docs/guides/integration/docker.md b/docs/guides/integration/docker.md index d8bdfce70c0f..5a0b957a026e 100644 --- a/docs/guides/integration/docker.md +++ b/docs/guides/integration/docker.md @@ -41,13 +41,13 @@ Note this requires `curl` to be available. 
In either case, it is best practice to pin to a specific uv version, e.g., with: ```dockerfile -COPY --from=ghcr.io/astral-sh/uv:0.3.3 /uv /bin/uv +COPY --from=ghcr.io/astral-sh/uv:0.3.4 /uv /bin/uv ``` Or, with the installer: ```dockerfile -ADD https://astral.sh/uv/0.3.3/install.sh /uv-installer.sh +ADD https://astral.sh/uv/0.3.4/install.sh /uv-installer.sh ``` ## Installing a project @@ -57,12 +57,17 @@ If you're using uv to manage your project, you can copy it into the image and in ```dockerfile title="Dockerfile" # Copy the project into the image ADD . /app -WORKDIR /app # Sync the project into a new environment, using the frozen lockfile +WORKDIR /app RUN uv sync --frozen ``` +!!! tip + + It is best practice to use [intermediate layers](#intermediate-layers) separating installation + of dependencies and the project itself to improve Docker image build times. + Once the project is installed, you can either _activate_ the virtual environment: ```dockerfile title="Dockerfile" @@ -155,9 +160,9 @@ RUN uv pip install -r requirements.txt ### Installing a project -When installing a project alongside requirements, it is prudent to separate copying the requirements -from the rest of the source code. This allows the dependencies of the project (which do not change -often) to be cached separately from the project itself (which changes very frequently). +When installing a project alongside requirements, it is best practice to separate copying the +requirements from the rest of the source code. This allows the dependencies of the project (which do +not change often) to be cached separately from the project itself (which changes very frequently). ```dockerfile title="Dockerfile" COPY pyproject.toml . @@ -173,7 +178,7 @@ RUN uv pip install -e . 
If uv isn't needed in the final image, the binary can be mounted in each invocation: ```dockerfile title="Dockerfile" -RUN --mount=from=uv,source=/uv,target=/bin/uv \ +RUN --mount=from=ghcr.io/astral-sh/uv,source=/uv,target=/bin/uv \ uv pip install --system ruff ``` @@ -228,17 +233,18 @@ a big time saver. FROM python:3.12-slim COPY --from=ghcr.io/astral-sh/uv:latest /uv /bin/uv +# Change the working directory to the `app` directory +WORKDIR /app + # Copy the lockfile and `pyproject.toml` into the image ADD uv.lock /app/uv.lock ADD pyproject.toml /app/pyproject.toml # Install dependencies -WORKDIR /app RUN uv sync --frozen --no-install-project # Copy the project into the image ADD . /app -WORKDIR /app # Sync the project RUN uv sync --frozen diff --git a/docs/guides/integration/github.md b/docs/guides/integration/github.md index d2af9537ee33..13b6b06e93b2 100644 --- a/docs/guides/integration/github.md +++ b/docs/guides/integration/github.md @@ -76,7 +76,7 @@ It is considered best practice to pin to a specific uv version, e.g., with: - name: Set up uv # Install a specific uv version using the installer - run: curl -LsSf https://astral.sh/uv/0.3.3/install.sh | sh + run: curl -LsSf https://astral.sh/uv/0.3.4/install.sh | sh ``` === "macOS" @@ -94,7 +94,7 @@ It is considered best practice to pin to a specific uv version, e.g., with: - name: Set up uv # Install a specific uv version using the installer - run: curl -LsSf https://astral.sh/uv/0.3.3/install.sh | sh + run: curl -LsSf https://astral.sh/uv/0.3.4/install.sh | sh ``` === "Windows" @@ -112,7 +112,7 @@ It is considered best practice to pin to a specific uv version, e.g., with: - name: Set up uv # Install a specific uv version using the installer - run: irm https://astral.sh/uv/0.3.3/install.ps1 | iex + run: irm https://astral.sh/uv/0.3.4/install.ps1 | iex shell: powershell ``` diff --git a/docs/guides/integration/pre-commit.md b/docs/guides/integration/pre-commit.md index 8ca5dea19e0d..d6cf18b873dc 100644 --- 
a/docs/guides/integration/pre-commit.md +++ b/docs/guides/integration/pre-commit.md @@ -8,7 +8,7 @@ To compile requirements via pre-commit, add the following to the `.pre-commit-co ```yaml title=".pre-commit-config.yaml" - repo: https://github.com/astral-sh/uv-pre-commit # uv version. - rev: 0.3.3 + rev: 0.3.4 hooks: # Compile requirements - id: pip-compile @@ -20,7 +20,7 @@ To compile alternative files, modify `args` and `files`: ```yaml title=".pre-commit-config.yaml" - repo: https://github.com/astral-sh/uv-pre-commit # uv version. - rev: 0.3.3 + rev: 0.3.4 hooks: # Compile requirements - id: pip-compile @@ -33,7 +33,7 @@ To run the hook over multiple files at the same time: ```yaml title=".pre-commit-config.yaml" - repo: https://github.com/astral-sh/uv-pre-commit # uv version. - rev: 0.3.3 + rev: 0.3.4 hooks: # Compile requirements - id: pip-compile diff --git a/docs/reference/cli.md b/docs/reference/cli.md index f93621a709a8..a6ed6c5251a3 100644 --- a/docs/reference/cli.md +++ b/docs/reference/cli.md @@ -515,6 +515,8 @@ uv add [OPTIONS] >
--dev

Add the requirements as development dependencies

+
--editable

Add the requirements as editable

+
--exclude-newer exclude-newer

Limit candidate packages to those that were uploaded prior to the given date.

Accepts both RFC 3339 timestamps (e.g., 2006-12-02T02:07:43Z) and local dates in the same format (e.g., 2006-12-02) in your system’s configured time zone.

@@ -626,8 +628,6 @@ uv add [OPTIONS] >

Normally, configuration files are discovered in the current directory, parent directories, or user configuration directories.

-
--no-editable

Don’t add the requirements as editables

-
--no-index

Ignore the registry index (e.g., PyPI), instead relying on direct URL dependencies and those provided via --find-links

--no-progress

Hide all progress outputs.

@@ -1098,9 +1098,7 @@ uv sync [OPTIONS]
--inexact

Do not remove extraneous packages present in the environment.

-

When enabled, uv will make the minimum necessary changes to satisfy the requirements.

- -

By default, syncing will remove any extraneous packages from the environment, unless --no-build-isolation is enabled, in which case extra packages are considered necessary for builds.

+

When enabled, uv will make the minimum necessary changes to satisfy the requirements. By default, syncing will remove any extraneous packages from the environment

--keyring-provider keyring-provider

Attempt to use keyring for authentication for index URLs.

diff --git a/pyproject.toml b/pyproject.toml index 8610fb9da6ee..bdd6a57f5d8e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "maturin" [project] name = "uv" -version = "0.3.3" +version = "0.3.4" description = "An extremely fast Python package and project manager, written in Rust." authors = [{ name = "Astral Software Inc.", email = "hey@astral.sh" }] requires-python = ">=3.8"