Respect dynamic extras in uv lock and uv sync #8091

Merged: 3 commits, Oct 10, 2024
150 changes: 150 additions & 0 deletions crates/uv-requirements/src/extras.rs
@@ -0,0 +1,150 @@
use std::sync::Arc;

use futures::{stream::FuturesOrdered, TryStreamExt};
use thiserror::Error;

use uv_distribution::{DistributionDatabase, Reporter};
use uv_distribution_types::{BuiltDist, Dist, DistributionMetadata, SourceDist};
use uv_pypi_types::Requirement;
use uv_resolver::{InMemoryIndex, MetadataResponse};
use uv_types::{BuildContext, HashStrategy};

use crate::required_dist;

#[derive(Debug, Error)]
pub enum ExtrasError {
#[error("Failed to download: `{0}`")]
Download(BuiltDist, #[source] uv_distribution::Error),
#[error("Failed to download and build: `{0}`")]
DownloadAndBuild(SourceDist, #[source] uv_distribution::Error),
#[error("Failed to build: `{0}`")]
Build(SourceDist, #[source] uv_distribution::Error),
#[error(transparent)]
UnsupportedUrl(#[from] uv_distribution_types::Error),
}

/// A resolver to expand the requested extras for a set of requirements to include all defined
/// extras.
pub struct ExtrasResolver<'a, Context: BuildContext> {
/// Whether to check hashes for distributions.
hasher: &'a HashStrategy,
/// The in-memory index for resolving dependencies.
index: &'a InMemoryIndex,
/// The database for fetching and building distributions.
database: DistributionDatabase<'a, Context>,
}

impl<'a, Context: BuildContext> ExtrasResolver<'a, Context> {
/// Instantiate a new [`ExtrasResolver`] for a given set of requirements.
pub fn new(
hasher: &'a HashStrategy,
index: &'a InMemoryIndex,
database: DistributionDatabase<'a, Context>,
) -> Self {
Self {
hasher,
index,
database,
}
}

/// Set the [`Reporter`] to use for this resolver.
#[must_use]
pub fn with_reporter(self, reporter: impl Reporter + 'static) -> Self {
Self {
database: self.database.with_reporter(reporter),
..self
}
}

/// Expand the set of available extras for a given set of requirements.
pub async fn resolve(
self,
requirements: impl Iterator<Item = Requirement>,
) -> Result<Vec<Requirement>, ExtrasError> {
let Self {
hasher,
index,
database,
} = self;
requirements
.map(|requirement| async {
Self::resolve_requirement(requirement, hasher, index, &database)
.await
.map(Requirement::from)
})
.collect::<FuturesOrdered<_>>()
.try_collect()
.await
}

/// Expand the set of available extras for a given [`Requirement`].
async fn resolve_requirement(
requirement: Requirement,
hasher: &HashStrategy,
index: &InMemoryIndex,
database: &DistributionDatabase<'a, Context>,
) -> Result<Requirement, ExtrasError> {
// Determine whether the requirement represents a local distribution and convert to a
// buildable distribution.
let Some(dist) = required_dist(&requirement)? else {
return Ok(requirement);
};

// Fetch the metadata for the distribution.
let metadata = {
let id = dist.version_id();
if let Some(archive) = index
.distributions()
.get(&id)
.as_deref()
.and_then(|response| {
if let MetadataResponse::Found(archive, ..) = response {
Some(archive)
} else {
None
}
})
{
// If the metadata is already in the index, return it.
archive.metadata.clone()
} else {
// Run the PEP 517 build process to extract metadata from the source distribution.
let archive = database
.get_or_build_wheel_metadata(&dist, hasher.get(&dist))
.await
.map_err(|err| match &dist {
Dist::Built(built) => ExtrasError::Download(built.clone(), err),
Dist::Source(source) => {
if source.is_local() {
ExtrasError::Build(source.clone(), err)
} else {
ExtrasError::DownloadAndBuild(source.clone(), err)
}
}
})?;

let metadata = archive.metadata.clone();

// Insert the metadata into the index.
index
.distributions()
.done(id, Arc::new(MetadataResponse::Found(archive)));

metadata
}
};

// Sort extras for consistency.
let extras = {
let mut extras = metadata.provides_extras;
extras.sort_unstable();
extras
};

Ok(Requirement {
extras,
..requirement
})
}
}
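
For orientation, here is a minimal sketch of how the new ExtrasResolver might be driven by a caller that already holds a hash policy, an in-memory index, and a distribution database. The helper function and its name are hypothetical and not part of this change; only the ExtrasResolver API shown above is taken from the diff.

use uv_distribution::DistributionDatabase;
use uv_pypi_types::Requirement;
use uv_requirements::{ExtrasError, ExtrasResolver};
use uv_resolver::InMemoryIndex;
use uv_types::{BuildContext, HashStrategy};

// Hypothetical helper: expand each requirement's `extras` to everything the
// corresponding distribution declares, using the resolver added in this PR.
async fn expand_extras<'a, T: BuildContext>(
    hasher: &'a HashStrategy,
    index: &'a InMemoryIndex,
    database: DistributionDatabase<'a, T>,
    requirements: Vec<Requirement>,
) -> Result<Vec<Requirement>, ExtrasError> {
    ExtrasResolver::new(hasher, index, database)
        .resolve(requirements.into_iter())
        .await
}

Each returned Requirement then carries the distribution's full, sorted list of provides_extras, which is what lets uv lock and uv sync see extras that are declared dynamically.
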
63 changes: 63 additions & 0 deletions crates/uv-requirements/src/lib.rs
@@ -1,12 +1,75 @@
use uv_distribution_types::{Dist, GitSourceDist, SourceDist};
use uv_git::GitUrl;
use uv_pypi_types::{Requirement, RequirementSource};

pub use crate::extras::*;
pub use crate::lookahead::*;
pub use crate::source_tree::*;
pub use crate::sources::*;
pub use crate::specification::*;
pub use crate::unnamed::*;

mod extras;
mod lookahead;
mod source_tree;
mod sources;
mod specification;
mod unnamed;
pub mod upgrade;

/// Convert a [`Requirement`] into a [`Dist`], if it is a direct URL.
pub(crate) fn required_dist(
requirement: &Requirement,
) -> Result<Option<Dist>, uv_distribution_types::Error> {
Ok(Some(match &requirement.source {
RequirementSource::Registry { .. } => return Ok(None),
RequirementSource::Url {
subdirectory,
location,
ext,
url,
} => Dist::from_http_url(
requirement.name.clone(),
url.clone(),
location.clone(),
subdirectory.clone(),
*ext,
)?,
RequirementSource::Git {
repository,
reference,
precise,
subdirectory,
url,
} => {
let git_url = if let Some(precise) = precise {
GitUrl::from_commit(repository.clone(), reference.clone(), *precise)
} else {
GitUrl::from_reference(repository.clone(), reference.clone())
};
Dist::Source(SourceDist::Git(GitSourceDist {
name: requirement.name.clone(),
git: Box::new(git_url),
subdirectory: subdirectory.clone(),
url: url.clone(),
}))
}
RequirementSource::Path {
install_path,
ext,
url,
} => Dist::from_file_url(requirement.name.clone(), url.clone(), install_path, *ext)?,
RequirementSource::Directory {
install_path,
r#virtual,
url,
editable,
} => Dist::from_directory_url(
requirement.name.clone(),
url.clone(),
install_path,
*editable,
*r#virtual,
)?,
}))
}
59 changes: 2 additions & 57 deletions crates/uv-requirements/src/lookahead.rs
@@ -6,10 +6,10 @@ use rustc_hash::FxHashSet;
use thiserror::Error;
use tracing::trace;

use crate::required_dist;
use uv_configuration::{Constraints, Overrides};
use uv_distribution::{DistributionDatabase, Reporter};
use uv_distribution_types::{BuiltDist, Dist, DistributionMetadata, GitSourceDist, SourceDist};
use uv_git::GitUrl;
use uv_distribution_types::{BuiltDist, Dist, DistributionMetadata, SourceDist};
use uv_normalize::GroupName;
use uv_pypi_types::{Requirement, RequirementSource};
use uv_resolver::{InMemoryIndex, MetadataResponse, ResolverMarkers};
@@ -245,58 +245,3 @@ impl<'a, Context: BuildContext> LookaheadResolver<'a, Context> {
)))
}
}

/// Convert a [`Requirement`] into a [`Dist`], if it is a direct URL.
fn required_dist(requirement: &Requirement) -> Result<Option<Dist>, uv_distribution_types::Error> {
Ok(Some(match &requirement.source {
RequirementSource::Registry { .. } => return Ok(None),
RequirementSource::Url {
subdirectory,
location,
ext,
url,
} => Dist::from_http_url(
requirement.name.clone(),
url.clone(),
location.clone(),
subdirectory.clone(),
*ext,
)?,
RequirementSource::Git {
repository,
reference,
precise,
subdirectory,
url,
} => {
let git_url = if let Some(precise) = precise {
GitUrl::from_commit(repository.clone(), reference.clone(), *precise)
} else {
GitUrl::from_reference(repository.clone(), reference.clone())
};
Dist::Source(SourceDist::Git(GitSourceDist {
name: requirement.name.clone(),
git: Box::new(git_url),
subdirectory: subdirectory.clone(),
url: url.clone(),
}))
}
RequirementSource::Path {
install_path,
ext,
url,
} => Dist::from_file_url(requirement.name.clone(), url.clone(), install_path, *ext)?,
RequirementSource::Directory {
install_path,
r#virtual,
url,
editable,
} => Dist::from_directory_url(
requirement.name.clone(),
url.clone(),
install_path,
*editable,
*r#virtual,
)?,
}))
}
15 changes: 6 additions & 9 deletions crates/uv-requirements/src/source_tree.rs
@@ -1,5 +1,5 @@
use std::borrow::Cow;
use std::path::{Path, PathBuf};
use std::path::Path;
use std::sync::Arc;

use anyhow::{Context, Result};
@@ -34,8 +34,6 @@ pub struct SourceTreeResolution {
/// Used, e.g., to determine the input requirements when a user specifies a `pyproject.toml`
/// file, which may require running PEP 517 build hooks to extract metadata.
pub struct SourceTreeResolver<'a, Context: BuildContext> {
/// The requirements for the project.
source_trees: Vec<PathBuf>,
/// The extras to include when resolving requirements.
extras: &'a ExtrasSpecification,
/// The hash policy to enforce.
@@ -49,14 +47,12 @@ pub struct SourceTreeResolver<'a, Context: BuildContext> {
impl<'a, Context: BuildContext> SourceTreeResolver<'a, Context> {
/// Instantiate a new [`SourceTreeResolver`] for a given set of `source_trees`.
pub fn new(
source_trees: Vec<PathBuf>,
extras: &'a ExtrasSpecification,
hasher: &'a HashStrategy,
index: &'a InMemoryIndex,
database: DistributionDatabase<'a, Context>,
) -> Self {
Self {
source_trees,
extras,
hasher,
index,
@@ -74,10 +70,11 @@ impl<'a, Context: BuildContext> SourceTreeResolver<'a, Context> {
}

/// Resolve the requirements from the provided source trees.
pub async fn resolve(self) -> Result<Vec<SourceTreeResolution>> {
let resolutions: Vec<_> = self
.source_trees
.iter()
pub async fn resolve(
self,
source_trees: impl Iterator<Item = &Path>,
) -> Result<Vec<SourceTreeResolution>> {
let resolutions: Vec<_> = source_trees
.map(|source_tree| async { self.resolve_source_tree(source_tree).await })
.collect::<FuturesOrdered<_>>()
.try_collect()
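
The change to source_tree.rs follows the same pattern as the other resolvers in this PR: the inputs move from the constructor into resolve, so a single resolver can be built up front and reused for whichever source trees the caller discovers later. A rough sketch of the new calling convention follows; the helper is hypothetical, and the uv_configuration::ExtrasSpecification import path is an assumption rather than something shown in this diff.

use std::path::PathBuf;

use anyhow::Result;
use uv_configuration::ExtrasSpecification;
use uv_distribution::DistributionDatabase;
use uv_requirements::{SourceTreeResolution, SourceTreeResolver};
use uv_resolver::InMemoryIndex;
use uv_types::{BuildContext, HashStrategy};

// Hypothetical call site: the source trees are handed to `resolve` directly
// instead of being captured by the constructor.
async fn resolve_source_trees<'a, T: BuildContext>(
    extras: &'a ExtrasSpecification,
    hasher: &'a HashStrategy,
    index: &'a InMemoryIndex,
    database: DistributionDatabase<'a, T>,
    source_trees: &[PathBuf],
) -> Result<Vec<SourceTreeResolution>> {
    SourceTreeResolver::new(extras, hasher, index, database)
        .resolve(source_trees.iter().map(PathBuf::as_path))
        .await
}
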
13 changes: 5 additions & 8 deletions crates/uv-requirements/src/unnamed.rs
@@ -36,8 +36,6 @@ pub enum NamedRequirementsError {

/// Like [`RequirementsSpecification`], but with concrete names for all requirements.
pub struct NamedRequirementsResolver<'a, Context: BuildContext> {
/// The requirements for the project.
requirements: Vec<UnnamedRequirement<VerbatimParsedUrl>>,
/// Whether to check hashes for distributions.
hasher: &'a HashStrategy,
/// The in-memory index for resolving dependencies.
@@ -47,15 +45,13 @@ pub struct NamedRequirementsResolver<'a, Context: BuildContext> {
}

impl<'a, Context: BuildContext> NamedRequirementsResolver<'a, Context> {
/// Instantiate a new [`NamedRequirementsResolver`] for a given set of requirements.
/// Instantiate a new [`NamedRequirementsResolver`].
pub fn new(
requirements: Vec<UnnamedRequirement<VerbatimParsedUrl>>,
hasher: &'a HashStrategy,
index: &'a InMemoryIndex,
database: DistributionDatabase<'a, Context>,
) -> Self {
Self {
requirements,
hasher,
index,
database,
@@ -72,15 +68,16 @@ impl<'a, Context: BuildContext> NamedRequirementsResolver<'a, Context> {
}

/// Resolve any unnamed requirements in the specification.
pub async fn resolve(self) -> Result<Vec<Requirement>, NamedRequirementsError> {
pub async fn resolve(
self,
requirements: impl Iterator<Item = UnnamedRequirement<VerbatimParsedUrl>>,
) -> Result<Vec<Requirement>, NamedRequirementsError> {
let Self {
requirements,
hasher,
index,
database,
} = self;
requirements
.into_iter()
.map(|requirement| async {
Self::resolve_requirement(requirement, hasher, index, &database)
.await
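
The unnamed.rs change above is the parallel refactor for NamedRequirementsResolver. As a hedged sketch of the new call shape (the UnnamedRequirement and VerbatimParsedUrl import paths are assumed from the wider uv codebase, not shown in this diff):

use uv_distribution::DistributionDatabase;
use uv_pep508::UnnamedRequirement;
use uv_pypi_types::{Requirement, VerbatimParsedUrl};
use uv_requirements::{NamedRequirementsError, NamedRequirementsResolver};
use uv_resolver::InMemoryIndex;
use uv_types::{BuildContext, HashStrategy};

// Hypothetical call site: unnamed requirements are now passed to `resolve`
// rather than stored on the resolver at construction time.
async fn name_requirements<'a, T: BuildContext>(
    hasher: &'a HashStrategy,
    index: &'a InMemoryIndex,
    database: DistributionDatabase<'a, T>,
    unnamed: Vec<UnnamedRequirement<VerbatimParsedUrl>>,
) -> Result<Vec<Requirement>, NamedRequirementsError> {
    NamedRequirementsResolver::new(hasher, index, database)
        .resolve(unnamed.into_iter())
        .await
}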