From de8b64ee59ce15776e627b9178cb5ee5d7257a1f Mon Sep 17 00:00:00 2001 From: Joe Neeman Date: Thu, 15 Aug 2024 11:22:57 +0700 Subject: [PATCH 1/5] Add publish workspace tests Co-authored-by: Tor Hovland <55164+torhovland@users.noreply.github.com> --- tests/testsuite/publish.rs | 409 ++++++++++++++++++++++++++++++++++++- 1 file changed, 408 insertions(+), 1 deletion(-) diff --git a/tests/testsuite/publish.rs b/tests/testsuite/publish.rs index b6a29741a4b..9783ade0f1f 100644 --- a/tests/testsuite/publish.rs +++ b/tests/testsuite/publish.rs @@ -4,10 +4,10 @@ use std::fs; use std::sync::{Arc, Mutex}; use cargo_test_support::git::{self, repo}; -use cargo_test_support::paths; use cargo_test_support::prelude::*; use cargo_test_support::registry::{self, Package, RegistryBuilder, Response}; use cargo_test_support::{basic_manifest, project, publish, str}; +use cargo_test_support::{paths, Project}; const CLEAN_FOO_JSON: &str = r#" { @@ -3232,6 +3232,87 @@ You may press ctrl-c to skip waiting; the crate should be available shortly. .run(); } +#[cargo_test] +fn timeout_waiting_for_dependency_publish() { + // Publish doesn't happen within the timeout window. + let registry = registry::RegistryBuilder::new() + .http_api() + .delayed_index_update(20) + .build(); + + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + members = ["main", "other", "dep"] + "#, + ) + .file( + "main/Cargo.toml", + r#" + [package] + name = "main" + version = "0.0.1" + edition = "2015" + authors = [] + license = "MIT" + description = "foo" + + [dependencies] + dep = { version = "0.0.1", path = "../dep" } + "#, + ) + .file("main/src/main.rs", "fn main() {}") + .file( + "other/Cargo.toml", + r#" + [package] + name = "other" + version = "0.0.1" + edition = "2015" + authors = [] + license = "MIT" + description = "foo" + + [dependencies] + dep = { version = "0.0.1", path = "../dep" } + "#, + ) + .file("other/src/main.rs", "fn main() {}") + .file( + "dep/Cargo.toml", + r#" + [package] + name = "dep" + version = "0.0.1" + edition = "2015" + authors = [] + license = "MIT" + description = "foo" + "#, + ) + .file("dep/src/lib.rs", "") + .file( + ".cargo/config.toml", + r#" + [publish] + timeout = 2 + "#, + ) + .build(); + + p.cargo("publish --no-verify -Zpublish-timeout -Zpackage-workspace") + .replace_crates_io(registry.index_url()) + .masquerade_as_nightly_cargo(&["publish-timeout", "package-workspace"]) + .with_status(101) + .with_stderr_data(str![[r#" +[ERROR] the `-p` argument must be specified to select a single package to publish + +"#]]) + .run(); +} + #[cargo_test] fn wait_for_git_publish() { // Slow publish to an index with a git index. @@ -3417,3 +3498,329 @@ You may press ctrl-c to skip waiting; the crate should be available shortly. validate_upload_foo(); } + +// A workspace with three projects that depend on one another (level1 -> level2 -> level3). +// level1 is a binary package, to test lockfile generation. +fn workspace_with_local_deps_project() -> Project { + project() + .file( + "Cargo.toml", + r#" + [workspace] + members = ["level1", "level2", "level3"] + + [workspace.dependencies] + level2 = { path = "level2", version = "0.0.1" } + "# + ) + .file( + "level1/Cargo.toml", + r#" + [package] + name = "level1" + version = "0.0.1" + edition = "2015" + authors = [] + license = "MIT" + description = "level1" + repository = "bar" + + [dependencies] + # Let one dependency also specify features, for the added test coverage when generating package files. 
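+                # (The feature list should be preserved when this workspace dependency is rewritten for the published Cargo.toml.)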
+ level2 = { workspace = true, features = ["foo"] } + "#, + ) + .file("level1/src/main.rs", "fn main() {}") + .file( + "level2/Cargo.toml", + r#" + [package] + name = "level2" + version = "0.0.1" + edition = "2015" + authors = [] + license = "MIT" + description = "level2" + repository = "bar" + + [features] + foo = [] + + [dependencies] + level3 = { path = "../level3", version = "0.0.1" } + "# + ) + .file("level2/src/lib.rs", "") + .file( + "level3/Cargo.toml", + r#" + [package] + name = "level3" + version = "0.0.1" + edition = "2015" + authors = [] + license = "MIT" + description = "level3" + repository = "bar" + "#, + ) + .file("level3/src/lib.rs", "") + .build() +} + +#[cargo_test] +fn workspace_with_local_deps() { + let crates_io = registry::init(); + let p = workspace_with_local_deps_project(); + + p.cargo("publish") + .replace_crates_io(crates_io.index_url()) + .with_status(101) + .with_stderr_data(str![[r#" +[ERROR] the `-p` argument must be specified to select a single package to publish + +"#]]) + .run(); +} + +#[cargo_test] +fn workspace_with_local_deps_nightly() { + let registry = RegistryBuilder::new().http_api().http_index().build(); + let p = workspace_with_local_deps_project(); + + p.cargo("publish -Zpackage-workspace") + .masquerade_as_nightly_cargo(&["package-workspace"]) + .with_status(101) + .replace_crates_io(registry.index_url()) + .with_stderr_data(str![[r#" +[ERROR] the `-p` argument must be specified to select a single package to publish + +"#]]) + .run(); +} + +#[cargo_test] +fn workspace_parallel() { + let registry = RegistryBuilder::new().http_api().http_index().build(); + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + members = ["a", "b", "c"] + "#, + ) + .file( + "a/Cargo.toml", + r#" + [package] + name = "a" + version = "0.0.1" + edition = "2015" + authors = [] + license = "MIT" + description = "a" + repository = "bar" + "#, + ) + .file("a/src/lib.rs", "") + .file( + "b/Cargo.toml", + r#" + [package] + name = "b" + version = "0.0.1" + edition = "2015" + authors = [] + license = "MIT" + description = "b" + repository = "bar" + "#, + ) + .file("b/src/lib.rs", "") + .file( + "c/Cargo.toml", + r#" + [package] + name = "c" + version = "0.0.1" + edition = "2015" + authors = [] + license = "MIT" + description = "c" + repository = "bar" + + [dependencies] + a = { path = "../a", version = "0.0.1" } + b = { path = "../b", version = "0.0.1" } + "#, + ) + .file("c/src/lib.rs", "") + .build(); + + p.cargo("publish -Zpackage-workspace") + .masquerade_as_nightly_cargo(&["package-workspace"]) + .replace_crates_io(registry.index_url()) + .with_status(101) + .with_stderr_data( + str![[r#" +[ERROR] the `-p` argument must be specified to select a single package to publish + +"#]] + .unordered(), + ) + .run(); +} + +#[cargo_test] +fn workspace_missing_dependency() { + let registry = RegistryBuilder::new().http_api().http_index().build(); + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + members = ["a", "b"] + "#, + ) + .file( + "a/Cargo.toml", + r#" + [package] + name = "a" + version = "0.0.1" + edition = "2015" + authors = [] + license = "MIT" + description = "a" + repository = "bar" + "#, + ) + .file("a/src/lib.rs", "") + .file( + "b/Cargo.toml", + r#" + [package] + name = "b" + version = "0.0.1" + edition = "2015" + authors = [] + license = "MIT" + description = "b" + repository = "bar" + + [dependencies] + a = { path = "../a", version = "0.0.1" } + "#, + ) + .file("b/src/lib.rs", "") + .build(); + + p.cargo("publish -Zpackage-workspace -p b") 
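+        // `b` depends on `a`, which is not on the registry yet, so verifying the `b` tarball is expected to fail.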
+ .masquerade_as_nightly_cargo(&["package-workspace"]) + .replace_crates_io(registry.index_url()) + .with_status(101) + .with_stderr_data(str![[r#" +[UPDATING] crates.io index +[PACKAGING] b v0.0.1 ([ROOT]/foo/b) +[PACKAGED] 3 files, [FILE_SIZE]B ([FILE_SIZE]B compressed) +[VERIFYING] b v0.0.1 ([ROOT]/foo/b) +[UPDATING] crates.io index +[ERROR] no matching package named `a` found +location searched: registry `crates-io` +required by package `b v0.0.1 ([ROOT]/foo/target/package/b-0.0.1)` + +"#]]) + .run(); + + p.cargo("publish -Zpackage-workspace -p a") + .masquerade_as_nightly_cargo(&["package-workspace"]) + .replace_crates_io(registry.index_url()) + .with_stderr_data(str![[r#" +[UPDATING] crates.io index +[PACKAGING] a v0.0.1 ([ROOT]/foo/a) +[PACKAGED] 3 files, [FILE_SIZE]B ([FILE_SIZE]B compressed) +[VERIFYING] a v0.0.1 ([ROOT]/foo/a) +[COMPILING] a v0.0.1 ([ROOT]/foo/target/package/a-0.0.1) +[FINISHED] `dev` profile [unoptimized + debuginfo] target(s) in [ELAPSED]s +[UPLOADING] a v0.0.1 ([ROOT]/foo/a) +[UPLOADED] a v0.0.1 to registry `crates-io` +[NOTE] waiting for `a v0.0.1` to be available at registry `crates-io`. +You may press ctrl-c to skip waiting; the crate should be available shortly. +[PUBLISHED] a v0.0.1 at registry `crates-io` + +"#]]) + .run(); + + // Publishing the whole workspace now will fail, as `a` is already published. + p.cargo("publish -Zpackage-workspace") + .masquerade_as_nightly_cargo(&["package-workspace"]) + .replace_crates_io(registry.index_url()) + .with_status(101) + .with_stderr_data(str![[r#" +[ERROR] the `-p` argument must be specified to select a single package to publish + +"#]]) + .run(); +} + +#[cargo_test] +fn one_unpublishable_package() { + let _alt_reg = registry::RegistryBuilder::new() + .http_api() + .http_index() + .alternative() + .build(); + + let p = project() + .file( + "Cargo.toml", + r#" + [workspace] + members = ["dep", "main"] + "#, + ) + .file( + "main/Cargo.toml", + r#" + [package] + name = "main" + version = "0.0.1" + edition = "2015" + authors = [] + license = "MIT" + description = "main" + repository = "bar" + publish = false + + [dependencies] + dep = { path = "../dep", version = "0.1.0", registry = "alternative" } + "#, + ) + .file("main/src/main.rs", "fn main() {}") + .file( + "dep/Cargo.toml", + r#" + [package] + name = "dep" + version = "0.1.0" + edition = "2015" + authors = [] + license = "MIT" + description = "dep" + repository = "bar" + publish = ["alternative"] + "#, + ) + .file("dep/src/lib.rs", "") + .build(); + + p.cargo("publish -Zpackage-workspace") + .masquerade_as_nightly_cargo(&["package-workspace"]) + .with_status(101) + .with_stderr_data(str![[r#" +[ERROR] the `-p` argument must be specified to select a single package to publish + +"#]]) + .run(); +} From faee9a7e9114443f1f486c65580dc1b912e78b85 Mon Sep 17 00:00:00 2001 From: Joe Neeman Date: Tue, 3 Sep 2024 15:12:54 +0700 Subject: [PATCH 2/5] Use the shared infer_registry function for publishing --- src/cargo/ops/registry/publish.rs | 97 +++++++++++++++++-------------- tests/testsuite/publish.rs | 4 +- 2 files changed, 55 insertions(+), 46 deletions(-) diff --git a/src/cargo/ops/registry/publish.rs b/src/cargo/ops/registry/publish.rs index 7fd3acfbec7..4d8353ba2f6 100644 --- a/src/cargo/ops/registry/publish.rs +++ b/src/cargo/ops/registry/publish.rs @@ -77,57 +77,34 @@ pub fn publish(ws: &Workspace<'_>, opts: &PublishOpts<'_>) -> CargoResult<()> { // Double check. It is safe theoretically, unless logic has updated. 
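     // (Guaranteed by the `specs.len() > 1` bail-out and the single-member spec query above.)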
assert_eq!(pkgs.len(), 1); - let (pkg, cli_features) = pkgs.pop().unwrap(); - - let mut publish_registry = match opts.reg_or_index.as_ref() { - Some(RegistryOrIndex::Registry(registry)) => Some(registry.clone()), - _ => None, - }; - if let Some(ref allowed_registries) = *pkg.publish() { - if publish_registry.is_none() && allowed_registries.len() == 1 { - // If there is only one allowed registry, push to that one directly, - // even though there is no registry specified in the command. - let default_registry = &allowed_registries[0]; - if default_registry != CRATES_IO_REGISTRY { - // Don't change the registry for crates.io and don't warn the user. - // crates.io will be defaulted even without this. - opts.gctx.shell().note(&format!( - "found `{}` as only allowed registry. Publishing to it automatically.", - default_registry - ))?; - publish_registry = Some(default_registry.clone()); + let just_pkgs: Vec<_> = pkgs.iter().map(|p| p.0).collect(); + let reg_or_index = match opts.reg_or_index.clone() { + Some(r) => { + validate_registry(&just_pkgs, Some(&r))?; + Some(r) + } + None => { + let reg = super::infer_registry(&just_pkgs)?; + validate_registry(&just_pkgs, reg.as_ref())?; + if let Some(RegistryOrIndex::Registry(ref registry)) = ® { + if registry != CRATES_IO_REGISTRY { + // Don't warn for crates.io. + opts.gctx.shell().note(&format!( + "found `{}` as only allowed registry. Publishing to it automatically.", + registry + ))?; + } } + reg } + }; - let reg_name = publish_registry - .clone() - .unwrap_or_else(|| CRATES_IO_REGISTRY.to_string()); - if allowed_registries.is_empty() { - bail!( - "`{}` cannot be published.\n\ - `package.publish` must be set to `true` or a non-empty list in Cargo.toml to publish.", - pkg.name(), - ); - } else if !allowed_registries.contains(®_name) { - bail!( - "`{}` cannot be published.\n\ - The registry `{}` is not listed in the `package.publish` value in Cargo.toml.", - pkg.name(), - reg_name - ); - } - } // This is only used to confirm that we can create a token before we build the package. // This causes the credential provider to be called an extra time, but keeps the same order of errors. 
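+    // (The token actually used for the upload is requested later with `Operation::Publish`, once the tarball checksum is known.)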
+ let (pkg, cli_features) = pkgs.pop().unwrap(); let ver = pkg.version().to_string(); let operation = Operation::Read; - let reg_or_index = match opts.reg_or_index.clone() { - Some(RegistryOrIndex::Registry(_)) | None => { - publish_registry.map(RegistryOrIndex::Registry) - } - val => val, - }; let source_ids = super::get_source_id(opts.gctx, reg_or_index.as_ref())?; let mut registry = super::registry( opts.gctx, @@ -498,3 +475,37 @@ fn transmit( Ok(()) } + +fn validate_registry(pkgs: &[&Package], reg_or_index: Option<&RegistryOrIndex>) -> CargoResult<()> { + for pkg in pkgs { + if pkg.publish() == &Some(Vec::new()) { + bail!( + "`{}` cannot be published.\n\ + `package.publish` must be set to `true` or a non-empty list in Cargo.toml to publish.", + pkg.name(), + ); + } + } + + let reg_name = match reg_or_index { + Some(RegistryOrIndex::Registry(r)) => Some(r.as_str()), + None => Some(CRATES_IO_REGISTRY), + Some(RegistryOrIndex::Index(_)) => None, + }; + if let Some(reg_name) = reg_name { + for pkg in pkgs { + if let Some(allowed) = pkg.publish().as_ref() { + if !allowed.iter().any(|a| a == reg_name) { + bail!( + "`{}` cannot be published.\n\ + The registry `{}` is not listed in the `package.publish` value in Cargo.toml.", + pkg.name(), + reg_name + ); + } + } + } + } + + Ok(()) +} diff --git a/tests/testsuite/publish.rs b/tests/testsuite/publish.rs index 9783ade0f1f..5dec9435743 100644 --- a/tests/testsuite/publish.rs +++ b/tests/testsuite/publish.rs @@ -961,7 +961,6 @@ fn publish_failed_with_index_and_only_allowed_registry() { .arg(registry.index_url().as_str()) .with_status(101) .with_stderr_data(str![[r#" -[NOTE] found `alternative` as only allowed registry. Publishing to it automatically. [ERROR] command-line argument --index requires --token to be specified "#]]) @@ -995,8 +994,7 @@ fn publish_fail_with_no_registry_specified() { p.cargo("publish") .with_status(101) .with_stderr_data(str![[r#" -[ERROR] `foo` cannot be published. -The registry `crates-io` is not listed in the `package.publish` value in Cargo.toml. +[ERROR] --registry is required to disambiguate between "alternative" or "test" registries "#]]) .run(); From 3121a7175d6b743d82a1365b503838154255185b Mon Sep 17 00:00:00 2001 From: Joe Neeman Date: Tue, 3 Sep 2024 15:56:22 +0700 Subject: [PATCH 3/5] Don't bind Operation::Read --- src/cargo/ops/registry/publish.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/cargo/ops/registry/publish.rs b/src/cargo/ops/registry/publish.rs index 4d8353ba2f6..82c10ac2444 100644 --- a/src/cargo/ops/registry/publish.rs +++ b/src/cargo/ops/registry/publish.rs @@ -103,7 +103,6 @@ pub fn publish(ws: &Workspace<'_>, opts: &PublishOpts<'_>) -> CargoResult<()> { // This causes the credential provider to be called an extra time, but keeps the same order of errors. 
let (pkg, cli_features) = pkgs.pop().unwrap(); let ver = pkg.version().to_string(); - let operation = Operation::Read; let source_ids = super::get_source_id(opts.gctx, reg_or_index.as_ref())?; let mut registry = super::registry( @@ -112,7 +111,7 @@ pub fn publish(ws: &Workspace<'_>, opts: &PublishOpts<'_>) -> CargoResult<()> { opts.token.as_ref().map(Secret::as_deref), reg_or_index.as_ref(), true, - Some(operation).filter(|_| !opts.dry_run), + Some(Operation::Read).filter(|_| !opts.dry_run), )?; verify_dependencies(pkg, ®istry, source_ids.original)?; From 431d84a6bf73cb680bfe9986d53772c24f43a5d8 Mon Sep 17 00:00:00 2001 From: Joe Neeman Date: Tue, 3 Sep 2024 16:25:19 +0700 Subject: [PATCH 4/5] Factor out poll_one_package --- src/cargo/ops/registry/publish.rs | 34 ++++++++++++++++++++----------- 1 file changed, 22 insertions(+), 12 deletions(-) diff --git a/src/cargo/ops/registry/publish.rs b/src/cargo/ops/registry/publish.rs index 82c10ac2444..aac2f220f7f 100644 --- a/src/cargo/ops/registry/publish.rs +++ b/src/cargo/ops/registry/publish.rs @@ -21,6 +21,7 @@ use crate::core::manifest::ManifestMetadata; use crate::core::resolver::CliFeatures; use crate::core::Dependency; use crate::core::Package; +use crate::core::PackageId; use crate::core::PackageIdSpecQuery; use crate::core::SourceId; use crate::core::Workspace; @@ -28,6 +29,7 @@ use crate::ops; use crate::ops::PackageOpts; use crate::ops::Packages; use crate::sources::source::QueryKind; +use crate::sources::source::Source; use crate::sources::SourceConfigMap; use crate::sources::CRATES_IO_REGISTRY; use crate::util::auth; @@ -189,14 +191,12 @@ fn wait_for_publish( pkg: &Package, timeout: Duration, ) -> CargoResult<()> { - let version_req = format!("={}", pkg.version()); let mut source = SourceConfigMap::empty(gctx)?.load(registry_src, &HashSet::new())?; // Disable the source's built-in progress bars. Repeatedly showing a bunch // of independent progress bars can be a little confusing. There is an // overall progress bar managed here. source.set_quiet(true); let source_description = source.source_id().to_string(); - let query = Dependency::parse(pkg.name(), Some(&version_req), registry_src)?; let now = std::time::Instant::now(); let sleep_time = Duration::from_secs(1); @@ -223,16 +223,7 @@ fn wait_for_publish( // multiple times gctx.updated_sources().remove(&source.replaced_source_id()); source.invalidate_cache(); - let summaries = loop { - // Exact to avoid returning all for path/git - match source.query_vec(&query, QueryKind::Exact) { - std::task::Poll::Ready(res) => { - break res?; - } - std::task::Poll::Pending => source.block_until_ready()?, - } - }; - if !summaries.is_empty() { + if poll_one_package(registry_src, &pkg.package_id(), &mut source)? 
{ break true; } } @@ -262,6 +253,25 @@ fn wait_for_publish( Ok(()) } +fn poll_one_package( + registry_src: SourceId, + pkg_id: &PackageId, + source: &mut dyn Source, +) -> CargoResult { + let version_req = format!("={}", pkg_id.version()); + let query = Dependency::parse(pkg_id.name(), Some(&version_req), registry_src)?; + let summaries = loop { + // Exact to avoid returning all for path/git + match source.query_vec(&query, QueryKind::Exact) { + std::task::Poll::Ready(res) => { + break res?; + } + std::task::Poll::Pending => source.block_until_ready()?, + } + }; + Ok(!summaries.is_empty()) +} + fn verify_dependencies( pkg: &Package, registry: &Registry, From a016e5f5c2fab4edaa504e4928ca6bd747db0fbe Mon Sep 17 00:00:00 2001 From: Joe Neeman Date: Tue, 3 Sep 2024 16:59:17 +0700 Subject: [PATCH 5/5] Multi-package publishing Co-authored-by: Tor Hovland <55164+torhovland@users.noreply.github.com> Co-authored-by: Ed Page --- src/cargo/ops/cargo_package.rs | 74 ++--- src/cargo/ops/mod.rs | 2 +- src/cargo/ops/registry/publish.rs | 372 ++++++++++++++++++++------ src/cargo/util/graph.rs | 27 ++ tests/testsuite/credential_process.rs | 4 +- tests/testsuite/publish.rs | 128 ++++++++- 6 files changed, 483 insertions(+), 124 deletions(-) diff --git a/src/cargo/ops/cargo_package.rs b/src/cargo/ops/cargo_package.rs index 7a13c849704..04708584027 100644 --- a/src/cargo/ops/cargo_package.rs +++ b/src/cargo/ops/cargo_package.rs @@ -93,30 +93,6 @@ struct GitVcsInfo { dirty: bool, } -/// Packages a single package in a workspace, returning the resulting tar file. -/// -/// # Panics -/// Panics if `opts.list` is true. In that case you probably don't want to -/// actually build the package tarball; you should just make and print the list -/// of files. (We don't currently provide a public API for that, but see how -/// [`package`] does it.) -pub fn package_one( - ws: &Workspace<'_>, - pkg: &Package, - opts: &PackageOpts<'_>, -) -> CargoResult { - assert!(!opts.list); - - let ar_files = prepare_archive(ws, pkg, opts)?; - let tarball = create_package(ws, pkg, ar_files, None)?; - - if opts.verify { - run_verify(ws, pkg, &tarball, None, opts)?; - } - - Ok(tarball) -} - // Builds a tarball and places it in the output directory. fn create_package( ws: &Workspace<'_>, @@ -193,6 +169,34 @@ pub fn package(ws: &Workspace<'_>, opts: &PackageOpts<'_>) -> CargoResult, + opts: &PackageOpts<'_>, + pkgs: Vec<(&Package, CliFeatures)>, +) -> CargoResult> { + let output = do_package(ws, opts, pkgs)?; + + Ok(local_deps(output.into_iter().map( + |(pkg, opts, tarball)| (pkg, (opts.cli_features, tarball)), + ))) +} + +fn do_package<'a>( + ws: &Workspace<'_>, + opts: &PackageOpts<'a>, + pkgs: Vec<(&Package, CliFeatures)>, +) -> CargoResult, FileLock)>> { if ws .lock_root() .as_path_unlocked() @@ -264,7 +268,7 @@ pub fn package(ws: &Workspace<'_>, opts: &PackageOpts<'_>) -> CargoResult, - graph: Graph, +pub(crate) struct LocalDependencies { + pub packages: HashMap, + pub graph: Graph, } -impl LocalDependencies { - fn sort(&self) -> Vec<(Package, CliFeatures)> { +impl LocalDependencies { + pub fn sort(&self) -> Vec<(Package, T)> { self.graph .sort() .into_iter() @@ -335,9 +338,10 @@ impl LocalDependencies { /// ignoring dev dependencies. /// /// We assume that the packages all belong to this workspace. 
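+/// The payload attached to each package (the `T` in `(Package, T)`) is passed
+/// through unchanged; the publish path uses it to keep each package's
+/// `CliFeatures` and packaged tarball together.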
-fn local_deps(packages: impl Iterator) -> LocalDependencies { - let packages: HashMap = - packages.map(|pkg| (pkg.0.package_id(), pkg)).collect(); +fn local_deps(packages: impl Iterator) -> LocalDependencies { + let packages: HashMap = packages + .map(|(pkg, payload)| (pkg.package_id(), (pkg, payload))) + .collect(); // Dependencies have source ids but not package ids. We draw an edge // whenever a dependency's source id matches one of our packages. This is @@ -349,7 +353,7 @@ fn local_deps(packages: impl Iterator) -> LocalDe .collect(); let mut graph = Graph::new(); - for (pkg, _features) in packages.values() { + for (pkg, _payload) in packages.values() { graph.add(pkg.package_id()); for dep in pkg.dependencies() { // Ignore local dev-dependencies because they aren't needed for intra-workspace diff --git a/src/cargo/ops/mod.rs b/src/cargo/ops/mod.rs index 0cbf8f92203..2075c9ccc4c 100644 --- a/src/cargo/ops/mod.rs +++ b/src/cargo/ops/mod.rs @@ -10,7 +10,7 @@ pub use self::cargo_fetch::{fetch, FetchOptions}; pub use self::cargo_install::{install, install_list}; pub use self::cargo_new::{init, new, NewOptions, NewProjectKind, VersionControl}; pub use self::cargo_output_metadata::{output_metadata, ExportInfo, OutputMetadataOptions}; -pub use self::cargo_package::{check_yanked, package, package_one, PackageOpts}; +pub use self::cargo_package::{check_yanked, package, PackageOpts}; pub use self::cargo_pkgid::pkgid; pub use self::cargo_read_manifest::read_package; pub use self::cargo_run::run; diff --git a/src/cargo/ops/registry/publish.rs b/src/cargo/ops/registry/publish.rs index aac2f220f7f..527b0a3fb9d 100644 --- a/src/cargo/ops/registry/publish.rs +++ b/src/cargo/ops/registry/publish.rs @@ -3,8 +3,12 @@ //! [1]: https://doc.rust-lang.org/nightly/cargo/reference/registry-web-api.html#publish use std::collections::BTreeMap; +use std::collections::BTreeSet; +use std::collections::HashMap; use std::collections::HashSet; use std::fs::File; +use std::io::Seek; +use std::io::SeekFrom; use std::time::Duration; use anyhow::bail; @@ -15,6 +19,7 @@ use cargo_util::paths; use crates_io::NewCrate; use crates_io::NewCrateDependency; use crates_io::Registry; +use itertools::Itertools; use crate::core::dependency::DepKind; use crate::core::manifest::ManifestMetadata; @@ -28,6 +33,7 @@ use crate::core::Workspace; use crate::ops; use crate::ops::PackageOpts; use crate::ops::Packages; +use crate::ops::RegistryOrIndex; use crate::sources::source::QueryKind; use crate::sources::source::Source; use crate::sources::SourceConfigMap; @@ -36,13 +42,13 @@ use crate::util::auth; use crate::util::cache_lock::CacheLockMode; use crate::util::context::JobsConfig; use crate::util::toml::prepare_for_publish; +use crate::util::Graph; use crate::util::Progress; use crate::util::ProgressStyle; use crate::CargoResult; use crate::GlobalContext; use super::super::check_dep_has_version; -use super::RegistryOrIndex; pub struct PublishOpts<'gctx> { pub gctx: &'gctx GlobalContext, @@ -59,16 +65,23 @@ pub struct PublishOpts<'gctx> { } pub fn publish(ws: &Workspace<'_>, opts: &PublishOpts<'_>) -> CargoResult<()> { + let multi_package_mode = ws.gctx().cli_unstable().package_workspace; let specs = opts.to_publish.to_package_id_specs(ws)?; - if specs.len() > 1 { - bail!("the `-p` argument must be specified to select a single package to publish") + + if !multi_package_mode { + if specs.len() > 1 { + bail!("the `-p` argument must be specified to select a single package to publish") + } + if Packages::Default == opts.to_publish && 
ws.is_virtual() { + bail!("the `-p` argument must be specified in the root of a virtual workspace") + } } - if Packages::Default == opts.to_publish && ws.is_virtual() { - bail!("the `-p` argument must be specified in the root of a virtual workspace") + + let member_ids: Vec<_> = ws.members().map(|p| p.package_id()).collect(); + // Check that the specs match members. + for spec in &specs { + spec.query(member_ids.clone())?; } - let member_ids = ws.members().map(|p| p.package_id()); - // Check that the spec matches exactly one member. - specs[0].query(member_ids)?; let mut pkgs = ws.members_with_features(&specs, &opts.cli_features)?; // In `members_with_features_old`, it will add "current" package (determined by the cwd) // So we need filter @@ -76,8 +89,6 @@ pub fn publish(ws: &Workspace<'_>, opts: &PublishOpts<'_>) -> CargoResult<()> { .into_iter() .filter(|(m, _)| specs.iter().any(|spec| spec.matches(m.package_id()))) .collect(); - // Double check. It is safe theoretically, unless logic has updated. - assert_eq!(pkgs.len(), 1); let just_pkgs: Vec<_> = pkgs.iter().map(|p| p.0).collect(); let reg_or_index = match opts.reg_or_index.clone() { @@ -103,9 +114,6 @@ pub fn publish(ws: &Workspace<'_>, opts: &PublishOpts<'_>) -> CargoResult<()> { // This is only used to confirm that we can create a token before we build the package. // This causes the credential provider to be called an extra time, but keeps the same order of errors. - let (pkg, cli_features) = pkgs.pop().unwrap(); - let ver = pkg.version().to_string(); - let source_ids = super::get_source_id(opts.gctx, reg_or_index.as_ref())?; let mut registry = super::registry( opts.gctx, @@ -115,82 +123,147 @@ pub fn publish(ws: &Workspace<'_>, opts: &PublishOpts<'_>) -> CargoResult<()> { true, Some(Operation::Read).filter(|_| !opts.dry_run), )?; - verify_dependencies(pkg, ®istry, source_ids.original)?; - // Prepare a tarball, with a non-suppressible warning if metadata - // is missing since this is being put online. - let tarball = ops::package_one( + // Validate all the packages before publishing any of them. + for (pkg, _) in &pkgs { + verify_dependencies(pkg, ®istry, source_ids.original)?; + } + + let pkg_dep_graph = ops::cargo_package::package_with_dep_graph( ws, - pkg, &PackageOpts { gctx: opts.gctx, verify: opts.verify, list: false, check_metadata: true, allow_dirty: opts.allow_dirty, - to_package: Packages::Default, + // `package_with_dep_graph` ignores this field in favor of + // the already-resolved list of packages + to_package: ops::Packages::Default, targets: opts.targets.clone(), jobs: opts.jobs.clone(), keep_going: opts.keep_going, - cli_features, - reg_or_index, + cli_features: opts.cli_features.clone(), + reg_or_index: reg_or_index.clone(), }, + pkgs, )?; - if !opts.dry_run { - let hash = cargo_util::Sha256::new() - .update_file(tarball.file())? - .finish_hex(); - let operation = Operation::Publish { - name: pkg.name().as_str(), - vers: &ver, - cksum: &hash, - }; - registry.set_token(Some(auth::auth_token( - &opts.gctx, - &source_ids.original, - None, - operation, - vec![], - false, - )?)); - } + let mut plan = PublishPlan::new(&pkg_dep_graph.graph); + // May contains packages from previous rounds as `wait_for_any_publish_confirmation` returns + // after it confirms any packages, not all packages, requiring us to handle the rest in the next + // iteration. + // + // As a side effect, any given package's "effective" timeout may be much larger. 
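+    // For example, if `a` and `b` were uploaded together but only `a` was
+    // confirmed before `wait_for_any_publish_confirmation` returned, `b` stays
+    // in this set and is polled again on the next pass through the loop.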
+ let mut to_confirm = BTreeSet::new(); + + while !plan.is_empty() { + // There might not be any ready package, if the previous confirmations + // didn't unlock a new one. For example, if `c` depends on `a` and + // `b`, and we uploaded `a` and `b` but only confirmed `a`, then on + // the following pass through the outer loop nothing will be ready for + // upload. + for pkg_id in plan.take_ready() { + let (pkg, (_features, tarball)) = &pkg_dep_graph.packages[&pkg_id]; + opts.gctx.shell().status("Uploading", pkg.package_id())?; + + if !opts.dry_run { + let ver = pkg.version().to_string(); + + tarball.file().seek(SeekFrom::Start(0))?; + let hash = cargo_util::Sha256::new() + .update_file(tarball.file())? + .finish_hex(); + let operation = Operation::Publish { + name: pkg.name().as_str(), + vers: &ver, + cksum: &hash, + }; + registry.set_token(Some(auth::auth_token( + &opts.gctx, + &source_ids.original, + None, + operation, + vec![], + false, + )?)); + } - opts.gctx - .shell() - .status("Uploading", pkg.package_id().to_string())?; - transmit( - opts.gctx, - ws, - pkg, - tarball.file(), - &mut registry, - source_ids.original, - opts.dry_run, - )?; - if !opts.dry_run { - const DEFAULT_TIMEOUT: u64 = 60; - let timeout = if opts.gctx.cli_unstable().publish_timeout { - let timeout: Option = opts.gctx.get("publish.timeout")?; - timeout.unwrap_or(DEFAULT_TIMEOUT) + transmit( + opts.gctx, + ws, + pkg, + tarball.file(), + &mut registry, + source_ids.original, + opts.dry_run, + )?; + to_confirm.insert(pkg_id); + + if !opts.dry_run { + // Short does not include the registry name. + let short_pkg_description = format!("{} v{}", pkg.name(), pkg.version()); + let source_description = source_ids.original.to_string(); + ws.gctx().shell().status( + "Uploaded", + format!("{short_pkg_description} to {source_description}"), + )?; + } + } + + let confirmed = if opts.dry_run { + to_confirm.clone() } else { - DEFAULT_TIMEOUT + const DEFAULT_TIMEOUT: u64 = 60; + let timeout = if opts.gctx.cli_unstable().publish_timeout { + let timeout: Option = opts.gctx.get("publish.timeout")?; + timeout.unwrap_or(DEFAULT_TIMEOUT) + } else { + DEFAULT_TIMEOUT + }; + if 0 < timeout { + let timeout = Duration::from_secs(timeout); + wait_for_any_publish_confirmation( + opts.gctx, + source_ids.original, + &to_confirm, + timeout, + )? + } else { + BTreeSet::new() + } }; - if 0 < timeout { - let timeout = Duration::from_secs(timeout); - wait_for_publish(opts.gctx, source_ids.original, pkg, timeout)?; + if confirmed.is_empty() { + // If nothing finished, it means we timed out while waiting for confirmation. + // We're going to exit, but first we need to check: have we uploaded everything? + if plan.is_empty() { + // It's ok that we timed out, because nothing was waiting on dependencies to + // be confirmed. + break; + } else { + let failed_list = package_list(plan.iter(), "and"); + bail!("unable to publish {failed_list} due to time out while waiting for published dependencies to be available."); + } + } + for id in &confirmed { + to_confirm.remove(id); } + plan.mark_confirmed(confirmed); } Ok(()) } -fn wait_for_publish( +/// Poll the registry for any packages that are ready for use. +/// +/// Returns the subset of `pkgs` that are ready for use. +/// This will be an empty set if we timed out before confirming anything. 
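+/// The timeout bounds this single call, counted from when polling starts; as
+/// soon as at least one package is confirmed the function returns, and the
+/// caller is expected to call it again for any remaining packages.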
+fn wait_for_any_publish_confirmation( gctx: &GlobalContext, registry_src: SourceId, - pkg: &Package, + pkgs: &BTreeSet, timeout: Duration, -) -> CargoResult<()> { +) -> CargoResult> { let mut source = SourceConfigMap::empty(gctx)?.load(registry_src, &HashSet::new())?; // Disable the source's built-in progress bars. Repeatedly showing a bunch // of independent progress bars can be a little confusing. There is an @@ -202,18 +275,14 @@ fn wait_for_publish( let sleep_time = Duration::from_secs(1); let max = timeout.as_secs() as usize; // Short does not include the registry name. - let short_pkg_description = format!("{} v{}", pkg.name(), pkg.version()); - gctx.shell().status( - "Uploaded", - format!("{short_pkg_description} to {source_description}"), - )?; + let short_pkg_descriptions = package_list(pkgs.iter().copied(), "or"); gctx.shell().note(format!( - "waiting for `{short_pkg_description}` to be available at {source_description}.\n\ + "waiting for {short_pkg_descriptions} to be available at {source_description}.\n\ You may press ctrl-c to skip waiting; the crate should be available shortly." ))?; let mut progress = Progress::with_style("Waiting", ProgressStyle::Ratio, gctx); progress.tick_now(0, max, "")?; - let is_available = loop { + let available = loop { { let _lock = gctx.acquire_package_cache_lock(CacheLockMode::DownloadExclusive)?; // Force re-fetching the source @@ -223,34 +292,48 @@ fn wait_for_publish( // multiple times gctx.updated_sources().remove(&source.replaced_source_id()); source.invalidate_cache(); - if poll_one_package(registry_src, &pkg.package_id(), &mut source)? { - break true; + let mut available = BTreeSet::new(); + for pkg in pkgs { + if poll_one_package(registry_src, pkg, &mut source)? { + available.insert(*pkg); + } + } + + // As soon as any package is available, break this loop so we can see if another + // one can be uploaded. + if !available.is_empty() { + break available; } } let elapsed = now.elapsed(); if timeout < elapsed { gctx.shell().warn(format!( - "timed out waiting for `{short_pkg_description}` to be available in {source_description}", + "timed out waiting for {short_pkg_descriptions} to be available in {source_description}", ))?; gctx.shell().note( "the registry may have a backlog that is delaying making the \ crate available. The crate should be available soon.", )?; - break false; + break BTreeSet::new(); } progress.tick_now(elapsed.as_secs() as usize, max, "")?; std::thread::sleep(sleep_time); }; - if is_available { + if !available.is_empty() { + let short_pkg_description = available + .iter() + .map(|pkg| format!("{} v{}", pkg.name(), pkg.version())) + .sorted() + .join(", "); gctx.shell().status( "Published", format!("{short_pkg_description} at {source_description}"), )?; } - Ok(()) + Ok(available) } fn poll_one_package( @@ -485,6 +568,87 @@ fn transmit( Ok(()) } +/// State for tracking dependencies during upload. +struct PublishPlan { + /// Graph of publishable packages where the edges are `(dependency -> dependent)` + dependents: Graph, + /// The weight of a package is the number of unpublished dependencies it has. + dependencies_count: HashMap, +} + +impl PublishPlan { + /// Given a package dependency graph, creates a `PublishPlan` for tracking state. 
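+    /// A rough sketch of the intended call pattern (assuming a dependency
+    /// graph in which `c` depends on `a` and `b`, i.e. edges `c -> a` and `c -> b`):
+    ///
+    /// ```ignore
+    /// let mut plan = PublishPlan::new(&graph);
+    /// let ready = plan.take_ready(); // {a, b}: nothing they depend on is unpublished
+    /// plan.mark_confirmed(vec![a]);
+    /// assert!(plan.take_ready().is_empty()); // `c` still waits on `b`
+    /// plan.mark_confirmed(vec![b]);
+    /// let ready = plan.take_ready(); // now {c}
+    /// ```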
+ fn new(graph: &Graph) -> Self { + let dependents = graph.reversed(); + + let dependencies_count: HashMap<_, _> = dependents + .iter() + .map(|id| (*id, graph.edges(id).count())) + .collect(); + Self { + dependents, + dependencies_count, + } + } + + fn iter(&self) -> impl Iterator + '_ { + self.dependencies_count.iter().map(|(id, _)| *id) + } + + fn is_empty(&self) -> bool { + self.dependencies_count.is_empty() + } + + /// Returns the set of packages that are ready for publishing (i.e. have no outstanding dependencies). + /// + /// These will not be returned in future calls. + fn take_ready(&mut self) -> BTreeSet { + let ready: BTreeSet<_> = self + .dependencies_count + .iter() + .filter_map(|(id, weight)| (*weight == 0).then_some(*id)) + .collect(); + for pkg in &ready { + self.dependencies_count.remove(pkg); + } + ready + } + + /// Packages confirmed to be available in the registry, potentially allowing additional + /// packages to be "ready". + fn mark_confirmed(&mut self, published: impl IntoIterator) { + for id in published { + for (dependent_id, _) in self.dependents.edges(&id) { + if let Some(weight) = self.dependencies_count.get_mut(dependent_id) { + *weight = weight.saturating_sub(1); + } + } + } + } +} + +/// Format a collection of packages as a list +/// +/// e.g. "foo v0.1.0, bar v0.2.0, and baz v0.3.0". +/// +/// Note: the final separator (e.g. "and" in the previous example) can be chosen. +fn package_list(pkgs: impl IntoIterator, final_sep: &str) -> String { + let mut names: Vec<_> = pkgs + .into_iter() + .map(|pkg| format!("`{} v{}`", pkg.name(), pkg.version())) + .collect(); + names.sort(); + + match &names[..] { + [] => String::new(), + [a] => a.clone(), + [a, b] => format!("{a} {final_sep} {b}"), + [names @ .., last] => { + format!("{}, {final_sep} {last}", names.join(", ")) + } + } +} + fn validate_registry(pkgs: &[&Package], reg_or_index: Option<&RegistryOrIndex>) -> CargoResult<()> { for pkg in pkgs { if pkg.publish() == &Some(Vec::new()) { @@ -518,3 +682,59 @@ fn validate_registry(pkgs: &[&Package], reg_or_index: Option<&RegistryOrIndex>) Ok(()) } + +#[cfg(test)] +mod tests { + use crate::{ + core::{PackageId, SourceId}, + sources::CRATES_IO_INDEX, + util::{Graph, IntoUrl}, + }; + + use super::PublishPlan; + + fn pkg_id(name: &str) -> PackageId { + let loc = CRATES_IO_INDEX.into_url().unwrap(); + PackageId::try_new(name, "1.0.0", SourceId::for_registry(&loc).unwrap()).unwrap() + } + + #[test] + fn parallel_schedule() { + let mut graph: Graph = Graph::new(); + let a = pkg_id("a"); + let b = pkg_id("b"); + let c = pkg_id("c"); + let d = pkg_id("d"); + let e = pkg_id("e"); + + graph.add(a); + graph.add(b); + graph.add(c); + graph.add(d); + graph.add(e); + graph.link(a, c); + graph.link(b, c); + graph.link(c, d); + graph.link(c, e); + + let mut order = PublishPlan::new(&graph); + let ready: Vec<_> = order.take_ready().into_iter().collect(); + assert_eq!(ready, vec![d, e]); + + order.mark_confirmed(vec![d]); + let ready: Vec<_> = order.take_ready().into_iter().collect(); + assert!(ready.is_empty()); + + order.mark_confirmed(vec![e]); + let ready: Vec<_> = order.take_ready().into_iter().collect(); + assert_eq!(ready, vec![c]); + + order.mark_confirmed(vec![c]); + let ready: Vec<_> = order.take_ready().into_iter().collect(); + assert_eq!(ready, vec![a, b]); + + order.mark_confirmed(vec![a, b]); + let ready: Vec<_> = order.take_ready().into_iter().collect(); + assert!(ready.is_empty()); + } +} diff --git a/src/cargo/util/graph.rs b/src/cargo/util/graph.rs index 
4ae00594430..9e8ba143f40 100644 --- a/src/cargo/util/graph.rs +++ b/src/cargo/util/graph.rs @@ -25,6 +25,20 @@ impl Graph { .or_insert_with(Default::default) } + /// Returns the graph obtained by reversing all edges. + pub fn reversed(&self) -> Graph { + let mut ret = Graph::new(); + + for n in self.iter() { + ret.add(n.clone()); + for (m, e) in self.edges(n) { + *ret.link(m.clone(), n.clone()) = e.clone(); + } + } + + ret + } + pub fn contains(&self, k: &Q) -> bool where N: Borrow, @@ -206,6 +220,19 @@ fn path_to_self() { assert_eq!(new.path_to_bottom(&0), vec![(&0, Some(&()))]); } +#[test] +fn reverse() { + let mut new: Graph = Graph::new(); + new.link(0, 1); + new.link(0, 2); + + let mut expected: Graph = Graph::new(); + expected.add(0); + expected.link(1, 0); + expected.link(2, 0); + assert_eq!(new.reversed(), expected); +} + impl Default for Graph { fn default() -> Graph { Graph::new() diff --git a/tests/testsuite/credential_process.rs b/tests/testsuite/credential_process.rs index 96ff46a5527..bba657fe8c5 100644 --- a/tests/testsuite/credential_process.rs +++ b/tests/testsuite/credential_process.rs @@ -76,8 +76,8 @@ fn publish() { {"v":1,"registry":{"index-url":"[..]","name":"alternative","headers":[..]},"kind":"get","operation":"read"} [PACKAGING] foo v0.1.0 ([ROOT]/foo) [PACKAGED] 3 files, [FILE_SIZE]B ([FILE_SIZE]B compressed) -{"v":1,"registry":{"index-url":"[..]","name":"alternative"},"kind":"get","operation":"publish","name":"foo","vers":"0.1.0","cksum":"[..]"} [UPLOADING] foo v0.1.0 ([ROOT]/foo) +{"v":1,"registry":{"index-url":"[..]","name":"alternative"},"kind":"get","operation":"publish","name":"foo","vers":"0.1.0","cksum":"[..]"} [UPLOADED] foo v0.1.0 to registry `alternative` [NOTE] waiting for `foo v0.1.0` to be available at registry `alternative`. You may press ctrl-c [..] @@ -529,8 +529,8 @@ fn token_caching() { {"v":1,"registry":{"index-url":"[..]","name":"alternative"},"kind":"get","operation":"read"} [PACKAGING] foo v0.1.0 ([ROOT]/foo) [PACKAGED] 3 files, [FILE_SIZE]B ([FILE_SIZE]B compressed) -{"v":1,"registry":{"index-url":"[..]","name":"alternative"},"kind":"get","operation":"publish","name":"foo","vers":"0.1.0","cksum":"[..]"} [UPLOADING] foo v0.1.0 ([ROOT]/foo) +{"v":1,"registry":{"index-url":"[..]","name":"alternative"},"kind":"get","operation":"publish","name":"foo","vers":"0.1.0","cksum":"[..]"} [UPLOADED] foo v0.1.0 to registry `alternative` [NOTE] waiting [..] You may press ctrl-c [..] diff --git a/tests/testsuite/publish.rs b/tests/testsuite/publish.rs index 5dec9435743..af071ae0877 100644 --- a/tests/testsuite/publish.rs +++ b/tests/testsuite/publish.rs @@ -3173,6 +3173,7 @@ See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for [PACKAGING] foo v0.0.1 ([ROOT]/foo) [PACKAGED] 4 files, [FILE_SIZE]B ([FILE_SIZE]B compressed) [UPLOADING] foo v0.0.1 ([ROOT]/foo) +[UPLOADED] foo v0.0.1 to registry `crates-io` "#]]) .run(); @@ -3305,7 +3306,26 @@ fn timeout_waiting_for_dependency_publish() { .masquerade_as_nightly_cargo(&["publish-timeout", "package-workspace"]) .with_status(101) .with_stderr_data(str![[r#" -[ERROR] the `-p` argument must be specified to select a single package to publish +[UPDATING] crates.io index +[WARNING] manifest has no documentation, homepage or repository. +See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info. +[PACKAGING] dep v0.0.1 ([ROOT]/foo/dep) +[PACKAGED] 3 files, [FILE_SIZE]B ([FILE_SIZE]B compressed) +[WARNING] manifest has no documentation, homepage or repository. 
+See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info. +[PACKAGING] main v0.0.1 ([ROOT]/foo/main) +[PACKAGED] 4 files, [FILE_SIZE]B ([FILE_SIZE]B compressed) +[WARNING] manifest has no documentation, homepage or repository. +See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info. +[PACKAGING] other v0.0.1 ([ROOT]/foo/other) +[PACKAGED] 4 files, [FILE_SIZE]B ([FILE_SIZE]B compressed) +[UPLOADING] dep v0.0.1 ([ROOT]/foo/dep) +[UPLOADED] dep v0.0.1 to registry `crates-io` +[NOTE] waiting for `dep v0.0.1` to be available at registry `crates-io`. +You may press ctrl-c to skip waiting; the crate should be available shortly. +[WARNING] timed out waiting for `dep v0.0.1` to be available in registry `crates-io` +[NOTE] the registry may have a backlog that is delaying making the crate available. The crate should be available soon. +[ERROR] unable to publish `main v0.0.1` and `other v0.0.1` due to time out while waiting for published dependencies to be available. "#]]) .run(); @@ -3588,10 +3608,47 @@ fn workspace_with_local_deps_nightly() { p.cargo("publish -Zpackage-workspace") .masquerade_as_nightly_cargo(&["package-workspace"]) - .with_status(101) .replace_crates_io(registry.index_url()) .with_stderr_data(str![[r#" -[ERROR] the `-p` argument must be specified to select a single package to publish +[UPDATING] crates.io index +[PACKAGING] level3 v0.0.1 ([ROOT]/foo/level3) +[PACKAGED] 3 files, [FILE_SIZE]B ([FILE_SIZE]B compressed) +[PACKAGING] level2 v0.0.1 ([ROOT]/foo/level2) +[PACKAGED] 3 files, [FILE_SIZE]B ([FILE_SIZE]B compressed) +[PACKAGING] level1 v0.0.1 ([ROOT]/foo/level1) +[UPDATING] crates.io index +[PACKAGED] 4 files, [FILE_SIZE]B ([FILE_SIZE]B compressed) +[VERIFYING] level3 v0.0.1 ([ROOT]/foo/level3) +[COMPILING] level3 v0.0.1 ([ROOT]/foo/target/package/level3-0.0.1) +[FINISHED] `dev` profile [unoptimized + debuginfo] target(s) in [ELAPSED]s +[VERIFYING] level2 v0.0.1 ([ROOT]/foo/level2) +[UPDATING] crates.io index +[UNPACKING] level3 v0.0.1 (registry `[ROOT]/foo/target/package/tmp-registry`) +[COMPILING] level3 v0.0.1 +[COMPILING] level2 v0.0.1 ([ROOT]/foo/target/package/level2-0.0.1) +[FINISHED] `dev` profile [unoptimized + debuginfo] target(s) in [ELAPSED]s +[VERIFYING] level1 v0.0.1 ([ROOT]/foo/level1) +[UPDATING] crates.io index +[UNPACKING] level2 v0.0.1 (registry `[ROOT]/foo/target/package/tmp-registry`) +[COMPILING] level3 v0.0.1 +[COMPILING] level2 v0.0.1 +[COMPILING] level1 v0.0.1 ([ROOT]/foo/target/package/level1-0.0.1) +[FINISHED] `dev` profile [unoptimized + debuginfo] target(s) in [ELAPSED]s +[UPLOADING] level3 v0.0.1 ([ROOT]/foo/level3) +[UPLOADED] level3 v0.0.1 to registry `crates-io` +[NOTE] waiting for `level3 v0.0.1` to be available at registry `crates-io`. +You may press ctrl-c to skip waiting; the crate should be available shortly. +[PUBLISHED] level3 v0.0.1 at registry `crates-io` +[UPLOADING] level2 v0.0.1 ([ROOT]/foo/level2) +[UPLOADED] level2 v0.0.1 to registry `crates-io` +[NOTE] waiting for `level2 v0.0.1` to be available at registry `crates-io`. +You may press ctrl-c to skip waiting; the crate should be available shortly. +[PUBLISHED] level2 v0.0.1 at registry `crates-io` +[UPLOADING] level1 v0.0.1 ([ROOT]/foo/level1) +[UPLOADED] level1 v0.0.1 to registry `crates-io` +[NOTE] waiting for `level1 v0.0.1` to be available at registry `crates-io`. +You may press ctrl-c to skip waiting; the crate should be available shortly. 
+[PUBLISHED] level1 v0.0.1 at registry `crates-io` "#]]) .run(); @@ -3659,10 +3716,41 @@ fn workspace_parallel() { p.cargo("publish -Zpackage-workspace") .masquerade_as_nightly_cargo(&["package-workspace"]) .replace_crates_io(registry.index_url()) - .with_status(101) .with_stderr_data( str![[r#" -[ERROR] the `-p` argument must be specified to select a single package to publish +[UPDATING] crates.io index +[PACKAGING] a v0.0.1 ([ROOT]/foo/a) +[PACKAGED] 3 files, [FILE_SIZE]B ([FILE_SIZE]B compressed) +[PACKAGING] b v0.0.1 ([ROOT]/foo/b) +[PACKAGED] 3 files, [FILE_SIZE]B ([FILE_SIZE]B compressed) +[PACKAGING] c v0.0.1 ([ROOT]/foo/c) +[PACKAGED] 3 files, [FILE_SIZE]B ([FILE_SIZE]B compressed) +[VERIFYING] a v0.0.1 ([ROOT]/foo/a) +[COMPILING] a v0.0.1 ([ROOT]/foo/target/package/a-0.0.1) +[FINISHED] `dev` profile [unoptimized + debuginfo] target(s) in [ELAPSED]s +[VERIFYING] b v0.0.1 ([ROOT]/foo/b) +[COMPILING] b v0.0.1 ([ROOT]/foo/target/package/b-0.0.1) +[FINISHED] `dev` profile [unoptimized + debuginfo] target(s) in [ELAPSED]s +[VERIFYING] c v0.0.1 ([ROOT]/foo/c) +[UPDATING] crates.io index +[UNPACKING] a v0.0.1 (registry `[ROOT]/foo/target/package/tmp-registry`) +[UNPACKING] b v0.0.1 (registry `[ROOT]/foo/target/package/tmp-registry`) +[COMPILING] a v0.0.1 +[COMPILING] b v0.0.1 +[COMPILING] c v0.0.1 ([ROOT]/foo/target/package/c-0.0.1) +[FINISHED] `dev` profile [unoptimized + debuginfo] target(s) in [ELAPSED]s +[UPLOADED] b v0.0.1 to registry `crates-io` +[UPLOADED] a v0.0.1 to registry `crates-io` +[NOTE] waiting for `a v0.0.1` or `b v0.0.1` to be available at registry `crates-io`. +You may press ctrl-c to skip waiting; the crate should be available shortly. +[PUBLISHED] a v0.0.1, b v0.0.1 at registry `crates-io` +[UPLOADING] c v0.0.1 ([ROOT]/foo/c) +[UPLOADED] c v0.0.1 to registry `crates-io` +[NOTE] waiting for `c v0.0.1` to be available at registry `crates-io`. +You may press ctrl-c to skip waiting; the crate should be available shortly. +[PUBLISHED] c v0.0.1 at registry `crates-io` +[UPLOADING] a v0.0.1 ([ROOT]/foo/a) +[UPLOADING] b v0.0.1 ([ROOT]/foo/b) "#]] .unordered(), @@ -3724,9 +3812,12 @@ fn workspace_missing_dependency() { [PACKAGED] 3 files, [FILE_SIZE]B ([FILE_SIZE]B compressed) [VERIFYING] b v0.0.1 ([ROOT]/foo/b) [UPDATING] crates.io index -[ERROR] no matching package named `a` found -location searched: registry `crates-io` -required by package `b v0.0.1 ([ROOT]/foo/target/package/b-0.0.1)` +[ERROR] failed to verify package tarball + +Caused by: + no matching package named `a` found + location searched: registry `crates-io` + required by package `b v0.0.1 ([ROOT]/foo/target/package/b-0.0.1)` "#]]) .run(); @@ -3756,7 +3847,23 @@ You may press ctrl-c to skip waiting; the crate should be available shortly. 
.replace_crates_io(registry.index_url()) .with_status(101) .with_stderr_data(str![[r#" -[ERROR] the `-p` argument must be specified to select a single package to publish +[UPDATING] crates.io index +[PACKAGING] a v0.0.1 ([ROOT]/foo/a) +[PACKAGED] 3 files, [FILE_SIZE]B ([FILE_SIZE]B compressed) +[PACKAGING] b v0.0.1 ([ROOT]/foo/b) +[PACKAGED] 3 files, [FILE_SIZE]B ([FILE_SIZE]B compressed) +[VERIFYING] a v0.0.1 ([ROOT]/foo/a) +[COMPILING] a v0.0.1 ([ROOT]/foo/target/package/a-0.0.1) +[FINISHED] `dev` profile [unoptimized + debuginfo] target(s) in [ELAPSED]s +[VERIFYING] b v0.0.1 ([ROOT]/foo/b) +[UPDATING] crates.io index +[ERROR] failed to verify package tarball + +Caused by: + failed to get `a` as a dependency of package `b v0.0.1 ([ROOT]/foo/target/package/b-0.0.1)` + +Caused by: + found a package in the remote registry and the local overlay: a@0.0.1 "#]]) .run(); @@ -3817,7 +3924,8 @@ fn one_unpublishable_package() { .masquerade_as_nightly_cargo(&["package-workspace"]) .with_status(101) .with_stderr_data(str![[r#" -[ERROR] the `-p` argument must be specified to select a single package to publish +[ERROR] `main` cannot be published. +`package.publish` must be set to `true` or a non-empty list in Cargo.toml to publish. "#]]) .run();
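+    // `main` has `publish = false`, so `validate_registry` rejects the workspace publish before anything is packaged or uploaded.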