diff --git a/src/bin/commands/read_manifest.rs b/src/bin/commands/read_manifest.rs index 5365b5bac86..1e54c79e87f 100644 --- a/src/bin/commands/read_manifest.rs +++ b/src/bin/commands/read_manifest.rs @@ -1,6 +1,5 @@ use command_prelude::*; -use cargo::core::Package; use cargo::print_json; pub fn cli() -> App { @@ -13,8 +12,7 @@ Print a JSON representation of a Cargo.toml manifest.", } pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { - let root = args.root_manifest(config)?; - let pkg = Package::for_path(&root, config)?; - print_json(&pkg); + let ws = args.workspace(config)?; + print_json(&ws.current()?); Ok(()) } diff --git a/src/cargo/core/dependency.rs b/src/cargo/core/dependency.rs index f8ef49d048b..173be4fb09f 100644 --- a/src/cargo/core/dependency.rs +++ b/src/cargo/core/dependency.rs @@ -343,9 +343,8 @@ impl Dependency { } /// Returns true if the package (`sum`) can fulfill this dependency request. - pub fn matches_ignoring_source(&self, sum: &Summary) -> bool { - self.name() == sum.package_id().name() - && self.version_req().matches(sum.package_id().version()) + pub fn matches_ignoring_source(&self, id: &PackageId) -> bool { + self.name() == id.name() && self.version_req().matches(id.version()) } /// Returns true if the package (`id`) can fulfill this dependency request. diff --git a/src/cargo/core/package.rs b/src/cargo/core/package.rs index d66a895540f..f3ce2eaf29c 100644 --- a/src/cargo/core/package.rs +++ b/src/cargo/core/package.rs @@ -12,7 +12,6 @@ use lazycell::LazyCell; use core::{Dependency, Manifest, PackageId, SourceId, Target}; use core::{SourceMap, Summary}; use core::interning::InternedString; -use ops; use util::{internal, lev_distance, Config}; use util::errors::{CargoResult, CargoResultExt}; @@ -81,14 +80,6 @@ impl Package { } } - /// Calculate the Package from the manifest path (and cargo configuration). 
- pub fn for_path(manifest_path: &Path, config: &Config) -> CargoResult { - let path = manifest_path.parent().unwrap(); - let source_id = SourceId::for_path(path)?; - let (pkg, _) = ops::read_package(manifest_path, &source_id, config)?; - Ok(pkg) - } - /// Get the manifest dependencies pub fn dependencies(&self) -> &[Dependency] { self.manifest.dependencies() diff --git a/src/cargo/core/registry.rs b/src/cargo/core/registry.rs index 3885ea17ca3..ba1866bc897 100644 --- a/src/cargo/core/registry.rs +++ b/src/cargo/core/registry.rs @@ -434,7 +434,7 @@ impl<'cfg> Registry for PackageRegistry<'cfg> { patches.extend( extra .iter() - .filter(|s| dep.matches_ignoring_source(s)) + .filter(|s| dep.matches_ignoring_source(s.package_id())) .cloned(), ); } diff --git a/src/cargo/core/resolver/encode.rs b/src/cargo/core/resolver/encode.rs index 63cd778db9c..3bb318c17ad 100644 --- a/src/cargo/core/resolver/encode.rs +++ b/src/cargo/core/resolver/encode.rs @@ -6,7 +6,7 @@ use serde::ser; use serde::de; use core::{Dependency, Package, PackageId, SourceId, Workspace}; -use util::{internal, Config, Graph}; +use util::{internal, Graph}; use util::errors::{CargoError, CargoResult, CargoResultExt}; use super::Resolve; @@ -207,33 +207,33 @@ fn build_path_deps(ws: &Workspace) -> HashMap { visited.insert(member.package_id().source_id().clone()); } for member in members.iter() { - build_pkg(member, ws.config(), &mut ret, &mut visited); + build_pkg(member, ws, &mut ret, &mut visited); } for deps in ws.root_patch().values() { for dep in deps { - build_dep(dep, ws.config(), &mut ret, &mut visited); + build_dep(dep, ws, &mut ret, &mut visited); } } for &(_, ref dep) in ws.root_replace() { - build_dep(dep, ws.config(), &mut ret, &mut visited); + build_dep(dep, ws, &mut ret, &mut visited); } return ret; fn build_pkg( pkg: &Package, - config: &Config, + ws: &Workspace, ret: &mut HashMap, visited: &mut HashSet, ) { for dep in pkg.dependencies() { - build_dep(dep, config, ret, visited); + 
build_dep(dep, ws, ret, visited); } } fn build_dep( dep: &Dependency, - config: &Config, + ws: &Workspace, ret: &mut HashMap, visited: &mut HashSet, ) { @@ -245,13 +245,13 @@ fn build_path_deps(ws: &Workspace) -> HashMap { Ok(p) => p.join("Cargo.toml"), Err(_) => return, }; - let pkg = match Package::for_path(&path, config) { + let pkg = match ws.load(&path) { Ok(p) => p, Err(_) => return, }; ret.insert(pkg.name().to_string(), pkg.package_id().source_id().clone()); visited.insert(pkg.package_id().source_id().clone()); - build_pkg(&pkg, config, ret, visited); + build_pkg(&pkg, ws, ret, visited); } } diff --git a/src/cargo/core/resolver/mod.rs b/src/cargo/core/resolver/mod.rs index 377a20236db..9d765933aaa 100644 --- a/src/cargo/core/resolver/mod.rs +++ b/src/cargo/core/resolver/mod.rs @@ -384,10 +384,39 @@ struct Context { type Activations = HashMap<(InternedString, SourceId), Rc>>; /// Builds the list of all packages required to build the first argument. +/// +/// * `summaries` - the list of package summaries along with how to resolve +/// their features. This is a list of all top-level packages that are intended +/// to be part of the lock file (resolve output). These typically are a list +/// of all workspace members. +/// +/// * `replacements` - this is a list of `[replace]` directives found in the +/// root of the workspace. The list here is a `PackageIdSpec` of what to +/// replace and a `Dependency` to replace that with. In general it's not +/// recommended to use `[replace]` any more and use `[patch]` instead, which +/// is supported elsewhere. +/// +/// * `registry` - this is the source from which all package summaries are +/// loaded. It's expected that this is extensively configured ahead of time +/// and is idempotent with our requests to it (aka returns the same results +/// for the same query every time). Typically this is an instance of a +/// `PackageRegistry`. 
+/// +/// * `try_to_use` - this is a list of package ids which were previously found +/// in the lock file. We heuristically prefer the ids listed in `try_to_use` +/// when sorting candidates to activate, but otherwise this isn't used +/// anywhere else. +/// +/// * `config` - a location to print warnings and such, or `None` if no warnings +/// should be printed +/// +/// * `print_warnings` - whether or not to print backwards-compatibility +/// warnings and such pub fn resolve( summaries: &[(Summary, Method)], replacements: &[(PackageIdSpec, Dependency)], registry: &mut Registry, + try_to_use: &[&PackageId], config: Option<&Config>, print_warnings: bool, ) -> CargoResult { @@ -400,12 +429,8 @@ pub fn resolve( warnings: RcList::new(), }; let _p = profile::start("resolving"); - let cx = activate_deps_loop( - cx, - &mut RegistryQueryer::new(registry, replacements), - summaries, - config, - )?; + let mut registry = RegistryQueryer::new(registry, replacements, try_to_use); + let cx = activate_deps_loop(cx, &mut registry, summaries, config)?; let mut resolve = Resolve { graph: cx.graph(), @@ -637,16 +662,22 @@ impl ConflictReason { struct RegistryQueryer<'a> { registry: &'a mut (Registry + 'a), replacements: &'a [(PackageIdSpec, Dependency)], + try_to_use: HashSet<&'a PackageId>, // TODO: with nll the Rc can be removed cache: HashMap>>, } impl<'a> RegistryQueryer<'a> { - fn new(registry: &'a mut Registry, replacements: &'a [(PackageIdSpec, Dependency)]) -> Self { + fn new( + registry: &'a mut Registry, + replacements: &'a [(PackageIdSpec, Dependency)], + try_to_use: &'a [&'a PackageId], + ) -> Self { RegistryQueryer { registry, replacements, cache: HashMap::new(), + try_to_use: try_to_use.iter().cloned().collect(), } } @@ -739,9 +770,17 @@ impl<'a> RegistryQueryer<'a> { candidate.replace = replace; } - // When we attempt versions for a package, we'll want to start at - // the maximum version and work our way down. 
- ret.sort_unstable_by(|a, b| b.summary.version().cmp(a.summary.version())); + // When we attempt versions for a package we'll want to do so in a + // sorted fashion to pick the "best candidates" first. Currently we try + // prioritized summaries (those in `try_to_use`) and failing that we + // list everything from the maximum version to the lowest version. + ret.sort_unstable_by(|a, b| { + let a_in_previous = self.try_to_use.contains(a.summary.package_id()); + let b_in_previous = self.try_to_use.contains(b.summary.package_id()); + let a = (a_in_previous, a.summary.version()); + let b = (b_in_previous, b.summary.version()); + a.cmp(&b).reverse() + }); let out = Rc::new(ret); diff --git a/src/cargo/core/workspace.rs b/src/cargo/core/workspace.rs index d6ba3594da9..9377b185cf7 100644 --- a/src/cargo/core/workspace.rs +++ b/src/cargo/core/workspace.rs @@ -1,17 +1,21 @@ -use std::collections::hash_map::{Entry, HashMap}; +use std::cell::RefCell; use std::collections::BTreeMap; +use std::collections::hash_map::{Entry, HashMap}; use std::path::{Path, PathBuf}; use std::slice; use glob::glob; use url::Url; +use core::registry::PackageRegistry; use core::{EitherManifest, Package, SourceId, VirtualManifest}; use core::{Dependency, PackageIdSpec, Profile, Profiles}; -use util::{Config, Filesystem}; +use ops; +use sources::PathSource; use util::errors::{CargoResult, CargoResultExt}; use util::paths; use util::toml::read_manifest; +use util::{Config, Filesystem}; /// The core abstraction in Cargo for working with a workspace of crates. /// @@ -67,6 +71,10 @@ pub struct Workspace<'cfg> { // needed by the current configuration (such as in cargo install). In some // cases `false` also results in the non-enforcement of dev-dependencies. require_optional_deps: bool, + + // A cache of loaded packages for particular paths which is disjoint from + // `packages` up above, used in the `load` method down below. 
+ loaded_packages: RefCell>, } // Separate structure for tracking loaded packages (to avoid loading anything @@ -137,6 +145,7 @@ impl<'cfg> Workspace<'cfg> { default_members: Vec::new(), is_ephemeral: false, require_optional_deps: true, + loaded_packages: RefCell::new(HashMap::new()), }; ws.root_manifest = ws.find_root(manifest_path)?; ws.find_members()?; @@ -172,6 +181,7 @@ impl<'cfg> Workspace<'cfg> { default_members: Vec::new(), is_ephemeral: true, require_optional_deps, + loaded_packages: RefCell::new(HashMap::new()), }; { let key = ws.current_manifest.parent().unwrap(); @@ -669,11 +679,63 @@ impl<'cfg> Workspace<'cfg> { Ok(()) } + + pub fn load(&self, manifest_path: &Path) -> CargoResult { + match self.packages.maybe_get(manifest_path) { + Some(&MaybePackage::Package(ref p)) => return Ok(p.clone()), + Some(&MaybePackage::Virtual(_)) => bail!("cannot load workspace root"), + None => {} + } + + let mut loaded = self.loaded_packages.borrow_mut(); + if let Some(p) = loaded.get(manifest_path).cloned() { + return Ok(p); + } + let source_id = SourceId::for_path(manifest_path.parent().unwrap())?; + let (package, _nested_paths) = ops::read_package(manifest_path, &source_id, self.config)?; + loaded.insert(manifest_path.to_path_buf(), package.clone()); + Ok(package) + } + + /// Preload the provided registry with already loaded packages. + /// + /// A workspace may load packages during construction/parsing/early phases + /// for various operations, and this preload step avoids doubly-loading and + /// parsing crates on the filesystem by inserting them all into the registry + /// with their in-memory formats. + pub fn preload(&self, registry: &mut PackageRegistry<'cfg>) { + // These can get weird as this generally represents a workspace during + // `cargo install`. 
Things like git repositories will actually have a + // `PathSource` with multiple entries in it, so the logic below is + // mostly just an optimization for normal `cargo build` in workspaces + // during development. + if self.is_ephemeral { + return; + } + + for pkg in self.packages.packages.values() { + let pkg = match *pkg { + MaybePackage::Package(ref p) => p.clone(), + MaybePackage::Virtual(_) => continue, + }; + let mut src = PathSource::new( + pkg.manifest_path(), + pkg.package_id().source_id(), + self.config, + ); + src.preload_with(pkg); + registry.add_preloaded(Box::new(src)); + } + } } impl<'cfg> Packages<'cfg> { fn get(&self, manifest_path: &Path) -> &MaybePackage { - &self.packages[manifest_path.parent().unwrap()] + self.maybe_get(manifest_path).unwrap() + } + + fn maybe_get(&self, manifest_path: &Path) -> Option<&MaybePackage> { + self.packages.get(manifest_path.parent().unwrap()) } fn load(&mut self, manifest_path: &Path) -> CargoResult<&MaybePackage> { diff --git a/src/cargo/ops/cargo_rustc/mod.rs b/src/cargo/ops/cargo_rustc/mod.rs index 375cb793dd0..700230d8592 100644 --- a/src/cargo/ops/cargo_rustc/mod.rs +++ b/src/cargo/ops/cargo_rustc/mod.rs @@ -1082,7 +1082,7 @@ fn build_deps_args<'a, 'cfg>( .pkg .dependencies() .iter() - .filter(|d| d.matches_ignoring_source(dep.pkg.summary())) + .filter(|d| d.matches_ignoring_source(dep.pkg.package_id())) .filter_map(|d| d.rename()) .next(); diff --git a/src/cargo/ops/registry.rs b/src/cargo/ops/registry.rs index cc92dda7324..60e4641c1aa 100644 --- a/src/cargo/ops/registry.rs +++ b/src/cargo/ops/registry.rs @@ -447,8 +447,8 @@ pub fn modify_owners(config: &Config, opts: &OwnersOptions) -> CargoResult<()> { Some(ref name) => name.clone(), None => { let manifest_path = find_root_manifest_for_wd(None, config.cwd())?; - let pkg = Package::for_path(&manifest_path, config)?; - pkg.name().to_string() + let ws = Workspace::new(&manifest_path, config)?; + ws.current()?.package_id().name().to_string() } }; @@ -508,8 
+508,8 @@ pub fn yank( Some(name) => name, None => { let manifest_path = find_root_manifest_for_wd(None, config.cwd())?; - let pkg = Package::for_path(&manifest_path, config)?; - pkg.name().to_string() + let ws = Workspace::new(&manifest_path, config)?; + ws.current()?.package_id().name().to_string() } }; let version = match version { diff --git a/src/cargo/ops/resolve.rs b/src/cargo/ops/resolve.rs index 6fb4914028b..618613964fa 100644 --- a/src/cargo/ops/resolve.rs +++ b/src/cargo/ops/resolve.rs @@ -101,9 +101,9 @@ pub fn resolve_ws_with_method<'a>( Ok((packages, resolved_with_overrides)) } -fn resolve_with_registry( - ws: &Workspace, - registry: &mut PackageRegistry, +fn resolve_with_registry<'cfg>( + ws: &Workspace<'cfg>, + registry: &mut PackageRegistry<'cfg>, warn: bool, ) -> CargoResult { let prev = ops::load_pkg_lockfile(ws)?; @@ -133,9 +133,9 @@ fn resolve_with_registry( /// /// The previous resolve normally comes from a lockfile. This function does not /// read or write lockfiles from the filesystem. -pub fn resolve_with_previous<'a>( - registry: &mut PackageRegistry, - ws: &Workspace, +pub fn resolve_with_previous<'a, 'cfg>( + registry: &mut PackageRegistry<'cfg>, + ws: &Workspace<'cfg>, method: Method, previous: Option<&'a Resolve>, to_avoid: Option<&HashSet<&'a PackageId>>, @@ -169,31 +169,18 @@ pub fn resolve_with_previous<'a>( // In the case where a previous instance of resolve is available, we // want to lock as many packages as possible to the previous version - // without disturbing the graph structure. To this end we perform - // two actions here: - // - // 1. We inform the package registry of all locked packages. This - // involves informing it of both the locked package's id as well - // as the versions of all locked dependencies. The registry will - // then takes this information into account when it is queried. - // - // 2. The specified package's summary will have its dependencies - // modified to their precise variants. 
This will instruct the - // first step of the resolution process to not query for ranges - // but rather for precise dependency versions. - // - // This process must handle altered dependencies, however, as - // it's possible for a manifest to change over time to have - // dependencies added, removed, or modified to different version - // ranges. To deal with this, we only actually lock a dependency - // to the previously resolved version if the dependency listed - // still matches the locked version. + // without disturbing the graph structure. + let mut try_to_use = Vec::new(); if let Some(r) = previous { trace!("previous: {:?}", r); - for node in r.iter().filter(keep) { - let deps = r.deps_not_replaced(node).filter(keep).cloned().collect(); - registry.register_lock(node.clone(), deps); - } + register_previous_locks(ws, registry, r, keep); + + // Everything in the previous lock file we want to keep is prioritized + // in dependency selection if it comes up, aka we want to have + // conservative updates. + try_to_use.extend(r.iter().filter(keep).inspect(|id| { + debug!("attempting to prefer {}", id); + })); } if register_patches { @@ -293,7 +280,15 @@ pub fn resolve_with_previous<'a>( None => root_replace.to_vec(), }; - let mut resolved = resolver::resolve(&summaries, &replace, registry, Some(ws.config()), warn)?; + ws.preload(registry); + let mut resolved = resolver::resolve( + &summaries, + &replace, + registry, + &try_to_use, + Some(ws.config()), + warn, + )?; resolved.register_used_patches(registry.patches()); if let Some(previous) = previous { resolved.merge_from(previous)?; @@ -336,3 +331,145 @@ fn get_resolved_packages<'a>(resolve: &Resolve, registry: PackageRegistry<'a>) - let ids: Vec = resolve.iter().cloned().collect(); registry.get(&ids) } + +/// In this function we're responsible for informing the `registry` of all +/// locked dependencies from the previous lock file we had, `resolve`. +/// +/// This gets particularly tricky for a couple of reasons. 
The first is that we +/// want all updates to be conservative, so we actually want to take the +/// `resolve` into account (and avoid unnecessary registry updates and such). +/// the second, however, is that we want to be resilient to updates of +/// manifests. For example if a dependency is added or a version is changed we +/// want to make sure that we properly re-resolve (conservatively) instead of +/// providing an opaque error. +/// +/// The logic here is somewhat subtle but there should be more comments below to +/// help out, and otherwise feel free to ask on IRC if there's questions! +/// +/// Note that this function, at the time of this writing, is basically the +/// entire fix for #4127 +fn register_previous_locks<'a>( + ws: &Workspace, + registry: &mut PackageRegistry, + resolve: &'a Resolve, + keep: &Fn(&&'a PackageId) -> bool, +) { + // Ok so we've been passed in a `keep` function which basically says "if I + // return true then this package wasn't listed for an update on the command + // line". AKA if we run `cargo update -p foo` then `keep(bar)` will return + // `true`, whereas `keep(foo)` will return `false` (roughly). + // + // This isn't actually quite what we want, however. Instead we want to + // further refine this `keep` function with *all transitive dependencies* of + // the packages we're not keeping. For example consider a case like this: + // + // * There's a crate `log` + // * There's a crate `serde` which depends on `log` + // + // Let's say we then run `cargo update -p serde`. This may *also* want to + // update the `log` dependency as our newer version of `serde` may have a + // new minimum version required for `log`. Now this isn't always guaranteed + // to work. What'll happen here is we *won't* lock the `log` dependency nor + // the `log` crate itself, but we will inform the registry "please prefer + // this version of `log`". 
That way if our newer version of serde works with + // the older version of `log`, we conservatively won't update `log`. If, + // however, nothing else in the dependency graph depends on `log` and the + // newer version of `serde` requires a new version of `log` it'll get pulled + // in (as we didn't accidentally lock it to an old version). + let mut avoid_locking = HashSet::new(); + for node in resolve.iter() { + if !keep(&node) { + add_deps(resolve, node, &mut avoid_locking); + } + } + + // Ok but the above loop isn't the entire story! Updates to the dependency + // graph can come from two locations, the `cargo update` command or + // manifests themselves. For example a manifest on the filesystem may + // have been updated to have an updated version requirement on `serde`. In + // this case both `keep(serde)` and `keep(log)` return `true` (the `keep` + // that's an argument to this function). We, however, don't want to keep + // either of those! Otherwise we'll get obscure resolve errors about locked + // versions. + // + // To solve this problem we iterate over all packages with path sources + // (aka ones with manifests that are changing) and take a look at all of + // their dependencies. If any dependency does not match something in the + // previous lock file, then we're guaranteed that the main resolver will + // update the source of this dependency no matter what. Knowing this we + // poison all packages from the same source, forcing them all to get + // updated. + // + // This may seem like a heavy hammer, and it is! It means that if you change + // anything from crates.io then all of crates.io becomes unlocked. Note, + // however, that we still want conservative updates. This currently happens + // because the first candidate the resolver picks is the previously locked + // version, and only if that fails to activate do we move on and try 
(giving the guise of conservative updates) + // + // For example let's say we had `serde = "0.1"` written in our lock file. + // When we later edit this to `serde = "0.1.3"` we don't want to lock serde + // at its old version, 0.1.1. Instead we want to allow it to update to + // `0.1.3` and update its own dependencies (like above). To do this *all + // crates from crates.io* are not locked (aka added to `avoid_locking`). + // For dependencies like `log` their previous version in the lock file will + // come up first before newer version, if newer version are available. + let mut path_deps = ws.members().cloned().collect::>(); + let mut visited = HashSet::new(); + while let Some(member) = path_deps.pop() { + if !visited.insert(member.package_id().clone()) { + continue; + } + for dep in member.dependencies() { + let source = dep.source_id(); + + // If this is a path dependency then try to push it onto our + // worklist + if source.is_path() { + if let Ok(path) = source.url().to_file_path() { + if let Ok(pkg) = ws.load(&path.join("Cargo.toml")) { + path_deps.push(pkg); + } + continue; + } + } + + // If we match *anything* in the dependency graph then we consider + // ourselves A-OK and assume that we'll resolve to that. If, + // however, nothing matches, then we poison the source of this + // dependencies and the previous lock file. + if resolve.iter().any(|id| dep.matches_ignoring_source(id)) { + continue; + } + for id in resolve.iter().filter(|id| id.source_id() == source) { + add_deps(resolve, id, &mut avoid_locking); + } + } + } + + // Alright now that we've got our new, fresh, shiny, and refined `keep` + // function let's put it to action. Take a look at the previous lockfile, + // filter everything by this callback, and then shove everything else into + // the registry as a locked dependency. 
+ let ref keep = |id: &&'a PackageId| keep(id) && !avoid_locking.contains(id); + + for node in resolve.iter().filter(keep) { + let deps = resolve + .deps_not_replaced(node) + .filter(keep) + .cloned() + .collect(); + registry.register_lock(node.clone(), deps); + } + + /// recursively add `node` and all its transitive dependencies to `set` + fn add_deps<'a>(resolve: &'a Resolve, node: &'a PackageId, set: &mut HashSet<&'a PackageId>) { + if !set.insert(node) { + return; + } + debug!("ignoring any lock pointing directly at {}", node); + for dep in resolve.deps_not_replaced(node) { + add_deps(resolve, dep, set); + } + } +} diff --git a/src/cargo/sources/path.rs b/src/cargo/sources/path.rs index fb418ea0846..433c97d9f03 100644 --- a/src/cargo/sources/path.rs +++ b/src/cargo/sources/path.rs @@ -53,6 +53,14 @@ impl<'cfg> PathSource<'cfg> { } } + pub fn preload_with(&mut self, pkg: Package) { + assert!(!self.updated); + assert!(!self.recursive); + assert!(self.packages.is_empty()); + self.updated = true; + self.packages.push(pkg); + } + pub fn root_package(&mut self) -> CargoResult { trace!("root_package; source={:?}", self); diff --git a/tests/testsuite/cargotest/support/mod.rs b/tests/testsuite/cargotest/support/mod.rs index a2d5d59576e..089f1680234 100644 --- a/tests/testsuite/cargotest/support/mod.rs +++ b/tests/testsuite/cargotest/support/mod.rs @@ -246,6 +246,18 @@ impl Project { buffer } + pub fn uncomment_root_manifest(&self) { + let mut contents = String::new(); + fs::File::open(self.root().join("Cargo.toml")) + .unwrap() + .read_to_string(&mut contents) + .unwrap(); + fs::File::create(self.root().join("Cargo.toml")) + .unwrap() + .write_all(contents.replace("#", "").as_bytes()) + .unwrap(); + } + fn get_lib_prefix(kind: &str) -> &str { match kind { "lib" | "rlib" => "lib", diff --git a/tests/testsuite/main.rs b/tests/testsuite/main.rs index 9f66a61d49b..eb1f5190fd8 100644 --- a/tests/testsuite/main.rs +++ b/tests/testsuite/main.rs @@ -81,6 +81,7 @@ mod search; 
mod small_fd_limits; mod test; mod tool_paths; +mod update; mod verify_project; mod version; mod warn_on_failure; diff --git a/tests/testsuite/overrides.rs b/tests/testsuite/overrides.rs index 79a122447e7..80325197fdd 100644 --- a/tests/testsuite/overrides.rs +++ b/tests/testsuite/overrides.rs @@ -498,17 +498,22 @@ fn override_adds_some_deps() { p.cargo("update") .arg("-p") .arg(&format!("{}#bar", foo.url())), - execs() - .with_status(0) - .with_stderr("[UPDATING] git repository `file://[..]`"), + execs().with_status(0).with_stderr( + "\ +[UPDATING] git repository `file://[..]` +[UPDATING] registry `file://[..]` +", + ), ); assert_that( p.cargo("update") .arg("-p") .arg("https://github.com/rust-lang/crates.io-index#bar"), - execs() - .with_status(0) - .with_stderr("[UPDATING] registry `file://[..]`"), + execs().with_status(0).with_stderr( + "\ +[UPDATING] registry `file://[..]` +", + ), ); assert_that(p.cargo("build"), execs().with_status(0).with_stdout("")); diff --git a/tests/testsuite/registry.rs b/tests/testsuite/registry.rs index 49a6ab90b4f..8597f144f51 100644 --- a/tests/testsuite/registry.rs +++ b/tests/testsuite/registry.rs @@ -1149,9 +1149,13 @@ fn update_backtracking_ok() { assert_that( p.cargo("update").arg("-p").arg("hyper"), - execs() - .with_status(0) - .with_stderr("[UPDATING] registry `[..]`"), + execs().with_status(0).with_stderr( + "\ +[UPDATING] registry `[..]` +[UPDATING] hyper v0.6.5 -> v0.6.6 +[UPDATING] openssl v0.1.0 -> v0.1.1 +", + ), ); } diff --git a/tests/testsuite/resolve.rs b/tests/testsuite/resolve.rs index 5b3538c0224..1e7baa023d4 100644 --- a/tests/testsuite/resolve.rs +++ b/tests/testsuite/resolve.rs @@ -35,7 +35,7 @@ fn resolve( let mut registry = MyRegistry(registry); let summary = Summary::new(pkg.clone(), deps, BTreeMap::new(), None).unwrap(); let method = Method::Everything; - let resolve = resolver::resolve(&[(summary, method)], &[], &mut registry, None, false)?; + let resolve = resolver::resolve(&[(summary, method)], &[], 
&mut registry, &[], None, false)?; let res = resolve.iter().cloned().collect(); Ok(res) } diff --git a/tests/testsuite/update.rs b/tests/testsuite/update.rs new file mode 100644 index 00000000000..c75a4c5fdad --- /dev/null +++ b/tests/testsuite/update.rs @@ -0,0 +1,316 @@ +use std::fs::File; +use std::io::prelude::*; + +use cargotest::support::{execs, project}; +use cargotest::support::registry::Package; +use hamcrest::assert_that; + +#[test] +fn minor_update_two_places() { + Package::new("log", "0.1.0").publish(); + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + log = "0.1" + foo = { path = "foo" } + "#, + ) + .file("src/lib.rs", "") + .file( + "foo/Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + log = "0.1" + "#, + ) + .file("foo/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + Package::new("log", "0.1.1").publish(); + + File::create(p.root().join("foo/Cargo.toml")) + .unwrap() + .write_all( + br#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + log = "0.1.1" + "#, + ) + .unwrap(); + + assert_that(p.cargo("build"), execs().with_status(0)); +} + +#[test] +fn transitive_minor_update() { + Package::new("log", "0.1.0").publish(); + Package::new("serde", "0.1.0").dep("log", "0.1").publish(); + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + serde = "0.1" + log = "0.1" + foo = { path = "foo" } + "#, + ) + .file("src/lib.rs", "") + .file( + "foo/Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + serde = "0.1" + "#, + ) + .file("foo/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + + Package::new("log", "0.1.1").publish(); + Package::new("serde", "0.1.1").dep("log", 
"0.1.1").publish(); + + // Note that `serde` isn't actually updated here! The default behavior for + // `update` right now is to as conservatively as possible attempt to satisfy + // an update. In this case we previously locked the dependency graph to `log + // 0.1.0`, but nothing on the command line says we're allowed to update + // that. As a result the update of `serde` here shouldn't update to `serde + // 0.1.1` as that would also force an update to `log 0.1.1`. + // + // Also note that this is probably counterintuitive and weird. We may wish + // to change this one day. + assert_that( + p.cargo("update").arg("-p").arg("serde"), + execs().with_status(0).with_stderr( + "\ +[UPDATING] registry `[..]` +", + ), + ); +} + +#[test] +fn conservative() { + Package::new("log", "0.1.0").publish(); + Package::new("serde", "0.1.0").dep("log", "0.1").publish(); + + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + serde = "0.1" + log = "0.1" + foo = { path = "foo" } + "#, + ) + .file("src/lib.rs", "") + .file( + "foo/Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + serde = "0.1" + "#, + ) + .file("foo/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + + Package::new("log", "0.1.1").publish(); + Package::new("serde", "0.1.1").dep("log", "0.1").publish(); + + assert_that( + p.cargo("update").arg("-p").arg("serde"), + execs().with_status(0).with_stderr( + "\ +[UPDATING] registry `[..]` +[UPDATING] serde v0.1.0 -> v0.1.1 +", + ), + ); +} + +#[test] +fn update_via_new_dep() { + Package::new("log", "0.1.0").publish(); + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + log = "0.1" + # foo = { path = "foo" } + "#, + ) + .file("src/lib.rs", "") + .file( + "foo/Cargo.toml", + r#" + [package] + name = "foo" + version = 
"0.0.1" + authors = [] + + [dependencies] + log = "0.1.1" + "#, + ) + .file("foo/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + Package::new("log", "0.1.1").publish(); + + p.uncomment_root_manifest(); + assert_that( + p.cargo("build").env("RUST_LOG", "cargo=trace"), + execs().with_status(0), + ); +} + +#[test] +fn update_via_new_member() { + Package::new("log", "0.1.0").publish(); + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [workspace] + # members = [ "foo" ] + + [dependencies] + log = "0.1" + "#, + ) + .file("src/lib.rs", "") + .file( + "foo/Cargo.toml", + r#" + [package] + name = "foo" + version = "0.0.1" + authors = [] + + [dependencies] + log = "0.1.1" + "#, + ) + .file("foo/src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + Package::new("log", "0.1.1").publish(); + + p.uncomment_root_manifest(); + assert_that(p.cargo("build"), execs().with_status(0)); +} + +#[test] +fn add_dep_deep_new_requirement() { + Package::new("log", "0.1.0").publish(); + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + log = "0.1" + # bar = "0.1" + "#, + ) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), execs().with_status(0)); + + Package::new("log", "0.1.1").publish(); + Package::new("bar", "0.1.0").dep("log", "0.1.1").publish(); + + p.uncomment_root_manifest(); + assert_that(p.cargo("build"), execs().with_status(0)); +} + +#[test] +fn everything_real_deep() { + Package::new("log", "0.1.0").publish(); + Package::new("foo", "0.1.0").dep("log", "0.1").publish(); + let p = project("foo") + .file( + "Cargo.toml", + r#" + [package] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1" + # bar = "0.1" + "#, + ) + .file("src/lib.rs", "") + .build(); + + assert_that(p.cargo("build"), 
execs().with_status(0)); + + Package::new("log", "0.1.1").publish(); + Package::new("bar", "0.1.0").dep("log", "0.1.1").publish(); + + p.uncomment_root_manifest(); + assert_that(p.cargo("build"), execs().with_status(0)); +}