From 118afd00ac4c1e604ab241afb1c6cbc691ee5717 Mon Sep 17 00:00:00 2001 From: Scott Schafer Date: Wed, 14 Feb 2024 10:52:39 -0700 Subject: [PATCH] chore: Rename `CompileContext` to `BuildRunner` --- src/cargo/core/compiler/artifact.rs | 6 +- src/cargo/core/compiler/build_context/mod.rs | 6 +- .../compiler/build_context/target_info.rs | 23 +- src/cargo/core/compiler/build_plan.rs | 8 +- .../compilation_files.rs | 42 ++- .../compiler/{context => build_runner}/mod.rs | 6 +- src/cargo/core/compiler/compilation.rs | 2 +- src/cargo/core/compiler/custom_build.rs | 120 +++---- src/cargo/core/compiler/fingerprint/mod.rs | 130 +++---- .../core/compiler/job_queue/job_state.rs | 2 +- src/cargo/core/compiler/job_queue/mod.rs | 154 ++++---- src/cargo/core/compiler/mod.rs | 333 ++++++++++-------- src/cargo/core/compiler/output_depinfo.rs | 35 +- src/cargo/core/compiler/rustdoc.rs | 14 +- src/cargo/core/compiler/timings.rs | 12 +- src/cargo/lib.rs | 4 +- src/cargo/ops/cargo_compile/mod.rs | 8 +- triagebot.toml | 6 +- 18 files changed, 501 insertions(+), 410 deletions(-) rename src/cargo/core/compiler/{context => build_runner}/compilation_files.rs (96%) rename src/cargo/core/compiler/{context => build_runner}/mod.rs (99%) diff --git a/src/cargo/core/compiler/artifact.rs b/src/cargo/core/compiler/artifact.rs index 4e5792819cd..1e0e31fd240 100644 --- a/src/cargo/core/compiler/artifact.rs +++ b/src/cargo/core/compiler/artifact.rs @@ -1,7 +1,7 @@ //! Generate artifact information from unit dependencies for configuring the compiler environment. 
use crate::core::compiler::unit_graph::UnitDep; -use crate::core::compiler::{CompileContext, CrateType, FileFlavor, Unit}; +use crate::core::compiler::{BuildRunner, CrateType, FileFlavor, Unit}; use crate::core::dependency::ArtifactKind; use crate::core::{Dependency, Target, TargetKind}; use crate::CargoResult; @@ -11,12 +11,12 @@ use std::ffi::OsString; /// Return all environment variables for the given unit-dependencies /// if artifacts are present. pub fn get_env( - cx: &CompileContext<'_, '_>, + build_runner: &BuildRunner<'_, '_>, dependencies: &[UnitDep], ) -> CargoResult> { let mut env = HashMap::new(); for unit_dep in dependencies.iter().filter(|d| d.unit.artifact.is_true()) { - for artifact_path in cx + for artifact_path in build_runner .outputs(&unit_dep.unit)? .iter() .filter_map(|f| (f.flavor == FileFlavor::Normal).then(|| &f.path)) diff --git a/src/cargo/core/compiler/build_context/mod.rs b/src/cargo/core/compiler/build_context/mod.rs index b1f45a5842f..26b134acfdb 100644 --- a/src/cargo/core/compiler/build_context/mod.rs +++ b/src/cargo/core/compiler/build_context/mod.rs @@ -20,7 +20,7 @@ pub use self::target_info::{ /// before it gets started. /// /// It is intended that this is mostly static information. Stuff that mutates -/// during the build can be found in the parent [`Context`]. (I say mostly, +/// during the build can be found in the parent [`BuildRunner`]. (I say mostly, /// because this has internal caching, but nothing that should be observable /// or require &mut.) /// @@ -39,9 +39,9 @@ pub use self::target_info::{ /// since it is often too lower-level. /// Instead, [`ops::create_bcx`] is usually what you are looking for. /// -/// After a `BuildContext` is built, the next stage of building is handled in [`Context`]. +/// After a `BuildContext` is built, the next stage of building is handled in [`BuildRunner`]. 
/// -/// [`Context`]: crate::core::compiler::CompileContext +/// [`BuildRunner`]: crate::core::compiler::BuildRunner /// [`ops::create_bcx`]: crate::ops::create_bcx pub struct BuildContext<'a, 'gctx> { /// The workspace the build is for. diff --git a/src/cargo/core/compiler/build_context/target_info.rs b/src/cargo/core/compiler/build_context/target_info.rs index fd0dd0d991d..864196e1ca1 100644 --- a/src/cargo/core/compiler/build_context/target_info.rs +++ b/src/cargo/core/compiler/build_context/target_info.rs @@ -9,7 +9,7 @@ use crate::core::compiler::apply_env_config; use crate::core::compiler::{ - BuildOutput, CompileContext, CompileKind, CompileMode, CompileTarget, CrateType, + BuildOutput, BuildRunner, CompileKind, CompileMode, CompileTarget, CrateType, }; use crate::core::{Dependency, Package, Target, TargetKind, Workspace}; use crate::util::config::{GlobalContext, StringList, TargetConfig}; @@ -1033,15 +1033,23 @@ impl RustDocFingerprint { /// the rustdoc fingerprint info in order to guarantee that we won't end up with mixed /// versions of the `js/html/css` files that `rustdoc` autogenerates which do not have /// any versioning. 
- pub fn check_rustdoc_fingerprint(cx: &CompileContext<'_, '_>) -> CargoResult<()> { - if cx.bcx.gctx.cli_unstable().skip_rustdoc_fingerprint { + pub fn check_rustdoc_fingerprint(build_runner: &BuildRunner<'_, '_>) -> CargoResult<()> { + if build_runner + .bcx + .gctx + .cli_unstable() + .skip_rustdoc_fingerprint + { return Ok(()); } let actual_rustdoc_target_data = RustDocFingerprint { - rustc_vv: cx.bcx.rustc().verbose_version.clone(), + rustc_vv: build_runner.bcx.rustc().verbose_version.clone(), }; - let fingerprint_path = cx.files().host_root().join(".rustdoc_fingerprint.json"); + let fingerprint_path = build_runner + .files() + .host_root() + .join(".rustdoc_fingerprint.json"); let write_fingerprint = || -> CargoResult<()> { paths::write( &fingerprint_path, @@ -1076,10 +1084,11 @@ impl RustDocFingerprint { "fingerprint {:?} mismatch, clearing doc directories", fingerprint_path ); - cx.bcx + build_runner + .bcx .all_kinds .iter() - .map(|kind| cx.files().layout(*kind).doc()) + .map(|kind| build_runner.files().layout(*kind).doc()) .filter(|path| path.exists()) .try_for_each(|path| clean_doc(path))?; write_fingerprint()?; diff --git a/src/cargo/core/compiler/build_plan.rs b/src/cargo/core/compiler/build_plan.rs index c3eb22a1ba4..d116c07b5be 100644 --- a/src/cargo/core/compiler/build_plan.rs +++ b/src/cargo/core/compiler/build_plan.rs @@ -11,8 +11,8 @@ use std::path::{Path, PathBuf}; use serde::Serialize; -use super::context::OutputFile; -use super::{CompileContext, CompileKind, CompileMode, Unit}; +use super::build_runner::OutputFile; +use super::{BuildRunner, CompileKind, CompileMode, Unit}; use crate::core::TargetKind; use crate::util::{internal, CargoResult, GlobalContext}; use cargo_util::ProcessBuilder; @@ -107,10 +107,10 @@ impl BuildPlan { } } - pub fn add(&mut self, cx: &CompileContext<'_, '_>, unit: &Unit) -> CargoResult<()> { + pub fn add(&mut self, build_runner: &BuildRunner<'_, '_>, unit: &Unit) -> CargoResult<()> { let id = 
self.plan.invocations.len(); self.invocation_map.insert(unit.buildkey(), id); - let deps = cx + let deps = build_runner .unit_deps(unit) .iter() .map(|dep| self.invocation_map[&dep.unit.buildkey()]) diff --git a/src/cargo/core/compiler/context/compilation_files.rs b/src/cargo/core/compiler/build_runner/compilation_files.rs similarity index 96% rename from src/cargo/core/compiler/context/compilation_files.rs rename to src/cargo/core/compiler/build_runner/compilation_files.rs index 360b92cabd6..27c555a2694 100644 --- a/src/cargo/core/compiler/context/compilation_files.rs +++ b/src/cargo/core/compiler/build_runner/compilation_files.rs @@ -9,7 +9,7 @@ use std::sync::Arc; use lazycell::LazyCell; use tracing::debug; -use super::{BuildContext, CompileContext, CompileKind, FileFlavor, Layout}; +use super::{BuildContext, BuildRunner, CompileKind, FileFlavor, Layout}; use crate::core::compiler::{CompileMode, CompileTarget, CrateType, FileType, Unit}; use crate::core::{Target, TargetKind, Workspace}; use crate::util::{self, CargoResult, StableHasher}; @@ -139,13 +139,13 @@ impl OutputFile { impl<'a, 'gctx: 'a> CompilationFiles<'a, 'gctx> { pub(super) fn new( - compile_ctx: &CompileContext<'a, 'gctx>, + build_runner: &BuildRunner<'a, 'gctx>, host: Layout, target: HashMap, ) -> CompilationFiles<'a, 'gctx> { let mut metas = HashMap::new(); - for unit in &compile_ctx.bcx.roots { - metadata_of(unit, compile_ctx, &mut metas); + for unit in &build_runner.bcx.roots { + metadata_of(unit, build_runner, &mut metas); } let outputs = metas .keys() @@ -153,11 +153,11 @@ impl<'a, 'gctx: 'a> CompilationFiles<'a, 'gctx> { .map(|unit| (unit, LazyCell::new())) .collect(); CompilationFiles { - ws: compile_ctx.bcx.ws, + ws: build_runner.bcx.ws, host, target, - export_dir: compile_ctx.bcx.build_config.export_dir.clone(), - roots: compile_ctx.bcx.roots.clone(), + export_dir: build_runner.bcx.build_config.export_dir.clone(), + roots: build_runner.bcx.roots.clone(), metas, outputs, } @@ -557,14 
+557,14 @@ impl<'a, 'gctx: 'a> CompilationFiles<'a, 'gctx> { /// See [`compute_metadata`] for how a single metadata hash is computed. fn metadata_of<'a>( unit: &Unit, - compile_ctx: &CompileContext<'_, '_>, + build_runner: &BuildRunner<'_, '_>, metas: &'a mut HashMap, ) -> &'a MetaInfo { if !metas.contains_key(unit) { - let meta = compute_metadata(unit, compile_ctx, metas); + let meta = compute_metadata(unit, build_runner, metas); metas.insert(unit.clone(), meta); - for dep in compile_ctx.unit_deps(unit) { - metadata_of(&dep.unit, compile_ctx, metas); + for dep in build_runner.unit_deps(unit) { + metadata_of(&dep.unit, build_runner, metas); } } &metas[unit] @@ -573,10 +573,10 @@ fn metadata_of<'a>( /// Computes the metadata hash for the given [`Unit`]. fn compute_metadata( unit: &Unit, - compile_ctx: &CompileContext<'_, '_>, + build_runner: &BuildRunner<'_, '_>, metas: &mut HashMap, ) -> MetaInfo { - let bcx = &compile_ctx.bcx; + let bcx = &build_runner.bcx; let mut hasher = StableHasher::new(); METADATA_VERSION.hash(&mut hasher); @@ -593,10 +593,10 @@ fn compute_metadata( unit.features.hash(&mut hasher); // Mix in the target-metadata of all the dependencies of this target. - let mut deps_metadata = compile_ctx + let mut deps_metadata = build_runner .unit_deps(unit) .iter() - .map(|dep| metadata_of(&dep.unit, compile_ctx, metas).meta_hash) + .map(|dep| metadata_of(&dep.unit, build_runner, metas).meta_hash) .collect::>(); deps_metadata.sort(); deps_metadata.hash(&mut hasher); @@ -606,7 +606,7 @@ fn compute_metadata( // settings like debuginfo and whatnot. 
unit.profile.hash(&mut hasher); unit.mode.hash(&mut hasher); - compile_ctx.lto[unit].hash(&mut hasher); + build_runner.lto[unit].hash(&mut hasher); // Artifacts compiled for the host should have a different // metadata piece than those compiled for the target, so make sure @@ -622,17 +622,21 @@ fn compute_metadata( hash_rustc_version(bcx, &mut hasher); - if compile_ctx.bcx.ws.is_member(&unit.pkg) { + if build_runner.bcx.ws.is_member(&unit.pkg) { // This is primarily here for clippy. This ensures that the clippy // artifacts are separate from the `check` ones. - if let Some(path) = &compile_ctx.bcx.rustc().workspace_wrapper { + if let Some(path) = &build_runner.bcx.rustc().workspace_wrapper { path.hash(&mut hasher); } } // Seed the contents of `__CARGO_DEFAULT_LIB_METADATA` to the hasher if present. // This should be the release channel, to get a different hash for each channel. - if let Ok(ref channel) = compile_ctx.bcx.gctx.get_env("__CARGO_DEFAULT_LIB_METADATA") { + if let Ok(ref channel) = build_runner + .bcx + .gctx + .get_env("__CARGO_DEFAULT_LIB_METADATA") + { channel.hash(&mut hasher); } diff --git a/src/cargo/core/compiler/context/mod.rs b/src/cargo/core/compiler/build_runner/mod.rs similarity index 99% rename from src/cargo/core/compiler/context/mod.rs rename to src/cargo/core/compiler/build_runner/mod.rs index a267fd8f912..1be9769ac29 100644 --- a/src/cargo/core/compiler/context/mod.rs +++ b/src/cargo/core/compiler/build_runner/mod.rs @@ -1,4 +1,4 @@ -//! [`CompileContext`] is the mutable state used during the build process. +//! [`BuildRunner`] is the mutable state used during the build process. use std::collections::{BTreeSet, HashMap, HashSet}; use std::path::{Path, PathBuf}; @@ -36,7 +36,7 @@ pub use self::compilation_files::{Metadata, OutputFile}; /// throughout the entire build process. Everything is coordinated through this. 
/// /// [`BuildContext`]: crate::core::compiler::BuildContext -pub struct CompileContext<'a, 'gctx> { +pub struct BuildRunner<'a, 'gctx> { /// Mostly static information about the build task. pub bcx: &'a BuildContext<'a, 'gctx>, /// A large collection of information about the result of the entire compilation. @@ -88,7 +88,7 @@ pub struct CompileContext<'a, 'gctx> { pub failed_scrape_units: Arc>>, } -impl<'a, 'gctx> CompileContext<'a, 'gctx> { +impl<'a, 'gctx> BuildRunner<'a, 'gctx> { pub fn new(bcx: &'a BuildContext<'a, 'gctx>) -> CargoResult { // Load up the jobserver that we'll use to manage our parallelism. This // is the same as the GNU make implementation of a jobserver, and diff --git a/src/cargo/core/compiler/compilation.rs b/src/cargo/core/compiler/compilation.rs index b5b1fd72c18..f6ddf34b07d 100644 --- a/src/cargo/core/compiler/compilation.rs +++ b/src/cargo/core/compiler/compilation.rs @@ -166,7 +166,7 @@ impl<'gctx> Compilation<'gctx> { /// /// `is_primary` is true if this is a "primary package", which means it /// was selected by the user on the command-line (such as with a `-p` - /// flag), see [`crate::core::compiler::CompileContext::primary_packages`]. + /// flag), see [`crate::core::compiler::BuildRunner::primary_packages`]. /// /// `is_workspace` is true if this is a workspace member. pub fn rustc_process( diff --git a/src/cargo/core/compiler/custom_build.rs b/src/cargo/core/compiler/custom_build.rs index 4ec3ed5739b..065f012b94f 100644 --- a/src/cargo/core/compiler/custom_build.rs +++ b/src/cargo/core/compiler/custom_build.rs @@ -22,7 +22,7 @@ //! of a build script. Standard output is the chosen interprocess communication //! between Cargo and build script processes. A set of strings is defined for //! that purpose. These strings, a.k.a. instructions, are interpreted by -//! [`BuildOutput::parse`] and stored in [`CompileContext::build_script_outputs`]. +//! [`BuildOutput::parse`] and stored in [`BuildRunner::build_script_outputs`]. //! 
The entire execution work is constructed by [`build_work`]. //! //! [build script]: https://doc.rust-lang.org/nightly/cargo/reference/build-scripts.html @@ -31,9 +31,9 @@ //! [`CompileMode::RunCustomBuild`]: super::CompileMode //! [instructions]: https://doc.rust-lang.org/cargo/reference/build-scripts.html#outputs-of-the-build-script -use super::{fingerprint, CompileContext, Job, Unit, Work}; +use super::{fingerprint, BuildRunner, Job, Unit, Work}; use crate::core::compiler::artifact; -use crate::core::compiler::context::Metadata; +use crate::core::compiler::build_runner::Metadata; use crate::core::compiler::fingerprint::DirtyReason; use crate::core::compiler::job_queue::JobState; use crate::core::{profiles::ProfileRoot, PackageId, Target}; @@ -194,24 +194,24 @@ impl LinkArgTarget { } /// Prepares a `Work` that executes the target as a custom build script. -pub fn prepare(compile_ctx: &mut CompileContext<'_, '_>, unit: &Unit) -> CargoResult { +pub fn prepare(build_runner: &mut BuildRunner<'_, '_>, unit: &Unit) -> CargoResult { let _p = profile::start(format!( "build script prepare: {}/{}", unit.pkg, unit.target.name() )); - let metadata = compile_ctx.get_run_build_script_metadata(unit); - if compile_ctx + let metadata = build_runner.get_run_build_script_metadata(unit); + if build_runner .build_script_outputs .lock() .unwrap() .contains_key(metadata) { // The output is already set, thus the build script is overridden. - fingerprint::prepare_target(compile_ctx, unit, false) + fingerprint::prepare_target(build_runner, unit, false) } else { - build_work(compile_ctx, unit) + build_work(build_runner, unit) } } @@ -250,23 +250,23 @@ fn emit_build_output( /// * Create the output dir (`OUT_DIR`) for the build script output. /// * Determine if the build script needs a re-run. /// * Run the build script and store its output. 
-fn build_work(compile_ctx: &mut CompileContext<'_, '_>, unit: &Unit) -> CargoResult { +fn build_work(build_runner: &mut BuildRunner<'_, '_>, unit: &Unit) -> CargoResult { assert!(unit.mode.is_run_custom_build()); - let bcx = &compile_ctx.bcx; - let dependencies = compile_ctx.unit_deps(unit); + let bcx = &build_runner.bcx; + let dependencies = build_runner.unit_deps(unit); let build_script_unit = dependencies .iter() .find(|d| !d.unit.mode.is_run_custom_build() && d.unit.target.is_custom_build()) .map(|d| &d.unit) .expect("running a script not depending on an actual script"); - let script_dir = compile_ctx.files().build_script_dir(build_script_unit); - let script_out_dir = compile_ctx.files().build_script_out_dir(unit); - let script_run_dir = compile_ctx.files().build_script_run_dir(unit); + let script_dir = build_runner.files().build_script_dir(build_script_unit); + let script_out_dir = build_runner.files().build_script_out_dir(unit); + let script_run_dir = build_runner.files().build_script_run_dir(unit); let build_plan = bcx.build_config.build_plan; let invocation_name = unit.buildkey(); if let Some(deps) = unit.pkg.manifest().metabuild() { - prepare_metabuild(compile_ctx, build_script_unit, deps)?; + prepare_metabuild(build_runner, build_script_unit, deps)?; } // Building the command to execute @@ -280,7 +280,7 @@ fn build_work(compile_ctx: &mut CompileContext<'_, '_>, unit: &Unit) -> CargoRes // `Profiles::get_profile_run_custom_build` so that those flags get // carried over. 
let to_exec = to_exec.into_os_string(); - let mut cmd = compile_ctx.compilation.host_process(to_exec, &unit.pkg)?; + let mut cmd = build_runner.compilation.host_process(to_exec, &unit.pkg)?; let debug = unit.profile.debuginfo.is_turned_on(); cmd.env("OUT_DIR", &script_out_dir) .env("CARGO_MANIFEST_DIR", unit.pkg.root()) @@ -298,14 +298,14 @@ fn build_work(compile_ctx: &mut CompileContext<'_, '_>, unit: &Unit) -> CargoRes .env("HOST", &bcx.host_triple()) .env("RUSTC", &bcx.rustc().path) .env("RUSTDOC", &*bcx.gctx.rustdoc()?) - .inherit_jobserver(&compile_ctx.jobserver); + .inherit_jobserver(&build_runner.jobserver); // Find all artifact dependencies and make their file and containing directory discoverable using environment variables. - for (var, value) in artifact::get_env(compile_ctx, dependencies)? { + for (var, value) in artifact::get_env(build_runner, dependencies)? { cmd.env(&var, value); } - if let Some(linker) = &compile_ctx.compilation.target_linker(unit.kind) { + if let Some(linker) = &build_runner.compilation.target_linker(unit.kind) { cmd.env("RUSTC_LINKER", linker); } @@ -352,7 +352,7 @@ fn build_work(compile_ctx: &mut CompileContext<'_, '_>, unit: &Unit) -> CargoRes cmd.env_remove("RUSTC_WRAPPER"); } cmd.env_remove("RUSTC_WORKSPACE_WRAPPER"); - if compile_ctx.bcx.ws.is_member(&unit.pkg) { + if build_runner.bcx.ws.is_member(&unit.pkg) { if let Some(wrapper) = bcx.rustc().workspace_wrapper.as_ref() { cmd.env("RUSTC_WORKSPACE_WRAPPER", wrapper); } @@ -363,7 +363,7 @@ fn build_work(compile_ctx: &mut CompileContext<'_, '_>, unit: &Unit) -> CargoRes ); cmd.env_remove("RUSTFLAGS"); - if compile_ctx.bcx.ws.gctx().extra_verbose() { + if build_runner.bcx.ws.gctx().extra_verbose() { cmd.display_env_vars(); } @@ -376,7 +376,7 @@ fn build_work(compile_ctx: &mut CompileContext<'_, '_>, unit: &Unit) -> CargoRes .iter() .filter_map(|dep| { if dep.unit.mode.is_run_custom_build() { - let dep_metadata = compile_ctx.get_run_build_script_metadata(&dep.unit); + let 
dep_metadata = build_runner.get_run_build_script_metadata(&dep.unit); Some(( dep.unit.pkg.manifest().links().unwrap().to_string(), dep.unit.pkg.package_id(), @@ -389,12 +389,12 @@ fn build_work(compile_ctx: &mut CompileContext<'_, '_>, unit: &Unit) -> CargoRes .collect::>(); let library_name = unit.pkg.library().map(|t| t.crate_name()); let pkg_descr = unit.pkg.to_string(); - let build_script_outputs = Arc::clone(&compile_ctx.build_script_outputs); + let build_script_outputs = Arc::clone(&build_runner.build_script_outputs); let id = unit.pkg.package_id(); let output_file = script_run_dir.join("output"); let err_file = script_run_dir.join("stderr"); let root_output_file = script_run_dir.join("root-output"); - let host_target_root = compile_ctx.files().host_dest().to_path_buf(); + let host_target_root = build_runner.files().host_dest().to_path_buf(); let all = ( id, library_name.clone(), @@ -403,17 +403,17 @@ fn build_work(compile_ctx: &mut CompileContext<'_, '_>, unit: &Unit) -> CargoRes output_file.clone(), script_out_dir.clone(), ); - let build_scripts = compile_ctx.build_scripts.get(unit).cloned(); + let build_scripts = build_runner.build_scripts.get(unit).cloned(); let json_messages = bcx.build_config.emit_json(); let extra_verbose = bcx.gctx.extra_verbose(); - let (prev_output, prev_script_out_dir) = prev_build_output(compile_ctx, unit); - let metadata_hash = compile_ctx.get_run_build_script_metadata(unit); + let (prev_output, prev_script_out_dir) = prev_build_output(build_runner, unit); + let metadata_hash = build_runner.get_run_build_script_metadata(unit); paths::create_dir_all(&script_dir)?; paths::create_dir_all(&script_out_dir)?; - let nightly_features_allowed = compile_ctx.bcx.gctx.nightly_features_allowed; - let extra_check_cfg = compile_ctx.bcx.gctx.cli_unstable().check_cfg; + let nightly_features_allowed = build_runner.bcx.gctx.nightly_features_allowed; + let extra_check_cfg = build_runner.bcx.gctx.cli_unstable().check_cfg; let targets: Vec = 
unit.pkg.targets().to_vec(); let msrv = unit.pkg.rust_version().cloned(); // Need a separate copy for the fresh closure. @@ -421,7 +421,7 @@ fn build_work(compile_ctx: &mut CompileContext<'_, '_>, unit: &Unit) -> CargoRes let msrv_fresh = msrv.clone(); let env_profile_name = unit.profile.name.to_uppercase(); - let built_with_debuginfo = compile_ctx + let built_with_debuginfo = build_runner .bcx .unit_graph .get(unit) @@ -608,10 +608,10 @@ fn build_work(compile_ctx: &mut CompileContext<'_, '_>, unit: &Unit) -> CargoRes Ok(()) }); - let mut job = if compile_ctx.bcx.build_config.build_plan { + let mut job = if build_runner.bcx.build_config.build_plan { Job::new_dirty(Work::noop(), DirtyReason::FreshBuild) } else { - fingerprint::prepare_target(compile_ctx, unit, false)? + fingerprint::prepare_target(build_runner, unit, false)? }; if job.freshness().is_dirty() { job.before(dirty); @@ -1065,12 +1065,12 @@ impl BuildOutput { /// /// [metabuild]: https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#metabuild fn prepare_metabuild( - compile_ctx: &CompileContext<'_, '_>, + build_runner: &BuildRunner<'_, '_>, unit: &Unit, deps: &[String], ) -> CargoResult<()> { let mut output = Vec::new(); - let available_deps = compile_ctx.unit_deps(unit); + let available_deps = build_runner.unit_deps(unit); // Filter out optional dependencies, and look up the actual lib name. let meta_deps: Vec<_> = deps .iter() @@ -1090,7 +1090,7 @@ fn prepare_metabuild( let path = unit .pkg .manifest() - .metabuild_path(compile_ctx.bcx.ws.target_dir()); + .metabuild_path(build_runner.bcx.ws.target_dir()); paths::create_dir_all(path.parent().unwrap())?; paths::write_if_changed(path, &output)?; Ok(()) @@ -1114,7 +1114,7 @@ impl BuildDeps { } } -/// Computes several maps in [`CompileContext`]. +/// Computes several maps in [`BuildRunner`]. /// /// - [`build_scripts`]: A map that tracks which build scripts each package /// depends on. 
@@ -1132,15 +1132,15 @@ impl BuildDeps { /// The given set of units to this function is the initial set of /// targets/profiles which are being built. /// -/// [`build_scripts`]: CompileContext::build_scripts -/// [`build_explicit_deps`]: CompileContext::build_explicit_deps -/// [`build_script_outputs`]: CompileContext::build_script_outputs -pub fn build_map(compile_ctx: &mut CompileContext<'_, '_>) -> CargoResult<()> { +/// [`build_scripts`]: BuildRunner::build_scripts +/// [`build_explicit_deps`]: BuildRunner::build_explicit_deps +/// [`build_script_outputs`]: BuildRunner::build_script_outputs +pub fn build_map(build_runner: &mut BuildRunner<'_, '_>) -> CargoResult<()> { let mut ret = HashMap::new(); - for unit in &compile_ctx.bcx.roots { - build(&mut ret, compile_ctx, unit)?; + for unit in &build_runner.bcx.roots { + build(&mut ret, build_runner, unit)?; } - compile_ctx + build_runner .build_scripts .extend(ret.into_iter().map(|(k, v)| (k, Arc::new(v)))); return Ok(()); @@ -1149,7 +1149,7 @@ pub fn build_map(compile_ctx: &mut CompileContext<'_, '_>) -> CargoResult<()> { // memoizes all of its return values as it goes along. fn build<'a>( out: &'a mut HashMap, - compile_ctx: &mut CompileContext<'_, '_>, + build_runner: &mut BuildRunner<'_, '_>, unit: &Unit, ) -> CargoResult<&'a BuildScripts> { // Do a quick pre-flight check to see if we've already calculated the @@ -1161,13 +1161,13 @@ pub fn build_map(compile_ctx: &mut CompileContext<'_, '_>) -> CargoResult<()> { // If there is a build script override, pre-fill the build output. 
if unit.mode.is_run_custom_build() { if let Some(links) = unit.pkg.manifest().links() { - if let Some(output) = compile_ctx + if let Some(output) = build_runner .bcx .target_data .script_override(links, unit.kind) { - let metadata = compile_ctx.get_run_build_script_metadata(unit); - compile_ctx.build_script_outputs.lock().unwrap().insert( + let metadata = build_runner.get_run_build_script_metadata(unit); + build_runner.build_script_outputs.lock().unwrap().insert( unit.pkg.package_id(), metadata, output.clone(), @@ -1180,21 +1180,21 @@ pub fn build_map(compile_ctx: &mut CompileContext<'_, '_>) -> CargoResult<()> { // If a package has a build script, add itself as something to inspect for linking. if !unit.target.is_custom_build() && unit.pkg.has_custom_build() { - let script_meta = compile_ctx + let script_meta = build_runner .find_build_script_metadata(unit) .expect("has_custom_build should have RunCustomBuild"); add_to_link(&mut ret, unit.pkg.package_id(), script_meta); } if unit.mode.is_run_custom_build() { - parse_previous_explicit_deps(compile_ctx, unit); + parse_previous_explicit_deps(build_runner, unit); } // We want to invoke the compiler deterministically to be cache-friendly // to rustc invocation caching schemes, so be sure to generate the same // set of build script dependency orderings via sorting the targets that // come out of the `Context`. 
- let mut dependencies: Vec = compile_ctx + let mut dependencies: Vec = build_runner .unit_deps(unit) .iter() .map(|d| d.unit.clone()) @@ -1202,7 +1202,7 @@ pub fn build_map(compile_ctx: &mut CompileContext<'_, '_>) -> CargoResult<()> { dependencies.sort_by_key(|u| u.pkg.package_id()); for dep_unit in dependencies.iter() { - let dep_scripts = build(out, compile_ctx, dep_unit)?; + let dep_scripts = build(out, build_runner, dep_unit)?; if dep_unit.target.for_host() { ret.plugins.extend(dep_scripts.to_link.iter().cloned()); @@ -1228,12 +1228,12 @@ pub fn build_map(compile_ctx: &mut CompileContext<'_, '_>) -> CargoResult<()> { } /// Load any dependency declarations from a previous build script run. - fn parse_previous_explicit_deps(compile_ctx: &mut CompileContext<'_, '_>, unit: &Unit) { - let script_run_dir = compile_ctx.files().build_script_run_dir(unit); + fn parse_previous_explicit_deps(build_runner: &mut BuildRunner<'_, '_>, unit: &Unit) { + let script_run_dir = build_runner.files().build_script_run_dir(unit); let output_file = script_run_dir.join("output"); - let (prev_output, _) = prev_build_output(compile_ctx, unit); + let (prev_output, _) = prev_build_output(build_runner, unit); let deps = BuildDeps::new(&output_file, prev_output.as_ref()); - compile_ctx.build_explicit_deps.insert(unit.clone(), deps); + build_runner.build_explicit_deps.insert(unit.clone(), deps); } } @@ -1243,11 +1243,11 @@ pub fn build_map(compile_ctx: &mut CompileContext<'_, '_>) -> CargoResult<()> { /// Also returns the directory containing the output, typically used later in /// processing. 
fn prev_build_output( - compile_ctx: &mut CompileContext<'_, '_>, + build_runner: &mut BuildRunner<'_, '_>, unit: &Unit, ) -> (Option, PathBuf) { - let script_out_dir = compile_ctx.files().build_script_out_dir(unit); - let script_run_dir = compile_ctx.files().build_script_run_dir(unit); + let script_out_dir = build_runner.files().build_script_out_dir(unit); + let script_run_dir = build_runner.files().build_script_run_dir(unit); let root_output_file = script_run_dir.join("root-output"); let output_file = script_run_dir.join("output"); @@ -1262,8 +1262,8 @@ fn prev_build_output( &unit.pkg.to_string(), &prev_script_out_dir, &script_out_dir, - compile_ctx.bcx.gctx.cli_unstable().check_cfg, - compile_ctx.bcx.gctx.nightly_features_allowed, + build_runner.bcx.gctx.cli_unstable().check_cfg, + build_runner.bcx.gctx.nightly_features_allowed, unit.pkg.targets(), &unit.pkg.rust_version().cloned(), ) diff --git a/src/cargo/core/compiler/fingerprint/mod.rs b/src/cargo/core/compiler/fingerprint/mod.rs index 88aae10da85..2cd0a14eddb 100644 --- a/src/cargo/core/compiler/fingerprint/mod.rs +++ b/src/cargo/core/compiler/fingerprint/mod.rs @@ -135,7 +135,7 @@ //! Fingerprint based on the updated information. If the Unit fails to compile, //! the fingerprint is not updated. //! -//! Fingerprints are cached in the [`CompileContext`]. This makes computing +//! Fingerprints are cached in the [`BuildRunner`]. This makes computing //! Fingerprints faster, but also is necessary for properly updating //! dependency information. Since a Fingerprint includes the Fingerprints of //! 
all dependencies, when it is updated, by using `Arc` clones, it @@ -381,7 +381,7 @@ use crate::util::{internal, path_args, profile, StableHasher}; use crate::{GlobalContext, CARGO_ENV}; use super::custom_build::BuildDeps; -use super::{BuildContext, CompileContext, FileFlavor, Job, Unit, Work}; +use super::{BuildContext, BuildRunner, FileFlavor, Job, Unit, Work}; pub use dirty_reason::DirtyReason; @@ -400,7 +400,7 @@ pub use dirty_reason::DirtyReason; /// one unit which is very unlikely to be what you want unless you're /// exclusively talking about top-level units. pub fn prepare_target( - cx: &mut CompileContext<'_, '_>, + build_runner: &mut BuildRunner<'_, '_>, unit: &Unit, force: bool, ) -> CargoResult { @@ -409,16 +409,16 @@ pub fn prepare_target( unit.pkg.package_id(), unit.target.name() )); - let bcx = cx.bcx; - let loc = cx.files().fingerprint_file_path(unit, ""); + let bcx = build_runner.bcx; + let loc = build_runner.files().fingerprint_file_path(unit, ""); debug!("fingerprint at: {}", loc.display()); // Figure out if this unit is up to date. After calculating the fingerprint // compare it to an old version, if any, and attempt to print diagnostic // information about failed comparisons to aid in debugging. - let fingerprint = calculate(cx, unit)?; - let mtime_on_use = cx.bcx.gctx.cli_unstable().mtime_on_use; + let fingerprint = calculate(build_runner, unit)?; + let mtime_on_use = build_runner.bcx.gctx.cli_unstable().mtime_on_use; let dirty_reason = compare_old_fingerprint(unit, &loc, &*fingerprint, mtime_on_use, force); let Some(dirty_reason) = dirty_reason else { @@ -485,10 +485,12 @@ pub fn prepare_target( // build script's fingerprint after it's executed. We do this by // using the `build_script_local_fingerprints` function which returns a // thunk we can invoke on a foreign thread to calculate this. 
- let build_script_outputs = Arc::clone(&cx.build_script_outputs); - let metadata = cx.get_run_build_script_metadata(unit); - let (gen_local, _overridden) = build_script_local_fingerprints(cx, unit); - let output_path = cx.build_explicit_deps[unit].build_script_output.clone(); + let build_script_outputs = Arc::clone(&build_runner.build_script_outputs); + let metadata = build_runner.get_run_build_script_metadata(unit); + let (gen_local, _overridden) = build_script_local_fingerprints(build_runner, unit); + let output_path = build_runner.build_explicit_deps[unit] + .build_script_output + .clone(); Work::new(move |_| { let outputs = build_script_outputs.lock().unwrap(); let output = outputs @@ -1246,11 +1248,11 @@ impl hash::Hash for Fingerprint { impl DepFingerprint { fn new( - cx: &mut CompileContext<'_, '_>, + build_runner: &mut BuildRunner<'_, '_>, parent: &Unit, dep: &UnitDep, ) -> CargoResult { - let fingerprint = calculate(cx, &dep.unit)?; + let fingerprint = calculate(build_runner, &dep.unit)?; // We need to be careful about what we hash here. We have a goal of // supporting renaming a project directory and not rebuilding // everything. To do that, however, we need to make sure that the cwd @@ -1272,7 +1274,7 @@ impl DepFingerprint { name: dep.extern_crate_name, public: dep.public, fingerprint, - only_requires_rmeta: cx.only_requires_rmeta(parent, &dep.unit), + only_requires_rmeta: build_runner.only_requires_rmeta(parent, &dep.unit), }) } } @@ -1323,40 +1325,44 @@ impl StaleItem { /// /// Information like file modification time is only calculated for path /// dependencies. -fn calculate(cx: &mut CompileContext<'_, '_>, unit: &Unit) -> CargoResult> { +fn calculate(build_runner: &mut BuildRunner<'_, '_>, unit: &Unit) -> CargoResult> { // This function is slammed quite a lot, so the result is memoized. 
- if let Some(s) = cx.fingerprints.get(unit) { + if let Some(s) = build_runner.fingerprints.get(unit) { return Ok(Arc::clone(s)); } let mut fingerprint = if unit.mode.is_run_custom_build() { - calculate_run_custom_build(cx, unit)? + calculate_run_custom_build(build_runner, unit)? } else if unit.mode.is_doc_test() { panic!("doc tests do not fingerprint"); } else { - calculate_normal(cx, unit)? + calculate_normal(build_runner, unit)? }; // After we built the initial `Fingerprint` be sure to update the // `fs_status` field of it. - let target_root = target_root(cx); - let cargo_exe = cx.bcx.gctx.cargo_exe()?; + let target_root = target_root(build_runner); + let cargo_exe = build_runner.bcx.gctx.cargo_exe()?; fingerprint.check_filesystem( - &mut cx.mtime_cache, + &mut build_runner.mtime_cache, unit.pkg.root(), &target_root, cargo_exe, - cx.bcx.gctx, + build_runner.bcx.gctx, )?; let fingerprint = Arc::new(fingerprint); - cx.fingerprints + build_runner + .fingerprints .insert(unit.clone(), Arc::clone(&fingerprint)); Ok(fingerprint) } /// Calculate a fingerprint for a "normal" unit, or anything that's not a build /// script. This is an internal helper of [`calculate`], don't call directly. -fn calculate_normal(cx: &mut CompileContext<'_, '_>, unit: &Unit) -> CargoResult { +fn calculate_normal( + build_runner: &mut BuildRunner<'_, '_>, + unit: &Unit, +) -> CargoResult { let deps = { // Recursively calculate the fingerprint for all of our dependencies. // @@ -1365,22 +1371,22 @@ fn calculate_normal(cx: &mut CompileContext<'_, '_>, unit: &Unit) -> CargoResult // built. The only exception here are artifact dependencies, // which is an actual dependency that needs a recompile. // - // Create Vec since mutable cx is needed in closure. - let deps = Vec::from(cx.unit_deps(unit)); + // Create Vec since mutable build_runner is needed in closure. 
+ let deps = Vec::from(build_runner.unit_deps(unit)); let mut deps = deps .into_iter() .filter(|dep| !dep.unit.target.is_bin() || dep.unit.artifact.is_true()) - .map(|dep| DepFingerprint::new(cx, unit, &dep)) + .map(|dep| DepFingerprint::new(build_runner, unit, &dep)) .collect::>>()?; deps.sort_by(|a, b| a.pkg_id.cmp(&b.pkg_id)); deps }; // Afterwards calculate our own fingerprint information. - let target_root = target_root(cx); + let target_root = target_root(build_runner); let local = if unit.mode.is_doc() || unit.mode.is_doc_scrape() { // rustdoc does not have dep-info files. - let fingerprint = pkg_fingerprint(cx.bcx, &unit.pkg).with_context(|| { + let fingerprint = pkg_fingerprint(build_runner.bcx, &unit.pkg).with_context(|| { format!( "failed to determine package fingerprint for documenting {}", unit.pkg @@ -1388,14 +1394,14 @@ fn calculate_normal(cx: &mut CompileContext<'_, '_>, unit: &Unit) -> CargoResult })?; vec![LocalFingerprint::Precalculated(fingerprint)] } else { - let dep_info = dep_info_loc(cx, unit); + let dep_info = dep_info_loc(build_runner, unit); let dep_info = dep_info.strip_prefix(&target_root).unwrap().to_path_buf(); vec![LocalFingerprint::CheckDepInfo { dep_info }] }; // Figure out what the outputs of our unit is, and we'll be storing them // into the fingerprint as well. - let outputs = cx + let outputs = build_runner .outputs(unit)? .iter() .filter(|output| !matches!(output.flavor, FileFlavor::DebugInfo | FileFlavor::Auxiliary)) @@ -1406,32 +1412,32 @@ fn calculate_normal(cx: &mut CompileContext<'_, '_>, unit: &Unit) -> CargoResult // hashed to take up less space on disk as we just need to know when things // change. 
let extra_flags = if unit.mode.is_doc() || unit.mode.is_doc_scrape() { - cx.bcx.rustdocflags_args(unit) + build_runner.bcx.rustdocflags_args(unit) } else { - cx.bcx.rustflags_args(unit) + build_runner.bcx.rustflags_args(unit) } .to_vec(); let profile_hash = util::hash_u64(( &unit.profile, unit.mode, - cx.bcx.extra_args_for(unit), - cx.lto[unit], + build_runner.bcx.extra_args_for(unit), + build_runner.lto[unit], unit.pkg.manifest().lint_rustflags(), )); // Include metadata since it is exposed as environment variables. let m = unit.pkg.manifest().metadata(); let metadata = util::hash_u64((&m.authors, &m.description, &m.homepage, &m.repository)); let mut config = StableHasher::new(); - if let Some(linker) = cx.compilation.target_linker(unit.kind) { + if let Some(linker) = build_runner.compilation.target_linker(unit.kind) { linker.hash(&mut config); } - if unit.mode.is_doc() && cx.bcx.gctx.cli_unstable().rustdoc_map { - if let Ok(map) = cx.bcx.gctx.doc_extern_map() { + if unit.mode.is_doc() && build_runner.bcx.gctx.cli_unstable().rustdoc_map { + if let Ok(map) = build_runner.bcx.gctx.doc_extern_map() { map.hash(&mut config); } } - if let Some(allow_features) = &cx.bcx.gctx.cli_unstable().allow_features { + if let Some(allow_features) = &build_runner.bcx.gctx.cli_unstable().allow_features { allow_features.hash(&mut config); } let compile_kind = unit.kind.fingerprint_hash(); @@ -1439,17 +1445,17 @@ fn calculate_normal(cx: &mut CompileContext<'_, '_>, unit: &Unit) -> CargoResult declared_features.sort(); // to avoid useless rebuild if the user orders it's features // differently Ok(Fingerprint { - rustc: util::hash_u64(&cx.bcx.rustc().verbose_version), + rustc: util::hash_u64(&build_runner.bcx.rustc().verbose_version), target: util::hash_u64(&unit.target), profile: profile_hash, // Note that .0 is hashed here, not .1 which is the cwd. That doesn't // actually affect the output artifact so there's no need to hash it. 
- path: util::hash_u64(path_args(cx.bcx.ws, unit).0), + path: util::hash_u64(path_args(build_runner.bcx.ws, unit).0), features: format!("{:?}", unit.features), // Note we curently only populate `declared_features` when `-Zcheck-cfg` // is passed since it's the only user-facing toggle that will make this // fingerprint relevant. - declared_features: if cx.bcx.gctx.cli_unstable().check_cfg { + declared_features: if build_runner.bcx.gctx.cli_unstable().check_cfg { format!("{declared_features:?}") } else { "".to_string() @@ -1469,7 +1475,7 @@ fn calculate_normal(cx: &mut CompileContext<'_, '_>, unit: &Unit) -> CargoResult /// Calculate a fingerprint for an "execute a build script" unit. This is an /// internal helper of [`calculate`], don't call directly. fn calculate_run_custom_build( - cx: &mut CompileContext<'_, '_>, + build_runner: &mut BuildRunner<'_, '_>, unit: &Unit, ) -> CargoResult { assert!(unit.mode.is_run_custom_build()); @@ -1479,8 +1485,8 @@ fn calculate_run_custom_build( // the build script this means we'll be watching files and env vars. // Otherwise if we haven't previously executed it we'll just start watching // the whole crate. - let (gen_local, overridden) = build_script_local_fingerprints(cx, unit); - let deps = &cx.build_explicit_deps[unit]; + let (gen_local, overridden) = build_script_local_fingerprints(build_runner, unit); + let deps = &build_runner.build_explicit_deps[unit]; let local = (gen_local)( deps, Some(&|| { @@ -1491,7 +1497,7 @@ By default, if your project contains a build script, cargo scans all files in it to determine whether a rebuild is needed. If you don't expect to access the file, specify `rerun-if-changed` in your build script. 
See https://doc.rust-lang.org/cargo/reference/build-scripts.html#rerun-if-changed for more information."; - pkg_fingerprint(cx.bcx, &unit.pkg).map_err(|err| { + pkg_fingerprint(build_runner.bcx, &unit.pkg).map_err(|err| { let mut message = format!("failed to determine package fingerprint for build script for {}", unit.pkg); if err.root_cause().is::() { message = format!("{}\n{}", message, IO_ERR_MESSAGE) @@ -1511,16 +1517,16 @@ See https://doc.rust-lang.org/cargo/reference/build-scripts.html#rerun-if-change // Overridden build scripts don't need to track deps. vec![] } else { - // Create Vec since mutable cx is needed in closure. - let deps = Vec::from(cx.unit_deps(unit)); + // Create Vec since mutable build_runner is needed in closure. + let deps = Vec::from(build_runner.unit_deps(unit)); deps.into_iter() - .map(|dep| DepFingerprint::new(cx, unit, &dep)) + .map(|dep| DepFingerprint::new(build_runner, unit, &dep)) .collect::>>()? }; Ok(Fingerprint { local: Mutex::new(local), - rustc: util::hash_u64(&cx.bcx.rustc().verbose_version), + rustc: util::hash_u64(&build_runner.bcx.rustc().verbose_version), deps, outputs: if overridden { Vec::new() } else { vec![output] }, @@ -1568,7 +1574,7 @@ See https://doc.rust-lang.org/cargo/reference/build-scripts.html#rerun-if-change /// /// [`RunCustomBuild`]: crate::core::compiler::CompileMode::RunCustomBuild fn build_script_local_fingerprints( - cx: &mut CompileContext<'_, '_>, + build_runner: &mut BuildRunner<'_, '_>, unit: &Unit, ) -> ( Box< @@ -1583,7 +1589,7 @@ fn build_script_local_fingerprints( assert!(unit.mode.is_run_custom_build()); // First up, if this build script is entirely overridden, then we just // return the hash of what we overrode it with. This is the easy case! 
- if let Some(fingerprint) = build_script_override_fingerprint(cx, unit) { + if let Some(fingerprint) = build_script_override_fingerprint(build_runner, unit) { debug!("override local fingerprints deps {}", unit.pkg); return ( Box::new( @@ -1603,7 +1609,7 @@ fn build_script_local_fingerprints( // longstanding bug, in Cargo. Recent refactorings just made it painfully // obvious. let pkg_root = unit.pkg.root().to_path_buf(); - let target_dir = target_root(cx); + let target_dir = target_root(build_runner); let calculate = move |deps: &BuildDeps, pkg_fingerprint: Option<&dyn Fn() -> CargoResult>| { if deps.rerun_if_changed.is_empty() && deps.rerun_if_env_changed.is_empty() { @@ -1643,13 +1649,13 @@ fn build_script_local_fingerprints( /// Create a [`LocalFingerprint`] for an overridden build script. /// Returns None if it is not overridden. fn build_script_override_fingerprint( - cx: &mut CompileContext<'_, '_>, + build_runner: &mut BuildRunner<'_, '_>, unit: &Unit, ) -> Option { // Build script output is only populated at this stage when it is // overridden. - let build_script_outputs = cx.build_script_outputs.lock().unwrap(); - let metadata = cx.get_run_build_script_metadata(unit); + let build_script_outputs = build_runner.build_script_outputs.lock().unwrap(); + let metadata = build_runner.get_run_build_script_metadata(unit); // Returns None if it is not overridden. let output = build_script_outputs.get(metadata)?; let s = format!( @@ -1719,8 +1725,8 @@ fn write_fingerprint(loc: &Path, fingerprint: &Fingerprint) -> CargoResult<()> { } /// Prepare for work when a package starts to build -pub fn prepare_init(cx: &mut CompileContext<'_, '_>, unit: &Unit) -> CargoResult<()> { - let new1 = cx.files().fingerprint_dir(unit); +pub fn prepare_init(build_runner: &mut BuildRunner<'_, '_>, unit: &Unit) -> CargoResult<()> { + let new1 = build_runner.files().fingerprint_dir(unit); // Doc tests have no output, thus no fingerprint. 
if !new1.exists() && !unit.mode.is_doc_test() { @@ -1732,14 +1738,14 @@ pub fn prepare_init(cx: &mut CompileContext<'_, '_>, unit: &Unit) -> CargoResult /// Returns the location that the dep-info file will show up at /// for the [`Unit`] specified. -pub fn dep_info_loc(cx: &mut CompileContext<'_, '_>, unit: &Unit) -> PathBuf { - cx.files().fingerprint_file_path(unit, "dep-") +pub fn dep_info_loc(build_runner: &mut BuildRunner<'_, '_>, unit: &Unit) -> PathBuf { + build_runner.files().fingerprint_file_path(unit, "dep-") } /// Returns an absolute path that target directory. /// All paths are rewritten to be relative to this. -fn target_root(cx: &CompileContext<'_, '_>) -> PathBuf { - cx.bcx.ws.target_dir().into_path_unlocked() +fn target_root(build_runner: &BuildRunner<'_, '_>) -> PathBuf { + build_runner.bcx.ws.target_dir().into_path_unlocked() } /// Reads the value from the old fingerprint hash file and compare. diff --git a/src/cargo/core/compiler/job_queue/job_state.rs b/src/cargo/core/compiler/job_queue/job_state.rs index bdcccd580be..fe3a79adb98 100644 --- a/src/cargo/core/compiler/job_queue/job_state.rs +++ b/src/cargo/core/compiler/job_queue/job_state.rs @@ -4,7 +4,7 @@ use std::{cell::Cell, marker, sync::Arc}; use cargo_util::ProcessBuilder; -use crate::core::compiler::context::OutputFile; +use crate::core::compiler::build_runner::OutputFile; use crate::core::compiler::future_incompat::FutureBreakageItem; use crate::util::Queue; use crate::CargoResult; diff --git a/src/cargo/core/compiler/job_queue/mod.rs b/src/cargo/core/compiler/job_queue/mod.rs index bf29b4fd5a5..dc34c92a7ae 100644 --- a/src/cargo/core/compiler/job_queue/mod.rs +++ b/src/cargo/core/compiler/job_queue/mod.rs @@ -131,9 +131,9 @@ use tracing::{debug, trace}; pub use self::job::Freshness::{self, Dirty, Fresh}; pub use self::job::{Job, Work}; pub use self::job_state::JobState; -use super::context::OutputFile; +use super::build_runner::OutputFile; use super::timings::Timings; -use 
super::{BuildContext, BuildPlan, CompileContext, CompileMode, Unit}; +use super::{BuildContext, BuildPlan, BuildRunner, CompileMode, Unit}; use crate::core::compiler::descriptive_pkg_name; use crate::core::compiler::future_incompat::{ self, FutureBreakageItem, FutureIncompatReportPackage, @@ -385,11 +385,11 @@ impl<'gctx> JobQueue<'gctx> { pub fn enqueue( &mut self, - cx: &CompileContext<'_, 'gctx>, + build_runner: &BuildRunner<'_, 'gctx>, unit: &Unit, job: Job, ) -> CargoResult<()> { - let dependencies = cx.unit_deps(unit); + let dependencies = build_runner.unit_deps(unit); let mut queue_deps = dependencies .iter() .filter(|dep| { @@ -403,9 +403,9 @@ impl<'gctx> JobQueue<'gctx> { .map(|dep| { // Handle the case here where our `unit -> dep` dependency may // only require the metadata, not the full compilation to - // finish. Use the tables in `cx` to figure out what kind - // of artifact is associated with this dependency. - let artifact = if cx.only_requires_rmeta(unit, &dep.unit) { + // finish. Use the tables in `build_runner` to figure out what + // kind of artifact is associated with this dependency. + let artifact = if build_runner.only_requires_rmeta(unit, &dep.unit) { Artifact::Metadata } else { Artifact::All @@ -437,17 +437,17 @@ impl<'gctx> JobQueue<'gctx> { // transitively contains the `Metadata` edge. 
if unit.requires_upstream_objects() { for dep in dependencies { - depend_on_deps_of_deps(cx, &mut queue_deps, dep.unit.clone()); + depend_on_deps_of_deps(build_runner, &mut queue_deps, dep.unit.clone()); } fn depend_on_deps_of_deps( - cx: &CompileContext<'_, '_>, + build_runner: &BuildRunner<'_, '_>, deps: &mut HashMap, unit: Unit, ) { - for dep in cx.unit_deps(&unit) { + for dep in build_runner.unit_deps(&unit) { if deps.insert(dep.unit.clone(), Artifact::All).is_none() { - depend_on_deps_of_deps(cx, deps, dep.unit.clone()); + depend_on_deps_of_deps(build_runner, deps, dep.unit.clone()); } } } @@ -469,13 +469,14 @@ impl<'gctx> JobQueue<'gctx> { /// possible along each dependency chain. pub fn execute( mut self, - cx: &mut CompileContext<'_, '_>, + build_runner: &mut BuildRunner<'_, '_>, plan: &mut BuildPlan, ) -> CargoResult<()> { let _p = profile::start("executing the job graph"); self.queue.queue_finished(); - let progress = Progress::with_style("Building", ProgressStyle::Ratio, cx.bcx.gctx); + let progress = + Progress::with_style("Building", ProgressStyle::Ratio, build_runner.bcx.gctx); let state = DrainState { total_units: self.queue.len(), queue: self.queue, @@ -484,7 +485,7 @@ impl<'gctx> JobQueue<'gctx> { // typical messages. If you change this, please update the test // caching_large_output, too. 
messages: Arc::new(Queue::new(100)), - diag_dedupe: DiagDedupe::new(cx.bcx.gctx), + diag_dedupe: DiagDedupe::new(build_runner.bcx.gctx), warning_count: HashMap::new(), active: HashMap::new(), compiled: HashSet::new(), @@ -496,14 +497,17 @@ impl<'gctx> JobQueue<'gctx> { timings: self.timings, tokens: Vec::new(), pending_queue: Vec::new(), - print: DiagnosticPrinter::new(cx.bcx.gctx, &cx.bcx.rustc().workspace_wrapper), + print: DiagnosticPrinter::new( + build_runner.bcx.gctx, + &build_runner.bcx.rustc().workspace_wrapper, + ), finished: 0, per_package_future_incompat_reports: Vec::new(), }; // Create a helper thread for acquiring jobserver tokens let messages = state.messages.clone(); - let helper = cx + let helper = build_runner .jobserver .clone() .into_helper_thread(move |token| { @@ -517,7 +521,7 @@ impl<'gctx> JobQueue<'gctx> { // It is important that this uses `push` instead of `push_bounded` for // now. If someone wants to fix this to be bounded, the `drop` // implementation needs to be changed to avoid possible deadlocks. 
- let _diagnostic_server = cx + let _diagnostic_server = build_runner .bcx .build_config .rustfix_diagnostic_server @@ -525,19 +529,19 @@ impl<'gctx> JobQueue<'gctx> { .take() .map(move |srv| srv.start(move |msg| messages.push(Message::FixDiagnostic(msg)))); - thread::scope( - move |scope| match state.drain_the_queue(cx, plan, scope, &helper) { + thread::scope(move |scope| { + match state.drain_the_queue(build_runner, plan, scope, &helper) { Some(err) => Err(err), None => Ok(()), - }, - ) + } + }) } } impl<'gctx> DrainState<'gctx> { fn spawn_work_if_possible<'s>( &mut self, - cx: &mut CompileContext<'_, '_>, + build_runner: &mut BuildRunner<'_, '_>, jobserver_helper: &HelperThread, scope: &'s Scope<'s, '_>, ) -> CargoResult<()> { @@ -569,14 +573,19 @@ impl<'gctx> DrainState<'gctx> { while self.has_extra_tokens() && !self.pending_queue.is_empty() { let (unit, job, _) = self.pending_queue.pop().unwrap(); *self.counts.get_mut(&unit.pkg.package_id()).unwrap() -= 1; - if !cx.bcx.build_config.build_plan { + if !build_runner.bcx.build_config.build_plan { // Print out some nice progress information. // NOTE: An error here will drop the job without starting it. // That should be OK, since we want to exit as soon as // possible during an error. 
- self.note_working_on(cx.bcx.gctx, cx.bcx.ws.root(), &unit, job.freshness())?; + self.note_working_on( + build_runner.bcx.gctx, + build_runner.bcx.ws.root(), + &unit, + job.freshness(), + )?; } - self.run(&unit, job, cx, scope); + self.run(&unit, job, build_runner, scope); } Ok(()) @@ -588,23 +597,27 @@ impl<'gctx> DrainState<'gctx> { fn handle_event( &mut self, - cx: &mut CompileContext<'_, '_>, + build_runner: &mut BuildRunner<'_, '_>, plan: &mut BuildPlan, event: Message, ) -> Result<(), ErrorToHandle> { match event { Message::Run(id, cmd) => { - cx.bcx.gctx.shell().verbose(|c| c.status("Running", &cmd))?; + build_runner + .bcx + .gctx + .shell() + .verbose(|c| c.status("Running", &cmd))?; self.timings.unit_start(id, self.active[&id].clone()); } Message::BuildPlanMsg(module_name, cmd, filenames) => { plan.update(&module_name, &cmd, &filenames)?; } Message::Stdout(out) => { - writeln!(cx.bcx.gctx.shell().out(), "{}", out)?; + writeln!(build_runner.bcx.gctx.shell().out(), "{}", out)?; } Message::Stderr(err) => { - let mut shell = cx.bcx.gctx.shell(); + let mut shell = build_runner.bcx.gctx.shell(); shell.print_ansi_stderr(err.as_bytes())?; shell.err().write_all(b"\n")?; } @@ -625,7 +638,7 @@ impl<'gctx> DrainState<'gctx> { } } Message::Warning { id, warning } => { - cx.bcx.gctx.shell().warn(warning)?; + build_runner.bcx.gctx.shell().warn(warning)?; self.bump_warning_count(id, true, false); } Message::WarningCount { @@ -646,9 +659,9 @@ impl<'gctx> DrainState<'gctx> { trace!("end: {:?}", id); self.finished += 1; self.report_warning_count( - cx.bcx.gctx, + build_runner.bcx.gctx, id, - &cx.bcx.rustc().workspace_wrapper, + &build_runner.bcx.rustc().workspace_wrapper, ); self.active.remove(&id).unwrap() } @@ -661,18 +674,19 @@ impl<'gctx> DrainState<'gctx> { }; debug!("end ({:?}): {:?}", unit, result); match result { - Ok(()) => self.finish(id, &unit, artifact, cx)?, - Err(_) if cx.bcx.unit_can_fail_for_docscraping(&unit) => { - cx.failed_scrape_units + Ok(()) => 
self.finish(id, &unit, artifact, build_runner)?, + Err(_) if build_runner.bcx.unit_can_fail_for_docscraping(&unit) => { + build_runner + .failed_scrape_units .lock() .unwrap() - .insert(cx.files().metadata(&unit)); + .insert(build_runner.files().metadata(&unit)); self.queue.finish(&unit, &artifact); } Err(error) => { let msg = "The following warnings were emitted during compilation:"; - self.emit_warnings(Some(msg), &unit, cx)?; - self.back_compat_notice(cx, &unit)?; + self.emit_warnings(Some(msg), &unit, build_runner)?; + self.back_compat_notice(build_runner, &unit)?; return Err(ErrorToHandle { error, print_always: true, @@ -726,7 +740,7 @@ impl<'gctx> DrainState<'gctx> { /// because it is important for the loop to carefully handle errors. fn drain_the_queue<'s>( mut self, - cx: &mut CompileContext<'_, '_>, + build_runner: &mut BuildRunner<'_, '_>, plan: &mut BuildPlan, scope: &'s Scope<'s, '_>, jobserver_helper: &HelperThread, @@ -749,9 +763,9 @@ impl<'gctx> DrainState<'gctx> { // must be handled in such a way that the loop is still allowed to // drain event messages. loop { - if errors.count == 0 || cx.bcx.build_config.keep_going { - if let Err(e) = self.spawn_work_if_possible(cx, jobserver_helper, scope) { - self.handle_error(&mut cx.bcx.gctx.shell(), &mut errors, e); + if errors.count == 0 || build_runner.bcx.build_config.keep_going { + if let Err(e) = self.spawn_work_if_possible(build_runner, jobserver_helper, scope) { + self.handle_error(&mut build_runner.bcx.gctx.shell(), &mut errors, e); } } @@ -767,14 +781,14 @@ impl<'gctx> DrainState<'gctx> { // don't actually use, and if this happens just relinquish it back // to the jobserver itself. 
for event in self.wait_for_events() { - if let Err(event_err) = self.handle_event(cx, plan, event) { - self.handle_error(&mut cx.bcx.gctx.shell(), &mut errors, event_err); + if let Err(event_err) = self.handle_event(build_runner, plan, event) { + self.handle_error(&mut build_runner.bcx.gctx.shell(), &mut errors, event_err); } } } self.progress.clear(); - let profile_name = cx.bcx.build_config.requested_profile; + let profile_name = build_runner.bcx.build_config.requested_profile; // NOTE: this may be a bit inaccurate, since this may not display the // profile for what was actually built. Profile overrides can change // these settings, and in some cases different targets are built with @@ -782,7 +796,7 @@ impl<'gctx> DrainState<'gctx> { // list of Units built, and maybe display a list of the different // profiles used. However, to keep it simple and compatible with old // behavior, we just display what the base profile is. - let profile = cx.bcx.profiles.base_profile(); + let profile = build_runner.bcx.profiles.base_profile(); let mut opt_type = String::from(if profile.opt_level.as_str() == "0" { "unoptimized" } else { @@ -792,12 +806,12 @@ impl<'gctx> DrainState<'gctx> { opt_type += " + debuginfo"; } - let time_elapsed = util::elapsed(cx.bcx.gctx.creation_time().elapsed()); - if let Err(e) = self.timings.finished(cx, &errors.to_error()) { - self.handle_error(&mut cx.bcx.gctx.shell(), &mut errors, e); + let time_elapsed = util::elapsed(build_runner.bcx.gctx.creation_time().elapsed()); + if let Err(e) = self.timings.finished(build_runner, &errors.to_error()) { + self.handle_error(&mut build_runner.bcx.gctx.shell(), &mut errors, e); } - if cx.bcx.build_config.emit_json() { - let mut shell = cx.bcx.gctx.shell(); + if build_runner.bcx.build_config.emit_json() { + let mut shell = build_runner.bcx.gctx.shell(); let msg = machine_message::BuildFinished { success: errors.count == 0, } @@ -812,7 +826,7 @@ impl<'gctx> DrainState<'gctx> { // `display_error` inside 
`handle_error`. Some(anyhow::Error::new(AlreadyPrintedError::new(error))) } else if self.queue.is_empty() && self.pending_queue.is_empty() { - let profile_link = cx.bcx.gctx.shell().err_hyperlink( + let profile_link = build_runner.bcx.gctx.shell().err_hyperlink( "https://doc.rust-lang.org/cargo/reference/profiles.html#default-profiles", ); let message = format!( @@ -820,11 +834,11 @@ impl<'gctx> DrainState<'gctx> { profile_link.open(), profile_link.close() ); - if !cx.bcx.build_config.build_plan { + if !build_runner.bcx.build_config.build_plan { // It doesn't really matter if this fails. - let _ = cx.bcx.gctx.shell().status("Finished", message); + let _ = build_runner.bcx.gctx.shell().status("Finished", message); future_incompat::save_and_display_report( - cx.bcx, + build_runner.bcx, &self.per_package_future_incompat_reports, ); } @@ -917,7 +931,7 @@ impl<'gctx> DrainState<'gctx> { &mut self, unit: &Unit, job: Job, - cx: &CompileContext<'_, '_>, + build_runner: &BuildRunner<'_, '_>, scope: &'s Scope<'s, '_>, ) { let id = JobId(self.next_id); @@ -929,7 +943,7 @@ impl<'gctx> DrainState<'gctx> { let messages = self.messages.clone(); let is_fresh = job.freshness().is_fresh(); - let rmeta_required = cx.rmeta_required(unit); + let rmeta_required = build_runner.rmeta_required(unit); let doit = move |diag_dedupe| { let state = JobState::new(id, messages, diag_dedupe, rmeta_required); @@ -954,13 +968,13 @@ impl<'gctx> DrainState<'gctx> { &mut self, msg: Option<&str>, unit: &Unit, - cx: &mut CompileContext<'_, '_>, + build_runner: &mut BuildRunner<'_, '_>, ) -> CargoResult<()> { - let outputs = cx.build_script_outputs.lock().unwrap(); - let Some(metadata) = cx.find_build_script_metadata(unit) else { + let outputs = build_runner.build_script_outputs.lock().unwrap(); + let Some(metadata) = build_runner.find_build_script_metadata(unit) else { return Ok(()); }; - let bcx = &mut cx.bcx; + let bcx = &mut build_runner.bcx; if let Some(output) = outputs.get(metadata) { if 
!output.warnings.is_empty() { if let Some(msg) = msg { @@ -1084,10 +1098,10 @@ impl<'gctx> DrainState<'gctx> { id: JobId, unit: &Unit, artifact: Artifact, - cx: &mut CompileContext<'_, '_>, + build_runner: &mut BuildRunner<'_, '_>, ) -> CargoResult<()> { - if unit.mode.is_run_custom_build() && unit.show_warnings(cx.bcx.gctx) { - self.emit_warnings(None, unit, cx)?; + if unit.mode.is_run_custom_build() && unit.show_warnings(build_runner.bcx.gctx) { + self.emit_warnings(None, unit, build_runner)?; } let unlocked = self.queue.finish(unit, &artifact); match artifact { @@ -1161,16 +1175,20 @@ impl<'gctx> DrainState<'gctx> { Ok(()) } - fn back_compat_notice(&self, cx: &CompileContext<'_, '_>, unit: &Unit) -> CargoResult<()> { + fn back_compat_notice( + &self, + build_runner: &BuildRunner<'_, '_>, + unit: &Unit, + ) -> CargoResult<()> { if unit.pkg.name() != "diesel" || unit.pkg.version() >= &Version::new(1, 4, 8) - || cx.bcx.ws.resolve_behavior() == ResolveBehavior::V1 + || build_runner.bcx.ws.resolve_behavior() == ResolveBehavior::V1 || !unit.pkg.package_id().source_id().is_registry() || !unit.features.is_empty() { return Ok(()); } - if !cx + if !build_runner .bcx .unit_graph .keys() @@ -1178,7 +1196,7 @@ impl<'gctx> DrainState<'gctx> { { return Ok(()); } - cx.bcx.gctx.shell().note( + build_runner.bcx.gctx.shell().note( "\ This error may be due to an interaction between diesel and Cargo's new feature resolver. Try updating to diesel 1.4.8 to fix this error. diff --git a/src/cargo/core/compiler/mod.rs b/src/cargo/core/compiler/mod.rs index 04befdea853..8aed78f315a 100644 --- a/src/cargo/core/compiler/mod.rs +++ b/src/cargo/core/compiler/mod.rs @@ -13,7 +13,7 @@ //! //! * [`BuildContext`] is a static context containing all information you need //! before a build gets started. -//! * [`CompileContext`] is the center of the world, coordinating a running build and +//! * [`BuildRunner`] is the center of the world, coordinating a running build and //! 
collecting information from it. //! * [`custom_build`] is the home of build script executions and output parsing. //! * [`fingerprint`] not only defines but also executes a set of rules to @@ -35,9 +35,9 @@ pub mod artifact; mod build_config; pub(crate) mod build_context; mod build_plan; +pub(crate) mod build_runner; mod compilation; mod compile_kind; -pub(crate) mod context; mod crate_type; mod custom_build; pub(crate) mod fingerprint; @@ -72,9 +72,9 @@ pub use self::build_context::{ BuildContext, FileFlavor, FileType, RustDocFingerprint, RustcTargetData, TargetInfo, }; use self::build_plan::BuildPlan; +pub use self::build_runner::{BuildRunner, Metadata}; pub use self::compilation::{Compilation, Doctest, UnitOutput}; pub use self::compile_kind::{CompileKind, CompileTarget}; -pub use self::context::{CompileContext, Metadata}; pub use self::crate_type::CrateType; pub use self::custom_build::LinkArgTarget; pub use self::custom_build::{BuildOutput, BuildScriptOutputs, BuildScripts}; @@ -108,7 +108,7 @@ pub trait Executor: Send + Sync + 'static { /// Called after a rustc process invocation is prepared up-front for a given /// unit of work (may still be modified for runtime-known dependencies, when /// the work is actually executed). - fn init(&self, _cx: &CompileContext<'_, '_>, _unit: &Unit) {} + fn init(&self, _build_runner: &BuildRunner<'_, '_>, _unit: &Unit) {} /// In case of an `Err`, Cargo will not continue with the build process for /// this package. @@ -159,41 +159,44 @@ impl Executor for DefaultExecutor { /// next as part of [`JobQueue::execute`] function which will run everything /// in order with proper parallelism. 
fn compile<'gctx>( - cx: &mut CompileContext<'_, 'gctx>, + build_runner: &mut BuildRunner<'_, 'gctx>, jobs: &mut JobQueue<'gctx>, plan: &mut BuildPlan, unit: &Unit, exec: &Arc, force_rebuild: bool, ) -> CargoResult<()> { - let bcx = cx.bcx; + let bcx = build_runner.bcx; let build_plan = bcx.build_config.build_plan; - if !cx.compiled.insert(unit.clone()) { + if !build_runner.compiled.insert(unit.clone()) { return Ok(()); } // Build up the work to be done to compile this unit, enqueuing it once // we've got everything constructed. let p = profile::start(format!("preparing: {}/{}", unit.pkg, unit.target.name())); - fingerprint::prepare_init(cx, unit)?; + fingerprint::prepare_init(build_runner, unit)?; let job = if unit.mode.is_run_custom_build() { - custom_build::prepare(cx, unit)? + custom_build::prepare(build_runner, unit)? } else if unit.mode.is_doc_test() { // We run these targets later, so this is just a no-op for now. Job::new_fresh() } else if build_plan { - Job::new_dirty(rustc(cx, unit, &exec.clone())?, DirtyReason::FreshBuild) + Job::new_dirty( + rustc(build_runner, unit, &exec.clone())?, + DirtyReason::FreshBuild, + ) } else { let force = exec.force_rebuild(unit) || force_rebuild; - let mut job = fingerprint::prepare_target(cx, unit, force)?; + let mut job = fingerprint::prepare_target(build_runner, unit, force)?; job.before(if job.freshness().is_dirty() { let work = if unit.mode.is_doc() || unit.mode.is_doc_scrape() { - rustdoc(cx, unit)? + rustdoc(build_runner, unit)? } else { - rustc(cx, unit, exec)? + rustc(build_runner, unit, exec)? }; - work.then(link_targets(cx, unit, false)?) + work.then(link_targets(build_runner, unit, false)?) 
} else { // We always replay the output cache, // since it might contain future-incompat-report messages @@ -201,26 +204,26 @@ fn compile<'gctx>( unit.pkg.package_id(), PathBuf::from(unit.pkg.manifest_path()), &unit.target, - cx.files().message_cache_path(unit), - cx.bcx.build_config.message_format, + build_runner.files().message_cache_path(unit), + build_runner.bcx.build_config.message_format, unit.show_warnings(bcx.gctx), ); // Need to link targets on both the dirty and fresh. - work.then(link_targets(cx, unit, true)?) + work.then(link_targets(build_runner, unit, true)?) }); job }; - jobs.enqueue(cx, unit, job)?; + jobs.enqueue(build_runner, unit, job)?; drop(p); // Be sure to compile all dependencies of this target as well. - let deps = Vec::from(cx.unit_deps(unit)); // Create vec due to mutable borrow. + let deps = Vec::from(build_runner.unit_deps(unit)); // Create vec due to mutable borrow. for dep in deps { - compile(cx, jobs, plan, &dep.unit, exec, false)?; + compile(build_runner, jobs, plan, &dep.unit, exec, false)?; } if build_plan { - plan.add(cx, unit)?; + plan.add(build_runner, unit)?; } Ok(()) @@ -229,13 +232,13 @@ fn compile<'gctx>( /// Generates the warning message used when fallible doc-scrape units fail, /// either for rustdoc or rustc. fn make_failed_scrape_diagnostic( - cx: &CompileContext<'_, '_>, + build_runner: &BuildRunner<'_, '_>, unit: &Unit, top_line: impl Display, ) -> String { let manifest_path = unit.pkg.manifest_path(); let relative_manifest_path = manifest_path - .strip_prefix(cx.bcx.ws.root()) + .strip_prefix(build_runner.bcx.ws.root()) .unwrap_or(&manifest_path); format!( @@ -249,68 +252,71 @@ fn make_failed_scrape_diagnostic( /// Creates a unit of work invoking `rustc` for building the `unit`. 
fn rustc( - cx: &mut CompileContext<'_, '_>, + build_runner: &mut BuildRunner<'_, '_>, unit: &Unit, exec: &Arc, ) -> CargoResult { - let mut rustc = prepare_rustc(cx, unit)?; - let build_plan = cx.bcx.build_config.build_plan; + let mut rustc = prepare_rustc(build_runner, unit)?; + let build_plan = build_runner.bcx.build_config.build_plan; let name = unit.pkg.name(); let buildkey = unit.buildkey(); - let outputs = cx.outputs(unit)?; - let root = cx.files().out_dir(unit); + let outputs = build_runner.outputs(unit)?; + let root = build_runner.files().out_dir(unit); // Prepare the native lib state (extra `-L` and `-l` flags). - let build_script_outputs = Arc::clone(&cx.build_script_outputs); + let build_script_outputs = Arc::clone(&build_runner.build_script_outputs); let current_id = unit.pkg.package_id(); let manifest_path = PathBuf::from(unit.pkg.manifest_path()); - let build_scripts = cx.build_scripts.get(unit).cloned(); + let build_scripts = build_runner.build_scripts.get(unit).cloned(); // If we are a binary and the package also contains a library, then we // don't pass the `-l` flags. 
let pass_l_flag = unit.target.is_lib() || !unit.pkg.targets().iter().any(|t| t.is_lib()); - let dep_info_name = if cx.files().use_extra_filename(unit) { + let dep_info_name = if build_runner.files().use_extra_filename(unit) { format!( "{}-{}.d", unit.target.crate_name(), - cx.files().metadata(unit) + build_runner.files().metadata(unit) ) } else { format!("{}.d", unit.target.crate_name()) }; let rustc_dep_info_loc = root.join(dep_info_name); - let dep_info_loc = fingerprint::dep_info_loc(cx, unit); + let dep_info_loc = fingerprint::dep_info_loc(build_runner, unit); - let mut output_options = OutputOptions::new(cx, unit); + let mut output_options = OutputOptions::new(build_runner, unit); let package_id = unit.pkg.package_id(); let target = Target::clone(&unit.target); let mode = unit.mode; - exec.init(cx, unit); + exec.init(build_runner, unit); let exec = exec.clone(); - let root_output = cx.files().host_dest().to_path_buf(); - let target_dir = cx.bcx.ws.target_dir().into_path_unlocked(); + let root_output = build_runner.files().host_dest().to_path_buf(); + let target_dir = build_runner.bcx.ws.target_dir().into_path_unlocked(); let pkg_root = unit.pkg.root().to_path_buf(); let cwd = rustc .get_cwd() - .unwrap_or_else(|| cx.bcx.gctx.cwd()) + .unwrap_or_else(|| build_runner.bcx.gctx.cwd()) .to_path_buf(); - let fingerprint_dir = cx.files().fingerprint_dir(unit); - let script_metadata = cx.find_build_script_metadata(unit); + let fingerprint_dir = build_runner.files().fingerprint_dir(unit); + let script_metadata = build_runner.find_build_script_metadata(unit); let is_local = unit.is_local(); let artifact = unit.artifact; - let hide_diagnostics_for_scrape_unit = cx.bcx.unit_can_fail_for_docscraping(unit) - && !matches!(cx.bcx.gctx.shell().verbosity(), Verbosity::Verbose); + let hide_diagnostics_for_scrape_unit = build_runner.bcx.unit_can_fail_for_docscraping(unit) + && !matches!( + build_runner.bcx.gctx.shell().verbosity(), + Verbosity::Verbose + ); let 
failed_scrape_diagnostic = hide_diagnostics_for_scrape_unit.then(|| { // If this unit is needed for doc-scraping, then we generate a diagnostic that // describes the set of reverse-dependencies that cause the unit to be needed. let target_desc = unit.target.description_named(); - let mut for_scrape_units = cx + let mut for_scrape_units = build_runner .bcx .scrape_units_have_dep_on(unit) .into_iter() @@ -318,7 +324,7 @@ fn rustc( .collect::>(); for_scrape_units.sort(); let for_scrape_units = for_scrape_units.join(", "); - make_failed_scrape_diagnostic(cx, unit, format_args!("failed to check {target_desc} in package `{name}` as a prerequisite for scraping examples from: {for_scrape_units}")) + make_failed_scrape_diagnostic(build_runner, unit, format_args!("failed to check {target_desc} in package `{name}` as a prerequisite for scraping examples from: {for_scrape_units}")) }); if hide_diagnostics_for_scrape_unit { output_options.show_diagnostics = false; @@ -516,21 +522,28 @@ fn verbose_if_simple_exit_code(err: Error) -> Error { /// Link the compiled target (often of form `foo-{metadata_hash}`) to the /// final target. This must happen during both "Fresh" and "Compile". 
-fn link_targets(cx: &mut CompileContext<'_, '_>, unit: &Unit, fresh: bool) -> CargoResult { - let bcx = cx.bcx; - let outputs = cx.outputs(unit)?; - let export_dir = cx.files().export_dir(); +fn link_targets( + build_runner: &mut BuildRunner<'_, '_>, + unit: &Unit, + fresh: bool, +) -> CargoResult { + let bcx = build_runner.bcx; + let outputs = build_runner.outputs(unit)?; + let export_dir = build_runner.files().export_dir(); let package_id = unit.pkg.package_id(); let manifest_path = PathBuf::from(unit.pkg.manifest_path()); let profile = unit.profile.clone(); let unit_mode = unit.mode; let features = unit.features.iter().map(|s| s.to_string()).collect(); let json_messages = bcx.build_config.emit_json(); - let executable = cx.get_executable(unit)?; + let executable = build_runner.get_executable(unit)?; let mut target = Target::clone(&unit.target); if let TargetSourcePath::Metabuild = target.src_path() { // Give it something to serialize. - let path = unit.pkg.manifest().metabuild_path(cx.bcx.ws.target_dir()); + let path = unit + .pkg + .manifest() + .metabuild_path(build_runner.bcx.ws.target_dir()); target.set_src_path(TargetSourcePath::Path(path)); } @@ -659,20 +672,20 @@ where /// This builds a static view of the invocation. Flags depending on the /// completion of other units will be added later in runtime, such as flags /// from build scripts. 
-fn prepare_rustc(cx: &CompileContext<'_, '_>, unit: &Unit) -> CargoResult { - let is_primary = cx.is_primary_package(unit); - let is_workspace = cx.bcx.ws.is_member(&unit.pkg); +fn prepare_rustc(build_runner: &BuildRunner<'_, '_>, unit: &Unit) -> CargoResult { + let is_primary = build_runner.is_primary_package(unit); + let is_workspace = build_runner.bcx.ws.is_member(&unit.pkg); - let mut base = cx + let mut base = build_runner .compilation .rustc_process(unit, is_primary, is_workspace)?; - build_base_args(cx, &mut base, unit)?; + build_base_args(build_runner, &mut base, unit)?; - base.inherit_jobserver(&cx.jobserver); - build_deps_args(&mut base, cx, unit)?; - add_cap_lints(cx.bcx, unit, &mut base); - base.args(cx.bcx.rustflags_args(unit)); - if cx.bcx.gctx.cli_unstable().binary_dep_depinfo { + base.inherit_jobserver(&build_runner.jobserver); + build_deps_args(&mut base, build_runner, unit)?; + add_cap_lints(build_runner.bcx, unit, &mut base); + base.args(build_runner.bcx.rustflags_args(unit)); + if build_runner.bcx.gctx.cli_unstable().binary_dep_depinfo { base.arg("-Z").arg("binary-dep-depinfo"); } @@ -681,10 +694,10 @@ fn prepare_rustc(cx: &CompileContext<'_, '_>, unit: &Unit) -> CargoResult, unit: &Unit) -> CargoResult, unit: &Unit) -> CargoResult { - let bcx = cx.bcx; +fn prepare_rustdoc(build_runner: &BuildRunner<'_, '_>, unit: &Unit) -> CargoResult { + let bcx = build_runner.bcx; // script_metadata is not needed here, it is only for tests. 
- let mut rustdoc = cx.compilation.rustdoc_process(unit, None)?; - rustdoc.inherit_jobserver(&cx.jobserver); + let mut rustdoc = build_runner.compilation.rustdoc_process(unit, None)?; + rustdoc.inherit_jobserver(&build_runner.jobserver); let crate_name = unit.target.crate_name(); rustdoc.arg("--crate-name").arg(&crate_name); add_path_args(bcx.ws, unit, &mut rustdoc); @@ -717,24 +730,24 @@ fn prepare_rustdoc(cx: &CompileContext<'_, '_>, unit: &Unit) -> CargoResult, unit: &Unit) -> CargoResult, unit: &Unit) -> CargoResult, unit: &Unit) -> CargoResult, unit: &Unit) -> CargoResult { - let mut rustdoc = prepare_rustdoc(cx, unit)?; +fn rustdoc(build_runner: &mut BuildRunner<'_, '_>, unit: &Unit) -> CargoResult { + let mut rustdoc = prepare_rustdoc(build_runner, unit)?; let crate_name = unit.target.crate_name(); - let doc_dir = cx.files().out_dir(unit); + let doc_dir = build_runner.files().out_dir(unit); // Create the documentation directory ahead of time as rustdoc currently has // a bug where concurrent invocations will race to create this directory if // it doesn't already exist. 
@@ -790,30 +803,39 @@ fn rustdoc(cx: &mut CompileContext<'_, '_>, unit: &Unit) -> CargoResult { let target_desc = unit.target.description_named(); let name = unit.pkg.name(); - let build_script_outputs = Arc::clone(&cx.build_script_outputs); + let build_script_outputs = Arc::clone(&build_runner.build_script_outputs); let package_id = unit.pkg.package_id(); let manifest_path = PathBuf::from(unit.pkg.manifest_path()); let target = Target::clone(&unit.target); - let mut output_options = OutputOptions::new(cx, unit); - let script_metadata = cx.find_build_script_metadata(unit); - let scrape_outputs = if should_include_scrape_units(cx.bcx, unit) { + let mut output_options = OutputOptions::new(build_runner, unit); + let script_metadata = build_runner.find_build_script_metadata(unit); + let scrape_outputs = if should_include_scrape_units(build_runner.bcx, unit) { Some( - cx.bcx + build_runner + .bcx .scrape_units .iter() - .map(|unit| Ok((cx.files().metadata(unit), scrape_output_path(cx, unit)?))) + .map(|unit| { + Ok(( + build_runner.files().metadata(unit), + scrape_output_path(build_runner, unit)?, + )) + }) .collect::>>()?, ) } else { None }; - let failed_scrape_units = Arc::clone(&cx.failed_scrape_units); - let hide_diagnostics_for_scrape_unit = cx.bcx.unit_can_fail_for_docscraping(unit) - && !matches!(cx.bcx.gctx.shell().verbosity(), Verbosity::Verbose); + let failed_scrape_units = Arc::clone(&build_runner.failed_scrape_units); + let hide_diagnostics_for_scrape_unit = build_runner.bcx.unit_can_fail_for_docscraping(unit) + && !matches!( + build_runner.bcx.gctx.shell().verbosity(), + Verbosity::Verbose + ); let failed_scrape_diagnostic = hide_diagnostics_for_scrape_unit.then(|| { make_failed_scrape_diagnostic( - cx, + build_runner, unit, format_args!("failed to scan {target_desc} in package `{name}` for example code usage"), ) @@ -915,8 +937,8 @@ fn add_cap_lints(bcx: &BuildContext<'_, '_>, unit: &Unit, cmd: &mut ProcessBuild /// Forwards [`-Zallow-features`] if it is 
set for cargo. /// /// [`-Zallow-features`]: https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#allow-features -fn add_allow_features(cx: &CompileContext<'_, '_>, cmd: &mut ProcessBuilder) { - if let Some(allow) = &cx.bcx.gctx.cli_unstable().allow_features { +fn add_allow_features(build_runner: &BuildRunner<'_, '_>, cmd: &mut ProcessBuilder) { + if let Some(allow) = &build_runner.bcx.gctx.cli_unstable().allow_features { use std::fmt::Write; let mut arg = String::from("-Zallow-features="); for f in allow { @@ -936,11 +958,11 @@ fn add_allow_features(cx: &CompileContext<'_, '_>, cmd: &mut ProcessBuilder) { /// which Cargo will extract and display to the user. /// /// [`--error-format`]: https://doc.rust-lang.org/nightly/rustc/command-line-arguments.html#--error-format-control-how-errors-are-produced -fn add_error_format_and_color(cx: &CompileContext<'_, '_>, cmd: &mut ProcessBuilder) { +fn add_error_format_and_color(build_runner: &BuildRunner<'_, '_>, cmd: &mut ProcessBuilder) { cmd.arg("--error-format=json"); let mut json = String::from("--json=diagnostic-rendered-ansi,artifacts,future-incompat"); - match cx.bcx.build_config.message_format { + match build_runner.bcx.build_config.message_format { MessageFormat::Short | MessageFormat::Json { short: true, .. } => { json.push_str(",diagnostic-short"); } @@ -948,7 +970,7 @@ fn add_error_format_and_color(cx: &CompileContext<'_, '_>, cmd: &mut ProcessBuil } cmd.arg(json); - let gctx = cx.bcx.gctx; + let gctx = build_runner.bcx.gctx; if let Some(width) = gctx.shell().err_width().diagnostic_terminal_width() { cmd.arg(format!("--diagnostic-width={width}")); } @@ -956,13 +978,13 @@ fn add_error_format_and_color(cx: &CompileContext<'_, '_>, cmd: &mut ProcessBuil /// Adds essential rustc flags and environment variables to the command to execute. 
fn build_base_args( - cx: &CompileContext<'_, '_>, + build_runner: &BuildRunner<'_, '_>, cmd: &mut ProcessBuilder, unit: &Unit, ) -> CargoResult<()> { assert!(!unit.mode.is_run_custom_build()); - let bcx = cx.bcx; + let bcx = build_runner.bcx; let Profile { ref opt_level, codegen_backend, @@ -987,8 +1009,8 @@ fn build_base_args( edition.cmd_edition_arg(cmd); add_path_args(bcx.ws, unit, cmd); - add_error_format_and_color(cx, cmd); - add_allow_features(cx, cmd); + add_error_format_and_color(build_runner, cmd); + add_allow_features(build_runner, cmd); let mut contains_dy_lib = false; if !test { @@ -1010,7 +1032,7 @@ fn build_base_args( } let prefer_dynamic = (unit.target.for_host() && !unit.target.is_custom_build()) - || (contains_dy_lib && !cx.is_primary_package(unit)); + || (contains_dy_lib && !build_runner.is_primary_package(unit)); if prefer_dynamic { cmd.arg("-C").arg("prefer-dynamic"); } @@ -1023,7 +1045,7 @@ fn build_base_args( cmd.arg("-C").arg(format!("panic={}", panic)); } - cmd.args(<o_args(cx, unit)); + cmd.args(<o_args(build_runner, unit)); if let Some(backend) = codegen_backend { cmd.arg("-Z").arg(&format!("codegen-backend={}", backend)); @@ -1044,7 +1066,7 @@ fn build_base_args( // Windows the only stable valid value for split-debuginfo is "packed", // while on Linux "unpacked" is also stable. 
if let Some(split) = split_debuginfo { - if cx + if build_runner .bcx .target_data .info(unit.kind) @@ -1056,12 +1078,12 @@ fn build_base_args( } if let Some(trim_paths) = trim_paths { - trim_paths_args(cmd, cx, unit, &trim_paths)?; + trim_paths_args(cmd, build_runner, unit, &trim_paths)?; } cmd.args(unit.pkg.manifest().lint_rustflags()); cmd.args(&profile_rustflags); - if let Some(args) = cx.bcx.extra_args_for(unit) { + if let Some(args) = build_runner.bcx.extra_args_for(unit) { cmd.args(args); } @@ -1104,11 +1126,11 @@ fn build_base_args( } cmd.args(&features_args(unit)); - cmd.args(&check_cfg_args(cx, unit)); + cmd.args(&check_cfg_args(build_runner, unit)); - let meta = cx.files().metadata(unit); + let meta = build_runner.files().metadata(unit); cmd.arg("-C").arg(&format!("metadata={}", meta)); - if cx.files().use_extra_filename(unit) { + if build_runner.files().use_extra_filename(unit) { cmd.arg("-C").arg(&format!("extra-filename=-{}", meta)); } @@ -1116,7 +1138,8 @@ fn build_base_args( cmd.arg("-C").arg("rpath"); } - cmd.arg("--out-dir").arg(&cx.files().out_dir(unit)); + cmd.arg("--out-dir") + .arg(&build_runner.files().out_dir(unit)); fn opt(cmd: &mut ProcessBuilder, key: &str, prefix: &str, val: Option<&OsStr>) { if let Some(val) = val { @@ -1134,13 +1157,18 @@ fn build_base_args( cmd, "-C", "linker=", - cx.compilation + build_runner + .compilation .target_linker(unit.kind) .as_ref() .map(|s| s.as_ref()), ); if incremental { - let dir = cx.files().layout(unit.kind).incremental().as_os_str(); + let dir = build_runner + .files() + .layout(unit.kind) + .incremental() + .as_os_str(); opt(cmd, "-C", "incremental=", Some(dir)); } @@ -1169,9 +1197,11 @@ fn build_base_args( .iter() .filter(|target| target.is_bin()) { - let exe_path = cx - .files() - .bin_link_for_target(bin_target, unit.kind, cx.bcx)?; + let exe_path = build_runner.files().bin_link_for_target( + bin_target, + unit.kind, + build_runner.bcx, + )?; let name = bin_target .binary_filename() 
.unwrap_or(bin_target.name().to_string()); @@ -1201,7 +1231,7 @@ fn features_args(unit: &Unit) -> Vec { /// [`-Ztrim-paths`]: https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#profile-trim-paths-option fn trim_paths_args( cmd: &mut ProcessBuilder, - cx: &CompileContext<'_, '_>, + build_runner: &BuildRunner<'_, '_>, unit: &Unit, trim_paths: &TomlTrimPaths, ) -> CargoResult<()> { @@ -1214,7 +1244,7 @@ fn trim_paths_args( cmd.arg(format!("-Zremap-path-scope={trim_paths}")); let sysroot_remap = { - let sysroot = &cx.bcx.target_data.info(unit.kind).sysroot; + let sysroot = &build_runner.bcx.target_data.info(unit.kind).sysroot; let mut remap = OsString::from("--remap-path-prefix="); remap.push(sysroot); remap.push("/lib/rustlib/src/rust"); // See also `detect_sysroot_src_path()`. @@ -1222,16 +1252,16 @@ fn trim_paths_args( remap.push("/rustc/"); // This remap logic aligns with rustc: // - if let Some(commit_hash) = cx.bcx.rustc().commit_hash.as_ref() { + if let Some(commit_hash) = build_runner.bcx.rustc().commit_hash.as_ref() { remap.push(commit_hash); } else { - remap.push(cx.bcx.rustc().version.to_string()); + remap.push(build_runner.bcx.rustc().version.to_string()); } remap }; let package_remap = { let pkg_root = unit.pkg.root(); - let ws_root = cx.bcx.ws.root(); + let ws_root = build_runner.bcx.ws.root(); let mut remap = OsString::from("--remap-path-prefix="); // Remap rules for dependencies // @@ -1242,10 +1272,22 @@ fn trim_paths_args( // * otherwise remapped to `-`. 
let source_id = unit.pkg.package_id().source_id(); if source_id.is_git() { - remap.push(cx.bcx.gctx.git_checkouts_path().as_path_unlocked()); + remap.push( + build_runner + .bcx + .gctx + .git_checkouts_path() + .as_path_unlocked(), + ); remap.push("="); } else if source_id.is_registry() { - remap.push(cx.bcx.gctx.registry_source_path().as_path_unlocked()); + remap.push( + build_runner + .bcx + .gctx + .registry_source_path() + .as_path_unlocked(), + ); remap.push("="); } else if pkg_root.strip_prefix(ws_root).is_ok() { remap.push(ws_root); @@ -1272,8 +1314,8 @@ fn trim_paths_args( /// See unstable feature [`check-cfg`]. /// /// [`check-cfg`]: https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#check-cfg -fn check_cfg_args(cx: &CompileContext<'_, '_>, unit: &Unit) -> Vec { - if cx.bcx.gctx.cli_unstable().check_cfg { +fn check_cfg_args(build_runner: &BuildRunner<'_, '_>, unit: &Unit) -> Vec { + if build_runner.bcx.gctx.cli_unstable().check_cfg { // The routine below generates the --check-cfg arguments. Our goals here are to // enable the checking of conditionals and pass the list of declared features. // @@ -1323,13 +1365,13 @@ fn check_cfg_args(cx: &CompileContext<'_, '_>, unit: &Unit) -> Vec { } /// Adds LTO related codegen flags. 
-fn lto_args(cx: &CompileContext<'_, '_>, unit: &Unit) -> Vec { +fn lto_args(build_runner: &BuildRunner<'_, '_>, unit: &Unit) -> Vec { let mut result = Vec::new(); let mut push = |arg: &str| { result.push(OsString::from("-C")); result.push(OsString::from(arg)); }; - match cx.lto[unit] { + match build_runner.lto[unit] { lto::Lto::Run(None) => push("lto"), lto::Lto::Run(Some(s)) => push(&format!("lto={}", s)), lto::Lto::Off => { @@ -1350,13 +1392,13 @@ fn lto_args(cx: &CompileContext<'_, '_>, unit: &Unit) -> Vec { /// [`--extern`]: https://doc.rust-lang.org/nightly/rustc/command-line-arguments.html#--extern-specify-where-an-external-library-is-located fn build_deps_args( cmd: &mut ProcessBuilder, - cx: &CompileContext<'_, '_>, + build_runner: &BuildRunner<'_, '_>, unit: &Unit, ) -> CargoResult<()> { - let bcx = cx.bcx; + let bcx = build_runner.bcx; cmd.arg("-L").arg(&{ let mut deps = OsString::from("dependency="); - deps.push(cx.files().deps_dir(unit)); + deps.push(build_runner.files().deps_dir(unit)); deps }); @@ -1365,12 +1407,12 @@ fn build_deps_args( if !unit.kind.is_host() { cmd.arg("-L").arg(&{ let mut deps = OsString::from("dependency="); - deps.push(cx.files().host_deps()); + deps.push(build_runner.files().host_deps()); deps }); } - let deps = cx.unit_deps(unit); + let deps = build_runner.unit_deps(unit); // If there is not one linkable target but should, rustc fails later // on if there is an `extern crate` for it. This may turn into a hard @@ -1398,15 +1440,18 @@ fn build_deps_args( for dep in deps { if dep.unit.mode.is_run_custom_build() { - cmd.env("OUT_DIR", &cx.files().build_script_out_dir(&dep.unit)); + cmd.env( + "OUT_DIR", + &build_runner.files().build_script_out_dir(&dep.unit), + ); } } - for arg in extern_args(cx, unit, &mut unstable_opts)? { + for arg in extern_args(build_runner, unit, &mut unstable_opts)? { cmd.arg(arg); } - for (var, env) in artifact::get_env(cx, deps)? { + for (var, env) in artifact::get_env(build_runner, deps)? 
{ cmd.env(&var, env); } @@ -1449,12 +1494,12 @@ fn add_custom_flags( /// Generates a list of `--extern` arguments. pub fn extern_args( - cx: &CompileContext<'_, '_>, + build_runner: &BuildRunner<'_, '_>, unit: &Unit, unstable_opts: &mut bool, ) -> CargoResult> { let mut result = Vec::new(); - let deps = cx.unit_deps(unit); + let deps = build_runner.unit_deps(unit); // Closure to add one dependency to `result`. let mut link_to = @@ -1491,9 +1536,9 @@ pub fn extern_args( result.push(value); }; - let outputs = cx.outputs(&dep.unit)?; + let outputs = build_runner.outputs(&dep.unit)?; - if cx.only_requires_rmeta(unit, &dep.unit) || dep.unit.mode.is_check() { + if build_runner.only_requires_rmeta(unit, &dep.unit) || dep.unit.mode.is_check() { // Example: rlib dependency for an rlib, rmeta is all that is required. let output = outputs .iter() @@ -1557,13 +1602,13 @@ struct OutputOptions { } impl OutputOptions { - fn new(cx: &CompileContext<'_, '_>, unit: &Unit) -> OutputOptions { - let path = cx.files().message_cache_path(unit); + fn new(build_runner: &BuildRunner<'_, '_>, unit: &Unit) -> OutputOptions { + let path = build_runner.files().message_cache_path(unit); // Remove old cache, ignore ENOENT, which is the common case. drop(fs::remove_file(&path)); let cache_cell = Some((path, LazyCell::new())); OutputOptions { - format: cx.bcx.build_config.message_format, + format: build_runner.bcx.build_config.message_format, cache_cell, show_diagnostics: true, warnings_seen: 0, @@ -1884,7 +1929,9 @@ fn should_include_scrape_units(bcx: &BuildContext<'_, '_>, unit: &Unit) -> bool } /// Gets the file path of function call information output from `rustdoc`. 
-fn scrape_output_path(cx: &CompileContext<'_, '_>, unit: &Unit) -> CargoResult { +fn scrape_output_path(build_runner: &BuildRunner<'_, '_>, unit: &Unit) -> CargoResult { assert!(unit.mode.is_doc() || unit.mode.is_doc_scrape()); - cx.outputs(unit).map(|outputs| outputs[0].path.clone()) + build_runner + .outputs(unit) + .map(|outputs| outputs[0].path.clone()) } diff --git a/src/cargo/core/compiler/output_depinfo.rs b/src/cargo/core/compiler/output_depinfo.rs index ee00e6e225f..11d320c6a8c 100644 --- a/src/cargo/core/compiler/output_depinfo.rs +++ b/src/cargo/core/compiler/output_depinfo.rs @@ -6,7 +6,7 @@ use std::collections::{BTreeSet, HashSet}; use std::io::{BufWriter, Write}; use std::path::{Path, PathBuf}; -use super::{fingerprint, CompileContext, FileFlavor, Unit}; +use super::{fingerprint, BuildRunner, FileFlavor, Unit}; use crate::util::{internal, CargoResult}; use cargo_util::paths; use tracing::debug; @@ -43,7 +43,7 @@ fn render_filename>(path: P, basedir: Option<&str>) -> CargoResul /// [fingerprint dep-info]: super::fingerprint#fingerprint-dep-info-files fn add_deps_for_unit( deps: &mut BTreeSet, - cx: &mut CompileContext<'_, '_>, + build_runner: &mut BuildRunner<'_, '_>, unit: &Unit, visited: &mut HashSet, ) -> CargoResult<()> { @@ -55,10 +55,12 @@ fn add_deps_for_unit( // generate a dep info file, so we just keep on going below if !unit.mode.is_run_custom_build() { // Add dependencies from rustc dep-info output (stored in fingerprint directory) - let dep_info_loc = fingerprint::dep_info_loc(cx, unit); - if let Some(paths) = - fingerprint::parse_dep_info(unit.pkg.root(), cx.files().host_root(), &dep_info_loc)? - { + let dep_info_loc = fingerprint::dep_info_loc(build_runner, unit); + if let Some(paths) = fingerprint::parse_dep_info( + unit.pkg.root(), + build_runner.files().host_root(), + &dep_info_loc, + )? 
{ for path in paths.files { deps.insert(path); } @@ -73,8 +75,13 @@ fn add_deps_for_unit( } // Add rerun-if-changed dependencies - if let Some(metadata) = cx.find_build_script_metadata(unit) { - if let Some(output) = cx.build_script_outputs.lock().unwrap().get(metadata) { + if let Some(metadata) = build_runner.find_build_script_metadata(unit) { + if let Some(output) = build_runner + .build_script_outputs + .lock() + .unwrap() + .get(metadata) + { for path in &output.rerun_if_changed { // The paths we have saved from the unit are of arbitrary relativeness and may be // relative to the crate root of the dependency. @@ -85,10 +92,10 @@ fn add_deps_for_unit( } // Recursively traverse all transitive dependencies - let unit_deps = Vec::from(cx.unit_deps(unit)); // Create vec due to mutable borrow. + let unit_deps = Vec::from(build_runner.unit_deps(unit)); // Create vec due to mutable borrow. for dep in unit_deps { if dep.unit.is_local() { - add_deps_for_unit(deps, cx, &dep.unit, visited)?; + add_deps_for_unit(deps, build_runner, &dep.unit, visited)?; } } Ok(()) @@ -111,11 +118,11 @@ fn add_deps_for_unit( /// `Cargo.lock`. /// /// [`fingerprint`]: super::fingerprint#dep-info-files -pub fn output_depinfo(cx: &mut CompileContext<'_, '_>, unit: &Unit) -> CargoResult<()> { - let bcx = cx.bcx; +pub fn output_depinfo(build_runner: &mut BuildRunner<'_, '_>, unit: &Unit) -> CargoResult<()> { + let bcx = build_runner.bcx; let mut deps = BTreeSet::new(); let mut visited = HashSet::new(); - let success = add_deps_for_unit(&mut deps, cx, unit, &mut visited).is_ok(); + let success = add_deps_for_unit(&mut deps, build_runner, unit, &mut visited).is_ok(); let basedir_string; let basedir = match bcx.gctx.build_config()?.dep_info_basedir.clone() { Some(value) => { @@ -134,7 +141,7 @@ pub fn output_depinfo(cx: &mut CompileContext<'_, '_>, unit: &Unit) -> CargoResu .map(|f| render_filename(f, basedir)) .collect::>>()?; - for output in cx + for output in build_runner .outputs(unit)? 
.iter() .filter(|o| !matches!(o.flavor, FileFlavor::DebugInfo | FileFlavor::Auxiliary)) diff --git a/src/cargo/core/compiler/rustdoc.rs b/src/cargo/core/compiler/rustdoc.rs index 060658e22ca..be6093fd553 100644 --- a/src/cargo/core/compiler/rustdoc.rs +++ b/src/cargo/core/compiler/rustdoc.rs @@ -1,6 +1,6 @@ //! Utilities for building with rustdoc. -use crate::core::compiler::context::CompileContext; +use crate::core::compiler::build_runner::BuildRunner; use crate::core::compiler::unit::Unit; use crate::core::compiler::{BuildContext, CompileKind}; use crate::sources::CRATES_IO_REGISTRY; @@ -108,11 +108,11 @@ impl hash::Hash for RustdocExternMap { /// [1]: https://doc.rust-lang.org/nightly/rustdoc/unstable-features.html#--extern-html-root-url-control-how-rustdoc-links-to-non-local-crates /// [2]: https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#rustdoc-map pub fn add_root_urls( - compiler_ctx: &CompileContext<'_, '_>, + build_runner: &BuildRunner<'_, '_>, unit: &Unit, rustdoc: &mut ProcessBuilder, ) -> CargoResult<()> { - let gctx = compiler_ctx.bcx.gctx; + let gctx = build_runner.bcx.gctx; if !gctx.cli_unstable().rustdoc_map { tracing::debug!("`doc.extern-map` ignored, requires -Zrustdoc-map flag"); return Ok(()); @@ -135,7 +135,7 @@ pub fn add_root_urls( } }) .collect(); - for dep in compiler_ctx.unit_deps(unit) { + for dep in build_runner.unit_deps(unit) { if dep.unit.target.is_linkable() && !dep.unit.mode.is_doc() { for (registry, location) in &map.registries { let sid = dep.unit.pkg.package_id().source_id(); @@ -172,7 +172,7 @@ pub fn add_root_urls( let std_url = match &map.std { None | Some(RustdocExternMode::Remote) => None, Some(RustdocExternMode::Local) => { - let sysroot = &compiler_ctx.bcx.target_data.info(CompileKind::Host).sysroot; + let sysroot = &build_runner.bcx.target_data.info(CompileKind::Host).sysroot; let html_root = sysroot.join("share").join("doc").join("rust").join("html"); if html_root.exists() { let url = 
Url::from_file_path(&html_root).map_err(|()| { @@ -211,11 +211,11 @@ pub fn add_root_urls( /// /// [1]: https://doc.rust-lang.org/nightly/rustdoc/unstable-features.html?highlight=output-format#-w--output-format-output-format pub fn add_output_format( - compiler_ctx: &CompileContext<'_, '_>, + build_runner: &BuildRunner<'_, '_>, unit: &Unit, rustdoc: &mut ProcessBuilder, ) -> CargoResult<()> { - let gctx = compiler_ctx.bcx.gctx; + let gctx = build_runner.bcx.gctx; if !gctx.cli_unstable().unstable_options { tracing::debug!("`unstable-options` is ignored, required -Zunstable-options flag"); return Ok(()); diff --git a/src/cargo/core/compiler/timings.rs b/src/cargo/core/compiler/timings.rs index 788c290f4b2..b4b23b6590b 100644 --- a/src/cargo/core/compiler/timings.rs +++ b/src/cargo/core/compiler/timings.rs @@ -4,7 +4,7 @@ //! long it takes for different units to compile. use super::{CompileMode, Unit}; use crate::core::compiler::job_queue::JobId; -use crate::core::compiler::{BuildContext, CompileContext, TimingOutput}; +use crate::core::compiler::{BuildContext, BuildRunner, TimingOutput}; use crate::core::PackageId; use crate::util::cpu::State; use crate::util::machine_message::{self, Message}; @@ -288,7 +288,7 @@ impl<'gctx> Timings<'gctx> { /// Call this when all units are finished. pub fn finished( &mut self, - cx: &CompileContext<'_, '_>, + build_runner: &BuildRunner<'_, '_>, error: &Option, ) -> CargoResult<()> { if !self.enabled { @@ -298,7 +298,7 @@ impl<'gctx> Timings<'gctx> { self.unit_times .sort_unstable_by(|a, b| a.start.partial_cmp(&b.start).unwrap()); if self.report_html { - self.report_html(cx, error) + self.report_html(build_runner, error) .with_context(|| "failed to save timing report")?; } Ok(()) @@ -307,12 +307,12 @@ impl<'gctx> Timings<'gctx> { /// Save HTML report to disk. 
fn report_html( &self, - cx: &CompileContext<'_, '_>, + build_runner: &BuildRunner<'_, '_>, error: &Option<anyhow::Error>, ) -> CargoResult<()> { let duration = self.start.elapsed().as_secs_f64(); let timestamp = self.start_str.replace(&['-', ':'][..], ""); - let timings_path = cx.files().host_root().join("cargo-timings"); + let timings_path = build_runner.files().host_root().join("cargo-timings"); paths::create_dir_all(&timings_path)?; let filename = timings_path.join(format!("cargo-timing-{}.html", timestamp)); let mut f = BufWriter::new(paths::create(&filename)?); @@ -322,7 +322,7 @@ impl<'gctx> Timings<'gctx> { .map(|(name, _targets)| name.as_str()) .collect(); f.write_all(HTML_TMPL.replace("{ROOTS}", &roots.join(", ")).as_bytes())?; - self.write_summary_table(&mut f, duration, cx.bcx, error)?; + self.write_summary_table(&mut f, duration, build_runner.bcx, error)?; f.write_all(HTML_CANVAS.as_bytes())?; self.write_unit_table(&mut f)?; // It helps with pixel alignment to use whole numbers. diff --git a/src/cargo/lib.rs b/src/cargo/lib.rs index 09d4ca1782b..61fac71540a 100644 --- a/src/cargo/lib.rs +++ b/src/cargo/lib.rs @@ -30,8 +30,8 @@ //! The [`BuildContext`][core::compiler::BuildContext] is the result of the "front end" of the //! build process. This contains the graph of work to perform and any settings necessary for //! `rustc`. After this is built, the next stage of building is handled in -//! [`CompileContext`][core::compiler::CompileContext]. -//! - [`core::compiler::context`]: +//! [`BuildRunner`][core::compiler::BuildRunner]. +//! - [`core::compiler::build_runner`]: //! The `Context` is the mutable state used during the build process. This //! is the core of the build process, and everything is coordinated through //! this. diff --git a/src/cargo/ops/cargo_compile/mod.rs b/src/cargo/ops/cargo_compile/mod.rs index f0962b075dd..c7a84a087d9 100644 --- a/src/cargo/ops/cargo_compile/mod.rs +++ b/src/cargo/ops/cargo_compile/mod.rs @@ -13,7 +13,7 @@ //! from the resolver.
See also [`unit_dependencies`]. //! 5. Construct the [`BuildContext`] with all of the information collected so //! far. This is the end of the "front end" of compilation. -//! 6. Create a [`CompileContext`] which coordinates the compilation process +//! 6. Create a [`BuildRunner`] which coordinates the compilation process //! and will perform the following steps: //! 1. Prepare the `target` directory (see [`Layout`]). //! 2. Create a [`JobQueue`]. The queue checks the @@ -42,7 +42,7 @@ use std::sync::Arc; use crate::core::compiler::unit_dependencies::build_unit_dependencies; use crate::core::compiler::unit_graph::{self, UnitDep, UnitGraph}; use crate::core::compiler::{standard_lib, CrateType, TargetInfo}; -use crate::core::compiler::{BuildConfig, BuildContext, Compilation, CompileContext}; +use crate::core::compiler::{BuildConfig, BuildContext, BuildRunner, Compilation}; use crate::core::compiler::{CompileKind, CompileMode, CompileTarget, RustcTargetData, Unit}; use crate::core::compiler::{DefaultExecutor, Executor, UnitInterner}; use crate::core::profiles::Profiles; @@ -155,8 +155,8 @@ pub fn compile_ws<'a>( } crate::core::gc::auto_gc(bcx.gctx); let _p = profile::start("compiling"); - let cx = CompileContext::new(&bcx)?; - cx.compile(exec) + let build_runner = BuildRunner::new(&bcx)?; + build_runner.compile(exec) } /// Executes `rustc --print `. 
diff --git a/triagebot.toml b/triagebot.toml index c9a1ead503a..881f8c73b1d 100644 --- a/triagebot.toml +++ b/triagebot.toml @@ -37,8 +37,8 @@ warn_non_default_branch = true "*" = ["@ehuss", "@epage", "@weihanglo"] -[review-submitted] -reviewed_label = "S-waiting-on-author" +[review-submitted] +reviewed_label = "S-waiting-on-author" review_labels = ["S-waiting-on-review"] [review-requested] @@ -153,7 +153,7 @@ trigger_files = ["crates/crates-io/", "src/cargo/ops/registry/"] [autolabel."A-layout"] trigger_files = [ - "src/cargo/core/compiler/context/compilation_files.rs", + "src/cargo/core/compiler/build_runner/compilation_files.rs", "src/cargo/core/compiler/layout.rs", ]