chore: Rename CompileContext to BuildRunner
Muscraft committed Feb 20, 2024
1 parent 305efa6 commit 118afd0
Showing 18 changed files with 501 additions and 410 deletions.
6 changes: 3 additions & 3 deletions src/cargo/core/compiler/artifact.rs
@@ -1,7 +1,7 @@
//! Generate artifact information from unit dependencies for configuring the compiler environment.

use crate::core::compiler::unit_graph::UnitDep;
-use crate::core::compiler::{CompileContext, CrateType, FileFlavor, Unit};
+use crate::core::compiler::{BuildRunner, CrateType, FileFlavor, Unit};
use crate::core::dependency::ArtifactKind;
use crate::core::{Dependency, Target, TargetKind};
use crate::CargoResult;
@@ -11,12 +11,12 @@ use std::ffi::OsString;
/// Return all environment variables for the given unit-dependencies
/// if artifacts are present.
pub fn get_env(
-cx: &CompileContext<'_, '_>,
+build_runner: &BuildRunner<'_, '_>,
dependencies: &[UnitDep],
) -> CargoResult<HashMap<String, OsString>> {
let mut env = HashMap::new();
for unit_dep in dependencies.iter().filter(|d| d.unit.artifact.is_true()) {
-for artifact_path in cx
+for artifact_path in build_runner
.outputs(&unit_dep.unit)?
.iter()
.filter_map(|f| (f.flavor == FileFlavor::Normal).then(|| &f.path))
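A note on the function touched above: `get_env` walks each artifact dependency's normal output files and turns them into environment variables for the compiler invocation. Below is a self-contained sketch of that shape; the `ArtifactDep` type and the `CARGO_BIN_FILE_` key naming are illustrative simplifications, not cargo's exact logic.

```rust
use std::collections::HashMap;
use std::ffi::OsString;
use std::path::PathBuf;

// Illustrative stand-in for a unit dependency that produced an artifact.
struct ArtifactDep {
    name: String,
    outputs: Vec<PathBuf>, // paths cargo would report for this unit
}

// Sketch of the `get_env`-style pass: one env var per artifact output,
// keyed by an upper-cased dependency name.
fn artifact_env(deps: &[ArtifactDep]) -> HashMap<String, OsString> {
    let mut env = HashMap::new();
    for dep in deps {
        for path in &dep.outputs {
            let key = format!("CARGO_BIN_FILE_{}", dep.name.to_uppercase());
            env.insert(key, path.clone().into_os_string());
        }
    }
    env
}

fn main() {
    let deps = vec![ArtifactDep {
        name: "mybin".into(),
        outputs: vec![PathBuf::from("target/debug/mybin")],
    }];
    for (k, v) in artifact_env(&deps) {
        println!("{k}={v:?}");
    }
}
```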
6 changes: 3 additions & 3 deletions src/cargo/core/compiler/build_context/mod.rs
@@ -20,7 +20,7 @@ pub use self::target_info::{
/// before it gets started.
///
/// It is intended that this is mostly static information. Stuff that mutates
-during the build can be found in the parent [`Context`]. (I say mostly,
+during the build can be found in the parent [`BuildRunner`]. (I say mostly,
/// because this has internal caching, but nothing that should be observable
/// or require &mut.)
///
@@ -39,9 +39,9 @@ pub use self::target_info::{
/// since it is often too lower-level.
/// Instead, [`ops::create_bcx`] is usually what you are looking for.
///
-After a `BuildContext` is built, the next stage of building is handled in [`Context`].
+After a `BuildContext` is built, the next stage of building is handled in [`BuildRunner`].
///
-[`Context`]: crate::core::compiler::CompileContext
+[`BuildRunner`]: crate::core::compiler::BuildRunner
/// [`ops::create_bcx`]: crate::ops::create_bcx
pub struct BuildContext<'a, 'gctx> {
/// The workspace the build is for.
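The doc comment above spells out the division of labor: `BuildContext` is mostly static input (assembled by `ops::create_bcx`), while `BuildRunner` owns the mutable state that drives the build, which is what the new name makes explicit. A minimal sketch of that relationship, using hypothetical stand-in types rather than cargo's real definitions:

```rust
// Hypothetical miniature of the BuildContext / BuildRunner split.
struct BuildContext {
    roots: Vec<String>, // static: the root units to build
}

struct BuildRunner<'a> {
    bcx: &'a BuildContext, // borrowed, immutable plan data
    finished: Vec<String>, // mutable progress owned by the runner
}

impl<'a> BuildRunner<'a> {
    fn new(bcx: &'a BuildContext) -> Self {
        BuildRunner { bcx, finished: Vec::new() }
    }

    fn compile(&mut self) {
        for unit in &self.bcx.roots {
            // Real cargo would spawn rustc jobs here; we just record the unit.
            self.finished.push(unit.clone());
        }
    }
}

fn main() {
    let bcx = BuildContext { roots: vec!["foo".into(), "bar".into()] };
    let mut runner = BuildRunner::new(&bcx);
    runner.compile();
    assert_eq!(runner.finished, ["foo", "bar"]);
}
```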
23 changes: 16 additions & 7 deletions src/cargo/core/compiler/build_context/target_info.rs
@@ -9,7 +9,7 @@

use crate::core::compiler::apply_env_config;
use crate::core::compiler::{
-BuildOutput, CompileContext, CompileKind, CompileMode, CompileTarget, CrateType,
+BuildOutput, BuildRunner, CompileKind, CompileMode, CompileTarget, CrateType,
};
use crate::core::{Dependency, Package, Target, TargetKind, Workspace};
use crate::util::config::{GlobalContext, StringList, TargetConfig};
@@ -1033,15 +1033,23 @@ impl RustDocFingerprint
/// the rustdoc fingerprint info in order to guarantee that we won't end up with mixed
/// versions of the `js/html/css` files that `rustdoc` autogenerates which do not have
/// any versioning.
-pub fn check_rustdoc_fingerprint(cx: &CompileContext<'_, '_>) -> CargoResult<()> {
-if cx.bcx.gctx.cli_unstable().skip_rustdoc_fingerprint {
+pub fn check_rustdoc_fingerprint(build_runner: &BuildRunner<'_, '_>) -> CargoResult<()> {
+if build_runner
+.bcx
+.gctx
+.cli_unstable()
+.skip_rustdoc_fingerprint
+{
return Ok(());
}
let actual_rustdoc_target_data = RustDocFingerprint {
-rustc_vv: cx.bcx.rustc().verbose_version.clone(),
+rustc_vv: build_runner.bcx.rustc().verbose_version.clone(),
};

-let fingerprint_path = cx.files().host_root().join(".rustdoc_fingerprint.json");
+let fingerprint_path = build_runner
+.files()
+.host_root()
+.join(".rustdoc_fingerprint.json");
let write_fingerprint = || -> CargoResult<()> {
paths::write(
&fingerprint_path,
@@ -1076,10 +1084,11 @@ impl RustDocFingerprint {
"fingerprint {:?} mismatch, clearing doc directories",
fingerprint_path
);
-cx.bcx
+build_runner
+.bcx
.all_kinds
.iter()
-.map(|kind| cx.files().layout(*kind).doc())
+.map(|kind| build_runner.files().layout(*kind).doc())
.filter(|path| path.exists())
.try_for_each(|path| clean_doc(path))?;
write_fingerprint()?;
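The fingerprint logic touched above boils down to: record the verbose rustc version next to the doc output, and if a later run sees a different compiler, wipe the doc directories before regenerating them so rustdoc's shared js/html/css assets never mix versions. A reduced, std-only sketch of that check (cargo stores JSON via serde and clears per-kind layouts; here it is collapsed to one plain-text file and one directory, and the version string is hypothetical):

```rust
use std::fs;
use std::io;
use std::path::Path;

// Compare the stored compiler fingerprint against the current one; on a
// mismatch, remove the stale doc output and write the new fingerprint.
fn check_rustdoc_fingerprint(doc_dir: &Path, current_rustc_vv: &str) -> io::Result<()> {
    let fingerprint_path = doc_dir.join(".rustdoc_fingerprint.txt");
    match fs::read_to_string(&fingerprint_path) {
        Ok(stored) if stored == current_rustc_vv => return Ok(()), // up to date
        Ok(_) => {
            // Version changed: clear generated docs so shared assets can't mix.
            for entry in fs::read_dir(doc_dir)? {
                let path = entry?.path();
                if path.is_dir() {
                    fs::remove_dir_all(&path)?;
                }
            }
        }
        Err(_) => {} // no fingerprint yet; fall through and write one
    }
    fs::create_dir_all(doc_dir)?;
    fs::write(&fingerprint_path, current_rustc_vv)
}

fn main() -> io::Result<()> {
    let dir = std::env::temp_dir().join("rustdoc-fingerprint-demo");
    fs::create_dir_all(&dir)?;
    check_rustdoc_fingerprint(&dir, "rustc 1.77.0 (hypothetical)")
}
```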
8 changes: 4 additions & 4 deletions src/cargo/core/compiler/build_plan.rs
@@ -11,8 +11,8 @@ use std::path::{Path, PathBuf};

use serde::Serialize;

-use super::context::OutputFile;
-use super::{CompileContext, CompileKind, CompileMode, Unit};
+use super::build_runner::OutputFile;
+use super::{BuildRunner, CompileKind, CompileMode, Unit};
use crate::core::TargetKind;
use crate::util::{internal, CargoResult, GlobalContext};
use cargo_util::ProcessBuilder;
@@ -107,10 +107,10 @@ impl BuildPlan {
}
}

-pub fn add(&mut self, cx: &CompileContext<'_, '_>, unit: &Unit) -> CargoResult<()> {
+pub fn add(&mut self, build_runner: &BuildRunner<'_, '_>, unit: &Unit) -> CargoResult<()> {
let id = self.plan.invocations.len();
self.invocation_map.insert(unit.buildkey(), id);
-let deps = cx
+let deps = build_runner
.unit_deps(unit)
.iter()
.map(|dep| self.invocation_map[&dep.unit.buildkey()])
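For orientation, `BuildPlan::add` assigns each unit the next invocation index, remembers it under the unit's build key, and resolves the unit's dependencies to the indices recorded earlier. A small self-contained sketch of that bookkeeping (hypothetical types; real cargo stores full invocation records with program, args, env, and outputs):

```rust
use std::collections::HashMap;

// Minimal invocation record: which earlier invocations this one depends on.
struct Invocation {
    name: String,
    deps: Vec<usize>,
}

#[derive(Default)]
struct BuildPlan {
    invocation_map: HashMap<String, usize>, // build key -> invocation index
    invocations: Vec<Invocation>,
}

impl BuildPlan {
    // `dep_keys` must already have been added, mirroring cargo's bottom-up order.
    fn add(&mut self, build_key: &str, dep_keys: &[&str]) {
        let id = self.invocations.len();
        self.invocation_map.insert(build_key.to_string(), id);
        let deps = dep_keys.iter().map(|k| self.invocation_map[*k]).collect();
        self.invocations.push(Invocation { name: build_key.to_string(), deps });
    }
}

fn main() {
    let mut plan = BuildPlan::default();
    plan.add("dep-lib", &[]);
    plan.add("my-bin", &["dep-lib"]);
    assert_eq!(plan.invocations[1].deps, [0]);
    println!("{} invocations", plan.invocations.len());
}
```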
@@ -9,7 +9,7 @@ use std::sync::Arc;
use lazycell::LazyCell;
use tracing::debug;

-use super::{BuildContext, CompileContext, CompileKind, FileFlavor, Layout};
+use super::{BuildContext, BuildRunner, CompileKind, FileFlavor, Layout};
use crate::core::compiler::{CompileMode, CompileTarget, CrateType, FileType, Unit};
use crate::core::{Target, TargetKind, Workspace};
use crate::util::{self, CargoResult, StableHasher};
@@ -139,25 +139,25 @@ impl OutputFile {

impl<'a, 'gctx: 'a> CompilationFiles<'a, 'gctx> {
pub(super) fn new(
-compile_ctx: &CompileContext<'a, 'gctx>,
+build_runner: &BuildRunner<'a, 'gctx>,
host: Layout,
target: HashMap<CompileTarget, Layout>,
) -> CompilationFiles<'a, 'gctx> {
let mut metas = HashMap::new();
-for unit in &compile_ctx.bcx.roots {
-metadata_of(unit, compile_ctx, &mut metas);
+for unit in &build_runner.bcx.roots {
+metadata_of(unit, build_runner, &mut metas);
}
let outputs = metas
.keys()
.cloned()
.map(|unit| (unit, LazyCell::new()))
.collect();
CompilationFiles {
-ws: compile_ctx.bcx.ws,
+ws: build_runner.bcx.ws,
host,
target,
-export_dir: compile_ctx.bcx.build_config.export_dir.clone(),
-roots: compile_ctx.bcx.roots.clone(),
+export_dir: build_runner.bcx.build_config.export_dir.clone(),
+roots: build_runner.bcx.roots.clone(),
metas,
outputs,
}
@@ -557,14 +557,14 @@ impl<'a, 'gctx: 'a> CompilationFiles<'a, 'gctx> {
/// See [`compute_metadata`] for how a single metadata hash is computed.
fn metadata_of<'a>(
unit: &Unit,
-compile_ctx: &CompileContext<'_, '_>,
+build_runner: &BuildRunner<'_, '_>,
metas: &'a mut HashMap<Unit, MetaInfo>,
) -> &'a MetaInfo {
if !metas.contains_key(unit) {
-let meta = compute_metadata(unit, compile_ctx, metas);
+let meta = compute_metadata(unit, build_runner, metas);
metas.insert(unit.clone(), meta);
-for dep in compile_ctx.unit_deps(unit) {
-metadata_of(&dep.unit, compile_ctx, metas);
+for dep in build_runner.unit_deps(unit) {
+metadata_of(&dep.unit, build_runner, metas);
}
}
&metas[unit]
@@ -573,10 +573,10 @@ fn metadata_of<'a>(
/// Computes the metadata hash for the given [`Unit`].
fn compute_metadata(
unit: &Unit,
-compile_ctx: &CompileContext<'_, '_>,
+build_runner: &BuildRunner<'_, '_>,
metas: &mut HashMap<Unit, MetaInfo>,
) -> MetaInfo {
-let bcx = &compile_ctx.bcx;
+let bcx = &build_runner.bcx;
let mut hasher = StableHasher::new();

METADATA_VERSION.hash(&mut hasher);
@@ -593,10 +593,10 @@ fn compute_metadata(
unit.features.hash(&mut hasher);

// Mix in the target-metadata of all the dependencies of this target.
-let mut deps_metadata = compile_ctx
+let mut deps_metadata = build_runner
.unit_deps(unit)
.iter()
-.map(|dep| metadata_of(&dep.unit, compile_ctx, metas).meta_hash)
+.map(|dep| metadata_of(&dep.unit, build_runner, metas).meta_hash)
.collect::<Vec<_>>();
deps_metadata.sort();
deps_metadata.hash(&mut hasher);
@@ -606,7 +606,7 @@
// settings like debuginfo and whatnot.
unit.profile.hash(&mut hasher);
unit.mode.hash(&mut hasher);
-compile_ctx.lto[unit].hash(&mut hasher);
+build_runner.lto[unit].hash(&mut hasher);

// Artifacts compiled for the host should have a different
// metadata piece than those compiled for the target, so make sure
@@ -622,17 +622,21 @@

hash_rustc_version(bcx, &mut hasher);

-if compile_ctx.bcx.ws.is_member(&unit.pkg) {
+if build_runner.bcx.ws.is_member(&unit.pkg) {
// This is primarily here for clippy. This ensures that the clippy
// artifacts are separate from the `check` ones.
-if let Some(path) = &compile_ctx.bcx.rustc().workspace_wrapper {
+if let Some(path) = &build_runner.bcx.rustc().workspace_wrapper {
path.hash(&mut hasher);
}
}

// Seed the contents of `__CARGO_DEFAULT_LIB_METADATA` to the hasher if present.
// This should be the release channel, to get a different hash for each channel.
-if let Ok(ref channel) = compile_ctx.bcx.gctx.get_env("__CARGO_DEFAULT_LIB_METADATA") {
+if let Ok(ref channel) = build_runner
+.bcx
+.gctx
+.get_env("__CARGO_DEFAULT_LIB_METADATA")
+{
channel.hash(&mut hasher);
}

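As the comments in this hunk describe, `compute_metadata` mixes everything that should force a distinct output filename into one stable hash: the unit and its features, the sorted metadata of all dependencies, profile and mode, the per-unit LTO choice, host versus target, the rustc version, an optional workspace wrapper, and the `__CARGO_DEFAULT_LIB_METADATA` channel seed. A reduced, std-only sketch of that recursive mixing (using `DefaultHasher` instead of cargo's `StableHasher`, with toy unit fields and a hypothetical version string):

```rust
use std::collections::hash_map::DefaultHasher;
use std::collections::HashMap;
use std::hash::{Hash, Hasher};

// Toy unit: just enough fields to show what gets mixed into the hash.
#[derive(Clone, PartialEq, Eq, Hash)]
struct Unit {
    name: String,
    features: Vec<String>,
    deps: Vec<Unit>,
}

// Recursively compute and memoize a metadata hash per unit, folding in the
// sorted hashes of all dependencies so any change propagates upward.
fn metadata_of(unit: &Unit, rustc_version: &str, metas: &mut HashMap<Unit, u64>) -> u64 {
    if let Some(&meta) = metas.get(unit) {
        return meta;
    }
    let mut hasher = DefaultHasher::new();
    unit.name.hash(&mut hasher);
    unit.features.hash(&mut hasher);
    rustc_version.hash(&mut hasher);

    let mut deps_metadata: Vec<u64> = unit
        .deps
        .iter()
        .map(|dep| metadata_of(dep, rustc_version, metas))
        .collect();
    deps_metadata.sort();
    deps_metadata.hash(&mut hasher);

    let meta = hasher.finish();
    metas.insert(unit.clone(), meta);
    meta
}

fn main() {
    let dep = Unit { name: "log".into(), features: vec![], deps: vec![] };
    let root = Unit { name: "app".into(), features: vec!["std".into()], deps: vec![dep] };
    let mut metas = HashMap::new();
    println!("{:016x}", metadata_of(&root, "rustc 1.77.0 (hypothetical)", &mut metas));
}
```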
@@ -1,4 +1,4 @@
-//! [`CompileContext`] is the mutable state used during the build process.
+//! [`BuildRunner`] is the mutable state used during the build process.

use std::collections::{BTreeSet, HashMap, HashSet};
use std::path::{Path, PathBuf};
@@ -36,7 +36,7 @@ pub use self::compilation_files::{Metadata, OutputFile};
/// throughout the entire build process. Everything is coordinated through this.
///
/// [`BuildContext`]: crate::core::compiler::BuildContext
-pub struct CompileContext<'a, 'gctx> {
+pub struct BuildRunner<'a, 'gctx> {
/// Mostly static information about the build task.
pub bcx: &'a BuildContext<'a, 'gctx>,
/// A large collection of information about the result of the entire compilation.
@@ -88,7 +88,7 @@ pub struct CompileContext<'a, 'gctx> {
pub failed_scrape_units: Arc<Mutex<HashSet<Metadata>>>,
}

-impl<'a, 'gctx> CompileContext<'a, 'gctx> {
+impl<'a, 'gctx> BuildRunner<'a, 'gctx> {
pub fn new(bcx: &'a BuildContext<'a, 'gctx>) -> CargoResult<Self> {
// Load up the jobserver that we'll use to manage our parallelism. This
// is the same as the GNU make implementation of a jobserver, and
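The comment in `BuildRunner::new` above also notes that the runner sets up the jobserver that throttles how many compiler processes run at once, following the GNU make jobserver protocol. A toy, std-only token pool illustrating the idea (cargo actually uses the `jobserver` crate and can inherit tokens from a parent make or cargo process; this stand-in only shows the counted-token pattern):

```rust
use std::sync::{Arc, Condvar, Mutex};

// Toy jobserver: a counted pool of tokens; a job must hold a token to run.
#[derive(Clone)]
struct TokenPool {
    inner: Arc<(Mutex<usize>, Condvar)>,
}

impl TokenPool {
    fn new(tokens: usize) -> Self {
        TokenPool { inner: Arc::new((Mutex::new(tokens), Condvar::new())) }
    }

    fn acquire(&self) {
        let (lock, cvar) = &*self.inner;
        let mut available = lock.lock().unwrap();
        while *available == 0 {
            available = cvar.wait(available).unwrap();
        }
        *available -= 1;
    }

    fn release(&self) {
        let (lock, cvar) = &*self.inner;
        *lock.lock().unwrap() += 1;
        cvar.notify_one();
    }
}

fn main() {
    let pool = TokenPool::new(2); // like `cargo build -j2`
    let handles: Vec<_> = (0..4)
        .map(|i| {
            let pool = pool.clone();
            std::thread::spawn(move || {
                pool.acquire();
                println!("job {i} running"); // at most 2 of these at once
                pool.release();
            })
        })
        .collect();
    for h in handles {
        h.join().unwrap();
    }
}
```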
2 changes: 1 addition & 1 deletion src/cargo/core/compiler/compilation.rs
@@ -166,7 +166,7 @@ impl<'gctx> Compilation<'gctx> {
///
/// `is_primary` is true if this is a "primary package", which means it
/// was selected by the user on the command-line (such as with a `-p`
-/// flag), see [`crate::core::compiler::CompileContext::primary_packages`].
+/// flag), see [`crate::core::compiler::BuildRunner::primary_packages`].
///
/// `is_workspace` is true if this is a workspace member.
pub fn rustc_process(
