fix(coverage): account for build_id in source identification (#9418)
* fix: use `SourceIdentifier` in SourceFiles mapping

* fix(`coverage`): use `SourceIdentifier` in analysis and report.

* fix

* nit

* nit
yash-atreya authored Nov 27, 2024
1 parent e90a354 commit 7d639e6
Showing 6 changed files with 109 additions and 44 deletions.
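
The motivation, in brief: a compiler-assigned source ID is only unique within a single compilation job, so keying coverage data by the bare usize can collide once cached and freshly compiled artifacts from different jobs are mixed. The following is an illustrative, self-contained sketch (not part of the commit) of that collision and how pairing the ID with a build ID avoids it; the struct below is a stand-in for the SourceIdentifier introduced in this diff, and the paths and build ids are made up.

use std::collections::HashMap;

// Stand-in for the SourceIdentifier added in this commit (illustration only).
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
struct SourceIdentifier {
    source_id: usize,
    build_id: String,
}

fn main() {
    // Keyed by source_id alone, the second insert silently overwrites the first,
    // because both compilation jobs happened to assign source_id 0.
    let mut by_id: HashMap<usize, &str> = HashMap::new();
    by_id.insert(0, "src/A.sol");
    by_id.insert(0, "src/B.sol");
    assert_eq!(by_id.len(), 1);

    // Keyed by (source_id, build_id), both sources survive.
    let mut by_identifier: HashMap<SourceIdentifier, &str> = HashMap::new();
    by_identifier.insert(SourceIdentifier { source_id: 0, build_id: "job-a".into() }, "src/A.sol");
    by_identifier.insert(SourceIdentifier { source_id: 0, build_id: "job-b".into() }, "src/B.sol");
    assert_eq!(by_identifier.len(), 2);
}
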
37 changes: 31 additions & 6 deletions crates/evm/coverage/src/analysis.rs
@@ -1,5 +1,6 @@
use super::{CoverageItem, CoverageItemKind, SourceLocation};
use alloy_primitives::map::HashMap;
use core::fmt;
use foundry_common::TestFunctionExt;
use foundry_compilers::artifacts::ast::{self, Ast, Node, NodeType};
use rayon::prelude::*;
@@ -9,7 +10,7 @@ use std::{borrow::Cow, sync::Arc};
#[derive(Clone, Debug)]
pub struct ContractVisitor<'a> {
/// The source ID of the contract.
source_id: usize,
source_id: SourceIdentifier,
/// The source code that contains the AST being walked.
source: &'a str,

@@ -26,7 +27,7 @@ pub struct ContractVisitor<'a> {
}

impl<'a> ContractVisitor<'a> {
pub fn new(source_id: usize, source: &'a str, contract_name: &'a Arc<str>) -> Self {
pub fn new(source_id: SourceIdentifier, source: &'a str, contract_name: &'a Arc<str>) -> Self {
Self { source_id, source, contract_name, branch_id: 0, last_line: 0, items: Vec::new() }
}

@@ -473,7 +474,7 @@ impl<'a> ContractVisitor<'a> {
let loc_start =
self.source.char_indices().map(|(i, _)| i).nth(loc.start).unwrap_or_default();
SourceLocation {
source_id: self.source_id,
source_id: self.source_id.clone(),
contract_name: self.contract_name.clone(),
start: loc.start as u32,
length: loc.length.map(|x| x as u32),
@@ -538,7 +539,7 @@ impl<'a> SourceAnalyzer<'a> {
.sources
.sources
.par_iter()
.flat_map_iter(|(&source_id, SourceFile { source, ast })| {
.flat_map_iter(|(source_id, SourceFile { source, ast })| {
ast.nodes.iter().map(move |node| {
if !matches!(node.node_type, NodeType::ContractDefinition) {
return Ok(vec![]);
@@ -556,7 +557,7 @@ impl<'a> SourceAnalyzer<'a> {
.attribute("name")
.ok_or_else(|| eyre::eyre!("Contract has no name"))?;

let mut visitor = ContractVisitor::new(source_id, source, &name);
let mut visitor = ContractVisitor::new(source_id.clone(), source, &name);
visitor.visit_contract(node)?;
let mut items = visitor.items;

@@ -583,7 +584,31 @@ impl<'a> SourceAnalyzer<'a> {
#[derive(Debug, Default)]
pub struct SourceFiles<'a> {
/// The versioned sources.
pub sources: HashMap<usize, SourceFile<'a>>,
/// Keyed by build_id and source_id.
pub sources: HashMap<SourceIdentifier, SourceFile<'a>>,
}

/// Serves as a unique identifier for sources across multiple compiler runs.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct SourceIdentifier {
/// Source ID is unique for each source file per compilation job but may not be across
/// different jobs.
pub source_id: usize,
/// Artifact build id is same for all sources in a single compilation job. But always unique
/// across different jobs.
pub build_id: String,
}

impl fmt::Display for SourceIdentifier {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "source_id={} build_id={}", self.source_id, self.build_id,)
}
}

impl SourceIdentifier {
pub fn new(source_id: usize, build_id: String) -> Self {
Self { source_id, build_id }
}
}

/// The source code and AST of a file.
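
For reference, a small usage sketch of the new type as it is exercised elsewhere in this commit: construction via new, the Display output, and use as a map key. The import path mirrors the one added in crates/forge/bin/cmd/coverage.rs; the values are placeholders, not taken from the diff.

use std::collections::HashMap;
use forge::coverage::analysis::SourceIdentifier;

fn demo(build_id: String) {
    let id = SourceIdentifier::new(3, build_id);
    // The Display impl renders both components, e.g. "source_id=3 build_id=<build id>".
    println!("{id}");

    // SourceFiles is keyed by SourceIdentifier; a plain string stands in for SourceFile<'_> here.
    let mut sources: HashMap<SourceIdentifier, &str> = HashMap::new();
    sources.insert(id.clone(), "contract A {}");
    assert!(sources.contains_key(&id));
}
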
10 changes: 7 additions & 3 deletions crates/evm/coverage/src/anchors.rs
@@ -1,3 +1,5 @@
use crate::analysis::SourceIdentifier;

use super::{CoverageItem, CoverageItemKind, ItemAnchor, SourceLocation};
use alloy_primitives::map::{DefaultHashBuilder, HashMap, HashSet};
use eyre::ensure;
@@ -11,12 +13,13 @@ pub fn find_anchors(
source_map: &SourceMap,
ic_pc_map: &IcPcMap,
items: &[CoverageItem],
items_by_source_id: &HashMap<usize, Vec<usize>>,
items_by_source_id: &HashMap<SourceIdentifier, Vec<usize>>,
source_id: &SourceIdentifier,
) -> Vec<ItemAnchor> {
let mut seen = HashSet::with_hasher(DefaultHashBuilder::default());
source_map
.iter()
.filter_map(|element| items_by_source_id.get(&(element.index()? as usize)))
.filter_map(|_element| items_by_source_id.get(source_id))
.flatten()
.filter_map(|&item_id| {
if !seen.insert(item_id) {
@@ -171,7 +174,8 @@ pub fn find_anchor_branch(
/// Calculates whether `element` is within the range of the target `location`.
fn is_in_source_range(element: &SourceElement, location: &SourceLocation) -> bool {
// Source IDs must match.
let source_ids_match = element.index().is_some_and(|a| a as usize == location.source_id);
let source_ids_match =
element.index().is_some_and(|a| a as usize == location.source_id.source_id);
if !source_ids_match {
return false;
}
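
The matching rule after this change, summarized as a simplified sketch: source maps only know the numeric index, so only the source_id component of the identifier is compared against the element index, while the build_id is implied by which analysis the items came from. The types below are stand-ins, not the real SourceElement/SourceLocation.

// Simplified stand-in for SourceLocation: only the numeric part matters here.
struct LocationLite {
    source_id: usize, // the numeric component of SourceIdentifier
}

fn source_ids_match(element_index: Option<u32>, loc: &LocationLite) -> bool {
    // Mirrors the comparison in is_in_source_range above.
    element_index.is_some_and(|a| a as usize == loc.source_id)
}

fn main() {
    assert!(source_ids_match(Some(3), &LocationLite { source_id: 3 }));
    assert!(!source_ids_match(None, &LocationLite { source_id: 3 }));
}
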
23 changes: 12 additions & 11 deletions crates/evm/coverage/src/lib.rs
@@ -12,6 +12,7 @@ extern crate foundry_common;
extern crate tracing;

use alloy_primitives::{map::HashMap, Bytes, B256};
use analysis::SourceIdentifier;
use eyre::{Context, Result};
use foundry_compilers::artifacts::sourcemap::SourceMap;
use semver::Version;
@@ -36,9 +37,9 @@ pub use inspector::CoverageCollector;
#[derive(Clone, Debug, Default)]
pub struct CoverageReport {
/// A map of source IDs to the source path.
pub source_paths: HashMap<(Version, usize), PathBuf>,
pub source_paths: HashMap<(Version, SourceIdentifier), PathBuf>,
/// A map of source paths to source IDs.
pub source_paths_to_ids: HashMap<(Version, PathBuf), usize>,
pub source_paths_to_ids: HashMap<(Version, PathBuf), SourceIdentifier>,
/// All coverage items for the codebase, keyed by the compiler version.
pub items: HashMap<Version, Vec<CoverageItem>>,
/// All item anchors for the codebase, keyed by their contract ID.
@@ -51,14 +52,14 @@ pub struct CoverageReport {

impl CoverageReport {
/// Add a source file path.
pub fn add_source(&mut self, version: Version, source_id: usize, path: PathBuf) {
self.source_paths.insert((version.clone(), source_id), path.clone());
pub fn add_source(&mut self, version: Version, source_id: SourceIdentifier, path: PathBuf) {
self.source_paths.insert((version.clone(), source_id.clone()), path.clone());
self.source_paths_to_ids.insert((version, path), source_id);
}

/// Get the source ID for a specific source file path.
pub fn get_source_id(&self, version: Version, path: PathBuf) -> Option<usize> {
self.source_paths_to_ids.get(&(version, path)).copied()
pub fn get_source_id(&self, version: Version, path: PathBuf) -> Option<SourceIdentifier> {
self.source_paths_to_ids.get(&(version, path)).cloned()
}

/// Add the source maps.
@@ -89,7 +90,7 @@ impl CoverageReport {
for (version, items) in self.items.iter() {
for item in items {
let Some(path) =
self.source_paths.get(&(version.clone(), item.loc.source_id)).cloned()
self.source_paths.get(&(version.clone(), item.loc.source_id.clone())).cloned()
else {
continue;
};
@@ -107,7 +108,7 @@ impl CoverageReport {
for (version, items) in self.items.iter() {
for item in items {
let Some(path) =
self.source_paths.get(&(version.clone(), item.loc.source_id)).cloned()
self.source_paths.get(&(version.clone(), item.loc.source_id.clone())).cloned()
else {
continue;
};
@@ -160,7 +161,7 @@ impl CoverageReport {
self.items.retain(|version, items| {
items.retain(|item| {
self.source_paths
.get(&(version.clone(), item.loc.source_id))
.get(&(version.clone(), item.loc.source_id.clone()))
.map(|path| filter(path))
.unwrap_or(false)
});
@@ -259,7 +260,7 @@ impl HitMap {
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct ContractId {
pub version: Version,
pub source_id: usize,
pub source_id: SourceIdentifier,
pub contract_name: Arc<str>,
}

@@ -348,7 +349,7 @@ impl Display for CoverageItem {
#[derive(Clone, Debug)]
pub struct SourceLocation {
/// The source ID.
pub source_id: usize,
pub source_id: SourceIdentifier,
/// The contract this source range is in.
pub contract_name: Arc<str>,
/// Start byte in the source code.
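
A hedged sketch of the updated CoverageReport bookkeeping with the new key type; the version, build id, and path are invented for illustration, and the import paths follow the ones used elsewhere in this diff.

use std::path::PathBuf;
use semver::Version;
use forge::coverage::{analysis::SourceIdentifier, CoverageReport};

fn register(report: &mut CoverageReport) {
    let version = Version::new(0, 8, 23);
    let id = SourceIdentifier::new(0, "build-abc".to_string());
    // Sources are now registered under (Version, SourceIdentifier).
    report.add_source(version.clone(), id.clone(), PathBuf::from("src/A.sol"));
    // Lookups return the full identifier (cloned) instead of a bare usize.
    assert_eq!(report.get_source_id(version, PathBuf::from("src/A.sol")), Some(id));
}
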
35 changes: 24 additions & 11 deletions crates/forge/bin/cmd/coverage.rs
@@ -4,7 +4,7 @@ use clap::{Parser, ValueEnum, ValueHint};
use eyre::{Context, Result};
use forge::{
coverage::{
analysis::{SourceAnalysis, SourceAnalyzer, SourceFile, SourceFiles},
analysis::{SourceAnalysis, SourceAnalyzer, SourceFile, SourceFiles, SourceIdentifier},
anchors::find_anchors,
BytecodeReporter, ContractId, CoverageReport, CoverageReporter, DebugReporter, ItemAnchor,
LcovReporter, SummaryReporter,
@@ -162,7 +162,6 @@ impl CoverageArgs {
// Collect source files.
let project_paths = &project.paths;
let mut versioned_sources = HashMap::<Version, SourceFiles<'_>>::default();

// Account cached and freshly compiled sources
for (id, artifact) in output.artifact_ids() {
// Filter out dependencies
@@ -171,14 +170,16 @@
}

let version = id.version;
let build_id = id.build_id;
let source_file = if let Some(source_file) = artifact.source_file() {
source_file
} else {
sh_warn!("ast source file not found for {}", id.source.display())?;
continue;
};

report.add_source(version.clone(), source_file.id as usize, id.source.clone());
let identifier = SourceIdentifier::new(source_file.id as usize, build_id.clone());
report.add_source(version.clone(), identifier.clone(), id.source.clone());

if let Some(ast) = source_file.ast {
let file = project_paths.root.join(id.source);
@@ -194,7 +195,7 @@
.entry(version.clone())
.or_default()
.sources
.insert(source_file.id as usize, source);
.insert(identifier, source);
}
}

@@ -219,17 +220,23 @@
);

for (item_id, item) in source_analysis.items.iter().enumerate() {
items_by_source_id.entry(item.loc.source_id).or_default().push(item_id);
items_by_source_id.entry(item.loc.source_id.clone()).or_default().push(item_id);
}

let anchors = artifacts
.par_iter()
.filter(|artifact| artifact.contract_id.version == *version)
.map(|artifact| {
let creation_code_anchors =
artifact.creation.find_anchors(&source_analysis, &items_by_source_id);
let deployed_code_anchors =
artifact.deployed.find_anchors(&source_analysis, &items_by_source_id);
let creation_code_anchors = artifact.creation.find_anchors(
&source_analysis,
&items_by_source_id,
&artifact.contract_id.source_id,
);
let deployed_code_anchors = artifact.deployed.find_anchors(
&source_analysis,
&items_by_source_id,
&artifact.contract_id.source_id,
);
(artifact.contract_id.clone(), (creation_code_anchors, deployed_code_anchors))
})
.collect::<Vec<_>>();
@@ -387,7 +394,11 @@ pub struct ArtifactData {
}

impl ArtifactData {
pub fn new(id: &ArtifactId, source_id: usize, artifact: &impl Artifact) -> Option<Self> {
pub fn new(
id: &ArtifactId,
source_id: SourceIdentifier,
artifact: &impl Artifact,
) -> Option<Self> {
Some(Self {
contract_id: ContractId {
version: id.version.clone(),
@@ -432,14 +443,16 @@ impl BytecodeData {
pub fn find_anchors(
&self,
source_analysis: &SourceAnalysis,
items_by_source_id: &HashMap<usize, Vec<usize>>,
items_by_source_id: &HashMap<SourceIdentifier, Vec<usize>>,
source_id: &SourceIdentifier,
) -> Vec<ItemAnchor> {
find_anchors(
&self.bytecode,
&self.source_map,
&self.ic_pc_map,
&source_analysis.items,
items_by_source_id,
source_id,
)
}
}
6 changes: 4 additions & 2 deletions crates/forge/src/coverage.rs
@@ -222,8 +222,10 @@ impl CoverageReporter for BytecodeReporter {
.map(|h| format!("[{h:03}]"))
.unwrap_or(" ".to_owned());
let source_id = source_element.index();
let source_path = source_id.and_then(|i| {
report.source_paths.get(&(contract_id.version.clone(), i as usize))
let source_path = source_id.and_then(|_i| {
report
.source_paths
.get(&(contract_id.version.clone(), contract_id.source_id.clone()))
});

let code = format!("{code:?}");
42 changes: 31 additions & 11 deletions crates/forge/tests/cli/coverage.rs
@@ -1662,22 +1662,35 @@ forgetest!(test_coverage_multi_solc_versions, |prj, cmd| {
});

// checks that `clean` also works with the "out" value set in Config
forgetest!(coverage_cache, |prj, cmd| {
prj.add_source("A", r#"
// this test verifies that the coverage is preserved across compiler runs that may result in files
// with different source_id's
forgetest!(coverage_cache_across_compiler_runs, |prj, cmd| {
prj.add_source(
"A",
r#"
contract A {
function f() public pure returns (uint) {
return 1;
}
}"#).unwrap();
}"#,
)
.unwrap();

prj.add_source("B", r#"
prj.add_source(
"B",
r#"
contract B {
function f() public pure returns (uint) {
return 1;
}
}"#).unwrap();
}"#,
)
.unwrap();

let a_test = prj.add_test("A.t.sol", r#"
let a_test = prj
.add_test(
"A.t.sol",
r#"
import {A} from "../src/A.sol";
contract ATest {
@@ -1686,9 +1699,13 @@ contract ATest {
a.f();
}
}
"#).unwrap();
"#,
)
.unwrap();

prj.add_test("B.t.sol", r#"
prj.add_test(
"B.t.sol",
r#"
import {B} from "../src/B.sol";
contract BTest {
@@ -1697,8 +1714,10 @@ contract ATest {
a.f();
}
}
"#).unwrap();

"#,
)
.unwrap();

cmd.forge_fuse().arg("coverage").assert_success().stdout_eq(str![[r#"
...
Ran 2 test suites [ELAPSED]: 2 tests passed, 0 failed, 0 skipped (2 total tests)
Expand All @@ -1718,7 +1737,8 @@ Ran 2 test suites [ELAPSED]: 2 tests passed, 0 failed, 0 skipped (2 total tests)
| File | % Lines | % Statements | % Branches | % Funcs |
|-----------|---------------|---------------|---------------|---------------|
| src/A.sol | 100.00% (1/1) | 100.00% (1/1) | 100.00% (0/0) | 100.00% (1/1) |
| Total | 100.00% (1/1) | 100.00% (1/1) | 100.00% (0/0) | 100.00% (1/1) |
| src/B.sol | 100.00% (1/1) | 100.00% (1/1) | 100.00% (0/0) | 100.00% (1/1) |
| Total | 100.00% (2/2) | 100.00% (2/2) | 100.00% (0/0) | 100.00% (2/2) |
"#]]);
});
