diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 6ff0f5e2736f2..b55ab229811ba 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -50,10 +50,10 @@ jobs:
       matrix:
         include:
           - name: mingw-check
-            os: ubuntu-20.04-16core-64gb
+            os: ubuntu-20.04-4core-16gb
            env: {}
           - name: mingw-check-tidy
-            os: ubuntu-20.04-16core-64gb
+            os: ubuntu-20.04-4core-16gb
            env: {}
           - name: x86_64-gnu-llvm-15
            os: ubuntu-20.04-16core-64gb
diff --git a/compiler/rustc_mir_transform/src/coverage/counters.rs b/compiler/rustc_mir_transform/src/coverage/counters.rs
index 97bdb878ab17a..d1f2f0c76c839 100644
--- a/compiler/rustc_mir_transform/src/coverage/counters.rs
+++ b/compiler/rustc_mir_transform/src/coverage/counters.rs
@@ -8,25 +8,50 @@ use debug::{DebugCounters, NESTED_INDENT};
 use graph::{BasicCoverageBlock, BcbBranch, CoverageGraph, TraverseCoverageGraphWithLoops};
 use spans::CoverageSpan;

+use rustc_data_structures::fx::FxHashMap;
 use rustc_data_structures::graph::WithNumNodes;
 use rustc_index::bit_set::BitSet;
+use rustc_index::IndexVec;
 use rustc_middle::mir::coverage::*;

-/// Manages the counter and expression indexes/IDs to generate `CoverageKind` components for MIR
-/// `Coverage` statements.
+/// Generates and stores coverage counter and coverage expression information
+/// associated with nodes/edges in the BCB graph.
 pub(super) struct CoverageCounters {
     function_source_hash: u64,
     next_counter_id: CounterId,
     next_expression_id: ExpressionId,
+
+    /// Coverage counters/expressions that are associated with individual BCBs.
+    bcb_counters: IndexVec<BasicCoverageBlock, Option<CoverageKind>>,
+    /// Coverage counters/expressions that are associated with the control-flow
+    /// edge between two BCBs.
+    bcb_edge_counters: FxHashMap<(BasicCoverageBlock, BasicCoverageBlock), CoverageKind>,
+    /// Tracks which BCBs have a counter associated with some incoming edge.
+    /// Only used by debug assertions, to verify that BCBs with incoming edge
+    /// counters do not have their own physical counters (expressions are allowed).
+    bcb_has_incoming_edge_counters: BitSet<BasicCoverageBlock>,
+    /// Expression nodes that are not directly associated with any particular
+    /// BCB/edge, but are needed as operands to more complex expressions.
+    /// These are always `CoverageKind::Expression`.
+    pub(super) intermediate_expressions: Vec<CoverageKind>,
+
     pub debug_counters: DebugCounters,
 }

 impl CoverageCounters {
-    pub fn new(function_source_hash: u64) -> Self {
+    pub(super) fn new(function_source_hash: u64, basic_coverage_blocks: &CoverageGraph) -> Self {
+        let num_bcbs = basic_coverage_blocks.num_nodes();
+
         Self {
             function_source_hash,
             next_counter_id: CounterId::START,
             next_expression_id: ExpressionId::START,
+
+            bcb_counters: IndexVec::from_elem_n(None, num_bcbs),
+            bcb_edge_counters: FxHashMap::default(),
+            bcb_has_incoming_edge_counters: BitSet::new_empty(num_bcbs),
+            intermediate_expressions: Vec::new(),
+
             debug_counters: DebugCounters::new(),
         }
     }
@@ -38,15 +63,14 @@ impl CoverageCounters {
     }

     /// Makes `CoverageKind` `Counter`s and `Expressions` for the `BasicCoverageBlock`s directly or
-    /// indirectly associated with `CoverageSpans`, and returns additional `Expression`s
+    /// indirectly associated with `CoverageSpans`, and accumulates additional `Expression`s
    /// representing intermediate values.
    pub fn make_bcb_counters(
         &mut self,
-        basic_coverage_blocks: &mut CoverageGraph,
+        basic_coverage_blocks: &CoverageGraph,
         coverage_spans: &[CoverageSpan],
-    ) -> Result<Vec<CoverageKind>, Error> {
-        let mut bcb_counters = BcbCounters::new(self, basic_coverage_blocks);
-        bcb_counters.make_bcb_counters(coverage_spans)
+    ) -> Result<(), Error> {
+        MakeBcbCounters::new(self, basic_coverage_blocks).make_bcb_counters(coverage_spans)
     }

     fn make_counter<F>(&mut self, debug_block_label_fn: F) -> CoverageKind
@@ -106,21 +130,95 @@ impl CoverageCounters {
         self.next_expression_id = next.next_id();
         next
     }
+
+    fn set_bcb_counter(
+        &mut self,
+        bcb: BasicCoverageBlock,
+        counter_kind: CoverageKind,
+    ) -> Result<Operand, Error> {
+        debug_assert!(
+            // If the BCB has an edge counter (to be injected into a new `BasicBlock`), it can also
+            // have an expression (to be injected into an existing `BasicBlock` represented by this
+            // `BasicCoverageBlock`).
+            counter_kind.is_expression() || !self.bcb_has_incoming_edge_counters.contains(bcb),
+            "attempt to add a `Counter` to a BCB target with existing incoming edge counters"
+        );
+        let operand = counter_kind.as_operand();
+        if let Some(replaced) = self.bcb_counters[bcb].replace(counter_kind) {
+            Error::from_string(format!(
+                "attempt to set a BasicCoverageBlock coverage counter more than once; \
+                {bcb:?} already had counter {replaced:?}",
+            ))
+        } else {
+            Ok(operand)
+        }
+    }
+
+    fn set_bcb_edge_counter(
+        &mut self,
+        from_bcb: BasicCoverageBlock,
+        to_bcb: BasicCoverageBlock,
+        counter_kind: CoverageKind,
+    ) -> Result<Operand, Error> {
+        if level_enabled!(tracing::Level::DEBUG) {
+            // If the BCB has an edge counter (to be injected into a new `BasicBlock`), it can also
+            // have an expression (to be injected into an existing `BasicBlock` represented by this
+            // `BasicCoverageBlock`).
+            if self.bcb_counter(to_bcb).is_some_and(|c| !c.is_expression()) {
+                return Error::from_string(format!(
+                    "attempt to add an incoming edge counter from {from_bcb:?} when the target BCB already \
+                    has a `Counter`"
+                ));
+            }
+        }
+        self.bcb_has_incoming_edge_counters.insert(to_bcb);
+        let operand = counter_kind.as_operand();
+        if let Some(replaced) = self.bcb_edge_counters.insert((from_bcb, to_bcb), counter_kind) {
+            Error::from_string(format!(
+                "attempt to set an edge counter more than once; from_bcb: \
+                {from_bcb:?} already had counter {replaced:?}",
+            ))
+        } else {
+            Ok(operand)
+        }
+    }
+
+    pub(super) fn bcb_counter(&self, bcb: BasicCoverageBlock) -> Option<&CoverageKind> {
+        self.bcb_counters[bcb].as_ref()
+    }
+
+    pub(super) fn take_bcb_counter(&mut self, bcb: BasicCoverageBlock) -> Option<CoverageKind> {
+        self.bcb_counters[bcb].take()
+    }
+
+    pub(super) fn drain_bcb_counters(
+        &mut self,
+    ) -> impl Iterator<Item = (BasicCoverageBlock, CoverageKind)> + '_ {
+        self.bcb_counters
+            .iter_enumerated_mut()
+            .filter_map(|(bcb, counter)| Some((bcb, counter.take()?)))
+    }
+
+    pub(super) fn drain_bcb_edge_counters(
+        &mut self,
+    ) -> impl Iterator<Item = ((BasicCoverageBlock, BasicCoverageBlock), CoverageKind)> + '_ {
+        self.bcb_edge_counters.drain()
+    }
 }

 /// Traverse the `CoverageGraph` and add either a `Counter` or `Expression` to every BCB, to be
 /// injected with `CoverageSpan`s. `Expressions` have no runtime overhead, so if a viable expression
 /// (adding or subtracting two other counters or expressions) can compute the same result as an
 /// embedded counter, an `Expression` should be used.
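For intuition about the `Counter`-vs-`Expression` trade-off described in the doc comment above, here is a minimal sketch with made-up counts (the `bcb0`/`bcb1`/`bcb2` values and arithmetic are illustrative only, not rustc's real `CoverageKind` machinery): when `bcb0` branches to `bcb1` and `bcb2`, the counts satisfy `count(bcb0) = count(bcb1) + count(bcb2)`, so only one branch needs a physical counter.

```rust
fn main() {
    // Hypothetical execution counts: the branching block bcb0 ran 10 times
    // and the instrumented `then` branch bcb1 ran 7 times.
    let bcb0_counter = 10u64; // physical counter injected into the MIR
    let bcb1_counter = 7u64; // physical counter injected into the MIR
    // bcb2 (the `else` branch) needs no instrumentation at runtime: its
    // count is the expression `bcb0 - bcb1`, evaluated at report time.
    let bcb2_expression = bcb0_counter - bcb1_counter;
    assert_eq!(bcb2_expression, 3);
}
```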
-struct BcbCounters<'a> {
+struct MakeBcbCounters<'a> {
     coverage_counters: &'a mut CoverageCounters,
-    basic_coverage_blocks: &'a mut CoverageGraph,
+    basic_coverage_blocks: &'a CoverageGraph,
 }

-impl<'a> BcbCounters<'a> {
+impl<'a> MakeBcbCounters<'a> {
     fn new(
         coverage_counters: &'a mut CoverageCounters,
-        basic_coverage_blocks: &'a mut CoverageGraph,
+        basic_coverage_blocks: &'a CoverageGraph,
     ) -> Self {
         Self { coverage_counters, basic_coverage_blocks }
     }
@@ -135,13 +233,9 @@ impl<'a> BcbCounters<'a> {
     /// Returns any non-code-span expressions created to represent intermediate values (such as to
     /// add two counters so the result can be subtracted from another counter), or an Error with
     /// message for subsequent debugging.
-    fn make_bcb_counters(
-        &mut self,
-        coverage_spans: &[CoverageSpan],
-    ) -> Result<Vec<CoverageKind>, Error> {
+    fn make_bcb_counters(&mut self, coverage_spans: &[CoverageSpan]) -> Result<(), Error> {
         debug!("make_bcb_counters(): adding a counter or expression to each BasicCoverageBlock");
         let num_bcbs = self.basic_coverage_blocks.num_nodes();
-        let mut collect_intermediate_expressions = Vec::with_capacity(num_bcbs);

         let mut bcbs_with_coverage = BitSet::new_empty(num_bcbs);
         for covspan in coverage_spans {
@@ -162,16 +256,10 @@ impl<'a> BcbCounters<'a> {
         while let Some(bcb) = traversal.next(self.basic_coverage_blocks) {
             if bcbs_with_coverage.contains(bcb) {
                 debug!("{:?} has at least one `CoverageSpan`. Get or make its counter", bcb);
-                let branching_counter_operand =
-                    self.get_or_make_counter_operand(bcb, &mut collect_intermediate_expressions)?;
+                let branching_counter_operand = self.get_or_make_counter_operand(bcb)?;

                 if self.bcb_needs_branch_counters(bcb) {
-                    self.make_branch_counters(
-                        &mut traversal,
-                        bcb,
-                        branching_counter_operand,
-                        &mut collect_intermediate_expressions,
-                    )?;
+                    self.make_branch_counters(&mut traversal, bcb, branching_counter_operand)?;
                 }
             } else {
                 debug!(
@@ -183,7 +271,7 @@ impl<'a> BcbCounters<'a> {
         }

         if traversal.is_complete() {
-            Ok(collect_intermediate_expressions)
+            Ok(())
         } else {
             Error::from_string(format!(
                 "`TraverseCoverageGraphWithLoops` missed some `BasicCoverageBlock`s: {:?}",
@@ -197,7 +285,6 @@ impl<'a> BcbCounters<'a> {
         traversal: &mut TraverseCoverageGraphWithLoops,
         branching_bcb: BasicCoverageBlock,
         branching_counter_operand: Operand,
-        collect_intermediate_expressions: &mut Vec<CoverageKind>,
     ) -> Result<(), Error> {
         let branches = self.bcb_branches(branching_bcb);
         debug!(
@@ -205,9 +292,7 @@ impl<'a> BcbCounters<'a> {
             branching_bcb,
             branches
                 .iter()
-                .map(|branch| {
-                    format!("{:?}: {:?}", branch, branch.counter(&self.basic_coverage_blocks))
-                })
+                .map(|branch| { format!("{:?}: {:?}", branch, self.branch_counter(branch)) })
                 .collect::<Vec<_>>()
                 .join("\n  "),
         );
@@ -233,17 +318,10 @@ impl<'a> BcbCounters<'a> {
                     counter",
                     branch, branching_bcb
                 );
-                self.get_or_make_counter_operand(
-                    branch.target_bcb,
-                    collect_intermediate_expressions,
-                )?
+                self.get_or_make_counter_operand(branch.target_bcb)?
             } else {
                 debug!("  {:?} has multiple incoming edges, so adding an edge counter", branch);
-                self.get_or_make_edge_counter_operand(
-                    branching_bcb,
-                    branch.target_bcb,
-                    collect_intermediate_expressions,
-                )?
+                self.get_or_make_edge_counter_operand(branching_bcb, branch.target_bcb)?
            };
            if let Some(sumup_counter_operand) =
                some_sumup_counter_operand.replace(branch_counter_operand)
@@ -259,7 +337,7 @@ impl<'a> BcbCounters<'a> {
                    self.format_counter(&intermediate_expression)
                );
                let intermediate_expression_operand = intermediate_expression.as_operand();
-                collect_intermediate_expressions.push(intermediate_expression);
+                self.coverage_counters.intermediate_expressions.push(intermediate_expression);
                some_sumup_counter_operand.replace(intermediate_expression_operand);
            }
        }
@@ -284,29 +362,24 @@ impl<'a> BcbCounters<'a> {
         debug!("{:?} gets an expression: {}", expression_branch, self.format_counter(&expression));
         let bcb = expression_branch.target_bcb;
         if expression_branch.is_only_path_to_target() {
-            self.basic_coverage_blocks[bcb].set_counter(expression)?;
+            self.coverage_counters.set_bcb_counter(bcb, expression)?;
         } else {
-            self.basic_coverage_blocks[bcb].set_edge_counter_from(branching_bcb, expression)?;
+            self.coverage_counters.set_bcb_edge_counter(branching_bcb, bcb, expression)?;
         }
         Ok(())
     }

-    fn get_or_make_counter_operand(
-        &mut self,
-        bcb: BasicCoverageBlock,
-        collect_intermediate_expressions: &mut Vec<CoverageKind>,
-    ) -> Result<Operand, Error> {
-        self.recursive_get_or_make_counter_operand(bcb, collect_intermediate_expressions, 1)
+    fn get_or_make_counter_operand(&mut self, bcb: BasicCoverageBlock) -> Result<Operand, Error> {
+        self.recursive_get_or_make_counter_operand(bcb, 1)
     }

     fn recursive_get_or_make_counter_operand(
         &mut self,
         bcb: BasicCoverageBlock,
-        collect_intermediate_expressions: &mut Vec<CoverageKind>,
         debug_indent_level: usize,
     ) -> Result<Operand, Error> {
         // If the BCB already has a counter, return it.
-        if let Some(counter_kind) = self.basic_coverage_blocks[bcb].counter() {
+        if let Some(counter_kind) = &self.coverage_counters.bcb_counters[bcb] {
             debug!(
                 "{}{:?} already has a counter: {}",
                 NESTED_INDENT.repeat(debug_indent_level),
@@ -339,7 +412,7 @@ impl<'a> BcbCounters<'a> {
                    self.format_counter(&counter_kind),
                );
            }
-            return self.basic_coverage_blocks[bcb].set_counter(counter_kind);
+            return self.coverage_counters.set_bcb_counter(bcb, counter_kind);
         }

         // A BCB with multiple incoming edges can compute its count by `Expression`, summing up the
@@ -355,7 +428,6 @@ impl<'a> BcbCounters<'a> {
         let first_edge_counter_operand = self.recursive_get_or_make_edge_counter_operand(
             predecessors.next().unwrap(),
             bcb,
-            collect_intermediate_expressions,
             debug_indent_level + 1,
         )?;
         let mut some_sumup_edge_counter_operand = None;
@@ -363,7 +435,6 @@ impl<'a> BcbCounters<'a> {
             let edge_counter_operand = self.recursive_get_or_make_edge_counter_operand(
                 predecessor,
                 bcb,
-                collect_intermediate_expressions,
                 debug_indent_level + 1,
             )?;
             if let Some(sumup_edge_counter_operand) =
@@ -381,7 +452,7 @@ impl<'a> BcbCounters<'a> {
                    self.format_counter(&intermediate_expression)
                );
                let intermediate_expression_operand = intermediate_expression.as_operand();
-                collect_intermediate_expressions.push(intermediate_expression);
+                self.coverage_counters.intermediate_expressions.push(intermediate_expression);
                some_sumup_edge_counter_operand.replace(intermediate_expression_operand);
            }
        }
@@ -397,43 +468,34 @@ impl<'a> BcbCounters<'a> {
             bcb,
             self.format_counter(&counter_kind)
         );
-        self.basic_coverage_blocks[bcb].set_counter(counter_kind)
+        self.coverage_counters.set_bcb_counter(bcb, counter_kind)
     }

     fn get_or_make_edge_counter_operand(
         &mut self,
         from_bcb: BasicCoverageBlock,
         to_bcb: BasicCoverageBlock,
-        collect_intermediate_expressions: &mut Vec<CoverageKind>,
     ) -> Result<Operand, Error> {
-        self.recursive_get_or_make_edge_counter_operand(
-            from_bcb,
-            to_bcb,
-            collect_intermediate_expressions,
-            1,
-        )
+        self.recursive_get_or_make_edge_counter_operand(from_bcb, to_bcb, 1)
     }

     fn recursive_get_or_make_edge_counter_operand(
         &mut self,
         from_bcb: BasicCoverageBlock,
         to_bcb: BasicCoverageBlock,
-        collect_intermediate_expressions: &mut Vec<CoverageKind>,
         debug_indent_level: usize,
     ) -> Result<Operand, Error> {
         // If the source BCB has only one successor (assumed to be the given target), an edge
         // counter is unnecessary. Just get or make a counter for the source BCB.
         let successors = self.bcb_successors(from_bcb).iter();
         if successors.len() == 1 {
-            return self.recursive_get_or_make_counter_operand(
-                from_bcb,
-                collect_intermediate_expressions,
-                debug_indent_level + 1,
-            );
+            return self.recursive_get_or_make_counter_operand(from_bcb, debug_indent_level + 1);
         }

         // If the edge already has a counter, return it.
-        if let Some(counter_kind) = self.basic_coverage_blocks[to_bcb].edge_counter_from(from_bcb) {
+        if let Some(counter_kind) =
+            self.coverage_counters.bcb_edge_counters.get(&(from_bcb, to_bcb))
+        {
             debug!(
                 "{}Edge {:?}->{:?} already has a counter: {}",
                 NESTED_INDENT.repeat(debug_indent_level),
@@ -454,7 +516,7 @@ impl<'a> BcbCounters<'a> {
             to_bcb,
             self.format_counter(&counter_kind)
         );
-        self.basic_coverage_blocks[to_bcb].set_edge_counter_from(from_bcb, counter_kind)
+        self.coverage_counters.set_bcb_edge_counter(from_bcb, to_bcb, counter_kind)
     }

     /// Select a branch for the expression, either the recommended `reloop_branch`, or if none was
@@ -464,8 +526,7 @@ impl<'a> BcbCounters<'a> {
         traversal: &TraverseCoverageGraphWithLoops,
         branches: &[BcbBranch],
     ) -> BcbBranch {
-        let branch_needs_a_counter =
-            |branch: &BcbBranch| branch.counter(&self.basic_coverage_blocks).is_none();
+        let branch_needs_a_counter = |branch: &BcbBranch| self.branch_has_no_counter(branch);

         let some_reloop_branch = self.find_some_reloop_branch(traversal, &branches);
         if let Some(reloop_branch_without_counter) =
@@ -478,10 +539,8 @@ impl<'a> BcbCounters<'a> {
             );
             reloop_branch_without_counter
         } else {
-            let &branch_without_counter = branches
-                .iter()
-                .find(|&&branch| branch.counter(&self.basic_coverage_blocks).is_none())
-                .expect(
+            let &branch_without_counter =
+                branches.iter().find(|&branch| self.branch_has_no_counter(branch)).expect(
                     "needs_branch_counters was `true` so there should be at least one \
                     branch",
                 );
@@ -508,8 +567,7 @@ impl<'a> BcbCounters<'a> {
         traversal: &TraverseCoverageGraphWithLoops,
         branches: &[BcbBranch],
     ) -> Option<BcbBranch> {
-        let branch_needs_a_counter =
-            |branch: &BcbBranch| branch.counter(&self.basic_coverage_blocks).is_none();
+        let branch_needs_a_counter = |branch: &BcbBranch| self.branch_has_no_counter(branch);

         let mut some_reloop_branch: Option<BcbBranch> = None;
         for context in traversal.context_stack.iter().rev() {
@@ -520,7 +578,7 @@ impl<'a> BcbCounters<'a> {
                    self.bcb_dominates(branch.target_bcb, backedge_from_bcb)
                }) {
                    if let Some(reloop_branch) = some_reloop_branch {
-                        if reloop_branch.counter(&self.basic_coverage_blocks).is_none() {
+                        if self.branch_has_no_counter(&reloop_branch) {
                            // we already found a candidate reloop_branch that still
                            // needs a counter
                            continue;
@@ -586,12 +644,24 @@ impl<'a> BcbCounters<'a> {
     }

     fn bcb_needs_branch_counters(&self, bcb: BasicCoverageBlock) -> bool {
-        let branch_needs_a_counter =
-            |branch: &BcbBranch| branch.counter(&self.basic_coverage_blocks).is_none();
+        let branch_needs_a_counter = |branch: &BcbBranch| self.branch_has_no_counter(branch);
         let branches = self.bcb_branches(bcb);
         branches.len() > 1 && branches.iter().any(branch_needs_a_counter)
     }
+    fn branch_has_no_counter(&self, branch: &BcbBranch) -> bool {
+        self.branch_counter(branch).is_none()
+    }
+
+    fn branch_counter(&self, branch: &BcbBranch) -> Option<&CoverageKind> {
+        let to_bcb = branch.target_bcb;
+        if let Some(from_bcb) = branch.edge_from_bcb {
+            self.coverage_counters.bcb_edge_counters.get(&(from_bcb, to_bcb))
+        } else {
+            self.coverage_counters.bcb_counters[to_bcb].as_ref()
+        }
+    }
+
     /// Returns true if the BasicCoverageBlock has zero or one incoming edge. (If zero, it should be
     /// the entry point for the function.)
     #[inline]
diff --git a/compiler/rustc_mir_transform/src/coverage/debug.rs b/compiler/rustc_mir_transform/src/coverage/debug.rs
index 26f9cfd0b86c2..d2c0c4ba06922 100644
--- a/compiler/rustc_mir_transform/src/coverage/debug.rs
+++ b/compiler/rustc_mir_transform/src/coverage/debug.rs
@@ -108,6 +108,7 @@
 //! recursively, generating labels with nested operations, enclosed in parentheses
 //! (for example: `bcb2 + (bcb0 - bcb1)`).

+use super::counters::CoverageCounters;
 use super::graph::{BasicCoverageBlock, BasicCoverageBlockData, CoverageGraph};
 use super::spans::CoverageSpan;

@@ -659,18 +660,21 @@ pub(super) fn dump_coverage_graphviz<'tcx>(
     mir_body: &mir::Body<'tcx>,
     pass_name: &str,
     basic_coverage_blocks: &CoverageGraph,
-    debug_counters: &DebugCounters,
+    coverage_counters: &CoverageCounters,
     graphviz_data: &GraphvizData,
     intermediate_expressions: &[CoverageKind],
     debug_used_expressions: &UsedExpressions,
 ) {
+    let debug_counters = &coverage_counters.debug_counters;
+
     let mir_source = mir_body.source;
     let def_id = mir_source.def_id();
     let node_content = |bcb| {
         bcb_to_string_sections(
             tcx,
             mir_body,
-            debug_counters,
+            coverage_counters,
+            bcb,
             &basic_coverage_blocks[bcb],
             graphviz_data.get_bcb_coverage_spans_with_counters(bcb),
             graphviz_data.get_bcb_dependency_counters(bcb),
@@ -736,12 +740,15 @@ pub(super) fn dump_coverage_graphviz<'tcx>(
 fn bcb_to_string_sections<'tcx>(
     tcx: TyCtxt<'tcx>,
     mir_body: &mir::Body<'tcx>,
-    debug_counters: &DebugCounters,
+    coverage_counters: &CoverageCounters,
+    bcb: BasicCoverageBlock,
     bcb_data: &BasicCoverageBlockData,
     some_coverage_spans_with_counters: Option<&[(CoverageSpan, CoverageKind)]>,
     some_dependency_counters: Option<&[CoverageKind]>,
     some_intermediate_expressions: Option<&[CoverageKind]>,
 ) -> Vec<String> {
+    let debug_counters = &coverage_counters.debug_counters;
+
     let len = bcb_data.basic_blocks.len();
     let mut sections = Vec::new();
     if let Some(collect_intermediate_expressions) = some_intermediate_expressions {
@@ -777,7 +784,7 @@ fn bcb_to_string_sections<'tcx>(
            .join(" \n"),
        ));
     }
-    if let Some(counter_kind) = &bcb_data.counter_kind {
+    if let Some(counter_kind) = coverage_counters.bcb_counter(bcb) {
         sections.push(format!("{counter_kind:?}"));
     }
     let non_term_blocks = bcb_data.basic_blocks[0..len - 1]
diff --git a/compiler/rustc_mir_transform/src/coverage/graph.rs b/compiler/rustc_mir_transform/src/coverage/graph.rs
index f94dad4c8da67..59b01ffec0f12 100644
--- a/compiler/rustc_mir_transform/src/coverage/graph.rs
+++ b/compiler/rustc_mir_transform/src/coverage/graph.rs
@@ -1,12 +1,8 @@
-use super::Error;
-
 use itertools::Itertools;
-use rustc_data_structures::fx::FxHashMap;
 use rustc_data_structures::graph::dominators::{self, Dominators};
 use rustc_data_structures::graph::{self, GraphSuccessors, WithNumNodes, WithStartNode};
 use rustc_index::bit_set::BitSet;
 use rustc_index::{IndexSlice, IndexVec};
-use rustc_middle::mir::coverage::*;
 use rustc_middle::mir::{self, BasicBlock, BasicBlockData, Terminator, TerminatorKind};
 use std::cmp::Ordering;
@@ -15,10 +11,7 @@ use std::ops::{Index, IndexMut};

 const ID_SEPARATOR: &str = ",";

 /// A coverage-specific simplification of the MIR control flow graph (CFG). The `CoverageGraph`s
-/// nodes are `BasicCoverageBlock`s, which encompass one or more MIR `BasicBlock`s, plus a
-/// `CoverageKind` counter (to be added by `CoverageCounters::make_bcb_counters`), and an optional
-/// set of additional counters--if needed--to count incoming edges, if there are more than one.
-/// (These "edge counters" are eventually converted into new MIR `BasicBlock`s.)
+/// nodes are `BasicCoverageBlock`s, which encompass one or more MIR `BasicBlock`s.
 #[derive(Debug)]
 pub(super) struct CoverageGraph {
     bcbs: IndexVec<BasicCoverageBlock, BasicCoverageBlockData>,
@@ -195,13 +188,6 @@ impl CoverageGraph {
         self.bcbs.iter_enumerated()
     }

-    #[inline(always)]
-    pub fn iter_enumerated_mut(
-        &mut self,
-    ) -> impl Iterator<Item = (BasicCoverageBlock, &mut BasicCoverageBlockData)> {
-        self.bcbs.iter_enumerated_mut()
-    }
-
     #[inline(always)]
     pub fn bcb_from_bb(&self, bb: BasicBlock) -> Option<BasicCoverageBlock> {
         if bb.index() < self.bb_to_bcb.len() { self.bb_to_bcb[bb] } else { None }
@@ -320,14 +306,12 @@ rustc_index::newtype_index! {
 #[derive(Debug, Clone)]
 pub(super) struct BasicCoverageBlockData {
     pub basic_blocks: Vec<BasicBlock>,
-    pub counter_kind: Option<CoverageKind>,
-    edge_from_bcbs: Option<FxHashMap<BasicCoverageBlock, CoverageKind>>,
 }

 impl BasicCoverageBlockData {
     pub fn from(basic_blocks: Vec<BasicBlock>) -> Self {
         assert!(basic_blocks.len() > 0);
-        Self { basic_blocks, counter_kind: None, edge_from_bcbs: None }
+        Self { basic_blocks }
     }

     #[inline(always)]
@@ -345,80 +329,6 @@ impl BasicCoverageBlockData {
         &mir_body[self.last_bb()].terminator()
     }

-    pub fn set_counter(&mut self, counter_kind: CoverageKind) -> Result<Operand, Error> {
-        debug_assert!(
-            // If the BCB has an edge counter (to be injected into a new `BasicBlock`), it can also
-            // have an expression (to be injected into an existing `BasicBlock` represented by this
-            // `BasicCoverageBlock`).
-            self.edge_from_bcbs.is_none() || counter_kind.is_expression(),
-            "attempt to add a `Counter` to a BCB target with existing incoming edge counters"
-        );
-        let operand = counter_kind.as_operand();
-        if let Some(replaced) = self.counter_kind.replace(counter_kind) {
-            Error::from_string(format!(
-                "attempt to set a BasicCoverageBlock coverage counter more than once; \
-                {self:?} already had counter {replaced:?}",
-            ))
-        } else {
-            Ok(operand)
-        }
-    }
-
-    #[inline(always)]
-    pub fn counter(&self) -> Option<&CoverageKind> {
-        self.counter_kind.as_ref()
-    }
-
-    #[inline(always)]
-    pub fn take_counter(&mut self) -> Option<CoverageKind> {
-        self.counter_kind.take()
-    }
-
-    pub fn set_edge_counter_from(
-        &mut self,
-        from_bcb: BasicCoverageBlock,
-        counter_kind: CoverageKind,
-    ) -> Result<Operand, Error> {
-        if level_enabled!(tracing::Level::DEBUG) {
-            // If the BCB has an edge counter (to be injected into a new `BasicBlock`), it can also
-            // have an expression (to be injected into an existing `BasicBlock` represented by this
-            // `BasicCoverageBlock`).
-            if self.counter_kind.as_ref().is_some_and(|c| !c.is_expression()) {
-                return Error::from_string(format!(
-                    "attempt to add an incoming edge counter from {from_bcb:?} when the target BCB already \
-                    has a `Counter`"
-                ));
-            }
-        }
-        let operand = counter_kind.as_operand();
-        if let Some(replaced) =
-            self.edge_from_bcbs.get_or_insert_default().insert(from_bcb, counter_kind)
-        {
-            Error::from_string(format!(
-                "attempt to set an edge counter more than once; from_bcb: \
-                {from_bcb:?} already had counter {replaced:?}",
-            ))
-        } else {
-            Ok(operand)
-        }
-    }
-
-    #[inline]
-    pub fn edge_counter_from(&self, from_bcb: BasicCoverageBlock) -> Option<&CoverageKind> {
-        if let Some(edge_from_bcbs) = &self.edge_from_bcbs {
-            edge_from_bcbs.get(&from_bcb)
-        } else {
-            None
-        }
-    }
-
-    #[inline]
-    pub fn take_edge_counters(
-        &mut self,
-    ) -> Option<impl Iterator<Item = (BasicCoverageBlock, CoverageKind)>> {
-        self.edge_from_bcbs.take().map(|m| m.into_iter())
-    }
-
     pub fn id(&self) -> String {
         format!("@{}", self.basic_blocks.iter().map(|bb| bb.index().to_string()).join(ID_SEPARATOR))
     }
@@ -448,17 +358,6 @@ impl BcbBranch {
         Self { edge_from_bcb, target_bcb: to_bcb }
     }

-    pub fn counter<'a>(
-        &self,
-        basic_coverage_blocks: &'a CoverageGraph,
-    ) -> Option<&'a CoverageKind> {
-        if let Some(from_bcb) = self.edge_from_bcb {
-            basic_coverage_blocks[self.target_bcb].edge_counter_from(from_bcb)
-        } else {
-            basic_coverage_blocks[self.target_bcb].counter()
-        }
-    }
-
     pub fn is_only_path_to_target(&self) -> bool {
         self.edge_from_bcb.is_none()
     }
diff --git a/compiler/rustc_mir_transform/src/coverage/mod.rs b/compiler/rustc_mir_transform/src/coverage/mod.rs
index f713613d313aa..e08b6d6f6e8d8 100644
--- a/compiler/rustc_mir_transform/src/coverage/mod.rs
+++ b/compiler/rustc_mir_transform/src/coverage/mod.rs
@@ -137,6 +137,8 @@ impl<'a, 'tcx> Instrumentor<'a, 'tcx> {
         let function_source_hash = hash_mir_source(tcx, hir_body);
         let basic_coverage_blocks = CoverageGraph::from_mir(mir_body);

+        let coverage_counters = CoverageCounters::new(function_source_hash, &basic_coverage_blocks);
+
         Self {
             pass_name,
             tcx,
@@ -145,7 +147,7 @@ impl<'a, 'tcx> Instrumentor<'a, 'tcx> {
             fn_sig_span,
             body_span,
             basic_coverage_blocks,
-            coverage_counters: CoverageCounters::new(function_source_hash),
+            coverage_counters,
         }
     }

@@ -199,52 +201,47 @@ impl<'a, 'tcx> Instrumentor<'a, 'tcx> {
         // `BasicCoverageBlock`s not already associated with a `CoverageSpan`.
         //
         // Intermediate expressions (used to compute other `Expression` values), which have no
-        // direct associate to any `BasicCoverageBlock`, are returned in the method `Result`.
-        let intermediate_expressions_or_error = self
+        // direct association with any `BasicCoverageBlock`, are accumulated inside `coverage_counters`.
+        let result = self
             .coverage_counters
             .make_bcb_counters(&mut self.basic_coverage_blocks, &coverage_spans);

-        let (result, intermediate_expressions) = match intermediate_expressions_or_error {
-            Ok(intermediate_expressions) => {
-                // If debugging, add any intermediate expressions (which are not associated with any
-                // BCB) to the `debug_used_expressions` map.
-                if debug_used_expressions.is_enabled() {
-                    for intermediate_expression in &intermediate_expressions {
-                        debug_used_expressions.add_expression_operands(intermediate_expression);
-                    }
+        if let Ok(()) = result {
+            // If debugging, add any intermediate expressions (which are not associated with any
+            // BCB) to the `debug_used_expressions` map.
+            if debug_used_expressions.is_enabled() {
+                for intermediate_expression in &self.coverage_counters.intermediate_expressions {
+                    debug_used_expressions.add_expression_operands(intermediate_expression);
                 }
-
-                ////////////////////////////////////////////////////
-                // Remove the counter or edge counter from of each `CoverageSpan`s associated
-                // `BasicCoverageBlock`, and inject a `Coverage` statement into the MIR.
-                //
-                // `Coverage` statements injected from `CoverageSpan`s will include the code regions
-                // (source code start and end positions) to be counted by the associated counter.
-                //
-                // These `CoverageSpan`-associated counters are removed from their associated
-                // `BasicCoverageBlock`s so that the only remaining counters in the `CoverageGraph`
-                // are indirect counters (to be injected next, without associated code regions).
-                self.inject_coverage_span_counters(
-                    coverage_spans,
-                    &mut graphviz_data,
-                    &mut debug_used_expressions,
-                );
-
-                ////////////////////////////////////////////////////
-                // For any remaining `BasicCoverageBlock` counters (that were not associated with
-                // any `CoverageSpan`), inject `Coverage` statements (_without_ code region `Span`s)
-                // to ensure `BasicCoverageBlock` counters that other `Expression`s may depend on
-                // are in fact counted, even though they don't directly contribute to counting
-                // their own independent code region's coverage.
-                self.inject_indirect_counters(&mut graphviz_data, &mut debug_used_expressions);
-
-                // Intermediate expressions will be injected as the final step, after generating
-                // debug output, if any.
-                ////////////////////////////////////////////////////
-
-                (Ok(()), intermediate_expressions)
             }
-            Err(e) => (Err(e), Vec::new()),
+
+            ////////////////////////////////////////////////////
+            // Remove the counter or edge counter from each `CoverageSpan`'s associated
+            // `BasicCoverageBlock`, and inject a `Coverage` statement into the MIR.
+            //
+            // `Coverage` statements injected from `CoverageSpan`s will include the code regions
+            // (source code start and end positions) to be counted by the associated counter.
+            //
+            // These `CoverageSpan`-associated counters are removed from their associated
+            // `BasicCoverageBlock`s so that the only remaining counters in the `CoverageGraph`
+            // are indirect counters (to be injected next, without associated code regions).
+            self.inject_coverage_span_counters(
+                coverage_spans,
+                &mut graphviz_data,
+                &mut debug_used_expressions,
+            );
+
+            ////////////////////////////////////////////////////
+            // For any remaining `BasicCoverageBlock` counters (that were not associated with
+            // any `CoverageSpan`), inject `Coverage` statements (_without_ code region `Span`s)
+            // to ensure `BasicCoverageBlock` counters that other `Expression`s may depend on
+            // are in fact counted, even though they don't directly contribute to counting
+            // their own independent code region's coverage.
+            self.inject_indirect_counters(&mut graphviz_data, &mut debug_used_expressions);
+
+            // Intermediate expressions will be injected as the final step, after generating
+            // debug output, if any.
+ //////////////////////////////////////////////////// }; if graphviz_data.is_enabled() { @@ -255,9 +252,9 @@ impl<'a, 'tcx> Instrumentor<'a, 'tcx> { self.mir_body, self.pass_name, &self.basic_coverage_blocks, - &self.coverage_counters.debug_counters, + &self.coverage_counters, &graphviz_data, - &intermediate_expressions, + &self.coverage_counters.intermediate_expressions, &debug_used_expressions, ); } @@ -273,7 +270,7 @@ impl<'a, 'tcx> Instrumentor<'a, 'tcx> { //////////////////////////////////////////////////// // Finally, inject the intermediate expressions collected along the way. - for intermediate_expression in intermediate_expressions { + for intermediate_expression in self.coverage_counters.intermediate_expressions.drain(..) { inject_intermediate_expression(self.mir_body, intermediate_expression); } } @@ -303,7 +300,7 @@ impl<'a, 'tcx> Instrumentor<'a, 'tcx> { let span = covspan.span; let counter_kind = if let Some(&counter_operand) = bcb_counters[bcb].as_ref() { self.coverage_counters.make_identity_counter(counter_operand) - } else if let Some(counter_kind) = self.bcb_data_mut(bcb).take_counter() { + } else if let Some(counter_kind) = self.coverage_counters.take_bcb_counter(bcb) { bcb_counters[bcb] = Some(counter_kind.as_operand()); debug_used_expressions.add_expression_operands(&counter_kind); counter_kind @@ -343,19 +340,17 @@ impl<'a, 'tcx> Instrumentor<'a, 'tcx> { debug_used_expressions: &mut debug::UsedExpressions, ) { let mut bcb_counters_without_direct_coverage_spans = Vec::new(); - for (target_bcb, target_bcb_data) in self.basic_coverage_blocks.iter_enumerated_mut() { - if let Some(counter_kind) = target_bcb_data.take_counter() { - bcb_counters_without_direct_coverage_spans.push((None, target_bcb, counter_kind)); - } - if let Some(edge_counters) = target_bcb_data.take_edge_counters() { - for (from_bcb, counter_kind) in edge_counters { - bcb_counters_without_direct_coverage_spans.push(( - Some(from_bcb), - target_bcb, - counter_kind, - )); - } - } + for (target_bcb, counter_kind) in self.coverage_counters.drain_bcb_counters() { + bcb_counters_without_direct_coverage_spans.push((None, target_bcb, counter_kind)); + } + for ((from_bcb, target_bcb), counter_kind) in + self.coverage_counters.drain_bcb_edge_counters() + { + bcb_counters_without_direct_coverage_spans.push(( + Some(from_bcb), + target_bcb, + counter_kind, + )); } // If debug is enabled, validate that every BCB or edge counter not directly associated @@ -430,11 +425,6 @@ impl<'a, 'tcx> Instrumentor<'a, 'tcx> { &self.basic_coverage_blocks[bcb] } - #[inline] - fn bcb_data_mut(&mut self, bcb: BasicCoverageBlock) -> &mut BasicCoverageBlockData { - &mut self.basic_coverage_blocks[bcb] - } - #[inline] fn format_counter(&self, counter_kind: &CoverageKind) -> String { self.coverage_counters.debug_counters.format_counter(counter_kind) diff --git a/compiler/rustc_mir_transform/src/coverage/tests.rs b/compiler/rustc_mir_transform/src/coverage/tests.rs index 248a192f8f571..d797a6057a7f5 100644 --- a/compiler/rustc_mir_transform/src/coverage/tests.rs +++ b/compiler/rustc_mir_transform/src/coverage/tests.rs @@ -675,16 +675,16 @@ fn test_make_bcb_counters() { )); } } - let mut coverage_counters = counters::CoverageCounters::new(0); - let intermediate_expressions = coverage_counters + let mut coverage_counters = counters::CoverageCounters::new(0, &basic_coverage_blocks); + let () = coverage_counters .make_bcb_counters(&mut basic_coverage_blocks, &coverage_spans) .expect("should be Ok"); - assert_eq!(intermediate_expressions.len(), 
0);
+    assert_eq!(coverage_counters.intermediate_expressions.len(), 0);

     let_bcb!(1);
     assert_eq!(
         0, // bcb1 has a `Counter` with id = 0
-        match basic_coverage_blocks[bcb1].counter().expect("should have a counter") {
+        match coverage_counters.bcb_counter(bcb1).expect("should have a counter") {
             CoverageKind::Counter { id, .. } => id,
             _ => panic!("expected a Counter"),
         }
@@ -694,7 +694,7 @@ fn test_make_bcb_counters() {
     let_bcb!(2);
     assert_eq!(
         1, // bcb2 has a `Counter` with id = 1
-        match basic_coverage_blocks[bcb2].counter().expect("should have a counter") {
+        match coverage_counters.bcb_counter(bcb2).expect("should have a counter") {
             CoverageKind::Counter { id, .. } => id,
             _ => panic!("expected a Counter"),
         }
diff --git a/config.example.toml b/config.example.toml
index b10922f8d0411..fc5de5f9537c9 100644
--- a/config.example.toml
+++ b/config.example.toml
@@ -758,8 +758,10 @@ changelog-seen = 2
 # This option will override the same option under [build] section.
 #sanitizers = build.sanitizers (bool)

-# Build the profiler runtime for this target(required when compiling with options that depend
-# on this runtime, such as `-C profile-generate` or `-C instrument-coverage`).
+# When true, build the profiler runtime for this target (required when compiling
+# with options that depend on this runtime, such as `-C profile-generate` or
+# `-C instrument-coverage`). This may also be given a path to an existing build
+# of the profiling runtime library from LLVM's compiler-rt.
 # This option will override the same option under [build] section.
 #profiler = build.profiler (bool)

diff --git a/library/core/src/iter/adapters/map_windows.rs b/library/core/src/iter/adapters/map_windows.rs
new file mode 100644
index 0000000000000..3c0e80b2559db
--- /dev/null
+++ b/library/core/src/iter/adapters/map_windows.rs
@@ -0,0 +1,293 @@
+use crate::{
+    fmt,
+    iter::{ExactSizeIterator, FusedIterator},
+    mem::{self, MaybeUninit},
+    ptr,
+};
+
+/// An iterator over the mapped windows of another iterator.
+///
+/// This `struct` is created by [`Iterator::map_windows`]. See its
+/// documentation for more information.
+#[must_use = "iterators are lazy and do nothing unless consumed"]
+#[unstable(feature = "iter_map_windows", reason = "recently added", issue = "87155")]
+pub struct MapWindows<I: Iterator, F, const N: usize> {
+    f: F,
+    inner: MapWindowsInner<I, N>,
+}
+
+struct MapWindowsInner<I: Iterator, const N: usize> {
+    // We fuse the inner iterator because there shouldn't be "holes" in
+    // the sliding window. Once the iterator returns a `None`, we make
+    // our `MapWindows` iterator return `None` forever.
+    iter: Option<I>,
+    // Iterators are assumed to be lazy, i.e. they only yield an item when
+    // `Iterator::next()` is called, and `MapWindows` is no exception.
+    //
+    // Before the first iteration, we keep the buffer `None`. When the user
+    // first calls `next` or another method that advances the iterator,
+    // we collect the first `N` items yielded by the inner iterator and
+    // put them into the buffer.
+    //
+    // When the inner iterator has returned `None` (i.e. fused), we take
+    // away this `buffer` and leave it `None` to reclaim its resources.
+    //
+    // FIXME: should we shrink the size of `buffer` using niche optimization?
+    buffer: Option<Buffer<I::Item, N>>,
+}
+
+// `Buffer` uses twice the space to reduce the number of moves between iterations.
+// `Buffer<T, N>` is semantically `[MaybeUninit<T>; 2 * N]`. However, due
+// to limitations of const generics, we use this different type. Note that
+// it has the same underlying memory layout.
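To see the `2 * N` layout trick in isolation, here is a safe, self-contained sketch (a hypothetical `Window` type, not part of this patch): the window stays contiguous in a buffer of capacity `2 * N`, and elements only get moved back to the front once per `N` pushes. Unlike the real `Buffer`, this stand-in leans on `Vec` and does not drop the outgoing element eagerly.

```rust
struct Window<T, const N: usize> {
    buf: Vec<T>, // at most 2 * N elements; the window is buf[start..start + N]
    start: usize,
}

impl<T, const N: usize> Window<T, N> {
    fn new(first: [T; N]) -> Self {
        Self { buf: Vec::from(first), start: 0 }
    }

    fn as_window(&self) -> &[T] {
        &self.buf[self.start..self.start + N]
    }

    fn push(&mut self, next: T) {
        if self.buf.len() == 2 * N {
            // The window has reached the back half: discard everything in
            // front of it and move the surviving N - 1 elements to the
            // front (one bulk move per N pushes).
            self.buf.drain(..=self.start);
            self.buf.push(next);
            self.start = 0;
        } else {
            // Room left in the back half: append and slide the window.
            self.buf.push(next);
            self.start += 1;
        }
    }
}

fn main() {
    let mut w = Window::<char, 3>::new(['a', 'b', 'c']);
    for c in ['d', 'e', 'f', 'g'] {
        w.push(c);
    }
    assert_eq!(w.as_window(), &['e', 'f', 'g']);
}
```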
+struct Buffer<T, const N: usize> {
+    // Invariant: `self.buffer[self.start..self.start + N]` is initialized,
+    // with all other elements being uninitialized. This also
+    // implies that `self.start <= N`.
+    buffer: [[MaybeUninit<T>; N]; 2],
+    start: usize,
+}
+
+impl<I: Iterator, F, const N: usize> MapWindows<I, F, N> {
+    pub(in crate::iter) fn new(iter: I, f: F) -> Self {
+        assert!(N != 0, "array in `Iterator::map_windows` must contain more than 0 elements");
+
+        // Only ZST arrays' length can be so large.
+        if mem::size_of::<I::Item>() == 0 {
+            assert!(
+                N.checked_mul(2).is_some(),
+                "array size of `Iterator::map_windows` is too large"
+            );
+        }
+
+        Self { inner: MapWindowsInner::new(iter), f }
+    }
+}
+
+impl<I: Iterator, const N: usize> MapWindowsInner<I, N> {
+    #[inline]
+    fn new(iter: I) -> Self {
+        Self { iter: Some(iter), buffer: None }
+    }
+
+    fn next_window(&mut self) -> Option<&[I::Item; N]> {
+        let iter = self.iter.as_mut()?;
+        match self.buffer {
+            // This is the first advance: collect the first `N` items
+            // from `self.iter` to initialize `self.buffer`.
+            None => self.buffer = Buffer::try_from_iter(iter),
+            Some(ref mut buffer) => match iter.next() {
+                None => {
+                    // Fuse the inner iterator since it yields a `None`.
+                    self.iter.take();
+                    self.buffer.take();
+                }
+                // Advance the iterator. We first call `next` before changing our buffer
+                // at all. This means that if `next` panics, our invariant is upheld and
+                // our `Drop` impl drops the correct elements.
+                Some(item) => buffer.push(item),
+            },
+        }
+        self.buffer.as_ref().map(Buffer::as_array_ref)
+    }
+
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        let Some(ref iter) = self.iter else { return (0, Some(0)) };
+        let (lo, hi) = iter.size_hint();
+        if self.buffer.is_some() {
+            // If the first `N` items are already yielded by the inner iterator,
+            // the size hint is then equal to that of the inner iterator.
+            (lo, hi)
+        } else {
+            // If the first `N` items are not yet yielded by the inner iterator,
+            // the first `N` elements should be counted as one window, so both bounds
+            // should subtract `N - 1`.
+            (lo.saturating_sub(N - 1), hi.map(|hi| hi.saturating_sub(N - 1)))
+        }
+    }
+}
+
+impl<T, const N: usize> Buffer<T, N> {
+    fn try_from_iter(iter: &mut impl Iterator<Item = T>) -> Option<Self> {
+        let first_half = crate::array::iter_next_chunk(iter).ok()?;
+        let buffer = [MaybeUninit::new(first_half).transpose(), MaybeUninit::uninit_array()];
+        Some(Self { buffer, start: 0 })
+    }
+
+    #[inline]
+    fn buffer_ptr(&self) -> *const MaybeUninit<T> {
+        self.buffer.as_ptr().cast()
+    }
+
+    #[inline]
+    fn buffer_mut_ptr(&mut self) -> *mut MaybeUninit<T> {
+        self.buffer.as_mut_ptr().cast()
+    }
+
+    #[inline]
+    fn as_array_ref(&self) -> &[T; N] {
+        debug_assert!(self.start + N <= 2 * N);
+
+        // SAFETY: our invariant guarantees these elements are initialized.
+        unsafe { &*self.buffer_ptr().add(self.start).cast() }
+    }
+
+    #[inline]
+    fn as_uninit_array_mut(&mut self) -> &mut MaybeUninit<[T; N]> {
+        debug_assert!(self.start + N <= 2 * N);
+
+        // SAFETY: our invariant guarantees these elements are in bounds.
+        unsafe { &mut *self.buffer_mut_ptr().add(self.start).cast() }
+    }
+
+    /// Pushes a new item `next` to the back, and pops the front-most one.
+    ///
+    /// All the elements will be shifted to the front end when pushing reaches
+    /// the back end.
+    fn push(&mut self, next: T) {
+        let buffer_mut_ptr = self.buffer_mut_ptr();
+        debug_assert!(self.start + N <= 2 * N);
+
+        let to_drop = if self.start == N {
+            // We have reached the end of our buffer and have to copy
+            // everything to the start. Example layout for N = 3.
+            //
+            //    0   1   2   3   4   5            0   1   2   3   4   5
+            //  ┌───┬───┬───┬───┬───┬───┐        ┌───┬───┬───┬───┬───┬───┐
+            //  │ - │ - │ - │ a │ b │ c │   ->   │ b │ c │ n │ - │ - │ - │
+            //  └───┴───┴───┴───┴───┴───┘        └───┴───┴───┴───┴───┴───┘
+            //                ↑                    ↑
+            //              start                start
+
+            // SAFETY: the two pointers are valid for reads/writes of N - 1
+            // elements because our array's size is semantically 2 * N. The
+            // regions also don't overlap for the same reason.
+            //
+            // We leave the old elements in place. As soon as `start` is set
+            // to 0, we treat them as uninitialized and treat their copies
+            // as initialized.
+            let to_drop = unsafe {
+                ptr::copy_nonoverlapping(buffer_mut_ptr.add(self.start + 1), buffer_mut_ptr, N - 1);
+                (*buffer_mut_ptr.add(N - 1)).write(next);
+                buffer_mut_ptr.add(self.start)
+            };
+            self.start = 0;
+            to_drop
+        } else {
+            // SAFETY: `self.start` is < N as guaranteed by the invariant
+            // plus the check above. Even if the drop at the end panics,
+            // the invariant is upheld.
+            //
+            // Example layout for N = 3:
+            //
+            //    0   1   2   3   4   5            0   1   2   3   4   5
+            //  ┌───┬───┬───┬───┬───┬───┐        ┌───┬───┬───┬───┬───┬───┐
+            //  │ - │ a │ b │ c │ - │ - │   ->   │ - │ - │ b │ c │ n │ - │
+            //  └───┴───┴───┴───┴───┴───┘        └───┴───┴───┴───┴───┴───┘
+            //        ↑                                    ↑
+            //      start                                start
+            //
+            let to_drop = unsafe {
+                (*buffer_mut_ptr.add(self.start + N)).write(next);
+                buffer_mut_ptr.add(self.start)
+            };
+            self.start += 1;
+            to_drop
+        };
+
+        // SAFETY: the index is valid and this is element `a` in the
+        // diagram above and has not been dropped yet.
+        unsafe { ptr::drop_in_place(to_drop.cast::<T>()) };
+    }
+}
+
+impl<T: Clone, const N: usize> Clone for Buffer<T, N> {
+    fn clone(&self) -> Self {
+        let mut buffer = Buffer {
+            buffer: [MaybeUninit::uninit_array(), MaybeUninit::uninit_array()],
+            start: self.start,
+        };
+        buffer.as_uninit_array_mut().write(self.as_array_ref().clone());
+        buffer
+    }
+}
+
+impl<I, const N: usize> Clone for MapWindowsInner<I, N>
+where
+    I: Iterator + Clone,
+    I::Item: Clone,
+{
+    fn clone(&self) -> Self {
+        Self { iter: self.iter.clone(), buffer: self.buffer.clone() }
+    }
+}
+
+impl<T, const N: usize> Drop for Buffer<T, N> {
+    fn drop(&mut self) {
+        // SAFETY: our invariant guarantees that N elements starting from
+        // `self.start` are initialized. We drop them here.
+        unsafe {
+            let initialized_part: *mut [T] = crate::ptr::slice_from_raw_parts_mut(
+                self.buffer_mut_ptr().add(self.start).cast(),
+                N,
+            );
+            ptr::drop_in_place(initialized_part);
+        }
+    }
+}
+
+#[unstable(feature = "iter_map_windows", reason = "recently added", issue = "87155")]
+impl<I, F, R, const N: usize> Iterator for MapWindows<I, F, N>
+where
+    I: Iterator,
+    F: FnMut(&[I::Item; N]) -> R,
+{
+    type Item = R;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        let window = self.inner.next_window()?;
+        let out = (self.f)(window);
+        Some(out)
+    }
+
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.inner.size_hint()
+    }
+}
+
+// Note that even if the inner iterator is not fused, the `MapWindows` is still fused,
+// because we don't allow "holes" in the mapping window.
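As a sanity check on the `size_hint` arithmetic in `MapWindowsInner::size_hint` above: before the buffer is filled, both bounds are adjusted with `saturating_sub(N - 1)`, because an inner iterator yielding `k` items produces `k - N + 1` windows, or none at all when `k < N`. A tiny illustration of that formula (the `window_count` helper is made up for this example):

```rust
fn main() {
    // For k items and windows of size N (N >= 1), the number of windows is
    // k - (N - 1), saturating at zero: the same adjustment `size_hint` makes.
    fn window_count(k: usize, n: usize) -> usize {
        k.saturating_sub(n - 1)
    }
    assert_eq!(window_count(4, 2), 3); // "abcd" -> ab, bc, cd
    assert_eq!(window_count(5, 5), 1); // exactly one full window
    assert_eq!(window_count(1, 3), 0); // fewer than N items: no windows
}
```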
+#[unstable(feature = "iter_map_windows", reason = "recently added", issue = "87155")]
+impl<I, F, R, const N: usize> FusedIterator for MapWindows<I, F, N>
+where
+    I: Iterator,
+    F: FnMut(&[I::Item; N]) -> R,
+{
+}
+
+#[unstable(feature = "iter_map_windows", reason = "recently added", issue = "87155")]
+impl<I, F, R, const N: usize> ExactSizeIterator for MapWindows<I, F, N>
+where
+    I: ExactSizeIterator,
+    F: FnMut(&[I::Item; N]) -> R,
+{
+}
+
+#[unstable(feature = "iter_map_windows", reason = "recently added", issue = "87155")]
+impl<I: Iterator + fmt::Debug, F, const N: usize> fmt::Debug for MapWindows<I, F, N> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("MapWindows").field("iter", &self.inner.iter).finish()
+    }
+}
+
+#[unstable(feature = "iter_map_windows", reason = "recently added", issue = "87155")]
+impl<I, F, const N: usize> Clone for MapWindows<I, F, N>
+where
+    I: Iterator + Clone,
+    F: Clone,
+    I::Item: Clone,
+{
+    fn clone(&self) -> Self {
+        Self { f: self.f.clone(), inner: self.inner.clone() }
+    }
+}
diff --git a/library/core/src/iter/adapters/mod.rs b/library/core/src/iter/adapters/mod.rs
index 8cc2b7cec4165..6f4fa7010f446 100644
--- a/library/core/src/iter/adapters/mod.rs
+++ b/library/core/src/iter/adapters/mod.rs
@@ -16,6 +16,7 @@ mod inspect;
 mod intersperse;
 mod map;
 mod map_while;
+mod map_windows;
 mod peekable;
 mod rev;
 mod scan;
@@ -57,6 +58,9 @@ pub use self::intersperse::{Intersperse, IntersperseWith};
 #[stable(feature = "iter_map_while", since = "1.57.0")]
 pub use self::map_while::MapWhile;

+#[unstable(feature = "iter_map_windows", reason = "recently added", issue = "87155")]
+pub use self::map_windows::MapWindows;
+
 #[unstable(feature = "trusted_random_access", issue = "none")]
 pub use self::zip::TrustedRandomAccess;

diff --git a/library/core/src/iter/mod.rs b/library/core/src/iter/mod.rs
index be04dfe042e9f..ca977d1ef8240 100644
--- a/library/core/src/iter/mod.rs
+++ b/library/core/src/iter/mod.rs
@@ -440,6 +440,8 @@ pub use self::adapters::Copied;
 pub use self::adapters::Flatten;
 #[stable(feature = "iter_map_while", since = "1.57.0")]
 pub use self::adapters::MapWhile;
+#[unstable(feature = "iter_map_windows", reason = "recently added", issue = "87155")]
+pub use self::adapters::MapWindows;
 #[unstable(feature = "inplace_iteration", issue = "none")]
 pub use self::adapters::SourceIter;
 #[stable(feature = "iterator_step_by", since = "1.28.0")]
diff --git a/library/core/src/iter/traits/iterator.rs b/library/core/src/iter/traits/iterator.rs
index cecc120a6e28d..ac1fc26a1efac 100644
--- a/library/core/src/iter/traits/iterator.rs
+++ b/library/core/src/iter/traits/iterator.rs
@@ -10,7 +10,8 @@ use super::super::{ArrayChunks, Chain, Cloned, Copied, Cycle, Enumerate, Filter,
 use super::super::{FlatMap, Flatten};
 use super::super::{FromIterator, Intersperse, IntersperseWith, Product, Sum, Zip};
 use super::super::{
-    Inspect, Map, MapWhile, Peekable, Rev, Scan, Skip, SkipWhile, StepBy, Take, TakeWhile,
+    Inspect, Map, MapWhile, MapWindows, Peekable, Rev, Scan, Skip, SkipWhile, StepBy, Take,
+    TakeWhile,
 };

 fn _assert_is_object_safe(_: &dyn Iterator) {}

@@ -1591,6 +1592,163 @@ pub trait Iterator {
         Flatten::new(self)
     }

+    /// Calls the given function `f` for each contiguous window of size `N` over
+    /// `self` and returns an iterator over the outputs of `f`. Like [`slice::windows()`],
+    /// the windows during mapping overlap as well.
+    ///
+    /// In the following example, the closure is called three times with the
+    /// arguments `&['a', 'b']`, `&['b', 'c']` and `&['c', 'd']` respectively.
+    ///
+    /// ```
+    /// #![feature(iter_map_windows)]
+    ///
+    /// let strings = "abcd".chars()
+    ///     .map_windows(|[x, y]| format!("{}+{}", x, y))
+    ///     .collect::<Vec<String>>();
+    ///
+    /// assert_eq!(strings, vec!["a+b", "b+c", "c+d"]);
+    /// ```
+    ///
+    /// Note that the const parameter `N` is usually inferred by the
+    /// destructured argument in the closure.
+    ///
+    /// The returned iterator yields 𝑘 − `N` + 1 items (where 𝑘 is the number of
+    /// items yielded by `self`). If 𝑘 is less than `N`, this method yields an
+    /// empty iterator.
+    ///
+    /// The returned iterator implements [`FusedIterator`], because once `self`
+    /// returns `None`, even if it later returns a `Some(T)` again,
+    /// we cannot put it into a contiguous array buffer, and thus the returned iterator
+    /// should be fused.
+    ///
+    /// [`slice::windows()`]: slice::windows
+    /// [`FusedIterator`]: crate::iter::FusedIterator
+    ///
+    /// # Panics
+    ///
+    /// Panics if `N` is 0. This check will most probably get changed to a
+    /// compile time error before this method gets stabilized.
+    ///
+    /// ```should_panic
+    /// #![feature(iter_map_windows)]
+    ///
+    /// let iter = std::iter::repeat(0).map_windows(|&[]| ());
+    /// ```
+    ///
+    /// # Examples
+    ///
+    /// Building the sums of neighboring numbers.
+    ///
+    /// ```
+    /// #![feature(iter_map_windows)]
+    ///
+    /// let mut it = [1, 3, 8, 1].iter().map_windows(|&[a, b]| a + b);
+    /// assert_eq!(it.next(), Some(4));  // 1 + 3
+    /// assert_eq!(it.next(), Some(11)); // 3 + 8
+    /// assert_eq!(it.next(), Some(9));  // 8 + 1
+    /// assert_eq!(it.next(), None);
+    /// ```
+    ///
+    /// Since the elements in the following example implement `Copy`, we can
+    /// just copy the array and get an iterator over the windows.
+    ///
+    /// ```
+    /// #![feature(iter_map_windows)]
+    ///
+    /// let mut it = "ferris".chars().map_windows(|w: &[_; 3]| *w);
+    /// assert_eq!(it.next(), Some(['f', 'e', 'r']));
+    /// assert_eq!(it.next(), Some(['e', 'r', 'r']));
+    /// assert_eq!(it.next(), Some(['r', 'r', 'i']));
+    /// assert_eq!(it.next(), Some(['r', 'i', 's']));
+    /// assert_eq!(it.next(), None);
+    /// ```
+    ///
+    /// You can also use this function to check the sortedness of an iterator.
+    /// For that simple case, prefer [`Iterator::is_sorted`].
+    ///
+    /// ```
+    /// #![feature(iter_map_windows)]
+    ///
+    /// let mut it = [0.5, 1.0, 3.5, 3.0, 8.5, 8.5, f32::NAN].iter()
+    ///     .map_windows(|[a, b]| a <= b);
+    ///
+    /// assert_eq!(it.next(), Some(true));  // 0.5 <= 1.0
+    /// assert_eq!(it.next(), Some(true));  // 1.0 <= 3.5
+    /// assert_eq!(it.next(), Some(false)); // 3.5 <= 3.0
+    /// assert_eq!(it.next(), Some(true));  // 3.0 <= 8.5
+    /// assert_eq!(it.next(), Some(true));  // 8.5 <= 8.5
+    /// assert_eq!(it.next(), Some(false)); // 8.5 <= NAN
+    /// assert_eq!(it.next(), None);
+    /// ```
+    ///
+    /// Non-fused iterators become fused after `map_windows`.
+    ///
+    /// ```
+    /// #![feature(iter_map_windows)]
+    ///
+    /// #[derive(Default)]
+    /// struct NonFusedIterator {
+    ///     state: i32,
+    /// }
+    ///
+    /// impl Iterator for NonFusedIterator {
+    ///     type Item = i32;
+    ///
+    ///     fn next(&mut self) -> Option<i32> {
+    ///         let val = self.state;
+    ///         self.state = self.state + 1;
+    ///
+    ///         // yields `0..5` first, then only even numbers since `6..`.
+    ///         if val < 5 || val % 2 == 0 {
+    ///             Some(val)
+    ///         } else {
+    ///             None
+    ///         }
+    ///     }
+    /// }
+    ///
+    ///
+    /// let mut iter = NonFusedIterator::default();
+    ///
+    /// // yields 0..5 first.
+    /// assert_eq!(iter.next(), Some(0));
+    /// assert_eq!(iter.next(), Some(1));
+    /// assert_eq!(iter.next(), Some(2));
+    /// assert_eq!(iter.next(), Some(3));
+    /// assert_eq!(iter.next(), Some(4));
+    /// // then we can see our iterator going back and forth
+    /// assert_eq!(iter.next(), None);
+    /// assert_eq!(iter.next(), Some(6));
+    /// assert_eq!(iter.next(), None);
+    /// assert_eq!(iter.next(), Some(8));
+    /// assert_eq!(iter.next(), None);
+    ///
+    /// // however, with `.map_windows()`, it is fused.
+    /// let mut iter = NonFusedIterator::default()
+    ///     .map_windows(|arr: &[_; 2]| *arr);
+    ///
+    /// assert_eq!(iter.next(), Some([0, 1]));
+    /// assert_eq!(iter.next(), Some([1, 2]));
+    /// assert_eq!(iter.next(), Some([2, 3]));
+    /// assert_eq!(iter.next(), Some([3, 4]));
+    /// assert_eq!(iter.next(), None);
+    ///
+    /// // it will always return `None` after the first time.
+    /// assert_eq!(iter.next(), None);
+    /// assert_eq!(iter.next(), None);
+    /// assert_eq!(iter.next(), None);
+    /// ```
+    #[inline]
+    #[unstable(feature = "iter_map_windows", reason = "recently added", issue = "87155")]
+    #[rustc_do_not_const_check]
+    fn map_windows<F, R, const N: usize>(self, f: F) -> MapWindows<Self, F, N>
+    where
+        Self: Sized,
+        F: FnMut(&[Self::Item; N]) -> R,
+    {
+        MapWindows::new(self, f)
+    }
+
     /// Creates an iterator which ends after the first [`None`].
     ///
     /// After an iterator returns [`None`], future calls may or may not yield
diff --git a/library/core/tests/iter/adapters/map_windows.rs b/library/core/tests/iter/adapters/map_windows.rs
new file mode 100644
index 0000000000000..7fb2408f8acb7
--- /dev/null
+++ b/library/core/tests/iter/adapters/map_windows.rs
@@ -0,0 +1,283 @@
+use std::sync::atomic::{AtomicUsize, Ordering::SeqCst};
+
+#[cfg(not(panic = "abort"))]
+mod drop_checks {
+    //! These tests mainly make sure the elements are correctly dropped.
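+    //!
+    //! The strategy: every `DropCheck` value registers itself in a shared
+    //! `DropInfo` when created and deregisters when dropped, so after driving
+    //! a `map_windows` iterator (possibly panicking partway through, caught
+    //! via `catch_unwind`), `DropInfo::check` can assert that no value was
+    //! dropped twice and that no value is still alive.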
+    use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst};
+
+    #[derive(Debug)]
+    struct DropInfo {
+        dropped_twice: AtomicBool,
+        alive_count: AtomicUsize,
+    }
+
+    impl DropInfo {
+        const fn new() -> Self {
+            Self { dropped_twice: AtomicBool::new(false), alive_count: AtomicUsize::new(0) }
+        }
+
+        #[track_caller]
+        fn check(&self) {
+            assert!(!self.dropped_twice.load(SeqCst), "a value was dropped twice");
+            assert_eq!(self.alive_count.load(SeqCst), 0);
+        }
+    }
+
+    #[derive(Debug)]
+    struct DropCheck<'a> {
+        info: &'a DropInfo,
+        was_dropped: bool,
+    }
+
+    impl<'a> DropCheck<'a> {
+        fn new(info: &'a DropInfo) -> Self {
+            info.alive_count.fetch_add(1, SeqCst);
+
+            Self { info, was_dropped: false }
+        }
+    }
+
+    impl Drop for DropCheck<'_> {
+        fn drop(&mut self) {
+            if self.was_dropped {
+                self.info.dropped_twice.store(true, SeqCst);
+            }
+            self.was_dropped = true;
+
+            self.info.alive_count.fetch_sub(1, SeqCst);
+        }
+    }
+
+    fn iter(info: &DropInfo, len: usize, panic_at: usize) -> impl Iterator<Item = DropCheck<'_>> {
+        (0..len).map(move |i| {
+            if i == panic_at {
+                panic!("intended panic");
+            }
+            DropCheck::new(info)
+        })
+    }
+
+    #[track_caller]
+    fn check<const N: usize>(len: usize, panic_at: usize) {
+        check_drops(|info| {
+            iter(info, len, panic_at).map_windows(|_: &[_; N]| {}).last();
+        });
+    }
+
+    #[track_caller]
+    fn check_drops(f: impl FnOnce(&DropInfo)) {
+        let info = DropInfo::new();
+        let _ = std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| {
+            f(&info);
+        }));
+        info.check();
+    }
+
+    #[test]
+    fn no_iter_panic_n1() {
+        check::<1>(0, 100);
+        check::<1>(1, 100);
+        check::<1>(2, 100);
+        check::<1>(13, 100);
+    }
+
+    #[test]
+    fn no_iter_panic_n2() {
+        check::<2>(0, 100);
+        check::<2>(1, 100);
+        check::<2>(2, 100);
+        check::<2>(3, 100);
+        check::<2>(13, 100);
+    }
+
+    #[test]
+    fn no_iter_panic_n5() {
+        check::<5>(0, 100);
+        check::<5>(1, 100);
+        check::<5>(2, 100);
+        check::<5>(13, 100);
+        check::<5>(30, 100);
+    }
+
+    #[test]
+    fn panic_in_first_batch() {
+        check::<1>(7, 0);
+
+        check::<2>(7, 0);
+        check::<2>(7, 1);
+
+        check::<3>(7, 0);
+        check::<3>(7, 1);
+        check::<3>(7, 2);
+    }
+
+    #[test]
+    fn panic_in_middle() {
+        check::<1>(7, 1);
+        check::<1>(7, 5);
+        check::<1>(7, 6);
+
+        check::<2>(7, 2);
+        check::<2>(7, 5);
+        check::<2>(7, 6);
+
+        check::<5>(13, 5);
+        check::<5>(13, 8);
+        check::<5>(13, 12);
+    }
+
+    #[test]
+    fn len_equals_n() {
+        check::<1>(1, 100);
+        check::<1>(1, 0);
+
+        check::<2>(2, 100);
+        check::<2>(2, 0);
+        check::<2>(2, 1);
+
+        check::<5>(5, 100);
+        check::<5>(5, 0);
+        check::<5>(5, 1);
+        check::<5>(5, 4);
+    }
+}
+
+#[test]
+fn output_n1() {
+    assert_eq!("".chars().map_windows(|[c]| *c).collect::<Vec<_>>(), vec![]);
+    assert_eq!("x".chars().map_windows(|[c]| *c).collect::<Vec<_>>(), vec!['x']);
+    assert_eq!("abcd".chars().map_windows(|[c]| *c).collect::<Vec<_>>(), vec!['a', 'b', 'c', 'd']);
+}
+
+#[test]
+fn output_n2() {
+    assert_eq!(
+        "".chars().map_windows(|a: &[_; 2]| *a).collect::<Vec<_>>(),
+        Vec::<[char; 2]>::new(),
+    );
+    assert_eq!("ab".chars().map_windows(|a: &[_; 2]| *a).collect::<Vec<_>>(), vec![['a', 'b']]);
+    assert_eq!(
+        "abcd".chars().map_windows(|a: &[_; 2]| *a).collect::<Vec<_>>(),
+        vec![['a', 'b'], ['b', 'c'], ['c', 'd']],
+    );
+}
+
+#[test]
+fn test_case_from_pr_82413_comment() {
+    for () in std::iter::repeat("0".to_owned()).map_windows(|_: &[_; 3]| {}).take(4) {}
+}
+
+#[test]
+#[should_panic = "array in `Iterator::map_windows` must contain more than 0 elements"]
+fn check_zero_window() {
+    let _ = std::iter::repeat(0).map_windows(|_: &[_; 0]| ());
+}
+
+#[test]
+fn test_zero_sized_type() {
+    #[derive(Copy, Clone, Debug, Eq, PartialEq)]
+    struct Data;
+    let data: Vec<_> =
+        std::iter::repeat(Data).take(10).map_windows(|arr: &[Data; 5]| *arr).collect();
+    assert_eq!(data, [[Data; 5]; 6]);
+}
+
+#[test]
+#[should_panic = "array size of `Iterator::map_windows` is too large"]
+fn test_too_large_array_size() {
+    let _ = std::iter::repeat(()).map_windows(|arr: &[(); usize::MAX]| *arr);
+}
+
+#[test]
+fn test_laziness() {
+    let counter = AtomicUsize::new(0);
+    let mut iter = (0..5)
+        .inspect(|_| {
+            counter.fetch_add(1, SeqCst);
+        })
+        .map_windows(|arr: &[i32; 2]| *arr);
+    assert_eq!(counter.load(SeqCst), 0);
+
+    assert_eq!(iter.next(), Some([0, 1]));
+    // The first iteration consumes N items (N = 2).
+    assert_eq!(counter.load(SeqCst), 2);
+
+    assert_eq!(iter.next(), Some([1, 2]));
+    assert_eq!(counter.load(SeqCst), 3);
+
+    assert_eq!(iter.next(), Some([2, 3]));
+    assert_eq!(counter.load(SeqCst), 4);
+
+    assert_eq!(iter.next(), Some([3, 4]));
+    assert_eq!(counter.load(SeqCst), 5);
+
+    assert_eq!(iter.next(), None);
+    assert_eq!(counter.load(SeqCst), 5);
+}
+
+#[test]
+fn test_size_hint() {
+    struct SizeHintCheckHelper((usize, Option<usize>));
+
+    impl Iterator for SizeHintCheckHelper {
+        type Item = i32;
+
+        fn next(&mut self) -> Option<i32> {
+            let (ref mut lo, ref mut hi) = self.0;
+            let next = (*hi != Some(0)).then_some(0);
+            *lo = lo.saturating_sub(1);
+            if let Some(hi) = hi {
+                *hi = hi.saturating_sub(1);
+            }
+            next
+        }
+
+        fn size_hint(&self) -> (usize, Option<usize>) {
+            self.0
+        }
+    }
+
+    fn check_size_hint<const N: usize>(
+        size_hint: (usize, Option<usize>),
+        mut mapped_size_hint: (usize, Option<usize>),
+    ) {
+        let mut iter = SizeHintCheckHelper(size_hint);
+        let mut mapped_iter = iter.by_ref().map_windows(|_: &[_; N]| ());
+        while mapped_iter.size_hint().0 > 0 {
+            assert_eq!(mapped_iter.size_hint(), mapped_size_hint);
+            assert!(mapped_iter.next().is_some());
+            mapped_size_hint.0 -= 1;
+            mapped_size_hint.1 = mapped_size_hint.1.map(|hi| hi.saturating_sub(1));
+        }
+    }
+
+    check_size_hint::<1>((0, None), (0, None));
+    check_size_hint::<1>((0, Some(0)), (0, Some(0)));
+    check_size_hint::<1>((0, Some(2)), (0, Some(2)));
+    check_size_hint::<1>((1, None), (1, None));
+    check_size_hint::<1>((1, Some(1)), (1, Some(1)));
+    check_size_hint::<1>((1, Some(4)), (1, Some(4)));
+    check_size_hint::<1>((5, None), (5, None));
+    check_size_hint::<1>((5, Some(5)), (5, Some(5)));
+    check_size_hint::<1>((5, Some(10)), (5, Some(10)));
+
+    check_size_hint::<2>((0, None), (0, None));
+    check_size_hint::<2>((0, Some(0)), (0, Some(0)));
+    check_size_hint::<2>((0, Some(2)), (0, Some(1)));
+    check_size_hint::<2>((1, None), (0, None));
+    check_size_hint::<2>((1, Some(1)), (0, Some(0)));
+    check_size_hint::<2>((1, Some(4)), (0, Some(3)));
+    check_size_hint::<2>((5, None), (4, None));
+    check_size_hint::<2>((5, Some(5)), (4, Some(4)));
+    check_size_hint::<2>((5, Some(10)), (4, Some(9)));
+
+    check_size_hint::<5>((0, None), (0, None));
+    check_size_hint::<5>((0, Some(0)), (0, Some(0)));
+    check_size_hint::<5>((0, Some(2)), (0, Some(0)));
+    check_size_hint::<5>((1, None), (0, None));
+    check_size_hint::<5>((1, Some(1)), (0, Some(0)));
+    check_size_hint::<5>((1, Some(4)), (0, Some(0)));
+    check_size_hint::<5>((5, None), (1, None));
+    check_size_hint::<5>((5, Some(5)), (1, Some(1)));
+    check_size_hint::<5>((5, Some(10)), (1, Some(6)));
+}
diff --git a/library/core/tests/iter/adapters/mod.rs b/library/core/tests/iter/adapters/mod.rs
index ca3463aa7f782..dedb4c0a9dd5f 100644
--- a/library/core/tests/iter/adapters/mod.rs
+++ b/library/core/tests/iter/adapters/mod.rs
@@ -13,6 +13,7 @@
 mod inspect;
 mod intersperse;
 mod map;
+mod map_windows;
 mod peekable;
 mod scan;
 mod skip;
diff --git a/library/core/tests/lib.rs b/library/core/tests/lib.rs
index 897a5e9b87063..d849561bb21b8 100644
--- a/library/core/tests/lib.rs
+++ b/library/core/tests/lib.rs
@@ -110,6 +110,7 @@
 #![feature(is_ascii_octdigit)]
 #![feature(get_many_mut)]
 #![feature(offset_of)]
+#![feature(iter_map_windows)]
 
 #![deny(unsafe_op_in_unsafe_fn)]
 #![deny(fuzzy_provenance_casts)]
diff --git a/library/profiler_builtins/build.rs b/library/profiler_builtins/build.rs
index 1b1f11798d74d..d14d0b82229a1 100644
--- a/library/profiler_builtins/build.rs
+++ b/library/profiler_builtins/build.rs
@@ -6,6 +6,12 @@ use std::env;
 use std::path::Path;
 
 fn main() {
+    println!("cargo:rerun-if-env-changed=LLVM_PROFILER_RT_LIB");
+    if let Ok(rt) = env::var("LLVM_PROFILER_RT_LIB") {
+        println!("cargo:rustc-link-lib=static:+verbatim={rt}");
+        return;
+    }
+
     let target = env::var("TARGET").expect("TARGET was not set");
     let cfg = &mut cc::Build::new();
diff --git a/src/bootstrap/compile.rs b/src/bootstrap/compile.rs
index cad370ad75658..b3a88132c45ab 100644
--- a/src/bootstrap/compile.rs
+++ b/src/bootstrap/compile.rs
@@ -325,6 +325,10 @@ pub fn std_cargo(builder: &Builder<'_>, target: TargetSelection, stage: u32, car
         cargo.env("MACOSX_DEPLOYMENT_TARGET", target);
     }
 
+    if let Some(path) = builder.config.profiler_path(target) {
+        cargo.env("LLVM_PROFILER_RT_LIB", path);
+    }
+
     // Determine if we're going to compile in optimized C intrinsics to
     // the `compiler-builtins` crate. These intrinsics live in LLVM's
     // `compiler-rt` repository, but our `src/llvm-project` submodule isn't
diff --git a/src/bootstrap/config.rs b/src/bootstrap/config.rs
index 830c1a3b84680..4821d20a89890 100644
--- a/src/bootstrap/config.rs
+++ b/src/bootstrap/config.rs
@@ -534,7 +534,7 @@ pub struct Target {
     pub linker: Option<PathBuf>,
     pub ndk: Option<PathBuf>,
     pub sanitizers: Option<bool>,
-    pub profiler: Option<bool>,
+    pub profiler: Option<StringOrBool>,
     pub rpath: Option<bool>,
     pub crt_static: Option<bool>,
     pub musl_root: Option<PathBuf>,
@@ -863,9 +863,9 @@ define_config! {
     }
 }
 
-#[derive(Debug, Deserialize)]
+#[derive(Clone, Debug, Deserialize)]
 #[serde(untagged)]
-enum StringOrBool {
+pub enum StringOrBool {
     String(String),
     Bool(bool),
 }
@@ -876,6 +876,12 @@ impl Default for StringOrBool {
     }
 }
 
+impl StringOrBool {
+    fn is_string_or_true(&self) -> bool {
+        matches!(self, Self::String(_) | Self::Bool(true))
+    }
+}
+
 #[derive(Clone, Debug, PartialEq, Eq)]
 pub enum RustOptimize {
     String(String),
@@ -1038,7 +1044,7 @@ define_config! {
     llvm_libunwind: Option<String> = "llvm-libunwind",
     android_ndk: Option<String> = "android-ndk",
     sanitizers: Option<bool> = "sanitizers",
-    profiler: Option<bool> = "profiler",
+    profiler: Option<StringOrBool> = "profiler",
     rpath: Option<bool> = "rpath",
     crt_static: Option<bool> = "crt-static",
     musl_root: Option<String> = "musl-root",
@@ -1957,12 +1963,24 @@ impl Config {
         self.target_config.values().any(|t| t.sanitizers == Some(true)) || self.sanitizers
     }
 
+    pub fn profiler_path(&self, target: TargetSelection) -> Option<&str> {
+        match self.target_config.get(&target)?.profiler.as_ref()? {
+            StringOrBool::String(s) => Some(s),
+            StringOrBool::Bool(_) => None,
+        }
+    }
+
     pub fn profiler_enabled(&self, target: TargetSelection) -> bool {
-        self.target_config.get(&target).map(|t| t.profiler).flatten().unwrap_or(self.profiler)
+        self.target_config
+            .get(&target)
+            .and_then(|t| t.profiler.as_ref())
+            .map(StringOrBool::is_string_or_true)
+            .unwrap_or(self.profiler)
     }
 
     pub fn any_profiler_enabled(&self) -> bool {
-        self.target_config.values().any(|t| t.profiler == Some(true)) || self.profiler
+        self.target_config.values().any(|t| matches!(&t.profiler, Some(p) if p.is_string_or_true()))
+            || self.profiler
     }
 
     pub fn rpath_enabled(&self, target: TargetSelection) -> bool {
diff --git a/src/ci/github-actions/ci.yml b/src/ci/github-actions/ci.yml
index f900a5eb57679..2cc0bfd9db96a 100644
--- a/src/ci/github-actions/ci.yml
+++ b/src/ci/github-actions/ci.yml
@@ -316,10 +316,10 @@ jobs:
       matrix:
         include:
           - name: mingw-check
-            <<: *job-linux-16c
+            <<: *job-linux-4c
 
           - name: mingw-check-tidy
-            <<: *job-linux-16c
+            <<: *job-linux-4c
 
           - name: x86_64-gnu-llvm-15
             <<: *job-linux-16c
diff --git a/tests/rustdoc-gui/search-tab.goml b/tests/rustdoc-gui/search-tab.goml
index 2223598f02953..7bbde3ec23d3c 100644
--- a/tests/rustdoc-gui/search-tab.goml
+++ b/tests/rustdoc-gui/search-tab.goml
@@ -40,37 +40,37 @@ define-function: (
 
 call-function: ("check-colors", {
     "theme": "ayu",
-    "background": "rgba(0, 0, 0, 0)",
-    "background_selected": "rgb(20, 25, 32)",
-    "background_hover": "rgba(0, 0, 0, 0)",
-    "border_bottom": "0px none rgb(197, 197, 197)",
-    "border_bottom_selected": "1px solid rgb(255, 180, 76)",
+    "background": "transparent",
+    "background_selected": "#141920",
+    "background_hover": "transparent",
+    "border_bottom": "0px none #c5c5c5",
+    "border_bottom_selected": "1px solid #ffb44c",
     "border_bottom_hover": "1px solid rgba(242, 151, 24, 0.3)",
-    "border_top": "0px none rgb(197, 197, 197)",
-    "border_top_selected": "0px none rgb(197, 197, 197)",
-    "border_top_hover": "0px none rgb(197, 197, 197)",
+    "border_top": "0px none #c5c5c5",
+    "border_top_selected": "0px none #c5c5c5",
+    "border_top_hover": "0px none #c5c5c5",
 })
 
call-function: ("check-colors", {
     "theme": "dark",
-    "background": "rgb(37, 37, 37)",
-    "background_selected": "rgb(53, 53, 53)",
-    "background_hover": "rgb(53, 53, 53)",
-    "border_bottom": "0px none rgb(221, 221, 221)",
-    "border_bottom_selected": "0px none rgb(221, 221, 221)",
-    "border_bottom_hover": "0px none rgb(221, 221, 221)",
-    "border_top": "2px solid rgb(37, 37, 37)",
-    "border_top_selected": "2px solid rgb(0, 137, 255)",
-    "border_top_hover": "2px solid rgb(0, 137, 255)",
+    "background": "#252525",
+    "background_selected": "#353535",
+    "background_hover": "#353535",
+    "border_bottom": "0px none #ddd",
+    "border_bottom_selected": "0px none #ddd",
+    "border_bottom_hover": "0px none #ddd",
+    "border_top": "2px solid #252525",
+    "border_top_selected": "2px solid #0089ff",
+    "border_top_hover": "2px solid #0089ff",
 })
 
call-function: ("check-colors", {
     "theme": "light",
-    "background": "rgb(230, 230, 230)",
-    "background_selected": "rgb(255, 255, 255)",
-    "background_hover": "rgb(255, 255, 255)",
-    "border_bottom": "0px none rgb(0, 0, 0)",
-    "border_bottom_selected": "0px none rgb(0, 0, 0)",
-    "border_bottom_hover": "0px none rgb(0, 0, 0)",
-    "border_top": "2px solid rgb(230, 230, 230)",
-    "border_top_selected": "2px solid rgb(0, 137, 255)",
-    "border_top_hover": "2px solid rgb(0, 137, 255)",
+    "background": "#e6e6e6",
+    "background_selected": "#fff",
+    "background_hover": "#fff",
+    "border_bottom": "0px none #000",
+    "border_bottom_selected": "0px none #000",
+    "border_bottom_hover": "0px none #000",
+    "border_top": "2px solid #e6e6e6",
+    "border_top_selected": "2px solid #0089ff",
+    "border_top_hover": "2px solid #0089ff",
 })