diff --git a/.noir-sync-commit b/.noir-sync-commit
index b63a49f65c0..c5e4ffccf9e 100644
--- a/.noir-sync-commit
+++ b/.noir-sync-commit
@@ -1 +1 @@
-eb975ab28fb056cf92859377c02f2bb1a608eda3
+e88deaf4890a3c6d6af8b0760e952d10573c004d
\ No newline at end of file
diff --git a/noir/noir-repo/.github/workflows/reports.yml b/noir/noir-repo/.github/workflows/reports.yml
index 26b5844de27..1e355dc9e6b 100644
--- a/noir/noir-repo/.github/workflows/reports.yml
+++ b/noir/noir-repo/.github/workflows/reports.yml
@@ -277,9 +277,8 @@ jobs:
           - project: { repo: AztecProtocol/aztec-packages, path: noir-projects/noir-protocol-circuits/crates/private-kernel-inner }
           - project: { repo: AztecProtocol/aztec-packages, path: noir-projects/noir-protocol-circuits/crates/private-kernel-tail }
           - project: { repo: AztecProtocol/aztec-packages, path: noir-projects/noir-protocol-circuits/crates/private-kernel-reset }
-          # TODO: Bring these back once they no longer time out
-          # - project: { repo: AztecProtocol/aztec-packages, path: noir-projects/noir-protocol-circuits/crates/rollup-base-private }
-          # - project: { repo: AztecProtocol/aztec-packages, path: noir-projects/noir-protocol-circuits/crates/rollup-base-public }
+          - project: { repo: AztecProtocol/aztec-packages, path: noir-projects/noir-protocol-circuits/crates/rollup-base-private }
+          - project: { repo: AztecProtocol/aztec-packages, path: noir-projects/noir-protocol-circuits/crates/rollup-base-public }

     name: External repo compilation report - ${{ matrix.project.repo }}/${{ matrix.project.path }}
     steps:
diff --git a/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/mod.rs b/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/mod.rs
index 1b743227acf..3531825c709 100644
--- a/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/mod.rs
+++ b/noir/noir-repo/acvm-repo/acvm/src/compiler/optimizers/mod.rs
@@ -37,7 +37,7 @@ pub fn optimize<F: AcirField>(acir: Circuit<F>) -> (Circuit<F>, AcirTransformati
 /// Applies [`ProofSystemCompiler`][crate::ProofSystemCompiler] independent optimizations to a [`Circuit`].
 ///
 /// Accepts an injected `acir_opcode_positions` to allow optimizations to be applied in a loop.
-#[tracing::instrument(level = "trace", name = "optimize_acir" skip(acir))]
+#[tracing::instrument(level = "trace", name = "optimize_acir" skip(acir, acir_opcode_positions))]
 pub(super) fn optimize_internal<F: AcirField>(
     acir: Circuit<F>,
     acir_opcode_positions: Vec<usize>,
diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/checks/check_for_underconstrained_values.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/checks/check_for_underconstrained_values.rs
index 40c9dc03ec3..5a857ae24ab 100644
--- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/checks/check_for_underconstrained_values.rs
+++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/checks/check_for_underconstrained_values.rs
@@ -229,7 +229,7 @@ impl DependencyContext {
         function: &Function,
         all_functions: &BTreeMap<FunctionId, Function>,
     ) {
-        trace!("processing instructions of block {} of function {}", block, function);
+        trace!("processing instructions of block {} of function {}", block, function.id());

         for instruction in function.dfg[block].instructions() {
             let mut arguments = Vec::new();
@@ -319,11 +319,7 @@ impl DependencyContext {
                 Value::Function(callee) => match all_functions[&callee].runtime() {
                     RuntimeType::Brillig(_) => {
                         // Record arguments/results for each Brillig call for the check
-                        trace!(
-                            "Brillig function {} called at {}",
-                            all_functions[&callee],
-                            instruction
-                        );
+
                         self.tainted.insert(
                             *instruction,
                             BrilligTaintedIds::new(&arguments, &results),
@@ -376,7 +372,7 @@ impl DependencyContext {
             }
         }

-        trace!("resulting Brillig involved values: {:?}", self.tainted);
+        trace!("Number tainted Brillig calls: {}", self.tainted.len());
     }

     /// Every Brillig call not properly constrained should remain in the tainted set
@@ -392,7 +388,11 @@ impl DependencyContext {
             })
             .collect();

-        trace!("making following reports for function {}: {:?}", function.name(), warnings);
+        trace!(
+            "making {} under constrained reports for function {}",
+            warnings.len(),
+            function.name()
+        );
         warnings
     }
@@ -407,8 +407,6 @@ impl DependencyContext {
     /// Check if any of the recorded Brillig calls have been properly constrained
     /// by given values after recording partial constraints, if so stop tracking them
     fn clear_constrained(&mut self, constrained_values: &[ValueId], function: &Function) {
-        trace!("attempting to clear Brillig calls constrained by values: {:?}", constrained_values);
-
         // Remove numeric constants
         let constrained_values =
             constrained_values.iter().filter(|v| function.dfg.get_numeric_constant(**v).is_none());
diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/function_builder/data_bus.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/function_builder/data_bus.rs
index 1d18683ee9e..48af34d466c 100644
--- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/function_builder/data_bus.rs
+++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/function_builder/data_bus.rs
@@ -94,6 +94,20 @@ impl DataBus {
         DataBus { call_data, return_data: self.return_data.map(&mut f) }
     }

+    /// Updates the databus values in place with the provided function
+    pub(crate) fn map_values_mut(&mut self, mut f: impl FnMut(ValueId) -> ValueId) {
+        for cd in self.call_data.iter_mut() {
+            cd.array_id = f(cd.array_id);
+
+            // Can't mutate a hashmap's keys so we need to collect into a new one.
+            cd.index_map = cd.index_map.iter().map(|(k, v)| (f(*k), *v)).collect();
+        }
+
+        if let Some(data) = self.return_data.as_mut() {
+            *data = f(*data);
+        }
+    }
+
     pub(crate) fn call_data_array(&self) -> Vec<(u32, ValueId)> {
         self.call_data.iter().map(|cd| (cd.call_data_id, cd.array_id)).collect()
     }
diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/function_inserter.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/function_inserter.rs
index 6ebd2aa1105..9ae0839c75c 100644
--- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/function_inserter.rs
+++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/function_inserter.rs
@@ -73,25 +73,24 @@ impl<'f> FunctionInserter<'f> {

     /// Get an instruction and make sure all the values in it are freshly resolved.
     pub(crate) fn map_instruction(&mut self, id: InstructionId) -> (Instruction, CallStack) {
-        (
-            self.function.dfg[id].clone().map_values(|id| self.resolve(id)),
-            self.function.dfg.get_call_stack(id),
-        )
+        let mut instruction = self.function.dfg[id].clone();
+        instruction.map_values_mut(|id| self.resolve(id));
+        (instruction, self.function.dfg.get_call_stack(id))
     }

     /// Maps a terminator in place, replacing any ValueId in the terminator with the
     /// resolved version of that value id from this FunctionInserter's internal value mapping.
     pub(crate) fn map_terminator_in_place(&mut self, block: BasicBlockId) {
         let mut terminator = self.function.dfg[block].take_terminator();
-        terminator.mutate_values(|value| self.resolve(value));
+        terminator.map_values_mut(|value| self.resolve(value));
         self.function.dfg[block].set_terminator(terminator);
     }

     /// Maps the data bus in place, replacing any ValueId in the data bus with the
     /// resolved version of that value id from this FunctionInserter's internal value mapping.
     pub(crate) fn map_data_bus_in_place(&mut self) {
-        let data_bus = self.function.dfg.data_bus.clone();
-        let data_bus = data_bus.map_values(|value| self.resolve(value));
+        let mut data_bus = self.function.dfg.data_bus.clone();
+        data_bus.map_values_mut(|value| self.resolve(value));
         self.function.dfg.data_bus = data_bus;
     }
diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction.rs
index fb35978d906..346ac3ac11b 100644
--- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction.rs
+++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/ir/instruction.rs
@@ -669,6 +669,70 @@ impl Instruction {
         }
     }

+    /// Maps each ValueId inside this instruction to a new ValueId in place.
+    pub(crate) fn map_values_mut(&mut self, mut f: impl FnMut(ValueId) -> ValueId) {
+        match self {
+            Instruction::Binary(binary) => {
+                binary.lhs = f(binary.lhs);
+                binary.rhs = f(binary.rhs);
+            }
+            Instruction::Cast(value, _) => *value = f(*value),
+            Instruction::Not(value) => *value = f(*value),
+            Instruction::Truncate { value, bit_size: _, max_bit_size: _ } => {
+                *value = f(*value);
+            }
+            Instruction::Constrain(lhs, rhs, assert_message) => {
+                *lhs = f(*lhs);
+                *rhs = f(*rhs);
+                if let Some(ConstrainError::Dynamic(_, _, payload_values)) = assert_message {
+                    for value in payload_values {
+                        *value = f(*value);
+                    }
+                }
+            }
+            Instruction::Call { func, arguments } => {
+                *func = f(*func);
+                for argument in arguments {
+                    *argument = f(*argument);
+                }
+            }
+            Instruction::Allocate => (),
+            Instruction::Load { address } => *address = f(*address),
+            Instruction::Store { address, value } => {
+                *address = f(*address);
+                *value = f(*value);
+            }
+            Instruction::EnableSideEffectsIf { condition } => {
+                *condition = f(*condition);
+            }
+            Instruction::ArrayGet { array, index } => {
+                *array = f(*array);
+                *index = f(*index);
+            }
+            Instruction::ArraySet { array, index, value, mutable: _ } => {
+                *array = f(*array);
+                *index = f(*index);
+                *value = f(*value);
+            }
+            Instruction::IncrementRc { value } => *value = f(*value),
+            Instruction::DecrementRc { value } => *value = f(*value),
+            Instruction::RangeCheck { value, max_bit_size: _, assert_message: _ } => {
+                *value = f(*value);
+            }
+            Instruction::IfElse { then_condition, then_value, else_condition, else_value } => {
+                *then_condition = f(*then_condition);
+                *then_value = f(*then_value);
+                *else_condition = f(*else_condition);
+                *else_value = f(*else_value);
+            }
+            Instruction::MakeArray { elements, typ: _ } => {
+                for element in elements.iter_mut() {
+                    *element = f(*element);
+                }
+            }
+        }
+    }
+
     /// Applies a function to each input value this instruction holds.
     pub(crate) fn for_each_value<T>(&self, mut f: impl FnMut(ValueId) -> T) {
         match self {
@@ -1193,7 +1257,7 @@ impl TerminatorInstruction {
     }

     /// Mutate each ValueId to a new ValueId using the given mapping function
-    pub(crate) fn mutate_values(&mut self, mut f: impl FnMut(ValueId) -> ValueId) {
+    pub(crate) fn map_values_mut(&mut self, mut f: impl FnMut(ValueId) -> ValueId) {
         use TerminatorInstruction::*;
         match self {
             JmpIf { condition, .. } => {
diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs
index e2379043541..faa0594f3f0 100644
--- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs
+++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs
@@ -372,7 +372,7 @@ impl<'brillig> Context<'brillig> {
         dom: &mut DominatorTree,
         constraint_simplification_mapping: &HashMap,
     ) -> Instruction {
-        let instruction = dfg[instruction_id].clone();
+        let mut instruction = dfg[instruction_id].clone();

         // Alternate between resolving `value_id` in the `dfg` and checking to see if the resolved value
         // has been constrained to be equal to some simpler value in the current block.
@@ -400,9 +400,10 @@ impl<'brillig> Context<'brillig> {
         }

         // Resolve any inputs to ensure that we're comparing like-for-like instructions.
-        instruction.map_values(|value_id| {
+        instruction.map_values_mut(|value_id| {
             resolve_cache(block, dfg, dom, constraint_simplification_mapping, value_id)
-        })
+        });
+        instruction
     }

     /// Pushes a new [`Instruction`] into the [`DataFlowGraph`] which applies any optimizations
diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/die.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/die.rs
index b0843f327c1..675d7fd854e 100644
--- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/die.rs
+++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/die.rs
@@ -10,12 +10,14 @@ use crate::ssa::{
         function::Function,
         instruction::{BinaryOp, Instruction, InstructionId, Intrinsic},
         post_order::PostOrder,
-        types::NumericType,
+        types::{NumericType, Type},
         value::{Value, ValueId},
     },
     ssa_gen::Ssa,
 };

+use super::rc::{pop_rc_for, RcInstruction};
+
 impl Ssa {
     /// Performs Dead Instruction Elimination (DIE) to remove any instructions with
     /// unused results.
@@ -104,6 +106,8 @@ impl Context {
         let instructions_len = block.instructions().len();

+        let mut rc_tracker = RcTracker::default();
+
         // Indexes of instructions that might be out of bounds.
         // We'll remove those, but before that we'll insert bounds checks for them.
         let mut possible_index_out_of_bounds_indexes = Vec::new();
@@ -131,8 +135,13 @@ impl Context {
                     });
                 }
             }
+
+            rc_tracker.track_inc_rcs_to_remove(*instruction_id, function);
         }

+        self.instructions_to_remove.extend(rc_tracker.get_non_mutated_arrays(&function.dfg));
+        self.instructions_to_remove.extend(rc_tracker.rc_pairs_to_remove);
+
         // If there are some instructions that might trigger an out of bounds error,
         // first add constrain checks. Then run the DIE pass again, which will remove those
         // but leave the constrains (any any value needed by those constrains)
@@ -517,6 +526,112 @@ fn apply_side_effects(
     (lhs, rhs)
 }

+#[derive(Default)]
+struct RcTracker {
+    // We can track IncrementRc instructions per block to determine whether they are useless.
+    // IncrementRc and DecrementRc instructions are normally side effectual instructions, but we remove
+    // them if their value is not used anywhere in the function. However, even when their value is used, their existence
+    // is pointless logic if there is no array set between the increment and the decrement of the reference counter.
+    // We track per block whether an IncrementRc instruction has a paired DecrementRc instruction
+    // with the same value but no array set in between.
+    // If we see an inc/dec RC pair within a block we can safely remove both instructions.
+    rcs_with_possible_pairs: HashMap<Type, Vec<RcInstruction>>,
+    rc_pairs_to_remove: HashSet<InstructionId>,
+    // We also separately track all IncrementRc instructions and all array types which have been mutably borrowed.
+    // If an array is the same type as one of those non-mutated array types, we can safely remove all IncrementRc instructions on that array.
+    inc_rcs: HashMap<ValueId, HashSet<InstructionId>>,
+    mutated_array_types: HashSet<Type>,
+    // The SSA often creates patterns where after simplifications we end up with repeat
+    // IncrementRc instructions on the same value. We track whether the previous instruction was an IncrementRc,
+    // and if the current instruction is also an IncrementRc on the same value we remove the current instruction.
+    // `None` if the previous instruction was anything other than an IncrementRc
+    previous_inc_rc: Option<ValueId>,
+}
+
+impl RcTracker {
+    fn track_inc_rcs_to_remove(&mut self, instruction_id: InstructionId, function: &Function) {
+        let instruction = &function.dfg[instruction_id];
+
+        if let Instruction::IncrementRc { value } = instruction {
+            if let Some(previous_value) = self.previous_inc_rc {
+                if previous_value == *value {
+                    self.rc_pairs_to_remove.insert(instruction_id);
+                }
+            }
+            self.previous_inc_rc = Some(*value);
+        } else {
+            self.previous_inc_rc = None;
+        }
+
+        // DIE loops over a block in reverse order, so we insert an RC instruction for possible removal
+        // when we see a DecrementRc and check whether it was possibly mutated when we see an IncrementRc.
+        match instruction {
+            Instruction::IncrementRc { value } => {
+                if let Some(inc_rc) =
+                    pop_rc_for(*value, function, &mut self.rcs_with_possible_pairs)
+                {
+                    if !inc_rc.possibly_mutated {
+                        self.rc_pairs_to_remove.insert(inc_rc.id);
+                        self.rc_pairs_to_remove.insert(instruction_id);
+                    }
+                }
+
+                self.inc_rcs.entry(*value).or_default().insert(instruction_id);
+            }
+            Instruction::DecrementRc { value } => {
+                let typ = function.dfg.type_of_value(*value);
+
+                // We assume arrays aren't mutated until we find an array_set
+                let dec_rc =
+                    RcInstruction { id: instruction_id, array: *value, possibly_mutated: false };
+                self.rcs_with_possible_pairs.entry(typ).or_default().push(dec_rc);
+            }
+            Instruction::ArraySet { array, .. } => {
+                let typ = function.dfg.type_of_value(*array);
+                if let Some(dec_rcs) = self.rcs_with_possible_pairs.get_mut(&typ) {
+                    for dec_rc in dec_rcs {
+                        dec_rc.possibly_mutated = true;
+                    }
+                }
+
+                self.mutated_array_types.insert(typ);
+            }
+            Instruction::Store { value, .. } => {
+                // We are very conservative and say that any store of an array type means it has the potential to be mutated.
+                let typ = function.dfg.type_of_value(*value);
+                if matches!(&typ, Type::Array(..) | Type::Slice(..)) {
+                    self.mutated_array_types.insert(typ);
+                }
+            }
+            Instruction::Call { arguments, .. } => {
+                for arg in arguments {
+                    let typ = function.dfg.type_of_value(*arg);
+                    if matches!(&typ, Type::Array(..) | Type::Slice(..)) {
+                        self.mutated_array_types.insert(typ);
+                    }
+                }
+            }
+            _ => {}
+        }
+    }
+
+    fn get_non_mutated_arrays(&self, dfg: &DataFlowGraph) -> HashSet<InstructionId> {
+        self.inc_rcs
+            .keys()
+            .filter_map(|value| {
+                let typ = dfg.type_of_value(*value);
+                if !self.mutated_array_types.contains(&typ) {
+                    Some(&self.inc_rcs[value])
+                } else {
+                    None
+                }
+            })
+            .flatten()
+            .copied()
+            .collect()
+    }
+}
+
 #[cfg(test)]
 mod test {
     use std::sync::Arc;
@@ -600,6 +715,30 @@ mod test {
         assert_normalized_ssa_equals(ssa, expected);
     }

+    #[test]
+    fn remove_useless_paired_rcs_even_when_used() {
+        let src = "
+            acir(inline) fn main f0 {
+              b0(v0: [Field; 2]):
+                inc_rc v0
+                v2 = array_get v0, index u32 0 -> Field
+                dec_rc v0
+                return v2
+            }
+            ";
+        let ssa = Ssa::from_str(src).unwrap();
+
+        let expected = "
+            acir(inline) fn main f0 {
+              b0(v0: [Field; 2]):
+                v2 = array_get v0, index u32 0 -> Field
+                return v2
+            }
+            ";
+        let ssa = ssa.dead_instruction_elimination();
+        assert_normalized_ssa_equals(ssa, expected);
+    }
+
     #[test]
     fn keep_paired_rcs_with_array_set() {
         let src = "
@@ -669,6 +808,49 @@ mod test {
         assert_eq!(main.dfg[b1].instructions().len(), 2);
     }

+    #[test]
+    fn keep_inc_rc_on_borrowed_array_set() {
+        // acir(inline) fn main f0 {
+        //   b0(v0: [u32; 2]):
+        //     inc_rc v0
+        //     v3 = array_set v0, index u32 0, value u32 1
+        //     inc_rc v0
+        //     inc_rc v0
+        //     inc_rc v0
+        //     v4 = array_get v3, index u32 1
+        //     return v4
+        // }
+        let src = "
+            acir(inline) fn main f0 {
+              b0(v0: [u32; 2]):
+                inc_rc v0
+                v3 = array_set v0, index u32 0, value u32 1
+                inc_rc v0
+                inc_rc v0
+                inc_rc v0
+                v4 = array_get v3, index u32 1 -> u32
+                return v4
+            }
+            ";
+        let ssa = Ssa::from_str(src).unwrap();
+
+        // We expect the output to be unchanged
+        // Except for the repeated inc_rc instructions
+        let expected = "
+            acir(inline) fn main f0 {
+              b0(v0: [u32; 2]):
+                inc_rc v0
+                v3 = array_set v0, index u32 0, value u32 1
+                inc_rc v0
+                v4 = array_get v3, index u32 1 -> u32
+                return v4
+            }
+            ";
+
+        let ssa = ssa.dead_instruction_elimination();
+        assert_normalized_ssa_equals(ssa, expected);
+    }
+
     #[test]
     fn does_not_remove_inc_or_dec_rc_of_if_they_are_loaded_from_a_reference() {
         let src = "
@@ -689,4 +871,69 @@ mod test {
         let ssa = ssa.dead_instruction_elimination();
         assert_normalized_ssa_equals(ssa, src);
     }
+
+    #[test]
+    fn remove_inc_rcs_that_are_never_mutably_borrowed() {
+        let src = "
+            acir(inline) fn main f0 {
+              b0(v0: [Field; 2]):
+                inc_rc v0
+                inc_rc v0
+                inc_rc v0
+                v2 = array_get v0, index u32 0 -> Field
+                inc_rc v0
+                return v2
+            }
+            ";
+
+        let ssa = Ssa::from_str(src).unwrap();
+        let main = ssa.main();
+
+        // The instruction count never includes the terminator instruction
+        assert_eq!(main.dfg[main.entry_block()].instructions().len(), 5);
+
+        let expected = "
+            acir(inline) fn main f0 {
+              b0(v0: [Field; 2]):
+                v2 = array_get v0, index u32 0 -> Field
+                return v2
+            }
+            ";
+
+        let ssa = ssa.dead_instruction_elimination();
+        assert_normalized_ssa_equals(ssa, expected);
+    }
+
+    #[test]
+    fn do_not_remove_inc_rc_if_used_as_call_arg() {
+        // We do not want to remove inc_rc instructions on values
+        // that are passed as call arguments.
+        //
+        // We could have previously inlined a function which does the following:
+        // - Accepts a mutable array as an argument
+        // - Writes to that array
+        // - Passes the new array to another call
+        //
+        // It is possible then that the mutation gets simplified out after inlining.
+        // If we then remove the inc_rc as we see no mutations to that array in the block,
+        // we may end up with an the incorrect reference count.
+        let src = "
+            brillig(inline) fn main f0 {
+              b0(v0: Field):
+                v4 = make_array [Field 0, Field 1, Field 2] : [Field; 3]
+                inc_rc v4
+                v6 = call f1(v4) -> Field
+                constrain v0 == v6
+                return
+            }
+            brillig(inline) fn foo f1 {
+              b0(v0: [Field; 3]):
+                return u32 1
+            }
+            ";
+
+        let ssa = Ssa::from_str(src).unwrap();
+        let ssa = ssa.dead_instruction_elimination();
+        assert_normalized_ssa_equals(ssa, src);
+    }
 }
diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs
index 1cfd7b6dd57..046f4478eda 100644
--- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs
+++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs
@@ -1207,20 +1207,15 @@ mod test {
     ) -> Vec<u128> {
         match dfg[value] {
             Value::Instruction { instruction, .. } => {
-                let mut values = vec![];
-                dfg[instruction].map_values(|value| {
-                    values.push(value);
-                    value
-                });
+                let mut constants = vec![];

-                let mut values: Vec<_> = values
-                    .into_iter()
-                    .flat_map(|value| get_all_constants_reachable_from_instruction(dfg, value))
-                    .collect();
+                dfg[instruction].for_each_value(|value| {
+                    constants.extend(get_all_constants_reachable_from_instruction(dfg, value));
+                });

-                values.sort();
-                values.dedup();
-                values
+                constants.sort();
+                constants.dedup();
+                constants
             }
             Value::NumericConstant { constant, .. } => vec![constant.to_u128()],
             _ => Vec::new(),
diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs
index 77ad53df9cf..1e5cd8bdfbd 100644
--- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs
+++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs
@@ -599,8 +599,9 @@ impl<'f> PerFunctionContext<'f> {
     }

     fn update_data_bus(&mut self) {
-        let databus = self.inserter.function.dfg.data_bus.clone();
-        self.inserter.function.dfg.data_bus = databus.map_values(|t| self.inserter.resolve(t));
+        let mut databus = self.inserter.function.dfg.data_bus.clone();
+        databus.map_values_mut(|t| self.inserter.resolve(t));
+        self.inserter.function.dfg.data_bus = databus;
     }

     fn handle_terminator(&mut self, block: BasicBlockId, references: &mut Block) {
diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/normalize_value_ids.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/normalize_value_ids.rs
index f5e96224260..9420fb9b4f7 100644
--- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/normalize_value_ids.rs
+++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/normalize_value_ids.rs
@@ -109,9 +109,10 @@ impl Context {
             }
             let old_block = &mut old_function.dfg[old_block_id];
-            let mut terminator = old_block
-                .take_terminator()
-                .map_values(|value| self.new_ids.map_value(new_function, old_function, value));
+            let mut terminator = old_block.take_terminator();
+            terminator
+                .map_values_mut(|value| self.new_ids.map_value(new_function, old_function, value));
+
             terminator.mutate_blocks(|old_block| self.new_ids.blocks[&old_block]);
             new_function.dfg.set_block_terminator(new_block_id, terminator);
         }
diff --git a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/unrolling.rs b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/unrolling.rs
index 7ef793a350b..1ad03982bb8 100644
--- a/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/unrolling.rs
+++ b/noir/noir-repo/compiler/noirc_evaluator/src/ssa/opt/unrolling.rs
@@ -938,10 +938,9 @@ impl<'f> LoopIteration<'f> {
             }
             self.inserter.push_instruction(instruction, self.insert_block);
         }
-        let mut terminator = self.dfg()[self.source_block]
-            .unwrap_terminator()
-            .clone()
-            .map_values(|value| self.inserter.resolve(value));
+        let mut terminator = self.dfg()[self.source_block].unwrap_terminator().clone();
+
+        terminator.map_values_mut(|value| self.inserter.resolve(value));

         // Replace the blocks in the terminator with fresh one with the same parameters,
         // while remembering which were the original block IDs.
diff --git a/noir/noir-repo/test_programs/compilation_report.sh b/noir/noir-repo/test_programs/compilation_report.sh
index 13e74f0d7d2..d050e7c9c34 100755
--- a/noir/noir-repo/test_programs/compilation_report.sh
+++ b/noir/noir-repo/test_programs/compilation_report.sh
@@ -29,7 +29,9 @@ for dir in ${tests_to_profile[@]}; do

   cd $base_path/$dir

+  # The default package to run is the supplied list hardcoded at the top of the script
   PACKAGE_NAME=$dir
+  # Otherwise default to the current directory as the package we want to run
   if [ "$#" -ne 0 ]; then
     PACKAGE_NAME=$(basename $current_dir)
   fi