chore: unbundle check_array_is_initialized #5451

Merged
merged 3 commits on Jul 10, 2024
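In short, the PR renames check_array_is_initialized to ensure_array_is_initialized and narrows its result from (ValueId, Type, BlockId) to just the BlockId of the backing memory block; call sites that still need the resolved value id or the array type now obtain them directly from the DataFlowGraph. A simplified before/after sketch of a call site (paraphrased from the diff below, not an exact excerpt):

    // Before: one bundled call returned the resolved id, the type, and the block id.
    let (array_id, array_typ, block_id) = self.check_array_is_initialized(array, dfg)?;

    // After: the helper only guarantees the array's memory block is initialized.
    // Callers resolve the id and fetch the type themselves when they need them.
    let array = dfg.resolve(array);
    let array_typ = dfg.type_of_value(array);
    let block_id = self.ensure_array_is_initialized(array, dfg)?;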
101 changes: 51 additions & 50 deletions compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs
@@ -975,6 +975,8 @@ impl<'a> Context<'a> {
.into())
}
};
// Ensure that array id is fully resolved.
let array = dfg.resolve(array);

let array_id = dfg.resolve(array);
let array_typ = dfg.type_of_value(array_id);
@@ -992,7 +994,6 @@ impl<'a> Context<'a> {
// If we find one, we will use it when computing the index under the enable_side_effect predicate
// If not, array_get(..) will use a fallback costing one multiplication in the worst case.
// cf. https://github.com/noir-lang/noir/pull/4971

// For simplicity we compute the offset only for simple arrays
let is_simple_array = dfg.instruction_results(instruction).len() == 1
&& can_omit_element_sizes_array(&array_typ);
@@ -1123,13 +1124,14 @@ impl<'a> Context<'a> {
/// It is a dummy value because in the case of a false predicate, the value stored at the requested index will be itself.
fn convert_array_operation_inputs(
&mut self,
array: ValueId,
array_id: ValueId,
dfg: &DataFlowGraph,
index: ValueId,
store_value: Option<ValueId>,
offset: usize,
) -> Result<(AcirVar, Option<AcirValue>), RuntimeError> {
let (array_id, array_typ, block_id) = self.check_array_is_initialized(array, dfg)?;
let array_typ = dfg.type_of_value(array_id);
let block_id = self.ensure_array_is_initialized(array_id, dfg)?;

let index_var = self.convert_numeric_value(index, dfg)?;
let index_var = self.get_flattened_index(&array_typ, array_id, index_var, dfg)?;
@@ -1248,22 +1250,22 @@ impl<'a> Context<'a> {
dfg: &DataFlowGraph,
mut index_side_effect: bool,
) -> Result<AcirValue, RuntimeError> {
let (array_id, _, block_id) = self.check_array_is_initialized(array, dfg)?;
let block_id = self.ensure_array_is_initialized(array, dfg)?;
let results = dfg.instruction_results(instruction);
let res_typ = dfg.type_of_value(results[0]);

// Get operations to call-data parameters are replaced by a get to the call-data-bus array
if let Some(call_data) = self.data_bus.call_data {
if self.data_bus.call_data_map.contains_key(&array_id) {
if self.data_bus.call_data_map.contains_key(&array) {
// TODO: the block_id of call-data must be notified to the backend
// TODO: should we do the same for return-data?
let type_size = res_typ.flattened_size();
let type_size =
self.acir_context.add_constant(FieldElement::from(type_size as i128));
let offset = self.acir_context.mul_var(var_index, type_size)?;
let bus_index = self.acir_context.add_constant(FieldElement::from(
self.data_bus.call_data_map[&array_id] as i128,
));
let bus_index = self
.acir_context
.add_constant(FieldElement::from(self.data_bus.call_data_map[&array] as i128));
let new_index = self.acir_context.add_var(offset, bus_index)?;
return self.array_get(instruction, call_data, new_index, dfg, index_side_effect);
}
@@ -1277,8 +1279,7 @@ impl<'a> Context<'a> {
let mut value = self.array_get_value(&res_typ, block_id, &mut var_index)?;

if let AcirValue::Var(value_var, typ) = &value {
let array_id = dfg.resolve(array_id);
let array_typ = dfg.type_of_value(array_id);
let array_typ = dfg.type_of_value(array);
if let (Type::Numeric(numeric_type), AcirType::NumericType(num)) =
(array_typ.first(), typ)
{
@@ -1362,7 +1363,7 @@ impl<'a> Context<'a> {
}
};

let (array_id, array_typ, block_id) = self.check_array_is_initialized(array, dfg)?;
let block_id = self.ensure_array_is_initialized(array, dfg)?;

// Every array has a length in its type, so we fetch that from
// the SSA IR.
@@ -1371,10 +1372,11 @@ impl<'a> Context<'a> {
// However, this size is simply the capacity of a slice. The capacity is dependent upon the witness
// and may contain data for which we want to restrict access. The true slice length is tracked in
// a separate SSA value and restrictions on slice indices should be generated elsewhere in the SSA.
let array_typ = dfg.type_of_value(array);
let array_len = if !array_typ.contains_slice_element() {
array_typ.flattened_size()
} else {
self.flattened_slice_size(array_id, dfg)
self.flattened_slice_size(array, dfg)
};

// Since array_set creates a new array, we create a new block ID for this
@@ -1397,18 +1399,13 @@ impl<'a> Context<'a> {
self.array_set_value(&store_value, result_block_id, &mut var_index)?;

let element_type_sizes = if !can_omit_element_sizes_array(&array_typ) {
let acir_value = self.convert_value(array_id, dfg);
Some(self.init_element_type_sizes_array(
&array_typ,
array_id,
Some(&acir_value),
dfg,
)?)
let acir_value = self.convert_value(array, dfg);
Some(self.init_element_type_sizes_array(&array_typ, array, Some(&acir_value), dfg)?)
} else {
None
};

let value_types = self.convert_value(array_id, dfg).flat_numeric_types();
let value_types = self.convert_value(array, dfg).flat_numeric_types();
// Compiler sanity check
assert_eq!(value_types.len(), array_len, "ICE: The length of the flattened type array should match the length of the dynamic array");

@@ -1454,45 +1451,41 @@ impl<'a> Context<'a> {
Ok(())
}

fn check_array_is_initialized(
fn ensure_array_is_initialized(
&mut self,
array: ValueId,
dfg: &DataFlowGraph,
) -> Result<(ValueId, Type, BlockId), RuntimeError> {
// Fetch the internal SSA ID for the array
let array_id = dfg.resolve(array);

let array_typ = dfg.type_of_value(array_id);

) -> Result<BlockId, RuntimeError> {
// Use the SSA ID to get or create its block ID
let block_id = self.block_id(&array_id);
let block_id = self.block_id(&array);

// Check if the array has already been initialized in ACIR gen
// if not, we initialize it using the values from SSA
let already_initialized = self.initialized_arrays.contains(&block_id);
if !already_initialized {
let value = &dfg[array_id];
let value = &dfg[array];
match value {
Value::Array { .. } | Value::Instruction { .. } => {
let value = self.convert_value(array_id, dfg);
let value = self.convert_value(array, dfg);
let array_typ = dfg.type_of_value(array);
let len = if !array_typ.contains_slice_element() {
array_typ.flattened_size()
} else {
self.flattened_slice_size(array_id, dfg)
self.flattened_slice_size(array, dfg)
};
self.initialize_array(block_id, len, Some(value))?;
}
_ => {
return Err(InternalError::General {
message: format!("Array {array_id} should be initialized"),
message: format!("Array {array} should be initialized"),
call_stack: self.acir_context.get_call_stack(),
}
.into());
}
}
}

Ok((array_id, array_typ, block_id))
Ok(block_id)
}

fn init_element_type_sizes_array(
@@ -1746,7 +1739,7 @@ impl<'a> Context<'a> {

/// Converts an SSA terminator's return values into their ACIR representations
fn get_num_return_witnesses(
&mut self,
&self,
terminator: &TerminatorInstruction,
dfg: &DataFlowGraph,
) -> usize {
@@ -1800,7 +1793,7 @@ impl<'a> Context<'a> {
has_constant_return |= self.acir_context.is_constant(&acir_var);
if is_databus {
// We do not return value for the data bus.
self.check_array_is_initialized(
self.ensure_array_is_initialized(
self.data_bus.return_data.expect(
"`is_databus == true` implies `data_bus.return_data` is `Some`",
),
@@ -2167,8 +2160,9 @@ impl<'a> Context<'a> {
Ok(vec![AcirValue::Var(self.acir_context.add_constant(len), AcirType::field())])
}
Intrinsic::AsSlice => {
let (slice_contents, slice_typ, block_id) =
self.check_array_is_initialized(arguments[0], dfg)?;
let slice_contents = arguments[0];
let slice_typ = dfg.type_of_value(slice_contents);
let block_id = self.ensure_array_is_initialized(slice_contents, dfg)?;
assert!(!slice_typ.is_nested_slice(), "ICE: Nested slice used in ACIR generation");

let result_block_id = self.block_id(&result_ids[1]);
@@ -2212,8 +2206,9 @@ impl<'a> Context<'a> {
Intrinsic::SlicePushBack => {
// arguments = [slice_length, slice_contents, ...elements_to_push]
let slice_length = self.convert_value(arguments[0], dfg).into_var()?;
let (slice_contents, slice_typ, _) =
self.check_array_is_initialized(arguments[1], dfg)?;
let slice_contents = arguments[1];
let slice_typ = dfg.type_of_value(slice_contents);

assert!(!slice_typ.is_nested_slice(), "ICE: Nested slice used in ACIR generation");

let slice = self.convert_value(slice_contents, dfg);
@@ -2279,9 +2274,8 @@ impl<'a> Context<'a> {
Intrinsic::SlicePushFront => {
// arguments = [slice_length, slice_contents, ...elements_to_push]
let slice_length = self.convert_value(arguments[0], dfg).into_var()?;

let (slice_contents, slice_typ, _) =
self.check_array_is_initialized(arguments[1], dfg)?;
let slice_contents = arguments[1];
let slice_typ = dfg.type_of_value(slice_contents);
assert!(!slice_typ.is_nested_slice(), "ICE: Nested slice used in ACIR generation");

let slice: AcirValue = self.convert_value(slice_contents, dfg);
@@ -2344,6 +2338,7 @@ impl<'a> Context<'a> {
Intrinsic::SlicePopBack => {
// arguments = [slice_length, slice_contents]
let slice_length = self.convert_value(arguments[0], dfg).into_var()?;
let slice_contents = arguments[1];

let one = self.acir_context.add_constant(FieldElement::one());
let new_slice_length = self.acir_context.sub_var(slice_length, one)?;
@@ -2352,8 +2347,8 @@ impl<'a> Context<'a> {
// the elements stored at that index will no longer be able to be accessed.
let mut var_index = new_slice_length;

let (slice_contents, slice_typ, block_id) =
self.check_array_is_initialized(arguments[1], dfg)?;
let slice_typ = dfg.type_of_value(slice_contents);
let block_id = self.ensure_array_is_initialized(slice_contents, dfg)?;
assert!(!slice_typ.is_nested_slice(), "ICE: Nested slice used in ACIR generation");

let mut popped_elements = Vec::new();
@@ -2378,9 +2373,11 @@ impl<'a> Context<'a> {
Intrinsic::SlicePopFront => {
// arguments = [slice_length, slice_contents]
let slice_length = self.convert_value(arguments[0], dfg).into_var()?;
let slice_contents = arguments[1];

let slice_typ = dfg.type_of_value(slice_contents);
let block_id = self.ensure_array_is_initialized(slice_contents, dfg)?;

let (slice_contents, slice_typ, block_id) =
self.check_array_is_initialized(arguments[1], dfg)?;
assert!(!slice_typ.is_nested_slice(), "ICE: Nested slice used in ACIR generation");

let one = self.acir_context.add_constant(FieldElement::one());
@@ -2419,9 +2416,11 @@ impl<'a> Context<'a> {
Intrinsic::SliceInsert => {
// arguments = [slice_length, slice_contents, insert_index, ...elements_to_insert]
let slice_length = self.convert_value(arguments[0], dfg).into_var()?;
let slice_contents = arguments[1];

let slice_typ = dfg.type_of_value(slice_contents);
let block_id = self.ensure_array_is_initialized(slice_contents, dfg)?;

let (slice_contents, slice_typ, block_id) =
self.check_array_is_initialized(arguments[1], dfg)?;
assert!(!slice_typ.is_nested_slice(), "ICE: Nested slice used in ACIR generation");

let slice = self.convert_value(slice_contents, dfg);
@@ -2558,9 +2557,11 @@ impl<'a> Context<'a> {
Intrinsic::SliceRemove => {
// arguments = [slice_length, slice_contents, remove_index]
let slice_length = self.convert_value(arguments[0], dfg).into_var()?;
let slice_contents = arguments[1];

let slice_typ = dfg.type_of_value(slice_contents);
let block_id = self.ensure_array_is_initialized(slice_contents, dfg)?;

let (slice_contents, slice_typ, block_id) =
self.check_array_is_initialized(arguments[1], dfg)?;
assert!(!slice_typ.is_nested_slice(), "ICE: Nested slice used in ACIR generation");

let slice = self.convert_value(slice_contents, dfg);
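Taken together, the slice-intrinsic hunks above show the split that the unbundling produces: the push intrinsics only use the element type, so they no longer call the helper at all, while pop, insert, and remove still ensure the backing memory block is initialized before reading from it. A simplified sketch of the two call-site shapes (paraphrased from the hunks above, not an exact excerpt):

    // SlicePushBack / SlicePushFront: only the element type is needed.
    let slice_contents = arguments[1];
    let slice_typ = dfg.type_of_value(slice_contents);

    // SlicePopBack / SlicePopFront / SliceInsert / SliceRemove: the backing
    // memory block is read, so it must be initialized first.
    let slice_contents = arguments[1];
    let slice_typ = dfg.type_of_value(slice_contents);
    let block_id = self.ensure_array_is_initialized(slice_contents, dfg)?;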