diff --git a/Cargo.lock b/Cargo.lock index 407b4771af5..6051d356fca 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -30,6 +30,7 @@ dependencies = [ "cfg-if", "hex", "num-bigint", + "num-traits", "serde", ] diff --git a/acvm-repo/acir_field/Cargo.toml b/acvm-repo/acir_field/Cargo.toml index 7d059e9e4be..10027519d6d 100644 --- a/acvm-repo/acir_field/Cargo.toml +++ b/acvm-repo/acir_field/Cargo.toml @@ -16,6 +16,7 @@ repository.workspace = true hex.workspace = true num-bigint.workspace = true serde.workspace = true +num-traits.workspace = true ark-bn254 = { version = "^0.4.0", optional = true, default-features = false, features = [ "curve", diff --git a/acvm-repo/acir_field/src/lib.rs b/acvm-repo/acir_field/src/lib.rs index 6d4547868cc..eafe4bb2ad4 100644 --- a/acvm-repo/acir_field/src/lib.rs +++ b/acvm-repo/acir_field/src/lib.rs @@ -3,6 +3,9 @@ #![warn(clippy::semicolon_if_nothing_returned)] #![cfg_attr(not(test), warn(unused_crate_dependencies, unused_extern_crates))] +use num_bigint::BigUint; +use num_traits::Num; + cfg_if::cfg_if! { if #[cfg(feature = "bn254")] { mod generic_ark; @@ -18,12 +21,33 @@ cfg_if::cfg_if! 
{ } } -#[derive(Debug)] +#[derive(Debug, PartialEq, Eq)] pub enum FieldOptions { BN254, BLS12_381, } +impl FieldOptions { + pub fn to_string(&self) -> &str { + match self { + FieldOptions::BN254 => "bn254", + FieldOptions::BLS12_381 => "bls12_381", + } + } + + pub fn is_native_field(str: &str) -> bool { + let big_num = if let Some(hex) = str.strip_prefix("0x") { + BigUint::from_str_radix(hex, 16) + } else { + BigUint::from_str_radix(str, 10) + }; + if let Ok(big_num) = big_num { + big_num == FieldElement::modulus() + } else { + CHOSEN_FIELD.to_string() == str + } + } +} // This is needed because features are additive through the dependency graph; if a dependency turns on the bn254, then it // will be turned on in all crates that depend on it #[macro_export] diff --git a/acvm-repo/acvm/src/compiler/optimizers/general.rs b/acvm-repo/acvm/src/compiler/optimizers/general.rs index 50271348c11..2bd781f7bb5 100644 --- a/acvm-repo/acvm/src/compiler/optimizers/general.rs +++ b/acvm-repo/acvm/src/compiler/optimizers/general.rs @@ -31,7 +31,7 @@ fn simplify_mul_terms(mut gate: Expression) -> Expression { let mut hash_map: IndexMap<(Witness, Witness), FieldElement> = IndexMap::new(); // Canonicalize the ordering of the multiplication, lets just order by variable name - for (scale, w_l, w_r) in gate.mul_terms.clone().into_iter() { + for (scale, w_l, w_r) in gate.mul_terms.into_iter() { let mut pair = [w_l, w_r]; // Sort using rust sort algorithm pair.sort(); diff --git a/acvm-repo/acvm/src/compiler/optimizers/mod.rs b/acvm-repo/acvm/src/compiler/optimizers/mod.rs index c63cfdf9c82..627ddbb4117 100644 --- a/acvm-repo/acvm/src/compiler/optimizers/mod.rs +++ b/acvm-repo/acvm/src/compiler/optimizers/mod.rs @@ -23,20 +23,22 @@ pub fn optimize(acir: Circuit) -> (Circuit, AcirTransformationMap) { /// Applies [`ProofSystemCompiler`][crate::ProofSystemCompiler] independent optimizations to a [`Circuit`]. 
pub(super) fn optimize_internal(acir: Circuit) -> (Circuit, AcirTransformationMap) { // General optimizer pass - let mut opcodes: Vec = Vec::new(); - for opcode in acir.opcodes { - match opcode { - Opcode::Arithmetic(arith_expr) => { - opcodes.push(Opcode::Arithmetic(GeneralOptimizer::optimize(arith_expr))); + let opcodes: Vec = acir + .opcodes + .into_iter() + .map(|opcode| { + if let Opcode::Arithmetic(arith_expr) = opcode { + Opcode::Arithmetic(GeneralOptimizer::optimize(arith_expr)) + } else { + opcode } - other_opcode => opcodes.push(other_opcode), - }; - } + }) + .collect(); let acir = Circuit { opcodes, ..acir }; // Track original acir opcode positions throughout the transformation passes of the compilation // by applying the modifications done to the circuit opcodes and also to the opcode_positions (delete and insert) - let acir_opcode_positions = acir.opcodes.iter().enumerate().map(|(i, _)| i).collect(); + let acir_opcode_positions = (0..acir.opcodes.len()).collect(); // Unused memory optimization pass let memory_optimizer = UnusedMemoryOptimizer::new(acir); diff --git a/acvm-repo/acvm/src/compiler/optimizers/redundant_range.rs b/acvm-repo/acvm/src/compiler/optimizers/redundant_range.rs index 9833a31a199..b1696704108 100644 --- a/acvm-repo/acvm/src/compiler/optimizers/redundant_range.rs +++ b/acvm-repo/acvm/src/compiler/optimizers/redundant_range.rs @@ -74,13 +74,13 @@ impl RangeOptimizer { let mut new_order_list = Vec::with_capacity(order_list.len()); let mut optimized_opcodes = Vec::with_capacity(self.circuit.opcodes.len()); - for (idx, opcode) in self.circuit.opcodes.iter().enumerate() { - let (witness, num_bits) = match extract_range_opcode(opcode) { + for (idx, opcode) in self.circuit.opcodes.into_iter().enumerate() { + let (witness, num_bits) = match extract_range_opcode(&opcode) { Some(range_opcode) => range_opcode, None => { // If its not the range opcode, add it to the opcode // list and continue; - optimized_opcodes.push(opcode.clone()); + 
optimized_opcodes.push(opcode); new_order_list.push(order_list[idx]); continue; } @@ -101,18 +101,11 @@ impl RangeOptimizer { if is_lowest_bit_size { already_seen_witness.insert(witness); new_order_list.push(order_list[idx]); - optimized_opcodes.push(opcode.clone()); + optimized_opcodes.push(opcode); } } - ( - Circuit { - current_witness_index: self.circuit.current_witness_index, - opcodes: optimized_opcodes, - ..self.circuit - }, - new_order_list, - ) + (Circuit { opcodes: optimized_opcodes, ..self.circuit }, new_order_list) } } diff --git a/acvm-repo/acvm/src/compiler/optimizers/unused_memory.rs b/acvm-repo/acvm/src/compiler/optimizers/unused_memory.rs index eccea631723..18eefa79ac2 100644 --- a/acvm-repo/acvm/src/compiler/optimizers/unused_memory.rs +++ b/acvm-repo/acvm/src/compiler/optimizers/unused_memory.rs @@ -42,16 +42,16 @@ impl UnusedMemoryOptimizer { ) -> (Circuit, Vec) { let mut new_order_list = Vec::with_capacity(order_list.len()); let mut optimized_opcodes = Vec::with_capacity(self.circuit.opcodes.len()); - for (idx, opcode) in self.circuit.opcodes.iter().enumerate() { + for (idx, opcode) in self.circuit.opcodes.into_iter().enumerate() { match opcode { Opcode::MemoryInit { block_id, .. 
} - if self.unused_memory_initializations.contains(block_id) => + if self.unused_memory_initializations.contains(&block_id) => { // Drop opcode } _ => { new_order_list.push(order_list[idx]); - optimized_opcodes.push(opcode.clone()); + optimized_opcodes.push(opcode); } } } diff --git a/acvm-repo/acvm/src/compiler/transformers/mod.rs b/acvm-repo/acvm/src/compiler/transformers/mod.rs index 3a3a013e8eb..6f9d78e4b93 100644 --- a/acvm-repo/acvm/src/compiler/transformers/mod.rs +++ b/acvm-repo/acvm/src/compiler/transformers/mod.rs @@ -76,13 +76,13 @@ pub(super) fn transform_internal( // maps a normalized expression to the intermediate variable which represents the expression, along with its 'norm' // the 'norm' is simply the value of the first non zero coefficient in the expression, taken from the linear terms, or quadratic terms if there is none. let mut intermediate_variables: IndexMap = IndexMap::new(); - for (index, opcode) in acir.opcodes.iter().enumerate() { + for (index, opcode) in acir.opcodes.into_iter().enumerate() { match opcode { Opcode::Arithmetic(arith_expr) => { let len = intermediate_variables.len(); let arith_expr = transformer.transform( - arith_expr.clone(), + arith_expr, &mut intermediate_variables, &mut next_witness_index, ); @@ -104,7 +104,7 @@ pub(super) fn transform_internal( transformed_opcodes.push(Opcode::Arithmetic(opcode)); } } - Opcode::BlackBoxFuncCall(func) => { + Opcode::BlackBoxFuncCall(ref func) => { match func { acir::circuit::opcodes::BlackBoxFuncCall::AND { output, .. } | acir::circuit::opcodes::BlackBoxFuncCall::XOR { output, .. 
} => { @@ -146,9 +146,9 @@ pub(super) fn transform_internal( } new_acir_opcode_positions.push(acir_opcode_positions[index]); - transformed_opcodes.push(opcode.clone()); + transformed_opcodes.push(opcode); } - Opcode::Directive(directive) => { + Opcode::Directive(ref directive) => { match directive { Directive::Quotient(quotient_directive) => { transformer.mark_solvable(quotient_directive.q); @@ -166,14 +166,14 @@ pub(super) fn transform_internal( } } new_acir_opcode_positions.push(acir_opcode_positions[index]); - transformed_opcodes.push(opcode.clone()); + transformed_opcodes.push(opcode); } Opcode::MemoryInit { .. } => { // `MemoryInit` does not write values to the `WitnessMap` new_acir_opcode_positions.push(acir_opcode_positions[index]); - transformed_opcodes.push(opcode.clone()); + transformed_opcodes.push(opcode); } - Opcode::MemoryOp { op, .. } => { + Opcode::MemoryOp { ref op, .. } => { for (_, witness1, witness2) in &op.value.mul_terms { transformer.mark_solvable(*witness1); transformer.mark_solvable(*witness2); @@ -182,9 +182,9 @@ pub(super) fn transform_internal( transformer.mark_solvable(*witness); } new_acir_opcode_positions.push(acir_opcode_positions[index]); - transformed_opcodes.push(opcode.clone()); + transformed_opcodes.push(opcode); } - Opcode::Brillig(brillig) => { + Opcode::Brillig(ref brillig) => { for output in &brillig.outputs { match output { BrilligOutputs::Simple(w) => transformer.mark_solvable(*w), @@ -196,7 +196,7 @@ pub(super) fn transform_internal( } } new_acir_opcode_positions.push(acir_opcode_positions[index]); - transformed_opcodes.push(opcode.clone()); + transformed_opcodes.push(opcode); } } } diff --git a/compiler/integration-tests/test/browser/recursion.test.ts b/compiler/integration-tests/test/browser/recursion.test.ts index c773e80ea43..bdc44d8db5a 100644 --- a/compiler/integration-tests/test/browser/recursion.test.ts +++ b/compiler/integration-tests/test/browser/recursion.test.ts @@ -9,6 +9,7 @@ import { acvm, abi, 
generateWitness } from '@noir-lang/noir_js'; import * as TOML from 'smol-toml'; import { BarretenbergBackend } from '@noir-lang/backend_barretenberg'; import { getFile } from './utils.js'; +import { Field, InputMap } from '@noir-lang/noirc_abi'; const logger = new Logger({ name: 'test', minLevel: TEST_LOG_LEVEL }); @@ -50,7 +51,7 @@ describe('It compiles noir program code, receiving circuit bytes and abi object. it('Should generate valid inner proof for correct input, then verify proof within a proof', async () => { const main_program = await getCircuit(circuit_main_source); - const main_inputs = TOML.parse(circuit_main_toml); + const main_inputs: InputMap = TOML.parse(circuit_main_toml) as InputMap; const main_backend = new BarretenbergBackend(main_program); @@ -69,10 +70,10 @@ describe('It compiles noir program code, receiving circuit bytes and abi object. numPublicInputs, ); - const recursion_inputs = { + const recursion_inputs: InputMap = { verification_key: vkAsFields, proof: proofAsFields, - public_inputs: [main_inputs.y], + public_inputs: [main_inputs.y as Field], key_hash: vkHash, input_aggregation_object: ['0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0'], }; diff --git a/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs b/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs index 3feeaba58b0..36e54132a38 100644 --- a/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs +++ b/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs @@ -766,6 +766,36 @@ impl Context { Ok(AcirValue::Array(elements)) } + ( + AcirValue::DynamicArray(AcirDynamicArray { block_id, len, .. 
}), + AcirValue::Array(dummy_values), + ) => { + let dummy_values = dummy_values + .into_iter() + .flat_map(|val| val.clone().flatten()) + .map(|(var, typ)| AcirValue::Var(var, typ)) + .collect::>(); + + assert_eq!( + *len, + dummy_values.len(), + "ICE: The store value and dummy must have the same number of inner values" + ); + + let values = try_vecmap(0..*len, |i| { + let index_var = self.acir_context.add_constant(FieldElement::from(i as u128)); + + let read = self.acir_context.read_from_memory(*block_id, &index_var)?; + Ok::(AcirValue::Var(read, AcirType::field())) + })?; + + let mut elements = im::Vector::new(); + for (val, dummy_val) in values.iter().zip(dummy_values) { + elements.push_back(self.convert_array_set_store_value(val, &dummy_val)?); + } + + Ok(AcirValue::Array(elements)) + } (AcirValue::DynamicArray(_), AcirValue::DynamicArray(_)) => { unimplemented!("ICE: setting a dynamic array not supported"); } @@ -925,8 +955,14 @@ impl Context { self.array_set_value(value, block_id, var_index)?; } } - AcirValue::DynamicArray(_) => { - unimplemented!("ICE: setting a dynamic array not supported"); + AcirValue::DynamicArray(AcirDynamicArray { block_id: inner_block_id, len, .. }) => { + let values = try_vecmap(0..len, |i| { + let index_var = self.acir_context.add_constant(FieldElement::from(i as u128)); + + let read = self.acir_context.read_from_memory(inner_block_id, &index_var)?; + Ok::(AcirValue::Var(read, AcirType::field())) + })?; + self.array_set_value(AcirValue::Array(values.into()), block_id, var_index)?; } } Ok(()) @@ -951,7 +987,7 @@ impl Context { if !already_initialized { let value = &dfg[array_id]; match value { - Value::Array { .. } => { + Value::Array { .. } | Value::Instruction { .. 
} => { let value = self.convert_value(array_id, dfg); let len = if matches!(array_typ, Type::Array(_, _)) { array_typ.flattened_size() @@ -965,7 +1001,7 @@ impl Context { message: format!("Array {array_id} should be initialized"), call_stack: self.acir_context.get_call_stack(), } - .into()) + .into()); } } } diff --git a/compiler/noirc_evaluator/src/ssa/ir/instruction.rs b/compiler/noirc_evaluator/src/ssa/ir/instruction.rs index 3a12d508f95..45b84cc97d9 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/instruction.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/instruction.rs @@ -646,15 +646,11 @@ impl Binary { let operand_type = dfg.type_of_value(self.lhs); if let (Some(lhs), Some(rhs)) = (lhs, rhs) { - // If the rhs of a division is zero, attempting to evaluate the divison will cause a compiler panic. - // Thus, we do not evaluate this divison as we want to avoid triggering a panic, - // and division by zero should be handled by laying down constraints during ACIR generation. - if matches!(self.operator, BinaryOp::Div | BinaryOp::Mod) && rhs == FieldElement::zero() - { - return SimplifyResult::None; - } - return match self.eval_constants(dfg, lhs, rhs, operand_type) { - Some(value) => SimplifyResult::SimplifiedTo(value), + return match eval_constant_binary_op(lhs, rhs, self.operator, operand_type) { + Some((result, result_type)) => { + let value = dfg.make_constant(result, result_type); + SimplifyResult::SimplifiedTo(value) + } None => SimplifyResult::None, }; } @@ -775,47 +771,51 @@ impl Binary { } SimplifyResult::None } +} - /// Evaluate the two constants with the operation specified by self.operator. - /// Pushes the resulting value to the given DataFlowGraph's constants and returns it. 
- fn eval_constants( - &self, - dfg: &mut DataFlowGraph, - lhs: FieldElement, - rhs: FieldElement, - mut operand_type: Type, - ) -> Option> { - let value = match &operand_type { - Type::Numeric(NumericType::NativeField) => { - self.operator.get_field_function()?(lhs, rhs) - } - Type::Numeric(NumericType::Unsigned { bit_size }) => { - let function = self.operator.get_u128_function(); - - let lhs = truncate(lhs.try_into_u128()?, *bit_size); - let rhs = truncate(rhs.try_into_u128()?, *bit_size); +/// Evaluate a binary operation with constant arguments. +fn eval_constant_binary_op( + lhs: FieldElement, + rhs: FieldElement, + operator: BinaryOp, + mut operand_type: Type, +) -> Option<(FieldElement, Type)> { + let value = match &operand_type { + Type::Numeric(NumericType::NativeField) => { + // If the rhs of a division is zero, attempting to evaluate the division will cause a compiler panic. + // Thus, we do not evaluate the division in this method, as we want to avoid triggering a panic, + // and the operation should be handled by ACIR generation. + if matches!(operator, BinaryOp::Div | BinaryOp::Mod) && rhs == FieldElement::zero() { + return None; + } + operator.get_field_function()?(lhs, rhs) + } + Type::Numeric(NumericType::Unsigned { bit_size }) => { + let function = operator.get_u128_function(); - // The divisor is being truncated into the type of the operand, which can potentially - // lead to the rhs being zero. - // If the rhs of a division is zero, attempting to evaluate the divison will cause a compiler panic. - // Thus, we do not evaluate the division in this method, as we want to avoid triggering a panic, - // and the operation should be handled by ACIR generation. 
- if matches!(self.operator, BinaryOp::Div) && rhs == 0 { - return None; - } + let lhs = truncate(lhs.try_into_u128()?, *bit_size); + let rhs = truncate(rhs.try_into_u128()?, *bit_size); - let result = function(lhs, rhs); - truncate(result, *bit_size).into() + // The divisor is being truncated into the type of the operand, which can potentially + // lead to the rhs being zero. + // If the rhs of a division is zero, attempting to evaluate the division will cause a compiler panic. + // Thus, we do not evaluate the division in this method, as we want to avoid triggering a panic, + // and the operation should be handled by ACIR generation. + if matches!(operator, BinaryOp::Div | BinaryOp::Mod) && rhs == 0 { + return None; } - _ => return None, - }; - if matches!(self.operator, BinaryOp::Eq | BinaryOp::Lt) { - operand_type = Type::bool(); + let result = function(lhs, rhs); + truncate(result, *bit_size).into() } + _ => return None, + }; - Some(dfg.make_constant(value, operand_type)) + if matches!(operator, BinaryOp::Eq | BinaryOp::Lt) { + operand_type = Type::bool(); } + + Some((value, operand_type)) } fn truncate(int: u128, bit_size: u32) -> u128 { diff --git a/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs b/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs index dc99546f798..803ffbc41fe 100644 --- a/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs +++ b/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs @@ -1,5 +1,6 @@ use std::vec; +use acvm::acir::acir_field::FieldOptions; use fm::FileId; use noirc_errors::Location; @@ -202,6 +203,13 @@ impl<'a> ModCollector<'a> { let module = ModuleId { krate, local_id: self.module_id }; for function in functions { + // check if optional field attribute is compatible with native field + if let Some(field) = function.attributes().get_field_attribute() { + if !FieldOptions::is_native_field(&field) { + continue; + } + } + let name = function.name_ident().clone(); let func_id = 
context.def_interner.push_empty_fn(); diff --git a/compiler/noirc_frontend/src/lexer/token.rs b/compiler/noirc_frontend/src/lexer/token.rs index ad81b163801..2ad2f3902b1 100644 --- a/compiler/noirc_frontend/src/lexer/token.rs +++ b/compiler/noirc_frontend/src/lexer/token.rs @@ -395,6 +395,15 @@ impl Attributes { _ => None, }) } + + pub fn get_field_attribute(&self) -> Option { + for secondary in &self.secondary { + if let SecondaryAttribute::Field(field) = secondary { + return Some(field.to_lowercase()); + } + } + None + } } /// An Attribute can be either a Primary Attribute or a Secondary Attribute @@ -466,6 +475,10 @@ impl Attribute { None => return Err(malformed_scope), } } + ["field", name] => { + validate(name)?; + Attribute::Secondary(SecondaryAttribute::Field(name.to_string())) + } // Secondary attributes ["deprecated"] => Attribute::Secondary(SecondaryAttribute::Deprecated(None)), ["contract_library_method"] => { @@ -550,6 +563,7 @@ pub enum SecondaryAttribute { // the entry point. 
ContractLibraryMethod, Event, + Field(String), Custom(String), } @@ -563,6 +577,7 @@ impl fmt::Display for SecondaryAttribute { SecondaryAttribute::Custom(ref k) => write!(f, "#[{k}]"), SecondaryAttribute::ContractLibraryMethod => write!(f, "#[contract_library_method]"), SecondaryAttribute::Event => write!(f, "#[event]"), + SecondaryAttribute::Field(ref k) => write!(f, "#[field({k})]"), } } } @@ -583,7 +598,7 @@ impl AsRef for SecondaryAttribute { match self { SecondaryAttribute::Deprecated(Some(string)) => string, SecondaryAttribute::Deprecated(None) => "", - SecondaryAttribute::Custom(string) => string, + SecondaryAttribute::Custom(string) | SecondaryAttribute::Field(string) => string, SecondaryAttribute::ContractLibraryMethod => "", SecondaryAttribute::Event => "", } diff --git a/noir_stdlib/src/field.nr b/noir_stdlib/src/field.nr index fe887aa89b0..3959f1ea175 100644 --- a/noir_stdlib/src/field.nr +++ b/noir_stdlib/src/field.nr @@ -1,23 +1,52 @@ impl Field { + pub fn to_le_bits(self: Self, bit_size: u32) -> [u1] { + crate::assert_constant(bit_size); + self.__to_le_bits(bit_size) + } + + pub fn to_be_bits(self: Self, bit_size: u32) -> [u1] { + crate::assert_constant(bit_size); + self.__to_be_bits(bit_size) + } + #[builtin(to_le_bits)] - pub fn to_le_bits(_x : Field, _bit_size: u32) -> [u1] {} + fn __to_le_bits(_self: Self, _bit_size: u32) -> [u1] {} + #[builtin(to_be_bits)] - pub fn to_be_bits(_x : Field, _bit_size: u32) -> [u1] {} + fn __to_be_bits(_self: Self, _bit_size: u32) -> [u1] {} + + pub fn to_le_bytes(self: Self, byte_size: u32) -> [u8] { + self.to_le_radix(256, byte_size) + } + + pub fn to_be_bytes(self: Self, byte_size: u32) -> [u8] { + self.to_be_radix(256, byte_size) + } + - pub fn to_le_bytes(x : Field, byte_size: u32) -> [u8] { - x.to_le_radix(256, byte_size) + pub fn to_le_radix(self: Self, radix: u32, result_len: u32) -> [u8] { + crate::assert_constant(radix); + crate::assert_constant(result_len); + self.__to_le_radix(radix, result_len) } - pub 
fn to_be_bytes(x : Field, byte_size: u32) -> [u8] { - x.to_be_radix(256, byte_size) + + pub fn to_be_radix(self: Self, radix: u32, result_len: u32) -> [u8] { + crate::assert_constant(radix); + crate::assert_constant(result_len); + self.__to_be_radix(radix, result_len) } + + + // decompose `_self` into a `_result_len` vector over the `_radix` basis + // `_radix` must be less than 256 #[builtin(to_le_radix)] - //decompose _x into a _result_len vector over the _radix basis - //_radix must be less than 256 - pub fn to_le_radix(_x : Field, _radix: u32, _result_len: u32) -> [u8] {} + fn __to_le_radix(_self: Self, _radix: u32, _result_len: u32) -> [u8] {} + #[builtin(to_be_radix)] - pub fn to_be_radix(_x : Field, _radix: u32, _result_len: u32) -> [u8] {} + fn __to_be_radix(_self: Self, _radix: u32, _result_len: u32) -> [u8] {} + // Returns self to the power of the given exponent value. // Caution: we assume the exponent fits into 32 bits diff --git a/tooling/nargo_cli/tests/compile_failure/radix_non_constant_length/Nargo.toml b/tooling/nargo_cli/tests/compile_failure/radix_non_constant_length/Nargo.toml new file mode 100644 index 00000000000..b8b1a2417dc --- /dev/null +++ b/tooling/nargo_cli/tests/compile_failure/radix_non_constant_length/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "radix_non_constant_length" +type = "bin" +authors = [""] +compiler_version = "0.10.2" + +[dependencies] diff --git a/tooling/nargo_cli/tests/compile_failure/radix_non_constant_length/Prover.toml b/tooling/nargo_cli/tests/compile_failure/radix_non_constant_length/Prover.toml new file mode 100644 index 00000000000..f28f2f8cc48 --- /dev/null +++ b/tooling/nargo_cli/tests/compile_failure/radix_non_constant_length/Prover.toml @@ -0,0 +1,2 @@ +x = "5" +y = "10" diff --git a/tooling/nargo_cli/tests/compile_failure/radix_non_constant_length/src/main.nr b/tooling/nargo_cli/tests/compile_failure/radix_non_constant_length/src/main.nr new file mode 100644 index 00000000000..adfbd265a1d --- /dev/null +++ 
b/tooling/nargo_cli/tests/compile_failure/radix_non_constant_length/src/main.nr @@ -0,0 +1,4 @@ +fn main(x : Field, y : pub u32) { + let bytes = x.to_be_bytes(y); + assert(bytes[0] == 0); +} diff --git a/tooling/nargo_cli/tests/execution_success/field_attribute/Nargo.toml b/tooling/nargo_cli/tests/execution_success/field_attribute/Nargo.toml new file mode 100644 index 00000000000..f625d7e41f2 --- /dev/null +++ b/tooling/nargo_cli/tests/execution_success/field_attribute/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "field_attribute" +type = "bin" +authors = [""] +compiler_version = "0.1" + +[dependencies] diff --git a/tooling/nargo_cli/tests/execution_success/field_attribute/Prover.toml b/tooling/nargo_cli/tests/execution_success/field_attribute/Prover.toml new file mode 100644 index 00000000000..07890234a19 --- /dev/null +++ b/tooling/nargo_cli/tests/execution_success/field_attribute/Prover.toml @@ -0,0 +1 @@ +x = "3" diff --git a/tooling/nargo_cli/tests/execution_success/field_attribute/src/main.nr b/tooling/nargo_cli/tests/execution_success/field_attribute/src/main.nr new file mode 100644 index 00000000000..d6d71781899 --- /dev/null +++ b/tooling/nargo_cli/tests/execution_success/field_attribute/src/main.nr @@ -0,0 +1,19 @@ +// Test that only the foo() whose #[field] attribute matches the native field (bn254) is compiled +fn main(mut x: u32) { + assert(x > foo()); +} + +#[field(bn254)] +fn foo() -> u32 { + 1 +} + +#[field(23)] +fn foo() -> u32 { + 2 +} + +#[field(bls12_381)] +fn foo() -> u32 { + 3 +} \ No newline at end of file diff --git a/tooling/nargo_cli/tests/execution_success/nested_array_dynamic/src/main.nr b/tooling/nargo_cli/tests/execution_success/nested_array_dynamic/src/main.nr index 076c2b68f11..5f15905dfba 100644 --- a/tooling/nargo_cli/tests/execution_success/nested_array_dynamic/src/main.nr +++ b/tooling/nargo_cli/tests/execution_success/nested_array_dynamic/src/main.nr @@ -8,6 +8,11 @@ struct Foo { bar: Bar, } +struct FooParent { + array: [Field; 3], + foos: [Foo; 4], +} + fn main(mut x : [Foo; 4], y : pub 
Field) { assert(x[y - 3].a == 1); assert(x[y - 3].b == [2, 3, 20]); @@ -38,5 +43,37 @@ fn main(mut x : [Foo; 4], y : pub Field) { assert(x[y - 2].bar.inner == [103, 104, 105]); assert(x[y - 1].bar.inner == [106, 107, 108]); assert(x[y].bar.inner == [109, 110, 111]); + + let foo_parent_one = FooParent { array: [0, 1, 2], foos: x }; + let foo_parent_two = FooParent { array: [3, 4, 5], foos: x }; + let mut foo_parents = [foo_parent_one, foo_parent_two]; + + assert(foo_parents[y - 3].foos[y - 3].b == [2, 3, 20]); + assert(foo_parents[y - 3].foos[y - 2].b == [5, 6, 21]); + assert(foo_parents[y - 3].foos[y - 1].b == [100, 101, 102]); + assert(foo_parents[y - 3].foos[y].b == [11, 12, 23]); + + assert(foo_parents[y - 3].foos[y].a == 50); + + assert(foo_parents[1].foos[1].b == [5, 6, 21]); + if y == 2 { + foo_parents[y - 2].foos[y - 2].b = [10, 9, 8]; + } else { + foo_parents[y - 2].foos[y - 2].b = [20, 19, 18]; + } + assert(foo_parents[1].foos[1].b == [20, 19, 18]); + + assert(foo_parents[1].foos[1].b[2] == 18); + if y == 3 { + foo_parents[y - 2].foos[y - 2].b[y - 1] = 5000; + } else { + foo_parents[y - 2].foos[y - 2].b[y - 1] = 1000; + } + assert(foo_parents[1].foos[1].b[2] == 5000); + + // Set a dynamic array value + foo_parents[y - 2].foos[y - 3].b = foo_parents[y - 2].foos[y - 2].b; + assert(foo_parents[1].foos[0].b == [20, 19, 5000]); + } diff --git a/tooling/nargo_fmt/src/visitor/expr.rs b/tooling/nargo_fmt/src/visitor/expr.rs index 6ff640e4e51..5209f669ce1 100644 --- a/tooling/nargo_fmt/src/visitor/expr.rs +++ b/tooling/nargo_fmt/src/visitor/expr.rs @@ -27,6 +27,9 @@ impl FmtVisitor<'_> { ExpressionKind::Prefix(prefix) => { format!("{}{}", prefix.operator, self.format_expr(prefix.rhs)) } + ExpressionKind::Cast(cast) => { + format!("{} as {}", self.format_expr(cast.lhs), cast.r#type) + } ExpressionKind::Infix(infix) => { format!( "{} {} {}", diff --git a/tooling/nargo_fmt/tests/expected/cast.nr b/tooling/nargo_fmt/tests/expected/cast.nr new file mode 100644 index 
00000000000..63008168f84 --- /dev/null +++ b/tooling/nargo_fmt/tests/expected/cast.nr @@ -0,0 +1,3 @@ +fn main() { + x as u8 +} diff --git a/tooling/nargo_fmt/tests/input/cast.nr b/tooling/nargo_fmt/tests/input/cast.nr new file mode 100644 index 00000000000..dfbb370e2bb --- /dev/null +++ b/tooling/nargo_fmt/tests/input/cast.nr @@ -0,0 +1,4 @@ +fn main() { + +x as u8 +} diff --git a/tooling/noir_js/src/witness_generation.ts b/tooling/noir_js/src/witness_generation.ts index 3b1dfd90109..f3307837736 100644 --- a/tooling/noir_js/src/witness_generation.ts +++ b/tooling/noir_js/src/witness_generation.ts @@ -1,13 +1,13 @@ -import { abiEncode } from '@noir-lang/noirc_abi'; +import { abiEncode, InputMap } from '@noir-lang/noirc_abi'; import { base64Decode } from './base64_decode.js'; import { executeCircuit } from '@noir-lang/acvm_js'; import { witnessMapToUint8Array } from './serialize.js'; import { CompiledCircuit } from '@noir-lang/types'; // Generates the witnesses needed to feed into the chosen proving system -export async function generateWitness(compiledProgram: CompiledCircuit, inputs: unknown): Promise { +export async function generateWitness(compiledProgram: CompiledCircuit, inputs: InputMap): Promise { // Throws on ABI encoding error - const witnessMap = abiEncode(compiledProgram.abi, inputs, null); + const witnessMap = abiEncode(compiledProgram.abi, inputs); // Execute the circuit to generate the rest of the witnesses and serialize // them into a Uint8Array. 
diff --git a/tooling/noir_js/test/node/e2e.test.ts b/tooling/noir_js/test/node/e2e.test.ts index fe0d26c7e3b..5e6b566121e 100644 --- a/tooling/noir_js/test/node/e2e.test.ts +++ b/tooling/noir_js/test/node/e2e.test.ts @@ -3,6 +3,9 @@ import assert_lt_json from '../noir_compiled_examples/assert_lt/target/assert_lt import { generateWitness } from '../../src/index.js'; import { Noir } from '../../src/program.js'; import { BarretenbergBackend as Backend } from '@noir-lang/backend_barretenberg'; +import { CompiledCircuit } from '@noir-lang/types'; + +const assert_lt_program = assert_lt_json as CompiledCircuit; it('end-to-end proof creation and verification (outer)', async () => { // Noir.Js part @@ -10,12 +13,12 @@ it('end-to-end proof creation and verification (outer)', async () => { x: '2', y: '3', }; - const serializedWitness = await generateWitness(assert_lt_json, inputs); + const serializedWitness = await generateWitness(assert_lt_program, inputs); // bb.js part // // Proof creation - const prover = new Backend(assert_lt_json); + const prover = new Backend(assert_lt_program); const proof = await prover.generateFinalProof(serializedWitness); // Proof verification @@ -31,9 +34,9 @@ it('end-to-end proof creation and verification (outer) -- Program API', async () }; // Initialize backend - const backend = new Backend(assert_lt_json); + const backend = new Backend(assert_lt_program); // Initialize program - const program = new Noir(assert_lt_json, backend); + const program = new Noir(assert_lt_program, backend); // Generate proof const proof = await program.generateFinalProof(inputs); @@ -48,12 +51,12 @@ it('end-to-end proof creation and verification (inner)', async () => { x: '2', y: '3', }; - const serializedWitness = await generateWitness(assert_lt_json, inputs); + const serializedWitness = await generateWitness(assert_lt_program, inputs); // bb.js part // // Proof creation - const prover = new Backend(assert_lt_json); + const prover = new Backend(assert_lt_program); 
const proof = await prover.generateIntermediateProof(serializedWitness); // Proof verification @@ -79,15 +82,15 @@ it('[BUG] -- bb.js null function or function signature mismatch (different insta x: '2', y: '3', }; - const serializedWitness = await generateWitness(assert_lt_json, inputs); + const serializedWitness = await generateWitness(assert_lt_program, inputs); // bb.js part - const prover = new Backend(assert_lt_json); + const prover = new Backend(assert_lt_program); const proof = await prover.generateFinalProof(serializedWitness); try { - const verifier = new Backend(assert_lt_json); + const verifier = new Backend(assert_lt_program); await verifier.verifyFinalProof(proof); expect.fail( 'bb.js currently returns a bug when we try to verify a proof with a different Barretenberg instance that created it.', @@ -111,13 +114,13 @@ it('[BUG] -- bb.js null function or function signature mismatch (outer-inner) ', x: '2', y: '3', }; - const serializedWitness = await generateWitness(assert_lt_json, inputs); + const serializedWitness = await generateWitness(assert_lt_program, inputs); // bb.js part // // Proof creation // - const prover = new Backend(assert_lt_json); + const prover = new Backend(assert_lt_program); // Create a proof using both proving systems, the majority of the time // one would only use outer proofs. 
const proofOuter = await prover.generateFinalProof(serializedWitness); diff --git a/tooling/noir_js/test/node/smoke.test.ts b/tooling/noir_js/test/node/smoke.test.ts index 4b0291c0f41..931416b2e26 100644 --- a/tooling/noir_js/test/node/smoke.test.ts +++ b/tooling/noir_js/test/node/smoke.test.ts @@ -1,13 +1,16 @@ import { expect } from 'chai'; import assert_lt_json from '../noir_compiled_examples/assert_lt/target/assert_lt.json' assert { type: 'json' }; import { generateWitness } from '../../src/index.js'; +import { CompiledCircuit } from '@noir-lang/types'; + +const assert_lt_program = assert_lt_json as CompiledCircuit; it('generates witnesses successfully', async () => { const inputs = { x: '2', y: '3', }; - expect(() => generateWitness(assert_lt_json, inputs)).to.not.throw; + expect(() => generateWitness(assert_lt_program, inputs)).to.not.throw; }); it('string input and number input are the same', async () => { @@ -19,8 +22,8 @@ it('string input and number input are the same', async () => { x: 2, y: 3, }; - const solvedWitnessString = await generateWitness(assert_lt_json, inputsString); - const solvedWitnessNumber = await generateWitness(assert_lt_json, inputsNumber); + const solvedWitnessString = await generateWitness(assert_lt_program, inputsString); + const solvedWitnessNumber = await generateWitness(assert_lt_program, inputsNumber); expect(solvedWitnessString).to.deep.equal(solvedWitnessNumber); }); @@ -34,8 +37,8 @@ it('string input and number input are the same', async () => { y: 3, }; - const solvedWitnessString = await generateWitness(assert_lt_json, inputsString); - const solvedWitnessNumber = await generateWitness(assert_lt_json, inputsNumber); + const solvedWitnessString = await generateWitness(assert_lt_program, inputsString); + const solvedWitnessNumber = await generateWitness(assert_lt_program, inputsNumber); expect(solvedWitnessString).to.deep.equal(solvedWitnessNumber); }); @@ -46,7 +49,7 @@ it('0x prefixed string input for inputs will throw', 
async () => { }; try { - await generateWitness(assert_lt_json, inputsHexPrefix); + await generateWitness(assert_lt_program, inputsHexPrefix); expect.fail('Expected generatedWitness to throw, due to inputs being prefixed with 0x. Currently not supported'); } catch (error) { // Successfully errored due to 0x not being supported. Update this test once/if we choose @@ -62,7 +65,7 @@ describe('input validation', () => { }; try { - await generateWitness(assert_lt_json, inputs); + await generateWitness(assert_lt_program, inputs); expect.fail('Expected generatedWitness to throw, due to x not being convertible to a uint64'); } catch (error) { const knownError = error as Error; diff --git a/tooling/noir_js_types/package.json b/tooling/noir_js_types/package.json index eb913f16ca3..f4801a546c7 100644 --- a/tooling/noir_js_types/package.json +++ b/tooling/noir_js_types/package.json @@ -28,6 +28,9 @@ "types": "./lib/esm/types.d.ts" } }, + "dependencies": { + "@noir-lang/noirc_abi": "workspace:*" + }, "devDependencies": { "@types/prettier": "^3", "eslint": "^8.50.0", diff --git a/tooling/noir_js_types/src/types.ts b/tooling/noir_js_types/src/types.ts index 357e440f155..6285972d1e9 100644 --- a/tooling/noir_js_types/src/types.ts +++ b/tooling/noir_js_types/src/types.ts @@ -1,3 +1,5 @@ +import { Abi } from '@noir-lang/noirc_abi'; + export interface Backend { // Generate an outer proof. This is the proof for the circuit which will verify // inner proofs and or can be seen as the proof created for regular circuits. 
@@ -19,5 +21,5 @@ export type ProofData = { export type CompiledCircuit = { bytecode: string; - abi: object; + abi: Abi; }; diff --git a/tooling/noirc_abi_wasm/src/lib.rs b/tooling/noirc_abi_wasm/src/lib.rs index 2b1fc672fc4..24e0e6506fb 100644 --- a/tooling/noirc_abi_wasm/src/lib.rs +++ b/tooling/noirc_abi_wasm/src/lib.rs @@ -22,28 +22,83 @@ use errors::JsAbiError; use js_witness_map::JsWitnessMap; use temp::{input_value_from_json_type, JsonTypes}; +#[wasm_bindgen(typescript_custom_section)] +const INPUT_MAP: &'static str = r#" +export type Field = string | number | boolean; +export type InputValue = Field | Field[] | InputMap; +export type InputMap = { [key: string]: InputValue }; +"#; + +#[wasm_bindgen] +extern "C" { + #[wasm_bindgen(extends = js_sys::Object, js_name = "InputMap", typescript_type = "InputMap")] + #[derive(Clone, Debug, PartialEq, Eq)] + pub type JsInputMap; +} + +#[wasm_bindgen] +extern "C" { + #[wasm_bindgen(extends = js_sys::Object, js_name = "InputValue", typescript_type = "InputValue")] + #[derive(Clone, Debug, PartialEq, Eq)] + pub type JsInputValue; +} + +#[wasm_bindgen(typescript_custom_section)] +const ABI: &'static str = r#" +export type Visibility = "public" | "private"; +export type Sign = "unsigned" | "signed"; +export type AbiType = + { kind: "field" } | + { kind: "boolean" } | + { kind: "string", length: number } | + { kind: "integer", sign: Sign, width: number } | + { kind: "array", length: number, type: AbiType } | + { kind: "tuple", fields: AbiType[] } | + { kind: "struct", path: string, fields: [string, AbiType][] }; + +export type AbiParameter = { + name: string, + type: AbiType, + visibility: Visibility, +}; + +export type Abi = { + parameters: AbiParameter[], + param_witnesses: Record<string, number[]>, + return_type: AbiType | null, + return_witnesses: number[], +} +"#; + +#[wasm_bindgen] +extern "C" { + #[wasm_bindgen(extends = js_sys::Object, js_name = "Abi", typescript_type = "Abi")] + #[derive(Clone, Debug, PartialEq, Eq)] + pub type 
JsAbi; +} + #[wasm_bindgen(js_name = abiEncode)] pub fn abi_encode( - abi: JsValue, - inputs: JsValue, - return_value: JsValue, + abi: JsAbi, + inputs: JsInputMap, + return_value: Option<JsInputValue>, ) -> Result<JsWitnessMap, JsAbiError> { console_error_panic_hook::set_once(); - let abi: Abi = JsValueSerdeExt::into_serde(&abi).map_err(|err| err.to_string())?; + let abi: Abi = + JsValueSerdeExt::into_serde(&JsValue::from(abi)).map_err(|err| err.to_string())?; let inputs: BTreeMap<String, JsonTypes> = - JsValueSerdeExt::into_serde(&inputs).map_err(|err| err.to_string())?; - let return_value: Option<InputValue> = if return_value.is_undefined() || return_value.is_null() - { - None - } else { - let toml_return_value = - JsValueSerdeExt::into_serde(&return_value).expect("could not decode return value"); - Some(input_value_from_json_type( - toml_return_value, - abi.return_type.as_ref().unwrap(), - MAIN_RETURN_NAME, - )?) - }; + JsValueSerdeExt::into_serde(&JsValue::from(inputs)).map_err(|err| err.to_string())?; + let return_value: Option<InputValue> = return_value + .map(|return_value| { + let toml_return_value = JsValueSerdeExt::into_serde(&JsValue::from(return_value)) + .expect("could not decode return value"); + input_value_from_json_type( + toml_return_value, + abi.return_type.as_ref().unwrap(), + MAIN_RETURN_NAME, + ) + }) + .transpose()?; let abi_map = abi.to_btree_map(); let parsed_inputs: BTreeMap<String, InputValue> = @@ -62,9 +117,10 @@ pub fn abi_encode( } #[wasm_bindgen(js_name = abiDecode)] -pub fn abi_decode(abi: JsValue, witness_map: JsWitnessMap) -> Result<JsValue, JsAbiError> { +pub fn abi_decode(abi: JsAbi, witness_map: JsWitnessMap) -> Result<JsValue, JsAbiError> { console_error_panic_hook::set_once(); - let abi: Abi = JsValueSerdeExt::into_serde(&abi).map_err(|err| err.to_string())?; + let abi: Abi = + JsValueSerdeExt::into_serde(&JsValue::from(abi)).map_err(|err| err.to_string())?; let witness_map = WitnessMap::from(witness_map); diff --git a/tooling/noirc_abi_wasm/test/browser/abi_encode.test.ts b/tooling/noirc_abi_wasm/test/browser/abi_encode.test.ts index 5e604aa5b66..e1aaf0dc2c0 100644 
--- a/tooling/noirc_abi_wasm/test/browser/abi_encode.test.ts +++ b/tooling/noirc_abi_wasm/test/browser/abi_encode.test.ts @@ -1,5 +1,5 @@ import { expect } from '@esm-bundle/chai'; -import initNoirAbi, { abiEncode, abiDecode, WitnessMap } from '@noir-lang/noirc_abi'; +import initNoirAbi, { abiEncode, abiDecode, WitnessMap, Field } from '@noir-lang/noirc_abi'; import { DecodedInputs } from '../types'; beforeEach(async () => { @@ -9,11 +9,13 @@ beforeEach(async () => { it('recovers original inputs when abi encoding and decoding', async () => { const { abi, inputs } = await import('../shared/abi_encode'); - const initial_witness: WitnessMap = abiEncode(abi, inputs, null); + const initial_witness: WitnessMap = abiEncode(abi, inputs); const decoded_inputs: DecodedInputs = abiDecode(abi, initial_witness); - expect(BigInt(decoded_inputs.inputs.foo)).to.be.equal(BigInt(inputs.foo)); - expect(BigInt(decoded_inputs.inputs.bar[0])).to.be.equal(BigInt(inputs.bar[0])); - expect(BigInt(decoded_inputs.inputs.bar[1])).to.be.equal(BigInt(inputs.bar[1])); + const foo: Field = inputs.foo as Field; + const bar: Field[] = inputs.bar as Field[]; + expect(BigInt(decoded_inputs.inputs.foo)).to.be.equal(BigInt(foo)); + expect(BigInt(decoded_inputs.inputs.bar[0])).to.be.equal(BigInt(bar[0])); + expect(BigInt(decoded_inputs.inputs.bar[1])).to.be.equal(BigInt(bar[1])); expect(decoded_inputs.return_value).to.be.null; }); diff --git a/tooling/noirc_abi_wasm/test/browser/errors.test.ts b/tooling/noirc_abi_wasm/test/browser/errors.test.ts index 5f9b40a195c..429a2d446a3 100644 --- a/tooling/noirc_abi_wasm/test/browser/errors.test.ts +++ b/tooling/noirc_abi_wasm/test/browser/errors.test.ts @@ -8,7 +8,7 @@ beforeEach(async () => { it('errors when an integer input overflows', async () => { const { abi, inputs } = await import('../shared/uint_overflow'); - expect(() => abiEncode(abi, inputs, null)).to.throw( + expect(() => abiEncode(abi, inputs)).to.throw( 'The parameter foo is expected to be a 
Integer { sign: Unsigned, width: 32 } but found incompatible value Field(2³⁸)', ); }); @@ -16,11 +16,11 @@ it('errors when an integer input overflows', async () => { it('errors when passing a field in place of an array', async () => { const { abi, inputs } = await import('../shared/field_as_array'); - expect(() => abiEncode(abi, inputs, null)).to.throw('cannot parse value into Array { length: 2, typ: Field }'); + expect(() => abiEncode(abi, inputs)).to.throw('cannot parse value into Array { length: 2, typ: Field }'); }); it('errors when passing an array in place of a field', async () => { const { abi, inputs } = await import('../shared/array_as_field'); - expect(() => abiEncode(abi, inputs, null)).to.throw('cannot parse value into Field'); + expect(() => abiEncode(abi, inputs)).to.throw('cannot parse value into Field'); }); diff --git a/tooling/noirc_abi_wasm/test/node/abi_encode.test.ts b/tooling/noirc_abi_wasm/test/node/abi_encode.test.ts index 830e45cf10b..a49c10b6ea6 100644 --- a/tooling/noirc_abi_wasm/test/node/abi_encode.test.ts +++ b/tooling/noirc_abi_wasm/test/node/abi_encode.test.ts @@ -1,15 +1,17 @@ import { expect } from 'chai'; -import { abiEncode, abiDecode, WitnessMap } from '@noir-lang/noirc_abi'; +import { abiEncode, abiDecode, WitnessMap, Field } from '@noir-lang/noirc_abi'; import { DecodedInputs } from '../types'; it('recovers original inputs when abi encoding and decoding', async () => { const { abi, inputs } = await import('../shared/abi_encode'); - const initial_witness: WitnessMap = abiEncode(abi, inputs, null); + const initial_witness: WitnessMap = abiEncode(abi, inputs); const decoded_inputs: DecodedInputs = abiDecode(abi, initial_witness); - expect(BigInt(decoded_inputs.inputs.foo)).to.be.equal(BigInt(inputs.foo)); - expect(BigInt(decoded_inputs.inputs.bar[0])).to.be.equal(BigInt(inputs.bar[0])); - expect(BigInt(decoded_inputs.inputs.bar[1])).to.be.equal(BigInt(inputs.bar[1])); + const foo: Field = inputs.foo as Field; + const bar: Field[] 
= inputs.bar as Field[]; + expect(BigInt(decoded_inputs.inputs.foo)).to.be.equal(BigInt(foo)); + expect(BigInt(decoded_inputs.inputs.bar[0])).to.be.equal(BigInt(bar[0])); + expect(BigInt(decoded_inputs.inputs.bar[1])).to.be.equal(BigInt(bar[1])); expect(decoded_inputs.return_value).to.be.null; }); diff --git a/tooling/noirc_abi_wasm/test/node/errors.test.ts b/tooling/noirc_abi_wasm/test/node/errors.test.ts index ee0670ab5be..0d007e64803 100644 --- a/tooling/noirc_abi_wasm/test/node/errors.test.ts +++ b/tooling/noirc_abi_wasm/test/node/errors.test.ts @@ -4,7 +4,7 @@ import { abiEncode } from '@noir-lang/noirc_abi'; it('errors when an integer input overflows', async () => { const { abi, inputs } = await import('../shared/uint_overflow'); - expect(() => abiEncode(abi, inputs, null)).to.throw( + expect(() => abiEncode(abi, inputs)).to.throw( 'The parameter foo is expected to be a Integer { sign: Unsigned, width: 32 } but found incompatible value Field(2³⁸)', ); }); @@ -12,11 +12,11 @@ it('errors when an integer input overflows', async () => { it('errors when passing a field in place of an array', async () => { const { abi, inputs } = await import('../shared/field_as_array'); - expect(() => abiEncode(abi, inputs, null)).to.throw('cannot parse value into Array { length: 2, typ: Field }'); + expect(() => abiEncode(abi, inputs)).to.throw('cannot parse value into Array { length: 2, typ: Field }'); }); it('errors when passing an array in place of a field', async () => { const { abi, inputs } = await import('../shared/array_as_field'); - expect(() => abiEncode(abi, inputs, null)).to.throw('cannot parse value into Field'); + expect(() => abiEncode(abi, inputs)).to.throw('cannot parse value into Field'); }); diff --git a/tooling/noirc_abi_wasm/test/shared/abi_encode.ts b/tooling/noirc_abi_wasm/test/shared/abi_encode.ts index 0f9c93a4f76..28379745dec 100644 --- a/tooling/noirc_abi_wasm/test/shared/abi_encode.ts +++ b/tooling/noirc_abi_wasm/test/shared/abi_encode.ts @@ -1,6 +1,6 
@@ -// TODO: Add type definitions for these +import { Abi, InputMap } from '@noir-lang/noirc_abi'; -export const abi = { +export const abi: Abi = { parameters: [ { name: 'foo', type: { kind: 'field' }, visibility: 'private' }, { @@ -14,7 +14,7 @@ export const abi = { return_witnesses: [], }; -export const inputs = { +export const inputs: InputMap = { foo: '1', bar: ['1', '2'], }; diff --git a/tooling/noirc_abi_wasm/test/shared/array_as_field.ts b/tooling/noirc_abi_wasm/test/shared/array_as_field.ts index 06e6a7beebf..ba58f075702 100644 --- a/tooling/noirc_abi_wasm/test/shared/array_as_field.ts +++ b/tooling/noirc_abi_wasm/test/shared/array_as_field.ts @@ -1,4 +1,6 @@ -export const abi = { +import { Abi, InputMap } from '@noir-lang/noirc_abi'; + +export const abi: Abi = { parameters: [ { name: 'foo', @@ -11,6 +13,6 @@ export const abi = { return_witnesses: [], }; -export const inputs = { +export const inputs: InputMap = { foo: ['1', '2'], }; diff --git a/tooling/noirc_abi_wasm/test/shared/field_as_array.ts b/tooling/noirc_abi_wasm/test/shared/field_as_array.ts index 89ae529d6b1..931720d5e1b 100644 --- a/tooling/noirc_abi_wasm/test/shared/field_as_array.ts +++ b/tooling/noirc_abi_wasm/test/shared/field_as_array.ts @@ -1,4 +1,6 @@ -export const abi = { +import { Abi, InputMap } from '@noir-lang/noirc_abi'; + +export const abi: Abi = { parameters: [ { name: 'foo', @@ -11,6 +13,6 @@ export const abi = { return_witnesses: [], }; -export const inputs = { +export const inputs: InputMap = { foo: '1', }; diff --git a/tooling/noirc_abi_wasm/test/shared/uint_overflow.ts b/tooling/noirc_abi_wasm/test/shared/uint_overflow.ts index 87f59b1440e..ee87e050b23 100644 --- a/tooling/noirc_abi_wasm/test/shared/uint_overflow.ts +++ b/tooling/noirc_abi_wasm/test/shared/uint_overflow.ts @@ -1,4 +1,6 @@ -export const abi = { +import { Abi, InputMap } from '@noir-lang/noirc_abi'; + +export const abi: Abi = { parameters: [ { name: 'foo', @@ -11,6 +13,6 @@ export const abi = { 
return_witnesses: [], }; -export const inputs = { +export const inputs: InputMap = { foo: `0x${(1n << 38n).toString(16)}`, };