From 9256a7db3769ee26d99852460762302fd9e80884 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Wed, 14 Feb 2024 15:08:11 +0000 Subject: [PATCH 01/45] chore: fix JS package publishing (#4366) # Description ## Problem\* Resolves ## Summary\* Publishing of Noir packages is currently failing due to a `_` being used in place of a `-`. ## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. --- .github/workflows/publish-es-packages.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/publish-es-packages.yml b/.github/workflows/publish-es-packages.yml index 231a6124785..fa245883ced 100644 --- a/.github/workflows/publish-es-packages.yml +++ b/.github/workflows/publish-es-packages.yml @@ -114,7 +114,7 @@ jobs: - uses: actions/download-artifact@v4 with: - name: acvm_js + name: acvm-js path: acvm-repo/acvm_js - uses: actions/download-artifact@v4 From e0ad0b2b31f6d46be75d23aec6a82850a9c4bd75 Mon Sep 17 00:00:00 2001 From: Gustavo Giráldez Date: Wed, 14 Feb 2024 10:21:11 -0500 Subject: [PATCH 02/45] feat: DAP Preflight and debugger compilation options (#4185) # Description ## Problem\* Part of #3015 ## Summary\* This PR adds a preflight mode to DAP in order to make it easier to identify and report back problems to the user when compiling the project for debugging. This preflight mode is invoked from the VS Code extension before starting the debugging session, with the same arguments as those that will be used for the session. If the compiler finds any error while loading or compiling the project, the error is reported to stderr, which allows the extension to parse the output and present the diagnostic messages to the user. This also changes the default compilation mode to output Brillig code and adds new command-line options to Nargo's `debug` command and launch options to the DAP mode to control the output mode and whether to inject instrumentation code to track variables. The `debug` options are: - `--acir-mode`: force ACIR output, which disables instrumentation by default - `--skip-instrumentation={true,false}`: control injection of instrumentation code to track variable values during the debugging session. Similarly, for DAP two launch options can be provided: `generateAcir` and `skipInstrumentation`. ## Additional Context The default is to output in Brillig mode with instrumentation for tracking variables, as this makes it easier to follow along when stepping through the code. If ACIR mode is selected, instrumentation is disabled by default. Instrumentation can be forcibly enabled or disabled via the CLI option above (a short sketch of this defaulting rule follows the PR checklist below). ## Documentation\* Check one: - [ ] No documentation needed. - [ ] Documentation included in this PR. - [X] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [X] I have tested the changes locally. - [X] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings.
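As a quick illustration of how these options interact, the defaulting rule boils down to the logic below. This is a minimal sketch: `resolve_debug_modes` is an invented helper name, but the `unwrap_or` expression mirrors the `skip_instrumentation.unwrap_or(acir_mode)` line this patch adds to `debug_cmd.rs`.

```rust
// Sketch only: returns (acir_mode, skip_instrumentation) as resolved by nargo.
fn resolve_debug_modes(acir_mode: bool, skip_instrumentation: Option<bool>) -> (bool, bool) {
    // Instrumentation is skipped when explicitly requested, or by default
    // whenever ACIR output is selected.
    (acir_mode, skip_instrumentation.unwrap_or(acir_mode))
}

fn main() {
    // Default: Brillig output with variable-tracking instrumentation enabled.
    assert_eq!(resolve_debug_modes(false, None), (false, false));
    // --acir-mode alone: instrumentation is disabled by default.
    assert_eq!(resolve_debug_modes(true, None), (true, true));
    // --acir-mode --skip-instrumentation=false: instrumentation forced back on.
    assert_eq!(resolve_debug_modes(true, Some(false)), (true, false));
}
```

The DAP launch options follow the same rule: `skipInstrumentation` defaults to the value of `generateAcir`.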
--------- Co-authored-by: Martin Verzilli --- cspell.json | 1 + tooling/debugger/src/dap.rs | 7 +- tooling/debugger/src/errors.rs | 19 ++++ tooling/debugger/src/lib.rs | 1 + tooling/nargo_cli/src/cli/dap_cmd.rs | 148 +++++++++++++++++-------- tooling/nargo_cli/src/cli/debug_cmd.rs | 85 +++++++++----- tooling/nargo_cli/src/errors.rs | 3 +- 7 files changed, 192 insertions(+), 72 deletions(-) create mode 100644 tooling/debugger/src/errors.rs diff --git a/cspell.json b/cspell.json index 2acca0633d3..be6b7c5c7e8 100644 --- a/cspell.json +++ b/cspell.json @@ -90,6 +90,7 @@ "indexmap", "injective", "Inlines", + "instrumenter", "interner", "intrinsics", "jmp", diff --git a/tooling/debugger/src/dap.rs b/tooling/debugger/src/dap.rs index dd9a30d50da..184018e9fcc 100644 --- a/tooling/debugger/src/dap.rs +++ b/tooling/debugger/src/dap.rs @@ -510,7 +510,12 @@ impl<'a, R: Read, W: Write, B: BlackBoxFunctionSolver> DapSession<'a, R, W, B> { }; let found_index = match line_to_opcodes.binary_search_by(|x| x.0.cmp(&line)) { Ok(index) => line_to_opcodes[index].1, - Err(index) => line_to_opcodes[index].1, + Err(index) => { + if index >= line_to_opcodes.len() { + return None; + } + line_to_opcodes[index].1 + } }; Some(found_index) } diff --git a/tooling/debugger/src/errors.rs b/tooling/debugger/src/errors.rs new file mode 100644 index 00000000000..4578987d715 --- /dev/null +++ b/tooling/debugger/src/errors.rs @@ -0,0 +1,19 @@ +use thiserror::Error; + +#[derive(Debug, Error)] +pub enum DapError { + #[error("{0}")] + PreFlightGenericError(String), + + #[error(transparent)] + LoadError(#[from] LoadError), + + #[error(transparent)] + ServerError(#[from] dap::errors::ServerError), +} + +#[derive(Debug, Error)] +pub enum LoadError { + #[error("{0}")] + Generic(String), +} diff --git a/tooling/debugger/src/lib.rs b/tooling/debugger/src/lib.rs index 35014f9a8c8..4a25e3417a0 100644 --- a/tooling/debugger/src/lib.rs +++ b/tooling/debugger/src/lib.rs @@ -1,5 +1,6 @@ mod context; mod dap; +pub mod errors; mod foreign_calls; mod repl; mod source_code_printer; diff --git a/tooling/nargo_cli/src/cli/dap_cmd.rs b/tooling/nargo_cli/src/cli/dap_cmd.rs index 7c7e6056901..f4df309f1c9 100644 --- a/tooling/nargo_cli/src/cli/dap_cmd.rs +++ b/tooling/nargo_cli/src/cli/dap_cmd.rs @@ -3,38 +3,52 @@ use acvm::acir::native_types::WitnessMap; use backend_interface::Backend; use clap::Args; use nargo::constants::PROVER_INPUT_FILE; -use nargo::ops::compile_program_with_debug_instrumenter; use nargo::workspace::Workspace; -use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all}; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_abi::input_parser::Format; -use noirc_driver::{ - file_manager_with_stdlib, CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING, -}; +use noirc_driver::{CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING}; use noirc_frontend::graph::CrateName; use std::io::{BufReader, BufWriter, Read, Write}; use std::path::Path; -use dap::errors::ServerError; use dap::requests::Command; use dap::responses::ResponseBody; use dap::server::Server; use dap::types::Capabilities; use serde_json::Value; -use super::compile_cmd::report_errors; -use super::debug_cmd::instrument_package_files; +use super::debug_cmd::compile_bin_package_for_debugging; use super::fs::inputs::read_inputs_from_file; use crate::errors::CliError; use super::NargoConfig; +use noir_debugger::errors::{DapError, LoadError}; + #[derive(Debug, Clone, Args)] pub(crate) struct DapCommand { 
/// Override the expression width requested by the backend. #[arg(long, value_parser = parse_expression_width)] expression_width: Option<ExpressionWidth>, + + #[clap(long)] + preflight_check: bool, + + #[clap(long)] + preflight_project_folder: Option<String>, + + #[clap(long)] + preflight_package: Option<String>, + + #[clap(long)] + preflight_prover_name: Option<String>, + + #[clap(long)] + preflight_generate_acir: bool, + + #[clap(long)] + preflight_skip_instrumentation: bool, } fn parse_expression_width(input: &str) -> Result<ExpressionWidth, std::io::Error> { @@ -50,8 +64,6 @@ fn parse_expression_width(input: &str) -> Result<ExpressionWidth, std::io::Error> -struct LoadError(&'static str); - fn find_workspace(project_folder: &str, package: Option<&str>) -> Option<Workspace> { let Ok(toml_path) = get_package_manifest(Path::new(project_folder)) else { eprintln!("ERROR: Failed to get package manifest"); @@ -72,55 +84,51 @@ fn find_workspace(project_folder: &str, package: Option<&str>) -> Option<Workspace> +fn workspace_not_found_error_msg(project_folder: &str, package: Option<&str>) -> String { + match package { + Some(pkg) => format!( + r#"Noir Debugger could not load program from {}, package {}"#, + project_folder, pkg + ), + None => format!(r#"Noir Debugger could not load program from {}"#, project_folder), + } +} + fn load_and_compile_project( project_folder: &str, package: Option<&str>, prover_name: &str, expression_width: ExpressionWidth, + acir_mode: bool, + skip_instrumentation: bool, ) -> Result<(CompiledProgram, WitnessMap), LoadError> { - let workspace = - find_workspace(project_folder, package).ok_or(LoadError("Cannot open workspace"))?; - + let workspace = find_workspace(project_folder, package) + .ok_or(LoadError::Generic(workspace_not_found_error_msg(project_folder, package)))?; let package = workspace .into_iter() .find(|p| p.is_binary()) - .ok_or(LoadError("No matching binary packages found in workspace"))?; - - let mut workspace_file_manager = file_manager_with_stdlib(std::path::Path::new("")); - insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); - let mut parsed_files = parse_all(&workspace_file_manager); + .ok_or(LoadError::Generic("No matching binary packages found in workspace".into()))?; - let compile_options = - CompileOptions { instrument_debug: true, force_brillig: true, ..CompileOptions::default() }; - - let debug_state = instrument_package_files(&mut parsed_files, &workspace_file_manager, package); - - let compilation_result = compile_program_with_debug_instrumenter( - &workspace_file_manager, - &parsed_files, + let compiled_program = compile_bin_package_for_debugging( + &workspace, package, - &compile_options, - None, - debug_state, - ); - - let compiled_program = report_errors( - compilation_result, - &workspace_file_manager, - compile_options.deny_warnings, - compile_options.silence_warnings, + acir_mode, + skip_instrumentation, + CompileOptions::default(), ) - .map_err(|_| LoadError("Failed to compile project"))?; + .map_err(|_| LoadError::Generic("Failed to compile project".into()))?; let compiled_program = nargo::ops::transform_program(compiled_program, expression_width); let (inputs_map, _) = read_inputs_from_file(&package.root_dir, prover_name, Format::Toml, &compiled_program.abi) - .map_err(|_| LoadError("Failed to read program inputs"))?; + .map_err(|_| { + LoadError::Generic(format!("Failed to read program inputs from {}", prover_name)) + })?; let initial_witness = compiled_program .abi .encode(&inputs_map, None) - .map_err(|_| LoadError("Failed to encode inputs"))?; + .map_err(|_| LoadError::Generic("Failed to encode inputs".into()))?; Ok((compiled_program, initial_witness)) } @@ -128,7 +136,7 @@ fn load_and_compile_project( fn loop_uninitialized_dap( mut server: Server, expression_width: ExpressionWidth, )
-> Result<(), ServerError> { +) -> Result<(), DapError> { loop { let req = match server.poll_request()? { Some(req) => req, @@ -163,6 +171,13 @@ fn loop_uninitialized_dap( .and_then(|v| v.as_str()) .unwrap_or(PROVER_INPUT_FILE); + let generate_acir = + additional_data.get("generateAcir").and_then(|v| v.as_bool()).unwrap_or(false); + let skip_instrumentation = additional_data + .get("skipInstrumentation") + .and_then(|v| v.as_bool()) + .unwrap_or(generate_acir); + eprintln!("Project folder: {}", project_folder); eprintln!("Package: {}", package.unwrap_or("(default)")); eprintln!("Prover name: {}", prover_name); @@ -172,6 +187,8 @@ fn loop_uninitialized_dap( package, prover_name, expression_width, + generate_acir, + skip_instrumentation, ) { Ok((compiled_program, initial_witness)) => { server.respond(req.ack()?)?; @@ -186,8 +203,8 @@ fn loop_uninitialized_dap( )?; break; } - Err(LoadError(message)) => { - server.respond(req.error(message))?; + Err(LoadError::Generic(message)) => { + server.respond(req.error(message.as_str()))?; } } } @@ -206,17 +223,58 @@ fn loop_uninitialized_dap( Ok(()) } +fn run_preflight_check( + expression_width: ExpressionWidth, + args: DapCommand, +) -> Result<(), DapError> { + let project_folder = if let Some(project_folder) = args.preflight_project_folder { + project_folder + } else { + return Err(DapError::PreFlightGenericError("Noir Debugger could not initialize because the IDE (for example, VS Code) did not specify a project folder to debug.".into())); + }; + + let package = args.preflight_package.as_deref(); + let prover_name = args.preflight_prover_name.as_deref().unwrap_or(PROVER_INPUT_FILE); + + let _ = load_and_compile_project( + project_folder.as_str(), + package, + prover_name, + expression_width, + args.preflight_generate_acir, + args.preflight_skip_instrumentation, + )?; + + Ok(()) +} + pub(crate) fn run( backend: &Backend, args: DapCommand, _config: NargoConfig, ) -> Result<(), CliError> { + let expression_width = + args.expression_width.unwrap_or_else(|| backend.get_backend_info_or_default()); + + // When the --preflight-check flag is present, we run Noir's DAP server in "pre-flight mode", which test runs + // the DAP initialization code without actually starting the DAP server. + // + // This lets the client IDE present any initialization issues (compiler version mismatches, missing prover files, etc) + // in its own interface. + // + // This was necessary due to the VS Code project being reluctant to let extension authors capture + // stderr output generated by a DAP server wrapped in DebugAdapterExecutable. + // + // Exposing this preflight mode lets us gracefully handle errors that happen *before* + // the DAP loop is established, which otherwise are considered "out of band" by the maintainers of the DAP spec. 
+ // More details here: https://github.com/microsoft/vscode/issues/108138 + if args.preflight_check { + return run_preflight_check(expression_width, args).map_err(CliError::DapError); + } + let output = BufWriter::new(std::io::stdout()); let input = BufReader::new(std::io::stdin()); let server = Server::new(input, output); - let expression_width = - args.expression_width.unwrap_or_else(|| backend.get_backend_info_or_default()); - loop_uninitialized_dap(server, expression_width).map_err(CliError::DapError) } diff --git a/tooling/nargo_cli/src/cli/debug_cmd.rs b/tooling/nargo_cli/src/cli/debug_cmd.rs index b3ee9137530..6fcfee91457 100644 --- a/tooling/nargo_cli/src/cli/debug_cmd.rs +++ b/tooling/nargo_cli/src/cli/debug_cmd.rs @@ -7,8 +7,10 @@ use clap::Args; use fm::FileManager; use nargo::artifacts::debug::DebugArtifact; use nargo::constants::PROVER_INPUT_FILE; -use nargo::ops::compile_program_with_debug_instrumenter; +use nargo::errors::CompileError; +use nargo::ops::{compile_program, compile_program_with_debug_instrumenter}; use nargo::package::Package; +use nargo::workspace::Workspace; use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all}; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_abi::input_parser::{Format, InputValue}; @@ -42,6 +44,14 @@ pub(crate) struct DebugCommand { #[clap(flatten)] compile_options: CompileOptions, + + /// Force ACIR output (disabling instrumentation) + #[clap(long)] + acir_mode: bool, + + /// Disable vars debug instrumentation (enabled by default) + #[clap(long)] + skip_instrumentation: Option<bool>, } pub(crate) fn run( backend: &Backend, args: DebugCommand, config: NargoConfig, ) -> Result<(), CliError> { - // Override clap default for compiler option flag - let mut args = args.clone(); - args.compile_options.instrument_debug = true; + let acir_mode = args.acir_mode; + let skip_instrumentation = args.skip_instrumentation.unwrap_or(acir_mode); let toml_path = get_package_manifest(&config.program_dir)?; let selection = args.package.map_or(PackageSelection::DefaultOrAll, PackageSelection::Selected); @@ -66,10 +75,6 @@ pub(crate) fn run( .expression_width .unwrap_or_else(|| backend.get_backend_info_or_default()); - let mut workspace_file_manager = file_manager_with_stdlib(std::path::Path::new("")); - insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); - let mut parsed_files = parse_all(&workspace_file_manager); - let Some(package) = workspace.into_iter().find(|p| p.is_binary()) else { println!( "No matching binary packages found in workspace. Only binary packages can be debugged."
@@ -77,23 +82,12 @@ pub(crate) fn run( return Ok(()); }; - let debug_instrumenter = - instrument_package_files(&mut parsed_files, &workspace_file_manager, package); - - let compilation_result = compile_program_with_debug_instrumenter( - &workspace_file_manager, - &parsed_files, + let compiled_program = compile_bin_package_for_debugging( + &workspace, package, - &args.compile_options, - None, - debug_instrumenter, - ); - - let compiled_program = report_errors( - compilation_result, - &workspace_file_manager, - args.compile_options.deny_warnings, - args.compile_options.silence_warnings, + acir_mode, + skip_instrumentation, + args.compile_options.clone(), )?; let compiled_program = nargo::ops::transform_program(compiled_program, expression_width); @@ -101,9 +95,50 @@ pub(crate) fn run( run_async(package, compiled_program, &args.prover_name, &args.witness_name, target_dir) } +pub(crate) fn compile_bin_package_for_debugging( + workspace: &Workspace, + package: &Package, + acir_mode: bool, + skip_instrumentation: bool, + compile_options: CompileOptions, +) -> Result<CompiledProgram, CompileError> { + let mut workspace_file_manager = file_manager_with_stdlib(std::path::Path::new("")); + insert_all_files_for_workspace_into_file_manager(workspace, &mut workspace_file_manager); + let mut parsed_files = parse_all(&workspace_file_manager); + + let compile_options = CompileOptions { + instrument_debug: !skip_instrumentation, + force_brillig: !acir_mode, + ..compile_options + }; + + let compilation_result = if !skip_instrumentation { + let debug_state = + instrument_package_files(&mut parsed_files, &workspace_file_manager, package); + + compile_program_with_debug_instrumenter( + &workspace_file_manager, + &parsed_files, + package, + &compile_options, + None, + debug_state, + ) + } else { + compile_program(&workspace_file_manager, &parsed_files, package, &compile_options, None) + }; + + report_errors( + compilation_result, + &workspace_file_manager, + compile_options.deny_warnings, + compile_options.silence_warnings, + ) +} + /// Add debugging instrumentation to all parsed files belonging to the package /// being compiled -pub(crate) fn instrument_package_files( +fn instrument_package_files( parsed_files: &mut ParsedFiles, file_manager: &FileManager, package: &Package, diff --git a/tooling/nargo_cli/src/errors.rs b/tooling/nargo_cli/src/errors.rs index 4636772231b..c2996f53420 100644 --- a/tooling/nargo_cli/src/errors.rs +++ b/tooling/nargo_cli/src/errors.rs @@ -2,6 +2,7 @@ use acvm::acir::native_types::WitnessMapError; use hex::FromHexError; use nargo::{errors::CompileError, NargoError}; use nargo_toml::ManifestError; +use noir_debugger::errors::DapError; use noirc_abi::errors::{AbiError, InputParserError}; use std::path::PathBuf; use thiserror::Error; @@ -54,7 +55,7 @@ pub(crate) enum CliError { LspError(#[from] async_lsp::Error), #[error(transparent)] - DapError(#[from] dap::errors::ServerError), + DapError(#[from] DapError), /// Error from Nargo #[error(transparent)] From dcd7a1e561a68504b9038ffbb3c80f5c981f9f0c Mon Sep 17 00:00:00 2001 From: Álvaro Rodríguez Date: Wed, 14 Feb 2024 18:03:44 +0100 Subject: [PATCH 03/45] fix: Consistent bit size for truncate (#4370) # Description ## Problem\* Work towards #4369 ## Summary\* Uses consistent bit sizes for truncate and begins a refactor to track bit sizes for values in Brillig IR (see the simplified sketch at the end of this patch). ## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR.
- [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. --- .../brillig/brillig_gen/brillig_black_box.rs | 102 ++++---- .../src/brillig/brillig_gen/brillig_block.rs | 219 ++++++++++-------- .../brillig_gen/brillig_block_variables.rs | 27 ++- .../src/brillig/brillig_gen/brillig_fn.rs | 2 +- .../brillig/brillig_gen/brillig_slice_ops.rs | 74 +++--- .../noirc_evaluator/src/brillig/brillig_ir.rs | 91 ++++---- .../src/brillig/brillig_ir/artifact.rs | 4 +- .../brillig/brillig_ir/brillig_variable.rs | 20 +- .../src/brillig/brillig_ir/entry_point.rs | 44 ++-- 9 files changed, 331 insertions(+), 252 deletions(-) diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs index dfe23b45034..d542240a40c 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs @@ -56,12 +56,12 @@ pub(crate) fn convert_black_box_call( } BlackBoxFunc::Keccak256 => { if let ( - [message, BrilligVariable::Simple(array_size)], + [message, BrilligVariable::SingleAddr(array_size)], [BrilligVariable::BrilligArray(result_array)], ) = (function_arguments, function_results) { let mut message_vector = convert_array_or_vector(brillig_context, message, bb_func); - message_vector.size = *array_size; + message_vector.size = array_size.address; brillig_context.black_box_op_instruction(BlackBoxOp::Keccak256 { message: message_vector.to_heap_vector(), @@ -88,7 +88,7 @@ pub(crate) fn convert_black_box_call( BlackBoxFunc::EcdsaSecp256k1 => { if let ( [BrilligVariable::BrilligArray(public_key_x), BrilligVariable::BrilligArray(public_key_y), BrilligVariable::BrilligArray(signature), message], - [BrilligVariable::Simple(result_register)], + [BrilligVariable::SingleAddr(result_register)], ) = (function_arguments, function_results) { let message_hash_vector = @@ -98,7 +98,7 @@ pub(crate) fn convert_black_box_call( public_key_x: public_key_x.to_heap_array(), public_key_y: public_key_y.to_heap_array(), signature: signature.to_heap_array(), - result: *result_register, + result: result_register.address, }); } else { unreachable!( @@ -109,7 +109,7 @@ pub(crate) fn convert_black_box_call( BlackBoxFunc::EcdsaSecp256r1 => { if let ( [BrilligVariable::BrilligArray(public_key_x), BrilligVariable::BrilligArray(public_key_y), BrilligVariable::BrilligArray(signature), message], - [BrilligVariable::Simple(result_register)], + [BrilligVariable::SingleAddr(result_register)], ) = (function_arguments, function_results) { let message_hash_vector = @@ -119,7 +119,7 @@ pub(crate) fn convert_black_box_call( public_key_x: public_key_x.to_heap_array(), public_key_y: public_key_y.to_heap_array(), signature: signature.to_heap_array(), - result: *result_register, + result: result_register.address, }); } else { unreachable!( @@ -130,14 +130,14 @@ pub(crate) fn convert_black_box_call( BlackBoxFunc::PedersenCommitment => { if let ( - [message, BrilligVariable::Simple(domain_separator)], + [message, BrilligVariable::SingleAddr(domain_separator)], [BrilligVariable::BrilligArray(result_array)], ) = (function_arguments, function_results) { let message_vector = convert_array_or_vector(brillig_context, message, bb_func); brillig_context.black_box_op_instruction(BlackBoxOp::PedersenCommitment { 
inputs: message_vector.to_heap_vector(), - domain_separator: *domain_separator, + domain_separator: domain_separator.address, output: result_array.to_heap_array(), }); } else { @@ -146,15 +146,15 @@ pub(crate) fn convert_black_box_call( } BlackBoxFunc::PedersenHash => { if let ( - [message, BrilligVariable::Simple(domain_separator)], - [BrilligVariable::Simple(result)], + [message, BrilligVariable::SingleAddr(domain_separator)], + [BrilligVariable::SingleAddr(result)], ) = (function_arguments, function_results) { let message_vector = convert_array_or_vector(brillig_context, message, bb_func); brillig_context.black_box_op_instruction(BlackBoxOp::PedersenHash { inputs: message_vector.to_heap_vector(), - domain_separator: *domain_separator, - output: *result, + domain_separator: domain_separator.address, + output: result.address, }); } else { unreachable!("ICE: Pedersen hash expects one array argument, a register for the domain separator, and one register result") @@ -162,18 +162,18 @@ pub(crate) fn convert_black_box_call( } BlackBoxFunc::SchnorrVerify => { if let ( - [BrilligVariable::Simple(public_key_x), BrilligVariable::Simple(public_key_y), BrilligVariable::BrilligArray(signature), message], - [BrilligVariable::Simple(result_register)], + [BrilligVariable::SingleAddr(public_key_x), BrilligVariable::SingleAddr(public_key_y), BrilligVariable::BrilligArray(signature), message], + [BrilligVariable::SingleAddr(result_register)], ) = (function_arguments, function_results) { let message_hash = convert_array_or_vector(brillig_context, message, bb_func); let signature = brillig_context.array_to_vector(signature); brillig_context.black_box_op_instruction(BlackBoxOp::SchnorrVerify { - public_key_x: *public_key_x, - public_key_y: *public_key_y, + public_key_x: public_key_x.address, + public_key_y: public_key_y.address, message: message_hash.to_heap_vector(), signature: signature.to_heap_vector(), - result: *result_register, + result: result_register.address, }); } else { unreachable!("ICE: Schnorr verify expects two registers for the public key, an array for signature, an array for the message hash and one result register") @@ -181,13 +181,13 @@ pub(crate) fn convert_black_box_call( } BlackBoxFunc::FixedBaseScalarMul => { if let ( - [BrilligVariable::Simple(low), BrilligVariable::Simple(high)], + [BrilligVariable::SingleAddr(low), BrilligVariable::SingleAddr(high)], [BrilligVariable::BrilligArray(result_array)], ) = (function_arguments, function_results) { brillig_context.black_box_op_instruction(BlackBoxOp::FixedBaseScalarMul { - low: *low, - high: *high, + low: low.address, + high: high.address, result: result_array.to_heap_array(), }); } else { @@ -198,15 +198,15 @@ pub(crate) fn convert_black_box_call( } BlackBoxFunc::EmbeddedCurveAdd => { if let ( - [BrilligVariable::Simple(input1_x), BrilligVariable::Simple(input1_y), BrilligVariable::Simple(input2_x), BrilligVariable::Simple(input2_y)], + [BrilligVariable::SingleAddr(input1_x), BrilligVariable::SingleAddr(input1_y), BrilligVariable::SingleAddr(input2_x), BrilligVariable::SingleAddr(input2_y)], [BrilligVariable::BrilligArray(result_array)], ) = (function_arguments, function_results) { brillig_context.black_box_op_instruction(BlackBoxOp::EmbeddedCurveAdd { - input1_x: *input1_x, - input1_y: *input1_y, - input2_x: *input2_x, - input2_y: *input2_y, + input1_x: input1_x.address, + input1_y: input1_y.address, + input2_x: input2_x.address, + input2_y: input2_y.address, result: result_array.to_heap_array(), }); } else { @@ -229,14 +229,14 @@ 
pub(crate) fn convert_black_box_call( ), BlackBoxFunc::BigIntAdd => { if let ( - [BrilligVariable::Simple(lhs), BrilligVariable::Simple(rhs)], - [BrilligVariable::Simple(output)], + [BrilligVariable::SingleAddr(lhs), BrilligVariable::SingleAddr(rhs)], + [BrilligVariable::SingleAddr(output)], ) = (function_arguments, function_results) { brillig_context.black_box_op_instruction(BlackBoxOp::BigIntAdd { - lhs: *lhs, - rhs: *rhs, - output: *output, + lhs: lhs.address, + rhs: rhs.address, + output: output.address, }); } else { unreachable!( @@ -246,14 +246,14 @@ pub(crate) fn convert_black_box_call( } BlackBoxFunc::BigIntSub => { if let ( - [BrilligVariable::Simple(lhs), BrilligVariable::Simple(rhs)], - [BrilligVariable::Simple(output)], + [BrilligVariable::SingleAddr(lhs), BrilligVariable::SingleAddr(rhs)], + [BrilligVariable::SingleAddr(output)], ) = (function_arguments, function_results) { brillig_context.black_box_op_instruction(BlackBoxOp::BigIntSub { - lhs: *lhs, - rhs: *rhs, - output: *output, + lhs: lhs.address, + rhs: rhs.address, + output: output.address, }); } else { unreachable!( @@ -263,14 +263,14 @@ pub(crate) fn convert_black_box_call( } BlackBoxFunc::BigIntMul => { if let ( - [BrilligVariable::Simple(lhs), BrilligVariable::Simple(rhs)], - [BrilligVariable::Simple(output)], + [BrilligVariable::SingleAddr(lhs), BrilligVariable::SingleAddr(rhs)], + [BrilligVariable::SingleAddr(output)], ) = (function_arguments, function_results) { brillig_context.black_box_op_instruction(BlackBoxOp::BigIntMul { - lhs: *lhs, - rhs: *rhs, - output: *output, + lhs: lhs.address, + rhs: rhs.address, + output: output.address, }); } else { unreachable!( @@ -280,14 +280,14 @@ pub(crate) fn convert_black_box_call( } BlackBoxFunc::BigIntDiv => { if let ( - [BrilligVariable::Simple(lhs), BrilligVariable::Simple(rhs)], - [BrilligVariable::Simple(output)], + [BrilligVariable::SingleAddr(lhs), BrilligVariable::SingleAddr(rhs)], + [BrilligVariable::SingleAddr(output)], ) = (function_arguments, function_results) { brillig_context.black_box_op_instruction(BlackBoxOp::BigIntDiv { - lhs: *lhs, - rhs: *rhs, - output: *output, + lhs: lhs.address, + rhs: rhs.address, + output: output.address, }); } else { unreachable!( @@ -296,7 +296,7 @@ pub(crate) fn convert_black_box_call( } } BlackBoxFunc::BigIntFromLeBytes => { - if let ([inputs, modulus], [BrilligVariable::Simple(output)]) = + if let ([inputs, modulus], [BrilligVariable::SingleAddr(output)]) = (function_arguments, function_results) { let inputs_vector = convert_array_or_vector(brillig_context, inputs, bb_func); @@ -304,7 +304,7 @@ pub(crate) fn convert_black_box_call( brillig_context.black_box_op_instruction(BlackBoxOp::BigIntFromLeBytes { inputs: inputs_vector.to_heap_vector(), modulus: modulus_vector.to_heap_vector(), - output: *output, + output: output.address, }); } else { unreachable!( @@ -314,12 +314,12 @@ pub(crate) fn convert_black_box_call( } BlackBoxFunc::BigIntToLeBytes => { if let ( - [BrilligVariable::Simple(input)], + [BrilligVariable::SingleAddr(input)], [BrilligVariable::BrilligVector(result_vector)], ) = (function_arguments, function_results) { brillig_context.black_box_op_instruction(BlackBoxOp::BigIntToLeBytes { - input: *input, + input: input.address, output: result_vector.to_heap_vector(), }); } else { @@ -330,7 +330,7 @@ pub(crate) fn convert_black_box_call( } BlackBoxFunc::Poseidon2Permutation => { if let ( - [message, BrilligVariable::Simple(state_len)], + [message, BrilligVariable::SingleAddr(state_len)], 
[BrilligVariable::BrilligArray(result_array)], ) = (function_arguments, function_results) { @@ -338,7 +338,7 @@ pub(crate) fn convert_black_box_call( brillig_context.black_box_op_instruction(BlackBoxOp::Poseidon2Permutation { message: message_vector.to_heap_vector(), output: result_array.to_heap_array(), - len: *state_len, + len: state_len.address, }); } else { unreachable!("ICE: Poseidon2Permutation expects one array argument, a length and one array result") diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs index 7697d7e65fa..c299daa158a 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs @@ -1,5 +1,5 @@ use crate::brillig::brillig_ir::brillig_variable::{ - type_to_heap_value_type, BrilligArray, BrilligVariable, BrilligVector, + type_to_heap_value_type, BrilligArray, BrilligVariable, BrilligVector, SingleAddrVariable, }; use crate::brillig::brillig_ir::{ BrilligBinaryOp, BrilligContext, BRILLIG_INTEGER_ARITHMETIC_BIT_SIZE, @@ -117,9 +117,9 @@ impl<'block> BrilligBlock<'block> { ) { match terminator_instruction { TerminatorInstruction::JmpIf { condition, then_destination, else_destination } => { - let condition = self.convert_ssa_register_value(*condition, dfg); + let condition = self.convert_ssa_single_addr_value(*condition, dfg); self.brillig_context.jump_if_instruction( - condition, + condition.address, self.create_block_label_for_current_function(*then_destination), ); self.brillig_context.jump_instruction( @@ -164,10 +164,10 @@ impl<'block> BrilligBlock<'block> { fn pass_variable(&mut self, source: BrilligVariable, destination: BrilligVariable) { match (source, destination) { ( - BrilligVariable::Simple(source_register), - BrilligVariable::Simple(destination_register), + BrilligVariable::SingleAddr(source_var), + BrilligVariable::SingleAddr(destination_var), ) => { - self.brillig_context.mov_instruction(destination_register, source_register); + self.brillig_context.mov_instruction(destination_var.address, source_var.address); } ( BrilligVariable::BrilligArray(BrilligArray { @@ -241,16 +241,19 @@ impl<'block> BrilligBlock<'block> { match instruction { Instruction::Binary(binary) => { - let result_register = self.variables.define_register_variable( + let result_var = self.variables.define_single_addr_variable( self.function_context, self.brillig_context, dfg.instruction_results(instruction_id)[0], dfg, ); - self.convert_ssa_binary(binary, dfg, result_register); + self.convert_ssa_binary(binary, dfg, result_var); } Instruction::Constrain(lhs, rhs, assert_message) => { - let condition = self.brillig_context.allocate_register(); + let condition = SingleAddrVariable { + address: self.brillig_context.allocate_register(), + bit_size: 1, + }; self.convert_ssa_binary( &Binary { lhs: *lhs, rhs: *rhs, operator: BinaryOp::Eq }, @@ -281,12 +284,12 @@ impl<'block> BrilligBlock<'block> { None }; - self.brillig_context.constrain_instruction(condition, assert_message); - self.brillig_context.deallocate_register(condition); + self.brillig_context.constrain_instruction(condition.address, assert_message); + self.brillig_context.deallocate_register(condition.address); } Instruction::Allocate => { let result_value = dfg.instruction_results(instruction_id)[0]; - let address_register = self.variables.define_register_variable( + let address_register = self.variables.define_single_addr_variable( self.function_context, 
self.brillig_context, result_value, @@ -296,15 +299,16 @@ impl<'block> BrilligBlock<'block> { Type::Reference(element) => match *element { Type::Array(..) => { self.brillig_context - .allocate_array_reference_instruction(address_register); + .allocate_array_reference_instruction(address_register.address); } Type::Slice(..) => { self.brillig_context - .allocate_vector_reference_instruction(address_register); + .allocate_vector_reference_instruction(address_register.address); } _ => { - self.brillig_context - .allocate_simple_reference_instruction(address_register); + self.brillig_context.allocate_single_addr_reference_instruction( + address_register.address, + ); } }, _ => { @@ -313,10 +317,11 @@ impl<'block> BrilligBlock<'block> { } } Instruction::Store { address, value } => { - let address_register = self.convert_ssa_register_value(*address, dfg); + let address_var = self.convert_ssa_single_addr_value(*address, dfg); let source_variable = self.convert_ssa_value(*value, dfg); - self.brillig_context.store_variable_instruction(address_register, source_variable); + self.brillig_context + .store_variable_instruction(address_var.address, source_variable); } Instruction::Load { address } => { let target_variable = self.variables.define_variable( @@ -326,34 +331,34 @@ impl<'block> BrilligBlock<'block> { dfg, ); - let address_register = self.convert_ssa_register_value(*address, dfg); + let address_variable = self.convert_ssa_single_addr_value(*address, dfg); - self.brillig_context.load_variable_instruction(target_variable, address_register); + self.brillig_context + .load_variable_instruction(target_variable, address_variable.address); } Instruction::Not(value) => { - let condition_register = self.convert_ssa_register_value(*value, dfg); - let result_register = self.variables.define_register_variable( + let condition_register = self.convert_ssa_single_addr_value(*value, dfg); + let result_register = self.variables.define_single_addr_variable( self.function_context, self.brillig_context, dfg.instruction_results(instruction_id)[0], dfg, ); - let bit_size = get_bit_size_from_ssa_type(&dfg.type_of_value(*value)); - self.brillig_context.not_instruction(condition_register, bit_size, result_register); + self.brillig_context.not_instruction(condition_register, result_register); } Instruction::Call { func, arguments } => match &dfg[*func] { Value::ForeignFunction(func_name) => { let result_ids = dfg.instruction_results(instruction_id); let input_registers = vecmap(arguments, |value_id| { - self.convert_ssa_value(*value_id, dfg).to_register_or_memory() + self.convert_ssa_value(*value_id, dfg).to_value_or_array() }); let input_value_types = vecmap(arguments, |value_id| { let value_type = dfg.type_of_value(*value_id); type_to_heap_value_type(&value_type) }); let output_registers = vecmap(result_ids, |value_id| { - self.allocate_external_call_result(*value_id, dfg).to_register_or_memory() + self.allocate_external_call_result(*value_id, dfg).to_value_or_array() }); let output_value_types = vecmap(result_ids, |value_id| { let value_type = dfg.type_of_value(*value_id); @@ -431,7 +436,7 @@ impl<'block> BrilligBlock<'block> { ); } Value::Intrinsic(Intrinsic::ArrayLen) => { - let result_register = self.variables.define_register_variable( + let result_variable = self.variables.define_single_addr_variable( self.function_context, self.brillig_context, dfg.instruction_results(instruction_id)[0], @@ -443,10 +448,11 @@ impl<'block> BrilligBlock<'block> { // or an array in the case of an array. 
if let Type::Numeric(_) = dfg.type_of_value(param_id) { let len_variable = self.convert_ssa_value(arguments[0], dfg); - let len_register_index = len_variable.extract_register(); - self.brillig_context.mov_instruction(result_register, len_register_index); + let length = len_variable.extract_single_addr(); + self.brillig_context + .mov_instruction(result_variable.address, length.address); } else { - self.convert_ssa_array_len(arguments[0], result_register, dfg); + self.convert_ssa_array_len(arguments[0], result_variable.address, dfg); } } Value::Intrinsic( @@ -465,13 +471,13 @@ impl<'block> BrilligBlock<'block> { ); } Value::Intrinsic(Intrinsic::ToRadix(endianness)) => { - let source = self.convert_ssa_register_value(arguments[0], dfg); - let radix = self.convert_ssa_register_value(arguments[1], dfg); - let limb_count = self.convert_ssa_register_value(arguments[2], dfg); + let source = self.convert_ssa_single_addr_value(arguments[0], dfg); + let radix = self.convert_ssa_single_addr_value(arguments[1], dfg); + let limb_count = self.convert_ssa_single_addr_value(arguments[2], dfg); let results = dfg.instruction_results(instruction_id); - let target_len = self.variables.define_register_variable( + let target_len = self.variables.define_single_addr_variable( self.function_context, self.brillig_context, results[0], @@ -489,19 +495,19 @@ impl<'block> BrilligBlock<'block> { .extract_vector(); // Update the user-facing slice length - self.brillig_context.mov_instruction(target_len, limb_count); + self.brillig_context.mov_instruction(target_len.address, limb_count.address); self.brillig_context.radix_instruction( - source, + source.address, target_vector, - radix, - limb_count, + radix.address, + limb_count.address, matches!(endianness, Endian::Big), ); } Value::Intrinsic(Intrinsic::ToBits(endianness)) => { - let source = self.convert_ssa_register_value(arguments[0], dfg); - let limb_count = self.convert_ssa_register_value(arguments[1], dfg); + let source = self.convert_ssa_single_addr_value(arguments[0], dfg); + let limb_count = self.convert_ssa_single_addr_value(arguments[1], dfg); let results = dfg.instruction_results(instruction_id); @@ -511,7 +517,7 @@ impl<'block> BrilligBlock<'block> { results[0], dfg, ); - let target_len = target_len_variable.extract_register(); + let target_len = target_len_variable.extract_single_addr(); let target_vector = match self.variables.define_variable( self.function_context, @@ -523,7 +529,7 @@ impl<'block> BrilligBlock<'block> { self.brillig_context.array_to_vector(&array) } BrilligVariable::BrilligVector(vector) => vector, - BrilligVariable::Simple(..) => unreachable!("ICE: ToBits on non-array"), + BrilligVariable::SingleAddr(..) => unreachable!("ICE: ToBits on non-array"), }; let radix = self @@ -531,13 +537,13 @@ impl<'block> BrilligBlock<'block> { .make_constant(2_usize.into(), FieldElement::max_num_bits()); // Update the user-facing slice length - self.brillig_context.mov_instruction(target_len, limb_count); + self.brillig_context.mov_instruction(target_len.address, limb_count.address); self.brillig_context.radix_instruction( - source, + source.address, target_vector, radix, - limb_count, + limb_count.address, matches!(endianness, Endian::Big), ); @@ -549,29 +555,29 @@ impl<'block> BrilligBlock<'block> { }, Instruction::Truncate { value, bit_size, .. 
} => { let result_ids = dfg.instruction_results(instruction_id); - let destination_register = self.variables.define_register_variable( + let destination_register = self.variables.define_single_addr_variable( self.function_context, self.brillig_context, result_ids[0], dfg, ); - let source_register = self.convert_ssa_register_value(*value, dfg); + let source_register = self.convert_ssa_single_addr_value(*value, dfg); self.brillig_context.truncate_instruction( destination_register, source_register, *bit_size, ); } - Instruction::Cast(value, typ) => { + Instruction::Cast(value, _) => { let result_ids = dfg.instruction_results(instruction_id); - let destination_register = self.variables.define_register_variable( + let destination_variable = self.variables.define_single_addr_variable( self.function_context, self.brillig_context, result_ids[0], dfg, ); - let source_register = self.convert_ssa_register_value(*value, dfg); - self.convert_cast(destination_register, source_register, typ); + let source_variable = self.convert_ssa_single_addr_value(*value, dfg); + self.convert_cast(destination_variable, source_variable); } Instruction::ArrayGet { array, index } => { let result_ids = dfg.instruction_results(instruction_id); @@ -589,17 +595,17 @@ impl<'block> BrilligBlock<'block> { _ => unreachable!("ICE: array get on non-array"), }; - let index_register = self.convert_ssa_register_value(*index, dfg); - self.validate_array_index(array_variable, index_register); + let index_variable = self.convert_ssa_single_addr_value(*index, dfg); + self.validate_array_index(array_variable, index_variable); self.retrieve_variable_from_array( array_pointer, - index_register, + index_variable.address, destination_variable, ); } Instruction::ArraySet { array, index, value, .. } => { let source_variable = self.convert_ssa_value(*array, dfg); - let index_register = self.convert_ssa_register_value(*index, dfg); + let index_register = self.convert_ssa_single_addr_value(*index, dfg); let value_variable = self.convert_ssa_value(*value, dfg); let result_ids = dfg.instruction_results(instruction_id); @@ -614,15 +620,18 @@ impl<'block> BrilligBlock<'block> { self.convert_ssa_array_set( source_variable, destination_variable, - index_register, + index_register.address, value_variable, ); } Instruction::RangeCheck { value, max_bit_size, assert_message } => { - let value = self.convert_ssa_register_value(*value, dfg); + let value = self.convert_ssa_single_addr_value(*value, dfg); // Cast original value to field - let left = self.brillig_context.allocate_register(); - self.convert_cast(left, value, &Type::field()); + let left = SingleAddrVariable { + address: self.brillig_context.allocate_register(), + bit_size: FieldElement::max_num_bits(), + }; + self.convert_cast(left, value); // Create a field constant with the max let max = BigUint::from(2_u128).pow(*max_bit_size) - BigUint::from(1_u128); @@ -637,11 +646,16 @@ impl<'block> BrilligBlock<'block> { bit_size: FieldElement::max_num_bits(), }; let condition = self.brillig_context.allocate_register(); - self.brillig_context.binary_instruction(left, right, condition, brillig_binary_op); + self.brillig_context.binary_instruction( + left.address, + right, + condition, + brillig_binary_op, + ); self.brillig_context.constrain_instruction(condition, assert_message.clone()); self.brillig_context.deallocate_register(condition); - self.brillig_context.deallocate_register(left); + self.brillig_context.deallocate_register(left.address); self.brillig_context.deallocate_register(right); } 
Instruction::IncrementRc { value } => { @@ -730,7 +744,7 @@ impl<'block> BrilligBlock<'block> { fn validate_array_index( &mut self, array_variable: BrilligVariable, - index_register: MemoryAddress, + index_register: SingleAddrVariable, ) { let (size_as_register, should_deallocate_size) = match array_variable { BrilligVariable::BrilligArray(BrilligArray { size, .. }) => { @@ -743,7 +757,7 @@ impl<'block> BrilligBlock<'block> { let condition = self.brillig_context.allocate_register(); self.brillig_context.memory_op( - index_register, + index_register.address, size_as_register, condition, BinaryIntOp::LessThan, @@ -765,8 +779,12 @@ impl<'block> BrilligBlock<'block> { destination_variable: BrilligVariable, ) { match destination_variable { - BrilligVariable::Simple(destination_register) => { - self.brillig_context.array_get(array_pointer, index_register, destination_register); + BrilligVariable::SingleAddr(destination_register) => { + self.brillig_context.array_get( + array_pointer, + index_register, + destination_register.address, + ); } BrilligVariable::BrilligArray(..) | BrilligVariable::BrilligVector(..) => { let reference = self.brillig_context.allocate_register(); @@ -868,8 +886,8 @@ impl<'block> BrilligBlock<'block> { value_variable: BrilligVariable, ) { match value_variable { - BrilligVariable::Simple(value_register) => { - ctx.array_set(destination_pointer, index_register, value_register); + BrilligVariable::SingleAddr(value_variable) => { + ctx.array_set(destination_pointer, index_register, value_variable.address); } BrilligVariable::BrilligArray(_) => { let reference: MemoryAddress = ctx.allocate_register(); @@ -924,7 +942,7 @@ impl<'block> BrilligBlock<'block> { results[0], dfg, ) { - BrilligVariable::Simple(register_index) => register_index, + BrilligVariable::SingleAddr(register_index) => register_index, _ => unreachable!("ICE: first value of a slice must be a register index"), }; @@ -940,7 +958,7 @@ impl<'block> BrilligBlock<'block> { self.convert_ssa_value(*arg, dfg) }); - self.update_slice_length(target_len, arguments[0], dfg, BinaryIntOp::Add); + self.update_slice_length(target_len.address, arguments[0], dfg, BinaryIntOp::Add); self.slice_push_back_operation(target_vector, source_vector, &item_values); } @@ -951,7 +969,7 @@ impl<'block> BrilligBlock<'block> { results[0], dfg, ) { - BrilligVariable::Simple(register_index) => register_index, + BrilligVariable::SingleAddr(register_index) => register_index, _ => unreachable!("ICE: first value of a slice must be a register index"), }; @@ -966,7 +984,7 @@ impl<'block> BrilligBlock<'block> { self.convert_ssa_value(*arg, dfg) }); - self.update_slice_length(target_len, arguments[0], dfg, BinaryIntOp::Add); + self.update_slice_length(target_len.address, arguments[0], dfg, BinaryIntOp::Add); self.slice_push_front_operation(target_vector, source_vector, &item_values); } @@ -977,7 +995,7 @@ impl<'block> BrilligBlock<'block> { results[0], dfg, ) { - BrilligVariable::Simple(register_index) => register_index, + BrilligVariable::SingleAddr(register_index) => register_index, _ => unreachable!("ICE: first value of a slice must be a register index"), }; @@ -999,7 +1017,7 @@ impl<'block> BrilligBlock<'block> { ) }); - self.update_slice_length(target_len, arguments[0], dfg, BinaryIntOp::Sub); + self.update_slice_length(target_len.address, arguments[0], dfg, BinaryIntOp::Sub); self.slice_pop_back_operation(target_vector, source_vector, &pop_variables); } @@ -1010,7 +1028,7 @@ impl<'block> BrilligBlock<'block> { results[element_size], dfg, ) { - 
BrilligVariable::Simple(register_index) => register_index, + BrilligVariable::SingleAddr(register_index) => register_index, _ => unreachable!("ICE: first value of a slice must be a register index"), }; @@ -1031,7 +1049,7 @@ impl<'block> BrilligBlock<'block> { ); let target_vector = target_variable.extract_vector(); - self.update_slice_length(target_len, arguments[0], dfg, BinaryIntOp::Sub); + self.update_slice_length(target_len.address, arguments[0], dfg, BinaryIntOp::Sub); self.slice_pop_front_operation(target_vector, source_vector, &pop_variables); } @@ -1042,7 +1060,7 @@ impl<'block> BrilligBlock<'block> { results[0], dfg, ) { - BrilligVariable::Simple(register_index) => register_index, + BrilligVariable::SingleAddr(register_index) => register_index, _ => unreachable!("ICE: first value of a slice must be a register index"), }; @@ -1058,13 +1076,13 @@ impl<'block> BrilligBlock<'block> { // Remove if indexing in insert is changed to flattened indexing // https://github.com/noir-lang/noir/issues/1889#issuecomment-1668048587 - let user_index = self.convert_ssa_register_value(arguments[2], dfg); + let user_index = self.convert_ssa_single_addr_value(arguments[2], dfg); let converted_index = self.brillig_context.make_usize_constant(element_size.into()); self.brillig_context.memory_op( converted_index, - user_index, + user_index.address, converted_index, BinaryIntOp::Mul, ); @@ -1073,7 +1091,7 @@ impl<'block> BrilligBlock<'block> { self.convert_ssa_value(*arg, dfg) }); - self.update_slice_length(target_len, arguments[0], dfg, BinaryIntOp::Add); + self.update_slice_length(target_len.address, arguments[0], dfg, BinaryIntOp::Add); self.slice_insert_operation(target_vector, source_vector, converted_index, &items); self.brillig_context.deallocate_register(converted_index); @@ -1085,7 +1103,7 @@ impl<'block> BrilligBlock<'block> { results[0], dfg, ) { - BrilligVariable::Simple(register_index) => register_index, + BrilligVariable::SingleAddr(register_index) => register_index, _ => unreachable!("ICE: first value of a slice must be a register index"), }; @@ -1101,12 +1119,12 @@ impl<'block> BrilligBlock<'block> { // Remove if indexing in remove is changed to flattened indexing // https://github.com/noir-lang/noir/issues/1889#issuecomment-1668048587 - let user_index = self.convert_ssa_register_value(arguments[2], dfg); + let user_index = self.convert_ssa_single_addr_value(arguments[2], dfg); let converted_index = self.brillig_context.make_usize_constant(element_size.into()); self.brillig_context.memory_op( converted_index, - user_index, + user_index.address, converted_index, BinaryIntOp::Mul, ); @@ -1120,7 +1138,7 @@ impl<'block> BrilligBlock<'block> { ) }); - self.update_slice_length(target_len, arguments[0], dfg, BinaryIntOp::Sub); + self.update_slice_length(target_len.address, arguments[0], dfg, BinaryIntOp::Sub); self.slice_remove_operation( target_vector, @@ -1152,18 +1170,18 @@ impl<'block> BrilligBlock<'block> { binary_op: BinaryIntOp, ) { let source_len_variable = self.convert_ssa_value(source_value, dfg); - let source_len = source_len_variable.extract_register(); + let source_len = source_len_variable.extract_single_addr(); - self.brillig_context.usize_op(source_len, target_len, binary_op, 1); + self.brillig_context.usize_op(source_len.address, target_len, binary_op, 1); } /// Converts an SSA cast to a sequence of Brillig opcodes. /// Casting is only necessary when shrinking the bit size of a numeric value. 
- fn convert_cast(&mut self, destination: MemoryAddress, source: MemoryAddress, typ: &Type) { + fn convert_cast(&mut self, destination: SingleAddrVariable, source: SingleAddrVariable) { // We assume that `source` is a valid `target_type` as it's expected that a truncate instruction was emitted // to ensure this is the case. - self.brillig_context.cast_instruction(destination, source, get_bit_size_from_ssa_type(typ)); + self.brillig_context.cast_instruction(destination, source); } /// Converts the Binary instruction into a sequence of Brillig opcodes. @@ -1171,18 +1189,23 @@ impl<'block> BrilligBlock<'block> { &mut self, binary: &Binary, dfg: &DataFlowGraph, - result_register: MemoryAddress, + result_variable: SingleAddrVariable, ) { let binary_type = type_of_binary_operation(dfg[binary.lhs].get_type(), dfg[binary.rhs].get_type()); - let left = self.convert_ssa_register_value(binary.lhs, dfg); - let right = self.convert_ssa_register_value(binary.rhs, dfg); + let left = self.convert_ssa_single_addr_value(binary.lhs, dfg); + let right = self.convert_ssa_single_addr_value(binary.rhs, dfg); let brillig_binary_op = convert_ssa_binary_op_to_brillig_binary_op(binary.operator, &binary_type); - self.brillig_context.binary_instruction(left, right, result_register, brillig_binary_op); + self.brillig_context.binary_instruction( + left.address, + right.address, + result_variable.address, + brillig_binary_op, + ); } /// Converts an SSA `ValueId` into a `RegisterOrMemory`. Initializes if necessary. @@ -1204,10 +1227,10 @@ impl<'block> BrilligBlock<'block> { } else { let new_variable = self.variables.allocate_constant(self.brillig_context, value_id, dfg); - let register_index = new_variable.extract_register(); + let register_index = new_variable.extract_single_addr(); self.brillig_context.const_instruction( - register_index, + register_index.address, (*constant).into(), get_bit_size_from_ssa_type(typ), ); @@ -1273,10 +1296,10 @@ impl<'block> BrilligBlock<'block> { // value. let new_variable = self.variables.allocate_constant(self.brillig_context, value_id, dfg); - let register_index = new_variable.extract_register(); + let register_index = new_variable.extract_single_addr(); self.brillig_context.const_instruction( - register_index, + register_index.address, value_id.to_usize().into(), 32, ); @@ -1289,13 +1312,13 @@ impl<'block> BrilligBlock<'block> { } /// Converts an SSA `ValueId` into a `MemoryAddress`. Initializes if necessary. 
- fn convert_ssa_register_value( + fn convert_ssa_single_addr_value( &mut self, value_id: ValueId, dfg: &DataFlowGraph, - ) -> MemoryAddress { + ) -> SingleAddrVariable { let variable = self.convert_ssa_value(value_id, dfg); - variable.extract_register() + variable.extract_single_addr() } fn allocate_external_call_result( diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block_variables.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block_variables.rs index b4c96de1969..f463bd4de4d 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block_variables.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block_variables.rs @@ -1,10 +1,9 @@ -use acvm::brillig_vm::brillig::MemoryAddress; use fxhash::{FxHashMap as HashMap, FxHashSet as HashSet}; use crate::{ brillig::brillig_ir::{ - brillig_variable::{BrilligArray, BrilligVariable, BrilligVector}, - BrilligContext, + brillig_variable::{BrilligArray, BrilligVariable, BrilligVector, SingleAddrVariable}, + BrilligContext, BRILLIG_MEMORY_ADDRESSING_BIT_SIZE, }, ssa::ir::{ basic_block::BasicBlockId, @@ -71,15 +70,15 @@ impl BlockVariables { } /// Defines a variable that fits in a single register and returns the allocated register. - pub(crate) fn define_register_variable( + pub(crate) fn define_single_addr_variable( &mut self, function_context: &mut FunctionContext, brillig_context: &mut BrilligContext, value: ValueId, dfg: &DataFlowGraph, - ) -> MemoryAddress { + ) -> SingleAddrVariable { let variable = self.define_variable(function_context, brillig_context, value, dfg); - variable.extract_register() + variable.extract_single_addr() } /// Removes a variable so it's not used anymore within this block. @@ -190,12 +189,22 @@ pub(crate) fn allocate_value( let typ = dfg.type_of_value(value_id); match typ { - Type::Numeric(_) | Type::Reference(_) | Type::Function => { + Type::Numeric(numeric_type) => BrilligVariable::SingleAddr(SingleAddrVariable { + address: brillig_context.allocate_register(), + bit_size: numeric_type.bit_size(), + }), + Type::Reference(_) => BrilligVariable::SingleAddr(SingleAddrVariable { + address: brillig_context.allocate_register(), + bit_size: BRILLIG_MEMORY_ADDRESSING_BIT_SIZE, + }), + Type::Function => { // NB. 
function references are converted to a constant when // translating from SSA to Brillig (to allow for debugger // instrumentation to work properly) - let register = brillig_context.allocate_register(); - BrilligVariable::Simple(register) + BrilligVariable::SingleAddr(SingleAddrVariable { + address: brillig_context.allocate_register(), + bit_size: 32, + }) } Type::Array(item_typ, elem_count) => { let pointer_register = brillig_context.allocate_register(); diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs index e96a756a9ee..b5da8296ba5 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_fn.rs @@ -74,7 +74,7 @@ impl FunctionContext { fn ssa_type_to_parameter(typ: &Type) -> BrilligParameter { match typ { Type::Numeric(_) | Type::Reference(_) => { - BrilligParameter::Simple(get_bit_size_from_ssa_type(typ)) + BrilligParameter::SingleAddr(get_bit_size_from_ssa_type(typ)) } Type::Array(item_type, size) => BrilligParameter::Array( vecmap(item_type.iter(), |item_typ| { diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_slice_ops.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_slice_ops.rs index 933396be0cb..3fc0e981165 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_slice_ops.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_slice_ops.rs @@ -334,7 +334,7 @@ mod tests { use crate::brillig::brillig_gen::brillig_fn::FunctionContext; use crate::brillig::brillig_ir::artifact::BrilligParameter; use crate::brillig::brillig_ir::brillig_variable::{ - BrilligArray, BrilligVariable, BrilligVector, + BrilligArray, BrilligVariable, BrilligVector, SingleAddrVariable, }; use crate::brillig::brillig_ir::tests::{ create_and_run_vm, create_context, create_entry_point_bytecode, @@ -379,13 +379,13 @@ mod tests { ) { let arguments = vec![ BrilligParameter::Array( - vec![BrilligParameter::Simple(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE)], + vec![BrilligParameter::SingleAddr(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE)], array.len(), ), - BrilligParameter::Simple(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE), + BrilligParameter::SingleAddr(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE), ]; let returns = vec![BrilligParameter::Array( - vec![BrilligParameter::Simple(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE)], + vec![BrilligParameter::SingleAddr(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE)], array.len() + 1, )]; @@ -397,7 +397,10 @@ mod tests { size: array.len(), rc: context.allocate_register(), }; - let item_to_insert = context.allocate_register(); + let item_to_insert = SingleAddrVariable { + address: context.allocate_register(), + bit_size: BRILLIG_MEMORY_ADDRESSING_BIT_SIZE, + }; // Cast the source array to a vector let source_vector = context.array_to_vector(&array_variable); @@ -415,13 +418,13 @@ mod tests { block.slice_push_back_operation( target_vector, source_vector, - &[BrilligVariable::Simple(item_to_insert)], + &[BrilligVariable::SingleAddr(item_to_insert)], ); } else { block.slice_push_front_operation( target_vector, source_vector, - &[BrilligVariable::Simple(item_to_insert)], + &[BrilligVariable::SingleAddr(item_to_insert)], ); } @@ -472,15 +475,15 @@ mod tests { expected_return_item: Value, ) { let arguments = vec![BrilligParameter::Array( - vec![BrilligParameter::Simple(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE)], + vec![BrilligParameter::SingleAddr(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE)], array.len(), )]; let returns = 
vec![ BrilligParameter::Array( - vec![BrilligParameter::Simple(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE)], + vec![BrilligParameter::SingleAddr(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE)], array.len() - 1, ), - BrilligParameter::Simple(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE), + BrilligParameter::SingleAddr(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE), ]; let (_, mut function_context, mut context) = create_test_environment(); @@ -501,7 +504,10 @@ mod tests { size: context.allocate_register(), rc: context.allocate_register(), }; - let removed_item = context.allocate_register(); + let removed_item = SingleAddrVariable { + address: context.allocate_register(), + bit_size: BRILLIG_MEMORY_ADDRESSING_BIT_SIZE, + }; let mut block = create_brillig_block(&mut function_context, &mut context); @@ -509,17 +515,21 @@ mod tests { block.slice_pop_back_operation( target_vector, source_vector, - &[BrilligVariable::Simple(removed_item)], + &[BrilligVariable::SingleAddr(removed_item)], ); } else { block.slice_pop_front_operation( target_vector, source_vector, - &[BrilligVariable::Simple(removed_item)], + &[BrilligVariable::SingleAddr(removed_item)], ); } - context.return_instruction(&[target_vector.pointer, target_vector.rc, removed_item]); + context.return_instruction(&[ + target_vector.pointer, + target_vector.rc, + removed_item.address, + ]); let bytecode = create_entry_point_bytecode(context, arguments, returns).byte_code; let expected_return: Vec<_> = @@ -559,14 +569,14 @@ mod tests { ) { let arguments = vec![ BrilligParameter::Array( - vec![BrilligParameter::Simple(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE)], + vec![BrilligParameter::SingleAddr(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE)], array.len(), ), - BrilligParameter::Simple(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE), - BrilligParameter::Simple(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE), + BrilligParameter::SingleAddr(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE), + BrilligParameter::SingleAddr(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE), ]; let returns = vec![BrilligParameter::Array( - vec![BrilligParameter::Simple(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE)], + vec![BrilligParameter::SingleAddr(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE)], array.len() + 1, )]; @@ -578,7 +588,10 @@ mod tests { size: array.len(), rc: context.allocate_register(), }; - let item_to_insert = context.allocate_register(); + let item_to_insert = SingleAddrVariable { + address: context.allocate_register(), + bit_size: BRILLIG_MEMORY_ADDRESSING_BIT_SIZE, + }; let index_to_insert = context.allocate_register(); // Cast the source array to a vector @@ -597,7 +610,7 @@ mod tests { target_vector, source_vector, index_to_insert, - &[BrilligVariable::Simple(item_to_insert)], + &[BrilligVariable::SingleAddr(item_to_insert)], ); context.return_instruction(&[target_vector.pointer, target_vector.rc]); @@ -676,17 +689,17 @@ mod tests { ) { let arguments = vec![ BrilligParameter::Array( - vec![BrilligParameter::Simple(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE)], + vec![BrilligParameter::SingleAddr(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE)], array.len(), ), - BrilligParameter::Simple(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE), + BrilligParameter::SingleAddr(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE), ]; let returns = vec![ BrilligParameter::Array( - vec![BrilligParameter::Simple(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE)], + vec![BrilligParameter::SingleAddr(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE)], array.len() - 1, ), - BrilligParameter::Simple(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE), + BrilligParameter::SingleAddr(BRILLIG_MEMORY_ADDRESSING_BIT_SIZE), ]; let (_, mut function_context, mut context) = 
create_test_environment(); @@ -708,7 +721,10 @@ mod tests { size: context.allocate_register(), rc: context.allocate_register(), }; - let removed_item = context.allocate_register(); + let removed_item = SingleAddrVariable { + address: context.allocate_register(), + bit_size: BRILLIG_MEMORY_ADDRESSING_BIT_SIZE, + }; let mut block = create_brillig_block(&mut function_context, &mut context); @@ -716,10 +732,14 @@ mod tests { target_vector, source_vector, index_to_insert, - &[BrilligVariable::Simple(removed_item)], + &[BrilligVariable::SingleAddr(removed_item)], ); - context.return_instruction(&[target_vector.pointer, target_vector.size, removed_item]); + context.return_instruction(&[ + target_vector.pointer, + target_vector.size, + removed_item.address, + ]); let calldata: Vec<_> = array.into_iter().chain(vec![index]).collect(); diff --git a/compiler/noirc_evaluator/src/brillig/brillig_ir.rs b/compiler/noirc_evaluator/src/brillig/brillig_ir.rs index 8bbde88c89e..073b0e6f59f 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_ir.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_ir.rs @@ -15,7 +15,7 @@ use crate::ssa::ir::dfg::CallStack; use self::{ artifact::{BrilligArtifact, UnresolvedJumpLocation}, - brillig_variable::{BrilligArray, BrilligVariable, BrilligVector}, + brillig_variable::{BrilligArray, BrilligVariable, BrilligVector, SingleAddrVariable}, registers::BrilligRegistersContext, }; use acvm::{ @@ -27,6 +27,7 @@ use acvm::{ FieldElement, }; use debug_show::DebugShow; +use num_bigint::BigUint; /// Integer arithmetic in Brillig is limited to 127 bit /// integers. @@ -189,7 +190,7 @@ impl BrilligContext { self.deallocate_register(size_register); } - pub(crate) fn allocate_simple_reference_instruction( + pub(crate) fn allocate_single_addr_reference_instruction( &mut self, pointer_register: MemoryAddress, ) { @@ -295,18 +296,21 @@ impl BrilligContext { // Loop body // Check if iterator < iteration_count - let iterator_less_than_iterations = self.allocate_register(); + let iterator_less_than_iterations = + SingleAddrVariable { address: self.allocate_register(), bit_size: 1 }; + self.memory_op( iterator_register, iteration_count, - iterator_less_than_iterations, + iterator_less_than_iterations.address, BinaryIntOp::LessThan, ); let (exit_loop_section, exit_loop_label) = self.reserve_next_section_label(); - self.not_instruction(iterator_less_than_iterations, 1, iterator_less_than_iterations); - self.jump_if_instruction(iterator_less_than_iterations, exit_loop_label); + self.not_instruction(iterator_less_than_iterations, iterator_less_than_iterations); + + self.jump_if_instruction(iterator_less_than_iterations.address, exit_loop_label); // Call the on iteration function on_iteration(self, iterator_register); @@ -320,7 +324,7 @@ impl BrilligContext { self.enter_section(exit_loop_section); // Deallocate our temporary registers - self.deallocate_register(iterator_less_than_iterations); + self.deallocate_register(iterator_less_than_iterations.address); self.deallocate_register(iterator_register); } @@ -507,12 +511,15 @@ impl BrilligContext { /// Cast truncates the value to the given bit size and converts the type of the value in memory to that bit size. 
pub(crate) fn cast_instruction( &mut self, - destination: MemoryAddress, - source: MemoryAddress, - bit_size: u32, + destination: SingleAddrVariable, + source: SingleAddrVariable, ) { - self.debug_show.cast_instruction(destination, source, bit_size); - self.push_opcode(BrilligOpcode::Cast { destination, source, bit_size }); + self.debug_show.cast_instruction(destination.address, source.address, destination.bit_size); + self.push_opcode(BrilligOpcode::Cast { + destination: destination.address, + source: source.address, + bit_size: destination.bit_size, + }); } /// Processes a binary instruction according `operation`. @@ -564,21 +571,20 @@ impl BrilligContext { /// in Brillig. pub(crate) fn not_instruction( &mut self, - input: MemoryAddress, - bit_size: u32, - result: MemoryAddress, + input: SingleAddrVariable, + result: SingleAddrVariable, ) { - self.debug_show.not_instruction(input, bit_size, result); + self.debug_show.not_instruction(input.address, input.bit_size, result.address); // Compile !x as ((-1) - x) - let u_max = FieldElement::from(2_i128).pow(&FieldElement::from(bit_size as i128)) + let u_max = FieldElement::from(2_i128).pow(&FieldElement::from(input.bit_size as i128)) - FieldElement::one(); - let max = self.make_constant(Value::from(u_max), bit_size); + let max = self.make_constant(Value::from(u_max), input.bit_size); let opcode = BrilligOpcode::BinaryIntOp { - destination: result, + destination: result.address, op: BinaryIntOp::Sub, - bit_size, + bit_size: input.bit_size, lhs: max, - rhs: input, + rhs: input.address, }; self.push_opcode(opcode); self.deallocate_register(max); @@ -626,8 +632,8 @@ impl BrilligContext { variable_pointer: MemoryAddress, ) { match destination { - BrilligVariable::Simple(register_index) => { - self.load_instruction(register_index, variable_pointer); + BrilligVariable::SingleAddr(single_addr) => { + self.load_instruction(single_addr.address, variable_pointer); } BrilligVariable::BrilligArray(BrilligArray { pointer, size: _, rc }) => { self.load_instruction(pointer, variable_pointer); @@ -676,8 +682,8 @@ impl BrilligContext { source: BrilligVariable, ) { match source { - BrilligVariable::Simple(register_index) => { - self.store_instruction(variable_pointer, register_index); + BrilligVariable::SingleAddr(single_addr) => { + self.store_instruction(variable_pointer, single_addr.address); } BrilligVariable::BrilligArray(BrilligArray { pointer, size: _, rc }) => { self.store_instruction(variable_pointer, pointer); @@ -717,31 +723,36 @@ impl BrilligContext { /// For Brillig, all integer operations will overflow as its cheap. 
pub(crate) fn truncate_instruction( &mut self, - destination_of_truncated_value: MemoryAddress, - value_to_truncate: MemoryAddress, + destination_of_truncated_value: SingleAddrVariable, + value_to_truncate: SingleAddrVariable, bit_size: u32, ) { self.debug_show.truncate_instruction( - destination_of_truncated_value, - value_to_truncate, + destination_of_truncated_value.address, + value_to_truncate.address, bit_size, ); assert!( - bit_size <= BRILLIG_INTEGER_ARITHMETIC_BIT_SIZE, - "tried to truncate to a bit size greater than allowed {bit_size}" + bit_size <= value_to_truncate.bit_size, + "tried to truncate to a bit size {} greater than the variable size {}", + bit_size, + value_to_truncate.bit_size + ); + + let mask = BigUint::from(2_u32).pow(bit_size) - BigUint::from(1_u32); + let mask_constant = self.make_constant( + FieldElement::from_be_bytes_reduce(&mask.to_bytes_be()).into(), + value_to_truncate.bit_size, ); - // The brillig VM performs all arithmetic operations modulo 2**bit_size - // So to truncate any value to a target bit size we can just issue a no-op arithmetic operation - // With bit size equal to target_bit_size - let zero_register = self.make_constant(Value::from(FieldElement::zero()), bit_size); self.binary_instruction( - value_to_truncate, - zero_register, - destination_of_truncated_value, - BrilligBinaryOp::Integer { op: BinaryIntOp::Add, bit_size }, + value_to_truncate.address, + mask_constant, + destination_of_truncated_value.address, + BrilligBinaryOp::Integer { op: BinaryIntOp::And, bit_size: value_to_truncate.bit_size }, ); - self.deallocate_register(zero_register); + + self.deallocate_register(mask_constant); } /// Emits a stop instruction diff --git a/compiler/noirc_evaluator/src/brillig/brillig_ir/artifact.rs b/compiler/noirc_evaluator/src/brillig/brillig_ir/artifact.rs index 4ef8c9d1dfc..d10dcf13d9f 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_ir/artifact.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_ir/artifact.rs @@ -6,8 +6,8 @@ use crate::ssa::ir::dfg::CallStack; /// Represents a parameter or a return value of a function. #[derive(Debug, Clone)] pub(crate) enum BrilligParameter { - /// A simple parameter or return value. Holds the bit size of the parameter. - Simple(u32), + /// A single address parameter or return value. Holds the bit size of the parameter. + SingleAddr(u32), /// An array parameter or return value. Holds the type of an array item and its size. Array(Vec, usize), /// A slice parameter or return value. Holds the type of a slice item. 
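The two bit tricks in the hunks above are easy to lose in the diff noise: `not_instruction` compiles `!x` as `(2^bit_size - 1) - x`, and `truncate_instruction` now masks with `2^bit_size - 1` via a `BinaryIntOp::And` at the source variable's bit size, instead of relying on the old add-zero no-op at the target bit size. A minimal sketch of both identities on plain `u128` values (illustrative helper names, not compiler code):

```rust
/// x mod 2^bit_size == x & (2^bit_size - 1): the mask-based truncation
/// now emitted by truncate_instruction.
fn truncate(value: u128, bit_size: u32) -> u128 {
    assert!(bit_size < 128, "mask must fit in u128 for this sketch");
    value & ((1u128 << bit_size) - 1)
}

/// !x within bit_size bits == (2^bit_size - 1) - x: the subtraction
/// emitted by not_instruction.
fn not(value: u128, bit_size: u32) -> u128 {
    assert!(bit_size < 128, "max value must fit in u128 for this sketch");
    ((1u128 << bit_size) - 1) - value
}

fn main() {
    // Truncating 0x12A to 8 bits keeps only the low byte.
    assert_eq!(truncate(0x12A, 8), 0x2A);
    // NOT of 0b0000_1111 within 8 bits flips to 0b1111_0000.
    assert_eq!(not(0b0000_1111, 8), 0b1111_0000);
}
```

Masking pairs naturally with the stricter assertion in the new code: the `And` runs at `value_to_truncate.bit_size`, so truncating to a smaller target stays correct for any variable, whereas the old no-op addition depended on the VM's modular arithmetic at the target size and on the global 127-bit arithmetic limit.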
diff --git a/compiler/noirc_evaluator/src/brillig/brillig_ir/brillig_variable.rs b/compiler/noirc_evaluator/src/brillig/brillig_ir/brillig_variable.rs index 856fb709fa9..48ad3c5bae4 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_ir/brillig_variable.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_ir/brillig_variable.rs @@ -5,6 +5,12 @@ use serde::{Deserialize, Serialize}; use crate::ssa::ir::types::Type; +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Copy)] +pub(crate) struct SingleAddrVariable { + pub(crate) address: MemoryAddress, + pub(crate) bit_size: u32, +} + /// The representation of a noir array in the Brillig IR #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Copy)] pub(crate) struct BrilligArray { @@ -52,15 +58,15 @@ impl BrilligVector { /// The representation of a noir value in the Brillig IR #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Copy)] pub(crate) enum BrilligVariable { - Simple(MemoryAddress), + SingleAddr(SingleAddrVariable), BrilligArray(BrilligArray), BrilligVector(BrilligVector), } impl BrilligVariable { - pub(crate) fn extract_register(self) -> MemoryAddress { + pub(crate) fn extract_single_addr(self) -> SingleAddrVariable { match self { - BrilligVariable::Simple(register_index) => register_index, + BrilligVariable::SingleAddr(single_addr) => single_addr, _ => unreachable!("ICE: Expected register, got {self:?}"), } } @@ -81,15 +87,17 @@ impl BrilligVariable { pub(crate) fn extract_registers(self) -> Vec { match self { - BrilligVariable::Simple(register_index) => vec![register_index], + BrilligVariable::SingleAddr(single_addr) => vec![single_addr.address], BrilligVariable::BrilligArray(array) => array.extract_registers(), BrilligVariable::BrilligVector(vector) => vector.extract_registers(), } } - pub(crate) fn to_register_or_memory(self) -> ValueOrArray { + pub(crate) fn to_value_or_array(self) -> ValueOrArray { match self { - BrilligVariable::Simple(register_index) => ValueOrArray::MemoryAddress(register_index), + BrilligVariable::SingleAddr(single_addr) => { + ValueOrArray::MemoryAddress(single_addr.address) + } BrilligVariable::BrilligArray(array) => ValueOrArray::HeapArray(array.to_heap_array()), BrilligVariable::BrilligVector(vector) => { ValueOrArray::HeapVector(vector.to_heap_vector()) diff --git a/compiler/noirc_evaluator/src/brillig/brillig_ir/entry_point.rs b/compiler/noirc_evaluator/src/brillig/brillig_ir/entry_point.rs index 0eb4c8c31bd..9d186f9bc60 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_ir/entry_point.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_ir/entry_point.rs @@ -1,6 +1,6 @@ use super::{ artifact::{BrilligArtifact, BrilligParameter}, - brillig_variable::{BrilligArray, BrilligVariable}, + brillig_variable::{BrilligArray, BrilligVariable, SingleAddrVariable}, debug_show::DebugShow, registers::BrilligRegistersContext, BrilligContext, ReservedRegisters, BRILLIG_MEMORY_ADDRESSING_BIT_SIZE, @@ -63,10 +63,13 @@ impl BrilligContext { let mut argument_variables: Vec<_> = arguments .iter() .map(|argument| match argument { - BrilligParameter::Simple(_) => { - let simple_address = self.allocate_register(); - let var = BrilligVariable::Simple(simple_address); - self.mov_instruction(simple_address, MemoryAddress(current_calldata_pointer)); + BrilligParameter::SingleAddr(bit_size) => { + let single_address = self.allocate_register(); + let var = BrilligVariable::SingleAddr(SingleAddrVariable { + address: single_address, + bit_size: *bit_size, + }); + 
self.mov_instruction(single_address, MemoryAddress(current_calldata_pointer)); current_calldata_pointer += 1; var } @@ -116,7 +119,7 @@ impl BrilligContext { fn flat_bit_sizes(param: &BrilligParameter) -> Box + '_> { match param { - BrilligParameter::Simple(bit_size) => Box::new(std::iter::once(*bit_size)), + BrilligParameter::SingleAddr(bit_size) => Box::new(std::iter::once(*bit_size)), BrilligParameter::Array(item_types, item_count) => Box::new( (0..*item_count).flat_map(move |_| item_types.iter().flat_map(flat_bit_sizes)), ), @@ -139,7 +142,7 @@ impl BrilligContext { /// Computes the size of a parameter if it was flattened fn flattened_size(param: &BrilligParameter) -> usize { match param { - BrilligParameter::Simple(_) => 1, + BrilligParameter::SingleAddr(_) => 1, BrilligParameter::Array(item_types, item_count) => { let item_size: usize = item_types.iter().map(BrilligContext::flattened_size).sum(); item_count * item_size @@ -157,7 +160,7 @@ impl BrilligContext { /// Computes the size of a parameter if it was flattened fn has_nested_arrays(tuple: &[BrilligParameter]) -> bool { - tuple.iter().any(|param| !matches!(param, BrilligParameter::Simple(_))) + tuple.iter().any(|param| !matches!(param, BrilligParameter::SingleAddr(_))) } /// Deflatten an array by recursively allocating nested arrays and copying the plain values. @@ -194,7 +197,7 @@ impl BrilligContext { self.make_usize_constant((target_item_base_index + subitem_index).into()); match subitem { - BrilligParameter::Simple(_) => { + BrilligParameter::SingleAddr(_) => { self.array_get( flattened_array_pointer, source_index, @@ -279,7 +282,12 @@ impl BrilligContext { let returned_variables: Vec<_> = return_parameters .iter() .map(|return_parameter| match return_parameter { - BrilligParameter::Simple(_) => BrilligVariable::Simple(self.allocate_register()), + BrilligParameter::SingleAddr(bit_size) => { + BrilligVariable::SingleAddr(SingleAddrVariable { + address: self.allocate_register(), + bit_size: *bit_size, + }) + } BrilligParameter::Array(item_types, item_count) => { BrilligVariable::BrilligArray(BrilligArray { pointer: self.allocate_register(), @@ -301,10 +309,10 @@ impl BrilligContext { for (return_param, returned_variable) in return_parameters.iter().zip(&returned_variables) { match return_param { - BrilligParameter::Simple(_) => { + BrilligParameter::SingleAddr(_) => { self.mov_instruction( MemoryAddress(return_data_index), - returned_variable.extract_register(), + returned_variable.extract_single_addr().address, ); return_data_index += 1; } @@ -359,7 +367,7 @@ impl BrilligContext { self.make_usize_constant((target_item_base_index + target_offset).into()); match subitem { - BrilligParameter::Simple(_) => { + BrilligParameter::SingleAddr(_) => { self.array_get( deflattened_array_pointer, source_index, @@ -468,12 +476,12 @@ mod tests { ]; let arguments = vec![BrilligParameter::Array( vec![ - BrilligParameter::Array(vec![BrilligParameter::Simple(8)], 2), - BrilligParameter::Simple(8), + BrilligParameter::Array(vec![BrilligParameter::SingleAddr(8)], 2), + BrilligParameter::SingleAddr(8), ], 2, )]; - let returns = vec![BrilligParameter::Simple(8)]; + let returns = vec![BrilligParameter::SingleAddr(8)]; let mut context = create_context(); @@ -506,8 +514,8 @@ mod tests { ]; let array_param = BrilligParameter::Array( vec![ - BrilligParameter::Array(vec![BrilligParameter::Simple(8)], 2), - BrilligParameter::Simple(8), + BrilligParameter::Array(vec![BrilligParameter::SingleAddr(8)], 2), + BrilligParameter::SingleAddr(8), ], 2, ); From 
ba2c541ec45de92bba98de34771b73cbb7865c93 Mon Sep 17 00:00:00 2001 From: Maxim Vezenov Date: Wed, 14 Feb 2024 18:09:02 +0000 Subject: [PATCH 04/45] fix(acir): Use types on dynamic arrays (#4364) # Description ## Problem\* Resolves #4356 Supersedes https://github.com/noir-lang/noir/pull/4360 ## Summary\* An ACIR dynamic array is a pointer to flat memory. We have been treating this flat memory as a list of fields; however, this breaks if we do in fact need accurate numeric type information, such as when working with black box function inputs. For example, for hash inputs we set up the byte array based upon the bit size. This needs to be the correct bit size or else we will get a lot of extra garbage when calling `fetch_nearest_bytes` on a `FieldElement`. This PR attaches a `Vec<NumericType>` to the `AcirDynamicArray` structure. This then gives us the expected output for `sha`. We probably could restrict the `AcirDynamicArray` to be created only through a constructor where we check that the `value_types` length matches the supplied `len`. I left it for a follow-up, as this is a quick fix, but I can do it as part of this PR. ## Additional Context ## Documentation\* Check one: - [ ] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [ ] I have tested the changes locally. - [ ] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. --------- Co-authored-by: TomAFrench --- .../src/ssa/acir_gen/acir_ir/acir_variable.rs | 17 +++- .../noirc_evaluator/src/ssa/acir_gen/mod.rs | 87 ++++++++++++++--- .../array_dynamic_blackbox_input/Nargo.toml | 7 ++ .../array_dynamic_blackbox_input/Prover.toml | 4 + .../array_dynamic_blackbox_input/src/main.nr | 27 ++++++ .../Nargo.toml | 7 ++ .../Prover.toml | 23 +++++ .../src/main.nr | 20 +++++ 8 files changed, 177 insertions(+), 15 deletions(-) create mode 100644 test_programs/execution_success/array_dynamic_blackbox_input/Nargo.toml create mode 100644 test_programs/execution_success/array_dynamic_blackbox_input/Prover.toml create mode 100644 test_programs/execution_success/array_dynamic_blackbox_input/src/main.nr create mode 100644 test_programs/execution_success/array_dynamic_nested_blackbox_input/Nargo.toml create mode 100644 test_programs/execution_success/array_dynamic_nested_blackbox_input/Prover.toml create mode 100644 test_programs/execution_success/array_dynamic_nested_blackbox_input/src/main.nr diff --git a/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs b/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs index 2360d053887..fb11bae556c 100644 --- a/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs +++ b/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs @@ -67,6 +67,13 @@ impl AcirType { pub(crate) fn unsigned(bit_size: u32) -> Self { AcirType::NumericType(NumericType::Unsigned { bit_size }) } + + pub(crate) fn to_numeric_type(&self) -> NumericType { + match self { + AcirType::NumericType(numeric_type) => *numeric_type, + AcirType::Array(_, _) => unreachable!("cannot fetch a numeric type for an array type"), + } + } } impl From<SsaType> for AcirType { @@ -88,6 +95,12 @@ impl<'a> From<&'a SsaType> for AcirType { } } +impl From<NumericType> for AcirType { + fn from(value: NumericType) -> Self { + AcirType::NumericType(value) + } +} + #[derive(Debug, Default)] /// Context object which holds the relationship between /// `Variables`(AcirVar) and types such as

`Expression` and `Witness` @@ -1415,13 +1428,13 @@ impl AcirContext { } Ok(values) } - AcirValue::DynamicArray(AcirDynamicArray { block_id, len, .. }) => { + AcirValue::DynamicArray(AcirDynamicArray { block_id, len, value_types, .. }) => { try_vecmap(0..len, |i| { let index_var = self.add_constant(i); Ok::<(AcirVar, AcirType), InternalError>(( self.read_from_memory(block_id, &index_var)?, - AcirType::field(), + value_types[i].into(), )) }) } diff --git a/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs b/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs index 46be6efcadd..8d4d0668534 100644 --- a/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs +++ b/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs @@ -99,6 +99,18 @@ pub(crate) struct AcirDynamicArray { block_id: BlockId, /// Length of the array len: usize, + /// An ACIR dynamic array is a flat structure, so we use + /// the inner structure of an `AcirType::NumericType` directly. + /// Some usages of ACIR arrays (e.g. black box functions) require the bit size + /// of every value to be known, thus we store the types as part of the dynamic + /// array definition. + /// + /// A dynamic non-homogenous array can potentially have values of differing types. + /// Thus, we store a vector of types rather than a single type, as a dynamic non-homogenous array + /// is still represented in ACIR by a single `AcirDynamicArray` structure. + /// + /// The length of the value types vector must match the `len` field in this structure. + value_types: Vec, /// Identification for the ACIR dynamic array /// inner element type sizes array element_type_sizes: Option, @@ -150,6 +162,16 @@ impl AcirValue { AcirValue::DynamicArray(_) => unimplemented!("Cannot flatten a dynamic array"), } } + + fn flat_numeric_types(self) -> Vec { + match self { + AcirValue::Array(_) => { + self.flatten().into_iter().map(|(_, typ)| typ.to_numeric_type()).collect() + } + AcirValue::DynamicArray(AcirDynamicArray { value_types, .. }) => value_types, + _ => unreachable!("An AcirValue::Var cannot be used as an array value"), + } + } } impl Ssa { @@ -1007,9 +1029,15 @@ impl Context { } else { None }; + + let value_types = self.convert_value(array_id, dfg).flat_numeric_types(); + // Compiler sanity check + assert_eq!(value_types.len(), array_len, "ICE: The length of the flattened type array should match the length of the dynamic array"); + let result_value = AcirValue::DynamicArray(AcirDynamicArray { block_id: result_block_id, len: array_len, + value_types, element_type_sizes, }); self.define_result(dfg, instruction, result_value); @@ -1093,7 +1121,7 @@ impl Context { &mut self, array_typ: &Type, array_id: ValueId, - array_acir_value: Option, + supplied_acir_value: Option<&AcirValue>, dfg: &DataFlowGraph, ) -> Result { let element_type_sizes = self.internal_block_id(&array_id); @@ -1119,26 +1147,23 @@ impl Context { Value::Instruction { .. } | Value::Param { .. } => { // An instruction representing the slice means it has been processed previously during ACIR gen. // Use the previously defined result of an array operation to fetch the internal type information. - let array_acir_value = if let Some(array_acir_value) = array_acir_value { - array_acir_value - } else { - self.convert_value(array_id, dfg) - }; + let array_acir_value = &self.convert_value(array_id, dfg); + let array_acir_value = supplied_acir_value.unwrap_or(array_acir_value); match array_acir_value { AcirValue::DynamicArray(AcirDynamicArray { element_type_sizes: inner_elem_type_sizes, .. 
}) => { if let Some(inner_elem_type_sizes) = inner_elem_type_sizes { - if self.initialized_arrays.contains(&inner_elem_type_sizes) { - let type_sizes_array_len = self.internal_mem_block_lengths.get(&inner_elem_type_sizes).copied().ok_or_else(|| + if self.initialized_arrays.contains(inner_elem_type_sizes) { + let type_sizes_array_len = *self.internal_mem_block_lengths.get(inner_elem_type_sizes).ok_or_else(|| InternalError::General { message: format!("Array {array_id}'s inner element type sizes array does not have a tracked length"), call_stack: self.acir_context.get_call_stack(), } )?; self.copy_dynamic_array( - inner_elem_type_sizes, + *inner_elem_type_sizes, element_type_sizes, type_sizes_array_len, )?; @@ -1683,15 +1708,24 @@ impl Context { Some(self.init_element_type_sizes_array( &slice_typ, slice_contents, - Some(new_slice_val), + Some(&new_slice_val), dfg, )?) } else { None }; + + let value_types = new_slice_val.flat_numeric_types(); + assert_eq!( + value_types.len(), + new_elem_size, + "ICE: Value types array must match new slice size" + ); + let result = AcirValue::DynamicArray(AcirDynamicArray { block_id: result_block_id, len: new_elem_size, + value_types, element_type_sizes, }); Ok(vec![AcirValue::Var(new_slice_length, AcirType::field()), result]) @@ -1738,15 +1772,24 @@ impl Context { Some(self.init_element_type_sizes_array( &slice_typ, slice_contents, - Some(new_slice_val), + Some(&new_slice_val), dfg, )?) } else { None }; + + let value_types = new_slice_val.flat_numeric_types(); + assert_eq!( + value_types.len(), + new_slice_size, + "ICE: Value types array must match new slice size" + ); + let result = AcirValue::DynamicArray(AcirDynamicArray { block_id: result_block_id, len: new_slice_size, + value_types, element_type_sizes, }); @@ -1943,15 +1986,24 @@ impl Context { Some(self.init_element_type_sizes_array( &slice_typ, slice_contents, - Some(slice), + Some(&slice), dfg, )?) } else { None }; + + let value_types = slice.flat_numeric_types(); + assert_eq!( + value_types.len(), + slice_size, + "ICE: Value types array must match new slice size" + ); + let result = AcirValue::DynamicArray(AcirDynamicArray { block_id: result_block_id, len: slice_size, + value_types, element_type_sizes, }); @@ -2059,15 +2111,24 @@ impl Context { Some(self.init_element_type_sizes_array( &slice_typ, slice_contents, - Some(new_slice_val), + Some(&new_slice_val), dfg, )?) 
} else { None }; + + let value_types = new_slice_val.flat_numeric_types(); + assert_eq!( + value_types.len(), + slice_size, + "ICE: Value types array must match new slice size" + ); + let result = AcirValue::DynamicArray(AcirDynamicArray { block_id: result_block_id, len: slice_size, + value_types, element_type_sizes, }); diff --git a/test_programs/execution_success/array_dynamic_blackbox_input/Nargo.toml b/test_programs/execution_success/array_dynamic_blackbox_input/Nargo.toml new file mode 100644 index 00000000000..03da304acc3 --- /dev/null +++ b/test_programs/execution_success/array_dynamic_blackbox_input/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "array_dynamic_blackbox_input" +type = "bin" +authors = [""] +compiler_version = ">=0.24.0" + +[dependencies] \ No newline at end of file diff --git a/test_programs/execution_success/array_dynamic_blackbox_input/Prover.toml b/test_programs/execution_success/array_dynamic_blackbox_input/Prover.toml new file mode 100644 index 00000000000..cc60eb8a8ba --- /dev/null +++ b/test_programs/execution_success/array_dynamic_blackbox_input/Prover.toml @@ -0,0 +1,4 @@ +index = "1" +leaf = ["51", "109", "224", "175", "60", "42", "79", "222", "117", "255", "174", "79", "126", "242", "74", "34", "100", "35", "20", "200", "109", "89", "191", "219", "41", "10", "118", "217", "165", "224", "215", "109"] +path = ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13", "14", "15", "16", "17", "18", "19", "20", "21", "22", "23", "24", "25", "26", "27", "28", "29", "30", "31", "32", "33", "34", "35", "36", "37", "38", "39", "40", "41", "42", "43", "44", "45", "46", "47", "48", "49", "50", "51", "52", "53", "54", "55", "56", "57", "58", "59", "60", "61", "62", "63"] +root = [79, 230, 126, 184, 98, 125, 226, 58, 117, 45, 140, 15, 72, 118, 89, 173, 117, 161, 166, 0, 214, 125, 13, 16, 113, 81, 173, 156, 97, 15, 57, 216] diff --git a/test_programs/execution_success/array_dynamic_blackbox_input/src/main.nr b/test_programs/execution_success/array_dynamic_blackbox_input/src/main.nr new file mode 100644 index 00000000000..aabf7fc9d5c --- /dev/null +++ b/test_programs/execution_success/array_dynamic_blackbox_input/src/main.nr @@ -0,0 +1,27 @@ +fn main(leaf: [u8; 32], path: [u8; 64], index: u32, root: [u8; 32]) { + compute_root(leaf, path, index, root); +} + +fn compute_root(leaf: [u8; 32], path: [u8; 64], _index: u32, root: [u8; 32]) { + let mut current = leaf; + let mut index = _index; + + for i in 0..2 { + let mut hash_input = [0; 64]; + let offset = i * 32; + let is_right = (index & 1) != 0; + let a = if is_right { 32 } else { 0 }; + let b = if is_right { 0 } else { 32 }; + + for j in 0..32 { + hash_input[j + a] = current[j]; + hash_input[j + b] = path[offset + j]; + } + + current = dep::std::hash::sha256(hash_input); + index = index >> 1; + } + + // Regression for issue #4258 + assert(root == current); +} \ No newline at end of file diff --git a/test_programs/execution_success/array_dynamic_nested_blackbox_input/Nargo.toml b/test_programs/execution_success/array_dynamic_nested_blackbox_input/Nargo.toml new file mode 100644 index 00000000000..07d867d433f --- /dev/null +++ b/test_programs/execution_success/array_dynamic_nested_blackbox_input/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "array_dynamic_nested_blackbox_input" +type = "bin" +authors = [""] +compiler_version = ">=0.24.0" + +[dependencies] \ No newline at end of file diff --git a/test_programs/execution_success/array_dynamic_nested_blackbox_input/Prover.toml 
b/test_programs/execution_success/array_dynamic_nested_blackbox_input/Prover.toml new file mode 100644 index 00000000000..1f291532414 --- /dev/null +++ b/test_programs/execution_success/array_dynamic_nested_blackbox_input/Prover.toml @@ -0,0 +1,23 @@ +y = "3" +hash_result = [50, 53, 90, 252, 105, 236, 223, 30, 135, 229, 193, 172, 51, 139, 8, 32, 188, 104, 151, 115, 129, 168, 27, 71, 203, 47, 40, 228, 89, 177, 129, 100] + +[[x]] +a = "1" +b = ["2", "3", "20"] + +[x.bar] +inner = ["100", "101", "102"] + +[[x]] +a = "4" # idx = 3, flattened start idx = 7 +b = ["5", "6", "21"] # idx = 4, flattened start idx = 8 + +[x.bar] +inner = ["103", "104", "105"] # idx = 5, flattened start idx = 11 + +[[x]] +a = "7" +b = ["8", "9", "22"] + +[x.bar] +inner = ["106", "107", "108"] \ No newline at end of file diff --git a/test_programs/execution_success/array_dynamic_nested_blackbox_input/src/main.nr b/test_programs/execution_success/array_dynamic_nested_blackbox_input/src/main.nr new file mode 100644 index 00000000000..8faaf69dfc8 --- /dev/null +++ b/test_programs/execution_success/array_dynamic_nested_blackbox_input/src/main.nr @@ -0,0 +1,20 @@ +struct Bar { + inner: [u8; 3], +} + +struct Foo { + a: Field, + b: [Field; 3], + bar: Bar, +} + +fn main(mut x: [Foo; 3], y: pub Field, hash_result: pub [u8; 32]) { + // Simple dynamic array set for entire inner most array + x[y - 1].bar.inner = [106, 107, 10]; + let mut hash_input = x[y - 1].bar.inner; + // Make sure that we are passing a dynamic array to the black box function call + // by setting the array using a dynamic index here + hash_input[y - 1] = 0; + let hash = dep::std::hash::sha256(hash_input); + assert_eq(hash, hash_result); +} From 2952bf1960074413df538cee928c62b93d76ceb8 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Wed, 14 Feb 2024 19:04:24 +0000 Subject: [PATCH 05/45] chore(ci): add warning for external contributors force pushing (#4373) # Description ## Problem\* Resolves ## Summary\* This PR adds a sticky comment to any PRs which are opened from another repository which warns against force pushing once we've started reviewing the PR. ## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. --------- Co-authored-by: jfecher --- .github/EXTERNAL_CONTRIBUTOR_PR_COMMENT.md | 5 +++++ .github/workflows/pull-request-title.yml | 15 +++++++++++++++ 2 files changed, 20 insertions(+) create mode 100644 .github/EXTERNAL_CONTRIBUTOR_PR_COMMENT.md diff --git a/.github/EXTERNAL_CONTRIBUTOR_PR_COMMENT.md b/.github/EXTERNAL_CONTRIBUTOR_PR_COMMENT.md new file mode 100644 index 00000000000..4031bcdb61c --- /dev/null +++ b/.github/EXTERNAL_CONTRIBUTOR_PR_COMMENT.md @@ -0,0 +1,5 @@ +Thank you for your contribution to the Noir language. + +Please **do not force push to this branch** after the Noir team have reviewed this PR. Doing so will only delay us merging your PR as we will need to start the review process from scratch. + +Thanks for your understanding. 
\ No newline at end of file diff --git a/.github/workflows/pull-request-title.yml b/.github/workflows/pull-request-title.yml index 4b8a626a94e..7c2822aa954 100644 --- a/.github/workflows/pull-request-title.yml +++ b/.github/workflows/pull-request-title.yml @@ -27,3 +27,18 @@ jobs: fix feat chore + + force-push-comment: + name: Warn external contributors about force-pushing + runs-on: ubuntu-latest + if: github.repository != 'noir-lang/noir' && github.event_name == 'pull_request_target' + permissions: + pull-requests: write + + steps: + - name: Post comment on force pushes + uses: marocchino/sticky-pull-request-comment@v2 + with: + path: ./.github/EXTERNAL_CONTRIBUTOR_PR_COMMENT.md + + \ No newline at end of file From da1281fc755e55aee52beab467b6d58734f42f4a Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Wed, 14 Feb 2024 19:37:44 +0000 Subject: [PATCH 06/45] chore: fix trigger for force-push sticky comment (#4377) # Description ## Problem\* Resolves ## Summary\* #4373 isn't being triggered properly currently. ## Additional Context ## Documentation\* Check one: - [ ] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [ ] I have tested the changes locally. - [ ] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. --- .github/workflows/pull-request-title.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pull-request-title.yml b/.github/workflows/pull-request-title.yml index 7c2822aa954..8f863160cf1 100644 --- a/.github/workflows/pull-request-title.yml +++ b/.github/workflows/pull-request-title.yml @@ -31,7 +31,7 @@ jobs: force-push-comment: name: Warn external contributors about force-pushing runs-on: ubuntu-latest - if: github.repository != 'noir-lang/noir' && github.event_name == 'pull_request_target' + if: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.head.repo.full_name != 'noir-lang/noir' }} permissions: pull-requests: write From 8536c7c8ea8fc6b740b2ae6d1aef3bc7e1907b8c Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Thu, 15 Feb 2024 11:47:11 +0000 Subject: [PATCH 07/45] fix: only add `.nr` files to file manager (#4380) # Description ## Problem\* Resolves https://github.com/noir-lang/noir/issues/4379 ## Summary\* This PR adds a filter so we only add files with a `.nr` extension to the file manager. ## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. 
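The fix above comes down to a predicate over the path's extension: only paths ending in the Noir source extension survive the directory walk. A self-contained sketch of that check (the `FILE_EXTENSION` constant is inlined here for illustration; the real one is imported from the `fm` crate):

```rust
use std::path::Path;

// Inlined stand-in for fm::FILE_EXTENSION.
const FILE_EXTENSION: &str = "nr";

/// Mirrors the closure passed to get_all_paths_in_dir: keep a path only
/// if it has a `.nr` extension.
fn is_noir_source(path: &Path) -> bool {
    path.extension().map_or(false, |extension| extension == FILE_EXTENSION)
}

fn main() {
    assert!(is_noir_source(Path::new("src/main.nr")));
    assert!(!is_noir_source(Path::new("Nargo.toml"))); // wrong extension
    assert!(!is_noir_source(Path::new("src/lib")));    // no extension at all
}
```

Passing the predicate as a plain `fn` pointer, as the diff below does, keeps `get_all_paths_in_dir` reusable for other file types without making it generic.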
--- tooling/nargo/src/lib.rs | 26 +++++++++++++++++++------- 1 file changed, 19 insertions(+), 7 deletions(-) diff --git a/tooling/nargo/src/lib.rs b/tooling/nargo/src/lib.rs index 0fdff8b202f..e12bf4d4ad1 100644 --- a/tooling/nargo/src/lib.rs +++ b/tooling/nargo/src/lib.rs @@ -16,7 +16,7 @@ pub mod workspace; use std::collections::BTreeMap; -use fm::FileManager; +use fm::{FileManager, FILE_EXTENSION}; use noirc_driver::{add_dep, prepare_crate, prepare_dependency}; use noirc_frontend::{ graph::{CrateId, CrateName}, @@ -69,8 +69,8 @@ fn insert_all_files_for_package_into_file_manager( .clone(); // Get all files in the package and add them to the file manager - let paths = - get_all_paths_in_dir(entry_path_parent).expect("could not get all paths in the package"); + let paths = get_all_noir_source_in_dir(entry_path_parent) + .expect("could not get all paths in the package"); for path in paths { let source = std::fs::read_to_string(path.as_path()) .unwrap_or_else(|_| panic!("could not read file {:?} into string", path)); @@ -125,6 +125,15 @@ pub fn prepare_package<'file_manager, 'parsed_files>( (context, crate_id) } +// Get all Noir source files in the directory and subdirectories. +// +// Panics: If the path is not a path to a directory. +fn get_all_noir_source_in_dir(dir: &std::path::Path) -> std::io::Result> { + get_all_paths_in_dir(dir, |path| { + path.extension().map_or(false, |extension| extension == FILE_EXTENSION) + }) +} + // Get all paths in the directory and subdirectories. // // Panics: If the path is not a path to a directory. @@ -132,7 +141,10 @@ pub fn prepare_package<'file_manager, 'parsed_files>( // TODO: Along with prepare_package, this function is an abstraction leak // TODO: given that this crate should not know about the file manager. // TODO: We can clean this up in a future refactor -fn get_all_paths_in_dir(dir: &std::path::Path) -> std::io::Result> { +fn get_all_paths_in_dir( + dir: &std::path::Path, + predicate: fn(&std::path::Path) -> bool, +) -> std::io::Result> { assert!(dir.is_dir(), "directory {dir:?} is not a path to a directory"); let mut paths = Vec::new(); @@ -142,9 +154,9 @@ fn get_all_paths_in_dir(dir: &std::path::Path) -> std::io::Result Date: Thu, 15 Feb 2024 21:04:37 +0900 Subject: [PATCH 08/45] chore(github): Auto tag Dev Rel on doc-changing PRs (#4375) # Description ## Problem\* Documentation-changing PRs do not currently notify @noir-lang/developerrelations, which the team prefers (let me know below if that is not actually the case). ## Summary\* Extend the documentation preview action to auto-comment and tag `@noir-lang/developerrelations` on doc-changing PRs. Preview of how it looks: https://github.com/Savio-Sou/noir/pull/3#issuecomment-1944321469 ## Documentation\* Check one: - [ ] No documentation needed. - [x] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. 
Co-authored-by: Tom French <15848336+TomAFrench@users.noreply.github.com> --- .github/workflows/docs-pr.yml | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/.github/workflows/docs-pr.yml b/.github/workflows/docs-pr.yml index 712fb100ba6..dddb309a3a4 100644 --- a/.github/workflows/docs-pr.yml +++ b/.github/workflows/docs-pr.yml @@ -114,3 +114,16 @@ jobs: NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }} NETLIFY_SITE_ID: ${{ secrets.NETLIFY_SITE_ID }} timeout-minutes: 1 + + add_comment: + needs: [deploy_preview] + runs-on: ubuntu-latest + permissions: + pull-requests: write + steps: + - name: Tag dev rel in comment + uses: marocchino/sticky-pull-request-comment@v2 + with: + message: | + FYI @noir-lang/developerrelations on Noir doc changes. + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} \ No newline at end of file From 4f4f24f5ce2726b6955404139d4946086fd246e5 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Thu, 15 Feb 2024 18:01:05 +0000 Subject: [PATCH 09/45] chore: clippy fix (#4387) # Description ## Problem\* Resolves ## Summary\* As we're bumping the MSRV in #4385, we're getting a whole new version of clippy which is picking up more stuff. This PR applies clippy + cargo fmt changes from rustc 1.76.0 to reduce the diff on #4385 ## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. --- acvm-repo/acir_field/src/generic_ark.rs | 114 +++++++++--------- .../compiler/optimizers/redundant_range.rs | 5 +- acvm-repo/acvm/src/pwg/blackbox/bigint.rs | 2 +- aztec_macros/src/lib.rs | 8 +- compiler/noirc_errors/src/debug_info.rs | 2 +- .../src/brillig/brillig_gen/brillig_block.rs | 2 +- .../noirc_evaluator/src/ssa/ssa_gen/mod.rs | 4 +- compiler/noirc_frontend/src/debug/mod.rs | 4 +- .../noirc_frontend/src/hir/type_check/expr.rs | 2 +- compiler/noirc_frontend/src/lexer/token.rs | 42 +++---- .../src/monomorphization/mod.rs | 4 +- compiler/noirc_frontend/src/node_interner.rs | 4 +- compiler/noirc_frontend/src/parser/parser.rs | 48 ++++---- tooling/debugger/src/context.rs | 2 +- tooling/debugger/src/dap.rs | 57 +++++---- tooling/debugger/src/foreign_calls.rs | 8 +- tooling/lsp/src/lib.rs | 10 +- tooling/nargo/src/artifacts/debug_vars.rs | 20 +-- tooling/nargo/src/lib.rs | 3 +- tooling/nargo_cli/src/cli/check_cmd.rs | 36 +++--- tooling/nargo_cli/src/cli/dap_cmd.rs | 3 +- tooling/nargo_cli/src/cli/debug_cmd.rs | 3 +- tooling/nargo_fmt/src/rewrite/infix.rs | 4 +- tooling/nargo_fmt/src/visitor/expr.rs | 1 - 24 files changed, 199 insertions(+), 189 deletions(-) diff --git a/acvm-repo/acir_field/src/generic_ark.rs b/acvm-repo/acir_field/src/generic_ark.rs index dc54d271beb..3178011a075 100644 --- a/acvm-repo/acir_field/src/generic_ark.rs +++ b/acvm-repo/acir_field/src/generic_ark.rs @@ -429,63 +429,6 @@ impl SubAssign for FieldElement { } } -#[cfg(test)] -mod tests { - #[test] - fn and() { - let max = 10_000u32; - - let num_bits = (std::mem::size_of::() * 8) as u32 - max.leading_zeros(); - - for x in 0..max { - let x = crate::generic_ark::FieldElement::::from(x as i128); - let res = x.and(&x, num_bits); - assert_eq!(res.to_be_bytes(), x.to_be_bytes()); - } - } - - #[test] - fn serialize_fixed_test_vectors() { - // Serialized field 
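Most of the diff below is mechanical. A small standalone illustration of the lint patterns that recur throughout it (illustrative values, not repository code): `.get(0)` becomes `.first()` (`clippy::get_first`), `.unwrap_or(vec![])` becomes `.unwrap_or_default()`, and the entry-API variant `.or_insert(Vec::new())` becomes `.or_default()`:

```rust
use std::collections::HashMap;

fn main() {
    let args = vec![10, 20, 30];

    // Prefer `.first()` over `.get(0)`.
    assert_eq!(args.first(), Some(&10));

    // Prefer `.unwrap_or_default()` over building the default by hand
    // with `.unwrap_or(vec![])`.
    let maybe: Option<Vec<i32>> = None;
    assert!(maybe.unwrap_or_default().is_empty());

    // The entry-API flavor of the same idea: `.or_default()` instead of
    // `.or_insert(Vec::new())`.
    let mut map: HashMap<&str, Vec<i32>> = HashMap::new();
    map.entry("key").or_default().push(1);
    assert_eq!(map["key"], vec![1]);
}
```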
elements from of 0, -1, -2, -3 - let hex_strings = vec![ - "0000000000000000000000000000000000000000000000000000000000000000", - "30644e72e131a029b85045b68181585d2833e84879b9709143e1f593f0000000", - "30644e72e131a029b85045b68181585d2833e84879b9709143e1f593efffffff", - "30644e72e131a029b85045b68181585d2833e84879b9709143e1f593effffffe", - ]; - - for (i, string) in hex_strings.into_iter().enumerate() { - let minus_i_field_element = - -crate::generic_ark::FieldElement::::from(i as i128); - assert_eq!(minus_i_field_element.to_hex(), string); - } - } - - #[test] - fn deserialize_even_and_odd_length_hex() { - // Test cases of (odd, even) length hex strings - let hex_strings = - vec![("0x0", "0x00"), ("0x1", "0x01"), ("0x002", "0x0002"), ("0x00003", "0x000003")]; - for (i, case) in hex_strings.into_iter().enumerate() { - let i_field_element = - crate::generic_ark::FieldElement::::from(i as i128); - let odd_field_element = - crate::generic_ark::FieldElement::::from_hex(case.0).unwrap(); - let even_field_element = - crate::generic_ark::FieldElement::::from_hex(case.1).unwrap(); - - assert_eq!(i_field_element, odd_field_element); - assert_eq!(odd_field_element, even_field_element); - } - } - - #[test] - fn max_num_bits_smoke() { - let max_num_bits_bn254 = crate::generic_ark::FieldElement::::max_num_bits(); - assert_eq!(max_num_bits_bn254, 254); - } -} - fn mask_vector_le(bytes: &mut [u8], num_bits: usize) { // reverse to big endian format bytes.reverse(); @@ -543,3 +486,60 @@ fn superscript(n: u64) -> String { panic!("{}", n.to_string() + " can't be converted to superscript."); } } + +#[cfg(test)] +mod tests { + #[test] + fn and() { + let max = 10_000u32; + + let num_bits = (std::mem::size_of::() * 8) as u32 - max.leading_zeros(); + + for x in 0..max { + let x = crate::generic_ark::FieldElement::::from(x as i128); + let res = x.and(&x, num_bits); + assert_eq!(res.to_be_bytes(), x.to_be_bytes()); + } + } + + #[test] + fn serialize_fixed_test_vectors() { + // Serialized field elements from of 0, -1, -2, -3 + let hex_strings = vec![ + "0000000000000000000000000000000000000000000000000000000000000000", + "30644e72e131a029b85045b68181585d2833e84879b9709143e1f593f0000000", + "30644e72e131a029b85045b68181585d2833e84879b9709143e1f593efffffff", + "30644e72e131a029b85045b68181585d2833e84879b9709143e1f593effffffe", + ]; + + for (i, string) in hex_strings.into_iter().enumerate() { + let minus_i_field_element = + -crate::generic_ark::FieldElement::::from(i as i128); + assert_eq!(minus_i_field_element.to_hex(), string); + } + } + + #[test] + fn deserialize_even_and_odd_length_hex() { + // Test cases of (odd, even) length hex strings + let hex_strings = + vec![("0x0", "0x00"), ("0x1", "0x01"), ("0x002", "0x0002"), ("0x00003", "0x000003")]; + for (i, case) in hex_strings.into_iter().enumerate() { + let i_field_element = + crate::generic_ark::FieldElement::::from(i as i128); + let odd_field_element = + crate::generic_ark::FieldElement::::from_hex(case.0).unwrap(); + let even_field_element = + crate::generic_ark::FieldElement::::from_hex(case.1).unwrap(); + + assert_eq!(i_field_element, odd_field_element); + assert_eq!(odd_field_element, even_field_element); + } + } + + #[test] + fn max_num_bits_smoke() { + let max_num_bits_bn254 = crate::generic_ark::FieldElement::::max_num_bits(); + assert_eq!(max_num_bits_bn254, 254); + } +} diff --git a/acvm-repo/acvm/src/compiler/optimizers/redundant_range.rs b/acvm-repo/acvm/src/compiler/optimizers/redundant_range.rs index 64fe5291cc6..c6ca18d30ae 100644 --- 
a/acvm-repo/acvm/src/compiler/optimizers/redundant_range.rs +++ b/acvm-repo/acvm/src/compiler/optimizers/redundant_range.rs @@ -72,12 +72,9 @@ impl RangeOptimizer { } } - Opcode::BlackBoxFuncCall(BlackBoxFuncCall::RANGE { input: FunctionInput { witness, num_bits }, - }) => { - Some((*witness, *num_bits)) - } + }) => Some((*witness, *num_bits)), _ => None, }) else { diff --git a/acvm-repo/acvm/src/pwg/blackbox/bigint.rs b/acvm-repo/acvm/src/pwg/blackbox/bigint.rs index 986afaa3ce7..f094bb1ba20 100644 --- a/acvm-repo/acvm/src/pwg/blackbox/bigint.rs +++ b/acvm-repo/acvm/src/pwg/blackbox/bigint.rs @@ -69,7 +69,7 @@ impl BigIntSolver { pub(crate) fn bigint_to_bytes( &self, input: u32, - outputs: &Vec, + outputs: &[Witness], initial_witness: &mut WitnessMap, ) -> Result<(), OpcodeResolutionError> { let bigint = self.get_bigint(input, BlackBoxFunc::BigIntToLeBytes)?; diff --git a/aztec_macros/src/lib.rs b/aztec_macros/src/lib.rs index 51a8b5361a6..21e3dd56e0d 100644 --- a/aztec_macros/src/lib.rs +++ b/aztec_macros/src/lib.rs @@ -809,7 +809,7 @@ fn get_serialized_length( ) -> Result { let (struct_name, maybe_stored_in_state) = match typ { Type::Struct(struct_type, generics) => { - Ok((struct_type.borrow().name.0.contents.clone(), generics.get(0))) + Ok((struct_type.borrow().name.0.contents.clone(), generics.first())) } _ => Err(AztecMacroError::CouldNotAssignStorageSlots { secondary_message: Some("State storage variable must be a struct".to_string()), @@ -859,7 +859,7 @@ fn get_serialized_length( let serialized_trait_impl_shared = interner.get_trait_implementation(*serialized_trait_impl_id); let serialized_trait_impl = serialized_trait_impl_shared.borrow(); - match serialized_trait_impl.trait_generics.get(0).unwrap() { + match serialized_trait_impl.trait_generics.first().unwrap() { Type::Constant(value) => Ok(*value), _ => Err(AztecMacroError::CouldNotAssignStorageSlots { secondary_message: None }), } @@ -946,9 +946,7 @@ fn assign_storage_slots( let slot_arg_expression = interner.expression(&new_call_expression.arguments[1]); let current_storage_slot = match slot_arg_expression { - HirExpression::Literal(HirLiteral::Integer(slot, _)) => { - Ok(slot.borrow().to_u128()) - } + HirExpression::Literal(HirLiteral::Integer(slot, _)) => Ok(slot.to_u128()), _ => Err(( AztecMacroError::CouldNotAssignStorageSlots { secondary_message: Some( diff --git a/compiler/noirc_errors/src/debug_info.rs b/compiler/noirc_errors/src/debug_info.rs index 25722aac57f..67ec851d46d 100644 --- a/compiler/noirc_errors/src/debug_info.rs +++ b/compiler/noirc_errors/src/debug_info.rs @@ -90,7 +90,7 @@ impl DebugInfo { for (opcode_location, locations) in self.locations.iter() { for location in locations.iter() { - let opcodes = accumulator.entry(*location).or_insert(Vec::new()); + let opcodes = accumulator.entry(*location).or_default(); opcodes.push(opcode_location); } } diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs index c299daa158a..f01f60252f6 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs @@ -269,7 +269,7 @@ impl<'block> BrilligBlock<'block> { unreachable!("expected a call instruction") }; - let Value::Function(func_id) = &dfg[*func] else { + let Value::Function(func_id) = &dfg[*func] else { unreachable!("expected a function value") }; diff --git a/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs 
b/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs index ecc8bf87597..8f2c923d62c 100644 --- a/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs +++ b/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs @@ -684,9 +684,7 @@ impl<'a> FunctionContext<'a> { &mut self, assert_message: &Option>, ) -> Result>, RuntimeError> { - let Some(assert_message_expr) = assert_message else { - return Ok(None) - }; + let Some(assert_message_expr) = assert_message else { return Ok(None) }; if let ast::Expression::Literal(ast::Literal::Str(assert_message)) = assert_message_expr.as_ref() diff --git a/compiler/noirc_frontend/src/debug/mod.rs b/compiler/noirc_frontend/src/debug/mod.rs index 9056e821e8d..a88567fcaf9 100644 --- a/compiler/noirc_frontend/src/debug/mod.rs +++ b/compiler/noirc_frontend/src/debug/mod.rs @@ -96,7 +96,7 @@ impl DebugInstrumenter { self.walk_scope(&mut func.body.0, func.span); // prepend fn params: - func.body.0 = vec![set_fn_params, func.body.0.clone()].concat(); + func.body.0 = [set_fn_params, func.body.0.clone()].concat(); } // Modify a vector of statements in-place, adding instrumentation for sets and drops. @@ -130,7 +130,7 @@ impl DebugInstrumenter { let span = Span::empty(span.end()); // drop scope variables - let scope_vars = self.scope.pop().unwrap_or(HashMap::default()); + let scope_vars = self.scope.pop().unwrap_or_default(); let drop_vars_stmts = scope_vars.values().map(|var_id| build_drop_var_stmt(*var_id, span)); statements.extend(drop_vars_stmts); diff --git a/compiler/noirc_frontend/src/hir/type_check/expr.rs b/compiler/noirc_frontend/src/hir/type_check/expr.rs index 96a79152f69..b6bb5984bcd 100644 --- a/compiler/noirc_frontend/src/hir/type_check/expr.rs +++ b/compiler/noirc_frontend/src/hir/type_check/expr.rs @@ -65,7 +65,7 @@ impl<'interner> TypeChecker<'interner> { let elem_types = vecmap(&arr, |arg| self.check_expression(arg)); let first_elem_type = elem_types - .get(0) + .first() .cloned() .unwrap_or_else(|| self.interner.next_type_variable()); diff --git a/compiler/noirc_frontend/src/lexer/token.rs b/compiler/noirc_frontend/src/lexer/token.rs index f7c07c5f5db..fe12132e202 100644 --- a/compiler/noirc_frontend/src/lexer/token.rs +++ b/compiler/noirc_frontend/src/lexer/token.rs @@ -774,6 +774,27 @@ impl Keyword { } } +pub struct Tokens(pub Vec); + +type TokenMapIter = Map, fn(SpannedToken) -> (Token, Span)>; + +impl<'a> From for chumsky::Stream<'a, Token, Span, TokenMapIter> { + fn from(tokens: Tokens) -> Self { + let end_of_input = match tokens.0.last() { + Some(spanned_token) => spanned_token.to_span(), + None => Span::single_char(0), + }; + + fn get_span(token: SpannedToken) -> (Token, Span) { + let span = token.to_span(); + (token.into_token(), span) + } + + let iter = tokens.0.into_iter().map(get_span as fn(_) -> _); + chumsky::Stream::from_iter(end_of_input, iter) + } +} + #[cfg(test)] mod keywords { use strum::IntoEnumIterator; @@ -796,24 +817,3 @@ mod keywords { } } } - -pub struct Tokens(pub Vec); - -type TokenMapIter = Map, fn(SpannedToken) -> (Token, Span)>; - -impl<'a> From for chumsky::Stream<'a, Token, Span, TokenMapIter> { - fn from(tokens: Tokens) -> Self { - let end_of_input = match tokens.0.last() { - Some(spanned_token) => spanned_token.to_span(), - None => Span::single_char(0), - }; - - fn get_span(token: SpannedToken) -> (Token, Span) { - let span = token.to_span(); - (token.into_token(), span) - } - - let iter = tokens.0.into_iter().map(get_span as fn(_) -> _); - chumsky::Stream::from_iter(end_of_input, iter) - } -} diff --git 
a/compiler/noirc_frontend/src/monomorphization/mod.rs b/compiler/noirc_frontend/src/monomorphization/mod.rs index f691a0c9065..cfe671d7d58 100644 --- a/compiler/noirc_frontend/src/monomorphization/mod.rs +++ b/compiler/noirc_frontend/src/monomorphization/mod.rs @@ -733,7 +733,9 @@ impl<'interner> Monomorphizer<'interner> { } DefinitionKind::Global(global_id) => { let Some(let_) = self.interner.get_global_let_statement(*global_id) else { - unreachable!("Globals should have a corresponding let statement by monomorphization") + unreachable!( + "Globals should have a corresponding let statement by monomorphization" + ) }; self.expr(let_.expression) } diff --git a/compiler/noirc_frontend/src/node_interner.rs b/compiler/noirc_frontend/src/node_interner.rs index 9a45268d111..815bc4c5e9c 100644 --- a/compiler/noirc_frontend/src/node_interner.rs +++ b/compiler/noirc_frontend/src/node_interner.rs @@ -1147,7 +1147,7 @@ impl NodeInterner { }) .collect() }) - .unwrap_or(vec![]) + .unwrap_or_default() } /// Similar to `lookup_trait_implementation` but does not apply any type bindings on success. @@ -1670,7 +1670,7 @@ impl Methods { for method in self.iter() { match interner.function_meta(&method).typ.instantiate(interner).0 { Type::Function(args, _, _) => { - if let Some(object) = args.get(0) { + if let Some(object) = args.first() { let mut bindings = TypeBindings::new(); if object.try_unify(typ, &mut bindings).is_ok() { diff --git a/compiler/noirc_frontend/src/parser/parser.rs b/compiler/noirc_frontend/src/parser/parser.rs index 8bcd7670716..1cb81e26a0a 100644 --- a/compiler/noirc_frontend/src/parser/parser.rs +++ b/compiler/noirc_frontend/src/parser/parser.rs @@ -833,7 +833,7 @@ where ignore_then_commit(keyword(Keyword::Assert), parenthesized(argument_parser)) .labelled(ParsingRuleLabel::Statement) .validate(|expressions, span, _| { - let condition = expressions.get(0).unwrap_or(&Expression::error(span)).clone(); + let condition = expressions.first().unwrap_or(&Expression::error(span)).clone(); let message = expressions.get(1).cloned(); StatementKind::Constrain(ConstrainStatement(condition, message, ConstrainKind::Assert)) }) @@ -851,7 +851,7 @@ where .validate(|exprs: Vec, span, _| { let predicate = Expression::new( ExpressionKind::Infix(Box::new(InfixExpression { - lhs: exprs.get(0).unwrap_or(&Expression::error(span)).clone(), + lhs: exprs.first().unwrap_or(&Expression::error(span)).clone(), rhs: exprs.get(1).unwrap_or(&Expression::error(span)).clone(), operator: Spanned::from(span, BinaryOpKind::Equal), })), @@ -2483,7 +2483,7 @@ mod test { #[test] fn return_validation() { - let cases = vec![ + let cases = [ Case { source: "{ return 42; }", expect: concat!("{\n", " Error\n", "}",), @@ -2512,7 +2512,7 @@ mod test { #[test] fn expr_no_constructors() { - let cases = vec![ + let cases = [ Case { source: "{ if structure { a: 1 } {} }", expect: concat!( @@ -2567,10 +2567,10 @@ mod test { #[test] fn parse_raw_string_expr() { let cases = vec![ - Case { source: r##" r"foo" "##, expect: r##"r"foo""##, errors: 0 }, + Case { source: r#" r"foo" "#, expect: r#"r"foo""#, errors: 0 }, Case { source: r##" r#"foo"# "##, expect: r##"r#"foo"#"##, errors: 0 }, // backslash - Case { source: r##" r"\\" "##, expect: r##"r"\\""##, errors: 0 }, + Case { source: r#" r"\\" "#, expect: r#"r"\\""#, errors: 0 }, Case { source: r##" r#"\"# "##, expect: r##"r#"\"#"##, errors: 0 }, Case { source: r##" r#"\\"# "##, expect: r##"r#"\\"#"##, errors: 0 }, Case { source: r##" r#"\\\"# "##, expect: r##"r#"\\\"#"##, errors: 0 }, @@ 
-2582,27 +2582,27 @@ mod test { }, Case { source: r##" r#"\\\\\\\\"# "##, expect: r##"r#"\\\\\\\\"#"##, errors: 0 }, // mismatch - errors: - Case { source: r###" r#"foo"## "###, expect: r###"r#"foo"#"###, errors: 1 }, - Case { source: r###" r##"foo"# "###, expect: "(none)", errors: 2 }, + Case { source: r###" r#"foo"## "###, expect: r##"r#"foo"#"##, errors: 1 }, + Case { source: r##" r##"foo"# "##, expect: "(none)", errors: 2 }, // mismatch: short: - Case { source: r###" r"foo"# "###, expect: r###"r"foo""###, errors: 1 }, - Case { source: r###" r#"foo" "###, expect: "(none)", errors: 2 }, + Case { source: r##" r"foo"# "##, expect: r#"r"foo""#, errors: 1 }, + Case { source: r#" r#"foo" "#, expect: "(none)", errors: 2 }, // empty string - Case { source: r####"r"""####, expect: r####"r"""####, errors: 0 }, + Case { source: r#"r"""#, expect: r#"r"""#, errors: 0 }, Case { source: r####"r###""###"####, expect: r####"r###""###"####, errors: 0 }, // miscellaneous - Case { source: r###" r#\"foo\"# "###, expect: "plain::r", errors: 2 }, - Case { source: r###" r\"foo\" "###, expect: "plain::r", errors: 1 }, - Case { source: r###" r##"foo"# "###, expect: "(none)", errors: 2 }, + Case { source: r##" r#\"foo\"# "##, expect: "plain::r", errors: 2 }, + Case { source: r#" r\"foo\" "#, expect: "plain::r", errors: 1 }, + Case { source: r##" r##"foo"# "##, expect: "(none)", errors: 2 }, // missing 'r' letter - Case { source: r###" ##"foo"# "###, expect: r#""foo""#, errors: 2 }, - Case { source: r###" #"foo" "###, expect: "plain::foo", errors: 2 }, + Case { source: r##" ##"foo"# "##, expect: r#""foo""#, errors: 2 }, + Case { source: r#" #"foo" "#, expect: "plain::foo", errors: 2 }, // whitespace - Case { source: r###" r #"foo"# "###, expect: "plain::r", errors: 2 }, - Case { source: r###" r# "foo"# "###, expect: "plain::r", errors: 3 }, - Case { source: r###" r#"foo" # "###, expect: "(none)", errors: 2 }, + Case { source: r##" r #"foo"# "##, expect: "plain::r", errors: 2 }, + Case { source: r##" r# "foo"# "##, expect: "plain::r", errors: 3 }, + Case { source: r#" r#"foo" # "#, expect: "(none)", errors: 2 }, // after identifier - Case { source: r###" bar#"foo"# "###, expect: "plain::bar", errors: 2 }, + Case { source: r##" bar#"foo"# "##, expect: "plain::bar", errors: 2 }, // nested Case { source: r###"r##"foo r#"bar"# r"baz" ### bye"##"###, @@ -2617,10 +2617,10 @@ mod test { #[test] fn parse_raw_string_lit() { let lit_cases = vec![ - Case { source: r##" r"foo" "##, expect: r##"r"foo""##, errors: 0 }, + Case { source: r#" r"foo" "#, expect: r#"r"foo""#, errors: 0 }, Case { source: r##" r#"foo"# "##, expect: r##"r#"foo"#"##, errors: 0 }, // backslash - Case { source: r##" r"\\" "##, expect: r##"r"\\""##, errors: 0 }, + Case { source: r#" r"\\" "#, expect: r#"r"\\""#, errors: 0 }, Case { source: r##" r#"\"# "##, expect: r##"r#"\"#"##, errors: 0 }, Case { source: r##" r#"\\"# "##, expect: r##"r#"\\"#"##, errors: 0 }, Case { source: r##" r#"\\\"# "##, expect: r##"r#"\\\"#"##, errors: 0 }, @@ -2632,8 +2632,8 @@ mod test { }, Case { source: r##" r#"\\\\\\\\"# "##, expect: r##"r#"\\\\\\\\"#"##, errors: 0 }, // mismatch - errors: - Case { source: r###" r#"foo"## "###, expect: r###"r#"foo"#"###, errors: 1 }, - Case { source: r###" r##"foo"# "###, expect: "(none)", errors: 2 }, + Case { source: r###" r#"foo"## "###, expect: r##"r#"foo"#"##, errors: 1 }, + Case { source: r##" r##"foo"# "##, expect: "(none)", errors: 2 }, ]; check_cases_with_errors(&lit_cases[..], literal()); diff --git a/tooling/debugger/src/context.rs 
b/tooling/debugger/src/context.rs index 5ab2c63c365..515edf0bb06 100644 --- a/tooling/debugger/src/context.rs +++ b/tooling/debugger/src/context.rs @@ -138,7 +138,7 @@ impl<'a, B: BlackBoxFunctionSolver> DebugContext<'a, B> { }) .collect() }) - .unwrap_or(vec![]) + .unwrap_or_default() } /// Returns the current call stack with expanded source locations. In diff --git a/tooling/debugger/src/dap.rs b/tooling/debugger/src/dap.rs index 184018e9fcc..7e67a26b257 100644 --- a/tooling/debugger/src/dap.rs +++ b/tooling/debugger/src/dap.rs @@ -115,7 +115,8 @@ impl<'a, R: Read, W: Write, B: BlackBoxFunctionSolver> DapSession<'a, R, W, B> { let source_location = source_locations[0]; let span = source_location.span; let file_id = source_location.file; - let Ok(line_index) = &simple_files[&file_id].line_index((), span.start() as usize) else { + let Ok(line_index) = &simple_files[&file_id].line_index((), span.start() as usize) + else { return; }; let line_number = line_index + 1; @@ -143,7 +144,7 @@ impl<'a, R: Read, W: Write, B: BlackBoxFunctionSolver> DapSession<'a, R, W, B> { pub fn run_loop(&mut self) -> Result<(), ServerError> { self.running = self.context.get_current_opcode_location().is_some(); - if self.running && matches!(self.context.get_current_source_location(), None) { + if self.running && self.context.get_current_source_location().is_none() { // TODO: remove this? This is to ensure that the tool has a proper // source location to show when first starting the debugger, but // maybe the default behavior should be to start executing until the @@ -297,7 +298,7 @@ impl<'a, R: Read, W: Write, B: BlackBoxFunctionSolver> DapSession<'a, R, W, B> { } } // the actual opcodes - while count > 0 && !matches!(opcode_location, None) { + while count > 0 && opcode_location.is_some() { instructions.push(DisassembledInstruction { address: format!("{}", opcode_location.unwrap()), instruction: self.context.render_opcode_at_location(&opcode_location), @@ -446,29 +447,31 @@ impl<'a, R: Read, W: Write, B: BlackBoxFunctionSolver> DapSession<'a, R, W, B> { // compute breakpoints to set and return let mut breakpoints_to_set: Vec<(OpcodeLocation, i64)> = vec![]; - let breakpoints: Vec = args.breakpoints.iter().map(|breakpoint| { - let Ok(location) = OpcodeLocation::from_str(breakpoint.instruction_reference.as_str()) else { - return Breakpoint { - verified: false, - message: Some(String::from("Missing instruction reference")), - ..Breakpoint::default() - }; - }; - if !self.context.is_valid_opcode_location(&location) { - return Breakpoint { - verified: false, - message: Some(String::from("Invalid opcode location")), - ..Breakpoint::default() + let breakpoints: Vec = args + .breakpoints + .iter() + .map(|breakpoint| { + let Ok(location) = + OpcodeLocation::from_str(breakpoint.instruction_reference.as_str()) + else { + return Breakpoint { + verified: false, + message: Some(String::from("Missing instruction reference")), + ..Breakpoint::default() + }; }; - } - let id = self.get_next_breakpoint_id(); - breakpoints_to_set.push((location, id)); - Breakpoint { - id: Some(id), - verified: true, - ..Breakpoint::default() - } - }).collect(); + if !self.context.is_valid_opcode_location(&location) { + return Breakpoint { + verified: false, + message: Some(String::from("Invalid opcode location")), + ..Breakpoint::default() + }; + } + let id = self.get_next_breakpoint_id(); + breakpoints_to_set.push((location, id)); + Breakpoint { id: Some(id), verified: true, ..Breakpoint::default() } + }) + .collect(); // actually set the computed 
breakpoints self.instruction_breakpoints = breakpoints_to_set; @@ -539,7 +542,9 @@ impl<'a, R: Read, W: Write, B: BlackBoxFunctionSolver> DapSession<'a, R, W, B> { let Some(location) = self.find_opcode_for_source_location(&file_id, line) else { return Breakpoint { verified: false, - message: Some(String::from("Source location cannot be matched to opcode location")), + message: Some(String::from( + "Source location cannot be matched to opcode location", + )), ..Breakpoint::default() }; }; diff --git a/tooling/debugger/src/foreign_calls.rs b/tooling/debugger/src/foreign_calls.rs index 01676adfef3..68c4d3947b0 100644 --- a/tooling/debugger/src/foreign_calls.rs +++ b/tooling/debugger/src/foreign_calls.rs @@ -100,7 +100,7 @@ impl ForeignCallExecutor for DefaultDebugForeignCallExecutor { Ok(ForeignCallResult::default().into()) } Some(DebugForeignCall::MemberAssign(arity)) => { - if let Some(ForeignCallParam::Single(var_id_value)) = foreign_call.inputs.get(0) { + if let Some(ForeignCallParam::Single(var_id_value)) = foreign_call.inputs.first() { let arity = arity as usize; let var_id = debug_var_id(var_id_value); let n = foreign_call.inputs.len(); @@ -116,7 +116,11 @@ impl ForeignCallExecutor for DefaultDebugForeignCallExecutor { .collect(); let values: Vec = (0..n - 1 - arity) .flat_map(|i| { - foreign_call.inputs.get(1 + i).map(|fci| fci.values()).unwrap_or(vec![]) + foreign_call + .inputs + .get(1 + i) + .map(|fci| fci.values()) + .unwrap_or_default() }) .collect(); self.debug_vars.assign_field(var_id, indexes, &values); diff --git a/tooling/lsp/src/lib.rs b/tooling/lsp/src/lib.rs index a0e024c70fd..be9b83e02f6 100644 --- a/tooling/lsp/src/lib.rs +++ b/tooling/lsp/src/lib.rs @@ -222,11 +222,15 @@ pub(crate) fn resolve_workspace_for_source_path(file_path: &Path) -> Result ParsedFiles { cache_misses .into_iter() .map(|(id, _, _, parse_results)| (id, parse_results)) - .chain(cache_hits.into_iter()) + .chain(cache_hits) .collect() } else { parse_all(file_manager) diff --git a/tooling/nargo/src/artifacts/debug_vars.rs b/tooling/nargo/src/artifacts/debug_vars.rs index b5559ca53c8..20f2637f7d6 100644 --- a/tooling/nargo/src/artifacts/debug_vars.rs +++ b/tooling/nargo/src/artifacts/debug_vars.rs @@ -18,23 +18,25 @@ impl DebugVars { self.active .iter() .filter_map(|var_id| { - self.variables - .get(var_id) - .and_then(|debug_var| { - let Some(value) = self.values.get(var_id) else { return None; }; - let Some(ptype) = self.types.get(&debug_var.debug_type_id) else { return None; }; - Some((debug_var.name.as_str(), value, ptype)) - }) + self.variables.get(var_id).and_then(|debug_var| { + let Some(value) = self.values.get(var_id) else { + return None; + }; + let Some(ptype) = self.types.get(&debug_var.debug_type_id) else { + return None; + }; + Some((debug_var.name.as_str(), value, ptype)) + }) }) .collect() } pub fn insert_variables(&mut self, vars: &DebugVariables) { - self.variables.extend(vars.clone().into_iter()); + self.variables.extend(vars.clone()); } pub fn insert_types(&mut self, types: &DebugTypes) { - self.types.extend(types.clone().into_iter()); + self.types.extend(types.clone()); } pub fn assign_var(&mut self, var_id: DebugVarId, values: &[Value]) { diff --git a/tooling/nargo/src/lib.rs b/tooling/nargo/src/lib.rs index e12bf4d4ad1..3deced041f8 100644 --- a/tooling/nargo/src/lib.rs +++ b/tooling/nargo/src/lib.rs @@ -65,8 +65,7 @@ fn insert_all_files_for_package_into_file_manager( let entry_path_parent = package .entry_path .parent() - .unwrap_or_else(|| panic!("The entry path is expected to be 
a single file within a directory and so should have a parent {:?}", package.entry_path)) - .clone(); + .unwrap_or_else(|| panic!("The entry path is expected to be a single file within a directory and so should have a parent {:?}", package.entry_path)); // Get all files in the package and add them to the file manager let paths = get_all_noir_source_in_dir(entry_path_parent) diff --git a/tooling/nargo_cli/src/cli/check_cmd.rs b/tooling/nargo_cli/src/cli/check_cmd.rs index a8b9dbdeeb2..4da06d2536a 100644 --- a/tooling/nargo_cli/src/cli/check_cmd.rs +++ b/tooling/nargo_cli/src/cli/check_cmd.rs @@ -142,6 +142,24 @@ fn create_input_toml_template( toml::to_string(&map).unwrap() } +/// Run the lexing, parsing, name resolution, and type checking passes and report any warnings +/// and errors found. +pub(crate) fn check_crate_and_report_errors( + context: &mut Context, + crate_id: CrateId, + deny_warnings: bool, + disable_macros: bool, + silence_warnings: bool, +) -> Result<(), CompileError> { + let result = check_crate(context, crate_id, deny_warnings, disable_macros); + super::compile_cmd::report_errors( + result, + &context.file_manager, + deny_warnings, + silence_warnings, + ) +} + #[cfg(test)] mod tests { use noirc_abi::{AbiParameter, AbiType, AbiVisibility, Sign}; @@ -189,21 +207,3 @@ d2 = ["", "", ""] assert_eq!(toml_str, expected_toml_str); } } - -/// Run the lexing, parsing, name resolution, and type checking passes and report any warnings -/// and errors found. -pub(crate) fn check_crate_and_report_errors( - context: &mut Context, - crate_id: CrateId, - deny_warnings: bool, - disable_macros: bool, - silence_warnings: bool, -) -> Result<(), CompileError> { - let result = check_crate(context, crate_id, deny_warnings, disable_macros); - super::compile_cmd::report_errors( - result, - &context.file_manager, - deny_warnings, - silence_warnings, - ) -} diff --git a/tooling/nargo_cli/src/cli/dap_cmd.rs b/tooling/nargo_cli/src/cli/dap_cmd.rs index f4df309f1c9..ba4f91609ef 100644 --- a/tooling/nargo_cli/src/cli/dap_cmd.rs +++ b/tooling/nargo_cli/src/cli/dap_cmd.rs @@ -159,7 +159,8 @@ fn loop_uninitialized_dap( server.respond(req.error("Missing launch arguments"))?; continue; }; - let Some(Value::String(ref project_folder)) = additional_data.get("projectFolder") else { + let Some(Value::String(ref project_folder)) = additional_data.get("projectFolder") + else { server.respond(req.error("Missing project folder argument"))?; continue; }; diff --git a/tooling/nargo_cli/src/cli/debug_cmd.rs b/tooling/nargo_cli/src/cli/debug_cmd.rs index 6fcfee91457..130a07b5c90 100644 --- a/tooling/nargo_cli/src/cli/debug_cmd.rs +++ b/tooling/nargo_cli/src/cli/debug_cmd.rs @@ -147,8 +147,7 @@ fn instrument_package_files( let entry_path_parent = package .entry_path .parent() - .unwrap_or_else(|| panic!("The entry path is expected to be a single file within a directory and so should have a parent {:?}", package.entry_path)) - .clone(); + .unwrap_or_else(|| panic!("The entry path is expected to be a single file within a directory and so should have a parent {:?}", package.entry_path)); let mut debug_instrumenter = DebugInstrumenter::default(); diff --git a/tooling/nargo_fmt/src/rewrite/infix.rs b/tooling/nargo_fmt/src/rewrite/infix.rs index 15f5fe23aae..5d2b387496a 100644 --- a/tooling/nargo_fmt/src/rewrite/infix.rs +++ b/tooling/nargo_fmt/src/rewrite/infix.rs @@ -96,7 +96,9 @@ pub(crate) fn flatten( result.push(rewrite); - let Some(pop) = stack.pop() else { break; }; + let Some(pop) = stack.pop() else { + break; + }; match 
&pop.kind { ExpressionKind::Infix(infix) => { diff --git a/tooling/nargo_fmt/src/visitor/expr.rs b/tooling/nargo_fmt/src/visitor/expr.rs index 9b36911b1af..2cd0e881e84 100644 --- a/tooling/nargo_fmt/src/visitor/expr.rs +++ b/tooling/nargo_fmt/src/visitor/expr.rs @@ -202,7 +202,6 @@ pub(crate) fn format_seq( reduce: bool, ) -> String { let mut nested_indent = shape; - let shape = shape; nested_indent.indent.block_indent(visitor.config); From 0a1d109f478c997da5c43876fd12464af638bb15 Mon Sep 17 00:00:00 2001 From: jfecher Date: Thu, 15 Feb 2024 12:22:03 -0600 Subject: [PATCH 10/45] fix: Use correct type for numeric generics (#4386) # Description ## Problem\* Resolves #4290 ## Summary\* Previously, the monomorphizer would assume all numeric generics were Fields, but this was not necessarily true. ## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. --- compiler/noirc_frontend/src/monomorphization/mod.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/compiler/noirc_frontend/src/monomorphization/mod.rs b/compiler/noirc_frontend/src/monomorphization/mod.rs index cfe671d7d58..31a254d9f0a 100644 --- a/compiler/noirc_frontend/src/monomorphization/mod.rs +++ b/compiler/noirc_frontend/src/monomorphization/mod.rs @@ -714,7 +714,6 @@ impl<'interner> Monomorphizer<'interner> { let mutable = definition.mutable; let location = Some(ident.location); let name = definition.name.clone(); - let typ = self.interner.id_type(expr_id); let definition = self.lookup_function(*func_id, expr_id, &typ, None); let typ = self.convert_type(&typ); let ident = ast::Ident { location, mutable, definition, name, typ: typ.clone() }; @@ -755,7 +754,8 @@ impl<'interner> Monomorphizer<'interner> { let value = FieldElement::from(value as u128); let location = self.interner.id_location(expr_id); - ast::Expression::Literal(ast::Literal::Integer(value, ast::Type::Field, location)) + let typ = self.convert_type(&typ); + ast::Expression::Literal(ast::Literal::Integer(value, typ, location)) } } } From 1fbc6ab19a91a9cf6b12a56fb8f02a44fa7592f3 Mon Sep 17 00:00:00 2001 From: Maxim Vezenov Date: Thu, 15 Feb 2024 19:32:45 +0000 Subject: [PATCH 11/45] chore: Test for printing array of strings (#4389) # Description ## Problem\* Resolves #2903 ## Summary\* I added a function `regression_2903` to the `debug_logs` test that shows we can now accurately print arrays of strings. This was fixed in previous formatting work. I also removed `std::` prefixes from `println` now that we have a prelude. ## Additional Context ## Documentation\* Check one: - [X] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [X] I have tested the changes locally. - [X] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. 
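Before the diff for patch 11/45 below, a quick note on patch 10/45 above: its one-line summary ("the monomorphizer would assume all numeric generics were Fields") is easiest to see with a toy model. The sketch below is standalone Rust with hypothetical stand-in names (`Type`, `IntLiteral`, `lower_numeric_generic`), not the compiler's real monomorphizer API; it only illustrates the shape of the fix, which is that the literal's type must come from the resolved generic instead of being hard-coded to `Field`.

```rust
// Toy model of the patch-10/45 change; all names here are hypothetical.
#[derive(Debug, Clone, PartialEq)]
enum Type {
    Field,
    Unsigned(u32), // bit size, e.g. Unsigned(64) for u64
}

#[derive(Debug, PartialEq)]
struct IntLiteral {
    value: u128,
    typ: Type,
}

// Lowering a numeric generic to an integer literal. Before the fix, `typ`
// was effectively hard-coded to `Type::Field`, regardless of what type the
// generic had actually been resolved to.
fn lower_numeric_generic(value: u128, resolved_type: &Type) -> IntLiteral {
    IntLiteral { value, typ: resolved_type.clone() }
}

fn main() {
    // A numeric generic resolved at u64 now yields a u64 literal.
    let lit = lower_numeric_generic(3, &Type::Unsigned(64));
    assert_eq!(lit.typ, Type::Unsigned(64));
    assert_ne!(lit.typ, Type::Field); // would have been Field before the fix
}
```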
--- .../execution_success/debug_logs/src/main.nr | 76 +++++++++++-------- 1 file changed, 43 insertions(+), 33 deletions(-) diff --git a/test_programs/execution_success/debug_logs/src/main.nr b/test_programs/execution_success/debug_logs/src/main.nr index 49e0041594a..c628a9ae6a4 100644 --- a/test_programs/execution_success/debug_logs/src/main.nr +++ b/test_programs/execution_success/debug_logs/src/main.nr @@ -1,77 +1,76 @@ -use dep::std; - -fn main(x: Field, y: pub Field) { +fn main(x: Field, y: pub Field) { let string = "i: {i}, j: {j}"; - std::println(string); + println(string); // TODO: fmtstr cannot be printed // let fmt_str: fmtstr<14, (Field, Field)> = f"i: {x}, j: {y}"; // let fmt_fmt_str = f"fmtstr: {fmt_str}, i: {x}"; - // std::println(fmt_fmt_str); + // println(fmt_fmt_str); // A `fmtstr` lets you easily perform string interpolation. let fmt_str: fmtstr<14, (Field, Field)> = f"i: {x}, j: {y}"; let fmt_str = string_identity(fmt_str); - std::println(fmt_str); + println(fmt_str); let fmt_str_no_type = f"i: {x}, j: {y}"; - std::println(fmt_str_no_type); + println(fmt_str_no_type); let fmt_str_generic = string_with_generics(fmt_str_no_type); - std::println(fmt_str_generic); + println(fmt_str_generic); let s = myStruct { y: x, x: y }; - std::println(s); + println(s); - std::println(f"randomstring{x}{x}"); + println(f"randomstring{x}{x}"); let fmt_str = string_with_partial_generics(f"i: {x}, s: {s}"); - std::println(fmt_str); + println(fmt_str); - std::println(x); - std::println([x, y]); + println(x); + println([x, y]); let foo = fooStruct { my_struct: s, foo: 15 }; - std::println(f"s: {s}, foo: {foo}"); + println(f"s: {s}, foo: {foo}"); - std::println(f"x: 0, y: 1"); + println(f"x: 0, y: 1"); let s_2 = myStruct { x: 20, y: 30 }; - std::println(f"s1: {s}, s2: {s_2}"); + println(f"s1: {s}, s2: {s_2}"); let bar = fooStruct { my_struct: s_2, foo: 20 }; - std::println(f"foo1: {foo}, foo2: {bar}"); + println(f"foo1: {foo}, foo2: {bar}"); let struct_string = if x != 5 { f"{foo}" } else { f"{bar}" }; - std::println(struct_string); + println(struct_string); let one_tuple = (1, 2, 3); let another_tuple = (4, 5, 6); - std::println(f"one_tuple: {one_tuple}, another_tuple: {another_tuple}"); - std::println(one_tuple); + println(f"one_tuple: {one_tuple}, another_tuple: {another_tuple}"); + println(one_tuple); let tuples_nested = (one_tuple, another_tuple); - std::println(f"tuples_nested: {tuples_nested}"); - std::println(tuples_nested); + println(f"tuples_nested: {tuples_nested}"); + println(tuples_nested); + regression_2903(); regression_2906(); let first_array = [1, 2, 3]; let second_array = [4, 5, 6]; let arrays_nested = [first_array, second_array]; - std::println(f"first_array: {first_array}, second_array: {second_array}"); - std::println(f"arrays_nested: {arrays_nested}"); + println(f"first_array: {first_array}, second_array: {second_array}"); + println(f"arrays_nested: {arrays_nested}"); let free_lambda = |x| x + 1; let sentinel: u32 = 8888; - std::println(f"free_lambda: {free_lambda}, sentinel: {sentinel}"); - std::println(free_lambda); + println(f"free_lambda: {free_lambda}, sentinel: {sentinel}"); + println(free_lambda); let one = 1; let closured_lambda = |x| x + one; - std::println(f"closured_lambda: {closured_lambda}, sentinel: {sentinel}"); - std::println(closured_lambda); + println(f"closured_lambda: {closured_lambda}, sentinel: {sentinel}"); + println(closured_lambda); } fn string_identity(string: fmtstr<14, (Field, Field)>) -> fmtstr<14, (Field, Field)> { @@ -96,19 +95,30 @@ struct 
fooStruct { foo: Field, } +fn regression_2903() { + let v : [str<1>; 1] = ["1"; 1]; + println(v); // will print [1] + + let a = v[0]; + println(a); // will print `1` + + let bytes = [ "aaa", "bbb", "ccc" ]; + println(bytes); +} + fn regression_2906() { let array_two_vals = [1, 2]; - dep::std::println(f"array_two_vals: {array_two_vals}"); + println(f"array_two_vals: {array_two_vals}"); let label_two_vals = "12"; - dep::std::println(f"label_two_vals: {label_two_vals}"); + println(f"label_two_vals: {label_two_vals}"); let array_five_vals = [1, 2, 3, 4, 5]; - dep::std::println(f"array_five_vals: {array_five_vals}"); + println(f"array_five_vals: {array_five_vals}"); let label_five_vals = "12345"; - dep::std::println(f"label_five_vals: {label_five_vals}"); + println(f"label_five_vals: {label_five_vals}"); - dep::std::println(f"array_five_vals: {array_five_vals}, label_five_vals: {label_five_vals}"); + println(f"array_five_vals: {array_five_vals}, label_five_vals: {label_five_vals}"); } From 2fc95d2d82b3220267ce7d5815e7073e00ef1360 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Thu, 15 Feb 2024 20:07:16 +0000 Subject: [PATCH 12/45] chore(ci)!: Bump MSRV to 1.72.1 and enforce that ACVM can be published using updated lockfile (#4385) # Description ## Problem\* Resolves #4384 ## Summary\* This PR resolves #4384 by updating the lockfile so that it's using the latest versions of the dependencies used by all of the ACVM packages and then runs the test suite against it to ensure that we still work correctly. ## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. --------- Co-authored-by: kevaundray --- .../workflows/test-rust-workspace-msrv.yml | 112 ++++++++++++++++++ flake.nix | 2 +- rust-toolchain.toml | 2 +- 3 files changed, 114 insertions(+), 2 deletions(-) create mode 100644 .github/workflows/test-rust-workspace-msrv.yml diff --git a/.github/workflows/test-rust-workspace-msrv.yml b/.github/workflows/test-rust-workspace-msrv.yml new file mode 100644 index 00000000000..02444b52856 --- /dev/null +++ b/.github/workflows/test-rust-workspace-msrv.yml @@ -0,0 +1,112 @@ +name: Test (MSRV check) + +# TL;DR https://github.com/noir-lang/noir/issues/4384 +# +# This workflow acts to ensure that we can publish to crates.io, we need this extra check as libraries don't respect the Cargo.lock file committed in this repository. +# We must then always be able to build the workspace using the latest versions of all of our dependencies, so we explicitly update them and build in this workflow. 
+ +on: + pull_request: + merge_group: + push: + branches: + - master + +# This will cancel previous runs when a branch or PR is updated +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.ref || github.run_id }} + cancel-in-progress: true + +jobs: + build-test-artifacts: + name: Build test artifacts + runs-on: ubuntu-latest + timeout-minutes: 30 + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Setup toolchain + uses: dtolnay/rust-toolchain@1.71.1 + with: + targets: x86_64-unknown-linux-gnu + + # We force the ACVM crate and all of its dependencies to update their dependencies + # This ensures that we'll be able to build the crates when they're being published. + - name: Update Cargo.lock + run: | + cargo update --package acvm --aggressive + cargo update --package bn254_blackbox_solver --aggressive + + - uses: Swatinem/rust-cache@v2 + with: + key: x86_64-unknown-linux-gnu-msrv-check + cache-on-failure: true + save-if: ${{ github.event_name != 'merge_group' }} + + - name: Install nextest + uses: taiki-e/install-action@v2 + with: + tool: nextest@0.9.67 + + - name: Build and archive tests + run: cargo nextest archive --workspace --release --archive-file nextest-archive.tar.zst + + - name: Upload archive to workflow + uses: actions/upload-artifact@v4 + with: + name: nextest-archive + path: nextest-archive.tar.zst + + run-tests: + name: "Run tests (partition ${{matrix.partition}})" + runs-on: ubuntu-latest + needs: [build-test-artifacts] + strategy: + fail-fast: false + matrix: + partition: [1, 2, 3, 4] + steps: + - uses: actions/checkout@v4 + + - name: Setup toolchain + uses: dtolnay/rust-toolchain@1.71.1 + with: + targets: x86_64-unknown-linux-gnu + + - name: Install nextest + uses: taiki-e/install-action@v2 + with: + tool: nextest@0.9.67 + + - name: Download archive + uses: actions/download-artifact@v4 + with: + name: nextest-archive + - name: Run tests + run: | + cargo nextest run --archive-file nextest-archive.tar.zst \ + --partition count:${{ matrix.partition }}/4 + + # This is a job which depends on all test jobs and reports the overall status. + # This allows us to add/remove test jobs without having to update the required workflows. + tests-end: + name: Rust End + runs-on: ubuntu-latest + # We want this job to always run (even if the dependant jobs fail) as we want this job to fail rather than skipping. + if: ${{ always() }} + needs: + - run-tests + + steps: + - name: Report overall success + run: | + if [[ $FAIL == true ]]; then + exit 1 + else + exit 0 + fi + env: + # We treat any cancelled, skipped or failing jobs as a failure for the workflow as a whole. 
+ FAIL: ${{ contains(needs.*.result, 'failure') || contains(needs.*.result, 'cancelled') || contains(needs.*.result, 'skipped') }} diff --git a/flake.nix b/flake.nix index 659df12f260..f0d0a2eaebb 100644 --- a/flake.nix +++ b/flake.nix @@ -44,7 +44,7 @@ rustToolchain = fenix.packages.${system}.fromToolchainFile { file = ./rust-toolchain.toml; - sha256 = "sha256-R0F0Risbr74xg9mEYydyebx/z0Wu6HI0/KWwrV30vZo="; + sha256 = "sha256-dxE7lmCFWlq0nl/wKcmYvpP9zqQbBitAQgZ1zx9Ooik="; }; craneLib = (crane.mkLib pkgs).overrideToolchain rustToolchain; diff --git a/rust-toolchain.toml b/rust-toolchain.toml index b6f7edc4bde..955e24485fc 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,5 +1,5 @@ [toolchain] -channel = "1.71.1" +channel = "1.72.1" components = [ "rust-src" ] targets = [ "wasm32-unknown-unknown", "wasm32-wasi", "aarch64-apple-darwin" ] profile = "default" From a7cc16b85f642232939f2a23755700228c27912d Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Thu, 15 Feb 2024 20:08:38 +0000 Subject: [PATCH 13/45] chore(ci): add alerts for failed publishes (#4388) # Description ## Problem\* Resolves ## Summary\* This PR adds an action to open an issue if either the ACVM crates or JS packages fail to publish. ## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. Co-authored-by: kevaundray --- .github/CRATES_IO_PUBLISH_FAILED.md | 10 ++++++++++ .github/JS_PUBLISH_FAILED.md | 11 +++++++++++ .github/workflows/publish-acvm.yml | 13 +++++++++++++ .github/workflows/publish-es-packages.yml | 13 +++++++++++++ 4 files changed, 47 insertions(+) create mode 100644 .github/CRATES_IO_PUBLISH_FAILED.md create mode 100644 .github/JS_PUBLISH_FAILED.md diff --git a/.github/CRATES_IO_PUBLISH_FAILED.md b/.github/CRATES_IO_PUBLISH_FAILED.md new file mode 100644 index 00000000000..ec4de319772 --- /dev/null +++ b/.github/CRATES_IO_PUBLISH_FAILED.md @@ -0,0 +1,10 @@ +--- +title: "ACVM crates failed to publish" +assignees: TomAFrench kevaundray savio-sou +--- + +The {{env.CRATE_VERSION}} release of the ACVM crates failed. + +Check the [Publish ACVM]({{env.WORKFLOW_URL}}) workflow for details. + +This issue was raised by the workflow `{{env.WORKFLOW_NAME}}` diff --git a/.github/JS_PUBLISH_FAILED.md b/.github/JS_PUBLISH_FAILED.md new file mode 100644 index 00000000000..5b9f79aac1f --- /dev/null +++ b/.github/JS_PUBLISH_FAILED.md @@ -0,0 +1,11 @@ +--- +title: "JS packages failed to publish" +assignees: TomAFrench kevaundray savio-sou +labels: js +--- + +The {{env.NPM_TAG}} release of the JS packages failed. + +Check the [Publish JS packages]({{env.WORKFLOW_URL}}) workflow for details. 
+ +This issue was raised by the workflow `{{env.WORKFLOW_NAME}}` diff --git a/.github/workflows/publish-acvm.yml b/.github/workflows/publish-acvm.yml index 0251aaa0377..e19a61fff4f 100644 --- a/.github/workflows/publish-acvm.yml +++ b/.github/workflows/publish-acvm.yml @@ -62,3 +62,16 @@ jobs: cargo publish --package acvm env: CARGO_REGISTRY_TOKEN: ${{ secrets.ACVM_CRATES_IO_TOKEN }} + + # Raise an issue if any package failed to publish + - name: Alert on failed publish + uses: JasonEtco/create-an-issue@v2 + if: ${{ failure() }} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + CRATE_VERSION: ${{ inputs.noir-ref }} + WORKFLOW_NAME: ${{ github.workflow }} + WORKFLOW_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + with: + update_existing: true + filename: .github/JS_PUBLISH_FAILED.md \ No newline at end of file diff --git a/.github/workflows/publish-es-packages.yml b/.github/workflows/publish-es-packages.yml index fa245883ced..d4cd356a138 100644 --- a/.github/workflows/publish-es-packages.yml +++ b/.github/workflows/publish-es-packages.yml @@ -143,3 +143,16 @@ jobs: - name: Publish ES Packages run: yarn publish:all --access public --tag ${{ inputs.npm-tag }} + + # Raise an issue if any package failed to publish + - name: Alert on failed publish + uses: JasonEtco/create-an-issue@v2 + if: ${{ failure() }} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + NPM_TAG: ${{ inputs.npm-tag }} + WORKFLOW_NAME: ${{ github.workflow }} + WORKFLOW_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + with: + update_existing: true + filename: .github/JS_PUBLISH_FAILED.md \ No newline at end of file From f77f702e0cfb81dcce4dd97e274b831e887ba5d2 Mon Sep 17 00:00:00 2001 From: josh crites Date: Thu, 15 Feb 2024 21:11:53 -0500 Subject: [PATCH 14/45] fix(docs): Update noirjs_app for 0.23 (#4378) # Description Updates the tutorial to work with v 0.23.0 ## Problem\* Resolves ## Summary\* ## Additional Context ## Documentation\* Check one: - [ ] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [ ] I have tested the changes locally. - [ ] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. --- .../version-v0.23.0/tutorials/noirjs_app.md | 22 ++++++++++++++++--- 1 file changed, 19 insertions(+), 3 deletions(-) diff --git a/docs/versioned_docs/version-v0.23.0/tutorials/noirjs_app.md b/docs/versioned_docs/version-v0.23.0/tutorials/noirjs_app.md index ad76dd255cc..82899217e61 100644 --- a/docs/versioned_docs/version-v0.23.0/tutorials/noirjs_app.md +++ b/docs/versioned_docs/version-v0.23.0/tutorials/noirjs_app.md @@ -14,9 +14,9 @@ You can find the complete app code for this guide [here](https://github.com/noir :::note -Feel free to use whatever versions, just keep in mind that Nargo and the NoirJS packages are meant to be in sync. For example, Nargo 0.19.x matches `noir_js@0.19.x`, etc. +Feel free to use whatever versions, just keep in mind that Nargo and the NoirJS packages are meant to be in sync. For example, Nargo 0.23.x matches `noir_js@0.23.x`, etc. -In this guide, we will be pinned to 0.19.4. +In this guide, we will be pinned to 0.23.0. ::: @@ -80,7 +80,7 @@ To do this this, go back to the previous folder (`cd ..`) and create a new vite You should see `vite-project` appear in your root folder. 
This seems like a good time to `cd` into it and install our NoirJS packages: ```bash -npm i @noir-lang/backend_barretenberg@0.19.4 @noir-lang/noir_js@0.19.4 +npm i @noir-lang/backend_barretenberg@0.23.0 @noir-lang/noir_js@0.23.0 vite-plugin-top-level-await ``` :::info @@ -99,6 +99,22 @@ At this point in the tutorial, your folder structure should look like this: #### Some cleanup +Add a `vite.config.js` file containing the following: + +```js +import { defineConfig } from 'vite'; +import topLevelAwait from "vite-plugin-top-level-await"; + +export default defineConfig({ + plugins: [ + topLevelAwait({ + promiseExportName: "__tla", + promiseImportName: i => `__tla_${i}` + }) + ] +}) +``` + `npx create vite` is amazing but it creates a bunch of files we don't really need for our simple example. Actually, let's just delete everything except for `index.html`, `main.js` and `package.json`. I feel lighter already. ![my heart is ready for you, noir.js](@site/static/img/memes/titanic.jpeg) From 70866aea976d59dbcbd4af34067fdd8f46555673 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lvaro=20Rodr=C3=ADguez?= Date: Fri, 16 Feb 2024 10:59:53 +0100 Subject: [PATCH 15/45] fix: Enforce matching types of binary ops in SSA (#4391) # Description ## Problem\* Resolves https://github.com/noir-lang/noir/issues/4275 ## Summary\* Adds a check in insert_binary to make sure we don't start codegening non-matching binary ops again. ## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. --- compiler/noirc_evaluator/src/ssa/function_builder/mod.rs | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs b/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs index 9e17595a033..fe71b876879 100644 --- a/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs +++ b/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs @@ -216,6 +216,11 @@ impl FunctionBuilder { operator: BinaryOp, rhs: ValueId, ) -> ValueId { + assert_eq!( + self.type_of_value(lhs), + self.type_of_value(rhs), + "ICE - Binary instruction operands must have the same type" + ); let instruction = Instruction::Binary(Binary { lhs, rhs, operator }); self.insert_instruction(instruction, None).first() } From b283637e092038eb296c468168aec2d41e1c2734 Mon Sep 17 00:00:00 2001 From: josh crites Date: Fri, 16 Feb 2024 12:13:50 -0500 Subject: [PATCH 16/45] fix(docs): update install versions (#4396) # Description Updates the install scripts to reference the correct versions. ## Problem\* Resolves #4271 ## Summary\* ## Additional Context ## Documentation\* Check one: - [ ] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [ ] I have tested the changes locally. - [ ] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. 
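Before the docs diff for patch 16/45 below, a quick note on patch 15/45 above: the new `assert_eq` in `insert_binary` is an internal-consistency guard. The standalone Rust sketch below mirrors that guard with toy types; `Builder` and `Type` here are hypothetical, and the real `FunctionBuilder::insert_binary` is `pub(crate)` inside `noirc_evaluator`, so it cannot be called like this.

```rust
// Toy model of the operand-type check added in patch 15/45; names hypothetical.
#[derive(Debug, Clone, PartialEq)]
enum Type {
    Field,
    Unsigned(u32),
}

struct Builder {
    value_types: Vec<Type>, // type of each SSA value, indexed by value id
}

impl Builder {
    fn type_of_value(&self, id: usize) -> Type {
        self.value_types[id].clone()
    }

    // Mirrors the new guard: reject mismatched operand types up front.
    fn insert_add(&mut self, lhs: usize, rhs: usize) -> usize {
        assert_eq!(
            self.type_of_value(lhs),
            self.type_of_value(rhs),
            "ICE - Binary instruction operands must have the same type"
        );
        self.value_types.push(self.type_of_value(lhs));
        self.value_types.len() - 1
    }
}

fn main() {
    let mut builder = Builder { value_types: vec![Type::Field, Type::Unsigned(32)] };
    let sum = builder.insert_add(0, 0); // Field + Field: accepted
    assert_eq!(builder.type_of_value(sum), Type::Field);
    // builder.insert_add(0, 1); // Field + u32: would panic with the ICE above
}
```

In SSA terms, something like `add v0: Field, v1: u32` now fails loudly during SSA construction instead of turning into silently wrong bytecode.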
--- .../getting_started/installation/other_install_methods.md | 6 +++--- .../getting_started/installation/other_install_methods.md | 6 +++--- .../getting_started/installation/other_install_methods.md | 6 +++--- 3 files changed, 9 insertions(+), 9 deletions(-) diff --git a/docs/docs/getting_started/installation/other_install_methods.md b/docs/docs/getting_started/installation/other_install_methods.md index 489f1eda802..076f26dfd94 100644 --- a/docs/docs/getting_started/installation/other_install_methods.md +++ b/docs/docs/getting_started/installation/other_install_methods.md @@ -112,7 +112,7 @@ Paste and run the following in the terminal to extract and install the binary: ```bash mkdir -p $HOME/.nargo/bin && \ -curl -o $HOME/.nargo/bin/nargo-aarch64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-aarch64-apple-darwin.tar.gz && \ +curl -o $HOME/.nargo/bin/nargo-aarch64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.24.0/nargo-aarch64-apple-darwin.tar.gz && \ tar -xvf $HOME/.nargo/bin/nargo-aarch64-apple-darwin.tar.gz -C $HOME/.nargo/bin/ && \ echo '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.zshrc && \ source ~/.zshrc @@ -122,7 +122,7 @@ source ~/.zshrc ```bash mkdir -p $HOME/.nargo/bin && \ -curl -o $HOME/.nargo/bin/nargo-x86_64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-x86_64-apple-darwin.tar.gz && \ +curl -o $HOME/.nargo/bin/nargo-x86_64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.24.0/nargo-x86_64-apple-darwin.tar.gz && \ tar -xvf $HOME/.nargo/bin/nargo-x86_64-apple-darwin.tar.gz -C $HOME/.nargo/bin/ && \ echo '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.zshrc && \ source ~/.zshrc @@ -132,7 +132,7 @@ source ~/.zshrc ```bash mkdir -p $HOME/.nargo/bin && \ -curl -o $HOME/.nargo/bin/nargo-x86_64-unknown-linux-gnu.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-x86_64-unknown-linux-gnu.tar.gz && \ +curl -o $HOME/.nargo/bin/nargo-x86_64-unknown-linux-gnu.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.24.0/nargo-x86_64-unknown-linux-gnu.tar.gz && \ tar -xvf $HOME/.nargo/bin/nargo-x86_64-unknown-linux-gnu.tar.gz -C $HOME/.nargo/bin/ && \ echo -e '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.bashrc && \ source ~/.bashrc diff --git a/docs/versioned_docs/version-v0.23.0/getting_started/installation/other_install_methods.md b/docs/versioned_docs/version-v0.23.0/getting_started/installation/other_install_methods.md index a532f83750e..746633b628d 100644 --- a/docs/versioned_docs/version-v0.23.0/getting_started/installation/other_install_methods.md +++ b/docs/versioned_docs/version-v0.23.0/getting_started/installation/other_install_methods.md @@ -48,7 +48,7 @@ Paste and run the following in the terminal to extract and install the binary: ```bash mkdir -p $HOME/.nargo/bin && \ -curl -o $HOME/.nargo/bin/nargo-aarch64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-aarch64-apple-darwin.tar.gz && \ +curl -o $HOME/.nargo/bin/nargo-aarch64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.23.0/nargo-aarch64-apple-darwin.tar.gz && \ tar -xvf $HOME/.nargo/bin/nargo-aarch64-apple-darwin.tar.gz -C $HOME/.nargo/bin/ && \ echo '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.zshrc && \ source ~/.zshrc @@ -58,7 +58,7 @@ source ~/.zshrc ```bash mkdir -p $HOME/.nargo/bin && \ -curl -o $HOME/.nargo/bin/nargo-x86_64-apple-darwin.tar.gz -L 
https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-x86_64-apple-darwin.tar.gz && \ +curl -o $HOME/.nargo/bin/nargo-x86_64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.23.0/nargo-x86_64-apple-darwin.tar.gz && \ tar -xvf $HOME/.nargo/bin/nargo-x86_64-apple-darwin.tar.gz -C $HOME/.nargo/bin/ && \ echo '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.zshrc && \ source ~/.zshrc @@ -68,7 +68,7 @@ source ~/.zshrc ```bash mkdir -p $HOME/.nargo/bin && \ -curl -o $HOME/.nargo/bin/nargo-x86_64-unknown-linux-gnu.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-x86_64-unknown-linux-gnu.tar.gz && \ +curl -o $HOME/.nargo/bin/nargo-x86_64-unknown-linux-gnu.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.23.0/nargo-x86_64-unknown-linux-gnu.tar.gz && \ tar -xvf $HOME/.nargo/bin/nargo-x86_64-unknown-linux-gnu.tar.gz -C $HOME/.nargo/bin/ && \ echo -e '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.bashrc && \ source ~/.bashrc diff --git a/docs/versioned_docs/version-v0.24.0/getting_started/installation/other_install_methods.md b/docs/versioned_docs/version-v0.24.0/getting_started/installation/other_install_methods.md index 489f1eda802..076f26dfd94 100644 --- a/docs/versioned_docs/version-v0.24.0/getting_started/installation/other_install_methods.md +++ b/docs/versioned_docs/version-v0.24.0/getting_started/installation/other_install_methods.md @@ -112,7 +112,7 @@ Paste and run the following in the terminal to extract and install the binary: ```bash mkdir -p $HOME/.nargo/bin && \ -curl -o $HOME/.nargo/bin/nargo-aarch64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-aarch64-apple-darwin.tar.gz && \ +curl -o $HOME/.nargo/bin/nargo-aarch64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.24.0/nargo-aarch64-apple-darwin.tar.gz && \ tar -xvf $HOME/.nargo/bin/nargo-aarch64-apple-darwin.tar.gz -C $HOME/.nargo/bin/ && \ echo '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.zshrc && \ source ~/.zshrc @@ -122,7 +122,7 @@ source ~/.zshrc ```bash mkdir -p $HOME/.nargo/bin && \ -curl -o $HOME/.nargo/bin/nargo-x86_64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-x86_64-apple-darwin.tar.gz && \ +curl -o $HOME/.nargo/bin/nargo-x86_64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.24.0/nargo-x86_64-apple-darwin.tar.gz && \ tar -xvf $HOME/.nargo/bin/nargo-x86_64-apple-darwin.tar.gz -C $HOME/.nargo/bin/ && \ echo '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.zshrc && \ source ~/.zshrc @@ -132,7 +132,7 @@ source ~/.zshrc ```bash mkdir -p $HOME/.nargo/bin && \ -curl -o $HOME/.nargo/bin/nargo-x86_64-unknown-linux-gnu.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-x86_64-unknown-linux-gnu.tar.gz && \ +curl -o $HOME/.nargo/bin/nargo-x86_64-unknown-linux-gnu.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.24.0/nargo-x86_64-unknown-linux-gnu.tar.gz && \ tar -xvf $HOME/.nargo/bin/nargo-x86_64-unknown-linux-gnu.tar.gz -C $HOME/.nargo/bin/ && \ echo -e '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.bashrc && \ source ~/.bashrc From 46f22045bd11f96ab95a5c2a338d19dd049d8635 Mon Sep 17 00:00:00 2001 From: Michael J Klein Date: Fri, 16 Feb 2024 14:38:24 -0500 Subject: [PATCH 17/45] chore: remove dependency on generational-arena (#4207) # Description ## Problem\* Resolves https://github.com/noir-lang/noir/issues/15#issuecomment-1914950104 ## Summary\* Replaces `generational-arena`'s 
`Arena` class with a thin wrapper around `Vec` ## Additional Context The thin wrapper is helpful for: 1. `insert` returning the index 2. `iter` iterating over the `(index, item)` pairs ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [ ] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. --------- Co-authored-by: kevaundray Co-authored-by: Jake Fecher Co-authored-by: jfecher --- Cargo.lock | 12 --- Cargo.toml | 4 - aztec_macros/src/lib.rs | 9 +- .../noirc_frontend/src/hir/def_map/mod.rs | 2 +- .../src/hir/resolution/resolver.rs | 2 +- .../noirc_frontend/src/hir/type_check/expr.rs | 15 ++-- .../noirc_frontend/src/hir/type_check/mod.rs | 4 +- .../noirc_frontend/src/hir/type_check/stmt.rs | 2 +- compiler/noirc_frontend/src/hir_def/types.rs | 9 +- .../src/monomorphization/debug.rs | 4 +- .../src/monomorphization/mod.rs | 4 +- compiler/noirc_frontend/src/node_interner.rs | 57 +++++------- compiler/utils/arena/Cargo.toml | 5 -- compiler/utils/arena/src/lib.rs | 88 ++++++++++++++++++- deny.toml | 2 +- 15 files changed, 134 insertions(+), 85 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a7d721ef097..4d8b12d5379 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -213,9 +213,6 @@ checksum = "a4668cab20f66d8d020e1fbc0ebe47217433c1b6c8f2040faf858554e394ace6" [[package]] name = "arena" version = "0.24.0" -dependencies = [ - "generational-arena", -] [[package]] name = "ark-bls12-381" @@ -1842,15 +1839,6 @@ dependencies = [ "byteorder", ] -[[package]] -name = "generational-arena" -version = "0.2.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "877e94aff08e743b651baaea359664321055749b398adff8740a7399af7796e7" -dependencies = [ - "cfg-if 1.0.0", -] - [[package]] name = "generic-array" version = "0.14.7" diff --git a/Cargo.toml b/Cargo.toml index 4f95e3b0821..77058554aff 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -69,12 +69,10 @@ noirc_errors = { path = "compiler/noirc_errors" } noirc_evaluator = { path = "compiler/noirc_evaluator" } noirc_frontend = { path = "compiler/noirc_frontend" } noirc_printable_type = { path = "compiler/noirc_printable_type" } -noir_wasm = { path = "compiler/wasm" } # Noir tooling workspace dependencies nargo = { path = "tooling/nargo" } nargo_fmt = { path = "tooling/nargo_fmt" } -nargo_cli = { path = "tooling/nargo_cli" } nargo_toml = { path = "tooling/nargo_toml" } noir_lsp = { path = "tooling/lsp" } noir_debugger = { path = "tooling/debugger" } @@ -97,8 +95,6 @@ getrandom = "0.2" # Debugger dap = "0.4.1-alpha1" - -cfg-if = "1.0.0" clap = { version = "4.3.19", features = ["derive", "env"] } codespan = { version = "0.11.1", features = ["serialization"] } codespan-lsp = "0.11.1" diff --git a/aztec_macros/src/lib.rs b/aztec_macros/src/lib.rs index 21e3dd56e0d..0b93dbaa634 100644 --- a/aztec_macros/src/lib.rs +++ b/aztec_macros/src/lib.rs @@ -697,7 +697,7 @@ fn collect_traits(context: &HirContext) -> Vec { crates .flat_map(|crate_id| context.def_map(&crate_id).map(|def_map| def_map.modules())) .flatten() - .flat_map(|(_, module)| { + .flat_map(|module| { module.type_definitions().filter_map(|typ| { if let ModuleDefId::TraitId(struct_id) = typ { Some(struct_id) @@ -763,11 +763,11 @@ fn transform_event( HirExpression::Literal(HirLiteral::Str(signature)) if signature == SIGNATURE_PLACEHOLDER => { - let 
selector_literal_id = first_arg_id; + let selector_literal_id = *first_arg_id; let structure = interner.get_struct(struct_id); let signature = event_signature(&structure.borrow()); - interner.update_expression(*selector_literal_id, |expr| { + interner.update_expression(selector_literal_id, |expr| { *expr = HirExpression::Literal(HirLiteral::Str(signature.clone())); }); @@ -833,7 +833,7 @@ fn get_serialized_length( let serialized_trait_impl_kind = traits .iter() - .filter_map(|&trait_id| { + .find_map(|&trait_id| { let r#trait = interner.get_trait(trait_id); if r#trait.borrow().name.0.contents == "Serialize" && r#trait.borrow().generics.len() == 1 @@ -846,7 +846,6 @@ fn get_serialized_length( None } }) - .next() .ok_or(AztecMacroError::CouldNotAssignStorageSlots { secondary_message: Some("Stored data must implement Serialize trait".to_string()), })?; diff --git a/compiler/noirc_frontend/src/hir/def_map/mod.rs b/compiler/noirc_frontend/src/hir/def_map/mod.rs index 8c985e88e0b..8e0dacc294b 100644 --- a/compiler/noirc_frontend/src/hir/def_map/mod.rs +++ b/compiler/noirc_frontend/src/hir/def_map/mod.rs @@ -31,7 +31,7 @@ pub struct LocalModuleId(pub Index); impl LocalModuleId { pub fn dummy_id() -> LocalModuleId { - LocalModuleId(Index::from_raw_parts(std::usize::MAX, std::u64::MAX)) + LocalModuleId(Index::dummy()) } } diff --git a/compiler/noirc_frontend/src/hir/resolution/resolver.rs b/compiler/noirc_frontend/src/hir/resolution/resolver.rs index d4aae133b35..f05a69be7c2 100644 --- a/compiler/noirc_frontend/src/hir/resolution/resolver.rs +++ b/compiler/noirc_frontend/src/hir/resolution/resolver.rs @@ -1463,7 +1463,7 @@ impl<'a> Resolver<'a> { // they're used in expressions. We must do this here since the type // checker does not check definition kinds and otherwise expects // parameters to already be typed. 
- if self.interner.id_type(hir_ident.id) == Type::Error { + if self.interner.definition_type(hir_ident.id) == Type::Error { let typ = Type::polymorphic_integer(self.interner); self.interner.push_definition_type(hir_ident.id, typ); } diff --git a/compiler/noirc_frontend/src/hir/type_check/expr.rs b/compiler/noirc_frontend/src/hir/type_check/expr.rs index b6bb5984bcd..a669a4a246e 100644 --- a/compiler/noirc_frontend/src/hir/type_check/expr.rs +++ b/compiler/noirc_frontend/src/hir/type_check/expr.rs @@ -284,8 +284,9 @@ impl<'interner> TypeChecker<'interner> { Type::Tuple(vecmap(&elements, |elem| self.check_expression(elem))) } HirExpression::Lambda(lambda) => { - let captured_vars = - vecmap(lambda.captures, |capture| self.interner.id_type(capture.ident.id)); + let captured_vars = vecmap(lambda.captures, |capture| { + self.interner.definition_type(capture.ident.id) + }); let env_type: Type = if captured_vars.is_empty() { Type::Unit } else { Type::Tuple(captured_vars) }; @@ -308,7 +309,7 @@ impl<'interner> TypeChecker<'interner> { } }; - self.interner.push_expr_type(expr_id, typ.clone()); + self.interner.push_expr_type(*expr_id, typ.clone()); typ } @@ -459,7 +460,7 @@ impl<'interner> TypeChecker<'interner> { operator: UnaryOp::MutableReference, rhs: method_call.object, })); - self.interner.push_expr_type(&new_object, new_type); + self.interner.push_expr_type(new_object, new_type); self.interner.push_expr_location(new_object, location.span, location.file); new_object }); @@ -485,7 +486,7 @@ impl<'interner> TypeChecker<'interner> { operator: UnaryOp::Dereference { implicitly_added: true }, rhs: object, })); - self.interner.push_expr_type(&object, element.as_ref().clone()); + self.interner.push_expr_type(object, element.as_ref().clone()); self.interner.push_expr_location(object, location.span, location.file); // Recursively dereference to allow for converting &mut &mut T to T @@ -682,8 +683,8 @@ impl<'interner> TypeChecker<'interner> { operator: crate::UnaryOp::Dereference { implicitly_added: true }, rhs: old_lhs, })); - this.interner.push_expr_type(&old_lhs, lhs_type); - this.interner.push_expr_type(access_lhs, element); + this.interner.push_expr_type(old_lhs, lhs_type); + this.interner.push_expr_type(*access_lhs, element); let old_location = this.interner.id_location(old_lhs); this.interner.push_expr_location(*access_lhs, span, old_location.file); diff --git a/compiler/noirc_frontend/src/hir/type_check/mod.rs b/compiler/noirc_frontend/src/hir/type_check/mod.rs index 8952ba83586..225f5756d7a 100644 --- a/compiler/noirc_frontend/src/hir/type_check/mod.rs +++ b/compiler/noirc_frontend/src/hir/type_check/mod.rs @@ -458,7 +458,7 @@ mod test { } fn local_module_id(&self) -> LocalModuleId { - LocalModuleId(arena::Index::from_raw_parts(0, 0)) + LocalModuleId(arena::Index::unsafe_zeroed()) } fn module_id(&self) -> ModuleId { @@ -509,7 +509,7 @@ mod test { let mut def_maps = BTreeMap::new(); let file = FileId::default(); - let mut modules = arena::Arena::new(); + let mut modules = arena::Arena::default(); let location = Location::new(Default::default(), file); modules.insert(ModuleData::new(None, location, false)); diff --git a/compiler/noirc_frontend/src/hir/type_check/stmt.rs b/compiler/noirc_frontend/src/hir/type_check/stmt.rs index 03d61b93e0c..370b4ee7b17 100644 --- a/compiler/noirc_frontend/src/hir/type_check/stmt.rs +++ b/compiler/noirc_frontend/src/hir/type_check/stmt.rs @@ -192,7 +192,7 @@ impl<'interner> TypeChecker<'interner> { mutable = definition.mutable; } - let typ = 
self.interner.id_type(ident.id).instantiate(self.interner).0; + let typ = self.interner.definition_type(ident.id).instantiate(self.interner).0; typ.follow_bindings() }; diff --git a/compiler/noirc_frontend/src/hir_def/types.rs b/compiler/noirc_frontend/src/hir_def/types.rs index 98b47f17cd4..d4d8a948460 100644 --- a/compiler/noirc_frontend/src/hir_def/types.rs +++ b/compiler/noirc_frontend/src/hir_def/types.rs @@ -1672,11 +1672,10 @@ fn convert_array_expression_to_slice( interner.push_expr_location(call, location.span, location.file); interner.push_expr_location(func, location.span, location.file); - interner.push_expr_type(&call, target_type.clone()); - interner.push_expr_type( - &func, - Type::Function(vec![array_type], Box::new(target_type), Box::new(Type::Unit)), - ); + interner.push_expr_type(call, target_type.clone()); + + let func_type = Type::Function(vec![array_type], Box::new(target_type), Box::new(Type::Unit)); + interner.push_expr_type(func, func_type); } impl BinaryTypeOperator { diff --git a/compiler/noirc_frontend/src/monomorphization/debug.rs b/compiler/noirc_frontend/src/monomorphization/debug.rs index d36816e3d37..5837d67660a 100644 --- a/compiler/noirc_frontend/src/monomorphization/debug.rs +++ b/compiler/noirc_frontend/src/monomorphization/debug.rs @@ -143,7 +143,7 @@ impl<'interner> Monomorphizer<'interner> { let index_id = self.interner.push_expr(HirExpression::Literal( HirLiteral::Integer(field_index.into(), false), )); - self.interner.push_expr_type(&index_id, crate::Type::FieldElement); + self.interner.push_expr_type(index_id, crate::Type::FieldElement); self.interner.push_expr_location( index_id, call.location.span, @@ -171,7 +171,7 @@ impl<'interner> Monomorphizer<'interner> { fn intern_var_id(&mut self, var_id: DebugVarId, location: &Location) -> ExprId { let var_id_literal = HirLiteral::Integer((var_id.0 as u128).into(), false); let expr_id = self.interner.push_expr(HirExpression::Literal(var_id_literal)); - self.interner.push_expr_type(&expr_id, crate::Type::FieldElement); + self.interner.push_expr_type(expr_id, crate::Type::FieldElement); self.interner.push_expr_location(expr_id, location.span, location.file); expr_id } diff --git a/compiler/noirc_frontend/src/monomorphization/mod.rs b/compiler/noirc_frontend/src/monomorphization/mod.rs index 31a254d9f0a..0f243e47bbe 100644 --- a/compiler/noirc_frontend/src/monomorphization/mod.rs +++ b/compiler/noirc_frontend/src/monomorphization/mod.rs @@ -696,7 +696,7 @@ impl<'interner> Monomorphizer<'interner> { let mutable = definition.mutable; let definition = self.lookup_local(ident.id)?; - let typ = self.convert_type(&self.interner.id_type(ident.id)); + let typ = self.convert_type(&self.interner.definition_type(ident.id)); Some(ast::Ident { location: Some(ident.location), mutable, definition, name, typ }) } @@ -1040,7 +1040,7 @@ impl<'interner> Monomorphizer<'interner> { ) { match hir_argument { HirExpression::Ident(ident) => { - let typ = self.interner.id_type(ident.id); + let typ = self.interner.definition_type(ident.id); let typ: Type = typ.follow_bindings(); let is_fmt_str = match typ { // A format string has many different possible types that need to be handled. 
diff --git a/compiler/noirc_frontend/src/node_interner.rs b/compiler/noirc_frontend/src/node_interner.rs index 815bc4c5e9c..7d533947f65 100644 --- a/compiler/noirc_frontend/src/node_interner.rs +++ b/compiler/noirc_frontend/src/node_interner.rs @@ -75,13 +75,14 @@ pub struct NodeInterner { // Type checking map // - // Notice that we use `Index` as the Key and not an ExprId or IdentId - // Therefore, If a raw index is passed in, then it is not safe to assume that it will have - // a Type, as not all Ids have types associated to them. - // Further note, that an ExprId and an IdentId will never have the same underlying Index - // Because we use one Arena to store all Definitions/Nodes + // This should only be used with indices from the `nodes` arena. + // Otherwise the indices used may overwrite other existing indices. + // Each type for each index is filled in during type checking. id_to_type: HashMap<Index, Type>, + // Similar to `id_to_type` but maps definitions to their type + definition_to_type: HashMap<DefinitionId, Type>, + // Struct map. // // Each struct definition is possibly shared across multiple type nodes. @@ -277,12 +278,6 @@ impl DefinitionId { } } -impl From<DefinitionId> for Index { - fn from(id: DefinitionId) -> Self { - Index::from_raw_parts(id.0, u64::MAX) - } -} - /// An ID for a global value #[derive(Debug, Eq, PartialEq, Hash, Clone, Copy)] pub struct GlobalId(usize); @@ -302,7 +297,7 @@ impl StmtId { // This can be anything, as the program will ultimately fail // after resolution pub fn dummy_id() -> StmtId { - StmtId(Index::from_raw_parts(std::usize::MAX, 0)) + StmtId(Index::dummy()) } } @@ -311,7 +306,7 @@ pub struct ExprId(Index); impl ExprId { pub fn empty_block_id() -> ExprId { - ExprId(Index::from_raw_parts(0, 0)) + ExprId(Index::unsafe_zeroed()) } } #[derive(Debug, Eq, PartialEq, Hash, Copy, Clone)] @@ -322,7 +317,7 @@ impl FuncId { // This can be anything, as the program will ultimately fail // after resolution pub fn dummy_id() -> FuncId { - FuncId(Index::from_raw_parts(std::usize::MAX, 0)) + FuncId(Index::dummy()) } } @@ -396,23 +391,9 @@ macro_rules! into_index { }; } -macro_rules! 
partialeq { - ($id_type:ty) => { - impl PartialEq for &$id_type { - fn eq(&self, other: &usize) -> bool { - let (index, _) = self.0.into_raw_parts(); - index == *other - } - } - }; -} - into_index!(ExprId); into_index!(StmtId); -partialeq!(ExprId); -partialeq!(StmtId); - /// A Definition enum specifies anything that we can intern in the NodeInterner /// We use one Arena for all types that can be interned as that has better cache locality /// This data structure is never accessed directly, so API wise there is no difference between using @@ -496,6 +477,7 @@ impl Default for NodeInterner { id_to_location: HashMap::new(), definitions: vec![], id_to_type: HashMap::new(), + definition_to_type: HashMap::new(), structs: HashMap::new(), struct_attributes: HashMap::new(), type_aliases: Vec::new(), @@ -545,10 +527,15 @@ impl NodeInterner { } /// Store the type for an interned expression - pub fn push_expr_type(&mut self, expr_id: &ExprId, typ: Type) { + pub fn push_expr_type(&mut self, expr_id: ExprId, typ: Type) { self.id_to_type.insert(expr_id.into(), typ); } + /// Store the type for an interned expression + pub fn push_definition_type(&mut self, definition_id: DefinitionId, typ: Type) { + self.definition_to_type.insert(definition_id, typ); + } + pub fn push_empty_trait(&mut self, type_id: TraitId, unresolved_trait: &UnresolvedTrait) { let self_type_typevar_id = self.next_type_variable_id(); @@ -660,11 +647,6 @@ impl NodeInterner { } } - /// Store the type for an interned Identifier - pub fn push_definition_type(&mut self, definition_id: DefinitionId, typ: Type) { - self.id_to_type.insert(definition_id.into(), typ); - } - /// Store [Location] of [Type] reference pub fn push_type_ref_location(&mut self, typ: Type, location: Location) { self.type_ref_locations.push((typ, location)); @@ -980,8 +962,13 @@ impl NodeInterner { self.id_to_type.get(&index.into()).cloned().unwrap_or(Type::Error) } + /// Returns the type of the definition or `Type::Error` if it was not found. + pub fn definition_type(&self, id: DefinitionId) -> Type { + self.definition_to_type.get(&id).cloned().unwrap_or(Type::Error) + } + pub fn id_type_substitute_trait_as_type(&self, def_id: DefinitionId) -> Type { - let typ = self.id_type(def_id); + let typ = self.definition_type(def_id); if let Type::Function(args, ret, env) = &typ { let def = self.definition(def_id); if let Type::TraitAsType(..) = ret.as_ref() { diff --git a/compiler/utils/arena/Cargo.toml b/compiler/utils/arena/Cargo.toml index e82201a2cf4..41c6ebc9a8b 100644 --- a/compiler/utils/arena/Cargo.toml +++ b/compiler/utils/arena/Cargo.toml @@ -4,8 +4,3 @@ version.workspace = true authors.workspace = true edition.workspace = true license.workspace = true - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] -generational-arena = "0.2.8" diff --git a/compiler/utils/arena/src/lib.rs b/compiler/utils/arena/src/lib.rs index fc19f44ab6e..2d117304e16 100644 --- a/compiler/utils/arena/src/lib.rs +++ b/compiler/utils/arena/src/lib.rs @@ -3,5 +3,89 @@ #![warn(unreachable_pub)] #![warn(clippy::semicolon_if_nothing_returned)] -// For now we use a wrapper around generational-arena -pub use generational_arena::{Arena, Index}; +#[derive(Clone, Copy, Debug, Eq, Ord, PartialEq, PartialOrd, Hash)] +pub struct Index(usize); + +impl Index { + #[cfg(test)] + pub fn test_new(index: usize) -> Index { + Self(index) + } + + /// Return a dummy index (max value internally). + /// This should be avoided over `Option` if possible. 
+ pub fn dummy() -> Self { + Self(usize::MAX) + } + + /// Return the zeroed index. This is unsafe since we don't know + /// if this is a valid index for any particular map yet. + pub fn unsafe_zeroed() -> Self { + Self(0) + } +} + +#[derive(Clone, Debug)] +pub struct Arena<T> { + pub vec: Vec<T>, +} + +impl<T> Default for Arena<T> { + fn default() -> Self { + Self { vec: Vec::new() } + } +} + +impl<T> core::ops::Index<Index> for Arena<T> { + type Output = T; + + fn index(&self, index: Index) -> &Self::Output { + self.vec.index(index.0) + } +} + +impl<T> core::ops::IndexMut<Index> for Arena<T> { + fn index_mut(&mut self, index: Index) -> &mut Self::Output { + self.vec.index_mut(index.0) + } +} + +impl<T> IntoIterator for Arena<T> { + type Item = T; + + type IntoIter = <Vec<T> as IntoIterator>::IntoIter; + + fn into_iter(self) -> Self::IntoIter { + self.vec.into_iter() + } +} + +impl<'a, T> IntoIterator for &'a Arena<T> { + type Item = &'a T; + + type IntoIter = <&'a Vec<T> as IntoIterator>::IntoIter; + + fn into_iter(self) -> Self::IntoIter { + self.vec.iter() + } +} + +impl<T> Arena<T> { + pub fn insert(&mut self, item: T) -> Index { + let index = self.vec.len(); + self.vec.push(item); + Index(index) + } + + pub fn get(&self, index: Index) -> Option<&T> { + self.vec.get(index.0) + } + + pub fn get_mut(&mut self, index: Index) -> Option<&mut T> { + self.vec.get_mut(index.0) + } + + pub fn iter(&self) -> impl Iterator<Item = (Index, &T)> { + self.vec.iter().enumerate().map(|(index, item)| (Index(index), item)) + } +} diff --git a/deny.toml b/deny.toml index a3e506984c9..72150f08a3c 100644 --- a/deny.toml +++ b/deny.toml @@ -54,7 +54,7 @@ allow = [ "LicenseRef-ring", # https://github.com/rustls/webpki/blob/main/LICENSE ISC Style "LicenseRef-rustls-webpki", - # bitmaps 2.1.0, generational-arena 0.2.9,im 15.1.0 + # bitmaps 2.1.0, im 15.1.0 "MPL-2.0", # Boost Software License "BSL-1.0", From 5051ec4d434a9e5cf405c68357faaf213e68de9e Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Sat, 17 Feb 2024 20:44:54 +0000 Subject: [PATCH 18/45] fix: correct invalid brillig codegen for `EmbeddedCurvePoint.add` (#4382) # Description ## Problem\* Resolves https://github.com/noir-lang/noir/issues/4260 ## Summary\* The error is explained in the comment I've added to the stdlib. This is a quick fix and we can clean it up once we're making serialisation changes in `aztec-packages` again. ## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. --- noir_stdlib/src/scalar_mul.nr | 11 ++++++++++- .../execution_success/brillig_scalar_mul/src/main.nr | 9 +++++++++ 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/noir_stdlib/src/scalar_mul.nr b/noir_stdlib/src/scalar_mul.nr index 26378e4839a..1a7f1ad707c 100644 --- a/noir_stdlib/src/scalar_mul.nr +++ b/noir_stdlib/src/scalar_mul.nr @@ -32,5 +32,14 @@ pub fn fixed_base_embedded_curve( // docs:end:fixed_base_embedded_curve {} +// This is a hack as returning an `EmbeddedCurvePoint` from a foreign function in brillig returns a [BrilligVariable::SingleAddr; 2] rather than BrilligVariable::BrilligArray +// as is defined in the brillig bytecode format. This is a workaround which allows us to fix this without modifying the serialization format.
+fn embedded_curve_add(point1: EmbeddedCurvePoint, point2: EmbeddedCurvePoint) -> EmbeddedCurvePoint { + let point_array = embedded_curve_add_array_return(point1, point2); + let x = point_array[0]; + let y = point_array[1]; + EmbeddedCurvePoint { x, y } +} + #[foreign(embedded_curve_add)] -fn embedded_curve_add(_point1: EmbeddedCurvePoint, _point2: EmbeddedCurvePoint) -> EmbeddedCurvePoint {} +fn embedded_curve_add_array_return(_point1: EmbeddedCurvePoint, _point2: EmbeddedCurvePoint) -> [Field; 2] {} diff --git a/test_programs/execution_success/brillig_scalar_mul/src/main.nr b/test_programs/execution_success/brillig_scalar_mul/src/main.nr index ab2f79eb815..c7c3a85a4ff 100644 --- a/test_programs/execution_success/brillig_scalar_mul/src/main.nr +++ b/test_programs/execution_success/brillig_scalar_mul/src/main.nr @@ -20,4 +20,13 @@ unconstrained fn main( let res = std::scalar_mul::fixed_base_embedded_curve(priv_key, 0); assert(res[0] == pub_x); assert(res[1] == pub_y); + + let pub_point= std::scalar_mul::EmbeddedCurvePoint { x: pub_x, y: pub_y }; + let g1_y = 17631683881184975370165255887551781615748388533673675138860; + let g1= std::scalar_mul::EmbeddedCurvePoint { x: 1, y: g1_y }; + + let res = pub_point.double(); + let double = g1.add(g1); + + assert(double.x == res.x); } From 8aa39c042a4b689ef03fd761323163bac50aee1d Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Sat, 17 Feb 2024 20:47:04 +0000 Subject: [PATCH 19/45] chore: bump webpack dependencies (#4346) # Description ## Problem\* Resolves ## Summary\* Bumping all the webpack dependencies to see if that helps with build stability. ## Additional Context ## Documentation\* Check one: - [ ] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [ ] I have tested the changes locally. - [ ] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. 
--- compiler/wasm/package.json | 9 +- compiler/wasm/webpack.config.ts | 10 +- yarn.lock | 483 ++++++++++++++++++++++++++++---- 3 files changed, 432 insertions(+), 70 deletions(-) diff --git a/compiler/wasm/package.json b/compiler/wasm/package.json index b71058b3367..67584a2def1 100644 --- a/compiler/wasm/package.json +++ b/compiler/wasm/package.json @@ -61,10 +61,10 @@ "assert": "^2.1.0", "browserify-fs": "^1.0.0", "chai": "^4.3.10", - "copy-webpack-plugin": "^11.0.0", + "copy-webpack-plugin": "^12.0.2", "eslint": "^8.56.0", "eslint-plugin-prettier": "^5.0.0", - "html-webpack-plugin": "^5.5.4", + "html-webpack-plugin": "^5.6.0", "memfs": "^4.6.0", "mocha": "^10.2.0", "mocha-each": "^2.0.1", @@ -78,8 +78,9 @@ "typescript": "~5.2.2", "unzipit": "^1.4.3", "url": "^0.11.3", - "webpack": "^5.49.0", - "webpack-cli": "^4.7.2" + "webpack": "^5.90.1", + "webpack-cli": "^5.1.4", + "webpack-dev-server": "^5.0.0" }, "dependencies": { "@noir-lang/types": "workspace:*", diff --git a/compiler/wasm/webpack.config.ts b/compiler/wasm/webpack.config.ts index d5d70df2b8a..456c5d82dca 100644 --- a/compiler/wasm/webpack.config.ts +++ b/compiler/wasm/webpack.config.ts @@ -1,6 +1,6 @@ import { resolve, join } from 'path'; import webpack from 'webpack'; -import 'webpack-dev-server'; +import type { Configuration as DevServerConfiguration } from 'webpack-dev-server'; import WasmPackPlugin from '@wasm-tool/wasm-pack-plugin'; import HtmlWebpackPlugin from 'html-webpack-plugin'; import CopyWebpackPlugin from 'copy-webpack-plugin'; @@ -25,6 +25,10 @@ const config: webpack.Configuration = { }, }; +const devServerConfig: DevServerConfiguration = { + static: join(__dirname, 'dist'), +}; + const webConfig: webpack.Configuration = { name: 'web', entry: './src/index.mts', @@ -74,9 +78,7 @@ const webConfig: webpack.Configuration = { }, ], }, - devServer: { - static: join(__dirname, 'dist'), - }, + devServer: devServerConfig, resolve: { ...config.resolve, alias: { diff --git a/yarn.lock b/yarn.lock index 84cf3e593c6..ace7959279f 100644 --- a/yarn.lock +++ b/yarn.lock @@ -4050,6 +4050,16 @@ __metadata: languageName: node linkType: hard +"@jridgewell/trace-mapping@npm:^0.3.20": + version: 0.3.22 + resolution: "@jridgewell/trace-mapping@npm:0.3.22" + dependencies: + "@jridgewell/resolve-uri": ^3.1.0 + "@jridgewell/sourcemap-codec": ^1.4.14 + checksum: ac7dd2cfe0b479aa1b81776d40d789243131cc792dc8b6b6a028c70fcd6171958ae1a71bf67b618ffe3c0c3feead9870c095ee46a5e30319410d92976b28f498 + languageName: node + linkType: hard + "@leichtgewicht/ip-codec@npm:^2.0.1": version: 2.0.4 resolution: "@leichtgewicht/ip-codec@npm:2.0.4" @@ -4466,10 +4476,10 @@ __metadata: assert: ^2.1.0 browserify-fs: ^1.0.0 chai: ^4.3.10 - copy-webpack-plugin: ^11.0.0 + copy-webpack-plugin: ^12.0.2 eslint: ^8.56.0 eslint-plugin-prettier: ^5.0.0 - html-webpack-plugin: ^5.5.4 + html-webpack-plugin: ^5.6.0 memfs: ^4.6.0 mocha: ^10.2.0 mocha-each: ^2.0.1 @@ -4484,8 +4494,9 @@ __metadata: typescript: ~5.2.2 unzipit: ^1.4.3 url: ^0.11.3 - webpack: ^5.49.0 - webpack-cli: ^4.7.2 + webpack: ^5.90.1 + webpack-cli: ^5.1.4 + webpack-dev-server: ^5.0.0 languageName: unknown linkType: soft @@ -5267,6 +5278,13 @@ __metadata: languageName: node linkType: hard +"@sindresorhus/merge-streams@npm:^2.1.0": + version: 2.1.0 + resolution: "@sindresorhus/merge-streams@npm:2.1.0" + checksum: 8aa91a3fca68d4ba78f81cad80f2dc280fa82b6c49c9fa5fe37438b6b9082cf993adb2309163f924bef9d7173b2fae6bb40fc4070a344cbab8bcc19eb1ee0b7c + languageName: node + linkType: hard + "@sinonjs/commons@npm:^2.0.0": 
version: 2.0.0 resolution: "@sinonjs/commons@npm:2.0.0" @@ -5621,7 +5639,7 @@ __metadata: languageName: node linkType: hard -"@types/bonjour@npm:^3.5.9": +"@types/bonjour@npm:^3.5.13, @types/bonjour@npm:^3.5.9": version: 3.5.13 resolution: "@types/bonjour@npm:3.5.13" dependencies: @@ -5663,7 +5681,7 @@ __metadata: languageName: node linkType: hard -"@types/connect-history-api-fallback@npm:^1.3.5": +"@types/connect-history-api-fallback@npm:^1.3.5, @types/connect-history-api-fallback@npm:^1.5.4": version: 1.5.4 resolution: "@types/connect-history-api-fallback@npm:1.5.4" dependencies: @@ -5753,7 +5771,7 @@ __metadata: languageName: node linkType: hard -"@types/estree@npm:*, @types/estree@npm:1.0.5, @types/estree@npm:^1.0.0": +"@types/estree@npm:*, @types/estree@npm:1.0.5, @types/estree@npm:^1.0.0, @types/estree@npm:^1.0.5": version: 1.0.5 resolution: "@types/estree@npm:1.0.5" checksum: dd8b5bed28e6213b7acd0fb665a84e693554d850b0df423ac8076cc3ad5823a6bc26b0251d080bdc545af83179ede51dd3f6fa78cad2c46ed1f29624ddf3e41a @@ -5779,7 +5797,7 @@ __metadata: languageName: node linkType: hard -"@types/express@npm:*, @types/express@npm:^4.17.13": +"@types/express@npm:*, @types/express@npm:^4.17.13, @types/express@npm:^4.17.21": version: 4.17.21 resolution: "@types/express@npm:4.17.21" dependencies: @@ -6233,6 +6251,13 @@ __metadata: languageName: node linkType: hard +"@types/retry@npm:0.12.2": + version: 0.12.2 + resolution: "@types/retry@npm:0.12.2" + checksum: e5675035717b39ce4f42f339657cae9637cf0c0051cf54314a6a2c44d38d91f6544be9ddc0280587789b6afd056be5d99dbe3e9f4df68c286c36321579b1bf4a + languageName: node + linkType: hard + "@types/sax@npm:^1.2.1": version: 1.2.7 resolution: "@types/sax@npm:1.2.7" @@ -6275,7 +6300,7 @@ __metadata: languageName: node linkType: hard -"@types/serve-index@npm:^1.9.1": +"@types/serve-index@npm:^1.9.1, @types/serve-index@npm:^1.9.4": version: 1.9.4 resolution: "@types/serve-index@npm:1.9.4" dependencies: @@ -6284,7 +6309,7 @@ __metadata: languageName: node linkType: hard -"@types/serve-static@npm:*, @types/serve-static@npm:^1.13.10": +"@types/serve-static@npm:*, @types/serve-static@npm:^1.13.10, @types/serve-static@npm:^1.15.5": version: 1.15.5 resolution: "@types/serve-static@npm:1.15.5" dependencies: @@ -6311,7 +6336,7 @@ __metadata: languageName: node linkType: hard -"@types/sockjs@npm:^0.3.33": +"@types/sockjs@npm:^0.3.33, @types/sockjs@npm:^0.3.36": version: 0.3.36 resolution: "@types/sockjs@npm:0.3.36" dependencies: @@ -6343,7 +6368,7 @@ __metadata: languageName: node linkType: hard -"@types/ws@npm:^8.5.5": +"@types/ws@npm:^8.5.10, @types/ws@npm:^8.5.5": version: 8.5.10 resolution: "@types/ws@npm:8.5.10" dependencies: @@ -7181,36 +7206,36 @@ __metadata: languageName: node linkType: hard -"@webpack-cli/configtest@npm:^1.2.0": - version: 1.2.0 - resolution: "@webpack-cli/configtest@npm:1.2.0" +"@webpack-cli/configtest@npm:^2.1.1": + version: 2.1.1 + resolution: "@webpack-cli/configtest@npm:2.1.1" peerDependencies: - webpack: 4.x.x || 5.x.x - webpack-cli: 4.x.x - checksum: a2726cd9ec601d2b57e5fc15e0ebf5200a8892065e735911269ac2038e62be4bfc176ea1f88c2c46ff09b4d05d4c10ae045e87b3679372483d47da625a327e28 + webpack: 5.x.x + webpack-cli: 5.x.x + checksum: 9f9f9145c2d05471fc83d426db1df85cf49f329836b0c4b9f46b6948bed4b013464c00622b136d2a0a26993ce2306976682592245b08ee717500b1db45009a72 languageName: node linkType: hard -"@webpack-cli/info@npm:^1.5.0": - version: 1.5.0 - resolution: "@webpack-cli/info@npm:1.5.0" - dependencies: - envinfo: ^7.7.3 +"@webpack-cli/info@npm:^2.0.2": + 
version: 2.0.2 + resolution: "@webpack-cli/info@npm:2.0.2" peerDependencies: - webpack-cli: 4.x.x - checksum: 7f56fe037cd7d1fd5c7428588519fbf04a0cad33925ee4202ffbafd00f8ec1f2f67d991245e687d50e0f3e23f7b7814273d56cb9f7da4b05eed47c8d815c6296 + webpack: 5.x.x + webpack-cli: 5.x.x + checksum: 8f9a178afca5c82e113aed1efa552d64ee5ae4fdff63fe747c096a981ec74f18a5d07bd6e89bbe6715c3e57d96eea024a410e58977169489fe1df044c10dd94e languageName: node linkType: hard -"@webpack-cli/serve@npm:^1.7.0": - version: 1.7.0 - resolution: "@webpack-cli/serve@npm:1.7.0" +"@webpack-cli/serve@npm:^2.0.5": + version: 2.0.5 + resolution: "@webpack-cli/serve@npm:2.0.5" peerDependencies: - webpack-cli: 4.x.x + webpack: 5.x.x + webpack-cli: 5.x.x peerDependenciesMeta: webpack-dev-server: optional: true - checksum: d475e8effa23eb7ff9a48b14d4de425989fd82f906ce71c210921cc3852327c22873be00c35e181a25a6bd03d424ae2b83e7f3b3f410ac7ee31b128ab4ac7713 + checksum: 75f0e54681796d567a71ac3e2781d2901a8d8cf1cdfc82f261034dddac59a8343e8c3bc5e32b4bb9d6766759ba49fb29a5cd86ef1701d79c506fe886bb63ac75 languageName: node linkType: hard @@ -8050,6 +8075,16 @@ __metadata: languageName: node linkType: hard +"bonjour-service@npm:^1.2.1": + version: 1.2.1 + resolution: "bonjour-service@npm:1.2.1" + dependencies: + fast-deep-equal: ^3.1.3 + multicast-dns: ^7.2.5 + checksum: b65b3e6e3a07e97f2da5806afb76f3946d5a6426b72e849a0236dc3c9d3612fb8c5359ebade4be7eb63f74a37670c53a53be2ff17f4f709811fda77f600eb25b + languageName: node + linkType: hard + "boolbase@npm:^1.0.0": version: 1.0.0 resolution: "boolbase@npm:1.0.0" @@ -8300,6 +8335,15 @@ __metadata: languageName: node linkType: hard +"bundle-name@npm:^4.1.0": + version: 4.1.0 + resolution: "bundle-name@npm:4.1.0" + dependencies: + run-applescript: ^7.0.0 + checksum: 1d966c8d2dbf4d9d394e53b724ac756c2414c45c01340b37743621f59cc565a435024b394ddcb62b9b335d1c9a31f4640eb648c3fec7f97ee74dc0694c9beb6c + languageName: node + linkType: hard + "bytes@npm:3.0.0": version: 3.0.0 resolution: "bytes@npm:3.0.0" @@ -8669,6 +8713,25 @@ __metadata: languageName: node linkType: hard +"chokidar@npm:^3.6.0": + version: 3.6.0 + resolution: "chokidar@npm:3.6.0" + dependencies: + anymatch: ~3.1.2 + braces: ~3.0.2 + fsevents: ~2.3.2 + glob-parent: ~5.1.2 + is-binary-path: ~2.1.0 + is-glob: ~4.0.1 + normalize-path: ~3.0.0 + readdirp: ~3.6.0 + dependenciesMeta: + fsevents: + optional: true + checksum: d2f29f499705dcd4f6f3bbed79a9ce2388cf530460122eed3b9c48efeab7a4e28739c6551fd15bec9245c6b9eeca7a32baa64694d64d9b6faeb74ddb8c4a413d + languageName: node + linkType: hard + "chownr@npm:^1.1.1": version: 1.1.4 resolution: "chownr@npm:1.1.4" @@ -9101,7 +9164,7 @@ __metadata: languageName: node linkType: hard -"commander@npm:^7.0.0, commander@npm:^7.2.0": +"commander@npm:^7.2.0": version: 7.2.0 resolution: "commander@npm:7.2.0" checksum: 53501cbeee61d5157546c0bef0fedb6cdfc763a882136284bed9a07225f09a14b82d2a84e7637edfd1a679fb35ed9502fd58ef1d091e6287f60d790147f68ddc @@ -9327,6 +9390,22 @@ __metadata: languageName: node linkType: hard +"copy-webpack-plugin@npm:^12.0.2": + version: 12.0.2 + resolution: "copy-webpack-plugin@npm:12.0.2" + dependencies: + fast-glob: ^3.3.2 + glob-parent: ^6.0.1 + globby: ^14.0.0 + normalize-path: ^3.0.0 + schema-utils: ^4.2.0 + serialize-javascript: ^6.0.2 + peerDependencies: + webpack: ^5.1.0 + checksum: 98127735336c6db5924688486d3a1854a41835963d0c0b81695b2e3d58c6675164be7d23dee7090b84a56d3c9923175d3d0863ac1942bcc3317d2efc1962b927 + languageName: node + linkType: hard + "core-js-compat@npm:^3.31.0, 
core-js-compat@npm:^3.33.1": version: 3.34.0 resolution: "core-js-compat@npm:3.34.0" @@ -9949,6 +10028,13 @@ __metadata: languageName: node linkType: hard +"default-browser-id@npm:^5.0.0": + version: 5.0.0 + resolution: "default-browser-id@npm:5.0.0" + checksum: 185bfaecec2c75fa423544af722a3469b20704c8d1942794a86e4364fe7d9e8e9f63241a5b769d61c8151993bc65833a5b959026fa1ccea343b3db0a33aa6deb + languageName: node + linkType: hard + "default-browser@npm:^4.0.0": version: 4.0.0 resolution: "default-browser@npm:4.0.0" @@ -9961,6 +10047,16 @@ __metadata: languageName: node linkType: hard +"default-browser@npm:^5.2.1": + version: 5.2.1 + resolution: "default-browser@npm:5.2.1" + dependencies: + bundle-name: ^4.1.0 + default-browser-id: ^5.0.0 + checksum: afab7eff7b7f5f7a94d9114d1ec67273d3fbc539edf8c0f80019879d53aa71e867303c6f6d7cffeb10a6f3cfb59d4f963dba3f9c96830b4540cc7339a1bf9840 + languageName: node + linkType: hard + "default-gateway@npm:^6.0.3": version: 6.0.3 resolution: "default-gateway@npm:6.0.3" @@ -12202,6 +12298,20 @@ __metadata: languageName: node linkType: hard +"globby@npm:^14.0.0": + version: 14.0.1 + resolution: "globby@npm:14.0.1" + dependencies: + "@sindresorhus/merge-streams": ^2.1.0 + fast-glob: ^3.3.2 + ignore: ^5.2.4 + path-type: ^5.0.0 + slash: ^5.1.0 + unicorn-magic: ^0.1.0 + checksum: 33568444289afb1135ad62d52d5e8412900cec620e3b6ece533afa46d004066f14b97052b643833d7cf4ee03e7fac571430130cde44c333df91a45d313105170 + languageName: node + linkType: hard + "gopd@npm:^1.0.1": version: 1.0.1 resolution: "gopd@npm:1.0.1" @@ -12791,7 +12901,7 @@ __metadata: languageName: node linkType: hard -"html-entities@npm:^2.3.2": +"html-entities@npm:^2.3.2, html-entities@npm:^2.4.0": version: 2.4.0 resolution: "html-entities@npm:2.4.0" checksum: 25bea32642ce9ebd0eedc4d24381883ecb0335ccb8ac26379a0958b9b16652fdbaa725d70207ce54a51db24103436a698a8e454397d3ba8ad81460224751f1dc @@ -12860,7 +12970,7 @@ __metadata: languageName: node linkType: hard -"html-webpack-plugin@npm:^5.5.0, html-webpack-plugin@npm:^5.5.3, html-webpack-plugin@npm:^5.5.4": +"html-webpack-plugin@npm:^5.5.0, html-webpack-plugin@npm:^5.5.3": version: 5.5.4 resolution: "html-webpack-plugin@npm:5.5.4" dependencies: @@ -12875,6 +12985,27 @@ __metadata: languageName: node linkType: hard +"html-webpack-plugin@npm:^5.6.0": + version: 5.6.0 + resolution: "html-webpack-plugin@npm:5.6.0" + dependencies: + "@types/html-minifier-terser": ^6.0.0 + html-minifier-terser: ^6.0.2 + lodash: ^4.17.21 + pretty-error: ^4.0.0 + tapable: ^2.0.0 + peerDependencies: + "@rspack/core": 0.x || 1.x + webpack: ^5.20.0 + peerDependenciesMeta: + "@rspack/core": + optional: true + webpack: + optional: true + checksum: 32a6e41da538e798fd0be476637d7611a5e8a98a3508f031996e9eb27804dcdc282cb01f847cf5d066f21b49cfb8e21627fcf977ffd0c9bea81cf80e5a65070d + languageName: node + linkType: hard + "htmlparser2@npm:^6.1.0": version: 6.1.0 resolution: "htmlparser2@npm:6.1.0" @@ -13305,10 +13436,10 @@ __metadata: languageName: node linkType: hard -"interpret@npm:^2.2.0": - version: 2.2.0 - resolution: "interpret@npm:2.2.0" - checksum: f51efef7cb8d02da16408ffa3504cd6053014c5aeb7bb8c223727e053e4235bf565e45d67028b0c8740d917c603807aa3c27d7bd2f21bf20b6417e2bb3e5fd6e +"interpret@npm:^3.1.1": + version: 3.1.1 + resolution: "interpret@npm:3.1.1" + checksum: 35cebcf48c7351130437596d9ab8c8fe131ce4038da4561e6d665f25640e0034702a031cf7e3a5cea60ac7ac548bf17465e0571ede126f3d3a6933152171ac82 languageName: node linkType: hard @@ -13351,7 +13482,7 @@ __metadata: languageName: node linkType: hard 
-"ipaddr.js@npm:^2.0.1": +"ipaddr.js@npm:^2.0.1, ipaddr.js@npm:^2.1.0": version: 2.1.0 resolution: "ipaddr.js@npm:2.1.0" checksum: 807a054f2bd720c4d97ee479d6c9e865c233bea21f139fb8dabd5a35c4226d2621c42e07b4ad94ff3f82add926a607d8d9d37c625ad0319f0e08f9f2bd1968e2 @@ -13609,6 +13740,13 @@ __metadata: languageName: node linkType: hard +"is-network-error@npm:^1.0.0": + version: 1.0.1 + resolution: "is-network-error@npm:1.0.1" + checksum: 165d61500c4186c62db5a3a693d6bfa14ca40fe9b471ef4cd4f27b20ef6760880faf5386dc01ca9867531631782941fedaa94521d09959edf71f046e393c7b91 + languageName: node + linkType: hard + "is-npm@npm:^5.0.0": version: 5.0.0 resolution: "is-npm@npm:5.0.0" @@ -13792,6 +13930,15 @@ __metadata: languageName: node linkType: hard +"is-wsl@npm:^3.1.0": + version: 3.1.0 + resolution: "is-wsl@npm:3.1.0" + dependencies: + is-inside-container: ^1.0.0 + checksum: f9734c81f2f9cf9877c5db8356bfe1ff61680f1f4c1011e91278a9c0564b395ae796addb4bf33956871041476ec82c3e5260ed57b22ac91794d4ae70a1d2f0a9 + languageName: node + linkType: hard + "is-yarn-global@npm:^0.3.0": version: 0.3.0 resolution: "is-yarn-global@npm:0.3.0" @@ -14330,7 +14477,7 @@ __metadata: languageName: node linkType: hard -"launch-editor@npm:^2.6.0": +"launch-editor@npm:^2.6.0, launch-editor@npm:^2.6.1": version: 2.6.1 resolution: "launch-editor@npm:2.6.1" dependencies: @@ -16565,6 +16712,18 @@ __metadata: languageName: node linkType: hard +"open@npm:^10.0.3": + version: 10.0.3 + resolution: "open@npm:10.0.3" + dependencies: + default-browser: ^5.2.1 + define-lazy-prop: ^3.0.0 + is-inside-container: ^1.0.0 + is-wsl: ^3.1.0 + checksum: 3c4b4eb3c08210f7b7b3f3311d36440f4b83f0641ac70e5e56d637f48d4a7736e0fd49a604eebe0a55c51223d77f9ced11912223cab12d5e9fdc866727c6cb1d + languageName: node + linkType: hard + "open@npm:^8.0.2, open@npm:^8.0.9, open@npm:^8.4.0": version: 8.4.2 resolution: "open@npm:8.4.2" @@ -16748,6 +16907,17 @@ __metadata: languageName: node linkType: hard +"p-retry@npm:^6.2.0": + version: 6.2.0 + resolution: "p-retry@npm:6.2.0" + dependencies: + "@types/retry": 0.12.2 + is-network-error: ^1.0.0 + retry: ^0.13.1 + checksum: 6003573c559ee812329c9c3ede7ba12a783fdc8dd70602116646e850c920b4597dc502fe001c3f9526fca4e93275045db7a27341c458e51db179c1374a01ac44 + languageName: node + linkType: hard + "p-try@npm:^1.0.0": version: 1.0.0 resolution: "p-try@npm:1.0.0" @@ -17043,6 +17213,13 @@ __metadata: languageName: node linkType: hard +"path-type@npm:^5.0.0": + version: 5.0.0 + resolution: "path-type@npm:5.0.0" + checksum: 15ec24050e8932c2c98d085b72cfa0d6b4eeb4cbde151a0a05726d8afae85784fc5544f733d8dfc68536587d5143d29c0bd793623fad03d7e61cc00067291cd5 + languageName: node + linkType: hard + "pathval@npm:^1.1.1": version: 1.1.1 resolution: "pathval@npm:1.1.1" @@ -18273,12 +18450,12 @@ __metadata: languageName: node linkType: hard -"rechoir@npm:^0.7.0": - version: 0.7.1 - resolution: "rechoir@npm:0.7.1" +"rechoir@npm:^0.8.0": + version: 0.8.0 + resolution: "rechoir@npm:0.8.0" dependencies: - resolve: ^1.9.0 - checksum: 2a04aab4e28c05fcd6ee6768446bc8b859d8f108e71fc7f5bcbc5ef25e53330ce2c11d10f82a24591a2df4c49c4f61feabe1fd11f844c66feedd4cd7bb61146a + resolve: ^1.20.0 + checksum: ad3caed8afdefbc33fbc30e6d22b86c35b3d51c2005546f4e79bcc03c074df804b3640ad18945e6bef9ed12caedc035655ec1082f64a5e94c849ff939dc0a788 languageName: node linkType: hard @@ -18730,7 +18907,7 @@ __metadata: languageName: node linkType: hard -"resolve@npm:^1.1.6, resolve@npm:^1.14.2, resolve@npm:^1.19.0, resolve@npm:^1.22.1, resolve@npm:^1.3.2, resolve@npm:^1.9.0": 
+"resolve@npm:^1.1.6, resolve@npm:^1.14.2, resolve@npm:^1.19.0, resolve@npm:^1.20.0, resolve@npm:^1.22.1, resolve@npm:^1.3.2": version: 1.22.8 resolution: "resolve@npm:1.22.8" dependencies: @@ -18752,7 +18929,7 @@ __metadata: languageName: node linkType: hard -"resolve@patch:resolve@^1.1.6#~builtin, resolve@patch:resolve@^1.14.2#~builtin, resolve@patch:resolve@^1.19.0#~builtin, resolve@patch:resolve@^1.22.1#~builtin, resolve@patch:resolve@^1.3.2#~builtin, resolve@patch:resolve@^1.9.0#~builtin": +"resolve@patch:resolve@^1.1.6#~builtin, resolve@patch:resolve@^1.14.2#~builtin, resolve@patch:resolve@^1.19.0#~builtin, resolve@patch:resolve@^1.20.0#~builtin, resolve@patch:resolve@^1.22.1#~builtin, resolve@patch:resolve@^1.3.2#~builtin": version: 1.22.8 resolution: "resolve@patch:resolve@npm%3A1.22.8#~builtin::version=1.22.8&hash=c3c19d" dependencies: @@ -18966,6 +19143,13 @@ __metadata: languageName: node linkType: hard +"run-applescript@npm:^7.0.0": + version: 7.0.0 + resolution: "run-applescript@npm:7.0.0" + checksum: b02462454d8b182ad4117e5d4626e9e6782eb2072925c9fac582170b0627ae3c1ea92ee9b2df7daf84b5e9ffe14eb1cf5fb70bc44b15c8a0bfcdb47987e2410c + languageName: node + linkType: hard + "run-parallel-limit@npm:^1.1.0": version: 1.1.0 resolution: "run-parallel-limit@npm:1.1.0" @@ -19070,7 +19254,7 @@ __metadata: languageName: node linkType: hard -"schema-utils@npm:^4.0.0": +"schema-utils@npm:^4.0.0, schema-utils@npm:^4.2.0": version: 4.2.0 resolution: "schema-utils@npm:4.2.0" dependencies: @@ -19118,7 +19302,7 @@ __metadata: languageName: node linkType: hard -"selfsigned@npm:^2.1.1": +"selfsigned@npm:^2.1.1, selfsigned@npm:^2.4.1": version: 2.4.1 resolution: "selfsigned@npm:2.4.1" dependencies: @@ -19223,6 +19407,15 @@ __metadata: languageName: node linkType: hard +"serialize-javascript@npm:^6.0.2": + version: 6.0.2 + resolution: "serialize-javascript@npm:6.0.2" + dependencies: + randombytes: ^2.1.0 + checksum: c4839c6206c1d143c0f80763997a361310305751171dd95e4b57efee69b8f6edd8960a0b7fbfc45042aadff98b206d55428aee0dc276efe54f100899c7fa8ab7 + languageName: node + linkType: hard + "serve-handler@npm:6.1.5, serve-handler@npm:^6.1.3, serve-handler@npm:^6.1.5": version: 6.1.5 resolution: "serve-handler@npm:6.1.5" @@ -19490,6 +19683,13 @@ __metadata: languageName: node linkType: hard +"slash@npm:^5.1.0": + version: 5.1.0 + resolution: "slash@npm:5.1.0" + checksum: 70434b34c50eb21b741d37d455110258c42d2cf18c01e6518aeb7299f3c6e626330c889c0c552b5ca2ef54a8f5a74213ab48895f0640717cacefeef6830a1ba4 + languageName: node + linkType: hard + "slice-ansi@npm:^4.0.0": version: 4.0.0 resolution: "slice-ansi@npm:4.0.0" @@ -20114,6 +20314,28 @@ __metadata: languageName: node linkType: hard +"terser-webpack-plugin@npm:^5.3.10": + version: 5.3.10 + resolution: "terser-webpack-plugin@npm:5.3.10" + dependencies: + "@jridgewell/trace-mapping": ^0.3.20 + jest-worker: ^27.4.5 + schema-utils: ^3.1.1 + serialize-javascript: ^6.0.1 + terser: ^5.26.0 + peerDependencies: + webpack: ^5.1.0 + peerDependenciesMeta: + "@swc/core": + optional: true + esbuild: + optional: true + uglify-js: + optional: true + checksum: bd6e7596cf815f3353e2a53e79cbdec959a1b0276f5e5d4e63e9d7c3c5bb5306df567729da287d1c7b39d79093e56863c569c42c6c24cc34c76aa313bd2cbcea + languageName: node + linkType: hard + "terser-webpack-plugin@npm:^5.3.3, terser-webpack-plugin@npm:^5.3.7, terser-webpack-plugin@npm:^5.3.9": version: 5.3.9 resolution: "terser-webpack-plugin@npm:5.3.9" @@ -20150,6 +20372,20 @@ __metadata: languageName: node linkType: hard +"terser@npm:^5.26.0": + 
version: 5.27.0 + resolution: "terser@npm:5.27.0" + dependencies: + "@jridgewell/source-map": ^0.3.3 + acorn: ^8.8.2 + commander: ^2.20.0 + source-map-support: ~0.5.20 + bin: + terser: bin/terser + checksum: c165052cfea061e8512e9b9ba42a098c2ff6382886ae122b040fd5b6153443070cc2dcb4862269f1669c09c716763e856125a355ff984aa72be525d6fffd8729 + languageName: node + linkType: hard + "text-table@npm:^0.2.0": version: 0.2.0 resolution: "text-table@npm:0.2.0" @@ -20765,6 +21001,13 @@ __metadata: languageName: node linkType: hard +"unicorn-magic@npm:^0.1.0": + version: 0.1.0 + resolution: "unicorn-magic@npm:0.1.0" + checksum: 48c5882ca3378f380318c0b4eb1d73b7e3c5b728859b060276e0a490051d4180966beeb48962d850fd0c6816543bcdfc28629dcd030bb62a286a2ae2acb5acb6 + languageName: node + linkType: hard + "unified@npm:9.2.0": version: 9.2.0 resolution: "unified@npm:9.2.0" @@ -21409,36 +21652,35 @@ __metadata: languageName: node linkType: hard -"webpack-cli@npm:^4.7.2": - version: 4.10.0 - resolution: "webpack-cli@npm:4.10.0" +"webpack-cli@npm:^5.1.4": + version: 5.1.4 + resolution: "webpack-cli@npm:5.1.4" dependencies: "@discoveryjs/json-ext": ^0.5.0 - "@webpack-cli/configtest": ^1.2.0 - "@webpack-cli/info": ^1.5.0 - "@webpack-cli/serve": ^1.7.0 + "@webpack-cli/configtest": ^2.1.1 + "@webpack-cli/info": ^2.0.2 + "@webpack-cli/serve": ^2.0.5 colorette: ^2.0.14 - commander: ^7.0.0 + commander: ^10.0.1 cross-spawn: ^7.0.3 + envinfo: ^7.7.3 fastest-levenshtein: ^1.0.12 import-local: ^3.0.2 - interpret: ^2.2.0 - rechoir: ^0.7.0 + interpret: ^3.1.1 + rechoir: ^0.8.0 webpack-merge: ^5.7.3 peerDependencies: - webpack: 4.x.x || 5.x.x + webpack: 5.x.x peerDependenciesMeta: "@webpack-cli/generators": optional: true - "@webpack-cli/migrate": - optional: true webpack-bundle-analyzer: optional: true webpack-dev-server: optional: true bin: webpack-cli: bin/cli.js - checksum: 2ff5355ac348e6b40f2630a203b981728834dca96d6d621be96249764b2d0fc01dd54edfcc37f02214d02935de2cf0eefd6ce689d970d154ef493f01ba922390 + checksum: 3a4ad0d0342a6815c850ee4633cc2a8a5dae04f918e7847f180bf24ab400803cf8a8943707ffbed03eb20fe6ce647f996f60a2aade87b0b4a9954da3da172ce0 languageName: node linkType: hard @@ -21457,6 +21699,24 @@ __metadata: languageName: node linkType: hard +"webpack-dev-middleware@npm:^7.0.0": + version: 7.0.0 + resolution: "webpack-dev-middleware@npm:7.0.0" + dependencies: + colorette: ^2.0.10 + memfs: ^4.6.0 + mime-types: ^2.1.31 + range-parser: ^1.2.1 + schema-utils: ^4.0.0 + peerDependencies: + webpack: ^5.0.0 + peerDependenciesMeta: + webpack: + optional: true + checksum: 90f6c87c80bd5849c34f3a1761ac7dc1b123def2e6e9922f55102ff4b7532538641fa8c7169ce8254b0d471c27d882cdf4a1c32979952474fc8eacc8b3447915 + languageName: node + linkType: hard + "webpack-dev-server@npm:^4.15.1, webpack-dev-server@npm:^4.9.3": version: 4.15.1 resolution: "webpack-dev-server@npm:4.15.1" @@ -21504,6 +21764,53 @@ __metadata: languageName: node linkType: hard +"webpack-dev-server@npm:^5.0.0": + version: 5.0.0 + resolution: "webpack-dev-server@npm:5.0.0" + dependencies: + "@types/bonjour": ^3.5.13 + "@types/connect-history-api-fallback": ^1.5.4 + "@types/express": ^4.17.21 + "@types/serve-index": ^1.9.4 + "@types/serve-static": ^1.15.5 + "@types/sockjs": ^0.3.36 + "@types/ws": ^8.5.10 + ansi-html-community: ^0.0.8 + bonjour-service: ^1.2.1 + chokidar: ^3.6.0 + colorette: ^2.0.10 + compression: ^1.7.4 + connect-history-api-fallback: ^2.0.0 + default-gateway: ^6.0.3 + express: ^4.17.3 + graceful-fs: ^4.2.6 + html-entities: ^2.4.0 + http-proxy-middleware: ^2.0.3 + 
ipaddr.js: ^2.1.0 + launch-editor: ^2.6.1 + open: ^10.0.3 + p-retry: ^6.2.0 + rimraf: ^5.0.5 + schema-utils: ^4.2.0 + selfsigned: ^2.4.1 + serve-index: ^1.9.1 + sockjs: ^0.3.24 + spdy: ^4.0.2 + webpack-dev-middleware: ^7.0.0 + ws: ^8.16.0 + peerDependencies: + webpack: ^5.0.0 + peerDependenciesMeta: + webpack: + optional: true + webpack-cli: + optional: true + bin: + webpack-dev-server: bin/webpack-dev-server.js + checksum: 419d1af6b6164900fb01168c3ef965fe8d27a78939ef8f5c602f82af5be8a2b68a0b015df564623dd69996d5265c679202c5970b59797e83cf322e47bbcd6022 + languageName: node + linkType: hard + "webpack-merge@npm:^5.7.3, webpack-merge@npm:^5.8.0, webpack-merge@npm:^5.9.0": version: 5.10.0 resolution: "webpack-merge@npm:5.10.0" @@ -21522,7 +21829,7 @@ __metadata: languageName: node linkType: hard -"webpack@npm:^5.49.0, webpack@npm:^5.73.0, webpack@npm:^5.88.1": +"webpack@npm:^5.73.0, webpack@npm:^5.88.1": version: 5.89.0 resolution: "webpack@npm:5.89.0" dependencies: @@ -21559,6 +21866,43 @@ __metadata: languageName: node linkType: hard +"webpack@npm:^5.90.1": + version: 5.90.1 + resolution: "webpack@npm:5.90.1" + dependencies: + "@types/eslint-scope": ^3.7.3 + "@types/estree": ^1.0.5 + "@webassemblyjs/ast": ^1.11.5 + "@webassemblyjs/wasm-edit": ^1.11.5 + "@webassemblyjs/wasm-parser": ^1.11.5 + acorn: ^8.7.1 + acorn-import-assertions: ^1.9.0 + browserslist: ^4.21.10 + chrome-trace-event: ^1.0.2 + enhanced-resolve: ^5.15.0 + es-module-lexer: ^1.2.1 + eslint-scope: 5.1.1 + events: ^3.2.0 + glob-to-regexp: ^0.4.1 + graceful-fs: ^4.2.9 + json-parse-even-better-errors: ^2.3.1 + loader-runner: ^4.2.0 + mime-types: ^2.1.27 + neo-async: ^2.6.2 + schema-utils: ^3.2.0 + tapable: ^2.1.1 + terser-webpack-plugin: ^5.3.10 + watchpack: ^2.4.0 + webpack-sources: ^3.2.3 + peerDependenciesMeta: + webpack-cli: + optional: true + bin: + webpack: bin/webpack.js + checksum: a7be844d5720a0c6282fec012e6fa34b1137dff953c5d48bf2ef066a6c27c1dbc92a9b9effc05ee61c9fe269499266db9782073f2d82a589d3c5c966ffc56584 + languageName: node + linkType: hard + "webpackbar@npm:^5.0.2": version: 5.0.2 resolution: "webpackbar@npm:5.0.2" @@ -21833,6 +22177,21 @@ __metadata: languageName: node linkType: hard +"ws@npm:^8.16.0": + version: 8.16.0 + resolution: "ws@npm:8.16.0" + peerDependencies: + bufferutil: ^4.0.1 + utf-8-validate: ">=5.0.2" + peerDependenciesMeta: + bufferutil: + optional: true + utf-8-validate: + optional: true + checksum: feb3eecd2bae82fa8a8beef800290ce437d8b8063bdc69712725f21aef77c49cb2ff45c6e5e7fce622248f9c7abaee506bae0a9064067ffd6935460c7357321b + languageName: node + linkType: hard + "xdg-basedir@npm:^4.0.0": version: 4.0.0 resolution: "xdg-basedir@npm:4.0.0" From d2585e738a63208fca3c9e26242e896d7f1df1e4 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Sat, 17 Feb 2024 21:15:21 +0000 Subject: [PATCH 20/45] feat: update error message when trying to load workspace as dependency (#4393) # Description ## Problem\* Resolves ## Summary\* This addresses an issue that a user in the discord is running into where they're trying to load a workspace as a dependency and can't figure out how to fix their Nargo.toml. ## Additional Context We should probably be embedding links to the docs but that would require some setup so that we point to the correct version, etc. ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. 
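For readers who hit the new error: the fix it suggests is to depend on a single package inside the workspace rather than on the workspace root, by adding a `directory` field to the dependency entry. A hypothetical `Nargo.toml` sketch (the repository URL, tag, and path are illustrative, not taken from this PR):

```toml
[dependencies]
# Select one member of a workspace repository via `directory`,
# instead of pointing at the workspace root.
example_lib = { git = "https://github.com/example-org/example-workspace", tag = "v0.1.0", directory = "crates/example_lib" }
```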
# PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. Co-authored-by: kevaundray --- tooling/nargo_toml/src/errors.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tooling/nargo_toml/src/errors.rs b/tooling/nargo_toml/src/errors.rs index 440895056c3..77fe77bcdbb 100644 --- a/tooling/nargo_toml/src/errors.rs +++ b/tooling/nargo_toml/src/errors.rs @@ -28,7 +28,7 @@ pub enum ManifestError { #[error("Nargo.toml is badly formed, could not parse.\n\n {0}")] MalformedFile(#[from] toml::de::Error), - #[error("Unexpected workspace definition found in {0}")] + #[error("Unexpected workspace definition found in {0}. If you're attempting to load this as a dependency, you may need to add a `directory` field to your `Nargo.toml` to show which package within the workspace to use")] UnexpectedWorkspace(PathBuf), #[error("Cannot find file {entry} which was specified as the `entry` field in {toml}")] From 6169a5b2d85d22fcd1ac9f7fd90514f88d4ef8a6 Mon Sep 17 00:00:00 2001 From: jfecher Date: Sat, 17 Feb 2024 18:52:28 -0600 Subject: [PATCH 21/45] chore: Update Vec docs (#4400) # Description ## Problem\* Working towards #4348 ## Summary\* We'll soon have bounded vec and hashmap types documented so I thought I'd create a new `containers` folder for documentation of the `std::containers` module. ## Additional Context Minor updates in the Vec docs: - Removed implementation details from methods - Reordered `Vec::len` up to the top since it is important - Minor grammar fixes ## Documentation\* Check one: - [ ] No documentation needed. - [x] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. Co-authored-by: kevaundray --- .../containers/vec.mdx} | 68 +++++++------------ 1 file changed, 24 insertions(+), 44 deletions(-) rename docs/docs/noir/{concepts/data_types/vectors.mdx => standard_library/containers/vec.mdx} (71%) diff --git a/docs/docs/noir/concepts/data_types/vectors.mdx b/docs/docs/noir/standard_library/containers/vec.mdx similarity index 71% rename from docs/docs/noir/concepts/data_types/vectors.mdx rename to docs/docs/noir/standard_library/containers/vec.mdx index aed13183719..1954f05bc76 100644 --- a/docs/docs/noir/concepts/data_types/vectors.mdx +++ b/docs/docs/noir/standard_library/containers/vec.mdx @@ -1,6 +1,6 @@ --- title: Vectors -description: Delve into the Vector data type in Noir. Learn about its methods, practical examples, and best practices for using Vectors in your Noir code. +description: Delve into the Vec data type in Noir. Learn about its methods, practical examples, and best practices for using Vectors in your Noir code. keywords: [noir, vector type, methods, examples, dynamic arrays] sidebar_position: 6 --- @@ -9,7 +9,7 @@ import Experimental from '@site/src/components/Notes/_experimental.mdx'; -A vector is a collection type similar to Rust's Vector type. It's convenient way to use slices as mutable arrays. +A vector is a collection type similar to Rust's `Vec` type. In Noir, it is a convenient way to use slices as mutable arrays. Example: @@ -28,9 +28,7 @@ assert(vector.len() == 5); Creates a new, empty vector. 
```rust -pub fn new() -> Self { - Self { slice: [] } -} +pub fn new() -> Self ``` Example: @@ -45,9 +43,7 @@ assert(empty_vector.len() == 0); Creates a vector containing each element from a given slice. Mutations to the resulting vector will not affect the original slice. ```rust -pub fn from_slice(slice: [T]) -> Self { - Self { slice } -} +pub fn from_slice(slice: [T]) -> Self ``` Example: @@ -58,14 +54,27 @@ let vector_from_slice = Vec::from_slice(arr); assert(vector_from_slice.len() == 3); ``` +### len + +Returns the number of elements in the vector. + +```rust +pub fn len(self) -> Field +``` + +Example: + +```rust +let empty_vector: Vec<Field> = Vec::new(); +assert(empty_vector.len() == 0); +``` + ### get Retrieves an element from the vector at a given index. Panics if the index points beyond the vector's end. ```rust -pub fn get(self, index: Field) -> T { - self.slice[index] -} +pub fn get(self, index: Field) -> T ``` Example: @@ -80,9 +89,7 @@ assert(vector.get(1) == 20); Adds a new element to the vector's end, returning a new vector with a length one greater than the original unmodified vector. ```rust -pub fn push(&mut self, elem: T) { - self.slice = self.slice.push_back(elem); -} +pub fn push(&mut self, elem: T) ``` Example: @@ -98,11 +105,7 @@ assert(vector.len() == 1); Removes an element from the vector's end, returning a new vector with a length one less than the original vector, along with the removed element. Panics if the vector's length is zero. ```rust -pub fn pop(&mut self) -> T { - let (popped_slice, last_elem) = self.slice.pop_back(); - self.slice = popped_slice; - last_elem -} +pub fn pop(&mut self) -> T ``` Example: @@ -119,9 +122,7 @@ assert(vector.len() == 1); Inserts an element at a specified index, shifting subsequent elements to the right. ```rust -pub fn insert(&mut self, index: Field, elem: T) { - self.slice = self.slice.insert(index, elem); -} +pub fn insert(&mut self, index: Field, elem: T) ``` Example: @@ -137,11 +138,7 @@ assert(vector.get(1) == 20); Removes an element at a specified index, shifting subsequent elements to the left, and returns the removed element. ```rust -pub fn remove(&mut self, index: Field) -> T { - let (new_slice, elem) = self.slice.remove(index); - self.slice = new_slice; - elem -} +pub fn remove(&mut self, index: Field) -> T ``` Example: @@ -152,20 +149,3 @@ let removed_elem = vector.remove(1); assert(removed_elem == 20); assert(vector.len() == 2); ``` - -### len - -Returns the number of elements in the vector. - -```rust -pub fn len(self) -> Field { - self.slice.len() -} -``` - -Example: - -```rust -let empty_vector: Vec<Field> = Vec::new(); -assert(empty_vector.len() == 0); -``` From 722dc969e8b09e5a6fd56b094b6939e9330233f6 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Sun, 18 Feb 2024 18:33:37 +0000 Subject: [PATCH 22/45] chore: fix docker test workflows (#4308) # Description ## Problem\* Resolves ## Summary\* This adds a change which didn't make it into #4306 so that we only run node tests on the node image and vice versa. ## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings.
--- .github/scripts/integration-test-browser.sh | 2 +- .github/scripts/integration-test-node.sh | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/scripts/integration-test-browser.sh b/.github/scripts/integration-test-browser.sh index c9cda58aab8..12195a88928 100755 --- a/.github/scripts/integration-test-browser.sh +++ b/.github/scripts/integration-test-browser.sh @@ -2,4 +2,4 @@ set -eu ./.github/scripts/playwright-install.sh -yarn workspace integration-tests test +yarn workspace integration-tests test:browser \ No newline at end of file diff --git a/.github/scripts/integration-test-node.sh b/.github/scripts/integration-test-node.sh index 7260ca4bb0f..b7f00c65620 100755 --- a/.github/scripts/integration-test-node.sh +++ b/.github/scripts/integration-test-node.sh @@ -2,4 +2,4 @@ set -eu apt-get install libc++-dev -y -yarn workspace integration-tests test +yarn workspace integration-tests test:node From b5e5c30f4db52c79ef556e80660f39db369b1911 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Wed, 21 Feb 2024 17:39:09 +0000 Subject: [PATCH 23/45] chore!: bump msrv to 1.73.0 (#4406) # Description ## Problem\* Resolves ## Summary\* Bumpalo bumped their MSRV in a [patch/minor release](https://github.com/fitzgen/bumpalo/commit/f8597ceb3600807a902fa9692fb43c49e7b63b27) and wasmer is using an unlocked dependency so we need to bump to match. ## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. 
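"Unlocked" here means the dependency is declared with an ordinary semver-compatible requirement, so a fresh resolution (e.g. `cargo update`) can pull in a newer upstream release, including one that raises its MSRV in a patch/minor bump. A hypothetical `Cargo.toml` illustration (the version numbers are examples only, not taken from wasmer's manifest):

```toml
[dependencies]
# Unlocked: resolves to the newest compatible 3.x release, so an upstream
# MSRV bump in a patch release propagates to consumers.
bumpalo = "3.11"

# An exact pin would shield consumers from such bumps, at the cost of
# missing later fixes:
# bumpalo = "=3.11.1"
```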
--- .github/workflows/docs-pr.yml | 2 +- .github/workflows/formatting.yml | 2 +- .github/workflows/gates_report.yml | 2 +- .github/workflows/publish-acvm.yml | 2 +- .github/workflows/publish-es-packages.yml | 6 +++--- .github/workflows/publish-nargo.yml | 4 ++-- .github/workflows/test-js-packages.yml | 8 ++++---- .github/workflows/test-rust-workspace-msrv.yml | 4 ++-- .github/workflows/test-rust-workspace.yml | 4 ++-- Cargo.toml | 2 +- Dockerfile.ci | 2 +- README.md | 2 +- .../getting_started/installation/other_install_methods.md | 2 +- flake.nix | 2 +- rust-toolchain.toml | 2 +- tooling/nargo/build.rs | 4 ++-- tooling/nargo_cli/build.rs | 4 ++-- 17 files changed, 27 insertions(+), 27 deletions(-) diff --git a/.github/workflows/docs-pr.yml b/.github/workflows/docs-pr.yml index dddb309a3a4..5d0b72c6ad8 100644 --- a/.github/workflows/docs-pr.yml +++ b/.github/workflows/docs-pr.yml @@ -55,7 +55,7 @@ jobs: uses: actions/checkout@v4 - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.71.1 + uses: dtolnay/rust-toolchain@1.73.0 - uses: Swatinem/rust-cache@v2 with: diff --git a/.github/workflows/formatting.yml b/.github/workflows/formatting.yml index 97736e2415e..43fd6daa91d 100644 --- a/.github/workflows/formatting.yml +++ b/.github/workflows/formatting.yml @@ -32,7 +32,7 @@ jobs: uses: actions/checkout@v4 - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.71.1 + uses: dtolnay/rust-toolchain@1.73.0 with: targets: ${{ matrix.target }} components: clippy, rustfmt diff --git a/.github/workflows/gates_report.yml b/.github/workflows/gates_report.yml index 39416e628a9..f3f798fc5ea 100644 --- a/.github/workflows/gates_report.yml +++ b/.github/workflows/gates_report.yml @@ -18,7 +18,7 @@ jobs: uses: actions/checkout@v4 - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.71.1 + uses: dtolnay/rust-toolchain@1.73.0 - uses: Swatinem/rust-cache@v2 with: diff --git a/.github/workflows/publish-acvm.yml b/.github/workflows/publish-acvm.yml index e19a61fff4f..959cd8e4bca 100644 --- a/.github/workflows/publish-acvm.yml +++ b/.github/workflows/publish-acvm.yml @@ -18,7 +18,7 @@ jobs: ref: ${{ inputs.noir-ref }} - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.71.1 + uses: dtolnay/rust-toolchain@1.73.0 # These steps are in a specific order so crate dependencies are updated first - name: Publish acir_field diff --git a/.github/workflows/publish-es-packages.yml b/.github/workflows/publish-es-packages.yml index d4cd356a138..b22a26c685e 100644 --- a/.github/workflows/publish-es-packages.yml +++ b/.github/workflows/publish-es-packages.yml @@ -22,7 +22,7 @@ jobs: uses: actions/checkout@v4 - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.71.1 + uses: dtolnay/rust-toolchain@1.73.0 - uses: Swatinem/rust-cache@v2 with: @@ -51,7 +51,7 @@ jobs: ref: ${{ inputs.noir-ref }} - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.71.1 + uses: dtolnay/rust-toolchain@1.73.0 - uses: Swatinem/rust-cache@v2 with: @@ -83,7 +83,7 @@ jobs: uses: actions/checkout@v4 - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.71.1 + uses: dtolnay/rust-toolchain@1.73.0 - uses: Swatinem/rust-cache@v2 with: diff --git a/.github/workflows/publish-nargo.yml b/.github/workflows/publish-nargo.yml index 085ab013e4e..e47e1a13053 100644 --- a/.github/workflows/publish-nargo.yml +++ b/.github/workflows/publish-nargo.yml @@ -46,7 +46,7 @@ jobs: echo "MACOSX_DEPLOYMENT_TARGET=$(xcrun -sdk macosx$(sw_vers -productVersion) --show-sdk-platform-version)" >> $GITHUB_ENV - name: Setup toolchain - uses: 
dtolnay/rust-toolchain@1.71.1 + uses: dtolnay/rust-toolchain@1.73.0 with: targets: ${{ matrix.target }} @@ -120,7 +120,7 @@ jobs: ref: ${{ inputs.tag || env.GITHUB_REF }} - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.71.1 + uses: dtolnay/rust-toolchain@1.73.0 with: targets: ${{ matrix.target }} diff --git a/.github/workflows/test-js-packages.yml b/.github/workflows/test-js-packages.yml index 1afd11c94fa..eb9c50d82dd 100644 --- a/.github/workflows/test-js-packages.yml +++ b/.github/workflows/test-js-packages.yml @@ -22,7 +22,7 @@ jobs: uses: actions/checkout@v4 - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.71.1 + uses: dtolnay/rust-toolchain@1.73.0 - uses: Swatinem/rust-cache@v2 with: @@ -55,7 +55,7 @@ jobs: uses: actions/checkout@v4 - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.71.1 + uses: dtolnay/rust-toolchain@1.73.0 - uses: Swatinem/rust-cache@v2 with: @@ -86,7 +86,7 @@ jobs: uses: actions/checkout@v4 - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.71.1 + uses: dtolnay/rust-toolchain@1.73.0 - uses: Swatinem/rust-cache@v2 with: @@ -121,7 +121,7 @@ jobs: uses: actions/checkout@v4 - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.71.1 + uses: dtolnay/rust-toolchain@1.73.0 - uses: Swatinem/rust-cache@v2 with: diff --git a/.github/workflows/test-rust-workspace-msrv.yml b/.github/workflows/test-rust-workspace-msrv.yml index 02444b52856..061fc65ca8b 100644 --- a/.github/workflows/test-rust-workspace-msrv.yml +++ b/.github/workflows/test-rust-workspace-msrv.yml @@ -28,7 +28,7 @@ jobs: uses: actions/checkout@v4 - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.71.1 + uses: dtolnay/rust-toolchain@1.73.0 with: targets: x86_64-unknown-linux-gnu @@ -71,7 +71,7 @@ jobs: - uses: actions/checkout@v4 - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.71.1 + uses: dtolnay/rust-toolchain@1.73.0 with: targets: x86_64-unknown-linux-gnu diff --git a/.github/workflows/test-rust-workspace.yml b/.github/workflows/test-rust-workspace.yml index bb31ab7873a..c12dcaba0ba 100644 --- a/.github/workflows/test-rust-workspace.yml +++ b/.github/workflows/test-rust-workspace.yml @@ -23,7 +23,7 @@ jobs: uses: actions/checkout@v4 - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.71.1 + uses: dtolnay/rust-toolchain@1.73.0 with: targets: x86_64-unknown-linux-gnu @@ -59,7 +59,7 @@ jobs: - uses: actions/checkout@v4 - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.71.1 + uses: dtolnay/rust-toolchain@1.73.0 with: targets: x86_64-unknown-linux-gnu diff --git a/Cargo.toml b/Cargo.toml index 77058554aff..7d5da7b00d0 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -45,7 +45,7 @@ version = "0.24.0" # x-release-please-end authors = ["The Noir Team "] edition = "2021" -rust-version = "1.71.1" +rust-version = "1.73.0" license = "MIT OR Apache-2.0" repository = "https://github.com/noir-lang/noir/" diff --git a/Dockerfile.ci b/Dockerfile.ci index a73ce4ab969..e0dc030980c 100644 --- a/Dockerfile.ci +++ b/Dockerfile.ci @@ -1,4 +1,4 @@ -FROM rust:1.71.1-slim-bookworm as base +FROM rust:1.73.0-slim-bookworm as base RUN apt-get update && apt-get upgrade -y && apt-get install build-essential git -y WORKDIR /usr/src/noir ENV PATH="${PATH}:/usr/src/noir/target/release" diff --git a/README.md b/README.md index 771c3f1c74d..5c93512ae26 100644 --- a/README.md +++ b/README.md @@ -54,7 +54,7 @@ Concretely the following items are on the road map: ## Minimum Rust version -This crate's minimum supported rustc version is 1.71.1. 
+This crate's minimum supported rustc version is 1.73.0. ## Working on this project diff --git a/docs/docs/getting_started/installation/other_install_methods.md b/docs/docs/getting_started/installation/other_install_methods.md index 076f26dfd94..a35e34aaf9c 100644 --- a/docs/docs/getting_started/installation/other_install_methods.md +++ b/docs/docs/getting_started/installation/other_install_methods.md @@ -212,7 +212,7 @@ code . #### Building and testing Assuming you are using `direnv` to populate your environment, building and testing the project can be done -with the typical `cargo build`, `cargo test`, and `cargo clippy` commands. You'll notice that the `cargo` version matches the version we specify in `rust-toolchain.toml`, which is 1.71.1 at the time of this writing. +with the typical `cargo build`, `cargo test`, and `cargo clippy` commands. You'll notice that the `cargo` version matches the version we specify in `rust-toolchain.toml`, which is 1.73.0 at the time of this writing. If you want to build the entire project in an isolated sandbox, you can use Nix commands: diff --git a/flake.nix b/flake.nix index f0d0a2eaebb..4c5db8bfaae 100644 --- a/flake.nix +++ b/flake.nix @@ -44,7 +44,7 @@ rustToolchain = fenix.packages.${system}.fromToolchainFile { file = ./rust-toolchain.toml; - sha256 = "sha256-dxE7lmCFWlq0nl/wKcmYvpP9zqQbBitAQgZ1zx9Ooik="; + sha256 = "sha256-rLP8+fTxnPHoR96ZJiCa/5Ans1OojI7MLsmSqR2ip8o="; }; craneLib = (crane.mkLib pkgs).overrideToolchain rustToolchain; diff --git a/rust-toolchain.toml b/rust-toolchain.toml index 955e24485fc..0e5ac891ce9 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,5 +1,5 @@ [toolchain] -channel = "1.72.1" +channel = "1.73.0" components = [ "rust-src" ] targets = [ "wasm32-unknown-unknown", "wasm32-wasi", "aarch64-apple-darwin" ] profile = "default" diff --git a/tooling/nargo/build.rs b/tooling/nargo/build.rs index 4fa7f58892a..ab2b7579132 100644 --- a/tooling/nargo/build.rs +++ b/tooling/nargo/build.rs @@ -2,8 +2,8 @@ use rustc_version::{version, Version}; fn check_rustc_version() { assert!( - version().unwrap() >= Version::parse("1.71.1").unwrap(), - "The minimal supported rustc version is 1.71.1." + version().unwrap() >= Version::parse("1.73.0").unwrap(), + "The minimal supported rustc version is 1.73.0." ); } diff --git a/tooling/nargo_cli/build.rs b/tooling/nargo_cli/build.rs index 57aa487f66a..1ca12b75dfb 100644 --- a/tooling/nargo_cli/build.rs +++ b/tooling/nargo_cli/build.rs @@ -6,8 +6,8 @@ use std::{env, fs}; fn check_rustc_version() { assert!( - version().unwrap() >= Version::parse("1.71.1").unwrap(), - "The minimal supported rustc version is 1.71.1." + version().unwrap() >= Version::parse("1.73.0").unwrap(), + "The minimal supported rustc version is 1.73.0." ); } From 292a972dfb23dd7c664be87916cccc313d7b134d Mon Sep 17 00:00:00 2001 From: Michael J Klein Date: Thu, 22 Feb 2024 06:06:54 -0500 Subject: [PATCH 24/45] chore: rename parameter 'filter' to 'level' in 'init_log_level' (#4403) # Description ## Problem\* Resolves https://github.com/noir-lang/noir/issues/3879 ## Summary\* ## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. 
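Since the renamed `level` argument is parsed into a `tracing_subscriber::EnvFilter`, it accepts full filter directives, not just a bare level name. A standalone sketch of strings that parse (generic `tracing_subscriber` usage, not code from this PR):

```rust
// Requires the `env-filter` feature of the `tracing-subscriber` crate.
use tracing_subscriber::EnvFilter;

fn main() {
    // A bare level name parses...
    let bare: EnvFilter = "debug".parse().expect("valid filter");

    // ...and so do per-target directives: trace for one crate, info elsewhere.
    let scoped: EnvFilter = "noirc_driver=trace,info".parse().expect("valid filter");

    println!("{bare} / {scoped}");
}
```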
--- compiler/wasm/src/lib.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/compiler/wasm/src/lib.rs b/compiler/wasm/src/lib.rs index 6d737a0ea6d..174d9b9ce9c 100644 --- a/compiler/wasm/src/lib.rs +++ b/compiler/wasm/src/lib.rs @@ -32,12 +32,12 @@ pub struct BuildInfo { } #[wasm_bindgen] -pub fn init_log_level(filter: String) { +pub fn init_log_level(level: String) { // Set the static variable from Rust use std::sync::Once; - let filter: EnvFilter = - filter.parse().expect("Could not parse log filter while initializing logger"); + let level_filter: EnvFilter = + level.parse().expect("Could not parse log filter while initializing logger"); static SET_HOOK: Once = Once::new(); SET_HOOK.call_once(|| { @@ -46,7 +46,7 @@ pub fn init_log_level(filter: String) { .without_time() .with_writer(MakeWebConsoleWriter::new()); - tracing_subscriber::registry().with(fmt_layer.with_filter(filter)).init(); + tracing_subscriber::registry().with(fmt_layer.with_filter(level_filter)).init(); }); } From 49822511710a7f1c42b8ed343e80456f8e6db2d9 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Thu, 22 Feb 2024 11:07:15 +0000 Subject: [PATCH 25/45] fix: add handling to `noir_wasm` for projects without dependencies (#4344) # Description ## Problem\* Resolves https://github.com/noir-lang/noir/issues/4338 ## Summary\* This PR returns an empty dependencies map rather than undefined if the package being compiled doesn't have any dependencies. I've also updated the test suite so it also compiles more than just a contract ## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. --------- Co-authored-by: kevaundray --- compiler/wasm/src/noir/package.ts | 2 +- compiler/wasm/src/types/noir_artifact.ts | 2 + .../wasm/src/types/noir_package_config.ts | 2 +- .../test/compiler/browser/compile.test.ts | 79 +++++++++++++++++++ .../browser/compile_with_deps.test.ts | 43 ---------- .../wasm/test/compiler/node/compile.test.ts | 39 +++++++++ .../compiler/node/compile_with_deps.test.ts | 20 ----- ...pile_with_deps.test.ts => compile.test.ts} | 60 ++++++++++++-- compiler/wasm/test/shared.ts | 27 +++++-- 9 files changed, 197 insertions(+), 77 deletions(-) create mode 100644 compiler/wasm/test/compiler/browser/compile.test.ts delete mode 100644 compiler/wasm/test/compiler/browser/compile_with_deps.test.ts create mode 100644 compiler/wasm/test/compiler/node/compile.test.ts delete mode 100644 compiler/wasm/test/compiler/node/compile_with_deps.test.ts rename compiler/wasm/test/compiler/shared/{compile_with_deps.test.ts => compile.test.ts} (52%) diff --git a/compiler/wasm/src/noir/package.ts b/compiler/wasm/src/noir/package.ts index a2496a03b3a..81178e6ae96 100644 --- a/compiler/wasm/src/noir/package.ts +++ b/compiler/wasm/src/noir/package.ts @@ -91,7 +91,7 @@ export class Package { * Gets this package's dependencies. */ public getDependencies(): Record { - return this.#config.dependencies; + return this.#config.dependencies ?? 
{}; } /** diff --git a/compiler/wasm/src/types/noir_artifact.ts b/compiler/wasm/src/types/noir_artifact.ts index 350a4053a9a..e636212a487 100644 --- a/compiler/wasm/src/types/noir_artifact.ts +++ b/compiler/wasm/src/types/noir_artifact.ts @@ -73,6 +73,8 @@ export interface ContractArtifact { * The compilation result of an Noir contract. */ export interface ProgramArtifact { + /** Version of noir used for the build. */ + noir_version: string; /** The hash of the circuit. */ hash?: number; /** * The ABI of the function. */ diff --git a/compiler/wasm/src/types/noir_package_config.ts b/compiler/wasm/src/types/noir_package_config.ts index 5f07c380cf3..0203763039a 100644 --- a/compiler/wasm/src/types/noir_package_config.ts +++ b/compiler/wasm/src/types/noir_package_config.ts @@ -20,7 +20,7 @@ type NoirPackageConfigSchema = { backend?: string; license?: string; }; - dependencies: Record; + dependencies?: Record; }; /** diff --git a/compiler/wasm/test/compiler/browser/compile.test.ts b/compiler/wasm/test/compiler/browser/compile.test.ts new file mode 100644 index 00000000000..b7e6c27427f --- /dev/null +++ b/compiler/wasm/test/compiler/browser/compile.test.ts @@ -0,0 +1,79 @@ +/* eslint-disable @typescript-eslint/ban-ts-comment */ +import { getPaths } from '../../shared'; +import { expect } from '@esm-bundle/chai'; +import { compile, createFileManager } from '@noir-lang/noir_wasm'; +import { ContractArtifact, ProgramArtifact } from '../../../src/types/noir_artifact'; +import { shouldCompileContractIdentically, shouldCompileProgramIdentically } from '../shared/compile.test'; + +const paths = getPaths('.'); + +async function getFile(path: string) { + // @ts-ignore + const basePath = new URL('./../../', import.meta.url).toString().replace(/\/$/g, ''); + const url = `${basePath}${path.replace('.', '')}`; + const response = await fetch(url); + return response; +} + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +async function getPrecompiledSource(path: string): Promise { + const response = await getFile(path); + const compiledData = await response.text(); + return JSON.parse(compiledData); +} + +describe('noir-compiler/browser', () => { + shouldCompileProgramIdentically( + async () => { + const { simpleScriptExpectedArtifact } = paths; + const fm = createFileManager('/'); + const files = Object.values(paths).filter((fileOrDir) => /^\.?\/.*\..*$/.test(fileOrDir)); + for (const path of files) { + console.log(path); + await fm.writeFile(path, (await getFile(path)).body as ReadableStream); + } + const nargoArtifact = (await getPrecompiledSource(simpleScriptExpectedArtifact)) as ProgramArtifact; + const noirWasmArtifact = await compile(fm, '/fixtures/simple'); + + return { nargoArtifact, noirWasmArtifact }; + }, + expect, + 60 * 20e3, + ); + + shouldCompileProgramIdentically( + async () => { + const { depsScriptExpectedArtifact } = paths; + const fm = createFileManager('/'); + const files = Object.values(paths).filter((fileOrDir) => /^\.?\/.*\..*$/.test(fileOrDir)); + for (const path of files) { + console.log(path); + await fm.writeFile(path, (await getFile(path)).body as ReadableStream); + } + const nargoArtifact = (await getPrecompiledSource(depsScriptExpectedArtifact)) as ProgramArtifact; + const noirWasmArtifact = await compile(fm, '/fixtures/with-deps'); + + return { nargoArtifact, noirWasmArtifact }; + }, + expect, + 60 * 20e3, + ); + + shouldCompileContractIdentically( + async () => { + const { contractExpectedArtifact } = paths; + const fm = createFileManager('/'); + const files = 
Object.values(paths).filter((fileOrDir) => /^\.?\/.*\..*$/.test(fileOrDir)); + for (const path of files) { + console.log(path); + await fm.writeFile(path, (await getFile(path)).body as ReadableStream); + } + const nargoArtifact = (await getPrecompiledSource(contractExpectedArtifact)) as ContractArtifact; + const noirWasmArtifact = await compile(fm, '/fixtures/noir-contract'); + + return { nargoArtifact, noirWasmArtifact }; + }, + expect, + 60 * 20e3, + ); +}); diff --git a/compiler/wasm/test/compiler/browser/compile_with_deps.test.ts b/compiler/wasm/test/compiler/browser/compile_with_deps.test.ts deleted file mode 100644 index 0d1e22e288f..00000000000 --- a/compiler/wasm/test/compiler/browser/compile_with_deps.test.ts +++ /dev/null @@ -1,43 +0,0 @@ -/* eslint-disable @typescript-eslint/ban-ts-comment */ -import { getPaths } from '../../shared'; -import { expect } from '@esm-bundle/chai'; -import { compile, createFileManager } from '@noir-lang/noir_wasm'; -import { ContractArtifact } from '../../../src/types/noir_artifact'; -import { shouldCompileIdentically } from '../shared/compile_with_deps.test'; - -const paths = getPaths('.'); - -async function getFile(path: string) { - // @ts-ignore - const basePath = new URL('./../../', import.meta.url).toString().replace(/\/$/g, ''); - const url = `${basePath}${path.replace('.', '')}`; - const response = await fetch(url); - return response; -} - -// eslint-disable-next-line @typescript-eslint/no-explicit-any -async function getPrecompiledSource(path: string): Promise { - const response = await getFile(path); - const compiledData = await response.text(); - return JSON.parse(compiledData); -} - -describe('noir-compiler/browser', () => { - shouldCompileIdentically( - async () => { - const { contractExpectedArtifact } = paths; - const fm = createFileManager('/'); - const files = Object.values(paths).filter((fileOrDir) => /^\.?\/.*\..*$/.test(fileOrDir)); - for (const path of files) { - console.log(path); - await fm.writeFile(path, (await getFile(path)).body as ReadableStream); - } - const nargoArtifact = (await getPrecompiledSource(contractExpectedArtifact)) as ContractArtifact; - const noirWasmArtifact = await compile(fm, '/fixtures/noir-contract'); - - return { nargoArtifact, noirWasmArtifact }; - }, - expect, - 60 * 20e3, - ); -}); diff --git a/compiler/wasm/test/compiler/node/compile.test.ts b/compiler/wasm/test/compiler/node/compile.test.ts new file mode 100644 index 00000000000..9af98195825 --- /dev/null +++ b/compiler/wasm/test/compiler/node/compile.test.ts @@ -0,0 +1,39 @@ +import { join, resolve } from 'path'; +import { getPaths } from '../../shared'; + +import { expect } from 'chai'; +import { compile, createFileManager } from '@noir-lang/noir_wasm'; +import { readFile } from 'fs/promises'; +import { ContractArtifact, ProgramArtifact } from '../../../src/types/noir_artifact'; +import { shouldCompileContractIdentically, shouldCompileProgramIdentically } from '../shared/compile.test'; + +const basePath = resolve(join(__dirname, '../../')); + +describe('noir-compiler/node', () => { + shouldCompileProgramIdentically(async () => { + const { simpleScriptProjectPath, simpleScriptExpectedArtifact } = getPaths(basePath); + + const fm = createFileManager(simpleScriptProjectPath); + const nargoArtifact = JSON.parse((await readFile(simpleScriptExpectedArtifact)).toString()) as ProgramArtifact; + const noirWasmArtifact = await compile(fm); + return { nargoArtifact, noirWasmArtifact }; + }, expect); + + shouldCompileProgramIdentically(async () => { + const 
{ depsScriptProjectPath, depsScriptExpectedArtifact } = getPaths(basePath); + + const fm = createFileManager(depsScriptProjectPath); + const nargoArtifact = JSON.parse((await readFile(depsScriptExpectedArtifact)).toString()) as ProgramArtifact; + const noirWasmArtifact = await compile(fm); + return { nargoArtifact, noirWasmArtifact }; + }, expect); + + shouldCompileContractIdentically(async () => { + const { contractProjectPath, contractExpectedArtifact } = getPaths(basePath); + + const fm = createFileManager(contractProjectPath); + const nargoArtifact = JSON.parse((await readFile(contractExpectedArtifact)).toString()) as ContractArtifact; + const noirWasmArtifact = await compile(fm); + return { nargoArtifact, noirWasmArtifact }; + }, expect); +}); diff --git a/compiler/wasm/test/compiler/node/compile_with_deps.test.ts b/compiler/wasm/test/compiler/node/compile_with_deps.test.ts deleted file mode 100644 index 2a402dc9d02..00000000000 --- a/compiler/wasm/test/compiler/node/compile_with_deps.test.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { join, resolve } from 'path'; -import { getPaths } from '../../shared'; - -import { expect } from 'chai'; -import { compile, createFileManager } from '@noir-lang/noir_wasm'; -import { readFile } from 'fs/promises'; -import { ContractArtifact } from '../../../src/types/noir_artifact'; -import { shouldCompileIdentically } from '../shared/compile_with_deps.test'; - -const basePath = resolve(join(__dirname, '../../')); -const { contractProjectPath, contractExpectedArtifact } = getPaths(basePath); - -describe('noir-compiler/node', () => { - shouldCompileIdentically(async () => { - const fm = createFileManager(contractProjectPath); - const nargoArtifact = JSON.parse((await readFile(contractExpectedArtifact)).toString()) as ContractArtifact; - const noirWasmArtifact = await compile(fm); - return { nargoArtifact, noirWasmArtifact }; - }, expect); -}); diff --git a/compiler/wasm/test/compiler/shared/compile_with_deps.test.ts b/compiler/wasm/test/compiler/shared/compile.test.ts similarity index 52% rename from compiler/wasm/test/compiler/shared/compile_with_deps.test.ts rename to compiler/wasm/test/compiler/shared/compile.test.ts index 0960cba0665..88e8e8c8e5a 100644 --- a/compiler/wasm/test/compiler/shared/compile_with_deps.test.ts +++ b/compiler/wasm/test/compiler/shared/compile.test.ts @@ -6,9 +6,47 @@ import { DebugFileMap, DebugInfo, NoirFunctionEntry, + ProgramArtifact, + ProgramCompilationArtifacts, } from '../../../src/types/noir_artifact'; -export function shouldCompileIdentically( +export function shouldCompileProgramIdentically( + compileFn: () => Promise<{ nargoArtifact: ProgramArtifact; noirWasmArtifact: CompilationResult }>, + expect: typeof Expect, + timeout = 5000, +) { + it('both nargo and noir_wasm should compile identically', async () => { + // Compile! 
+ const { nargoArtifact, noirWasmArtifact } = await compileFn(); + + // Prepare nargo artifact + const [_nargoDebugInfos, nargoFileMap] = deleteProgramDebugMetadata(nargoArtifact); + normalizeVersion(nargoArtifact); + + // Prepare noir-wasm artifact + const noirWasmProgram = (noirWasmArtifact as unknown as ProgramCompilationArtifacts).program; + expect(noirWasmProgram).not.to.be.undefined; + const [_noirWasmDebugInfos, norWasmFileMap] = deleteProgramDebugMetadata(noirWasmProgram); + normalizeVersion(noirWasmProgram); + + // We first compare both contracts without considering debug info + delete (noirWasmProgram as Partial).hash; + delete (nargoArtifact as Partial).hash; + expect(nargoArtifact).to.deep.eq(noirWasmProgram); + + // Compare the file maps, ignoring keys, since those depend in the order in which files are visited, + // which may change depending on the file manager implementation. Also ignores paths, since the base + // path is reported differently between nargo and noir-wasm. + expect(getSources(nargoFileMap)).to.have.members(getSources(norWasmFileMap)); + + // Compare the debug symbol information, ignoring the actual ids used for file identifiers. + // Debug symbol info looks like the following, what we need is to ignore the 'file' identifiers + // {"locations":{"0":[{"span":{"start":141,"end":156},"file":39},{"span":{"start":38,"end":76},"file":38},{"span":{"start":824,"end":862},"file":23}]}} + // expect(nargoDebugInfos).to.deep.eq(noirWasmDebugInfos); + }).timeout(timeout); +} + +export function shouldCompileContractIdentically( compileFn: () => Promise<{ nargoArtifact: ContractArtifact; noirWasmArtifact: CompilationResult }>, expect: typeof Expect, timeout = 5000, @@ -18,13 +56,13 @@ export function shouldCompileIdentically( const { nargoArtifact, noirWasmArtifact } = await compileFn(); // Prepare nargo artifact - const [nargoDebugInfos, nargoFileMap] = deleteDebugMetadata(nargoArtifact); + const [nargoDebugInfos, nargoFileMap] = deleteContractDebugMetadata(nargoArtifact); normalizeVersion(nargoArtifact); // Prepare noir-wasm artifact - const noirWasmContract = (noirWasmArtifact as ContractCompilationArtifacts).contract; + const noirWasmContract = (noirWasmArtifact as unknown as ContractCompilationArtifacts).contract; expect(noirWasmContract).not.to.be.undefined; - const [noirWasmDebugInfos, norWasmFileMap] = deleteDebugMetadata(noirWasmContract); + const [noirWasmDebugInfos, norWasmFileMap] = deleteContractDebugMetadata(noirWasmContract); normalizeVersion(noirWasmContract); // We first compare both contracts without considering debug info @@ -43,7 +81,7 @@ export function shouldCompileIdentically( } /** Remove commit identifier from version, which may not match depending on cached nargo and noir-wasm */ -function normalizeVersion(contract: ContractArtifact) { +function normalizeVersion(contract: ProgramArtifact | ContractArtifact) { contract.noir_version = contract.noir_version.replace(/\+.+$/, ''); } @@ -57,8 +95,18 @@ function extractDebugInfos(fns: NoirFunctionEntry[]) { }); } +/** Deletes all debug info from a program and returns it. */ +function deleteProgramDebugMetadata(program: ProgramArtifact) { + const debugSymbols = inflateDebugSymbols(program.debug_symbols); + const fileMap = program.file_map; + + delete (program as Partial).debug_symbols; + delete (program as Partial).file_map; + return [debugSymbols, fileMap]; +} + /** Deletes all debug info from a contract and returns it. 
*/ -function deleteDebugMetadata(contract: ContractArtifact) { +function deleteContractDebugMetadata(contract: ContractArtifact) { contract.functions.sort((a, b) => a.name.localeCompare(b.name)); const fileMap = contract.file_map; delete (contract as Partial).file_map; diff --git a/compiler/wasm/test/shared.ts b/compiler/wasm/test/shared.ts index 9181919ff39..9f4d417a614 100644 --- a/compiler/wasm/test/shared.ts +++ b/compiler/wasm/test/shared.ts @@ -1,14 +1,23 @@ export function getPaths(basePath: string) { const fixtures = `${basePath}/fixtures`; - const simpleScriptSourcePath = `${fixtures}/simple/src/main.nr`; - const simpleScriptExpectedArtifact = `${fixtures}/simple/target/noir_wasm_testing.json`; + const simpleScriptProjectPath = `${fixtures}/simple`; + const simpleScriptSourcePath = `${simpleScriptProjectPath}/src/main.nr`; + const simpleScriptTOMLPath = `${simpleScriptProjectPath}/Nargo.toml`; + const simpleScriptExpectedArtifact = `${simpleScriptProjectPath}/target/noir_wasm_testing.json`; - const depsScriptSourcePath = `${fixtures}/with-deps/src/main.nr`; - const depsScriptExpectedArtifact = `${fixtures}/with-deps/target/noir_wasm_testing.json`; + const depsScriptProjectPath = `${fixtures}/with-deps`; + const depsScriptSourcePath = `${depsScriptProjectPath}/src/main.nr`; + const depsScriptTOMLPath = `${depsScriptProjectPath}/Nargo.toml`; + const depsScriptExpectedArtifact = `${depsScriptProjectPath}/target/noir_wasm_testing.json`; - const libASourcePath = `${fixtures}/deps/lib-a/src/lib.nr`; - const libBSourcePath = `${fixtures}/deps/lib-b/src/lib.nr`; + const libAProjectPath = `${fixtures}/deps/lib-a`; + const libASourcePath = `${libAProjectPath}/src/lib.nr`; + const libATOMLPath = `${libAProjectPath}/Nargo.toml`; + + const libBProjectPath = `${fixtures}/deps/lib-b`; + const libBSourcePath = `${libBProjectPath}/src/lib.nr`; + const libBTOMLPath = `${libBProjectPath}/Nargo.toml`; const contractProjectPath = `${fixtures}/noir-contract`; const contractSourcePath = `${contractProjectPath}/src/main.nr`; @@ -22,12 +31,18 @@ export function getPaths(basePath: string) { const libCTOMLPath = `${libCProjectPath}/Nargo.toml`; return { + simpleScriptProjectPath, simpleScriptSourcePath, + simpleScriptTOMLPath, simpleScriptExpectedArtifact, + depsScriptProjectPath, depsScriptSourcePath, + depsScriptTOMLPath, depsScriptExpectedArtifact, libASourcePath, + libATOMLPath, libBSourcePath, + libBTOMLPath, contractProjectPath, contractSourcePath, contractTOMLPath, From e3829213d8411f84e117a14b43816967925095e0 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Thu, 22 Feb 2024 11:07:46 +0000 Subject: [PATCH 26/45] feat(ci): Use wasm-opt when compiling wasm packages (#4334) # Description ## Problem\* Resolves ## Summary\* We're currently building unoptimised wasm binaries in CI as wasm-opt isn't installed. This PR installs wasm-opt to perform these optimisations. ## Additional Context ## Documentation\* Check one: - [ ] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [ ] I have tested the changes locally. - [ ] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. 
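For context, `wasm-opt` is Binaryen's wasm-to-wasm optimiser; the script added below installs it via `cargo-binstall` so it is available on the `PATH` in CI. Once installed, the wasm build scripts can run an optimisation pass along the lines of `wasm-opt input.wasm -Oz -o output.wasm` (file names illustrative; `-Oz` optimises aggressively for binary size) rather than shipping the unoptimised `wasm-bindgen` output.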
--- .github/scripts/wasm-opt-install.sh | 8 ++++++++ .github/workflows/publish-es-packages.yml | 9 +++++++++ .github/workflows/test-js-packages.yml | 9 +++++++++ 3 files changed, 26 insertions(+) create mode 100755 .github/scripts/wasm-opt-install.sh diff --git a/.github/scripts/wasm-opt-install.sh b/.github/scripts/wasm-opt-install.sh new file mode 100755 index 00000000000..cbdeb8f2bfe --- /dev/null +++ b/.github/scripts/wasm-opt-install.sh @@ -0,0 +1,8 @@ +#!/bin/bash +set -eu + +cd $(dirname "$0") + +./cargo-binstall-install.sh + +cargo-binstall wasm-opt --version 0.116.0 -y diff --git a/.github/workflows/publish-es-packages.yml b/.github/workflows/publish-es-packages.yml index b22a26c685e..f72a97b2684 100644 --- a/.github/workflows/publish-es-packages.yml +++ b/.github/workflows/publish-es-packages.yml @@ -32,6 +32,9 @@ jobs: - name: Install Yarn dependencies uses: ./.github/actions/setup + - name: Install wasm-opt + run: ./.github/scripts/wasm-opt-install.sh + - name: Build noirc_abi run: ./.github/scripts/noirc-abi-build.sh @@ -61,6 +64,9 @@ jobs: - name: Install Yarn dependencies uses: ./.github/actions/setup + - name: Install wasm-opt + run: ./.github/scripts/wasm-opt-install.sh + - name: Build noir_js_types run: yarn workspace @noir-lang/types build @@ -93,6 +99,9 @@ jobs: - name: Install Yarn dependencies uses: ./.github/actions/setup + - name: Install wasm-opt + run: ./.github/scripts/wasm-opt-install.sh + - name: Build acvm_js run: ./.github/scripts/acvm_js-build.sh diff --git a/.github/workflows/test-js-packages.yml b/.github/workflows/test-js-packages.yml index eb9c50d82dd..b3908ee5d3e 100644 --- a/.github/workflows/test-js-packages.yml +++ b/.github/workflows/test-js-packages.yml @@ -66,6 +66,9 @@ jobs: - name: Install Yarn dependencies uses: ./.github/actions/setup + - name: Install wasm-opt + run: ./.github/scripts/wasm-opt-install.sh + - name: Build noirc_abi run: ./.github/scripts/noirc-abi-build.sh @@ -97,6 +100,9 @@ jobs: - name: Install Yarn dependencies uses: ./.github/actions/setup + - name: Install wasm-opt + run: ./.github/scripts/wasm-opt-install.sh + - name: Build noir_js_types run: yarn workspace @noir-lang/types build @@ -132,6 +138,9 @@ jobs: - name: Install Yarn dependencies uses: ./.github/actions/setup + - name: Install wasm-opt + run: ./.github/scripts/wasm-opt-install.sh + - name: Build acvm_js run: ./.github/scripts/acvm_js-build.sh From ceb8001e213066bd8a01d90657951ce5f1419f3a Mon Sep 17 00:00:00 2001 From: paulallensuxs <114240091+paulallensuxs@users.noreply.github.com> Date: Thu, 22 Feb 2024 12:46:10 +0000 Subject: [PATCH 27/45] chore: Add #[recursive] Explainer to Documentation (#4399) # Description ## Problem Step towards [#4392](https://github.com/noir-lang/noir/issues/4392) The documentation for Recursive Proofs was lacking information on the newly introduced `#[recursive]` attribute, which is crucial for understanding how to mark circuits for recursive proof generation. ## Summary This pull request updates the Recursive Proofs documentation page to include a comprehensive section on the `#[recursive]` attribute. It explains the attribute's purpose, how it should be used, and provides an example demonstrating its application within a circuit definition. ## Additional Context The introduction of the `#[recursive]` attribute simplifies the process of designating circuits for recursive proofs, eliminating the need for manual flagging in the tooling infrastructure. ## Documentation - [ ] No documentation needed. 
- [x] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. --------- Co-authored-by: Savio <72797635+Savio-Sou@users.noreply.github.com> --- docs/docs/noir/standard_library/recursion.md | 20 +++++++++++++++++++ .../noir/standard_library/recursion.md | 20 +++++++++++++++++++ 2 files changed, 40 insertions(+) diff --git a/docs/docs/noir/standard_library/recursion.md b/docs/docs/noir/standard_library/recursion.md index f252150c8b5..9337499dac8 100644 --- a/docs/docs/noir/standard_library/recursion.md +++ b/docs/docs/noir/standard_library/recursion.md @@ -8,6 +8,26 @@ Noir supports recursively verifying proofs, meaning you verify the proof of a No Read [the explainer on recursion](../../explainers/explainer-recursion.md) to know more about this function and the [guide on how to use it.](../../how_to/how-to-recursion.md) +## The `#[recursive]` Attribute + +In Noir, the `#[recursive]` attribute is used to indicate that a circuit is designed for recursive proof generation. When applied, it informs the compiler and the tooling that the circuit should be compiled in a way that makes its proofs suitable for recursive verification. This attribute eliminates the need for manual flagging of recursion at the tooling level, streamlining the proof generation process for recursive circuits. + +### Example usage with `#[recursive]` + +```rust +#[recursive] +fn main(x: Field, y: pub Field) { + assert(x == y, "x and y are not equal"); +} + +// This marks the circuit as recursion-friendly and indicates that proofs generated from this circuit +// are intended for recursive verification. +``` + +By incorporating this attribute directly in the circuit's definition, tooling like Nargo and NoirJS can automatically execute recursive-specific duties for Noir programs (e.g. recursive-friendly proof artifact generation) without additional flags or configurations. + +## Verifying Recursive Proofs + ```rust #[foreign(verify_proof)] fn verify_proof(_verification_key : [Field], _proof : [Field], _public_input : Field, _key_hash : Field) {} diff --git a/docs/versioned_docs/version-v0.24.0/noir/standard_library/recursion.md b/docs/versioned_docs/version-v0.24.0/noir/standard_library/recursion.md index f252150c8b5..9337499dac8 100644 --- a/docs/versioned_docs/version-v0.24.0/noir/standard_library/recursion.md +++ b/docs/versioned_docs/version-v0.24.0/noir/standard_library/recursion.md @@ -8,6 +8,26 @@ Noir supports recursively verifying proofs, meaning you verify the proof of a No Read [the explainer on recursion](../../explainers/explainer-recursion.md) to know more about this function and the [guide on how to use it.](../../how_to/how-to-recursion.md) +## The `#[recursive]` Attribute + +In Noir, the `#[recursive]` attribute is used to indicate that a circuit is designed for recursive proof generation. When applied, it informs the compiler and the tooling that the circuit should be compiled in a way that makes its proofs suitable for recursive verification. This attribute eliminates the need for manual flagging of recursion at the tooling level, streamlining the proof generation process for recursive circuits. 
+ +### Example usage with `#[recursive]` + +```rust +#[recursive] +fn main(x: Field, y: pub Field) { + assert(x == y, "x and y are not equal"); +} + +// This marks the circuit as recursion-friendly and indicates that proofs generated from this circuit +// are intended for recursive verification. +``` + +By incorporating this attribute directly in the circuit's definition, tooling like Nargo and NoirJS can automatically execute recursive-specific duties for Noir programs (e.g. recursive-friendly proof artifact generation) without additional flags or configurations. + +## Verifying Recursive Proofs + ```rust #[foreign(verify_proof)] fn verify_proof(_verification_key : [Field], _proof : [Field], _public_input : Field, _key_hash : Field) {} From 601fd9afc502236af1db0c4492698ba2298c7501 Mon Sep 17 00:00:00 2001 From: jfecher Date: Thu, 22 Feb 2024 15:31:36 +0000 Subject: [PATCH 28/45] fix!: Ban Fields in for loop indices and bitwise ops (#4376) # Description ## Problem\* Resolves https://github.com/noir-lang/noir/issues/3639 Resolves https://github.com/noir-lang/noir/issues/4193 ## Summary\* Uses the new TypeVariableKind::Integer in for loops and bitwise operations to prevent `Field` types from being used there. Removes the old `delayed_type_checks` hack. ## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. --------- Co-authored-by: kevaundray Co-authored-by: TomAFrench --- .../src/ssa/function_builder/mod.rs | 5 + .../src/ssa/ir/instruction/call.rs | 12 +- compiler/noirc_evaluator/src/ssa/ir/types.rs | 5 + .../src/ssa/ssa_gen/context.rs | 10 +- .../noirc_evaluator/src/ssa/ssa_gen/mod.rs | 9 +- compiler/noirc_frontend/src/ast/expression.rs | 18 +-- .../src/hir/resolution/resolver.rs | 2 +- .../noirc_frontend/src/hir/type_check/expr.rs | 148 +++++++----------- .../noirc_frontend/src/hir/type_check/mod.rs | 31 +--- .../noirc_frontend/src/hir/type_check/stmt.rs | 13 +- compiler/noirc_frontend/src/hir_def/expr.rs | 13 -- compiler/noirc_frontend/src/hir_def/types.rs | 17 +- .../src/monomorphization/mod.rs | 7 +- compiler/noirc_frontend/src/tests.rs | 2 +- noir_stdlib/src/array.nr | 10 +- noir_stdlib/src/collections/bounded_vec.nr | 20 ++- noir_stdlib/src/collections/vec.nr | 8 +- noir_stdlib/src/field.nr | 2 +- noir_stdlib/src/hash/poseidon.nr | 8 +- noir_stdlib/src/hash/poseidon/bn254.nr | 16 +- noir_stdlib/src/slice.nr | 4 +- .../execution_success/array_len/src/main.nr | 6 +- .../brillig_cow_regression/src/main.nr | 14 +- .../brillig_oracle/Prover.toml | 2 +- .../brillig_oracle/src/main.nr | 4 +- .../brillig_slices/src/main.nr | 2 +- .../global_consts/src/baz.nr | 2 +- .../global_consts/src/foo.nr | 6 +- .../global_consts/src/foo/bar.nr | 4 +- .../global_consts/src/main.nr | 14 +- .../slice_dynamic_index/src/main.nr | 6 +- .../execution_success/slices/src/main.nr | 2 +- 32 files changed, 184 insertions(+), 238 deletions(-) diff --git a/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs b/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs index fe71b876879..9d27554dcaa 100644 --- a/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs +++ b/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs @@ -115,6 +115,11 @@ impl FunctionBuilder { 
self.numeric_constant(value.into(), Type::field()) } + /// Insert a numeric constant into the current function of type Type::length_type() + pub(crate) fn length_constant(&mut self, value: impl Into) -> ValueId { + self.numeric_constant(value.into(), Type::length_type()) + } + /// Insert an array constant into the current function with the given element values. pub(crate) fn array_constant(&mut self, elements: im::Vector, typ: Type) -> ValueId { self.current_function.dfg.make_array(elements, typ) diff --git a/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs b/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs index 4217a3d4710..9349d58c4d9 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs @@ -77,7 +77,7 @@ pub(super) fn simplify_call( Intrinsic::ArrayLen => { if let Some(length) = dfg.try_get_array_length(arguments[0]) { let length = FieldElement::from(length as u128); - SimplifyResult::SimplifiedTo(dfg.make_constant(length, Type::field())) + SimplifyResult::SimplifiedTo(dfg.make_constant(length, Type::length_type())) } else if matches!(dfg.type_of_value(arguments[1]), Type::Slice(_)) { SimplifyResult::SimplifiedTo(arguments[0]) } else { @@ -283,7 +283,7 @@ fn update_slice_length( operator: BinaryOp, block: BasicBlockId, ) -> ValueId { - let one = dfg.make_constant(FieldElement::one(), Type::field()); + let one = dfg.make_constant(FieldElement::one(), Type::length_type()); let instruction = Instruction::Binary(Binary { lhs: slice_len, operator, rhs: one }); let call_stack = dfg.get_value_call_stack(slice_len); dfg.insert_instruction_and_results(instruction, block, None, call_stack).first() @@ -296,8 +296,8 @@ fn simplify_slice_push_back( dfg: &mut DataFlowGraph, block: BasicBlockId, ) -> SimplifyResult { - // The capacity must be an integer so that we can compare it against the slice length which is represented as a field - let capacity = dfg.make_constant((slice.len() as u128).into(), Type::unsigned(64)); + // The capacity must be an integer so that we can compare it against the slice length + let capacity = dfg.make_constant((slice.len() as u128).into(), Type::length_type()); let len_equals_capacity_instr = Instruction::Binary(Binary { lhs: arguments[0], operator: BinaryOp::Eq, rhs: capacity }); let call_stack = dfg.get_value_call_stack(arguments[0]); @@ -362,7 +362,7 @@ fn simplify_slice_pop_back( let new_slice_length = update_slice_length(arguments[0], dfg, BinaryOp::Sub, block); - let element_size = dfg.make_constant((element_count as u128).into(), Type::field()); + let element_size = dfg.make_constant((element_count as u128).into(), Type::length_type()); let flattened_len_instr = Instruction::binary(BinaryOp::Mul, arguments[0], element_size); let mut flattened_len = dfg .insert_instruction_and_results(flattened_len_instr, block, None, CallStack::new()) @@ -478,7 +478,7 @@ fn make_constant_slice( let typ = Type::Slice(Rc::new(vec![typ])); let length = FieldElement::from(result_constants.len() as u128); - (dfg.make_constant(length, Type::field()), dfg.make_array(result_constants.into(), typ)) + (dfg.make_constant(length, Type::length_type()), dfg.make_array(result_constants.into(), typ)) } /// Returns a slice (represented by a tuple (len, slice)) of constants corresponding to the limbs of the radix decomposition. 
diff --git a/compiler/noirc_evaluator/src/ssa/ir/types.rs b/compiler/noirc_evaluator/src/ssa/ir/types.rs index 8dc9e67db79..ea3f5393245 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/types.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/types.rs @@ -90,6 +90,11 @@ impl Type { Type::Numeric(NumericType::NativeField) } + /// Creates the type of an array's length. + pub(crate) fn length_type() -> Type { + Type::unsigned(64) + } + /// Returns the bit size of the provided numeric type. /// /// # Panics diff --git a/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs b/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs index 845ffd15413..9c760c013a9 100644 --- a/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs +++ b/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs @@ -192,7 +192,7 @@ impl<'a> FunctionContext<'a> { ast::Type::Slice(elements) => { let element_types = Self::convert_type(elements).flatten(); Tree::Branch(vec![ - Tree::Leaf(f(Type::field())), + Tree::Leaf(f(Type::length_type())), Tree::Leaf(f(Type::Slice(Rc::new(element_types)))), ]) } @@ -640,13 +640,13 @@ impl<'a> FunctionContext<'a> { let result_alloc = self.builder.set_location(location).insert_allocate(Type::bool()); let true_value = self.builder.numeric_constant(1u128, Type::bool()); self.builder.insert_store(result_alloc, true_value); - let zero = self.builder.field_constant(0u128); + let zero = self.builder.length_constant(0u128); self.builder.terminate_with_jmp(loop_start, vec![zero]); // loop_start self.builder.switch_to_block(loop_start); - let i = self.builder.add_block_parameter(loop_start, Type::field()); - let array_length = self.builder.field_constant(array_length as u128); + let i = self.builder.add_block_parameter(loop_start, Type::length_type()); + let array_length = self.builder.length_constant(array_length as u128); let v0 = self.builder.insert_binary(i, BinaryOp::Lt, array_length); self.builder.terminate_with_jmpif(v0, loop_body, loop_end); @@ -658,7 +658,7 @@ impl<'a> FunctionContext<'a> { let v4 = self.builder.insert_load(result_alloc, Type::bool()); let v5 = self.builder.insert_binary(v4, BinaryOp::And, v3); self.builder.insert_store(result_alloc, v5); - let one = self.builder.field_constant(1u128); + let one = self.builder.length_constant(1u128); let v6 = self.builder.insert_binary(i, BinaryOp::Add, one); self.builder.terminate_with_jmp(loop_start, vec![v6]); diff --git a/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs b/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs index 8f2c923d62c..6f59fa13274 100644 --- a/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs +++ b/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs @@ -128,6 +128,7 @@ impl<'a> FunctionContext<'a> { } fn codegen_expression(&mut self, expr: &Expression) -> Result { + eprintln!("Codegen {expr}"); match expr { Expression::Ident(ident) => Ok(self.codegen_ident(ident)), Expression::Literal(literal) => self.codegen_literal(literal), @@ -196,7 +197,7 @@ impl<'a> FunctionContext<'a> { } ast::Type::Slice(_) => { let slice_length = - self.builder.field_constant(array.contents.len() as u128); + self.builder.length_constant(array.contents.len() as u128); let slice_contents = self.codegen_array_checked(elements, typ[1].clone())?; Tree::Branch(vec![slice_length.into(), slice_contents]) @@ -221,7 +222,7 @@ impl<'a> FunctionContext<'a> { // A caller needs multiple pieces of information to make use of a format string // The message string, the number of fields to be formatted, and the fields themselves let string = self.codegen_string(string); - let 
field_count = self.builder.field_constant(*number_of_fields as u128); + let field_count = self.builder.length_constant(*number_of_fields as u128); let fields = self.codegen_expression(fields)?; Ok(Tree::Branch(vec![string, field_count.into(), fields])) @@ -347,8 +348,10 @@ impl<'a> FunctionContext<'a> { } fn codegen_binary(&mut self, binary: &ast::Binary) -> Result { + eprintln!("Start binary"); let lhs = self.codegen_non_tuple_expression(&binary.lhs)?; let rhs = self.codegen_non_tuple_expression(&binary.rhs)?; + eprintln!("Insert binary"); Ok(self.insert_binary(lhs, binary.operator, rhs, binary.location)) } @@ -615,7 +618,7 @@ impl<'a> FunctionContext<'a> { { match intrinsic { Intrinsic::SliceInsert => { - let one = self.builder.field_constant(1u128); + let one = self.builder.length_constant(1u128); // We add one here in the case of a slice insert as a slice insert at the length of the slice // can be converted to a slice push back diff --git a/compiler/noirc_frontend/src/ast/expression.rs b/compiler/noirc_frontend/src/ast/expression.rs index c78deaf6dbb..2a252633a29 100644 --- a/compiler/noirc_frontend/src/ast/expression.rs +++ b/compiler/noirc_frontend/src/ast/expression.rs @@ -236,7 +236,15 @@ impl BinaryOpKind { } pub fn is_valid_for_field_type(self) -> bool { - matches!(self, BinaryOpKind::Equal | BinaryOpKind::NotEqual) + matches!( + self, + BinaryOpKind::Add + | BinaryOpKind::Subtract + | BinaryOpKind::Multiply + | BinaryOpKind::Divide + | BinaryOpKind::Equal + | BinaryOpKind::NotEqual + ) } pub fn as_string(self) -> &'static str { @@ -280,14 +288,6 @@ impl BinaryOpKind { BinaryOpKind::Modulo => Token::Percent, } } - - pub fn is_bit_shift(&self) -> bool { - matches!(self, BinaryOpKind::ShiftRight | BinaryOpKind::ShiftLeft) - } - - pub fn is_modulo(&self) -> bool { - matches!(self, BinaryOpKind::Modulo) - } } #[derive(PartialEq, PartialOrd, Eq, Ord, Hash, Debug, Copy, Clone)] diff --git a/compiler/noirc_frontend/src/hir/resolution/resolver.rs b/compiler/noirc_frontend/src/hir/resolution/resolver.rs index f05a69be7c2..7f9e48353a7 100644 --- a/compiler/noirc_frontend/src/hir/resolution/resolver.rs +++ b/compiler/noirc_frontend/src/hir/resolution/resolver.rs @@ -1464,7 +1464,7 @@ impl<'a> Resolver<'a> { // checker does not check definition kinds and otherwise expects // parameters to already be typed. 
if self.interner.definition_type(hir_ident.id) == Type::Error { - let typ = Type::polymorphic_integer(self.interner); + let typ = Type::polymorphic_integer_or_field(self.interner); self.interner.push_definition_type(hir_ident.id, typ); } } diff --git a/compiler/noirc_frontend/src/hir/type_check/expr.rs b/compiler/noirc_frontend/src/hir/type_check/expr.rs index a669a4a246e..b78f07c88f2 100644 --- a/compiler/noirc_frontend/src/hir/type_check/expr.rs +++ b/compiler/noirc_frontend/src/hir/type_check/expr.rs @@ -104,7 +104,7 @@ impl<'interner> TypeChecker<'interner> { Type::Array(Box::new(length), Box::new(elem_type)) } HirLiteral::Bool(_) => Type::Bool, - HirLiteral::Integer(_, _) => Type::polymorphic_integer(self.interner), + HirLiteral::Integer(_, _) => Type::polymorphic_integer_or_field(self.interner), HirLiteral::Str(string) => { let len = Type::Constant(string.len() as u64); Type::String(Box::new(len)) @@ -529,13 +529,15 @@ impl<'interner> TypeChecker<'interner> { let index_type = self.check_expression(&index_expr.index); let span = self.interner.expr_span(&index_expr.index); - index_type.unify(&Type::polymorphic_integer(self.interner), &mut self.errors, || { - TypeCheckError::TypeMismatch { + index_type.unify( + &Type::polymorphic_integer_or_field(self.interner), + &mut self.errors, + || TypeCheckError::TypeMismatch { expected_typ: "an integer".to_owned(), expr_typ: index_type.to_string(), expr_span: span, - } - }); + }, + ); // When writing `a[i]`, if `a : &mut ...` then automatically dereference `a` as many // times as needed to get the underlying array. @@ -807,43 +809,13 @@ impl<'interner> TypeChecker<'interner> { // Matches on TypeVariable must be first to follow any type // bindings. - (TypeVariable(int, int_kind), other) | (other, TypeVariable(int, int_kind)) => { - if let TypeBinding::Bound(binding) = &*int.borrow() { + (TypeVariable(var, _), other) | (other, TypeVariable(var, _)) => { + if let TypeBinding::Bound(binding) = &*var.borrow() { return self.comparator_operand_type_rules(other, binding, op, span); } - if !op.kind.is_valid_for_field_type() && (other.is_bindable() || other.is_field()) { - let other = other.follow_bindings(); - - self.push_delayed_type_check(Box::new(move || { - if other.is_field() || other.is_bindable() { - Err(TypeCheckError::InvalidComparisonOnField { span }) - } else { - Ok(()) - } - })); - } - - let mut bindings = TypeBindings::new(); - if other - .try_bind_to_polymorphic_int( - int, - &mut bindings, - *int_kind == TypeVariableKind::Integer, - ) - .is_ok() - || other == &Type::Error - { - Type::apply_type_bindings(bindings); - Ok((Bool, false)) - } else { - Err(TypeCheckError::TypeMismatchWithSource { - expected: lhs_type.clone(), - actual: rhs_type.clone(), - span, - source: Source::Binary, - }) - } + self.bind_type_variables_for_infix(lhs_type, op, rhs_type, span); + Ok((Bool, false)) } (Alias(alias, args), other) | (other, Alias(alias, args)) => { let alias = alias.borrow().get_type(args); @@ -1071,6 +1043,38 @@ impl<'interner> TypeChecker<'interner> { } } + fn bind_type_variables_for_infix( + &mut self, + lhs_type: &Type, + op: &HirBinaryOp, + rhs_type: &Type, + span: Span, + ) { + self.unify(lhs_type, rhs_type, || TypeCheckError::TypeMismatchWithSource { + expected: lhs_type.clone(), + actual: rhs_type.clone(), + source: Source::Binary, + span, + }); + + // In addition to unifying both types, we also have to bind either + // the lhs or rhs to an integer type variable. 
This ensures if both lhs + // and rhs are type variables, that they will have the correct integer + // type variable kind instead of TypeVariableKind::Normal. + let target = if op.kind.is_valid_for_field_type() { + Type::polymorphic_integer_or_field(self.interner) + } else { + Type::polymorphic_integer(self.interner) + }; + + self.unify(lhs_type, &target, || TypeCheckError::TypeMismatchWithSource { + expected: lhs_type.clone(), + actual: rhs_type.clone(), + source: Source::Binary, + span, + }); + } + // Given a binary operator and another type. This method will produce the output type // and a boolean indicating whether to use the trait impl corresponding to the operator // or not. A value of false indicates the caller to use a primitive operation for this @@ -1093,58 +1097,15 @@ impl<'interner> TypeChecker<'interner> { // Matches on TypeVariable must be first so that we follow any type // bindings. - (TypeVariable(int, int_kind), other) | (other, TypeVariable(int, int_kind)) => { + (TypeVariable(int, _), other) | (other, TypeVariable(int, _)) => { if let TypeBinding::Bound(binding) = &*int.borrow() { return self.infix_operand_type_rules(binding, op, other, span); } - if (op.is_modulo() || op.is_bitwise()) && (other.is_bindable() || other.is_field()) - { - let other = other.follow_bindings(); - let kind = op.kind; - // This will be an error if these types later resolve to a Field, or stay - // polymorphic as the bit size will be unknown. Delay this error until the function - // finishes resolving so we can still allow cases like `let x: u8 = 1 << 2;`. - self.push_delayed_type_check(Box::new(move || { - if other.is_field() { - if kind == BinaryOpKind::Modulo { - Err(TypeCheckError::FieldModulo { span }) - } else { - Err(TypeCheckError::InvalidBitwiseOperationOnField { span }) - } - } else if other.is_bindable() { - Err(TypeCheckError::AmbiguousBitWidth { span }) - } else if kind.is_bit_shift() && other.is_signed() { - Err(TypeCheckError::TypeCannotBeUsed { - typ: other, - place: "bit shift", - span, - }) - } else { - Ok(()) - } - })); - } - let mut bindings = TypeBindings::new(); - if other - .try_bind_to_polymorphic_int( - int, - &mut bindings, - *int_kind == TypeVariableKind::Integer, - ) - .is_ok() - || other == &Type::Error - { - Type::apply_type_bindings(bindings); - Ok((other.clone(), false)) - } else { - Err(TypeCheckError::TypeMismatchWithSource { - expected: lhs_type.clone(), - actual: rhs_type.clone(), - source: Source::Binary, - span, - }) - } + self.bind_type_variables_for_infix(lhs_type, op, rhs_type, span); + + // Both types are unified so the choice of which to return is arbitrary + Ok((other.clone(), false)) } (Alias(alias, args), other) | (other, Alias(alias, args)) => { let alias = alias.borrow().get_type(args); @@ -1169,11 +1130,12 @@ impl<'interner> TypeChecker<'interner> { } // The result of two Fields is always a witness (FieldElement, FieldElement) => { - if op.is_bitwise() { - return Err(TypeCheckError::InvalidBitwiseOperationOnField { span }); - } - if op.is_modulo() { - return Err(TypeCheckError::FieldModulo { span }); + if !op.kind.is_valid_for_field_type() { + if op.kind == BinaryOpKind::Modulo { + return Err(TypeCheckError::FieldModulo { span }); + } else { + return Err(TypeCheckError::InvalidBitwiseOperationOnField { span }); + } } Ok((FieldElement, false)) } @@ -1213,7 +1175,7 @@ impl<'interner> TypeChecker<'interner> { self.errors .push(TypeCheckError::InvalidUnaryOp { kind: rhs_type.to_string(), span }); } - let expected = 
Type::polymorphic_integer(self.interner); + let expected = Type::polymorphic_integer_or_field(self.interner); rhs_type.unify(&expected, &mut self.errors, || TypeCheckError::InvalidUnaryOp { kind: rhs_type.to_string(), span, diff --git a/compiler/noirc_frontend/src/hir/type_check/mod.rs b/compiler/noirc_frontend/src/hir/type_check/mod.rs index 225f5756d7a..21d1c75a0f2 100644 --- a/compiler/noirc_frontend/src/hir/type_check/mod.rs +++ b/compiler/noirc_frontend/src/hir/type_check/mod.rs @@ -21,10 +21,7 @@ use crate::{ use self::errors::Source; -type TypeCheckFn = Box Result<(), TypeCheckError>>; - pub struct TypeChecker<'interner> { - delayed_type_checks: Vec, interner: &'interner mut NodeInterner, errors: Vec, current_function: Option, @@ -80,15 +77,7 @@ pub fn type_check_func(interner: &mut NodeInterner, func_id: FuncId) -> Vec (noirc_e impl<'interner> TypeChecker<'interner> { fn new(interner: &'interner mut NodeInterner) -> Self { - Self { - delayed_type_checks: Vec::new(), - interner, - errors: Vec::new(), - trait_constraints: Vec::new(), - current_function: None, - } - } - - pub fn push_delayed_type_check(&mut self, f: TypeCheckFn) { - self.delayed_type_checks.push(f); + Self { interner, errors: Vec::new(), trait_constraints: Vec::new(), current_function: None } } - fn check_function_body(&mut self, body: &ExprId) -> (Type, Vec) { - let body_type = self.check_expression(body); - (body_type, std::mem::take(&mut self.delayed_type_checks)) + fn check_function_body(&mut self, body: &ExprId) -> Type { + self.check_expression(body) } pub fn check_global( @@ -198,7 +176,6 @@ impl<'interner> TypeChecker<'interner> { interner: &'interner mut NodeInterner, ) -> Vec { let mut this = Self { - delayed_type_checks: Vec::new(), interner, errors: Vec::new(), trait_constraints: Vec::new(), diff --git a/compiler/noirc_frontend/src/hir/type_check/stmt.rs b/compiler/noirc_frontend/src/hir/type_check/stmt.rs index 370b4ee7b17..358bea86922 100644 --- a/compiler/noirc_frontend/src/hir/type_check/stmt.rs +++ b/compiler/noirc_frontend/src/hir/type_check/stmt.rs @@ -73,13 +73,10 @@ impl<'interner> TypeChecker<'interner> { let expected_type = Type::polymorphic_integer(self.interner); - self.unify(&start_range_type, &expected_type, || { - TypeCheckError::TypeCannotBeUsed { - typ: start_range_type.clone(), - place: "for loop", - span: range_span, - } - .add_context("The range of a loop must be known at compile-time") + self.unify(&start_range_type, &expected_type, || TypeCheckError::TypeCannotBeUsed { + typ: start_range_type.clone(), + place: "for loop", + span: range_span, }); self.interner.push_definition_type(for_loop.identifier.id, start_range_type); @@ -235,7 +232,7 @@ impl<'interner> TypeChecker<'interner> { let expr_span = self.interner.expr_span(index); index_type.unify( - &Type::polymorphic_integer(self.interner), + &Type::polymorphic_integer_or_field(self.interner), &mut self.errors, || TypeCheckError::TypeMismatch { expected_typ: "an integer".to_owned(), diff --git a/compiler/noirc_frontend/src/hir_def/expr.rs b/compiler/noirc_frontend/src/hir_def/expr.rs index 75ed68af0f6..b4c590de491 100644 --- a/compiler/noirc_frontend/src/hir_def/expr.rs +++ b/compiler/noirc_frontend/src/hir_def/expr.rs @@ -94,19 +94,6 @@ impl HirBinaryOp { let location = Location::new(op.span(), file); HirBinaryOp { location, kind } } - - pub fn is_bitwise(&self) -> bool { - use BinaryOpKind::*; - matches!(self.kind, And | Or | Xor | ShiftRight | ShiftLeft) - } - - pub fn is_bit_shift(&self) -> bool { - self.kind.is_bit_shift() - } - 
- pub fn is_modulo(&self) -> bool { - self.kind.is_modulo() - } } #[derive(Debug, Clone)] diff --git a/compiler/noirc_frontend/src/hir_def/types.rs b/compiler/noirc_frontend/src/hir_def/types.rs index d4d8a948460..e105da1ccf0 100644 --- a/compiler/noirc_frontend/src/hir_def/types.rs +++ b/compiler/noirc_frontend/src/hir_def/types.rs @@ -571,13 +571,20 @@ impl Type { Type::TypeVariable(var, kind) } - pub fn polymorphic_integer(interner: &mut NodeInterner) -> Type { + pub fn polymorphic_integer_or_field(interner: &mut NodeInterner) -> Type { let id = interner.next_type_variable_id(); let kind = TypeVariableKind::IntegerOrField; let var = TypeVariable::unbound(id); Type::TypeVariable(var, kind) } + pub fn polymorphic_integer(interner: &mut NodeInterner) -> Type { + let id = interner.next_type_variable_id(); + let kind = TypeVariableKind::Integer; + let var = TypeVariable::unbound(id); + Type::TypeVariable(var, kind) + } + /// A bit of an awkward name for this function - this function returns /// true for type variables or polymorphic integers which are unbound. /// NamedGenerics will always be false as although they are bindable, @@ -964,7 +971,7 @@ impl Type { /// Try to bind a PolymorphicInt variable to self, succeeding if self is an integer, field, /// other PolymorphicInt type, or type variable. If successful, the binding is placed in the /// given TypeBindings map rather than linked immediately. - pub fn try_bind_to_polymorphic_int( + fn try_bind_to_polymorphic_int( &self, var: &TypeVariable, bindings: &mut TypeBindings, @@ -977,7 +984,11 @@ impl Type { let this = self.substitute(bindings).follow_bindings(); match &this { - Type::FieldElement | Type::Integer(..) => { + Type::Integer(..) => { + bindings.insert(target_id, (var.clone(), this)); + Ok(()) + } + Type::FieldElement if !only_integer => { bindings.insert(target_id, (var.clone(), this)); Ok(()) } diff --git a/compiler/noirc_frontend/src/monomorphization/mod.rs b/compiler/noirc_frontend/src/monomorphization/mod.rs index 0f243e47bbe..2e714da21c6 100644 --- a/compiler/noirc_frontend/src/monomorphization/mod.rs +++ b/compiler/noirc_frontend/src/monomorphization/mod.rs @@ -27,8 +27,8 @@ use crate::{ }, node_interner::{self, DefinitionKind, NodeInterner, StmtId, TraitImplKind, TraitMethodId}, token::FunctionAttribute, - ContractFunctionType, FunctionKind, IntegerBitSize, Type, TypeBinding, TypeBindings, - TypeVariable, TypeVariableKind, UnaryOp, Visibility, + ContractFunctionType, FunctionKind, IntegerBitSize, Signedness, Type, TypeBinding, + TypeBindings, TypeVariable, TypeVariableKind, UnaryOp, Visibility, }; use self::ast::{Definition, FuncId, Function, LocalId, Program}; @@ -1107,7 +1107,8 @@ impl<'interner> Monomorphizer<'interner> { return match opcode.as_str() { "modulus_num_bits" => { let bits = (FieldElement::max_num_bits() as u128).into(); - let typ = ast::Type::Field; + let typ = + ast::Type::Integer(Signedness::Unsigned, IntegerBitSize::SixtyFour); Some(ast::Expression::Literal(ast::Literal::Integer(bits, typ, location))) } "zeroed" => { diff --git a/compiler/noirc_frontend/src/tests.rs b/compiler/noirc_frontend/src/tests.rs index 8a56b337398..c18379f1c26 100644 --- a/compiler/noirc_frontend/src/tests.rs +++ b/compiler/noirc_frontend/src/tests.rs @@ -956,7 +956,7 @@ mod test { #[test] fn resolve_for_expr() { let src = r#" - fn main(x : Field) { + fn main(x : u64) { for i in 1..20 { let _z = x + i; }; diff --git a/noir_stdlib/src/array.nr b/noir_stdlib/src/array.nr index 995af6c4c6f..7871b1a6f9a 100644 --- 
a/noir_stdlib/src/array.nr +++ b/noir_stdlib/src/array.nr @@ -4,10 +4,10 @@ use crate::cmp::{Ord}; // by the methods in the `slice` module impl [T; N] { #[builtin(array_len)] - pub fn len(self) -> Field {} + pub fn len(self) -> u64 {} pub fn sort(self) -> Self where T: Ord { - self.sort_via(|a, b| a <= b) + self.sort_via(|a: T, b: T| a <= b) } pub fn sort_via(self, ordering: fn[Env](T, T) -> bool) -> Self { @@ -31,7 +31,7 @@ impl [T; N] { } /// Returns the index of the elements in the array that would sort it, using the provided custom sorting function. - unconstrained fn get_sorting_index(self, ordering: fn[Env](T, T) -> bool) -> [Field; N] { + unconstrained fn get_sorting_index(self, ordering: fn[Env](T, T) -> bool) -> [u64; N] { let mut result = [0;N]; let mut a = self; for i in 0..N { @@ -117,7 +117,7 @@ impl [T; N] { // helper function used to look up the position of a value in an array of Field // Note that function returns 0 if the value is not found -unconstrained fn find_index(a: [Field;N], find: Field) -> Field { +unconstrained fn find_index(a: [u64; N], find: u64) -> u64 { let mut result = 0; for i in 0..a.len() { if a[i] == find { @@ -125,4 +125,4 @@ unconstrained fn find_index(a: [Field;N], find: Field) -> Field { } } result -} \ No newline at end of file +} diff --git a/noir_stdlib/src/collections/bounded_vec.nr b/noir_stdlib/src/collections/bounded_vec.nr index 332fefa63f9..a4aa4823f38 100644 --- a/noir_stdlib/src/collections/bounded_vec.nr +++ b/noir_stdlib/src/collections/bounded_vec.nr @@ -1,8 +1,6 @@ struct BoundedVec { storage: [T; MaxLen], - // TODO: change this to return a u64 as Noir now - // uses u64 for indexing - len: Field, + len: u64, empty_value: T, } @@ -11,27 +9,27 @@ impl BoundedVec { BoundedVec { storage: [initial_value; MaxLen], len: 0, empty_value: initial_value } } - pub fn get(mut self: Self, index: Field) -> T { - assert(index as u64 < self.len as u64); + pub fn get(mut self: Self, index: u64) -> T { + assert(index as u64 < self.len); self.storage[index] } - pub fn get_unchecked(mut self: Self, index: Field) -> T { + pub fn get_unchecked(mut self: Self, index: u64) -> T { self.storage[index] } pub fn push(&mut self, elem: T) { - assert(self.len as u64 < MaxLen as u64, "push out of bounds"); + assert(self.len < MaxLen as u64, "push out of bounds"); self.storage[self.len] = elem; self.len += 1; } - pub fn len(self) -> Field { + pub fn len(self) -> u64 { self.len } - pub fn max_len(_self: BoundedVec) -> Field { + pub fn max_len(_self: BoundedVec) -> u64{ MaxLen } @@ -59,7 +57,7 @@ impl BoundedVec { for i in 0..Len { exceeded_len |= i == append_len; if !exceeded_len { - self.storage[self.len + (i as Field)] = vec.get_unchecked(i as Field); + self.storage[self.len + i] = vec.get_unchecked(i); } } self.len = new_len; @@ -85,4 +83,4 @@ impl BoundedVec { } ret } -} \ No newline at end of file +} diff --git a/noir_stdlib/src/collections/vec.nr b/noir_stdlib/src/collections/vec.nr index 43d68e1d1e7..2e7945be827 100644 --- a/noir_stdlib/src/collections/vec.nr +++ b/noir_stdlib/src/collections/vec.nr @@ -17,7 +17,7 @@ impl Vec { /// Get an element from the vector at the given index. /// Panics if the given index /// points beyond the end of the vector. 
- pub fn get(self, index: Field) -> T { + pub fn get(self, index: u64) -> T { self.slice[index] } @@ -40,20 +40,20 @@ impl Vec { /// Insert an element at a specified index, shifting all elements /// after it to the right - pub fn insert(&mut self, index: Field, elem: T) { + pub fn insert(&mut self, index: u64, elem: T) { self.slice = self.slice.insert(index, elem); } /// Remove an element at a specified index, shifting all elements /// after it to the left, returning the removed element - pub fn remove(&mut self, index: Field) -> T { + pub fn remove(&mut self, index: u64) -> T { let (new_slice, elem) = self.slice.remove(index); self.slice = new_slice; elem } /// Returns the number of elements in the vector - pub fn len(self) -> Field { + pub fn len(self) -> u64 { self.slice.len() } } diff --git a/noir_stdlib/src/field.nr b/noir_stdlib/src/field.nr index 66fb50119f9..a7278d85999 100644 --- a/noir_stdlib/src/field.nr +++ b/noir_stdlib/src/field.nr @@ -89,7 +89,7 @@ impl Field { } #[builtin(modulus_num_bits)] -pub fn modulus_num_bits() -> Field {} +pub fn modulus_num_bits() -> u64 {} #[builtin(modulus_be_bits)] pub fn modulus_be_bits() -> [u1] {} diff --git a/noir_stdlib/src/hash/poseidon.nr b/noir_stdlib/src/hash/poseidon.nr index 3f4de73c0db..b1a7c4a2367 100644 --- a/noir_stdlib/src/hash/poseidon.nr +++ b/noir_stdlib/src/hash/poseidon.nr @@ -21,7 +21,7 @@ pub fn config( // Input checks let mul = crate::wrapping_mul(t as u8, (rf + rp)); assert(mul == ark.len() as u8); - assert(t * t == mds.len()); + assert(t * t == mds.len() as Field); assert(alpha != 0); PoseidonConfig { t, rf, rp, alpha, ark, mds } @@ -30,7 +30,7 @@ pub fn config( fn permute(pos_conf: PoseidonConfig, mut state: [Field; O]) -> [Field; O] { let PoseidonConfig {t, rf, rp, alpha, ark, mds} = pos_conf; - assert(t == state.len()); + assert(t == state.len() as Field); let mut count = 0; // for r in 0..rf + rp @@ -47,7 +47,7 @@ fn permute(pos_conf: PoseidonConfig, mut state: [Field; O]) -> [F } state = apply_matrix(mds, state); // Apply MDS matrix - count = count + t; + count = count + t as u64; } state @@ -85,7 +85,7 @@ fn absorb( fn check_security(rate: Field, width: Field, security: Field) -> bool { let n = modulus_num_bits(); - ((n - 1) * (width - rate) / 2) as u8 > security as u8 + ((n - 1) as Field * (width - rate) / 2) as u8 > security as u8 } // A*x where A is an n x n matrix in row-major order and x an n-vector fn apply_matrix(a: [Field; M], x: [Field; N]) -> [Field; N] { diff --git a/noir_stdlib/src/hash/poseidon/bn254.nr b/noir_stdlib/src/hash/poseidon/bn254.nr index 0db6d9546dc..37b08e3c8fb 100644 --- a/noir_stdlib/src/hash/poseidon/bn254.nr +++ b/noir_stdlib/src/hash/poseidon/bn254.nr @@ -9,12 +9,12 @@ use crate::hash::poseidon::apply_matrix; #[field(bn254)] pub fn permute(pos_conf: PoseidonConfig, mut state: [Field; O]) -> [Field; O] { let PoseidonConfig {t, rf: config_rf, rp: config_rp, alpha, ark, mds} = pos_conf; - let rf = 8; - let rp = [56, 57, 56, 60, 60, 63, 64, 63, 60, 66, 60, 65, 70, 60, 64, 68][state.len() - 2]; + let rf: u8 = 8; + let rp: u8 = [56, 57, 56, 60, 60, 63, 64, 63, 60, 66, 60, 65, 70, 60, 64, 68][state.len() - 2]; - assert(t == state.len()); - assert(rf == config_rf as Field); - assert(rp == config_rp as Field); + assert(t == state.len() as Field); + assert(rf == config_rf); + assert(rp == config_rp); let mut count = 0; // First half of full rounds @@ -27,7 +27,7 @@ pub fn permute(pos_conf: PoseidonConfig, mut state: [Field; O]) - } state = apply_matrix(mds, state); // Apply MDS matrix - count = 
count + t; + count = count + t as u64; } // Partial rounds for _r in 0..rp { @@ -37,7 +37,7 @@ pub fn permute(pos_conf: PoseidonConfig, mut state: [Field; O]) - state[0] = state[0].pow_32(alpha); state = apply_matrix(mds, state); // Apply MDS matrix - count = count + t; + count = count + t as u64; } // Second half of full rounds for _r in 0..rf / 2 { @@ -49,7 +49,7 @@ pub fn permute(pos_conf: PoseidonConfig, mut state: [Field; O]) - } state = apply_matrix(mds, state); // Apply MDS matrix - count = count + t; + count = count + t as u64; } state diff --git a/noir_stdlib/src/slice.nr b/noir_stdlib/src/slice.nr index aa4b73edc1a..bb5c43e497b 100644 --- a/noir_stdlib/src/slice.nr +++ b/noir_stdlib/src/slice.nr @@ -24,13 +24,13 @@ impl [T] { /// Insert an element at a specified index, shifting all elements /// after it to the right #[builtin(slice_insert)] - pub fn insert(self, index: Field, elem: T) -> Self { } + pub fn insert(self, index: u64, elem: T) -> Self { } /// Remove an element at a specified index, shifting all elements /// after it to the left, returning the altered slice and /// the removed element #[builtin(slice_remove)] - pub fn remove(self, index: Field) -> (Self, T) { } + pub fn remove(self, index: u64) -> (Self, T) { } // Append each element of the `other` slice to the end of `self`. // This returns a new slice and leaves both input slices unchanged. diff --git a/test_programs/execution_success/array_len/src/main.nr b/test_programs/execution_success/array_len/src/main.nr index b60762f4636..f846cfb9844 100644 --- a/test_programs/execution_success/array_len/src/main.nr +++ b/test_programs/execution_success/array_len/src/main.nr @@ -1,12 +1,12 @@ -fn len_plus_1(array: [T; N]) -> Field { +fn len_plus_1(array: [T; N]) -> u64 { array.len() + 1 } -fn add_lens(a: [T; N], b: [Field; M]) -> Field { +fn add_lens(a: [T; N], b: [Field; M]) -> u64 { a.len() + b.len() } -fn nested_call(b: [Field; N]) -> Field { +fn nested_call(b: [Field; N]) -> u64 { len_plus_1(b) } diff --git a/test_programs/execution_success/brillig_cow_regression/src/main.nr b/test_programs/execution_success/brillig_cow_regression/src/main.nr index 974c17dfbc9..74aeda18261 100644 --- a/test_programs/execution_success/brillig_cow_regression/src/main.nr +++ b/test_programs/execution_success/brillig_cow_regression/src/main.nr @@ -1,12 +1,12 @@ // Tests a performance regression found in aztec-packages with brillig cow optimization -global MAX_NEW_COMMITMENTS_PER_TX: Field = 64; -global MAX_NEW_NULLIFIERS_PER_TX: Field = 64; -global MAX_NEW_L2_TO_L1_MSGS_PER_TX: Field = 2; -global MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX: Field = 16; -global MAX_NEW_CONTRACTS_PER_TX: Field = 1; -global NUM_ENCRYPTED_LOGS_HASHES_PER_TX: Field = 1; -global NUM_UNENCRYPTED_LOGS_HASHES_PER_TX: Field = 1; +global MAX_NEW_COMMITMENTS_PER_TX = 64; +global MAX_NEW_NULLIFIERS_PER_TX = 64; +global MAX_NEW_L2_TO_L1_MSGS_PER_TX = 2; +global MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX = 16; +global MAX_NEW_CONTRACTS_PER_TX = 1; +global NUM_ENCRYPTED_LOGS_HASHES_PER_TX = 1; +global NUM_UNENCRYPTED_LOGS_HASHES_PER_TX = 1; global NUM_FIELDS_PER_SHA256 = 2; global CALLDATA_HASH_INPUT_SIZE = 169; global CALL_DATA_HASH_LOG_FIELDS = 4; diff --git a/test_programs/execution_success/brillig_oracle/Prover.toml b/test_programs/execution_success/brillig_oracle/Prover.toml index 2b26a4ce471..161f4fb62c0 100644 --- a/test_programs/execution_success/brillig_oracle/Prover.toml +++ b/test_programs/execution_success/brillig_oracle/Prover.toml @@ -1,2 +1,2 @@ -x = "10" +_x = "10" 
diff --git a/test_programs/execution_success/brillig_oracle/src/main.nr b/test_programs/execution_success/brillig_oracle/src/main.nr index 490b7b605e3..6a9e5806621 100644 --- a/test_programs/execution_success/brillig_oracle/src/main.nr +++ b/test_programs/execution_success/brillig_oracle/src/main.nr @@ -2,7 +2,7 @@ use dep::std::slice; use dep::std::test::OracleMock; // Tests oracle usage in brillig/unconstrained functions -fn main(x: Field) { +fn main(_x: Field) { let size = 20; // TODO: Add a method along the lines of `(0..size).to_array()`. let mut mock_oracle_response = [0; 20]; @@ -17,7 +17,7 @@ fn main(x: Field) { let _ = OracleMock::mock("get_number_sequence").with_params(size).returns((20, mock_oracle_response)); let _ = OracleMock::mock("get_reverse_number_sequence").with_params(size).returns((20, reversed_mock_oracle_response)); - get_number_sequence_wrapper(size); + get_number_sequence_wrapper(size as Field); } // Define oracle functions which we have mocked above diff --git a/test_programs/execution_success/brillig_slices/src/main.nr b/test_programs/execution_success/brillig_slices/src/main.nr index 48bc8a76bb8..847c41de25c 100644 --- a/test_programs/execution_success/brillig_slices/src/main.nr +++ b/test_programs/execution_success/brillig_slices/src/main.nr @@ -131,7 +131,7 @@ unconstrained fn merge_slices_mutate_in_loop(x: Field, y: Field) -> [Field] { let mut slice = [0; 2]; if x != y { for i in 0..5 { - slice = slice.push_back(i); + slice = slice.push_back(i as Field); } } else { slice = slice.push_back(x); diff --git a/test_programs/execution_success/global_consts/src/baz.nr b/test_programs/execution_success/global_consts/src/baz.nr index 4271de81118..384cf9d3569 100644 --- a/test_programs/execution_success/global_consts/src/baz.nr +++ b/test_programs/execution_success/global_consts/src/baz.nr @@ -1,5 +1,5 @@ pub fn from_baz(x: [Field; crate::foo::MAGIC_NUMBER]) { for i in 0..crate::foo::MAGIC_NUMBER { - assert(x[i] == crate::foo::MAGIC_NUMBER); + assert(x[i] == crate::foo::MAGIC_NUMBER as Field); } } diff --git a/test_programs/execution_success/global_consts/src/foo.nr b/test_programs/execution_success/global_consts/src/foo.nr index 7b0ae75b74b..413b9c3a74b 100644 --- a/test_programs/execution_success/global_consts/src/foo.nr +++ b/test_programs/execution_success/global_consts/src/foo.nr @@ -1,11 +1,11 @@ mod bar; -global N: Field = 5; -global MAGIC_NUMBER: Field = 3; +global N: u64 = 5; +global MAGIC_NUMBER: u64 = 3; global TYPE_INFERRED = 42; pub fn from_foo(x: [Field; bar::N]) { for i in 0..bar::N { - assert(x[i] == bar::N); + assert(x[i] == bar::N as Field); } } diff --git a/test_programs/execution_success/global_consts/src/foo/bar.nr b/test_programs/execution_success/global_consts/src/foo/bar.nr index b8d0b85b0f3..5404c9cf1e3 100644 --- a/test_programs/execution_success/global_consts/src/foo/bar.nr +++ b/test_programs/execution_success/global_consts/src/foo/bar.nr @@ -1,5 +1,5 @@ -global N: Field = 5; +global N: u64 = 5; pub fn from_bar(x: Field) -> Field { - x * N + x * N as Field } diff --git a/test_programs/execution_success/global_consts/src/main.nr b/test_programs/execution_success/global_consts/src/main.nr index 25cc0e4dd36..3c8ecc67a0c 100644 --- a/test_programs/execution_success/global_consts/src/main.nr +++ b/test_programs/execution_success/global_consts/src/main.nr @@ -3,7 +3,7 @@ mod baz; global M: Field = 32; global L: Field = 10; // Unused globals currently allowed -global N: Field = 5; +global N: u64 = 5; global T_LEN = 2; // Type inference is allowed 
on globals // Globals can reference other globals @@ -36,12 +36,12 @@ fn main( let test_struct = Dummy { x: d, y: c }; for i in 0..foo::MAGIC_NUMBER { - assert(c[i] == foo::MAGIC_NUMBER); - assert(test_struct.y[i] == foo::MAGIC_NUMBER); + assert(c[i] == foo::MAGIC_NUMBER as Field); + assert(test_struct.y[i] == foo::MAGIC_NUMBER as Field); assert(test_struct.y[i] != NESTED[1][0].v); } - assert(N != M); + assert(N as Field != M); let expected: u32 = 42; assert(foo::TYPE_INFERRED == expected); @@ -62,12 +62,12 @@ fn main( arrays_neq(a, b); - let t: [Field; T_LEN] = [N, M]; + let t: [Field; T_LEN] = [N as Field, M]; assert(t[1] == 32); assert(15 == my_submodule::my_helper()); - let add_submodules_N = my_submodule::N + foo::bar::N; + let add_submodules_N = my_submodule::N + foo::bar::N as Field; assert(15 == add_submodules_N); let add_from_bar_N = my_submodule::N + foo::bar::from_bar(1); assert(15 == add_from_bar_N); @@ -75,7 +75,7 @@ fn main( let sugared = [0; my_submodule::N + 2]; assert(sugared[my_submodule::N + 1] == 0); - let arr: [Field; my_submodule::N] = [N; 10]; + let arr: [Field; my_submodule::N] = [N as Field; 10]; assert((arr[0] == 5) & (arr[9] == 5)); foo::from_foo(d); diff --git a/test_programs/execution_success/slice_dynamic_index/src/main.nr b/test_programs/execution_success/slice_dynamic_index/src/main.nr index 374d2ba4c26..41fc9a645c1 100644 --- a/test_programs/execution_success/slice_dynamic_index/src/main.nr +++ b/test_programs/execution_success/slice_dynamic_index/src/main.nr @@ -6,7 +6,7 @@ fn main(x: Field) { fn regression_dynamic_slice_index(x: Field, y: Field) { let mut slice = []; for i in 0..5 { - slice = slice.push_back(i); + slice = slice.push_back(i as Field); } assert(slice.len() == 5); @@ -124,12 +124,12 @@ fn dynamic_slice_merge_if(mut slice: [Field], x: Field) { assert(first_elem == 12); assert(rest_of_slice.len() == 6); - slice = rest_of_slice.insert(x - 2, 20); + slice = rest_of_slice.insert(x as u64 - 2, 20); assert(slice[2] == 20); assert(slice[6] == 30); assert(slice.len() == 7); - let (removed_slice, removed_elem) = slice.remove(x - 1); + let (removed_slice, removed_elem) = slice.remove(x as u64 - 1); // The deconstructed tuple assigns to the slice but is not seen outside of the if statement // without a direct assignment slice = removed_slice; diff --git a/test_programs/execution_success/slices/src/main.nr b/test_programs/execution_success/slices/src/main.nr index c377d2e5b2f..eca42a660c4 100644 --- a/test_programs/execution_success/slices/src/main.nr +++ b/test_programs/execution_success/slices/src/main.nr @@ -167,7 +167,7 @@ fn merge_slices_mutate_in_loop(x: Field, y: Field) -> [Field] { let mut slice = [0; 2]; if x != y { for i in 0..5 { - slice = slice.push_back(i); + slice = slice.push_back(i as Field); } } else { slice = slice.push_back(x); From 650ffc5053cdca4b6ad2e027fa1f4fd90ef64871 Mon Sep 17 00:00:00 2001 From: Nikita Masych <92444221+NikitaMasych@users.noreply.github.com> Date: Fri, 23 Feb 2024 16:55:17 +0200 Subject: [PATCH 29/45] feat: Add HashMap to the stdlib (#4242) # Description This PR brings `HashMap` into the Noir `stdlib`. ## Problem\* Resolves #4241 ## Summary\* This is an implementation of `HashMap` with open addressing and a quadratic probing scheme. Since Noir requires loop bounds (and recursive calls) to be known at compile time, `HashMap` has a fixed capacity and **no** dynamic resizing is performed in response to the load factor. Furthermore, the contribution includes an implementation of `PedersenHasher` to be used for now.
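For illustration, here is a minimal usage sketch of the intended API (not part of this PR's test suite; the key and value types and the capacity of 8 are arbitrary choices):

```
use dep::std::collections::map::HashMap;
use dep::std::hash::BuildHasherDefault;
use dep::std::hash::pedersen::PedersenHasher;

global CAPACITY = 8;

fn main() {
    // The capacity is a compile-time constant; the map never resizes.
    let mut map: HashMap<Field, Field, CAPACITY, BuildHasherDefault<PedersenHasher>> = HashMap::default();

    map.insert(2, 17);
    assert(map.get(2).unwrap() == 17);

    map.remove(2);
    assert(map.is_empty());
}
```

`HashMap::default()` relies on the `BuildHasherDefault` and `PedersenHasher` implementations added below.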
One can examine potentially better and more lightweight prehash functions later. I tried to conform to engineering best practices; however, since Noir is under rapid development, certain things may still be optimized in the future, both from the code-style and the performance point of view. ## Additional Context I put the `PedersenHasher` among `poseidon.nr` and `mimc.nr`, so one can consider moving the declarations of other Pedersen-related functionality there; however, that would be a breaking change. ## Documentation\* Check one: - [ ] No documentation needed. - [ ] Documentation included in this PR. - [x] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. --- noir_stdlib/src/collections.nr | 1 + noir_stdlib/src/collections/map.nr | 456 ++++++++++++++++++ noir_stdlib/src/hash.nr | 52 ++ noir_stdlib/src/hash/pedersen.nr | 24 + .../hashmap_load_factor/Nargo.toml | 6 + .../hashmap_load_factor/Prover.toml | 26 + .../hashmap_load_factor/src/main.nr | 35 ++ .../execution_success/hashmap/Nargo.toml | 6 + .../execution_success/hashmap/Prover.toml | 26 + .../execution_success/hashmap/src/main.nr | 192 ++++++++ .../execution_success/hashmap/src/utils.nr | 10 + 11 files changed, 834 insertions(+) create mode 100644 noir_stdlib/src/collections/map.nr create mode 100644 noir_stdlib/src/hash/pedersen.nr create mode 100644 test_programs/compile_failure/hashmap_load_factor/Nargo.toml create mode 100644 test_programs/compile_failure/hashmap_load_factor/Prover.toml create mode 100644 test_programs/compile_failure/hashmap_load_factor/src/main.nr create mode 100644 test_programs/execution_success/hashmap/Nargo.toml create mode 100644 test_programs/execution_success/hashmap/Prover.toml create mode 100644 test_programs/execution_success/hashmap/src/main.nr create mode 100644 test_programs/execution_success/hashmap/src/utils.nr diff --git a/noir_stdlib/src/collections.nr b/noir_stdlib/src/collections.nr index 177ca96816f..2d952f4d6cd 100644 --- a/noir_stdlib/src/collections.nr +++ b/noir_stdlib/src/collections.nr @@ -1,2 +1,3 @@ mod vec; mod bounded_vec; +mod map; diff --git a/noir_stdlib/src/collections/map.nr b/noir_stdlib/src/collections/map.nr new file mode 100644 index 00000000000..d9eb83ff5dc --- /dev/null +++ b/noir_stdlib/src/collections/map.nr @@ -0,0 +1,456 @@ +use crate::cmp::Eq; +use crate::collections::vec::Vec; +use crate::option::Option; +use crate::default::Default; +use crate::hash::{Hash, Hasher, BuildHasher}; + +// We use a maximum load factor α_max = 0.75. +// Upon exceeding it, an assert fails in order to inform the user +// about performance degradation, so that the capacity can be adjusted. +global MAX_LOAD_FACTOR_NUMERATOR = 3; +global MAX_LOAD_FACTOR_DENOMINATOR = 4; + +// Hash table with open addressing and quadratic probing. +// The size of the underlying table must be known at compile time. +// It is advised to select the capacity N as a power of two or a prime number, +// because the probing scheme used is best tailored to those. +struct HashMap<K, V, N, B> { + _table: [Slot<K, V>; N], + + // Amount of valid elements in the map. + _len: u64, + + _build_hasher: B +} + +// Data unit in the HashMap table. +// In case Noir adds support for enums in the future, this +// should be refactored to have three states: +// 1. (key, value) +// 2. (empty) +// 3.
(deleted) +struct Slot<K, V> { + _key_value: Option<(K, V)>, + _is_deleted: bool, +} + +impl<K, V> Default for Slot<K, V>{ + fn default() -> Self{ + Slot{ + _key_value: Option::none(), + _is_deleted: false + } + } +} + +impl<K, V> Slot<K, V> { + fn is_valid(self) -> bool { + !self._is_deleted & self._key_value.is_some() + } + + fn is_available(self) -> bool { + self._is_deleted | self._key_value.is_none() + } + + fn key_value(self) -> Option<(K, V)> { + self._key_value + } + + fn key_value_unchecked(self) -> (K, V) { + self._key_value.unwrap_unchecked() + } + + fn set(&mut self, key: K, value: V) { + self._key_value = Option::some((key, value)); + self._is_deleted = false; + } + + // This shall not override `_key_value` with Option::none(), + // because we must be able to differentiate empty + // and deleted slots during lookup. + fn mark_deleted(&mut self) { + self._is_deleted = true; + } +} + +// During lookup we iterate `attempt` from 0 to N - 1, based on the heuristic that +// if we have gone that far without finding the desired key, +// it is very unlikely to be found afterwards, and performance would be heavily degraded. +impl<K, V, N, B> HashMap<K, V, N, B> { + // Creates a new instance of HashMap with the specified BuildHasher. + pub fn with_hasher<H>(_build_hasher: B) -> Self + where + B: BuildHasher<H> { + let _table = [Slot::default(); N]; + let _len = 0; + Self { _table, _len, _build_hasher } + } + + // Clears the map, removing all key-value entries. + pub fn clear(&mut self) { + self._table = [Slot::default(); N]; + self._len = 0; + } + + // Returns true if the map contains a value for the specified key. + pub fn contains_key<H>( + self, + key: K + ) -> bool + where + K: Hash + Eq, + B: BuildHasher<H>, + H: Hasher { + self.get(key).is_some() + } + + // Returns true if the map contains no elements. + pub fn is_empty(self) -> bool { + self._len == 0 + } + + // Get the [Option<(K, V)>; N] array of valid entries, + // with a length equal to the map capacity. The first len() elements + // are safe to unwrap_unchecked(), whilst the remaining ones + // are guaranteed to be Option::none(). + // + // This design is driven by compile-time limitations and + // the temporary ban on nested slices. + pub fn entries(self) -> [Option<(K, V)>; N] { + let mut entries = [Option::none(); N]; + let mut valid_amount = 0; + + for slot in self._table { + if slot.is_valid() { + entries[valid_amount] = slot.key_value(); + valid_amount += 1; + } + } + + let msg = f"Expected {self._len} valid elements, but got {valid_amount}."; + assert(valid_amount == self._len, msg); + + entries + } + + // Get the [Option<K>; N] array of valid keys, + // with a length equal to the map capacity. The first len() elements + // are safe to unwrap_unchecked(), whilst the remaining ones + // are guaranteed to be Option::none(). + // + // This design is driven by compile-time limitations and + // the temporary ban on nested slices. + pub fn keys(self) -> [Option<K>; N] { + let mut keys = [Option::none(); N]; + let mut valid_amount = 0; + + for slot in self._table { + if slot.is_valid() { + let (key, _) = slot.key_value_unchecked(); + keys[valid_amount] = Option::some(key); + valid_amount += 1; + } + } + + let msg = f"Expected {self._len} valid elements, but got {valid_amount}."; + assert(valid_amount == self._len, msg); + + keys + } + + // Get the [Option<V>; N] array of valid values, + // with a length equal to the map capacity. The first len() elements + // are safe to unwrap_unchecked(), whilst the remaining ones + // are guaranteed to be Option::none(). + // + // This design is driven by compile-time limitations and + // the temporary ban on nested slices.
+ pub fn values(self) -> [Option<V>; N] { + let mut values = [Option::none(); N]; + let mut valid_amount = 0; + + for slot in self._table { + if slot.is_valid() { + let (_, value) = slot.key_value_unchecked(); + values[valid_amount] = Option::some(value); + valid_amount += 1; + } + } + + let msg = f"Expected {self._len} valid elements, but got {valid_amount}."; + assert(valid_amount == self._len, msg); + + values + } + + // Applies the mutator function to each key-value entry. + pub fn iter_mut<H>( + &mut self, + f: fn(K, V) -> (K, V) + ) + where + K: Eq + Hash, + B: BuildHasher<H>, + H: Hasher { + let mut entries = self.entries(); + let mut new_map = HashMap::with_hasher(self._build_hasher); + + for i in 0..N { + if i < self._len { + let entry = entries[i].unwrap_unchecked(); + let (key, value) = f(entry.0, entry.1); + new_map.insert(key, value); + } + } + + self._table = new_map._table; + } + + // Applies the mutator function to each key. + pub fn iter_keys_mut<H>( + &mut self, + f: fn(K) -> K + ) + where + K: Eq + Hash, + B: BuildHasher<H>, + H: Hasher { + let mut entries = self.entries(); + let mut new_map = HashMap::with_hasher(self._build_hasher); + + for i in 0..N { + if i < self._len { + let entry = entries[i].unwrap_unchecked(); + let (key, value) = (f(entry.0), entry.1); + new_map.insert(key, value); + } + } + + self._table = new_map._table; + } + + // Applies the mutator function to each value. + pub fn iter_values_mut(&mut self, f: fn(V) -> V) { + for i in 0..N { + let mut slot = self._table[i]; + if slot.is_valid() { + let (key, value) = slot.key_value_unchecked(); + slot.set(key, f(value)); + self._table[i] = slot; + } + } + } + + // Retains only the elements specified by the predicate. + pub fn retain(&mut self, f: fn(K, V) -> bool) { + for index in 0..N { + let mut slot = self._table[index]; + if slot.is_valid() { + let (key, value) = slot.key_value_unchecked(); + if !f(key, value) { + slot.mark_deleted(); + self._len -= 1; + self._table[index] = slot; + } + } + } + } + + // Returns the amount of active key-value entries. + pub fn len(self) -> u64 { + self._len + } + + // Get the compile-time map capacity. + pub fn capacity(_self: Self) -> u64 { + N + } + + // Get the value by key. If it does not exist, returns none(). + pub fn get<H>( + self, + key: K + ) -> Option<V> + where + K: Eq + Hash, + B: BuildHasher<H>, + H: Hasher { + let mut result = Option::none(); + + let hash = self.hash(key); + let mut break = false; + + for attempt in 0..N { + if !break { + let index = self.quadratic_probe(hash, attempt as u64); + let slot = self._table[index]; + + // Not marked as deleted and has a key-value pair. + if slot.is_valid() { + let (current_key, value) = slot.key_value_unchecked(); + if current_key == key { + result = Option::some(value); + break = true; + } + } + } + } + + result + } + + // Insert a key-value entry. If the key was already present, the value is overridden. + pub fn insert<H>( + &mut self, + key: K, + value: V + ) + where + K: Eq + Hash, + B: BuildHasher<H>, + H: Hasher { + self.assert_load_factor(); + + let hash = self.hash(key); + let mut break = false; + + for attempt in 0..N { + if !break { + let index = self.quadratic_probe(hash, attempt as u64); + let mut slot = self._table[index]; + let mut insert = false; + + // Either marked as deleted or has an unset key-value pair.
+ if slot.is_available() { + insert = true; + self._len += 1; + } else { + let (current_key, _) = slot.key_value_unchecked(); + if current_key == key { + insert = true; + } + } + + if insert { + slot.set(key, value); + self._table[index] = slot; + break = true; + } + } + } + } + + // Remove a key-value entry. If the key is not present, the HashMap remains unchanged. + pub fn remove<H>( + &mut self, + key: K + ) + where + K: Eq + Hash, + B: BuildHasher<H>, + H: Hasher { + let hash = self.hash(key); + let mut break = false; + + for attempt in 0..N { + if !break { + let index = self.quadratic_probe(hash, attempt as u64); + let mut slot = self._table[index]; + + // Not marked as deleted and has a key-value pair. + if slot.is_valid() { + let (current_key, _) = slot.key_value_unchecked(); + if current_key == key { + slot.mark_deleted(); + self._table[index] = slot; + self._len -= 1; + break = true; + } + } + } + } + } + + // Apply the HashMap's hasher to the key to obtain the pre-hash for probing. + fn hash<H>( + self, + key: K + ) -> u64 + where + K: Hash, + B: BuildHasher<H>, + H: Hasher { + let mut hasher = self._build_hasher.build_hasher(); + key.hash(&mut hasher); + hasher.finish() as u64 + } + + // Probing scheme: quadratic function. + // We use a coefficient of 0.5 on both the linear and the quadratic attempt terms, + // i.e. the probe offset is (attempt + attempt^2) / 2. + // This ensures good uniformity of distribution for table sizes + // equal to prime numbers or powers of two. + fn quadratic_probe(_self: Self, hash: u64, attempt: u64) -> u64 { + (hash + (attempt + attempt * attempt) / 2) % N + } + + // Asserts that the amount of elements in the table relative to the available slots does not exceed α_max. + // To avoid a comparatively more expensive division operation, + // we conduct cross-multiplication instead: + // n / m >= MAX_LOAD_FACTOR_NUMERATOR / MAX_LOAD_FACTOR_DENOMINATOR + // <=> n * MAX_LOAD_FACTOR_DENOMINATOR >= m * MAX_LOAD_FACTOR_NUMERATOR + // For example, with a capacity of 8 the assert fires on the 7th distinct insertion, since 6 * 4 >= 8 * 3. + fn assert_load_factor(self) { + let lhs = self._len * MAX_LOAD_FACTOR_DENOMINATOR; + let rhs = self._table.len() as u64 * MAX_LOAD_FACTOR_NUMERATOR; + let exceeded = lhs >= rhs; + assert(!exceeded, "Load factor is exceeded, consider increasing the capacity."); + } +} + +// Equality on HashMaps has to test that they have +// equal sets of key-value entries, +// thus each one is a subset of the other and vice versa. +impl<K, V, N, B, H> Eq for HashMap<K, V, N, B> +where + K: Eq + Hash, + V: Eq, + B: BuildHasher<H>, + H: Hasher +{ + fn eq(self, other: HashMap<K, V, N, B>) -> bool{ + let mut equal = false; + + if self.len() == other.len(){ + equal = true; + for slot in self._table{ + // Not marked as deleted and has a key-value pair.
+ if equal & slot.is_valid(){ + let (key, value) = slot.key_value_unchecked(); + let other_value = other.get(key); + + if other_value.is_none(){ + equal = false; + }else{ + let other_value = other_value.unwrap_unchecked(); + if value != other_value{ + equal = false; + } + } + } + } + } + + equal + } +} + +impl<K, V, N, B, H> Default for HashMap<K, V, N, B> +where + B: BuildHasher<H> + Default, + H: Hasher + Default +{ + fn default() -> Self{ + let _build_hasher = B::default(); + let map: HashMap<K, V, N, B> = HashMap::with_hasher(_build_hasher); + map + } +} diff --git a/noir_stdlib/src/hash.nr b/noir_stdlib/src/hash.nr index cc864039a90..7a931f7c047 100644 --- a/noir_stdlib/src/hash.nr +++ b/noir_stdlib/src/hash.nr @@ -1,5 +1,8 @@ mod poseidon; mod mimc; +mod pedersen; + +use crate::default::Default; #[foreign(sha256)] // docs:start:sha256 @@ -74,3 +77,52 @@ pub fn poseidon2_permutation<N>(_input: [u8; N], _state_length: u32) -> [u8; N] #[foreign(sha256_compression)] pub fn sha256_compression(_input: [u32; 16], _state: [u32; 8]) -> [u32; 8] {} + +// Generic hashing support. +// Partially ported from, and influenced by, Rust. + +// The Hash trait shall be implemented per type. +trait Hash{ + fn hash<H>(self, state: &mut H) where H: Hasher; +} + +// The Hasher trait shall be implemented by hashing algorithms, providing a hash-algorithm-agnostic interface. +// TODO: consider making the types generic here ([u8], [Field], etc.) +trait Hasher{ + fn finish(self) -> Field; + + fn write(&mut self, input: [Field]); +} + +// BuildHasher is a factory trait, responsible for the production of a specific Hasher. +trait BuildHasher<H> where H: Hasher{ + fn build_hasher(self) -> H; +} + +struct BuildHasherDefault<H>; + +impl<H> BuildHasher<H> for BuildHasherDefault<H> +where + H: Hasher + Default +{ + fn build_hasher(_self: Self) -> H{ + H::default() + } +} + +impl<H> Default for BuildHasherDefault<H> +where + H: Hasher + Default +{ + fn default() -> Self{ + BuildHasherDefault{} + } +} + +// TODO: add implementations for the remainder of primitive types. +impl Hash for Field{ + fn hash<H>(self, state: &mut H) where H: Hasher{ + let input: [Field] = [self]; + H::write(state, input); + } +} diff --git a/noir_stdlib/src/hash/pedersen.nr b/noir_stdlib/src/hash/pedersen.nr new file mode 100644 index 00000000000..ace6851099d --- /dev/null +++ b/noir_stdlib/src/hash/pedersen.nr @@ -0,0 +1,24 @@ +use crate::hash::{Hasher, pedersen_hash}; +use crate::default::Default; + +struct PedersenHasher{ + _state: [Field] +} + +impl Hasher for PedersenHasher { + fn finish(self) -> Field { + pedersen_hash(self._state) + } + + fn write(&mut self, input: [Field]){ + self._state = self._state.append(input); + } +} + +impl Default for PedersenHasher{ + fn default() -> Self{ + PedersenHasher{ + _state: [] + } + } +} diff --git a/test_programs/compile_failure/hashmap_load_factor/Nargo.toml b/test_programs/compile_failure/hashmap_load_factor/Nargo.toml new file mode 100644 index 00000000000..92da5a357f4 --- /dev/null +++ b/test_programs/compile_failure/hashmap_load_factor/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "hashmap_load_factor" +type = "bin" +authors = [""] + +[dependencies] \ No newline at end of file diff --git a/test_programs/compile_failure/hashmap_load_factor/Prover.toml b/test_programs/compile_failure/hashmap_load_factor/Prover.toml new file mode 100644 index 00000000000..e54319c61e9 --- /dev/null +++ b/test_programs/compile_failure/hashmap_load_factor/Prover.toml @@ -0,0 +1,26 @@ +# Expected 6 key-value entries for a hashmap capacity of 8. +# These must be distinct (both key-to-key, and value-to-value) for correct testing.
+ +[[input]] +key = 2 +value = 17 + +[[input]] +key = 3 +value = 19 + +[[input]] +key = 5 +value = 23 + +[[input]] +key = 7 +value = 29 + +[[input]] +key = 11 +value = 31 + +[[input]] +key = 41 +value = 43 \ No newline at end of file diff --git a/test_programs/compile_failure/hashmap_load_factor/src/main.nr b/test_programs/compile_failure/hashmap_load_factor/src/main.nr new file mode 100644 index 00000000000..ade43f898e1 --- /dev/null +++ b/test_programs/compile_failure/hashmap_load_factor/src/main.nr @@ -0,0 +1,35 @@ +use dep::std::collections::map::HashMap; +use dep::std::hash::BuildHasherDefault; +use dep::std::hash::pedersen::PedersenHasher; + +struct Entry{ + key: Field, + value: Field +} + +global HASHMAP_CAP = 8; +global HASHMAP_LEN = 6; + +fn allocate_hashmap() -> HashMap<Field, Field, HASHMAP_CAP, BuildHasherDefault<PedersenHasher>> { + HashMap::default() +} + +fn main(input: [Entry; HASHMAP_LEN]) { + test_load_factor(input); +} + +// In this test we exceed the load factor: +// α_max = 0.75, thus for a capacity of 8 and a length of 6, +// insertion of a new unique key (the 7th) should throw an assertion error. +fn test_load_factor(input: [Entry; HASHMAP_LEN]) { + let mut hashmap = allocate_hashmap(); + + for entry in input { + hashmap.insert(entry.key, entry.value); + } + + // We use prime numbers for testing, + // therefore doubling a key is guaranteed to produce a new unique key. + let key = input[0].key * 2; + hashmap.insert(key, input[0].value); +} diff --git a/test_programs/execution_success/hashmap/Nargo.toml b/test_programs/execution_success/hashmap/Nargo.toml new file mode 100644 index 00000000000..c09debc9833 --- /dev/null +++ b/test_programs/execution_success/hashmap/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "hashmap" +type = "bin" +authors = [""] + +[dependencies] \ No newline at end of file diff --git a/test_programs/execution_success/hashmap/Prover.toml b/test_programs/execution_success/hashmap/Prover.toml new file mode 100644 index 00000000000..84d4c0733e4 --- /dev/null +++ b/test_programs/execution_success/hashmap/Prover.toml @@ -0,0 +1,26 @@ +# Input: 6 key-value entries for a hashmap capacity of 8. +# These must be distinct (both key-to-key, and value-to-value) for correct testing. + +[[input]] +key = 2 +value = 17 + +[[input]] +key = 3 +value = 19 + +[[input]] +key = 5 +value = 23 + +[[input]] +key = 7 +value = 29 + +[[input]] +key = 11 +value = 31 + +[[input]] +key = 41 +value = 43 \ No newline at end of file diff --git a/test_programs/execution_success/hashmap/src/main.nr b/test_programs/execution_success/hashmap/src/main.nr new file mode 100644 index 00000000000..597a5c0b7de --- /dev/null +++ b/test_programs/execution_success/hashmap/src/main.nr @@ -0,0 +1,192 @@ +mod utils; + +use dep::std::collections::map::HashMap; +use dep::std::hash::BuildHasherDefault; +use dep::std::hash::pedersen::PedersenHasher; +use dep::std::cmp::Eq; + +use utils::cut; + +type K = Field; +type V = Field; + +// It is more convenient and readable to use structs as input.
+struct Entry{ + key: Field, + value: Field +} + +global HASHMAP_CAP = 8; +global HASHMAP_LEN = 6; + +global FIELD_CMP = |a: Field, b: Field| a.lt(b); + +global K_CMP = FIELD_CMP; +global V_CMP = FIELD_CMP; +global KV_CMP = |a: (K, V), b: (K, V)| a.0.lt(b.0); + +global ALLOCATE_HASHMAP = || -> HashMap<K, V, HASHMAP_CAP, BuildHasherDefault<PedersenHasher>> + HashMap::default(); + +fn main(input: [Entry; HASHMAP_LEN]) { + test_sequential(input[0].key, input[0].value); + test_multiple_equal_insert(input[1].key, input[1].value); + test_value_override(input[2].key, input[2].value, input[3].value); + test_insert_and_methods(input); + test_hashmaps_equality(input); + test_retain(); + test_iterators(); + test_mut_iterators(); +} + +// Insert, get, remove. +fn test_sequential(key: K, value: V) { + let mut hashmap = ALLOCATE_HASHMAP(); + assert(hashmap.is_empty(), "New HashMap should be empty."); + + hashmap.insert(key, value); + assert(hashmap.len() == 1, "HashMap after one insert should have a length of 1 element."); + + let got = hashmap.get(key); + assert(got.is_some(), "Got none value."); + let got = got.unwrap_unchecked(); + assert(value == got, f"Inserted {value} but got {got} for the same key."); + + hashmap.remove(key); + assert(hashmap.is_empty(), "HashMap after one insert and corresponding removal should be empty."); + let got = hashmap.get(key); + assert(got.is_none(), "Value has been removed, but is still available (not none)."); +} + +// Insert the same pair several times. +fn test_multiple_equal_insert(key: K, value: V) { + let mut hashmap = ALLOCATE_HASHMAP(); + assert(hashmap.is_empty(), "New HashMap should be empty."); + + for _ in 0..HASHMAP_LEN { + hashmap.insert(key, value); + } + + let len = hashmap.len(); + assert(len == 1, f"HashMap length must be 1, got {len}."); + + let got = hashmap.get(key); + assert(got.is_some(), "Got none value."); + let got = got.unwrap_unchecked(); + assert(value == got, f"Inserted {value} but got {got} for the same key."); +} + +// Override the value for an existing pair. +fn test_value_override(key: K, value: V, new_value: V) { + let mut hashmap = ALLOCATE_HASHMAP(); + assert(hashmap.is_empty(), "New hashmap should be empty."); + + hashmap.insert(key, value); + hashmap.insert(key, new_value); + assert(hashmap.len() == 1, "HashMap length is invalid."); + + let got = hashmap.get(key); + assert(got.is_some(), "Got none value."); + let got = got.unwrap_unchecked(); + assert(got == new_value, f"Expected {new_value}, but got {got}."); +} + +// Insert several distinct pairs and test auxiliary methods. +fn test_insert_and_methods(input: [Entry; HASHMAP_LEN]) { + let mut hashmap = ALLOCATE_HASHMAP(); + assert(hashmap.is_empty(), "New HashMap should be empty."); + + for entry in input { + hashmap.insert(entry.key, entry.value); + } + + assert(hashmap.len() == HASHMAP_LEN, "hashmap.len() does not match the input length."); + + for entry in input { + assert(hashmap.contains_key(entry.key), f"Inserted key {entry.key} was not found."); + } + + hashmap.clear(); + assert(hashmap.is_empty(), "HashMap after clear() should be empty."); +} + +// Insert several pairs and test retaining.
+fn test_retain() { + let mut hashmap = ALLOCATE_HASHMAP(); + assert(hashmap.is_empty(), "New HashMap should be empty."); + + let (key, value) = (5, 11); + hashmap.insert(key, value); + let (key, value) = (2, 13); + hashmap.insert(key, value); + let (key, value) = (11, 5); + hashmap.insert(key, value); + + let predicate = |key: K, value: V| -> bool {key * value == 55}; + hashmap.retain(predicate); + + assert(hashmap.len() == 2, "HashMap should have retained 2 elements."); + assert(hashmap.get(2).is_none(), "Pair should have been removed, since it does not match the predicate."); +} + +// Equality trait check. +fn test_hashmaps_equality(input: [Entry; HASHMAP_LEN]) { + let mut hashmap_1 = ALLOCATE_HASHMAP(); + let mut hashmap_2 = ALLOCATE_HASHMAP(); + + for entry in input { + hashmap_1.insert(entry.key, entry.value); + hashmap_2.insert(entry.key, entry.value); + } + + assert(hashmap_1 == hashmap_2, "HashMaps should be equal."); + + hashmap_2.remove(input[0].key); + + assert(hashmap_1 != hashmap_2, "HashMaps should not be equal."); +} + +// Test entries, keys, values. +fn test_iterators() { + let mut hashmap = ALLOCATE_HASHMAP(); + + hashmap.insert(2, 3); + hashmap.insert(5, 7); + hashmap.insert(11, 13); + + let keys: [K; 3] = cut(hashmap.keys()).map(|k: Option<K>| k.unwrap_unchecked()).sort_via(K_CMP); + let values: [V; 3] = cut(hashmap.values()).map(|v: Option<V>| v.unwrap_unchecked()).sort_via(V_CMP); + let entries: [(K, V); 3] = cut(hashmap.entries()).map(|e: Option<(K, V)>| e.unwrap_unchecked()).sort_via(KV_CMP); + + assert(keys == [2, 5, 11], "Got incorrect iteration of keys."); + assert(values == [3, 7, 13], "Got incorrect iteration of values."); + assert(entries == [(2, 3), (5, 7), (11, 13)], "Got incorrect iteration of entries."); +} + +// Test mutable iteration over keys, values and entries. +fn test_mut_iterators() { + let mut hashmap = ALLOCATE_HASHMAP(); + + hashmap.insert(2, 3); + hashmap.insert(5, 7); + hashmap.insert(11, 13); + + let f = |k: K| -> K{ k * 3}; + hashmap.iter_keys_mut(f); + + let f = |v: V| -> V{ v * 5}; + hashmap.iter_values_mut(f); + + let keys: [K; 3] = cut(hashmap.keys()).map(|k: Option<K>| k.unwrap_unchecked()).sort_via(K_CMP); + let values: [V; 3] = cut(hashmap.values()).map(|v: Option<V>| v.unwrap_unchecked()).sort_via(V_CMP); + + assert(keys == [6, 15, 33], f"Got incorrect iteration of keys: {keys}"); + assert(values == [15, 35, 65], "Got incorrect iteration of values."); + + let f = |k: K, v: V| -> (K, V){(k * 2, v * 2)}; + hashmap.iter_mut(f); + + let entries: [(K, V); 3] = cut(hashmap.entries()).map(|e: Option<(K, V)>| e.unwrap_unchecked()).sort_via(KV_CMP); + + assert(entries == [(12, 30), (30, 70), (66, 130)], "Got incorrect iteration of entries."); +} diff --git a/test_programs/execution_success/hashmap/src/utils.nr b/test_programs/execution_success/hashmap/src/utils.nr new file mode 100644 index 00000000000..45c9ca9bbf7 --- /dev/null +++ b/test_programs/execution_success/hashmap/src/utils.nr @@ -0,0 +1,10 @@ +// Compile-time: cuts the first M elements from the [T; N] array.
+pub(crate) fn cut<T, M, N>(input: [T; N]) -> [T; M] { + assert(M as u64 < N as u64, "M should be less than N."); + + let mut new = [dep::std::unsafe::zeroed(); M]; + for i in 0..M { + new[i] = input[i]; + } + new +} From 16d5f18c68cc3da1d11c98e101e3942d2437c3a8 Mon Sep 17 00:00:00 2001 From: Maxim Vezenov Date: Fri, 23 Feb 2024 15:06:15 +0000 Subject: [PATCH 30/45] chore(ssa): Remove mem2reg run before flattening (#4415) # Description ## Problem\* Before https://github.com/noir-lang/noir/pull/4240 we needed mem2reg to be run so as not to panic when fetching slice lengths. ## Summary\* After the linked PR we have an improved strategy for tracking slice capacities by generating a slice capacities map before merging values. This should enable us to remove the mem2reg pass that is run before flattening. ## Additional Context ## Documentation\* Check one: - [ ] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [ ] I have tested the changes locally. - [ ] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. --- compiler/noirc_evaluator/src/ssa.rs | 5 ----- 1 file changed, 5 deletions(-) diff --git a/compiler/noirc_evaluator/src/ssa.rs b/compiler/noirc_evaluator/src/ssa.rs index d19c4467235..0bb81efe977 100644 --- a/compiler/noirc_evaluator/src/ssa.rs +++ b/compiler/noirc_evaluator/src/ssa.rs @@ -54,11 +54,6 @@ pub(crate) fn optimize_into_acir( .try_run_pass(Ssa::evaluate_assert_constant, "After Assert Constant:")? .try_run_pass(Ssa::unroll_loops, "After Unrolling:")? .run_pass(Ssa::simplify_cfg, "After Simplifying:") - // Run mem2reg before flattening to handle any promotion - // of values that can be accessed after loop unrolling. - // If there are slice mergers uncovered by loop unrolling - // and this pass is missed, slice merging will fail inside of flattening. - .run_pass(Ssa::mem2reg, "After Mem2Reg:") .run_pass(Ssa::flatten_cfg, "After Flattening:") .run_pass(Ssa::remove_bit_shifts, "After Removing Bit Shifts:") // Run mem2reg once more with the flattened CFG to catch any remaining loads/stores From 27c66b3d0741e68ed591ae8a16b47b30bc87175f Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Sat, 24 Feb 2024 14:00:22 +0000 Subject: [PATCH 31/45] fix: remove print from monomorphization pass (#4417) # Description ## Problem\* Resolves ## Summary\* We're currently printing out every expression we're monomorphising. ## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings.
--- compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs b/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs index 6f59fa13274..d8857f9e599 100644 --- a/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs +++ b/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs @@ -128,7 +128,6 @@ impl<'a> FunctionContext<'a> { } fn codegen_expression(&mut self, expr: &Expression) -> Result { - eprintln!("Codegen {expr}"); match expr { Expression::Ident(ident) => Ok(self.codegen_ident(ident)), Expression::Literal(literal) => self.codegen_literal(literal), From 33860678a642a76d8251ef42ffbe6d8a5a013528 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Sat, 24 Feb 2024 14:15:25 +0000 Subject: [PATCH 32/45] chore: remove unwanted prints (#4419) # Description ## Problem\* Resolves ## Summary\* This removes some unwanted prints which were left in from #4376. ## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. --- compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs | 2 -- 1 file changed, 2 deletions(-) diff --git a/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs b/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs index d8857f9e599..d95295ae3c9 100644 --- a/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs +++ b/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs @@ -347,10 +347,8 @@ impl<'a> FunctionContext<'a> { } fn codegen_binary(&mut self, binary: &ast::Binary) -> Result { - eprintln!("Start binary"); let lhs = self.codegen_non_tuple_expression(&binary.lhs)?; let rhs = self.codegen_non_tuple_expression(&binary.rhs)?; - eprintln!("Insert binary"); Ok(self.insert_binary(lhs, binary.operator, rhs, binary.location)) } From 29e9b5e5d0f7a00c806639e900f2f8209675ee0e Mon Sep 17 00:00:00 2001 From: guipublic <47281315+guipublic@users.noreply.github.com> Date: Mon, 26 Feb 2024 14:54:28 +0100 Subject: [PATCH 33/45] chore: do not panic when dividing by zero (#4424) # Description ## Problem\* Resolves #2480 ## Summary\* In the Brillig VM, we now have the same behaviour regarding division by 0: whether it is a field or an integer division, we return 0 when dividing by zero. Since we have a constraint which checks that the value times its inverse is 1, this constraint will always fail when dividing by zero (instead of panicking or returning an error). ## Additional Context ## Documentation\* Check one: - [X] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [X] I have tested the changes locally. - [X] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings.
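To make the new semantics concrete, here is a self-contained sketch of the unsigned case, mirroring the hunk below (`BigUint` and `Zero` come from the `num-bigint` and `num-traits` crates):

```rust
use num_bigint::BigUint;
use num_traits::Zero;

// Dividing by zero no longer returns an error: it yields 0. The separate
// inverse constraint (x * x_inv == 1) can then never be satisfied, so the
// proof fails at constraint-checking time instead of the VM panicking.
fn unsigned_div(a: &BigUint, b: &BigUint, bit_modulo: &BigUint) -> BigUint {
    let b_mod = b % bit_modulo;
    if b_mod.is_zero() {
        BigUint::zero()
    } else {
        (a % bit_modulo) / b_mod
    }
}
```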
--- acvm-repo/brillig_vm/src/arithmetic.rs | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/acvm-repo/brillig_vm/src/arithmetic.rs b/acvm-repo/brillig_vm/src/arithmetic.rs index 263a733e3c4..9d7b6fe8f02 100644 --- a/acvm-repo/brillig_vm/src/arithmetic.rs +++ b/acvm-repo/brillig_vm/src/arithmetic.rs @@ -36,18 +36,20 @@ pub(crate) fn evaluate_binary_bigint_op( BinaryIntOp::UnsignedDiv => { let b_mod = b % bit_modulo; if b_mod.is_zero() { - return Err("Division by zero".to_owned()); + BigUint::zero() + } else { + (a % bit_modulo) / b_mod } - (a % bit_modulo) / b_mod } // Perform signed division by first converting a and b to signed integers and then back to unsigned after the operation. BinaryIntOp::SignedDiv => { let b_signed = to_big_signed(b, bit_size); if b_signed.is_zero() { - return Err("Division by zero".to_owned()); + BigUint::zero() + } else { + let signed_div = to_big_signed(a, bit_size) / b_signed; + to_big_unsigned(signed_div, bit_size) } - let signed_div = to_big_signed(a, bit_size) / b_signed; - to_big_unsigned(signed_div, bit_size) } // Perform a == operation, returning 0 or 1 BinaryIntOp::Equals => { From 7cd5fdb3d2a53475b7c8681231d517cab30f9f9b Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Mon, 26 Feb 2024 13:56:14 +0000 Subject: [PATCH 34/45] feat: expose separate functions to compile programs vs contracts in `noir_wasm` (#4413) # Description ## Problem\* Resolves ## Summary\* This PR exposes separate functions to compile contracts vs programs in the wasm compiler. This allows us to simplify various code paths as we don't need to deal with the potential for the two artifact types as this just leads to us asserting types and breaking type safety. ## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. 
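As a rough sketch of how the split API is consumed (the project path is hypothetical, and the field access assumes the artifact shapes from `types/noir_artifact.ts`):

```typescript
import { compile_program, compile_contract, createFileManager } from '@noir-lang/noir_wasm';

async function main() {
  // Hypothetical project directory containing a Nargo.toml.
  const fm = createFileManager('/path/to/project');

  // Each entry point now returns a precisely typed artifact, instead of a
  // single `compile` whose result had to be narrowed after the fact.
  const { program } = await compile_program(fm);
  const { contract } = await compile_contract(fm);

  console.log(program.noir_version, contract.name);
}

main();
```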
--- compiler/wasm/src/compile.rs | 212 ++++++++++-------- compiler/wasm/src/compile_new.rs | 75 +++++-- compiler/wasm/src/index.cts | 77 ++++++- compiler/wasm/src/index.mts | 77 ++++++- compiler/wasm/src/lib.rs | 4 +- compiler/wasm/src/noir/noir-wasm-compiler.ts | 61 ++++- compiler/wasm/src/types/noir_artifact.ts | 19 -- .../test/compiler/browser/compile.test.ts | 8 +- .../wasm/test/compiler/node/compile.test.ts | 8 +- .../wasm/test/compiler/shared/compile.test.ts | 10 +- compiler/wasm/test/wasm/browser/index.test.ts | 10 +- compiler/wasm/test/wasm/node/index.test.ts | 10 +- 12 files changed, 377 insertions(+), 194 deletions(-) diff --git a/compiler/wasm/src/compile.rs b/compiler/wasm/src/compile.rs index c8b1680bc00..ca6c8efedb1 100644 --- a/compiler/wasm/src/compile.rs +++ b/compiler/wasm/src/compile.rs @@ -6,8 +6,7 @@ use nargo::artifacts::{ program::ProgramArtifact, }; use noirc_driver::{ - add_dep, compile_contract, compile_main, file_manager_with_stdlib, prepare_crate, - prepare_dependency, CompileOptions, CompiledContract, CompiledProgram, + add_dep, file_manager_with_stdlib, prepare_crate, prepare_dependency, CompileOptions, NOIR_ARTIFACT_VERSION_STRING, }; use noirc_evaluator::errors::SsaReport; @@ -60,51 +59,64 @@ extern "C" { #[derive(Clone, Debug, PartialEq, Eq)] pub type JsDependencyGraph; - #[wasm_bindgen(extends = Object, js_name = "CompileResult", typescript_type = "CompileResult")] + #[wasm_bindgen(extends = Object, js_name = "ProgramCompileResult", typescript_type = "ProgramCompileResult")] #[derive(Clone, Debug, PartialEq, Eq)] - pub type JsCompileResult; + pub type JsCompileProgramResult; #[wasm_bindgen(constructor, js_class = "Object")] - fn constructor() -> JsCompileResult; + fn constructor() -> JsCompileProgramResult; + + #[wasm_bindgen(extends = Object, js_name = "ContractCompileResult", typescript_type = "ContractCompileResult")] + #[derive(Clone, Debug, PartialEq, Eq)] + pub type JsCompileContractResult; + + #[wasm_bindgen(constructor, js_class = "Object")] + fn constructor() -> JsCompileContractResult; } -impl JsCompileResult { - const CONTRACT_PROP: &'static str = "contract"; +impl JsCompileProgramResult { const PROGRAM_PROP: &'static str = "program"; const WARNINGS_PROP: &'static str = "warnings"; - pub fn new(resp: CompileResult) -> JsCompileResult { - let obj = JsCompileResult::constructor(); - match resp { - CompileResult::Contract { contract, warnings } => { - js_sys::Reflect::set( - &obj, - &JsString::from(JsCompileResult::CONTRACT_PROP), - &::from_serde(&contract).unwrap(), - ) - .unwrap(); - js_sys::Reflect::set( - &obj, - &JsString::from(JsCompileResult::WARNINGS_PROP), - &::from_serde(&warnings).unwrap(), - ) - .unwrap(); - } - CompileResult::Program { program, warnings } => { - js_sys::Reflect::set( - &obj, - &JsString::from(JsCompileResult::PROGRAM_PROP), - &::from_serde(&program).unwrap(), - ) - .unwrap(); - js_sys::Reflect::set( - &obj, - &JsString::from(JsCompileResult::WARNINGS_PROP), - &::from_serde(&warnings).unwrap(), - ) - .unwrap(); - } - }; + pub fn new(program: ProgramArtifact, warnings: Vec) -> JsCompileProgramResult { + let obj = JsCompileProgramResult::constructor(); + + js_sys::Reflect::set( + &obj, + &JsString::from(JsCompileProgramResult::PROGRAM_PROP), + &::from_serde(&program).unwrap(), + ) + .unwrap(); + js_sys::Reflect::set( + &obj, + &JsString::from(JsCompileProgramResult::WARNINGS_PROP), + &::from_serde(&warnings).unwrap(), + ) + .unwrap(); + + obj + } +} + +impl JsCompileContractResult { + const CONTRACT_PROP: &'static str = 
"contract"; + const WARNINGS_PROP: &'static str = "warnings"; + + pub fn new(contract: ContractArtifact, warnings: Vec) -> JsCompileContractResult { + let obj = JsCompileContractResult::constructor(); + + js_sys::Reflect::set( + &obj, + &JsString::from(JsCompileContractResult::CONTRACT_PROP), + &::from_serde(&contract).unwrap(), + ) + .unwrap(); + js_sys::Reflect::set( + &obj, + &JsString::from(JsCompileContractResult::WARNINGS_PROP), + &::from_serde(&warnings).unwrap(), + ) + .unwrap(); obj } @@ -144,73 +156,98 @@ pub(crate) fn parse_all(fm: &FileManager) -> ParsedFiles { fm.as_file_map().all_file_ids().map(|&file_id| (file_id, parse_file(fm, file_id))).collect() } -pub enum CompileResult { - Contract { contract: ContractArtifact, warnings: Vec }, - Program { program: ProgramArtifact, warnings: Vec }, -} - #[wasm_bindgen] -pub fn compile( +pub fn compile_program( entry_point: String, - contracts: Option, dependency_graph: Option, file_source_map: PathToFileSourceMap, -) -> Result { +) -> Result { console_error_panic_hook::set_once(); - - let dependency_graph: DependencyGraph = if let Some(dependency_graph) = dependency_graph { - ::into_serde(&JsValue::from(dependency_graph)) - .map_err(|err| err.to_string())? - } else { - DependencyGraph { root_dependencies: vec![], library_dependencies: HashMap::new() } - }; - - let fm = file_manager_with_source_map(file_source_map); - let parsed_files = parse_all(&fm); - let mut context = Context::new(fm, parsed_files); - - let path = Path::new(&entry_point); - let crate_id = prepare_crate(&mut context, path); - - process_dependency_graph(&mut context, dependency_graph); + let (crate_id, mut context) = prepare_context(entry_point, dependency_graph, file_source_map)?; let compile_options = CompileOptions::default(); - // For now we default to a bounded width of 3, though we can add it as a parameter let expression_width = acvm::acir::circuit::ExpressionWidth::Bounded { width: 3 }; - if contracts.unwrap_or_default() { - let compiled_contract = compile_contract(&mut context, crate_id, &compile_options) + let compiled_program = + noirc_driver::compile_main(&mut context, crate_id, &compile_options, None) .map_err(|errs| { CompileError::with_file_diagnostics( - "Failed to compile contract", + "Failed to compile program", errs, &context.file_manager, ) })? 
.0; - let optimized_contract = - nargo::ops::transform_contract(compiled_contract, expression_width); + let optimized_program = nargo::ops::transform_program(compiled_program, expression_width); + let warnings = optimized_program.warnings.clone(); - let compile_output = generate_contract_artifact(optimized_contract); - Ok(JsCompileResult::new(compile_output)) - } else { - let compiled_program = compile_main(&mut context, crate_id, &compile_options, None) + Ok(JsCompileProgramResult::new(optimized_program.into(), warnings)) +} + +#[wasm_bindgen] +pub fn compile_contract( + entry_point: String, + dependency_graph: Option, + file_source_map: PathToFileSourceMap, +) -> Result { + console_error_panic_hook::set_once(); + let (crate_id, mut context) = prepare_context(entry_point, dependency_graph, file_source_map)?; + + let compile_options = CompileOptions::default(); + // For now we default to a bounded width of 3, though we can add it as a parameter + let expression_width = acvm::acir::circuit::ExpressionWidth::Bounded { width: 3 }; + + let compiled_contract = + noirc_driver::compile_contract(&mut context, crate_id, &compile_options) .map_err(|errs| { CompileError::with_file_diagnostics( - "Failed to compile program", + "Failed to compile contract", errs, &context.file_manager, ) })? .0; - let optimized_program = nargo::ops::transform_program(compiled_program, expression_width); + let optimized_contract = nargo::ops::transform_contract(compiled_contract, expression_width); - let compile_output = generate_program_artifact(optimized_program); - Ok(JsCompileResult::new(compile_output)) - } + let functions = + optimized_contract.functions.into_iter().map(ContractFunctionArtifact::from).collect(); + + let contract_artifact = ContractArtifact { + noir_version: String::from(NOIR_ARTIFACT_VERSION_STRING), + name: optimized_contract.name, + functions, + events: optimized_contract.events, + file_map: optimized_contract.file_map, + }; + + Ok(JsCompileContractResult::new(contract_artifact, optimized_contract.warnings)) +} + +fn prepare_context( + entry_point: String, + dependency_graph: Option, + file_source_map: PathToFileSourceMap, +) -> Result<(CrateId, Context<'static, 'static>), JsCompileError> { + let dependency_graph: DependencyGraph = if let Some(dependency_graph) = dependency_graph { + ::into_serde(&JsValue::from(dependency_graph)) + .map_err(|err| err.to_string())? 
+ } else { + DependencyGraph { root_dependencies: vec![], library_dependencies: HashMap::new() } + }; + + let fm = file_manager_with_source_map(file_source_map); + let parsed_files = parse_all(&fm); + let mut context = Context::new(fm, parsed_files); + + let path = Path::new(&entry_point); + let crate_id = prepare_crate(&mut context, path); + + process_dependency_graph(&mut context, dependency_graph); + + Ok((crate_id, context)) } // Create a new FileManager with the given source map @@ -270,25 +307,6 @@ fn add_noir_lib(context: &mut Context, library_name: &CrateName) -> CrateId { prepare_dependency(context, &path_to_lib) } -pub(crate) fn generate_program_artifact(program: CompiledProgram) -> CompileResult { - let warnings = program.warnings.clone(); - CompileResult::Program { program: program.into(), warnings } -} - -pub(crate) fn generate_contract_artifact(contract: CompiledContract) -> CompileResult { - let functions = contract.functions.into_iter().map(ContractFunctionArtifact::from).collect(); - - let contract_artifact = ContractArtifact { - noir_version: String::from(NOIR_ARTIFACT_VERSION_STRING), - name: contract.name, - functions, - events: contract.events, - file_map: contract.file_map, - }; - - CompileResult::Contract { contract: contract_artifact, warnings: contract.warnings } -} - #[cfg(test)] mod test { use noirc_driver::prepare_crate; diff --git a/compiler/wasm/src/compile_new.rs b/compiler/wasm/src/compile_new.rs index f8fbed4f470..2a5f7ab6545 100644 --- a/compiler/wasm/src/compile_new.rs +++ b/compiler/wasm/src/compile_new.rs @@ -1,10 +1,12 @@ use crate::compile::{ - file_manager_with_source_map, generate_contract_artifact, generate_program_artifact, parse_all, - JsCompileResult, PathToFileSourceMap, + file_manager_with_source_map, parse_all, JsCompileContractResult, JsCompileProgramResult, + PathToFileSourceMap, }; use crate::errors::{CompileError, JsCompileError}; +use nargo::artifacts::contract::{ContractArtifact, ContractFunctionArtifact}; use noirc_driver::{ add_dep, compile_contract, compile_main, prepare_crate, prepare_dependency, CompileOptions, + NOIR_ARTIFACT_VERSION_STRING, }; use noirc_frontend::{ graph::{CrateId, CrateName}, @@ -92,7 +94,7 @@ impl CompilerContext { pub fn compile_program( mut self, program_width: usize, - ) -> Result { + ) -> Result { let compile_options = CompileOptions::default(); let np_language = acvm::acir::circuit::ExpressionWidth::Bounded { width: program_width }; @@ -110,15 +112,15 @@ impl CompilerContext { .0; let optimized_program = nargo::ops::transform_program(compiled_program, np_language); + let warnings = optimized_program.warnings.clone(); - let compile_output = generate_program_artifact(optimized_program); - Ok(JsCompileResult::new(compile_output)) + Ok(JsCompileProgramResult::new(optimized_program.into(), warnings)) } pub fn compile_contract( mut self, program_width: usize, - ) -> Result { + ) -> Result { let compile_options = CompileOptions::default(); let np_language = acvm::acir::circuit::ExpressionWidth::Bounded { width: program_width }; let root_crate_id = *self.context.root_crate_id(); @@ -136,24 +138,64 @@ impl CompilerContext { let optimized_contract = nargo::ops::transform_contract(compiled_contract, np_language); - let compile_output = generate_contract_artifact(optimized_contract); - Ok(JsCompileResult::new(compile_output)) + let functions = + optimized_contract.functions.into_iter().map(ContractFunctionArtifact::from).collect(); + + let contract_artifact = ContractArtifact { + noir_version: 
String::from(NOIR_ARTIFACT_VERSION_STRING), + name: optimized_contract.name, + functions, + events: optimized_contract.events, + file_map: optimized_contract.file_map, + }; + + Ok(JsCompileContractResult::new(contract_artifact, optimized_contract.warnings)) } } /// This is a method that exposes the same API as `compile` /// But uses the Context based APi internally #[wasm_bindgen] -pub fn compile_( +pub fn compile_program_( entry_point: String, - contracts: Option, dependency_graph: Option, file_source_map: PathToFileSourceMap, -) -> Result { - use std::collections::HashMap; +) -> Result { + console_error_panic_hook::set_once(); + + let compiler_context = + prepare_compiler_context(entry_point, dependency_graph, file_source_map)?; + let program_width = 3; + compiler_context.compile_program(program_width) +} + +/// This is a method that exposes the same API as `compile` +/// But uses the Context based APi internally +#[wasm_bindgen] +pub fn compile_contract_( + entry_point: String, + dependency_graph: Option, + file_source_map: PathToFileSourceMap, +) -> Result { console_error_panic_hook::set_once(); + let compiler_context = + prepare_compiler_context(entry_point, dependency_graph, file_source_map)?; + let program_width = 3; + + compiler_context.compile_contract(program_width) +} + +/// This is a method that exposes the same API as `prepare_context` +/// But uses the Context based API internally +fn prepare_compiler_context( + entry_point: String, + dependency_graph: Option, + file_source_map: PathToFileSourceMap, +) -> Result { + use std::collections::HashMap; + let dependency_graph: crate::compile::DependencyGraph = if let Some(dependency_graph) = dependency_graph { ::into_serde( @@ -218,14 +260,7 @@ pub fn compile_( } } - let is_contract = contracts.unwrap_or(false); - let program_width = 3; - - if is_contract { - compiler_context.compile_contract(program_width) - } else { - compiler_context.compile_program(program_width) - } + Ok(compiler_context) } #[cfg(test)] diff --git a/compiler/wasm/src/index.cts b/compiler/wasm/src/index.cts index 7c707e662d8..234bfa7280c 100644 --- a/compiler/wasm/src/index.cts +++ b/compiler/wasm/src/index.cts @@ -2,7 +2,7 @@ import { FileManager } from './noir/file-manager/file-manager'; import { createNodejsFileManager } from './noir/file-manager/nodejs-file-manager'; import { NoirWasmCompiler } from './noir/noir-wasm-compiler'; import { LogData, LogFn } from './utils'; -import { CompilationResult } from './types/noir_artifact'; +import { ContractCompilationArtifacts, ProgramCompilationArtifacts } from './types/noir_artifact'; import { inflateDebugSymbols } from './noir/debug'; /** @@ -17,36 +17,86 @@ import { inflateDebugSymbols } from './noir/debug'; * ```typescript * // Node.js * - * import { compile, createFileManager } from '@noir-lang/noir_wasm'; + * import { compile_program, createFileManager } from '@noir-lang/noir_wasm'; * * const fm = createFileManager(myProjectPath); - * const myCompiledCode = await compile(fm); + * const myCompiledCode = await compile_program(fm); * ``` * * ```typescript * // Browser * - * import { compile, createFileManager } from '@noir-lang/noir_wasm'; + * import { compile_program, createFileManager } from '@noir-lang/noir_wasm'; * * const fm = createFileManager('/'); * for (const path of files) { * await fm.writeFile(path, await getFileAsStream(path)); * } - * const myCompiledCode = await compile(fm); + * const myCompiledCode = await compile_program(fm); * ``` */ -async function compile( +async function compile_program( 
fileManager: FileManager, projectPath?: string, logFn?: LogFn, debugLogFn?: LogFn, -): Promise { +): Promise { + const compiler = await setup_compiler(fileManager, projectPath, logFn, debugLogFn); + return await compiler.compile_program(); +} + +/** + * Compiles a Noir project + * + * @param fileManager - The file manager to use + * @param projectPath - The path to the project inside the file manager. Defaults to the root of the file manager + * @param logFn - A logging function. If not provided, console.log will be used + * @param debugLogFn - A debug logging function. If not provided, logFn will be used + * + * @example + * ```typescript + * // Node.js + * + * import { compile_contract, createFileManager } from '@noir-lang/noir_wasm'; + * + * const fm = createFileManager(myProjectPath); + * const myCompiledCode = await compile_contract(fm); + * ``` + * + * ```typescript + * // Browser + * + * import { compile_contract, createFileManager } from '@noir-lang/noir_wasm'; + * + * const fm = createFileManager('/'); + * for (const path of files) { + * await fm.writeFile(path, await getFileAsStream(path)); + * } + * const myCompiledCode = await compile_contract(fm); + * ``` + */ +async function compile_contract( + fileManager: FileManager, + projectPath?: string, + logFn?: LogFn, + debugLogFn?: LogFn, +): Promise { + const compiler = await setup_compiler(fileManager, projectPath, logFn, debugLogFn); + return await compiler.compile_contract(); +} + +async function setup_compiler( + fileManager: FileManager, + projectPath?: string, + logFn?: LogFn, + debugLogFn?: LogFn, +): Promise { if (logFn && !debugLogFn) { debugLogFn = logFn; } const cjs = await require('../build/cjs'); - const compiler = await NoirWasmCompiler.new( + return await NoirWasmCompiler.new( fileManager, projectPath ?? 
fileManager.getDataDir(), cjs, @@ -72,9 +122,16 @@ async function compile( }, }, ); - return await compiler.compile(); } const createFileManager = createNodejsFileManager; -export { compile, createFileManager, inflateDebugSymbols, CompilationResult }; +export { + compile_program as compile, + compile_program, + compile_contract, + createFileManager, + inflateDebugSymbols, + ProgramCompilationArtifacts, + ContractCompilationArtifacts, +}; diff --git a/compiler/wasm/src/index.mts b/compiler/wasm/src/index.mts index d4ed0beccfc..326a7337117 100644 --- a/compiler/wasm/src/index.mts +++ b/compiler/wasm/src/index.mts @@ -2,7 +2,7 @@ import { FileManager } from './noir/file-manager/file-manager'; import { createNodejsFileManager } from './noir/file-manager/nodejs-file-manager'; import { NoirWasmCompiler } from './noir/noir-wasm-compiler'; import { LogData, LogFn } from './utils'; -import { CompilationResult } from './types/noir_artifact'; +import { ContractCompilationArtifacts, ProgramCompilationArtifacts } from './types/noir_artifact'; import { inflateDebugSymbols } from './noir/debug'; /** @@ -17,30 +17,80 @@ import { inflateDebugSymbols } from './noir/debug'; * ```typescript * // Node.js * - * import { compile, createFileManager } from '@noir-lang/noir_wasm'; + * import { compile_program, createFileManager } from '@noir-lang/noir_wasm'; * * const fm = createFileManager(myProjectPath); - * const myCompiledCode = await compile(fm); + * const myCompiledCode = await compile_program(fm); * ``` * * ```typescript * // Browser * - * import { compile, createFileManager } from '@noir-lang/noir_wasm'; + * import { compile_program, createFileManager } from '@noir-lang/noir_wasm'; * * const fm = createFileManager('/'); * for (const path of files) { * await fm.writeFile(path, await getFileAsStream(path)); * } - * const myCompiledCode = await compile(fm); + * const myCompiledCode = await compile_program(fm); * ``` */ -async function compile( +async function compile_program( fileManager: FileManager, projectPath?: string, logFn?: LogFn, debugLogFn?: LogFn, -): Promise { +): Promise { + const compiler = await setup_compiler(fileManager, projectPath, logFn, debugLogFn); + return await compiler.compile_program(); +} + +/** + * Compiles a Noir project + * + * @param fileManager - The file manager to use + * @param projectPath - The path to the project inside the file manager. Defaults to the root of the file manager + * @param logFn - A logging function. If not provided, console.log will be used + * @param debugLogFn - A debug logging function. 
If not provided, logFn will be used + * + * @example + * ```typescript + * // Node.js + * + * import { compile_contract, createFileManager } from '@noir-lang/noir_wasm'; + * + * const fm = createFileManager(myProjectPath); + * const myCompiledCode = await compile_contract(fm); + * ``` + * + * ```typescript + * // Browser + * + * import { compile_contract, createFileManager } from '@noir-lang/noir_wasm'; + * + * const fm = createFileManager('/'); + * for (const path of files) { + * await fm.writeFile(path, await getFileAsStream(path)); + * } + * const myCompiledCode = await compile_contract(fm); + * ``` + */ +async function compile_contract( + fileManager: FileManager, + projectPath?: string, + logFn?: LogFn, + debugLogFn?: LogFn, +): Promise { + const compiler = await setup_compiler(fileManager, projectPath, logFn, debugLogFn); + return await compiler.compile_contract(); +} + +async function setup_compiler( + fileManager: FileManager, + projectPath?: string, + logFn?: LogFn, + debugLogFn?: LogFn, +): Promise { if (logFn && !debugLogFn) { debugLogFn = logFn; } @@ -48,7 +98,7 @@ async function compile( const esm = await import(/* webpackMode: "eager" */ '../build/esm'); await esm.default(); - const compiler = await NoirWasmCompiler.new( + return await NoirWasmCompiler.new( fileManager, projectPath ?? fileManager.getDataDir(), esm, @@ -74,9 +124,16 @@ async function compile( }, }, ); - return await compiler.compile(); } const createFileManager = createNodejsFileManager; -export { compile, createFileManager, inflateDebugSymbols, CompilationResult }; +export { + compile_program as compile, + compile_program, + compile_contract, + createFileManager, + inflateDebugSymbols, + ProgramCompilationArtifacts, + ContractCompilationArtifacts, +}; diff --git a/compiler/wasm/src/lib.rs b/compiler/wasm/src/lib.rs index 174d9b9ce9c..6753faf2009 100644 --- a/compiler/wasm/src/lib.rs +++ b/compiler/wasm/src/lib.rs @@ -18,10 +18,10 @@ mod compile; mod compile_new; mod errors; -pub use compile::compile; +pub use compile::{compile_contract, compile_program}; // Expose the new Context-Centric API -pub use compile_new::{compile_, CompilerContext, CrateIDWrapper}; +pub use compile_new::{compile_contract_, compile_program_, CompilerContext, CrateIDWrapper}; use wasm_bindgen::{prelude::wasm_bindgen, JsValue}; #[derive(Serialize, Deserialize)] diff --git a/compiler/wasm/src/noir/noir-wasm-compiler.ts b/compiler/wasm/src/noir/noir-wasm-compiler.ts index 2a0af5d8fee..1ec3af1fd65 100644 --- a/compiler/wasm/src/noir/noir-wasm-compiler.ts +++ b/compiler/wasm/src/noir/noir-wasm-compiler.ts @@ -6,7 +6,7 @@ import { LocalDependencyResolver } from './dependencies/local-dependency-resolve import { FileManager } from './file-manager/file-manager'; import { Package } from './package'; import { LogFn } from '../utils'; -import { CompilationResult } from '../types/noir_artifact'; +import { ContractCompilationArtifacts, ProgramCompilationArtifacts } from '../types/noir_artifact'; /** Compilation options */ export type NoirWasmCompileOptions = { @@ -84,21 +84,64 @@ export class NoirWasmCompiler { /** * Compile EntryPoint */ + public async compile_program(): Promise { + console.log(`Compiling at ${this.#package.getEntryPointPath()}`); + + if (this.#package.getType() !== 'bin') { + throw new Error(`Expected to find package type "bin" but found ${this.#package.getType()}`); + } + await this.#dependencyManager.resolveDependencies(); + this.#debugLog(`Dependencies: ${this.#dependencyManager.getPackageNames().join(', ')}`); + + try { + 
const entrypoint = this.#package.getEntryPointPath(); + const deps = { + /* eslint-disable camelcase */ + root_dependencies: this.#dependencyManager.getEntrypointDependencies(), + library_dependencies: this.#dependencyManager.getLibraryDependencies(), + /* eslint-enable camelcase */ + }; + const packageSources = await this.#package.getSources(this.#fm); + const librarySources = ( + await Promise.all( + this.#dependencyManager + .getLibraries() + .map(async ([alias, library]) => await library.package.getSources(this.#fm, alias)), + ) + ).flat(); + [...packageSources, ...librarySources].forEach((sourceFile) => { + this.#debugLog(`Adding source ${sourceFile.path}`); + this.#sourceMap.add_source_code(sourceFile.path, sourceFile.source); + }); + const result = this.#wasmCompiler.compile_program(entrypoint, deps, this.#sourceMap); + + return result; + } catch (err) { + if (err instanceof Error && err.name === 'CompileError') { + const logs = await this.#processCompileError(err); + for (const log of logs) { + this.#log(log); + } + throw new Error(logs.join('\n')); + } + + throw err; + } + } + /** * Compile EntryPoint */ - public async compile(): Promise { + public async compile_contract(): Promise { console.log(`Compiling at ${this.#package.getEntryPointPath()}`); - if (!(this.#package.getType() === 'contract' || this.#package.getType() === 'bin')) { - throw new Error(`Only supports compiling "contract" and "bin" package types (${this.#package.getType()})`); + if (this.#package.getType() !== 'contract') { + throw new Error(`Expected to find package type "contract" but found ${this.#package.getType()}`); } await this.#dependencyManager.resolveDependencies(); this.#debugLog(`Dependencies: ${this.#dependencyManager.getPackageNames().join(', ')}`); try { - const isContract: boolean = this.#package.getType() === 'contract'; - const entrypoint = this.#package.getEntryPointPath(); const deps = { /* eslint-disable camelcase */ @@ -118,11 +161,7 @@ export class NoirWasmCompiler { this.#debugLog(`Adding source ${sourceFile.path}`); this.#sourceMap.add_source_code(sourceFile.path, sourceFile.source); }); - const result = this.#wasmCompiler.compile(entrypoint, isContract, deps, this.#sourceMap); - - if ((isContract && !('contract' in result)) || (!isContract && !('program' in result))) { - throw new Error('Invalid compilation result'); - } + const result = this.#wasmCompiler.compile_contract(entrypoint, deps, this.#sourceMap); return result; } catch (err) { diff --git a/compiler/wasm/src/types/noir_artifact.ts b/compiler/wasm/src/types/noir_artifact.ts index e636212a487..832a6ed9bf9 100644 --- a/compiler/wasm/src/types/noir_artifact.ts +++ b/compiler/wasm/src/types/noir_artifact.ts @@ -180,22 +180,3 @@ export interface ProgramCompilationArtifacts { /** Compilation warnings. 
*/ warnings: Warning[]; } - -/** - * output of Noir Wasm compilation, can be for a contract or lib/binary - */ -export type CompilationResult = ContractCompilationArtifacts | ProgramCompilationArtifacts; - -/** - * Check if it has Contract unique property - */ -export function isContractCompilationArtifacts(artifact: CompilationResult): artifact is ContractCompilationArtifacts { - return (artifact as ContractCompilationArtifacts).contract !== undefined; -} - -/** - * Check if it has Contract unique property - */ -export function isProgramCompilationArtifacts(artifact: CompilationResult): artifact is ProgramCompilationArtifacts { - return (artifact as ProgramCompilationArtifacts).program !== undefined; -} diff --git a/compiler/wasm/test/compiler/browser/compile.test.ts b/compiler/wasm/test/compiler/browser/compile.test.ts index b7e6c27427f..7d4b3da55aa 100644 --- a/compiler/wasm/test/compiler/browser/compile.test.ts +++ b/compiler/wasm/test/compiler/browser/compile.test.ts @@ -1,7 +1,7 @@ /* eslint-disable @typescript-eslint/ban-ts-comment */ import { getPaths } from '../../shared'; import { expect } from '@esm-bundle/chai'; -import { compile, createFileManager } from '@noir-lang/noir_wasm'; +import { compile_program, compile_contract, createFileManager } from '@noir-lang/noir_wasm'; import { ContractArtifact, ProgramArtifact } from '../../../src/types/noir_artifact'; import { shouldCompileContractIdentically, shouldCompileProgramIdentically } from '../shared/compile.test'; @@ -33,7 +33,7 @@ describe('noir-compiler/browser', () => { await fm.writeFile(path, (await getFile(path)).body as ReadableStream); } const nargoArtifact = (await getPrecompiledSource(simpleScriptExpectedArtifact)) as ProgramArtifact; - const noirWasmArtifact = await compile(fm, '/fixtures/simple'); + const noirWasmArtifact = await compile_program(fm, '/fixtures/simple'); return { nargoArtifact, noirWasmArtifact }; }, @@ -51,7 +51,7 @@ describe('noir-compiler/browser', () => { await fm.writeFile(path, (await getFile(path)).body as ReadableStream); } const nargoArtifact = (await getPrecompiledSource(depsScriptExpectedArtifact)) as ProgramArtifact; - const noirWasmArtifact = await compile(fm, '/fixtures/with-deps'); + const noirWasmArtifact = await compile_program(fm, '/fixtures/with-deps'); return { nargoArtifact, noirWasmArtifact }; }, @@ -69,7 +69,7 @@ describe('noir-compiler/browser', () => { await fm.writeFile(path, (await getFile(path)).body as ReadableStream); } const nargoArtifact = (await getPrecompiledSource(contractExpectedArtifact)) as ContractArtifact; - const noirWasmArtifact = await compile(fm, '/fixtures/noir-contract'); + const noirWasmArtifact = await compile_contract(fm, '/fixtures/noir-contract'); return { nargoArtifact, noirWasmArtifact }; }, diff --git a/compiler/wasm/test/compiler/node/compile.test.ts b/compiler/wasm/test/compiler/node/compile.test.ts index 9af98195825..811dc95ce16 100644 --- a/compiler/wasm/test/compiler/node/compile.test.ts +++ b/compiler/wasm/test/compiler/node/compile.test.ts @@ -2,7 +2,7 @@ import { join, resolve } from 'path'; import { getPaths } from '../../shared'; import { expect } from 'chai'; -import { compile, createFileManager } from '@noir-lang/noir_wasm'; +import { compile_program, compile_contract, createFileManager } from '@noir-lang/noir_wasm'; import { readFile } from 'fs/promises'; import { ContractArtifact, ProgramArtifact } from '../../../src/types/noir_artifact'; import { shouldCompileContractIdentically, shouldCompileProgramIdentically } from 
'../shared/compile.test'; @@ -15,7 +15,7 @@ describe('noir-compiler/node', () => { const fm = createFileManager(simpleScriptProjectPath); const nargoArtifact = JSON.parse((await readFile(simpleScriptExpectedArtifact)).toString()) as ProgramArtifact; - const noirWasmArtifact = await compile(fm); + const noirWasmArtifact = await compile_program(fm); return { nargoArtifact, noirWasmArtifact }; }, expect); @@ -24,7 +24,7 @@ describe('noir-compiler/node', () => { const fm = createFileManager(depsScriptProjectPath); const nargoArtifact = JSON.parse((await readFile(depsScriptExpectedArtifact)).toString()) as ProgramArtifact; - const noirWasmArtifact = await compile(fm); + const noirWasmArtifact = await compile_program(fm); return { nargoArtifact, noirWasmArtifact }; }, expect); @@ -33,7 +33,7 @@ describe('noir-compiler/node', () => { const fm = createFileManager(contractProjectPath); const nargoArtifact = JSON.parse((await readFile(contractExpectedArtifact)).toString()) as ContractArtifact; - const noirWasmArtifact = await compile(fm); + const noirWasmArtifact = await compile_contract(fm); return { nargoArtifact, noirWasmArtifact }; }, expect); }); diff --git a/compiler/wasm/test/compiler/shared/compile.test.ts b/compiler/wasm/test/compiler/shared/compile.test.ts index 88e8e8c8e5a..52cef14968b 100644 --- a/compiler/wasm/test/compiler/shared/compile.test.ts +++ b/compiler/wasm/test/compiler/shared/compile.test.ts @@ -1,4 +1,4 @@ -import { CompilationResult, inflateDebugSymbols } from '@noir-lang/noir_wasm'; +import { inflateDebugSymbols } from '@noir-lang/noir_wasm'; import { type expect as Expect } from 'chai'; import { ContractArtifact, @@ -11,7 +11,7 @@ import { } from '../../../src/types/noir_artifact'; export function shouldCompileProgramIdentically( - compileFn: () => Promise<{ nargoArtifact: ProgramArtifact; noirWasmArtifact: CompilationResult }>, + compileFn: () => Promise<{ nargoArtifact: ProgramArtifact; noirWasmArtifact: ProgramCompilationArtifacts }>, expect: typeof Expect, timeout = 5000, ) { @@ -24,7 +24,7 @@ export function shouldCompileProgramIdentically( normalizeVersion(nargoArtifact); // Prepare noir-wasm artifact - const noirWasmProgram = (noirWasmArtifact as unknown as ProgramCompilationArtifacts).program; + const noirWasmProgram = noirWasmArtifact.program; expect(noirWasmProgram).not.to.be.undefined; const [_noirWasmDebugInfos, norWasmFileMap] = deleteProgramDebugMetadata(noirWasmProgram); normalizeVersion(noirWasmProgram); @@ -47,7 +47,7 @@ export function shouldCompileProgramIdentically( } export function shouldCompileContractIdentically( - compileFn: () => Promise<{ nargoArtifact: ContractArtifact; noirWasmArtifact: CompilationResult }>, + compileFn: () => Promise<{ nargoArtifact: ContractArtifact; noirWasmArtifact: ContractCompilationArtifacts }>, expect: typeof Expect, timeout = 5000, ) { @@ -60,7 +60,7 @@ export function shouldCompileContractIdentically( normalizeVersion(nargoArtifact); // Prepare noir-wasm artifact - const noirWasmContract = (noirWasmArtifact as unknown as ContractCompilationArtifacts).contract; + const noirWasmContract = noirWasmArtifact.contract; expect(noirWasmContract).not.to.be.undefined; const [noirWasmDebugInfos, norWasmFileMap] = deleteContractDebugMetadata(noirWasmContract); normalizeVersion(noirWasmContract); diff --git a/compiler/wasm/test/wasm/browser/index.test.ts b/compiler/wasm/test/wasm/browser/index.test.ts index 3122fa57945..b59b4ae417a 100644 --- a/compiler/wasm/test/wasm/browser/index.test.ts +++ 
b/compiler/wasm/test/wasm/browser/index.test.ts @@ -2,7 +2,7 @@ import { getPaths } from '../../shared'; import { expect } from '@esm-bundle/chai'; -import init, { compile, PathToFileSourceMap, compile_, CompilerContext } from '../../../build/esm'; +import init, { compile_program, PathToFileSourceMap, compile_program_, CompilerContext } from '../../../build/esm'; // @ts-ignore await init(); @@ -35,7 +35,7 @@ describe('noir wasm compilation', () => { it('matching nargos compilation', async () => { const sourceMap = new PathToFileSourceMap(); sourceMap.add_source_code('script/main.nr', await getFileAsString(simpleScriptSourcePath)); - const wasmCircuit = compile('script/main.nr', undefined, undefined, sourceMap); + const wasmCircuit = compile_program('script/main.nr', undefined, sourceMap); const cliCircuit = await getPrecompiledSource(simpleScriptExpectedArtifact); if (!('program' in wasmCircuit)) { @@ -58,9 +58,8 @@ describe('noir wasm compilation', () => { }); it('matching nargos compilation', async () => { - const wasmCircuit = compile( + const wasmCircuit = compile_program( 'script/main.nr', - false, { root_dependencies: ['lib_a'], library_dependencies: { @@ -132,9 +131,8 @@ describe('noir wasm compilation', () => { }).timeout(60 * 20e3); it('matching nargos compilation - context-implementation-compile-api', async () => { - const wasmCircuit = await compile_( + const wasmCircuit = await compile_program_( 'script/main.nr', - false, { root_dependencies: ['lib_a'], library_dependencies: { diff --git a/compiler/wasm/test/wasm/node/index.test.ts b/compiler/wasm/test/wasm/node/index.test.ts index c73ce7477e5..23c87cc059a 100644 --- a/compiler/wasm/test/wasm/node/index.test.ts +++ b/compiler/wasm/test/wasm/node/index.test.ts @@ -3,7 +3,7 @@ import { readFileSync } from 'fs'; import { join, resolve } from 'path'; import { expect } from 'chai'; -import { compile, PathToFileSourceMap, compile_, CompilerContext } from '../../../build/cjs'; +import { compile_program, PathToFileSourceMap, compile_program_, CompilerContext } from '../../../build/cjs'; const basePath = resolve(join(__dirname, '../../')); const { @@ -26,7 +26,7 @@ describe('noir wasm compilation', () => { it('matching nargos compilation', async () => { const sourceMap = new PathToFileSourceMap(); sourceMap.add_source_code(simpleScriptSourcePath, readFileSync(simpleScriptSourcePath, 'utf-8')); - const wasmCircuit = compile(simpleScriptSourcePath, undefined, undefined, sourceMap); + const wasmCircuit = compile_program(simpleScriptSourcePath, undefined, sourceMap); const cliCircuit = await getPrecompiledSource(simpleScriptExpectedArtifact); if (!('program' in wasmCircuit)) { @@ -49,9 +49,8 @@ describe('noir wasm compilation', () => { }); it('matching nargos compilation', async () => { - const wasmCircuit = compile( + const wasmCircuit = compile_program( 'script/main.nr', - false, { root_dependencies: ['lib_a'], library_dependencies: { @@ -123,9 +122,8 @@ describe('noir wasm compilation', () => { }).timeout(60 * 20e3); it('matching nargos compilation - context-implementation-compile-api', async () => { - const wasmCircuit = await compile_( + const wasmCircuit = await compile_program_( 'script/main.nr', - false, { root_dependencies: ['lib_a'], library_dependencies: { From 176fab42970ff0a9797b7f8c7ce53817e7d85b90 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Mon, 26 Feb 2024 14:05:41 +0000 Subject: [PATCH 35/45] chore(ci): prevent msrv checks from blocking PRs (#4414) # Description ## 
Problem\* Resolves ## Summary\* Currently the MSRV check CI runs on every PR, so if one of our dependencies breaks us, all merges halt until we fix this. This is unnecessary as we only need to stop publishing releases; normal development work can continue. This PR switches this workflow to instead run only on master and on a nightly schedule. If the workflow fails then an issue will be raised. ## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. Co-authored-by: kevaundray --- .github/ACVM_NOT_PUBLISHABLE.md | 13 +++++++ .../workflows/test-rust-workspace-msrv.yml | 37 +++++++++++++------ .github/workflows/test-rust-workspace.yml | 20 +++++----- cspell.json | 1 + 4 files changed, 49 insertions(+), 22 deletions(-) create mode 100644 .github/ACVM_NOT_PUBLISHABLE.md diff --git a/.github/ACVM_NOT_PUBLISHABLE.md b/.github/ACVM_NOT_PUBLISHABLE.md new file mode 100644 index 00000000000..e7eacb3b523 --- /dev/null +++ b/.github/ACVM_NOT_PUBLISHABLE.md @@ -0,0 +1,13 @@ +--- +title: "ACVM crates are not publishable" +assignees: TomAFrench kevaundray savio-sou +--- + + +The ACVM crates are currently unpublishable; making a release will NOT push our crates to crates.io. + +This is likely due to a crate we depend on bumping its MSRV above our own. Our lockfile is not taken into account when publishing to crates.io (as people downloading our crate don't use it), so we need to be able to build with the most up-to-date versions of our dependencies (including transitive dependencies) that we specify. + +Check the [MSRV check]({{env.WORKFLOW_URL}}) workflow for details. + +This issue was raised by the workflow `{{env.WORKFLOW_NAME}}` diff --git a/.github/workflows/test-rust-workspace-msrv.yml b/.github/workflows/test-rust-workspace-msrv.yml index 061fc65ca8b..0b2855fa834 100644 --- a/.github/workflows/test-rust-workspace-msrv.yml +++ b/.github/workflows/test-rust-workspace-msrv.yml @@ -6,8 +6,9 @@ name: Test (MSRV check) # We must then always be able to build the workspace using the latest versions of all of our dependencies, so we explicitly update them and build in this workflow. on: - pull_request: - merge_group: + schedule: + # Run a nightly check at 2 AM UTC + - cron: "0 2 * * *" push: branches: - master @@ -100,13 +101,25 @@ jobs: - run-tests steps: - - name: Report overall success - run: | - if [[ $FAIL == true ]]; then - exit 1 - else - exit 0 - fi - env: - # We treat any cancelled, skipped or failing jobs as a failure for the workflow as a whole. - FAIL: ${{ contains(needs.*.result, 'failure') || contains(needs.*.result, 'cancelled') || contains(needs.*.result, 'skipped') }} + - name: Report overall success + run: | + if [[ $FAIL == true ]]; then + exit 1 + else + exit 0 + fi + env: + # We treat any cancelled, skipped or failing jobs as a failure for the workflow as a whole. 
+ FAIL: ${{ contains(needs.*.result, 'failure') || contains(needs.*.result, 'cancelled') || contains(needs.*.result, 'skipped') }} + + # Raise an issue if the tests failed + - name: Alert on failed MSRV check + uses: JasonEtco/create-an-issue@v2 + if: ${{ failure() }} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + WORKFLOW_NAME: ${{ github.workflow }} + WORKFLOW_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + with: + update_existing: true + filename: .github/ACVM_NOT_PUBLISHABLE.md \ No newline at end of file diff --git a/.github/workflows/test-rust-workspace.yml b/.github/workflows/test-rust-workspace.yml index c12dcaba0ba..22684de3044 100644 --- a/.github/workflows/test-rust-workspace.yml +++ b/.github/workflows/test-rust-workspace.yml @@ -88,13 +88,13 @@ jobs: - run-tests steps: - - name: Report overall success - run: | - if [[ $FAIL == true ]]; then - exit 1 - else - exit 0 - fi - env: - # We treat any cancelled, skipped or failing jobs as a failure for the workflow as a whole. - FAIL: ${{ contains(needs.*.result, 'failure') || contains(needs.*.result, 'cancelled') || contains(needs.*.result, 'skipped') }} + - name: Report overall success + run: | + if [[ $FAIL == true ]]; then + exit 1 + else + exit 0 + fi + env: + # We treat any cancelled, skipped or failing jobs as a failure for the workflow as a whole. + FAIL: ${{ contains(needs.*.result, 'failure') || contains(needs.*.result, 'cancelled') || contains(needs.*.result, 'skipped') }} diff --git a/cspell.json b/cspell.json index be6b7c5c7e8..23659b39c68 100644 --- a/cspell.json +++ b/cspell.json @@ -118,6 +118,7 @@ "monomorphizes", "monomorphizing", "montcurve", + "MSRV", "nand", "nargo", "neovim", From ab25b5ed3cd17e3f53c5cc873571fe4c08bad35d Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Mon, 26 Feb 2024 14:40:31 +0000 Subject: [PATCH 36/45] chore: remove duplicate `parse_all` function in wasm compiler (#4411) # Description ## Problem\* Resolves ## Summary\* This removes a function which exists in `nargo` from being defined again in `noir_wasm`. ## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings.
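For context, the helper being deduplicated is a one-liner. Below is a sketch of the shared function, reconstructed from the copy removed from `compiler/wasm/src/compile.rs` in the diff that follows; the version exported by the `nargo` crate is assumed to be identical.

```rust
use fm::FileManager;
use noirc_frontend::hir::{def_map::parse_file, ParsedFiles};

// Parse every file tracked by the file manager, keyed by its file id.
// `noir_wasm` now imports this from `nargo` instead of redefining it.
pub fn parse_all(fm: &FileManager) -> ParsedFiles {
    fm.as_file_map().all_file_ids().map(|&file_id| (file_id, parse_file(fm, file_id))).collect()
}
```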
--- compiler/wasm/src/compile.rs | 20 +++++++++----------- compiler/wasm/src/compile_new.rs | 6 ++++-- 2 files changed, 13 insertions(+), 13 deletions(-) diff --git a/compiler/wasm/src/compile.rs b/compiler/wasm/src/compile.rs index ca6c8efedb1..9e6fca1126e 100644 --- a/compiler/wasm/src/compile.rs +++ b/compiler/wasm/src/compile.rs @@ -1,9 +1,12 @@ use fm::FileManager; use gloo_utils::format::JsValueSerdeExt; use js_sys::{JsString, Object}; -use nargo::artifacts::{ - contract::{ContractArtifact, ContractFunctionArtifact}, - program::ProgramArtifact, +use nargo::{ + artifacts::{ + contract::{ContractArtifact, ContractFunctionArtifact}, + program::ProgramArtifact, + }, + parse_all, }; use noirc_driver::{ add_dep, file_manager_with_stdlib, prepare_crate, prepare_dependency, CompileOptions, @@ -12,7 +15,7 @@ use noirc_driver::{ use noirc_evaluator::errors::SsaReport; use noirc_frontend::{ graph::{CrateId, CrateName}, - hir::{def_map::parse_file, Context, ParsedFiles}, + hir::Context, }; use serde::Deserialize; use std::{collections::HashMap, path::Path}; @@ -152,10 +155,6 @@ impl PathToFileSourceMap { } } -pub(crate) fn parse_all(fm: &FileManager) -> ParsedFiles { - fm.as_file_map().all_file_ids().map(|&file_id| (file_id, parse_file(fm, file_id))).collect() -} - #[wasm_bindgen] pub fn compile_program( entry_point: String, @@ -309,14 +308,13 @@ fn add_noir_lib(context: &mut Context, library_name: &CrateName) -> CrateId { #[cfg(test)] mod test { + use nargo::parse_all; use noirc_driver::prepare_crate; use noirc_frontend::{graph::CrateName, hir::Context}; use crate::compile::PathToFileSourceMap; - use super::{ - file_manager_with_source_map, parse_all, process_dependency_graph, DependencyGraph, - }; + use super::{file_manager_with_source_map, process_dependency_graph, DependencyGraph}; use std::{collections::HashMap, path::Path}; fn setup_test_context(source_map: PathToFileSourceMap) -> Context<'static, 'static> { diff --git a/compiler/wasm/src/compile_new.rs b/compiler/wasm/src/compile_new.rs index 2a5f7ab6545..d6b382f669f 100644 --- a/compiler/wasm/src/compile_new.rs +++ b/compiler/wasm/src/compile_new.rs @@ -1,9 +1,10 @@ use crate::compile::{ - file_manager_with_source_map, parse_all, JsCompileContractResult, JsCompileProgramResult, + file_manager_with_source_map, JsCompileContractResult, JsCompileProgramResult, PathToFileSourceMap, }; use crate::errors::{CompileError, JsCompileError}; use nargo::artifacts::contract::{ContractArtifact, ContractFunctionArtifact}; +use nargo::parse_all; use noirc_driver::{ add_dep, compile_contract, compile_main, prepare_crate, prepare_dependency, CompileOptions, NOIR_ARTIFACT_VERSION_STRING, @@ -265,10 +266,11 @@ fn prepare_compiler_context( #[cfg(test)] mod test { + use nargo::parse_all; use noirc_driver::prepare_crate; use noirc_frontend::hir::Context; - use crate::compile::{file_manager_with_source_map, parse_all, PathToFileSourceMap}; + use crate::compile::{file_manager_with_source_map, PathToFileSourceMap}; use std::path::Path; From cd796dea4937dd1a261f154e5f2e599bbc649165 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Mon, 26 Feb 2024 15:22:01 +0000 Subject: [PATCH 37/45] fix: correct formatting for databus visibility types (#4423) # Description ## Problem\* Resolves ## Summary\* This PR fixes an issue uncovered by #4422 where we're not properly formatting databus visibility modifiers. I've fixed this and added a new test case for regressions. 
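The expected formatter output is pinned down by the new `databus.nr` test case: exactly one space after a non-private visibility modifier, in both parameter and return position. The snippet below is copied verbatim from the added test file (see the diff that follows), so it shows the formatter's expected output rather than any new syntax:

```noir
fn main(x: pub u8, y: call_data u8) -> return_data u32 {}
```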
## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. --- tooling/nargo_fmt/src/utils.rs | 14 +++++++++++++- tooling/nargo_fmt/src/visitor/item.rs | 14 ++++++++++---- tooling/nargo_fmt/tests/expected/databus.nr | 2 ++ tooling/nargo_fmt/tests/input/databus.nr | 2 ++ 4 files changed, 27 insertions(+), 5 deletions(-) create mode 100644 tooling/nargo_fmt/tests/expected/databus.nr create mode 100644 tooling/nargo_fmt/tests/input/databus.nr diff --git a/tooling/nargo_fmt/src/utils.rs b/tooling/nargo_fmt/src/utils.rs index 5874ebdebbc..94969d45e81 100644 --- a/tooling/nargo_fmt/src/utils.rs +++ b/tooling/nargo_fmt/src/utils.rs @@ -1,3 +1,5 @@ +use std::borrow::Cow; + use crate::items::HasItem; use crate::rewrite; use crate::visitor::{FmtVisitor, Shape}; @@ -143,7 +145,7 @@ impl HasItem for Param { fn format(self, visitor: &FmtVisitor, shape: Shape) -> String { let pattern = visitor.slice(self.pattern.span()); let visibility = match self.visibility { - Visibility::Public => "pub ", + Visibility::Public => "pub", Visibility::Private => "", Visibility::DataBus => "call_data", }; @@ -152,6 +154,7 @@ impl HasItem for Param { pattern.to_string() } else { let ty = rewrite::typ(visitor, shape, self.typ); + let visibility = append_space_if_nonempty(visibility.into()); format!("{pattern}: {visibility}{ty}") } } @@ -183,6 +186,15 @@ pub(crate) fn last_line_contains_single_line_comment(s: &str) -> bool { s.lines().last().map_or(false, |line| line.contains("//")) } +pub(crate) fn append_space_if_nonempty(mut string: Cow) -> Cow { + if !string.is_empty() { + let inner = string.to_mut(); + inner.push(' '); + } + + string +} + pub(crate) fn last_line_used_width(s: &str, offset: usize) -> usize { if s.contains('\n') { last_line_width(s) diff --git a/tooling/nargo_fmt/src/visitor/item.rs b/tooling/nargo_fmt/src/visitor/item.rs index 1825a6e05b0..28aad3c551f 100644 --- a/tooling/nargo_fmt/src/visitor/item.rs +++ b/tooling/nargo_fmt/src/visitor/item.rs @@ -7,7 +7,10 @@ use noirc_frontend::{ use crate::{ rewrite::{self, UseTree}, - utils::{last_line_contains_single_line_comment, last_line_used_width, FindToken}, + utils::{ + append_space_if_nonempty, last_line_contains_single_line_comment, last_line_used_width, + FindToken, + }, visitor::expr::{format_seq, NewlineMode}, }; @@ -119,9 +122,12 @@ impl super::FmtVisitor<'_> { result.push_str("distinct "); } - if let Visibility::Public = func.def.return_visibility { - result.push_str("pub "); - } + let visibility = match func.def.return_visibility { + Visibility::Public => "pub", + Visibility::DataBus => "return_data", + Visibility::Private => "", + }; + result.push_str(&append_space_if_nonempty(visibility.into())); let typ = rewrite::typ(self, self.shape(), func.return_type()); result.push_str(&typ); diff --git a/tooling/nargo_fmt/tests/expected/databus.nr b/tooling/nargo_fmt/tests/expected/databus.nr new file mode 100644 index 00000000000..60934b60b2f --- /dev/null +++ b/tooling/nargo_fmt/tests/expected/databus.nr @@ -0,0 +1,2 @@ +fn main(x: pub u8, y: call_data u8) -> return_data u32 {} + diff --git a/tooling/nargo_fmt/tests/input/databus.nr b/tooling/nargo_fmt/tests/input/databus.nr new file mode 100644 index 00000000000..60934b60b2f --- /dev/null +++ 
b/tooling/nargo_fmt/tests/input/databus.nr @@ -0,0 +1,2 @@ +fn main(x: pub u8, y: call_data u8) -> return_data u32 {} + From 15c5618c6d15af527287d21ac74eb07cd2b98c14 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Mon, 26 Feb 2024 17:06:54 +0000 Subject: [PATCH 38/45] chore(ci): enforce formatting of noir code in CI (#4422) # Description ## Problem\* Resolves ## Summary\* We currently format everything in the repository except our noir source code. If we enforce this internally then we'll uncover issues in the formatter earlier and provide a good example of what Noir source should look like. We then now run `nargo fmt --check` on the stdlib and `test_programs/execution_success` ## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. --- .github/workflows/formatting.yml | 65 +++++++++ noir_stdlib/src/array.nr | 23 ++-- noir_stdlib/src/bigint.nr | 16 +-- noir_stdlib/src/collections/bounded_vec.nr | 2 +- noir_stdlib/src/collections/vec.nr | 8 +- noir_stdlib/src/ec/montcurve.nr | 89 ++++++------ noir_stdlib/src/ec/swcurve.nr | 127 +++++++++--------- noir_stdlib/src/ec/tecurve.nr | 122 ++++++++--------- noir_stdlib/src/ecdsa_secp256k1.nr | 2 +- noir_stdlib/src/ecdsa_secp256r1.nr | 2 +- noir_stdlib/src/field.nr | 11 +- noir_stdlib/src/option.nr | 24 +--- noir_stdlib/src/scalar_mul.nr | 2 +- noir_stdlib/src/schnorr.nr | 2 +- noir_stdlib/src/slice.nr | 12 +- noir_stdlib/src/string.nr | 2 +- noir_stdlib/src/test.nr | 4 +- noir_stdlib/src/uint128.nr | 65 ++++----- test_programs/.gitignore | 3 +- .../closure_explicit_types/src/main.nr | 4 +- .../conditional_regression_579/src/main.nr | 4 +- .../reexports/src/main.nr | 4 +- .../specialization/src/main.nr | 8 +- .../1327_concrete_in_generic/src/main.nr | 28 ++-- .../array_dynamic/src/main.nr | 4 +- .../array_dynamic_blackbox_input/src/main.nr | 2 +- .../array_dynamic_main_output/src/main.nr | 2 +- .../assert_statement_recursive/src/main.nr | 2 +- .../execution_success/bigint/src/main.nr | 10 +- .../execution_success/brillig_cow/src/main.nr | 27 ++-- .../brillig_cow_regression/src/main.nr | 74 +++++----- .../brillig_fns_as_values/src/main.nr | 2 +- .../conditional_regression_661/src/main.nr | 4 +- .../execution_success/databus/src/main.nr | 8 +- .../execution_success/debug_logs/src/main.nr | 6 +- .../distinct_keyword/src/main.nr | 2 +- .../ecdsa_secp256k1/src/main.nr | 10 +- .../ecdsa_secp256r1/src/main.nr | 2 +- .../main_bool_arg/src/main.nr | 2 +- .../operator_overloading/src/main.nr | 6 +- .../regression_3394/src/main.nr | 2 +- .../regression_3607/src/main.nr | 2 +- .../regression_3889/src/main.nr | 1 - .../side_effects_constrain_array/src/main.nr | 4 +- .../execution_success/struct/src/main.nr | 4 +- test_programs/format.sh | 47 +++++++ 46 files changed, 455 insertions(+), 397 deletions(-) create mode 100755 test_programs/format.sh diff --git a/.github/workflows/formatting.yml b/.github/workflows/formatting.yml index 43fd6daa91d..279e90f5f6f 100644 --- a/.github/workflows/formatting.yml +++ b/.github/workflows/formatting.yml @@ -63,3 +63,68 @@ jobs: - name: Run `yarn lint` run: yarn lint + + build-nargo: + runs-on: ubuntu-22.04 + timeout-minutes: 30 + + steps: + - name: Checkout Noir repo + 
uses: actions/checkout@v4 + + - name: Setup toolchain + uses: dtolnay/rust-toolchain@1.73.0 + + - uses: Swatinem/rust-cache@v2 + with: + key: x86_64-unknown-linux-gnu + cache-on-failure: true + save-if: ${{ github.event_name != 'merge_group' }} + + - name: Build Nargo + run: cargo build --package nargo_cli --release + + - name: Package artifacts + run: | + mkdir dist + cp ./target/release/nargo ./dist/nargo + 7z a -ttar -so -an ./dist/* | 7z a -si ./nargo-x86_64-unknown-linux-gnu.tar.gz + + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + name: nargo + path: ./dist/* + retention-days: 3 + + nargo_fmt: + needs: [build-nargo] + name: Nargo fmt + runs-on: ubuntu-latest + timeout-minutes: 30 + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Download nargo binary + uses: actions/download-artifact@v4 + with: + name: nargo + path: ./nargo + + - name: Set nargo on PATH + run: | + nargo_binary="${{ github.workspace }}/nargo/nargo" + chmod +x $nargo_binary + echo "$(dirname $nargo_binary)" >> $GITHUB_PATH + export PATH="$PATH:$(dirname $nargo_binary)" + nargo -V + + - name: Format stdlib + working-directory: ./noir_stdlib + run: nargo fmt --check + + - name: Format test suite + working-directory: ./test_programs + run: ./format.sh check diff --git a/noir_stdlib/src/array.nr b/noir_stdlib/src/array.nr index 7871b1a6f9a..3da4b649174 100644 --- a/noir_stdlib/src/array.nr +++ b/noir_stdlib/src/array.nr @@ -17,14 +17,14 @@ impl [T; N] { for i in 0..N { let pos = find_index(sorted_index, i); assert(sorted_index[pos] == i); - } + } // Sort the array using the indexes - for i in 0..N { + for i in 0..N { result[i] = self[sorted_index[i]]; - } + } // Ensure the array is sorted - for i in 0..N-1 { - assert(ordering(result[i], result[i+1])); + for i in 0..N - 1 { + assert(ordering(result[i], result[i + 1])); } result @@ -32,12 +32,12 @@ impl [T; N] { /// Returns the index of the elements in the array that would sort it, using the provided custom sorting function. unconstrained fn get_sorting_index(self, ordering: fn[Env](T, T) -> bool) -> [u64; N] { - let mut result = [0;N]; + let mut result = [0; N]; let mut a = self; for i in 0..N { result[i] = i; } - for i in 1 .. N { + for i in 1..N { for j in 0..i { if ordering(a[i], a[j]) { let old_a_j = a[j]; @@ -45,14 +45,13 @@ impl [T; N] { a[i] = old_a_j; let old_j = result[j]; result[j] = result[i]; - result[i] = old_j; + result[i] = old_j; } } } result } - // Converts an array into a slice. pub fn as_slice(self) -> [T] { let mut slice = []; @@ -68,7 +67,7 @@ impl [T; N] { let first_elem = f(self[0]); let mut ret = [first_elem; N]; - for i in 1 .. self.len() { + for i in 1..self.len() { ret[i] = f(self[i]); } @@ -90,7 +89,7 @@ impl [T; N] { // element of the given array as its starting accumulator value. pub fn reduce(self, f: fn[Env](T, T) -> T) -> T { let mut accumulator = self[0]; - for i in 1 .. 
self.len() { + for i in 1..self.len() { accumulator = f(accumulator, self[i]); } accumulator @@ -122,7 +121,7 @@ unconstrained fn find_index(a: [u64; N], find: u64) -> u64 { for i in 0..a.len() { if a[i] == find { result = i; - } + } } result } diff --git a/noir_stdlib/src/bigint.nr b/noir_stdlib/src/bigint.nr index 11026651207..66e81f05812 100644 --- a/noir_stdlib/src/bigint.nr +++ b/noir_stdlib/src/bigint.nr @@ -1,5 +1,4 @@ -use crate::ops::{Add, Sub, Mul, Div, Rem,}; - +use crate::ops::{Add, Sub, Mul, Div, Rem}; global bn254_fq = [0x47, 0xFD, 0x7C, 0xD8, 0x16, 0x8C, 0x20, 0x3C, 0x8d, 0xca, 0x71, 0x68, 0x91, 0x6a, 0x81, 0x97, 0x5d, 0x58, 0x81, 0x81, 0xb6, 0x45, 0x50, 0xb8, 0x29, 0xa0, 0x31, 0xe1, 0x72, 0x4e, 0x64, 0x30]; @@ -13,7 +12,6 @@ global secpr1_fq = [0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF]; global secpr1_fr = [0x51, 0x25, 0x63, 0xFC, 0xC2, 0xCA, 0xB9, 0xF3, 0x84, 0x9E, 0x17, 0xA7, 0xAD, 0xFA, 0xE6, 0xBC, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00,0xFF, 0xFF, 0xFF, 0xFF]; - struct BigInt { pointer: u32, @@ -22,17 +20,13 @@ struct BigInt { impl BigInt { #[builtin(bigint_add)] - fn bigint_add(self, other: BigInt) -> BigInt { - } + fn bigint_add(self, other: BigInt) -> BigInt {} #[builtin(bigint_sub)] - fn bigint_sub(self, other: BigInt) -> BigInt { - } + fn bigint_sub(self, other: BigInt) -> BigInt {} #[builtin(bigint_mul)] - fn bigint_mul(self, other: BigInt) -> BigInt { - } + fn bigint_mul(self, other: BigInt) -> BigInt {} #[builtin(bigint_div)] - fn bigint_div(self, other: BigInt) -> BigInt { - } + fn bigint_div(self, other: BigInt) -> BigInt {} #[builtin(bigint_from_le_bytes)] fn from_le_bytes(bytes: [u8], modulus: [u8]) -> BigInt {} #[builtin(bigint_to_le_bytes)] diff --git a/noir_stdlib/src/collections/bounded_vec.nr b/noir_stdlib/src/collections/bounded_vec.nr index a4aa4823f38..f78d86de77d 100644 --- a/noir_stdlib/src/collections/bounded_vec.nr +++ b/noir_stdlib/src/collections/bounded_vec.nr @@ -29,7 +29,7 @@ impl BoundedVec { self.len } - pub fn max_len(_self: BoundedVec) -> u64{ + pub fn max_len(_self: BoundedVec) -> u64 { MaxLen } diff --git a/noir_stdlib/src/collections/vec.nr b/noir_stdlib/src/collections/vec.nr index 2e7945be827..deec98185ff 100644 --- a/noir_stdlib/src/collections/vec.nr +++ b/noir_stdlib/src/collections/vec.nr @@ -19,12 +19,12 @@ impl Vec { /// points beyond the end of the vector. pub fn get(self, index: u64) -> T { self.slice[index] - } + } /// Push a new element to the end of the vector, returning a /// new vector with a length one greater than the /// original unmodified vector. - pub fn push(&mut self, elem: T) { + pub fn push(&mut self, elem: T) { self.slice = self.slice.push_back(elem); } @@ -32,7 +32,7 @@ impl Vec { /// a new vector with a length of one less than the given vector, /// as well as the popped element. /// Panics if the given vector's length is zero. 
- pub fn pop(&mut self) -> T { + pub fn pop(&mut self) -> T { let (popped_slice, last_elem) = self.slice.pop_back(); self.slice = popped_slice; last_elem @@ -42,7 +42,7 @@ impl Vec { /// after it to the right pub fn insert(&mut self, index: u64, elem: T) { self.slice = self.slice.insert(index, elem); - } + } /// Remove an element at a specified index, shifting all elements /// after it to the left, returning the removed element diff --git a/noir_stdlib/src/ec/montcurve.nr b/noir_stdlib/src/ec/montcurve.nr index 83a17bae322..7dc756781c0 100644 --- a/noir_stdlib/src/ec/montcurve.nr +++ b/noir_stdlib/src/ec/montcurve.nr @@ -31,7 +31,7 @@ mod affine { impl Point { // Point constructor pub fn new(x: Field, y: Field) -> Self { - Self {x, y, infty: false} + Self { x, y, infty: false } } // Check if zero @@ -45,30 +45,30 @@ mod affine { curvegroup::Point::zero() } else { let (x,y) = (self.x, self.y); - curvegroup::Point::new(x,y,1) + curvegroup::Point::new(x, y, 1) } } // Additive identity pub fn zero() -> Self { - Self {x: 0, y: 0, infty: true} + Self { x: 0, y: 0, infty: true } } // Negation fn negate(self) -> Self { let Self {x, y, infty} = self; - Self {x, y: 0-y, infty} + Self { x, y: 0 - y, infty } } // Map into equivalent Twisted Edwards curve fn into_tecurve(self) -> TEPoint { let Self {x, y, infty} = self; - - if infty | (y*(x+1) == 0) { + + if infty | (y * (x + 1) == 0) { TEPoint::zero() } else { - TEPoint::new(x/y, (x-1)/(x+1)) + TEPoint::new(x / y, (x - 1) / (x + 1)) } } } @@ -84,9 +84,9 @@ mod affine { pub fn new(j: Field, k: Field, gen: Point) -> Self { // Check curve coefficients assert(k != 0); - assert(j*j != 4); + assert(j * j != 4); - let curve = Self {j, k, gen}; + let curve = Self { j, k, gen }; // gen should be on the curve assert(curve.contains(curve.gen)); @@ -103,8 +103,8 @@ mod affine { pub fn contains(self, p: Point) -> bool { let Self {j, k, gen: _gen} = self; let Point {x, y, infty: infty} = p; - - infty | (k*y*y == x*(x*x + j*x + 1)) + + infty | (k * y * y == x * (x * x + j * x + 1)) } // Point addition @@ -122,7 +122,7 @@ mod affine { fn mul(self, n: Field, p: Point) -> Point { self.into_tecurve().mul(n, p.into_tecurve()).into_montcurve() } - + // Multi-scalar multiplication (n[0]*p[0] + ... + n[N]*p[N], where * denotes scalar multiplication) fn msm(self, n: [Field; N], p: [Point; N]) -> Point { let mut out = Point::zero(); @@ -142,15 +142,15 @@ mod affine { // Conversion to equivalent Twisted Edwards curve fn into_tecurve(self) -> TECurve { let Self {j, k, gen} = self; - TECurve::new((j+2)/k, (j-2)/k, gen.into_tecurve()) + TECurve::new((j + 2) / k, (j - 2) / k, gen.into_tecurve()) } // Conversion to equivalent Short Weierstraß curve pub fn into_swcurve(self) -> SWCurve { let j = self.j; let k = self.k; - let a0 = (3-j*j)/(3*k*k); - let b0 = (2*j*j*j - 9*j)/(27*k*k*k); + let a0 = (3 - j * j) / (3 * k * k); + let b0 = (2 * j * j * j - 9 * j) / (27 * k * k * k); SWCurve::new(a0, b0, self.map_into_swcurve(self.gen)) } @@ -160,8 +160,7 @@ mod affine { if p.is_zero() { SWPoint::zero() } else { - SWPoint::new((3*p.x + self.j)/(3*self.k), - p.y/self.k) + SWPoint::new((3 * p.x + self.j) / (3 * self.k), p.y / self.k) } } @@ -170,8 +169,8 @@ mod affine { let SWPoint {x, y, infty} = p; let j = self.j; let k = self.k; - - Point {x: (3*k*x - j)/3, y: y*k, infty} + + Point { x: (3 * k * x - j) / 3, y: y * k, infty } } // Elligator 2 map-to-curve method; see . 
@@ -179,18 +178,18 @@ mod affine { let j = self.j; let k = self.k; let z = ZETA; // Non-square Field element required for map - + // Check whether curve is admissible assert(j != 0); - let l = (j*j - 4)/(k*k); + let l = (j * j - 4) / (k * k); assert(l != 0); assert(is_square(l) == false); - let x1 = safe_inverse(1+z*u*u)*(0 - (j/k)); - - let gx1 = x1*x1*x1 + (j/k)*x1*x1 + x1/(k*k); - let x2 = 0 - x1 - (j/k); - let gx2 = x2*x2*x2 + (j/k)*x2*x2 + x2/(k*k); + let x1 = safe_inverse(1 + z * u * u) * (0 - (j / k)); + + let gx1 = x1 * x1 * x1 + (j / k) * x1 * x1 + x1 / (k * k); + let x2 = 0 - x1 - (j / k); + let gx2 = x2 * x2 * x2 + (j / k) * x2 * x2 + x2 / (k * k); let x = if is_square(gx1) { x1 } else { x2 }; @@ -202,13 +201,12 @@ mod affine { if y0.sgn0() == 0 { y0 } else { 0 - y0 } }; - Point::new(x*k, y*k) - + Point::new(x * k, y * k) } // SWU map-to-curve method (via rational map) fn swu_map(self, z: Field, u: Field) -> Point { - self.map_from_swcurve(self.into_swcurve().swu_map(z,u)) + self.map_from_swcurve(self.into_swcurve().swu_map(z, u)) } } } @@ -240,7 +238,7 @@ mod curvegroup { impl Point { // Point constructor pub fn new(x: Field, y: Field, z: Field) -> Self { - Self {x, y, z} + Self { x, y, z } } // Check if zero @@ -254,20 +252,20 @@ mod curvegroup { affine::Point::zero() } else { let (x,y,z) = (self.x, self.y, self.z); - affine::Point::new(x/z, y/z) + affine::Point::new(x / z, y / z) } } // Additive identity pub fn zero() -> Self { - Self {x: 0, y: 1,z: 0} + Self { x: 0, y: 1, z: 0 } } // Negation fn negate(self) -> Self { let Self {x, y, z} = self; - Point::new(x, 0-y, z) + Point::new(x, 0 - y, z) } // Map into equivalent Twisted Edwards curve @@ -287,9 +285,9 @@ mod curvegroup { pub fn new(j: Field, k: Field, gen: Point) -> Self { // Check curve coefficients assert(k != 0); - assert(j*j != 4); + assert(j * j != 4); - let curve = Self {j, k, gen}; + let curve = Self { j, k, gen }; // gen should be on the curve assert(curve.contains(curve.gen)); @@ -306,8 +304,8 @@ mod curvegroup { pub fn contains(self, p: Point) -> bool { let Self {j, k, gen: _gen} = self; let Point {x, y, z} = p; - - k*y*y*z == x*(x*x + j*x*z + z*z) + + k * y * y * z == x * (x * x + j * x * z + z * z) } // Point addition @@ -320,12 +318,12 @@ mod curvegroup { fn bit_mul(self, bits: [u1; N], p: Point) -> Point { self.into_tecurve().bit_mul(bits, p.into_tecurve()).into_montcurve() } - + // Scalar multiplication (p + ... + p n times) pub fn mul(self, n: Field, p: Point) -> Point { self.into_tecurve().mul(n, p.into_tecurve()).into_montcurve() } - + // Multi-scalar multiplication (n[0]*p[0] + ... 
+ n[N]*p[N], where * denotes scalar multiplication) fn msm(self, n: [Field; N], p: [Point; N]) -> Point { let mut out = Point::zero(); @@ -345,18 +343,17 @@ mod curvegroup { // Conversion to equivalent Twisted Edwards curve fn into_tecurve(self) -> TECurve { let Self {j, k, gen} = self; - TECurve::new((j+2)/k, (j-2)/k, gen.into_tecurve()) + TECurve::new((j + 2) / k, (j - 2) / k, gen.into_tecurve()) } // Conversion to equivalent Short Weierstraß curve fn into_swcurve(self) -> SWCurve { let j = self.j; let k = self.k; - let a0 = (3-j*j)/(3*k*k); - let b0 = (2*j*j*j - 9*j)/(27*k*k*k); + let a0 = (3 - j * j) / (3 * k * k); + let b0 = (2 * j * j * j - 9 * j) / (27 * k * k * k); - SWCurve::new(a0, b0, - self.map_into_swcurve(self.gen)) + SWCurve::new(a0, b0, self.map_into_swcurve(self.gen)) } // Point mapping into equivalent Short Weierstraß curve @@ -373,10 +370,10 @@ mod curvegroup { fn elligator2_map(self, u: Field) -> Point { self.into_affine().elligator2_map(u).into_group() } - + // SWU map-to-curve method (via rational map) fn swu_map(self, z: Field, u: Field) -> Point { - self.into_affine().swu_map(z,u).into_group() + self.into_affine().swu_map(z, u).into_group() } } } diff --git a/noir_stdlib/src/ec/swcurve.nr b/noir_stdlib/src/ec/swcurve.nr index e64f5a7be02..9dd324f3085 100644 --- a/noir_stdlib/src/ec/swcurve.nr +++ b/noir_stdlib/src/ec/swcurve.nr @@ -27,14 +27,14 @@ mod affine { impl Point { // Point constructor pub fn new(x: Field, y: Field) -> Self { - Self {x, y, infty: false} + Self { x, y, infty: false } } // Check if zero pub fn is_zero(self) -> bool { self.eq(Point::zero()) } - + // Conversion to CurveGroup coordinates fn into_group(self) -> curvegroup::Point { let Self {x, y, infty} = self; @@ -45,16 +45,16 @@ mod affine { curvegroup::Point::new(x, y, 1) } } - + // Additive identity pub fn zero() -> Self { - Self {x: 0, y: 0, infty: true} + Self { x: 0, y: 0, infty: true } } - + // Negation fn negate(self) -> Self { let Self {x, y, infty} = self; - Self {x, y: 0-y, infty} + Self { x, y: 0 - y, infty } } } @@ -72,8 +72,8 @@ mod affine { // Curve constructor pub fn new(a: Field, b: Field, gen: Point) -> Curve { // Check curve coefficients - assert(4*a*a*a + 27*b*b != 0); - + assert(4 * a * a * a + 27 * b * b != 0); + let curve = Curve { a, b, gen }; // gen should be on the curve @@ -85,16 +85,16 @@ mod affine { // Conversion to CurveGroup coordinates fn into_group(self) -> curvegroup::Curve { let Curve{a, b, gen} = self; - - curvegroup::Curve {a, b, gen: gen.into_group()} + + curvegroup::Curve { a, b, gen: gen.into_group() } } // Membership check pub fn contains(self, p: Point) -> bool { let Point {x, y, infty} = p; - infty | (y*y == x*x*x + self.a*x + self.b) + infty | (y * y == x * x * x + self.a * x + self.b) } - + // Point addition, implemented in terms of mixed addition for reasons of efficiency pub fn add(self, p1: Point, p2: Point) -> Point { self.mixed_add(p1, p2.into_group()).into_affine() @@ -109,9 +109,9 @@ mod affine { } else { let Point {x: x1, y: y1, infty: _inf} = p1; let curvegroup::Point {x: x2, y: y2, z: z2} = p2; - let you1 = x1*z2*z2; + let you1 = x1 * z2 * z2; let you2 = x2; - let s1 = y1*z2*z2*z2; + let s1 = y1 * z2 * z2 * z2; let s2 = y2; if you1 == you2 { @@ -120,15 +120,14 @@ mod affine { } else { self.into_group().double(p2) } - } else - { + } else { let h = you2 - you1; let r = s2 - s1; - let x3 = r*r - h*h*h - 2*you1*h*h; - let y3 = r*(you1*h*h - x3) - s1*h*h*h; - let z3 = h*z2; + let x3 = r * r - h * h * h - 2 * you1 * h * h; + let y3 = r * (you1 * h 
* h - x3) - s1 * h * h * h; + let z3 = h * z2; - curvegroup::Point::new(x3,y3,z3) + curvegroup::Point::new(x3, y3, z3) } } } @@ -138,7 +137,7 @@ mod affine { fn bit_mul(self, bits: [u1; N], p: Point) -> Point { self.into_group().bit_mul(bits, p.into_group()).into_affine() } - + // Scalar multiplication (p + ... + p n times) pub fn mul(self, n: Field, p: Point) -> Point { self.into_group().mul(n, p.into_group()).into_affine() @@ -165,17 +164,25 @@ mod affine { // where g(x) = x^3 + a*x + b. swu_map(c,z,.) then maps a Field element to a point on curve c. fn swu_map(self, z: Field, u: Field) -> Point { // Check whether curve is admissible - assert(self.a*self.b != 0); - + assert(self.a * self.b != 0); + let Curve {a, b, gen: _gen} = self; - - let tv1 = safe_inverse(z*z*u*u*u*u + u*u*z); - let x1 = if tv1 == 0 {b/(z*a)} else {(0-b/a)*(1 + tv1)}; - let gx1 = x1*x1*x1 + a*x1 + b; - let x2 = z*u*u*x1; - let gx2 = x2*x2*x2 + a*x2 + b; - let (x,y) = if is_square(gx1) {(x1, sqrt(gx1))} else {(x2, sqrt(gx2))}; - Point::new(x, if u.sgn0() != y.sgn0() {0-y} else {y}) + + let tv1 = safe_inverse(z * z * u * u * u * u + u * u * z); + let x1 = if tv1 == 0 { + b / (z * a) + } else { + (0 - b / a) * (1 + tv1) + }; + let gx1 = x1 * x1 * x1 + a * x1 + b; + let x2 = z * u * u * x1; + let gx2 = x2 * x2 * x2 + a * x2 + b; + let (x,y) = if is_square(gx1) { + (x1, sqrt(gx1)) + } else { + (x2, sqrt(gx2)) + }; + Point::new(x, if u.sgn0() != y.sgn0() { 0 - y } else { y }) } } } @@ -205,14 +212,14 @@ mod curvegroup { impl Point { // Point constructor pub fn new(x: Field, y: Field, z: Field) -> Self { - Self {x, y, z} + Self { x, y, z } } // Check if zero pub fn is_zero(self) -> bool { self.eq(Point::zero()) } - + // Conversion to affine coordinates pub fn into_affine(self) -> affine::Point { let Self {x, y, z} = self; @@ -220,20 +227,19 @@ mod curvegroup { if z == 0 { affine::Point::zero() } else { - affine::Point::new(x/(z*z), y/(z*z*z)) + affine::Point::new(x / (z * z), y / (z * z * z)) } } // Additive identity pub fn zero() -> Self { - Self {x: 0, y: 0, z: 0} + Self { x: 0, y: 0, z: 0 } } - - + // Negation fn negate(self) -> Self { let Self {x, y, z} = self; - Self {x, y: 0-y, z} + Self { x, y: 0 - y, z } } } @@ -250,8 +256,8 @@ mod curvegroup { // Curve constructor pub fn new(a: Field, b: Field, gen: Point) -> Curve { // Check curve coefficients - assert(4*a*a*a + 27*b*b != 0); - + assert(4 * a * a * a + 27 * b * b != 0); + let curve = Curve { a, b, gen }; // gen should be on the curve @@ -264,7 +270,7 @@ mod curvegroup { pub fn into_affine(self) -> affine::Curve { let Curve{a, b, gen} = self; - affine::Curve {a, b, gen: gen.into_affine()} + affine::Curve { a, b, gen: gen.into_affine() } } // Membership check @@ -273,13 +279,12 @@ mod curvegroup { if z == 0 { true } else { - y*y == x*x*x + self.a*x*z*z*z*z + self.b*z*z*z*z*z*z + y * y == x * x * x + self.a * x * z * z * z * z + self.b * z * z * z * z * z * z } } - + // Addition pub fn add(self, p1: Point, p2: Point) -> Point { - if p1.is_zero() { p2 } else if p2.is_zero() { @@ -287,10 +292,10 @@ mod curvegroup { } else { let Point {x: x1, y: y1, z: z1} = p1; let Point {x: x2, y: y2, z: z2} = p2; - let you1 = x1*z2*z2; - let you2 = x2*z1*z1; - let s1 = y1*z2*z2*z2; - let s2 = y2*z1*z1*z1; + let you1 = x1 * z2 * z2; + let you2 = x2 * z1 * z1; + let s1 = y1 * z2 * z2 * z2; + let s2 = y2 * z1 * z1 * z1; if you1 == you2 { if s1 != s2 { @@ -301,11 +306,11 @@ mod curvegroup { } else { let h = you2 - you1; let r = s2 - s1; - let x3 = r*r - h*h*h - 2*you1*h*h; - let y3 = 
r*(you1*h*h - x3) - s1*h*h*h; - let z3 = h*z1*z2; + let x3 = r * r - h * h * h - 2 * you1 * h * h; + let y3 = r * (you1 * h * h - x3) - s1 * h * h * h; + let z3 = h * z1 * z2; - Point::new(x3,y3,z3) + Point::new(x3, y3, z3) } } } @@ -313,19 +318,19 @@ mod curvegroup { // Point doubling pub fn double(self, p: Point) -> Point { let Point {x, y, z} = p; - + if p.is_zero() { p } else if y == 0 { Point::zero() } else { - let s = 4*x*y*y; - let m = 3*x*x + self.a*z*z*z*z; - let x0 = m*m - 2*s; - let y0 = m*(s-x0) - 8*y*y*y*y; - let z0 = 2*y*z; + let s = 4 * x * y * y; + let m = 3 * x * x + self.a * z * z * z * z; + let x0 = m * m - 2 * s; + let y0 = m * (s - x0) - 8 * y * y * y * y; + let z0 = 2 * y * z; - Point::new(x0,y0,z0) + Point::new(x0, y0, z0) } } @@ -351,7 +356,7 @@ mod curvegroup { let mut n_as_bits: [u1; 254] = [0; 254]; let tmp = n.to_le_bits(N_BITS as u32); for i in 0..254 { - n_as_bits[i] = tmp[i]; + n_as_bits[i] = tmp[i]; } self.bit_mul(n_as_bits, p) @@ -375,7 +380,7 @@ mod curvegroup { // Simplified SWU map-to-curve method fn swu_map(self, z: Field, u: Field) -> Point { - self.into_affine().swu_map(z,u).into_group() + self.into_affine().swu_map(z, u).into_group() } } } diff --git a/noir_stdlib/src/ec/tecurve.nr b/noir_stdlib/src/ec/tecurve.nr index 5333ece4c4a..506fe89313a 100644 --- a/noir_stdlib/src/ec/tecurve.nr +++ b/noir_stdlib/src/ec/tecurve.nr @@ -40,18 +40,18 @@ mod affine { fn into_group(self) -> curvegroup::Point { let Self {x, y} = self; - curvegroup::Point::new(x, y, x*y, 1) + curvegroup::Point::new(x, y, x * y, 1) } // Additive identity pub fn zero() -> Self { - Point::new(0,1) + Point::new(0, 1) } // Negation fn negate(self) -> Self { let Self {x, y} = self; - Point::new(0-x, y) + Point::new(0 - x, y) } // Map into prime-order subgroup of equivalent Montgomery curve @@ -60,10 +60,10 @@ mod affine { MPoint::zero() } else { let Self {x, y} = self; - let x0 = (1+y)/(1-y); - let y0 = (1+y)/(x*(1-y)); + let x0 = (1 + y) / (1 - y); + let y0 = (1 + y) / (x * (1 - y)); - MPoint::new(x0,y0) + MPoint::new(x0, y0) } } } @@ -81,9 +81,9 @@ mod affine { // Curve constructor pub fn new(a: Field, d: Field, gen: Point) -> Curve { // Check curve coefficients - assert(a*d*(a-d) != 0); - - let curve = Curve {a, d, gen}; + assert(a * d * (a - d) != 0); + + let curve = Curve { a, d, gen }; // gen should be on the curve assert(curve.contains(curve.gen)); @@ -95,15 +95,15 @@ mod affine { fn into_group(self) -> curvegroup::Curve { let Curve{a, d, gen} = self; - curvegroup::Curve {a, d, gen: gen.into_group()} + curvegroup::Curve { a, d, gen: gen.into_group() } } - + // Membership check pub fn contains(self, p: Point) -> bool { let Point {x, y} = p; - self.a*x*x + y*y == 1 + self.d*x*x*y*y + self.a * x * x + y * y == 1 + self.d * x * x * y * y } - + // Point addition, implemented in terms of mixed addition for reasons of efficiency pub fn add(self, p1: Point, p2: Point) -> Point { self.mixed_add(p1, p2.into_group()).into_affine() @@ -114,20 +114,20 @@ mod affine { let Point{x: x1, y: y1} = p1; let curvegroup::Point{x: x2, y: y2, t: t2, z: z2} = p2; - let a = x1*x2; - let b = y1*y2; - let c = self.d*x1*y1*t2; - let e = (x1 + y1)*(x2 + y2) - a - b; + let a = x1 * x2; + let b = y1 * y2; + let c = self.d * x1 * y1 * t2; + let e = (x1 + y1) * (x2 + y2) - a - b; let f = z2 - c; let g = z2 + c; - let h = b - self.a*a; + let h = b - self.a * a; - let x = e*f; - let y = g*h; - let t = e*h; - let z = f*g; + let x = e * f; + let y = g * h; + let t = e * h; + let z = f * g; - 
curvegroup::Point::new(x,y,t,z) + curvegroup::Point::new(x, y, t, z) } // Scalar multiplication with scalar represented by a bit array (little-endian convention). @@ -135,7 +135,7 @@ mod affine { fn bit_mul(self, bits: [u1; N], p: Point) -> Point { self.into_group().bit_mul(bits, p.into_group()).into_affine() } - + // Scalar multiplication (p + ... + p n times) fn mul(self, n: Field, p: Point) -> Point { self.into_group().mul(n, p.into_group()).into_affine() @@ -159,10 +159,10 @@ mod affine { // Conversion to equivalent Montgomery curve pub fn into_montcurve(self) -> MCurve { - let j = 2*(self.a + self.d)/(self.a - self.d); - let k = 4/(self.a - self.d); + let j = 2 * (self.a + self.d) / (self.a - self.d); + let k = 4 / (self.a - self.d); let gen_montcurve = self.gen.into_montcurve(); - + MCurve::new(j, k, gen_montcurve) } @@ -188,7 +188,7 @@ mod affine { // Simplified SWU map-to-curve method (via rational map) fn swu_map(self, z: Field, u: Field) -> Point { - self.into_montcurve().swu_map(z,u).into_tecurve() + self.into_montcurve().swu_map(z, u).into_tecurve() } } } @@ -222,7 +222,7 @@ mod curvegroup { impl Point { // Point constructor pub fn new(x: Field, y: Field, t: Field, z: Field) -> Self { - Self {x, y, t, z} + Self { x, y, t, z } } // Check if zero @@ -235,19 +235,19 @@ mod curvegroup { pub fn into_affine(self) -> affine::Point { let Self {x, y, t: _t, z} = self; - affine::Point::new(x/z, y/z) + affine::Point::new(x / z, y / z) } // Additive identity pub fn zero() -> Self { - Point::new(0,1,0,1) + Point::new(0, 1, 0, 1) } // Negation fn negate(self) -> Self { let Self {x, y, t, z} = self; - Point::new(0-x, y, 0-t, z) + Point::new(0 - x, y, 0 - t, z) } // Map into prime-order subgroup of equivalent Montgomery curve @@ -269,8 +269,8 @@ mod curvegroup { // Curve constructor pub fn new(a: Field, d: Field, gen: Point) -> Curve { // Check curve coefficients - assert(a*d*(a-d) != 0); - + assert(a * d * (a - d) != 0); + let curve = Curve { a, d, gen }; // gen should be on the curve @@ -283,14 +283,16 @@ mod curvegroup { pub fn into_affine(self) -> affine::Curve { let Curve{a, d, gen} = self; - affine::Curve {a, d, gen: gen.into_affine()} + affine::Curve { a, d, gen: gen.into_affine() } } // Membership check pub fn contains(self, p: Point) -> bool { let Point {x, y, t, z} = p; - (z != 0) & (z*t == x*y) & (z*z*(self.a*x*x + y*y) == z*z*z*z + self.d*x*x*y*y) + (z != 0) + & (z * t == x * y) + & (z * z * (self.a * x * x + y * y) == z * z * z * z + self.d * x * x * y * y) } // Point addition @@ -298,40 +300,40 @@ mod curvegroup { let Point{x: x1, y: y1, t: t1, z: z1} = p1; let Point{x: x2, y: y2, t: t2, z: z2} = p2; - let a = x1*x2; - let b = y1*y2; - let c = self.d*t1*t2; - let d = z1*z2; - let e = (x1 + y1)*(x2 + y2) - a - b; + let a = x1 * x2; + let b = y1 * y2; + let c = self.d * t1 * t2; + let d = z1 * z2; + let e = (x1 + y1) * (x2 + y2) - a - b; let f = d - c; let g = d + c; - let h = b - self.a*a; + let h = b - self.a * a; - let x = e*f; - let y = g*h; - let t = e*h; - let z = f*g; + let x = e * f; + let y = g * h; + let t = e * h; + let z = f * g; - Point::new(x,y,t,z) + Point::new(x, y, t, z) } // Point doubling, cf. 
§3.3 pub fn double(self, p: Point) -> Point { let Point{x, y, t: _t, z} = p; - let a = x*x; - let b = y*y; - let c = 2*z*z; - let d = self.a*a; - let e = (x + y)*(x + y) - a - b; + let a = x * x; + let b = y * y; + let c = 2 * z * z; + let d = self.a * a; + let e = (x + y) * (x + y) - a - b; let g = d + b; let f = g - c; let h = d - b; - let x0 = e*f; - let y0 = g*h; - let t0 = e*h; - let z0 = f*g; + let x0 = e * f; + let y0 = g * h; + let t0 = e * h; + let z0 = f * g; Point::new(x0, y0, t0, z0) } @@ -340,7 +342,7 @@ mod curvegroup { // If k is the natural number represented by `bits`, then this computes p + ... + p k times. fn bit_mul(self, bits: [u1; N], p: Point) -> Point { let mut out = Point::zero(); - + for i in 0..N { out = self.add( self.add(out, out), @@ -349,7 +351,7 @@ mod curvegroup { out } - + // Scalar multiplication (p + ... + p n times) pub fn mul(self, n: Field, p: Point) -> Point { let N_BITS = crate::field::modulus_num_bits(); @@ -358,7 +360,7 @@ mod curvegroup { let mut n_as_bits: [u1; 254] = [0; 254]; let tmp = n.to_le_bits(N_BITS as u32); for i in 0..254 { - n_as_bits[i] = tmp[i]; + n_as_bits[i] = tmp[i]; } self.bit_mul(n_as_bits, p) @@ -407,7 +409,7 @@ mod curvegroup { // Simplified SWU map-to-curve method (via rational map) fn swu_map(self, z: Field, u: Field) -> Point { - self.into_montcurve().swu_map(z,u).into_tecurve() + self.into_montcurve().swu_map(z, u).into_tecurve() } } } diff --git a/noir_stdlib/src/ecdsa_secp256k1.nr b/noir_stdlib/src/ecdsa_secp256k1.nr index e8d9af2230f..b72a1acd041 100644 --- a/noir_stdlib/src/ecdsa_secp256k1.nr +++ b/noir_stdlib/src/ecdsa_secp256k1.nr @@ -7,4 +7,4 @@ pub fn verify_signature( message_hash: [u8; N] ) -> bool // docs:end:ecdsa_secp256k1 -{} \ No newline at end of file +{} diff --git a/noir_stdlib/src/ecdsa_secp256r1.nr b/noir_stdlib/src/ecdsa_secp256r1.nr index 9fe932a2f3d..ef92bf24ae4 100644 --- a/noir_stdlib/src/ecdsa_secp256r1.nr +++ b/noir_stdlib/src/ecdsa_secp256r1.nr @@ -7,4 +7,4 @@ pub fn verify_signature( message_hash: [u8; N] ) -> bool // docs:end:ecdsa_secp256r1 -{} \ No newline at end of file +{} diff --git a/noir_stdlib/src/field.nr b/noir_stdlib/src/field.nr index a7278d85999..0f4c2caffdf 100644 --- a/noir_stdlib/src/field.nr +++ b/noir_stdlib/src/field.nr @@ -6,7 +6,7 @@ impl Field { crate::assert_constant(bit_size); self.__to_le_bits(bit_size) } - + pub fn to_be_bits(self: Self, bit_size: u32) -> [u1] { crate::assert_constant(bit_size); self.__to_be_bits(bit_size) @@ -14,7 +14,7 @@ impl Field { #[builtin(to_le_bits)] fn __to_le_bits(self, _bit_size: u32) -> [u1] {} - + #[builtin(to_be_bits)] fn __to_be_bits(self, bit_size: u32) -> [u1] {} @@ -35,7 +35,6 @@ impl Field { self.to_be_radix(256, byte_size) } - pub fn to_le_radix(self: Self, radix: u32, result_len: u32) -> [u8] { crate::assert_constant(radix); crate::assert_constant(result_len); @@ -48,17 +47,14 @@ impl Field { self.__to_be_radix(radix, result_len) } - - // decompose `_self` into a `_result_len` vector over the `_radix` basis // `_radix` must be less than 256 #[builtin(to_le_radix)] fn __to_le_radix(self, radix: u32, result_len: u32) -> [u8] {} - + #[builtin(to_be_radix)] fn __to_be_radix(self, radix: u32, result_len: u32) -> [u8] {} - // Returns self to the power of the given exponent value. 
// Caution: we assume the exponent fits into 32 bits // using a bigger bit size impacts negatively the performance and should be done only if the exponent does not fit in 32 bits @@ -85,7 +81,6 @@ impl Field { lt_fallback(self, another) } } - } #[builtin(modulus_num_bits)] diff --git a/noir_stdlib/src/option.nr b/noir_stdlib/src/option.nr index cab95731d05..1c32f758af7 100644 --- a/noir_stdlib/src/option.nr +++ b/noir_stdlib/src/option.nr @@ -39,11 +39,7 @@ impl Option { /// Returns the wrapped value if `self.is_some()`. Otherwise, returns the given default value. pub fn unwrap_or(self, default: T) -> T { - if self._is_some { - self._value - } else { - default - } + if self._is_some { self._value } else { default } } /// Returns the wrapped value if `self.is_some()`. Otherwise, calls the given function to return @@ -112,31 +108,19 @@ impl Option { /// If self is Some, return self. Otherwise, return `other`. pub fn or(self, other: Self) -> Self { - if self._is_some { - self - } else { - other - } + if self._is_some { self } else { other } } /// If self is Some, return self. Otherwise, return `default()`. pub fn or_else(self, default: fn[Env]() -> Self) -> Self { - if self._is_some { - self - } else { - default() - } + if self._is_some { self } else { default() } } // If only one of the two Options is Some, return that option. // Otherwise, if both options are Some or both are None, None is returned. pub fn xor(self, other: Self) -> Self { if self._is_some { - if other._is_some { - Option::none() - } else { - self - } + if other._is_some { Option::none() } else { self } } else if other._is_some { other } else { diff --git a/noir_stdlib/src/scalar_mul.nr b/noir_stdlib/src/scalar_mul.nr index 1a7f1ad707c..eee7aac39f2 100644 --- a/noir_stdlib/src/scalar_mul.nr +++ b/noir_stdlib/src/scalar_mul.nr @@ -6,7 +6,7 @@ struct EmbeddedCurvePoint { } impl EmbeddedCurvePoint { - fn double(self) -> EmbeddedCurvePoint { + fn double(self) -> EmbeddedCurvePoint { embedded_curve_add(self, self) } } diff --git a/noir_stdlib/src/schnorr.nr b/noir_stdlib/src/schnorr.nr index 33656254550..757963d40d7 100644 --- a/noir_stdlib/src/schnorr.nr +++ b/noir_stdlib/src/schnorr.nr @@ -7,4 +7,4 @@ pub fn verify_signature( message: [u8; N] ) -> bool // docs:end:schnorr_verify -{} \ No newline at end of file +{} diff --git a/noir_stdlib/src/slice.nr b/noir_stdlib/src/slice.nr index bb5c43e497b..ea8d09d14ce 100644 --- a/noir_stdlib/src/slice.nr +++ b/noir_stdlib/src/slice.nr @@ -3,34 +3,34 @@ impl [T] { /// new slice with a length one greater than the /// original unmodified slice. #[builtin(slice_push_back)] - pub fn push_back(self, elem: T) -> Self { } + pub fn push_back(self, elem: T) -> Self {} /// Push a new element to the front of the slice, returning a /// new slice with a length one greater than the /// original unmodified slice. 
#[builtin(slice_push_front)] - pub fn push_front(self, elem: T) -> Self { } + pub fn push_front(self, elem: T) -> Self {} /// Remove the last element of the slice, returning the /// popped slice and the element in a tuple #[builtin(slice_pop_back)] - pub fn pop_back(self) -> (Self, T) { } + pub fn pop_back(self) -> (Self, T) {} /// Remove the first element of the slice, returning the /// element and the popped slice in a tuple #[builtin(slice_pop_front)] - pub fn pop_front(self) -> (T, Self) { } + pub fn pop_front(self) -> (T, Self) {} /// Insert an element at a specified index, shifting all elements /// after it to the right #[builtin(slice_insert)] - pub fn insert(self, index: u64, elem: T) -> Self { } + pub fn insert(self, index: u64, elem: T) -> Self {} /// Remove an element at a specified index, shifting all elements /// after it to the left, returning the altered slice and /// the removed element #[builtin(slice_remove)] - pub fn remove(self, index: u64) -> (Self, T) { } + pub fn remove(self, index: u64) -> (Self, T) {} // Append each element of the `other` slice to the end of `self`. // This returns a new slice and leaves both input slices unchanged. diff --git a/noir_stdlib/src/string.nr b/noir_stdlib/src/string.nr index ad6fd19e2de..12b5a1e75ec 100644 --- a/noir_stdlib/src/string.nr +++ b/noir_stdlib/src/string.nr @@ -2,7 +2,7 @@ use crate::collections::vec::Vec; impl str { /// Converts the given string into a byte array #[builtin(str_as_bytes)] - pub fn as_bytes(self) -> [u8; N] { } + pub fn as_bytes(self) -> [u8; N] {} /// return a byte vector of the str content pub fn as_bytes_vec(self: Self) -> Vec { diff --git a/noir_stdlib/src/test.nr b/noir_stdlib/src/test.nr index 560cfde741c..e1c320215de 100644 --- a/noir_stdlib/src/test.nr +++ b/noir_stdlib/src/test.nr @@ -19,9 +19,7 @@ struct OracleMock { impl OracleMock { unconstrained pub fn mock(name: str) -> Self { - Self { - id: create_mock_oracle(name), - } + Self { id: create_mock_oracle(name) } } unconstrained pub fn with_params
<P>
(self, params: P) -> Self { diff --git a/noir_stdlib/src/uint128.nr b/noir_stdlib/src/uint128.nr index c8c6217de90..d6f0b1e2232 100644 --- a/noir_stdlib/src/uint128.nr +++ b/noir_stdlib/src/uint128.nr @@ -13,14 +13,11 @@ impl U128 { pub fn from_u64s_le(lo: u64, hi: u64) -> U128 { // in order to handle multiplication, we need to represent the product of two u64 without overflow assert(crate::field::modulus_num_bits() as u32 > 128); - U128 { - lo: lo as Field, - hi: hi as Field, - } + U128 { lo: lo as Field, hi: hi as Field } } pub fn from_u64s_be(hi: u64, lo: u64) -> U128 { - U128::from_u64s_le(lo,hi) + U128::from_u64s_le(lo, hi) } pub fn from_le_bytes(bytes: [u8; 16]) -> U128 { @@ -36,16 +33,13 @@ impl U128 { hi += (bytes[i] as Field)*base; base *= 256; } - U128 { - lo, - hi, - } + U128 { lo, hi } } pub fn to_be_bytes(self: Self) -> [u8; 16] { let lo = self.lo.to_be_bytes(8); let hi = self.hi.to_be_bytes(8); - let mut bytes = [0;16]; + let mut bytes = [0; 16]; for i in 0..8 { bytes[i] = hi[i]; bytes[i+8] = lo[i]; @@ -56,7 +50,7 @@ impl U128 { pub fn to_le_bytes(self: Self) -> [u8; 16] { let lo = self.lo.to_le_bytes(8); let hi = self.hi.to_le_bytes(8); - let mut bytes = [0;16]; + let mut bytes = [0; 16]; for i in 0..8 { bytes[i] = lo[i]; bytes[i+8] = hi[i]; @@ -73,9 +67,9 @@ impl U128 { let mut lo = 0; let mut hi = 0; - let mut base = 1; + let mut base = 1; if N <= 18 { - for i in 0..N-2 { + for i in 0..N - 2 { lo += U128::decode_ascii(bytes[N-i-1])*base; base = base*16; } @@ -85,27 +79,21 @@ impl U128 { base = base*16; } base = 1; - for i in 17..N-1 { + for i in 17..N - 1 { hi += U128::decode_ascii(bytes[N-i])*base; base = base*16; } } - U128 { - lo: lo as Field, - hi: hi as Field, - } + U128 { lo: lo as Field, hi: hi as Field } } fn decode_ascii(ascii: u8) -> Field { if ascii < 58 { ascii - 48 + } else if ascii < 71 { + ascii - 55 } else { - if ascii < 71 { - ascii - 55 - } else { - ascii - 87 - } - + ascii - 87 } as Field } @@ -114,15 +102,14 @@ impl U128 { (U128::from_u64s_le(0, 0), self) } else { //TODO check if this can overflow? 
- let (q,r) = self.unconstrained_div(b * U128::from_u64s_le(2,0)); - let q_mul_2 = q * U128::from_u64s_le(2,0); + let (q,r) = self.unconstrained_div(b * U128::from_u64s_le(2, 0)); + let q_mul_2 = q * U128::from_u64s_le(2, 0); if r < b { (q_mul_2, r) } else { - (q_mul_2 + U128::from_u64s_le(1,0), r - b) + (q_mul_2 + U128::from_u64s_le(1, 0), r - b) } - - } + } } pub fn from_integer(i: T) -> U128 { @@ -130,31 +117,25 @@ impl U128 { // Reject values which would overflow a u128 f.assert_max_bit_size(128); let lo = f as u64 as Field; - let hi = (f-lo) / pow64; - U128 { - lo, - hi, - } + let hi = (f - lo) / pow64; + U128 { lo, hi } } pub fn to_integer(self) -> T { - crate::from_field(self.lo+self.hi*pow64) + crate::from_field(self.lo + self.hi * pow64) } fn wrapping_mul(self: Self, b: U128) -> U128 { - let low = self.lo*b.lo; + let low = self.lo * b.lo; let lo = low as u64 as Field; let carry = (low - lo) / pow64; let high = if crate::field::modulus_num_bits() as u32 > 196 { - (self.lo+self.hi)*(b.lo+b.hi) - low + carry + (self.lo + self.hi) * (b.lo + b.hi) - low + carry } else { - self.lo*b.hi + self.hi*b.lo + carry + self.lo * b.hi + self.hi * b.lo + carry }; let hi = high as u64 as Field; - U128 { - lo, - hi, - } + U128 { lo, hi } } } diff --git a/test_programs/.gitignore b/test_programs/.gitignore index a229df6197f..e98a2fb38b6 100644 --- a/test_programs/.gitignore +++ b/test_programs/.gitignore @@ -1,2 +1,3 @@ acir_artifacts -execution_success/**/crs \ No newline at end of file +execution_success/**/crs +Nargo.toml diff --git a/test_programs/compile_success_empty/closure_explicit_types/src/main.nr b/test_programs/compile_success_empty/closure_explicit_types/src/main.nr index eec2b90b5b2..b6c8a6b7b3c 100644 --- a/test_programs/compile_success_empty/closure_explicit_types/src/main.nr +++ b/test_programs/compile_success_empty/closure_explicit_types/src/main.nr @@ -7,13 +7,13 @@ fn ret_closure1() -> fn[(Field,)]() -> Field { || x + 10 } // return lamda that captures two things -fn ret_closure2() -> fn[(Field,Field)]() -> Field { +fn ret_closure2() -> fn[(Field, Field)]() -> Field { let x = 20; let y = 10; || x + y + 10 } // return lamda that captures two things with different types -fn ret_closure3() -> fn[(u32,u64)]() -> u64 { +fn ret_closure3() -> fn[(u32, u64)]() -> u64 { let x: u32 = 20; let y: u64 = 10; || x as u64 + y + 10 diff --git a/test_programs/compile_success_empty/conditional_regression_579/src/main.nr b/test_programs/compile_success_empty/conditional_regression_579/src/main.nr index a479a7a6fbf..a517f4fdb70 100644 --- a/test_programs/compile_success_empty/conditional_regression_579/src/main.nr +++ b/test_programs/compile_success_empty/conditional_regression_579/src/main.nr @@ -12,9 +12,7 @@ struct MyStruct579 { impl MyStruct579 { fn new(array_param: [u32; 2]) -> MyStruct579 { - MyStruct579 { - array_param: array_param - } + MyStruct579 { array_param } } } diff --git a/test_programs/compile_success_empty/reexports/src/main.nr b/test_programs/compile_success_empty/reexports/src/main.nr index bb94b21b221..ed469ff77d0 100644 --- a/test_programs/compile_success_empty/reexports/src/main.nr +++ b/test_programs/compile_success_empty/reexports/src/main.nr @@ -1,8 +1,6 @@ use dep::reexporting_lib::{FooStruct, MyStruct, lib}; fn main() { - let x: FooStruct = MyStruct { - inner: 0 - }; + let x: FooStruct = MyStruct { inner: 0 }; assert(lib::is_struct_zero(x)); } diff --git a/test_programs/compile_success_empty/specialization/src/main.nr 
b/test_programs/compile_success_empty/specialization/src/main.nr index 9cd32e0f1eb..30116330a86 100644 --- a/test_programs/compile_success_empty/specialization/src/main.nr +++ b/test_programs/compile_success_empty/specialization/src/main.nr @@ -1,11 +1,15 @@ struct Foo {} impl Foo { - fn foo(_self: Self) -> Field { 1 } + fn foo(_self: Self) -> Field { + 1 + } } impl Foo { - fn foo(_self: Self) -> Field { 2 } + fn foo(_self: Self) -> Field { + 2 + } } fn main() { diff --git a/test_programs/execution_success/1327_concrete_in_generic/src/main.nr b/test_programs/execution_success/1327_concrete_in_generic/src/main.nr index e1d601b13c9..8250b31789b 100644 --- a/test_programs/execution_success/1327_concrete_in_generic/src/main.nr +++ b/test_programs/execution_success/1327_concrete_in_generic/src/main.nr @@ -10,15 +10,15 @@ struct B { } impl B { - fn new(new_concrete_t_c_constructor: fn () -> T_C) -> B { - B { new_concrete_t_c_constructor } - } + fn new(new_concrete_t_c_constructor: fn() -> T_C) -> B { + B { new_concrete_t_c_constructor } + } - fn get_t_c(self) -> T_C { - let new_concrete_t_c_constructor = self.new_concrete_t_c_constructor; - new_concrete_t_c_constructor() - } + fn get_t_c(self) -> T_C { + let new_concrete_t_c_constructor = self.new_concrete_t_c_constructor; + new_concrete_t_c_constructor() } +} // --- // Set struct C { @@ -26,15 +26,15 @@ struct C { } impl C { - fn new (t_d_interface: MethodInterface) -> Self { - C { t_d_interface } - } + fn new(t_d_interface: MethodInterface) -> Self { + C { t_d_interface } + } - fn call_method_of_t_d(self, t_d: T_D) -> Field { - let some_method_on_t_d = self.t_d_interface.some_method_on_t_d; - some_method_on_t_d(t_d) - } + fn call_method_of_t_d(self, t_d: T_D) -> Field { + let some_method_on_t_d = self.t_d_interface.some_method_on_t_d; + some_method_on_t_d(t_d) } +} // --- struct MethodInterface { some_method_on_t_d: fn(T_D)->Field, diff --git a/test_programs/execution_success/array_dynamic/src/main.nr b/test_programs/execution_success/array_dynamic/src/main.nr index dde7bacc455..6b51095bd8c 100644 --- a/test_programs/execution_success/array_dynamic/src/main.nr +++ b/test_programs/execution_success/array_dynamic/src/main.nr @@ -2,8 +2,8 @@ fn main( x: [u32; 5], mut z: u32, t: u32, - index: [Field;5], - index2: [Field;5], + index: [Field; 5], + index2: [Field; 5], offset: Field, sublen: Field ) { diff --git a/test_programs/execution_success/array_dynamic_blackbox_input/src/main.nr b/test_programs/execution_success/array_dynamic_blackbox_input/src/main.nr index aabf7fc9d5c..4cbf1bd8e6d 100644 --- a/test_programs/execution_success/array_dynamic_blackbox_input/src/main.nr +++ b/test_programs/execution_success/array_dynamic_blackbox_input/src/main.nr @@ -24,4 +24,4 @@ fn compute_root(leaf: [u8; 32], path: [u8; 64], _index: u32, root: [u8; 32]) { // Regression for issue #4258 assert(root == current); -} \ No newline at end of file +} diff --git a/test_programs/execution_success/array_dynamic_main_output/src/main.nr b/test_programs/execution_success/array_dynamic_main_output/src/main.nr index ccb7016a190..50feb71f983 100644 --- a/test_programs/execution_success/array_dynamic_main_output/src/main.nr +++ b/test_programs/execution_success/array_dynamic_main_output/src/main.nr @@ -1,4 +1,4 @@ fn main(mut x: [Field; 10], index: u8) -> pub [Field; 10] { x[index] = 0; x -} \ No newline at end of file +} diff --git a/test_programs/execution_success/assert_statement_recursive/src/main.nr b/test_programs/execution_success/assert_statement_recursive/src/main.nr 
index 687a0d324ba..d89ea3d35bb 100644 --- a/test_programs/execution_success/assert_statement_recursive/src/main.nr +++ b/test_programs/execution_success/assert_statement_recursive/src/main.nr @@ -8,4 +8,4 @@ fn main(x: Field, y: pub Field) { assert(x == y, "x and y are not equal"); assert_eq(x, y, "x and y are not equal"); -} \ No newline at end of file +} diff --git a/test_programs/execution_success/bigint/src/main.nr b/test_programs/execution_success/bigint/src/main.nr index 74949a5f785..046d7d07d5e 100644 --- a/test_programs/execution_success/bigint/src/main.nr +++ b/test_programs/execution_success/bigint/src/main.nr @@ -1,8 +1,8 @@ use dep::std::bigint; -fn main(mut x: [u8;5], y: [u8;5]) { - let a = bigint::BigInt::secpk1_fq_from_le_bytes([x[0],x[1],x[2],x[3],x[4]]); - let b = bigint::BigInt::secpk1_fq_from_le_bytes([y[0],y[1],y[2],y[3],y[4]]); +fn main(mut x: [u8; 5], y: [u8; 5]) { + let a = bigint::BigInt::secpk1_fq_from_le_bytes([x[0], x[1], x[2], x[3], x[4]]); + let b = bigint::BigInt::secpk1_fq_from_le_bytes([y[0], y[1], y[2], y[3], y[4]]); let a_bytes = a.to_le_bytes(); let b_bytes = b.to_le_bytes(); @@ -11,11 +11,11 @@ fn main(mut x: [u8;5], y: [u8;5]) { assert(b_bytes[i] == y[i]); } - let d = a*b - b; + let d = a * b - b; let d_bytes = d.to_le_bytes(); let d1 = bigint::BigInt::secpk1_fq_from_le_bytes(597243850900842442924.to_le_bytes(10)); let d1_bytes = d1.to_le_bytes(); for i in 0..32 { - assert(d_bytes[i] == d1_bytes[i]); + assert(d_bytes[i] == d1_bytes[i]); } } diff --git a/test_programs/execution_success/brillig_cow/src/main.nr b/test_programs/execution_success/brillig_cow/src/main.nr index 7d847e085fe..52ce8b8be3c 100644 --- a/test_programs/execution_success/brillig_cow/src/main.nr +++ b/test_programs/execution_success/brillig_cow/src/main.nr @@ -10,42 +10,37 @@ struct ExecutionResult { impl ExecutionResult { fn is_equal(self, other: ExecutionResult) -> bool { - (self.original == other.original) & - (self.modified_once == other.modified_once) & - (self.modified_twice == other.modified_twice) + (self.original == other.original) + & (self.modified_once == other.modified_once) + & (self.modified_twice == other.modified_twice) } } fn modify_in_inlined_constrained(original: [Field; ARRAY_SIZE], index: u64) -> ExecutionResult { let mut modified = original; - + modified[index] = 27; let modified_once = modified; modified[index+1] = 27; - ExecutionResult { - original, - modified_once, - modified_twice: modified - } + ExecutionResult { original, modified_once, modified_twice: modified } } -unconstrained fn modify_in_unconstrained(original: [Field; ARRAY_SIZE], index: u64) -> ExecutionResult { +unconstrained fn modify_in_unconstrained( + original: [Field; ARRAY_SIZE], + index: u64 +) -> ExecutionResult { let mut modified = original; - + modified[index] = 27; let modified_once = modified; modified[index+1] = 27; - ExecutionResult { - original, - modified_once, - modified_twice: modified - } + ExecutionResult { original, modified_once, modified_twice: modified } } unconstrained fn main(original: [Field; ARRAY_SIZE], index: u64, expected_result: ExecutionResult) { diff --git a/test_programs/execution_success/brillig_cow_regression/src/main.nr b/test_programs/execution_success/brillig_cow_regression/src/main.nr index 74aeda18261..7f3dd766480 100644 --- a/test_programs/execution_success/brillig_cow_regression/src/main.nr +++ b/test_programs/execution_success/brillig_cow_regression/src/main.nr @@ -47,54 +47,54 @@ struct U256 { } impl U256 { - pub fn from_bytes32(bytes : [u8;32]) -> U256 
{ + pub fn from_bytes32(bytes: [u8; 32]) -> U256 { // We use addition rather than a bitwise OR as the bitshifts ensure that none of the bytes overlap each other. let high_0 = ((bytes[0] as u64) << 56) - + ((bytes[1] as u64) << 48) - + ((bytes[2] as u64) << 40) - + ((bytes[3] as u64) << 32) - + ((bytes[4] as u64) << 24) - + ((bytes[5] as u64) << 16) - + ((bytes[6] as u64) << 8) - + (bytes[7] as u64); - + + ((bytes[1] as u64) << 48) + + ((bytes[2] as u64) << 40) + + ((bytes[3] as u64) << 32) + + ((bytes[4] as u64) << 24) + + ((bytes[5] as u64) << 16) + + ((bytes[6] as u64) << 8) + + (bytes[7] as u64); + let high_1 = ((bytes[8] as u64) << 56) - + ((bytes[9] as u64) << 48) - + ((bytes[10] as u64) << 40) - + ((bytes[11] as u64) << 32) - + ((bytes[12] as u64) << 24) - + ((bytes[13] as u64) << 16) - + ((bytes[14] as u64) << 8) - + (bytes[15] as u64); - + + ((bytes[9] as u64) << 48) + + ((bytes[10] as u64) << 40) + + ((bytes[11] as u64) << 32) + + ((bytes[12] as u64) << 24) + + ((bytes[13] as u64) << 16) + + ((bytes[14] as u64) << 8) + + (bytes[15] as u64); + let low_0 = ((bytes[16] as u64) << 56) - + ((bytes[17] as u64) << 48) - + ((bytes[18] as u64) << 40) - + ((bytes[19] as u64) << 32) - + ((bytes[20] as u64) << 24) - + ((bytes[21] as u64) << 16) - + ((bytes[22] as u64) << 8) - + (bytes[23] as u64); - + + ((bytes[17] as u64) << 48) + + ((bytes[18] as u64) << 40) + + ((bytes[19] as u64) << 32) + + ((bytes[20] as u64) << 24) + + ((bytes[21] as u64) << 16) + + ((bytes[22] as u64) << 8) + + (bytes[23] as u64); + let low_1 = ((bytes[24] as u64) << 56) - + ((bytes[25] as u64) << 48) - + ((bytes[26] as u64) << 40) - + ((bytes[27] as u64) << 32) - + ((bytes[28] as u64) << 24) - + ((bytes[29] as u64) << 16) - + ((bytes[30] as u64) << 8) - + (bytes[31] as u64); - - U256{inner : [high_0, high_1, low_0, low_1]} + + ((bytes[25] as u64) << 48) + + ((bytes[26] as u64) << 40) + + ((bytes[27] as u64) << 32) + + ((bytes[28] as u64) << 24) + + ((bytes[29] as u64) << 16) + + ((bytes[30] as u64) << 8) + + (bytes[31] as u64); + + U256 { inner: [high_0, high_1, low_0, low_1] } } - pub fn to_u128_limbs(self) -> [Field;2] { + pub fn to_u128_limbs(self) -> [Field; 2] { let two_pow_64 = 2.pow_32(64); let high = (self.inner[0] as Field) * two_pow_64 + self.inner[1] as Field; let low = (self.inner[2] as Field) * two_pow_64 + self.inner[3] as Field; - - [high,low] + + [high, low] } } diff --git a/test_programs/execution_success/brillig_fns_as_values/src/main.nr b/test_programs/execution_success/brillig_fns_as_values/src/main.nr index 2f5d14583d5..ea3148915b8 100644 --- a/test_programs/execution_success/brillig_fns_as_values/src/main.nr +++ b/test_programs/execution_success/brillig_fns_as_values/src/main.nr @@ -14,7 +14,7 @@ fn main(x: u32) { assert(increment(x) == x + 1); } -unconstrained fn wrapper(func: fn (u32) -> u32, param: u32) -> u32 { +unconstrained fn wrapper(func: fn(u32) -> u32, param: u32) -> u32 { func(param) } diff --git a/test_programs/execution_success/conditional_regression_661/src/main.nr b/test_programs/execution_success/conditional_regression_661/src/main.nr index 03102eb775e..26521a88358 100644 --- a/test_programs/execution_success/conditional_regression_661/src/main.nr +++ b/test_programs/execution_success/conditional_regression_661/src/main.nr @@ -16,11 +16,11 @@ fn test5(a: u32) { } } -fn issue_661_foo(array: [u32;4], b: u32) -> [u32;1] { +fn issue_661_foo(array: [u32; 4], b: u32) -> [u32; 1] { [array[0] + b] } -fn issue_661_bar(a: [u32;4]) -> [u32;4] { +fn issue_661_bar(a: [u32; 4]) -> [u32; 4] { 
let mut b: [u32; 4] = [0; 4]; b[0]=a[0]+1; b diff --git a/test_programs/execution_success/databus/src/main.nr b/test_programs/execution_success/databus/src/main.nr index 61a9637f5fe..1cf95be8a22 100644 --- a/test_programs/execution_success/databus/src/main.nr +++ b/test_programs/execution_success/databus/src/main.nr @@ -1,12 +1,12 @@ use dep::std; -fn main(mut x: u32, y: call_data u32, z: call_data [u32;4]) -> return_data u32 { - let a = z[x]; - a+foo(y) +fn main(mut x: u32, y: call_data u32, z: call_data [u32; 4]) -> return_data u32 { + let a = z[x]; + a + foo(y) } // Use an unconstrained function to force the compiler to avoid inlining unconstrained fn foo(x: u32) -> u32 { - x+1 + x + 1 } diff --git a/test_programs/execution_success/debug_logs/src/main.nr b/test_programs/execution_success/debug_logs/src/main.nr index c628a9ae6a4..ec24b0cc8e8 100644 --- a/test_programs/execution_success/debug_logs/src/main.nr +++ b/test_programs/execution_success/debug_logs/src/main.nr @@ -1,4 +1,4 @@ -fn main(x: Field, y: pub Field) { +fn main(x: Field, y: pub Field) { let string = "i: {i}, j: {j}"; println(string); @@ -102,8 +102,8 @@ fn regression_2903() { let a = v[0]; println(a); // will print `1` - let bytes = [ "aaa", "bbb", "ccc" ]; - println(bytes); + let bytes = ["aaa", "bbb", "ccc"]; + println(bytes); } fn regression_2906() { diff --git a/test_programs/execution_success/distinct_keyword/src/main.nr b/test_programs/execution_success/distinct_keyword/src/main.nr index 0e55a011a48..8e9b5c008ed 100644 --- a/test_programs/execution_success/distinct_keyword/src/main.nr +++ b/test_programs/execution_success/distinct_keyword/src/main.nr @@ -1,4 +1,4 @@ // Example that uses the distinct keyword -fn main(x: pub Field) -> distinct pub [Field;2] { +fn main(x: pub Field) -> distinct pub [Field; 2] { [x + 1, x] } diff --git a/test_programs/execution_success/ecdsa_secp256k1/src/main.nr b/test_programs/execution_success/ecdsa_secp256k1/src/main.nr index 2f410755f74..ac0359e4bb8 100644 --- a/test_programs/execution_success/ecdsa_secp256k1/src/main.nr +++ b/test_programs/execution_success/ecdsa_secp256k1/src/main.nr @@ -1,11 +1,11 @@ use dep::std; fn main( - message: [u8;38], - hashed_message: [u8;32], - pub_key_x: [u8;32], - pub_key_y: [u8;32], - signature: [u8;64] + message: [u8; 38], + hashed_message: [u8; 32], + pub_key_x: [u8; 32], + pub_key_y: [u8; 32], + signature: [u8; 64] ) { // Hash the message, since secp256k1 expects a hashed_message let expected = std::hash::sha256(message); diff --git a/test_programs/execution_success/ecdsa_secp256r1/src/main.nr b/test_programs/execution_success/ecdsa_secp256r1/src/main.nr index d23573d13a6..c64e390d652 100644 --- a/test_programs/execution_success/ecdsa_secp256r1/src/main.nr +++ b/test_programs/execution_success/ecdsa_secp256r1/src/main.nr @@ -1,6 +1,6 @@ use dep::std; -fn main(hashed_message: [u8;32], pub_key_x: [u8;32], pub_key_y: [u8;32], signature: [u8;64]) { +fn main(hashed_message: [u8; 32], pub_key_x: [u8; 32], pub_key_y: [u8; 32], signature: [u8; 64]) { let valid_signature = std::ecdsa_secp256r1::verify_signature(pub_key_x, pub_key_y, signature, hashed_message); assert(valid_signature); } diff --git a/test_programs/execution_success/main_bool_arg/src/main.nr b/test_programs/execution_success/main_bool_arg/src/main.nr index 111a23ec0c2..2c50d7dee16 100644 --- a/test_programs/execution_success/main_bool_arg/src/main.nr +++ b/test_programs/execution_success/main_bool_arg/src/main.nr @@ -1,4 +1,4 @@ -fn main(x: bool, y: [bool;2]) { +fn main(x: bool, y: [bool; 
2]) { if x { assert(1 != 2); } diff --git a/test_programs/execution_success/operator_overloading/src/main.nr b/test_programs/execution_success/operator_overloading/src/main.nr index 3867531abca..d61e1da170e 100644 --- a/test_programs/execution_success/operator_overloading/src/main.nr +++ b/test_programs/execution_success/operator_overloading/src/main.nr @@ -1,4 +1,4 @@ -use dep::std::ops::{ Add, Sub, Mul, Div, Rem, BitAnd, BitOr, BitXor, Shl, Shr }; +use dep::std::ops::{Add, Sub, Mul, Div, Rem, BitAnd, BitOr, BitXor, Shl, Shr}; use dep::std::cmp::Ordering; // x = 3, y = 9 @@ -126,10 +126,6 @@ impl Ord for Wrapper { } } - - - - struct Pair { x: Wrapper, y: Wrapper, diff --git a/test_programs/execution_success/regression_3394/src/main.nr b/test_programs/execution_success/regression_3394/src/main.nr index cc45487b98b..94b6c818ff2 100644 --- a/test_programs/execution_success/regression_3394/src/main.nr +++ b/test_programs/execution_success/regression_3394/src/main.nr @@ -3,4 +3,4 @@ use dep::std; fn main() { let x : i8 = -128; std::println(x); -} \ No newline at end of file +} diff --git a/test_programs/execution_success/regression_3607/src/main.nr b/test_programs/execution_success/regression_3607/src/main.nr index c09211c2810..9c7ef243f60 100644 --- a/test_programs/execution_success/regression_3607/src/main.nr +++ b/test_programs/execution_success/regression_3607/src/main.nr @@ -5,4 +5,4 @@ fn main(mut x: u32) { x = (x+1) / x; } assert(x != 0); -} \ No newline at end of file +} diff --git a/test_programs/execution_success/regression_3889/src/main.nr b/test_programs/execution_success/regression_3889/src/main.nr index 10b8ecabee3..402a69a10da 100644 --- a/test_programs/execution_success/regression_3889/src/main.nr +++ b/test_programs/execution_success/regression_3889/src/main.nr @@ -17,7 +17,6 @@ mod Baz { use crate::Bar::NewType; } - fn main(works: Baz::Works, fails: Baz::BarStruct, also_fails: Bar::NewType) -> pub Field { works.a + fails.a + also_fails.a } diff --git a/test_programs/execution_success/side_effects_constrain_array/src/main.nr b/test_programs/execution_success/side_effects_constrain_array/src/main.nr index fb3c346a460..c4a62603bc3 100644 --- a/test_programs/execution_success/side_effects_constrain_array/src/main.nr +++ b/test_programs/execution_success/side_effects_constrain_array/src/main.nr @@ -7,11 +7,11 @@ fn main(y: pub u32) { // The assert inside the if should be hit if y < 10 { - assert(bar.inner == [100, 101, 102]); + assert(bar.inner == [100, 101, 102]); } // The assert inside the if should not be hit if y > 10 { assert(bar.inner == [0, 1, 2]); } -} \ No newline at end of file +} diff --git a/test_programs/execution_success/struct/src/main.nr b/test_programs/execution_success/struct/src/main.nr index 45c5e347e5a..de08f42f79d 100644 --- a/test_programs/execution_success/struct/src/main.nr +++ b/test_programs/execution_success/struct/src/main.nr @@ -9,8 +9,8 @@ struct Pair { } impl Foo { - fn default(x: Field,y: Field) -> Self { - Self { bar: 0, array: [x,y] } + fn default(x: Field, y: Field) -> Self { + Self { bar: 0, array: [x, y] } } } diff --git a/test_programs/format.sh b/test_programs/format.sh new file mode 100755 index 00000000000..3c679b8689e --- /dev/null +++ b/test_programs/format.sh @@ -0,0 +1,47 @@ +#!/usr/bin/env bash +set -e + +# These tests are incompatible with gas reporting +excluded_dirs=("workspace" "workspace_default_member" "workspace_reexport_bug") + +# These tests cause failures in CI with a stack overflow for some reason. 
+ci_excluded_dirs=("eddsa") + +current_dir=$(pwd) + +# We generate a Noir workspace which contains all of the test cases +# This allows us to generate a gates report using `nargo info` for all of them at once. + + +function collect_dirs { + test_dirs=$(ls $current_dir/$1) + + for dir in $test_dirs; do + if [[ " ${excluded_dirs[@]} " =~ " ${dir} " ]]; then + continue + fi + + if [[ ${CI-false} = "true" ]] && [[ " ${ci_excluded_dirs[@]} " =~ " ${dir} " ]]; then + continue + fi + + echo " \"$1/$dir\"," >> Nargo.toml +done +} + +echo "[workspace]" > Nargo.toml +echo "members = [" >> Nargo.toml + +collect_dirs compile_success_empty +collect_dirs execution_success + +echo "]" >> Nargo.toml + +if [ "$1" == "check" ]; then + nargo fmt --check +else + nargo fmt +fi + + +rm Nargo.toml From 00ab3db86b06111d144516e862902b8604284611 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Mon, 26 Feb 2024 17:28:28 +0000 Subject: [PATCH 39/45] fix: remove panic when generic array length is not resolvable (#4408) # Description ## Problem\* Resolves #4407 ## Summary\* We currently have no way to gracefully error during monomorphization and so must panic if we run into any errors. This PR then adds the `MonomorphizationError` enum with an example error type. We've also added a `CompileError` which unifies `RuntimeError` and `MonomorphizationError` so they can be converted into `FileDiagnostic`s ## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. 
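For readers skimming the diff below: the new error-handling shape is a `thiserror`-derived wrapper enum whose `#[error(transparent)]` / `#[from]` variants let `?` promote either inner error, plus a single `From` impl that turns the wrapper into a diagnostic. A minimal, self-contained sketch of that pattern, using toy stand-ins for `RuntimeError`, `MonomorphizationError`, and `FileDiagnostic` (the real definitions follow in the patch):

```rust
// Sketch only: toy stand-ins for noirc's RuntimeError,
// MonomorphizationError, and FileDiagnostic.
use thiserror::Error;

#[derive(Debug, Error)]
enum RuntimeError {
    #[error("assertion failed: {0}")]
    FailedAssertion(String),
}

#[derive(Debug, Error)]
enum MonomorphizationError {
    #[error("Length of generic array could not be determined.")]
    UnknownArrayLength,
}

// The wrapper: `transparent` forwards Display/source to the inner error,
// and `from` lets `?` convert either inner error automatically.
#[derive(Debug, Error)]
enum CompileError {
    #[error(transparent)]
    RuntimeError(#[from] RuntimeError),
    #[error(transparent)]
    MonomorphizationError(#[from] MonomorphizationError),
}

// Stand-in for FileDiagnostic: one conversion point for both error kinds.
#[derive(Debug)]
struct Diagnostic {
    message: String,
}

impl From<CompileError> for Diagnostic {
    fn from(error: CompileError) -> Diagnostic {
        Diagnostic { message: error.to_string() }
    }
}

fn monomorphize() -> Result<(), MonomorphizationError> {
    Err(MonomorphizationError::UnknownArrayLength)
}

fn compile() -> Result<(), CompileError> {
    monomorphize()?; // MonomorphizationError -> CompileError via #[from]
    Ok(())
}

fn main() {
    if let Err(error) = compile() {
        let diagnostic = Diagnostic::from(error);
        eprintln!("{}", diagnostic.message);
    }
}
```

The payoff is that a top-level entry point like `compile_no_check` can return one error type while each compiler pass keeps its own focused enum.
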
--- Cargo.lock | 1 + compiler/noirc_driver/Cargo.toml | 1 + compiler/noirc_driver/src/lib.rs | 27 +- compiler/noirc_evaluator/src/errors.rs | 2 +- .../src/monomorphization/debug.rs | 38 ++- .../src/monomorphization/mod.rs | 286 ++++++++++++------ compiler/noirc_frontend/src/tests.rs | 2 +- tooling/nargo/src/ops/test.rs | 10 +- 8 files changed, 246 insertions(+), 121 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 4d8b12d5379..0f575d9c46e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2947,6 +2947,7 @@ dependencies = [ "noirc_macros", "rust-embed", "serde", + "thiserror", "tracing", ] diff --git a/compiler/noirc_driver/Cargo.toml b/compiler/noirc_driver/Cargo.toml index d9b240101d8..681976735f3 100644 --- a/compiler/noirc_driver/Cargo.toml +++ b/compiler/noirc_driver/Cargo.toml @@ -23,6 +23,7 @@ serde.workspace = true fxhash.workspace = true rust-embed.workspace = true tracing.workspace = true +thiserror.workspace = true aztec_macros = { path = "../../aztec_macros" } noirc_macros = { path = "../../noirc_macros" } diff --git a/compiler/noirc_driver/src/lib.rs b/compiler/noirc_driver/src/lib.rs index 8b0fc5dc97a..11f53cdb749 100644 --- a/compiler/noirc_driver/src/lib.rs +++ b/compiler/noirc_driver/src/lib.rs @@ -16,9 +16,10 @@ use noirc_frontend::graph::{CrateId, CrateName}; use noirc_frontend::hir::def_map::{Contract, CrateDefMap}; use noirc_frontend::hir::Context; use noirc_frontend::macros_api::MacroProcessor; -use noirc_frontend::monomorphization::{monomorphize, monomorphize_debug}; +use noirc_frontend::monomorphization::{monomorphize, monomorphize_debug, MonomorphizationError}; use noirc_frontend::node_interner::FuncId; use std::path::Path; +use thiserror::Error; use tracing::info; mod abi_gen; @@ -107,6 +108,24 @@ fn parse_expression_width(input: &str) -> Result for FileDiagnostic { + fn from(error: CompileError) -> FileDiagnostic { + match error { + CompileError::RuntimeError(err) => err.into(), + CompileError::MonomorphizationError(err) => err.into(), + } + } +} + /// Helper type used to signify where only warnings are expected in file diagnostics pub type Warnings = Vec; @@ -436,11 +455,11 @@ pub fn compile_no_check( main_function: FuncId, cached_program: Option, force_compile: bool, -) -> Result { +) -> Result { let program = if options.instrument_debug { - monomorphize_debug(main_function, &mut context.def_interner, &context.debug_instrumenter) + monomorphize_debug(main_function, &mut context.def_interner, &context.debug_instrumenter)? } else { - monomorphize(main_function, &mut context.def_interner) + monomorphize(main_function, &mut context.def_interner)? }; let hash = fxhash::hash64(&program); diff --git a/compiler/noirc_evaluator/src/errors.rs b/compiler/noirc_evaluator/src/errors.rs index ed94adac28e..40f4336e0b5 100644 --- a/compiler/noirc_evaluator/src/errors.rs +++ b/compiler/noirc_evaluator/src/errors.rs @@ -158,7 +158,7 @@ impl RuntimeError { RuntimeError::InternalError(cause) => { Diagnostic::simple_error( "Internal Consistency Evaluators Errors: \n - This is likely a bug. Consider Opening an issue at https://github.com/noir-lang/noir/issues".to_owned(), + This is likely a bug. 
Consider opening an issue at https://github.com/noir-lang/noir/issues".to_owned(), cause.to_string(), noirc_errors::Span::inclusive(0, 0) ) diff --git a/compiler/noirc_frontend/src/monomorphization/debug.rs b/compiler/noirc_frontend/src/monomorphization/debug.rs index 5837d67660a..a8ff4399f99 100644 --- a/compiler/noirc_frontend/src/monomorphization/debug.rs +++ b/compiler/noirc_frontend/src/monomorphization/debug.rs @@ -8,7 +8,7 @@ use crate::hir_def::expr::*; use crate::node_interner::ExprId; use super::ast::{Expression, Ident}; -use super::Monomorphizer; +use super::{MonomorphizationError, Monomorphizer}; const DEBUG_MEMBER_ASSIGN_PREFIX: &str = "__debug_member_assign_"; const DEBUG_VAR_ID_ARG_SLOT: usize = 0; @@ -39,18 +39,19 @@ impl<'interner> Monomorphizer<'interner> { &mut self, call: &HirCallExpression, arguments: &mut [Expression], - ) { - let original_func = Box::new(self.expr(call.func)); + ) -> Result<(), MonomorphizationError> { + let original_func = Box::new(self.expr(call.func)?); if let Expression::Ident(Ident { name, .. }) = original_func.as_ref() { if name == "__debug_var_assign" { - self.patch_debug_var_assign(call, arguments); + self.patch_debug_var_assign(call, arguments)?; } else if name == "__debug_var_drop" { - self.patch_debug_var_drop(call, arguments); + self.patch_debug_var_drop(call, arguments)?; } else if let Some(arity) = name.strip_prefix(DEBUG_MEMBER_ASSIGN_PREFIX) { let arity = arity.parse::().expect("failed to parse member assign arity"); - self.patch_debug_member_assign(call, arguments, arity); + self.patch_debug_member_assign(call, arguments, arity)?; } } + Ok(()) } /// Update instrumentation code inserted on variable assignment. We need to @@ -59,7 +60,11 @@ impl<'interner> Monomorphizer<'interner> { /// variable are possible if using generic functions, hence the temporary ID /// created when injecting the instrumentation code can map to multiple IDs /// at runtime. - fn patch_debug_var_assign(&mut self, call: &HirCallExpression, arguments: &mut [Expression]) { + fn patch_debug_var_assign( + &mut self, + call: &HirCallExpression, + arguments: &mut [Expression], + ) -> Result<(), MonomorphizationError> { let hir_arguments = vecmap(&call.arguments, |id| self.interner.expression(id)); let var_id_arg = hir_arguments.get(DEBUG_VAR_ID_ARG_SLOT); let Some(HirExpression::Literal(HirLiteral::Integer(source_var_id, _))) = var_id_arg else { @@ -73,13 +78,18 @@ impl<'interner> Monomorphizer<'interner> { // then update the ID used for tracking at runtime let var_id = self.debug_type_tracker.insert_var(source_var_id, var_type); let interned_var_id = self.intern_var_id(var_id, &call.location); - arguments[DEBUG_VAR_ID_ARG_SLOT] = self.expr(interned_var_id); + arguments[DEBUG_VAR_ID_ARG_SLOT] = self.expr(interned_var_id)?; + Ok(()) } /// Update instrumentation code for a variable being dropped out of scope. /// Given the source_var_id we search for the last assigned debug var_id and /// replace it instead. 
- fn patch_debug_var_drop(&mut self, call: &HirCallExpression, arguments: &mut [Expression]) { + fn patch_debug_var_drop( + &mut self, + call: &HirCallExpression, + arguments: &mut [Expression], + ) -> Result<(), MonomorphizationError> { let hir_arguments = vecmap(&call.arguments, |id| self.interner.expression(id)); let var_id_arg = hir_arguments.get(DEBUG_VAR_ID_ARG_SLOT); let Some(HirExpression::Literal(HirLiteral::Integer(source_var_id, _))) = var_id_arg else { @@ -92,7 +102,8 @@ impl<'interner> Monomorphizer<'interner> { .get_var_id(source_var_id) .unwrap_or_else(|| unreachable!("failed to find debug variable")); let interned_var_id = self.intern_var_id(var_id, &call.location); - arguments[DEBUG_VAR_ID_ARG_SLOT] = self.expr(interned_var_id); + arguments[DEBUG_VAR_ID_ARG_SLOT] = self.expr(interned_var_id)?; + Ok(()) } /// Update instrumentation code inserted when assigning to a member of an @@ -106,7 +117,7 @@ impl<'interner> Monomorphizer<'interner> { call: &HirCallExpression, arguments: &mut [Expression], arity: usize, - ) { + ) -> Result<(), MonomorphizationError> { let hir_arguments = vecmap(&call.arguments, |id| self.interner.expression(id)); let var_id_arg = hir_arguments.get(DEBUG_VAR_ID_ARG_SLOT); let Some(HirExpression::Literal(HirLiteral::Integer(source_var_id, _))) = var_id_arg else { @@ -149,7 +160,7 @@ impl<'interner> Monomorphizer<'interner> { call.location.span, call.location.file, ); - arguments[DEBUG_MEMBER_FIELD_INDEX_ARG_SLOT + i] = self.expr(index_id); + arguments[DEBUG_MEMBER_FIELD_INDEX_ARG_SLOT + i] = self.expr(index_id)?; } else { // array/string element using constant index cursor_type = element_type_at_index(cursor_type, index as usize); @@ -165,7 +176,8 @@ impl<'interner> Monomorphizer<'interner> { .get_var_id(source_var_id) .unwrap_or_else(|| unreachable!("failed to find debug variable")); let interned_var_id = self.intern_var_id(var_id, &call.location); - arguments[DEBUG_VAR_ID_ARG_SLOT] = self.expr(interned_var_id); + arguments[DEBUG_VAR_ID_ARG_SLOT] = self.expr(interned_var_id)?; + Ok(()) } fn intern_var_id(&mut self, var_id: DebugVarId, location: &Location) -> ExprId { diff --git a/compiler/noirc_frontend/src/monomorphization/mod.rs b/compiler/noirc_frontend/src/monomorphization/mod.rs index 2e714da21c6..ce880401d77 100644 --- a/compiler/noirc_frontend/src/monomorphization/mod.rs +++ b/compiler/noirc_frontend/src/monomorphization/mod.rs @@ -9,13 +9,14 @@ //! The entry point to this pass is the `monomorphize` function which, starting from a given //! function, will monomorphize the entire reachable program. 
use acvm::FieldElement; -use iter_extended::{btree_map, vecmap}; -use noirc_errors::Location; +use iter_extended::{btree_map, try_vecmap, vecmap}; +use noirc_errors::{CustomDiagnostic, FileDiagnostic, Location}; use noirc_printable_type::PrintableType; use std::{ collections::{BTreeMap, HashMap, VecDeque}, unreachable, }; +use thiserror::Error; use crate::{ debug::DebugInstrumenter, @@ -87,6 +88,40 @@ struct Monomorphizer<'interner> { type HirType = crate::Type; +#[derive(Debug, Error)] +pub enum MonomorphizationError { + #[error("Length of generic array could not be determined.")] + UnknownArrayLength { location: Location }, +} + +impl MonomorphizationError { + fn call_stack(&self) -> Vec { + match self { + MonomorphizationError::UnknownArrayLength { location } => vec![*location], + } + } +} + +impl From for FileDiagnostic { + fn from(error: MonomorphizationError) -> FileDiagnostic { + let call_stack = error.call_stack(); + let file_id = call_stack.last().map(|location| location.file).unwrap_or_default(); + let diagnostic = error.into_diagnostic(); + diagnostic.in_file(file_id).with_call_stack(call_stack) + } +} + +impl MonomorphizationError { + fn into_diagnostic(self) -> CustomDiagnostic { + CustomDiagnostic::simple_error( + "Internal Consistency Evaluators Errors: \n + This is likely a bug. Consider opening an issue at https://github.com/noir-lang/noir/issues".to_owned(), + self.to_string(), + noirc_errors::Span::inclusive(0, 0) + ) + } +} + /// Starting from the given `main` function, monomorphize the entire program, /// replacing all references to type variables and NamedGenerics with concrete /// types, duplicating definitions as necessary to do so. @@ -99,7 +134,10 @@ type HirType = crate::Type; /// this function. Typically, this is the function named "main" in the source project, /// but it can also be, for example, an arbitrary test function for running `nargo test`. 
#[tracing::instrument(level = "trace", skip(main, interner))] -pub fn monomorphize(main: node_interner::FuncId, interner: &mut NodeInterner) -> Program { +pub fn monomorphize( + main: node_interner::FuncId, + interner: &mut NodeInterner, +) -> Result { monomorphize_debug(main, interner, &DebugInstrumenter::default()) } @@ -107,10 +145,10 @@ pub fn monomorphize_debug( main: node_interner::FuncId, interner: &mut NodeInterner, debug_instrumenter: &DebugInstrumenter, -) -> Program { +) -> Result { let debug_type_tracker = DebugTypeTracker::build_from_debug_instrumenter(debug_instrumenter); let mut monomorphizer = Monomorphizer::new(interner, debug_type_tracker); - let function_sig = monomorphizer.compile_main(main); + let function_sig = monomorphizer.compile_main(main)?; while !monomorphizer.queue.is_empty() { let (next_fn_id, new_id, bindings, trait_method) = monomorphizer.queue.pop_front().unwrap(); @@ -118,7 +156,7 @@ pub fn monomorphize_debug( perform_instantiation_bindings(&bindings); let impl_bindings = monomorphizer.perform_impl_bindings(trait_method, next_fn_id); - monomorphizer.function(next_fn_id, new_id); + monomorphizer.function(next_fn_id, new_id)?; undo_instantiation_bindings(impl_bindings); undo_instantiation_bindings(bindings); } @@ -128,7 +166,7 @@ pub fn monomorphize_debug( monomorphizer.interner.function_meta(&main); let (debug_variables, debug_types) = monomorphizer.debug_type_tracker.extract_vars_and_types(); - Program::new( + let program = Program::new( functions, function_sig, *return_distinctness, @@ -137,7 +175,8 @@ pub fn monomorphize_debug( *kind == FunctionKind::Recursive, debug_variables, debug_types, - ) + ); + Ok(program) } impl<'interner> Monomorphizer<'interner> { @@ -233,10 +272,13 @@ impl<'interner> Monomorphizer<'interner> { self.globals.entry(id).or_default().insert(typ, new_id); } - fn compile_main(&mut self, main_id: node_interner::FuncId) -> FunctionSignature { + fn compile_main( + &mut self, + main_id: node_interner::FuncId, + ) -> Result { let new_main_id = self.next_function_id(); assert_eq!(new_main_id, Program::main_id()); - self.function(main_id, new_main_id); + self.function(main_id, new_main_id)?; self.return_location = self.interner.function(&main_id).block(self.interner).statements().last().and_then( |x| match self.interner.statement(x) { @@ -245,10 +287,14 @@ impl<'interner> Monomorphizer<'interner> { }, ); let main_meta = self.interner.function_meta(&main_id); - main_meta.function_signature() + Ok(main_meta.function_signature()) } - fn function(&mut self, f: node_interner::FuncId, id: FuncId) { + fn function( + &mut self, + f: node_interner::FuncId, + id: FuncId, + ) -> Result<(), MonomorphizationError> { if let Some((self_type, trait_id)) = self.interner.get_function_trait(&f) { let the_trait = self.interner.get_trait(trait_id); the_trait.self_type_typevar.force_bind(self_type); @@ -268,10 +314,11 @@ impl<'interner> Monomorphizer<'interner> { || matches!(modifiers.contract_function_type, Some(ContractFunctionType::Open)); let parameters = self.parameters(&meta.parameters); - let body = self.expr(body_expr_id); + let body = self.expr(body_expr_id)?; let function = ast::Function { id, name, parameters, body, return_type, unconstrained }; self.push_function(id, function); + Ok(()) } fn push_function(&mut self, id: FuncId, function: ast::Function) { @@ -331,15 +378,18 @@ impl<'interner> Monomorphizer<'interner> { } } - fn expr(&mut self, expr: node_interner::ExprId) -> ast::Expression { + fn expr( + &mut self, + expr: node_interner::ExprId, + ) 
-> Result { use ast::Expression::Literal; use ast::Literal::*; - match self.interner.expression(&expr) { - HirExpression::Ident(ident) => self.ident(ident, expr), + let expr = match self.interner.expression(&expr) { + HirExpression::Ident(ident) => self.ident(ident, expr)?, HirExpression::Literal(HirLiteral::Str(contents)) => Literal(Str(contents)), HirExpression::Literal(HirLiteral::FmtStr(contents, idents)) => { - let fields = vecmap(idents, |ident| self.expr(ident)); + let fields = try_vecmap(idents, |ident| self.expr(ident))?; Literal(FmtStr( contents, fields.len() as u64, @@ -367,27 +417,27 @@ impl<'interner> Monomorphizer<'interner> { } } HirExpression::Literal(HirLiteral::Array(array)) => match array { - HirArrayLiteral::Standard(array) => self.standard_array(expr, array), + HirArrayLiteral::Standard(array) => self.standard_array(expr, array)?, HirArrayLiteral::Repeated { repeated_element, length } => { - self.repeated_array(expr, repeated_element, length) + self.repeated_array(expr, repeated_element, length)? } }, HirExpression::Literal(HirLiteral::Unit) => ast::Expression::Block(vec![]), - HirExpression::Block(block) => self.block(block.0), + HirExpression::Block(block) => self.block(block.0)?, HirExpression::Prefix(prefix) => { let location = self.interner.expr_location(&expr); ast::Expression::Unary(ast::Unary { operator: prefix.operator, - rhs: Box::new(self.expr(prefix.rhs)), + rhs: Box::new(self.expr(prefix.rhs)?), result_type: self.convert_type(&self.interner.id_type(expr)), location, }) } HirExpression::Infix(infix) => { - let lhs = self.expr(infix.lhs); - let rhs = self.expr(infix.rhs); + let lhs = self.expr(infix.lhs)?; + let rhs = self.expr(infix.rhs)?; let operator = infix.operator.kind; let location = self.interner.expr_location(&expr); if self.interner.get_selected_impl_for_expression(expr).is_some() { @@ -418,26 +468,27 @@ impl<'interner> Monomorphizer<'interner> { } } - HirExpression::Index(index) => self.index(expr, index), + HirExpression::Index(index) => self.index(expr, index)?, HirExpression::MemberAccess(access) => { let field_index = self.interner.get_field_index(expr); - let expr = Box::new(self.expr(access.lhs)); + let expr = Box::new(self.expr(access.lhs)?); ast::Expression::ExtractTupleField(expr, field_index) } - HirExpression::Call(call) => self.function_call(call, expr), + HirExpression::Call(call) => self.function_call(call, expr)?, HirExpression::Cast(cast) => ast::Expression::Cast(ast::Cast { - lhs: Box::new(self.expr(cast.lhs)), + lhs: Box::new(self.expr(cast.lhs)?), r#type: self.convert_type(&cast.r#type), location: self.interner.expr_location(&expr), }), HirExpression::If(if_expr) => { - let cond = self.expr(if_expr.condition); - let then = self.expr(if_expr.consequence); - let else_ = if_expr.alternative.map(|alt| Box::new(self.expr(alt))); + let cond = self.expr(if_expr.condition)?; + let then = self.expr(if_expr.consequence)?; + let else_ = + if_expr.alternative.map(|alt| self.expr(alt)).transpose()?.map(Box::new); ast::Expression::If(ast::If { condition: Box::new(cond), consequence: Box::new(then), @@ -447,28 +498,30 @@ impl<'interner> Monomorphizer<'interner> { } HirExpression::Tuple(fields) => { - let fields = vecmap(fields, |id| self.expr(id)); + let fields = try_vecmap(fields, |id| self.expr(id))?; ast::Expression::Tuple(fields) } - HirExpression::Constructor(constructor) => self.constructor(constructor, expr), + HirExpression::Constructor(constructor) => self.constructor(constructor, expr)?, - HirExpression::Lambda(lambda) => 
self.lambda(lambda, expr), + HirExpression::Lambda(lambda) => self.lambda(lambda, expr)?, HirExpression::MethodCall(hir_method_call) => { unreachable!("Encountered HirExpression::MethodCall during monomorphization {hir_method_call:?}") } HirExpression::Error => unreachable!("Encountered Error node during monomorphization"), - } + }; + + Ok(expr) } fn standard_array( &mut self, array: node_interner::ExprId, array_elements: Vec, - ) -> ast::Expression { + ) -> Result { let typ = self.convert_type(&self.interner.id_type(array)); - let contents = vecmap(array_elements, |id| self.expr(id)); - ast::Expression::Literal(ast::Literal::Array(ast::ArrayLiteral { contents, typ })) + let contents = try_vecmap(array_elements, |id| self.expr(id))?; + Ok(ast::Expression::Literal(ast::Literal::Array(ast::ArrayLiteral { contents, typ }))) } fn repeated_array( @@ -476,48 +529,56 @@ impl<'interner> Monomorphizer<'interner> { array: node_interner::ExprId, repeated_element: node_interner::ExprId, length: HirType, - ) -> ast::Expression { + ) -> Result { let typ = self.convert_type(&self.interner.id_type(array)); - let length = length - .evaluate_to_u64() - .expect("Length of array is unknown when evaluating numeric generic"); + let length = length.evaluate_to_u64().ok_or_else(|| { + let location = self.interner.expr_location(&array); + MonomorphizationError::UnknownArrayLength { location } + })?; - let contents = vecmap(0..length, |_| self.expr(repeated_element)); - ast::Expression::Literal(ast::Literal::Array(ast::ArrayLiteral { contents, typ })) + let contents = try_vecmap(0..length, |_| self.expr(repeated_element))?; + Ok(ast::Expression::Literal(ast::Literal::Array(ast::ArrayLiteral { contents, typ }))) } - fn index(&mut self, id: node_interner::ExprId, index: HirIndexExpression) -> ast::Expression { + fn index( + &mut self, + id: node_interner::ExprId, + index: HirIndexExpression, + ) -> Result { let element_type = self.convert_type(&self.interner.id_type(id)); - let collection = Box::new(self.expr(index.collection)); - let index = Box::new(self.expr(index.index)); + let collection = Box::new(self.expr(index.collection)?); + let index = Box::new(self.expr(index.index)?); let location = self.interner.expr_location(&id); - ast::Expression::Index(ast::Index { collection, index, element_type, location }) + Ok(ast::Expression::Index(ast::Index { collection, index, element_type, location })) } - fn statement(&mut self, id: StmtId) -> ast::Expression { + fn statement(&mut self, id: StmtId) -> Result { match self.interner.statement(&id) { HirStatement::Let(let_statement) => self.let_statement(let_statement), HirStatement::Constrain(constrain) => { - let expr = self.expr(constrain.0); + let expr = self.expr(constrain.0)?; let location = self.interner.expr_location(&constrain.0); - let assert_message = - constrain.2.map(|assert_msg_expr| Box::new(self.expr(assert_msg_expr))); - ast::Expression::Constrain(Box::new(expr), location, assert_message) + let assert_message = constrain + .2 + .map(|assert_msg_expr| self.expr(assert_msg_expr)) + .transpose()? 
+                    .map(Box::new);
+                Ok(ast::Expression::Constrain(Box::new(expr), location, assert_message))
             }
             HirStatement::Assign(assign) => self.assign(assign),
             HirStatement::For(for_loop) => {
                 self.is_range_loop = true;
-                let start = self.expr(for_loop.start_range);
-                let end = self.expr(for_loop.end_range);
+                let start = self.expr(for_loop.start_range)?;
+                let end = self.expr(for_loop.end_range)?;
                 self.is_range_loop = false;
                 let index_variable = self.next_local_id();
                 self.define_local(for_loop.identifier.id, index_variable);

-                let block = Box::new(self.expr(for_loop.block));
+                let block = Box::new(self.expr(for_loop.block)?);

-                ast::Expression::For(ast::For {
+                Ok(ast::Expression::For(ast::For {
                     index_variable,
                     index_name: self.interner.definition_name(for_loop.identifier.id).to_owned(),
                     index_type: self.convert_type(&self.interner.id_type(for_loop.start_range)),
@@ -526,25 +587,30 @@ impl<'interner> Monomorphizer<'interner> {
                     start_range_location: self.interner.expr_location(&for_loop.start_range),
                     end_range_location: self.interner.expr_location(&for_loop.end_range),
                     block,
-                })
+                }))
             }
             HirStatement::Expression(expr) => self.expr(expr),
-            HirStatement::Semi(expr) => ast::Expression::Semi(Box::new(self.expr(expr))),
+            HirStatement::Semi(expr) => {
+                self.expr(expr).map(|expr| ast::Expression::Semi(Box::new(expr)))
+            }
             HirStatement::Error => unreachable!(),
         }
     }

-    fn let_statement(&mut self, let_statement: HirLetStatement) -> ast::Expression {
-        let expr = self.expr(let_statement.expression);
+    fn let_statement(
+        &mut self,
+        let_statement: HirLetStatement,
+    ) -> Result<ast::Expression, MonomorphizationError> {
+        let expr = self.expr(let_statement.expression)?;
         let expected_type = self.interner.id_type(let_statement.expression);
-        self.unpack_pattern(let_statement.pattern, expr, &expected_type)
+        Ok(self.unpack_pattern(let_statement.pattern, expr, &expected_type))
     }

     fn constructor(
         &mut self,
         constructor: HirConstructorExpression,
         id: node_interner::ExprId,
-    ) -> ast::Expression {
+    ) -> Result<ast::Expression, MonomorphizationError> {
         let typ = self.interner.id_type(id);
         let field_types = unwrap_struct_type(&typ);
@@ -561,7 +627,7 @@ impl<'interner> Monomorphizer<'interner> {
             let typ = self.convert_type(field_type);
             field_vars.insert(field_name.0.contents.clone(), (new_id, typ));

-            let expression = Box::new(self.expr(expr_id));
+            let expression = Box::new(self.expr(expr_id)?);

             new_exprs.push(ast::Expression::Let(ast::Let {
                 id: new_id,
@@ -586,11 +652,15 @@ impl<'interner> Monomorphizer<'interner> {
         // Finally we can return the created Tuple from the new block
         new_exprs.push(ast::Expression::Tuple(field_idents));
-        ast::Expression::Block(new_exprs)
+        Ok(ast::Expression::Block(new_exprs))
     }

-    fn block(&mut self, statement_ids: Vec<StmtId>) -> ast::Expression {
-        ast::Expression::Block(vecmap(statement_ids, |id| self.statement(id)))
+    fn block(
+        &mut self,
+        statement_ids: Vec<StmtId>,
+    ) -> Result<ast::Expression, MonomorphizationError> {
+        let stmts = try_vecmap(statement_ids, |id| self.statement(id));
+        stmts.map(ast::Expression::Block)
     }

     fn unpack_pattern(
@@ -701,15 +771,19 @@ impl<'interner> Monomorphizer<'interner> {
         Some(ast::Ident { location: Some(ident.location), mutable, definition, name, typ })
     }

-    fn ident(&mut self, ident: HirIdent, expr_id: node_interner::ExprId) -> ast::Expression {
+    fn ident(
+        &mut self,
+        ident: HirIdent,
+        expr_id: node_interner::ExprId,
+    ) -> Result<ast::Expression, MonomorphizationError> {
         let typ = self.interner.id_type(expr_id);

         if let ImplKind::TraitMethod(method, _, _) = ident.impl_kind {
-            return self.resolve_trait_method_reference(expr_id, typ, method);
+            return Ok(self.resolve_trait_method_reference(expr_id, typ, method));
         }
         let definition = self.interner.definition(ident.id);
-        match &definition.kind {
+        let ident = match &definition.kind {
             DefinitionKind::Function(func_id) => {
                 let mutable = definition.mutable;
                 let location = Some(ident.location);
@@ -736,7 +810,7 @@ impl<'interner> Monomorphizer<'interner> {
                         "Globals should have a corresponding let statement by monomorphization"
                     )
                 };
-                self.expr(let_.expression)
+                self.expr(let_.expression)?
             }
             DefinitionKind::Local(_) => self.lookup_captured_expr(ident.id).unwrap_or_else(|| {
                 let ident = self.local_ident(&ident).unwrap();
@@ -757,7 +831,9 @@ impl<'interner> Monomorphizer<'interner> {
                 let typ = self.convert_type(&typ);
                 ast::Expression::Literal(ast::Literal::Integer(value, typ, location))
             }
-        }
+        };
+
+        Ok(ident)
     }

     /// Convert a non-tuple/struct type to a monomorphized type
@@ -949,12 +1025,12 @@ impl<'interner> Monomorphizer<'interner> {
         &mut self,
         call: HirCallExpression,
         id: node_interner::ExprId,
-    ) -> ast::Expression {
-        let original_func = Box::new(self.expr(call.func));
-        let mut arguments = vecmap(&call.arguments, |id| self.expr(*id));
+    ) -> Result<ast::Expression, MonomorphizationError> {
+        let original_func = Box::new(self.expr(call.func)?);
+        let mut arguments = try_vecmap(&call.arguments, |id| self.expr(*id))?;
         let hir_arguments = vecmap(&call.arguments, |id| self.interner.expression(id));

-        self.patch_debug_instrumentation_call(&call, &mut arguments);
+        self.patch_debug_instrumentation_call(&call, &mut arguments)?;

         let return_type = self.interner.id_type(id);
         let return_type = self.convert_type(&return_type);
@@ -969,7 +1045,7 @@ impl<'interner> Monomorphizer<'interner> {
                     // The second argument is expected to always be an ident
                     self.append_printable_type_info(&hir_arguments[1], &mut arguments);
                 } else if name.as_str() == "assert_message" {
-                    // The first argument to the `assert_message` oracle is the expression passed as a mesage to an `assert` or `assert_eq` statement
+                    // The first argument to the `assert_message` oracle is the expression passed as a message to an `assert` or `assert_eq` statement
                     self.append_printable_type_info(&hir_arguments[0], &mut arguments);
                 }
             }
@@ -1017,9 +1093,9 @@ impl<'interner> Monomorphizer<'interner> {

         if !block_expressions.is_empty() {
             block_expressions.push(call);
-            ast::Expression::Block(block_expressions)
+            Ok(ast::Expression::Block(block_expressions))
         } else {
-            call
+            Ok(call)
         }
     }

@@ -1190,47 +1266,59 @@ impl<'interner> Monomorphizer<'interner> {
             .collect()
     }

-    fn assign(&mut self, assign: HirAssignStatement) -> ast::Expression {
-        let expression = Box::new(self.expr(assign.expression));
-        let lvalue = self.lvalue(assign.lvalue);
-        ast::Expression::Assign(ast::Assign { expression, lvalue })
+    fn assign(
+        &mut self,
+        assign: HirAssignStatement,
+    ) -> Result<ast::Expression, MonomorphizationError> {
+        let expression = Box::new(self.expr(assign.expression)?);
+        let lvalue = self.lvalue(assign.lvalue)?;
+        Ok(ast::Expression::Assign(ast::Assign { expression, lvalue }))
     }

-    fn lvalue(&mut self, lvalue: HirLValue) -> ast::LValue {
-        match lvalue {
+    fn lvalue(&mut self, lvalue: HirLValue) -> Result<ast::LValue, MonomorphizationError> {
+        let value = match lvalue {
             HirLValue::Ident(ident, _) => self
                 .lookup_captured_lvalue(ident.id)
                 .unwrap_or_else(|| ast::LValue::Ident(self.local_ident(&ident).unwrap())),
             HirLValue::MemberAccess { object, field_index, .. } => {
                 let field_index = field_index.unwrap();
-                let object = Box::new(self.lvalue(*object));
+                let object = Box::new(self.lvalue(*object)?);
                 ast::LValue::MemberAccess { object, field_index }
             }
             HirLValue::Index { array, index, typ } => {
                 let location = self.interner.expr_location(&index);
-                let array = Box::new(self.lvalue(*array));
-                let index = Box::new(self.expr(index));
+                let array = Box::new(self.lvalue(*array)?);
+                let index = Box::new(self.expr(index)?);
                 let element_type = self.convert_type(&typ);
                 ast::LValue::Index { array, index, element_type, location }
             }
             HirLValue::Dereference { lvalue, element_type } => {
-                let reference = Box::new(self.lvalue(*lvalue));
+                let reference = Box::new(self.lvalue(*lvalue)?);
                 let element_type = self.convert_type(&element_type);
                 ast::LValue::Dereference { reference, element_type }
             }
-        }
+        };
+
+        Ok(value)
     }

-    fn lambda(&mut self, lambda: HirLambda, expr: node_interner::ExprId) -> ast::Expression {
+    fn lambda(
+        &mut self,
+        lambda: HirLambda,
+        expr: node_interner::ExprId,
+    ) -> Result<ast::Expression, MonomorphizationError> {
         if lambda.captures.is_empty() {
             self.lambda_no_capture(lambda)
         } else {
-            let (setup, closure_variable) = self.lambda_with_setup(lambda, expr);
-            ast::Expression::Block(vec![setup, closure_variable])
+            let (setup, closure_variable) = self.lambda_with_setup(lambda, expr)?;
+            Ok(ast::Expression::Block(vec![setup, closure_variable]))
         }
     }

-    fn lambda_no_capture(&mut self, lambda: HirLambda) -> ast::Expression {
+    fn lambda_no_capture(
+        &mut self,
+        lambda: HirLambda,
+    ) -> Result<ast::Expression, MonomorphizationError> {
         let ret_type = self.convert_type(&lambda.return_type);
         let lambda_name = "lambda";
         let parameter_types = vecmap(&lambda.parameters, |(_, typ)| self.convert_type(typ));
@@ -1240,7 +1328,7 @@ impl<'interner> Monomorphizer<'interner> {
             vecmap(lambda.parameters, |(pattern, typ)| (pattern, typ, Visibility::Private)).into();

         let parameters = self.parameters(&parameters);
-        let body = self.expr(lambda.body);
+        let body = self.expr(lambda.body)?;

         let id = self.next_function_id();
         let return_type = ret_type.clone();
@@ -1254,20 +1342,20 @@ impl<'interner> Monomorphizer<'interner> {
             ast::Type::Function(parameter_types, Box::new(ret_type), Box::new(ast::Type::Unit));

         let name = lambda_name.to_owned();
-        ast::Expression::Ident(ast::Ident {
+        Ok(ast::Expression::Ident(ast::Ident {
             definition: Definition::Function(id),
             mutable: false,
             location: None,
             name,
             typ,
-        })
+        }))
     }

     fn lambda_with_setup(
         &mut self,
         lambda: HirLambda,
         expr: node_interner::ExprId,
-    ) -> (ast::Expression, ast::Expression) {
+    ) -> Result<(ast::Expression, ast::Expression), MonomorphizationError> {
        // returns (<closure setup>, <closure variable>)
        //   which can be used directly in callsites or transformed
        //   directly to a single `Expression`
@@ -1343,7 +1431,7 @@ impl<'interner> Monomorphizer<'interner> {
         self.lambda_envs_stack
             .push(LambdaContext { env_ident: env_ident.clone(), captures: lambda.captures });
-        let body = self.expr(lambda.body);
+        let body = self.expr(lambda.body)?;
         self.lambda_envs_stack.pop();

         let lambda_fn_typ: ast::Type =
@@ -1385,7 +1473,7 @@ impl<'interner> Monomorphizer<'interner> {
             typ: ast::Type::Tuple(vec![env_typ, lambda_fn_typ]),
         });

-        (block_let_stmt, closure_ident)
+        Ok((block_let_stmt, closure_ident))
     }

     /// Implements std::unsafe::zeroed by returning an appropriate zeroed
diff --git a/compiler/noirc_frontend/src/tests.rs b/compiler/noirc_frontend/src/tests.rs
index c18379f1c26..c661cc92eef 100644
--- a/compiler/noirc_frontend/src/tests.rs
+++ b/compiler/noirc_frontend/src/tests.rs
@@ -1130,7 +1130,7 @@ mod test {

     fn check_rewrite(src: &str, expected: &str) {
         let (_program, mut context, _errors) = get_program(src);
         let main_func_id = context.def_interner.find_function("main").unwrap();
-        let program = monomorphize(main_func_id, &mut context.def_interner);
+        let program = monomorphize(main_func_id, &mut context.def_interner).unwrap();
         assert!(format!("{}", program) == expected);
     }

diff --git a/tooling/nargo/src/ops/test.rs b/tooling/nargo/src/ops/test.rs
index 0929739a6ab..92c09ec889e 100644
--- a/tooling/nargo/src/ops/test.rs
+++ b/tooling/nargo/src/ops/test.rs
@@ -1,5 +1,5 @@
 use acvm::{acir::native_types::WitnessMap, BlackBoxFunctionSolver};
-use noirc_driver::{compile_no_check, CompileOptions};
+use noirc_driver::{compile_no_check, CompileError, CompileOptions};
 use noirc_errors::{debug_info::DebugInfo, FileDiagnostic};
 use noirc_evaluator::errors::RuntimeError;
 use noirc_frontend::hir::{def_map::TestFunction, Context};
@@ -45,14 +45,18 @@ pub fn run_test<B: BlackBoxFunctionSolver>(
 /// that a constraint was never satisfiable.
 /// An example of this is the program `assert(false)`
 /// In that case, we check if the test function should fail, and if so, we return `TestStatus::Pass`.
-fn test_status_program_compile_fail(err: RuntimeError, test_function: TestFunction) -> TestStatus {
+fn test_status_program_compile_fail(err: CompileError, test_function: TestFunction) -> TestStatus {
     // The test has failed compilation, but it should never fail. Report error.
     if !test_function.should_fail() {
         return TestStatus::CompileError(err.into());
     }

     // The test has failed compilation, extract the assertion message if present and check if it's expected.
-    let assert_message = if let RuntimeError::FailedConstraint { assert_message, .. } = &err {
+    let assert_message = if let CompileError::RuntimeError(RuntimeError::FailedConstraint {
+        assert_message,
+        ..
+    }) = &err
+    {
         assert_message.clone()
     } else {
         None

From 10e82920798380f50046e52db4a20ca205191ab7 Mon Sep 17 00:00:00 2001
From: guipublic <47281315+guipublic@users.noreply.github.com>
Date: Mon, 26 Feb 2024 18:36:25 +0100
Subject: [PATCH 40/45] feat: add poseidon2 opcode implementation for
 acvm/brillig, and Noir (#4398)

and poseidon2 noir implementation

# Description

## Problem\*

Resolves #4170

## Summary\*

This PR implements the Poseidon2 permutation for ACVM and Brillig, enabling use of the opcode. It also adds a Noir implementation of Poseidon2 to the stdlib, built on the new opcode.

## Additional Context

## Documentation\*

Check one:
- [ ] No documentation needed.
- [X] Documentation included in this PR.
- [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR.

# PR Checklist\*

- [X] I have tested the changes locally.
- [X] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings.
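For reviewers, here is a minimal usage sketch of the new stdlib entry point, `std::hash::poseidon2::Poseidon2::hash`, as exercised by the updated `poseidon_bn254_hash` test further down in this patch. The program itself (its `main` signature and array size) is illustrative and not part of the diff:

```rust
use dep::std::hash::poseidon2::Poseidon2;

fn main(input: [Field; 4]) -> pub Field {
    // Hash all four field elements; passing a smaller `message_size`
    // hashes only that prefix of the input array.
    Poseidon2::hash(input, input.len() as u32)
}
```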
--------- Co-authored-by: kevaundray --- Cargo.lock | 1 + acvm-repo/acvm/src/pwg/blackbox/hash.rs | 36 +- acvm-repo/acvm/src/pwg/blackbox/mod.rs | 8 +- .../src/curve_specific_solver.rs | 12 + acvm-repo/bn254_blackbox_solver/Cargo.toml | 1 + acvm-repo/bn254_blackbox_solver/src/lib.rs | 11 + .../bn254_blackbox_solver/src/poseidon2.rs | 1043 +++++++++++++++++ acvm-repo/brillig_vm/src/black_box.rs | 13 +- acvm-repo/brillig_vm/src/lib.rs | 7 + .../noirc_evaluator/src/brillig/brillig_ir.rs | 8 + .../cryptographic_primitives/hashes.mdx | 13 + noir_stdlib/src/hash.nr | 3 +- noir_stdlib/src/hash/poseidon2.nr | 119 ++ .../poseidon_bn254_hash/Prover.toml | 5 + .../poseidon_bn254_hash/src/main.nr | 6 +- tooling/lsp/src/solver.rs | 8 + 16 files changed, 1288 insertions(+), 6 deletions(-) create mode 100644 acvm-repo/bn254_blackbox_solver/src/poseidon2.rs create mode 100644 noir_stdlib/src/hash/poseidon2.nr diff --git a/Cargo.lock b/Cargo.lock index 0f575d9c46e..714b700119a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -589,6 +589,7 @@ dependencies = [ "js-sys", "noir_grumpkin", "num-bigint", + "num-traits", "pkg-config", "reqwest", "rust-embed", diff --git a/acvm-repo/acvm/src/pwg/blackbox/hash.rs b/acvm-repo/acvm/src/pwg/blackbox/hash.rs index 06489822c92..1bc26f06188 100644 --- a/acvm-repo/acvm/src/pwg/blackbox/hash.rs +++ b/acvm-repo/acvm/src/pwg/blackbox/hash.rs @@ -3,7 +3,7 @@ use acir::{ native_types::{Witness, WitnessMap}, BlackBoxFunc, FieldElement, }; -use acvm_blackbox_solver::{sha256compression, BlackBoxResolutionError}; +use acvm_blackbox_solver::{sha256compression, BlackBoxFunctionSolver, BlackBoxResolutionError}; use crate::pwg::{insert_value, witness_to_value}; use crate::OpcodeResolutionError; @@ -131,3 +131,37 @@ pub(crate) fn solve_sha_256_permutation_opcode( Ok(()) } + +pub(crate) fn solve_poseidon2_permutation_opcode( + backend: &impl BlackBoxFunctionSolver, + initial_witness: &mut WitnessMap, + inputs: &[FunctionInput], + outputs: &[Witness], + len: u32, +) -> Result<(), OpcodeResolutionError> { + if len as usize != inputs.len() { + return Err(OpcodeResolutionError::BlackBoxFunctionFailed( + acir::BlackBoxFunc::Poseidon2Permutation, + format!( + "the number of inputs does not match specified length. 
{} > {}", + inputs.len(), + len + ), + )); + } + + // Read witness assignments + let mut state = Vec::new(); + for input in inputs.iter() { + let witness_assignment = witness_to_value(initial_witness, input.witness)?; + state.push(*witness_assignment); + } + + let state = backend.poseidon2_permutation(&state, len)?; + + // Write witness assignments + for (output_witness, value) in outputs.iter().zip(state.into_iter()) { + insert_value(output_witness, value, initial_witness)?; + } + Ok(()) +} diff --git a/acvm-repo/acvm/src/pwg/blackbox/mod.rs b/acvm-repo/acvm/src/pwg/blackbox/mod.rs index 7ae92fd84fc..4309cad1b2e 100644 --- a/acvm-repo/acvm/src/pwg/blackbox/mod.rs +++ b/acvm-repo/acvm/src/pwg/blackbox/mod.rs @@ -5,7 +5,9 @@ use acir::{ }; use acvm_blackbox_solver::{blake2s, blake3, keccak256, keccakf1600, sha256}; -use self::{bigint::BigIntSolver, pedersen::pedersen_hash}; +use self::{ + bigint::BigIntSolver, hash::solve_poseidon2_permutation_opcode, pedersen::pedersen_hash, +}; use super::{insert_value, OpcodeNotSolvable, OpcodeResolutionError}; use crate::{pwg::witness_to_value, BlackBoxFunctionSolver}; @@ -204,7 +206,6 @@ pub(crate) fn solve( BlackBoxFuncCall::BigIntToLeBytes { input, outputs } => { bigint_solver.bigint_to_bytes(*input, outputs, initial_witness) } - BlackBoxFuncCall::Poseidon2Permutation { .. } => todo!(), BlackBoxFuncCall::Sha256Compression { inputs, hash_values, outputs } => { solve_sha_256_permutation_opcode( initial_witness, @@ -214,5 +215,8 @@ pub(crate) fn solve( bb_func.get_black_box_func(), ) } + BlackBoxFuncCall::Poseidon2Permutation { inputs, outputs, len } => { + solve_poseidon2_permutation_opcode(backend, initial_witness, inputs, outputs, *len) + } } } diff --git a/acvm-repo/blackbox_solver/src/curve_specific_solver.rs b/acvm-repo/blackbox_solver/src/curve_specific_solver.rs index 2234710dec0..f0ab4561229 100644 --- a/acvm-repo/blackbox_solver/src/curve_specific_solver.rs +++ b/acvm-repo/blackbox_solver/src/curve_specific_solver.rs @@ -36,6 +36,11 @@ pub trait BlackBoxFunctionSolver { input2_x: &FieldElement, input2_y: &FieldElement, ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError>; + fn poseidon2_permutation( + &self, + _inputs: &[FieldElement], + _len: u32, + ) -> Result, BlackBoxResolutionError>; } pub struct StubbedBlackBoxSolver; @@ -89,4 +94,11 @@ impl BlackBoxFunctionSolver for StubbedBlackBoxSolver { ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { Err(Self::fail(BlackBoxFunc::EmbeddedCurveAdd)) } + fn poseidon2_permutation( + &self, + _inputs: &[FieldElement], + _len: u32, + ) -> Result, BlackBoxResolutionError> { + Err(Self::fail(BlackBoxFunc::Poseidon2Permutation)) + } } diff --git a/acvm-repo/bn254_blackbox_solver/Cargo.toml b/acvm-repo/bn254_blackbox_solver/Cargo.toml index ef80e2c1c0f..ea601a6b80f 100644 --- a/acvm-repo/bn254_blackbox_solver/Cargo.toml +++ b/acvm-repo/bn254_blackbox_solver/Cargo.toml @@ -16,6 +16,7 @@ repository.workspace = true acir.workspace = true acvm_blackbox_solver.workspace = true thiserror.workspace = true +num-traits.workspace = true rust-embed = { version = "6.6.0", features = [ "debug-embed", diff --git a/acvm-repo/bn254_blackbox_solver/src/lib.rs b/acvm-repo/bn254_blackbox_solver/src/lib.rs index 13aa956f9e1..be0e60ada96 100644 --- a/acvm-repo/bn254_blackbox_solver/src/lib.rs +++ b/acvm-repo/bn254_blackbox_solver/src/lib.rs @@ -6,9 +6,11 @@ use acir::{BlackBoxFunc, FieldElement}; use acvm_blackbox_solver::{BlackBoxFunctionSolver, BlackBoxResolutionError}; mod 
fixed_base_scalar_mul; +mod poseidon2; mod wasm; pub use fixed_base_scalar_mul::{embedded_curve_add, fixed_base_scalar_mul}; +use poseidon2::Poseidon2; use wasm::Barretenberg; use self::wasm::{Pedersen, SchnorrSig}; @@ -97,4 +99,13 @@ impl BlackBoxFunctionSolver for Bn254BlackBoxSolver { ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { embedded_curve_add(*input1_x, *input1_y, *input2_x, *input2_y) } + + fn poseidon2_permutation( + &self, + inputs: &[FieldElement], + len: u32, + ) -> Result, BlackBoxResolutionError> { + let poseidon = Poseidon2::new(); + poseidon.permutation(inputs, len) + } } diff --git a/acvm-repo/bn254_blackbox_solver/src/poseidon2.rs b/acvm-repo/bn254_blackbox_solver/src/poseidon2.rs new file mode 100644 index 00000000000..e0ed5bcd053 --- /dev/null +++ b/acvm-repo/bn254_blackbox_solver/src/poseidon2.rs @@ -0,0 +1,1043 @@ +use acir::FieldElement; +use acvm_blackbox_solver::BlackBoxResolutionError; +use num_bigint::BigUint; +use num_traits::Num; + +pub(crate) struct Poseidon2 { + t: u32, + rounds_f: u32, + rounds_p: u32, + internal_matrix_diagonal: [FieldElement; 4], + round_constant: [[FieldElement; 4]; 64], +} + +impl Poseidon2 { + pub(crate) fn new() -> Self { + Poseidon2 { + t: 4, + rounds_f: 8, + rounds_p: 56, + internal_matrix_diagonal: [ + Poseidon2::field_from_hex( + "0x10dc6e9c006ea38b04b1e03b4bd9490c0d03f98929ca1d7fb56821fd19d3b6e7", + ), + Poseidon2::field_from_hex( + "0x0c28145b6a44df3e0149b3d0a30b3bb599df9756d4dd9b84a86b38cfb45a740b", + ), + Poseidon2::field_from_hex( + "0x00544b8338791518b2c7645a50392798b21f75bb60e3596170067d00141cac15", + ), + Poseidon2::field_from_hex( + "0x222c01175718386f2e2e82eb122789e352e105a3b8fa852613bc534433ee428b", + ), + ], + round_constant: [ + [ + Poseidon2::field_from_hex( + "0x19b849f69450b06848da1d39bd5e4a4302bb86744edc26238b0878e269ed23e5", + ), + Poseidon2::field_from_hex( + "0x265ddfe127dd51bd7239347b758f0a1320eb2cc7450acc1dad47f80c8dcf34d6", + ), + Poseidon2::field_from_hex( + "0x199750ec472f1809e0f66a545e1e51624108ac845015c2aa3dfc36bab497d8aa", + ), + Poseidon2::field_from_hex( + "0x157ff3fe65ac7208110f06a5f74302b14d743ea25067f0ffd032f787c7f1cdf8", + ), + ], + [ + Poseidon2::field_from_hex( + "0x2e49c43c4569dd9c5fd35ac45fca33f10b15c590692f8beefe18f4896ac94902", + ), + Poseidon2::field_from_hex( + "0x0e35fb89981890520d4aef2b6d6506c3cb2f0b6973c24fa82731345ffa2d1f1e", + ), + Poseidon2::field_from_hex( + "0x251ad47cb15c4f1105f109ae5e944f1ba9d9e7806d667ffec6fe723002e0b996", + ), + Poseidon2::field_from_hex( + "0x13da07dc64d428369873e97160234641f8beb56fdd05e5f3563fa39d9c22df4e", + ), + ], + [ + Poseidon2::field_from_hex( + "0x0c009b84e650e6d23dc00c7dccef7483a553939689d350cd46e7b89055fd4738", + ), + Poseidon2::field_from_hex( + "0x011f16b1c63a854f01992e3956f42d8b04eb650c6d535eb0203dec74befdca06", + ), + Poseidon2::field_from_hex( + "0x0ed69e5e383a688f209d9a561daa79612f3f78d0467ad45485df07093f367549", + ), + Poseidon2::field_from_hex( + "0x04dba94a7b0ce9e221acad41472b6bbe3aec507f5eb3d33f463672264c9f789b", + ), + ], + [ + Poseidon2::field_from_hex( + "0x0a3f2637d840f3a16eb094271c9d237b6036757d4bb50bf7ce732ff1d4fa28e8", + ), + Poseidon2::field_from_hex( + "0x259a666f129eea198f8a1c502fdb38fa39b1f075569564b6e54a485d1182323f", + ), + Poseidon2::field_from_hex( + "0x28bf7459c9b2f4c6d8e7d06a4ee3a47f7745d4271038e5157a32fdf7ede0d6a1", + ), + Poseidon2::field_from_hex( + "0x0a1ca941f057037526ea200f489be8d4c37c85bbcce6a2aeec91bd6941432447", + ), + ], + [ + Poseidon2::field_from_hex( + 
"0x0c6f8f958be0e93053d7fd4fc54512855535ed1539f051dcb43a26fd926361cf", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x123106a93cd17578d426e8128ac9d90aa9e8a00708e296e084dd57e69caaf811", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x26e1ba52ad9285d97dd3ab52f8e840085e8fa83ff1e8f1877b074867cd2dee75", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x1cb55cad7bd133de18a64c5c47b9c97cbe4d8b7bf9e095864471537e6a4ae2c5", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x1dcd73e46acd8f8e0e2c7ce04bde7f6d2a53043d5060a41c7143f08e6e9055d0", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x011003e32f6d9c66f5852f05474a4def0cda294a0eb4e9b9b12b9bb4512e5574", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x2b1e809ac1d10ab29ad5f20d03a57dfebadfe5903f58bafed7c508dd2287ae8c", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x2539de1785b735999fb4dac35ee17ed0ef995d05ab2fc5faeaa69ae87bcec0a5", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x0c246c5a2ef8ee0126497f222b3e0a0ef4e1c3d41c86d46e43982cb11d77951d", + ), + 
Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x192089c4974f68e95408148f7c0632edbb09e6a6ad1a1c2f3f0305f5d03b527b", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x1eae0ad8ab68b2f06a0ee36eeb0d0c058529097d91096b756d8fdc2fb5a60d85", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x179190e5d0e22179e46f8282872abc88db6e2fdc0dee99e69768bd98c5d06bfb", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x29bb9e2c9076732576e9a81c7ac4b83214528f7db00f31bf6cafe794a9b3cd1c", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x225d394e42207599403efd0c2464a90d52652645882aac35b10e590e6e691e08", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x064760623c25c8cf753d238055b444532be13557451c087de09efd454b23fd59", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x10ba3a0e01df92e87f301c4b716d8a394d67f4bf42a75c10922910a78f6b5b87", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x0e070bf53f8451b24f9c6e96b0c2a801cb511bc0c242eb9d361b77693f21471c", + ), + Poseidon2::field_from_hex( + 
"0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x1b94cd61b051b04dd39755ff93821a73ccd6cb11d2491d8aa7f921014de252fb", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x1d7cb39bafb8c744e148787a2e70230f9d4e917d5713bb050487b5aa7d74070b", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x2ec93189bd1ab4f69117d0fe980c80ff8785c2961829f701bb74ac1f303b17db", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x2db366bfdd36d277a692bb825b86275beac404a19ae07a9082ea46bd83517926", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x062100eb485db06269655cf186a68532985275428450359adc99cec6960711b8", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x0761d33c66614aaa570e7f1e8244ca1120243f92fa59e4f900c567bf41f5a59b", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x20fc411a114d13992c2705aa034e3f315d78608a0f7de4ccf7a72e494855ad0d", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x25b5c004a4bdfcb5add9ec4e9ab219ba102c67e8b3effb5fc3a30f317250bc5a", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + 
Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x23b1822d278ed632a494e58f6df6f5ed038b186d8474155ad87e7dff62b37f4b", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x22734b4c5c3f9493606c4ba9012499bf0f14d13bfcfcccaa16102a29cc2f69e0", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x26c0c8fe09eb30b7e27a74dc33492347e5bdff409aa3610254413d3fad795ce5", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x070dd0ccb6bd7bbae88eac03fa1fbb26196be3083a809829bbd626df348ccad9", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x12b6595bdb329b6fb043ba78bb28c3bec2c0a6de46d8c5ad6067c4ebfd4250da", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x248d97d7f76283d63bec30e7a5876c11c06fca9b275c671c5e33d95bb7e8d729", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x1a306d439d463b0816fc6fd64cc939318b45eb759ddde4aa106d15d9bd9baaaa", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x28a8f8372e3c38daced7c00421cb4621f4f1b54ddc27821b0d62d3d6ec7c56cf", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + 
"0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x0094975717f9a8a8bb35152f24d43294071ce320c829f388bc852183e1e2ce7e", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x04d5ee4c3aa78f7d80fde60d716480d3593f74d4f653ae83f4103246db2e8d65", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x2a6cf5e9aa03d4336349ad6fb8ed2269c7bef54b8822cc76d08495c12efde187", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x2304d31eaab960ba9274da43e19ddeb7f792180808fd6e43baae48d7efcba3f3", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x03fd9ac865a4b2a6d5e7009785817249bff08a7e0726fcb4e1c11d39d199f0b0", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x00b7258ded52bbda2248404d55ee5044798afc3a209193073f7954d4d63b0b64", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x159f81ada0771799ec38fca2d4bf65ebb13d3a74f3298db36272c5ca65e92d9a", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x1ef90e67437fbc8550237a75bc28e3bb9000130ea25f0c5471e144cf4264431f", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + 
Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x1e65f838515e5ff0196b49aa41a2d2568df739bc176b08ec95a79ed82932e30d", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x2b1b045def3a166cec6ce768d079ba74b18c844e570e1f826575c1068c94c33f", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x0832e5753ceb0ff6402543b1109229c165dc2d73bef715e3f1c6e07c168bb173", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x02f614e9cedfb3dc6b762ae0a37d41bab1b841c2e8b6451bc5a8e3c390b6ad16", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x0e2427d38bd46a60dd640b8e362cad967370ebb777bedff40f6a0be27e7ed705", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x0493630b7c670b6deb7c84d414e7ce79049f0ec098c3c7c50768bbe29214a53a", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x22ead100e8e482674decdab17066c5a26bb1515355d5461a3dc06cc85327cea9", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x25b3e56e655b42cdaae2626ed2554d48583f1ae35626d04de5084e0b6d2a6f16", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + 
"0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x1e32752ada8836ef5837a6cde8ff13dbb599c336349e4c584b4fdc0a0cf6f9d0", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x2fa2a871c15a387cc50f68f6f3c3455b23c00995f05078f672a9864074d412e5", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x2f569b8a9a4424c9278e1db7311e889f54ccbf10661bab7fcd18e7c7a7d83505", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x044cb455110a8fdd531ade530234c518a7df93f7332ffd2144165374b246b43d", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x227808de93906d5d420246157f2e42b191fe8c90adfe118178ddc723a5319025", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x02fcca2934e046bc623adead873579865d03781ae090ad4a8579d2e7a6800355", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x0ef915f0ac120b876abccceb344a1d36bad3f3c5ab91a8ddcbec2e060d8befac", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + Poseidon2::field_from_hex( + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ], + [ + Poseidon2::field_from_hex( + "0x1797130f4b7a3e1777eb757bc6f287f6ab0fb85f6be63b09f3b16ef2b1405d38", + ), + Poseidon2::field_from_hex( + "0x0a76225dc04170ae3306c85abab59e608c7f497c20156d4d36c668555decc6e5", + ), + Poseidon2::field_from_hex( + "0x1fffb9ec1992d66ba1e77a7b93209af6f8fa76d48acb664796174b5326a31a5c", + ), + Poseidon2::field_from_hex( + "0x25721c4fc15a3f2853b57c338fa538d85f8fbba6c6b9c6090611889b797b9c5f", + ), + ], + [ + 
Poseidon2::field_from_hex( + "0x0c817fd42d5f7a41215e3d07ba197216adb4c3790705da95eb63b982bfcaf75a", + ), + Poseidon2::field_from_hex( + "0x13abe3f5239915d39f7e13c2c24970b6df8cf86ce00a22002bc15866e52b5a96", + ), + Poseidon2::field_from_hex( + "0x2106feea546224ea12ef7f39987a46c85c1bc3dc29bdbd7a92cd60acb4d391ce", + ), + Poseidon2::field_from_hex( + "0x21ca859468a746b6aaa79474a37dab49f1ca5a28c748bc7157e1b3345bb0f959", + ), + ], + [ + Poseidon2::field_from_hex( + "0x05ccd6255c1e6f0c5cf1f0df934194c62911d14d0321662a8f1a48999e34185b", + ), + Poseidon2::field_from_hex( + "0x0f0e34a64b70a626e464d846674c4c8816c4fb267fe44fe6ea28678cb09490a4", + ), + Poseidon2::field_from_hex( + "0x0558531a4e25470c6157794ca36d0e9647dbfcfe350d64838f5b1a8a2de0d4bf", + ), + Poseidon2::field_from_hex( + "0x09d3dca9173ed2faceea125157683d18924cadad3f655a60b72f5864961f1455", + ), + ], + [ + Poseidon2::field_from_hex( + "0x0328cbd54e8c0913493f866ed03d218bf23f92d68aaec48617d4c722e5bd4335", + ), + Poseidon2::field_from_hex( + "0x2bf07216e2aff0a223a487b1a7094e07e79e7bcc9798c648ee3347dd5329d34b", + ), + Poseidon2::field_from_hex( + "0x1daf345a58006b736499c583cb76c316d6f78ed6a6dffc82111e11a63fe412df", + ), + Poseidon2::field_from_hex( + "0x176563472456aaa746b694c60e1823611ef39039b2edc7ff391e6f2293d2c404", + ), + ], + ], + } + } + fn field_from_hex(hex: &str) -> FieldElement { + let bigint = BigUint::from_str_radix(hex.strip_prefix("0x").unwrap(), 16).unwrap(); + FieldElement::from_be_bytes_reduce(&bigint.to_bytes_be()) + } + + fn single_box(x: FieldElement) -> FieldElement { + let s = x * x; + s * s * x + } + + fn s_box(input: &mut [FieldElement]) { + for i in input { + *i = Self::single_box(*i); + } + } + + fn add_round_constants(&self, state: &mut [FieldElement], round: usize) { + for (state_element, constant_element) in state.iter_mut().zip(self.round_constant[round]) { + *state_element += constant_element; + } + } + + /// Algorithm is taken directly from the Poseidon2 implementation in Barretenberg crypto module. + fn matrix_multiplication_4x4(input: &mut [FieldElement]) { + assert!(input.len() == 4); + let t0 = input[0] + input[1]; // A + B + let t1 = input[2] + input[3]; // C + D + let mut t2 = input[1] + input[1]; // 2B + t2 += t1; // 2B + C + D + let mut t3 = input[3] + input[3]; // 2D + t3 += t0; // 2D + A + B + let mut t4 = t1 + t1; + t4 += t4; + t4 += t3; // A + B + 4C + 6D + let mut t5 = t0 + t0; + t5 += t5; + t5 += t2; // 4A + 6B + C + D + let t6 = t3 + t5; // 5A + 7B + C + 3D + let t7 = t2 + t4; // A + 3B + 5C + 7D + input[0] = t6; + input[1] = t5; + input[2] = t7; + input[3] = t4; + } + + fn internal_m_multiplication(&self, input: &mut [FieldElement]) { + let mut sum = FieldElement::zero(); + for i in input.iter() { + sum += *i; + } + for (index, i) in input.iter_mut().enumerate() { + *i = *i * self.internal_matrix_diagonal[index]; + *i += sum; + } + } + + pub(crate) fn permutation( + &self, + inputs: &[FieldElement], + len: u32, + ) -> Result, BlackBoxResolutionError> { + if len as usize != inputs.len() { + return Err(BlackBoxResolutionError::Failed( + acir::BlackBoxFunc::Poseidon2Permutation, + format!( + "the number of inputs does not match specified length. 
{} > {}", + inputs.len(), + len + ), + )); + } + if len != self.t { + return Err(BlackBoxResolutionError::Failed( + acir::BlackBoxFunc::Poseidon2Permutation, + format!("Expected {} values but encountered {}", self.t, len), + )); + } + // Read witness assignments + let mut state = [FieldElement::zero(); 4]; + for (index, input) in inputs.iter().enumerate() { + state[index] = *input; + } + // Apply 1st linear layer + Self::matrix_multiplication_4x4(&mut state); + + // First set of external rounds + let rf_first = self.rounds_f / 2; + for r in 0..rf_first { + self.add_round_constants(&mut state, r as usize); + Self::s_box(&mut state); + Self::matrix_multiplication_4x4(&mut state); + } + // Internal rounds + let p_end = rf_first + self.rounds_p; + for r in rf_first..p_end { + state[0] += self.round_constant[r as usize][0]; + state[0] = Self::single_box(state[0]); + self.internal_m_multiplication(&mut state); + } + + // Remaining external rounds + let num_rounds = self.rounds_f + self.rounds_p; + for i in p_end..num_rounds { + self.add_round_constants(&mut state, i as usize); + Self::s_box(&mut state); + Self::matrix_multiplication_4x4(&mut state); + } + Ok(state.into()) + } +} diff --git a/acvm-repo/brillig_vm/src/black_box.rs b/acvm-repo/brillig_vm/src/black_box.rs index 5b2680465ab..73b57b907f3 100644 --- a/acvm-repo/brillig_vm/src/black_box.rs +++ b/acvm-repo/brillig_vm/src/black_box.rs @@ -184,7 +184,18 @@ pub(crate) fn evaluate_black_box( BlackBoxOp::BigIntDiv { .. } => todo!(), BlackBoxOp::BigIntFromLeBytes { .. } => todo!(), BlackBoxOp::BigIntToLeBytes { .. } => todo!(), - BlackBoxOp::Poseidon2Permutation { .. } => todo!(), + BlackBoxOp::Poseidon2Permutation { message, output, len } => { + let input = read_heap_vector(memory, message); + let input: Vec = input.iter().map(|x| x.to_field()).collect(); + let len = memory.read(*len).to_u128() as u32; + let result = solver.poseidon2_permutation(&input, len)?; + let mut values = Vec::new(); + for i in result { + values.push(Value::from(i)); + } + memory.write_slice(memory.read_ref(output.pointer), &values); + Ok(()) + } BlackBoxOp::Sha256Compression { input, hash_values, output } => { let mut message = [0; 16]; let inputs = read_heap_vector(memory, input); diff --git a/acvm-repo/brillig_vm/src/lib.rs b/acvm-repo/brillig_vm/src/lib.rs index 13accbeacb3..c7bf014f068 100644 --- a/acvm-repo/brillig_vm/src/lib.rs +++ b/acvm-repo/brillig_vm/src/lib.rs @@ -568,6 +568,13 @@ impl BlackBoxFunctionSolver for DummyBlackBoxSolver { ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { Ok((5_u128.into(), 6_u128.into())) } + fn poseidon2_permutation( + &self, + _input: &[FieldElement], + len: u32, + ) -> Result, BlackBoxResolutionError> { + Ok(vec![0_u128.into(); len as usize]) + } } #[cfg(test)] diff --git a/compiler/noirc_evaluator/src/brillig/brillig_ir.rs b/compiler/noirc_evaluator/src/brillig/brillig_ir.rs index 073b0e6f59f..90608974f98 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_ir.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_ir.rs @@ -1151,6 +1151,14 @@ pub(crate) mod tests { ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { panic!("Path not trodden by this test") } + + fn poseidon2_permutation( + &self, + _inputs: &[FieldElement], + _len: u32, + ) -> Result, BlackBoxResolutionError> { + Ok(vec![0_u128.into(), 1_u128.into(), 2_u128.into(), 3_u128.into()]) + } } pub(crate) fn create_context() -> BrilligContext { diff --git a/docs/docs/noir/standard_library/cryptographic_primitives/hashes.mdx 
index 85706384eee..b9239f822e8 100644
--- a/docs/docs/noir/standard_library/cryptographic_primitives/hashes.mdx
+++ b/docs/docs/noir/standard_library/cryptographic_primitives/hashes.mdx
@@ -114,6 +114,19 @@ example:

 #include_code poseidon test_programs/execution_success/poseidon_bn254_hash/src/main.nr rust

+## poseidon 2
+
+Given an array of Fields, returns a new Field with the Poseidon2 hash. Unlike the Poseidon
+function, there is a single hash function rather than one per arity, and you can specify a
+`message_size` to hash only the first `message_size` field elements of the input.
+
+```rust
+// example for hashing the first three elements of the input
+Poseidon2::hash(input, 3);
+```
+
+The Poseidon example included above also exercises Poseidon2.
+
 ## mimc_bn254 and mimc

 `mimc_bn254` is `mimc`, but with hardcoded parameters for the BN254 curve. You can use it by
diff --git a/noir_stdlib/src/hash.nr b/noir_stdlib/src/hash.nr
index 7a931f7c047..fcf21436197 100644
--- a/noir_stdlib/src/hash.nr
+++ b/noir_stdlib/src/hash.nr
@@ -1,5 +1,6 @@
 mod poseidon;
 mod mimc;
+mod poseidon2;
 mod pedersen;

 use crate::default::Default;
@@ -73,7 +74,7 @@ pub fn keccak256<N>(input: [u8; N], message_size: u32) -> [u8; 32] {}

 #[foreign(poseidon2_permutation)]
-pub fn poseidon2_permutation<N>(_input: [u8; N], _state_length: u32) -> [u8; N] {}
+pub fn poseidon2_permutation<N>(_input: [Field; N], _state_length: u32) -> [Field; N] {}

 #[foreign(sha256_compression)]
 pub fn sha256_compression(_input: [u32; 16], _state: [u32; 8]) -> [u32; 8] {}
diff --git a/noir_stdlib/src/hash/poseidon2.nr b/noir_stdlib/src/hash/poseidon2.nr
new file mode 100644
index 00000000000..8e0fcc6858e
--- /dev/null
+++ b/noir_stdlib/src/hash/poseidon2.nr
@@ -0,0 +1,119 @@
+global rate = 3;
+
+struct Poseidon2 {
+    cache: [Field;3],
+    state: [Field;4],
+    cache_size: u32,
+    squeeze_mode: bool, // false => absorb, true => squeeze
+}
+
+impl Poseidon2 {
+
+    pub fn hash<N>(input: [Field; N], message_size: u32) -> Field {
+        if message_size == N {
+            Poseidon2::hash_internal(input, N, false)
+        } else {
+            Poseidon2::hash_internal(input, message_size, true)
+        }
+    }
+
+    fn new(iv: Field) -> Poseidon2 {
+        let mut result = Poseidon2 {
+            cache: [0;3],
+            state: [0;4],
+            cache_size: 0,
+            squeeze_mode: false,
+        };
+        result.state[rate] = iv;
+        result
+    }
+
+    fn perform_duplex(&mut self) -> [Field; rate] {
+        // zero-pad the cache
+        for i in 0..rate {
+            if i >= self.cache_size {
+                self.cache[i] = 0;
+            }
+        }
+        // add the cache into sponge state
+        for i in 0..rate {
+            self.state[i] += self.cache[i];
+        }
+        self.state = crate::hash::poseidon2_permutation(self.state, 4);
+        // return `rate` number of field elements from the sponge state.
+        let mut result = [0; rate];
+        for i in 0..rate {
+            result[i] = self.state[i];
+        }
+        result
+    }
+
+    fn absorb(&mut self, input: Field) {
+        if (!self.squeeze_mode) & (self.cache_size == rate) {
+            // If we're absorbing, and the cache is full, apply the sponge permutation to compress the cache
+            let _ = self.perform_duplex();
+            self.cache[0] = input;
+            self.cache_size = 1;
+        } else if (!self.squeeze_mode) & (self.cache_size != rate) {
+            // If we're absorbing, and the cache is not full, add the input into the cache
+            self.cache[self.cache_size] = input;
+            self.cache_size += 1;
+        } else if self.squeeze_mode {
+            // If we're in squeeze mode, switch to absorb mode and add the input into the cache.
+            // N.B. I don't think this code path can be reached?!
+ self.cache[0] = input; + self.cache_size = 1; + self.squeeze_mode = false; + } + } + + fn squeeze(&mut self) -> Field + { + if self.squeeze_mode & (self.cache_size == 0) { + // If we're in squeeze mode and the cache is empty, there is nothing left to squeeze out of the sponge! + // Switch to absorb mode. + self.squeeze_mode = false; + self.cache_size = 0; + } + if !self.squeeze_mode { + // If we're in absorb mode, apply sponge permutation to compress the cache, populate cache with compressed + // state and switch to squeeze mode. Note: this code block will execute if the previous `if` condition was + // matched + let new_output_elements = self.perform_duplex(); + self.squeeze_mode = true; + for i in 0..rate { + self.cache[i] = new_output_elements[i]; + } + self.cache_size = rate; + } + // By this point, we should have a non-empty cache. Pop one item off the top of the cache and return it. + let result = self.cache[0]; + for i in 1..rate { + if i < self.cache_size { + self.cache[i - 1] = self.cache[i]; + } + } + self.cache_size -= 1; + self.cache[self.cache_size] = 0; + result + } + + fn hash_internal<N>(input:[Field;N], in_len:u32, is_variable_length: bool) -> Field + { + let iv : Field = (in_len as Field)*18446744073709551616; + let mut sponge = Poseidon2::new(iv); + for i in 0..input.len() { + if i as u32 < in_len { + sponge.absorb(input[i]); + } + } + + // In the case where the hash preimage is variable-length, we append `1` to the end of the input, to distinguish + // from fixed-length hashes. (the combination of this additional field element + the hash IV ensures + // fixed-length and variable-length hashes do not collide) + if is_variable_length { + sponge.absorb(1); + } + sponge.squeeze() + } +} diff --git a/test_programs/execution_success/poseidon_bn254_hash/Prover.toml b/test_programs/execution_success/poseidon_bn254_hash/Prover.toml index 8eecf9a3db2..fa6fd05b0a3 100644 --- a/test_programs/execution_success/poseidon_bn254_hash/Prover.toml +++ b/test_programs/execution_success/poseidon_bn254_hash/Prover.toml @@ -2,3 +2,8 @@ x1 = [1,2] y1 = "0x115cc0f5e7d690413df64c6b9662e9cf2a3617f2743245519e19607a4417189a" x2 = [1,2,3,4] y2 = "0x299c867db6c1fdd79dcefa40e4510b9837e60ebb1ce0663dbaa525df65250465" +x3 = ["4218458030232820015255714794613421442512497197372123294583664908262453897094", + "4218458030232820015255714794613421442512497197372123294583664908262453897094", + "4218458030232820015255714794613421442512497197372123294583664908262453897094", + "4218458030232820015255714794613421442512497197372123294583664908262453897094"] + y3 = "0x2f43a0f83b51a6f5fc839dea0ecec74947637802a579fa9841930a25a0bcec11" diff --git a/test_programs/execution_success/poseidon_bn254_hash/src/main.nr b/test_programs/execution_success/poseidon_bn254_hash/src/main.nr index e742a440d1c..939b99595c7 100644 --- a/test_programs/execution_success/poseidon_bn254_hash/src/main.nr +++ b/test_programs/execution_success/poseidon_bn254_hash/src/main.nr @@ -1,11 +1,15 @@ // docs:start:poseidon use dep::std::hash::poseidon; +use dep::std::hash::poseidon2; -fn main(x1: [Field; 2], y1: pub Field, x2: [Field; 4], y2: pub Field) { +fn main(x1: [Field; 2], y1: pub Field, x2: [Field; 4], y2: pub Field, x3: [Field; 4], y3: Field) { let hash1 = poseidon::bn254::hash_2(x1); assert(hash1 == y1); let hash2 = poseidon::bn254::hash_4(x2); assert(hash2 == y2); + + let hash3 = poseidon2::Poseidon2::hash(x3, x3.len() as u32); + assert(hash3 == y3); } // docs:end:poseidon diff --git a/tooling/lsp/src/solver.rs b/tooling/lsp/src/solver.rs index
f001cebaa4d..d0acbf1aec5 100644 --- a/tooling/lsp/src/solver.rs +++ b/tooling/lsp/src/solver.rs @@ -49,4 +49,12 @@ impl BlackBoxFunctionSolver for WrapperSolver { ) -> Result<(acvm::FieldElement, acvm::FieldElement), acvm::BlackBoxResolutionError> { self.0.ec_add(input1_x, input1_y, input2_x, input2_y) } + + fn poseidon2_permutation( + &self, + inputs: &[acvm::FieldElement], + len: u32, + ) -> Result<Vec<acvm::FieldElement>, acvm::BlackBoxResolutionError> { + self.0.poseidon2_permutation(inputs, len) + } } From 568a7812b6a11cd427ffca38103179c7ec0830db Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Tue, 27 Feb 2024 10:19:15 +0000 Subject: [PATCH 41/45] chore: nargo fmt (#4434) # Description ## Problem\* Resolves ## Summary\* This PR fixes the formatting CI. ## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. --- noir_stdlib/src/collections/bounded_vec.nr | 2 +- noir_stdlib/src/hash/poseidon2.nr | 41 +++++++++------------- 2 files changed, 18 insertions(+), 25 deletions(-) diff --git a/noir_stdlib/src/collections/bounded_vec.nr b/noir_stdlib/src/collections/bounded_vec.nr index f78d86de77d..4a14bd16376 100644 --- a/noir_stdlib/src/collections/bounded_vec.nr +++ b/noir_stdlib/src/collections/bounded_vec.nr @@ -77,7 +77,7 @@ impl<T, MaxLen> BoundedVec<T, MaxLen> { let mut exceeded_len = false; for i in 0..MaxLen { exceeded_len |= i == self.len; - if (!exceeded_len) { + if !exceeded_len { ret |= predicate(self.storage[i]); } } diff --git a/noir_stdlib/src/hash/poseidon2.nr b/noir_stdlib/src/hash/poseidon2.nr index 8e0fcc6858e..64c1876b4e2 100644 --- a/noir_stdlib/src/hash/poseidon2.nr +++ b/noir_stdlib/src/hash/poseidon2.nr @@ -1,4 +1,4 @@ -global rate = 3; +global RATE = 3; struct Poseidon2 { cache: [Field;3], @@ -18,43 +18,38 @@ impl Poseidon2 { } fn new(iv: Field) -> Poseidon2 { - let mut result = Poseidon2 { - cache: [0;3], - state: [0;4], - cache_size: 0, - squeeze_mode: false, - }; - result.state[rate] = iv; + let mut result = Poseidon2 { cache: [0; 3], state: [0; 4], cache_size: 0, squeeze_mode: false }; + result.state[RATE] = iv; result } - fn perform_duplex(&mut self) -> [Field; rate] { + fn perform_duplex(&mut self) -> [Field; RATE] { // zero-pad the cache - for i in 0..rate { + for i in 0..RATE { if i >= self.cache_size { self.cache[i] = 0; } } // add the cache into sponge state - for i in 0..rate { + for i in 0..RATE { self.state[i] += self.cache[i]; } self.state = crate::hash::poseidon2_permutation(self.state, 4); - // return `rate` number of field elements from the sponge state. + // return `RATE` number of field elements from the sponge state.
+ let mut result = [0; RATE]; + for i in 0..RATE { result[i] = self.state[i]; } result } fn absorb(&mut self, input: Field) { - if (!self.squeeze_mode) & (self.cache_size == rate) { + if (!self.squeeze_mode) & (self.cache_size == RATE) { // If we're absorbing, and the cache is full, apply the sponge permutation to compress the cache let _ = self.perform_duplex(); self.cache[0] = input; self.cache_size = 1; - } else if (!self.squeeze_mode) & (self.cache_size != rate) { + } else if (!self.squeeze_mode) & (self.cache_size != RATE) { // If we're absorbing, and the cache is not full, add the input into the cache self.cache[self.cache_size] = input; self.cache_size += 1; @@ -67,8 +62,7 @@ impl Poseidon2 { } } - fn squeeze(&mut self) -> Field - { + fn squeeze(&mut self) -> Field { if self.squeeze_mode & (self.cache_size == 0) { // If we're in squeeze mode and the cache is empty, there is nothing left to squeeze out of the sponge! // Switch to absorb mode. @@ -81,14 +75,14 @@ impl Poseidon2 { // If we're in absorb mode, apply sponge permutation to compress the cache, populate cache with compressed // state and switch to squeeze mode. Note: this code block will execute if the previous `if` condition was // matched let new_output_elements = self.perform_duplex(); self.squeeze_mode = true; - for i in 0..rate { + for i in 0..RATE { self.cache[i] = new_output_elements[i]; } - self.cache_size = rate; + self.cache_size = RATE; } // By this point, we should have a non-empty cache. Pop one item off the top of the cache and return it. let result = self.cache[0]; - for i in 1..rate { + for i in 1..RATE { if i < self.cache_size { self.cache[i - 1] = self.cache[i]; } @@ -98,9 +92,8 @@ impl Poseidon2 { result } - fn hash_internal<N>(input:[Field;N], in_len:u32, is_variable_length: bool) -> Field - { - let iv : Field = (in_len as Field)*18446744073709551616; + fn hash_internal<N>(input: [Field; N], in_len: u32, is_variable_length: bool) -> Field { + let iv : Field = (in_len as Field) * 18446744073709551616; let mut sponge = Poseidon2::new(iv); for i in 0..input.len() { if i as u32 < in_len { From b9384fb23abf4ab15e880fb7e03c21509a9fa8a6 Mon Sep 17 00:00:00 2001 From: jfecher Date: Tue, 27 Feb 2024 10:44:23 +0000 Subject: [PATCH 42/45] chore!: Remove empty value from bounded vec (#4431) # Description ## Problem\* ## Summary\* Removes the `empty_value` field from the bounded vec. This field muddies the API and shouldn't be needed, since we have `crate::unsafe::zeroed()` instead. ## Additional Context ## Documentation\* Check one: - [ ] No documentation needed. - [ ] Documentation included in this PR. - [x] **[Exceptional Case]** Documentation to be submitted in a separate PR. - Included in https://github.com/noir-lang/noir/pull/4430 # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings.
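For illustration, a minimal before/after sketch of the breaking change (the `Field`/`3` type parameters here are assumptions for the example, not taken from this diff):

```rust
// Before this PR: `new` required a placeholder element for unused slots.
let mut old_vec: BoundedVec<Field, 3> = BoundedVec::new(0);

// After this PR: `new` takes no arguments; storage is zero-initialized
// internally via `crate::unsafe::zeroed()`.
let mut vec: BoundedVec<Field, 3> = BoundedVec::new();
vec.push(2);
assert(vec.len == 1);
```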
--------- Co-authored-by: Tom French <15848336+TomAFrench@users.noreply.github.com> Co-authored-by: Tom French --- noir_stdlib/src/collections/bounded_vec.nr | 8 ++--- .../noir_test_success/bounded_vec/src/main.nr | 32 +++++++++---------- 2 files changed, 20 insertions(+), 20 deletions(-) diff --git a/noir_stdlib/src/collections/bounded_vec.nr b/noir_stdlib/src/collections/bounded_vec.nr index 4a14bd16376..752b96d6591 100644 --- a/noir_stdlib/src/collections/bounded_vec.nr +++ b/noir_stdlib/src/collections/bounded_vec.nr @@ -1,12 +1,12 @@ struct BoundedVec<T, MaxLen> { storage: [T; MaxLen], len: u64, - empty_value: T, } impl<T, MaxLen> BoundedVec<T, MaxLen> { - pub fn new(initial_value: T) -> Self { - BoundedVec { storage: [initial_value; MaxLen], len: 0, empty_value: initial_value } + pub fn new() -> Self { + let zeroed = crate::unsafe::zeroed(); + BoundedVec { storage: [zeroed; MaxLen], len: 0 } } pub fn get(mut self: Self, index: u64) -> T { @@ -68,7 +68,7 @@ impl<T, MaxLen> BoundedVec<T, MaxLen> { self.len -= 1; let elem = self.storage[self.len]; - self.storage[self.len] = self.empty_value; + self.storage[self.len] = crate::unsafe::zeroed(); elem } diff --git a/test_programs/noir_test_success/bounded_vec/src/main.nr b/test_programs/noir_test_success/bounded_vec/src/main.nr index d51d2cc3685..0e2c89c9064 100644 --- a/test_programs/noir_test_success/bounded_vec/src/main.nr +++ b/test_programs/noir_test_success/bounded_vec/src/main.nr @@ -1,6 +1,6 @@ #[test] fn test_vec_push_pop() { - let mut vec: BoundedVec = BoundedVec::new(0); + let mut vec: BoundedVec = BoundedVec::new(); assert(vec.len == 0); vec.push(2); assert(vec.len == 1); @@ -17,7 +17,7 @@ fn test_vec_push_pop() { #[test] fn test_vec_extend_from_array() { - let mut vec: BoundedVec = BoundedVec::new(0); + let mut vec: BoundedVec = BoundedVec::new(); vec.extend_from_array([2, 4]); assert(vec.len == 2); assert(vec.get(0) == 2); @@ -26,13 +26,13 @@ fn test_vec_extend_from_array() { #[test(should_fail_with="extend_from_array out of bounds")] fn test_vec_extend_from_array_out_of_bound() { - let mut vec: BoundedVec = BoundedVec::new(0); + let mut vec: BoundedVec = BoundedVec::new(); vec.extend_from_array([2, 4, 6]); } #[test(should_fail_with="extend_from_array out of bounds")] fn test_vec_extend_from_array_twice_out_of_bound() { - let mut vec: BoundedVec = BoundedVec::new(0); + let mut vec: BoundedVec = BoundedVec::new(); vec.extend_from_array([2]); assert(vec.len == 1); vec.extend_from_array([4, 6]); @@ -40,36 +40,36 @@ fn test_vec_extend_from_array_twice_out_of_bound() { #[test(should_fail)] fn test_vec_get_out_of_bound() { - let mut vec: BoundedVec = BoundedVec::new(0); + let mut vec: BoundedVec = BoundedVec::new(); vec.extend_from_array([2, 4]); let _x = vec.get(2); } #[test(should_fail)] fn test_vec_get_not_declared() { - let mut vec: BoundedVec = BoundedVec::new(0); + let mut vec: BoundedVec = BoundedVec::new(); vec.extend_from_array([2]); let _x = vec.get(1); } #[test(should_fail)] fn test_vec_get_uninitialized() { - let mut vec: BoundedVec = BoundedVec::new(0); + let mut vec: BoundedVec = BoundedVec::new(); let _x = vec.get(0); } #[test(should_fail_with="push out of bounds")] fn test_vec_push_out_of_bound() { - let mut vec: BoundedVec = BoundedVec::new(0); + let mut vec: BoundedVec = BoundedVec::new(); vec.push(1); vec.push(2); } #[test(should_fail_with="extend_from_bounded_vec out of bounds")] fn test_vec_extend_from_bounded_vec_out_of_bound() { - let mut vec: BoundedVec = BoundedVec::new(0); + let mut vec: BoundedVec = BoundedVec::new(); - let mut another_vec: BoundedVec =
BoundedVec::new(0); + let mut another_vec: BoundedVec = BoundedVec::new(); another_vec.extend_from_array([1, 2, 3]); vec.extend_from_bounded_vec(another_vec); @@ -77,10 +77,10 @@ fn test_vec_extend_from_bounded_vec_out_of_bound() { #[test(should_fail_with="extend_from_bounded_vec out of bounds")] fn test_vec_extend_from_bounded_vec_twice_out_of_bound() { - let mut vec: BoundedVec = BoundedVec::new(0); + let mut vec: BoundedVec = BoundedVec::new(); vec.extend_from_array([1, 2]); - let mut another_vec: BoundedVec = BoundedVec::new(0); + let mut another_vec: BoundedVec = BoundedVec::new(); another_vec.push(3); vec.extend_from_bounded_vec(another_vec); @@ -88,7 +88,7 @@ fn test_vec_extend_from_bounded_vec_twice_out_of_bound() { #[test] fn test_vec_any() { - let mut vec: BoundedVec = BoundedVec::new(0); + let mut vec: BoundedVec = BoundedVec::new(); vec.extend_from_array([2, 4, 6]); assert(vec.any(|v| v == 2) == true); assert(vec.any(|v| v == 4) == true); @@ -98,8 +98,8 @@ fn test_vec_any() { #[test] fn test_vec_any_not_default() { - let default_value = 1; - let mut vec: BoundedVec = BoundedVec::new(default_value); + let default_value = 0; + let mut vec: BoundedVec = BoundedVec::new(); vec.extend_from_array([2, 4]); assert(vec.any(|v| v == default_value) == false); -} \ No newline at end of file +} From 8f935af0813c4f012005c8b3eb70441b44db5714 Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Tue, 27 Feb 2024 11:43:55 +0000 Subject: [PATCH 43/45] chore(docs): fix external contributor force push workflow (#4437) # Description ## Problem\* Resolves # ## Summary\* This workflow was failing due to the fact that we're storing the message in a file but not checking out the repository. While checking out would be safe in this case, I've just moved the comment into the workflow for paranoia reasons. ## Additional Context ## Documentation\* Check one: - [x] No documentation needed. - [ ] Documentation included in this PR. - [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR. # PR Checklist\* - [x] I have tested the changes locally. - [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings. --- .github/EXTERNAL_CONTRIBUTOR_PR_COMMENT.md | 5 ----- .github/workflows/pull-request-title.yml | 6 +++++- 2 files changed, 5 insertions(+), 6 deletions(-) delete mode 100644 .github/EXTERNAL_CONTRIBUTOR_PR_COMMENT.md diff --git a/.github/EXTERNAL_CONTRIBUTOR_PR_COMMENT.md b/.github/EXTERNAL_CONTRIBUTOR_PR_COMMENT.md deleted file mode 100644 index 4031bcdb61c..00000000000 --- a/.github/EXTERNAL_CONTRIBUTOR_PR_COMMENT.md +++ /dev/null @@ -1,5 +0,0 @@ -Thank you for your contribution to the Noir language. - -Please **do not force push to this branch** after the Noir team have reviewed this PR. Doing so will only delay us merging your PR as we will need to start the review process from scratch. - -Thanks for your understanding. \ No newline at end of file diff --git a/.github/workflows/pull-request-title.yml b/.github/workflows/pull-request-title.yml index 8f863160cf1..7e9b729da28 100644 --- a/.github/workflows/pull-request-title.yml +++ b/.github/workflows/pull-request-title.yml @@ -39,6 +39,10 @@ jobs: - name: Post comment on force pushes uses: marocchino/sticky-pull-request-comment@v2 with: - path: ./.github/EXTERNAL_CONTRIBUTOR_PR_COMMENT.md + message: | + Thank you for your contribution to the Noir language. 
+ Please **do not force push to this branch** after the Noir team have started review of this PR. Doing so will only delay us merging your PR as we will need to start the review process from scratch. + + Thanks for your understanding. \ No newline at end of file From 2498115cf197450f33af0b9c158fa2ee4ce3e222 Mon Sep 17 00:00:00 2001 From: Aztec Bot <49558828+AztecBot@users.noreply.github.com> Date: Tue, 27 Feb 2024 06:35:28 -0500 Subject: [PATCH 44/45] feat: Sync from aztec-packages (#4390) BEGIN_COMMIT_OVERRIDE chore: bootstrap improvements. (https://github.com/AztecProtocol/aztec-packages/pull/4711) chore: Add full recursive verification test (https://github.com/AztecProtocol/aztec-packages/pull/4658) chore: add struct for each bigint modulus (https://github.com/AztecProtocol/aztec-packages/pull/4422) END_COMMIT_OVERRIDE --------- Co-authored-by: Tom French Co-authored-by: Tom French <15848336+TomAFrench@users.noreply.github.com> Co-authored-by: sirasistant --- Cargo.lock | 1 + Dockerfile | 6 +- aztec_macros/Cargo.toml | 1 + aztec_macros/src/lib.rs | 269 ++++++++++++-- bootstrap.sh | 3 + bootstrap_cache.sh | 4 + .../src/hir/def_collector/dc_crate.rs | 14 + .../noirc_frontend/src/hir/def_map/mod.rs | 5 + compiler/noirc_frontend/src/hir/mod.rs | 4 + compiler/noirc_frontend/src/lib.rs | 11 + compiler/noirc_frontend/src/node_interner.rs | 2 +- noir_stdlib/src/bigint.nr | 327 ++++++++++++++++-- noir_stdlib/src/uint128.nr | 2 +- noirc_macros/src/lib.rs | 12 + scripts/test_native.sh | 4 +- .../1327_concrete_in_generic/src/main.nr | 4 +- .../execution_success/bigint/src/main.nr | 13 +- .../brillig_cow_regression/Prover.toml | 2 +- .../brillig_cow_regression/src/main.nr | 24 +- .../double_verify_nested_proof/Nargo.toml | 7 + .../double_verify_nested_proof/Prover.toml | 5 + .../double_verify_nested_proof/src/main.nr | 28 ++ .../double_verify_proof/src/main.nr | 3 +- .../regression_4124/src/main.nr | 8 +- test_programs/gates_report.sh | 2 +- tooling/debugger/ignored-tests.txt | 1 + tooling/nargo_fmt/tests/expected/contract.nr | 21 +- tooling/nargo_fmt/tests/input/contract.nr | 37 +- 28 files changed, 688 insertions(+), 132 deletions(-) create mode 100644 test_programs/execution_success/double_verify_nested_proof/Nargo.toml create mode 100644 test_programs/execution_success/double_verify_nested_proof/Prover.toml create mode 100644 test_programs/execution_success/double_verify_nested_proof/src/main.nr diff --git a/Cargo.lock b/Cargo.lock index 714b700119a..c0438eaf81f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -417,6 +417,7 @@ version = "0.24.0" dependencies = [ "convert_case 0.6.0", "iter-extended", + "noirc_errors", "noirc_frontend", ] diff --git a/Dockerfile b/Dockerfile index 000292e0a47..3a478c3f95a 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,12 +1,12 @@ -FROM rust:bookworm +FROM rust:bullseye WORKDIR /usr/src/noir COPY . . RUN ./scripts/bootstrap_native.sh # When running the container, mount the users home directory to same location. -FROM ubuntu:lunar +FROM ubuntu:focal # Install Tini as nargo doesn't handle signals properly. # Install git as nargo needs it to clone. 
RUN apt-get update && apt-get install -y git tini && rm -rf /var/lib/apt/lists/* && apt-get clean COPY --from=0 /usr/src/noir/target/release/nargo /usr/src/noir/target/release/nargo -ENTRYPOINT ["/usr/bin/tini", "--", "/usr/src/noir/target/release/nargo"] \ No newline at end of file +ENTRYPOINT ["/usr/bin/tini", "--", "/usr/src/noir/target/release/nargo"] diff --git a/aztec_macros/Cargo.toml b/aztec_macros/Cargo.toml index 5e908b2e672..ed9821fabcf 100644 --- a/aztec_macros/Cargo.toml +++ b/aztec_macros/Cargo.toml @@ -11,5 +11,6 @@ repository.workspace = true [dependencies] noirc_frontend.workspace = true +noirc_errors.workspace = true iter-extended.workspace = true convert_case = "0.6.0" diff --git a/aztec_macros/src/lib.rs b/aztec_macros/src/lib.rs index 0b93dbaa634..0ccc421d3bc 100644 --- a/aztec_macros/src/lib.rs +++ b/aztec_macros/src/lib.rs @@ -3,6 +3,10 @@ use std::vec; use convert_case::{Case, Casing}; use iter_extended::vecmap; +use noirc_errors::Location; +use noirc_frontend::hir::def_collector::dc_crate::{UnresolvedFunctions, UnresolvedTraitImpl}; +use noirc_frontend::hir::def_map::{LocalModuleId, ModuleId}; +use noirc_frontend::macros_api::parse_program; use noirc_frontend::macros_api::FieldElement; use noirc_frontend::macros_api::{ BlockExpression, CallExpression, CastExpression, Distinctness, Expression, ExpressionKind, @@ -16,9 +20,8 @@ use noirc_frontend::macros_api::{ use noirc_frontend::macros_api::{CrateId, FileId}; use noirc_frontend::macros_api::{MacroError, MacroProcessor}; use noirc_frontend::macros_api::{ModuleDefId, NodeInterner, SortedModule, StructId}; -use noirc_frontend::node_interner::{TraitId, TraitImplKind}; +use noirc_frontend::node_interner::{FuncId, TraitId, TraitImplId, TraitImplKind}; use noirc_frontend::Lambda; - pub struct AztecMacro; impl MacroProcessor for AztecMacro { @@ -31,6 +34,25 @@ impl MacroProcessor for AztecMacro { transform(ast, crate_id, context) } + fn process_unresolved_traits_impls( + &self, + crate_id: &CrateId, + context: &mut HirContext, + unresolved_traits_impls: &[UnresolvedTraitImpl], + collected_functions: &mut Vec<UnresolvedFunctions>, + ) -> Result<(), (MacroError, FileId)> { + if has_aztec_dependency(crate_id, context) { + inject_compute_note_hash_and_nullifier( + crate_id, + context, + unresolved_traits_impls, + collected_functions, + ) + } else { + Ok(()) + } + } + fn process_typed_ast( + &self, + crate_id: &CrateId, @@ -46,7 +68,6 @@ const MAX_CONTRACT_FUNCTIONS: usize = 2_usize.pow(FUNCTION_TREE_HEIGHT); #[derive(Debug, Clone)] pub enum AztecMacroError { AztecDepNotFound, - ComputeNoteHashAndNullifierNotFound { span: Span }, ContractHasTooManyFunctions { span: Span }, ContractConstructorMissing { span: Span }, UnsupportedFunctionArgumentType { span: Span, typ: UnresolvedTypeData }, @@ -63,11 +84,6 @@ impl From<AztecMacroError> for MacroError { secondary_message: None, span: None, }, - AztecMacroError::ComputeNoteHashAndNullifierNotFound { span } => MacroError { - primary_message: "compute_note_hash_and_nullifier function not found. Define it in your contract.
For more information go to https://docs.aztec.network/developers/debugging/aztecnr-errors#compute_note_hash_and_nullifier-function-not-found-define-it-in-your-contract".to_owned(), - secondary_message: None, - span: Some(span), - }, AztecMacroError::ContractHasTooManyFunctions { span } => MacroError { primary_message: format!("Contract can only have a maximum of {} functions", MAX_CONTRACT_FUNCTIONS), secondary_message: None, @@ -313,15 +329,17 @@ fn check_for_aztec_dependency( crate_id: &CrateId, context: &HirContext, ) -> Result<(), (MacroError, FileId)> { - let crate_graph = &context.crate_graph[crate_id]; - let has_aztec_dependency = crate_graph.dependencies.iter().any(|dep| dep.as_name() == "aztec"); - if has_aztec_dependency { + if has_aztec_dependency(crate_id, context) { Ok(()) } else { - Err((AztecMacroError::AztecDepNotFound.into(), crate_graph.root_file_id)) + Err((AztecMacroError::AztecDepNotFound.into(), context.crate_graph[crate_id].root_file_id)) } } +fn has_aztec_dependency(crate_id: &CrateId, context: &HirContext) -> bool { + context.crate_graph[crate_id].dependencies.iter().any(|dep| dep.as_name() == "aztec") +} + // Check to see if the user has defined a storage struct fn check_for_storage_definition(module: &SortedModule) -> bool { module.types.iter().any(|r#struct| r#struct.name.0.contents == "Storage") @@ -338,27 +356,30 @@ fn check_for_storage_implementation(module: &SortedModule) -> bool { } // Check if "compute_note_hash_and_nullifier(AztecAddress,Field,Field,Field,[Field; N]) -> [Field; 4]" is defined -fn check_for_compute_note_hash_and_nullifier_definition(module: &SortedModule) -> bool { - module.functions.iter().any(|func| { - func.def.name.0.contents == "compute_note_hash_and_nullifier" - && func.def.parameters.len() == 5 - && match &func.def.parameters[0].typ.typ { +fn check_for_compute_note_hash_and_nullifier_definition( + functions_data: &[(LocalModuleId, FuncId, NoirFunction)], + module_id: LocalModuleId, +) -> bool { + functions_data.iter().filter(|func_data| func_data.0 == module_id).any(|func_data| { + func_data.2.def.name.0.contents == "compute_note_hash_and_nullifier" + && func_data.2.def.parameters.len() == 5 + && match &func_data.2.def.parameters[0].typ.typ { UnresolvedTypeData::Named(path, _, _) => path.segments.last().unwrap().0.contents == "AztecAddress", _ => false, } - && func.def.parameters[1].typ.typ == UnresolvedTypeData::FieldElement - && func.def.parameters[2].typ.typ == UnresolvedTypeData::FieldElement - && func.def.parameters[3].typ.typ == UnresolvedTypeData::FieldElement + && func_data.2.def.parameters[1].typ.typ == UnresolvedTypeData::FieldElement + && func_data.2.def.parameters[2].typ.typ == UnresolvedTypeData::FieldElement + && func_data.2.def.parameters[3].typ.typ == UnresolvedTypeData::FieldElement // checks if the 5th parameter is an array and the Box in // Array(Option, Box) contains only fields - && match &func.def.parameters[4].typ.typ { + && match &func_data.2.def.parameters[4].typ.typ { UnresolvedTypeData::Array(_, inner_type) => { matches!(inner_type.typ, UnresolvedTypeData::FieldElement) }, _ => false, } // We check the return type the same way as we did the 5th parameter - && match &func.def.return_type { + && match &func_data.2.def.return_type { FunctionReturnType::Default(_) => false, FunctionReturnType::Ty(unresolved_type) => { match &unresolved_type.typ { @@ -401,13 +422,6 @@ fn transform_module( generate_storage_implementation(module).map_err(|err| (err, crate_graph.root_file_id))?; } - if storage_defined && 
!check_for_compute_note_hash_and_nullifier_definition(module) { - return Err(( - AztecMacroError::ComputeNoteHashAndNullifierNotFound { span: Span::default() }, - crate_graph.root_file_id, - )); - } - for structure in module.types.iter() { if structure.attributes.iter().any(|attr| matches!(attr, SecondaryAttribute::Event)) { module.impls.push(generate_selector_impl(structure)); @@ -596,7 +610,7 @@ fn generate_storage_implementation(module: &mut SortedModule) -> Result<(), Azte /// If it does, it will insert the following things: /// - A new Input that is provided for a kernel app circuit, named: {Public/Private}ContextInputs /// - Hashes all of the function input variables -/// - This instantiates a helper function +/// - This instantiates a helper function fn transform_function( ty: &str, func: &mut NoirFunction, @@ -826,8 +840,8 @@ fn get_serialized_length( && !interner.lookup_all_trait_implementations(stored_in_state, trait_id).is_empty() }); - // Maps and (private) Notes always occupy a single slot. Someone could store a Note in PublicState for whatever reason though. - if struct_name == "Map" || (is_note && struct_name != "PublicState") { + // Maps and (private) Notes always occupy a single slot. Someone could store a Note in PublicMutable for whatever reason though. + if struct_name == "Map" || (is_note && struct_name != "PublicMutable") { return Ok(1); } @@ -1601,3 +1615,194 @@ fn event_signature(event: &StructType) -> String { let fields = vecmap(event.get_fields(&[]), |(_, typ)| signature_of_type(&typ)); format!("{}({})", event.name.0.contents, fields.join(",")) } + +fn inject_compute_note_hash_and_nullifier( + crate_id: &CrateId, + context: &mut HirContext, + unresolved_traits_impls: &[UnresolvedTraitImpl], + collected_functions: &mut [UnresolvedFunctions], +) -> Result<(), (MacroError, FileId)> { + // We first fetch modules in this crate which correspond to contracts, along with their file id. + let contract_module_file_ids: Vec<(LocalModuleId, FileId)> = context + .def_map(crate_id) + .expect("ICE: Missing crate in def_map") + .modules() + .iter() + .filter(|(_, module)| module.is_contract) + .map(|(idx, module)| (LocalModuleId(idx), module.location.file)) + .collect(); + + // If the current crate does not contain a contract module we simply skip it. + if contract_module_file_ids.is_empty() { + return Ok(()); + } else if contract_module_file_ids.len() != 1 { + panic!("Found multiple contracts in the same crate"); + } + + let (module_id, file_id) = contract_module_file_ids[0]; + + // If compute_note_hash_and_nullifier is already defined by the user, we skip auto-generation in order to provide an + // escape hatch for this mechanism. + // TODO(#4647): improve this diagnosis and error messaging. + if collected_functions.iter().any(|coll_funcs_data| { + check_for_compute_note_hash_and_nullifier_definition(&coll_funcs_data.functions, module_id) + }) { + return Ok(()); + } + + // In order to implement compute_note_hash_and_nullifier, we need to know all of the different note types the + // contract might use. These are the types that implement the NoteInterface trait, which provides the + // get_note_type_id function. + let note_types = fetch_struct_trait_impls(context, unresolved_traits_impls, "NoteInterface"); + + // We can now generate a version of compute_note_hash_and_nullifier tailored for the contract in this crate. + let func = generate_compute_note_hash_and_nullifier(&note_types); + + // And inject the newly created function into the contract.
+ + // TODO(#4373): We don't have a reasonable location for the source code of this autogenerated function, so we simply + // pass an empty span. This function should not produce errors anyway so this should not matter. + let location = Location::new(Span::empty(0), file_id); + + // These are the same things the ModCollector does when collecting functions: we push the function to the + // NodeInterner, declare it in the module (which checks for duplicate definitions), and finally add it to the list + // on collected but unresolved functions. + + let func_id = context.def_interner.push_empty_fn(); + context.def_interner.push_function( + func_id, + &func.def, + ModuleId { krate: *crate_id, local_id: module_id }, + location, + ); + + context.def_map_mut(crate_id).unwrap() + .modules_mut()[module_id.0] + .declare_function( + func.name_ident().clone(), func_id + ).expect( + "Failed to declare the autogenerated compute_note_hash_and_nullifier function, likely due to a duplicate definition. See https://github.com/AztecProtocol/aztec-packages/issues/4647." + ); + + collected_functions + .iter_mut() + .find(|fns| fns.file_id == file_id) + .expect("ICE: no functions found in contract file") + .push_fn(module_id, func_id, func.clone()); + + Ok(()) +} + +// Fetches the name of all structs that implement trait_name, both in the current crate and all of its dependencies. +fn fetch_struct_trait_impls( + context: &mut HirContext, + unresolved_traits_impls: &[UnresolvedTraitImpl], + trait_name: &str, +) -> Vec<String> { + let mut struct_typenames: Vec<String> = Vec::new(); + + // These structs can be declared in either external crates or the current one. External crates that contain + // dependencies have already been processed and resolved, but are available here via the NodeInterner. Note that + // crates on which the current crate does not depend on may not have been processed, and will be ignored. + for trait_impl_id in 0..context.def_interner.next_trait_impl_id().0 { + let trait_impl = &context.def_interner.get_trait_implementation(TraitImplId(trait_impl_id)); + + if trait_impl.borrow().ident.0.contents == *trait_name { + if let Type::Struct(s, _) = &trait_impl.borrow().typ { + struct_typenames.push(s.borrow().name.0.contents.clone()); + } else { + panic!("Found impl for {} on non-Struct", trait_name); + } + } + } + + // This crate's traits and impls have not yet been resolved, so we look for impls in unresolved_trait_impls. + struct_typenames.extend( + unresolved_traits_impls + .iter() + .filter(|trait_impl| { + trait_impl + .trait_path + .segments + .last() + .expect("ICE: empty trait_impl path") + .0 + .contents + == *trait_name + }) + .filter_map(|trait_impl| match &trait_impl.object_type.typ { + UnresolvedTypeData::Named(path, _, _) => { + Some(path.segments.last().unwrap().0.contents.clone()) + } + _ => None, + }), + ); + + struct_typenames +} + +fn generate_compute_note_hash_and_nullifier(note_types: &Vec<String>) -> NoirFunction { + let function_source = generate_compute_note_hash_and_nullifier_source(note_types); + + let (function_ast, errors) = parse_program(&function_source); + if !errors.is_empty() { + dbg!(errors.clone()); + } + assert_eq!(errors.len(), 0, "Failed to parse Noir macro code.
This is either a bug in the compiler or the Noir macro code"); + + let mut function_ast = function_ast.into_sorted(); + function_ast.functions.remove(0) +} + +fn generate_compute_note_hash_and_nullifier_source(note_types: &Vec<String>) -> String { + // TODO(#4649): The serialized_note parameter is a fixed-size array, but we don't know what length it should have. + // For now we hardcode it to 20, which is the same as MAX_NOTE_FIELDS_LENGTH. + + if note_types.is_empty() { + // TODO(#4520): Even if the contract does not include any notes, other parts of the stack expect for this + // function to exist, so we include a dummy version. We likely should error out here instead. + " + unconstrained fn compute_note_hash_and_nullifier( + contract_address: AztecAddress, + nonce: Field, + storage_slot: Field, + note_type_id: Field, + serialized_note: [Field; 20] + ) -> pub [Field; 4] { + [0, 0, 0, 0] + }" + .to_string() + } else { + // For contracts that include notes we do a simple if-else chain comparing note_type_id with the different + // get_note_type_id of each of the note types. + + let if_statements: Vec<String> = note_types.iter().map(|note_type| format!( + "if (note_type_id == {0}::get_note_type_id()) {{ + note_utils::compute_note_hash_and_nullifier({0}::deserialize_content, note_header, serialized_note) + }}" + , note_type)).collect(); + + // TODO(#4520): error out on the else instead of returning a zero array + let full_if_statement = if_statements.join(" else ") + + " + else { + [0, 0, 0, 0] + }"; + + format!( + " + unconstrained fn compute_note_hash_and_nullifier( + contract_address: AztecAddress, + nonce: Field, + storage_slot: Field, + note_type_id: Field, + serialized_note: [Field; 20] + ) -> pub [Field; 4] {{ + let note_header = NoteHeader::new(contract_address, nonce, storage_slot); + + {} + }}", + full_if_statement + ) + } +} diff --git a/bootstrap.sh b/bootstrap.sh index 5ebe7ade090..1f9506904a4 100755 --- a/bootstrap.sh +++ b/bootstrap.sh @@ -15,5 +15,8 @@ if [ -n "$CMD" ]; then fi fi +# Attempt to just pull artefacts from CI and exit on success.
+./bootstrap_cache.sh && exit + ./scripts/bootstrap_native.sh ./scripts/bootstrap_packages.sh \ No newline at end of file diff --git a/bootstrap_cache.sh b/bootstrap_cache.sh index 672702416bd..d06aa493662 100755 --- a/bootstrap_cache.sh +++ b/bootstrap_cache.sh @@ -1,6 +1,8 @@ #!/usr/bin/env bash set -eu +[ -z "${NO_CACHE:-}" ] && type docker &> /dev/null && [ -f ~/.aws/credentials ] || exit 1 + cd "$(dirname "$0")" source ../build-system/scripts/setup_env '' '' mainframe_$USER > /dev/null @@ -9,3 +11,5 @@ extract_repo noir-packages /usr/src/noir/packages ./ echo -e "\033[1mRetrieving nargo from remote cache...\033[0m" extract_repo noir /usr/src/noir/target/release ./target/ +remove_old_images noir-packages +remove_old_images noir diff --git a/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs b/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs index 0d1dd1b4337..7f36af5b30e 100644 --- a/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs +++ b/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs @@ -256,6 +256,20 @@ impl DefCollector { // Add the current crate to the collection of DefMaps context.def_maps.insert(crate_id, def_collector.def_map); + // TODO(#4653): generalize this function + for macro_processor in &macro_processors { + macro_processor + .process_unresolved_traits_impls( + &crate_id, + context, + &def_collector.collected_traits_impls, + &mut def_collector.collected_functions, + ) + .unwrap_or_else(|(macro_err, file_id)| { + errors.push((macro_err.into(), file_id)); + }); + } + inject_prelude(crate_id, context, crate_root, &mut def_collector.collected_imports); for submodule in submodules { inject_prelude( diff --git a/compiler/noirc_frontend/src/hir/def_map/mod.rs b/compiler/noirc_frontend/src/hir/def_map/mod.rs index 8e0dacc294b..8721bdb6c3c 100644 --- a/compiler/noirc_frontend/src/hir/def_map/mod.rs +++ b/compiler/noirc_frontend/src/hir/def_map/mod.rs @@ -135,6 +135,11 @@ impl CrateDefMap { pub fn modules(&self) -> &Arena<ModuleData> { &self.modules } + + pub fn modules_mut(&mut self) -> &mut Arena<ModuleData> { + &mut self.modules + } + pub fn krate(&self) -> CrateId { self.krate } diff --git a/compiler/noirc_frontend/src/hir/mod.rs b/compiler/noirc_frontend/src/hir/mod.rs index 4d3800f1a50..00bcb0cdebf 100644 --- a/compiler/noirc_frontend/src/hir/mod.rs +++ b/compiler/noirc_frontend/src/hir/mod.rs @@ -91,6 +91,10 @@ impl Context<'_, '_> { self.def_maps.get(crate_id) } + pub fn def_map_mut(&mut self, crate_id: &CrateId) -> Option<&mut CrateDefMap> { + self.def_maps.get_mut(crate_id) + } + /// Return the CrateId for each crate that has been compiled /// successfully pub fn crates(&self) -> impl Iterator<Item = CrateId> + '_ { diff --git a/compiler/noirc_frontend/src/lib.rs b/compiler/noirc_frontend/src/lib.rs index eb00a61adf6..be007929fc4 100644 --- a/compiler/noirc_frontend/src/lib.rs +++ b/compiler/noirc_frontend/src/lib.rs @@ -45,6 +45,7 @@ pub mod macros_api { pub use noirc_errors::Span; pub use crate::graph::CrateId; + use crate::hir::def_collector::dc_crate::{UnresolvedFunctions, UnresolvedTraitImpl}; pub use crate::hir::def_collector::errors::MacroError; pub use crate::hir_def::expr::{HirExpression, HirLiteral}; pub use crate::hir_def::stmt::HirStatement; @@ -74,6 +75,16 @@ pub mod macros_api { crate_id: &CrateId, context: &HirContext, ) -> Result<SortedModule, (MacroError, FileId)>; + + // TODO(#4653): generalize this function + fn process_unresolved_traits_impls( + &self, + _crate_id: &CrateId, + _context: &mut HirContext, + _unresolved_traits_impls: &[UnresolvedTraitImpl], + _collected_functions: &mut Vec<UnresolvedFunctions>, + ) ->
Result<(), (MacroError, FileId)>; + /// Function to manipulate the AST after type checking has been completed. /// The AST after type checking has been done is called the HIR. fn process_typed_ast( diff --git a/compiler/noirc_frontend/src/node_interner.rs b/compiler/noirc_frontend/src/node_interner.rs index 7d533947f65..5de43e59254 100644 --- a/compiler/noirc_frontend/src/node_interner.rs +++ b/compiler/noirc_frontend/src/node_interner.rs @@ -367,7 +367,7 @@ impl TraitId { } #[derive(Debug, Eq, PartialEq, Hash, Clone, Copy)] -pub struct TraitImplId(usize); +pub struct TraitImplId(pub usize); #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] pub struct TraitMethodId { diff --git a/noir_stdlib/src/bigint.nr b/noir_stdlib/src/bigint.nr index 66e81f05812..98237a54779 100644 --- a/noir_stdlib/src/bigint.nr +++ b/noir_stdlib/src/bigint.nr @@ -1,4 +1,5 @@ -use crate::ops::{Add, Sub, Mul, Div, Rem}; +use crate::ops::{Add, Sub, Mul, Div}; +use crate::cmp::Eq; global bn254_fq = [0x47, 0xFD, 0x7C, 0xD8, 0x16, 0x8C, 0x20, 0x3C, 0x8d, 0xca, 0x71, 0x68, 0x91, 0x6a, 0x81, 0x97, 0x5d, 0x58, 0x81, 0x81, 0xb6, 0x45, 0x50, 0xb8, 0x29, 0xa0, 0x31, 0xe1, 0x72, 0x4e, 0x64, 0x30]; @@ -30,46 +31,320 @@ impl BigInt { #[builtin(bigint_from_le_bytes)] fn from_le_bytes(bytes: [u8], modulus: [u8]) -> BigInt {} #[builtin(bigint_to_le_bytes)] - pub fn to_le_bytes(self) -> [u8] {} + fn to_le_bytes(self) -> [u8] {} - pub fn bn254_fr_from_le_bytes(bytes: [u8]) -> BigInt { - BigInt::from_le_bytes(bytes, bn254_fr) + fn check_32_bytes(self: Self, other: BigInt) -> bool { + let bytes = self.to_le_bytes(); + let o_bytes = other.to_le_bytes(); + let mut result = true; + for i in 0..32 { + result = result & (bytes[i] == o_bytes[i]); + } + result } - pub fn bn254_fq_from_le_bytes(bytes: [u8]) -> BigInt { - BigInt::from_le_bytes(bytes, bn254_fq) +} + +trait BigField { + fn from_le_bytes(bytes: [u8]) -> Self; + fn to_le_bytes(self) -> [u8]; +} + +struct Secpk1Fq { + inner: BigInt, +} + +impl BigField for Secpk1Fq { + fn from_le_bytes(bytes: [u8]) -> Secpk1Fq { + Secpk1Fq { + inner: BigInt::from_le_bytes(bytes, secpk1_fq) + } + } + fn to_le_bytes(self) -> [u8] { + self.inner.to_le_bytes() + } +} + +impl Add for Secpk1Fq { + fn add(self: Self, other: Secpk1Fq) -> Secpk1Fq { + Secpk1Fq { + inner: self.inner.bigint_add(other.inner) + } + } +} +impl Sub for Secpk1Fq { + fn sub(self: Self, other: Secpk1Fq) -> Secpk1Fq { + Secpk1Fq { + inner: self.inner.bigint_sub(other.inner) + } + } +} +impl Mul for Secpk1Fq { + fn mul(self: Self, other: Secpk1Fq) -> Secpk1Fq { + Secpk1Fq { + inner: self.inner.bigint_mul(other.inner) + } + + } +} +impl Div for Secpk1Fq { + fn div(self: Self, other: Secpk1Fq) -> Secpk1Fq { + Secpk1Fq { + inner: self.inner.bigint_div(other.inner) + } + } +} +impl Eq for Secpk1Fq { + fn eq(self: Self, other: Secpk1Fq) -> bool { + self.inner.check_32_bytes(other.inner) + } +} + +struct Secpk1Fr { + inner: BigInt, +} + +impl BigField for Secpk1Fr { + fn from_le_bytes(bytes: [u8]) -> Secpk1Fr { + Secpk1Fr { + inner: BigInt::from_le_bytes(bytes, secpk1_fr) + } } - pub fn secpk1_fq_from_le_bytes(bytes: [u8]) -> BigInt { - BigInt::from_le_bytes(bytes, secpk1_fq) + fn to_le_bytes(self) -> [u8] { + self.inner.to_le_bytes() } - pub fn secpk1_fr_from_le_bytes(bytes: [u8]) -> BigInt { - BigInt::from_le_bytes(bytes, secpk1_fr) +} + +impl Add for Secpk1Fr { + fn add(self: Self, other: Secpk1Fr) -> Secpk1Fr { + Secpk1Fr { + inner: self.inner.bigint_add(other.inner) + } + } +} +impl Sub for Secpk1Fr { + fn sub(self: 
Self, other: Secpk1Fr) -> Secpk1Fr { + Secpk1Fr { + inner: self.inner.bigint_sub(other.inner) + } + } +} +impl Mul for Secpk1Fr { + fn mul(self: Self, other: Secpk1Fr) -> Secpk1Fr { + Secpk1Fr { + inner: self.inner.bigint_mul(other.inner) + } + + } +} +impl Div for Secpk1Fr { + fn div(self: Self, other: Secpk1Fr) -> Secpk1Fr { + Secpk1Fr { + inner: self.inner.bigint_div(other.inner) + } + } +} +impl Eq for Secpk1Fr { + fn eq(self: Self, other: Secpk1Fr) -> bool { + self.inner.check_32_bytes(other.inner) + } +} + +struct Bn254Fr { + inner: BigInt, +} + +impl BigField for Bn254Fr { + fn from_le_bytes(bytes: [u8]) -> Bn254Fr { + Bn254Fr { + inner: BigInt::from_le_bytes(bytes, bn254_fr) + } + } + fn to_le_bytes(self) -> [u8] { + self.inner.to_le_bytes() } } -impl Add for BigInt { - fn add(self: Self, other: BigInt) -> BigInt { - self.bigint_add(other) +impl Add for Bn254Fr { + fn add(self: Self, other: Bn254Fr) -> Bn254Fr { + Bn254Fr { + inner: self.inner.bigint_add(other.inner) + } + } +} +impl Sub for Bn254Fr { + fn sub(self: Self, other: Bn254Fr) -> Bn254Fr { + Bn254Fr { + inner: self.inner.bigint_sub(other.inner) + } } } -impl Sub for BigInt { - fn sub(self: Self, other: BigInt) -> BigInt { - self.bigint_sub(other) +impl Mul for Bn254Fr { + fn mul(self: Self, other: Bn254Fr) -> Bn254Fr { + Bn254Fr { + inner: self.inner.bigint_mul(other.inner) + } + } } -impl Mul for BigInt { - fn mul(self: Self, other: BigInt) -> BigInt { - self.bigint_mul(other) +impl Div for Bn254Fr { + fn div(self: Self, other: Bn254Fr) -> Bn254Fr { + Bn254Fr { + inner: self.inner.bigint_div(other.inner) + } } } -impl Div for BigInt { - fn div(self: Self, other: BigInt) -> BigInt { - self.bigint_div(other) +impl Eq for Bn254Fr { + fn eq(self: Self, other: Bn254Fr) -> bool { + self.inner.check_32_bytes(other.inner) } } -impl Rem for BigInt { - fn rem(self: Self, other: BigInt) -> BigInt { - let quotient = self.bigint_div(other); - self.bigint_sub(quotient.bigint_mul(other)) + +struct Bn254Fq { + inner: BigInt, +} + +impl BigField for Bn254Fq { + fn from_le_bytes(bytes: [u8]) -> Bn254Fq { + Bn254Fq { + inner: BigInt::from_le_bytes(bytes, bn254_fq) + } + } + fn to_le_bytes(self) -> [u8] { + self.inner.to_le_bytes() } } +impl Add for Bn254Fq { + fn add(self: Self, other: Bn254Fq) -> Bn254Fq { + Bn254Fq { + inner: self.inner.bigint_add(other.inner) + } + } +} +impl Sub for Bn254Fq { + fn sub(self: Self, other: Bn254Fq) -> Bn254Fq { + Bn254Fq { + inner: self.inner.bigint_sub(other.inner) + } + } +} +impl Mul for Bn254Fq { + fn mul(self: Self, other: Bn254Fq) -> Bn254Fq { + Bn254Fq { + inner: self.inner.bigint_mul(other.inner) + } + + } +} +impl Div for Bn254Fq { + fn div(self: Self, other: Bn254Fq) -> Bn254Fq { + Bn254Fq { + inner: self.inner.bigint_div(other.inner) + } + } +} +impl Eq for Bn254Fq { + fn eq(self: Self, other: Bn254Fq) -> bool { + self.inner.check_32_bytes(other.inner) + } +} + +struct Secpr1Fq { + inner: BigInt, +} + +impl BigField for Secpr1Fq { + fn from_le_bytes(bytes: [u8]) -> Secpr1Fq { + Secpr1Fq { + inner: BigInt::from_le_bytes(bytes, secpr1_fq) + } + } + fn to_le_bytes(self) -> [u8] { + self.inner.to_le_bytes() + } +} + +impl Add for Secpr1Fq { + fn add(self: Self, other: Secpr1Fq) -> Secpr1Fq { + Secpr1Fq { + inner: self.inner.bigint_add(other.inner) + } + } +} +impl Sub for Secpr1Fq { + fn sub(self: Self, other: Secpr1Fq) -> Secpr1Fq { + Secpr1Fq { + inner: self.inner.bigint_sub(other.inner) + } + } +} +impl Mul for Secpr1Fq { + fn mul(self: Self, other: Secpr1Fq) -> Secpr1Fq { + Secpr1Fq { + inner: 
self.inner.bigint_mul(other.inner) + } + + } +} +impl Div for Secpr1Fq { + fn div(self: Self, other: Secpr1Fq) -> Secpr1Fq { + Secpr1Fq { + inner: self.inner.bigint_div(other.inner) + } + } +} +impl Eq for Secpr1Fq { + fn eq(self: Self, other: Secpr1Fq) -> bool { + self.inner.check_32_bytes(other.inner) + } +} + +struct Secpr1Fr { + inner: BigInt, +} + +impl BigField for Secpr1Fr { + fn from_le_bytes(bytes: [u8]) -> Secpr1Fr { + Secpr1Fr { + inner: BigInt::from_le_bytes(bytes, secpr1_fr) + } + } + fn to_le_bytes(self) -> [u8] { + self.inner.to_le_bytes() + } +} + +impl Add for Secpr1Fr { + fn add(self: Self, other: Secpr1Fr) -> Secpr1Fr { + Secpr1Fr { + inner: self.inner.bigint_add(other.inner) + } + } +} +impl Sub for Secpr1Fr { + fn sub(self: Self, other: Secpr1Fr) -> Secpr1Fr { + Secpr1Fr { + inner: self.inner.bigint_sub(other.inner) + } + } +} +impl Mul for Secpr1Fr { + fn mul(self: Self, other: Secpr1Fr) -> Secpr1Fr { + Secpr1Fr { + inner: self.inner.bigint_mul(other.inner) + } + + } +} +impl Div for Secpr1Fr { + fn div(self: Self, other: Secpr1Fr) -> Secpr1Fr { + Secpr1Fr { + inner: self.inner.bigint_div(other.inner) + } + } +} +impl Eq for Secpr1Fr { + fn eq(self: Self, other: Secpr1Fr) -> bool { + self.inner.check_32_bytes(other.inner) + } +} diff --git a/noir_stdlib/src/uint128.nr b/noir_stdlib/src/uint128.nr index d6f0b1e2232..b91ed5c4cb2 100644 --- a/noir_stdlib/src/uint128.nr +++ b/noir_stdlib/src/uint128.nr @@ -161,7 +161,7 @@ impl Sub for U128 { let borrow = (low == lo) as Field; let high = self.hi - b.hi - borrow; let hi = high as u64 as Field; - assert(hi == high, "attempt to subtract with overflow"); + assert(hi == high, "attempt to subtract with underflow"); U128 { lo, hi, diff --git a/noirc_macros/src/lib.rs b/noirc_macros/src/lib.rs index 4337214d69f..9a916843200 100644 --- a/noirc_macros/src/lib.rs +++ b/noirc_macros/src/lib.rs @@ -1,3 +1,5 @@ +use noirc_frontend::hir::def_collector::dc_crate::UnresolvedFunctions; +use noirc_frontend::hir::def_collector::dc_crate::UnresolvedTraitImpl; use noirc_frontend::macros_api::parse_program; use noirc_frontend::macros_api::HirContext; use noirc_frontend::macros_api::SortedModule; @@ -16,6 +18,16 @@ impl MacroProcessor for AssertMessageMacro { transform(ast, crate_id) } + fn process_unresolved_traits_impls( + &self, + _crate_id: &CrateId, + _context: &mut HirContext, + _unresolved_traits_impls: &[UnresolvedTraitImpl], + _collected_functions: &mut Vec<UnresolvedFunctions>, + ) -> Result<(), (MacroError, FileId)> { + Ok(()) + } + // This macro does not need to process any information after name resolution fn process_typed_ast( &self, diff --git a/scripts/test_native.sh b/scripts/test_native.sh index bc1c47ecf12..9b9aa0ce4d7 100755 --- a/scripts/test_native.sh +++ b/scripts/test_native.sh @@ -12,4 +12,6 @@ else export GIT_COMMIT=$(git rev-parse --verify HEAD) fi -cargo test --workspace --locked --release \ No newline at end of file +cargo fmt --all --check +cargo clippy --workspace --locked --release +cargo test --workspace --locked --release diff --git a/test_programs/execution_success/1327_concrete_in_generic/src/main.nr b/test_programs/execution_success/1327_concrete_in_generic/src/main.nr index 8250b31789b..3e476107c29 100644 --- a/test_programs/execution_success/1327_concrete_in_generic/src/main.nr +++ b/test_programs/execution_success/1327_concrete_in_generic/src/main.nr @@ -20,7 +20,7 @@ impl B { } } // --- -// Set +// PrivateSet struct C { t_d_interface: MethodInterface, } @@ -55,7 +55,7 @@ fn get_d_method_interface() -> MethodInterface { // ---
fn main(input: Field) -> pub Field { let b: B<C<D>> = B::new(new_concrete_c_over_d); - let c: C<D> = b.get_t_c(); // Singleton + let c: C<D> = b.get_t_c(); // PrivateMutable let d: D = D { d: input }; // Note let output = c.call_method_of_t_d(d); diff --git a/test_programs/execution_success/bigint/src/main.nr b/test_programs/execution_success/bigint/src/main.nr index 046d7d07d5e..b93fec370e5 100644 --- a/test_programs/execution_success/bigint/src/main.nr +++ b/test_programs/execution_success/bigint/src/main.nr @@ -1,9 +1,8 @@ use dep::std::bigint; fn main(mut x: [u8; 5], y: [u8; 5]) { - let a = bigint::BigInt::secpk1_fq_from_le_bytes([x[0], x[1], x[2], x[3], x[4]]); - let b = bigint::BigInt::secpk1_fq_from_le_bytes([y[0], y[1], y[2], y[3], y[4]]); - + let a = bigint::Secpk1Fq::from_le_bytes([x[0], x[1], x[2], x[3], x[4]]); + let b = bigint::Secpk1Fq::from_le_bytes([y[0], y[1], y[2], y[3], y[4]]); let a_bytes = a.to_le_bytes(); let b_bytes = b.to_le_bytes(); for i in 0..5 { @@ -12,10 +11,6 @@ fn main(mut x: [u8; 5], y: [u8; 5]) { } let d = a * b - b; - let d_bytes = d.to_le_bytes(); - let d1 = bigint::BigInt::secpk1_fq_from_le_bytes(597243850900842442924.to_le_bytes(10)); - let d1_bytes = d1.to_le_bytes(); - for i in 0..32 { - assert(d_bytes[i] == d1_bytes[i]); - } + let d1 = bigint::Secpk1Fq::from_le_bytes(597243850900842442924.to_le_bytes(10)); + assert(d1 == d); } diff --git a/test_programs/execution_success/brillig_cow_regression/Prover.toml b/test_programs/execution_success/brillig_cow_regression/Prover.toml index f0a4dc2485d..44813823448 100644 --- a/test_programs/execution_success/brillig_cow_regression/Prover.toml +++ b/test_programs/execution_success/brillig_cow_regression/Prover.toml @@ -3,7 +3,7 @@ encrypted_logs_hash = [ "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", ] -new_commitments = [ +new_note_hashes = [ "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x0000000000000000000000000000000000000000000000000000000000000000", diff --git a/test_programs/execution_success/brillig_cow_regression/src/main.nr b/test_programs/execution_success/brillig_cow_regression/src/main.nr index 7f3dd766480..ba51548d9dd 100644 --- a/test_programs/execution_success/brillig_cow_regression/src/main.nr +++ b/test_programs/execution_success/brillig_cow_regression/src/main.nr @@ -1,12 +1,12 @@ // Tests a performance regression found in aztec-packages with brillig cow optimization -global MAX_NEW_COMMITMENTS_PER_TX = 64; -global MAX_NEW_NULLIFIERS_PER_TX = 64; -global MAX_NEW_L2_TO_L1_MSGS_PER_TX = 2; -global MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX = 16; -global MAX_NEW_CONTRACTS_PER_TX = 1; -global NUM_ENCRYPTED_LOGS_HASHES_PER_TX = 1; -global NUM_UNENCRYPTED_LOGS_HASHES_PER_TX = 1; +global MAX_NEW_NOTE_HASHES_PER_TX: u64 = 64; +global MAX_NEW_NULLIFIERS_PER_TX: u64 = 64; +global MAX_NEW_L2_TO_L1_MSGS_PER_TX: u64 = 2; +global MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX: u64 = 16; +global MAX_NEW_CONTRACTS_PER_TX: u64 = 1; +global NUM_ENCRYPTED_LOGS_HASHES_PER_TX: u64 = 1; +global NUM_UNENCRYPTED_LOGS_HASHES_PER_TX: u64 = 1; global NUM_FIELDS_PER_SHA256 = 2; global CALLDATA_HASH_INPUT_SIZE = 169; global CALL_DATA_HASH_LOG_FIELDS = 4; @@ -30,7 +30,7 @@ impl NewContractData { } struct DataToHash { - new_commitments: [Field; MAX_NEW_COMMITMENTS_PER_TX], + new_note_hashes: [Field; MAX_NEW_NOTE_HASHES_PER_TX], new_nullifiers: [Field;
MAX_NEW_NULLIFIERS_PER_TX], public_data_update_requests: [PublicDataUpdateRequest; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX], new_l2_to_l1_msgs: [Field; MAX_NEW_L2_TO_L1_MSGS_PER_TX], @@ -101,7 +101,7 @@ impl U256 { unconstrained fn main(kernel_data: DataToHash) -> pub [Field; NUM_FIELDS_PER_SHA256] { let mut calldata_hash_inputs = [0; CALLDATA_HASH_INPUT_SIZE]; - let new_commitments = kernel_data.new_commitments; + let new_note_hashes = kernel_data.new_note_hashes; let new_nullifiers = kernel_data.new_nullifiers; let public_data_update_requests = kernel_data.public_data_update_requests; let newL2ToL1msgs = kernel_data.new_l2_to_l1_msgs; @@ -110,10 +110,10 @@ unconstrained fn main(kernel_data: DataToHash) -> pub [Field; NUM_FIELDS_PER_SHA let mut offset = 0; - for j in 0..MAX_NEW_COMMITMENTS_PER_TX { - calldata_hash_inputs[offset + j] = new_commitments[j]; + for j in 0..MAX_NEW_NOTE_HASHES_PER_TX { + calldata_hash_inputs[offset + j] = new_note_hashes[j]; } - offset += MAX_NEW_COMMITMENTS_PER_TX ; + offset += MAX_NEW_NOTE_HASHES_PER_TX ; for j in 0..MAX_NEW_NULLIFIERS_PER_TX { calldata_hash_inputs[offset + j] = new_nullifiers[j]; diff --git a/test_programs/execution_success/double_verify_nested_proof/Nargo.toml b/test_programs/execution_success/double_verify_nested_proof/Nargo.toml new file mode 100644 index 00000000000..3ead649c879 --- /dev/null +++ b/test_programs/execution_success/double_verify_nested_proof/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "double_verify_nested_proof" +type = "bin" +authors = [""] +compiler_version = ">=0.24.0" + +[dependencies] \ No newline at end of file diff --git a/test_programs/execution_success/double_verify_nested_proof/Prover.toml b/test_programs/execution_success/double_verify_nested_proof/Prover.toml new file mode 100644 index 00000000000..2a2b4b33586 --- /dev/null +++ b/test_programs/execution_success/double_verify_nested_proof/Prover.toml @@ -0,0 +1,5 @@ +key_hash = "0x13fd5b632ce9e9d12c9ac56c150ed09413df3edf40d1b7ab8ced9f262ec61b29" +proof_b = 
["0x0000000000000000000000000000000000000000000000042ab5d6d1986846cf","0x00000000000000000000000000000000000000000000000b75c020998797da78","0x0000000000000000000000000000000000000000000000005a107acb64952eca","0x000000000000000000000000000000000000000000000000000031e97a575e9d","0x00000000000000000000000000000000000000000000000b5666547acf8bd5a4","0x00000000000000000000000000000000000000000000000c410db10a01750aeb","0x00000000000000000000000000000000000000000000000d722669117f9758a4","0x000000000000000000000000000000000000000000000000000178cbf4206471","0x000000000000000000000000000000000000000000000000e91b8a11e7842c38","0x000000000000000000000000000000000000000000000007fd51009034b3357f","0x000000000000000000000000000000000000000000000009889939f81e9c7402","0x0000000000000000000000000000000000000000000000000000f94656a2ca48","0x000000000000000000000000000000000000000000000006fb128b46c1ddb67f","0x0000000000000000000000000000000000000000000000093fe27776f50224bd","0x000000000000000000000000000000000000000000000004a0c80c0da527a081","0x0000000000000000000000000000000000000000000000000001b52c2020d746","0x0000000000000000000000000000004bdfb9b586a637ceebd99ff26dcd3af427","0x0000000000000000000000000000000000265c2a5caf8e033e32d192807f5353","0x000000000000000000000000000000c0ab1db6ea40ac087cdc82c4a61ab00c86","0x0000000000000000000000000000000000010800ea8010f4bd3dd432d1cc11ed","0x000000000000000000000000000000eb3db3c41e3e636d686fd2903b1b913a01","0x000000000000000000000000000000000009bbab6b90377114c9e33d2a302226","0x000000000000000000000000000000758726e60ef4b211cf1c965fe08293365b","0x0000000000000000000000000000000000290ec193bc7f4f9006b9cea136bff5","0x0000000000000000000000000000005a2a389cd1702b3aa37f30ed974147d343","0x00000000000000000000000000000000001d83087d6efe0db3f482730b8d5e32","0x000000000000000000000000000000ad015051ed84c11d061e63eddbc3c0417a","0x0000000000000000000000000000000000155011c8b0167ff694740c48d67683","0x00000000000000000000000000000010c638a3b13dba3e77be3f10a3d096927c","0x00000000000000000000000000000000002372b9853214a1f76e5636dc26f146","0x00000000000000000000000000000005d9ca201c07bd4216689677feb9227715","0x000000000000000000000000000000000001dcf09921797dffb8eb21abef187b","0x00000000000000000000000000000070af16c9644b777dcf84d69e820e1ed895","0x00000000000000000000000000000000002d5e8f7eb7a4e20964dd94dc141534","0x0000000000000000000000000000003636871dbe453b366c3351be6e84144683","0x0000000000000000000000000000000000206464e290e4f4764365038ac77edf","0x000000000000000000000000000000175c20da35cc833dd542af57de9b62a2da","0x00000000000000000000000000000000001d2e31de3715e05ff6278f88e5a0db","0x000000000000000000000000000000328610e4eabb48be78d3c75f7c159205c5","0x000000000000000000000000000000000026720634b8076fee0a17b358b04653","0x0000000000000000000000000000000e5f48906892ffbff91e8b58ceabba0949","0x000000000000000000000000000000000013c349df687926ccb712622fc72a36","0x000000000000000000000000000000a4b8c9046c7e7e4cc19bbf9a367668eac7","0x00000000000000000000000000000000002a81128e53672c33bb0dae0ff18f41","0x000000000000000000000000000000edb79df57c4a2303ed1e5c2d7ed1e1bdaf","0x000000000000000000000000000000000018d3cea4ce204eafd70c0ded024650","0x000000000000000000000000000000e5f82856854fe0a2d587f6a9ae8555f321","0x0000000000000000000000000000000000235480ec2adc05f04261054345e568","0x00000000000000000000000000000083607465f60b70b092f606853f4d9e96eb","0x000000000000000000000000000000000006569e3a3174bcb71efe46f7fb7e0f","0x000000000000000000000000000000cb4d5fc546f20f63e3b7cf60341956f36f","0x00000000000000000000000000000000
000e14b1932630bf606a637eabb7c80f","0x000000000000000000000000000000786f31c2e082aa7e398e6323bb48a27472","0x00000000000000000000000000000000002dd72746f5e5a4a438def122ae6bba","0x000000000000000000000000000000d007be60a28b744e49279fab277c8bd623","0x00000000000000000000000000000000000e52e2b940b9cd8d001209cc40f7c8","0x000000000000000000000000000000dd4357e24a1bda0b5a6c5eee657cfe9091","0x0000000000000000000000000000000000047bb24b20feb0b66089a96671c901","0x0000000000000000000000000000003fe7f42f34e3360ef0fa8bd9c17e6190a3","0x0000000000000000000000000000000000161d17a3848118e91b435b553d34e9","0x216fa2905e105e0c767687f9b5e81c2e4ce03abe2993ac8dcd9e8d89e088966f","0x1288ba942d41c7f4b048e125454253bc7d7ffc0875365c0b8f75a2bb3ea90b42","0x1ad706f84cffcc62fa030f1bc57cb478a687aa74c1019beeda9bab4e40d35373","0x03050c8016b8041a557a46840ab4166a9c2531eb7c3985a447996a334e0caf5f","0x2b3c485da75bdaef8cec120bd08bc21e3ff717740114d13d3811006215a1fb24","0x008fc8c76c4d8cbba8653bf0919c047d379941be60c7afc7250bc6bfc5f29ad5","0x1993ae2a0da54e5e643533fdefbf54a0df21115b2ee79a63a7f477c2c9c4a5d5","0x22520fa7fde2d72b9776c07c9b897ef7ce48f8a7937ec0cacb01d3e23f72b78a","0x259b7b9c1dbfe88d613102f0e8548f0c770a1c83876b26a5cb4b6790740cb487","0x043006102e519b8011d089f51811337fbdedc856a73842f7c8197be176b08d38","0x2222bd509df909ce38b67b3172b24c8ce1e0e1dd0d811f4fae6957e3418415ac","0x1b1204474652fa85979f0274145680718bed80466f4c91ad58f37df1b4fe2395","0x08d57251b42c0697535617ae239d7f3ef9d1558c1bb71fa01c68e7b5fd266139","0x04ca7f21f1d0ba50ecf00c615d18bf8f7291bb04f513cbef78fb6d03ed9b0cb2","0x070ae1119c80846863a4cd971e535ff87fe34473eb5730b14e5b30212b7b78a1","0x1128027ded5032cc265c96ff81d76e2ce06420702fd4e5bc4e24fda695961651","0x1ef7a9e5885b934eee2b44335157309de2f60519e50a8471e5e24495dff2a9fe","0x2d0dad89e5633da796c0c897804575879bc5dc7ad3805b44260943101ac9609e","0x287edcbd60e9d636ba1cd1c9ff3ec2b71b694112c65876525f5e2f8209cd747f","0x24b1157a1cb5bdbd2829de066b8c5573584f9b8638bf9bad476a1fe1172da4b9","0x1f9825731638cd1c43f7cf035b808a1704f122453318cb88fe3b1164f034e170","0x07003a6552f3a6ab1ad3e0717be0af049767b554ff88986c4e48224632523405","0x288002c2ff29874077b2c216a35cb61ecc97d12750a3a86574d50acd42607095","0x0a12fc37918ce7dcbd0d354a05bdbb409a8e4530d86f3d8ce07231240590f65c","0x2ec631b05fc693b07286eecf6b6ac1aef0d073cdced8e050244ec7cf4e8f6e42","0x107bc98da225efe7749d51b9966c3edd6c245f2e5cf183a924ba982817e4525a","0x2ca603f77ea0ca42b6f38cd43bc3cc09442906373a2f197fdc976533066ac343","0x138ace5653809375aa9d95240fa9b6508860a471aed70bcc8b7dd52ae34809f3","0x21e1eb924951881c3d0ce5657d2e26a3e5150ce8f49c9e4d0476c5fdf1e43a54","0x2e2daec93f5e94f6784ce569883cf285da12244b38fb001b94bfb99bb4de060c","0x186a8d30c973bef6286115865a690a2528adbeea8376e5221fffeb6a135d9904","0x1e0d9d90628be31ebc16ef1d85d5f9e6fb8cb57e6a74e576f958cf21db45042e","0x124ceb5e1d9da6d0fe163e961643bb0423c926ef4e0c583eaba9e32d99ec6c7c","0x2db34cc38a50bfea50750830710c13b4d80f4ec0e8df8f186047ee36f338eeeb","0x0b174aa403b42235d5bdde8e9f5bb6c52ae62fec2884334cbe3e53418bd2463d","0x1571ebd9c3854c2f63418f206c6937495450ab9238a238b9c63fbf5867378c5b","0x24f92d1ab27e5810e5b7f4b31254526822f866602922258135c5eb5a2b21ca04","0x20cc7f5ba8df67d9c95642e2662654eb2305c6a280ce1747aec88a581ee50647","0x24112b99f63bbda7487709396dff22aae89ae809263021b65503ff7f809c7e38","0x06805c80f64efd1fa7f08382c9981aad9cecad78808da670477566674141bc48","0x146d4801d6f5898051ee0d7c95375a65ea0e6deeac6ffee1d9b9cf64da72dc3e","0x000000000000000000000000000000425b99a5c96b22ba0286d9ebeecf8e4559","0x0000000000000000000000000000000000110be4b8fe46a96303c205d3a1d61d","0
x000000000000000000000000000000d9ff7ae757f2f0c91d1f1e71fac1b27b74","0x000000000000000000000000000000000009b0c285f6c221f6eba93b1e330ac4","0x0000000000000000000000000000004055cd5738a25ab1860a1e35555962dc19","0x00000000000000000000000000000000001a8726ccf54e17cf1b005e3e04879a","0x0000000000000000000000000000007be4dc343e9c2e0d4a9156f1ef9769f65a","0x00000000000000000000000000000000002b0e96f68f6509615ca0544dfa3107"] +public_inputs = ["0x0000000000000000000000000000000000000000000000000000000000000003"] +verification_key = ["0x2260e724844bca5251829353968e4915305258418357473a5c1d597f613f6cbd","0x0000000000000000000000000000000000000000000000000000000000080000","0x0000000000000000000000000000000000000000000000000000000000000005","0x0000000000000000000000000000000000000000000000000000000000080000","0x0000000000000000000000000000000000000000000000000000000000000011","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000002","0x0000000000000000000000000000000000000000000000000000000000000003","0x0000000000000000000000000000000000000000000000000000000000000004","0x0000000000000000000000000000000000000000000000000000000000000005","0x0000000000000000000000000000000000000000000000000000000000000006","0x0000000000000000000000000000000000000000000000000000000000000007","0x0000000000000000000000000000000000000000000000000000000000000008","0x0000000000000000000000000000000000000000000000000000000000000009","0x000000000000000000000000000000000000000000000000000000000000000a","0x000000000000000000000000000000000000000000000000000000000000000b","0x000000000000000000000000000000000000000000000000000000000000000c","0x000000000000000000000000000000000000000000000000000000000000000d","0x000000000000000000000000000000000000000000000000000000000000000e","0x000000000000000000000000000000000000000000000000000000000000000f","0x0000000000000000000000000000000000000000000000000000000000000010","0x000000000000000000000000000000ba765ed919550454064328e0fd7c51ff4a","0x00000000000000000000000000000000000418b2f4104c289eb20cb95344c850","0x0000000000000000000000000000006defa500aab13c8cf3c00117db573bef2c","0x000000000000000000000000000000000026dea3ea8fb7e77b5bfa8443397dc7","0x0000000000000000000000000000009a5c33c4054817f3402e68aeca4728a405","0x00000000000000000000000000000000002abf5ea67ec384cb2e5998c7a48b3a","0x000000000000000000000000000000ee78817f7d959ea45abb27404e3371c708","0x0000000000000000000000000000000000260a979e8190a83b0bce1351b92d3c","0x000000000000000000000000000000ec447bd83a83883ce4c11573ce24845c74","0x000000000000000000000000000000000005b23c2076f50d10baa061a67b9200","0x00000000000000000000000000000058ffc16cfb64ec06a56a2b1a9047fb8f0c","0x000000000000000000000000000000000011d2f5833d720e1d0a02749471e7ad","0x000000000000000000000000000000416dd6c8c0d1cbb185b3c3197eac767d0b","0x000000000000000000000000000000000023b9c5a4e525926d64247ec92e0baf","0x000000000000000000000000000000a55f5f52ebc8936a58e413a1068d94d376","0x00000000000000000000000000000000000be3f377ccc88a6cb5df6f230da95e","0x00000000000000000000000000000070a162a08d4d4800f450af94888f8f3480","0x0000000000000000000000000000000000085883b02590372a7b36a1c57db4c3","0x00000000000000000000000000000045b0b661ea73930ee3327ccff8a0ca9ce1","0x00000000000000000000000000000000002854cab8629792eb07e9ef81bc46ee","0x00000000000000000000000000000067f365021e0e42117c43a39419d1d9cc73","0x000000000000000000000000000000000022c370b38f0a97eb3d718146f2284
b","0x00000000000000000000000000000016de6670aba605233072b8eecfa9069b06","0x000000000000000000000000000000000002c29c49d66457bcbd4fa5bf6096fd","0x000000000000000000000000000000e32e8ce4f18ba30ce53245044d0c60508a","0x00000000000000000000000000000000001170220489121b8eedd58a4b5599df","0x000000000000000000000000000000139ed828b410380d053ec0f056656f5703","0x0000000000000000000000000000000000072aebdce25ba333c86769adec1362","0x000000000000000000000000000000aa352ee565f91fc2b73323fc824bc14636","0x00000000000000000000000000000000001f3e272a192808ec9283ee3bb4df4b","0x00000000000000000000000000000005c72c8c88be0259ae226ccb0488452b4b","0x00000000000000000000000000000000001c68407d694502b929b77cbbab8374","0x0000000000000000000000000000003716bda8267f29931ed0aa811e4607f1c6","0x000000000000000000000000000000000007d888936af2141bb2f6823a587e81","0x0000000000000000000000000000004cf1a4f39c5363f70ecc9e433d751ea529","0x00000000000000000000000000000000002e8a81232ec84e48032178f1ee6edb","0x000000000000000000000000000000388e8265061fa0c92c96fc85d99bac7891","0x00000000000000000000000000000000002e3c516222565332e6e7362400bc5f","0x0000000000000000000000000000003a68d13661a0906e5828fe8271a336bf64","0x00000000000000000000000000000000001412d3e67497c98e5ec2aaee8779f5","0x000000000000000000000000000000b5d123498733b5279d8bcbade0d8345ef7","0x00000000000000000000000000000000000fa572890537089a5fb36953e7a1ca","0x0000000000000000000000000000004d8ff057fc9936a693035266c80c6ea57d","0x00000000000000000000000000000000001907a614968d777fcc506f639799f6","0x00000000000000000000000000000010769533212d3cafbf6ac378c8055c33a2","0x00000000000000000000000000000000000eac32851272327acdc0890792dfb7","0x000000000000000000000000000000e3e32f343643d319a977beb0c2b0ab9b31","0x00000000000000000000000000000000000c10c4c9dce6ff648ef70f54d45ba6","0x00000000000000000000000000000025721304165b9b313b94cf2c77b61dc1ef","0x000000000000000000000000000000000024b8083b0f323c2703a7255caa7078","0x0000000000000000000000000000002b860372c65049c88f6532cbd360917b11","0x000000000000000000000000000000000011ee2ac2bc36cdfdc107eca47369f3","0x0000000000000000000000000000001c1b0233882acb5a78a977642e4dce91d5","0x000000000000000000000000000000000020922a70853993b3516eeb01d7c8a4","0x0000000000000000000000000000001f90b5fade69a55a2da8d2db3c62b62d7c","0x0000000000000000000000000000000000173312bb89c6722b548ff87a7487a2","0x0000000000000000000000000000009d618ffd933cf58a8a0953dc76f97cf108","0x00000000000000000000000000000000000ddc3b6d8e59cf0996ca71ad4132ca","0x000000000000000000000000000000ec4c6a253f431d3f3fc06aa0e5b0448b8c","0x0000000000000000000000000000000000153193287060386695f4f2d0d3525d","0x0000000000000000000000000000004bd25585edb9319128045c005d48491b1e","0x00000000000000000000000000000000001170f0ece62f8c572bca96b141d27f","0x0000000000000000000000000000003dd2e37b8edb1f56b785809d7710bf1c88","0x0000000000000000000000000000000000246cd041690f653f88ed0c56ad282a","0x00000000000000000000000000000034bc8a00ce9d452888e5fc2b5a7e14fed7","0x000000000000000000000000000000000026153c937447356a0c6d6be09d85eb","0x000000000000000000000000000000555388ad9364679246b07992f84b4e91b2","0x0000000000000000000000000000000000189da022421fbd8dfd7973084d978e","0x000000000000000000000000000000e8c0f9753e2a5a35acec051fafe2cecce5","0x0000000000000000000000000000000000285311c5e9a4cbb56a3f04f29d5443","0x00000000000000000000000000000092d2d0ac76a1be7f1fad96cbd997175312","0x00000000000000000000000000000000002436400260c9d3180beedd0bf49fec","0x000000000000000000000000000000887d86d95387bbb29616cc5c41ee4a2669","0x000000000000000000000000000000
0000063bf32f8addf7a3e1cf6cd223cb71","0x000000000000000000000000000000d841dc7d9da6cc699e8377b2a04723fea0","0x00000000000000000000000000000000002ce091428268c212a2bcfea0edb338","0x00000000000000000000000000000012fe4771092fa47e4d6050701527133f09","0x00000000000000000000000000000000002f36672865c5ae4976486fdaf2d81d","0x0000000000000000000000000000008e6bced56a3d94dfe9d476da3a424b8eff","0x00000000000000000000000000000000002d6303cf28aa721f4e5348a0d83642","0x0000000000000000000000000000008c5807dace05b2079d200f7f71caffdaf7","0x000000000000000000000000000000000008f7beb50cb16f3b6210aff1bdb05d","0x0000000000000000000000000000004f9ee08a49536eb54a238b982c4dfd5446","0x000000000000000000000000000000000014f55e7065eabacf1a7d6cbf1f6765","0x00000000000000000000000000000021150153ec654b02a66d9bea056185877e","0x00000000000000000000000000000000000e7bf50a142b21057bcfd340a5e77c","0x00000000000000000000000000000038110629263a662f10464b375f988cccda","0x00000000000000000000000000000000001964a0ab814f71282cd159df492710","0x000000000000000000000000000000b9310dd49ea52ba735b9654ebced7bc67b","0x000000000000000000000000000000000019ad72f92554ce44921ca3f420f995","0x000000000000000000000000000000d67d7e81fa6e1cdfae6d84510a8cb7e257","0x00000000000000000000000000000000000a6ec9d85c10a85e8f31eaedb4e459"] +proof = ["0x0000000000000000000000000000000000000000000000042ab5d6d1986846cf","0x00000000000000000000000000000000000000000000000b75c020998797da78","0x0000000000000000000000000000000000000000000000005a107acb64952eca","0x000000000000000000000000000000000000000000000000000031e97a575e9d","0x00000000000000000000000000000000000000000000000b5666547acf8bd5a4","0x00000000000000000000000000000000000000000000000c410db10a01750aeb","0x00000000000000000000000000000000000000000000000d722669117f9758a4","0x000000000000000000000000000000000000000000000000000178cbf4206471","0x000000000000000000000000000000000000000000000000e91b8a11e7842c38","0x000000000000000000000000000000000000000000000007fd51009034b3357f","0x000000000000000000000000000000000000000000000009889939f81e9c7402","0x0000000000000000000000000000000000000000000000000000f94656a2ca48","0x000000000000000000000000000000000000000000000006fb128b46c1ddb67f","0x0000000000000000000000000000000000000000000000093fe27776f50224bd","0x000000000000000000000000000000000000000000000004a0c80c0da527a081","0x0000000000000000000000000000000000000000000000000001b52c2020d746","0x00000000000000000000000000000063cb03b1d83ae3942e11ca8ec63055898b","0x00000000000000000000000000000000001edaf70d547a857fbed6a9ff8a38c9","0x000000000000000000000000000000097fb881332193ff4489e213f600e6a007","0x00000000000000000000000000000000001f2903742639c3595d22b96d4d9c21","0x000000000000000000000000000000bca7215bb1bcdde52ed9cf845b7e54072d","0x0000000000000000000000000000000000188bd12b19073eb01e8be5bda41b3e","0x0000000000000000000000000000007d1a114656606c391bfb286ea4e14062a5","0x000000000000000000000000000000000026d8a3b8821da41b6b1d6b85872260","0x000000000000000000000000000000c49078b857741b82cba39d8a394c1876c1","0x00000000000000000000000000000000002f9b9f76f80a4ff456e60c024f8d03","0x0000000000000000000000000000004bab3e60680935219213ea32be70ec5100","0x00000000000000000000000000000000002c45bda56f0115cfde2678889694ab","0x0000000000000000000000000000006434e56313172088d5a6b10fdd1b94b4ca","0x000000000000000000000000000000000007ad41e7980534fc2f89e8ad7366ad","0x00000000000000000000000000000023d769c68ef65f0b4f06a01e655fb265e7","0x0000000000000000000000000000000000008d3b5d5b201ed6773c369fe20d10","0x0000000000000000000000000000005eacdd2121ba4b1cf0df09632
df6991fcf","0x0000000000000000000000000000000000005e98e857c8c1eb16cef913e44f90","0x0000000000000000000000000000003449da35dc7c0b67b0c3e99ced603ea381","0x000000000000000000000000000000000022347c8daec6739b183413a787fd13","0x000000000000000000000000000000df23d8f1ac4ddfced428737db15e63f603","0x000000000000000000000000000000000015e03670ba72d84269d764d8f8e725","0x000000000000000000000000000000457a7f854dbab545c8c94ccdb8e4b9ad45","0x00000000000000000000000000000000000a268fc41b7031912cec59dc0a7078","0x00000000000000000000000000000022fcb55824b67af33225f8f2e614fbbdb4","0x0000000000000000000000000000000000235f698e6aee7bf8ca94f4a44db006","0x000000000000000000000000000000a327da390bd3e01e4a7b639605fdfd9c42","0x0000000000000000000000000000000000210196c4fb53d660a3824867b2b1c5","0x000000000000000000000000000000728fb44750fa2b956221bd441fa61e32d6","0x0000000000000000000000000000000000073db9e2cafdf0fe22b5090855533e","0x0000000000000000000000000000004fe310e93730876891eebab46db9496dbc","0x000000000000000000000000000000000007d3574fe79c87011abdbd51a46670","0x000000000000000000000000000000adc522f42e085c51403fc50c83f35904b9","0x00000000000000000000000000000000000d2d9ef8fc0031b4568842a99b34eb","0x00000000000000000000000000000098586d928c8abc7cc56d571c8eded52168","0x000000000000000000000000000000000024279c001a40e94d3d149ec01a468a","0x00000000000000000000000000000066122aaf47d9d5060a2ce1d17cc5201be0","0x00000000000000000000000000000000001c21031d83d52e27867a611229d2ca","0x000000000000000000000000000000838dfc066499f7715682f755b42f3a4869","0x00000000000000000000000000000000001f816d2c5b2e903496f1443cb91de3","0x0000000000000000000000000000007ef917b6df805f430f8a0833942a7c3094","0x00000000000000000000000000000000000a9cefe716f31dbe37485179d60f0e","0x00000000000000000000000000000028adb1040bd0c07448de51d5cac9fd0495","0x00000000000000000000000000000000000c66b25a22c8b3ba82ec09ab4bdef3","0x2cc791d253f03f47cc88f7f0aeae481762f4aa6426712772544aaeca72466cb7","0x14197950f448f679eeff75c4e83dac9f0ebd5aa194709ea3875fb4e4b15bc2f2","0x1a92022c2ed8f8a41e3f392e22f1875f6916543bbb22c3aaf50d703de649c381","0x2ee77a26e78d5e1093dabd3612beee4b515a4f159992138e13ecd3f0afcfba18","0x2c280cba627b147142a2d333ee856000298708f9b5df0cc8d23c26d0936d6869","0x1b2569bb6f6b60b6f743ff892a39a490770d4ad40a961a06149d4968b0487a40","0x2f80351e43621d69b7e620338b2822e15dec9e6a2de16e8d04bb559153cd53a3","0x15a78b8ae9b3be431b609250b69c7cb746c6a689b2122150f258c6f7d67409fc","0x1334c47f273be542576813933e89a9130a342846272b39a2eab3ab7fc022d5fe","0x1031bdcafc5c0dad81c8b6c4931c9b442cd0c8a0bb9a729cc2f6bf0a18dc1b82","0x177f92f0cef76c5c45f55d16fa2be426354cdd4af6ac9aaad479c9b47f88656d","0x0064c0e0ec8984d612189e5287d59eedc1a6de52fc78bf72028f744350c27a0e","0x2c06222cf0d415c976e6904f1706b77cf438636ada3222e1c31c4957d6877dac","0x173da534b7001f44f19bb3e3f8601ac94fbf90b2e39b7d4079d8fac2d65102ea","0x012909bcdbd1167010cf0084028e851f3448f58946f4951b1b8d544d86b138c8","0x2975c3987f110c06bd8ced1d8bb0d398ac72c6f196ea639bdde58fa4b899d4a0","0x05c196fb2f6ccfd92a38ae526af85bccc3695ea0e2561e7a211c60360187602d","0x18a288590dd0cbfe5b7652458c9caddc9eac2f08e5822b64141ed1b4e805bda3","0x0cd08c41605b22a7ae31c3961486f645a32bff0ccaef63b6d661ef356db78560","0x05d5e48184693259f722f84ea48f9b84667d1e9db19e1381b2279fe24b01484b","0x2187a6f6a2398e5f0137880a983ff6b682b5a7c2b62e4bdfff6ff6becd0d53ab","0x1d4764ca9346e8ac48675320521e0daba651f480efe932302e8a9673580fc0d8","0x00cfcb920adeb0293acf26e63aeac4489622e4c806b93f1c72f8491cba3d0196","0x1bcd6a556800b8385ba1250afd69999fe2bb5518a6ba2cc461a4afba21ffbedb","0x11a15b3c8ef0e4ac0ff151
fba72b922f6c005519151a4f88557352265944aeea","0x063d550a154f2ce80b08fb169d137fa96dcea6a6c489e98e1390aa9a5db18928","0x25da993132041b9f667de044194f5c6b0cdae961cdea5f6dbbda8595f213ac08","0x22fcecc2e3794814bbb84700031cd75ec9817201c8c88df2e86407a14412f902","0x01583d25d2f91d646da02a520d3dbf758b0a0590a533bd1417a717fd0cd18915","0x18ebacffdc81e15547232dfc1a0e31ec2848a1e5b9c8509a92432c2549d93091","0x20a3d15aa70d04a841802fe1d990f56c6b9e6eadc17da2c0dfd2a817e6cf0430","0x0b497cc2e54412ce07c52effdce6c01de2c1a0e1d095a2a37f5351232400c0a1","0x14419bb69d02675b8d58e60ce88a2f4b6a43674461e4015e2e302285a42c5784","0x0c84db03ff77d0729bb68eab2d6d697b7caebd4ea3db781499492a6f0ef67765","0x1a676b1c6b0ab1c85b31af681e05751296c3d0a1a883668f5fe971827ce86fc9","0x08da949bf7603bfe20f3c152abe727051c6306cff322197e8fa56b390f565b5b","0x1fd77e041239f94e907dc3ae3069a70cbff726b9d8b3a368a4910c8a070a9c9a","0x03755d83a4f0fdfbb4fd1b2b465842e1bb707a419c2952a2ca9faba50d4be379","0x0ee90c8166adcb238d85c72a85db2248353610c55390a2ed54e59dd1c35c12d2","0x170bcd78efaa1b19bcfd065c2ec60b48aa1e62465df73e62f3bd291115315144","0x015d60e5cc5c7d67853993261bd9e3c6e56f95dee8724ce79c7601ee10c1a731","0x000000000000000000000000000000f0a8b99d65fc1555bafb688233a6489aea","0x0000000000000000000000000000000000043849f038ec96c8c1c6e242351361","0x0000000000000000000000000000001ad41d3dfebb280623d5b325f0a7aa38f7","0x00000000000000000000000000000000002e5f2119536daa9e6d1f9b82b797dd","0x000000000000000000000000000000e5570c2b6e74d0994e2fc8be1a9dab4160","0x00000000000000000000000000000000002ed426a78ed52d4c13f2c651a6d4ec","0x000000000000000000000000000000aba14637487e4d3ca30dc397416696c85c","0x000000000000000000000000000000000005ae1eb3eee0cdf5e5c7bb0ac9be07"] diff --git a/test_programs/execution_success/double_verify_nested_proof/src/main.nr b/test_programs/execution_success/double_verify_nested_proof/src/main.nr new file mode 100644 index 00000000000..0466f2a226d --- /dev/null +++ b/test_programs/execution_success/double_verify_nested_proof/src/main.nr @@ -0,0 +1,28 @@ +use dep::std; + +fn main( + verification_key: [Field; 114], + // This is the proof without public inputs attached. + // + // This means: the size of this does not change with the number of public inputs. + proof: [Field; 109], + public_inputs: pub [Field; 1], + // This is currently not public. It is fine given that the vk is a part of the circuit definition. + // I believe we want to eventually make it public too though. + key_hash: Field, + proof_b: [Field; 109] +) { + std::verify_proof( + verification_key.as_slice(), + proof.as_slice(), + public_inputs.as_slice(), + key_hash + ); + + std::verify_proof( + verification_key.as_slice(), + proof_b.as_slice(), + public_inputs.as_slice(), + key_hash + ); +} diff --git a/test_programs/execution_success/double_verify_proof/src/main.nr b/test_programs/execution_success/double_verify_proof/src/main.nr index ce087dc4e61..e4c6926efbc 100644 --- a/test_programs/execution_success/double_verify_proof/src/main.nr +++ b/test_programs/execution_success/double_verify_proof/src/main.nr @@ -1,12 +1,13 @@ use dep::std; +#[recursive] fn main( verification_key: [Field; 114], // This is the proof without public inputs attached. // // This means: the size of this does not change with the number of public inputs. proof: [Field; 93], - public_inputs: [Field; 1], + public_inputs: pub [Field; 1], // This is currently not public. It is fine given that the vk is a part of the circuit definition. // I believe we want to eventually make it public too though. 
key_hash: Field, diff --git a/test_programs/execution_success/regression_4124/src/main.nr b/test_programs/execution_success/regression_4124/src/main.nr index b47bf28d461..49ff68ee6ad 100644 --- a/test_programs/execution_success/regression_4124/src/main.nr +++ b/test_programs/execution_success/regression_4124/src/main.nr @@ -14,14 +14,14 @@ pub fn storage_read() -> [Field; N] { dep::std::unsafe::zeroed() } -struct PublicState { +struct PublicMutable { storage_slot: Field, } -impl PublicState { +impl PublicMutable { pub fn new(storage_slot: Field) -> Self { assert(storage_slot != 0, "Storage slot 0 not allowed. Storage slots must start from 1."); - PublicState { storage_slot } + PublicMutable { storage_slot } } pub fn read(_self: Self) -> T where T: MyDeserialize { @@ -32,7 +32,7 @@ impl PublicState { } fn main(value: Field) { - let ps: PublicState = PublicState::new(27); + let ps: PublicMutable = PublicMutable::new(27); // error here assert(ps.read() == value); diff --git a/test_programs/gates_report.sh b/test_programs/gates_report.sh index 4192c581376..3b0b4d9e148 100755 --- a/test_programs/gates_report.sh +++ b/test_programs/gates_report.sh @@ -2,7 +2,7 @@ set -e # These tests are incompatible with gas reporting -excluded_dirs=("workspace" "workspace_default_member") +excluded_dirs=("workspace" "workspace_default_member" "double_verify_nested_proof") # These tests cause failures in CI with a stack overflow for some reason. ci_excluded_dirs=("eddsa") diff --git a/tooling/debugger/ignored-tests.txt b/tooling/debugger/ignored-tests.txt index 7ac440c335b..c472e828739 100644 --- a/tooling/debugger/ignored-tests.txt +++ b/tooling/debugger/ignored-tests.txt @@ -7,6 +7,7 @@ brillig_nested_arrays brillig_references brillig_to_bytes_integration debug_logs +double_verify_nested_proof double_verify_proof modulus nested_array_dynamic diff --git a/tooling/nargo_fmt/tests/expected/contract.nr b/tooling/nargo_fmt/tests/expected/contract.nr index b80efeeb692..a03b8774700 100644 --- a/tooling/nargo_fmt/tests/expected/contract.nr +++ b/tooling/nargo_fmt/tests/expected/contract.nr @@ -10,14 +10,14 @@ contract Benchmarking { use dep::aztec::{ context::{Context}, note::{utils as note_utils, note_getter_options::NoteGetterOptions, note_header::NoteHeader}, - log::emit_unencrypted_log, state_vars::{map::Map, public_state::PublicState, set::Set}, + log::emit_unencrypted_log, state_vars::{Map, PublicMutable, PrivateSet}, types::type_serialization::field_serialization::{FieldSerializationMethods, FIELD_SERIALIZED_LEN}, types::address::{AztecAddress} }; struct Storage { - notes: Map>, - balances: Map>, + notes: Map>, + balances: Map>, } impl Storage { @@ -26,12 +26,12 @@ contract Benchmarking { notes: Map::new( context, 1, - |context, slot| { Set::new(context, slot, ValueNoteMethods) } + |context, slot| { PrivateSet::new(context, slot, ValueNoteMethods) } ), balances: Map::new( context, 2, - |context, slot| { PublicState::new(context, slot, FieldSerializationMethods) } + |context, slot| { PublicMutable::new(context, slot, FieldSerializationMethods) } ) } } @@ -74,17 +74,6 @@ contract Benchmarking { fn broadcast(owner: Field) { emit_unencrypted_log(&mut context, storage.balances.at(owner).read()); } - - unconstrained fn compute_note_hash_and_nullifier( - contract_address: AztecAddress, - nonce: Field, - storage_slot: Field, - note_type_id: Field, - preimage: [Field; VALUE_NOTE_LEN] - ) -> [Field; 4] { - let note_header = NoteHeader::new(contract_address, nonce, storage_slot); - 
note_utils::compute_note_hash_and_nullifier(ValueNoteMethods, note_header, preimage) - } } // Uses the token bridge contract, which tells which input token we need to talk to and handles the exit funds to L1 diff --git a/tooling/nargo_fmt/tests/input/contract.nr b/tooling/nargo_fmt/tests/input/contract.nr index d10bfb745b6..a03b8774700 100644 --- a/tooling/nargo_fmt/tests/input/contract.nr +++ b/tooling/nargo_fmt/tests/input/contract.nr @@ -5,30 +5,34 @@ contract Benchmarking { use dep::aztec::protocol_types::abis::function_selector::FunctionSelector; - use dep::value_note::{ - utils::{increment, decrement}, - value_note::{VALUE_NOTE_LEN, ValueNote, ValueNoteMethods}, - }; + use dep::value_note::{utils::{increment, decrement}, value_note::{VALUE_NOTE_LEN, ValueNote, ValueNoteMethods}}; use dep::aztec::{ context::{Context}, note::{utils as note_utils, note_getter_options::NoteGetterOptions, note_header::NoteHeader}, - log::emit_unencrypted_log, - state_vars::{map::Map, public_state::PublicState, set::Set}, + log::emit_unencrypted_log, state_vars::{Map, PublicMutable, PrivateSet}, types::type_serialization::field_serialization::{FieldSerializationMethods, FIELD_SERIALIZED_LEN}, - types::address::{AztecAddress}, + types::address::{AztecAddress} }; struct Storage { - notes: Map>, - balances: Map>, + notes: Map>, + balances: Map>, } impl Storage { fn init(context: Context) -> pub Self { Storage { - notes: Map::new(context, 1, |context, slot| { Set::new(context, slot, ValueNoteMethods) }), - balances: Map::new(context, 2, |context, slot| { PublicState::new(context, slot, FieldSerializationMethods) }), + notes: Map::new( + context, + 1, + |context, slot| { PrivateSet::new(context, slot, ValueNoteMethods) } + ), + balances: Map::new( + context, + 2, + |context, slot| { PublicMutable::new(context, slot, FieldSerializationMethods) } + ) } } } @@ -70,17 +74,6 @@ contract Benchmarking { fn broadcast(owner: Field) { emit_unencrypted_log(&mut context, storage.balances.at(owner).read()); } - - unconstrained fn compute_note_hash_and_nullifier( - contract_address: AztecAddress, - nonce: Field, - storage_slot: Field, - note_type_id: Field, - preimage: [Field; VALUE_NOTE_LEN] - ) -> [Field; 4] { - let note_header = NoteHeader::new(contract_address, nonce, storage_slot); - note_utils::compute_note_hash_and_nullifier(ValueNoteMethods, note_header, preimage) - } } // Uses the token bridge contract, which tells which input token we need to talk to and handles the exit funds to L1 From e80c5f73a4cdcba3f5cf44576c605ba1e611a2ab Mon Sep 17 00:00:00 2001 From: NaijaCoderGirl <150683513+NaijaCoderGirl@users.noreply.github.com> Date: Tue, 27 Feb 2024 11:59:22 +0000 Subject: [PATCH 45/45] chore(docs): correct 'Edit this page' URL for dev docs (#4433) # Description ## Problem* The "Edit this page" button in the generated documentation for the `noir-lang` project leads to a 404 - Page Not Found error for documents in the development version. This issue is due to the button's link containing `/processed-docs` in the path instead of `/docs`. The problem affects the development version of the documentation, whereas versioned documentation (e.g., v0.23.0) correctly links to editable markdown files on GitHub. 
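
The screenshots below show the broken and fixed behaviour. For readers of this message, here is a minimal sketch of the corrected mapping (the authoritative change is the one-line diff at the end of this message; the example `docPath` below is hypothetical):

```typescript
// Sketch of the fixed editUrl mapping (illustrative only).
// For the development docs, Docusaurus passes versionDocsDirPath = 'processed-docs',
// which is a preprocessing artifact; the editable sources live under docs/docs on GitHub.
const editUrl = ({ versionDocsDirPath, docPath }: { versionDocsDirPath: string; docPath: string }) =>
  `https://github.com/noir-lang/noir/edit/master/docs/${versionDocsDirPath.replace('processed-docs', 'docs')}/${docPath}`;

// Hypothetical example:
//   before the fix: .../edit/master/docs/processed-docs/index.md (404)
//   after the fix:  .../edit/master/docs/docs/index.md (editable source)
console.log(editUrl({ versionDocsDirPath: 'processed-docs', docPath: 'index.md' }));
```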
| Before | After |
| -------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------- |
| ![Before](https://github.com/noir-lang/noir/assets/150683513/260f8eb5-4ef9-46db-ba39-ae836d2bb7de) | ![After](https://github.com/noir-lang/noir/assets/150683513/087e2c8c-b9dc-4c35-b824-2b95742872f6) |

## Summary*

This pull request fixes the incorrect URL generated for the "Edit this page" button in the Docusaurus configuration. The `editUrl` function in `docusaurus.config.ts` now replaces `processed-docs` with `docs` in the path it generates for the development version of the documentation. This ensures that contributors are directed to the correct GitHub page when editing the documentation, eliminating the 404 error previously encountered.

## Additional Context

The issue was identified when attempting to edit pages from the development version of the docs. The button's link incorrectly pointed to a non-existent path due to the inclusion of `processed-docs` in the URL.

## Documentation*

- [ ] No documentation needed.
- [x] Documentation included in this PR.
- [ ] **[Exceptional Case]** Documentation to be submitted in a separate PR.

# PR Checklist*

- [x] I have tested the changes locally.
- [x] I have formatted the changes with [Prettier](https://prettier.io/) and/or `cargo fmt` on default settings.

Co-authored-by: Tom French <15848336+TomAFrench@users.noreply.github.com>
---
 docs/docusaurus.config.ts | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/docusaurus.config.ts b/docs/docusaurus.config.ts
index 1b6c65d5139..49566c5c380 100644
--- a/docs/docusaurus.config.ts
+++ b/docs/docusaurus.config.ts
@@ -38,7 +38,7 @@ export default {
          },
        },
        editUrl: ({ versionDocsDirPath, docPath }) =>
-          `https://github.com/noir-lang/noir/edit/master/docs/${versionDocsDirPath}/${docPath}`,
+          `https://github.com/noir-lang/noir/edit/master/docs/${versionDocsDirPath.replace('processed-docs', 'docs')}/${docPath}`,
        },
        blog: false,
        theme: {