diff --git a/Cargo.lock b/Cargo.lock
index 652271fcaff..cec7b5f0371 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1633,7 +1633,6 @@ version = "0.20.0"
 dependencies = [
  "codespan-reporting",
  "iter-extended",
- "rust-embed",
  "serde",
  "tempfile",
 ]
@@ -2443,6 +2442,7 @@ dependencies = [
  "rayon",
  "rustc_version",
  "serde",
+ "tempfile",
  "thiserror",
 ]
@@ -2577,7 +2577,6 @@ version = "0.20.0"
 dependencies = [
  "acvm",
  "async-lsp",
- "cfg-if",
  "codespan-lsp",
  "fm",
  "lsp-types 0.94.1",
@@ -2601,7 +2600,6 @@ version = "0.20.0"
 dependencies = [
  "acvm",
  "build-data",
- "cfg-if",
  "console_error_panic_hook",
  "fm",
  "getrandom",
@@ -2666,6 +2664,7 @@ dependencies = [
 "noirc_errors",
 "noirc_evaluator",
 "noirc_frontend",
+ "rust-embed",
 "serde",
 ]
diff --git a/compiler/fm/Cargo.toml b/compiler/fm/Cargo.toml
index 9e4309693ed..699f709e9b5 100644
--- a/compiler/fm/Cargo.toml
+++ b/compiler/fm/Cargo.toml
@@ -9,7 +9,6 @@ license.workspace = true
 
 [dependencies]
 codespan-reporting.workspace = true
-rust-embed = "6.6.0"
 serde.workspace = true
 
 [dev-dependencies]
diff --git a/compiler/fm/src/file_reader.rs b/compiler/fm/src/file_reader.rs
deleted file mode 100644
index d17aefeda7e..00000000000
--- a/compiler/fm/src/file_reader.rs
+++ /dev/null
@@ -1,49 +0,0 @@
-use rust_embed::RustEmbed;
-use std::io::{Error, ErrorKind};
-use std::path::Path;
-
-// Based on the environment, we either read files using the rust standard library or we
-// read files using the javascript host function
-
-pub type FileReader = dyn Fn(&Path) -> std::io::Result<String> + Send;
-
-#[derive(RustEmbed)]
-#[folder = "../../noir_stdlib/src"]
-#[cfg_attr(not(target_os = "windows"), prefix = "std/")]
-#[cfg_attr(target_os = "windows", prefix = r"std\")] // Note reversed slash direction
-struct StdLibAssets;
-
-#[cfg(target_os = "windows")]
-pub(super) fn is_stdlib_asset(path: &Path) -> bool {
-    path.starts_with("std\\")
-}
-
-#[cfg(not(target_os = "windows"))]
-pub(super) fn is_stdlib_asset(path: &Path) -> bool {
-    path.starts_with("std/")
-}
-
-fn get_stdlib_asset(path: &Path) -> std::io::Result<String> {
-    if !is_stdlib_asset(path) {
-        return Err(Error::new(ErrorKind::InvalidInput, "requested a non-stdlib asset"));
-    }
-
-    match StdLibAssets::get(path.to_str().unwrap()) {
-        Some(std_lib_asset) => {
-            Ok(std::str::from_utf8(std_lib_asset.data.as_ref()).unwrap().to_string())
-        }
-
-        None => Err(Error::new(ErrorKind::NotFound, "invalid stdlib path")),
-    }
-}
-
-pub(crate) fn read_file_to_string(
-    path_to_file: &Path,
-    get_non_stdlib_asset: &impl Fn(&Path) -> std::io::Result<String>,
-) -> std::io::Result<String> {
-    if is_stdlib_asset(path_to_file) {
-        get_stdlib_asset(path_to_file)
-    } else {
-        get_non_stdlib_asset(path_to_file)
-    }
-}
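With `file_reader.rs` gone, callers no longer hand the `FileManager` a read callback and let it pull sources (including the embedded stdlib) on demand; they construct it from a root path and push file contents in themselves. A minimal before/after sketch, assuming a `root: &Path` and a `source: String` the caller has already read (the `src/main.nr` path is only illustrative):

    use fm::FileManager;
    use std::path::Path;

    // Before: the manager pulled file contents through an injected reader.
    // let fm = FileManager::new(root, Box::new(|path| std::fs::read_to_string(path)));

    // After: the caller pushes file contents in up front.
    let mut fm = FileManager::new(root);
    fm.add_file_with_source(Path::new("src/main.nr"), source);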
diff --git a/compiler/fm/src/lib.rs b/compiler/fm/src/lib.rs
index a251ecc70c5..2a54e58d3b9 100644
--- a/compiler/fm/src/lib.rs
+++ b/compiler/fm/src/lib.rs
@@ -4,16 +4,12 @@
 #![warn(clippy::semicolon_if_nothing_returned)]
 
 mod file_map;
-mod file_reader;
 
 pub use file_map::{File, FileId, FileMap, PathString};
 
 // Re-export for the lsp
 pub use codespan_reporting::files as codespan_files;
 
-use file_reader::is_stdlib_asset;
-pub use file_reader::FileReader;
-
 use std::{
     collections::HashMap,
     path::{Component, Path, PathBuf},
@@ -26,7 +22,6 @@ pub struct FileManager {
     file_map: FileMap,
     id_to_path: HashMap<FileId, PathBuf>,
     path_to_id: HashMap<PathBuf, FileId>,
-    file_reader: Box<FileReader>,
 }
 
 impl std::fmt::Debug for FileManager {
@@ -41,13 +36,12 @@ impl FileManager {
-    pub fn new(root: &Path, file_reader: Box<FileReader>) -> Self {
+    pub fn new(root: &Path) -> Self {
         Self {
             root: root.normalize(),
             file_map: Default::default(),
             id_to_path: Default::default(),
             path_to_id: Default::default(),
-            file_reader,
         }
     }
@@ -55,25 +49,32 @@ impl FileManager {
         &self.file_map
     }
 
-    pub fn add_file(&mut self, file_name: &Path) -> Option<FileId> {
-        // Handle both relative file paths and std/lib virtual paths.
-        let resolved_path: PathBuf = if is_stdlib_asset(file_name) {
-            // Special case for stdlib where we want to read specifically the `std/` relative path
-            // TODO: The stdlib path should probably be an absolute path rooted in something people would never create
-            file_name.to_path_buf()
-        } else {
-            self.root.join(file_name).normalize()
-        };
+    /// Adds a source file to the [`FileManager`].
+    ///
+    /// The `file_name` is expected to be relative to the [`FileManager`]'s root directory.
+    pub fn add_file_with_source(&mut self, file_name: &Path, source: String) -> Option<FileId> {
+        let file_name = self.root.join(file_name);
+        self.add_file_with_source_canonical_path(&file_name, source)
+    }
 
-        // Check that the resolved path already exists in the file map, if it is, we return it.
-        if let Some(file_id) = self.path_to_id.get(&resolved_path) {
+    /// Adds a source file to the [`FileManager`] using a path which is not appended to the root path.
+    ///
+    /// This should only be used for the stdlib as these files do not exist on the user's filesystem.
+    pub fn add_file_with_source_canonical_path(
+        &mut self,
+        file_name: &Path,
+        source: String,
+    ) -> Option<FileId> {
+        let file_name = file_name.normalize();
+        // Check that the file name already exists in the file map, if it is, we return it.
+        if let Some(file_id) = self.path_to_id.get(&file_name) {
             return Some(*file_id);
         }
+        let file_name_path_buf = file_name.to_path_buf();
 
         // Otherwise we add the file
-        let source = file_reader::read_file_to_string(&resolved_path, &self.file_reader).ok()?;
-        let file_id = self.file_map.add_file(resolved_path.clone().into(), source);
-        self.register_path(file_id, resolved_path);
+        let file_id = self.file_map.add_file(file_name_path_buf.clone().into(), source);
+        self.register_path(file_id, file_name_path_buf);
 
         Some(file_id)
     }
@@ -98,7 +99,10 @@ impl FileManager {
         self.id_to_path.get(&file_id).unwrap().as_path()
     }
 
-    pub fn find_module(&mut self, anchor: FileId, mod_name: &str) -> Result<FileId, String> {
+    // TODO: This should also ideally not be here, so that the file manager
+    // TODO: does not know about rust modules.
+    // TODO: Ideally this is moved to def_collector_mod and we make this method accept a FileManager
+    pub fn find_module(&self, anchor: FileId, mod_name: &str) -> Result<FileId, String> {
         let anchor_path = self.path(anchor).with_extension("");
         let anchor_dir = anchor_path.parent().unwrap();
@@ -111,14 +115,19 @@
             anchor_path.join(format!("{mod_name}.{FILE_EXTENSION}"))
         };
 
-        self.add_file(&candidate).ok_or_else(|| candidate.as_os_str().to_string_lossy().to_string())
+        self.name_to_id(candidate.clone())
+            .ok_or_else(|| candidate.as_os_str().to_string_lossy().to_string())
     }
 
+    // TODO: This should accept a &Path instead of a PathBuf
     pub fn name_to_id(&self, file_name: PathBuf) -> Option<FileId> {
         self.file_map.get_file_id(&PathString::from_path(file_name))
     }
 }
 
+// TODO: This should not be here because the file manager should not know about the
+// TODO: rust modules. See comment on `find_module``
+// TODO: Moreover, the check for main, lib, mod should ideally not be done here
 /// Returns true if a module's child module's are expected to be in the same directory.
 /// Returns false if they are expected to be in a subdirectory matching the name of the module.
 fn should_check_siblings_for_module(module_path: &Path, parent_path: &Path) -> bool {
@@ -220,9 +229,11 @@ mod tests {
     use super::*;
     use tempfile::{tempdir, TempDir};
 
-    fn create_dummy_file(dir: &TempDir, file_name: &Path) {
+    // Returns the absolute path to the file
+    fn create_dummy_file(dir: &TempDir, file_name: &Path) -> PathBuf {
         let file_path = dir.path().join(file_name);
-        let _file = std::fs::File::create(file_path).unwrap();
+        let _file = std::fs::File::create(&file_path).unwrap();
+        file_path
     }
 
     #[test]
@@ -232,9 +243,9 @@
         let entry_file_name = Path::new("my_dummy_file.nr");
         create_dummy_file(&dir, entry_file_name);
 
-        let mut fm = FileManager::new(dir.path(), Box::new(|path| std::fs::read_to_string(path)));
+        let mut fm = FileManager::new(dir.path());
 
-        let file_id = fm.add_file(entry_file_name).unwrap();
+        let file_id = fm.add_file_with_source(entry_file_name, "fn foo() {}".to_string()).unwrap();
 
         let dep_file_name = Path::new("foo.nr");
         create_dummy_file(&dir, dep_file_name);
@@ -247,9 +258,9 @@
         let file_name = Path::new("foo.nr");
         create_dummy_file(&dir, file_name);
 
-        let mut fm = FileManager::new(dir.path(), Box::new(|path| std::fs::read_to_string(path)));
+        let mut fm = FileManager::new(dir.path());
 
-        let file_id = fm.add_file(file_name).unwrap();
+        let file_id = fm.add_file_with_source(file_name, "fn foo() {}".to_string()).unwrap();
 
         assert!(fm.path(file_id).ends_with("foo.nr"));
     }
 
     #[test]
     fn path_resolve_sub_module() {
         let dir = tempdir().unwrap();
-        let mut fm = FileManager::new(dir.path(), Box::new(|path| std::fs::read_to_string(path)));
+        let mut fm = FileManager::new(dir.path());
 
         // Create a lib.nr file at the root.
         // we now have dir/lib.nr
-        let file_name = Path::new("lib.nr");
-        create_dummy_file(&dir, file_name);
-
-        let file_id = fm.add_file(file_name).unwrap();
+        let lib_nr_path = create_dummy_file(&dir, Path::new("lib.nr"));
+        let file_id = fm
+            .add_file_with_source(lib_nr_path.as_path(), "fn foo() {}".to_string())
+            .expect("could not add file to file manager and obtain a FileId");
 
         // Create a sub directory
         // we now have:
         // - dir/lib.nr
         // - dir/sub_dir
         let sub_dir = TempDir::new_in(&dir).unwrap();
@@ -277,14 +288,16 @@
         // we no have:
         // - dir/lib.nr
         // - dir/sub_dir/foo.nr
-        create_dummy_file(&sub_dir, Path::new("foo.nr"));
+        let foo_nr_path = create_dummy_file(&sub_dir, Path::new("foo.nr"));
+        fm.add_file_with_source(foo_nr_path.as_path(), "fn foo() {}".to_string());
 
         // Add a parent module for the sub_dir
         // we no have:
         // - dir/lib.nr
         // - dir/sub_dir.nr
         // - dir/sub_dir/foo.nr
-        create_dummy_file(&dir, Path::new(&format!("{sub_dir_name}.nr")));
+        let sub_dir_nr_path = create_dummy_file(&dir, Path::new(&format!("{sub_dir_name}.nr")));
+        fm.add_file_with_source(sub_dir_nr_path.as_path(), "fn foo() {}".to_string());
 
         // First check for the sub_dir.nr file and add it to the FileManager
         let sub_dir_file_id = fm.find_module(file_id, sub_dir_name).unwrap();
@@ -303,7 +316,7 @@
         let sub_dir = TempDir::new_in(&dir).unwrap();
         let sub_sub_dir = TempDir::new_in(&sub_dir).unwrap();
 
-        let mut fm = FileManager::new(dir.path(), Box::new(|path| std::fs::read_to_string(path)));
+        let mut fm = FileManager::new(dir.path());
 
         // Create a lib.nr file at the root.
         let file_name = Path::new("lib.nr");
@@ -313,8 +326,9 @@
         let second_file_name = PathBuf::from(sub_sub_dir.path()).join("./../../lib.nr");
 
         // Add both files to the file manager
-        let file_id = fm.add_file(file_name).unwrap();
-        let second_file_id = fm.add_file(&second_file_name).unwrap();
+        let file_id = fm.add_file_with_source(file_name, "fn foo() {}".to_string()).unwrap();
+        let second_file_id =
+            fm.add_file_with_source(&second_file_name, "fn foo() {}".to_string()).unwrap();
 
         assert_eq!(file_id, second_file_id);
     }
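The doc comments on `add_file_with_source` and `add_file_with_source_canonical_path` describe the two registration paths; after this change `find_module` and `name_to_id` only consult what has already been registered and never touch the filesystem. A small sketch of the intended usage, assuming a root of `/project` and made-up file contents:

    use fm::FileManager;
    use std::path::{Path, PathBuf};

    let mut fm = FileManager::new(Path::new("/project"));
    let lib_id = fm.add_file_with_source(Path::new("lib.nr"), "mod foo;".to_string()).unwrap();
    fm.add_file_with_source(Path::new("foo.nr"), "fn foo() {}".to_string());

    // Module resolution is now a pure lookup over the ids registered above.
    let foo_id = fm.find_module(lib_id, "foo").expect("foo.nr was registered above");
    assert_eq!(Some(foo_id), fm.name_to_id(PathBuf::from("/project/foo.nr")));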
diff --git a/compiler/integration-tests/test/browser/compile_prove_verify.test.ts b/compiler/integration-tests/test/browser/compile_prove_verify.test.ts
index 2aef56c23f9..29e2fbc55b8 100644
--- a/compiler/integration-tests/test/browser/compile_prove_verify.test.ts
+++ b/compiler/integration-tests/test/browser/compile_prove_verify.test.ts
@@ -1,17 +1,17 @@
 import { expect } from '@esm-bundle/chai';
-import { Logger } from 'tslog';
 
 import * as TOML from 'smol-toml';
 
-import { initializeResolver } from '@noir-lang/source-resolver';
-import newCompiler, { CompiledProgram, compile, init_log_level as compilerLogLevel } from '@noir-lang/noir_wasm';
+import newCompiler, {
+  CompiledProgram,
+  PathToFileSourceMap,
+  compile,
+  init_log_level as compilerLogLevel,
+} from '@noir-lang/noir_wasm';
 import { Noir } from '@noir-lang/noir_js';
 import { InputMap } from '@noir-lang/noirc_abi';
 import { BarretenbergBackend } from '@noir-lang/backend_barretenberg';
 
 import { getFile } from './utils.js';
-import { TEST_LOG_LEVEL } from '../environment.js';
-
-const logger = new Logger({ name: 'test', minLevel: TEST_LOG_LEVEL });
 
 await newCompiler();
@@ -33,14 +33,11 @@ const suite = Mocha.Suite.create(mocha.suite, 'Noir end to end test');
 suite.timeout(60 * 20e3); //20mins
 
 function getCircuit(noirSource: string): CompiledProgram {
-  // eslint-disable-next-line @typescript-eslint/no-unused-vars
-  initializeResolver((id: string) => {
-    logger.debug('source-resolver: resolving:', id);
-    return noirSource;
-  });
+  const sourceMap = new PathToFileSourceMap();
+  sourceMap.add_source_code('main.nr', noirSource);
 
   // We're ignoring this in the resolver but pass in something sensible.
-  const result = compile('/main.nr');
+  const result = compile('main.nr', undefined, undefined, sourceMap);
   if (!('program' in result)) {
     throw new Error('Compilation failed');
   }
diff --git a/compiler/integration-tests/test/browser/recursion.test.ts b/compiler/integration-tests/test/browser/recursion.test.ts
index 308be81417f..faa317b2c3c 100644
--- a/compiler/integration-tests/test/browser/recursion.test.ts
+++ b/compiler/integration-tests/test/browser/recursion.test.ts
@@ -2,8 +2,12 @@ import { expect } from '@esm-bundle/chai';
 
 import { TEST_LOG_LEVEL } from '../environment.js';
 import { Logger } from 'tslog';
-import { initializeResolver } from '@noir-lang/source-resolver';
-import newCompiler, { CompiledProgram, compile, init_log_level as compilerLogLevel } from '@noir-lang/noir_wasm';
+import newCompiler, {
+  CompiledProgram,
+  PathToFileSourceMap,
+  compile,
+  init_log_level as compilerLogLevel,
+} from '@noir-lang/noir_wasm';
 
 import { acvm, abi, Noir } from '@noir-lang/noir_js';
 import * as TOML from 'smol-toml';
@@ -27,14 +31,9 @@ const circuit_main = 'test_programs/execution_success/assert_statement';
 const circuit_recursion = 'compiler/integration-tests/circuits/recursion';
 
 function getCircuit(noirSource: string): CompiledProgram {
-  // eslint-disable-next-line @typescript-eslint/no-unused-vars
-  initializeResolver((id: string) => {
-    logger.debug('source-resolver: resolving:', id);
-    return noirSource;
-  });
-
-  // We're ignoring this in the resolver but pass in something sensible.
-  const result = compile('/main.nr');
+  const sourceMap = new PathToFileSourceMap();
+  sourceMap.add_source_code('main.nr', noirSource);
+  const result = compile('main.nr', undefined, undefined, sourceMap);
   if (!('program' in result)) {
     throw new Error('Compilation failed');
   }
diff --git a/compiler/integration-tests/test/node/onchain_recursive_verification.test.ts b/compiler/integration-tests/test/node/onchain_recursive_verification.test.ts
index 353678b470b..6c20d44882b 100644
--- a/compiler/integration-tests/test/node/onchain_recursive_verification.test.ts
+++ b/compiler/integration-tests/test/node/onchain_recursive_verification.test.ts
@@ -5,7 +5,12 @@ import { readFileSync } from 'node:fs';
 import { resolve } from 'path';
 import toml from 'toml';
 
-import { compile, CompiledProgram, init_log_level as compilerLogLevel } from '@noir-lang/noir_wasm';
+import {
+  compile,
+  CompiledProgram,
+  init_log_level as compilerLogLevel,
+  PathToFileSourceMap,
+} from '@noir-lang/noir_wasm';
 import { Noir } from '@noir-lang/noir_js';
 import { BarretenbergBackend, flattenPublicInputs } from '@noir-lang/backend_barretenberg';
 import { Field, InputMap } from '@noir-lang/noirc_abi';
@@ -13,16 +18,24 @@ import { Field, InputMap } from '@noir-lang/noirc_abi';
 compilerLogLevel('INFO');
 
 it(`smart contract can verify a recursive proof`, async () => {
-  const inner_source_path = resolve(`../../test_programs/execution_success/assert_statement/src/main.nr`);
-  const inner_program = (compile(inner_source_path) as { program: CompiledProgram }).program;
-
-  const recursion_source_path = resolve(`./circuits/recursion/src/main.nr`);
-  const recursion_program = (compile(recursion_source_path) as { program: CompiledProgram }).program;
+  const innerSourcePath = resolve(`../../test_programs/execution_success/assert_statement/src/main.nr`);
+  const sourceMapInnerProgram = new PathToFileSourceMap();
+  sourceMapInnerProgram.add_source_code(innerSourcePath, readFileSync(innerSourcePath, 'utf-8'));
+  const innerProgram = (
+    compile(innerSourcePath, undefined, undefined, sourceMapInnerProgram) as { program: CompiledProgram }
+  ).program;
+
+  const recursionSourcePath = resolve(`./circuits/recursion/src/main.nr`);
+  const sourceMapRecursionProgram = new PathToFileSourceMap();
+  sourceMapRecursionProgram.add_source_code(recursionSourcePath, readFileSync(recursionSourcePath, 'utf-8'));
+  const recursionProgram = (
+    compile(recursionSourcePath, undefined, undefined, sourceMapRecursionProgram) as { program: CompiledProgram }
+  ).program;
 
   // Intermediate proof
-  const inner_backend = new BarretenbergBackend(inner_program);
-  const inner = new Noir(inner_program);
+  const inner_backend = new BarretenbergBackend(innerProgram);
+  const inner = new Noir(innerProgram);
 
   const inner_prover_toml = readFileSync(
     resolve(`../../test_programs/execution_success/assert_statement/Prover.toml`),
@@ -41,8 +54,8 @@
 
   // Final proof
 
-  const recursion_backend = new BarretenbergBackend(recursion_program);
-  const recursion = new Noir(recursion_program, recursion_backend);
+  const recursion_backend = new BarretenbergBackend(recursionProgram);
+  const recursion = new Noir(recursionProgram, recursion_backend);
 
   const recursion_inputs: InputMap = {
     verification_key: vkAsFields,
diff --git a/compiler/integration-tests/test/node/smart_contract_verifier.test.ts b/compiler/integration-tests/test/node/smart_contract_verifier.test.ts
index 7dafada0ffb..5b3d0e2d337 100644
--- a/compiler/integration-tests/test/node/smart_contract_verifier.test.ts
+++ b/compiler/integration-tests/test/node/smart_contract_verifier.test.ts
@@ -5,7 +5,7 @@ import { readFileSync } from 'node:fs';
 import { resolve } from 'path';
 import toml from 'toml';
 
-import { compile, init_log_level as compilerLogLevel } from '@noir-lang/noir_wasm';
+import { PathToFileSourceMap, compile, init_log_level as compilerLogLevel } from '@noir-lang/noir_wasm';
 import { Noir } from '@noir-lang/noir_js';
 import { BarretenbergBackend, flattenPublicInputs } from '@noir-lang/backend_barretenberg';
@@ -31,9 +31,11 @@ test_cases.forEach((testInfo) => {
     const base_relative_path = '../..';
     const test_case = testInfo.case;
 
-    const noir_source_path = resolve(`${base_relative_path}/${test_case}/src/main.nr`);
+    const noirSourcePath = resolve(`${base_relative_path}/${test_case}/src/main.nr`);
+    const sourceMap = new PathToFileSourceMap();
+    sourceMap.add_source_code(noirSourcePath, readFileSync(noirSourcePath, 'utf-8'));
 
-    const compileResult = compile(noir_source_path);
+    const compileResult = compile(noirSourcePath, undefined, undefined, sourceMap);
     if (!('program' in compileResult)) {
       throw new Error('Compilation failed');
     }
diff --git a/compiler/noirc_driver/Cargo.toml b/compiler/noirc_driver/Cargo.toml
index 8759e3f65e8..e5a837e6822 100644
--- a/compiler/noirc_driver/Cargo.toml
+++ b/compiler/noirc_driver/Cargo.toml
@@ -21,5 +21,6 @@ iter-extended.workspace = true
 fm.workspace = true
 serde.workspace = true
 fxhash.workspace = true
+rust-embed = "6.6.0"
 
 aztec_macros = { path = "../../aztec_macros" }
diff --git a/compiler/noirc_driver/src/lib.rs b/compiler/noirc_driver/src/lib.rs
index be139846cd7..c326d04c84d 100644
--- a/compiler/noirc_driver/src/lib.rs
+++ b/compiler/noirc_driver/src/lib.rs
@@ -23,6 +23,7 @@ mod abi_gen;
 mod contract;
 mod debug;
 mod program;
+mod stdlib;
 
 use debug::filter_relevant_files;
@@ -82,11 +83,23 @@ pub type CompilationResult<T> = Result<(T, Warnings), ErrorsAndWarnings>;
 
 /// Adds the file from the file system at `Path` to the crate graph as a root file
 pub fn prepare_crate(context: &mut Context, file_name: &Path) -> CrateId {
+    // Add the stdlib contents to the file manager, since every package automatically has a dependency
+    // on the stdlib. For other dependencies, we read the package.Dependencies file to add their file
+    // contents to the file manager. However since the dependency on the stdlib is implicit, we need
+    // to manually add it here.
+    let stdlib_paths_with_source = stdlib::stdlib_paths_with_source();
+    for (path, source) in stdlib_paths_with_source {
+        context.file_manager.add_file_with_source_canonical_path(Path::new(&path), source);
+    }
+
     let path_to_std_lib_file = Path::new(STD_CRATE_NAME).join("lib.nr");
-    let std_file_id = context.file_manager.add_file(&path_to_std_lib_file).unwrap();
+    let std_file_id = context
+        .file_manager
+        .name_to_id(path_to_std_lib_file)
+        .expect("stdlib file id is expected to be present");
     let std_crate_id = context.crate_graph.add_stdlib(std_file_id);
 
-    let root_file_id = context.file_manager.add_file(file_name).unwrap();
+    let root_file_id = context.file_manager.name_to_id(file_name.to_path_buf()).unwrap_or_else(|| panic!("files are expected to be added to the FileManager before reaching the compiler file_path: {file_name:?}"));
 
     let root_crate_id = context.crate_graph.add_crate_root(root_file_id);
 
@@ -97,7 +110,10 @@ pub fn prepare_crate(context: &mut Context, file_name: &Path) -> CrateId {
 
 // Adds the file from the file system at `Path` to the crate graph
 pub fn prepare_dependency(context: &mut Context, file_name: &Path) -> CrateId {
-    let root_file_id = context.file_manager.add_file(file_name).unwrap();
+    let root_file_id = context
+        .file_manager
+        .name_to_id(file_name.to_path_buf())
+        .unwrap_or_else(|| panic!("files are expected to be added to the FileManager before reaching the compiler file_path: {file_name:?}"));
 
     let crate_id = context.crate_graph.add_crate(root_file_id);
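The comment added to `prepare_crate` spells out the new contract: every source file must already be in the `FileManager` before the compiler looks anything up by name, and only the implicit stdlib dependency is injected here. A rough sketch of the caller side under that contract (the `package_root`, `main_source`, and path names are illustrative assumptions, not taken from the diff):

    use fm::FileManager;
    use noirc_frontend::graph::CrateGraph;
    use noirc_frontend::hir::Context;

    let entry_path = package_root.join("src/main.nr"); // package_root: an absolute PathBuf
    let mut fm = FileManager::new(&package_root);
    fm.add_file_with_source(&entry_path, main_source); // the caller reads the file itself

    let mut context = Context::new(fm, CrateGraph::default());
    // prepare_crate injects the embedded stdlib sources and then resolves everything via name_to_id.
    let root_crate_id = noirc_driver::prepare_crate(&mut context, &entry_path);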
diff --git a/compiler/noirc_driver/src/stdlib.rs b/compiler/noirc_driver/src/stdlib.rs
new file mode 100644
index 00000000000..5a91e3f45b5
--- /dev/null
+++ b/compiler/noirc_driver/src/stdlib.rs
@@ -0,0 +1,24 @@
+use rust_embed::RustEmbed;
+
+#[derive(RustEmbed)]
+#[folder = "../../noir_stdlib/src"]
+#[cfg_attr(not(target_os = "windows"), prefix = "std/")]
+#[cfg_attr(target_os = "windows", prefix = r"std\")] // Note reversed slash direction
+struct StdLibAssets;
+
+// Returns a vector of tuples containing the path to a stdlib file in the std lib crate
+// along with the source code of that file.
+//
+// This is needed because when we preload the file manager, it needs to know where
+// the source code for the stdlib is. The stdlib is treated special because it comes with
+// the compiler and is never included as a dependency like other user defined crates.
+pub(crate) fn stdlib_paths_with_source() -> Vec<(String, String)> {
+    StdLibAssets::iter()
+        .map(|path| {
+            let source = std::str::from_utf8(StdLibAssets::get(&path).unwrap().data.as_ref())
+                .unwrap()
+                .to_string();
+            (path.to_string(), source)
+        })
+        .collect()
+}
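`stdlib_paths_with_source` flattens the embedded `noir_stdlib/src` tree into `(path, source)` pairs whose paths already carry the platform-specific `std/` (or `std\`) prefix, so they can be fed straight into `add_file_with_source_canonical_path`. A hedged illustration of the expected shape (the `std/lib.nr` entry is an assumption about the stdlib layout, implied by `prepare_crate` above rather than asserted by this diff):

    for (path, source) in stdlib_paths_with_source() {
        // e.g. path == "std/lib.nr", source == contents of noir_stdlib/src/lib.nr
        assert!(path.starts_with("std/") || path.starts_with("std\\"));
        assert!(!source.is_empty());
    }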
diff --git a/compiler/noirc_frontend/src/tests.rs b/compiler/noirc_frontend/src/tests.rs
index 2619189ee8d..b33db60c093 100644
--- a/compiler/noirc_frontend/src/tests.rs
+++ b/compiler/noirc_frontend/src/tests.rs
@@ -61,8 +61,7 @@
         src: &str,
     ) -> (ParsedModule, Context, Vec<(CompilationError, FileId)>) {
         let root = std::path::Path::new("/");
-        let fm = FileManager::new(root, Box::new(|path| std::fs::read_to_string(path)));
-        //let fm = FileManager::new(root, Box::new(get_non_stdlib_asset));
+        let fm = FileManager::new(root);
         let graph = CrateGraph::default();
         let mut context = Context::new(fm, graph);
         let root_file_id = FileId::dummy();
diff --git a/compiler/wasm/Cargo.toml b/compiler/wasm/Cargo.toml
index 8e693182db9..58ad7764fdc 100644
--- a/compiler/wasm/Cargo.toml
+++ b/compiler/wasm/Cargo.toml
@@ -21,7 +21,6 @@ noirc_errors.workspace = true
 wasm-bindgen.workspace = true
 serde.workspace = true
 js-sys.workspace = true
-cfg-if.workspace = true
 console_error_panic_hook.workspace = true
 gloo-utils.workspace = true
 log.workspace = true
diff --git a/compiler/wasm/src/compile.rs b/compiler/wasm/src/compile.rs
index e7fd3dd5212..13b366819b0 100644
--- a/compiler/wasm/src/compile.rs
+++ b/compiler/wasm/src/compile.rs
@@ -126,6 +126,31 @@ struct DependencyGraph {
     library_dependencies: HashMap<CrateName, Vec<CrateName>>,
 }
 
+#[wasm_bindgen]
+// This is a map containing the paths of all of the files in the entry-point crate and
+// the transitive dependencies of the entry-point crate.
+//
+// This is for all intents and purposes the file system that the compiler will use to resolve/compile
+// files in the crate being compiled and its dependencies.
+#[derive(Deserialize, Default)]
+pub struct PathToFileSourceMap(HashMap<PathBuf, String>);
+
+#[wasm_bindgen]
+impl PathToFileSourceMap {
+    #[wasm_bindgen(constructor)]
+    pub fn new() -> PathToFileSourceMap {
+        PathToFileSourceMap::default()
+    }
+    // Inserts a path and its source code into the map.
+    //
+    // Returns true, if there was already source code in the map for the given path
+    pub fn add_source_code(&mut self, path: String, source_code: String) -> bool {
+        let path_buf = Path::new(&path).to_path_buf();
+        let old_value = self.0.insert(path_buf, source_code);
+        old_value.is_some()
+    }
+}
+
 pub enum CompileResult {
     Contract { contract: PreprocessedContract, debug: DebugArtifact },
     Program { program: PreprocessedProgram, debug: DebugArtifact },
@@ -136,6 +161,7 @@ pub fn compile(
     entry_point: String,
     contracts: Option<bool>,
     dependency_graph: Option<JsDependencyGraph>,
+    file_source_map: PathToFileSourceMap,
 ) -> Result<JsCompileResult, JsCompileError> {
     console_error_panic_hook::set_once();
@@ -146,8 +172,8 @@
         DependencyGraph { root_dependencies: vec![], library_dependencies: HashMap::new() }
     };
 
-    let root = Path::new("/");
-    let fm = FileManager::new(root, Box::new(get_non_stdlib_asset));
+    let fm = file_manager_with_source_map(file_source_map);
+
     let graph = CrateGraph::default();
     let mut context = Context::new(fm, graph);
@@ -200,6 +226,31 @@
     }
 }
 
+// Create a new FileManager with the given source map
+//
+// Note: Use this method whenever initializing a new FileManager
+// to ensure that the file manager contains all of the files
+// that one intends the compiler to use.
+//
+// For all intents and purposes, the file manager being returned
+// should be considered as immutable.
+fn file_manager_with_source_map(source_map: PathToFileSourceMap) -> FileManager {
+    let root = Path::new("");
+    let mut fm = FileManager::new(root);
+
+    for (path, source) in source_map.0 {
+        fm.add_file_with_source(path.as_path(), source);
+    }
+
+    fm
+}
+
+// Root dependencies are dependencies which the entry-point package relies upon.
+// These will be in the Nargo.toml of the package being compiled.
+//
+// Library dependencies are transitive dependencies; for example, if the entry-point relies
+// upon some library `lib1`. Then the packages that `lib1` depend upon will be placed in the
+// `library_dependencies` list and the `lib1` will be placed in the `root_dependencies` list.
 fn process_dependency_graph(context: &mut Context, dependency_graph: DependencyGraph) {
     let mut crate_names: HashMap<&CrateName, CrateId> = HashMap::new();
@@ -279,51 +330,26 @@ fn preprocess_contract(contract: CompiledContract) -> CompileResult {
     CompileResult::Contract { contract: preprocessed_contract, debug: debug_artifact }
 }
 
-cfg_if::cfg_if! {
-    if #[cfg(target_os = "wasi")] {
-        fn get_non_stdlib_asset(path_to_file: &Path) -> std::io::Result<String> {
-            std::fs::read_to_string(path_to_file)
-        }
-    } else {
-        use std::io::{Error, ErrorKind};
-
-        #[wasm_bindgen(module = "@noir-lang/source-resolver")]
-        extern "C" {
-            #[wasm_bindgen(catch)]
-            fn read_file(path: &str) -> Result<String, JsValue>;
-        }
-
-        fn get_non_stdlib_asset(path_to_file: &Path) -> std::io::Result<String> {
-            let path_str = path_to_file.to_str().unwrap();
-            match read_file(path_str) {
-                Ok(buffer) => Ok(buffer),
-                Err(_) => Err(Error::new(ErrorKind::Other, "could not read file using wasm")),
-            }
-        }
-    }
-}
-
foo() {}".to_string())] + .into_iter() + .collect(), + ); + + let mut context = setup_test_context(source_map); + process_dependency_graph(&mut context, dependency_graph); assert_eq!(context.crate_graph.number_of_crates(), 3); @@ -360,12 +395,18 @@ mod test { #[test] fn test_works_with_duplicate_root_dependencies() { - let mut context = setup_test_context(); let dependency_graph = DependencyGraph { root_dependencies: vec![crate_name("lib1"), crate_name("lib1")], library_dependencies: HashMap::new(), }; + let source_map = PathToFileSourceMap( + vec![(Path::new("lib1/lib.nr").to_path_buf(), "fn foo() {}".to_string())] + .into_iter() + .collect(), + ); + let mut context = setup_test_context(source_map); + process_dependency_graph(&mut context, dependency_graph); assert_eq!(context.crate_graph.number_of_crates(), 3); @@ -373,7 +414,6 @@ mod test { #[test] fn test_works_with_transitive_dependencies() { - let mut context = setup_test_context(); let dependency_graph = DependencyGraph { root_dependencies: vec![crate_name("lib1")], library_dependencies: HashMap::from([ @@ -382,6 +422,17 @@ mod test { ]), }; + let source_map = PathToFileSourceMap( + vec![ + (Path::new("lib1/lib.nr").to_path_buf(), "fn foo() {}".to_string()), + (Path::new("lib2/lib.nr").to_path_buf(), "fn foo() {}".to_string()), + (Path::new("lib3/lib.nr").to_path_buf(), "fn foo() {}".to_string()), + ] + .into_iter() + .collect(), + ); + + let mut context = setup_test_context(source_map); process_dependency_graph(&mut context, dependency_graph); assert_eq!(context.crate_graph.number_of_crates(), 5); @@ -389,12 +440,22 @@ mod test { #[test] fn test_works_with_missing_dependencies() { - let mut context = setup_test_context(); let dependency_graph = DependencyGraph { root_dependencies: vec![crate_name("lib1")], library_dependencies: HashMap::from([(crate_name("lib2"), vec![crate_name("lib3")])]), }; + let source_map = PathToFileSourceMap( + vec![ + (Path::new("lib1/lib.nr").to_path_buf(), "fn foo() {}".to_string()), + (Path::new("lib2/lib.nr").to_path_buf(), "fn foo() {}".to_string()), + (Path::new("lib3/lib.nr").to_path_buf(), "fn foo() {}".to_string()), + ] + .into_iter() + .collect(), + ); + + let mut context = setup_test_context(source_map); process_dependency_graph(&mut context, dependency_graph); assert_eq!(context.crate_graph.number_of_crates(), 5); diff --git a/compiler/wasm/test/browser/index.test.ts b/compiler/wasm/test/browser/index.test.ts index 8a3f82ffff9..346c20c834c 100644 --- a/compiler/wasm/test/browser/index.test.ts +++ b/compiler/wasm/test/browser/index.test.ts @@ -1,6 +1,5 @@ import { expect } from '@esm-bundle/chai'; -import initNoirWasm, { compile } from '@noir-lang/noir_wasm'; -import { initializeResolver } from '@noir-lang/source-resolver'; +import initNoirWasm, { PathToFileSourceMap, compile } from '@noir-lang/noir_wasm'; import { depsScriptExpectedArtifact, depsScriptSourcePath, @@ -28,23 +27,11 @@ async function getPrecompiledSource(path: string): Promise { describe('noir wasm', () => { describe('can compile script without dependencies', () => { - beforeEach(async () => { - const source = await getFileContent(simpleScriptSourcePath); - initializeResolver((id: string) => { - console.log(`Resolving source ${id}`); - - if (typeof source === 'undefined') { - throw Error(`Could not resolve source for '${id}'`); - } else if (id !== '/main.nr') { - throw Error(`Unexpected id: '${id}'`); - } else { - return source; - } - }); - }); - it('matching nargos compilation', async () => { - const wasmCircuit = await 
diff --git a/compiler/wasm/test/browser/index.test.ts b/compiler/wasm/test/browser/index.test.ts
index 8a3f82ffff9..346c20c834c 100644
--- a/compiler/wasm/test/browser/index.test.ts
+++ b/compiler/wasm/test/browser/index.test.ts
@@ -1,6 +1,5 @@
 import { expect } from '@esm-bundle/chai';
-import initNoirWasm, { compile } from '@noir-lang/noir_wasm';
-import { initializeResolver } from '@noir-lang/source-resolver';
+import initNoirWasm, { PathToFileSourceMap, compile } from '@noir-lang/noir_wasm';
 import {
   depsScriptExpectedArtifact,
   depsScriptSourcePath,
@@ -28,23 +27,11 @@ async function getPrecompiledSource(path: string): Promise<any> {
 describe('noir wasm', () => {
   describe('can compile script without dependencies', () => {
-    beforeEach(async () => {
-      const source = await getFileContent(simpleScriptSourcePath);
-      initializeResolver((id: string) => {
-        console.log(`Resolving source ${id}`);
-
-        if (typeof source === 'undefined') {
-          throw Error(`Could not resolve source for '${id}'`);
-        } else if (id !== '/main.nr') {
-          throw Error(`Unexpected id: '${id}'`);
-        } else {
-          return source;
-        }
-      });
-    });
-
     it('matching nargos compilation', async () => {
-      const wasmCircuit = await compile('/main.nr');
+      const sourceMap = new PathToFileSourceMap();
+      sourceMap.add_source_code('main.nr', await getFileContent(simpleScriptSourcePath));
+
+      const wasmCircuit = await compile('main.nr', undefined, undefined, sourceMap);
       const cliCircuit = await getPrecompiledSource(simpleScriptExpectedArtifact);
 
       if (!('program' in wasmCircuit)) {
@@ -59,37 +46,29 @@ describe('noir wasm', () => {
   });
 
   describe('can compile script with dependencies', () => {
-    beforeEach(async () => {
+    it('matching nargos compilation', async () => {
       const [scriptSource, libASource, libBSource] = await Promise.all([
         getFileContent(depsScriptSourcePath),
         getFileContent(libASourcePath),
         getFileContent(libBSourcePath),
       ]);
 
-      initializeResolver((file: string) => {
-        switch (file) {
-          case '/script/main.nr':
-            return scriptSource;
-
-          case '/lib_a/lib.nr':
-            return libASource;
-
-          case '/lib_b/lib.nr':
-            return libBSource;
-
-          default:
-            return '';
-        }
-      });
-    });
-
-    it('matching nargos compilation', async () => {
-      const wasmCircuit = await compile('/script/main.nr', false, {
-        root_dependencies: ['lib_a'],
-        library_dependencies: {
-          lib_a: ['lib_b'],
+      const sourceMap = new PathToFileSourceMap();
+      sourceMap.add_source_code('script/main.nr', scriptSource);
+      sourceMap.add_source_code('lib_a/lib.nr', libASource);
+      sourceMap.add_source_code('lib_b/lib.nr', libBSource);
+
+      const wasmCircuit = await compile(
+        'script/main.nr',
+        false,
+        {
+          root_dependencies: ['lib_a'],
+          library_dependencies: {
+            lib_a: ['lib_b'],
+          },
         },
-      });
+        sourceMap,
+      );
 
       if (!('program' in wasmCircuit)) {
         throw Error('Expected program to be present');
diff --git a/compiler/wasm/test/node/index.test.ts b/compiler/wasm/test/node/index.test.ts
index c0d5f88e407..5cf9e3be2df 100644
--- a/compiler/wasm/test/node/index.test.ts
+++ b/compiler/wasm/test/node/index.test.ts
@@ -9,8 +9,7 @@
 } from '../shared';
 import { readFileSync } from 'node:fs';
 import { join, resolve } from 'node:path';
-import { compile } from '@noir-lang/noir_wasm';
-import { initializeResolver } from '@noir-lang/source-resolver';
+import { compile, PathToFileSourceMap } from '@noir-lang/noir_wasm';
 
 // eslint-disable-next-line @typescript-eslint/no-explicit-any
 async function getPrecompiledSource(path: string): Promise<any> {
@@ -21,7 +20,12 @@ describe('noir wasm compilation', () => {
   describe('can compile simple scripts', () => {
     it('matching nargos compilation', async () => {
-      const wasmCircuit = await compile(join(__dirname, simpleScriptSourcePath));
+      const sourceMap = new PathToFileSourceMap();
+      sourceMap.add_source_code(
+        join(__dirname, simpleScriptSourcePath),
+        readFileSync(join(__dirname, simpleScriptSourcePath), 'utf-8'),
+      );
+      const wasmCircuit = await compile(join(__dirname, simpleScriptSourcePath), undefined, undefined, sourceMap);
       const cliCircuit = await getPrecompiledSource(simpleScriptExpectedArtifact);
 
       if (!('program' in wasmCircuit)) {
@@ -36,32 +40,25 @@ describe('noir wasm compilation', () => {
   });
 
   describe('can compile scripts with dependencies', () => {
+    const sourceMap: PathToFileSourceMap = new PathToFileSourceMap();
     beforeEach(() => {
-      // this test requires a custom resolver in order to correctly resolve dependencies
-      initializeResolver((file) => {
-        switch (file) {
-          case '/script/main.nr':
-            return readFileSync(join(__dirname, depsScriptSourcePath), 'utf-8');
-
-          case '/lib_a/lib.nr':
-            return readFileSync(join(__dirname, libASourcePath), 'utf-8');
-
-          case '/lib_b/lib.nr':
-            return readFileSync(join(__dirname, libBSourcePath), 'utf-8');
-
-          default:
-            return '';
-        }
-      });
+      sourceMap.add_source_code('script/main.nr', readFileSync(join(__dirname, depsScriptSourcePath), 'utf-8'));
+      sourceMap.add_source_code('lib_a/lib.nr', readFileSync(join(__dirname, libASourcePath), 'utf-8'));
+      sourceMap.add_source_code('lib_b/lib.nr', readFileSync(join(__dirname, libBSourcePath), 'utf-8'));
     });
 
     it('matching nargos compilation', async () => {
-      const wasmCircuit = await compile('/script/main.nr', false, {
-        root_dependencies: ['lib_a'],
-        library_dependencies: {
-          lib_a: ['lib_b'],
+      const wasmCircuit = await compile(
+        'script/main.nr',
+        false,
+        {
+          root_dependencies: ['lib_a'],
+          library_dependencies: {
+            lib_a: ['lib_b'],
+          },
         },
-      });
+        sourceMap,
+      );
 
       const cliCircuit = await getPrecompiledSource(depsScriptExpectedArtifact);
diff --git a/tooling/lsp/Cargo.toml b/tooling/lsp/Cargo.toml
index 02d6d10ffa8..5f5e701da67 100644
--- a/tooling/lsp/Cargo.toml
+++ b/tooling/lsp/Cargo.toml
@@ -21,7 +21,6 @@ noirc_frontend.workspace = true
 serde.workspace = true
 serde_json.workspace = true
 tower.workspace = true
-cfg-if.workspace = true
 async-lsp = { workspace = true, features = ["omni-trait"] }
 serde_with = "3.2.0"
 fm.workspace = true
diff --git a/tooling/lsp/src/lib.rs b/tooling/lsp/src/lib.rs
index eecd0bda45b..2ad8096a13f 100644
--- a/tooling/lsp/src/lib.rs
+++ b/tooling/lsp/src/lib.rs
@@ -7,7 +7,7 @@ use std::{
     collections::HashMap,
     future::Future,
     ops::{self, ControlFlow},
-    path::{Path, PathBuf},
+    path::PathBuf,
     pin::Pin,
     task::{self, Poll},
 };
@@ -175,29 +175,3 @@ fn byte_span_to_range<'a, F: files::Files<'a> + ?Sized>(
         None
     }
 }
-
-cfg_if::cfg_if! {
-    if #[cfg(all(target_arch = "wasm32", not(target_os = "wasi")))] {
-        use wasm_bindgen::{prelude::*, JsValue};
-
-        #[wasm_bindgen(module = "@noir-lang/source-resolver")]
-        extern "C" {
-
-            #[wasm_bindgen(catch)]
-            fn read_file(path: &str) -> Result<String, JsValue>;
-
-        }
-
-        fn get_non_stdlib_asset(path_to_file: &Path) -> std::io::Result<String> {
-            let path_str = path_to_file.to_str().unwrap();
-            match read_file(path_str) {
-                Ok(buffer) => Ok(buffer),
-                Err(_) => Err(Error::new(ErrorKind::Other, "could not read file using wasm")),
-            }
-        }
-    } else {
-        fn get_non_stdlib_asset(path_to_file: &Path) -> std::io::Result<String> {
-            std::fs::read_to_string(path_to_file)
-        }
-    }
-}
diff --git a/tooling/lsp/src/notifications/mod.rs b/tooling/lsp/src/notifications/mod.rs
index 876ff157c07..61f0d231738 100644
--- a/tooling/lsp/src/notifications/mod.rs
+++ b/tooling/lsp/src/notifications/mod.rs
@@ -13,7 +13,7 @@ use crate::types::{
     PublishDiagnosticsParams,
 };
 
-use crate::{byte_span_to_range, get_non_stdlib_asset, get_package_tests_in_crate, LspState};
+use crate::{byte_span_to_range, get_package_tests_in_crate, LspState};
 
 pub(super) fn on_initialized(
     _state: &mut LspState,
@@ -103,7 +103,7 @@ pub(super) fn on_did_save_text_document(
     let diagnostics: Vec<_> = workspace
         .into_iter()
         .flat_map(|package| -> Vec<Diagnostic> {
-            let (mut context, crate_id) = prepare_package(package, Box::new(get_non_stdlib_asset));
+            let (mut context, crate_id) = prepare_package(package);
 
             let file_diagnostics = match check_crate(&mut context, crate_id, false, false) {
                 Ok(((), warnings)) => warnings,
diff --git a/tooling/lsp/src/requests/goto_definition.rs b/tooling/lsp/src/requests/goto_definition.rs
index 4b5ccddc613..558851d4ecf 100644
--- a/tooling/lsp/src/requests/goto_definition.rs
+++ b/tooling/lsp/src/requests/goto_definition.rs
@@ -52,8 +52,7 @@ fn on_goto_definition_inner(
     let mut definition_position = None;
 
     for package in &workspace {
-        let (mut context, crate_id) =
-            nargo::prepare_package(package, Box::new(crate::get_non_stdlib_asset));
+        let (mut context, crate_id) = nargo::prepare_package(package);
 
         // We ignore the warnings and errors produced by compilation while resolving the definition
         let _ = noirc_driver::check_crate(&mut context, crate_id, false, false);
diff --git a/tooling/lsp/src/requests/test_run.rs b/tooling/lsp/src/requests/test_run.rs
index 293b101eb85..e5245de426f 100644
--- a/tooling/lsp/src/requests/test_run.rs
+++ b/tooling/lsp/src/requests/test_run.rs
@@ -10,7 +10,6 @@ use noirc_driver::{check_crate, CompileOptions, NOIR_ARTIFACT_VERSION_STRING};
 use noirc_frontend::hir::FunctionNameMatch;
 
 use crate::{
-    get_non_stdlib_asset,
     types::{NargoTestRunParams, NargoTestRunResult},
     LspState,
 };
@@ -51,7 +50,7 @@ fn on_test_run_request_inner(
     // Since we filtered on crate name, this should be the only item in the iterator
     match workspace.into_iter().next() {
         Some(package) => {
-            let (mut context, crate_id) = prepare_package(package, Box::new(get_non_stdlib_asset));
+            let (mut context, crate_id) = prepare_package(package);
 
             if check_crate(&mut context, crate_id, false, false).is_err() {
                 let result = NargoTestRunResult {
                     id: params.id.clone(),
diff --git a/tooling/lsp/src/requests/tests.rs b/tooling/lsp/src/requests/tests.rs
index bed29ebe4e6..9a67eaae6db 100644
--- a/tooling/lsp/src/requests/tests.rs
+++ b/tooling/lsp/src/requests/tests.rs
@@ -7,7 +7,7 @@ use nargo_toml::{find_package_manifest, resolve_workspace_from_toml, PackageSele
 use noirc_driver::{check_crate, NOIR_ARTIFACT_VERSION_STRING};
 
 use crate::{
-    get_non_stdlib_asset, get_package_tests_in_crate,
+    get_package_tests_in_crate,
     types::{NargoPackageTests, NargoTestsParams, NargoTestsResult},
     LspState,
 };
@@ -53,7 +53,7 @@ fn on_tests_request_inner(
     let package_tests: Vec<_> = workspace
         .into_iter()
         .filter_map(|package| {
-            let (mut context, crate_id) = prepare_package(package, Box::new(get_non_stdlib_asset));
+            let (mut context, crate_id) = prepare_package(package);
 
             // We ignore the warnings and errors produced by compilation for producing tests
             // because we can still get the test functions even if compilation fails
             let _ = check_crate(&mut context, crate_id, false, false);
diff --git a/tooling/nargo/Cargo.toml b/tooling/nargo/Cargo.toml
index f8269459968..48741c367a5 100644
--- a/tooling/nargo/Cargo.toml
+++ b/tooling/nargo/Cargo.toml
@@ -24,4 +24,9 @@ iter-extended.workspace = true
 serde.workspace = true
 thiserror.workspace = true
 codespan-reporting.workspace = true
-rayon = "1.8.0"
\ No newline at end of file
+rayon = "1.8.0"
+
+[dev-dependencies]
+# TODO: This dependency is used to generate unit tests for `get_all_paths_in_dir`
+# TODO: once that method is moved to nargo_cli, we can move this dependency to nargo_cli
+tempfile = "3.2.0"
\ No newline at end of file
diff --git a/tooling/nargo/src/lib.rs b/tooling/nargo/src/lib.rs
index ef014fb436b..6f3d36febba 100644
--- a/tooling/nargo/src/lib.rs
+++ b/tooling/nargo/src/lib.rs
@@ -16,7 +16,7 @@ pub mod workspace;
 
 use std::collections::BTreeMap;
 
-use fm::{FileManager, FileReader};
+use fm::FileManager;
 use noirc_driver::{add_dep, prepare_crate, prepare_dependency};
 use noirc_frontend::{
     graph::{CrateGraph, CrateId, CrateName},
@@ -42,9 +42,55 @@ pub fn prepare_dependencies(
     }
 }
 
-pub fn prepare_package(package: &Package, file_reader: Box<FileReader>) -> (Context, CrateId) {
-    // TODO: FileManager continues to leak into various crates
-    let fm = FileManager::new(&package.root_dir, file_reader);
+// We will pre-populate the file manager with all the files in the package
+// This is so that we can avoid having to read from disk when we are compiling
+//
+// This does not require parsing because we are interested in the files under the src directory
+// it may turn out that we do not need to include some Noir files that we add to the file
+// manager
+pub fn insert_all_files_for_package_into_file_manager(
+    package: &Package,
+    file_manager: &mut FileManager,
+) {
+    // Start off at the entry path and read all files in the parent directory.
+    let entry_path_parent = package
+        .entry_path
+        .parent()
+        .unwrap_or_else(|| panic!("The entry path is expected to be a single file within a directory and so should have a parent {:?}", package.entry_path))
+        .clone();
+
+    // Get all files in the package and add them to the file manager
+    let paths =
+        get_all_paths_in_dir(entry_path_parent).expect("could not get all paths in the package");
+    for path in paths {
+        let source = std::fs::read_to_string(path.as_path())
+            .unwrap_or_else(|_| panic!("could not read file {:?} into string", path));
+        file_manager.add_file_with_source(path.as_path(), source);
+    }
+
+    insert_all_files_for_packages_dependencies_into_file_manager(package, file_manager);
+}
+
+// Inserts all files for the dependencies of the package into the file manager
+// too
+fn insert_all_files_for_packages_dependencies_into_file_manager(
+    package: &Package,
+    file_manager: &mut FileManager,
+) {
+    for (_, dep) in package.dependencies.iter() {
+        match dep {
+            Dependency::Local { package } | Dependency::Remote { package } => {
+                insert_all_files_for_package_into_file_manager(package, file_manager);
+                insert_all_files_for_packages_dependencies_into_file_manager(package, file_manager);
+            }
+        }
+    }
+}
+
+pub fn prepare_package(package: &Package) -> (Context, CrateId) {
+    let mut fm = FileManager::new(&package.root_dir);
+    insert_all_files_for_package_into_file_manager(package, &mut fm);
+
     let graph = CrateGraph::default();
     let mut context = Context::new(fm, graph);
 
@@ -54,3 +100,72 @@ pub fn prepare_package(package: &Package, file_reader: Box<FileReader>) -> (Cont
 
     (context, crate_id)
 }
+
+// Get all paths in the directory and subdirectories.
+//
+// Panics: If the path is not a path to a directory.
+//
+// TODO: Along with prepare_package, this function is an abstraction leak
+// TODO: given that this crate should not know about the file manager.
+// TODO: We can clean this up in a future refactor
+fn get_all_paths_in_dir(dir: &std::path::Path) -> std::io::Result<Vec<std::path::PathBuf>> {
+    assert!(dir.is_dir(), "directory {dir:?} is not a path to a directory");
+
+    let mut paths = Vec::new();
+
+    if dir.is_dir() {
+        for entry in std::fs::read_dir(dir)? {
+            let entry = entry?;
+            let path = entry.path();
+            if path.is_dir() {
+                let mut sub_paths = get_all_paths_in_dir(&path)?;
+                paths.append(&mut sub_paths);
+            } else {
+                paths.push(path);
+            }
+        }
+    }
+
+    Ok(paths)
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::get_all_paths_in_dir;
+    use std::{
+        fs::{self, File},
+        path::Path,
+    };
+    use tempfile::tempdir;
+
+    fn create_test_dir_structure(temp_dir: &Path) -> std::io::Result<()> {
+        fs::create_dir(temp_dir.join("subdir1"))?;
+        File::create(temp_dir.join("subdir1/file1.txt"))?;
+        fs::create_dir(temp_dir.join("subdir2"))?;
+        File::create(temp_dir.join("subdir2/file2.txt"))?;
+        File::create(temp_dir.join("file3.txt"))?;
+        Ok(())
+    }
+
+    #[test]
+    fn test_get_all_paths_in_dir() {
+        let temp_dir = tempdir().expect("could not create a temporary directory");
+        create_test_dir_structure(temp_dir.path())
+            .expect("could not create test directory structure");
+
+        let paths = get_all_paths_in_dir(temp_dir.path())
+            .expect("could not get all paths in the test directory");
+
+        // This should be the paths to all of the files in the directory and the subdirectory
+        let expected_paths = vec![
+            temp_dir.path().join("file3.txt"),
+            temp_dir.path().join("subdir1/file1.txt"),
+            temp_dir.path().join("subdir2/file2.txt"),
+        ];
+
+        assert_eq!(paths.len(), expected_paths.len());
+        for path in expected_paths {
+            assert!(paths.contains(&path));
+        }
+    }
+}
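`prepare_package` now fills the `FileManager` from disk up front, walking the package's source directory (and, recursively, every dependency's) with `get_all_paths_in_dir`, and only then builds the compilation context. A condensed sketch of the flow a caller sees, assuming an already-resolved `package: &Package` and eliding error handling:

    use nargo::prepare_package;
    use noirc_driver::check_crate;

    let (mut context, crate_id) = prepare_package(package);
    // Every .nr file of the package and its dependencies is already registered in
    // context.file_manager, so the check below never reads from the filesystem.
    let _ = check_crate(&mut context, crate_id, false, false);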
diff --git a/tooling/nargo/src/ops/compile.rs b/tooling/nargo/src/ops/compile.rs
index 02159345086..59ac5672a11 100644
--- a/tooling/nargo/src/ops/compile.rs
+++ b/tooling/nargo/src/ops/compile.rs
@@ -70,8 +70,7 @@ pub fn compile_program(
     np_language: Language,
     is_opcode_supported: &impl Fn(&Opcode) -> bool,
 ) -> (FileManager, CompilationResult<CompiledProgram>) {
-    let (mut context, crate_id) =
-        prepare_package(package, Box::new(|path| std::fs::read_to_string(path)));
+    let (mut context, crate_id) = prepare_package(package);
 
     let program_artifact_path = workspace.package_build_path(package);
     let mut debug_artifact_path = program_artifact_path.clone();
@@ -98,8 +97,7 @@ fn compile_contract(
     np_language: Language,
     is_opcode_supported: &impl Fn(&Opcode) -> bool,
 ) -> (FileManager, CompilationResult<CompiledContract>) {
-    let (mut context, crate_id) =
-        prepare_package(package, Box::new(|path| std::fs::read_to_string(path)));
+    let (mut context, crate_id) = prepare_package(package);
     let (contract, warnings) =
         match noirc_driver::compile_contract(&mut context, crate_id, compile_options) {
             Ok(contracts_and_warnings) => contracts_and_warnings,
diff --git a/tooling/nargo_cli/src/cli/check_cmd.rs b/tooling/nargo_cli/src/cli/check_cmd.rs
index 20e51fe1b52..0ea8186a237 100644
--- a/tooling/nargo_cli/src/cli/check_cmd.rs
+++ b/tooling/nargo_cli/src/cli/check_cmd.rs
@@ -55,8 +55,7 @@ pub(crate) fn run(
 }
 
 fn check_package(package: &Package, compile_options: &CompileOptions) -> Result<(), CompileError> {
-    let (mut context, crate_id) =
-        prepare_package(package, Box::new(|path| std::fs::read_to_string(path)));
+    let (mut context, crate_id) = prepare_package(package);
     check_crate_and_report_errors(
         &mut context,
         crate_id,
diff --git a/tooling/nargo_cli/src/cli/compile_cmd.rs b/tooling/nargo_cli/src/cli/compile_cmd.rs
index 3e4f868aecf..043c0841958 100644
--- a/tooling/nargo_cli/src/cli/compile_cmd.rs
+++ b/tooling/nargo_cli/src/cli/compile_cmd.rs
@@ -175,8 +175,7 @@ fn compile_program(
     np_language: Language,
     is_opcode_supported: &impl Fn(&Opcode) -> bool,
 ) -> (FileManager, CompilationResult<CompiledProgram>) {
-    let (mut context, crate_id) =
-        prepare_package(package, Box::new(|path| std::fs::read_to_string(path)));
+    let (mut context, crate_id) = prepare_package(package);
 
     let program_artifact_path = workspace.package_build_path(package);
     let mut debug_artifact_path = program_artifact_path.clone();
@@ -228,8 +227,7 @@ fn compile_contract(
     np_language: Language,
     is_opcode_supported: &impl Fn(&Opcode) -> bool,
 ) -> (FileManager, CompilationResult<CompiledContract>) {
-    let (mut context, crate_id) =
-        prepare_package(package, Box::new(|path| std::fs::read_to_string(path)));
+    let (mut context, crate_id) = prepare_package(package);
     let (contract, warnings) =
         match noirc_driver::compile_contract(&mut context, crate_id, compile_options) {
             Ok(contracts_and_warnings) => contracts_and_warnings,
diff --git a/tooling/nargo_cli/src/cli/fmt_cmd.rs b/tooling/nargo_cli/src/cli/fmt_cmd.rs
index ec3d373a483..0c2ca71eba3 100644
--- a/tooling/nargo_cli/src/cli/fmt_cmd.rs
+++ b/tooling/nargo_cli/src/cli/fmt_cmd.rs
@@ -2,6 +2,7 @@ use std::{fs::DirEntry, path::Path};
 
 use clap::Args;
 use fm::FileManager;
+use nargo::insert_all_files_for_package_into_file_manager;
 use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection};
 use noirc_driver::NOIR_ARTIFACT_VERSION_STRING;
 use noirc_errors::CustomDiagnostic;
@@ -35,11 +36,11 @@ pub(crate) fn run(args: FormatCommand, config: NargoConfig) -> Result<(), CliErr
     let mut check_exit_code_one = false;
 
     for package in &workspace {
-        let mut file_manager =
-            FileManager::new(&package.root_dir, Box::new(|path| std::fs::read_to_string(path)));
+        let mut file_manager = FileManager::new(&package.root_dir);
+        insert_all_files_for_package_into_file_manager(package, &mut file_manager);
 
         visit_noir_files(&package.root_dir.join("src"), &mut |entry| {
-            let file_id = file_manager.add_file(&entry.path()).expect("file exists");
+            let file_id = file_manager.name_to_id(entry.path().to_path_buf()).expect("The file should exist since we added all files in the package into the file manager");
 
             let (parsed_module, errors) = parse_file(&file_manager, file_id);
 
             let is_all_warnings = errors.iter().all(ParserError::is_warning);
diff --git a/tooling/nargo_cli/src/cli/test_cmd.rs b/tooling/nargo_cli/src/cli/test_cmd.rs
index 7f7ae67d946..43cfecd17e9 100644
--- a/tooling/nargo_cli/src/cli/test_cmd.rs
+++ b/tooling/nargo_cli/src/cli/test_cmd.rs
@@ -86,8 +86,7 @@ fn run_tests(
     show_output: bool,
     compile_options: &CompileOptions,
 ) -> Result<(), CliError> {
-    let (mut context, crate_id) =
-        prepare_package(package, Box::new(|path| std::fs::read_to_string(path)));
+    let (mut context, crate_id) = prepare_package(package);
     check_crate_and_report_errors(
         &mut context,
         crate_id,