From eb8e9087460fd850ca1395f53987415e4909b4be Mon Sep 17 00:00:00 2001
From: FrozenPandaz
Date: Thu, 6 Jul 2023 15:18:24 -0400
Subject: [PATCH] fix(core): parse project configs only in js

---
 Cargo.lock                                    | 43 ---------
 packages/nx/Cargo.toml                        |  3 -
 packages/nx/src/native/utils/path.rs          |  4 +
 .../src/native/workspace/get_config_files.rs  | 47 ++++------
 .../workspace/get_nx_workspace_files.rs       | 90 +++++--------
 packages/nx/src/native/workspace/types.rs     |  7 --
 .../utils/retrieve-workspace-files.ts         | 25 ++++--
 7 files changed, 65 insertions(+), 154 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index 03fbb212db6cc9..b50c6e97e05e37 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -844,15 +844,6 @@ version = "1.0.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "453ad9f582a441959e5f0d088b02ce04cfe8d51a8eaf077f12ac6d3e94164ca6"
 
-[[package]]
-name = "jsonc-parser"
-version = "0.21.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7b56a20e76235284255a09fcd1f45cf55d3c524ea657ebd3854735925c57743d"
-dependencies = [
- "serde_json",
-]
-
 [[package]]
 name = "kqueue"
 version = "1.0.7"
@@ -1171,13 +1162,10 @@ dependencies = [
  "ignore",
  "ignore-files",
  "itertools",
- "jsonc-parser",
  "napi",
  "napi-build",
  "napi-derive",
  "rayon",
- "serde",
- "serde_json",
  "thiserror",
  "tokio",
  "tracing",
@@ -1447,12 +1435,6 @@ dependencies = [
  "windows-sys 0.45.0",
 ]
 
-[[package]]
-name = "ryu"
-version = "1.0.13"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f91339c0467de62360649f8d3e185ca8de4224ff281f66000de5eb2a77a79041"
-
 [[package]]
 name = "same-file"
 version = "1.0.6"
@@ -1479,31 +1461,6 @@ name = "serde"
 version = "1.0.152"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "bb7d1f0d3021d347a83e556fc4683dea2ea09d87bccdf88ff5c12545d89d5efb"
-dependencies = [
- "serde_derive",
-]
-
-[[package]]
-name = "serde_derive"
-version = "1.0.152"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "af487d118eecd09402d70a5d72551860e788df87b464af30e5ea6a38c75c541e"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn 1.0.107",
-]
-
-[[package]]
-name = "serde_json"
-version = "1.0.96"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "057d394a50403bcac12672b2b18fb387ab6d289d957dab67dd201875391e52f1"
-dependencies = [
- "itoa",
- "ryu",
- "serde",
-]
 
 [[package]]
 name = "sha1_smol"
diff --git a/packages/nx/Cargo.toml b/packages/nx/Cargo.toml
index 37d4445d2e9127..2443f8823382cd 100644
--- a/packages/nx/Cargo.toml
+++ b/packages/nx/Cargo.toml
@@ -13,12 +13,9 @@ hashbrown = { version = "0.14.0", features = ["rayon"] }
 ignore = '0.4'
 ignore-files = "1.3.0"
 itertools = "0.10.5"
-jsonc-parser = { version = "0.21.1", features = ["serde"] }
 napi = { version = '2.12.6', default-features = false, features = ['anyhow', 'napi4', 'tokio_rt'] }
 napi-derive = '2.9.3'
 rayon = "1.7.0"
-serde = { version = "1.0", features = ["derive"] }
-serde_json = "1.0"
 thiserror = "1.0.40"
 tokio = { version = "1.28.2", features = ["fs"] }
 tracing = "0.1.37"
diff --git a/packages/nx/src/native/utils/path.rs b/packages/nx/src/native/utils/path.rs
index eb9edc0e4eea23..15fc00727f596b 100644
--- a/packages/nx/src/native/utils/path.rs
+++ b/packages/nx/src/native/utils/path.rs
@@ -20,6 +20,10 @@ fn normalize_path<P>(path: P) -> String
 where
     P: AsRef<Path>,
 {
+    if path.as_ref() == Path::new("") {
+        return ".".into();
+    }
+
     // convert back-slashes in Windows paths, since the js expects only forward-slash path separators
     if cfg!(windows) {
         path.as_ref().display().to_string().replace('\\', "/")
diff --git a/packages/nx/src/native/workspace/get_config_files.rs b/packages/nx/src/native/workspace/get_config_files.rs
index 0050611acf8f67..fc202f1cafd708 100644
--- a/packages/nx/src/native/workspace/get_config_files.rs
+++ b/packages/nx/src/native/workspace/get_config_files.rs
@@ -2,6 +2,7 @@ use crate::native::utils::glob::build_glob_set;
 use crate::native::utils::path::Normalize;
 use crate::native::walker::nx_walker;
 use globset::GlobSet;
+
 use std::collections::hash_map::Entry;
 use std::collections::HashMap;
 use std::path::{Path, PathBuf};
@@ -11,20 +12,20 @@ use std::path::{Path, PathBuf};
 pub fn get_config_files(workspace_root: String, globs: Vec<String>) -> anyhow::Result<Vec<String>> {
     let globs = build_glob_set(globs)?;
     Ok(nx_walker(workspace_root, move |rec| {
-        let mut config_paths: HashMap<PathBuf, (PathBuf, Vec<u8>)> = HashMap::new();
-        for (path, content) in rec {
-            insert_config_file_into_map((path, content), &mut config_paths, &globs);
+        let mut config_paths: HashMap<PathBuf, PathBuf> = HashMap::new();
+        for (path, _) in rec {
+            insert_config_file_into_map(path, &mut config_paths, &globs);
         }
         config_paths
-            .into_iter()
-            .map(|(_, (val, _))| val.to_normalized_string())
+            .into_values()
+            .map(|path| path.to_normalized_string())
             .collect()
     }))
 }
 
 pub fn insert_config_file_into_map(
-    (path, content): (PathBuf, Vec<u8>),
-    config_paths: &mut HashMap<PathBuf, (PathBuf, Vec<u8>)>,
+    path: PathBuf,
+    config_paths: &mut HashMap<PathBuf, PathBuf>,
     globs: &GlobSet,
 ) {
     if globs.is_match(&path) {
@@ -34,25 +35,24 @@ pub fn insert_config_file_into_map(
             .file_name()
             .expect("Config paths always have file names");
         if file_name == "project.json" {
-            config_paths.insert(parent, (path, content));
+            config_paths.insert(parent, path);
         } else if file_name == "package.json" {
             match config_paths.entry(parent) {
                 Entry::Occupied(mut o) => {
                     if o.get()
-                        .0
                         .file_name()
                         .expect("Config paths always have file names")
                         != "project.json"
                     {
-                        o.insert((path, content));
+                        o.insert(path);
                     }
                 }
                 Entry::Vacant(v) => {
-                    v.insert((path, content));
+                    v.insert(path);
                 }
             }
         } else {
-            config_paths.entry(parent).or_insert((path, content));
+            config_paths.entry(parent).or_insert(path);
         }
     }
 }
@@ -65,34 +65,23 @@ mod test {
 
     #[test]
     fn should_insert_config_files_properly() {
-        let mut config_paths: HashMap<PathBuf, (PathBuf, Vec<u8>)> = HashMap::new();
+        let mut config_paths: HashMap<PathBuf, PathBuf> = HashMap::new();
         let globs = build_glob_set(vec!["**/*".into()]).unwrap();
+        insert_config_file_into_map(PathBuf::from("project.json"), &mut config_paths, &globs);
+        insert_config_file_into_map(PathBuf::from("package.json"), &mut config_paths, &globs);
         insert_config_file_into_map(
-            (PathBuf::from("project.json"), vec![]),
-            &mut config_paths,
-            &globs,
-        );
-        insert_config_file_into_map(
-            (PathBuf::from("package.json"), vec![]),
-            &mut config_paths,
-            &globs,
-        );
-        insert_config_file_into_map(
-            (PathBuf::from("lib1/project.json"), vec![]),
+            PathBuf::from("lib1/project.json"),
             &mut config_paths,
             &globs,
        );
         insert_config_file_into_map(
-            (PathBuf::from("lib2/package.json"), vec![]),
+            PathBuf::from("lib2/package.json"),
             &mut config_paths,
             &globs,
         );
 
-        let config_files: Vec<PathBuf> = config_paths
-            .into_iter()
-            .map(|(_, (path, _))| path)
-            .collect();
+        let config_files: Vec<PathBuf> = config_paths.into_values().collect();
 
         assert!(config_files.contains(&PathBuf::from("project.json")));
         assert!(config_files.contains(&PathBuf::from("lib1/project.json")));
         assert!(config_files.contains(&PathBuf::from("lib2/package.json")));
diff --git a/packages/nx/src/native/workspace/get_nx_workspace_files.rs b/packages/nx/src/native/workspace/get_nx_workspace_files.rs
index b8c6e68b488906..58ebd7432111b7 100644
--- a/packages/nx/src/native/workspace/get_nx_workspace_files.rs
+++ b/packages/nx/src/native/workspace/get_nx_workspace_files.rs
@@ -1,5 +1,4 @@
-use jsonc_parser::ParseOptions;
-use std::collections::HashMap;
+use std::collections::{HashMap, HashSet};
 use std::path::{Path, PathBuf};
 
 use rayon::prelude::*;
@@ -13,7 +12,7 @@ use crate::native::utils::path::Normalize;
 use crate::native::walker::nx_walker;
 use crate::native::workspace::errors::{InternalWorkspaceErrors, WorkspaceErrors};
 use crate::native::workspace::get_config_files::insert_config_file_into_map;
-use crate::native::workspace::types::{FileLocation, ProjectConfiguration};
+use crate::native::workspace::types::FileLocation;
 
 #[napi(object)]
 pub struct NxWorkspaceFiles {
@@ -35,9 +34,10 @@ pub fn get_workspace_files_native(
     let (projects, mut file_data) = get_file_data(&workspace_root, globs)
         .map_err(|err| napi::Error::new(WorkspaceErrors::Generic, err.to_string()))?;
 
-    let root_map = create_root_map(&projects)?;
+    dbg!(&projects);
+    let root_set = create_root_set(&projects)?;
 
-    trace!(?root_map);
+    dbg!(&root_set);
 
     // Files need to be sorted each time because when we do hashArray in the TaskHasher.js, the order of the files should be deterministic
     file_data.par_sort();
@@ -48,12 +48,15 @@ pub fn get_workspace_files_native(
             let file_path = Path::new(&file_data.file);
             let mut parent = file_path.parent().unwrap_or_else(|| Path::new(""));
 
-            while root_map.get(parent).is_none() && parent != Path::new("") {
+            while root_set.get(parent).is_none() && parent != Path::new("") {
                 parent = parent.parent().unwrap_or_else(|| Path::new(""));
             }
 
-            match root_map.get(parent) {
-                Some(project_name) => (FileLocation::Project(project_name.clone()), file_data),
+            match root_set.get(parent) {
+                Some(project_root) => (
+                    FileLocation::Project(project_root.to_normalized_string()),
+                    file_data,
+                ),
                 None => (FileLocation::Global, file_data),
             }
         })
@@ -74,9 +77,9 @@ pub fn get_workspace_files_native(
     for (file_location, file_data) in file_locations {
         match file_location {
             FileLocation::Global => global_files.push(file_data),
-            FileLocation::Project(project_name) => match project_file_map.get_mut(&project_name) {
+            FileLocation::Project(project_root) => match project_file_map.get_mut(&project_root) {
                 None => {
-                    project_file_map.insert(project_name, vec![file_data]);
+                    project_file_map.insert(project_root, vec![file_data]);
                 }
                 Some(project_files) => project_files.push(file_data),
             },
@@ -87,58 +90,31 @@ pub fn get_workspace_files_native(
         project_file_map,
         global_files,
         config_files: projects
-            .keys()
+            .iter()
             .map(|path| path.to_normalized_string())
             .collect(),
     })
 }
 
-fn create_root_map(
-    projects: &HashMap<PathBuf, Vec<u8>>,
-) -> Result<HashMap<&Path, String>, InternalWorkspaceErrors> {
+fn create_root_set(
+    projects: &HashSet<PathBuf>,
+) -> Result<HashSet<&Path>, InternalWorkspaceErrors> {
     projects
         .par_iter()
-        .map(|(path, content)| {
+        .map(|path| {
             let file_name = path
                 .file_name()
                 .expect("path should always have a filename");
             return if file_name == "project.json" || file_name == "package.json" {
-                // use serde_json to do the initial parse, if that fails fall back to jsonc_parser.
-                // If all those fail, expose the error from jsonc_parser
-                let project_configuration: ProjectConfiguration =
-                    read_project_configuration(content, path)?;
-
                 let Some(parent_path) = path.parent() else {
                     return Err(InternalWorkspaceErrors::Generic {
                         msg: format!("{path:?} has no parent"),
                     })
                 };
 
-                let name: String = if let Some(name) = project_configuration.name {
-                    Ok(name)
-                } else {
-                    parent_path
-                        .file_name()
-                        .unwrap_or_default()
-                        .to_os_string()
-                        .into_string()
-                        .map_err(|os_string| InternalWorkspaceErrors::Generic {
-                            msg: format!("Cannot turn {os_string:?} into String"),
-                        })
-                }?;
-                Ok((parent_path, name))
+                Ok(parent_path)
             } else if let Some(parent_path) = path.parent() {
-                Ok((
-                    parent_path,
-                    parent_path
-                        .file_name()
-                        .unwrap_or_default()
-                        .to_os_string()
-                        .into_string()
-                        .map_err(|os_string| InternalWorkspaceErrors::Generic {
-                            msg: format!("Cannot turn {os_string:?} into String"),
-                        })?,
-                ))
+                Ok(parent_path)
             } else {
                 Err(InternalWorkspaceErrors::Generic {
                     msg: format!("{path:?} has no parent"),
@@ -148,36 +124,18 @@ fn create_root_map(
         .collect()
 }
 
-fn read_project_configuration(
-    content: &[u8],
-    path: &Path,
-) -> Result<ProjectConfiguration, InternalWorkspaceErrors> {
-    serde_json::from_slice(content).or_else(|_| {
-        let content_str = std::str::from_utf8(content).expect("content should be valid utf8");
-        let parser_value =
-            jsonc_parser::parse_to_serde_value(content_str, &ParseOptions::default()).map_err(
-                |_| InternalWorkspaceErrors::ParseError {
-                    file: PathBuf::from(path),
-                },
-            )?;
-        serde_json::from_value(parser_value.into()).map_err(|_| InternalWorkspaceErrors::Generic {
-            msg: format!("Failed to parse {path:?}"),
-        })
-    })
-}
-
-type WorkspaceData = (HashMap<PathBuf, Vec<u8>>, Vec<FileData>);
+type WorkspaceData = (HashSet<PathBuf>, Vec<FileData>);
 fn get_file_data(workspace_root: &str, globs: Vec<String>) -> anyhow::Result<WorkspaceData> {
     let globs = build_glob_set(globs)?;
     let (projects, file_data) = nx_walker(workspace_root, move |rec| {
-        let mut projects: HashMap<PathBuf, (PathBuf, Vec<u8>)> = HashMap::new();
+        let mut projects: HashMap<PathBuf, PathBuf> = HashMap::new();
         let mut file_hashes: Vec<FileData> = vec![];
         for (path, content) in rec {
             file_hashes.push(FileData {
                 file: path.to_normalized_string(),
                 hash: xxh3::xxh3_64(&content).to_string(),
             });
-            insert_config_file_into_map((path, content), &mut projects, &globs)
+            insert_config_file_into_map(path, &mut projects, &globs)
         }
         (projects, file_hashes)
     });
diff --git a/packages/nx/src/native/workspace/types.rs b/packages/nx/src/native/workspace/types.rs
index 83318cfac7df2c..dc460dabcec666 100644
--- a/packages/nx/src/native/workspace/types.rs
+++ b/packages/nx/src/native/workspace/types.rs
@@ -1,10 +1,3 @@
-use serde::Deserialize;
-
-#[derive(Debug, Deserialize)]
-pub(crate) struct ProjectConfiguration {
-    pub name: Option<String>,
-}
-
 #[derive(Debug, Eq, PartialEq)]
 pub enum FileLocation {
     Global,
diff --git a/packages/nx/src/project-graph/utils/retrieve-workspace-files.ts b/packages/nx/src/project-graph/utils/retrieve-workspace-files.ts
index 8e80439cdba1c6..48a2b5d0b7b22f 100644
--- a/packages/nx/src/project-graph/utils/retrieve-workspace-files.ts
+++ b/packages/nx/src/project-graph/utils/retrieve-workspace-files.ts
@@ -17,6 +17,7 @@ import {
 import { NxJsonConfiguration } from '../../config/nx-json';
 import { FileData, ProjectFileMap } from '../../config/project-graph';
 import { NxWorkspaceFiles, WorkspaceErrors } from '../../native';
+import { createProjectRootMappingsFromProjectConfigurations } from './find-project-for-path';
 
 /**
 * Walks the workspace directory to create the `projectFileMap`, `ProjectConfigurations` and `allWorkspaceFiles`
@@ -60,17 +61,29 @@ export async function retrieveWorkspaceFiles(
     'get-workspace-files:end'
   );
 
+  const projectConfigurations = createProjectConfigurations(
+    workspaceRoot,
+    nxJson,
+    workspaceFiles.configFiles
+  );
+
+  const map = createProjectRootMappingsFromProjectConfigurations(
+    projectConfigurations.projects
+  );
+
+  const projectNameToFilesMap: ProjectFileMap = {};
+  for (const projectRoot in workspaceFiles.projectFileMap) {
+    projectNameToFilesMap[map.get(projectRoot)] =
+      workspaceFiles.projectFileMap[projectRoot];
+  }
+
   return {
     allWorkspaceFiles: buildAllWorkspaceFiles(
       workspaceFiles.projectFileMap,
       workspaceFiles.globalFiles
     ),
-    projectFileMap: workspaceFiles.projectFileMap,
-    projectConfigurations: createProjectConfigurations(
-      workspaceRoot,
-      nxJson,
-      workspaceFiles.configFiles
-    ),
+    projectFileMap: projectNameToFilesMap,
+    projectConfigurations,
  };
 }
 
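
After this change the native layer keys `projectFileMap` by project root (the Rust side no longer parses project names out of `project.json` / `package.json`), and `retrieve-workspace-files.ts` re-keys it by project name using the root-to-name mapping built from the parsed project configurations. The following is a minimal TypeScript sketch of that re-keying step, not part of the patch; the roots, names, and file data are made up, standing in for what `createProjectConfigurations` and `createProjectRootMappingsFromProjectConfigurations` would produce in the real code:

    // Hypothetical stand-in for the root -> name map; in Nx it comes from
    // createProjectRootMappingsFromProjectConfigurations(projectConfigurations.projects).
    const rootToName = new Map<string, string>([
      ['libs/lib1', 'lib1'],
      ['libs/lib2', 'lib2'],
    ]);

    // Hypothetical native result: project root -> hashed files, mirroring the
    // { file, hash } entries the patched NxWorkspaceFiles.projectFileMap carries.
    const nativeProjectFileMap: Record<string, { file: string; hash: string }[]> = {
      'libs/lib1': [{ file: 'libs/lib1/src/index.ts', hash: '123' }],
      'libs/lib2': [{ file: 'libs/lib2/src/index.ts', hash: '456' }],
    };

    // Re-key by project name, as the loop added to retrieveWorkspaceFiles does.
    const projectNameToFilesMap: Record<string, { file: string; hash: string }[]> = {};
    for (const projectRoot in nativeProjectFileMap) {
      const projectName = rootToName.get(projectRoot);
      if (projectName) {
        projectNameToFilesMap[projectName] = nativeProjectFileMap[projectRoot];
      }
    }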