From 2e1ec6816d824507c728b264fbe78fd8b7f6b51e Mon Sep 17 00:00:00 2001 From: FrozenPandaz Date: Tue, 4 Jul 2023 20:43:56 -0400 Subject: [PATCH] chore(core): wip --- Cargo.lock | 63 +- packages/nx/Cargo.toml | 9 +- packages/nx/src/native/index.d.ts | 6 +- packages/nx/src/native/logger/mod.rs | 2 +- packages/nx/src/native/ts_import_locators.rs | 1012 +++++++++++++++-- .../workspace/get_nx_workspace_files.rs | 6 +- packages/nx/src/plugins/js/index.ts | 8 + .../build-dependencies/build-dependencies.ts | 1 + .../explicit-project-dependencies.ts | 151 ++- 9 files changed, 1091 insertions(+), 167 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 70e5b51b804997..4dd7491663b52c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -12,17 +12,6 @@ dependencies = [ "regex", ] -[[package]] -name = "ahash" -version = "0.7.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47" -dependencies = [ - "getrandom", - "once_cell", - "version_check", -] - [[package]] name = "ahash" version = "0.8.3" @@ -30,6 +19,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2c99f64d1e06488f620f932677e24bc6e2897582980441ae90a671415bd7ec2f" dependencies = [ "cfg-if", + "getrandom", "once_cell", "version_check", ] @@ -153,9 +143,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.2.1" +version = "2.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24a6904aef64d73cf10ab17ebace7befb918b82164785cb89907993be7f83813" +checksum = "630be753d4e58660abd17930c71b647fe46c27ea6b63cc59e1e3851406972e42" [[package]] name = "bstr" @@ -576,7 +566,7 @@ version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "786861e84a5793ad5f863d846de5eb064cd23b87e61ad708c8c402608202e7be" dependencies = [ - "bitflags 2.2.1", + "bitflags 2.3.3", "bstr", "gix-path", "libc", 
@@ -622,7 +612,7 @@ version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c07c98204529ac3f24b34754540a852593d2a4c7349008df389240266627a72a" dependencies = [ - "bitflags 2.2.1", + "bitflags 2.3.3", "bstr", "gix-features", "gix-path", @@ -706,7 +696,7 @@ version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "794520043d5a024dfeac335c6e520cb616f6963e30dab995892382e998c12897" dependencies = [ - "bitflags 2.2.1", + "bitflags 2.3.3", "gix-path", "libc", "windows", @@ -774,7 +764,7 @@ version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a" dependencies = [ - "ahash 0.8.3", + "ahash", "allocator-api2", "rayon", ] @@ -1161,7 +1151,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "49ac8112fe5998579b22e29903c7b277fc7f91c7860c0236f35792caf8156e18" dependencies = [ "anyhow", - "bitflags 2.2.1", + "bitflags 2.3.3", "ctor", "napi-derive", "napi-sys", @@ -1356,6 +1346,7 @@ dependencies = [ "rayon", "swc_common", "swc_ecma_ast", + "swc_ecma_dep_graph", "swc_ecma_parser", "swc_ecma_visit", "thiserror", @@ -1372,9 +1363,9 @@ dependencies = [ [[package]] name = "once_cell" -version = "1.17.1" +version = "1.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b7e5500299e16ebb147ae15a00a942af264cf3688f47923b8fc2cd5858f23ad3" +checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d" [[package]] name = "overload" @@ -1888,11 +1879,11 @@ dependencies = [ [[package]] name = "swc_common" -version = "0.31.13" +version = "0.31.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe738c06147072c476d7e12ee0e99fd947549909c7c0404abd78d95433f75a67" +checksum = "c6414bd4e553f5638961d39b07075ffd37a3d63176829592f4a5900260d94ca1" dependencies = [ - "ahash 0.7.6", + "ahash", "ast_node", "better_scoped_tls", 
"cfg-if", @@ -1915,11 +1906,11 @@ dependencies = [ [[package]] name = "swc_ecma_ast" -version = "0.106.3" +version = "0.107.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad6c7a25a43568de41e9a4c111b7015b78bb269414395574a2604853748ca71c" +checksum = "5693558188efdd5b664e517b69ba8056a7f64c214ca8cd034e3ae8314566b866" dependencies = [ - "bitflags 2.2.1", + "bitflags 2.3.3", "is-macro", "num-bigint", "scoped-tls", @@ -1929,11 +1920,23 @@ dependencies = [ "unicode-id", ] +[[package]] +name = "swc_ecma_dep_graph" +version = "0.109.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4535c2a8210bf69f78201f93e6dc741a81046d1c04479e41d26849155752d184" +dependencies = [ + "swc_atoms", + "swc_common", + "swc_ecma_ast", + "swc_ecma_visit", +] + [[package]] name = "swc_ecma_parser" -version = "0.136.4" +version = "0.137.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "634d401703f48f2cbb6045d45a6874f72fa5b3fb8f7da4981338764220b33f7b" +checksum = "8ea1915adb15d9ca1695e76d41524beb4806e9b603280edb7eedbaebe706a41c" dependencies = [ "either", "lexical", @@ -1951,9 +1954,9 @@ dependencies = [ [[package]] name = "swc_ecma_visit" -version = "0.92.2" +version = "0.93.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "20fa5bf3383dbf864d85e4f8e3bc574b1b1bca9f0ca0bb11da3d6add717484be" +checksum = "82bb87ee3345a7049efcbedc419f121933e0e3967457922848d0026fb3b79dac" dependencies = [ "num-bigint", "swc_atoms", diff --git a/packages/nx/Cargo.toml b/packages/nx/Cargo.toml index b0f31b027e0379..e560bbcf2e2076 100644 --- a/packages/nx/Cargo.toml +++ b/packages/nx/Cargo.toml @@ -27,10 +27,11 @@ watchexec-filterer-ignore = "1.2.1" watchexec-signals = "1.0.0" xxhash-rust = { version = '0.8.5', features = ['xxh3', 'xxh64'] } -swc_common = "0.31.12" -swc_ecma_parser = { version = "0.136.0", features = ["typescript"] } -swc_ecma_visit = "0.92.0" -swc_ecma_ast = "0.106.0" +swc_common = 
"0.31.16" +swc_ecma_parser = { version = "0.137.1", features = ["typescript"] } +swc_ecma_visit = "0.93.0" +swc_ecma_ast = "0.107.0" +swc_ecma_dep_graph = "0.109.1" [lib] crate-type = ['cdylib'] diff --git a/packages/nx/src/native/index.d.ts b/packages/nx/src/native/index.d.ts index e914bacfd9d902..48706473191044 100644 --- a/packages/nx/src/native/index.d.ts +++ b/packages/nx/src/native/index.d.ts @@ -44,7 +44,7 @@ export interface NxWorkspaceFiles { projectConfigurations: Record } export function getWorkspaceFilesNative(workspaceRoot: string, globs: Array, parseConfigurations: (arg0: Array) => Record): NxWorkspaceFiles -export function findImports(filePaths: Array, callback: (obj: null, result: {file: string, importExpr: string}) => void): void +export function findImports(projectFileMap: Record>): Array export class Watcher { origin: string /** @@ -57,5 +57,7 @@ export class Watcher { } export class ImportResult { file: string - importExpr: string + sourceProject: string + dynamicImportExpressions: Array + staticImportExpressions: Array } diff --git a/packages/nx/src/native/logger/mod.rs b/packages/nx/src/native/logger/mod.rs index a22fa7838744c2..d509f71aaf0c4d 100644 --- a/packages/nx/src/native/logger/mod.rs +++ b/packages/nx/src/native/logger/mod.rs @@ -63,7 +63,7 @@ where pub(crate) fn enable_logger() { let env_filter = - EnvFilter::try_from_env("NX_NATIVE_LOGGING").unwrap_or_else(|_| EnvFilter::new("INFO")); + EnvFilter::try_from_env("NX_NATIVE_LOGGING").unwrap_or_else(|_| EnvFilter::new("ERROR")); _ = tracing_subscriber::fmt() .with_env_filter(env_filter) .event_format(NxLogFormatter) diff --git a/packages/nx/src/native/ts_import_locators.rs b/packages/nx/src/native/ts_import_locators.rs index 17e0b7ea52b9e2..dd507775683a73 100644 --- a/packages/nx/src/native/ts_import_locators.rs +++ b/packages/nx/src/native/ts_import_locators.rs @@ -1,119 +1,947 @@ -use std::fs::read_to_string; -use std::io::{Error, ErrorKind}; +use std::collections::HashMap; +use 
std::path::Path; +use std::rc::Rc; +use std::sync::Arc; use rayon::prelude::*; -use napi::bindgen_prelude::*; -use napi::JsFunction; -use napi::threadsafe_function::{ - ThreadSafeCallContext, - ThreadsafeFunction, - ThreadsafeFunctionCallMode, -}; -use swc_common::{ - FileName, - SourceMap, - sync::Lrc, -}; -use swc_ecma_ast::{CallExpr, Expr, ImportDecl, Lit, Str}; -use swc_ecma_parser::{ - lexer::Lexer, - Parser, - StringInput, - Syntax, - TsConfig, -}; -use swc_ecma_visit::{Visit, VisitWith}; +use tracing::debug; + +use swc_common::{BytePos, SourceFile, SourceMap, Spanned}; +use swc_ecma_ast::EsVersion::EsNext; +use swc_ecma_parser::lexer::Lexer; +use swc_ecma_parser::token::Keyword::{Class, Export, Function, Import}; +use swc_ecma_parser::token::Word::{Ident, Keyword}; +use swc_ecma_parser::token::{BinOpToken, Token, TokenAndSpan}; +use swc_ecma_parser::{Syntax, Tokens, TsConfig}; + +use crate::native::logger::enable_logger; #[napi] +#[derive(Debug)] pub struct ImportResult { pub file: String, - pub import_expr: String, + pub source_project: String, + pub dynamic_import_expressions: Vec, + pub static_import_expressions: Vec, } -struct ImportVisitor<'a> { - pub file: String, - pub callback: &'a ThreadsafeFunction, +#[derive(Debug)] +enum ImportType { + Static, + Dynamic, +} +#[derive(Debug, Clone, Copy, PartialEq)] +enum BlockType { + Block, + Function, + Class, + Object, + ObjectType, + ArrowFunction, } -impl Visit for ImportVisitor<'_> { - fn visit_import_decl(&mut self, import_decl: &ImportDecl) { - self.callback.call(Ok(ImportResult { - file: self.file.to_owned(), - import_expr: import_decl.src.value.to_string(), - }), ThreadsafeFunctionCallMode::NonBlocking); - } +fn is_identifier(token: &Token) -> bool { + matches!(token, Token::Word(Ident(_))) +} + +fn process_file((source_project, file_path): (&String, &String)) -> Option { + let cm = Arc::::default() + .load_file(Path::new(file_path)) + .unwrap(); + + let tsx = file_path.ends_with(".tsx") || 
file_path.ends_with(".jsx"); + let mut lexer = Lexer::new( + Syntax::Typescript(TsConfig { + tsx, + decorators: false, + dts: file_path.ends_with(".d.ts"), + no_early_errors: false, + disallow_ambiguous_jsx_like: false, + }), + EsNext, + (&*cm).into(), + None, + ); + + let mut static_import_expressions: Vec = vec![]; + let mut dynamic_import_expressions: Vec = vec![]; + + // State + let mut open_brace_count: i128 = 0; + + let mut current_token: Option = None; + let mut last_token: Option; + let mut import_type: ImportType = ImportType::Dynamic; + + let mut blocks_stack: Vec = vec![]; + let mut next_block_type = BlockType::Block; + + 'outer: loop { + // Keep the current token as the last token before calling next + last_token = current_token; + + if let Some(t) = lexer.next() { + // Keep track of braces/ when blocks begin and end + match &t.token { + Token::DollarLBrace => { + open_brace_count += 1; + } + Token::LBrace => { + open_brace_count += 1; + + // A new block has opened so push the new block type + blocks_stack.push(next_block_type); + } + Token::RBrace => { + open_brace_count -= 1; + + // Reset the next block type + next_block_type = BlockType::Block; + + // The block has closed so remove it from the block stack + blocks_stack.pop(); + } + _ => {} + } + + // Keeps the current token so it can be kept as the last token later + current_token = Some(t); + } else { + // This is the end of the file, break out of the loop + break; + } + + // Keep track of when we are in an object declaration because colons mean different things + let in_object_declaration = blocks_stack.contains(&BlockType::Object); + + if let Some(current) = ¤t_token { + let new_line = lexer.had_line_break_before_last(); + + // This is the beginning of a new statement, reset the import type to the default + // Reset import type when there is new line not in braces + if new_line && open_brace_count == 0 { + import_type = ImportType::Dynamic; + } + match ¤t.token { + Token::Word(word) => match word 
{ + // Matches something like const a = a as import('a') + // This is a static type import + Ident(i) if i == "as" => { + import_type = ImportType::Static; + } + // Matches something like export const = import('a') + // This is a dynamic import + Keyword(keyword) if *keyword == Export => { + import_type = ImportType::Dynamic; + } + + // If a function keyword appears, the next open brace will start a function block + Keyword(keyword) if *keyword == Function => { + next_block_type = BlockType::Function; + } + // If a class keyword appears, the next open brace will start a class block + Keyword(keyword) if *keyword == Class => { + next_block_type = BlockType::Class; + } + _ => {} + }, + Token::AssignOp(_) => { + // When things are assigned, they are dynamic imports + // Ex: const a = import('a'); + import_type = ImportType::Dynamic; - fn visit_call_expr(&mut self, call_expr: &CallExpr) { - if call_expr.callee.is_import() { - if let Some(arg) = call_expr.args.get(0) { - if let Expr::Lit(lit) = &*arg.expr { - if let Lit::Str(Str { value: sym, raw: _, span: _ }) = lit { - self.callback.call(Ok(ImportResult { - file: self.file.to_owned(), - import_expr: sym.to_string(), - }), ThreadsafeFunctionCallMode::NonBlocking); + // When assigning things, an open brace means an object + next_block_type = BlockType::Object + } + // When we see a (, the next brace is an object passed into a function + // Matches console.log({ a: import('a') }); + Token::LParen => { + if let Some(t) = &last_token { + match t.token { + _ if is_identifier(&t.token) => { + // Function Call + next_block_type = BlockType::Object; + } + _ => { + // Arrow Function Declaration + next_block_type = BlockType::ArrowFunction; + } + } } } + Token::BinOp(op) => match op { + BinOpToken::Lt => { + // Matches things like Foo + // This is a static import + if let Some(t) = &last_token { + match t.token { + _ if is_identifier(&t.token) => { + // Generic + import_type = ImportType::Static; + } + _ => {} + } + } + } + // 
When there are binary operations, it is dynamic + // Matches things like const a = 3 + (await import('a')) + _ => { + import_type = ImportType::Dynamic; + } + }, + // When there is a string literal, ${ begins a dynamic expression + Token::DollarLBrace => { + import_type = ImportType::Dynamic; + } + // When functions and methods begin, this starts a dynamic block + Token::LBrace => { + import_type = ImportType::Dynamic; + } + // When we see a ; A new dynamic statement begins + Token::Semi => { + import_type = ImportType::Dynamic; + } + Token::Colon => { + if let Some(t) = &last_token { + match t.token { + // Matches { 'a': import('a') } + Token::Str { .. } if in_object_declaration => { + // Object Property Assignment + import_type = ImportType::Dynamic + } + // Matches { [a]: import('a') } + Token::RBracket if in_object_declaration => { + // Object Property Assignment + import_type = ImportType::Dynamic + } + // Object Property Assignment + // Matches { a: import('a') } + _ if is_identifier(&t.token) && in_object_declaration => { + import_type = ImportType::Dynamic + } + // Matches const a: typeof import('a') + _ => { + // A brace would begin an object type + // Ex: const a: { a: typeof import('a') } + next_block_type = BlockType::ObjectType; + // This is a typing and is static + import_type = ImportType::Static; + } + } + } + } + _ => {} } + + let mut add_import = |import: String| match &import_type { + ImportType::Static => { + static_import_expressions.push(import); + } + ImportType::Dynamic => { + dynamic_import_expressions.push(import); + } + }; + + let word = match ¤t.token { + Token::Word(w) => w, + _ => { + continue; + } + }; + match word { + // This is an import keyword + Keyword(keyword) if *keyword == Import => { + if is_code_ignored(&cm, current.span.lo) { + continue; + } + + if let Some(next) = lexer.next() { + // This match is pretty strict on what should follow an import, anything else is skipped + match next.token { + // This begins a module naming + 
// Ex: import { a } from 'a'; + Token::LBrace => {} + // This indicates a import function call + // Ex: import('a') + Token::LParen => { + let mut maybe_literal = None; + for current in lexer.by_ref() { + match current.token { + // If we match a string, then it might be a literal import + Token::Str { value, .. } => { + maybe_literal = Some(value.to_string()); + } + Token::RParen => { + // When the function call is closed, add the import if it exists + if let Some(maybe_literal) = maybe_literal { + add_import(maybe_literal); + continue 'outer; + } + } + // If we match anything else, continue the outer loop and skip this import + // because it is not a literal import + _ => { + continue 'outer; + } + } + } + } + // This is a import star statement + // Ex: import * from 'a'; + Token::BinOp(op) if op == BinOpToken::Mul => {} + Token::Word(word) => match word { + // This is a import type statement + // Ex: import type { } from 'a'; + Ident(i) if &i == "type" => { + if let Some(next) = lexer.next() { + // What follows a type import is pretty strict, otherwise ignore it + match next.token { + // Matches import type {} from 'a'; + Token::LBrace => {} + // Matches import type * from 'a'; + Token::BinOp(op) if op == BinOpToken::Mul => {} + // Matches import type Cat from 'a'; + Token::Word(word) if matches!(word, Ident(_)) => {} + _ => { + continue; + } + } + } + } + _ => {} + }, + // Matches: import 'a'; + Token::Str { value, .. } => { + static_import_expressions.push(value.to_string()); + continue; + } + _ => { + continue; + } + } + } + + // This is a static import because it is not a import function call + // import { } from 'a'; + for current in lexer.by_ref() { + if let Token::Str { value, .. 
} = current.token { + static_import_expressions.push(value.to_string()); + break; + } + } + } + Keyword(keyword) if *keyword == Export => { + if is_code_ignored(&cm, current.span.lo) { + continue; + } + if let Some(next) = lexer.next() { + // This match is pretty strict about what follows an export keyword + // Everything else is skipped + match next.token { + // Matches export { } from 'a'; + Token::LBrace => {} + Token::Word(word) => match word { + // Matches an export type + Ident(i) if &i == "type" => { + if let Some(next) = lexer.next() { + // What follows is pretty strict + match next.token { + // Matches export type { a } from 'a'; + Token::LBrace => {} + // Anything else after a type is a definition, not an import + // Matches export type = 'a'; + _ => { + continue; + } + } + } + } + _ => { + continue; + } + }, + // Matches export * from 'a'; + Token::BinOp(op) if op == BinOpToken::Mul => {} + _ => { + continue; + } + } + } + for current in lexer.by_ref() { + // When we find a string, it's a export + if let Token::Str { value, .. } = current.token { + static_import_expressions.push(value.to_string()); + break; + } + } + } + Ident(ident) if ident == "require" => { + if is_code_ignored(&cm, current.span.lo) { + continue; + } + let mut import = None; + for current in lexer.by_ref() { + match current.token { + // This opens the require call + Token::LParen => {} + // This could be a string literal + Token::Str { value, .. 
} => { + import = Some(value.to_string()); + } + // Matches things like require.resolve + Token::Dot => {} + Token::Word(Ident(_)) => {} + + // When the require call ends, add the require + Token::RParen => { + if let Some(import) = import { + // When all blocks are object blocks, this is a static require + // Matches things like const a = { a: require('a') }; + let static_import = blocks_stack + .iter() + .all(|block_type| matches!(block_type, BlockType::Object)); + if static_import { + static_import_expressions.push(import); + } else { + dynamic_import_expressions.push(import); + } + } + break; + } + // Anything else means this is not a require of a string literal + _ => { + break; + } + } + } + } + _ => {} + }; } } -} -fn process_file(file_path: String, callback: &mut ThreadsafeFunction) -> () { - let file = file_path.clone(); - let tsx = file_path.ends_with(".tsx") || file_path.ends_with(".jsx"); - let content = read_to_string(&file_path); - - match content { - Ok(source) => { - let cm: Lrc = Default::default(); - let fm = cm.new_source_file(FileName::Real(file_path.into()), source.clone()); - let lexer = Lexer::new( - Syntax::Typescript(TsConfig { - tsx, - ..TsConfig::default() - }), - Default::default(), - StringInput::from(&*fm), - None, + // These are errors from the lexer. 
They don't always mean something is broken + let mut errs = lexer.take_errors(); + if !errs.is_empty() { + for err in errs.iter() { + debug!( + "{}:{}:{} {}", + file_path, + cm.lookup_line(err.span_hi()).unwrap() + 1, + (err.span_lo() - cm.line_begin_pos(err.span_lo())).0, + err.kind().msg(), ); - let mut parser = Parser::new_from(lexer); - let mut visitor = ImportVisitor { file, callback }; - let module = parser.parse_module() - .map_err(|_| Error::new(ErrorKind::Other, "Failed to parse file")); + } + errs.clear(); + } - match module { - Ok(module) => { - module.visit_with(&mut visitor); - } - Err(_) => {} - } + Some(ImportResult { + file: file_path.clone(), + source_project: source_project.clone(), + static_import_expressions, + dynamic_import_expressions, + }) +} + +fn is_code_ignored(cm: &Rc, pos: BytePos) -> bool { + let line_with_dep = cm.lookup_line(pos).expect("The dep is on a line"); + + if line_with_dep == 0 { + return false; + } + + if let Some(line_before_dep) = cm.get_line(line_with_dep - 1) { + let trimmed_line = line_before_dep.trim(); + if trimmed_line == "// nx-ignore-next-line" || trimmed_line == "/* nx-ignore-next-line */" { + return true; } - Err(_) => {} } + false } #[napi] -fn find_imports( - file_paths: Vec, - // TODO: Figure out why the first arg is being passed as null. The result should be the first arg. 
- #[napi(ts_arg_type = "(obj: null, result: {file: string, importExpr: string}) => void")] - callback: JsFunction, -) -> Result<()> { - let thread_safe_callback: ThreadsafeFunction = - callback.create_threadsafe_function( - 0, - |ctx: ThreadSafeCallContext| { - let data: ImportResult = ctx.value; - Ok(vec![data]) - }, - )?; - - file_paths +fn find_imports(project_file_map: HashMap>) -> Vec { + enable_logger(); + + let files_to_process: Vec<(&String, &String)> = project_file_map + .iter() + .flat_map(|(project_name, files)| files.iter().map(move |file| (project_name, file))) + .collect(); + + files_to_process .into_par_iter() - .for_each_with(thread_safe_callback, |callback, file_path| { - process_file(file_path, callback) - }); + .filter_map(process_file) + .collect() +} +#[cfg(test)] +mod test { + use super::*; + use assert_fs::prelude::*; + use assert_fs::TempDir; + use swc_common::comments::NoopComments; + + #[test] + fn test_find_imports() { + let temp_dir = TempDir::new().unwrap(); + temp_dir + .child("test.ts") + .write_str( + r#" + // nx-ignore-next-line + import * as bar from 'ignored'; + /* nx-ignore-next-line */ + import * as bar from 'ignored-by-block-comment'; + + // TODO: comment + + // import { a } from 'commented-import'; + + const a = "import { a } from 'string-import'" + + import 'static-import'; + import a from 'static-default-import-from'; + import { a } from 'static-import-from'; + import { a } from "static-import-from-with-double-quotes"; + import type { c } from 'static-type-import-from'; + import type * as a from 'static-type-import-all'; + import type a from 'static-type-import-default'; + + const a = { + import: a['import-function'] + }; - Ok(()) + const a: Foo + import('dynamic-import-after-line-without-semicolon'); + + const a: typeof import('static-typeof-import'); + const a = import('non-literal-' + 'import'); + const a: import('static-type-import') = import('dynamic-const-import') as import('static-as-type-import'); + + const a: Gen; + 
+ const b = require('static-require'); + const b = require(`require-in-backticks`); + const b = require.resolve('static-require-dot-resolve'); + const b = require(`non-literal-${'require'}`); + + export * from 'static-export'; + export { a } from 'static-partial-export'; + export type { a } from 'static-type-export'; + export default import('dynamic-exported-import'); + export default require('static-default-export-of-a-require'); + export function a() { return 'function-export';} + export const a = 'const-export'; + export type a = 'type-export'; + module.exports = { + a: await import('dynamic-module-export-in-object'), + a: require('static-module-export-require-in-object'), + }; + exports = { + a: await import('dynamic-export-in-object'), + a: require('static-require-in-object'), + }; + + + + console.log(`${import('dynamic-import-in-string-template')}`); + console.log(`${`${import('dynamic-import-in-string-template-in-string-template')}`}`); + + function f(a: import('static-argument-type-import') = import('dynamic-default-argument-import')): import('static-return-type-import') { + require('dynamic-require'); + } + + class Animal { + a: import('static-class-property-type-import') = import('dynamic-class-property-value-import'); + a: { a: import('static-class-property-type-import-in-object') } = { a: import('dynamic-class-property-value-import-in-object') }; + eat(food: import('static-method-argument-type-import')) { + import('dynamic-method-import'); + require('dynamic-method-require'); + } + } + + const obj = { + [import('dynamic-obj-key-import')]: import('dynamic-obj-prop-import'), + [require('static-obj-key-require')]: require('static-obj-prop-require') + }; + + const obj = { + method: function(): import('static-import-in-object-method-return-type') { + import('dynamic-import-in-object-method'); + } + }; + + (a as import('static-import-as-type-cast')).a; + + const a: (a: import('static-function-param-type')) => import('static-function-return-type') = () => { + 
const a: import('static-import-in-arrow-function') = import('static-import-in-arrow-function'); + } + + (() => { + const a: import('static-import-in-iife') = import('dynamic-import-in-iife'); + })(); + + { + import('dynamic-import-in-block-closure'); + } + + console.log({ + a: import('dynamic-import-in-object-in-function-call') + }); + + const a = 3 + (await import('dynamic-import-in-binary-operation')); + + const arr = [require('static-require-in-arr')]; + + const e = require(name + 'non-literal-require'); + + const foo = import(name + 'non-literal-import2'); + + console.log(`import { c } from 'in-string-literal';`); + + "#, + ) + .unwrap(); + + temp_dir + .child("broken-file.ts") + .write_str( + r#" + impo { a } frm 'broken-import'; + import { a } from + export { } + import { a } from 'import-in-broken-file'; + "#, + ) + .unwrap(); + + let test_file_path = temp_dir.display().to_string() + "/test.ts"; + let broken_file_path = temp_dir.display().to_string() + "/broken-file.ts"; + + let results = find_imports(HashMap::from([( + String::from("a"), + vec![test_file_path.clone(), broken_file_path], + )])); + + let result = results.get(0).unwrap(); + let ast_results: ImportResult = find_imports_with_ast(test_file_path); + + assert_eq!( + result.static_import_expressions, + ast_results.static_import_expressions + ); + assert_eq!( + result.dynamic_import_expressions, + ast_results.dynamic_import_expressions + ); + let result_from_broken_file = results.get(1).unwrap(); + + assert_eq!( + result_from_broken_file.static_import_expressions, + vec![String::from("import-in-broken-file"),] + ); + } + + #[test] + fn test_find_imports_with_all_sorts_of_imports() { + let temp_dir = TempDir::new().unwrap(); + temp_dir + .child("test.ts") + .write_str( + r#" + import * as React from "react"; + import { Component } from "react"; + import { + Component + } from "react" + import { + Component + } from "react"; + + import "./app.scss"; + + function inside() { + import('./module.ts') + } + 
const a = 1; + export class App {} + "#, + ) + .unwrap(); + + let test_file_path = temp_dir.display().to_string() + "/test.ts"; + + let results = find_imports(HashMap::from([( + String::from("a"), + vec![test_file_path.clone()], + )])); + + let result = results.get(0).unwrap(); + let ast_results: ImportResult = find_imports_with_ast(test_file_path); + + assert_eq!( + result.static_import_expressions, + ast_results.static_import_expressions + ); + assert_eq!( + result.dynamic_import_expressions, + ast_results.dynamic_import_expressions + ); + } + + #[test] + fn test_find_imports_with_all_sorts_of_exports() { + let temp_dir = TempDir::new().unwrap(); + temp_dir + .child("test.ts") + .write_str( + r#" + export * from './module'; + export { + A + } from './a'; + + export { B } from './b'; + export type { B } from './b'; + + export { C as D } from './c'; + + const a = 1; + export class App {} + "#, + ) + .unwrap(); + + let test_file_path = temp_dir.display().to_string() + "/test.ts"; + + let results = find_imports(HashMap::from([( + String::from("a"), + vec![test_file_path.clone()], + )])); + + let result = results.get(0).unwrap(); + let ast_results: ImportResult = find_imports_with_ast(test_file_path); + + assert_eq!( + result.static_import_expressions, + ast_results.static_import_expressions + ); + assert_eq!( + result.dynamic_import_expressions, + ast_results.dynamic_import_expressions + ); + } + + #[test] + fn test_find_with_require_statements() { + let temp_dir = TempDir::new().unwrap(); + temp_dir + .child("test.ts") + .write_str( + r#" + require('./a'); + + require('./b'); + const c = require('./c'); + + const a = 1; + export class App {} + "#, + ) + .unwrap(); + + let test_file_path = temp_dir.display().to_string() + "/test.ts"; + + let results = find_imports(HashMap::from([( + String::from("a"), + vec![test_file_path.clone()], + )])); + + let result = results.get(0).unwrap(); + let ast_results: ImportResult = find_imports_with_ast(test_file_path); + + 
assert_eq!( + result.static_import_expressions, + ast_results.static_import_expressions + ); + assert_eq!( + result.dynamic_import_expressions, + ast_results.dynamic_import_expressions + ); + } + + #[test] + fn test_find_imports_should_ignore_lines_with_nx_ignore() { + let temp_dir = TempDir::new().unwrap(); + temp_dir + .child("test.ts") + .write_str( + r#" + + + // nx-ignore-next-line + import * as React from "react"; + + // nx-ignore-next-line + import "./app.scss"; + + // nx-ignore-next-line + import('./module.ts') + + // nx-ignore-next-line + export { B } from './b'; + + // nx-ignore-next-line + const b = require('./b'); + "#, + ) + .unwrap(); + + let test_file_path = temp_dir.display().to_string() + "/test.ts"; + + let results = find_imports(HashMap::from([(String::from("a"), vec![test_file_path])])); + + let result = results.get(0).unwrap(); + + assert!(result.static_import_expressions.is_empty()); + assert!(result.dynamic_import_expressions.is_empty()); + } + + #[test] + fn find_imports_should_find_imports_around_template_literals() { + let temp_dir = TempDir::new().unwrap(); + temp_dir + .child("test.ts") + .write_str( + r#" + + const b = `a: ${a}` + const c = await import('./c') + + const c = `a: ${a}, b: ${b}` + const d = await import('./d') + const b = unquotedLiteral.replace(/"/g, '\\"') + const c = await import('./c') + const d = require('./d') + const b = `"${unquotedLiteral.replace(/"/g, '\\"')}"` + const c = await import('./c') + const d = require('./d') + const b = `"${1 / 2} ${await import('./b')} ${await require('./c')}"`; + const a = `"${require('./a')}"` + const b = `"${await import('./b')}"` + "#, + ) + .unwrap(); + + let test_file_path = temp_dir.display().to_string() + "/test.ts"; + + let results = find_imports(HashMap::from([( + String::from("a"), + vec![test_file_path.clone()], + )])); + + let result = results.get(0).unwrap(); + let ast_results: ImportResult = find_imports_with_ast(test_file_path); + + dbg!(result); + assert_eq!( + 
result.static_import_expressions, + ast_results.static_import_expressions + ); + assert_eq!( + result.dynamic_import_expressions, + ast_results.dynamic_import_expressions + ); + } + + #[ignore = "TODO(@FrozenPandaz): Fix this once you know more about regexes in lexers"] + #[test] + fn find_imports_should_find_imports_after_regexp() { + let temp_dir = TempDir::new().unwrap(); + temp_dir + .child("test.ts") + .write_str( + r#" + + const b = /"/g; const c = await import('./c'); const d = require('./d') + "#, + ) + .unwrap(); + + let test_file_path = temp_dir.display().to_string() + "/test.ts"; + + let results = find_imports(HashMap::from([( + String::from("a"), + vec![test_file_path.clone()], + )])); + + let result = results.get(0).unwrap(); + let ast_results: ImportResult = find_imports_with_ast(test_file_path); + + dbg!(&result); + assert_eq!( + result.static_import_expressions, + ast_results.static_import_expressions + ); + assert_eq!( + result.dynamic_import_expressions, + ast_results.dynamic_import_expressions + ); + } + + // This function finds imports with the ast which verifies that the imports we find are the same as the ones typescript finds + fn find_imports_with_ast(file_path: String) -> ImportResult { + let cm = Arc::::default() + .load_file(Path::new(file_path.as_str())) + .unwrap(); + + let mut errs: Vec = vec![]; + let tsx = file_path.ends_with(".tsx") || file_path.ends_with(".jsx"); + + let module = swc_ecma_parser::parse_file_as_module( + &cm, + Syntax::Typescript(TsConfig { + tsx, + decorators: true, + dts: file_path.ends_with(".d.ts"), + no_early_errors: false, + ..TsConfig::default() + }), + EsNext, + None, + &mut errs, + ) + .unwrap(); + + let comments = NoopComments; + let deps = swc_ecma_dep_graph::analyze_dependencies(&module, &comments); + + let mut static_import_expressions = vec![]; + let mut dynamic_import_expressions = vec![]; + for dep in deps { + let line_with_dep = cm.lookup_line(dep.span.lo).expect("The dep is on a line"); + + if 
line_with_dep > 0 { + if let Some(line_before_dep) = cm.get_line(line_with_dep - 1) { + let trimmed_line = line_before_dep.trim(); + if trimmed_line == "// nx-ignore-next-line" + || trimmed_line == "/* nx-ignore-next-line */" + { + continue; + } + } + } + + if dep.is_dynamic { + dynamic_import_expressions.push(dep.specifier.to_string()); + } else { + static_import_expressions.push(dep.specifier.to_string()); + } + } + ImportResult { + source_project: String::from("source"), + file: file_path, + static_import_expressions, + dynamic_import_expressions, + } + } } diff --git a/packages/nx/src/native/workspace/get_nx_workspace_files.rs b/packages/nx/src/native/workspace/get_nx_workspace_files.rs index 68973833035add..af90b926ec34c1 100644 --- a/packages/nx/src/native/workspace/get_nx_workspace_files.rs +++ b/packages/nx/src/native/workspace/get_nx_workspace_files.rs @@ -1,6 +1,4 @@ -use itertools::Itertools; -use napi::threadsafe_function::{ErrorStrategy, ThreadsafeFunction, ThreadsafeFunctionCallMode}; -use napi::{JsFunction, JsObject, JsUnknown, Status}; +use napi::JsObject; use std::collections::{HashMap, HashSet}; use std::path::{Path, PathBuf}; @@ -13,7 +11,7 @@ use crate::native::types::FileData; use crate::native::utils::glob::build_glob_set; use crate::native::utils::path::Normalize; use crate::native::walker::nx_walker; -use crate::native::workspace::errors::{InternalWorkspaceErrors, WorkspaceErrors}; +use crate::native::workspace::errors::WorkspaceErrors; use crate::native::workspace::get_config_files::insert_config_file_into_map; use crate::native::workspace::types::FileLocation; diff --git a/packages/nx/src/plugins/js/index.ts b/packages/nx/src/plugins/js/index.ts index 7003b48e1e9531..bc2d78223af8ba 100644 --- a/packages/nx/src/plugins/js/index.ts +++ b/packages/nx/src/plugins/js/index.ts @@ -18,6 +18,7 @@ import { projectGraphCacheDirectory } from '../../utils/cache-directory'; import { readFileSync, writeFileSync } from 'fs'; import { workspaceRoot } from 
'../../utils/workspace-root'; import { ensureDirSync } from 'fs-extra'; +import { performance } from 'perf_hooks'; export const processProjectGraph: ProjectGraphProcessor = async ( graph, @@ -41,7 +42,14 @@ export const processProjectGraph: ProjectGraphProcessor = async ( } } + performance.mark('build typescript dependencies - start'); await buildExplicitDependencies(pluginConfig, context, builder); + performance.mark('build typescript dependencies - end'); + performance.measure( + 'build typescript dependencies', + 'build typescript dependencies - start', + 'build typescript dependencies - end' + ); return builder.getUpdatedProjectGraph(); }; diff --git a/packages/nx/src/plugins/js/project-graph/build-dependencies/build-dependencies.ts b/packages/nx/src/plugins/js/project-graph/build-dependencies/build-dependencies.ts index 65be83e3fa5b99..d13aa1ac1ab86f 100644 --- a/packages/nx/src/plugins/js/project-graph/build-dependencies/build-dependencies.ts +++ b/packages/nx/src/plugins/js/project-graph/build-dependencies/build-dependencies.ts @@ -23,6 +23,7 @@ export function buildExplicitDependencies( // to be able to use at least 2 workers (1 worker per CPU and // 1 CPU for the main thread) if ( + process.env.NX_SWC_IMPORT_LOCATOR === 'true' || jsPluginConfig.analyzeSourceFiles === false || totalNumOfFilesToProcess < 100 || getNumberOfWorkers() <= 2 diff --git a/packages/nx/src/plugins/js/project-graph/build-dependencies/explicit-project-dependencies.ts b/packages/nx/src/plugins/js/project-graph/build-dependencies/explicit-project-dependencies.ts index a8a36b78f49bca..0f0891a5f3a2fc 100644 --- a/packages/nx/src/plugins/js/project-graph/build-dependencies/explicit-project-dependencies.ts +++ b/packages/nx/src/plugins/js/project-graph/build-dependencies/explicit-project-dependencies.ts @@ -1,6 +1,5 @@ import { TypeScriptImportLocator } from './typescript-import-locator'; import { TargetProjectLocator } from './target-project-locator'; -import { findImports } from 
'../../../../native'; import { DependencyType, ProjectFileMap, @@ -19,11 +18,45 @@ export function buildExplicitTypeScriptDependencies( filesToProcess: ProjectFileMap ): ExplicitDependency[] { let results: ExplicitDependency[]; - if (process.env.NX_SWC_IMPORT_LOCATOR) { + if ( + process.env.NX_SWC_IMPORT_LOCATOR && + process.env.NX_SWC_IMPORT_LOCATOR !== 'false' + ) { results = buildExplicitTypeScriptDependenciesWithSwc(filesToProcess, graph); } else { results = buildExplicitTypeScriptDependenciesWithTs(filesToProcess, graph); } + if ( + process.env.NX_SWC_IMPORT_LOCATOR_DEBUG && + process.env.NX_SWC_IMPORT_LOCATOR_DEBUG !== 'false' + ) { + const tsResults = buildExplicitTypeScriptDependenciesWithTs( + filesToProcess, + graph + ); + + const set = new Set(); + + for (const dep of results) { + set.add( + `+ ${dep.sourceProjectName} -> ${dep.targetProjectName} (${dep.sourceProjectFile})` + ); + } + for (const dep of tsResults) { + set.delete( + `+ ${dep.sourceProjectName} -> ${dep.targetProjectName} (${dep.sourceProjectFile})` + ); + set.add( + `- ${dep.sourceProjectName} -> ${dep.targetProjectName} (${dep.sourceProjectFile})` + ); + } + for (const dep of results) { + set.delete( + `- ${dep.sourceProjectName} -> ${dep.targetProjectName} (${dep.sourceProjectFile})` + ); + } + set.forEach((s) => console.log(s)); + } return results; } @@ -31,8 +64,35 @@ function isRoot(graph: ProjectGraph, projectName: string): boolean { return graph.nodes[projectName]?.data?.root === '.'; } +function convertImportToDependency( + importExpr: string, + file: string, + sourceProject: string, + type: ExplicitDependency['type'], + targetProjectLocator: TargetProjectLocator +): ExplicitDependency { + const target = targetProjectLocator.findProjectWithImport(importExpr, file); + let targetProjectName; + if (target) { + targetProjectName = target; + } else { + // treat all unknowns as npm packages, they can be either + // - mistyped local import, which has to be fixed manually + // - node 
internals, which should still be tracked as a dependency + // - npm packages, which are not yet installed but should be tracked + targetProjectName = `npm:${importExpr}`; + } + + return { + sourceProjectName: sourceProject, + targetProjectName, + sourceProjectFile: file, + type, + }; +} + function buildExplicitTypeScriptDependenciesWithSwc( - filesToProcess: ProjectFileMap, + projectFileMap: ProjectFileMap, graph: ProjectGraph ): ExplicitDependency[] { const targetProjectLocator = new TargetProjectLocator( @@ -41,40 +101,63 @@ function buildExplicitTypeScriptDependenciesWithSwc( ); const res: ExplicitDependency[] = []; - Object.keys(filesToProcess).forEach((source) => { - const files = filesToProcess[source]; - findImports( - files.map((f) => f.file), - (_, { file, importExpr }) => { - const target = targetProjectLocator.findProjectWithImport( - importExpr, - file - ); - let targetProjectName; - if (target) { - if (!isRoot(graph, source) && isRoot(graph, target)) { - // TODO: These edges technically should be allowed but we need to figure out how to separate config files out from root - return; - } + const filesToProcess: Record = {}; - targetProjectName = target; - } else { - // treat all unknowns as npm packages, they can be eiher - // - mistyped local import, which has to be fixed manually - // - node internals, which should still be tracked as a dependency - // - npm packages, which are not yet installed but should be tracked - targetProjectName = `npm:${importExpr}`; - } + const moduleExtensions = ['.ts', '.js', '.tsx', '.jsx', '.mts', '.mjs']; - res.push({ - sourceProjectName: source, - targetProjectName, - sourceProjectFile: file, - type: DependencyType.static, - }); + for (const [project, fileData] of Object.entries(projectFileMap)) { + filesToProcess[project] ??= []; + for (const { file } of fileData) { + if (moduleExtensions.some((ext) => file.endsWith(ext))) { + filesToProcess[project].push(file); } - ); - }); + } + } + + const { findImports } = 
require('../../../../native'); + + const imports = findImports(filesToProcess); + + for (const { + sourceProject, + file, + staticImportExpressions, + dynamicImportExpressions, + } of imports) { + for (const importExpr of staticImportExpressions) { + const dependency = convertImportToDependency( + importExpr, + file, + sourceProject, + DependencyType.static, + targetProjectLocator + ); + // TODO: These edges technically should be allowed but we need to figure out how to separate config files out from root + if ( + isRoot(graph, dependency.sourceProjectName) || + !isRoot(graph, dependency.targetProjectName) + ) { + res.push(dependency); + } + } + for (const importExpr of dynamicImportExpressions) { + const dependency = convertImportToDependency( + importExpr, + file, + sourceProject, + DependencyType.dynamic, + targetProjectLocator + ); + // TODO: These edges technically should be allowed but we need to figure out how to separate config files out from root + if ( + isRoot(graph, dependency.sourceProjectName) || + !isRoot(graph, dependency.targetProjectName) + ) { + res.push(dependency); + } + } + } + return res; }