From db64569f5ee999dee34c6faab729f47fdcb778f1 Mon Sep 17 00:00:00 2001 From: Albert Liu Date: Mon, 1 May 2023 14:07:05 -0700 Subject: [PATCH 01/33] did some more --- pages/index.tsx | 73 +++++++++++++++++++++++++++++++------------- pages/tci.module.css | 1 + 2 files changed, 52 insertions(+), 22 deletions(-) diff --git a/pages/index.tsx b/pages/index.tsx index 27019e86..5a85355e 100644 --- a/pages/index.tsx +++ b/pages/index.tsx @@ -6,8 +6,7 @@ import React from "react"; import { useCompilerWorker } from "@/components/hooks"; import { CompileResult, CompilerOutput } from "@/components/compiler.schema"; -const INITIAL_TEXT = ` -int main() { +const INITIAL_TEXT = `int main() { return 0; } `; @@ -93,30 +92,60 @@ export function App() { gap: "10px", }} > - {result?.lexer && ( -
+
+

Lexed Tokens

-
-                {JSON.stringify(result.lexer, undefined, 2)}
-              
+ {result?.lexer && ( +
+                  {JSON.stringify(result.lexer, undefined, 2)}
+                
+ )}
- )} - {result?.parsed_ast && ( -
+

Parsed AST

-
-                {JSON.stringify(
-                  result.parsed_ast.map((obj) => ({
-                    ...obj,
-                    kind: `${obj.kind.kind}${
-                      obj.kind.data ? `,${obj.kind.data}` : ""
-                    }`,
-                  })),
-                  undefined,
-                  2
-                )}
-              
+ + {result?.parsed_ast && ( +
+ {result.parsed_ast.map((obj, index) => { + const data = obj.kind.data + ? `,${JSON.stringify(obj.kind.data)}` + : ""; + return ( +
+                        kind: {`${obj.kind.kind}${data}`}
+                        {"\n"}
+                        parent: {obj.parent}
+                      
+ ); + })} +
+ )} +
+
+ + {result?.error && ( +
+

Error

+
{JSON.stringify(result.error)}
)}
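
The `parsed_ast` list rendered above is a flat, post-order array of nodes: each node stores the index of its parent, and a root node points at itself. Below is a minimal Rust sketch of how that encoding recovers the tree, using a simplified stand-in `Node` type rather than the compiler's real AST types; the same parent-link walk appears in `display_tree` in compiler/src/ast.rs later in this series.

// Simplified stand-in for an AST node; a root is its own parent.
struct Node {
    post_order: u32,
    parent: u32,
}

// Collect each node's children by following the parent links.
fn children_of(nodes: &[Node]) -> Vec<Vec<usize>> {
    let mut children = vec![Vec::new(); nodes.len()];
    for node in nodes {
        if node.post_order != node.parent {
            children[node.parent as usize].push(node.post_order as usize);
        }
    }
    children
}

fn main() {
    // Post-order: children appear before their parent (node 2).
    let nodes = [
        Node { post_order: 0, parent: 2 },
        Node { post_order: 1, parent: 2 },
        Node { post_order: 2, parent: 2 },
    ];
    assert_eq!(children_of(&nodes)[2], vec![0, 1]);
}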
diff --git a/pages/tci.module.css b/pages/tci.module.css index 46501ee2..0f9f70fa 100644 --- a/pages/tci.module.css +++ b/pages/tci.module.css @@ -40,6 +40,7 @@ position: absolute; top: 0; right: 2rem; + z-index: 3; } .scrollBox .text { From 9fd6d3fe680a234c6e891b0307bf9f684f3badee Mon Sep 17 00:00:00 2001 From: Albert Liu Date: Mon, 1 May 2023 14:54:10 -0700 Subject: [PATCH 02/33] print AST --- compiler/src/ast.rs | 41 +++++++++++++++++++++++++++++++++++++++++ compiler/src/lib.rs | 2 ++ 2 files changed, 43 insertions(+) diff --git a/compiler/src/ast.rs b/compiler/src/ast.rs index 808f5f74..e04df6c6 100644 --- a/compiler/src/ast.rs +++ b/compiler/src/ast.rs @@ -213,3 +213,44 @@ pub struct AstDeclaration; /// Children: AstSpecifier for each specifier, san AstDeclarator, and all the statements associated with the function #[derive(Debug, Clone, Copy, Serialize, Deserialize)] pub struct AstFunctionDefinition; + +pub fn display_tree(ast: &AstNodeVec) -> String { + let mut children = Vec::>::with_capacity(ast.len()); + children.resize_with(ast.len(), || Vec::new()); + + let mut roots = Vec::new(); + + for node in ast.as_slice().into_iter() { + if *node.post_order != *node.parent { + children[*node.parent as usize].push(*node.post_order as usize); + } else { + roots.push(*node.post_order); + } + } + + roots.reverse(); + + let mut parent_stack = Vec::with_capacity(roots.len()); + for root in roots.iter().rev() { + parent_stack.push((0u32, *root as usize)); + } + + let mut out = String::new(); + while let Some((depth, node_id)) = parent_stack.pop() { + if depth > 0 { + for _ in 0..(depth - 1) { + out += "| "; + } + + out += "└ "; + } + + out += &format!("{:?}\n", ast.as_slice().index(node_id).kind); + + for id in children[node_id].iter().rev() { + parent_stack.push((depth + 1, *id)); + } + } + + return out; +} diff --git a/compiler/src/lib.rs b/compiler/src/lib.rs index de51a7cb..821265de 100644 --- a/compiler/src/lib.rs +++ b/compiler/src/lib.rs @@ -119,6 +119,8 @@ pub fn run_test_code(test_source: &str) -> PipelineOutput { }); } + println!("{}", ast::display_tree(&parsed_ast)); + return PipelineOutput { source, lexer: lexer_res.tokens.kind, From e91213dc5bbccadabd01efcef275ccbbba9c8e3e Mon Sep 17 00:00:00 2001 From: Albert Liu Date: Mon, 1 May 2023 15:01:14 -0700 Subject: [PATCH 03/33] meh --- compiler/src/ast.rs | 29 ++++++++++++++++++++++++++--- 1 file changed, 26 insertions(+), 3 deletions(-) diff --git a/compiler/src/ast.rs b/compiler/src/ast.rs index e04df6c6..22e22302 100644 --- a/compiler/src/ast.rs +++ b/compiler/src/ast.rs @@ -101,7 +101,11 @@ pub enum UnaryOp { } /// Handles struct and union declarations: +/// +/// ```text /// struct a { int b; } +/// ``` +/// /// In the above, it would have children for each field /// declaration, and a child for the identifier as well. 
#[derive(Debug, Clone, Copy, Serialize, Deserialize)] @@ -200,20 +204,39 @@ pub enum AstSpecifier { } /// A typical declaration; this is a stand-in for -/// int *i[1] = {NULL}; or something similar +/// `int *i[1] = {NULL};` or something similar /// /// Children: AstSpecifier for each specifier, AstStructDeclaration if necessary, an AstInitDeclarator for each declared variable #[derive(Debug, Clone, Copy, Serialize, Deserialize)] pub struct AstDeclaration; -/// A typical declaration; this is a stand-in for -/// int *i[1] = {NULL}; or something similar +/// A function definition /// /// Data: DeclarationSpecifiers /// Children: AstSpecifier for each specifier, san AstDeclarator, and all the statements associated with the function #[derive(Debug, Clone, Copy, Serialize, Deserialize)] pub struct AstFunctionDefinition; +/// Prints the tree in a text format, so that it's a lil easier to read. +/// Output right now looks like this: +/// +/// ```text +/// FunctionDefinition(AstFunctionDefinition) +/// └ Specifier(Int) +/// └ Declarator(Ident) +/// | └ DerivedDeclarator(Function) +/// | | └ Declaration(AstDeclaration) +/// | | | └ Specifier(Int) +/// | | | └ Declarator(Ident) +/// | | └ Declaration(AstDeclaration) +/// | | | └ Specifier(Char) +/// | | | └ Declarator(Ident) +/// | | | | └ DerivedDeclarator(Pointer) +/// | | | | └ DerivedDeclarator(Pointer) +/// └ Statement(Block) +/// | └ Statement(Ret) +/// | | └ Expr(StringLit) +/// ``` pub fn display_tree(ast: &AstNodeVec) -> String { let mut children = Vec::>::with_capacity(ast.len()); children.resize_with(ast.len(), || Vec::new()); From 9f9d746a581c09ec868f8e87c2a1a0b96e41e2ef Mon Sep 17 00:00:00 2001 From: Albert Liu Date: Mon, 1 May 2023 15:01:37 -0700 Subject: [PATCH 04/33] meh --- compiler/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/src/lib.rs b/compiler/src/lib.rs index 821265de..2f4b4fe6 100644 --- a/compiler/src/lib.rs +++ b/compiler/src/lib.rs @@ -119,7 +119,7 @@ pub fn run_test_code(test_source: &str) -> PipelineOutput { }); } - println!("{}", ast::display_tree(&parsed_ast)); + // println!("{}", ast::display_tree(&parsed_ast)); return PipelineOutput { source, From e03e36a353904f2e78b39031269b7447f6585401 Mon Sep 17 00:00:00 2001 From: Albert Liu Date: Mon, 1 May 2023 16:24:59 -0700 Subject: [PATCH 05/33] me --- package.json | 4 ++-- tci-web/.gitignore | 1 + tci-web/Cargo.toml | 4 ++++ 3 files changed, 7 insertions(+), 2 deletions(-) create mode 100644 tci-web/.gitignore diff --git a/package.json b/package.json index 27879985..33247914 100644 --- a/package.json +++ b/package.json @@ -8,8 +8,8 @@ "private": false, "scripts": { "start": "next start", - "build-wasm": "wasm-pack build tci-web && yarn install --check-files", - "build": "next build", + "link-wasm": "cd tci-web/pkg && yarn link && cd ../.. 
&& yarn link tci-web", + "build-wasm": "wasm-pack build --debug tci-web", "deploy": "git push origin main:production", "dev": "next", "lint": "next lint" diff --git a/tci-web/.gitignore b/tci-web/.gitignore new file mode 100644 index 00000000..ee11c510 --- /dev/null +++ b/tci-web/.gitignore @@ -0,0 +1 @@ +/LICENSE \ No newline at end of file diff --git a/tci-web/Cargo.toml b/tci-web/Cargo.toml index 583ed1aa..f9865aef 100644 --- a/tci-web/Cargo.toml +++ b/tci-web/Cargo.toml @@ -1,7 +1,11 @@ [package] name = "tci-web" +description = "TCI compiler server for the web" +repository = "https://github.com/A1Liu/tci" version = "0.1.0" edition = "2021" +license-file = "../LICENSE" +readme = "../README.md" [lib] crate-type = ["cdylib"] From a28295f9e372ecfa1d5fb44f4fb09d9af7d3738e Mon Sep 17 00:00:00 2001 From: Albert Liu Date: Mon, 1 May 2023 16:34:51 -0700 Subject: [PATCH 06/33] meh --- tci-web/.gitignore | 1 - tci-web/Cargo.toml | 6 ++++-- 2 files changed, 4 insertions(+), 3 deletions(-) delete mode 100644 tci-web/.gitignore diff --git a/tci-web/.gitignore b/tci-web/.gitignore deleted file mode 100644 index ee11c510..00000000 --- a/tci-web/.gitignore +++ /dev/null @@ -1 +0,0 @@ -/LICENSE \ No newline at end of file diff --git a/tci-web/Cargo.toml b/tci-web/Cargo.toml index f9865aef..efb904b1 100644 --- a/tci-web/Cargo.toml +++ b/tci-web/Cargo.toml @@ -4,8 +4,10 @@ description = "TCI compiler server for the web" repository = "https://github.com/A1Liu/tci" version = "0.1.0" edition = "2021" -license-file = "../LICENSE" -readme = "../README.md" + +# Neither of these seem to be processed properly by wasm-pack, so I just disabled them. Whatever +# license-file = "../LICENSE" +# readme = "../README.md" [lib] crate-type = ["cdylib"] From b8ee0f648d5296618f5c35b416f67223d55b6fd7 Mon Sep 17 00:00:00 2001 From: Albert Liu Date: Mon, 1 May 2023 16:35:54 -0700 Subject: [PATCH 07/33] meh --- Cargo.toml | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 01fe7e52..b898bb33 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,12 +1,9 @@ [workspace] -members = [ - "compiler", - "tci-web" -] +members = ["compiler", "tci-web"] [profile.dev] -opt-level = 0 +opt-level = 0 [profile.release] opt-level = "z" From 158d5eaf91b88756cda1c8b70e88b9e59ce2c7fb Mon Sep 17 00:00:00 2001 From: Albert Liu Date: Mon, 1 May 2023 16:36:35 -0700 Subject: [PATCH 08/33] meh --- compiler/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/Cargo.toml b/compiler/Cargo.toml index f44e7db8..c103b6b0 100644 --- a/compiler/Cargo.toml +++ b/compiler/Cargo.toml @@ -3,7 +3,7 @@ name = "compiler" version = "0.1.0" authors = ["Albert Liu "] edition = "2021" -description = "Teaching C Interpreter" +description = "C compiler for students" license = "MIT" [dependencies] From 57e3f3b7402cf99b554e1a05adcfb7d376aa01e9 Mon Sep 17 00:00:00 2001 From: Albert Liu Date: Tue, 2 May 2023 14:01:38 -0700 Subject: [PATCH 09/33] meh --- compiler/src/ast.rs | 25 ++++++------- compiler/src/lib.rs | 1 + compiler/src/pass/mod.rs | 80 ++++++++++++++++++++++++++++++++++++++++ pages/index.tsx | 2 +- 4 files changed, 93 insertions(+), 15 deletions(-) create mode 100644 compiler/src/pass/mod.rs diff --git a/compiler/src/ast.rs b/compiler/src/ast.rs index 22e22302..0b5ef9c0 100644 --- a/compiler/src/ast.rs +++ b/compiler/src/ast.rs @@ -21,7 +21,7 @@ pub struct AstNode { } macro_attr! 
{ -#[derive(Serialize, Deserialize, Debug, Clone, Copy, EnumFromInner!)] +#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, EnumFromInner!)] #[serde(tag = "kind", content = "data")] pub enum AstNodeKind { Expr(AstExpr), @@ -34,10 +34,7 @@ pub enum AstNodeKind { } } -#[derive(Debug, Clone, Copy, Serialize, Deserialize)] -pub struct AstEof; - -#[derive(Debug, Clone, Copy, Serialize, Deserialize)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Serialize, Deserialize)] pub enum AstExpr { IntLit, // data: i32 LongLit, // data: i64 @@ -62,7 +59,7 @@ pub enum AstExpr { BinOpAssign(BinOp), // children: expression being assigned to, expression being assigned } -#[derive(Debug, Clone, PartialEq, Hash, Eq, Copy, Serialize, Deserialize)] +#[derive(Debug, Clone, Copy, PartialEq, Hash, Eq, PartialOrd, Serialize, Deserialize)] pub enum BinOp { Add, Sub, @@ -87,7 +84,7 @@ pub enum BinOp { Comma, } -#[derive(Debug, Clone, PartialEq, Hash, Eq, Copy, Serialize, Deserialize)] +#[derive(Debug, Clone, PartialEq, Hash, Eq, Copy, PartialOrd, Serialize, Deserialize)] pub enum UnaryOp { Neg, BoolNot, @@ -108,13 +105,13 @@ pub enum UnaryOp { /// /// In the above, it would have children for each field /// declaration, and a child for the identifier as well. -#[derive(Debug, Clone, Copy, Serialize, Deserialize)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Serialize, Deserialize)] pub enum StructDeclaration { Struct, Union, } -#[derive(Debug, Clone, Copy, Serialize, Deserialize)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Serialize, Deserialize)] pub enum AstStatement { Labeled, // data: label ; children: statement that is being labelled CaseLabeled, // children: case value expression, statement that is being labelled @@ -137,7 +134,7 @@ pub enum AstStatement { /// `int *const a`, or the `[3]` part of `int b[3]` /// /// Children: AstSpecifer for each type qualifier -#[derive(Debug, Clone, Copy, Serialize, Deserialize)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Serialize, Deserialize)] pub enum AstDerivedDeclarator { Pointer = 0, @@ -162,7 +159,7 @@ pub enum AstDerivedDeclarator { } /// children: a AstDerivedDeclarator for each derived declarator -#[derive(Debug, Clone, Copy, Serialize, Deserialize)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Serialize, Deserialize)] pub enum AstDeclarator { Abstract, /// data: Symbol @@ -171,7 +168,7 @@ pub enum AstDeclarator { NestedWithChild, } -#[derive(Debug, Clone, Copy, Serialize, Deserialize)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Serialize, Deserialize)] pub enum AstSpecifier { Extern, Static, @@ -207,14 +204,14 @@ pub enum AstSpecifier { /// `int *i[1] = {NULL};` or something similar /// /// Children: AstSpecifier for each specifier, AstStructDeclaration if necessary, an AstInitDeclarator for each declared variable -#[derive(Debug, Clone, Copy, Serialize, Deserialize)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Serialize, Deserialize)] pub struct AstDeclaration; /// A function definition /// /// Data: DeclarationSpecifiers /// Children: AstSpecifier for each specifier, san AstDeclarator, and all the statements associated with the function -#[derive(Debug, Clone, Copy, Serialize, Deserialize)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Serialize, Deserialize)] pub struct AstFunctionDefinition; /// Prints the tree in a text format, so that it's a lil easier to read. 
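
The `PartialEq`, `Eq`, `Hash`, and `PartialOrd` derives added above are what allow the pass module introduced below to use `AstNodeKind` as a `HashMap` key. A minimal sketch of the idea with a simplified stand-in enum (not the real `AstNodeKind`): once the nodes are sorted by kind, each kind can map to a contiguous range of node indices, which is the shape `ByKindAst` settles on later in this series.

use std::collections::HashMap;
use std::ops::Range;

// Stand-in enum; Eq + Hash are what make it usable as a HashMap key.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
enum Kind {
    Expr,
    Statement,
}

fn main() {
    let mut by_kind: HashMap<Kind, Range<usize>> = HashMap::new();
    by_kind.insert(Kind::Expr, 0..3);
    by_kind.insert(Kind::Statement, 3..5);
    assert_eq!(by_kind[&Kind::Expr], 0..3);
}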
diff --git a/compiler/src/lib.rs b/compiler/src/lib.rs index 2f4b4fe6..17c6a37e 100644 --- a/compiler/src/lib.rs +++ b/compiler/src/lib.rs @@ -23,6 +23,7 @@ pub mod filedb; pub mod lexer; pub mod macros; pub mod parser; +pub mod pass; #[cfg(test)] mod tests; diff --git a/compiler/src/pass/mod.rs b/compiler/src/pass/mod.rs new file mode 100644 index 00000000..c3826003 --- /dev/null +++ b/compiler/src/pass/mod.rs @@ -0,0 +1,80 @@ +use crate::api::*; + +pub struct ByKindAst<'a> { + pub by_kind: HashMap, + pub by_kind_in_order: Vec<(AstNodeKind, ast::AstNodeSliceMut<'a>)>, +} + +pub fn sort_by_kind(ast: &mut AstNodeVec) -> ByKindAst { + let mut indices = Vec::with_capacity(ast.len()); + for _ in 0..ast.len() { + indices.push(u32::MAX); + } + + // TODO: Sort by kind,post_order + + for (index, &order) in ast.post_order.iter().enumerate() { + indices[order as usize] = index as u32; + } + + // Rebuild parent indices + for parent in &mut ast.parent { + *parent = indices[*parent as usize]; + } + + let mut by_kind = HashMap::new(); + let mut by_kind_in_order = Vec::new(); + + let mut prev = *ast.index(0).kind; + let mut begin: usize = 0; + let mut index: usize = 0; + while index < ast.len() { + let node = ast.index(index); + let kind = *node.kind; + if kind == prev { + index += 1; + continue; + } + + let by_kind_in_order_index = by_kind_in_order.len(); + let prev_slice = ast.slice_mut(begin..index); + + if let Some(_) = by_kind.insert(prev, by_kind_in_order_index) { + panic!("kind is somehow not sorted"); + } + + // This *should* be safe, because we're really just trying to do a series of split_at_mut + // but for the SOA vec, which is not possible without this. + by_kind_in_order.push((prev, unsafe { core::mem::transmute(prev_slice) })); + + begin = index; + prev = kind; + index += 1; + } + + return ByKindAst { + by_kind, + by_kind_in_order, + }; +} + +pub fn sort_to_postorder(ast: &mut AstNodeVec) { + let mut indices = Vec::with_capacity(ast.len()); + + for &order in &ast.post_order { + indices.push(order); + } + + // TODO: Sort by post_order + + // Rebuild parent indices + for parent in &mut ast.parent { + *parent = indices[*parent as usize]; + } +} + +pub fn validate_ast(ast: &mut AstNodeVec) -> Result<(), Error> { + // sort + + let mut id_translate = return Ok(()); +} diff --git a/pages/index.tsx b/pages/index.tsx index 5a85355e..60e2d6c4 100644 --- a/pages/index.tsx +++ b/pages/index.tsx @@ -6,7 +6,7 @@ import React from "react"; import { useCompilerWorker } from "@/components/hooks"; import { CompileResult, CompilerOutput } from "@/components/compiler.schema"; -const INITIAL_TEXT = `int main() { +const INITIAL_TEXT = `int main(int argc, char** argv) { return 0; } `; From 01eb67b34dfa309e9b5f742788748f957025248e Mon Sep 17 00:00:00 2001 From: Albert Liu Date: Tue, 2 May 2023 14:06:25 -0700 Subject: [PATCH 10/33] meh --- compiler/src/pass/mod.rs | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/compiler/src/pass/mod.rs b/compiler/src/pass/mod.rs index c3826003..ebc80f5a 100644 --- a/compiler/src/pass/mod.rs +++ b/compiler/src/pass/mod.rs @@ -44,7 +44,12 @@ pub fn sort_by_kind(ast: &mut AstNodeVec) -> ByKindAst { } // This *should* be safe, because we're really just trying to do a series of split_at_mut - // but for the SOA vec, which is not possible without this. + // but for the SOA vec. 
This doesn't work because the lifetime on the output of + // AstNodeSliceMut.split_at_mut is the mut borrow of AstNodeSliceMut, instead of taking + // the AstNodeSliceMut by ownership and inheriting its lifetime. Thus, the result of + // split_at_mut uses the local borrow's lifetime instead of the lifetime of the original + // slice, and you can't really do more than one split at mut. + // Seems this will be fixed soon though. by_kind_in_order.push((prev, unsafe { core::mem::transmute(prev_slice) })); begin = index; From 57c9953e377275bbe8f8ca12d7a20b9923ff190c Mon Sep 17 00:00:00 2001 From: Albert Liu Date: Tue, 2 May 2023 17:03:59 -0700 Subject: [PATCH 11/33] meh --- compiler/src/pass/mod.rs | 17 ++++++++++++++--- pages/index.tsx | 7 +++++++ 2 files changed, 21 insertions(+), 3 deletions(-) diff --git a/compiler/src/pass/mod.rs b/compiler/src/pass/mod.rs index ebc80f5a..8b8616c2 100644 --- a/compiler/src/pass/mod.rs +++ b/compiler/src/pass/mod.rs @@ -78,8 +78,19 @@ pub fn sort_to_postorder(ast: &mut AstNodeVec) { } } -pub fn validate_ast(ast: &mut AstNodeVec) -> Result<(), Error> { - // sort +// validate declarations -> produce declaration types +// Function declarations need to have proper derived declarator and etc +// Declaration specifiers need to make sense for the kind of declaration theyre on +pub fn validate_declaration_types(ast: &mut ByKindAst) -> Result<(), Error> { + return Ok(()); +} - let mut id_translate = return Ok(()); +// validate declarators relative to their scopes +// -> produce scopes +// validate identifiers +// -> produce types for the identifiers +// -> track which identifiers are pointer-referenced, and when each declaration is last used +// produce global symbols? +pub fn validate_scopes(ast: &mut ByKindAst) -> Result<(), Error> { + return Ok(()); } diff --git a/pages/index.tsx b/pages/index.tsx index 60e2d6c4..bcf8569e 100644 --- a/pages/index.tsx +++ b/pages/index.tsx @@ -78,6 +78,13 @@ export function App() { defaultValue={INITIAL_TEXT} onMount={(editor, monaco) => { editorRef.current = editor; + monaco.editor.addKeybindingRules([ + { + keybinding: monaco.KeyMod.CtrlCmd | monaco.KeyCode.KeyS, + // TODO: make this do something useful + command: "editor.action.formatDocument", + }, + ]); compile(); }} /> From ead94545084392d4b6901af1fe76a29a56e64854 Mon Sep 17 00:00:00 2001 From: Albert Liu Date: Tue, 2 May 2023 18:27:58 -0700 Subject: [PATCH 12/33] meh --- compiler/src/lexer.rs | 35 ++++++++++++++++++- compiler/src/macros.rs | 1 + components/compiler.schema.ts | 24 +++++++------ pages/index.tsx | 13 +++---- tci-web/src/lib.rs | 64 ++++++++++++++++++++++------------- 5 files changed, 96 insertions(+), 41 deletions(-) diff --git a/compiler/src/lexer.rs b/compiler/src/lexer.rs index 5be88f2d..da6e538a 100644 --- a/compiler/src/lexer.rs +++ b/compiler/src/lexer.rs @@ -9,8 +9,9 @@ pub enum TokenKind { StringLit, CharLit, - EOF, + Comment, Newline, + EOF, Hashtag, Dot, @@ -386,6 +387,38 @@ fn lex_tok_from_bytes<'a>(data: &'a [u8]) -> Result { (b'-', _) => (0, TokenKind::Dash), (b'/', Some(b'=')) => (1, TokenKind::SlashEq), + (b'/', Some(b'/')) => { + // we've consumed 1 extra character already from the second '/' + // ALSO though, index is already pushed forwards by one + // So this code leaves our index right before the newline we just found + let mut i = 1; + while let Some(&b) = data.get(index + i) { + // Consume until the newline + match b { + b'\n' | b'\r' => break, + _ => i += 1, + } + } + (i, TokenKind::Comment) + } + (b'/', Some(b'*')) => { + let mut i = 1; 
+ let mut prev = 0u8; + loop { + let b = *data.get(index + i).ok_or(Error::new(ErrorKind::Todo( + "EOF while inside a block comment", + )))?; + i += 1; + + // Consume until we hit the suffix + match (prev, b) { + (b'*', b'/') => break, + _ => prev = b, + } + } + + (i, TokenKind::Comment) + } (b'/', _) => (0, TokenKind::Slash), (b'*', Some(b'=')) => (1, TokenKind::StarEq), diff --git a/compiler/src/macros.rs b/compiler/src/macros.rs index 36c7005b..b5a3b21b 100644 --- a/compiler/src/macros.rs +++ b/compiler/src/macros.rs @@ -20,6 +20,7 @@ pub fn expand_macros(tokens: TokenSlice) -> TokenVec { for tok in &tokens { match *tok.kind { TokenKind::Newline => continue, + TokenKind::Comment => continue, _ => {} } diff --git a/components/compiler.schema.ts b/components/compiler.schema.ts index 3ed65695..0d352042 100644 --- a/components/compiler.schema.ts +++ b/components/compiler.schema.ts @@ -7,17 +7,19 @@ export const CompileCommand = z.object({ export type CompileResult = z.infer; export const CompileResult = z.object({ - source: z.string(), - lexer: z.array(z.string()), - parsed_ast: z.array( - z.object({ - parent: z.number(), - kind: z.object({ - kind: z.string(), - data: z.any().optional(), - }), - }) - ), + lexer: z.array(z.string()).nullish(), + parsed_ast: z + .array( + z.object({ + parent: z.number(), + kind: z.object({ + kind: z.string(), + data: z.any().optional(), + }), + }) + ) + .nullish(), + error: z.any().nullish(), }); export type CompilerOutput = z.infer; diff --git a/pages/index.tsx b/pages/index.tsx index bcf8569e..ea95ddd7 100644 --- a/pages/index.tsx +++ b/pages/index.tsx @@ -4,16 +4,16 @@ import Editor, { Monaco } from "@monaco-editor/react"; import type monaco from "monaco-editor"; import React from "react"; import { useCompilerWorker } from "@/components/hooks"; -import { CompileResult, CompilerOutput } from "@/components/compiler.schema"; +import { CompileResult } from "@/components/compiler.schema"; -const INITIAL_TEXT = `int main(int argc, char** argv) { +const INITIAL_TEXT = `// Write C code here +int main(int argc, char** argv) { return 0; } `; export function App() { - const [result, setResult] = - React.useState>(); + const [result, setResult] = React.useState(); const worker = useCompilerWorker((res) => { switch (res.kind) { @@ -25,10 +25,11 @@ export function App() { console.log("message:", res.message); break; case "error": - setResult({ error: res.error }); + console.log("Error returned"); console.error(res.error); break; case "result": + console.log("Compiled"); setResult(res.result); break; } @@ -74,7 +75,7 @@ export function App() {
{ editorRef.current = editor; diff --git a/tci-web/src/lib.rs b/tci-web/src/lib.rs index 8a296d6e..fc432bfc 100644 --- a/tci-web/src/lib.rs +++ b/tci-web/src/lib.rs @@ -7,10 +7,10 @@ static GLOBAL: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT; #[derive(Serialize)] pub struct PipelineOutput { - source: String, - lexer: Vec, - macro_expansion: Vec, - parsed_ast: Vec, + lexer: Option>, + macro_expansion: Option>, + parsed_ast: Option>, + error: Option, } #[wasm_bindgen] @@ -24,29 +24,47 @@ pub fn compile(source: String) -> Result { let file_id = files.add_file("main.c".to_string(), source_string)?; let file = &files.files[file_id as usize]; - let lexer_res = lex(&files, file).expect("Expected lex to succeed"); + let mut output = PipelineOutput { + lexer: None, + macro_expansion: None, + parsed_ast: None, + error: None, + }; - let macro_expansion_res = expand_macros(lexer_res.tokens.as_slice()); + 'done: { + let lexer_res = match lex(&files, file) { + Ok(l) => l, + Err(e) => { + output.error = Some(format!("lex error: {:?}", e)); + break 'done; + } + }; - let parsed_ast = parse(¯o_expansion_res).map_err(|_e| "parsing failed")?; - let mut simple_ast = Vec::with_capacity(parsed_ast.len()); - for node in parsed_ast.as_slice() { - simple_ast.push(compiler::SimpleAstNode { - kind: *node.kind, - parent: *node.parent, - post_order: *node.post_order, - height: *node.height, - }); - } + output.lexer = Some(lexer_res.tokens.kind.clone()); - let out = PipelineOutput { - source, - lexer: lexer_res.tokens.kind, - macro_expansion: macro_expansion_res.kind, - parsed_ast: simple_ast, - }; + let macro_expansion_res = expand_macros(lexer_res.tokens.as_slice()); + + let parsed_ast = match parse(¯o_expansion_res) { + Ok(l) => l, + Err(e) => { + output.error = Some(format!("parse error: {:?}", e)); + break 'done; + } + }; + + let mut simple_ast = Vec::with_capacity(parsed_ast.len()); + for node in parsed_ast.as_slice() { + simple_ast.push(compiler::SimpleAstNode { + kind: *node.kind, + parent: *node.parent, + post_order: *node.post_order, + height: *node.height, + }); + } + output.parsed_ast = Some(simple_ast); + } - let out = serde_json::to_string(&out).map_err(|e| e.to_string())?; + let out = serde_json::to_string(&output).map_err(|e| e.to_string())?; return Ok(out); } From b7bcb7050e3c46d875163335f36eacc9cbf8e357 Mon Sep 17 00:00:00 2001 From: Albert Liu Date: Tue, 2 May 2023 18:30:25 -0700 Subject: [PATCH 13/33] meh --- components/compiler.schema.ts | 1 + tci-web/src/lib.rs | 1 + 2 files changed, 2 insertions(+) diff --git a/components/compiler.schema.ts b/components/compiler.schema.ts index 0d352042..a3a947f8 100644 --- a/components/compiler.schema.ts +++ b/components/compiler.schema.ts @@ -8,6 +8,7 @@ export const CompileCommand = z.object({ export type CompileResult = z.infer; export const CompileResult = z.object({ lexer: z.array(z.string()).nullish(), + macro_expansion: z.array(z.string()).nullish(), parsed_ast: z .array( z.object({ diff --git a/tci-web/src/lib.rs b/tci-web/src/lib.rs index fc432bfc..59c3b464 100644 --- a/tci-web/src/lib.rs +++ b/tci-web/src/lib.rs @@ -43,6 +43,7 @@ pub fn compile(source: String) -> Result { output.lexer = Some(lexer_res.tokens.kind.clone()); let macro_expansion_res = expand_macros(lexer_res.tokens.as_slice()); + output.macro_expansion = Some(macro_expansion_res.kind.clone()); let parsed_ast = match parse(¯o_expansion_res) { Ok(l) => l, From b224a45197310599cfc2c444c21108d04e2b006b Mon Sep 17 00:00:00 2001 From: Albert Liu Date: Tue, 2 May 2023 18:34:35 -0700 Subject: 
[PATCH 14/33] meh --- compiler/src/pass/mod.rs | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/compiler/src/pass/mod.rs b/compiler/src/pass/mod.rs index 8b8616c2..643fa450 100644 --- a/compiler/src/pass/mod.rs +++ b/compiler/src/pass/mod.rs @@ -1,8 +1,11 @@ +use core::ops::Range; + use crate::api::*; pub struct ByKindAst<'a> { - pub by_kind: HashMap, - pub by_kind_in_order: Vec<(AstNodeKind, ast::AstNodeSliceMut<'a>)>, + pub ast: &'a mut AstNodeVec, + pub by_kind: HashMap>, + pub by_kind_in_order: Vec<(AstNodeKind, Range)>, } pub fn sort_by_kind(ast: &mut AstNodeVec) -> ByKindAst { @@ -36,10 +39,7 @@ pub fn sort_by_kind(ast: &mut AstNodeVec) -> ByKindAst { continue; } - let by_kind_in_order_index = by_kind_in_order.len(); - let prev_slice = ast.slice_mut(begin..index); - - if let Some(_) = by_kind.insert(prev, by_kind_in_order_index) { + if let Some(_) = by_kind.insert(prev, begin..index) { panic!("kind is somehow not sorted"); } @@ -50,7 +50,7 @@ pub fn sort_by_kind(ast: &mut AstNodeVec) -> ByKindAst { // split_at_mut uses the local borrow's lifetime instead of the lifetime of the original // slice, and you can't really do more than one split at mut. // Seems this will be fixed soon though. - by_kind_in_order.push((prev, unsafe { core::mem::transmute(prev_slice) })); + by_kind_in_order.push((prev, begin..index)); begin = index; prev = kind; @@ -58,6 +58,7 @@ pub fn sort_by_kind(ast: &mut AstNodeVec) -> ByKindAst { } return ByKindAst { + ast, by_kind, by_kind_in_order, }; From e2f9249007a20bd19f4632f479bb22f060416874 Mon Sep 17 00:00:00 2001 From: Albert Liu Date: Tue, 2 May 2023 18:38:49 -0700 Subject: [PATCH 15/33] meh --- compiler/src/pass/mod.rs | 7 ------- 1 file changed, 7 deletions(-) diff --git a/compiler/src/pass/mod.rs b/compiler/src/pass/mod.rs index 643fa450..88bf91b2 100644 --- a/compiler/src/pass/mod.rs +++ b/compiler/src/pass/mod.rs @@ -43,13 +43,6 @@ pub fn sort_by_kind(ast: &mut AstNodeVec) -> ByKindAst { panic!("kind is somehow not sorted"); } - // This *should* be safe, because we're really just trying to do a series of split_at_mut - // but for the SOA vec. This doesn't work because the lifetime on the output of - // AstNodeSliceMut.split_at_mut is the mut borrow of AstNodeSliceMut, instead of taking - // the AstNodeSliceMut by ownership and inheriting its lifetime. Thus, the result of - // split_at_mut uses the local borrow's lifetime instead of the lifetime of the original - // slice, and you can't really do more than one split at mut. - // Seems this will be fixed soon though. 
by_kind_in_order.push((prev, begin..index)); begin = index; From ab1b9a4f713beaaf85645d06491771dfaf0bd967 Mon Sep 17 00:00:00 2001 From: Albert Liu Date: Tue, 2 May 2023 19:36:18 -0700 Subject: [PATCH 16/33] meh --- compiler/src/pass/mod.rs | 89 +++++++++++++---------- components/Ast.tsx | 129 ++++++++++++++++++++++++++++++++++ components/compiler.schema.ts | 22 +++--- pages/index.tsx | 35 ++------- pages/tci.module.css | 1 - 5 files changed, 195 insertions(+), 81 deletions(-) create mode 100644 components/Ast.tsx diff --git a/compiler/src/pass/mod.rs b/compiler/src/pass/mod.rs index 88bf91b2..81190db3 100644 --- a/compiler/src/pass/mod.rs +++ b/compiler/src/pass/mod.rs @@ -8,56 +8,69 @@ pub struct ByKindAst<'a> { pub by_kind_in_order: Vec<(AstNodeKind, Range)>, } -pub fn sort_by_kind(ast: &mut AstNodeVec) -> ByKindAst { - let mut indices = Vec::with_capacity(ast.len()); - for _ in 0..ast.len() { - indices.push(u32::MAX); - } - - // TODO: Sort by kind,post_order +impl<'a> ByKindAst<'a> { + pub fn new(ast: &'a mut AstNodeVec) -> Self { + Self::sort_by_kind(ast); + + let mut by_kind = HashMap::new(); + let mut by_kind_in_order = Vec::new(); + + let mut prev = *ast.index(0).kind; + let mut begin: usize = 0; + let mut index: usize = 0; + while index < ast.len() { + let node = ast.index(index); + let kind = *node.kind; + if kind == prev { + index += 1; + continue; + } + + if let Some(_) = by_kind.insert(prev, begin..index) { + panic!("kind is somehow not sorted"); + } + + by_kind_in_order.push((prev, begin..index)); + + begin = index; + prev = kind; + index += 1; + } - for (index, &order) in ast.post_order.iter().enumerate() { - indices[order as usize] = index as u32; + return ByKindAst { + ast, + by_kind, + by_kind_in_order, + }; } - // Rebuild parent indices - for parent in &mut ast.parent { - *parent = indices[*parent as usize]; - } + // NOTE: Assumes that the input was originally sorted by post-order + fn sort_by_kind(ast: &mut AstNodeVec) { + let mut indices = Vec::with_capacity(ast.len()); + for _ in 0..ast.len() { + indices.push(u32::MAX); + } - let mut by_kind = HashMap::new(); - let mut by_kind_in_order = Vec::new(); + // TODO: Sort by kind,post_order - let mut prev = *ast.index(0).kind; - let mut begin: usize = 0; - let mut index: usize = 0; - while index < ast.len() { - let node = ast.index(index); - let kind = *node.kind; - if kind == prev { - index += 1; - continue; + for (index, &order) in ast.post_order.iter().enumerate() { + indices[order as usize] = index as u32; } - if let Some(_) = by_kind.insert(prev, begin..index) { - panic!("kind is somehow not sorted"); + // Rebuild parent indices + for parent in &mut ast.parent { + *parent = indices[*parent as usize]; } - - by_kind_in_order.push((prev, begin..index)); - - begin = index; - prev = kind; - index += 1; } +} - return ByKindAst { - ast, - by_kind, - by_kind_in_order, - }; +impl<'a> Drop for ByKindAst<'a> { + fn drop(&mut self) { + sort_by_postorder(self.ast); + } } -pub fn sort_to_postorder(ast: &mut AstNodeVec) { +pub fn sort_by_postorder(ast: &mut AstNodeVec) { let mut indices = Vec::with_capacity(ast.len()); for &order in &ast.post_order { diff --git a/components/Ast.tsx b/components/Ast.tsx new file mode 100644 index 00000000..561a6324 --- /dev/null +++ b/components/Ast.tsx @@ -0,0 +1,129 @@ +import { AstNode } from "./compiler.schema"; +import React from "react"; +import styles from "Ast.module.css"; + +const DisplayAstNode = ({ + kind, + children, +}: { + kind: string; + children?: React.ReactNode; +}) => { + return ( +
+
{kind}
+ {children} +
+ ); +}; + +const RecursiveAst = ({ ast }: { ast: AstNode[] }) => { + let rootStack = []; + + // Assumes that nodes are in post order + for (const node of ast) { + const data = node.kind.data ? `,${JSON.stringify(node.kind.data)}` : ""; + const kind = `${node.kind.kind}${data}`; + + let index = rootStack.length; + while (index > 0 && rootStack[index - 1].parent === node.post_order) { + index -= 1; + } + + rootStack.push({ + parent: node.parent, + node: ( + + {rootStack.splice(index).map((node) => node.node)} + + ), + }); + } + + return <>{rootStack.map((root) => root.node)}; +}; + +const FlatAst = ({ ast }: { ast: AstNode[] }) => { + return ( +
+ {ast.map((obj, index) => { + const data = obj.kind.data ? `,${JSON.stringify(obj.kind.data)}` : ""; + return ( +
+            kind: {`${obj.kind.kind}${data}`}
+            {"\n"}
+            parent: {obj.parent}
+          
+ ); + })} +
+ ); +}; + +export const Ast = ({ ast }: { ast: AstNode[] }) => { + const [recursive, setRecursive] = React.useState(true); + + return ( +
+
+ + +

Parsed AST

+
+ +
+ {recursive ? : } +
+
+ ); +}; diff --git a/components/compiler.schema.ts b/components/compiler.schema.ts index a3a947f8..e9274ee7 100644 --- a/components/compiler.schema.ts +++ b/components/compiler.schema.ts @@ -5,21 +5,21 @@ export const CompileCommand = z.object({ source: z.string(), }); +export type AstNode = z.infer; +export const AstNode = z.object({ + post_order: z.number(), + parent: z.number(), + kind: z.object({ + kind: z.string(), + data: z.any().optional(), + }), +}); + export type CompileResult = z.infer; export const CompileResult = z.object({ lexer: z.array(z.string()).nullish(), macro_expansion: z.array(z.string()).nullish(), - parsed_ast: z - .array( - z.object({ - parent: z.number(), - kind: z.object({ - kind: z.string(), - data: z.any().optional(), - }), - }) - ) - .nullish(), + parsed_ast: z.array(AstNode).nullish(), error: z.any().nullish(), }); diff --git a/pages/index.tsx b/pages/index.tsx index ea95ddd7..54ffa9cb 100644 --- a/pages/index.tsx +++ b/pages/index.tsx @@ -1,10 +1,11 @@ import Link from "next/link"; import styles from "./tci.module.css"; -import Editor, { Monaco } from "@monaco-editor/react"; +import Editor from "@monaco-editor/react"; import type monaco from "monaco-editor"; import React from "react"; import { useCompilerWorker } from "@/components/hooks"; import { CompileResult } from "@/components/compiler.schema"; +import { Ast } from "@/components/Ast"; const INITIAL_TEXT = `// Write C code here int main(int argc, char** argv) { @@ -117,36 +118,8 @@ export function App() { )}
-
-

Parsed AST

- - {result?.parsed_ast && ( -
- {result.parsed_ast.map((obj, index) => { - const data = obj.kind.data - ? `,${JSON.stringify(obj.kind.data)}` - : ""; - return ( -
-                        kind: {`${obj.kind.kind}${data}`}
-                        {"\n"}
-                        parent: {obj.parent}
-                      
- ); - })} -
- )} +
+ {result?.parsed_ast && }
diff --git a/pages/tci.module.css b/pages/tci.module.css index 0f9f70fa..d1082c6b 100644 --- a/pages/tci.module.css +++ b/pages/tci.module.css @@ -29,7 +29,6 @@ .scrollBox { position: relative; - min-height: 33%; width: 100%; border-radius: 4px; From 563e5303480dfbd842462fc8d544d9c357659a11 Mon Sep 17 00:00:00 2001 From: Albert Liu Date: Tue, 2 May 2023 19:48:05 -0700 Subject: [PATCH 17/33] meh --- compiler/src/lexer.rs | 19 ++++++------------- 1 file changed, 6 insertions(+), 13 deletions(-) diff --git a/compiler/src/lexer.rs b/compiler/src/lexer.rs index da6e538a..355bc21b 100644 --- a/compiler/src/lexer.rs +++ b/compiler/src/lexer.rs @@ -549,27 +549,20 @@ fn lex_num(mut index: usize, data: &[u8]) -> Result { ‘p’ or ‘P’ are used for hexadecimal floating-point constants.) */ - while index < data.len() { - let lower = data[index].to_ascii_lowercase(); - - match lower { + while let Some(&c) = data.get(index) { + let c = c.to_ascii_lowercase(); + match c { b'a'..=b'z' => {} b'0'..=b'9' => {} - b'.' => {} - b'_' => {} + b'.' | b'_' => {} x => break, } index += 1; // Match against exponent - if lower == b'e' || lower == b'p' { - match data.get(index) { - Some(b'-') | Some(b'+') => index += 1, - _ => {} - } - - continue; + if let (b'e' | b'p', Some(b'-' | b'+')) = (c, data.get(index)) { + index += 1; } } From 540befee0cfc0549d8195dfa2a4cb438843050f0 Mon Sep 17 00:00:00 2001 From: Albert Liu Date: Tue, 2 May 2023 22:18:47 -0700 Subject: [PATCH 18/33] meh --- Cargo.lock | 110 +++++++++++++++++++++- compiler/Cargo.toml | 1 + compiler/src/bin/test_runner.rs | 25 ++++- compiler/src/error.rs | 30 ++++-- compiler/src/lexer.rs | 26 +++--- compiler/src/lib.rs | 143 +++++++++++++++++++---------- compiler/src/parser.rs | 22 +++-- compiler/src/tests/lexer/include.c | 2 +- compiler/src/tests/lexer/simple.c | 4 +- 9 files changed, 283 insertions(+), 80 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ddba9ff9..9825690e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -167,11 +167,61 @@ dependencies = [ "lazy_static", "macro-attr", "ntest", + "rayon", "serde", "serde_json", "soa_derive", ] +[[package]] +name = "crossbeam-channel" +version = "0.5.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a33c2bf77f2df06183c3aa30d1e96c0695a313d4f9c453cc3762a6db39f99200" +dependencies = [ + "cfg-if 1.0.0", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-deque" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce6fd6f855243022dcecf8702fef0c297d4338e226845fe067f6341ad9fa0cef" +dependencies = [ + "cfg-if 1.0.0", + "crossbeam-epoch", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-epoch" +version = "0.9.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46bd5f3f85273295a9d14aedfb86f6aadbff6d8f5295c4a9edb08e819dcf5695" +dependencies = [ + "autocfg", + "cfg-if 1.0.0", + "crossbeam-utils", + "memoffset", + "scopeguard", +] + +[[package]] +name = "crossbeam-utils" +version = "0.8.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c063cd8cc95f5c377ed0d4b49a4b21f632396ff690e8470c29b3359b346984b" +dependencies = [ + "cfg-if 1.0.0", +] + +[[package]] +name = "either" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7fcaabb2fef8c910e7f4c7ce9f67a1283a1715879a7c230ca9d6d1ae31f16d91" + [[package]] name = "enum_derive" version = "0.1.7" @@ -211,6 +261,15 @@ version = "0.4.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" +[[package]] +name = "hermit-abi" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee512640fe35acbfb4bb779db6f0d80704c2cacfa2e39b601ef3e3f47d1ae4c7" +dependencies = [ + "libc", +] + [[package]] name = "hermit-abi" version = "0.3.1" @@ -233,7 +292,7 @@ version = "1.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c66c74d2ae7e79a5a8f7ac924adbe38ee42a859c6539ad869eb51f0b52dc220" dependencies = [ - "hermit-abi", + "hermit-abi 0.3.1", "libc", "windows-sys", ] @@ -244,7 +303,7 @@ version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "adcf93614601c8129ddf72e2d5633df827ba6551541c6d8c59520a371475be1f" dependencies = [ - "hermit-abi", + "hermit-abi 0.3.1", "io-lifetimes", "rustix", "windows-sys", @@ -304,6 +363,15 @@ version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" +[[package]] +name = "memoffset" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d61c719bcfbcf5d62b3a09efa6088de8c54bc0bfcd3ea7ae39fcc186108b8de1" +dependencies = [ + "autocfg", +] + [[package]] name = "memory_units" version = "0.4.0" @@ -343,6 +411,16 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "num_cpus" +version = "1.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fac9e2da13b5eb447a6ce3d392f23a29d8694bff781bf03a16cd9ac8697593b" +dependencies = [ + "hermit-abi 0.2.6", + "libc", +] + [[package]] name = "once_cell" version = "1.17.1" @@ -377,6 +455,28 @@ dependencies = [ "proc-macro2", ] +[[package]] +name = "rayon" +version = "1.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d2df5196e37bcc87abebc0053e20787d73847bb33134a69841207dd0a47f03b" +dependencies = [ + "either", + "rayon-core", +] + +[[package]] +name = "rayon-core" +version = "1.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b8f95bd6966f5c87776639160a66bd8ab9895d9d4ab01ddba9fc60661aebe8d" +dependencies = [ + "crossbeam-channel", + "crossbeam-deque", + "crossbeam-utils", + "num_cpus", +] + [[package]] name = "rustix" version = "0.37.18" @@ -397,6 +497,12 @@ version = "1.0.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f91339c0467de62360649f8d3e185ca8de4224ff281f66000de5eb2a77a79041" +[[package]] +name = "scopeguard" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" + [[package]] name = "serde" version = "1.0.160" diff --git a/compiler/Cargo.toml b/compiler/Cargo.toml index c103b6b0..9fefae57 100644 --- a/compiler/Cargo.toml +++ b/compiler/Cargo.toml @@ -16,6 +16,7 @@ clap = { version = "4.2.4", features = ["derive"] } bitfield-struct = "0.3" enum_derive = "0.1.7" macro-attr = "0.2.0" +rayon = "1.7.0" [dev-dependencies] ntest = "0.9.0" diff --git a/compiler/src/bin/test_runner.rs b/compiler/src/bin/test_runner.rs index 10c70571..18b740bd 100644 --- a/compiler/src/bin/test_runner.rs +++ b/compiler/src/bin/test_runner.rs @@ -1,10 +1,23 @@ use clap::Parser; +use compiler::StageOutput; + +#[derive(clap::ValueEnum, Clone, Copy)] +enum Stage { + Lex, + Macro, + Parse, +} + /// Search for a pattern in a 
file and display the lines that contain it. #[derive(Parser)] struct Cli { test_case: std::path::PathBuf, + #[clap(short, long)] + #[arg(value_enum)] + ignore: Vec, + #[clap(short, long)] write: bool, @@ -18,9 +31,17 @@ fn main() { let test_case = std::fs::read_to_string(&args.test_case).expect("file should exist and be a valid string"); - let result = compiler::api::run_test_code(&*test_case); + let (source, mut result) = compiler::api::run_test_code(&*test_case); + + for stage in args.ignore { + match stage { + Stage::Lex => result.lexer = StageOutput::Ignore, + Stage::Macro => result.macro_expansion = StageOutput::Ignore, + Stage::Parse => result.parsed_ast = StageOutput::Ignore, + } + } - let text = result.test_case(); + let text = result.test_case(source); if let Some(out) = &args.out_file { std::fs::write(out, text).expect("failed to write file"); diff --git a/compiler/src/error.rs b/compiler/src/error.rs index adfab826..eae0c844 100644 --- a/compiler/src/error.rs +++ b/compiler/src/error.rs @@ -15,16 +15,31 @@ pub struct TranslationUnitDebugInfo { pub file_starts: Vec, } -#[derive(Debug)] +#[derive(Debug, PartialEq, serde::Serialize, serde::Deserialize)] pub enum ErrorKind { - Todo(&'static str), + Todo(String), + + DidntRun, + NotImplemented(String), UnrecognizedCharacter { idx: u32 }, UnrecognizedToken { idx: u32 }, } +macro_rules! error { + ($e:ident) => { + Error::new(crate::error::ErrorKind::$e) + }; + ($e:ident $t:tt) => { + Error::new(crate::error::ErrorKind::$e $t) + }; +} + macro_rules! throw { + ($e:ident) => { + return Err(Error::new(crate::error::ErrorKind::$e)) + }; ($e:ident $t:tt) => { return Err(Error::new(crate::error::ErrorKind::$e $t)) }; @@ -32,7 +47,7 @@ macro_rules! throw { #[derive(Debug)] pub struct Error { - kind: ErrorKind, + pub kind: ErrorKind, backtrace: Option, } @@ -50,16 +65,15 @@ impl Error { }; } - fn todo() -> Result<(), Self> { - throw!(Todo("hello")); - } - pub fn message(&self) -> String { use ErrorKind::*; - match self.kind { + match &self.kind { Todo(message) => format!("{}", message), + DidntRun => format!("compiler phase didn't run"), + NotImplemented(message) => format!("{}", message), + UnrecognizedCharacter { idx } => format!("unrecognized character"), UnrecognizedToken { idx } => format!("unrecognized token"), } diff --git a/compiler/src/lexer.rs b/compiler/src/lexer.rs index 355bc21b..32166e66 100644 --- a/compiler/src/lexer.rs +++ b/compiler/src/lexer.rs @@ -301,7 +301,7 @@ pub fn lex(files: &FileDb, file: &File) -> Result { Err(e) => { return Err(LexError { translation_unit: result.translation_unit, - error: Error::new(ErrorKind::Todo(e)), + error: Error::new(ErrorKind::Todo(e.to_string())), }) } }; @@ -406,7 +406,7 @@ fn lex_tok_from_bytes<'a>(data: &'a [u8]) -> Result { let mut prev = 0u8; loop { let b = *data.get(index + i).ok_or(Error::new(ErrorKind::Todo( - "EOF while inside a block comment", + "EOF while inside a block comment".to_string(), )))?; i += 1; @@ -512,7 +512,7 @@ fn lex_tok_from_bytes<'a>(data: &'a [u8]) -> Result { }); } - throw!(Todo("'..' isn't valid")); + throw!(Todo("'..' 
isn't valid".to_string())); } return Ok(LexedTok { @@ -524,7 +524,7 @@ fn lex_tok_from_bytes<'a>(data: &'a [u8]) -> Result { b'\"' => return lex_character(TokenKind::StringLit, b'\"', index, data), b'\'' => return lex_character(TokenKind::CharLit, b'\'', index, data), - x => throw!(Todo("invalid character")), + x => throw!(Todo("invalid character".to_string())), } } @@ -581,7 +581,7 @@ fn lex_character( while index < data.len() { let cur = data[index]; if !cur.is_ascii() { - throw!(Todo("character is not valid ascii")); + throw!(Todo("character is not valid ascii".to_string())); } if cur == surround { @@ -593,7 +593,9 @@ fn lex_character( // handle early newline if cur == b'\n' || cur == b'\r' { - throw!(Todo("invalid character found when parsing string literal",)); + throw!(Todo( + "invalid character found when parsing string literal".to_string(), + )); } // handle escape cases @@ -612,7 +614,7 @@ fn lex_character( index += 1; } - throw!(Todo("File ended before ohe string was closed")); + throw!(Todo("File ended before ohe string was closed".to_string())); } struct IncludeResult<'a> { @@ -633,7 +635,7 @@ fn lex_include_line(data: &[u8]) -> Result { let (end_quote, file_type) = match data[index] { b'"' => (b'"', FileType::User), b'<' => (b'>', FileType::System), - _ => throw!(Todo("expected a file string")), + _ => throw!(Todo("expected a file string".to_string())), }; index += 1; @@ -643,10 +645,12 @@ fn lex_include_line(data: &[u8]) -> Result { loop { match data.get(index) { None => { - throw!(Todo("file ended before include file string was done",)) + throw!(Todo( + "file ended before include file string was done".to_string(), + )) } Some(b'\n') | Some(b'\r') => { - throw!(Todo("line ended before file string was done")) + throw!(Todo("line ended before file string was done".to_string())) } Some(x) if *x == end_quote => break, @@ -672,7 +676,7 @@ fn lex_include_line(data: &[u8]) -> Result { (b'\n', _) => 1, (b'\r', _) => 1, - _ => throw!(Todo("extra stuff after include file name")), + _ => throw!(Todo("extra stuff after include file name".to_string())), }; index += increment; diff --git a/compiler/src/lib.rs b/compiler/src/lib.rs index 17c6a37e..e058af70 100644 --- a/compiler/src/lib.rs +++ b/compiler/src/lib.rs @@ -48,23 +48,51 @@ pub mod api { pub use ntest::*; } -#[derive(serde::Deserialize)] -pub struct PipelineInput { - lexer: Option>, - macro_expansion: Option>, + +#[derive(Debug, serde::Serialize, serde::Deserialize)] +pub enum StageOutput { + Ok(Vec), + Err(crate::error::ErrorKind), + Ignore, +} + +impl Default for StageOutput { + fn default() -> Self { + Self::Ignore + } +} + +impl PartialEq> for StageOutput +where + T: PartialEq, +{ + fn eq(&self, other: &StageOutput) -> bool { + match (self, other) { + // If there's no stage, dw about it + (Self::Ignore, _) => return true, + (_, Self::Ignore) => return true, + + (Self::Ok(s), Self::Ok(o)) => return s == o, + (Self::Err(s), Self::Err(o)) => return s == o, + + _ => return false, + } + } } -#[derive(serde::Serialize)] -pub struct PipelineOutput<'a> { - #[serde(skip_serializing)] - source: &'a str, +#[derive(serde::Serialize, serde::Deserialize, PartialEq, Debug)] +pub struct PipelineData { + #[serde(default)] + pub lexer: StageOutput, + + #[serde(default)] + pub macro_expansion: StageOutput, - lexer: Vec, - macro_expansion: Vec, - parsed_ast: Vec, + #[serde(default)] + pub parsed_ast: StageOutput, } -#[derive(serde::Serialize)] +#[derive(serde::Serialize, serde::Deserialize, PartialEq, Debug)] pub struct SimpleAstNode { pub kind: 
AstNodeKind, pub parent: u32, @@ -74,42 +102,42 @@ pub struct SimpleAstNode { const TEST_CASE_DELIMITER: &'static str = "// -- END TEST CASE --\n// "; -pub fn run_test_code(test_source: &str) -> PipelineOutput { +// NOTE: the "source" field is empty +pub fn run_compiler_for_testing(source: String) -> PipelineData { use crate::api::*; - let (source, expected_str) = test_source - .split_once(TEST_CASE_DELIMITER) - .unwrap_or((test_source, "null")); - - let expected = serde_json::from_str::>(expected_str) - .expect("Test case expected value didn't parse") - .unwrap_or(PipelineInput { - lexer: None, - macro_expansion: None, - }); - - let mut source_string = source.to_string(); - if !source_string.ends_with("\n") { - source_string.push('\n'); - } - let mut files = FileDb::new(); let file_id = files - .add_file("main.c".to_string(), source_string) + .add_file("main.c".to_string(), source) .expect("file should add properly"); let file = &files.files[file_id as usize]; - let lexer_res = lex(&files, file).expect("Expected lex to succeed"); - if let Some(expected) = &expected.lexer { - assert_eq!(&lexer_res.tokens.kind, expected, "Invalid token stream"); - } + let mut out = PipelineData { + lexer: StageOutput::Err(ErrorKind::DidntRun), + macro_expansion: StageOutput::Err(ErrorKind::DidntRun), + parsed_ast: StageOutput::Err(ErrorKind::DidntRun), + }; + + let lexer_res = match lex(&files, file) { + Ok(res) => res, + Err(e) => { + out.lexer = StageOutput::Err(e.error.kind); + return out; + } + }; + out.lexer = StageOutput::Ok(lexer_res.tokens.kind.clone()); let macro_expansion_res = expand_macros(lexer_res.tokens.as_slice()); - if let Some(expected) = &expected.macro_expansion { - assert_eq!(¯o_expansion_res.kind, expected, "Invalid token stream"); - } + out.macro_expansion = StageOutput::Ok(macro_expansion_res.kind.clone()); + + let parsed_ast = match parse(¯o_expansion_res) { + Ok(res) => res, + Err(e) => { + out.parsed_ast = StageOutput::Err(e.kind); + return out; + } + }; - let parsed_ast = parse(¯o_expansion_res).expect("parsing failed"); let mut simple_ast = Vec::with_capacity(parsed_ast.len()); for node in parsed_ast.as_slice() { simple_ast.push(SimpleAstNode { @@ -120,19 +148,38 @@ pub fn run_test_code(test_source: &str) -> PipelineOutput { }); } - // println!("{}", ast::display_tree(&parsed_ast)); + out.parsed_ast = StageOutput::Ok(simple_ast); - return PipelineOutput { - source, - lexer: lexer_res.tokens.kind, - macro_expansion: macro_expansion_res.kind, - parsed_ast: simple_ast, - }; + return out; +} + +pub fn run_test_code(test_source: &str) -> (&str, PipelineData) { + let (source, expected_str) = test_source + .split_once(TEST_CASE_DELIMITER) + .unwrap_or((test_source, "null")); + + let mut source_string = source.to_string(); + if !source_string.ends_with("\n") { + source_string.push('\n'); + } + + let expected = serde_json::from_str::>(expected_str) + .expect("Test case expected value didn't parse") + .unwrap_or(PipelineData { + lexer: StageOutput::Ignore, + macro_expansion: StageOutput::Ignore, + parsed_ast: StageOutput::Ignore, + }); + + let output = run_compiler_for_testing(source_string); + assert_eq!(output, expected); + + return (source, output); } -impl<'a> PipelineOutput<'a> { - pub fn test_case(&self) -> String { - let mut output = self.source.to_string(); +impl PipelineData { + pub fn test_case(&self, source: &str) -> String { + let mut output = source.to_string(); let text = serde_json::to_string(self).expect("failed to serialize test output"); diff --git 
a/compiler/src/parser.rs b/compiler/src/parser.rs index 49d42694..7f7d5812 100644 --- a/compiler/src/parser.rs +++ b/compiler/src/parser.rs @@ -166,7 +166,9 @@ fn parse_global(p: &mut Parser) -> Result<(), Error> { return Ok(()); }; - unimplemented!("a global that's not a declaration"); + throw!(NotImplemented( + "a global that's not a declaration".to_string() + )); } enum DeclarationKind { @@ -196,7 +198,7 @@ fn parse_declaration(p: &mut Parser, kind: DeclarationKind) -> Result {} - x => throw!(Todo("bad character after declartor")), + x => throw!(Todo("bad character after declartor".to_string())), } while p.peek_kind() == TokenKind::Comma { @@ -239,7 +241,9 @@ fn parse_declarator(p: &mut Parser) -> Result { node.child(parse_declarator(p)?); if p.peek_kind() != TokenKind::RParen { - throw!(Todo("nested declarator didn't have closing paren")); + throw!(Todo( + "nested declarator didn't have closing paren".to_string() + )); } p.index += 1; @@ -318,7 +322,9 @@ fn parse_func_declarator(p: &mut Parser) -> Result, Error> { p.index += 1; if !node.child_opt(parse_declaration(p, DeclarationKind::Param)?) { - throw!(Todo("parameter list has extra comma at the end")); + throw!(Todo( + "parameter list has extra comma at the end".to_string() + )); } } @@ -331,7 +337,9 @@ fn parse_func_declarator(p: &mut Parser) -> Result, Error> { }; if p.peek_kind() != TokenKind::RParen { - throw!(Todo("missing closing paren for func declarator")); + throw!(Todo( + "missing closing paren for func declarator".to_string() + )); } p.index += 1; @@ -523,7 +531,7 @@ fn parse_atom_expr(p: &mut Parser) -> Result { TokenKind::StringLit => AstExpr::StringLit, - _ => throw!(Todo("unrecognized atom token")), + _ => throw!(Todo("unrecognized atom token".to_string())), }; p.index += 1; @@ -532,7 +540,7 @@ fn parse_atom_expr(p: &mut Parser) -> Result { fn expect_semicolon(p: &mut Parser) -> Result<(), Error> { if p.peek_kind() != TokenKind::Semicolon { - throw!(Todo("expected a semicolon")); + throw!(Todo("expected a semicolon".to_string())); } while p.peek_kind() == TokenKind::Semicolon { diff --git a/compiler/src/tests/lexer/include.c b/compiler/src/tests/lexer/include.c index 0d9a519b..4a87a9aa 100644 --- a/compiler/src/tests/lexer/include.c +++ b/compiler/src/tests/lexer/include.c @@ -1,4 +1,4 @@ #include int main() {} // -- END TEST CASE -- -// {"lexer":["Typedef","Char","Ident","Semicolon","Newline","Hashtag","Ident","Ident","PreprocessingNum","Newline","Hashtag","Ident","Ident","PreprocessingNum","Newline","Int","Ident","LParen","RParen","LBrace","RBrace","Newline"]} +// {"lexer":{"Ok":["Typedef","Char","Ident","Semicolon","Newline","Hashtag","Ident","Ident","PreprocessingNum","Newline","Hashtag","Ident","Ident","PreprocessingNum","Newline","Int","Ident","LParen","RParen","LBrace","RBrace","Newline"]},"macro_expansion":"Ignore","parsed_ast":"Ignore"} diff --git a/compiler/src/tests/lexer/simple.c b/compiler/src/tests/lexer/simple.c index 939b5c96..61c1622b 100644 --- a/compiler/src/tests/lexer/simple.c +++ b/compiler/src/tests/lexer/simple.c @@ -1,3 +1,5 @@ int main(int argc, char** argv) { return "printf"; -} \ No newline at end of file +} +// -- END TEST CASE -- +// {"lexer":{"Ok":["Int","Ident","LParen","Int","Ident","Comma","Char","Star","Star","Ident","RParen","LBrace","Newline","Return","StringLit","Semicolon","Newline","RBrace","Newline"]},"macro_expansion":"Ignore","parsed_ast":"Ignore"} From ee4dafe258ae73df9ddbe738fa65c49289652d21 Mon Sep 17 00:00:00 2001 From: Albert Liu Date: Tue, 2 May 2023 22:24:27 -0700 
From ee4dafe258ae73df9ddbe738fa65c49289652d21 Mon Sep 17 00:00:00 2001
From: Albert Liu
Date: Tue, 2 May 2023 22:24:27 -0700
Subject: [PATCH 19/33] meh

---
 compiler/src/error.rs  | 10 +++++-----
 compiler/src/lexer.rs  | 28 ++++++++++++----------------
 compiler/src/parser.rs | 22 +++++++---------------
 3 files changed, 24 insertions(+), 36 deletions(-)

diff --git a/compiler/src/error.rs b/compiler/src/error.rs
index eae0c844..5b87384d 100644
--- a/compiler/src/error.rs
+++ b/compiler/src/error.rs
@@ -28,6 +28,9 @@ pub enum ErrorKind {
 }
 
 macro_rules! error {
+    ($e:ident ( $str:literal )) => {
+        Error::new(crate::error::ErrorKind::$e ( $str.to_string() ))
+    };
     ($e:ident) => {
         Error::new(crate::error::ErrorKind::$e)
     };
@@ -37,11 +40,8 @@ macro_rules! error {
 }
 
 macro_rules! throw {
-    ($e:ident) => {
-        return Err(Error::new(crate::error::ErrorKind::$e))
-    };
-    ($e:ident $t:tt) => {
-        return Err(Error::new(crate::error::ErrorKind::$e $t))
+    ($($e:tt)*) => {
+        { return Err(error!($($e)*)); }
     };
 }
 
diff --git a/compiler/src/lexer.rs b/compiler/src/lexer.rs
index 32166e66..6658d13e 100644
--- a/compiler/src/lexer.rs
+++ b/compiler/src/lexer.rs
@@ -405,9 +405,9 @@ fn lex_tok_from_bytes<'a>(data: &'a [u8]) -> Result {
     let mut i = 1;
     let mut prev = 0u8;
     loop {
-        let b = *data.get(index + i).ok_or(Error::new(ErrorKind::Todo(
-            "EOF while inside a block comment".to_string(),
-        )))?;
+        let b = *data
+            .get(index + i)
+            .ok_or(error!(Todo("EOF while inside a block comment")))?;
         i += 1;
 
         // Consume until we hit the suffix
@@ -512,7 +512,7 @@ fn lex_tok_from_bytes<'a>(data: &'a [u8]) -> Result {
                 });
             }
 
-            throw!(Todo("'..' isn't valid".to_string()));
+            throw!(Todo("'..' isn't valid"));
         }
 
         return Ok(LexedTok {
@@ -524,7 +524,7 @@ fn lex_tok_from_bytes<'a>(data: &'a [u8]) -> Result {
         b'\"' => return lex_character(TokenKind::StringLit, b'\"', index, data),
         b'\'' => return lex_character(TokenKind::CharLit, b'\'', index, data),
 
-        x => throw!(Todo("invalid character".to_string())),
+        x => throw!(Todo("invalid character")),
     }
 }
 
@@ -581,7 +581,7 @@ fn lex_character(
     while index < data.len() {
         let cur = data[index];
         if !cur.is_ascii() {
-            throw!(Todo("character is not valid ascii".to_string()));
+            throw!(Todo("character is not valid ascii"));
         }
 
         if cur == surround {
@@ -593,9 +593,7 @@ fn lex_character(
 
         // handle early newline
         if cur == b'\n' || cur == b'\r' {
-            throw!(Todo(
-                "invalid character found when parsing string literal".to_string(),
-            ));
+            throw!(Todo("invalid character found when parsing string literal"));
         }
 
         // handle escape cases
@@ -614,7 +612,7 @@ fn lex_character(
         index += 1;
     }
 
-    throw!(Todo("File ended before the string was closed".to_string()));
+    throw!(Todo("File ended before the string was closed"));
 }
 
 struct IncludeResult<'a> {
@@ -635,7 +633,7 @@ fn lex_include_line(data: &[u8]) -> Result {
     let (end_quote, file_type) = match data[index] {
         b'"' => (b'"', FileType::User),
         b'<' => (b'>', FileType::System),
-        _ => throw!(Todo("expected a file string".to_string())),
+        _ => throw!(Todo("expected a file string")),
     };
 
     index += 1;
@@ -645,12 +643,10 @@ fn lex_include_line(data: &[u8]) -> Result {
     loop {
         match data.get(index) {
             None => {
-                throw!(Todo(
-                    "file ended before include file string was done".to_string(),
-                ))
+                throw!(Todo("file ended before include file string was done"))
             }
             Some(b'\n') | Some(b'\r') => {
-                throw!(Todo("line ended before file string was done".to_string()))
+                throw!(Todo("line ended before file string was done"))
             }
 
             Some(x) if *x == end_quote => break,
@@ -676,7 +672,7 @@ fn lex_include_line(data: &[u8]) -> Result {
         (b'\n', _) => 1,
         (b'\r', _) => 1,
 
-        _ => throw!(Todo("extra stuff after include file name".to_string())),
+        _ => throw!(Todo("extra stuff after include file name")),
     };
 
     index += increment;
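The `error!`/`throw!` rework above is what lets every call site in the lexer and parser drop its `.to_string()`: `error!` gains an arm that matches a bare string literal and takes ownership of it, and `throw!` now just forwards whatever tokens it gets to `error!`. A self-contained mirror of the literal and unit arms, with stub `Error`/`ErrorKind` types standing in for the real ones in `error.rs`:

```rust
#[derive(Debug)]
enum ErrorKind {
    DidntRun,
    Todo(String),
}

#[derive(Debug)]
struct Error {
    kind: ErrorKind,
}

impl Error {
    fn new(kind: ErrorKind) -> Error {
        Error { kind }
    }
}

macro_rules! error {
    // error!(Todo("message")) => variant holding an owned String
    ($e:ident ( $str:literal )) => {
        Error::new(ErrorKind::$e($str.to_string()))
    };
    // error!(DidntRun) => unit variant
    ($e:ident) => {
        Error::new(ErrorKind::$e)
    };
}

macro_rules! throw {
    ($($e:tt)*) => {
        return Err(error!($($e)*))
    };
}

fn expect_semicolon(found_semicolon: bool) -> Result<(), Error> {
    if !found_semicolon {
        throw!(Todo("expected a semicolon"));
    }
    Ok(())
}
```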
diff --git a/compiler/src/parser.rs b/compiler/src/parser.rs
index 7f7d5812..da43be43 100644
--- a/compiler/src/parser.rs
+++ b/compiler/src/parser.rs
@@ -166,9 +166,7 @@ fn parse_global(p: &mut Parser) -> Result<(), Error> {
         return Ok(());
     };
 
-    throw!(NotImplemented(
-        "a global that's not a declaration".to_string()
-    ));
+    throw!(NotImplemented("a global that's not a declaration"));
 }
 
 enum DeclarationKind {
@@ -198,7 +196,7 @@ fn parse_declaration(p: &mut Parser, kind: DeclarationKind) -> Result {}
 
-        x => throw!(Todo("bad character after declarator".to_string())),
+        x => throw!(Todo("bad character after declarator")),
     }
 
     while p.peek_kind() == TokenKind::Comma {
@@ -241,9 +239,7 @@ fn parse_declarator(p: &mut Parser) -> Result {
     node.child(parse_declarator(p)?);
 
     if p.peek_kind() != TokenKind::RParen {
-        throw!(Todo(
-            "nested declarator didn't have closing paren".to_string()
-        ));
+        throw!(Todo("nested declarator didn't have closing paren"));
     }
 
     p.index += 1;
@@ -322,9 +318,7 @@ fn parse_func_declarator(p: &mut Parser) -> Result, Error> {
         p.index += 1;
 
         if !node.child_opt(parse_declaration(p, DeclarationKind::Param)?) {
-            throw!(Todo(
-                "parameter list has extra comma at the end".to_string()
-            ));
+            throw!(Todo("parameter list has extra comma at the end"));
        }
     }
 
@@ -337,9 +331,7 @@ fn parse_func_declarator(p: &mut Parser) -> Result, Error> {
     };
 
     if p.peek_kind() != TokenKind::RParen {
-        throw!(Todo(
-            "missing closing paren for func declarator".to_string()
-        ));
+        throw!(Todo("missing closing paren for func declarator"));
     }
 
     p.index += 1;
@@ -531,7 +523,7 @@ fn parse_atom_expr(p: &mut Parser) -> Result {
 
         TokenKind::StringLit => AstExpr::StringLit,
 
-        _ => throw!(Todo("unrecognized atom token".to_string())),
+        _ => throw!(Todo("unrecognized atom token")),
     };
 
     p.index += 1;
@@ -540,7 +532,7 @@ fn expect_semicolon(p: &mut Parser) -> Result<(), Error> {
     if p.peek_kind() != TokenKind::Semicolon {
-        throw!(Todo("expected a semicolon".to_string()));
+        throw!(Todo("expected a semicolon"));
     }
 
     while p.peek_kind() == TokenKind::Semicolon {
From 05bd753c3b390a080725d04c990d617a0cf28340 Mon Sep 17 00:00:00 2001
From: Albert Liu
Date: Tue, 2 May 2023 22:34:23 -0700
Subject: [PATCH 20/33] meh

---
 tci-web/src/lib.rs | 66 +++++++++++++++++-----------------
 1 file changed, 24 insertions(+), 42 deletions(-)

diff --git a/tci-web/src/lib.rs b/tci-web/src/lib.rs
index 59c3b464..da54b944 100644
--- a/tci-web/src/lib.rs
+++ b/tci-web/src/lib.rs
@@ -1,4 +1,4 @@
-use compiler::api::*;
+use compiler::{api::*, run_compiler_for_testing, StageOutput};
 use serde::Serialize;
 use wasm_bindgen::prelude::*;
@@ -10,59 +10,41 @@ pub struct PipelineOutput {
     lexer: Option>,
     macro_expansion: Option>,
     parsed_ast: Option>,
-    error: Option,
+    errors: Option>,
 }
 
 #[wasm_bindgen]
 pub fn compile(source: String) -> Result {
-    let mut source_string = source.to_string();
-    if !source_string.ends_with("\n") {
-        source_string.push('\n');
-    }
-
-    let mut files = FileDb::new();
-    let file_id = files.add_file("main.c".to_string(), source_string)?;
-    let file = &files.files[file_id as usize];
-
     let mut output = PipelineOutput {
         lexer: None,
         macro_expansion: None,
         parsed_ast: None,
-        error: None,
+        errors: None,
    };
 
     'done: {
-        let lexer_res = match lex(&files, file) {
-            Ok(l) => l,
-            Err(e) => {
-                output.error = Some(format!("lex error: {:?}", e));
-                break 'done;
-            }
-        };
-
-        output.lexer =
Some(lexer_res.tokens.kind.clone());
-
-        let macro_expansion_res = expand_macros(lexer_res.tokens.as_slice());
-        output.macro_expansion = Some(macro_expansion_res.kind.clone());
-
-        let parsed_ast = match parse(&macro_expansion_res) {
-            Ok(l) => l,
-            Err(e) => {
-                output.error = Some(format!("parse error: {:?}", e));
-                break 'done;
-            }
-        };
-
-        let mut simple_ast = Vec::with_capacity(parsed_ast.len());
-        for node in parsed_ast.as_slice() {
-            simple_ast.push(compiler::SimpleAstNode {
-                kind: *node.kind,
-                parent: *node.parent,
-                post_order: *node.post_order,
-                height: *node.height,
-            });
+        let data = run_compiler_for_testing(source);
+
+        macro_rules! stage_transfer {
+            ($i:ident) => {
+                match data.$i {
+                    StageOutput::Ok(l) => output.$i = Some(l),
+                    StageOutput::Ignore => {}
+                    StageOutput::Err(e) => {
+                        output.$i = None;
+
+                        let error = format!(concat!(stringify!($i), " error: {:?}"), e);
+                        output.errors.get_or_insert(Vec::new()).push(error);
+
+                        break 'done;
+                    }
+                }
+            };
         }
-        output.parsed_ast = Some(simple_ast);
+
+        stage_transfer!(lexer);
+        stage_transfer!(macro_expansion);
+        stage_transfer!(parsed_ast);
     }
 
     let out = serde_json::to_string(&output).map_err(|e| e.to_string())?;
From 89897f20fd9c04b042118521fb2abdc718e06efc Mon Sep 17 00:00:00 2001
From: Albert Liu
Date: Tue, 2 May 2023 23:05:24 -0700
Subject: [PATCH 21/33] meh

---
 compiler/src/bin/test_runner.rs | 30 +++++++++++++++++++++++-------
 compiler/src/lib.rs             |  5 ++---
 compiler/src/tests/lexer/mod.rs |  2 +-
 3 files changed, 26 insertions(+), 11 deletions(-)

diff --git a/compiler/src/bin/test_runner.rs b/compiler/src/bin/test_runner.rs
index 18b740bd..493ac6ac 100644
--- a/compiler/src/bin/test_runner.rs
+++ b/compiler/src/bin/test_runner.rs
@@ -9,20 +9,36 @@ enum Stage {
     Parse,
 }
 
-/// Search for a pattern in a file and display the lines that contain it.
+/// Run
 #[derive(Parser)]
+#[clap(author = "Albert Liu", about = "Test runner for TCI.")]
 struct Cli {
+    #[clap(help = "a path to a test case")]
     test_case: std::path::PathBuf,
 
-    #[clap(short, long)]
+    #[clap(
+        short,
+        long,
+        value_delimiter = ',',
+        help = "a stage to ignore",
+        long_help = r#"A stage to ignore. This can be repeated, or you can pass
the stage names as a comma-separated list.

Examples:
"lex,macro" skips the lexing and macro expansion stages."#
+    )]
     #[arg(value_enum)]
     ignore: Vec,
 
-    #[clap(short, long)]
-    write: bool,
-
-    #[clap(short, long)]
+    #[clap(
+        short,
+        long,
+        help = "output the result to OUT_FILE.
Overrides `--write`" + )] out_file: Option, + + #[clap(short, long, help = "write to the input file in-place")] + write: bool, } fn main() { @@ -31,7 +47,7 @@ fn main() { let test_case = std::fs::read_to_string(&args.test_case).expect("file should exist and be a valid string"); - let (source, mut result) = compiler::api::run_test_code(&*test_case); + let (source, mut result) = compiler::api::run_compiler_test_case(&*test_case); for stage in args.ignore { match stage { diff --git a/compiler/src/lib.rs b/compiler/src/lib.rs index e058af70..93bf81d5 100644 --- a/compiler/src/lib.rs +++ b/compiler/src/lib.rs @@ -39,7 +39,7 @@ pub mod api { pub use super::macros::expand_macros; pub use super::parser::parse; - pub use super::run_test_code; + pub use super::run_compiler_test_case; pub(crate) use serde::{Deserialize, Serialize}; pub(crate) use std::collections::HashMap; @@ -48,7 +48,6 @@ pub mod api { pub use ntest::*; } - #[derive(Debug, serde::Serialize, serde::Deserialize)] pub enum StageOutput { Ok(Vec), @@ -153,7 +152,7 @@ pub fn run_compiler_for_testing(source: String) -> PipelineData { return out; } -pub fn run_test_code(test_source: &str) -> (&str, PipelineData) { +pub fn run_compiler_test_case(test_source: &str) -> (&str, PipelineData) { let (source, expected_str) = test_source .split_once(TEST_CASE_DELIMITER) .unwrap_or((test_source, "null")); diff --git a/compiler/src/tests/lexer/mod.rs b/compiler/src/tests/lexer/mod.rs index 6146dcd2..bf6d902e 100644 --- a/compiler/src/tests/lexer/mod.rs +++ b/compiler/src/tests/lexer/mod.rs @@ -1,6 +1,6 @@ #[test] fn simple() { - crate::run_test_code(include_str!("simple.c")); + crate::run_compiler_test_case(include_str!("simple.c")); } // #[test] From a16920772dc032f8682feea11758c539a55d4cc5 Mon Sep 17 00:00:00 2001 From: Albert Liu Date: Tue, 2 May 2023 23:31:20 -0700 Subject: [PATCH 22/33] meh --- components/Ast.tsx | 68 +++++++++++++----------------- components/ScrollWindow.module.css | 14 ++++++ components/ScrollWindow.tsx | 39 +++++++++++++++++ components/lodash.tsx | 9 ++++ package.json | 1 + pages/globals.css | 5 +++ pages/index.tsx | 35 +++++++++++++-- yarn.lock | 5 +++ 8 files changed, 135 insertions(+), 41 deletions(-) create mode 100644 components/ScrollWindow.module.css create mode 100644 components/ScrollWindow.tsx create mode 100644 components/lodash.tsx diff --git a/components/Ast.tsx b/components/Ast.tsx index 561a6324..0345bd7a 100644 --- a/components/Ast.tsx +++ b/components/Ast.tsx @@ -1,6 +1,7 @@ import { AstNode } from "./compiler.schema"; import React from "react"; import styles from "Ast.module.css"; +import { ScrollWindow } from "./ScrollWindow"; const DisplayAstNode = ({ kind, @@ -81,49 +82,40 @@ export const Ast = ({ ast }: { ast: AstNode[] }) => { const [recursive, setRecursive] = React.useState(true); return ( -
-    <div
-      style={{
-        display: "flex",
-        flexDirection: "column",
-      }}
-    >
-      <div>
-        <h4>Parsed AST</h4>
-        <input
-          type="checkbox"
-          checked={recursive}
-          onChange={(evt) => setRecursive(evt.target.checked)}
-        />
-      </div>
-      <pre>
+    <ScrollWindow
+      title={
+        <div>
+          <h4>Parsed AST</h4>
+          <input
+            type="checkbox"
+            checked={recursive}
+            onChange={(evt) => setRecursive(evt.target.checked)}
+          />
+        </div>
+      }
+    >
+      <pre>
       {recursive ? <RecursiveAst ast={ast} /> : <FlatAst ast={ast} />}
-      </pre>
-    </div>
+ ); }; diff --git a/components/ScrollWindow.module.css b/components/ScrollWindow.module.css new file mode 100644 index 00000000..34c53fea --- /dev/null +++ b/components/ScrollWindow.module.css @@ -0,0 +1,14 @@ +.wrapper { + position: relative; +} + +.inner { + position: absolute; + + top: 0; + bottom: 0; + left: 0; + right: 0; + + overflow-y: scroll; +} diff --git a/components/ScrollWindow.tsx b/components/ScrollWindow.tsx new file mode 100644 index 00000000..5ac4b28d --- /dev/null +++ b/components/ScrollWindow.tsx @@ -0,0 +1,39 @@ +import React from "react"; +import styles from "./ScrollWindow.module.css"; +import cx from "classnames"; + +type ScrollWindowProps = { + className?: string; + style?: React.CSSProperties; + innerClassName?: string; + innerStyle?: React.CSSProperties; + title?: React.ReactNode; + children?: React.ReactNode; +}; + +export const ScrollWindow = ({ + className, + style, + innerClassName, + innerStyle, + title, + children, +}: ScrollWindowProps) => { + /* + The position relative/absolute stuff makes it so that the + inner div doesn't affect layout calculations of the surrounding div. + I found this very confusing at first, so here's the SO post that I got it from: + https://stackoverflow.com/questions/27433183/make-scrollable-div-take-up-remaining-height + */ + return ( +
+    <div className={cx(styles.wrapper, className)} style={style}>
+      <div>
+        {title}
+      </div>
+
+      <div
+        className={cx(styles.inner, innerClassName)}
+        style={innerStyle}
+      >
+        {children}
+ ); +}; diff --git a/components/lodash.tsx b/components/lodash.tsx new file mode 100644 index 00000000..96d5ff16 --- /dev/null +++ b/components/lodash.tsx @@ -0,0 +1,9 @@ +export function debounce(callback: () => void, wait: number) { + let timeoutId: number | undefined = undefined; + return () => { + window.clearTimeout(timeoutId); + timeoutId = window.setTimeout(() => { + callback(); + }, wait); + }; +} diff --git a/package.json b/package.json index 33247914..1f9d895f 100644 --- a/package.json +++ b/package.json @@ -17,6 +17,7 @@ "dependencies": { "@monaco-editor/react": "^4.0.7", "autoprefixer": "10.4.14", + "classnames": "^2.3.2", "eslint": "8.39.0", "eslint-config-next": "13.3.1", "idb-keyval": "^5.0.2", diff --git a/pages/globals.css b/pages/globals.css index c91e6909..f8ebae3e 100644 --- a/pages/globals.css +++ b/pages/globals.css @@ -23,6 +23,11 @@ div#__next { width: 100%; } +.full { + height: 100%; + width: 100%; +} + .Resizer { background: #000; opacity: 0.2; diff --git a/pages/index.tsx b/pages/index.tsx index 54ffa9cb..eb488e46 100644 --- a/pages/index.tsx +++ b/pages/index.tsx @@ -6,6 +6,8 @@ import React from "react"; import { useCompilerWorker } from "@/components/hooks"; import { CompileResult } from "@/components/compiler.schema"; import { Ast } from "@/components/Ast"; +import { ScrollWindow } from "@/components/ScrollWindow"; +import { debounce } from "@/components/lodash"; const INITIAL_TEXT = `// Write C code here int main(int argc, char** argv) { @@ -36,6 +38,16 @@ export function App() { } }); + React.useEffect(() => { + editorRef; + + function pollEditorValue() { + if (editorRef.current) { + localStorage.setItem("tciEditorValue", editorRef.current.getValue()); + } + } + }, []); + const editorRef = React.useRef(); function compile() { @@ -80,6 +92,17 @@ export function App() { defaultValue={INITIAL_TEXT} onMount={(editor, monaco) => { editorRef.current = editor; + + editor.setValue( + localStorage.getItem("tciEditorValue") ?? INITIAL_TEXT + ); + + const writeToStorage = debounce( + () => localStorage.setItem("tciEditorValue", editor.getValue()), + 300 + ); + editor.getModel()?.onDidChangeContent((evt) => writeToStorage()); + monaco.editor.addKeybindingRules([ { keybinding: monaco.KeyMod.CtrlCmd | monaco.KeyCode.KeyS, @@ -109,14 +132,20 @@ export function App() { flexGrow: 1, }} > -
-          <div
-            className={styles.scrollBox}
-            style={{ width: "40%" }}
-          >
-            <h4>Lexed Tokens</h4>
+          <ScrollWindow
+            style={{
+              borderRadius: "4px",
+              border: "2px solid black",
+              width: "40%",
+            }}
+            title={"Lexed Tokens"}
+          >
             {result?.lexer && (
               <pre className={styles.text}>
                 {JSON.stringify(result.lexer, undefined, 2)}
               </pre>
             )}
-          </div>
+          </ScrollWindow>
{result?.parsed_ast && } diff --git a/yarn.lock b/yarn.lock index cc30659c..7bea3f01 100644 --- a/yarn.lock +++ b/yarn.lock @@ -553,6 +553,11 @@ chownr@^2.0.0: resolved "https://registry.yarnpkg.com/chownr/-/chownr-2.0.0.tgz#15bfbe53d2eab4cf70f18a8cd68ebe5b3cb1dece" integrity sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ== +classnames@^2.3.2: + version "2.3.2" + resolved "https://registry.yarnpkg.com/classnames/-/classnames-2.3.2.tgz#351d813bf0137fcc6a76a16b88208d2560a0d924" + integrity sha512-CSbhY4cFEJRe6/GQzIk5qXZ4Jeg5pcsP7b5peFSDpffpe1cqjASH/n9UTjBwOp6XpMSTwQ8Za2K5V02ueA7Tmw== + client-only@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/client-only/-/client-only-0.0.1.tgz#38bba5d403c41ab150bff64a95c85013cf73bca1" From 2ebdd84ccb8e4f0c11f78cde0b87e0f14c9c2221 Mon Sep 17 00:00:00 2001 From: Albert Liu Date: Tue, 2 May 2023 23:32:02 -0700 Subject: [PATCH 23/33] meh --- pages/index.tsx | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/pages/index.tsx b/pages/index.tsx index eb488e46..7c969147 100644 --- a/pages/index.tsx +++ b/pages/index.tsx @@ -38,16 +38,6 @@ export function App() { } }); - React.useEffect(() => { - editorRef; - - function pollEditorValue() { - if (editorRef.current) { - localStorage.setItem("tciEditorValue", editorRef.current.getValue()); - } - } - }, []); - const editorRef = React.useRef(); function compile() { From b253970e5f4e62b6b75637c9f61f03427f771d94 Mon Sep 17 00:00:00 2001 From: Albert Liu Date: Tue, 2 May 2023 23:33:43 -0700 Subject: [PATCH 24/33] meh --- pages/index.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pages/index.tsx b/pages/index.tsx index 7c969147..9335608b 100644 --- a/pages/index.tsx +++ b/pages/index.tsx @@ -1,7 +1,7 @@ import Link from "next/link"; import styles from "./tci.module.css"; import Editor from "@monaco-editor/react"; -import type monaco from "monaco-editor"; +import monaco from "monaco-editor"; import React from "react"; import { useCompilerWorker } from "@/components/hooks"; import { CompileResult } from "@/components/compiler.schema"; From 151e8e25ff9d015e1015ef2672d6ae1b03a13d27 Mon Sep 17 00:00:00 2001 From: Albert Liu Date: Tue, 2 May 2023 23:34:44 -0700 Subject: [PATCH 25/33] meh --- pages/index.tsx | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/pages/index.tsx b/pages/index.tsx index 9335608b..51ee906a 100644 --- a/pages/index.tsx +++ b/pages/index.tsx @@ -69,12 +69,7 @@ export function App() {
-
+
Date: Tue, 2 May 2023 23:49:02 -0700
Subject: [PATCH 26/33] meh

---
 compiler/src/pass/mod.rs | 16 ++++++++++++--
 1 file changed, 14 insertions(+), 2 deletions(-)

diff --git a/compiler/src/pass/mod.rs b/compiler/src/pass/mod.rs
index 81190db3..8cf10d30 100644
--- a/compiler/src/pass/mod.rs
+++ b/compiler/src/pass/mod.rs
@@ -85,10 +85,22 @@ pub fn sort_by_postorder(ast: &mut AstNodeVec) {
     }
 }
 
+// TODO: set up "data" system so that it's possible to interpret the data field
+// of an AST node using the node's `kind` field
+
 // validate declarations -> produce declaration types
-// Function declarations need to have proper derived declarator and etc
 // Declaration specifiers need to make sense for the kind of declaration they're on
-pub fn validate_declaration_types(ast: &mut ByKindAst) -> Result<(), Error> {
+pub fn validate_declaration_nodes(ast: &mut ByKindAst) -> Result<(), Error> {
+    // Loop over all specifier nodes, and:
+    // 1. ensure their parent is a declaration of some kind
+    // 2. add them to their parent's data field
+    // 3. ensure the combined declaration specifiers are valid for each kind of declaration
+
+    // 4. Loop over all derived declarators, and combine them into their declarator
+    // 5. Loop over all declarators, and fold them into parents
+    // 6. Combine type from declaration and derived declarators to produce types for each declarator
+    // 7. Validate that types make sense for function definitions
+
     return Ok(());
 }
 
From d0e973bb281195ae50d832b4e21d7eacc7b127f3 Mon Sep 17 00:00:00 2001
From: Albert Liu
Date: Wed, 3 May 2023 18:59:07 -0700
Subject: [PATCH 27/33] meh

---
 compiler/src/ast.rs      | 16 +++++++++++-----
 compiler/src/pass/mod.rs | 17 ++++++++++++++---
 components/Ast.tsx       | 14 +++++++++++---
 3 files changed, 36 insertions(+), 11 deletions(-)

diff --git a/compiler/src/ast.rs b/compiler/src/ast.rs
index 0b5ef9c0..612e3a5a 100644
--- a/compiler/src/ast.rs
+++ b/compiler/src/ast.rs
@@ -4,6 +4,14 @@ This module describes the AST created by the parser.
use crate::api::*; +pub trait AstInterpretData { + type Output; + + fn read(&self, field: &u64) -> Self::Output; + + fn as_mut_ref(&self, field: &mut u64) -> &mut Self::Output; +} + #[derive(Debug, Clone, Copy, StructOfArray)] pub struct AstNode { pub kind: AstNodeKind, @@ -188,14 +196,12 @@ pub enum AstSpecifier { Int, Long, + Unsigned, + Signed, + Float, Double, - UChar, - UShort, - UInt, - ULong, - Struct(StructDeclaration), // children: ident declaration of struct, field declarations of struct Ident, // data: Symbol } diff --git a/compiler/src/pass/mod.rs b/compiler/src/pass/mod.rs index 8cf10d30..132c11d0 100644 --- a/compiler/src/pass/mod.rs +++ b/compiler/src/pass/mod.rs @@ -3,7 +3,7 @@ use core::ops::Range; use crate::api::*; pub struct ByKindAst<'a> { - pub ast: &'a mut AstNodeVec, + pub nodes: &'a mut AstNodeVec, pub by_kind: HashMap>, pub by_kind_in_order: Vec<(AstNodeKind, Range)>, } @@ -38,7 +38,7 @@ impl<'a> ByKindAst<'a> { } return ByKindAst { - ast, + nodes: ast, by_kind, by_kind_in_order, }; @@ -66,7 +66,7 @@ impl<'a> ByKindAst<'a> { impl<'a> Drop for ByKindAst<'a> { fn drop(&mut self) { - sort_by_postorder(self.ast); + sort_by_postorder(self.nodes); } } @@ -91,6 +91,17 @@ pub fn sort_by_postorder(ast: &mut AstNodeVec) { // validate declarations -> produce declaration types // Declaration specifiers need to make sense for the kind of declaration theyre on pub fn validate_declaration_nodes(ast: &mut ByKindAst) -> Result<(), Error> { + for (kind, range) in &ast.by_kind_in_order { + let kind = match kind { + AstNodeKind::Specifier(k) => *k, + _ => continue, + }; + + + + + } + // Loop over all specifier nodes, and: // 1. ensure their parent is a declaration of some kind // 2. add them to their parent's data field diff --git a/components/Ast.tsx b/components/Ast.tsx index 0345bd7a..81c66ece 100644 --- a/components/Ast.tsx +++ b/components/Ast.tsx @@ -15,12 +15,20 @@ const DisplayAstNode = ({ style={{ display: "flex", flexDirection: "column", - paddingLeft: "10px", - borderLeft: "3px solid black", }} >
        {kind}
-      {children}
+      <div
+        style={{
+          display: "flex",
+          flexDirection: "column",
+          paddingLeft: "10px",
+          borderLeft: "3px solid black",
+        }}
+      >
+        {children}
); }; From 9f42e0694d8b7e5158db64315d21a8ca2b8a6853 Mon Sep 17 00:00:00 2001 From: Albert Liu Date: Wed, 3 May 2023 22:52:35 -0700 Subject: [PATCH 28/33] meh --- compiler/src/error.rs | 106 +++++++++++++++++++++++++++++++++++---- compiler/src/pass/mod.rs | 4 -- components/Ast.tsx | 2 +- pages/index.tsx | 6 +-- pages/tci.module.css | 6 --- 5 files changed, 99 insertions(+), 25 deletions(-) diff --git a/compiler/src/error.rs b/compiler/src/error.rs index 5b87384d..810275c9 100644 --- a/compiler/src/error.rs +++ b/compiler/src/error.rs @@ -1,3 +1,5 @@ +use codespan_reporting::diagnostic::{Diagnostic, Label}; + // Book-keeping to track which ranges belong to which file, so that we can // compute file and line number from `start` #[derive(Debug, Clone, Copy)] @@ -15,6 +17,42 @@ pub struct TranslationUnitDebugInfo { pub file_starts: Vec, } +pub struct FileRange { + pub file: u32, + pub start: usize, +} + +impl TranslationUnitDebugInfo { + pub fn diagnostic(&self, err: &Error) -> Diagnostic { + return Diagnostic::error() + .with_message(err.message()) + .with_code(err.code()) + .with_labels(err.kind.labels(self)); + } + + // + // 3| + // 0 0 1 2 3 3 3 4 5 + + pub fn token_range(&self, start: u32) -> FileRange { + // TODO: binary search + + let mut previous = self.file_starts[0]; + for file_start in &self.file_starts { + if file_start.index > start { + break; + } + + previous = *file_start; + } + + return FileRange { + file: previous.file, + start: previous.file_index + (start as usize - previous.index as usize), + }; + } +} + #[derive(Debug, PartialEq, serde::Serialize, serde::Deserialize)] pub enum ErrorKind { Todo(String), @@ -45,10 +83,62 @@ macro_rules! throw { }; } +impl ErrorKind { + pub fn message(&self) -> String { + use ErrorKind::*; + + match self { + Todo(message) => format!("{}", message), + + DidntRun => format!("compiler phase didn't run"), + NotImplemented(message) => format!("{}", message), + + UnrecognizedCharacter { idx } => format!("unrecognized character"), + UnrecognizedToken { idx } => format!("unrecognized token"), + } + } + + pub fn code(&self) -> &'static str { + use ErrorKind::*; + + match self { + Todo(message) => "001", + + DidntRun => "000", + NotImplemented(message) => "002", + + UnrecognizedCharacter { idx } => "100", + UnrecognizedToken { idx } => "101", + } + } + + pub fn labels(&self, tu: &TranslationUnitDebugInfo) -> Vec> { + use ErrorKind::*; + + let mut labels = Vec::new(); + + match self { + Todo(message) => {} + DidntRun => {} + NotImplemented(message) => {} + + UnrecognizedCharacter { idx } => { + let range = tu.token_range(*idx); + labels.push(Label::primary(range.file, range.start..(range.start + 1))); + } + UnrecognizedToken { idx } => { + let range = tu.token_range(*idx); + labels.push(Label::primary(range.file, range.start..(range.start + 1))); + } + } + + return labels; + } +} + #[derive(Debug)] pub struct Error { pub kind: ErrorKind, - backtrace: Option, } @@ -66,16 +156,10 @@ impl Error { } pub fn message(&self) -> String { - use ErrorKind::*; - - match &self.kind { - Todo(message) => format!("{}", message), - - DidntRun => format!("compiler phase didn't run"), - NotImplemented(message) => format!("{}", message), + return self.kind.message(); + } - UnrecognizedCharacter { idx } => format!("unrecognized character"), - UnrecognizedToken { idx } => format!("unrecognized token"), - } + pub fn code(&self) -> &'static str { + return self.kind.code(); } } diff --git a/compiler/src/pass/mod.rs b/compiler/src/pass/mod.rs index 132c11d0..cdb7f37a 100644 --- 
a/compiler/src/pass/mod.rs +++ b/compiler/src/pass/mod.rs @@ -96,10 +96,6 @@ pub fn validate_declaration_nodes(ast: &mut ByKindAst) -> Result<(), Error> { AstNodeKind::Specifier(k) => *k, _ => continue, }; - - - - } // Loop over all specifier nodes, and: diff --git a/components/Ast.tsx b/components/Ast.tsx index 81c66ece..ae0a6a62 100644 --- a/components/Ast.tsx +++ b/components/Ast.tsx @@ -92,7 +92,7 @@ export const Ast = ({ ast }: { ast: AstNode[] }) => { return ( Compile
-
-
+
+
{ @@ -121,6 +120,7 @@ export function App() { style={{ borderRadius: "4px", border: "2px solid black", + height: "100%", width: "40%", }} title={"Lexed Tokens"} diff --git a/pages/tci.module.css b/pages/tci.module.css index d1082c6b..389a47e8 100644 --- a/pages/tci.module.css +++ b/pages/tci.module.css @@ -6,13 +6,7 @@ padding-right: 0.5rem; padding-top: 0.5rem; padding-bottom: 0.5rem; - --bg-opacity: 1; - background-color: #424242; - background-color: rgba(66, 66, 66, var(--bg-opacity)); border-bottom-width: 1px; - --border-opacity: 1; - border-color: #757575; - border-color: rgba(117, 117, 117, var(--border-opacity)); } .name { From 55ae718f52ebecac1d435cdc9ad0f906e63642ce Mon Sep 17 00:00:00 2001 From: Albert Liu Date: Thu, 4 May 2023 05:11:54 -0700 Subject: [PATCH 29/33] basic basic basic error messages --- compiler/src/bin/test_runner.rs | 15 +++++++- compiler/src/error.rs | 28 +++++--------- compiler/src/filedb.rs | 64 +++++++++++++++++++++++++++++++ compiler/src/lexer.rs | 15 ++++++-- compiler/src/lib.rs | 24 +++++++++--- compiler/src/tests/lexer/dotdot.c | 5 +++ compiler/src/tests/lexer/mod.rs | 7 +++- tci-web/src/lib.rs | 2 +- 8 files changed, 128 insertions(+), 32 deletions(-) create mode 100644 compiler/src/tests/lexer/dotdot.c diff --git a/compiler/src/bin/test_runner.rs b/compiler/src/bin/test_runner.rs index 493ac6ac..c9956604 100644 --- a/compiler/src/bin/test_runner.rs +++ b/compiler/src/bin/test_runner.rs @@ -1,5 +1,6 @@ use clap::Parser; - +use codespan_reporting::term::termcolor::*; +use codespan_reporting::term::*; use compiler::StageOutput; #[derive(clap::ValueEnum, Clone, Copy)] @@ -47,7 +48,17 @@ fn main() { let test_case = std::fs::read_to_string(&args.test_case).expect("file should exist and be a valid string"); - let (source, mut result) = compiler::api::run_compiler_test_case(&*test_case); + let writer = StandardStream::stderr(ColorChoice::Always); + let config = Config::default(); + + let (source, mut result) = compiler::api::run_compiler_test_case( + &*test_case, + Some(&|files, tu, err| { + let diagnostic = tu.diagnostic(err); + codespan_reporting::term::emit(&mut writer.lock(), &config, files, &diagnostic) + .expect("wtf"); + }), + ); for stage in args.ignore { match stage { diff --git a/compiler/src/error.rs b/compiler/src/error.rs index 810275c9..ce2aeaed 100644 --- a/compiler/src/error.rs +++ b/compiler/src/error.rs @@ -30,13 +30,8 @@ impl TranslationUnitDebugInfo { .with_labels(err.kind.labels(self)); } - // - // 3| - // 0 0 1 2 3 3 3 4 5 - pub fn token_range(&self, start: u32) -> FileRange { // TODO: binary search - let mut previous = self.file_starts[0]; for file_start in &self.file_starts { if file_start.index > start { @@ -60,9 +55,7 @@ pub enum ErrorKind { DidntRun, NotImplemented(String), - UnrecognizedCharacter { idx: u32 }, - - UnrecognizedToken { idx: u32 }, + InvalidCharacterSequence { seq: String, index: u32 }, } macro_rules! 
error { @@ -93,8 +86,7 @@ impl ErrorKind { DidntRun => format!("compiler phase didn't run"), NotImplemented(message) => format!("{}", message), - UnrecognizedCharacter { idx } => format!("unrecognized character"), - UnrecognizedToken { idx } => format!("unrecognized token"), + InvalidCharacterSequence { seq, index } => format!("'{}' isn't valid", seq), } } @@ -107,8 +99,7 @@ impl ErrorKind { DidntRun => "000", NotImplemented(message) => "002", - UnrecognizedCharacter { idx } => "100", - UnrecognizedToken { idx } => "101", + InvalidCharacterSequence { seq, index } => "100", } } @@ -122,13 +113,12 @@ impl ErrorKind { DidntRun => {} NotImplemented(message) => {} - UnrecognizedCharacter { idx } => { - let range = tu.token_range(*idx); - labels.push(Label::primary(range.file, range.start..(range.start + 1))); - } - UnrecognizedToken { idx } => { - let range = tu.token_range(*idx); - labels.push(Label::primary(range.file, range.start..(range.start + 1))); + InvalidCharacterSequence { seq, index } => { + let range = tu.token_range(*index); + labels.push(Label::primary( + range.file, + range.start..(range.start + seq.len()), + )); } } diff --git a/compiler/src/filedb.rs b/compiler/src/filedb.rs index b6e9678c..13eea4f2 100644 --- a/compiler/src/filedb.rs +++ b/compiler/src/filedb.rs @@ -1,4 +1,5 @@ use crate::api::*; +use codespan_reporting::files::{line_starts, Error as SpanErr, Files}; #[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)] pub enum FileType { @@ -12,6 +13,7 @@ pub struct File { pub ty: FileType, pub name: String, pub source: String, + pub line_starts: Vec, } struct FileStatic { @@ -70,6 +72,7 @@ impl FileDb { id, ty, name, + line_starts: line_starts(&source).collect(), source, }); } @@ -91,6 +94,7 @@ impl FileDb { id, ty: FileType::User, name, + line_starts: line_starts(&source).collect(), source, }; self.names.insert((FileType::User, file.name.clone()), id); @@ -141,6 +145,66 @@ impl FileDb { } } +/// Return the starting byte index of the line with the specified line index. +/// Convenience method that already generates errors if necessary. 
+/// +/// Copied from codespan_reporting +fn get_line_start(len: usize, line_starts: &[usize], line_index: usize) -> Result { + use std::cmp::Ordering; + + match line_index.cmp(&line_starts.len()) { + Ordering::Less => Ok(line_starts + .get(line_index) + .cloned() + .expect("failed despite previous check")), + Ordering::Equal => Ok(len), + Ordering::Greater => Err(SpanErr::LineTooLarge { + given: line_index, + max: line_starts.len() - 1, + }), + } +} + +impl<'a> Files<'a> for FileDb { + type FileId = u32; + + type Name = &'a str; + + type Source = &'a str; + + fn name(&'a self, id: Self::FileId) -> Result { + let f = self.files.get(id as usize).ok_or(SpanErr::FileMissing)?; + return Ok(&f.name); + } + + fn source(&'a self, id: Self::FileId) -> Result { + let f = self.files.get(id as usize).ok_or(SpanErr::FileMissing)?; + return Ok(&f.source); + } + + fn line_index(&'a self, id: Self::FileId, byte_index: usize) -> Result { + let f = self.files.get(id as usize).ok_or(SpanErr::FileMissing)?; + + return Ok(f + .line_starts + .binary_search(&byte_index) + .unwrap_or_else(|next_line| next_line - 1)); + } + + fn line_range( + &'a self, + id: Self::FileId, + line_index: usize, + ) -> Result, SpanErr> { + let f = self.files.get(id as usize).ok_or(SpanErr::FileMissing)?; + + let line_start = get_line_start(f.source.len(), &f.line_starts, line_index)?; + let next_line_start = get_line_start(f.source.len(), &f.line_starts, line_index + 1)?; + + return Ok(line_start..next_line_start); + } +} + #[derive(Debug, Hash, Eq, PartialEq, Clone, Copy, Serialize, Deserialize)] #[non_exhaustive] #[repr(u32)] diff --git a/compiler/src/lexer.rs b/compiler/src/lexer.rs index 6658d13e..3890d8c4 100644 --- a/compiler/src/lexer.rs +++ b/compiler/src/lexer.rs @@ -184,6 +184,12 @@ struct IncludeEntry<'a> { index: usize, } +struct TokInfo<'a> { + begin: u32, + index: &'a mut usize, + data: &'a [u8], +} + pub struct LexResult { pub translation_unit: TranslationUnitDebugInfo, pub symbols: SymbolTable, @@ -246,7 +252,7 @@ pub fn lex(files: &FileDb, file: &File) -> Result { } let data = &input.contents[input.index..]; - let res = match lex_tok_from_bytes(data) { + let res = match lex_tok_from_bytes(index, data) { Ok(res) => res, Err(error) => { return Err(LexError { @@ -342,7 +348,7 @@ struct LexedTok { /// Lex a token from the bytes given. Assumes that we're not at EOF, and /// theres no whitespace before the token. -fn lex_tok_from_bytes<'a>(data: &'a [u8]) -> Result { +fn lex_tok_from_bytes<'a>(global_index: u32, data: &'a [u8]) -> Result { let mut index: usize = 0; let first = data[index]; @@ -512,7 +518,10 @@ fn lex_tok_from_bytes<'a>(data: &'a [u8]) -> Result { }); } - throw!(Todo("'..' 
isn't valid")); + throw!(InvalidCharacterSequence { + seq: "..".to_string(), + index: global_index, + }); } return Ok(LexedTok { diff --git a/compiler/src/lib.rs b/compiler/src/lib.rs index 93bf81d5..be35c11f 100644 --- a/compiler/src/lib.rs +++ b/compiler/src/lib.rs @@ -2,8 +2,6 @@ #![allow(unused_variables)] #![allow(incomplete_features)] -use api::AstNodeKind; - #[macro_use] extern crate soa_derive; #[macro_use] @@ -93,16 +91,17 @@ pub struct PipelineData { #[derive(serde::Serialize, serde::Deserialize, PartialEq, Debug)] pub struct SimpleAstNode { - pub kind: AstNodeKind, + pub kind: ast::AstNodeKind, pub parent: u32, pub post_order: u32, pub height: u16, } const TEST_CASE_DELIMITER: &'static str = "// -- END TEST CASE --\n// "; +type Printer<'a> = &'a dyn Fn(&filedb::FileDb, &error::TranslationUnitDebugInfo, &error::Error); // NOTE: the "source" field is empty -pub fn run_compiler_for_testing(source: String) -> PipelineData { +pub fn run_compiler_for_testing(source: String, print_err: Option) -> PipelineData { use crate::api::*; let mut files = FileDb::new(); @@ -120,10 +119,16 @@ pub fn run_compiler_for_testing(source: String) -> PipelineData { let lexer_res = match lex(&files, file) { Ok(res) => res, Err(e) => { + if let Some(print) = print_err { + print(&files, &e.translation_unit, &e.error); + } + out.lexer = StageOutput::Err(e.error.kind); return out; } }; + + let tu = lexer_res.translation_unit; out.lexer = StageOutput::Ok(lexer_res.tokens.kind.clone()); let macro_expansion_res = expand_macros(lexer_res.tokens.as_slice()); @@ -132,6 +137,10 @@ pub fn run_compiler_for_testing(source: String) -> PipelineData { let parsed_ast = match parse(¯o_expansion_res) { Ok(res) => res, Err(e) => { + if let Some(print) = print_err { + print(&files, &tu, &e); + } + out.parsed_ast = StageOutput::Err(e.kind); return out; } @@ -152,7 +161,10 @@ pub fn run_compiler_for_testing(source: String) -> PipelineData { return out; } -pub fn run_compiler_test_case(test_source: &str) -> (&str, PipelineData) { +pub fn run_compiler_test_case<'a>( + test_source: &'a str, + print_err: Option, +) -> (&'a str, PipelineData) { let (source, expected_str) = test_source .split_once(TEST_CASE_DELIMITER) .unwrap_or((test_source, "null")); @@ -170,7 +182,7 @@ pub fn run_compiler_test_case(test_source: &str) -> (&str, PipelineData) { parsed_ast: StageOutput::Ignore, }); - let output = run_compiler_for_testing(source_string); + let output = run_compiler_for_testing(source_string, print_err); assert_eq!(output, expected); return (source, output); diff --git a/compiler/src/tests/lexer/dotdot.c b/compiler/src/tests/lexer/dotdot.c new file mode 100644 index 00000000..eff8677d --- /dev/null +++ b/compiler/src/tests/lexer/dotdot.c @@ -0,0 +1,5 @@ +int main() { + .. 
+} +// -- END TEST CASE -- +// {"lexer":{"Err":{"InvalidCharacterSequence":{"seq":"..","index":17}}},"macro_expansion":{"Err":"DidntRun"},"parsed_ast":{"Err":"DidntRun"}} diff --git a/compiler/src/tests/lexer/mod.rs b/compiler/src/tests/lexer/mod.rs index bf6d902e..cb934048 100644 --- a/compiler/src/tests/lexer/mod.rs +++ b/compiler/src/tests/lexer/mod.rs @@ -1,6 +1,11 @@ #[test] fn simple() { - crate::run_compiler_test_case(include_str!("simple.c")); + crate::run_compiler_test_case(include_str!("simple.c"), None); +} + +#[test] +fn dotdot() { + crate::run_compiler_test_case(include_str!("dotdot.c"), None); } // #[test] diff --git a/tci-web/src/lib.rs b/tci-web/src/lib.rs index da54b944..ff89774d 100644 --- a/tci-web/src/lib.rs +++ b/tci-web/src/lib.rs @@ -23,7 +23,7 @@ pub fn compile(source: String) -> Result { }; 'done: { - let data = run_compiler_for_testing(source); + let data = run_compiler_for_testing(source, None); macro_rules! stage_transfer { ($i:ident) => { From 03d5c5e5e940353640a2c9f14035ccfca3d2cb6e Mon Sep 17 00:00:00 2001 From: Albert Liu Date: Thu, 4 May 2023 05:22:56 -0700 Subject: [PATCH 30/33] meh --- compiler/src/bin/test_runner.rs | 25 ++++++++++++++++--------- compiler/src/lib.rs | 32 +++++++++++++++++--------------- compiler/src/tests/lexer/mod.rs | 4 ++-- 3 files changed, 35 insertions(+), 26 deletions(-) diff --git a/compiler/src/bin/test_runner.rs b/compiler/src/bin/test_runner.rs index c9956604..014da7e4 100644 --- a/compiler/src/bin/test_runner.rs +++ b/compiler/src/bin/test_runner.rs @@ -1,7 +1,7 @@ use clap::Parser; use codespan_reporting::term::termcolor::*; use codespan_reporting::term::*; -use compiler::StageOutput; +use compiler::{parse_test_case, StageOutput}; #[derive(clap::ValueEnum, Clone, Copy)] enum Stage { @@ -50,15 +50,22 @@ fn main() { let writer = StandardStream::stderr(ColorChoice::Always); let config = Config::default(); + let print_err: compiler::PrintFunc = &|files, tu, err| { + let diagnostic = tu.diagnostic(err); + codespan_reporting::term::emit(&mut writer.lock(), &config, files, &diagnostic) + .expect("wtf"); + }; - let (source, mut result) = compiler::api::run_compiler_test_case( - &*test_case, - Some(&|files, tu, err| { - let diagnostic = tu.diagnostic(err); - codespan_reporting::term::emit(&mut writer.lock(), &config, files, &diagnostic) - .expect("wtf"); - }), - ); + let (source, expected) = parse_test_case(&test_case); + + let print_err = if args.out_file.is_some() || args.write { + None + } else { + Some(print_err) + }; + + let mut result = compiler::run_compiler_for_testing(source.to_string(), print_err); + assert_eq!(result, expected); for stage in args.ignore { match stage { diff --git a/compiler/src/lib.rs b/compiler/src/lib.rs index be35c11f..9d6901b4 100644 --- a/compiler/src/lib.rs +++ b/compiler/src/lib.rs @@ -98,12 +98,16 @@ pub struct SimpleAstNode { } const TEST_CASE_DELIMITER: &'static str = "// -- END TEST CASE --\n// "; -type Printer<'a> = &'a dyn Fn(&filedb::FileDb, &error::TranslationUnitDebugInfo, &error::Error); +pub type PrintFunc<'a> = &'a dyn Fn(&filedb::FileDb, &error::TranslationUnitDebugInfo, &error::Error); // NOTE: the "source" field is empty -pub fn run_compiler_for_testing(source: String, print_err: Option) -> PipelineData { +pub fn run_compiler_for_testing(mut source: String, print_err: Option) -> PipelineData { use crate::api::*; + if !source.ends_with("\n") { + source.push('\n'); + } + let mut files = FileDb::new(); let file_id = files .add_file("main.c".to_string(), source) @@ -161,19 +165,20 @@ pub 
fn run_compiler_for_testing(source: String, print_err: Option) -> P return out; } -pub fn run_compiler_test_case<'a>( - test_source: &'a str, - print_err: Option, -) -> (&'a str, PipelineData) { +pub fn run_compiler_test_case<'a>(test_source: &'a str) -> (&'a str, PipelineData) { + let (source, expected) = parse_test_case(test_source); + + let output = run_compiler_for_testing(source.to_string(), None); + assert_eq!(output, expected); + + return (source, output); +} + +pub fn parse_test_case(test_source: &str) -> (&str, PipelineData) { let (source, expected_str) = test_source .split_once(TEST_CASE_DELIMITER) .unwrap_or((test_source, "null")); - let mut source_string = source.to_string(); - if !source_string.ends_with("\n") { - source_string.push('\n'); - } - let expected = serde_json::from_str::>(expected_str) .expect("Test case expected value didn't parse") .unwrap_or(PipelineData { @@ -182,10 +187,7 @@ pub fn run_compiler_test_case<'a>( parsed_ast: StageOutput::Ignore, }); - let output = run_compiler_for_testing(source_string, print_err); - assert_eq!(output, expected); - - return (source, output); + return (source, expected); } impl PipelineData { diff --git a/compiler/src/tests/lexer/mod.rs b/compiler/src/tests/lexer/mod.rs index cb934048..d26b9d58 100644 --- a/compiler/src/tests/lexer/mod.rs +++ b/compiler/src/tests/lexer/mod.rs @@ -1,11 +1,11 @@ #[test] fn simple() { - crate::run_compiler_test_case(include_str!("simple.c"), None); + crate::run_compiler_test_case(include_str!("simple.c")); } #[test] fn dotdot() { - crate::run_compiler_test_case(include_str!("dotdot.c"), None); + crate::run_compiler_test_case(include_str!("dotdot.c")); } // #[test] From 8cab290276936c2ab37380b6b5a176a311c2f430 Mon Sep 17 00:00:00 2001 From: Albert Liu Date: Thu, 4 May 2023 05:24:49 -0700 Subject: [PATCH 31/33] meh --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 1f9d895f..dc375791 100644 --- a/package.json +++ b/package.json @@ -9,7 +9,7 @@ "scripts": { "start": "next start", "link-wasm": "cd tci-web/pkg && yarn link && cd ../.. && yarn link tci-web", - "build-wasm": "wasm-pack build --debug tci-web", + "wasm": "wasm-pack build --debug tci-web", "deploy": "git push origin main:production", "dev": "next", "lint": "next lint" From 9b3a0e89d4ecbde494cd9f8aad746b665892db56 Mon Sep 17 00:00:00 2001 From: Albert Liu Date: Thu, 4 May 2023 05:38:37 -0700 Subject: [PATCH 32/33] meh --- components/compiler.schema.ts | 2 +- pages/index.tsx | 11 +++++++---- tci-web/src/lib.rs | 2 -- 3 files changed, 8 insertions(+), 7 deletions(-) diff --git a/components/compiler.schema.ts b/components/compiler.schema.ts index e9274ee7..2106975e 100644 --- a/components/compiler.schema.ts +++ b/components/compiler.schema.ts @@ -20,7 +20,7 @@ export const CompileResult = z.object({ lexer: z.array(z.string()).nullish(), macro_expansion: z.array(z.string()).nullish(), parsed_ast: z.array(AstNode).nullish(), - error: z.any().nullish(), + errors: z.any().nullish(), }); export type CompilerOutput = z.infer; diff --git a/pages/index.tsx b/pages/index.tsx index 75c6014e..91ca11f3 100644 --- a/pages/index.tsx +++ b/pages/index.tsx @@ -32,7 +32,7 @@ export function App() { console.error(res.error); break; case "result": - console.log("Compiled"); + console.log("Compiled ", res.result); setResult(res.result); break; } @@ -72,6 +72,9 @@ export function App() {
{ @@ -137,10 +140,10 @@ export function App() {
-      {result?.error && (
-        <div className={styles.scrollBox}>
-          <h4>Error</h4>
-          <pre className={styles.text}>{JSON.stringify(result.error)}</pre>
-        </div>
-      )}
+      {result?.errors && (
+        <div className={styles.scrollBox}>
+          <h4>Error</h4>
+          <pre className={styles.text}>{JSON.stringify(result.errors)}</pre>
+        </div>
diff --git a/tci-web/src/lib.rs b/tci-web/src/lib.rs index ff89774d..f11ee1d9 100644 --- a/tci-web/src/lib.rs +++ b/tci-web/src/lib.rs @@ -31,8 +31,6 @@ pub fn compile(source: String) -> Result { StageOutput::Ok(l) => output.$i = Some(l), StageOutput::Ignore => {} StageOutput::Err(e) => { - output.$i = None; - let error = format!(concat!(stringify!($i), " error: {:?}"), e); output.errors.get_or_insert(Vec::new()).push(error); From 1cc65c48495dc0737b7a741b45238b2a7208bdc2 Mon Sep 17 00:00:00 2001 From: Albert Liu Date: Thu, 4 May 2023 05:54:28 -0700 Subject: [PATCH 33/33] meh --- components/Ast.tsx | 39 ++++++--------------------- package.json | 1 + pages/_app.tsx | 2 +- pages/{globals.css => globals.scss} | 42 ++++++++++++++++++++++++++--- pages/index.tsx | 39 +++++++-------------------- pages/tci.module.css | 3 --- yarn.lock | 18 +++++++++++-- 7 files changed, 75 insertions(+), 69 deletions(-) rename pages/{globals.css => globals.scss} (74%) diff --git a/components/Ast.tsx b/components/Ast.tsx index ae0a6a62..0d190ae6 100644 --- a/components/Ast.tsx +++ b/components/Ast.tsx @@ -11,21 +11,12 @@ const DisplayAstNode = ({ children?: React.ReactNode; }) => { return ( -
+
{kind}
{children}
@@ -61,14 +52,7 @@ const RecursiveAst = ({ ast }: { ast: AstNode[] }) => { const FlatAst = ({ ast }: { ast: AstNode[] }) => { return ( -
+
{ast.map((obj, index) => { const data = obj.kind.data ? `,${JSON.stringify(obj.kind.data)}` : ""; return ( @@ -91,17 +75,10 @@ export const Ast = ({ ast }: { ast: AstNode[] }) => { return ( +
} > -
+
{recursive ? : }
diff --git a/package.json b/package.json index dc375791..bd89aa9b 100644 --- a/package.json +++ b/package.json @@ -35,6 +35,7 @@ "@types/react": "18.0.38", "@types/react-dom": "18.0.11", "postcss": "8.4.23", + "sass": "^1.62.1", "tailwindcss": "3.3.1", "typescript": "5.0.4", "wasm-pack": "^0.11.0" diff --git a/pages/_app.tsx b/pages/_app.tsx index ad6ddc17..51ebbd10 100644 --- a/pages/_app.tsx +++ b/pages/_app.tsx @@ -1,4 +1,4 @@ -import "./globals.css"; +import "./globals.scss"; import type { AppProps } from "next/app"; export default function App({ Component, pageProps }: AppProps) { diff --git a/pages/globals.css b/pages/globals.scss similarity index 74% rename from pages/globals.css rename to pages/globals.scss index f8ebae3e..a2a45528 100644 --- a/pages/globals.css +++ b/pages/globals.scss @@ -6,6 +6,9 @@ --foreground-rgb: 0, 0, 0; --background-start-rgb: 214, 219, 220; --background-end-rgb: 255, 255, 255; + + --border-radius: 0.25rem; + --spacing: 1.25rem; } @media (prefers-color-scheme: dark) { @@ -18,14 +21,47 @@ html, body, +.full, div#__next { height: 100%; width: 100%; } -.full { - height: 100%; - width: 100%; +.row { + display: flex; + flex-direction: row; + align-items: center; + // flex-wrap: nowrap; Don't know whether or now rows should wrap yet + + & > * { + margin: 0px; + } +} + +.col { + display: flex; + flex-direction: column; + flex-wrap: nowrap; + + & > * { + margin: 0px; + } +} + +.rounded { + border-radius: var(--border-radius); +} + +.border { + border: 0.25rem solid black; +} + +.pad { + padding: var(--spacing); +} + +.gap { + gap: var(--spacing); } .Resizer { diff --git a/pages/index.tsx b/pages/index.tsx index 91ca11f3..e289e848 100644 --- a/pages/index.tsx +++ b/pages/index.tsx @@ -1,4 +1,5 @@ import Link from "next/link"; +import cx from "classnames"; import styles from "./tci.module.css"; import Editor from "@monaco-editor/react"; import monaco from "monaco-editor"; @@ -102,49 +103,29 @@ export function App() { />
-
-
+
+
{result?.lexer && ( -
+                
                   {JSON.stringify(result.lexer, undefined, 2)}
                 
)} -
+
{result?.parsed_ast && }
{result?.errors && ( -
-

Error

-
{JSON.stringify(result.errors)}
-
+ +
{JSON.stringify(result.errors)}
+
)}
diff --git a/pages/tci.module.css b/pages/tci.module.css index 389a47e8..167462b9 100644 --- a/pages/tci.module.css +++ b/pages/tci.module.css @@ -22,10 +22,7 @@ } .scrollBox { - position: relative; - width: 100%; - border-radius: 4px; border: 2px solid black; } diff --git a/yarn.lock b/yarn.lock index 7bea3f01..a0a864f1 100644 --- a/yarn.lock +++ b/yarn.lock @@ -533,7 +533,7 @@ chalk@^4.0.0: ansi-styles "^4.1.0" supports-color "^7.1.0" -chokidar@^3.5.3: +"chokidar@>=3.0.0 <4.0.0", chokidar@^3.5.3: version "3.5.3" resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.5.3.tgz#1cf37c8707b932bd1af1ae22c0432e2acd1903bd" integrity sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw== @@ -1327,6 +1327,11 @@ ignore@^5.2.0: resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.2.4.tgz#a291c0c6178ff1b960befe47fcdec301674a6324" integrity sha512-MAb38BcSbH0eHNBxn7ql2NH/kX33OkB3lZ1BNdh7ENeRChHTYsTvWrMubiIAMNS2llXEEgZ1MUOBtXChP3kaFQ== +immutable@^4.0.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/immutable/-/immutable-4.3.0.tgz#eb1738f14ffb39fd068b1dbe1296117484dd34be" + integrity sha512-0AOCmOip+xgJwEVTQj1EfiDDOkPmuyllDuTuEX+DDXUgapLAsBIfkg3sxCYyCEA8mQqZrrxPUGjcOQ2JS3WLkg== + import-fresh@^3.0.0, import-fresh@^3.2.1: version "3.3.0" resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-3.3.0.tgz#37162c25fcb9ebaa2e6e53d5b4d88ce17d9e0c2b" @@ -2182,6 +2187,15 @@ safe-regex-test@^1.0.0: get-intrinsic "^1.1.3" is-regex "^1.1.4" +sass@^1.62.1: + version "1.62.1" + resolved "https://registry.yarnpkg.com/sass/-/sass-1.62.1.tgz#caa8d6bf098935bc92fc73fa169fb3790cacd029" + integrity sha512-NHpxIzN29MXvWiuswfc1W3I0N8SXBd8UR26WntmDlRYf0bSADnwnOjsyMZ3lMezSlArD33Vs3YFhp7dWvL770A== + dependencies: + chokidar ">=3.0.0 <4.0.0" + immutable "^4.0.0" + source-map-js ">=0.6.2 <2.0.0" + scheduler@^0.23.0: version "0.23.0" resolved "https://registry.yarnpkg.com/scheduler/-/scheduler-0.23.0.tgz#ba8041afc3d30eb206a487b6b384002e4e61fdfe" @@ -2232,7 +2246,7 @@ slash@^4.0.0: resolved "https://registry.yarnpkg.com/slash/-/slash-4.0.0.tgz#2422372176c4c6c5addb5e2ada885af984b396a7" integrity sha512-3dOsAHXXUkQTpOYcoAxLIorMTp4gIQr5IW3iVb7A7lFIp0VHhnynm9izx6TssdrIcVIESAlVjtnO2K8bg+Coew== -source-map-js@^1.0.2: +"source-map-js@>=0.6.2 <2.0.0", source-map-js@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/source-map-js/-/source-map-js-1.0.2.tgz#adbc361d9c62df380125e7f161f71c826f1e490c" integrity sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==