diff --git a/sway-core/src/control_flow_analysis/analyze_return_paths.rs b/sway-core/src/control_flow_analysis/analyze_return_paths.rs index 3b50fa244e6..bbc4605303e 100644 --- a/sway-core/src/control_flow_analysis/analyze_return_paths.rs +++ b/sway-core/src/control_flow_analysis/analyze_return_paths.rs @@ -182,10 +182,7 @@ fn connect_node<'eng: 'cfg, 'cfg>( ty::TyAstNodeContent::Declaration(decl) => Ok(NodeConnection::NextStep( connect_declaration(engines, node, decl, graph, leaves)?, )), - ty::TyAstNodeContent::Error(spans, _) => { - let span = Span::join_all(spans.iter().cloned()); - return Err(vec![CompileError::InvalidStatement { span }]); - } + ty::TyAstNodeContent::Error(_, _) => Ok(NodeConnection::NextStep(vec![])), } } diff --git a/sway-core/src/control_flow_analysis/dead_code_analysis.rs b/sway-core/src/control_flow_analysis/dead_code_analysis.rs index 792ddfe4940..8eb89f93304 100644 --- a/sway-core/src/control_flow_analysis/dead_code_analysis.rs +++ b/sway-core/src/control_flow_analysis/dead_code_analysis.rs @@ -426,10 +426,7 @@ fn connect_node<'eng: 'cfg, 'cfg>( exit_node, ) } - ty::TyAstNodeContent::Error(spans, _) => { - let span = Span::join_all(spans.iter().cloned()); - return Err(CompileError::InvalidStatement { span }); - } + ty::TyAstNodeContent::Error(_, _) => (vec![], None), }) } diff --git a/sway-core/src/ir_generation/const_eval.rs b/sway-core/src/ir_generation/const_eval.rs index 2e3dfa5b6d8..fc70b85017c 100644 --- a/sway-core/src/ir_generation/const_eval.rs +++ b/sway-core/src/ir_generation/const_eval.rs @@ -693,11 +693,8 @@ fn const_eval_codeblock( ty::TyAstNodeContent::SideEffect(_) => Err(ConstEvalError::CannotBeEvaluatedToConst { span: ast_node.span.clone(), }), - ty::TyAstNodeContent::Error(spans, _) => { - let span = Span::join_all(spans.iter().cloned()); - Err(ConstEvalError::CompileError( - CompileError::InvalidStatement { span }, - )) + ty::TyAstNodeContent::Error(_, _) => { + unreachable!("error node found when generating IR"); } 
}; diff --git a/sway-core/src/ir_generation/function.rs b/sway-core/src/ir_generation/function.rs index f46af9ab1b6..a89872fd162 100644 --- a/sway-core/src/ir_generation/function.rs +++ b/sway-core/src/ir_generation/function.rs @@ -216,9 +216,8 @@ impl<'eng> FnCompiler<'eng> { // a side effect can be () because it just impacts the type system/namespacing. // There should be no new IR generated. ty::TyAstNodeContent::SideEffect(_) => Ok(None), - ty::TyAstNodeContent::Error(spans, _) => { - let span = Span::join_all(spans.iter().cloned()); - Err(CompileError::InvalidStatement { span }) + ty::TyAstNodeContent::Error(_, _) => { + unreachable!("error node found when generating IR"); } } } diff --git a/sway-core/src/language/ty/ast_node.rs b/sway-core/src/language/ty/ast_node.rs index 4604a9f2e71..f2a55320b39 100644 --- a/sway-core/src/language/ty/ast_node.rs +++ b/sway-core/src/language/ty/ast_node.rs @@ -53,7 +53,7 @@ impl DebugWithEngines for TyAstNode { Expression(exp) => DebugWithEngines::fmt(exp, f, engines), ImplicitReturnExpression(exp) => write!(f, "return {:?}", engines.help_out(exp)), SideEffect(_) => f.write_str(""), - Error(_, err) => f.write_str("error"), + Error(_, _) => f.write_str("error"), } } } diff --git a/sway-core/src/lib.rs b/sway-core/src/lib.rs index 59683e1983f..a7223ed88bc 100644 --- a/sway-core/src/lib.rs +++ b/sway-core/src/lib.rs @@ -196,6 +196,7 @@ fn parse_in_memory( submodules: Default::default(), }, ); + Ok((lexed_program, parsed::ParseProgram { kind, root })) } @@ -480,6 +481,9 @@ pub fn compile_to_ast( metrics ); + // Fail compilation if we have errors + handler.ok()?; + let (lexed_program, mut parsed_program) = match parse_program_opt { Ok(modules) => modules, Err(e) => { @@ -946,7 +950,6 @@ fn test_unary_ordering() { #[test] fn test_parser_recovery() { - use crate::language::{self, parsed}; let handler = Handler::default(); let engines = Engines::default(); let prog = parse( @@ -962,7 +965,7 @@ fn test_parser_recovery() { &engines, 
None, ); - let (lexed, parsed) = prog.unwrap(); + let (_, _) = prog.unwrap(); assert!(handler.has_errors()); dbg!(handler); } diff --git a/sway-error/src/error.rs b/sway-error/src/error.rs index ddea44a7967..4682feeb959 100644 --- a/sway-error/src/error.rs +++ b/sway-error/src/error.rs @@ -287,9 +287,6 @@ pub enum CompileError { #[error("This opcode takes an immediate value but none was provided.")] MissingImmediate { span: Span }, - #[error("This statement is invalid.")] - InvalidStatement { span: Span }, - #[error("This immediate value is invalid.")] InvalidImmediateValue { span: Span }, #[error("Variant \"{variant_name}\" does not exist on enum \"{enum_name}\"")] @@ -855,7 +852,6 @@ impl Spanned for CompileError { AbiShadowsSuperAbiMethod { span, .. } => span.clone(), ConflictingSuperAbiMethods { span, .. } => span.clone(), AbiSupertraitMethodCallAsContractCall { span, .. } => span.clone(), - InvalidStatement { span } => span.clone(), } } } diff --git a/sway-error/src/handler.rs b/sway-error/src/handler.rs index ee6472fe026..eb58f227094 100644 --- a/sway-error/src/handler.rs +++ b/sway-error/src/handler.rs @@ -80,11 +80,40 @@ impl Handler { } } + pub fn append_ref(&self, other: &Handler) { + let other = other.inner.borrow(); + + for warn in other.warnings.iter() { + self.emit_warn(warn.clone()); + } + for err in other.errors.iter() { + self.emit_err(err.clone()); + } + } + pub fn dedup(&self) { let mut inner = self.inner.borrow_mut(); inner.errors = dedup_unsorted(inner.errors.clone()); inner.warnings = dedup_unsorted(inner.warnings.clone()); } + + pub fn ok(&self) -> Result<(), ErrorEmitted> { + if !self.has_errors() { + Ok(()) + } else { + Err(ErrorEmitted { _priv: () }) + } + } + + pub fn clear_errors(&self) { + let mut inner = self.inner.borrow_mut(); + inner.errors.clear(); + } + + pub fn clear_warnings(&self) { + let mut inner = self.inner.borrow_mut(); + inner.warnings.clear(); + } } /// Proof that an error was emitted through a `Handler`. 
diff --git a/sway-error/src/parser_error.rs b/sway-error/src/parser_error.rs index c61eda1e819..54318f597c0 100644 --- a/sway-error/src/parser_error.rs +++ b/sway-error/src/parser_error.rs @@ -20,6 +20,8 @@ pub enum ParseErrorKind { UnexpectedTokenAfterArrayIndex, #[error("Invalid literal to use as a field name.")] InvalidLiteralFieldName, + #[error("Invalid statement.")] + InvalidStatement, #[error("Integer field names cannot have type suffixes.")] IntFieldWithTypeSuffix, #[error("Expected a field name.")] @@ -38,6 +40,8 @@ pub enum ParseErrorKind { MalformedAsmImmediate, #[error("Expected an identifier.")] ExpectedIdent, + #[error("Expected a pattern.")] + ExpectedPattern, #[error("Unexpected token after str length.")] UnexpectedTokenAfterStrLength, #[error("Expected a type.")] diff --git a/sway-parse/src/expr/mod.rs b/sway-parse/src/expr/mod.rs index 863a718e937..e68e9999b19 100644 --- a/sway-parse/src/expr/mod.rs +++ b/sway-parse/src/expr/mod.rs @@ -1,4 +1,3 @@ -use crate::parser::ParseWithRecoveryResult; use crate::{Parse, ParseBracket, ParseResult, ParseToEnd, Parser, ParserConsumed, Peek}; use sway_ast::brackets::{Braces, Parens, SquareBrackets}; @@ -7,7 +6,7 @@ use sway_ast::keywords::{ AbiToken, AddEqToken, AsmToken, CommaToken, ConfigurableToken, ConstToken, DivEqToken, DoubleColonToken, EnumToken, EqToken, FalseToken, FnToken, IfToken, ImplToken, LetToken, OpenAngleBracketToken, PubToken, SemicolonToken, ShlEqToken, ShrEqToken, StarEqToken, - StorageToken, StructToken, SubEqToken, Token, TraitToken, TrueToken, TypeToken, UseToken, + StorageToken, StructToken, SubEqToken, TraitToken, TrueToken, TypeToken, UseToken, }; use sway_ast::literal::{LitBool, LitBoolType}; use sway_ast::punctuated::Punctuated; @@ -94,8 +93,14 @@ impl Parse for Expr { impl Parse for StatementLet { fn parse(parser: &mut Parser) -> ParseResult { - let let_token = parser.parse()?; - let pattern = parser.try_parse()?; + let let_token: LetToken = parser.parse()?; + let pattern = 
parser.try_parse().map_err(|err| { + parser.emit_error_with_span( + ParseErrorKind::ExpectedPattern, + let_token.span().next_char(), + ); + err + })?; let ty_opt = match parser.take() { Some(colon_token) => Some((colon_token, parser.parse()?)), None => None, @@ -180,10 +185,10 @@ fn parse_stmt<'a>(parser: &mut Parser<'a, '_>) -> ParseResult> { // Try a `let` statement. match parser.guarded_parse_with_recovery::() { Ok(None) => {} - Ok(Some(slet)) => return stmt(Statement::Let(slet)), - Err(recovery) => { - let p = recovery.start(); - let (spans, error) = recovery.finish(p); + Ok(Some(item)) => return stmt(Statement::Let(item)), + Err(r) => { + let (spans, error) = + r.recover_at_next_line_with_fallback_error(ParseErrorKind::InvalidStatement); return stmt(Statement::Error(spans, error)); } } diff --git a/sway-parse/src/lib.rs b/sway-parse/src/lib.rs index f831d8454b7..05b75265b21 100644 --- a/sway-parse/src/lib.rs +++ b/sway-parse/src/lib.rs @@ -43,7 +43,8 @@ pub fn parse_file( source_id: Option, ) -> Result, ErrorEmitted> { let ts = lex(handler, &src, 0, src.len(), source_id)?; - Parser::new(handler, &ts).parse_to_end().map(|(m, _)| m) + let (m, _) = Parser::new(handler, &ts).parse_to_end()?; + Ok(m) } pub fn parse_module_kind( diff --git a/sway-parse/src/parser.rs b/sway-parse/src/parser.rs index f8472746420..f087e47757b 100644 --- a/sway-parse/src/parser.rs +++ b/sway-parse/src/parser.rs @@ -1,9 +1,6 @@ -use crate::{token, Parse, ParseToEnd, Peek}; - +use crate::{Parse, ParseToEnd, Peek}; use core::marker::PhantomData; use std::cell::RefCell; -use std::error::Error; -use std::pin::Pin; use sway_ast::keywords::Keyword; use sway_ast::literal::Literal; use sway_ast::token::{ @@ -18,13 +15,6 @@ use sway_types::{ Ident, Span, Spanned, }; -pub enum ParseWithRecoveryResult { - PeekFailed, - Ok(T), - Recovered(Box<[Span]>, ErrorEmitted), - RecoveryFailed(ErrorEmitted), -} - pub struct Parser<'a, 'e> { token_trees: &'a [TokenTree], full_span: Span, @@ -88,7 +78,7 @@ 
impl<'a, 'e> Parser<'a, 'e> { pub fn guarded_parse_with_recovery<'original, P: Peek, T: Parse>( &'original mut self, - ) -> Result, Recovery<'original, 'a, 'e>> { + ) -> Result, Recoverer<'original, 'a, 'e>> { if self.peek::

().is_none() { return Ok(None); } @@ -112,7 +102,7 @@ impl<'a, 'e> Parser<'a, 'e> { full_span, .. } = fork; - Err(Recovery { + Err(Recoverer { original: RefCell::new(self), handler, fork_token_trees: token_trees, @@ -126,14 +116,16 @@ impl<'a, 'e> Parser<'a, 'e> { /// Parses a `T` in its canonical way. /// Do not advance the parser on failure pub fn try_parse(&mut self) -> ParseResult { - let mut fork = Self { + let handler = Handler::default(); + let mut fork = Parser { token_trees: self.token_trees, full_span: self.full_span.clone(), - handler: self.handler, + handler: &handler, }; match T::parse(&mut fork) { Ok(result) => { self.token_trees = fork.token_trees; + self.handler.append(handler); Ok(result) } Err(err) => Err(err), @@ -222,24 +214,21 @@ impl<'a, 'e> Parser<'a, 'e> { &self.full_span } /// Consume all tokens that are in the current line. + /// Consume tokens while its line equals `line`. /// /// # Warning /// /// To calculate lines the original source code will be traversed multiple times. 
- pub fn consume_until_line_end(&mut self) { - let Some(first_token) = self.token_trees.get(0) else { - return; - }; - let first_span = first_token.span(); - let first_span_line = first_span.start_pos().line_col().0; - + pub fn consume_while_line_equals(&mut self, line: usize) { loop { let Some(current_token) = self.token_trees.get(0) else { - return; + break; }; + let current_span = current_token.span(); let current_span_line = current_span.start_pos().line_col().0; - if current_span_line != first_span_line { + + if current_span_line != line { break; } else { self.token_trees = &self.token_trees[1..]; @@ -247,11 +236,21 @@ impl<'a, 'e> Parser<'a, 'e> { } } - // pub fn recover<'othere>(&mut self, recovery: Recovery<ยด_ , 'a, 'othere>) { - // let f = recovery.inner.fork.as_ref().unwrap(); - // self.token_trees = f.token_trees; - // // TODO emit parser diagnostics - // } + pub fn clear_errors(&mut self) { + self.handler.clear_errors(); + } + + pub fn clear_warnings(&mut self) { + self.handler.clear_warnings(); + } + + pub fn has_errors(&self) -> bool { + self.handler.has_errors() + } + + pub fn has_warnings(&self) -> bool { + self.handler.has_warnings() + } } pub struct Peeker<'a> { @@ -361,7 +360,7 @@ impl<'a> Peeker<'a> { } } -pub struct Recovery<'original, 'a, 'e> { +pub struct Recoverer<'original, 'a, 'e> { original: RefCell<&'original mut Parser<'a, 'e>>, handler: Handler, fork_token_trees: &'a [TokenTree], @@ -369,18 +368,74 @@ pub struct Recovery<'original, 'a, 'e> { error: ErrorEmitted, } -impl<'original, 'a, 'e> Recovery<'original, 'a, 'e> { - pub fn start(&self) -> Parser<'a, '_> { - Parser { +impl<'original, 'a, 'e> Recoverer<'original, 'a, 'e> { + /// This strategy consumes everything at the current line and emits the fallback error + /// if the forked parser does not contain any error. 
+ pub fn recover_at_next_line_with_fallback_error( + &self, + kind: ParseErrorKind, + ) -> (Box<[Span]>, ErrorEmitted) { + let last_token_span = self.last_consumed_token().span(); + let last_token_span_line = last_token_span.start_pos().line_col().0; + self.recover(|p| { + p.consume_while_line_equals(last_token_span_line); + if !p.has_errors() { + p.emit_error_with_span(kind, self.diff_span(p)); + } + }) + } + + pub fn recover<'this>( + &'this self, + f: impl FnOnce(&mut Parser<'a, 'this>), + ) -> (Box<[Span]>, ErrorEmitted) { + let mut p = Parser { token_trees: self.fork_token_trees, full_span: self.fork_full_span.clone(), handler: &self.handler, - } + }; + f(&mut p); + self.finish(p) + } + + pub fn starting_token(&self) -> &GenericTokenTree { + let original = self.original.borrow(); + &original.token_trees[0] + } + + pub fn last_consumed_token(&self) -> &GenericTokenTree { + // find the last token consumed by the fork + let original = self.original.borrow(); + let fork_pos = original + .token_trees + .iter() + .position(|x| x.span() == self.fork_token_trees[0].span()) + .unwrap(); + &original.token_trees[fork_pos - 1] } - pub fn abort(self) {} + pub fn diff_span<'this>(&self, p: &Parser<'a, 'this>) -> Span { + let original = self.original.borrow_mut(); + + // collect all tokens trees that were consumed by the fork + let first_fork_tt = &p.token_trees[0]; + let qty = original + .token_trees + .iter() + .position(|tt| tt.span() == first_fork_tt.span()) + .expect("not finding fork head"); + + let garbage: Vec<_> = original + .token_trees + .iter() + .take(qty) + .map(|x| x.span()) + .collect(); + + Span::join_all(garbage) + } - pub fn finish(&self, p: Parser<'a, '_>) -> (Box<[Span]>, ErrorEmitted) { + fn finish(&self, p: Parser<'a, '_>) -> (Box<[Span]>, ErrorEmitted) { let mut original = self.original.borrow_mut(); // collect all tokens trees that were consumed by the fork @@ -399,8 +454,9 @@ impl<'original, 'a, 'e> Recovery<'original, 'a, 'e> { .collect(); 
original.token_trees = p.token_trees; + original.handler.append_ref(&self.handler); - (garbage.into_boxed_slice(), self.error.clone()) + (garbage.into_boxed_slice(), self.error) } } diff --git a/sway-types/src/span.rs b/sway-types/src/span.rs index 898ec029e9d..3eeda432321 100644 --- a/sway-types/src/span.rs +++ b/sway-types/src/span.rs @@ -190,6 +190,15 @@ impl Span { } } + pub fn next_char(&self) -> Span { + Span { + src: self.src.clone(), + source_id: self.source_id, + start: self.end, + end: self.end + 1, + } + } + /// This panics if the spans are not from the same file. This should /// only be used on spans that are actually next to each other. pub fn join(s1: Span, s2: Span) -> Span { diff --git a/test/src/e2e_vm_tests/test_programs/should_fail/recover_letx_no_expr/src/main.sw b/test/src/e2e_vm_tests/test_programs/should_fail/recover_letx_no_expr/src/main.sw index 322a32b3f2b..aacc32a7817 100644 --- a/test/src/e2e_vm_tests/test_programs/should_fail/recover_letx_no_expr/src/main.sw +++ b/test/src/e2e_vm_tests/test_programs/should_fail/recover_letx_no_expr/src/main.sw @@ -6,4 +6,6 @@ fn main() { let b = let c = 1 let d = 1; + let asdoij ojasd 123 apasdok asdpokasd; + let e = 1; }