From cc7b4f603e8c00bff01ca35aa1909d9c138d0a41 Mon Sep 17 00:00:00 2001
From: Kaz Wesley
Date: Tue, 25 Jul 2023 14:49:33 +0000
Subject: [PATCH 1/5] Parser: Correctly handle #7335 syntax errors.

---
 lib/rust/parser/debug/tests/parse.rs   | 230 +++++++++++++++++--------
 lib/rust/parser/src/lexer.rs           |   5 +-
 lib/rust/parser/src/syntax/operator.rs | 115 +++++++++++--
 3 files changed, 259 insertions(+), 91 deletions(-)

diff --git a/lib/rust/parser/debug/tests/parse.rs b/lib/rust/parser/debug/tests/parse.rs
index 4a22c36cc509..bd7080096413 100644
--- a/lib/rust/parser/debug/tests/parse.rs
+++ b/lib/rust/parser/debug/tests/parse.rs
@@ -1,5 +1,7 @@
 //! Parse expressions and compare their results to expected values.
 
+// === Features ===
+#![feature(cell_update)]
 // === Non-Standard Linter Configuration ===
 #![allow(clippy::option_map_unit_fn)]
 #![allow(clippy::precedence)]
@@ -698,21 +700,34 @@ fn unevaluated_argument() {
 
 #[test]
 fn unary_operator_missing_operand() {
-    test_invalid("main ~ = x");
+    expect_invalid_node("main ~ = x");
 }
 
 #[test]
 fn unary_operator_at_end_of_expression() {
-    test_invalid("foo ~");
+    expect_invalid_node("foo ~");
 }
 
 #[test]
-fn plus_negative() {
-    let code = ["x = x+-x"];
-    let expected = block![
-        (Assignment (Ident x) "=" (OprApp (Ident x) (Ok "+") (UnaryOprApp "-" (Ident x))))
+fn unspaced_operator_sequence() {
+    let cases = [
+        // Add a negated value.
+        ("x = y+-z", block![
+            (Assignment (Ident x) "=" (OprApp (Ident y) (Ok "+") (UnaryOprApp "-" (Ident z))))]),
+        // Create an operator section that adds a negated value to its input.
+        ("x = +-z", block![
+            (Assignment (Ident x) "=" (OprSectionBoundary 1
+             (OprApp () (Ok "+") (UnaryOprApp "-" (Ident z)))))]),
+        // Create an operator section that adds its input, negated, to a value.
+        ("x = y+-", block![
+            (Assignment (Ident x) "=" (OprSectionBoundary 1
+             (OprApp (Ident y) (Ok "+") (UnaryOprApp "-" ()))))]),
+        // Assign a negative number to x.
+        ("x=-1", block![(Assignment (Ident x) "=" (UnaryOprApp "-" (Number () "1" ())))]),
+        // Assign a negated value to x.
+        ("x=-y", block![(Assignment (Ident x) "=" (UnaryOprApp "-" (Ident y)))]),
     ];
-    test(&code.join("\n"), expected);
+    cases.into_iter().for_each(|(code, expected)| test(code, expected));
 }
 
 #[test]
@@ -819,7 +834,7 @@ fn import() {
         () () ())]),
     ];
     cases.into_iter().for_each(|(code, expected)| test(code, expected));
-    test_invalid("from Standard.Base.Data.Array import new as array_new");
+    expect_invalid_node("from Standard.Base.Data.Array import new as array_new");
 }
 
 #[test]
@@ -1251,8 +1266,8 @@ fn trailing_whitespace() {
 
 #[test]
 fn at_operator() {
-    test_invalid("foo@bar");
-    test_invalid("foo @ bar");
+    expect_invalid_node("foo@bar");
+    expect_invalid_node("foo @ bar");
 }
 
 #[test]
@@ -1320,89 +1335,119 @@ fn skip() {
 
 #[test]
 fn space_required() {
-    test_invalid("foo = if cond.x else.y");
+    expect_invalid_node("foo = if cond.x else.y");
 }
 
 #[test]
 fn incomplete_type_definition() {
-    test_invalid("type");
+    expect_invalid_node("type");
 }
 
 #[test]
 fn bad_case() {
-    test_invalid("foo = case x of\n 4");
-    test_invalid("foo = case x of\n 4 ->");
-    test_invalid("foo = case x of\n 4->");
+    expect_invalid_node("foo = case x of\n 4");
+    expect_invalid_node("foo = case x of\n 4 ->");
+    expect_invalid_node("foo = case x of\n 4->");
 }
 
 #[test]
fn malformed_sequence() {
-    test_invalid("(1, )");
-    test_invalid("foo = (1, )");
+    expect_invalid_node("(1, )");
+    expect_invalid_node("foo = (1, )");
 }
 
 #[test]
 fn unmatched_delimiter() {
-    test_invalid("(");
-    test_invalid(")");
-    test_invalid("[");
-    test_invalid("]");
-    test_invalid("foo = (");
-    test_invalid("foo = )");
-    test_invalid("foo = [");
-    test_invalid("foo = ]");
+    expect_invalid_node("(");
+    expect_invalid_node(")");
+    expect_invalid_node("[");
+    expect_invalid_node("]");
+    expect_invalid_node("foo = (");
+    expect_invalid_node("foo = )");
+    expect_invalid_node("foo = [");
+    expect_invalid_node("foo = ]");
 }
 
 #[test]
 fn unexpected_special_operator() {
-    test_invalid("foo = 1, 2");
+    expect_invalid_node("foo = 1, 2");
 }
 
 #[test]
 fn malformed_import() {
-    test_invalid("import");
-    test_invalid("import as Foo");
-    test_invalid("import Foo as Foo, Bar");
-    test_invalid("import Foo as Foo.Bar");
-    test_invalid("import Foo as");
-    test_invalid("import Foo as Bar.Baz");
-    test_invalid("import Foo hiding");
-    test_invalid("import Foo hiding X,");
-    test_invalid("polyglot import Foo");
-    test_invalid("polyglot java import");
-    test_invalid("from import all");
-    test_invalid("from Foo import all hiding");
-    test_invalid("from Foo import all hiding X.Y");
-    test_invalid("export");
-    test_invalid("export as Foo");
-    test_invalid("export Foo as Foo, Bar");
-    test_invalid("export Foo as Foo.Bar");
-    test_invalid("export Foo as");
-    test_invalid("export Foo as Bar.Baz");
-    test_invalid("export Foo hiding");
-    test_invalid("export Foo hiding X,");
-    test_invalid("from export all");
-    test_invalid("from Foo export all hiding");
-    test_invalid("from Foo export all hiding X.Y");
+    expect_invalid_node("import");
+    expect_invalid_node("import as Foo");
+    expect_invalid_node("import Foo as Foo, Bar");
+    expect_invalid_node("import Foo as Foo.Bar");
+    expect_invalid_node("import Foo as");
+    expect_invalid_node("import Foo as Bar.Baz");
+    expect_invalid_node("import Foo hiding");
+    expect_invalid_node("import Foo hiding X,");
+    expect_invalid_node("polyglot import Foo");
+    expect_invalid_node("polyglot java import");
+    expect_invalid_node("from import all");
+    expect_invalid_node("from Foo import all hiding");
+    expect_invalid_node("from Foo import all hiding X.Y");
+    expect_invalid_node("export");
+    expect_invalid_node("export as Foo");
+    expect_invalid_node("export Foo as Foo, Bar");
+    expect_invalid_node("export Foo as Foo.Bar");
+    expect_invalid_node("export Foo as");
+    expect_invalid_node("export Foo as Bar.Baz");
+    expect_invalid_node("export Foo hiding");
+    expect_invalid_node("export Foo hiding X,");
+    expect_invalid_node("from export all");
+    expect_invalid_node("from Foo export all hiding");
+    expect_invalid_node("from Foo export all hiding X.Y");
 }
 
 #[test]
 fn invalid_token() {
-    test_invalid("`");
-    test_invalid("splice_outside_text = `");
+    expect_invalid_node("`");
+    expect_invalid_node("splice_outside_text = `");
 }
 
 #[test]
 fn illegal_foreign_body() {
-    test_invalid("foreign 4");
-    test_invalid("foreign 4 * 4");
-    test_invalid("foreign foo = \"4\"");
-    test_invalid("foreign js foo = 4");
+    expect_invalid_node("foreign 4");
+    expect_invalid_node("foreign 4 * 4");
+    expect_invalid_node("foreign foo = \"4\"");
+    expect_invalid_node("foreign js foo = 4");
 }
 
 #[test]
 fn unexpected_tokens_in_inner_macro_segment() {
-    test_invalid("from Foo import all What_Is_This_Doing_Here hiding Bar");
+    expect_invalid_node("from Foo import all What_Is_This_Doing_Here hiding Bar");
+}
+
+#[test]
+fn invalid_unspaced_operator_sequence() {
+    // Typically, a sequence of operator identifiers is lexed as a single operator. However, an
+    // exception is made for some sequences of operator characters ending in the `-` character: An
+    // expression such as `x+-x` is accepted, and read equivalently to `x + -x` (see
+    // [`unspaced_operator_sequence`]).
+    //
+    // Due to this special case, there is no reasonable way to interpret this type of expression as
+    // valid when spaces are added in the following way:
+    expect_multiple_operator_error("x = y +- z");
+    expect_multiple_operator_error("x =- y");
+    //
+    // Treating the `-` as a unary operator applied to `z` would be confusing, as it would be in
+    // contradiction to the associativity implied by the whitespace rules.
+    //
+    // However, it would also be confusing to lex a sequence of characters like `+-` as a single
+    // operator in spaced expressions, but as two operators in unspaced expressions.
+    //
+    // Lacking any reasonable valid interpretation, we treat this case as a multiple-operator error.
+    // This is the only case in which we yield a multiple-operator error when there are no spaces
+    // between the operators.
+    //
+    // Similar expressions with missing operands should be treated likewise:
+    expect_multiple_operator_error("x = y +-");
+    expect_multiple_operator_error("x = +- z");
+    expect_multiple_operator_error("x =-");
+    expect_multiple_operator_error("=- y");
+    expect_multiple_operator_error("=-");
 }
 
 
@@ -1411,6 +1456,23 @@ fn unexpected_tokens_in_inner_macro_segment() {
 // ====================
 // === Test Support ===
 // ====================
+
+
+// === Testing helpers ===
+
+/// Check that the given [`Tree`] is a valid representation of the given source code:
+/// - Assert that the given [`Tree`] is composed of tokens that concatenate back to the given source
+///   code.
+/// - Assert that the given [`Tree`] can be serialized and deserialized without error.
+fn expect_tree_representing_code(code: &str, ast: &enso_parser::syntax::Tree) {
+    assert_eq!(ast.code(), code, "{:?}", &ast);
+    let serialized = enso_parser::serialization::serialize_tree(ast).unwrap();
+    let deserialized = enso_parser::serialization::deserialize_tree(&serialized);
+    deserialized.unwrap();
+}
+
+
+// === Testing valid inputs ===
+
 /// Given a block of input Enso code, test that:
 /// - The given code parses to the AST represented by the given S-expression.
 /// - The AST pretty-prints back to the original code.
@@ -1427,24 +1489,44 @@ fn test(code: &str, expect: lexpr::Value) {
     let ast = enso_parser::Parser::new().run(code);
     let ast_s_expr = to_s_expr(&ast, code);
     assert_eq!(ast_s_expr.to_string(), expect.to_string(), "{:?}", &ast);
-    assert_eq!(ast.code(), code, "{:?}", &ast);
-    let serialized = enso_parser::serialization::serialize_tree(&ast).unwrap();
-    let deserialized = enso_parser::serialization::deserialize_tree(&serialized);
-    deserialized.unwrap();
+    expect_tree_representing_code(code, &ast);
+}
+
+
+// === Testing inputs containing syntax error ===
+
+#[derive(Debug, Eq, PartialEq, Default, Copy, Clone)]
+struct Errors {
+    invalid_node: bool,
+    multiple_operator: bool,
+}
+
+impl Errors {
+    fn collect(code: &str) -> Self {
+        let ast = enso_parser::Parser::new().run(code);
+        expect_tree_representing_code(code, &ast);
+        let errors = core::cell::Cell::new(Errors::default());
+        ast.map(|tree| match &*tree.variant {
+            enso_parser::syntax::tree::Variant::Invalid(_) => {
+                errors.update(|e| Self { invalid_node: true, ..e });
+            }
+            enso_parser::syntax::tree::Variant::OprApp(opr_app) if opr_app.opr.is_err() => {
+                errors.update(|e| Self { multiple_operator: true, ..e });
+            }
+            _ => (),
+        });
+        errors.into_inner()
+    }
 }
 
 /// Checks that an input contains an `Invalid` node somewhere.
-fn test_invalid(code: &str) {
-    let ast = enso_parser::Parser::new().run(code);
-    let invalid = std::sync::atomic::AtomicBool::new(false);
-    ast.map(|tree| {
-        if matches!(&*tree.variant, enso_parser::syntax::tree::Variant::Invalid(_)) {
-            invalid.store(true, std::sync::atomic::Ordering::Release)
-        }
-    });
-    assert!(invalid.load(std::sync::atomic::Ordering::Acquire), "{:?}", &ast);
-    assert_eq!(ast.code(), code, "{:?}", &ast);
-    let serialized = enso_parser::serialization::serialize_tree(&ast).unwrap();
-    let deserialized = enso_parser::serialization::deserialize_tree(&serialized);
-    deserialized.unwrap();
+fn expect_invalid_node(code: &str) {
+    let errors = Errors::collect(code);
+    assert!(errors.invalid_node, "{:?}", enso_parser::Parser::new().run(code));
+}
+
+/// Checks that an input contains a multiple-operator error somewhere.
+fn expect_multiple_operator_error(code: &str) {
+    let errors = Errors::collect(code);
+    assert!(errors.multiple_operator, "{:?}", enso_parser::Parser::new().run(code));
 }
diff --git a/lib/rust/parser/src/lexer.rs b/lib/rust/parser/src/lexer.rs
index 46a15aff561a..15fca0163e4a 100644
--- a/lib/rust/parser/src/lexer.rs
+++ b/lib/rust/parser/src/lexer.rs
@@ -625,7 +625,10 @@ impl<'s> Lexer<'s> {
                     let (left, right) = token.split_at_(Bytes(1));
                     let lhs = analyze_operator(&left.code);
                     self.submit_token(left.with_variant(token::Variant::operator(lhs)));
-                    let rhs = analyze_operator(&right.code);
+                    // The `-` in this case is not identical to a free `-`: It is only allowed a
+                    // unary interpretation.
+                    let rhs = token::OperatorProperties::new()
+                        .with_unary_prefix_mode(token::Precedence::unary_minus());
                     self.submit_token(right.with_variant(token::Variant::operator(rhs)));
                 }
                 // Composed of operator characters, but not an operator node.
diff --git a/lib/rust/parser/src/syntax/operator.rs b/lib/rust/parser/src/syntax/operator.rs
index c686d5c6c8be..9b1dd20ca16a 100644
--- a/lib/rust/parser/src/syntax/operator.rs
+++ b/lib/rust/parser/src/syntax/operator.rs
@@ -196,8 +196,7 @@ impl<'s> ExpressionBuilder<'s> {
             // it's acting as unary.
             (true, _, Some(prec)) => self.unary_operator(prec, assoc, Unary::Simple(opr)),
             // Outside of a nospace group, a unary-only operator is missing an operand.
-            (false, None, Some(_)) =>
-                self.operand(syntax::tree::apply_unary_operator(opr, None).into()),
+            (false, None, Some(_)) => self.unary_operator_section(opr),
             // Binary operator section (no LHS).
             (_, Some(prec), _) => self.binary_operator(prec, assoc, opr),
             // Failed to compute a role for the operator; this should not be possible.
@@ -211,6 +210,14 @@ impl<'s> ExpressionBuilder<'s> {
         assoc: token::Associativity,
         mut arity: Unary<'s>,
     ) {
+        if self.prev_type == Some(ItemType::Opr)
+            && let Some(prev_opr) = self.operator_stack.last_mut()
+            && let Arity::Binary { tokens, .. } = &mut prev_opr.opr
+            && !self.nospace
+            && let Unary::Simple(opr) = arity {
+            tokens.push(opr);
+            return;
+        }
         if self.prev_type == Some(ItemType::Ast) {
             self.application();
             if self.nospace {
@@ -223,6 +230,20 @@ impl<'s> ExpressionBuilder<'s> {
         self.push_operator(prec, assoc, Arity::Unary(arity));
     }
 
+    fn unary_operator_section(&mut self, opr: token::Operator<'s>) {
+        if self.prev_type == Some(ItemType::Opr)
+            && let Some(prev_opr) = self.operator_stack.last_mut()
+            && let Arity::Binary { tokens, .. } = &mut prev_opr.opr {
+            // Multiple-operator error.
+            tokens.push(opr);
+        } else {
+            self.operand(Operand {
+                elided: 1,
+                ..Operand::from(syntax::tree::apply_unary_operator(opr, None))
+            });
+        }
+    }
+
     /// Extend the expression with a binary operator, by pushing it to the `operator_stack` or
     /// emitting a multiple-operator error.
     fn binary_operator(
@@ -257,10 +278,14 @@ impl<'s> ExpressionBuilder<'s> {
         opr: Arity<'s>,
     ) {
         let opr = Operator { precedence, associativity, opr };
-        if self.prev_type != Some(ItemType::Opr) {
-            // If the previous item was also an operator, this must be a unary operator following a
-            // binary operator; we cannot reduce the stack because the unary operator must be
-            // evaluated before the binary operator, regardless of precedence.
+        // When a unary operator follows another operator, we defer reducing the stack because a
+        // unary operator's affinity for its operand is stronger than any operator precedence.
+        let defer_reducing_stack = match (&self.prev_type, &opr.opr) {
+            (Some(ItemType::Opr), Arity::Unary(Unary::Simple(_))) if self.nospace => true,
+            (Some(ItemType::Opr), Arity::Unary(Unary::Fragment { .. })) => true,
+            _ => false,
+        };
+        if !defer_reducing_stack {
             let mut rhs = self.output.pop();
             self.reduce(precedence, &mut rhs);
             if let Some(rhs) = rhs {
@@ -282,7 +307,7 @@ impl<'s> ExpressionBuilder<'s> {
         let rhs_ = rhs.take();
         let ast = match opr.opr {
             Arity::Unary(Unary::Simple(opr)) =>
-                Operand::from(rhs_).map(|item| syntax::tree::apply_unary_operator(opr, item)),
+                Operand::new(rhs_).map(|item| syntax::tree::apply_unary_operator(opr, item)),
             Arity::Unary(Unary::Invalid { token, error }) => Operand::from(rhs_)
                 .map(|item| syntax::tree::apply_unary_operator(token, item).with_error(error)),
             Arity::Unary(Unary::Fragment { mut fragment }) => {
@@ -301,6 +326,24 @@ impl<'s> ExpressionBuilder<'s> {
             let rhs = rhs_.map(syntax::Tree::from);
             let ast = syntax::tree::apply_operator(lhs, tokens, rhs);
             Operand::from(ast)
+        } else if self.nospace
+            && tokens.len() < 2
+            && let Some(opr) = tokens.first()
+            && opr.properties.can_form_section() {
+            let mut rhs = None;
+            let mut elided = 0;
+            let mut wildcards = 0;
+            if let Some(rhs_) = rhs_ {
+                rhs = Some(rhs_.value);
+                elided += rhs_.elided;
+                wildcards += rhs_.wildcards;
+            }
+            elided += lhs.is_none() as u32 + rhs.is_none() as u32;
+            let mut operand = Operand::from(lhs)
+                .map(|lhs| syntax::tree::apply_operator(lhs, tokens, rhs));
+            operand.elided += elided;
+            operand.wildcards += wildcards;
+            operand
         } else {
             let rhs = rhs_.map(syntax::Tree::from);
             let mut elided = 0;
@@ -334,16 +377,39 @@ impl<'s> ExpressionBuilder<'s> {
         out
     }
 
+    /// Extend the expression with the contents of a [`Self`] built from a subexpression that
+    /// contains no spaces.
     pub fn extend_from(&mut self, child: &mut Self) {
-        if child.output.is_empty() && let Some(op) = child.operator_stack.pop() {
-            match op.opr {
-                Arity::Unary(Unary::Simple(un)) => self.operator(un),
-                Arity::Unary(Unary::Invalid{ .. }) => unreachable!(),
-                Arity::Unary(Unary::Fragment{ .. }) => unreachable!(),
-                Arity::Binary { tokens, .. } => tokens.into_iter().for_each(|op| self.operator(op)),
-            };
+        if child.output.is_empty() {
+            // If the unspaced subexpression doesn't contain any non-operators, promote each
+            // operator in the (unspaced) child to an operator in the (spaced) parent.
+            //
+            // The case where `child.operator_stack.len() > 1` is subtle:
+            //
+            // A sequence of operator characters without intervening whitespace is lexed as multiple
+            // operators in some cases where the last character is `-`.
+            //
+            // In such a case, an unspaced expression-builder will:
+            // 1. Push the first operator to the operator stack (composed of all the operator
+            //    characters except the trailing `-`).
+            // 2. Push `-` to the operator stack, without reducing the expression (because the `-`
+            //    should be interpreted as a unary operator if a value follows it within the
+            //    unspaced subexpression).
+            //
+            // Thus, if we encounter an unspaced subexpression consisting only of multiple
+            // operators: When we append each operator to the parent (spaced) expression-builder, it
+            // will be reinterpreted in a *spaced* context. In a spaced context, the sequence of
+            // operators will cause a multiple-operator error.
+            for op in child.operator_stack.drain(..) {
+                match op.opr {
+                    Arity::Unary(Unary::Simple(un)) => self.operator(un),
+                    Arity::Unary(Unary::Invalid { .. }) => unreachable!(),
+                    Arity::Unary(Unary::Fragment { .. }) => unreachable!(),
+                    Arity::Binary { tokens, .. } =>
+                        tokens.into_iter().for_each(|op| self.operator(op)),
+                }
+            }
             child.prev_type = None;
-            debug_assert_eq!(&child.operator_stack, &[]);
             return;
         }
         if child.prev_type == Some(ItemType::Opr)
@@ -376,6 +442,9 @@ enum ItemType {
     Opr,
 }
 
+
+// === Operator ===
+
 /// An operator, whose arity and precedence have been determined.
 #[derive(Debug, PartialEq, Eq)]
 struct Operator<'s> {
@@ -426,7 +495,8 @@ struct Operand<T> {
     wildcards: u32,
 }
 
-/// Transpose.
+/// Transpose. Note that an absent input will not be treated as an elided value; for that
+/// conversion, use [`Operand::new`].
 impl<T> From<Option<Operand<T>>> for Operand<Option<T>> {
     fn from(operand: Option<Operand<T>>) -> Self {
         match operand {
@@ -471,6 +541,19 @@ impl<'s> From<Operand<syntax::Tree<'s>>> for syntax::Tree<'s> {
     }
 }
 
+impl<T> Operand<Option<T>> {
+    /// Lift an option value to a potentially-elided operand.
+    fn new(value: Option<Operand<T>>) -> Self {
+        match value {
+            None => Self { value: None, elided: 1, wildcards: default() },
+            Some(value) => {
+                let Operand { value, elided, wildcards } = value;
+                Self { value: Some(value), elided, wildcards }
+            }
+        }
+    }
+}
+
 impl<T> Operand<T> {
     /// Operate on the contained value without altering the elided-operand information.
     fn map<U>(self, f: impl FnOnce(T) -> U) -> Operand<U> {

From 8b6ad4d7059fe8bc3867761cf14f8e9d01d28e86 Mon Sep 17 00:00:00 2001
From: Kaz Wesley
Date: Tue, 25 Jul 2023 17:08:23 +0000
Subject: [PATCH 2/5] Remove CODEOWNERS special case for lib/rust/parser

---
 .github/CODEOWNERS | 1 -
 1 file changed, 1 deletion(-)

diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 6ff6f506b53f..3e7b94bdf468 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -13,7 +13,6 @@ Cargo.lock
 Cargo.toml
 
 /lib/rust/ @MichaelMauderer @mwu-tow @farmaazon @kazcw @vitvakatu @Frizi
 /lib/rust/ensogl/ @MichaelMauderer @farmaazon @kazcw @vitvakatu @Frizi
-/lib/rust/parser/ @kazcw @jaroslavtulach
 /integration-test/ @MichaelMauderer @farmaazon @kazcw @vitvakatu @Frizi
 /tools/build-performance/ @kazcw @mwu-tow @Akirathan

From 18eb0ddb49ca589f2857310f6bf17577d16e80ae Mon Sep 17 00:00:00 2001
From: Kaz Wesley
Date: Tue, 25 Jul 2023 17:27:52 +0000
Subject: [PATCH 3/5] Typo.

---
 lib/rust/parser/debug/tests/parse.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/lib/rust/parser/debug/tests/parse.rs b/lib/rust/parser/debug/tests/parse.rs
index bd7080096413..f4b383988c12 100644
--- a/lib/rust/parser/debug/tests/parse.rs
+++ b/lib/rust/parser/debug/tests/parse.rs
@@ -1493,7 +1493,7 @@ fn test(code: &str, expect: lexpr::Value) {
 }
 
 
-// === Testing inputs containing syntax error ===
+// === Testing inputs containing syntax errors ===
 
 #[derive(Debug, Eq, PartialEq, Default, Copy, Clone)]
 struct Errors {

From f4556190413062032843ad03a9a36e6429235fd0 Mon Sep 17 00:00:00 2001
From: Kaz Wesley
Date: Wed, 26 Jul 2023 13:52:09 +0000
Subject: [PATCH 4/5] Fix CODEOWNERS

---
 .github/CODEOWNERS | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 3e7b94bdf468..fc4c0eb39ffa 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -13,6 +13,7 @@ Cargo.lock
 Cargo.toml
 
 /lib/rust/ @MichaelMauderer @mwu-tow @farmaazon @kazcw @vitvakatu @Frizi
 /lib/rust/ensogl/ @MichaelMauderer @farmaazon @kazcw @vitvakatu @Frizi
+/lib/rust/parser/ @MichaelMauderer @mwu-tow @farmaazon @kazcw @vitvakatu @Frizi @jaroslavtulach
 /integration-test/ @MichaelMauderer @farmaazon @kazcw @vitvakatu @Frizi
 /tools/build-performance/ @kazcw @mwu-tow @Akirathan

From a546fccc2608ca019a409170945fb2ccc554f034 Mon Sep 17 00:00:00 2001
From: Kaz Wesley
Date: Wed, 26 Jul 2023 13:52:21 +0000
Subject: [PATCH 5/5] Update test case

---
 lib/rust/parser/src/lexer.rs | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/lib/rust/parser/src/lexer.rs b/lib/rust/parser/src/lexer.rs
index 15fca0163e4a..150b1a76d80a 100644
--- a/lib/rust/parser/src/lexer.rs
+++ b/lib/rust/parser/src/lexer.rs
@@ -1563,9 +1563,7 @@ mod tests {
     #[test]
     fn test_case_operators() {
         test_lexer_many(lexer_case_operators(&["+", "-", "=", "==", "===", ":", ","]));
-        let properties = analyze_operator("-");
-        let unary_minus = Token("", "-", token::Variant::operator(properties));
-        test_lexer_many(vec![("+-", vec![operator_("", "+"), unary_minus])]);
+        assert_eq!(run("+-").unwrap().len(), 2);
     }
 
     /// Based on https://www.cl.cam.ac.uk/~mgk25/ucs/examples/UTF-8-test.txt.