Skip to content

Commit

Permalink
chore(fmt): restore removed comment (#3129)
Browse files Browse the repository at this point in the history
  • Loading branch information
kek kek kek authored Oct 12, 2023
1 parent 2c175c0 commit 45cbd24
Show file tree
Hide file tree
Showing 7 changed files with 79 additions and 22 deletions.
51 changes: 33 additions & 18 deletions compiler/noirc_frontend/src/lexer/lexer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ pub struct Lexer<'a> {
char_iter: Peekable<Zip<Chars<'a>, RangeFrom<u32>>>,
position: Position,
done: bool,
skip_comments: bool,
}

pub type SpannedTokenResult = Result<SpannedToken, LexerErrorKind>;
Expand All @@ -39,15 +40,21 @@ impl<'a> Lexer<'a> {
(Tokens(tokens), errors)
}

fn new(source: &'a str) -> Self {
pub fn new(source: &'a str) -> Self {
Lexer {
// We zip with the character index here to ensure the first char has index 0
char_iter: source.chars().zip(0..).peekable(),
position: 0,
done: false,
skip_comments: true,
}
}

/// Builder-style toggle: with `flag == false` the lexer produces
/// `LineComment`/`BlockComment` tokens instead of silently discarding
/// comments. Consumes and returns the lexer for chaining.
pub fn skip_comments(mut self, flag: bool) -> Self {
self.skip_comments = flag;
self
}

/// Iterates the cursor and returns the char at the new cursor position
fn next_char(&mut self) -> Option<char> {
let (c, index) = self.char_iter.next()?;
Expand Down Expand Up @@ -176,13 +183,16 @@ impl<'a> Lexer<'a> {
Token::Minus => self.single_double_peek_token('>', prev_token, Token::Arrow),
Token::Colon => self.single_double_peek_token(':', prev_token, Token::DoubleColon),
Token::Slash => {
let start = self.position;

if self.peek_char_is('/') {
self.next_char();
return self.parse_comment();
return self.parse_comment(start);
} else if self.peek_char_is('*') {
self.next_char();
return self.parse_block_comment();
return self.parse_block_comment(start);
}

Ok(spanned_prev_token)
}
_ => Err(LexerErrorKind::NotADoubleChar {
Expand Down Expand Up @@ -377,15 +387,18 @@ impl<'a> Lexer<'a> {
}
}

fn parse_comment(&mut self) -> SpannedTokenResult {
let _ = self.eat_while(None, |ch| ch != '\n');
self.next_token()
fn parse_comment(&mut self, start: u32) -> SpannedTokenResult {
let comment = self.eat_while(None, |ch| ch != '\n');
if self.skip_comments {
return self.next_token();
}
Ok(Token::LineComment(comment).into_span(start, self.position))
}

fn parse_block_comment(&mut self) -> SpannedTokenResult {
let start = self.position;
fn parse_block_comment(&mut self, start: u32) -> SpannedTokenResult {
let mut depth = 1usize;

let mut content = String::new();
while let Some(ch) = self.next_char() {
match ch {
'/' if self.peek_char_is('*') => {
Expand All @@ -403,12 +416,15 @@ impl<'a> Lexer<'a> {
break;
}
}
_ => {}
ch => content.push(ch),
}
}

if depth == 0 {
self.next_token()
if self.skip_comments {
return self.next_token();
}
Ok(Token::BlockComment(content).into_span(start, self.position))
} else {
let span = Span::inclusive(start, self.position);
Err(LexerErrorKind::UnterminatedBlockComment { span })
Expand All @@ -431,7 +447,6 @@ impl<'a> Iterator for Lexer<'a> {
}
}
}

#[cfg(test)]
mod tests {
use super::*;
Expand Down Expand Up @@ -497,7 +512,7 @@ mod tests {
let input = r#"#[deprecated]"#;
let mut lexer = Lexer::new(input);

let token = lexer.next().unwrap().unwrap();
let token = lexer.next_token().unwrap();
assert_eq!(
token.token(),
&Token::Attribute(Attribute::Secondary(SecondaryAttribute::Deprecated(None)))
Expand All @@ -509,7 +524,7 @@ mod tests {
let input = r#"#[deprecated("hello")]"#;
let mut lexer = Lexer::new(input);

let token = lexer.next().unwrap().unwrap();
let token = lexer.next_token().unwrap();
assert_eq!(
token.token(),
&Token::Attribute(Attribute::Secondary(crate::token::SecondaryAttribute::Deprecated(
Expand Down Expand Up @@ -542,7 +557,7 @@ mod tests {
let input = r#"#[custom(hello)]"#;
let mut lexer = Lexer::new(input);

let token = lexer.next().unwrap().unwrap();
let token = lexer.next_token().unwrap();
assert_eq!(
token.token(),
&Token::Attribute(Attribute::Secondary(SecondaryAttribute::Custom(
Expand All @@ -556,7 +571,7 @@ mod tests {
let input = r#"#[test]"#;
let mut lexer = Lexer::new(input);

let token = lexer.next().unwrap().unwrap();
let token = lexer.next_token().unwrap();
assert_eq!(
token.token(),
&Token::Attribute(Attribute::Function(FunctionAttribute::Test(TestScope::None)))
Expand All @@ -568,7 +583,7 @@ mod tests {
let input = r#"#[contract_library_method]"#;
let mut lexer = Lexer::new(input);

let token = lexer.next().unwrap().unwrap();
let token = lexer.next_token().unwrap();
assert_eq!(
token.token(),
&Token::Attribute(Attribute::Secondary(SecondaryAttribute::ContractLibraryMethod))
Expand All @@ -580,7 +595,7 @@ mod tests {
let input = r#"#[test(should_fail)]"#;
let mut lexer = Lexer::new(input);

let token = lexer.next().unwrap().unwrap();
let token = lexer.next_token().unwrap();
assert_eq!(
token.token(),
&Token::Attribute(Attribute::Function(FunctionAttribute::Test(
Expand All @@ -594,7 +609,7 @@ mod tests {
let input = r#"#[test(should_fail_with = "hello")]"#;
let mut lexer = Lexer::new(input);

let token = lexer.next().unwrap().unwrap();
let token = lexer.next_token().unwrap();
assert_eq!(
token.token(),
&Token::Attribute(Attribute::Function(FunctionAttribute::Test(
Expand Down
4 changes: 4 additions & 0 deletions compiler/noirc_frontend/src/lexer/token.rs
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,8 @@ pub enum Token {
Keyword(Keyword),
IntType(IntType),
Attribute(Attribute),
LineComment(String),
BlockComment(String),
/// <
Less,
/// <=
Expand Down Expand Up @@ -149,6 +151,8 @@ impl fmt::Display for Token {
Token::FmtStr(ref b) => write!(f, "f{b}"),
Token::Keyword(k) => write!(f, "{k}"),
Token::Attribute(ref a) => write!(f, "{a}"),
Token::LineComment(ref s) => write!(f, "//{s}"),
Token::BlockComment(ref s) => write!(f, "/*{s}*/"),
Token::IntType(ref i) => write!(f, "{i}"),
Token::Less => write!(f, "<"),
Token::LessEqual => write!(f, "<="),
Expand Down
32 changes: 30 additions & 2 deletions tooling/nargo_fmt/src/visitor/expr.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
use noirc_frontend::{
hir::resolution::errors::Span, ArrayLiteral, BlockExpression, Expression, ExpressionKind,
Literal, Statement,
hir::resolution::errors::Span, lexer::Lexer, token::Token, ArrayLiteral, BlockExpression,
Expression, ExpressionKind, Literal, Statement,
};

use super::FmtVisitor;
Expand All @@ -10,6 +10,7 @@ impl FmtVisitor<'_> {
let span = expr.span;

let rewrite = self.format_expr(expr);
let rewrite = recover_comment_removed(slice!(self, span.start(), span.end()), rewrite);
self.push_rewrite(rewrite, span);

self.last_position = span.end();
Expand Down Expand Up @@ -159,3 +160,30 @@ impl FmtVisitor<'_> {
self.push_str(&block_str);
}
}

/// Guards the formatter against dropping comments: when the rewritten
/// text `new` no longer carries the same comments as the `original`
/// slice, the original text is kept verbatim instead of the rewrite.
fn recover_comment_removed(original: &str, new: String) -> String {
    match changed_comment_content(original, &new) {
        true => original.to_string(),
        false => new,
    }
}

/// Returns `true` when `original` and `new` do not contain the same
/// ordered sequence of comment contents.
fn changed_comment_content(original: &str, new: &str) -> bool {
    let before = comments(original);
    let after = comments(new);
    before != after
}

/// Lexes `source` with comment skipping disabled and collects the
/// content of every line and block comment, in source order.
/// Lexer errors are dropped (`flatten` discards `Err` results), so the
/// comparison is best-effort on invalid input.
fn comments(source: &str) -> Vec<String> {
    let tokens = Lexer::new(source).skip_comments(false).flatten();
    let mut found = Vec::new();
    for spanned in tokens {
        match spanned.into_token() {
            Token::LineComment(content) | Token::BlockComment(content) => found.push(content),
            _ => {}
        }
    }
    found
}
3 changes: 3 additions & 0 deletions tooling/nargo_fmt/tests/expected/infix.nr
Original file line number Diff line number Diff line change
Expand Up @@ -2,4 +2,7 @@ fn foo() {
40 + 2;
!40 + 2;
40 + 2 == 42;

40/*test*/ + 2 == 42;
40 + 2/*test*/ == 42;
}
4 changes: 3 additions & 1 deletion tooling/nargo_fmt/tests/expected/unary_operators.nr
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
fn main() {
-1
-1;
-/*test*/1;
-/*test*/1;
}
3 changes: 3 additions & 0 deletions tooling/nargo_fmt/tests/input/infix.nr
Original file line number Diff line number Diff line change
Expand Up @@ -2,4 +2,7 @@ fn foo() {
40 + 2;
!40+2;
40 + 2 == 42;

40/*test*/ + 2 == 42;
40 + 2/*test*/ == 42;
}
4 changes: 3 additions & 1 deletion tooling/nargo_fmt/tests/input/unary_operators.nr
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
fn main() {
-1
-1;
-/*test*/1;
-/*test*/1;
}

0 comments on commit 45cbd24

Please sign in to comment.