Refactor token collection to capture trailing token immediately #81017

Merged: 1 commit, Jan 23, 2021
11 changes: 0 additions & 11 deletions compiler/rustc_ast/src/tokenstream.rs
@@ -127,14 +127,10 @@ where
 }
 
 pub trait CreateTokenStream: sync::Send + sync::Sync {
-    fn add_trailing_semi(&self) -> Box<dyn CreateTokenStream>;
     fn create_token_stream(&self) -> TokenStream;
 }
 
 impl CreateTokenStream for TokenStream {
-    fn add_trailing_semi(&self) -> Box<dyn CreateTokenStream> {
-        panic!("Cannot call `add_trailing_semi` on a `TokenStream`!");
-    }
     fn create_token_stream(&self) -> TokenStream {
         self.clone()
     }
@@ -151,13 +147,6 @@ impl LazyTokenStream {
         LazyTokenStream(Lrc::new(Box::new(inner)))
     }
 
-    /// Extends the captured stream by one token,
-    /// which must be a trailing semicolon. This
-    /// affects the `TokenStream` created by `make_tokenstream`.
-    pub fn add_trailing_semi(&self) -> LazyTokenStream {
-        LazyTokenStream(Lrc::new(self.0.add_trailing_semi()))
-    }
-
    pub fn create_token_stream(&self) -> TokenStream {
        self.0.create_token_stream()
    }
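
With `add_trailing_semi` gone, `CreateTokenStream` shrinks to a single method and `LazyTokenStream` becomes a plain handle over it. A minimal standalone sketch of the simplified shape (the `TokenStream` here is a stand-in; the real definitions live in `rustc_ast::tokenstream`):

```rust
use std::sync::Arc;

// Stand-in for rustc's TokenStream, for illustration only.
#[derive(Clone, Debug)]
struct TokenStream(Vec<String>);

// After this PR the trait has exactly one job: produce the stream on demand.
trait CreateTokenStream: Send + Sync {
    fn create_token_stream(&self) -> TokenStream;
}

// An eagerly built stream satisfies the trait by cloning itself, so the
// panicking `add_trailing_semi` stub is no longer needed.
impl CreateTokenStream for TokenStream {
    fn create_token_stream(&self) -> TokenStream {
        self.clone()
    }
}

// LazyTokenStream is now just a shared handle to some producer.
struct LazyTokenStream(Arc<dyn CreateTokenStream>);

impl LazyTokenStream {
    fn new(inner: impl CreateTokenStream + 'static) -> Self {
        LazyTokenStream(Arc::new(inner))
    }

    fn create_token_stream(&self) -> TokenStream {
        self.0.create_token_stream()
    }
}
```
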
2 changes: 1 addition & 1 deletion compiler/rustc_parse/src/lib.rs
@@ -292,7 +292,7 @@ pub fn nt_to_tokenstream(
     } else if matches!(synthesize_tokens, CanSynthesizeMissingTokens::Yes) {
         return fake_token_stream(sess, nt);
     } else {
-        panic!("Missing tokens for nt {:?}", pprust::nonterminal_to_string(nt));
+        panic!("Missing tokens for nt at {:?}: {:?}", nt.span(), pprust::nonterminal_to_string(nt));
    }
}

4 changes: 2 additions & 2 deletions compiler/rustc_parse/src/parser/item.rs
@@ -1,6 +1,6 @@
 use super::diagnostics::{dummy_arg, ConsumeClosingDelim, Error};
 use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
-use super::{FollowedByType, ForceCollect, Parser, PathStyle};
+use super::{FollowedByType, ForceCollect, Parser, PathStyle, TrailingToken};
 
 use crate::{maybe_collect_tokens, maybe_whole};
 
@@ -125,7 +125,7 @@ impl<'a> Parser<'a> {
         let item = maybe_collect_tokens!(self, force_collect, &attrs, |this: &mut Self| {
             let item = this.parse_item_common_(attrs, mac_allowed, attrs_allowed, req_name);
             unclosed_delims.append(&mut this.unclosed_delims);
-            item
+            Ok((item?, TrailingToken::None))
         })?;
 
        self.unclosed_delims.append(&mut unclosed_delims);
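
The closure's body changes shape here: `parse_item_common_` still yields a `PResult`, so `item?` propagates any error before the success value is paired with `TrailingToken::None`. A tiny standalone illustration of that re-wrapping (all names are hypothetical stand-ins):

```rust
enum TrailingToken {
    None,
    Semi,
}

// Hypothetical stand-ins for the parser's result types.
type PResult<T> = Result<T, String>;
struct Item;

fn parse_item_inner() -> PResult<Item> {
    Ok(Item)
}

// Mirrors `Ok((item?, TrailingToken::None))`: `?` unwraps the inner result,
// then the success value is paired with the trailing-token disposition.
fn item_callback() -> PResult<(Item, TrailingToken)> {
    let item = parse_item_inner();
    Ok((item?, TrailingToken::None))
}
```
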
55 changes: 28 additions & 27 deletions compiler/rustc_parse/src/parser/mod.rs
@@ -61,6 +61,11 @@ pub enum ForceCollect {
     No,
 }
 
+pub enum TrailingToken {
+    None,
+    Semi,
+}
+
 /// Like `maybe_whole_expr`, but for things other than expressions.
 #[macro_export]
 macro_rules! maybe_whole {
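
This enum is the heart of the change: instead of patching the captured stream after the fact, a parsing callback now tells the collector up front whether a trailing token should be included. A hypothetical callback that reports a trailing `;` (stand-in types, not code from this PR):

```rust
enum TrailingToken {
    None,
    Semi,
}

// Hypothetical stand-ins for the parser machinery.
type PResult<T> = Result<T, String>;
struct Stmt;
struct Parser {
    next_is_semi: bool,
}

impl Parser {
    fn parse_stmt_inner(&mut self) -> PResult<Stmt> {
        Ok(Stmt)
    }

    // When the statement ends at a `;` that the collector should capture,
    // the callback says so by returning `TrailingToken::Semi`.
    fn stmt_callback(&mut self) -> PResult<(Stmt, TrailingToken)> {
        let stmt = self.parse_stmt_inner()?;
        let trailing =
            if self.next_is_semi { TrailingToken::Semi } else { TrailingToken::None };
        Ok((stmt, trailing))
    }
}
```
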
@@ -1225,6 +1230,13 @@ impl<'a> Parser<'a> {
         }
     }
 
+    pub fn collect_tokens<R: HasTokens>(
+        &mut self,
+        f: impl FnOnce(&mut Self) -> PResult<'a, R>,
+    ) -> PResult<'a, R> {
+        self.collect_tokens_trailing_token(|this| Ok((f(this)?, TrailingToken::None)))
+    }
+
     /// Records all tokens consumed by the provided callback,
     /// including the current token. These tokens are collected
     /// into a `LazyTokenStream`, and returned along with the result
@@ -1241,9 +1253,9 @@
     /// This restriction shouldn't be an issue in practice,
     /// since this function is used to record the tokens for
     /// a parsed AST item, which always has matching delimiters.
-    pub fn collect_tokens<R: HasTokens>(
+    pub fn collect_tokens_trailing_token<R: HasTokens>(
         &mut self,
-        f: impl FnOnce(&mut Self) -> PResult<'a, R>,
+        f: impl FnOnce(&mut Self) -> PResult<'a, (R, TrailingToken)>,
     ) -> PResult<'a, R> {
         let start_token = (self.token.clone(), self.token_spacing);
         let cursor_snapshot = TokenCursor {
@@ -1256,7 +1268,7 @@
             append_unglued_token: self.token_cursor.append_unglued_token.clone(),
         };
 
-        let mut ret = f(self)?;
+        let (mut ret, trailing_token) = f(self)?;
 
         // Produces a `TokenStream` on-demand. Using `cursor_snapshot`
         // and `num_calls`, we can reconstruct the `TokenStream` seen
@@ -1275,55 +1287,44 @@
             cursor_snapshot: TokenCursor,
             num_calls: usize,
             desugar_doc_comments: bool,
-            trailing_semi: bool,
             append_unglued_token: Option<TreeAndSpacing>,
         }
         impl CreateTokenStream for LazyTokenStreamImpl {
             fn create_token_stream(&self) -> TokenStream {
-                let mut num_calls = self.num_calls;
-                if self.trailing_semi {
-                    num_calls += 1;
-                }
                 // The token produced by the final call to `next` or `next_desugared`
                 // was not actually consumed by the callback. The combination
                 // of chaining the initial token and using `take` produces the desired
                 // result - we produce an empty `TokenStream` if no calls were made,
                 // and omit the final token otherwise.
                 let mut cursor_snapshot = self.cursor_snapshot.clone();
                 let tokens = std::iter::once(self.start_token.clone())
-                    .chain((0..num_calls).map(|_| {
+                    .chain((0..self.num_calls).map(|_| {
                         if self.desugar_doc_comments {
                             cursor_snapshot.next_desugared()
                         } else {
                             cursor_snapshot.next()
                         }
                     }))
-                    .take(num_calls);
+                    .take(self.num_calls);
 
                 make_token_stream(tokens, self.append_unglued_token.clone())
             }
-            fn add_trailing_semi(&self) -> Box<dyn CreateTokenStream> {
-                if self.trailing_semi {
-                    panic!("Called `add_trailing_semi` twice!");
-                }
-                if self.append_unglued_token.is_some() {
-                    panic!(
-                        "Cannot call `add_trailing_semi` when we have an unglued token {:?}",
-                        self.append_unglued_token
-                    );
-                }
-                let mut new = self.clone();
-                new.trailing_semi = true;
-                Box::new(new)
-            }
         }
 
+        let mut num_calls = self.token_cursor.num_next_calls - cursor_snapshot.num_next_calls;
+        match trailing_token {
+            TrailingToken::None => {}
+            TrailingToken::Semi => {
+                assert_eq!(self.token.kind, token::Semi);
+                num_calls += 1;
+            }
+        }
+
         let lazy_impl = LazyTokenStreamImpl {
             start_token,
-            num_calls: self.token_cursor.num_next_calls - cursor_snapshot.num_next_calls,
+            num_calls,
             cursor_snapshot,
             desugar_doc_comments: self.desugar_doc_comments,
-            trailing_semi: false,
             append_unglued_token: self.token_cursor.append_unglued_token.clone(),
         };
         ret.finalize_tokens(LazyTokenStream::new(lazy_impl));
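
The accounting that `add_trailing_semi` used to defer now happens eagerly: the collector measures how many cursor calls the callback made, and for `TrailingToken::Semi` it adds one so the still-current `;` is replayed into the captured stream. A toy model of that arithmetic (assumed counter values, not the compiler's types):

```rust
enum TrailingToken {
    None,
    Semi,
}

// Toy model: `before`/`after` are the cursor's `num_next_calls` counter
// sampled around the callback, as in `collect_tokens_trailing_token`.
fn calls_to_replay(before: usize, after: usize, trailing: TrailingToken) -> usize {
    let mut num_calls = after - before;
    if let TrailingToken::Semi = trailing {
        // The `;` is the parser's current token when the callback returns;
        // replaying one extra call folds it into the recorded stream.
        num_calls += 1;
    }
    num_calls
}

fn main() {
    assert_eq!(calls_to_replay(10, 14, TrailingToken::None), 4);
    assert_eq!(calls_to_replay(10, 14, TrailingToken::Semi), 5);
}
```
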
@@ -1427,9 +1428,9 @@ macro_rules! maybe_collect_tokens {
         if matches!($force_collect, ForceCollect::Yes)
             || $crate::parser::attr::maybe_needs_tokens($attrs)
         {
-            $self.collect_tokens($f)
+            $self.collect_tokens_trailing_token($f)
         } else {
-            $f($self)
+            Ok($f($self)?.0)
        }
    };
}
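
Both arms of the macro keep the same outward type: when token collection is skipped, the callback still returns the `(result, TrailingToken)` pair, and `.0` simply discards the trailing-token half. A condensed sketch of the two paths as a plain method (hypothetical; a boolean stands in for the `ForceCollect`/attribute check):

```rust
enum TrailingToken {
    None,
    Semi,
}

type PResult<T> = Result<T, String>;

struct Parser {
    needs_tokens: bool, // stands in for ForceCollect / maybe_needs_tokens
}

impl Parser {
    // Capture elided: run the callback and keep only the parse result.
    fn collect_tokens_trailing_token<R>(
        &mut self,
        f: impl FnOnce(&mut Self) -> PResult<(R, TrailingToken)>,
    ) -> PResult<R> {
        let (ret, _trailing) = f(self)?;
        // ...token bookkeeping would happen here in the real parser...
        Ok(ret)
    }

    // Mirrors `maybe_collect_tokens!`: collect when required, otherwise run
    // the callback directly and drop the trailing-token half with `.0`.
    fn maybe_collect<R>(
        &mut self,
        f: impl FnOnce(&mut Self) -> PResult<(R, TrailingToken)>,
    ) -> PResult<R> {
        if self.needs_tokens {
            self.collect_tokens_trailing_token(f)
        } else {
            Ok(f(self)?.0)
        }
    }
}
```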