rustc_parse top-level cleanups #125815

Merged · 14 commits · Jun 5, 2024

Changes from all commits
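
The hunks below repeat a small set of call-site migrations. As a condensed reference, here is a minimal before/after sketch of those patterns, assuming only the signatures that appear in the diff itself; the wrapper function and its locals are illustrative and not part of this PR:

    // Sketch only: the enclosing function and locals are hypothetical; the
    // rustc_parse calls and their argument orders are taken from the hunks below.
    use rustc_ast::tokenstream::TokenStream;
    use rustc_parse::parser::Parser;
    use rustc_parse::{
        new_parser_from_file, new_parser_from_source_str, source_str_to_stream,
        unwrap_or_emit_fatal,
    };
    use rustc_session::parse::ParseSess;
    use rustc_span::FileName;
    use std::path::Path;

    fn migration_sketch(
        psess: &ParseSess,
        stream: TokenStream,
        path: &Path,
        name: FileName,
        src: String,
    ) {
        // Token stream -> parser: replaces `rustc_parse::stream_to_parser`.
        // The last argument is an optional subparser name used in diagnostics.
        let _parser = Parser::new(psess, stream, None);

        // Source string -> parser: `new_parser_from_source_str` (formerly the
        // `maybe_` variant) is fallible; callers that want the old fatal
        // behaviour wrap it in `unwrap_or_emit_fatal`.
        let _parser = unwrap_or_emit_fatal(new_parser_from_source_str(
            psess,
            name.clone(),
            src.clone(),
        ));

        // File -> parser: same wrapping; the last argument is an optional span
        // pointing at whatever requested the file (e.g. an `include!` call).
        let _parser = unwrap_or_emit_fatal(new_parser_from_file(psess, path, None));

        // Source string -> token stream: replaces `parse_stream_from_source_str`,
        // with the session moved to the first argument.
        let _stream = unwrap_or_emit_fatal(source_str_to_stream(psess, name, src, None));
    }

The common thread is that the assorted `parse_*`/`maybe_new_*` entry points collapse into a few fallible constructors, with `unwrap_or_emit_fatal` applied at the call sites that previously relied on fatal errors inside rustc_parse.
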
2 changes: 1 addition & 1 deletion compiler/rustc_builtin_macros/src/cfg_eval.rs
@@ -196,7 +196,7 @@ impl CfgEval<'_, '_> {
// Re-parse the tokens, setting the `capture_cfg` flag to save extra information
// to the captured `AttrTokenStream` (specifically, we capture
// `AttrTokenTree::AttributesData` for all occurrences of `#[cfg]` and `#[cfg_attr]`)
- let mut parser = rustc_parse::stream_to_parser(&self.cfg.sess.psess, orig_tokens, None);
+ let mut parser = Parser::new(&self.cfg.sess.psess, orig_tokens, None);
parser.capture_cfg = true;
match parse_annotatable_with(&mut parser) {
Ok(a) => annotatable = a,
5 changes: 3 additions & 2 deletions compiler/rustc_builtin_macros/src/cmdline_attrs.rs
@@ -4,16 +4,17 @@ use crate::errors;
use rustc_ast::attr::mk_attr;
use rustc_ast::token;
use rustc_ast::{self as ast, AttrItem, AttrStyle};
+ use rustc_parse::{new_parser_from_source_str, unwrap_or_emit_fatal};
use rustc_session::parse::ParseSess;
use rustc_span::FileName;

pub fn inject(krate: &mut ast::Crate, psess: &ParseSess, attrs: &[String]) {
for raw_attr in attrs {
- let mut parser = rustc_parse::new_parser_from_source_str(
+ let mut parser = unwrap_or_emit_fatal(new_parser_from_source_str(
psess,
FileName::cli_crate_attr_source_code(raw_attr),
raw_attr.clone(),
- );
+ ));

let start_span = parser.token.span;
let AttrItem { path, args, tokens: _ } = match parser.parse_attr_item(false) {
4 changes: 2 additions & 2 deletions compiler/rustc_builtin_macros/src/source_util.rs
@@ -12,8 +12,8 @@ use rustc_expand::base::{
};
use rustc_expand::module::DirOwnership;
use rustc_lint_defs::BuiltinLintDiag;
- use rustc_parse::new_parser_from_file;
use rustc_parse::parser::{ForceCollect, Parser};
+ use rustc_parse::{new_parser_from_file, unwrap_or_emit_fatal};
use rustc_session::lint::builtin::INCOMPLETE_INCLUDE;
use rustc_span::source_map::SourceMap;
use rustc_span::symbol::Symbol;
@@ -126,7 +126,7 @@ pub(crate) fn expand_include<'cx>(
return ExpandResult::Ready(DummyResult::any(sp, guar));
}
};
- let p = new_parser_from_file(cx.psess(), &file, Some(sp));
+ let p = unwrap_or_emit_fatal(new_parser_from_file(cx.psess(), &file, Some(sp)));

// If in the included file we have e.g., `mod bar;`,
// then the path of `bar.rs` should be relative to the directory of `file`.
10 changes: 6 additions & 4 deletions compiler/rustc_driver_impl/src/lib.rs
@@ -32,6 +32,7 @@ use rustc_interface::{interface, Queries};
use rustc_lint::unerased_lint_store;
use rustc_metadata::creader::MetadataLoader;
use rustc_metadata::locator;
+ use rustc_parse::{new_parser_from_file, new_parser_from_source_str, unwrap_or_emit_fatal};
use rustc_session::config::{nightly_options, CG_OPTIONS, Z_OPTIONS};
use rustc_session::config::{ErrorOutputType, Input, OutFileName, OutputType};
use rustc_session::getopts::{self, Matches};
@@ -1264,12 +1265,13 @@ pub fn handle_options(early_dcx: &EarlyDiagCtxt, args: &[String]) -> Option<geto
}

fn parse_crate_attrs<'a>(sess: &'a Session) -> PResult<'a, ast::AttrVec> {
- match &sess.io.input {
- Input::File(ifile) => rustc_parse::parse_crate_attrs_from_file(ifile, &sess.psess),
+ let mut parser = unwrap_or_emit_fatal(match &sess.io.input {
+ Input::File(file) => new_parser_from_file(&sess.psess, file, None),
Input::Str { name, input } => {
- rustc_parse::parse_crate_attrs_from_source_str(name.clone(), input.clone(), &sess.psess)
+ new_parser_from_source_str(&sess.psess, name.clone(), input.clone())
}
- }
+ });
+ parser.parse_inner_attributes()
}

/// Runs a closure and catches unwinds triggered by fatal errors.
6 changes: 3 additions & 3 deletions compiler/rustc_expand/src/base.rs
@@ -15,7 +15,7 @@ use rustc_data_structures::sync::{self, Lrc};
use rustc_errors::{DiagCtxt, ErrorGuaranteed, PResult};
use rustc_feature::Features;
use rustc_lint_defs::{BufferedEarlyLint, RegisteredTools};
- use rustc_parse::{parser, MACRO_ARGUMENTS};
+ use rustc_parse::{parser::Parser, MACRO_ARGUMENTS};
use rustc_session::config::CollapseMacroDebuginfo;
use rustc_session::{parse::ParseSess, Limit, Session};
use rustc_span::def_id::{CrateNum, DefId, LocalDefId};
@@ -1149,8 +1149,8 @@ impl<'a> ExtCtxt<'a> {
pub fn monotonic_expander<'b>(&'b mut self) -> expand::MacroExpander<'b, 'a> {
expand::MacroExpander::new(self, true)
}
- pub fn new_parser_from_tts(&self, stream: TokenStream) -> parser::Parser<'a> {
- rustc_parse::stream_to_parser(&self.sess.psess, stream, MACRO_ARGUMENTS)
+ pub fn new_parser_from_tts(&self, stream: TokenStream) -> Parser<'a> {
+ Parser::new(&self.sess.psess, stream, MACRO_ARGUMENTS)
}
pub fn source_map(&self) -> &'a SourceMap {
self.sess.psess.source_map()
5 changes: 3 additions & 2 deletions compiler/rustc_expand/src/module.rs
@@ -5,8 +5,8 @@ use crate::errors::{
use rustc_ast::ptr::P;
use rustc_ast::{token, AttrVec, Attribute, Inline, Item, ModSpans};
use rustc_errors::{Diag, ErrorGuaranteed};
- use rustc_parse::new_parser_from_file;
use rustc_parse::validate_attr;
+ use rustc_parse::{new_parser_from_file, unwrap_or_emit_fatal};
use rustc_session::parse::ParseSess;
use rustc_session::Session;
use rustc_span::symbol::{sym, Ident};
@@ -66,7 +66,8 @@ pub(crate) fn parse_external_mod(
}

// Actually parse the external file as a module.
- let mut parser = new_parser_from_file(&sess.psess, &mp.file_path, Some(span));
+ let mut parser =
+ unwrap_or_emit_fatal(new_parser_from_file(&sess.psess, &mp.file_path, Some(span)));
let (inner_attrs, items, inner_span) =
parser.parse_mod(&token::Eof).map_err(|err| ModError::ParserError(err))?;
attrs.extend(inner_attrs);
5 changes: 2 additions & 3 deletions compiler/rustc_expand/src/proc_macro.rs
@@ -6,7 +6,7 @@ use rustc_ast as ast;
use rustc_ast::ptr::P;
use rustc_ast::tokenstream::TokenStream;
use rustc_errors::ErrorGuaranteed;
- use rustc_parse::parser::ForceCollect;
+ use rustc_parse::parser::{ForceCollect, Parser};
use rustc_session::config::ProcMacroExecutionStrategy;
use rustc_span::profiling::SpannedEventArgRecorder;
use rustc_span::Span;
@@ -154,8 +154,7 @@ impl MultiItemModifier for DeriveProcMacro {
};

let error_count_before = ecx.dcx().err_count();
- let mut parser =
- rustc_parse::stream_to_parser(&ecx.sess.psess, stream, Some("proc-macro derive"));
+ let mut parser = Parser::new(&ecx.sess.psess, stream, Some("proc-macro derive"));
let mut items = vec![];

loop {
18 changes: 8 additions & 10 deletions compiler/rustc_expand/src/proc_macro_server.rs
@@ -13,7 +13,8 @@ use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::sync::Lrc;
use rustc_errors::{Diag, ErrorGuaranteed, MultiSpan, PResult};
use rustc_parse::lexer::nfc_normalize;
- use rustc_parse::parse_stream_from_source_str;
+ use rustc_parse::parser::Parser;
+ use rustc_parse::{new_parser_from_source_str, source_str_to_stream, unwrap_or_emit_fatal};
use rustc_session::parse::ParseSess;
use rustc_span::def_id::CrateNum;
use rustc_span::symbol::{self, sym, Symbol};
@@ -466,7 +467,8 @@

fn literal_from_str(&mut self, s: &str) -> Result<Literal<Self::Span, Self::Symbol>, ()> {
let name = FileName::proc_macro_source_code(s);
- let mut parser = rustc_parse::new_parser_from_source_str(self.psess(), name, s.to_owned());
+ let mut parser =
+ unwrap_or_emit_fatal(new_parser_from_source_str(self.psess(), name, s.to_owned()));

let first_span = parser.token.span.data();
let minus_present = parser.eat(&token::BinOp(token::Minus));
@@ -538,12 +540,12 @@ impl server::TokenStream for Rustc<'_, '_> {
}

fn from_str(&mut self, src: &str) -> Self::TokenStream {
- parse_stream_from_source_str(
+ unwrap_or_emit_fatal(source_str_to_stream(
+ self.psess(),
FileName::proc_macro_source_code(src),
src.to_string(),
- self.psess(),
Some(self.call_site),
- )
+ ))
}

fn to_string(&mut self, stream: &Self::TokenStream) -> String {
@@ -553,11 +555,7 @@
fn expand_expr(&mut self, stream: &Self::TokenStream) -> Result<Self::TokenStream, ()> {
// Parse the expression from our tokenstream.
let expr: PResult<'_, _> = try {
- let mut p = rustc_parse::stream_to_parser(
- self.psess(),
- stream.clone(),
- Some("proc_macro expand expr"),
- );
+ let mut p = Parser::new(self.psess(), stream.clone(), Some("proc_macro expand expr"));
let expr = p.parse_expr()?;
if p.token != token::Eof {
p.unexpected()?;
6 changes: 3 additions & 3 deletions compiler/rustc_interface/src/interface.rs
@@ -14,7 +14,7 @@ use rustc_lint::LintStore;
use rustc_middle::ty;
use rustc_middle::ty::CurrentGcx;
use rustc_middle::util::Providers;
- use rustc_parse::maybe_new_parser_from_source_str;
+ use rustc_parse::new_parser_from_source_str;
use rustc_query_impl::QueryCtxt;
use rustc_query_system::query::print_query_stack;
use rustc_session::config::{self, Cfg, CheckCfg, ExpectedValues, Input, OutFileName};
@@ -67,7 +67,7 @@ pub(crate) fn parse_cfg(dcx: &DiagCtxt, cfgs: Vec<String>) -> Cfg {
};
}

- match maybe_new_parser_from_source_str(&psess, filename, s.to_string()) {
+ match new_parser_from_source_str(&psess, filename, s.to_string()) {
Ok(mut parser) => match parser.parse_meta_item() {
Ok(meta_item) if parser.token == token::Eof => {
if meta_item.path.segments.len() != 1 {
@@ -166,7 +166,7 @@ pub(crate) fn parse_check_cfg(dcx: &DiagCtxt, specs: Vec<String>) -> CheckCfg {
error!("expected `cfg(name, values(\"value1\", \"value2\", ... \"valueN\"))`")
};

- let mut parser = match maybe_new_parser_from_source_str(&psess, filename, s.to_string()) {
+ let mut parser = match new_parser_from_source_str(&psess, filename, s.to_string()) {
Ok(parser) => parser,
Err(errs) => {
errs.into_iter().for_each(|err| err.cancel());
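
Unlike the other call sites in this PR, `parse_cfg` and `parse_check_cfg` recover from a failed lex instead of emitting it. A stripped-down sketch of that recoverable path, assuming the constructor's error side is a collection of diagnostics (as the `errs.into_iter().for_each(|err| err.cancel())` arm above suggests); the helper function itself is hypothetical:

    use rustc_ast::{token, MetaItem};
    use rustc_parse::new_parser_from_source_str;
    use rustc_session::parse::ParseSess;
    use rustc_span::FileName;

    // Hypothetical helper mirroring the recoverable pattern in parse_cfg above:
    // on failure, cancel the parser's diagnostics and let the caller report its
    // own "invalid --cfg" style error instead.
    fn meta_item_from_cfg_spec(psess: &ParseSess, filename: FileName, s: &str) -> Option<MetaItem> {
        match new_parser_from_source_str(psess, filename, s.to_string()) {
            Ok(mut parser) => match parser.parse_meta_item() {
                // Accept only a complete, single meta item: `name` or `name = "value"`.
                Ok(meta_item) if parser.token == token::Eof => Some(meta_item),
                Ok(..) => None,
                Err(err) => {
                    err.cancel();
                    None
                }
            },
            Err(errs) => {
                // Lexing failed; suppress the low-level diagnostics.
                errs.into_iter().for_each(|err| err.cancel());
                None
            }
        }
    }
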
17 changes: 11 additions & 6 deletions compiler/rustc_interface/src/passes.rs
@@ -19,7 +19,9 @@ use rustc_middle::arena::Arena;
use rustc_middle::dep_graph::DepGraph;
use rustc_middle::ty::{self, GlobalCtxt, RegisteredTools, TyCtxt};
use rustc_middle::util::Providers;
- use rustc_parse::{parse_crate_from_file, parse_crate_from_source_str, validate_attr};
+ use rustc_parse::{
+ new_parser_from_file, new_parser_from_source_str, unwrap_or_emit_fatal, validate_attr,
+ };
use rustc_passes::{abi_test, hir_stats, layout_test};
use rustc_resolve::Resolver;
use rustc_session::code_stats::VTableSizeInfo;
@@ -42,11 +44,14 @@ use std::{env, fs, iter};
use tracing::{info, instrument};

pub fn parse<'a>(sess: &'a Session) -> PResult<'a, ast::Crate> {
- let krate = sess.time("parse_crate", || match &sess.io.input {
- Input::File(file) => parse_crate_from_file(file, &sess.psess),
- Input::Str { input, name } => {
- parse_crate_from_source_str(name.clone(), input.clone(), &sess.psess)
- }
+ let krate = sess.time("parse_crate", || {
+ let mut parser = unwrap_or_emit_fatal(match &sess.io.input {
+ Input::File(file) => new_parser_from_file(&sess.psess, file, None),
+ Input::Str { input, name } => {
+ new_parser_from_source_str(&sess.psess, name.clone(), input.clone())
+ }
+ });
+ parser.parse_crate_mod()
})?;

if sess.opts.unstable_opts.input_stats {
4 changes: 2 additions & 2 deletions compiler/rustc_parse/src/lexer/mod.rs
@@ -42,7 +42,7 @@ pub(crate) struct UnmatchedDelim {
pub candidate_span: Option<Span>,
}

- pub(crate) fn parse_token_trees<'psess, 'src>(
+ pub(crate) fn lex_token_trees<'psess, 'src>(
psess: &'psess ParseSess,
mut src: &'src str,
mut start_pos: BytePos,
@@ -66,7 +66,7 @@ pub(crate) fn parse_token_trees<'psess, 'src>(
last_lifetime: None,
};
let (stream, res, unmatched_delims) =
- tokentrees::TokenTreesReader::parse_all_token_trees(string_reader);
+ tokentrees::TokenTreesReader::lex_all_token_trees(string_reader);
match res {
Ok(()) if unmatched_delims.is_empty() => Ok(stream),
_ => {
50 changes: 24 additions & 26 deletions compiler/rustc_parse/src/lexer/tokentrees.rs
@@ -2,6 +2,7 @@ use super::diagnostics::report_suspicious_mismatch_block;
use super::diagnostics::same_indentation_level;
use super::diagnostics::TokenTreeDiagInfo;
use super::{StringReader, UnmatchedDelim};
+ use crate::Parser;
use rustc_ast::token::{self, Delimiter, Token};
use rustc_ast::tokenstream::{DelimSpacing, DelimSpan, Spacing, TokenStream, TokenTree};
use rustc_ast_pretty::pprust::token_to_string;
@@ -17,22 +18,21 @@ pub(super) struct TokenTreesReader<'psess, 'src> {
}

impl<'psess, 'src> TokenTreesReader<'psess, 'src> {
- pub(super) fn parse_all_token_trees(
+ pub(super) fn lex_all_token_trees(
string_reader: StringReader<'psess, 'src>,
) -> (TokenStream, Result<(), Vec<PErr<'psess>>>, Vec<UnmatchedDelim>) {
let mut tt_reader = TokenTreesReader {
string_reader,
token: Token::dummy(),
diag_info: TokenTreeDiagInfo::default(),
};
- let (_open_spacing, stream, res) =
- tt_reader.parse_token_trees(/* is_delimited */ false);
+ let (_open_spacing, stream, res) = tt_reader.lex_token_trees(/* is_delimited */ false);
(stream, res, tt_reader.diag_info.unmatched_delims)
}

- // Parse a stream of tokens into a list of `TokenTree`s. The `Spacing` in
- // the result is that of the opening delimiter.
- fn parse_token_trees(
+ // Lex into a token stream. The `Spacing` in the result is that of the
+ // opening delimiter.
+ fn lex_token_trees(
&mut self,
is_delimited: bool,
) -> (Spacing, TokenStream, Result<(), Vec<PErr<'psess>>>) {
@@ -42,12 +42,10 @@ impl<'psess, 'src> TokenTreesReader<'psess, 'src> {
let mut buf = Vec::new();
loop {
match self.token.kind {
- token::OpenDelim(delim) => {
- buf.push(match self.parse_token_tree_open_delim(delim) {
- Ok(val) => val,
- Err(errs) => return (open_spacing, TokenStream::new(buf), Err(errs)),
- })
- }
+ token::OpenDelim(delim) => buf.push(match self.lex_token_tree_open_delim(delim) {
+ Ok(val) => val,
+ Err(errs) => return (open_spacing, TokenStream::new(buf), Err(errs)),
+ }),
token::CloseDelim(delim) => {
return (
open_spacing,
@@ -95,24 +93,24 @@ impl<'psess, 'src> TokenTreesReader<'psess, 'src> {
err
}

- fn parse_token_tree_open_delim(
+ fn lex_token_tree_open_delim(
&mut self,
open_delim: Delimiter,
) -> Result<TokenTree, Vec<PErr<'psess>>> {
- // The span for beginning of the delimited section
+ // The span for beginning of the delimited section.
let pre_span = self.token.span;

self.diag_info.open_braces.push((open_delim, self.token.span));

- // Parse the token trees within the delimiters.
+ // Lex the token trees within the delimiters.
// We stop at any delimiter so we can try to recover if the user
// uses an incorrect delimiter.
- let (open_spacing, tts, res) = self.parse_token_trees(/* is_delimited */ true);
+ let (open_spacing, tts, res) = self.lex_token_trees(/* is_delimited */ true);
if let Err(errs) = res {
return Err(self.unclosed_delim_err(tts, errs));
}

// Expand to cover the entire delimited token tree
// Expand to cover the entire delimited token tree.
let delim_span = DelimSpan::from_pair(pre_span, self.token.span);
let sm = self.string_reader.psess.source_map();

@@ -150,7 +148,7 @@ impl<'psess, 'src> TokenTreesReader<'psess, 'src> {
self.diag_info.last_unclosed_found_span = Some(self.token.span);
// This is a conservative error: only report the last unclosed
// delimiter. The previous unclosed delimiters could actually be
- // closed! The parser just hasn't gotten to them yet.
+ // closed! The lexer just hasn't gotten to them yet.
if let Some(&(_, sp)) = self.diag_info.open_braces.last() {
unclosed_delimiter = Some(sp);
};
@@ -234,11 +232,11 @@ impl<'psess, 'src> TokenTreesReader<'psess, 'src> {
) -> Vec<PErr<'psess>> {
// If there are unclosed delims, see if there are diff markers and if so, point them
// out instead of complaining about the unclosed delims.
- let mut parser = crate::stream_to_parser(self.string_reader.psess, tts, None);
+ let mut parser = Parser::new(self.string_reader.psess, tts, None);
let mut diff_errs = vec![];
- // Suggest removing a `{` we think appears in an `if`/`while` condition
- // We want to suggest removing a `{` only if we think we're in an `if`/`while` condition, but
- // we have no way of tracking this in the lexer itself, so we piggyback on the parser
+ // Suggest removing a `{` we think appears in an `if`/`while` condition.
+ // We want to suggest removing a `{` only if we think we're in an `if`/`while` condition,
+ // but we have no way of tracking this in the lexer itself, so we piggyback on the parser.
let mut in_cond = false;
while parser.token != token::Eof {
if let Err(diff_err) = parser.err_vcs_conflict_marker() {
@@ -249,14 +247,15 @@ impl<'psess, 'src> TokenTreesReader<'psess, 'src> {
parser.token.kind,
token::CloseDelim(Delimiter::Brace) | token::FatArrow
) {
- // end of the `if`/`while` body, or the end of a `match` guard
+ // End of the `if`/`while` body, or the end of a `match` guard.
in_cond = false;
} else if in_cond && parser.token == token::OpenDelim(Delimiter::Brace) {
// Store the `&&` and `let` to use their spans later when creating the diagnostic
let maybe_andand = parser.look_ahead(1, |t| t.clone());
let maybe_let = parser.look_ahead(2, |t| t.clone());
if maybe_andand == token::OpenDelim(Delimiter::Brace) {
- // This might be the beginning of the `if`/`while` body (i.e., the end of the condition)
+ // This might be the beginning of the `if`/`while` body (i.e., the end of the
+ // condition).
in_cond = false;
} else if maybe_andand == token::AndAnd && maybe_let.is_keyword(kw::Let) {
let mut err = parser.dcx().struct_span_err(
@@ -288,8 +287,7 @@ impl<'psess, 'src> TokenTreesReader<'psess, 'src> {
}

fn close_delim_err(&mut self, delim: Delimiter) -> PErr<'psess> {
- // An unexpected closing delimiter (i.e., there is no
- // matching opening delimiter).
+ // An unexpected closing delimiter (i.e., there is no matching opening delimiter).
let token_str = token_to_string(&self.token);
let msg = format!("unexpected closing delimiter: `{token_str}`");
let mut err = self.string_reader.psess.dcx.struct_span_err(self.token.span, msg);
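
The unclosed-delimiter recovery above is the one place where the lexer builds a `Parser` over its own output: it cannot recognise VCS conflict markers or a stray `{` in an `if`/`while` condition on its own, so it piggybacks on the parser. A stripped-down sketch of that loop, assuming `Parser::bump` advances to the next token as it does elsewhere in rustc_parse (the diagnostic plumbing around it is omitted):

    use rustc_ast::token;
    use rustc_ast::tokenstream::TokenStream;
    use rustc_errors::Diag;
    use rustc_parse::parser::Parser;
    use rustc_session::parse::ParseSess;

    // Sketch: re-walk the already-lexed stream with a real parser purely to find
    // `<<<<<<<` / `=======` / `>>>>>>>` conflict markers, which explain unbalanced
    // delimiters better than a generic "unclosed delimiter" error. The hunk above
    // spells the error type `PErr<'psess>`, a `Diag` alias.
    fn diff_marker_errs<'psess>(psess: &'psess ParseSess, tts: TokenStream) -> Vec<Diag<'psess>> {
        let mut parser = Parser::new(psess, tts, None);
        let mut diff_errs = vec![];
        while parser.token != token::Eof {
            if let Err(diff_err) = parser.err_vcs_conflict_marker() {
                diff_errs.push(diff_err);
            }
            parser.bump(); // assumed token advance; not shown in the hunk above
        }
        diff_errs
    }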