From ec1b84cfce26f56e27789908b009c60576c16e37 Mon Sep 17 00:00:00 2001 From: Geoffry Song Date: Thu, 5 Mar 2015 15:06:49 -0500 Subject: [PATCH] Interpolate AST nodes in quasiquote. This changes the `ToTokens` implementations for expressions, statements, etc. with almost-trivial ones that produce `Interpolated(*Nt(...))` pseudo-tokens. In this way, quasiquote now works the same way as macros do: already-parsed AST fragments are used as-is, not reparsed. The `ToSource` trait is removed. Quasiquote no longer involves pretty-printing at all, which removes the need for the `encode_with_hygiene` hack. All associated machinery is removed. A new `Nonterminal` is added, NtArm, which the parser now interpolates. This is just for quasiquote, not macros (although it could be in the future). `ToTokens` is no longer implemented for `Arg` (although this could be added again) and `Generics` (which I don't think makes sense). This breaks any compiler extensions that relied on the ability of `ToTokens` to turn AST fragments back into inspectable token trees. For this reason, this closes #16987. As such, this is a [breaking-change]. Fixes #16472. Fixes #15962. Fixes #17397. Fixes #16617. --- src/libsyntax/ast.rs | 6 - src/libsyntax/ext/quote.rs | 363 ++++++++++----------------- src/libsyntax/fold.rs | 7 + src/libsyntax/parse/lexer/mod.rs | 107 -------- src/libsyntax/parse/mod.rs | 72 ------ src/libsyntax/parse/parser.rs | 7 +- src/libsyntax/parse/token.rs | 7 + src/libsyntax/print/pprust.rs | 122 +++------ src/test/run-pass-fulldeps/qquote.rs | 100 +++----- 9 files changed, 223 insertions(+), 568 deletions(-) diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index 26463df187122..91c8152161a4b 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -91,12 +91,6 @@ impl Ident { pub fn as_str<'a>(&'a self) -> &'a str { self.name.as_str() } - - pub fn encode_with_hygiene(&self) -> String { - format!("\x00name_{},ctxt_{}\x00", - self.name.usize(), - self.ctxt) - } } impl fmt::Debug for Ident { diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index 5776fa9974076..e100b7705d817 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -30,16 +30,16 @@ pub mod rt { use ext::base::ExtCtxt; use parse::token; use parse; - use print::pprust; use ptr::P; + use std::rc::Rc; - use ast::{TokenTree, Generics, Expr}; + use ast::{TokenTree, Expr}; pub use parse::new_parser_from_tts; - pub use codemap::{BytePos, Span, dummy_spanned}; + pub use codemap::{BytePos, Span, dummy_spanned, DUMMY_SP}; pub trait ToTokens { - fn to_tokens(&self, _cx: &ExtCtxt) -> Vec ; + fn to_tokens(&self, _cx: &ExtCtxt) -> Vec; } impl ToTokens for TokenTree { @@ -70,277 +70,189 @@ pub mod rt { } } - /* Should be (when bugs in default methods are fixed): - - trait ToSource : ToTokens { - // Takes a thing and generates a string containing rust code for it. - pub fn to_source() -> String; - - // If you can make source, you can definitely make tokens. - pub fn to_tokens(cx: &ExtCtxt) -> ~[TokenTree] { - cx.parse_tts(self.to_source()) + impl ToTokens for ast::Ident { + fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { + vec![ast::TtToken(DUMMY_SP, token::Ident(*self, token::Plain))] } } - */ - - // FIXME: Move this trait to pprust and get rid of *_to_str? - pub trait ToSource { - // Takes a thing and generates a string containing rust code for it. 
- fn to_source(&self) -> String; + impl ToTokens for ast::Path { + fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { + vec![ast::TtToken(DUMMY_SP, token::Interpolated(token::NtPath(Box::new(self.clone()))))] + } } - // FIXME (Issue #16472): This should go away after ToToken impls - // are revised to go directly to token-trees. - trait ToSourceWithHygiene : ToSource { - // Takes a thing and generates a string containing rust code - // for it, encoding Idents as special byte sequences to - // maintain hygiene across serialization and deserialization. - fn to_source_with_hygiene(&self) -> String; + impl ToTokens for ast::Ty { + fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { + vec![ast::TtToken(self.span, token::Interpolated(token::NtTy(P(self.clone()))))] + } } - macro_rules! impl_to_source { - (P<$t:ty>, $pp:ident) => ( - impl ToSource for P<$t> { - fn to_source(&self) -> String { - pprust::$pp(&**self) - } - } - impl ToSourceWithHygiene for P<$t> { - fn to_source_with_hygiene(&self) -> String { - pprust::with_hygiene::$pp(&**self) - } - } - ); - ($t:ty, $pp:ident) => ( - impl ToSource for $t { - fn to_source(&self) -> String { - pprust::$pp(self) - } - } - impl ToSourceWithHygiene for $t { - fn to_source_with_hygiene(&self) -> String { - pprust::with_hygiene::$pp(self) - } - } - ); + impl ToTokens for ast::Block { + fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { + vec![ast::TtToken(self.span, token::Interpolated(token::NtBlock(P(self.clone()))))] + } } - fn slice_to_source<'a, T: ToSource>(sep: &'static str, xs: &'a [T]) -> String { - xs.iter() - .map(|i| i.to_source()) - .collect::>() - .connect(sep) - .to_string() + impl ToTokens for P { + fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { + vec![ast::TtToken(self.span, token::Interpolated(token::NtItem(self.clone())))] + } } - fn slice_to_source_with_hygiene<'a, T: ToSourceWithHygiene>( - sep: &'static str, xs: &'a [T]) -> String { - xs.iter() - .map(|i| i.to_source_with_hygiene()) - .collect::>() - .connect(sep) - .to_string() + impl ToTokens for P { + fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { + vec![ast::TtToken(self.span, token::Interpolated(token::NtImplItem(self.clone())))] + } } - macro_rules! impl_to_source_slice { - ($t:ty, $sep:expr) => ( - impl ToSource for [$t] { - fn to_source(&self) -> String { - slice_to_source($sep, self) - } - } - - impl ToSourceWithHygiene for [$t] { - fn to_source_with_hygiene(&self) -> String { - slice_to_source_with_hygiene($sep, self) - } - } - ) + impl ToTokens for P { + fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { + vec![ast::TtToken(self.span, token::Interpolated(token::NtTraitItem(self.clone())))] + } } - impl ToSource for ast::Ident { - fn to_source(&self) -> String { - token::get_ident(*self).to_string() + impl ToTokens for P { + fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { + vec![ast::TtToken(self.span, token::Interpolated(token::NtStmt(self.clone())))] } } - impl ToSourceWithHygiene for ast::Ident { - fn to_source_with_hygiene(&self) -> String { - self.encode_with_hygiene() + impl ToTokens for P { + fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { + vec![ast::TtToken(self.span, token::Interpolated(token::NtExpr(self.clone())))] } } - impl_to_source! { ast::Path, path_to_string } - impl_to_source! { ast::Ty, ty_to_string } - impl_to_source! { ast::Block, block_to_string } - impl_to_source! { ast::Arg, arg_to_string } - impl_to_source! { Generics, generics_to_string } - impl_to_source! { ast::WhereClause, where_clause_to_string } - impl_to_source! { P, item_to_string } - impl_to_source! 
{ P, impl_item_to_string } - impl_to_source! { P, trait_item_to_string } - impl_to_source! { P, stmt_to_string } - impl_to_source! { P, expr_to_string } - impl_to_source! { P, pat_to_string } - impl_to_source! { ast::Arm, arm_to_string } - impl_to_source_slice! { ast::Ty, ", " } - impl_to_source_slice! { P, "\n\n" } - - impl ToSource for ast::Attribute_ { - fn to_source(&self) -> String { - pprust::attribute_to_string(&dummy_spanned(self.clone())) + impl ToTokens for P { + fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { + vec![ast::TtToken(self.span, token::Interpolated(token::NtPat(self.clone())))] } } - impl ToSourceWithHygiene for ast::Attribute_ { - fn to_source_with_hygiene(&self) -> String { - self.to_source() + + impl ToTokens for ast::Arm { + fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { + vec![ast::TtToken(DUMMY_SP, token::Interpolated(token::NtArm(self.clone())))] } } - impl ToSource for str { - fn to_source(&self) -> String { - let lit = dummy_spanned(ast::LitStr( - token::intern_and_get_ident(self), ast::CookedStr)); - pprust::lit_to_string(&lit) - } + macro_rules! impl_to_tokens_slice { + ($t: ty, $sep: expr) => { + impl ToTokens for [$t] { + fn to_tokens(&self, cx: &ExtCtxt) -> Vec { + let mut v = vec![]; + for (i, x) in self.iter().enumerate() { + if i > 0 { + v.push_all(&$sep); + } + v.extend(x.to_tokens(cx)); + } + v + } + } + }; } - impl ToSourceWithHygiene for str { - fn to_source_with_hygiene(&self) -> String { - self.to_source() + + impl_to_tokens_slice! { ast::Ty, [ast::TtToken(DUMMY_SP, token::Comma)] } + impl_to_tokens_slice! { P, [] } + + impl ToTokens for P { + fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { + vec![ast::TtToken(DUMMY_SP, token::Interpolated(token::NtMeta(self.clone())))] } } - impl ToSource for () { - fn to_source(&self) -> String { - "()".to_string() + impl ToTokens for ast::Attribute { + fn to_tokens(&self, cx: &ExtCtxt) -> Vec { + let mut r = vec![]; + // FIXME: The spans could be better + r.push(ast::TtToken(self.span, token::Pound)); + if self.node.style == ast::AttrInner { + r.push(ast::TtToken(self.span, token::Not)); + } + r.push(ast::TtDelimited(self.span, Rc::new(ast::Delimited { + delim: token::Bracket, + open_span: self.span, + tts: self.node.value.to_tokens(cx), + close_span: self.span, + }))); + r } } - impl ToSourceWithHygiene for () { - fn to_source_with_hygiene(&self) -> String { - self.to_source() + + impl ToTokens for str { + fn to_tokens(&self, cx: &ExtCtxt) -> Vec { + let lit = ast::LitStr( + token::intern_and_get_ident(self), ast::CookedStr); + dummy_spanned(lit).to_tokens(cx) } } - impl ToSource for bool { - fn to_source(&self) -> String { - let lit = dummy_spanned(ast::LitBool(*self)); - pprust::lit_to_string(&lit) + impl ToTokens for () { + fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { + vec![ast::TtDelimited(DUMMY_SP, Rc::new(ast::Delimited { + delim: token::Paren, + open_span: DUMMY_SP, + tts: vec![], + close_span: DUMMY_SP, + }))] } } - impl ToSourceWithHygiene for bool { - fn to_source_with_hygiene(&self) -> String { - self.to_source() + + impl ToTokens for ast::Lit { + fn to_tokens(&self, cx: &ExtCtxt) -> Vec { + // FIXME: This is wrong + P(ast::Expr { + id: ast::DUMMY_NODE_ID, + node: ast::ExprLit(P(self.clone())), + span: DUMMY_SP, + }).to_tokens(cx) } } - impl ToSource for char { - fn to_source(&self) -> String { - let lit = dummy_spanned(ast::LitChar(*self)); - pprust::lit_to_string(&lit) + impl ToTokens for bool { + fn to_tokens(&self, cx: &ExtCtxt) -> Vec { + dummy_spanned(ast::LitBool(*self)).to_tokens(cx) } } - impl 
ToSourceWithHygiene for char { - fn to_source_with_hygiene(&self) -> String { - self.to_source() + + impl ToTokens for char { + fn to_tokens(&self, cx: &ExtCtxt) -> Vec { + dummy_spanned(ast::LitChar(*self)).to_tokens(cx) } } - macro_rules! impl_to_source_int { + macro_rules! impl_to_tokens_int { (signed, $t:ty, $tag:expr) => ( - impl ToSource for $t { - fn to_source(&self) -> String { + impl ToTokens for $t { + fn to_tokens(&self, cx: &ExtCtxt) -> Vec { let lit = ast::LitInt(*self as u64, ast::SignedIntLit($tag, ast::Sign::new(*self))); - pprust::lit_to_string(&dummy_spanned(lit)) - } - } - impl ToSourceWithHygiene for $t { - fn to_source_with_hygiene(&self) -> String { - self.to_source() + dummy_spanned(lit).to_tokens(cx) } } ); (unsigned, $t:ty, $tag:expr) => ( - impl ToSource for $t { - fn to_source(&self) -> String { + impl ToTokens for $t { + fn to_tokens(&self, cx: &ExtCtxt) -> Vec { let lit = ast::LitInt(*self as u64, ast::UnsignedIntLit($tag)); - pprust::lit_to_string(&dummy_spanned(lit)) - } - } - impl ToSourceWithHygiene for $t { - fn to_source_with_hygiene(&self) -> String { - self.to_source() + dummy_spanned(lit).to_tokens(cx) } } ); } - impl_to_source_int! { signed, isize, ast::TyIs } - impl_to_source_int! { signed, i8, ast::TyI8 } - impl_to_source_int! { signed, i16, ast::TyI16 } - impl_to_source_int! { signed, i32, ast::TyI32 } - impl_to_source_int! { signed, i64, ast::TyI64 } - - impl_to_source_int! { unsigned, usize, ast::TyUs } - impl_to_source_int! { unsigned, u8, ast::TyU8 } - impl_to_source_int! { unsigned, u16, ast::TyU16 } - impl_to_source_int! { unsigned, u32, ast::TyU32 } - impl_to_source_int! { unsigned, u64, ast::TyU64 } - - // Alas ... we write these out instead. All redundant. - - macro_rules! impl_to_tokens { - ($t:ty) => ( - impl ToTokens for $t { - fn to_tokens(&self, cx: &ExtCtxt) -> Vec { - cx.parse_tts_with_hygiene(self.to_source_with_hygiene()) - } - } - ) - } + impl_to_tokens_int! { signed, isize, ast::TyIs } + impl_to_tokens_int! { signed, i8, ast::TyI8 } + impl_to_tokens_int! { signed, i16, ast::TyI16 } + impl_to_tokens_int! { signed, i32, ast::TyI32 } + impl_to_tokens_int! { signed, i64, ast::TyI64 } - macro_rules! impl_to_tokens_lifetime { - ($t:ty) => ( - impl<'a> ToTokens for $t { - fn to_tokens(&self, cx: &ExtCtxt) -> Vec { - cx.parse_tts_with_hygiene(self.to_source_with_hygiene()) - } - } - ) - } - - impl_to_tokens! { ast::Ident } - impl_to_tokens! { ast::Path } - impl_to_tokens! { P } - impl_to_tokens! { P } - impl_to_tokens! { P } - impl_to_tokens! { P } - impl_to_tokens! { ast::Arm } - impl_to_tokens_lifetime! { &'a [P] } - impl_to_tokens! { ast::Ty } - impl_to_tokens_lifetime! { &'a [ast::Ty] } - impl_to_tokens! { Generics } - impl_to_tokens! { ast::WhereClause } - impl_to_tokens! { P } - impl_to_tokens! { P } - impl_to_tokens! { ast::Block } - impl_to_tokens! { ast::Arg } - impl_to_tokens! { ast::Attribute_ } - impl_to_tokens_lifetime! { &'a str } - impl_to_tokens! { () } - impl_to_tokens! { char } - impl_to_tokens! { bool } - impl_to_tokens! { isize } - impl_to_tokens! { i8 } - impl_to_tokens! { i16 } - impl_to_tokens! { i32 } - impl_to_tokens! { i64 } - impl_to_tokens! { usize } - impl_to_tokens! { u8 } - impl_to_tokens! { u16 } - impl_to_tokens! { u32 } - impl_to_tokens! { u64 } + impl_to_tokens_int! { unsigned, usize, ast::TyUs } + impl_to_tokens_int! { unsigned, u8, ast::TyU8 } + impl_to_tokens_int! { unsigned, u16, ast::TyU16 } + impl_to_tokens_int! { unsigned, u32, ast::TyU32 } + impl_to_tokens_int! 
{ unsigned, u64, ast::TyU64 } pub trait ExtParseUtils { fn parse_item(&self, s: String) -> P; @@ -349,12 +261,6 @@ pub mod rt { fn parse_tts(&self, s: String) -> Vec; } - trait ExtParseUtilsWithHygiene { - // FIXME (Issue #16472): This should go away after ToToken impls - // are revised to go directly to token-trees. - fn parse_tts_with_hygiene(&self, s: String) -> Vec; - } - impl<'a> ExtParseUtils for ExtCtxt<'a> { fn parse_item(&self, s: String) -> P { @@ -386,19 +292,6 @@ pub mod rt { self.parse_sess()) } } - - impl<'a> ExtParseUtilsWithHygiene for ExtCtxt<'a> { - - fn parse_tts_with_hygiene(&self, s: String) -> Vec { - use parse::with_hygiene::parse_tts_from_source_str; - parse_tts_from_source_str("".to_string(), - s, - self.cfg(), - self.parse_sess()) - } - - } - } pub fn expand_quote_tokens<'cx>(cx: &'cx mut ExtCtxt, diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs index d7033ce7e48a4..486c7b5870789 100644 --- a/src/libsyntax/fold.rs +++ b/src/libsyntax/fold.rs @@ -682,6 +682,13 @@ pub fn noop_fold_interpolated(nt: token::Nonterminal, fld: &mut T) token::NtMeta(meta_item) => token::NtMeta(fld.fold_meta_item(meta_item)), token::NtPath(box path) => token::NtPath(box fld.fold_path(path)), token::NtTT(tt) => token::NtTT(P(fld.fold_tt(&*tt))), + token::NtArm(arm) => token::NtArm(fld.fold_arm(arm)), + token::NtImplItem(arm) => + token::NtImplItem(fld.fold_impl_item(arm) + .expect_one("expected fold to produce exactly one item")), + token::NtTraitItem(arm) => + token::NtTraitItem(fld.fold_trait_item(arm) + .expect_one("expected fold to produce exactly one item")), } } diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index f891318659a87..ba6b266613adb 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -18,7 +18,6 @@ use parse::token::str_to_ident; use std::borrow::{IntoCow, Cow}; use std::char; -use std::fmt; use std::mem::replace; use std::rc::Rc; @@ -70,11 +69,6 @@ pub struct StringReader<'a> { pub peek_tok: token::Token, pub peek_span: Span, - // FIXME (Issue #16472): This field should go away after ToToken impls - // are revised to go directly to token-trees. - /// Is \x00,\x00 is interpreted as encoded ast::Ident? - read_embedded_ident: bool, - // cache a direct reference to the source text, so that we don't have to // retrieve it via `self.filemap.src.as_ref().unwrap()` all the time. source_text: Rc @@ -129,17 +123,6 @@ impl<'a> Reader for TtReader<'a> { } } -// FIXME (Issue #16472): This function should go away after -// ToToken impls are revised to go directly to token-trees. -pub fn make_reader_with_embedded_idents<'b>(span_diagnostic: &'b SpanHandler, - filemap: Rc) - -> StringReader<'b> { - let mut sr = StringReader::new_raw(span_diagnostic, filemap); - sr.read_embedded_ident = true; - sr.advance_token(); - sr -} - impl<'a> StringReader<'a> { /// For comments.rs, which hackily pokes into pos and curr pub fn new_raw<'b>(span_diagnostic: &'b SpanHandler, @@ -161,7 +144,6 @@ impl<'a> StringReader<'a> { /* dummy values; not read */ peek_tok: token::Eof, peek_span: codemap::DUMMY_SP, - read_embedded_ident: false, source_text: source_text }; sr.bump(); @@ -577,81 +559,6 @@ impl<'a> StringReader<'a> { }) } - // FIXME (Issue #16472): The scan_embedded_hygienic_ident function - // should go away after we revise the syntax::ext::quote::ToToken - // impls to go directly to token-trees instead of thing -> string - // -> token-trees. (The function is currently used to resolve - // Issues #15750 and #15962.) 
- // - // Since this function is only used for certain internal macros, - // and the functionality it provides is not exposed to end user - // programs, pnkfelix deliberately chose to write it in a way that - // favors rustc debugging effectiveness over runtime efficiency. - - /// Scan through input of form \x00name_NNNNNN,ctxt_CCCCCCC\x00 - /// whence: `NNNNNN` is a string of characters forming an integer - /// (the name) and `CCCCCCC` is a string of characters forming an - /// integer (the ctxt), separate by a comma and delimited by a - /// `\x00` marker. - #[inline(never)] - fn scan_embedded_hygienic_ident(&mut self) -> ast::Ident { - fn bump_expecting_char<'a,D:fmt::Debug>(r: &mut StringReader<'a>, - c: char, - described_c: D, - whence: &str) { - match r.curr { - Some(r_c) if r_c == c => r.bump(), - Some(r_c) => panic!("expected {:?}, hit {:?}, {}", described_c, r_c, whence), - None => panic!("expected {:?}, hit EOF, {}", described_c, whence), - } - } - - let whence = "while scanning embedded hygienic ident"; - - // skip over the leading `\x00` - bump_expecting_char(self, '\x00', "nul-byte", whence); - - // skip over the "name_" - for c in "name_".chars() { - bump_expecting_char(self, c, c, whence); - } - - let start_bpos = self.last_pos; - let base = 10; - - // find the integer representing the name - self.scan_digits(base, base); - let encoded_name : u32 = self.with_str_from(start_bpos, |s| { - u32::from_str_radix(s, 10).unwrap_or_else(|_| { - panic!("expected digits representing a name, got {:?}, {}, range [{:?},{:?}]", - s, whence, start_bpos, self.last_pos); - }) - }); - - // skip over the `,` - bump_expecting_char(self, ',', "comma", whence); - - // skip over the "ctxt_" - for c in "ctxt_".chars() { - bump_expecting_char(self, c, c, whence); - } - - // find the integer representing the ctxt - let start_bpos = self.last_pos; - self.scan_digits(base, base); - let encoded_ctxt : ast::SyntaxContext = self.with_str_from(start_bpos, |s| { - u32::from_str_radix(s, 10).unwrap_or_else(|_| { - panic!("expected digits representing a ctxt, got {:?}, {}", s, whence); - }) - }); - - // skip over the `\x00` - bump_expecting_char(self, '\x00', "nul-byte", whence); - - ast::Ident { name: ast::Name(encoded_name), - ctxt: encoded_ctxt, } - } - /// Scan through any digits (base `scan_radix`) or underscores, /// and return how many digits there were. /// @@ -1019,20 +926,6 @@ impl<'a> StringReader<'a> { return token::Literal(num, suffix) } - if self.read_embedded_ident { - match (c.unwrap(), self.nextch(), self.nextnextch()) { - ('\x00', Some('n'), Some('a')) => { - let ast_ident = self.scan_embedded_hygienic_ident(); - return if self.curr_is(':') && self.nextch_is(':') { - token::Ident(ast_ident, token::ModName) - } else { - token::Ident(ast_ident, token::Plain) - }; - } - _ => {} - } - } - match c.expect("next_token_inner called at EOF") { // One-byte tokens. ';' => { self.bump(); return token::Semi; } diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 4b7b7b66582e1..fd7b71699b56a 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -168,9 +168,6 @@ pub fn parse_stmt_from_source_str(name: String, maybe_aborted(p.parse_stmt(), p) } -// Note: keep in sync with `with_hygiene::parse_tts_from_source_str` -// until #16472 is resolved. -// // Warning: This parses with quote_depth > 0, which is not the default. 
pub fn parse_tts_from_source_str(name: String, source: String, @@ -188,8 +185,6 @@ pub fn parse_tts_from_source_str(name: String, maybe_aborted(panictry!(p.parse_all_token_trees()),p) } -// Note: keep in sync with `with_hygiene::new_parser_from_source_str` -// until #16472 is resolved. // Create a new parser from a source string pub fn new_parser_from_source_str<'a>(sess: &'a ParseSess, cfg: ast::CrateConfig, @@ -222,8 +217,6 @@ pub fn new_sub_parser_from_file<'a>(sess: &'a ParseSess, p } -// Note: keep this in sync with `with_hygiene::filemap_to_parser` until -// #16472 is resolved. /// Given a filemap and config, return a parser pub fn filemap_to_parser<'a>(sess: &'a ParseSess, filemap: Rc, @@ -279,8 +272,6 @@ pub fn string_to_filemap(sess: &ParseSess, source: String, path: String) sess.span_diagnostic.cm.new_filemap(path, source) } -// Note: keep this in sync with `with_hygiene::filemap_to_tts` (apart -// from the StringReader constructor), until #16472 is resolved. /// Given a filemap, produce a sequence of token-trees pub fn filemap_to_tts(sess: &ParseSess, filemap: Rc) -> Vec { @@ -302,69 +293,6 @@ pub fn tts_to_parser<'a>(sess: &'a ParseSess, p } -// FIXME (Issue #16472): The `with_hygiene` mod should go away after -// ToToken impls are revised to go directly to token-trees. -pub mod with_hygiene { - use ast; - use codemap::FileMap; - use parse::parser::Parser; - use std::rc::Rc; - use super::ParseSess; - use super::{maybe_aborted, string_to_filemap, tts_to_parser}; - - // Note: keep this in sync with `super::parse_tts_from_source_str` until - // #16472 is resolved. - // - // Warning: This parses with quote_depth > 0, which is not the default. - pub fn parse_tts_from_source_str(name: String, - source: String, - cfg: ast::CrateConfig, - sess: &ParseSess) -> Vec { - let mut p = new_parser_from_source_str( - sess, - cfg, - name, - source - ); - p.quote_depth += 1; - // right now this is re-creating the token trees from ... token trees. - maybe_aborted(panictry!(p.parse_all_token_trees()),p) - } - - // Note: keep this in sync with `super::new_parser_from_source_str` until - // #16472 is resolved. - // Create a new parser from a source string - fn new_parser_from_source_str<'a>(sess: &'a ParseSess, - cfg: ast::CrateConfig, - name: String, - source: String) -> Parser<'a> { - filemap_to_parser(sess, string_to_filemap(sess, source, name), cfg) - } - - // Note: keep this in sync with `super::filemap_to_parserr` until - // #16472 is resolved. - /// Given a filemap and config, return a parser - fn filemap_to_parser<'a>(sess: &'a ParseSess, - filemap: Rc, - cfg: ast::CrateConfig) -> Parser<'a> { - tts_to_parser(sess, filemap_to_tts(sess, filemap), cfg) - } - - // Note: keep this in sync with `super::filemap_to_tts` until - // #16472 is resolved. - /// Given a filemap, produce a sequence of token-trees - fn filemap_to_tts(sess: &ParseSess, filemap: Rc) - -> Vec { - // it appears to me that the cfg doesn't matter here... indeed, - // parsing tt's probably shouldn't require a parser at all. 
- use super::lexer::make_reader_with_embedded_idents as make_reader; - let cfg = Vec::new(); - let srdr = make_reader(&sess.span_diagnostic, filemap); - let mut p1 = Parser::new(sess, cfg, box srdr); - panictry!(p1.parse_all_token_trees()) - } -} - /// Abort if necessary pub fn maybe_aborted(result: T, p: Parser) -> T { p.abort_if_errors(); diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 0515d1ae945bd..f88007aeab210 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -1141,7 +1141,8 @@ impl<'a> Parser<'a> { &token::OpenDelim(token::Brace), &token::CloseDelim(token::Brace), seq_sep_none(), - |p| { + |p| -> PResult> { + maybe_whole!(no_clone p, NtTraitItem); let lo = p.span.lo; let mut attrs = p.parse_outer_attributes(); @@ -2930,6 +2931,8 @@ impl<'a> Parser<'a> { } pub fn parse_arm_nopanic(&mut self) -> PResult { + maybe_whole!(no_clone self, NtArm); + let attrs = self.parse_outer_attributes(); let pats = try!(self.parse_pats()); let mut guard = None; @@ -4293,6 +4296,8 @@ impl<'a> Parser<'a> { /// Parse an impl item. pub fn parse_impl_item(&mut self) -> PResult> { + maybe_whole!(no_clone self, NtImplItem); + let lo = self.span.lo; let mut attrs = self.parse_outer_attributes(); let vis = try!(self.parse_visibility()); diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index 2bb74944ce91a..aa1353aa48fed 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -373,6 +373,10 @@ pub enum Nonterminal { NtMeta(P), NtPath(Box), NtTT(P), // needs P'ed to break a circularity + // These is not exposed to macros, but is used by quasiquote. + NtArm(ast::Arm), + NtImplItem(P), + NtTraitItem(P), } impl fmt::Debug for Nonterminal { @@ -388,6 +392,9 @@ impl fmt::Debug for Nonterminal { NtMeta(..) => f.pad("NtMeta(..)"), NtPath(..) => f.pad("NtPath(..)"), NtTT(..) => f.pad("NtTT(..)"), + NtArm(..) => f.pad("NtArm(..)"), + NtImplItem(..) => f.pad("NtImplItem(..)"), + NtTraitItem(..) => f.pad("NtTraitItem(..)"), } } } diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index c51b5d039784d..3af51c0966012 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -66,7 +66,6 @@ pub struct State<'a> { cur_cmnt_and_lit: CurrentCommentAndLiteral, boxes: Vec, ann: &'a (PpAnn+'a), - encode_idents_with_hygiene: bool, } pub fn rust_printer<'a>(writer: Box) -> State<'a> { @@ -87,7 +86,6 @@ pub fn rust_printer_annotated<'a>(writer: Box, }, boxes: Vec::new(), ann: ann, - encode_idents_with_hygiene: false, } } @@ -179,7 +177,6 @@ impl<'a> State<'a> { }, boxes: Vec::new(), ann: ann, - encode_idents_with_hygiene: false, } } } @@ -295,103 +292,99 @@ pub fn token_to_string(tok: &Token) -> String { token::SpecialVarNt(var) => format!("${}", var.as_str()), token::Interpolated(ref nt) => match *nt { - token::NtExpr(ref e) => expr_to_string(&**e), - token::NtMeta(ref e) => meta_item_to_string(&**e), - token::NtTy(ref e) => ty_to_string(&**e), - token::NtPath(ref e) => path_to_string(&**e), - token::NtItem(..) => "an interpolated item".to_string(), - token::NtBlock(..) => "an interpolated block".to_string(), - token::NtStmt(..) => "an interpolated statement".to_string(), - token::NtPat(..) => "an interpolated pattern".to_string(), - token::NtIdent(..) => "an interpolated identifier".to_string(), - token::NtTT(..) 
=> "an interpolated tt".to_string(), + token::NtExpr(ref e) => expr_to_string(&**e), + token::NtMeta(ref e) => meta_item_to_string(&**e), + token::NtTy(ref e) => ty_to_string(&**e), + token::NtPath(ref e) => path_to_string(&**e), + token::NtItem(..) => "an interpolated item".to_string(), + token::NtBlock(..) => "an interpolated block".to_string(), + token::NtStmt(..) => "an interpolated statement".to_string(), + token::NtPat(..) => "an interpolated pattern".to_string(), + token::NtIdent(..) => "an interpolated identifier".to_string(), + token::NtTT(..) => "an interpolated tt".to_string(), + token::NtArm(..) => "an interpolated arm".to_string(), + token::NtImplItem(..) => "an interpolated impl item".to_string(), + token::NtTraitItem(..) => "an interpolated trait item".to_string(), } } } -// FIXME (Issue #16472): the thing_to_string_impls macro should go away -// after we revise the syntax::ext::quote::ToToken impls to go directly -// to token-trees instead of thing -> string -> token-trees. - -macro_rules! thing_to_string_impls { - ($to_string:ident) => { - pub fn ty_to_string(ty: &ast::Ty) -> String { - $to_string(|s| s.print_type(ty)) + to_string(|s| s.print_type(ty)) } pub fn bounds_to_string(bounds: &[ast::TyParamBound]) -> String { - $to_string(|s| s.print_bounds("", bounds)) + to_string(|s| s.print_bounds("", bounds)) } pub fn pat_to_string(pat: &ast::Pat) -> String { - $to_string(|s| s.print_pat(pat)) + to_string(|s| s.print_pat(pat)) } pub fn arm_to_string(arm: &ast::Arm) -> String { - $to_string(|s| s.print_arm(arm)) + to_string(|s| s.print_arm(arm)) } pub fn expr_to_string(e: &ast::Expr) -> String { - $to_string(|s| s.print_expr(e)) + to_string(|s| s.print_expr(e)) } pub fn lifetime_to_string(e: &ast::Lifetime) -> String { - $to_string(|s| s.print_lifetime(e)) + to_string(|s| s.print_lifetime(e)) } pub fn tt_to_string(tt: &ast::TokenTree) -> String { - $to_string(|s| s.print_tt(tt)) + to_string(|s| s.print_tt(tt)) } pub fn tts_to_string(tts: &[ast::TokenTree]) -> String { - $to_string(|s| s.print_tts(tts)) + to_string(|s| s.print_tts(tts)) } pub fn stmt_to_string(stmt: &ast::Stmt) -> String { - $to_string(|s| s.print_stmt(stmt)) + to_string(|s| s.print_stmt(stmt)) } pub fn attr_to_string(attr: &ast::Attribute) -> String { - $to_string(|s| s.print_attribute(attr)) + to_string(|s| s.print_attribute(attr)) } pub fn item_to_string(i: &ast::Item) -> String { - $to_string(|s| s.print_item(i)) + to_string(|s| s.print_item(i)) } pub fn impl_item_to_string(i: &ast::ImplItem) -> String { - $to_string(|s| s.print_impl_item(i)) + to_string(|s| s.print_impl_item(i)) } pub fn trait_item_to_string(i: &ast::TraitItem) -> String { - $to_string(|s| s.print_trait_item(i)) + to_string(|s| s.print_trait_item(i)) } pub fn generics_to_string(generics: &ast::Generics) -> String { - $to_string(|s| s.print_generics(generics)) + to_string(|s| s.print_generics(generics)) } pub fn where_clause_to_string(i: &ast::WhereClause) -> String { - $to_string(|s| s.print_where_clause(i)) + to_string(|s| s.print_where_clause(i)) } pub fn fn_block_to_string(p: &ast::FnDecl) -> String { - $to_string(|s| s.print_fn_block_args(p)) + to_string(|s| s.print_fn_block_args(p)) } pub fn path_to_string(p: &ast::Path) -> String { - $to_string(|s| s.print_path(p, false, 0)) + to_string(|s| s.print_path(p, false, 0)) } pub fn ident_to_string(id: &ast::Ident) -> String { - $to_string(|s| s.print_ident(*id)) + to_string(|s| s.print_ident(*id)) } pub fn fun_to_string(decl: &ast::FnDecl, unsafety: ast::Unsafety, name: ast::Ident, 
opt_explicit_self: Option<&ast::ExplicitSelf_>, generics: &ast::Generics) -> String { - $to_string(|s| { + to_string(|s| { try!(s.head("")); try!(s.print_fn(decl, unsafety, abi::Rust, Some(name), generics, opt_explicit_self, ast::Inherited)); @@ -401,7 +394,7 @@ pub fn fun_to_string(decl: &ast::FnDecl, unsafety: ast::Unsafety, name: ast::Ide } pub fn block_to_string(blk: &ast::Block) -> String { - $to_string(|s| { + to_string(|s| { // containing cbox, will be closed by print-block at } try!(s.cbox(indent_unit)); // head-ibox, will be closed by print-block after { @@ -411,59 +404,31 @@ pub fn block_to_string(blk: &ast::Block) -> String { } pub fn meta_item_to_string(mi: &ast::MetaItem) -> String { - $to_string(|s| s.print_meta_item(mi)) + to_string(|s| s.print_meta_item(mi)) } pub fn attribute_to_string(attr: &ast::Attribute) -> String { - $to_string(|s| s.print_attribute(attr)) + to_string(|s| s.print_attribute(attr)) } pub fn lit_to_string(l: &ast::Lit) -> String { - $to_string(|s| s.print_literal(l)) + to_string(|s| s.print_literal(l)) } pub fn explicit_self_to_string(explicit_self: &ast::ExplicitSelf_) -> String { - $to_string(|s| s.print_explicit_self(explicit_self, ast::MutImmutable).map(|_| {})) + to_string(|s| s.print_explicit_self(explicit_self, ast::MutImmutable).map(|_| {})) } pub fn variant_to_string(var: &ast::Variant) -> String { - $to_string(|s| s.print_variant(var)) + to_string(|s| s.print_variant(var)) } pub fn arg_to_string(arg: &ast::Arg) -> String { - $to_string(|s| s.print_arg(arg)) + to_string(|s| s.print_arg(arg)) } pub fn mac_to_string(arg: &ast::Mac) -> String { - $to_string(|s| s.print_mac(arg, ::parse::token::Paren)) -} - -} } - -thing_to_string_impls! { to_string } - -// FIXME (Issue #16472): the whole `with_hygiene` mod should go away -// after we revise the syntax::ext::quote::ToToken impls to go directly -// to token-trees instea of thing -> string -> token-trees. - -pub mod with_hygiene { - use abi; - use ast; - use std::io; - use super::indent_unit; - - // This function is the trick that all the rest of the routines - // hang on. - pub fn to_string_hyg(f: F) -> String where - F: FnOnce(&mut super::State) -> io::Result<()>, - { - super::to_string(move |s| { - s.encode_idents_with_hygiene = true; - f(s) - }) - } - - thing_to_string_impls! { to_string_hyg } + to_string(|s| s.print_mac(arg, ::parse::token::Paren)) } pub fn visibility_qualified(vis: ast::Visibility, s: &str) -> String { @@ -2011,12 +1976,7 @@ impl<'a> State<'a> { } pub fn print_ident(&mut self, ident: ast::Ident) -> io::Result<()> { - if self.encode_idents_with_hygiene { - let encoded = ident.encode_with_hygiene(); - try!(word(&mut self.s, &encoded[..])) - } else { - try!(word(&mut self.s, &token::get_ident(ident))) - } + try!(word(&mut self.s, &token::get_ident(ident))); self.ann.post(self, NodeIdent(&ident)) } diff --git a/src/test/run-pass-fulldeps/qquote.rs b/src/test/run-pass-fulldeps/qquote.rs index 7e11b9d9f2789..95c9654ff1c3c 100644 --- a/src/test/run-pass-fulldeps/qquote.rs +++ b/src/test/run-pass-fulldeps/qquote.rs @@ -8,87 +8,55 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
-// ignore-pretty -// ignore-test - -#![feature(quote)] +#![feature(quote, rustc_private)] extern crate syntax; -use std::io::*; - -use syntax::diagnostic; -use syntax::ast; -use syntax::codemap; -use syntax::codemap::span; -use syntax::parse; -use syntax::print::*; - - -trait fake_ext_ctxt { - fn cfg() -> ast::CrateConfig; - fn parse_sess() -> parse::parse_sess; - fn call_site() -> span; - fn ident_of(st: &str) -> ast::ident; -} - -type fake_session = parse::parse_sess; +use syntax::codemap::DUMMY_SP; +use syntax::print::pprust::*; -impl fake_ext_ctxt for fake_session { - fn cfg() -> ast::CrateConfig { Vec::new() } - fn parse_sess() -> parse::parse_sess { self } - fn call_site() -> span { - codemap::span { - lo: codemap::BytePos(0), - hi: codemap::BytePos(0), - expn_id: codemap::NO_EXPANSION +fn main() { + let ps = syntax::parse::new_parse_sess(); + let mut cx = syntax::ext::base::ExtCtxt::new( + &ps, vec![], + syntax::ext::expand::ExpansionConfig::default("qquote".to_string())); + cx.bt_push(syntax::codemap::ExpnInfo { + call_site: DUMMY_SP, + callee: syntax::codemap::NameAndSpan { + name: "".to_string(), + format: syntax::codemap::MacroBang, + allow_internal_unstable: false, + span: None, } - } - fn ident_of(st: &str) -> ast::ident { - self.interner.intern(st) - } -} - -fn mk_ctxt() -> fake_ext_ctxt { - parse::new_parse_sess(None) as fake_ext_ctxt -} + }); + let cx = &mut cx; -fn main() { - let cx = mk_ctxt(); + macro_rules! check { + ($f: ident, $($e: expr),+; $expect: expr) => ({ + $(assert_eq!($f(&$e), $expect);)+ + }); + } let abc = quote_expr!(cx, 23); - check_pp(ext_cx, abc, pprust::print_expr, "23".to_string()); - + check!(expr_to_string, abc, *quote_expr!(cx, $abc); "23"); let ty = quote_ty!(cx, isize); - check_pp(ext_cx, ty, pprust::print_type, "isize".to_string()); + check!(ty_to_string, ty, *quote_ty!(cx, $ty); "isize"); - let item = quote_item!(cx, static x : isize = 10;).get(); - check_pp(ext_cx, item, pprust::print_item, "static x: isize = 10;".to_string()); + let item = quote_item!(cx, static x: $ty = 10;).unwrap(); + check!(item_to_string, item, quote_item!(cx, $item).unwrap(); "static x: isize = 10;"); - let stmt = quote_stmt!(cx, let x = 20;); - check_pp(ext_cx, *stmt, pprust::print_stmt, "let x = 20;".to_string()); + let twenty: u16 = 20; + let stmt = quote_stmt!(cx, let x = $twenty;).unwrap(); + check!(stmt_to_string, stmt, *quote_stmt!(cx, $stmt).unwrap(); "let x = 20u16;"); let pat = quote_pat!(cx, Some(_)); - check_pp(ext_cx, pat, pprust::print_pat, "Some(_)".to_string()); + check!(pat_to_string, pat, *quote_pat!(cx, $pat); "Some(_)"); - let arm = quote_arm!(cx, (ref x, ref y) => (x, y)); - check_pp(ext_cx, arm, pprust::print_stmt, "(ref x, ref y) = (x, y)".to_string()); + let expr = quote_expr!(cx, (x, y)); + let arm = quote_arm!(cx, (ref x, ref y) => $expr,); + check!(arm_to_string, arm, quote_arm!(cx, $arm); " (ref x, ref y) => (x, y),"); let attr = quote_attr!(cx, #![cfg(foo = "bar")]); - check_pp(ext_cx, attr, pprust::print_attribute, "#![cfg(foo = "bar")]".to_string()); -} - -fn check_pp(cx: fake_ext_ctxt, - expr: T, f: |pprust::ps, T|, expect: String) { - let s = io::with_str_writer(|wr| { - let pp = pprust::rust_printer(wr, cx.parse_sess().interner); - f(pp, expr); - pp::eof(pp.s); - }); - stdout().write_line(s); - if expect != "".to_string() { - println!("expect: '%s', got: '%s'", expect, s); - assert_eq!(s, expect); - } + check!(attribute_to_string, attr, quote_attr!(cx, $attr); r#"#![cfg(foo = "bar")]"#); }
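
For reference, the shape of the change can be illustrated with a small self-contained sketch. This is a toy model, not the real libsyntax API: the types below are simplified stand-ins for `ast::TokenTree`, `token::Nonterminal`, and the quasiquote `ToTokens` trait. It only models the mechanism the patch introduces — a splice emits a single `Interpolated` pseudo-token carrying the already-parsed node, and the consumer (the parser's `maybe_whole!` checks in the real code) unwraps that node as-is instead of reparsing it.

    // Minimal sketch of the post-patch quasiquote mechanism.
    // All names here are simplified stand-ins, NOT the real libsyntax types.

    #[allow(dead_code)]
    #[derive(Clone, Debug)]
    enum Nonterminal {
        Expr(String), // stand-in for token::NtExpr(P<ast::Expr>)
        Arm(String),  // stand-in for the newly added token::NtArm(ast::Arm)
    }

    #[allow(dead_code)]
    #[derive(Clone, Debug)]
    enum TokenTree {
        Token(String),             // stand-in for ast::TtToken(span, token)
        Interpolated(Nonterminal), // stand-in for token::Interpolated(..)
    }

    trait ToTokens {
        fn to_tokens(&self) -> Vec<TokenTree>;
    }

    // An "already parsed" expression fragment, as a quasiquote splice sees it.
    struct Expr(String);

    impl ToTokens for Expr {
        // Post-patch behaviour: wrap the parsed node in one Interpolated
        // pseudo-token. Nothing is pretty-printed or re-lexed, so identifier
        // hygiene survives without the old encode_with_hygiene round-trip.
        fn to_tokens(&self) -> Vec<TokenTree> {
            vec![TokenTree::Interpolated(Nonterminal::Expr(self.0.clone()))]
        }
    }

    // Toy analogue of the parser's `maybe_whole!` check: if the next token is
    // an interpolated nonterminal of the expected kind, use the node as-is.
    fn parse_expr(tokens: &[TokenTree]) -> String {
        match tokens.first() {
            Some(TokenTree::Interpolated(Nonterminal::Expr(e))) => e.clone(),
            _ => format!("<would parse {} raw tokens here>", tokens.len()),
        }
    }

    fn main() {
        let spliced = Expr("a + b".to_string());
        let tts = spliced.to_tokens();
        assert_eq!(parse_expr(&tts), "a + b");
        println!("{:?}", tts);
    }

Because nothing round-trips through strings any more, the `with_hygiene` pretty-printing and lexing machinery (and the `\x00name_N,ctxt_C\x00` ident encoding) has no remaining users, which is why the patch can delete it outright.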