
Commit

Auto merge of rust-lang#94875 - matthiaskrgr:rollup-tq1li2d, r=matthiaskrgr

Rollup of 3 pull requests

Successful merges:

 - rust-lang#94150 (rustdoc-json: Include GenericParamDefKind::Type::synthetic in JSON)
 - rust-lang#94833 ([2/2] Implement macro meta-variable expression) (usage sketch below the description)
 - rust-lang#94863 (Remove redundant slicing of whole ranges in `bootstrap`)

Failed merges:

r? `@ghost`
`@rustbot` modify labels: rollup
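
For context on rust-lang#94833: the sketch below is not part of this commit; it shows roughly how the new meta-variable expressions can be used from a macro, assuming the nightly `macro_metavar_expr` feature gate, and may need adjustment on other toolchains.

#![feature(macro_metavar_expr)]

macro_rules! count_idents {
    ( $( $i:ident ),* ) => {
        // `${count(i)}` expands to an integer literal giving the number of
        // times `$i` was matched.
        ${count(i)}
    };
}

fn main() {
    assert_eq!(count_idents!(a, b, c), 3usize);
}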
bors committed Mar 12, 2022
2 parents 012720f + 49e0137 commit f8a29bd
Showing 12 changed files with 905 additions and 103 deletions.
157 changes: 147 additions & 10 deletions compiler/rustc_expand/src/mbe/transcribe.rs
@@ -1,15 +1,15 @@
use crate::base::ExtCtxt;
use crate::mbe;
use crate::mbe::macro_parser::{MatchedNonterminal, MatchedSeq, NamedMatch};

use crate::mbe::{self, MetaVarExpr};
use rustc_ast::mut_visit::{self, MutVisitor};
use rustc_ast::token::{self, NtTT, Token};
use rustc_ast::token::{self, NtTT, Token, TokenKind};
use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndSpacing};
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::sync::Lrc;
use rustc_errors::{pluralize, PResult};
use rustc_errors::{DiagnosticBuilder, ErrorGuaranteed};
use rustc_span::hygiene::{LocalExpnId, Transparency};
use rustc_span::symbol::MacroRulesNormalizedIdent;
use rustc_span::symbol::{sym, Ident, MacroRulesNormalizedIdent};
use rustc_span::Span;

use smallvec::{smallvec, SmallVec};
@@ -411,13 +411,150 @@ fn lockstep_iter_size(
}
}

/// Used solely by the `count` meta-variable expression, counts the outer-most repetitions at a
/// given optional nested depth.
///
/// For example, a macro parameter of `$( { $( $foo:ident ),* } )*` called with `{ a, b } { c }`:
///
/// * `[ $( ${count(foo)} ),* ]` will return [2, 1] with a, b = 2 and c = 1
/// * `[ $( ${count(foo, 0)} ),* ]` will be the same as `[ $( ${count(foo)} ),* ]`
/// * `[ $( ${count(foo, 1)} ),* ]` will return an error because `${count(foo, 1)}` is
/// declared inside a single repetition and the index `1` implies two nested repetitions.
fn count_repetitions<'a>(
cx: &ExtCtxt<'a>,
depth_opt: Option<usize>,
mut matched: &NamedMatch,
repeats: &[(usize, usize)],
sp: &DelimSpan,
) -> PResult<'a, usize> {
// Recursively count the number of matches in `matched` at given depth
// (or at the top-level of `matched` if no depth is given).
fn count<'a>(
cx: &ExtCtxt<'a>,
declared_lhs_depth: usize,
depth_opt: Option<usize>,
matched: &NamedMatch,
sp: &DelimSpan,
) -> PResult<'a, usize> {
match matched {
MatchedNonterminal(_) => {
if declared_lhs_depth == 0 {
return Err(cx.struct_span_err(
sp.entire(),
"`count` can not be placed inside the inner-most repetition",
));
}
match depth_opt {
None => Ok(1),
Some(_) => Err(out_of_bounds_err(cx, declared_lhs_depth, sp.entire(), "count")),
}
}
MatchedSeq(ref named_matches) => {
let new_declared_lhs_depth = declared_lhs_depth + 1;
match depth_opt {
None => named_matches
.iter()
.map(|elem| count(cx, new_declared_lhs_depth, None, elem, sp))
.sum(),
Some(0) => Ok(named_matches.len()),
Some(depth) => named_matches
.iter()
.map(|elem| count(cx, new_declared_lhs_depth, Some(depth - 1), elem, sp))
.sum(),
}
}
}
}
// `repeats` records all of the nested levels at which we are currently
// matching meta-variables. The meta-var-expr `count($x)` only counts
// matches that occur in this "subtree" of the `NamedMatch` where we
// are currently transcribing, so we need to descend to that subtree
// before we start counting. `matched` contains the various levels of the
// tree as we descend, and its final value is the subtree we are currently at.
for &(idx, _) in repeats {
if let MatchedSeq(ref ads) = matched {
matched = &ads[idx];
}
}
count(cx, 0, depth_opt, matched, sp)
}

/// Returns a `NamedMatch` item declared on the RHS given an arbitrary [Ident]
fn matched_from_ident<'ctx, 'interp, 'rslt>(
cx: &ExtCtxt<'ctx>,
ident: Ident,
interp: &'interp FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
) -> PResult<'ctx, &'rslt NamedMatch>
where
'interp: 'rslt,
{
let span = ident.span;
let key = MacroRulesNormalizedIdent::new(ident);
interp.get(&key).ok_or_else(|| {
cx.struct_span_err(
span,
&format!("variable `{}` is not recognized in meta-variable expression", key),
)
})
}

/// Used by meta-variable expressions when a user input is out of the actual declared bounds. For
/// example, index(999999) in a repetition of only three elements.
fn out_of_bounds_err<'a>(
cx: &ExtCtxt<'a>,
max: usize,
span: Span,
ty: &str,
) -> DiagnosticBuilder<'a, ErrorGuaranteed> {
cx.struct_span_err(span, &format!("{ty} depth must be less than {max}"))
}

fn transcribe_metavar_expr<'a>(
-    _cx: &ExtCtxt<'a>,
-    _expr: mbe::MetaVarExpr,
-    _interp: &FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
-    _repeats: &[(usize, usize)],
-    _result: &mut Vec<TreeAndSpacing>,
-    _sp: &DelimSpan,
+    cx: &ExtCtxt<'a>,
+    expr: MetaVarExpr,
+    interp: &FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
+    repeats: &[(usize, usize)],
+    result: &mut Vec<TreeAndSpacing>,
+    sp: &DelimSpan,
) -> PResult<'a, ()> {
match expr {
MetaVarExpr::Count(original_ident, depth_opt) => {
let matched = matched_from_ident(cx, original_ident, interp)?;
let count = count_repetitions(cx, depth_opt, matched, &repeats, sp)?;
let tt = TokenTree::token(
TokenKind::lit(token::Integer, sym::integer(count), None),
sp.entire(),
);
result.push(tt.into());
}
MetaVarExpr::Ignore(original_ident) => {
// Used to ensure that `original_ident` is present in the LHS
let _ = matched_from_ident(cx, original_ident, interp)?;
}
MetaVarExpr::Index(depth) => match repeats.iter().nth_back(depth) {
Some((index, _)) => {
result.push(
TokenTree::token(
TokenKind::lit(token::Integer, sym::integer(*index), None),
sp.entire(),
)
.into(),
);
}
None => return Err(out_of_bounds_err(cx, repeats.len(), sp.entire(), "index")),
},
MetaVarExpr::Length(depth) => match repeats.iter().nth_back(depth) {
Some((_, length)) => {
result.push(
TokenTree::token(
TokenKind::lit(token::Integer, sym::integer(*length), None),
sp.entire(),
)
.into(),
);
}
None => return Err(out_of_bounds_err(cx, repeats.len(), sp.entire(), "length")),
},
}
Ok(())
}
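
For readers following the new `count_repetitions` logic, here is a minimal stand-alone sketch of the same depth-aware recursion; the `Match` enum and the example tree are hypothetical stand-ins for rustc's `NamedMatch`, and the error paths are omitted.

// Minimal sketch of depth-aware counting, mirroring `count_repetitions`.
enum Match {
    Leaf,            // plays the role of MatchedNonterminal
    Seq(Vec<Match>), // plays the role of MatchedSeq
}

fn count(m: &Match, depth: Option<usize>) -> usize {
    match m {
        // A leaf counts as one match when no depth is requested.
        Match::Leaf => 1,
        Match::Seq(children) => match depth {
            // Depth 0: count the repetitions at this level directly.
            Some(0) => children.len(),
            // Positive depth: descend one level and keep counting.
            Some(d) => children.iter().map(|c| count(c, Some(d - 1))).sum(),
            // No depth: sum the matches of every nested repetition.
            None => children.iter().map(|c| count(c, None)).sum(),
        },
    }
}

fn main() {
    // Mirrors the doc comment example: `{ a, b } { c }` matched against
    // `$( { $( $foo:ident ),* } )*`.
    let matched = Match::Seq(vec![
        Match::Seq(vec![Match::Leaf, Match::Leaf]), // { a, b }
        Match::Seq(vec![Match::Leaf]),              // { c }
    ]);
    assert_eq!(count(&matched, Some(0)), 2); // two outer repetitions
    assert_eq!(count(&matched, None), 3);    // three identifiers in total
}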
10 changes: 5 additions & 5 deletions src/bootstrap/setup.rs
@@ -161,9 +161,9 @@ fn rustup_installed() -> bool {
}

fn stage_dir_exists(stage_path: &str) -> bool {
match fs::create_dir(&stage_path[..]) {
match fs::create_dir(&stage_path) {
Ok(_) => true,
Err(_) => Path::new(&stage_path[..]).exists(),
Err(_) => Path::new(&stage_path).exists(),
}
}

@@ -179,7 +179,7 @@ fn attempt_toolchain_link(stage_path: &str) {
return;
}

if try_link_toolchain(&stage_path[..]) {
if try_link_toolchain(&stage_path) {
println!(
"Added `stage1` rustup toolchain; try `cargo +stage1 build` on a separate rust project to run a newly-built toolchain"
);
@@ -188,7 +188,7 @@ fn attempt_toolchain_link(stage_path: &str) {
println!(
"To manually link stage 1 build to `stage1` toolchain, run:\n
`rustup toolchain link stage1 {}`",
&stage_path[..]
&stage_path
);
}
}
@@ -222,7 +222,7 @@ fn toolchain_is_linked() -> bool {
fn try_link_toolchain(stage_path: &str) -> bool {
Command::new("rustup")
.stdout(std::process::Stdio::null())
.args(&["toolchain", "link", "stage1", &stage_path[..]])
.args(&["toolchain", "link", "stage1", &stage_path])
.output()
.map_or(false, |output| output.status.success())
}
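
The bootstrap change in rust-lang#94863 relies on the fact that slicing a `&str` with the whole range yields an equivalent `&str`, so `&stage_path[..]` can be written as `&stage_path` (or just `stage_path`). A minimal illustration, not taken from the repository:

fn takes_str(s: &str) -> usize {
    s.len()
}

fn main() {
    // `stage_path` is already a `&str`, so the whole-range slice is redundant.
    let stage_path: &str = "build/host/stage1";
    assert_eq!(takes_str(&stage_path[..]), takes_str(&stage_path));
    assert_eq!(takes_str(stage_path), 17);
}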
3 changes: 2 additions & 1 deletion src/librustdoc/json/conversions.rs
@@ -340,9 +340,10 @@ impl FromWithTcx<clean::GenericParamDefKind> for GenericParamDefKind {
Lifetime { outlives } => GenericParamDefKind::Lifetime {
outlives: outlives.into_iter().map(|lt| lt.0.to_string()).collect(),
},
Type { did: _, bounds, default, synthetic: _ } => GenericParamDefKind::Type {
Type { did: _, bounds, default, synthetic } => GenericParamDefKind::Type {
bounds: bounds.into_iter().map(|x| x.into_tcx(tcx)).collect(),
default: default.map(|x| (*x).into_tcx(tcx)),
synthetic,
},
Const { did: _, ty, default } => {
GenericParamDefKind::Const { ty: (*ty).into_tcx(tcx), default: default.map(|x| *x) }
40 changes: 36 additions & 4 deletions src/rustdoc-json-types/lib.rs
@@ -9,7 +9,7 @@ use std::path::PathBuf;
use serde::{Deserialize, Serialize};

/// rustdoc format-version.
pub const FORMAT_VERSION: u32 = 12;
pub const FORMAT_VERSION: u32 = 13;

/// A `Crate` is the root of the emitted JSON blob. It contains all type/documentation information
/// about the language items in the local crate, as well as info about external items to allow
@@ -346,9 +346,41 @@ pub struct GenericParamDef {
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "snake_case")]
pub enum GenericParamDefKind {
Lifetime { outlives: Vec<String> },
Type { bounds: Vec<GenericBound>, default: Option<Type> },
Const { ty: Type, default: Option<String> },
Lifetime {
outlives: Vec<String>,
},
Type {
bounds: Vec<GenericBound>,
default: Option<Type>,
/// This is normally `false`, which means that this generic parameter is
/// declared in the Rust source text.
///
/// If it is `true`, this generic parameter has been introduced by the
/// compiler behind the scenes.
///
/// # Example
///
/// Consider
///
/// ```ignore (pseudo-rust)
/// pub fn f(_: impl Trait) {}
/// ```
///
/// The compiler will transform this behind the scenes to
///
/// ```ignore (pseudo-rust)
/// pub fn f<impl Trait: Trait>(_: impl Trait) {}
/// ```
///
/// In this example, the generic parameter named `impl Trait` (and which
/// is bound by `Trait`) is synthetic, because it was not originally in
/// the Rust source text.
synthetic: bool,
},
Const {
ty: Type,
default: Option<String>,
},
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
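
Based on the `#[serde(rename_all = "snake_case")]` attribute and the test checks below, the new field should appear in the emitted JSON under `kind.type.synthetic`. A reduced sketch using a local, hypothetical mirror of the enum (not the real `rustdoc-json-types` crate; requires the `serde` and `serde_json` dependencies):

use serde::Serialize;

// Cut-down mirror of `GenericParamDefKind`, kept only to show how the
// `synthetic` flag serializes.
#[derive(Serialize)]
#[serde(rename_all = "snake_case")]
enum GenericParamDefKind {
    Type { synthetic: bool },
}

fn main() {
    let kind = GenericParamDefKind::Type { synthetic: true };
    // Prints: {"type":{"synthetic":true}}
    println!("{}", serde_json::to_string(&kind).unwrap());
}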
26 changes: 26 additions & 0 deletions src/test/rustdoc-json/fns/generics.rs
@@ -0,0 +1,26 @@
// ignore-tidy-linelength

#![feature(no_core)]
#![no_core]

// @set wham_id = generics.json "$.index[*][?(@.name=='Wham')].id"
pub trait Wham {}

// @is - "$.index[*][?(@.name=='one_generic_param_fn')].inner.generics.where_predicates" []
// @count - "$.index[*][?(@.name=='one_generic_param_fn')].inner.generics.params[*]" 1
// @is - "$.index[*][?(@.name=='one_generic_param_fn')].inner.generics.params[0].name" '"T"'
// @has - "$.index[*][?(@.name=='one_generic_param_fn')].inner.generics.params[0].kind.type.synthetic" false
// @has - "$.index[*][?(@.name=='one_generic_param_fn')].inner.generics.params[0].kind.type.bounds[0].trait_bound.trait.inner.id" $wham_id
// @is - "$.index[*][?(@.name=='one_generic_param_fn')].inner.decl.inputs" '[["w", {"inner": "T", "kind": "generic"}]]'
pub fn one_generic_param_fn<T: Wham>(w: T) {}

// @is - "$.index[*][?(@.name=='one_synthetic_generic_param_fn')].inner.generics.where_predicates" []
// @count - "$.index[*][?(@.name=='one_synthetic_generic_param_fn')].inner.generics.params[*]" 1
// @is - "$.index[*][?(@.name=='one_synthetic_generic_param_fn')].inner.generics.params[0].name" '"impl Wham"'
// @has - "$.index[*][?(@.name=='one_synthetic_generic_param_fn')].inner.generics.params[0].kind.type.synthetic" true
// @has - "$.index[*][?(@.name=='one_synthetic_generic_param_fn')].inner.generics.params[0].kind.type.bounds[0].trait_bound.trait.inner.id" $wham_id
// @count - "$.index[*][?(@.name=='one_synthetic_generic_param_fn')].inner.decl.inputs[*]" 1
// @is - "$.index[*][?(@.name=='one_synthetic_generic_param_fn')].inner.decl.inputs[0][0]" '"w"'
// @is - "$.index[*][?(@.name=='one_synthetic_generic_param_fn')].inner.decl.inputs[0][1].kind" '"impl_trait"'
// @is - "$.index[*][?(@.name=='one_synthetic_generic_param_fn')].inner.decl.inputs[0][1].inner[0].trait_bound.trait.inner.id" $wham_id
pub fn one_synthetic_generic_param_fn(w: impl Wham) {}
