Auto merge of rust-lang#123027 - matthiaskrgr:rollup-nng7h3z, r=matthiaskrgr

Rollup of 8 pull requests

Successful merges:

 - rust-lang#120557 (Add rust-lldb pretty printing for Path and PathBuf)
 - rust-lang#122802 (Provide structured suggestion for unconstrained generic constant)
 - rust-lang#122858 (Tweak `parse_dot_suffix_expr`)
 - rust-lang#122990 (Clarify transmute example)
 - rust-lang#122995 (Clean up unnecessary headers/flags in coverage mir-opt tests)
 - rust-lang#123003 (CFI: Handle dyn with no principal)
 - rust-lang#123005 (CFI: Support complex receivers)
 - rust-lang#123020 (Temporarily remove nnethercote from the review rotation.)

r? `@ghost`
`@rustbot` modify labels: rollup
bors committed Mar 25, 2024
2 parents 42198bf + d96c95f commit 0ce30e7
Showing 61 changed files with 654 additions and 255 deletions.
1 change: 1 addition & 0 deletions Cargo.lock
@@ -4634,6 +4634,7 @@ dependencies = [
"rustc_session",
"rustc_span",
"rustc_target",
"rustc_trait_selection",
"tracing",
"twox-hash",
]
98 changes: 50 additions & 48 deletions compiler/rustc_parse/src/parser/expr.rs
@@ -16,7 +16,6 @@ use core::mem;
use core::ops::ControlFlow;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter, Token, TokenKind};
use rustc_ast::tokenstream::Spacing;
use rustc_ast::util::case::Case;
use rustc_ast::util::classify;
use rustc_ast::util::parser::{prec_let_scrutinee_needs_par, AssocOp, Fixity};
@@ -999,13 +998,57 @@ impl<'a> Parser<'a> {
}

pub fn parse_dot_suffix_expr(&mut self, lo: Span, base: P<Expr>) -> PResult<'a, P<Expr>> {
// At this point we've consumed something like `expr.` and `self.token` holds the token
// after the dot.
match self.token.uninterpolate().kind {
token::Ident(..) => self.parse_dot_suffix(base, lo),
token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) => {
Ok(self.parse_expr_tuple_field_access(lo, base, symbol, suffix, None))
let ident_span = self.token.span;
self.bump();
Ok(self.mk_expr_tuple_field_access(lo, ident_span, base, symbol, suffix))
}
token::Literal(token::Lit { kind: token::Float, symbol, suffix }) => {
Ok(self.parse_expr_tuple_field_access_float(lo, base, symbol, suffix))
Ok(match self.break_up_float(symbol, self.token.span) {
// 1e2
DestructuredFloat::Single(sym, _sp) => {
// `foo.1e2`: a single complete dot access, fully consumed. We end up with
// the `1e2` token in `self.prev_token` and the following token in
// `self.token`.
let ident_span = self.token.span;
self.bump();
self.mk_expr_tuple_field_access(lo, ident_span, base, sym, suffix)
}
// 1.
DestructuredFloat::TrailingDot(sym, ident_span, dot_span) => {
// `foo.1.`: a single complete dot access and the start of another.
// We end up with the `sym` (`1`) token in `self.prev_token` and a dot in
// `self.token`.
assert!(suffix.is_none());
self.token = Token::new(token::Ident(sym, IdentIsRaw::No), ident_span);
self.bump_with((Token::new(token::Dot, dot_span), self.token_spacing));
self.mk_expr_tuple_field_access(lo, ident_span, base, sym, None)
}
// 1.2 | 1.2e3
DestructuredFloat::MiddleDot(
sym1,
ident1_span,
_dot_span,
sym2,
ident2_span,
) => {
// `foo.1.2` (or `foo.1.2e3`): two complete dot accesses. We end up with
// the `sym2` (`2` or `2e3`) token in `self.prev_token` and the following
// token in `self.token`.
let next_token2 =
Token::new(token::Ident(sym2, IdentIsRaw::No), ident2_span);
self.bump_with((next_token2, self.token_spacing));
self.bump();
let base1 =
self.mk_expr_tuple_field_access(lo, ident1_span, base, sym1, None);
self.mk_expr_tuple_field_access(lo, ident2_span, base1, sym2, suffix)
}
DestructuredFloat::Error => base,
})
}
_ => {
self.error_unexpected_after_dot();
@@ -1119,41 +1162,6 @@ impl<'a> Parser<'a> {
}
}

fn parse_expr_tuple_field_access_float(
&mut self,
lo: Span,
base: P<Expr>,
float: Symbol,
suffix: Option<Symbol>,
) -> P<Expr> {
match self.break_up_float(float, self.token.span) {
// 1e2
DestructuredFloat::Single(sym, _sp) => {
self.parse_expr_tuple_field_access(lo, base, sym, suffix, None)
}
// 1.
DestructuredFloat::TrailingDot(sym, ident_span, dot_span) => {
assert!(suffix.is_none());
self.token = Token::new(token::Ident(sym, IdentIsRaw::No), ident_span);
let next_token = (Token::new(token::Dot, dot_span), self.token_spacing);
self.parse_expr_tuple_field_access(lo, base, sym, None, Some(next_token))
}
// 1.2 | 1.2e3
DestructuredFloat::MiddleDot(symbol1, ident1_span, dot_span, symbol2, ident2_span) => {
self.token = Token::new(token::Ident(symbol1, IdentIsRaw::No), ident1_span);
// This needs to be `Spacing::Alone` to prevent regressions.
// See issue #76399 and PR #76285 for more details
let next_token1 = (Token::new(token::Dot, dot_span), Spacing::Alone);
let base1 =
self.parse_expr_tuple_field_access(lo, base, symbol1, None, Some(next_token1));
let next_token2 = Token::new(token::Ident(symbol2, IdentIsRaw::No), ident2_span);
self.bump_with((next_token2, self.token_spacing)); // `.`
self.parse_expr_tuple_field_access(lo, base1, symbol2, suffix, None)
}
DestructuredFloat::Error => base,
}
}

/// Parse the field access used in offset_of, matched by `$(e:expr)+`.
/// Currently returns a list of idents. However, it should be possible in
/// future to also do array indices, which might be arbitrary expressions.
@@ -1255,24 +1263,18 @@ impl<'a> Parser<'a> {
Ok(fields.into_iter().collect())
}

fn parse_expr_tuple_field_access(
fn mk_expr_tuple_field_access(
&mut self,
lo: Span,
ident_span: Span,
base: P<Expr>,
field: Symbol,
suffix: Option<Symbol>,
next_token: Option<(Token, Spacing)>,
) -> P<Expr> {
match next_token {
Some(next_token) => self.bump_with(next_token),
None => self.bump(),
}
let span = self.prev_token.span;
let field = ExprKind::Field(base, Ident::new(field, span));
if let Some(suffix) = suffix {
self.expect_no_tuple_index_suffix(span, suffix);
self.expect_no_tuple_index_suffix(ident_span, suffix);
}
self.mk_expr(lo.to(span), field)
self.mk_expr(lo.to(ident_span), ExprKind::Field(base, Ident::new(field, ident_span)))
}

/// Parse a function call expression, `expr(...)`.
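For context on the `parse_dot_suffix_expr` rework above (rust-lang#122858): nested tuple-field access such as `pair.0.1` reaches the parser as `pair`, `.`, and a single float-literal token `0.1`, which `break_up_float` splits back into two index accesses. A minimal sketch of the surface syntax these code paths handle, in plain stable Rust (no rustc internals involved):

```rust
fn main() {
    // `pair.0.1` is lexed as `pair` `.` `0.1`, where `0.1` is one float
    // literal token; the `DestructuredFloat::MiddleDot` arm turns it back
    // into the two tuple-field accesses `.0` and `.1`.
    let pair = ((1u8, 2u8), 3u8);
    assert_eq!(pair.0.1, 2);

    // A plain single-index access like `pair.1` never hits the float path.
    assert_eq!(pair.1, 3);
}
```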
1 change: 1 addition & 0 deletions compiler/rustc_symbol_mangling/Cargo.toml
@@ -15,6 +15,7 @@ rustc_middle = { path = "../rustc_middle" }
rustc_session = { path = "../rustc_session" }
rustc_span = { path = "../rustc_span" }
rustc_target = { path = "../rustc_target" }
rustc_trait_selection = { path = "../rustc_trait_selection" }
tracing = "0.1"
twox-hash = "1.6.3"
# tidy-alphabetical-end
1 change: 1 addition & 0 deletions compiler/rustc_symbol_mangling/src/lib.rs
@@ -90,6 +90,7 @@
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
#![doc(rust_logo)]
#![feature(rustdoc_internals)]
#![feature(let_chains)]
#![allow(internal_features)]

#[macro_use]
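The `#![feature(let_chains)]` gate added above is what permits the chained `if let ... && let ...` conditions used by the new `typeid_for_instance` code below. A nightly-only sketch of the syntax, with hypothetical helpers (`lookup_impl`, `lookup_trait_ref`) standing in for the real queries:

```rust
// Nightly-only illustration of let-chains; the helper functions are made up.
#![feature(let_chains)]

fn lookup_impl(x: u32) -> Option<u32> {
    (x % 2 == 0).then_some(x / 2)
}

fn lookup_trait_ref(x: u32) -> Option<u32> {
    (x > 1).then_some(x - 1)
}

fn describe(x: u32) -> &'static str {
    // Both bindings stay in scope for the `if` body, mirroring the
    // `if let Some(impl_id) = ... && let Some(trait_ref) = ...` chain below.
    if let Some(half) = lookup_impl(x)
        && let Some(_prev) = lookup_trait_ref(half)
    {
        "both lookups succeeded"
    } else {
        "no match"
    }
}

fn main() {
    assert_eq!(describe(8), "both lookups succeeded");
    assert_eq!(describe(3), "no match");
}
```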
136 changes: 84 additions & 52 deletions compiler/rustc_symbol_mangling/src/typeid/typeid_itanium_cxx_abi.rs
@@ -11,6 +11,7 @@ use rustc_data_structures::base_n;
use rustc_data_structures::fx::FxHashMap;
use rustc_hir as hir;
use rustc_middle::ty::layout::IntegerExt;
use rustc_middle::ty::TypeVisitableExt;
use rustc_middle::ty::{
self, Const, ExistentialPredicate, FloatTy, FnSig, Instance, IntTy, List, Region, RegionKind,
TermKind, Ty, TyCtxt, UintTy,
@@ -21,7 +22,9 @@ use rustc_span::sym;
use rustc_target::abi::call::{Conv, FnAbi, PassMode};
use rustc_target::abi::Integer;
use rustc_target::spec::abi::Abi;
use rustc_trait_selection::traits;
use std::fmt::Write as _;
use std::iter;

use crate::typeid::TypeIdOptions;

@@ -747,9 +750,8 @@ fn transform_predicates<'tcx>(
tcx: TyCtxt<'tcx>,
predicates: &List<ty::PolyExistentialPredicate<'tcx>>,
) -> &'tcx List<ty::PolyExistentialPredicate<'tcx>> {
let predicates: Vec<ty::PolyExistentialPredicate<'tcx>> = predicates
.iter()
.filter_map(|predicate| match predicate.skip_binder() {
tcx.mk_poly_existential_predicates_from_iter(predicates.iter().filter_map(|predicate| {
match predicate.skip_binder() {
ty::ExistentialPredicate::Trait(trait_ref) => {
let trait_ref = ty::TraitRef::identity(tcx, trait_ref.def_id);
Some(ty::Binder::dummy(ty::ExistentialPredicate::Trait(
@@ -758,9 +760,8 @@
}
ty::ExistentialPredicate::Projection(..) => None,
ty::ExistentialPredicate::AutoTrait(..) => Some(predicate),
})
.collect();
tcx.mk_poly_existential_predicates(&predicates)
}
}))
}

/// Transforms args for being encoded and used in the substitution dictionary.
@@ -1115,51 +1116,46 @@ pub fn typeid_for_instance<'tcx>(
instance.args = strip_receiver_auto(tcx, instance.args)
}

if let Some(impl_id) = tcx.impl_of_method(instance.def_id())
&& let Some(trait_ref) = tcx.impl_trait_ref(impl_id)
{
let impl_method = tcx.associated_item(instance.def_id());
let method_id = impl_method
.trait_item_def_id
.expect("Part of a trait implementation, but not linked to the def_id?");
let trait_method = tcx.associated_item(method_id);
if traits::is_vtable_safe_method(tcx, trait_ref.skip_binder().def_id, trait_method) {
// Trait methods will have a Self polymorphic parameter, where the concretized
// implementation will not. We need to walk back to the more general trait method.
let trait_ref = tcx.instantiate_and_normalize_erasing_regions(
instance.args,
ty::ParamEnv::reveal_all(),
trait_ref,
);
let invoke_ty = trait_object_ty(tcx, ty::Binder::dummy(trait_ref));

// At the call site, any call to this concrete function through a vtable will be
// `Virtual(method_id, idx)` with appropriate arguments for the method. Since we have the
// original method id, and we've recovered the trait arguments, we can make the callee
// instance we're computing the alias set for match the caller instance.
//
// Right now, our code ignores the vtable index everywhere, so we use 0 as a placeholder.
// If we ever *do* start encoding the vtable index, we will need to generate an alias set
// based on which vtables we are putting this method into, as there will be more than one
// index value when supertraits are involved.
instance.def = ty::InstanceDef::Virtual(method_id, 0);
let abstract_trait_args =
tcx.mk_args_trait(invoke_ty, trait_ref.args.into_iter().skip(1));
instance.args = instance.args.rebase_onto(tcx, impl_id, abstract_trait_args);
}
}

let fn_abi = tcx
.fn_abi_of_instance(tcx.param_env(instance.def_id()).and((instance, ty::List::empty())))
.unwrap_or_else(|instance| {
bug!("typeid_for_instance: couldn't get fn_abi of instance {:?}", instance)
});

// If this instance is a method and self is a reference, get the impl it belongs to
let impl_def_id = tcx.impl_of_method(instance.def_id());
if impl_def_id.is_some() && !fn_abi.args.is_empty() && fn_abi.args[0].layout.ty.is_ref() {
// If this impl is not an inherent impl, get the trait it implements
if let Some(trait_ref) = tcx.impl_trait_ref(impl_def_id.unwrap()) {
// Transform the concrete self into a reference to a trait object
let existential_predicate = trait_ref.map_bound(|trait_ref| {
ty::ExistentialPredicate::Trait(ty::ExistentialTraitRef::erase_self_ty(
tcx, trait_ref,
))
});
let existential_predicates = tcx.mk_poly_existential_predicates(&[ty::Binder::dummy(
existential_predicate.skip_binder(),
)]);
// Is the concrete self mutable?
let self_ty = if fn_abi.args[0].layout.ty.is_mutable_ptr() {
Ty::new_mut_ref(
tcx,
tcx.lifetimes.re_erased,
Ty::new_dynamic(tcx, existential_predicates, tcx.lifetimes.re_erased, ty::Dyn),
)
} else {
Ty::new_imm_ref(
tcx,
tcx.lifetimes.re_erased,
Ty::new_dynamic(tcx, existential_predicates, tcx.lifetimes.re_erased, ty::Dyn),
)
};

// Replace the concrete self in an fn_abi clone by the reference to a trait object
let mut fn_abi = fn_abi.clone();
// HACK(rcvalle): It is okay to not replace or update the entire ArgAbi here because the
// other fields are never used.
fn_abi.args[0].layout.ty = self_ty;

return typeid_for_fnabi(tcx, &fn_abi, options);
}
}

typeid_for_fnabi(tcx, fn_abi, options)
}

@@ -1171,14 +1167,50 @@ fn strip_receiver_auto<'tcx>(
let ty::Dynamic(preds, lifetime, kind) = ty.kind() else {
bug!("Tried to strip auto traits from non-dynamic type {ty}");
};
let filtered_preds =
if preds.principal().is_some() {
let new_rcvr = if preds.principal().is_some() {
let filtered_preds =
tcx.mk_poly_existential_predicates_from_iter(preds.into_iter().filter(|pred| {
!matches!(pred.skip_binder(), ty::ExistentialPredicate::AutoTrait(..))
}))
} else {
ty::List::empty()
};
let new_rcvr = Ty::new_dynamic(tcx, filtered_preds, *lifetime, *kind);
}));
Ty::new_dynamic(tcx, filtered_preds, *lifetime, *kind)
} else {
// If there's no principal type, re-encode it as a unit, since we don't know anything
// about it. This technically discards the knowledge that it was a type that was made
// into a trait object at some point, but that's not a lot.
tcx.types.unit
};
tcx.mk_args_trait(new_rcvr, args.into_iter().skip(1))
}

fn trait_object_ty<'tcx>(tcx: TyCtxt<'tcx>, poly_trait_ref: ty::PolyTraitRef<'tcx>) -> Ty<'tcx> {
assert!(!poly_trait_ref.has_non_region_param());
let principal_pred = poly_trait_ref.map_bound(|trait_ref| {
ty::ExistentialPredicate::Trait(ty::ExistentialTraitRef::erase_self_ty(tcx, trait_ref))
});
let mut assoc_preds: Vec<_> = traits::supertraits(tcx, poly_trait_ref)
.flat_map(|super_poly_trait_ref| {
tcx.associated_items(super_poly_trait_ref.def_id())
.in_definition_order()
.filter(|item| item.kind == ty::AssocKind::Type)
.map(move |assoc_ty| {
super_poly_trait_ref.map_bound(|super_trait_ref| {
let alias_ty = ty::AliasTy::new(tcx, assoc_ty.def_id, super_trait_ref.args);
let resolved = tcx.normalize_erasing_regions(
ty::ParamEnv::reveal_all(),
alias_ty.to_ty(tcx),
);
ty::ExistentialPredicate::Projection(ty::ExistentialProjection {
def_id: assoc_ty.def_id,
args: ty::ExistentialTraitRef::erase_self_ty(tcx, super_trait_ref).args,
term: resolved.into(),
})
})
})
})
.collect();
assoc_preds.sort_by(|a, b| a.skip_binder().stable_cmp(tcx, &b.skip_binder()));
let preds = tcx.mk_poly_existential_predicates_from_iter(
iter::once(principal_pred).chain(assoc_preds.into_iter()),
);
Ty::new_dynamic(tcx, preds, tcx.lifetimes.re_erased, ty::Dyn)
}
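Taken together, the changes above cover two shapes of user code under CFI: trait objects whose only predicates are auto traits (rust-lang#123003, "dyn with no principal") and object-safe methods with non-reference receivers such as `Arc<Self>` (rust-lang#123005, "complex receivers"). An illustrative sketch of such code in plain Rust; it does not touch rustc internals, and would only reach these code paths when built with CFI enabled (roughly, nightly with `-Zsanitizer=cfi` and LTO):

```rust
use std::sync::Arc;

trait Greeter {
    // A "complex" receiver: not `&self`, yet still object-safe and
    // dispatchable through a vtable.
    fn greet(self: Arc<Self>) -> String;
}

struct English;

impl Greeter for English {
    fn greet(self: Arc<Self>) -> String {
        "hello".to_string()
    }
}

fn main() {
    // Virtual call with an `Arc<Self>` receiver (the "complex receivers" case).
    let g: Arc<dyn Greeter> = Arc::new(English);
    assert_eq!(g.greet(), "hello");

    // A trait object with no principal trait, only auto traits
    // (the "dyn with no principal" case).
    let _no_principal: Box<dyn Send + Sync> = Box::new(42u32);
}
```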
@@ -3536,12 +3536,39 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
let mut err =
self.dcx().struct_span_err(span, "unconstrained generic constant");
let const_span = self.tcx.def_span(uv.def);

let const_ty = self.tcx.type_of(uv.def).instantiate(self.tcx, uv.args);
let cast = if const_ty != self.tcx.types.usize { " as usize" } else { "" };
let msg = "try adding a `where` bound";
match self.tcx.sess.source_map().span_to_snippet(const_span) {
Ok(snippet) => err.help(format!(
"try adding a `where` bound using this expression: `where [(); {snippet}]:`"
)),
_ => err.help("consider adding a `where` bound using this expression"),
};
Ok(snippet) => {
let code = format!("[(); {snippet}{cast}]:");
let def_id = if let ObligationCauseCode::CompareImplItemObligation {
trait_item_def_id,
..
} = obligation.cause.code()
{
trait_item_def_id.as_local()
} else {
Some(obligation.cause.body_id)
};
if let Some(def_id) = def_id
&& let Some(generics) = self.tcx.hir().get_generics(def_id)
{
err.span_suggestion_verbose(
generics.tail_span_for_predicate_suggestion(),
msg,
format!("{} {code}", generics.add_where_or_trailing_comma()),
Applicability::MaybeIncorrect,
);
} else {
err.help(format!("{msg}: where {code}"));
};
}
_ => {
err.help(msg);
}
};
Ok(err)
}
ty::ConstKind::Expr(_) => {
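The diagnostic change above (rust-lang#122802) upgrades the old free-text help into a `span_suggestion_verbose` that appends the bound to the item's generics, and adds an `as usize` cast when the constant's type is not already `usize`. A hedged sketch of the kind of user code that triggers the "unconstrained generic constant" error and the `where` bound the suggestion inserts (nightly only, under the incomplete `generic_const_exprs` feature):

```rust
#![feature(generic_const_exprs)]
#![allow(incomplete_features)]

// Without the `where [(); N + 1]:` bound below, the compiler reports
// "unconstrained generic constant" and now offers this bound as a
// structured suggestion rather than a free-form help message.
fn pad<const N: usize>(xs: [u8; N]) -> [u8; N + 1]
where
    [(); N + 1]:,
{
    let mut out = [0u8; N + 1];
    out[..N].copy_from_slice(&xs);
    out
}

fn main() {
    assert_eq!(pad([1, 2, 3]), [1, 2, 3, 0]);
}
```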