diff --git a/.github/workflows/dependencies.yml b/.github/workflows/dependencies.yml index b137497594f2b..5e54ceea3d7f8 100644 --- a/.github/workflows/dependencies.yml +++ b/.github/workflows/dependencies.yml @@ -67,7 +67,7 @@ jobs: - name: cargo update rustbook run: | echo -e "\nrustbook dependencies:" >> cargo_update.log - cargo update --manifest-path src/tools/rustbook 2>&1 | sed '/crates.io index/d' | tee -a cargo_update.log + cargo update --manifest-path src/tools/rustbook/Cargo.toml 2>&1 | sed '/crates.io index/d' | tee -a cargo_update.log - name: upload Cargo.lock artifact for use in PR uses: actions/upload-artifact@v4 with: diff --git a/Cargo.lock b/Cargo.lock index 4bd99b7fbe7ac..0b546d6e5eee0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1775,9 +1775,9 @@ checksum = "ce23b50ad8242c51a442f3ff322d56b02f08852c77e4c0b4d3fd684abc89c683" [[package]] name = "indexmap" -version = "2.2.6" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26" +checksum = "93ead53efc7ea8ed3cfb0c79fc8023fbb782a5432b52830b6518941cebe6505c" dependencies = [ "equivalent", "hashbrown", @@ -3149,6 +3149,7 @@ dependencies = [ "gimli 0.31.0", "object 0.36.2", "regex", + "serde_json", "similar", "wasmparser 0.214.0", ] diff --git a/compiler/rustc/src/main.rs b/compiler/rustc/src/main.rs index 29766fc9d87cb..e9a7397557eb1 100644 --- a/compiler/rustc/src/main.rs +++ b/compiler/rustc/src/main.rs @@ -1,3 +1,6 @@ +// We need this feature as it changes `dylib` linking behavior and allows us to link to `rustc_driver`. +#![feature(rustc_private)] + // A note about jemalloc: rustc uses jemalloc when built for CI and // distribution. The obvious way to do this is with the `#[global_allocator]` // mechanism. However, for complicated reasons (see diff --git a/compiler/rustc_ast/src/ast.rs b/compiler/rustc_ast/src/ast.rs index fc1af3fc3dd11..a44ed82850480 100644 --- a/compiler/rustc_ast/src/ast.rs +++ b/compiler/rustc_ast/src/ast.rs @@ -585,7 +585,9 @@ impl Pat { } // A slice/array pattern `[P]` can be reparsed as `[T]`, an unsized array, // when `P` can be reparsed as a type `T`. - PatKind::Slice(pats) if pats.len() == 1 => pats[0].to_ty().map(TyKind::Slice)?, + PatKind::Slice(pats) if let [pat] = pats.as_slice() => { + pat.to_ty().map(TyKind::Slice)? + } // A tuple pattern `(P0, .., Pn)` can be reparsed as `(T0, .., Tn)` // assuming `T0` to `Tn` are all syntactically valid as types. PatKind::Tuple(pats) => { @@ -1187,8 +1189,8 @@ impl Expr { /// Does not ensure that the path resolves to a const param, the caller should check this. pub fn is_potential_trivial_const_arg(&self) -> bool { let this = if let ExprKind::Block(block, None) = &self.kind - && block.stmts.len() == 1 - && let StmtKind::Expr(expr) = &block.stmts[0].kind + && let [stmt] = block.stmts.as_slice() + && let StmtKind::Expr(expr) = &stmt.kind { expr } else { @@ -1248,7 +1250,9 @@ impl Expr { expr.to_ty().map(|ty| TyKind::Array(ty, expr_len.clone()))? } - ExprKind::Array(exprs) if exprs.len() == 1 => exprs[0].to_ty().map(TyKind::Slice)?, + ExprKind::Array(exprs) if let [expr] = exprs.as_slice() => { + expr.to_ty().map(TyKind::Slice)? 
+ } ExprKind::Tup(exprs) => { let tys = exprs.iter().map(|expr| expr.to_ty()).collect::>>()?; diff --git a/compiler/rustc_ast_lowering/messages.ftl b/compiler/rustc_ast_lowering/messages.ftl index 9ed93d481e773..a5ee6713be8ed 100644 --- a/compiler/rustc_ast_lowering/messages.ftl +++ b/compiler/rustc_ast_lowering/messages.ftl @@ -167,11 +167,21 @@ ast_lowering_template_modifier = template modifier ast_lowering_this_not_async = this is not `async` +ast_lowering_underscore_array_length_unstable = + using `_` for array lengths is unstable + ast_lowering_underscore_expr_lhs_assign = in expressions, `_` can only be used on the left-hand side of an assignment .label = `_` not allowed here +ast_lowering_unstable_inline_assembly = inline assembly is not stable yet on this architecture +ast_lowering_unstable_inline_assembly_label_operands = + label operands for inline assembly are unstable +ast_lowering_unstable_may_unwind = the `may_unwind` option is unstable + ast_lowering_use_angle_brackets = use angle brackets instead + +ast_lowering_yield = yield syntax is experimental ast_lowering_yield_in_closure = `yield` can only be used in `#[coroutine]` closures, or `gen` blocks .suggestion = use `#[coroutine]` to make this closure a coroutine diff --git a/compiler/rustc_ast_lowering/src/asm.rs b/compiler/rustc_ast_lowering/src/asm.rs index ea7b8c114f490..7d9d689e6d7e9 100644 --- a/compiler/rustc_ast_lowering/src/asm.rs +++ b/compiler/rustc_ast_lowering/src/asm.rs @@ -19,10 +19,12 @@ use super::errors::{ InvalidRegisterClass, RegisterClassOnlyClobber, RegisterConflict, }; use super::LoweringContext; -use crate::{ImplTraitContext, ImplTraitPosition, ParamMode, ResolverAstLoweringExt}; +use crate::{ + fluent_generated as fluent, ImplTraitContext, ImplTraitPosition, ParamMode, + ResolverAstLoweringExt, +}; impl<'a, 'hir> LoweringContext<'a, 'hir> { - #[allow(rustc::untranslatable_diagnostic)] // FIXME: make this translatable pub(crate) fn lower_inline_asm( &mut self, sp: Span, @@ -52,7 +54,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { &self.tcx.sess, sym::asm_experimental_arch, sp, - "inline assembly is not stable yet on this architecture", + fluent::ast_lowering_unstable_inline_assembly, ) .emit(); } @@ -64,8 +66,13 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { self.dcx().emit_err(AttSyntaxOnlyX86 { span: sp }); } if asm.options.contains(InlineAsmOptions::MAY_UNWIND) && !self.tcx.features().asm_unwind { - feature_err(&self.tcx.sess, sym::asm_unwind, sp, "the `may_unwind` option is unstable") - .emit(); + feature_err( + &self.tcx.sess, + sym::asm_unwind, + sp, + fluent::ast_lowering_unstable_may_unwind, + ) + .emit(); } let mut clobber_abis = FxIndexMap::default(); @@ -176,20 +183,9 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { out_expr: out_expr.as_ref().map(|expr| self.lower_expr(expr)), } } - InlineAsmOperand::Const { anon_const } => { - if !self.tcx.features().asm_const { - feature_err( - sess, - sym::asm_const, - *op_sp, - "const operands for inline assembly are unstable", - ) - .emit(); - } - hir::InlineAsmOperand::Const { - anon_const: self.lower_anon_const_to_anon_const(anon_const), - } - } + InlineAsmOperand::Const { anon_const } => hir::InlineAsmOperand::Const { + anon_const: self.lower_anon_const_to_anon_const(anon_const), + }, InlineAsmOperand::Sym { sym } => { let static_def_id = self .resolver @@ -246,7 +242,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { sess, sym::asm_goto, *op_sp, - "label operands for inline assembly are unstable", + 
fluent::ast_lowering_unstable_inline_assembly_label_operands, ) .emit(); } diff --git a/compiler/rustc_ast_lowering/src/delegation.rs b/compiler/rustc_ast_lowering/src/delegation.rs index 6e8ff72cf8759..9c073130827c8 100644 --- a/compiler/rustc_ast_lowering/src/delegation.rs +++ b/compiler/rustc_ast_lowering/src/delegation.rs @@ -189,7 +189,7 @@ impl<'hir> LoweringContext<'_, 'hir> { ) -> hir::FnSig<'hir> { let header = if let Some(local_sig_id) = sig_id.as_local() { match self.resolver.delegation_fn_sigs.get(&local_sig_id) { - Some(sig) => self.lower_fn_header(sig.header), + Some(sig) => self.lower_fn_header(sig.header, hir::Safety::Safe), None => self.generate_header_error(), } } else { @@ -275,8 +275,8 @@ impl<'hir> LoweringContext<'_, 'hir> { // FIXME(fn_delegation): Alternatives for target expression lowering: // https://github.com/rust-lang/rfcs/pull/3530#issuecomment-2197170600. fn lower_target_expr(&mut self, block: &Block) -> hir::Expr<'hir> { - if block.stmts.len() == 1 - && let StmtKind::Expr(expr) = &block.stmts[0].kind + if let [stmt] = block.stmts.as_slice() + && let StmtKind::Expr(expr) = &stmt.kind { return self.lower_expr_mut(expr); } diff --git a/compiler/rustc_ast_lowering/src/expr.rs b/compiler/rustc_ast_lowering/src/expr.rs index 124fe6bd380d2..b5d8a547a8fb9 100644 --- a/compiler/rustc_ast_lowering/src/expr.rs +++ b/compiler/rustc_ast_lowering/src/expr.rs @@ -23,7 +23,7 @@ use super::{ ImplTraitContext, LoweringContext, ParamMode, ParenthesizedGenericArgs, ResolverAstLoweringExt, }; use crate::errors::YieldInClosure; -use crate::{FnDeclKind, ImplTraitPosition}; +use crate::{fluent_generated, FnDeclKind, ImplTraitPosition}; impl<'hir> LoweringContext<'_, 'hir> { fn lower_exprs(&mut self, exprs: &[AstP]) -> &'hir [hir::Expr<'hir>] { @@ -1540,7 +1540,6 @@ impl<'hir> LoweringContext<'_, 'hir> { } } - #[allow(rustc::untranslatable_diagnostic)] // FIXME: make this translatable fn lower_expr_yield(&mut self, span: Span, opt_expr: Option<&Expr>) -> hir::ExprKind<'hir> { let yielded = opt_expr.as_ref().map(|x| self.lower_expr(x)).unwrap_or_else(|| self.expr_unit(span)); @@ -1575,7 +1574,7 @@ impl<'hir> LoweringContext<'_, 'hir> { &self.tcx.sess, sym::coroutines, span, - "yield syntax is experimental", + fluent_generated::ast_lowering_yield, ) .emit(); } @@ -1587,7 +1586,7 @@ impl<'hir> LoweringContext<'_, 'hir> { &self.tcx.sess, sym::coroutines, span, - "yield syntax is experimental", + fluent_generated::ast_lowering_yield, ) .emit(); } diff --git a/compiler/rustc_ast_lowering/src/item.rs b/compiler/rustc_ast_lowering/src/item.rs index 7af3945d3f99d..f6065259d8d21 100644 --- a/compiler/rustc_ast_lowering/src/item.rs +++ b/compiler/rustc_ast_lowering/src/item.rs @@ -237,7 +237,7 @@ impl<'hir> LoweringContext<'_, 'hir> { }); let sig = hir::FnSig { decl, - header: this.lower_fn_header(*header), + header: this.lower_fn_header(*header, hir::Safety::Safe), span: this.lower_span(*fn_sig_span), }; hir::ItemKind::Fn(sig, generics, body_id) @@ -668,7 +668,7 @@ impl<'hir> LoweringContext<'_, 'hir> { ForeignItemKind::Fn(box Fn { sig, generics, .. }) => { let fdec = &sig.decl; let itctx = ImplTraitContext::Universal; - let (generics, (fn_dec, fn_args)) = + let (generics, (decl, fn_args)) = self.lower_generics(generics, Const::No, false, i.id, itctx, |this| { ( // Disallow `impl Trait` in foreign items. 
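A recurring refactor in the hunks above (in `ast.rs` and `delegation.rs`, and in several later files) replaces `xs.len() == 1` checks followed by `xs[0]` indexing with slice patterns, either in an `if let` / match guard or via `let ... else`. The match-guard form used in `ast.rs` relies on `if let` guards, which are nightly-only outside the compiler. The standalone sketch below uses stable forms and hypothetical data purely to illustrate the idiom; it is not compiler code.

```rust
// Minimal sketch: match a slice of exactly one element instead of checking
// `len() == 1` and indexing with `[0]`.
fn summarize(stmts: &[String]) -> String {
    // `if let` form: binds the single element directly, no bounds check.
    if let [only] = stmts {
        return format!("single statement: {only}");
    }

    // `let ... else` form, as used for arity checks elsewhere in the diff:
    // bail out early unless the pattern matches.
    let [first, rest @ ..] = stmts else {
        return String::from("empty");
    };
    format!("{first} plus {} more", rest.len())
}

fn main() {
    assert_eq!(summarize(&["a".to_string()]), "single statement: a");
    assert_eq!(summarize(&[]), "empty");
    assert_eq!(summarize(&["a".to_string(), "b".to_string()]), "a plus 1 more");
}
```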
@@ -682,9 +682,15 @@ impl<'hir> LoweringContext<'_, 'hir> { this.lower_fn_params_to_names(fdec), ) }); - let safety = self.lower_safety(sig.header.safety, hir::Safety::Unsafe); - hir::ForeignItemKind::Fn(fn_dec, fn_args, generics, safety) + // Unmarked safety in unsafe block defaults to unsafe. + let header = self.lower_fn_header(sig.header, hir::Safety::Unsafe); + + hir::ForeignItemKind::Fn( + hir::FnSig { header, decl, span: self.lower_span(sig.span) }, + fn_args, + generics, + ) } ForeignItemKind::Static(box StaticItem { ty, mutability, expr: _, safety }) => { let ty = self @@ -1390,7 +1396,7 @@ impl<'hir> LoweringContext<'_, 'hir> { coroutine_kind: Option, parent_constness: Const, ) -> (&'hir hir::Generics<'hir>, hir::FnSig<'hir>) { - let header = self.lower_fn_header(sig.header); + let header = self.lower_fn_header(sig.header, hir::Safety::Safe); // Don't pass along the user-provided constness of trait associated functions; we don't want to // synthesize a host effect param for them. We reject `const` on them during AST validation. let constness = @@ -1403,14 +1409,18 @@ impl<'hir> LoweringContext<'_, 'hir> { (generics, hir::FnSig { header, decl, span: self.lower_span(sig.span) }) } - pub(super) fn lower_fn_header(&mut self, h: FnHeader) -> hir::FnHeader { + pub(super) fn lower_fn_header( + &mut self, + h: FnHeader, + default_safety: hir::Safety, + ) -> hir::FnHeader { let asyncness = if let Some(CoroutineKind::Async { span, .. }) = h.coroutine_kind { hir::IsAsync::Async(span) } else { hir::IsAsync::NotAsync }; hir::FnHeader { - safety: self.lower_safety(h.safety, hir::Safety::Safe), + safety: self.lower_safety(h.safety, default_safety), asyncness: asyncness, constness: self.lower_constness(h.constness), abi: self.lower_extern(h.ext), diff --git a/compiler/rustc_ast_lowering/src/lib.rs b/compiler/rustc_ast_lowering/src/lib.rs index 224787c335beb..81d17a9dec205 100644 --- a/compiler/rustc_ast_lowering/src/lib.rs +++ b/compiler/rustc_ast_lowering/src/lib.rs @@ -2326,7 +2326,6 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { self.expr_block(block) } - #[allow(rustc::untranslatable_diagnostic)] // FIXME: make this translatable fn lower_array_length(&mut self, c: &AnonConst) -> hir::ArrayLen<'hir> { match c.value.kind { ExprKind::Underscore => { @@ -2340,7 +2339,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { &self.tcx.sess, sym::generic_arg_infer, c.value.span, - "using `_` for array lengths is unstable", + fluent_generated::ast_lowering_underscore_array_length_unstable, ) .stash(c.value.span, StashKey::UnderscoreForArrayLengths); hir::ArrayLen::Body(self.lower_anon_const_to_const_arg(c)) diff --git a/compiler/rustc_ast_passes/src/ast_validation.rs b/compiler/rustc_ast_passes/src/ast_validation.rs index a353c79f12d45..837cb805700d2 100644 --- a/compiler/rustc_ast_passes/src/ast_validation.rs +++ b/compiler/rustc_ast_passes/src/ast_validation.rs @@ -887,7 +887,7 @@ fn validate_generic_param_order(dcx: DiagCtxtHandle<'_>, generics: &[GenericPara impl<'a> Visitor<'a> for AstValidator<'a> { fn visit_attribute(&mut self, attr: &Attribute) { - validate_attr::check_attr(&self.features, &self.session.psess, attr); + validate_attr::check_attr(&self.session.psess, attr); } fn visit_ty(&mut self, ty: &'a Ty) { diff --git a/compiler/rustc_ast_passes/src/feature_gate.rs b/compiler/rustc_ast_passes/src/feature_gate.rs index 3ceb8e0711a21..214a37bca03e2 100644 --- a/compiler/rustc_ast_passes/src/feature_gate.rs +++ b/compiler/rustc_ast_passes/src/feature_gate.rs @@ -559,7 +559,6 @@ pub fn 
check_crate(krate: &ast::Crate, sess: &Session, features: &Features) { gate_all!(mut_ref, "mutable by-reference bindings are experimental"); gate_all!(precise_capturing, "precise captures on `impl Trait` are experimental"); gate_all!(global_registration, "global registration is experimental"); - gate_all!(unsafe_attributes, "`#[unsafe()]` markers for attributes are experimental"); gate_all!(return_type_notation, "return type notation is experimental"); if !visitor.features.never_patterns { diff --git a/compiler/rustc_ast_pretty/src/pprust/state.rs b/compiler/rustc_ast_pretty/src/pprust/state.rs index ee4514758c212..c7ff39d23ed26 100644 --- a/compiler/rustc_ast_pretty/src/pprust/state.rs +++ b/compiler/rustc_ast_pretty/src/pprust/state.rs @@ -502,8 +502,8 @@ pub trait PrintState<'a>: std::ops::Deref + std::ops::Dere if !self.is_beginning_of_line() { self.word(" "); } - if cmnt.lines.len() == 1 { - self.word(cmnt.lines[0].clone()); + if let [line] = cmnt.lines.as_slice() { + self.word(line.clone()); self.hardbreak() } else { self.visual_align(); diff --git a/compiler/rustc_ast_pretty/src/pprust/state/item.rs b/compiler/rustc_ast_pretty/src/pprust/state/item.rs index 56204d8835a33..85a0b3b202292 100644 --- a/compiler/rustc_ast_pretty/src/pprust/state/item.rs +++ b/compiler/rustc_ast_pretty/src/pprust/state/item.rs @@ -783,8 +783,8 @@ impl<'a> State<'a> { } if items.is_empty() { self.word("{}"); - } else if items.len() == 1 { - self.print_use_tree(&items[0].0); + } else if let [(item, _)] = items.as_slice() { + self.print_use_tree(item); } else { self.cbox(INDENT_UNIT); self.word("{"); diff --git a/compiler/rustc_attr/messages.ftl b/compiler/rustc_attr/messages.ftl index eb51e568f81a7..5d9ac23ec490e 100644 --- a/compiler/rustc_attr/messages.ftl +++ b/compiler/rustc_attr/messages.ftl @@ -104,6 +104,9 @@ attr_unknown_meta_item = attr_unknown_version_literal = unknown version literal format, assuming it refers to a future version +attr_unstable_cfg_target_compact = + compact `cfg(target(..))` is experimental and subject to change + attr_unsupported_literal_cfg_string = literal in `cfg` predicate value must be a string attr_unsupported_literal_deprecated_kv_pair = diff --git a/compiler/rustc_attr/src/builtin.rs b/compiler/rustc_attr/src/builtin.rs index 12a19ae5c3db9..d057dcfdf9d0c 100644 --- a/compiler/rustc_attr/src/builtin.rs +++ b/compiler/rustc_attr/src/builtin.rs @@ -20,6 +20,7 @@ use rustc_span::hygiene::Transparency; use rustc_span::symbol::{sym, Symbol}; use rustc_span::Span; +use crate::fluent_generated; use crate::session_diagnostics::{self, IncorrectReprFormatGenericCause}; /// The version placeholder that recently stabilized features contain inside the @@ -521,7 +522,6 @@ pub struct Condition { } /// Tests if a cfg-pattern matches the cfg set -#[allow(rustc::untranslatable_diagnostic)] // FIXME: make this translatable pub fn cfg_matches( cfg: &ast::MetaItem, sess: &Session, @@ -593,7 +593,6 @@ pub fn parse_version(s: Symbol) -> Option { /// Evaluate a cfg-like condition (with `any` and `all`), using `eval` to /// evaluate individual items. 
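For context on the `lower_fn_header(sig.header, hir::Safety::Unsafe)` change in the foreign-item hunk above: items declared in an `extern` block carry no explicit `unsafe` keyword, yet calling them is unsafe, which is exactly the default the new `default_safety` parameter encodes. A minimal stable-Rust illustration, assuming a platform where libc's `abs` is available to link against:

```rust
// The declaration carries no `unsafe`, but a foreign function's default
// safety is "unsafe to call" -- the default the lowering now passes.
extern "C" {
    fn abs(x: i32) -> i32;
}

fn main() {
    // Calling the foreign item still requires an `unsafe` block.
    let v = unsafe { abs(-3) };
    assert_eq!(v, 3);
}
```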
-#[allow(rustc::untranslatable_diagnostic)] // FIXME: make this translatable pub fn eval_condition( cfg: &ast::MetaItem, sess: &Session, @@ -665,12 +664,12 @@ pub fn eval_condition( res & eval_condition(mi.meta_item().unwrap(), sess, features, eval) }), sym::not => { - if mis.len() != 1 { + let [mi] = mis.as_slice() else { dcx.emit_err(session_diagnostics::ExpectedOneCfgPattern { span: cfg.span }); return false; - } + }; - !eval_condition(mis[0].meta_item().unwrap(), sess, features, eval) + !eval_condition(mi.meta_item().unwrap(), sess, features, eval) } sym::target => { if let Some(features) = features @@ -680,7 +679,7 @@ pub fn eval_condition( sess, sym::cfg_target_compact, cfg.span, - "compact `cfg(target(..))` is experimental and subject to change", + fluent_generated::attr_unstable_cfg_target_compact, ) .emit(); } @@ -1051,10 +1050,10 @@ pub fn parse_repr_attr(sess: &Session, attr: &Attribute) -> Vec { MetaItemKind::List(nested_items) => { if meta_item.has_name(sym::align) { recognised = true; - if nested_items.len() == 1 { + if let [nested_item] = nested_items.as_slice() { sess.dcx().emit_err( session_diagnostics::IncorrectReprFormatExpectInteger { - span: nested_items[0].span(), + span: nested_item.span(), }, ); } else { @@ -1066,10 +1065,10 @@ pub fn parse_repr_attr(sess: &Session, attr: &Attribute) -> Vec { } } else if meta_item.has_name(sym::packed) { recognised = true; - if nested_items.len() == 1 { + if let [nested_item] = nested_items.as_slice() { sess.dcx().emit_err( session_diagnostics::IncorrectReprFormatPackedExpectInteger { - span: nested_items[0].span(), + span: nested_item.span(), }, ); } else { diff --git a/compiler/rustc_borrowck/messages.ftl b/compiler/rustc_borrowck/messages.ftl index c14a617eb91fe..edb25e12864bc 100644 --- a/compiler/rustc_borrowck/messages.ftl +++ b/compiler/rustc_borrowck/messages.ftl @@ -62,6 +62,9 @@ borrowck_could_not_normalize = borrowck_could_not_prove = could not prove `{$predicate}` +borrowck_dereference_suggestion = + dereference the return value + borrowck_func_take_self_moved_place = `{$func}` takes ownership of the receiver `self`, which moves {$place_name} @@ -74,9 +77,24 @@ borrowck_higher_ranked_lifetime_error = borrowck_higher_ranked_subtype_error = higher-ranked subtype error +borrowck_implicit_static = + this has an implicit `'static` lifetime requirement + +borrowck_implicit_static_introduced = + calling this method introduces the `impl`'s `'static` requirement + +borrowck_implicit_static_relax = + consider relaxing the implicit `'static` requirement + borrowck_lifetime_constraints_error = lifetime may not live long enough +borrowck_limitations_implies_static = + due to current limitations in the borrow checker, this implies a `'static` lifetime + +borrowck_move_closure_suggestion = + consider adding 'move' keyword before the nested closure + borrowck_move_out_place_here = {$place} is moved here @@ -163,6 +181,9 @@ borrowck_partial_var_move_by_use_in_coroutine = *[false] moved } due to use in coroutine +borrowck_restrict_to_static = + consider restricting the type parameter to the `'static` lifetime + borrowck_returned_async_block_escaped = returns an `async` block that contains a reference to a captured variable, which then escapes the closure body diff --git a/compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs b/compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs index a58c7c43246b5..2b46e5597f778 100644 --- a/compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs +++ 
b/compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs @@ -3989,7 +3989,7 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, '_, 'infcx, 'tcx> { } else { let ty = self.infcx.tcx.type_of(self.mir_def_id()).instantiate_identity(); match ty.kind() { - ty::FnDef(_, _) | ty::FnPtr(_) => self.annotate_fn_sig( + ty::FnDef(_, _) | ty::FnPtr(..) => self.annotate_fn_sig( self.mir_def_id(), self.infcx.tcx.fn_sig(self.mir_def_id()).instantiate_identity(), ), diff --git a/compiler/rustc_borrowck/src/diagnostics/explain_borrow.rs b/compiler/rustc_borrowck/src/diagnostics/explain_borrow.rs index 3590e12274ca6..d85959c9a291e 100644 --- a/compiler/rustc_borrowck/src/diagnostics/explain_borrow.rs +++ b/compiler/rustc_borrowck/src/diagnostics/explain_borrow.rs @@ -3,6 +3,8 @@ #![allow(rustc::diagnostic_outside_of_impl)] #![allow(rustc::untranslatable_diagnostic)] +use std::assert_matches::assert_matches; + use rustc_errors::{Applicability, Diag}; use rustc_hir as hir; use rustc_hir::intravisit::Visitor; @@ -116,7 +118,7 @@ impl<'tcx> BorrowExplanation<'tcx> { // path_span must be `Some` as otherwise the if condition is true let path_span = path_span.unwrap(); // path_span is only present in the case of closure capture - assert!(matches!(later_use_kind, LaterUseKind::ClosureCapture)); + assert_matches!(later_use_kind, LaterUseKind::ClosureCapture); if !borrow_span.is_some_and(|sp| sp.overlaps(var_or_use_span)) { let path_label = "used here by closure"; let capture_kind_label = message; @@ -147,7 +149,7 @@ impl<'tcx> BorrowExplanation<'tcx> { // path_span must be `Some` as otherwise the if condition is true let path_span = path_span.unwrap(); // path_span is only present in the case of closure capture - assert!(matches!(later_use_kind, LaterUseKind::ClosureCapture)); + assert_matches!(later_use_kind, LaterUseKind::ClosureCapture); if borrow_span.map(|sp| !sp.overlaps(var_or_use_span)).unwrap_or(true) { let path_label = "used here by closure"; let capture_kind_label = message; diff --git a/compiler/rustc_borrowck/src/diagnostics/outlives_suggestion.rs b/compiler/rustc_borrowck/src/diagnostics/outlives_suggestion.rs index 9356c24d018f7..de0df3474299a 100644 --- a/compiler/rustc_borrowck/src/diagnostics/outlives_suggestion.rs +++ b/compiler/rustc_borrowck/src/diagnostics/outlives_suggestion.rs @@ -206,8 +206,8 @@ impl OutlivesSuggestionBuilder { // If there is exactly one suggestable constraints, then just suggest it. Otherwise, emit a // list of diagnostics. 
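The `explain_borrow.rs` hunks above swap `assert!(matches!(..))` for `std::assert_matches::assert_matches!`, which is still unstable (hence the new `use std::assert_matches::assert_matches;` import). Its advantage is that a failed assertion prints the value that did not match, not just the stringified condition. A nightly-only sketch with a stand-in enum (the real `LaterUseKind` lives in the borrow checker):

```rust
#![feature(assert_matches)] // nightly-only at the time of this diff

use std::assert_matches::assert_matches;

#[derive(Debug)]
enum LaterUseKind {
    ClosureCapture,
    Call,
}

fn main() {
    let kind = LaterUseKind::ClosureCapture;
    // On failure this panics with the actual `kind` value and the expected
    // pattern, unlike `assert!(matches!(kind, ...))`.
    assert_matches!(kind, LaterUseKind::ClosureCapture);
    let _unused = LaterUseKind::Call; // keep the second variant constructed
}
```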
- let mut diag = if suggested.len() == 1 { - mbcx.dcx().struct_help(match suggested.last().unwrap() { + let mut diag = if let [constraint] = suggested.as_slice() { + mbcx.dcx().struct_help(match constraint { SuggestedConstraint::Outlives(a, bs) => { let bs: SmallVec<[String; 2]> = bs.iter().map(|r| r.to_string()).collect(); format!("add bound `{a}: {}`", bs.join(" + ")) diff --git a/compiler/rustc_borrowck/src/diagnostics/region_errors.rs b/compiler/rustc_borrowck/src/diagnostics/region_errors.rs index 82df9760d8e7c..451e8bcb16da4 100644 --- a/compiler/rustc_borrowck/src/diagnostics/region_errors.rs +++ b/compiler/rustc_borrowck/src/diagnostics/region_errors.rs @@ -35,7 +35,7 @@ use crate::session_diagnostics::{ LifetimeReturnCategoryErr, RequireStaticErr, VarHereDenote, }; use crate::universal_regions::DefiningTy; -use crate::{borrowck_errors, MirBorrowckCtxt}; +use crate::{borrowck_errors, fluent_generated as fluent, MirBorrowckCtxt}; impl<'tcx> ConstraintDescription for ConstraintCategory<'tcx> { fn description(&self) -> &'static str { @@ -198,7 +198,6 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, '_, 'infcx, 'tcx> { // from higher-ranked trait bounds (HRTB). Try to locate span of the trait // and the span which bounded to the trait for adding 'static lifetime suggestion #[allow(rustc::diagnostic_outside_of_impl)] - #[allow(rustc::untranslatable_diagnostic)] // FIXME: make this translatable fn suggest_static_lifetime_for_gat_from_hrtb( &self, diag: &mut Diag<'_>, @@ -251,23 +250,28 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, '_, 'infcx, 'tcx> { debug!(?hrtb_bounds); hrtb_bounds.iter().for_each(|bound| { - let Trait(PolyTraitRef { trait_ref, span: trait_span, .. }, _) = bound else { return; }; - diag.span_note( - *trait_span, - "due to current limitations in the borrow checker, this implies a `'static` lifetime" - ); - let Some(generics_fn) = hir.get_generics(self.body.source.def_id().expect_local()) else { return; }; - let Def(_, trait_res_defid) = trait_ref.path.res else { return; }; + let Trait(PolyTraitRef { trait_ref, span: trait_span, .. }, _) = bound else { + return; + }; + diag.span_note(*trait_span, fluent::borrowck_limitations_implies_static); + let Some(generics_fn) = hir.get_generics(self.body.source.def_id().expect_local()) + else { + return; + }; + let Def(_, trait_res_defid) = trait_ref.path.res else { + return; + }; debug!(?generics_fn); generics_fn.predicates.iter().for_each(|predicate| { - let BoundPredicate( - WhereBoundPredicate { - span: bounded_span, - bounded_ty, - bounds, - .. - } - ) = predicate else { return; }; + let BoundPredicate(WhereBoundPredicate { + span: bounded_span, + bounded_ty, + bounds, + .. + }) = predicate + else { + return; + }; bounds.iter().for_each(|bd| { if let Trait(PolyTraitRef { trait_ref: tr_ref, .. 
}, _) = bd && let Def(_, res_defid) = tr_ref.path.res @@ -277,16 +281,17 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, '_, 'infcx, 'tcx> { && generics_fn.params .iter() .rfind(|param| param.def_id.to_def_id() == defid) - .is_some() { - suggestions.push((bounded_span.shrink_to_hi(), " + 'static".to_string())); - } + .is_some() + { + suggestions.push((bounded_span.shrink_to_hi(), " + 'static".to_string())); + } }); }); }); if suggestions.len() > 0 { suggestions.dedup(); diag.multipart_suggestion_verbose( - "consider restricting the type parameter to the `'static` lifetime", + fluent::borrowck_restrict_to_static, suggestions, Applicability::MaybeIncorrect, ); @@ -976,7 +981,6 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, '_, 'infcx, 'tcx> { } #[allow(rustc::diagnostic_outside_of_impl)] - #[allow(rustc::untranslatable_diagnostic)] // FIXME: make this translatable #[instrument(skip(self, err), level = "debug")] fn suggest_constrain_dyn_trait_in_impl( &self, @@ -994,16 +998,12 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, '_, 'infcx, 'tcx> { debug!("trait spans found: {:?}", traits); for span in &traits { let mut multi_span: MultiSpan = vec![*span].into(); - multi_span - .push_span_label(*span, "this has an implicit `'static` lifetime requirement"); - multi_span.push_span_label( - ident.span, - "calling this method introduces the `impl`'s `'static` requirement", - ); + multi_span.push_span_label(*span, fluent::borrowck_implicit_static); + multi_span.push_span_label(ident.span, fluent::borrowck_implicit_static_introduced); err.subdiagnostic(RequireStaticErr::UsedImpl { multi_span }); err.span_suggestion_verbose( span.shrink_to_hi(), - "consider relaxing the implicit `'static` requirement", + fluent::borrowck_implicit_static_relax, " + '_", Applicability::MaybeIncorrect, ); @@ -1045,7 +1045,6 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, '_, 'infcx, 'tcx> { } #[allow(rustc::diagnostic_outside_of_impl)] - #[allow(rustc::untranslatable_diagnostic)] // FIXME: make this translatable /// When encountering a lifetime error caused by the return type of a closure, check the /// corresponding trait bound and see if dereferencing the closure return value would satisfy /// them. If so, we produce a structured suggestion. 
@@ -1166,7 +1165,7 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, '_, 'infcx, 'tcx> { if ocx.select_all_or_error().is_empty() && count > 0 { diag.span_suggestion_verbose( tcx.hir().body(*body).value.peel_blocks().span.shrink_to_lo(), - "dereference the return value", + fluent::borrowck_dereference_suggestion, "*".repeat(count), Applicability::MachineApplicable, ); @@ -1174,7 +1173,6 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, '_, 'infcx, 'tcx> { } #[allow(rustc::diagnostic_outside_of_impl)] - #[allow(rustc::untranslatable_diagnostic)] // FIXME: make this translatable fn suggest_move_on_borrowing_closure(&self, diag: &mut Diag<'_>) { let map = self.infcx.tcx.hir(); let body = map.body_owned_by(self.mir_def_id()); @@ -1213,7 +1211,7 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, '_, 'infcx, 'tcx> { if let Some(closure_span) = closure_span { diag.span_suggestion_verbose( closure_span, - "consider adding 'move' keyword before the nested closure", + fluent::borrowck_move_closure_suggestion, "move ", Applicability::MaybeIncorrect, ); diff --git a/compiler/rustc_borrowck/src/lib.rs b/compiler/rustc_borrowck/src/lib.rs index 9c2a0036befa8..30dd45d847c9b 100644 --- a/compiler/rustc_borrowck/src/lib.rs +++ b/compiler/rustc_borrowck/src/lib.rs @@ -1644,7 +1644,7 @@ impl<'mir, 'tcx> MirBorrowckCtxt<'_, 'mir, '_, 'tcx> { | ty::Pat(_, _) | ty::Slice(_) | ty::FnDef(_, _) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::Dynamic(_, _, _) | ty::Closure(_, _) | ty::CoroutineClosure(_, _) @@ -1689,7 +1689,7 @@ impl<'mir, 'tcx> MirBorrowckCtxt<'_, 'mir, '_, 'tcx> { | ty::RawPtr(_, _) | ty::Ref(_, _, _) | ty::FnDef(_, _) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::Dynamic(_, _, _) | ty::CoroutineWitness(..) | ty::Never diff --git a/compiler/rustc_borrowck/src/type_check/canonical.rs b/compiler/rustc_borrowck/src/type_check/canonical.rs index 86cd8b918fc6e..b58691fbeae3a 100644 --- a/compiler/rustc_borrowck/src/type_check/canonical.rs +++ b/compiler/rustc_borrowck/src/type_check/canonical.rs @@ -7,6 +7,7 @@ use rustc_middle::mir::ConstraintCategory; use rustc_middle::ty::{self, Ty, TyCtxt, TypeFoldable, Upcast}; use rustc_span::def_id::DefId; use rustc_span::Span; +use rustc_trait_selection::traits::query::type_op::custom::CustomTypeOp; use rustc_trait_selection::traits::query::type_op::{self, TypeOpOutput}; use rustc_trait_selection::traits::ObligationCause; @@ -165,6 +166,52 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { result.unwrap_or(value) } + #[instrument(skip(self), level = "debug")] + pub(super) fn struct_tail( + &mut self, + ty: Ty<'tcx>, + location: impl NormalizeLocation, + ) -> Ty<'tcx> { + let tcx = self.tcx(); + if self.infcx.next_trait_solver() { + let body = self.body; + let param_env = self.param_env; + self.fully_perform_op( + location.to_locations(), + ConstraintCategory::Boring, + CustomTypeOp::new( + |ocx| { + let structurally_normalize = |ty| { + ocx.structurally_normalize( + &ObligationCause::misc( + location.to_locations().span(body), + body.source.def_id().expect_local(), + ), + param_env, + ty, + ) + .unwrap_or_else(|_| bug!("struct tail should have been computable, since we computed it in HIR")) + }; + + let tail = tcx.struct_tail_raw( + ty, + structurally_normalize, + || {}, + ); + + Ok(tail) + }, + "normalizing struct tail", + ), + ) + .unwrap_or_else(|guar| Ty::new_error(tcx, guar)) + } else { + let mut normalize = |ty| self.normalize(ty, location); + let tail = tcx.struct_tail_raw(ty, &mut normalize, || {}); + normalize(tail) + } + } + #[instrument(skip(self), level = "debug")] pub(super) fn 
ascribe_user_type( &mut self, diff --git a/compiler/rustc_borrowck/src/type_check/mod.rs b/compiler/rustc_borrowck/src/type_check/mod.rs index b13773ffe1460..a2669da1b04e8 100644 --- a/compiler/rustc_borrowck/src/type_check/mod.rs +++ b/compiler/rustc_borrowck/src/type_check/mod.rs @@ -1364,7 +1364,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { debug!("func_ty.kind: {:?}", func_ty.kind()); let sig = match func_ty.kind() { - ty::FnDef(..) | ty::FnPtr(_) => func_ty.fn_sig(tcx), + ty::FnDef(..) | ty::FnPtr(..) => func_ty.fn_sig(tcx), _ => { span_mirbug!(self, term, "call to non-function {:?}", func_ty); return; @@ -1989,9 +1989,9 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { let ty_fn_ptr_from = Ty::new_fn_ptr(tcx, fn_sig); - if let Err(terr) = self.eq_types( - *ty, + if let Err(terr) = self.sub_types( ty_fn_ptr_from, + *ty, location.to_locations(), ConstraintCategory::Cast { unsize_to: None }, ) { @@ -2014,9 +2014,9 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { let ty_fn_ptr_from = Ty::new_fn_ptr(tcx, tcx.signature_unclosure(sig, *safety)); - if let Err(terr) = self.eq_types( - *ty, + if let Err(terr) = self.sub_types( ty_fn_ptr_from, + *ty, location.to_locations(), ConstraintCategory::Cast { unsize_to: None }, ) { @@ -2329,17 +2329,8 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { let cast_ty_to = CastTy::from_ty(*ty); match (cast_ty_from, cast_ty_to) { (Some(CastTy::Ptr(src)), Some(CastTy::Ptr(dst))) => { - let mut normalize = |t| self.normalize(t, location); - - // N.B. `struct_tail_with_normalize` only "structurally resolves" - // the type. It is not fully normalized, so we have to normalize it - // afterwards. - let src_tail = - tcx.struct_tail_with_normalize(src.ty, &mut normalize, || ()); - let src_tail = normalize(src_tail); - let dst_tail = - tcx.struct_tail_with_normalize(dst.ty, &mut normalize, || ()); - let dst_tail = normalize(dst_tail); + let src_tail = self.struct_tail(src.ty, location); + let dst_tail = self.struct_tail(dst.ty, location); // This checks (lifetime part of) vtable validity for pointer casts, // which is irrelevant when there are aren't principal traits on both sides (aka only auto traits). @@ -2420,7 +2411,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { let ty_left = left.ty(body, tcx); match ty_left.kind() { // Types with regions are comparable if they have a common super-type. - ty::RawPtr(_, _) | ty::FnPtr(_) => { + ty::RawPtr(_, _) | ty::FnPtr(..) => { let ty_right = right.ty(body, tcx); let common_ty = self.infcx.next_ty_var(body.source_info(location).span); self.sub_types( diff --git a/compiler/rustc_builtin_macros/src/asm.rs b/compiler/rustc_builtin_macros/src/asm.rs index 17439dd3e3eda..ae2627d693867 100644 --- a/compiler/rustc_builtin_macros/src/asm.rs +++ b/compiler/rustc_builtin_macros/src/asm.rs @@ -328,7 +328,7 @@ pub fn parse_asm_args<'a>( /// Otherwise, the suggestion will be incorrect. fn err_duplicate_option(p: &Parser<'_>, symbol: Symbol, span: Span) { // Tool-only output - let full_span = if p.token.kind == token::Comma { span.to(p.token.span) } else { span }; + let full_span = if p.token == token::Comma { span.to(p.token.span) } else { span }; p.dcx().emit_err(errors::AsmOptAlreadyprovided { span, symbol, full_span }); } @@ -338,7 +338,7 @@ fn err_duplicate_option(p: &Parser<'_>, symbol: Symbol, span: Span) { /// Otherwise, the suggestion will be incorrect. 
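On the `eq_types` to `sub_types` change in the MIR type checker above: when a function item or closure is reified to a function pointer, the resulting pointer type only needs to be a subtype of the type recorded for the cast, not equal to it. A plain-Rust reminder of the kind of subtyping that exists between function pointer types, with no compiler internals involved:

```rust
// A function pointer that accepts any lifetime is a subtype of one that only
// accepts `'static`, so the assignment below type-checks even though the two
// types are not equal.
fn callee(_: &str) {}

fn main() {
    let general: for<'a> fn(&'a str) = callee;
    let specific: fn(&'static str) = general; // subtyping coercion, not equality
    specific("hello");
}
```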
fn err_unsupported_option(p: &Parser<'_>, symbol: Symbol, span: Span) { // Tool-only output - let full_span = if p.token.kind == token::Comma { span.to(p.token.span) } else { span }; + let full_span = if p.token == token::Comma { span.to(p.token.span) } else { span }; p.dcx().emit_err(errors::GlobalAsmUnsupportedOption { span, symbol, full_span }); } @@ -745,10 +745,9 @@ fn expand_preparsed_asm( unused_operands.push((args.operands[idx].1, msg)); } } - match unused_operands.len() { - 0 => {} - 1 => { - let (sp, msg) = unused_operands.into_iter().next().unwrap(); + match unused_operands[..] { + [] => {} + [(sp, msg)] => { ecx.dcx() .struct_span_err(sp, msg) .with_span_label(sp, msg) diff --git a/compiler/rustc_builtin_macros/src/cfg_accessible.rs b/compiler/rustc_builtin_macros/src/cfg_accessible.rs index 006b6aa823fbf..3d3bd3aea059c 100644 --- a/compiler/rustc_builtin_macros/src/cfg_accessible.rs +++ b/compiler/rustc_builtin_macros/src/cfg_accessible.rs @@ -47,7 +47,6 @@ impl MultiItemModifier for Expander { ) -> ExpandResult, Annotatable> { let template = AttributeTemplate { list: Some("path"), ..Default::default() }; validate_attr::check_builtin_meta_item( - &ecx.ecfg.features, &ecx.sess.psess, meta_item, ast::AttrStyle::Outer, diff --git a/compiler/rustc_builtin_macros/src/derive.rs b/compiler/rustc_builtin_macros/src/derive.rs index 57bddf0ab60ad..e8704bc2f639d 100644 --- a/compiler/rustc_builtin_macros/src/derive.rs +++ b/compiler/rustc_builtin_macros/src/derive.rs @@ -38,7 +38,6 @@ impl MultiItemModifier for Expander { let template = AttributeTemplate { list: Some("Trait1, Trait2, ..."), ..Default::default() }; validate_attr::check_builtin_meta_item( - features, &sess.psess, meta_item, ast::AttrStyle::Outer, diff --git a/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs b/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs index c8ab8ed681c15..c90a916488606 100644 --- a/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs +++ b/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs @@ -378,8 +378,8 @@ impl BlockOrExpr { None => cx.expr_block(cx.block(span, ThinVec::new())), Some(expr) => expr, } - } else if self.0.len() == 1 - && let ast::StmtKind::Expr(expr) = &self.0[0].kind + } else if let [stmt] = self.0.as_slice() + && let ast::StmtKind::Expr(expr) = &stmt.kind && self.1.is_none() { // There's only a single statement expression. Pull it out. @@ -1273,7 +1273,7 @@ impl<'a> MethodDef<'a> { } FieldlessVariantsStrategy::Default => (), } - } else if variants.len() == 1 { + } else if let [variant] = variants.as_slice() { // If there is a single variant, we don't need an operation on // the discriminant(s). Just use the most degenerate result. 
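The `p.token == token::Comma` comparisons above (previously `p.token.kind == token::Comma`) lean on a cross-type `PartialEq` impl so a token can be compared directly against a bare kind. A self-contained sketch of that pattern with hypothetical local types; the real impl lives in `rustc_ast`:

```rust
#[derive(PartialEq, Debug)]
enum TokenKind {
    Comma,
    Semi,
}

#[derive(Debug)]
struct Token {
    kind: TokenKind,
    byte_offset: usize,
}

// Cross-type equality: compare a `Token` against a `TokenKind` without
// spelling out `.kind` at every call site.
impl PartialEq<TokenKind> for Token {
    fn eq(&self, other: &TokenKind) -> bool {
        self.kind == *other
    }
}

fn main() {
    let tok = Token { kind: TokenKind::Comma, byte_offset: 42 };
    assert!(tok == TokenKind::Comma);
    assert!(tok != TokenKind::Semi);
    let _ = tok.byte_offset;
}
```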
return self.call_substructure_method( @@ -1281,7 +1281,7 @@ impl<'a> MethodDef<'a> { trait_, type_ident, nonselflike_args, - &EnumMatching(0, &variants[0], Vec::new()), + &EnumMatching(0, variant, Vec::new()), ); } } diff --git a/compiler/rustc_builtin_macros/src/format.rs b/compiler/rustc_builtin_macros/src/format.rs index ff82f0f2da7aa..f99530cad18f9 100644 --- a/compiler/rustc_builtin_macros/src/format.rs +++ b/compiler/rustc_builtin_macros/src/format.rs @@ -180,8 +180,8 @@ fn make_format_args( Ok((mut err, suggested)) => { if !suggested { if let ExprKind::Block(block, None) = &efmt.kind - && block.stmts.len() == 1 - && let StmtKind::Expr(expr) = &block.stmts[0].kind + && let [stmt] = block.stmts.as_slice() + && let StmtKind::Expr(expr) = &stmt.kind && let ExprKind::Path(None, path) = &expr.kind && path.is_potential_trivial_const_arg() { @@ -196,8 +196,8 @@ fn make_format_args( } else { let sugg_fmt = match args.explicit_args().len() { 0 => "{}".to_string(), - _ => { - format!("{}{{}}", "{} ".repeat(args.explicit_args().len())) + count => { + format!("{}{{}}", "{} ".repeat(count)) } }; err.span_suggestion( diff --git a/compiler/rustc_builtin_macros/src/util.rs b/compiler/rustc_builtin_macros/src/util.rs index 73cc8ff547d51..0bcd5aef28bab 100644 --- a/compiler/rustc_builtin_macros/src/util.rs +++ b/compiler/rustc_builtin_macros/src/util.rs @@ -17,7 +17,6 @@ pub(crate) fn check_builtin_macro_attribute(ecx: &ExtCtxt<'_>, meta_item: &MetaI // All the built-in macro attributes are "words" at the moment. let template = AttributeTemplate { word: true, ..Default::default() }; validate_attr::check_builtin_meta_item( - &ecx.ecfg.features, &ecx.sess.psess, meta_item, AttrStyle::Outer, diff --git a/compiler/rustc_codegen_cranelift/src/common.rs b/compiler/rustc_codegen_cranelift/src/common.rs index a1b29a4222579..b9000a3874f5d 100644 --- a/compiler/rustc_codegen_cranelift/src/common.rs +++ b/compiler/rustc_codegen_cranelift/src/common.rs @@ -69,7 +69,7 @@ fn clif_type_from_ty<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> Option types::F64, FloatTy::F128 => unimplemented!("f16_f128"), }, - ty::FnPtr(_) => pointer_ty(tcx), + ty::FnPtr(..) 
=> pointer_ty(tcx), ty::RawPtr(pointee_ty, _) | ty::Ref(_, pointee_ty, _) => { if has_ptr_meta(tcx, *pointee_ty) { return None; diff --git a/compiler/rustc_codegen_cranelift/src/intrinsics/mod.rs b/compiler/rustc_codegen_cranelift/src/intrinsics/mod.rs index b21c559e6686c..29deac6073035 100644 --- a/compiler/rustc_codegen_cranelift/src/intrinsics/mod.rs +++ b/compiler/rustc_codegen_cranelift/src/intrinsics/mod.rs @@ -725,7 +725,8 @@ fn codegen_regular_intrinsic_call<'tcx>( // Cranelift treats stores as volatile by default // FIXME correctly handle unaligned_volatile_store - // FIXME actually do nontemporal stores if requested + // FIXME actually do nontemporal stores if requested (but do not just emit MOVNT on x86; + // see the LLVM backend for details) let dest = CPlace::for_ptr(Pointer::new(ptr), val.layout()); dest.write_cvalue(fx, val); } diff --git a/compiler/rustc_codegen_cranelift/src/value_and_place.rs b/compiler/rustc_codegen_cranelift/src/value_and_place.rs index 8eb2095e52346..fd77502224e4c 100644 --- a/compiler/rustc_codegen_cranelift/src/value_and_place.rs +++ b/compiler/rustc_codegen_cranelift/src/value_and_place.rs @@ -874,7 +874,7 @@ pub(crate) fn assert_assignable<'tcx>( (ty::Ref(_, a, _), ty::RawPtr(b, _)) | (ty::RawPtr(a, _), ty::Ref(_, b, _)) => { assert_assignable(fx, *a, *b, limit - 1); } - (ty::FnPtr(_), ty::FnPtr(_)) => { + (ty::FnPtr(..), ty::FnPtr(..)) => { let from_sig = fx.tcx.normalize_erasing_late_bound_regions( ParamEnv::reveal_all(), from_ty.fn_sig(fx.tcx), diff --git a/compiler/rustc_codegen_gcc/build_system/src/test.rs b/compiler/rustc_codegen_gcc/build_system/src/test.rs index dabf6c5aa3ee4..83fa8059b1a05 100644 --- a/compiler/rustc_codegen_gcc/build_system/src/test.rs +++ b/compiler/rustc_codegen_gcc/build_system/src/test.rs @@ -552,7 +552,7 @@ fn asm_tests(env: &Env, args: &TestArg) -> Result<(), String> { &"--stage", &"0", &"tests/assembly/asm", - &"--rustc-args", + &"--compiletest-rustc-args", &rustc_args, ], Some(&rust_dir), @@ -1020,7 +1020,7 @@ where &"--stage", &"0", &format!("tests/{}", test_type), - &"--rustc-args", + &"--compiletest-rustc-args", &rustc_args, ], Some(&rust_path), diff --git a/compiler/rustc_codegen_gcc/src/builder.rs b/compiler/rustc_codegen_gcc/src/builder.rs index a64371a3d891e..47b378cc1cd82 100644 --- a/compiler/rustc_codegen_gcc/src/builder.rs +++ b/compiler/rustc_codegen_gcc/src/builder.rs @@ -1127,6 +1127,8 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> { self.llbb().add_assignment(self.location, aligned_destination, val); // TODO(antoyo): handle align and flags. // NOTE: dummy value here since it's never used. FIXME(antoyo): API should not return a value here? + // When adding support for NONTEMPORAL, make sure to not just emit MOVNT on x86; see the + // LLVM backend for details. 
self.cx.context.new_rvalue_zero(self.type_i32()) } diff --git a/compiler/rustc_codegen_gcc/src/common.rs b/compiler/rustc_codegen_gcc/src/common.rs index 7a456e1c5d64a..dca6b6494f94b 100644 --- a/compiler/rustc_codegen_gcc/src/common.rs +++ b/compiler/rustc_codegen_gcc/src/common.rs @@ -160,6 +160,11 @@ impl<'gcc, 'tcx> ConstMethods<'tcx> for CodegenCx<'gcc, 'tcx> { self.context.new_struct_constructor(None, struct_type.as_type(), None, values) } + fn const_vector(&self, values: &[RValue<'gcc>]) -> RValue<'gcc> { + let typ = self.type_vector(values[0].get_type(), values.len() as u64); + self.context.new_rvalue_from_vector(None, typ, values) + } + fn const_to_opt_uint(&self, _v: RValue<'gcc>) -> Option { // TODO(antoyo) None diff --git a/compiler/rustc_codegen_gcc/src/type_of.rs b/compiler/rustc_codegen_gcc/src/type_of.rs index d85ed4f12ffab..b7b1be5369c42 100644 --- a/compiler/rustc_codegen_gcc/src/type_of.rs +++ b/compiler/rustc_codegen_gcc/src/type_of.rs @@ -213,9 +213,8 @@ impl<'tcx> LayoutGccExt<'tcx> for TyAndLayout<'tcx> { // NOTE: we cannot remove this match like in the LLVM codegen because the call // to fn_ptr_backend_type handle the on-stack attribute. // TODO(antoyo): find a less hackish way to hande the on-stack attribute. - ty::FnPtr(sig) => { - cx.fn_ptr_backend_type(cx.fn_abi_of_fn_ptr(sig, ty::List::empty())) - } + ty::FnPtr(sig_tys, hdr) => cx + .fn_ptr_backend_type(cx.fn_abi_of_fn_ptr(sig_tys.with(hdr), ty::List::empty())), _ => self.scalar_gcc_type_at(cx, scalar, Size::ZERO), }; cx.scalar_types.borrow_mut().insert(self.ty, ty); diff --git a/compiler/rustc_codegen_gcc/tests/run/asm.rs b/compiler/rustc_codegen_gcc/tests/run/asm.rs index 56f2aac3d0a1e..4e05d026868e7 100644 --- a/compiler/rustc_codegen_gcc/tests/run/asm.rs +++ b/compiler/rustc_codegen_gcc/tests/run/asm.rs @@ -3,12 +3,10 @@ // Run-time: // status: 0 -#![feature(asm_const)] - -#[cfg(target_arch="x86_64")] +#[cfg(target_arch = "x86_64")] use std::arch::{asm, global_asm}; -#[cfg(target_arch="x86_64")] +#[cfg(target_arch = "x86_64")] global_asm!( " .global add_asm @@ -22,7 +20,7 @@ extern "C" { fn add_asm(a: i64, b: i64) -> i64; } -#[cfg(target_arch="x86_64")] +#[cfg(target_arch = "x86_64")] pub unsafe fn mem_cpy(dst: *mut u8, src: *const u8, len: usize) { asm!( "rep movsb", @@ -33,7 +31,7 @@ pub unsafe fn mem_cpy(dst: *mut u8, src: *const u8, len: usize) { ); } -#[cfg(target_arch="x86_64")] +#[cfg(target_arch = "x86_64")] fn asm() { unsafe { asm!("nop"); @@ -178,9 +176,8 @@ fn asm() { assert_eq!(array1, array2); } -#[cfg(not(target_arch="x86_64"))] -fn asm() { -} +#[cfg(not(target_arch = "x86_64"))] +fn asm() {} fn main() { asm(); diff --git a/compiler/rustc_codegen_llvm/src/abi.rs b/compiler/rustc_codegen_llvm/src/abi.rs index 1277b7898c2d9..5ff580e295a60 100644 --- a/compiler/rustc_codegen_llvm/src/abi.rs +++ b/compiler/rustc_codegen_llvm/src/abi.rs @@ -430,9 +430,32 @@ impl<'ll, 'tcx> FnAbiLlvmExt<'ll, 'tcx> for FnAbi<'tcx, Ty<'tcx>> { i += 1; i - 1 }; + + let apply_range_attr = |idx: AttributePlace, scalar: rustc_target::abi::Scalar| { + if cx.sess().opts.optimize != config::OptLevel::No + && llvm_util::get_version() >= (19, 0, 0) + && matches!(scalar.primitive(), Int(..)) + // If the value is a boolean, the range is 0..2 and that ultimately + // become 0..0 when the type becomes i1, which would be rejected + // by the LLVM verifier. + && !scalar.is_bool() + // LLVM also rejects full range. 
+ && !scalar.is_always_valid(cx) + { + attributes::apply_to_llfn( + llfn, + idx, + &[llvm::CreateRangeAttr(cx.llcx, scalar.size(cx), scalar.valid_range(cx))], + ); + } + }; + match &self.ret.mode { PassMode::Direct(attrs) => { attrs.apply_attrs_to_llfn(llvm::AttributePlace::ReturnValue, cx, llfn); + if let abi::Abi::Scalar(scalar) = self.ret.layout.abi { + apply_range_attr(llvm::AttributePlace::ReturnValue, scalar); + } } PassMode::Indirect { attrs, meta_attrs: _, on_stack } => { assert!(!on_stack); @@ -471,8 +494,13 @@ impl<'ll, 'tcx> FnAbiLlvmExt<'ll, 'tcx> for FnAbi<'tcx, Ty<'tcx>> { ); attributes::apply_to_llfn(llfn, llvm::AttributePlace::Argument(i), &[byval]); } - PassMode::Direct(attrs) - | PassMode::Indirect { attrs, meta_attrs: None, on_stack: false } => { + PassMode::Direct(attrs) => { + let i = apply(attrs); + if let abi::Abi::Scalar(scalar) = arg.layout.abi { + apply_range_attr(llvm::AttributePlace::Argument(i), scalar); + } + } + PassMode::Indirect { attrs, meta_attrs: None, on_stack: false } => { apply(attrs); } PassMode::Indirect { attrs, meta_attrs: Some(meta_attrs), on_stack } => { @@ -481,8 +509,12 @@ impl<'ll, 'tcx> FnAbiLlvmExt<'ll, 'tcx> for FnAbi<'tcx, Ty<'tcx>> { apply(meta_attrs); } PassMode::Pair(a, b) => { - apply(a); - apply(b); + let i = apply(a); + let ii = apply(b); + if let abi::Abi::ScalarPair(scalar_a, scalar_b) = arg.layout.abi { + apply_range_attr(llvm::AttributePlace::Argument(i), scalar_a); + apply_range_attr(llvm::AttributePlace::Argument(ii), scalar_b); + } } PassMode::Cast { cast, pad_i32 } => { if *pad_i32 { @@ -537,15 +569,18 @@ impl<'ll, 'tcx> FnAbiLlvmExt<'ll, 'tcx> for FnAbi<'tcx, Ty<'tcx>> { } _ => {} } - if let abi::Abi::Scalar(scalar) = self.ret.layout.abi { - // If the value is a boolean, the range is 0..2 and that ultimately - // become 0..0 when the type becomes i1, which would be rejected - // by the LLVM verifier. - if let Int(..) = scalar.primitive() { - if !scalar.is_bool() && !scalar.is_always_valid(bx) { - bx.range_metadata(callsite, scalar.valid_range(bx)); - } - } + if bx.cx.sess().opts.optimize != config::OptLevel::No + && llvm_util::get_version() < (19, 0, 0) + && let abi::Abi::Scalar(scalar) = self.ret.layout.abi + && matches!(scalar.primitive(), Int(..)) + // If the value is a boolean, the range is 0..2 and that ultimately + // become 0..0 when the type becomes i1, which would be rejected + // by the LLVM verifier. + && !scalar.is_bool() + // LLVM also rejects full range. + && !scalar.is_always_valid(bx) + { + bx.range_metadata(callsite, scalar.valid_range(bx)); } for arg in self.args.iter() { match &arg.mode { diff --git a/compiler/rustc_codegen_llvm/src/asm.rs b/compiler/rustc_codegen_llvm/src/asm.rs index aea8395441a86..f931698c38f91 100644 --- a/compiler/rustc_codegen_llvm/src/asm.rs +++ b/compiler/rustc_codegen_llvm/src/asm.rs @@ -1,3 +1,5 @@ +use std::assert_matches::assert_matches; + use libc::{c_char, c_uint}; use rustc_ast::{InlineAsmOptions, InlineAsmTemplatePiece}; use rustc_codegen_ssa::mir::operand::OperandValue; @@ -89,7 +91,7 @@ impl<'ll, 'tcx> AsmBuilderMethods<'tcx> for Builder<'_, 'll, 'tcx> { // if the target feature needed by the register class is // disabled. This is necessary otherwise LLVM will try // to actually allocate a register for the dummy output. 
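The `apply_range_attr` hunks above attach LLVM `range(..)` attributes to integer arguments and return values whose scalar layout has a restricted valid range (skipping `bool` and full ranges, which LLVM rejects). The same validity-range information is what Rust's niche layout optimizations are built on, which is observable from stable code; the snippet below only illustrates that connection and is not part of the patch:

```rust
use std::mem::size_of;
use std::num::NonZeroU32;

fn main() {
    // `NonZeroU32` excludes 0 from its valid range, so `Option<NonZeroU32>`
    // can reuse 0 as the `None` encoding and needs no extra tag.
    assert_eq!(size_of::<Option<NonZeroU32>>(), size_of::<u32>());
    // `bool` is stored as a byte whose valid range is 0..=1.
    assert_eq!(size_of::<Option<bool>>(), 1);
}
```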
- assert!(matches!(reg, InlineAsmRegOrRegClass::Reg(_))); + assert_matches!(reg, InlineAsmRegOrRegClass::Reg(_)); clobbers.push(format!("~{}", reg_to_llvm(reg, None))); continue; } else { diff --git a/compiler/rustc_codegen_llvm/src/back/archive.rs b/compiler/rustc_codegen_llvm/src/back/archive.rs index a2ab19ac800b7..2120fc1815cd8 100644 --- a/compiler/rustc_codegen_llvm/src/back/archive.rs +++ b/compiler/rustc_codegen_llvm/src/back/archive.rs @@ -106,9 +106,11 @@ pub struct LlvmArchiveBuilderBuilder; impl ArchiveBuilderBuilder for LlvmArchiveBuilderBuilder { fn new_archive_builder<'a>(&self, sess: &'a Session) -> Box { - // FIXME use ArArchiveBuilder on most targets again once reading thin archives is - // implemented - if true { + // Keeping LlvmArchiveBuilder around in case of a regression caused by using + // ArArchiveBuilder. + // FIXME(#128955) remove a couple of months after #128936 gets merged in case + // no regression is found. + if false { Box::new(LlvmArchiveBuilder { sess, additions: Vec::new() }) } else { Box::new(ArArchiveBuilder::new(sess, &LLVM_OBJECT_READER)) @@ -198,25 +200,11 @@ static LLVM_OBJECT_READER: ObjectReader = ObjectReader { get_xcoff_member_alignment: DEFAULT_OBJECT_READER.get_xcoff_member_alignment, }; -fn should_use_llvm_reader(buf: &[u8]) -> bool { - let is_bitcode = unsafe { llvm::LLVMRustIsBitcode(buf.as_ptr(), buf.len()) }; - - // COFF bigobj file, msvc LTO file or import library. See - // https://github.com/llvm/llvm-project/blob/453f27bc9/llvm/lib/BinaryFormat/Magic.cpp#L38-L51 - let is_unsupported_windows_obj_file = buf.get(0..4) == Some(b"\0\0\xFF\xFF"); - - is_bitcode || is_unsupported_windows_obj_file -} - #[deny(unsafe_op_in_unsafe_fn)] fn get_llvm_object_symbols( buf: &[u8], f: &mut dyn FnMut(&[u8]) -> io::Result<()>, ) -> io::Result { - if !should_use_llvm_reader(buf) { - return (DEFAULT_OBJECT_READER.get_symbols)(buf, f); - } - let mut state = Box::new(f); let err = unsafe { @@ -253,18 +241,10 @@ fn get_llvm_object_symbols( } fn llvm_is_64_bit_object_file(buf: &[u8]) -> bool { - if !should_use_llvm_reader(buf) { - return (DEFAULT_OBJECT_READER.is_64_bit_object_file)(buf); - } - unsafe { llvm::LLVMRustIs64BitSymbolicFile(buf.as_ptr(), buf.len()) } } fn llvm_is_ec_object_file(buf: &[u8]) -> bool { - if !should_use_llvm_reader(buf) { - return (DEFAULT_OBJECT_READER.is_ec_object_file)(buf); - } - unsafe { llvm::LLVMRustIsECObject(buf.as_ptr(), buf.len()) } } diff --git a/compiler/rustc_codegen_llvm/src/builder.rs b/compiler/rustc_codegen_llvm/src/builder.rs index 0f44c5dba5e55..cc081f29e128e 100644 --- a/compiler/rustc_codegen_llvm/src/builder.rs +++ b/compiler/rustc_codegen_llvm/src/builder.rs @@ -728,13 +728,32 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> { llvm::LLVMSetVolatile(store, llvm::True); } if flags.contains(MemFlags::NONTEMPORAL) { - // According to LLVM [1] building a nontemporal store must - // *always* point to a metadata value of the integer 1. - // - // [1]: https://llvm.org/docs/LangRef.html#store-instruction - let one = self.cx.const_i32(1); - let node = llvm::LLVMMDNodeInContext(self.cx.llcx, &one, 1); - llvm::LLVMSetMetadata(store, llvm::MD_nontemporal as c_uint, node); + // Make sure that the current target architectures supports "sane" non-temporal + // stores, i.e., non-temporal stores that are equivalent to regular stores except + // for performance. 
LLVM doesn't seem to care about this, and will happily treat + // `!nontemporal` stores as-if they were normal stores (for reordering optimizations + // etc) even on x86, despite later lowering them to MOVNT which do *not* behave like + // regular stores but require special fences. + // So we keep a list of architectures where `!nontemporal` is known to be truly just + // a hint, and use regular stores everywhere else. + // (In the future, we could alternatively ensure that an sfence gets emitted after a sequence of movnt + // before any kind of synchronizing operation. But it's not clear how to do that with LLVM.) + // For more context, see and + // . + const WELL_BEHAVED_NONTEMPORAL_ARCHS: &[&str] = + &["aarch64", "arm", "riscv32", "riscv64"]; + + let use_nontemporal = + WELL_BEHAVED_NONTEMPORAL_ARCHS.contains(&&*self.cx.tcx.sess.target.arch); + if use_nontemporal { + // According to LLVM [1] building a nontemporal store must + // *always* point to a metadata value of the integer 1. + // + // [1]: https://llvm.org/docs/LangRef.html#store-instruction + let one = self.cx.const_i32(1); + let node = llvm::LLVMMDNodeInContext(self.cx.llcx, &one, 1); + llvm::LLVMSetMetadata(store, llvm::MD_nontemporal as c_uint, node); + } } store } diff --git a/compiler/rustc_codegen_llvm/src/common.rs b/compiler/rustc_codegen_llvm/src/common.rs index 65f974c568936..a399790018439 100644 --- a/compiler/rustc_codegen_llvm/src/common.rs +++ b/compiler/rustc_codegen_llvm/src/common.rs @@ -97,11 +97,6 @@ impl<'ll> CodegenCx<'ll, '_> { unsafe { llvm::LLVMConstArray2(ty, elts.as_ptr(), len) } } - pub fn const_vector(&self, elts: &[&'ll Value]) -> &'ll Value { - let len = c_uint::try_from(elts.len()).expect("LLVMConstVector elements len overflow"); - unsafe { llvm::LLVMConstVector(elts.as_ptr(), len) } - } - pub fn const_bytes(&self, bytes: &[u8]) -> &'ll Value { bytes_in_context(self.llcx, bytes) } @@ -221,6 +216,11 @@ impl<'ll, 'tcx> ConstMethods<'tcx> for CodegenCx<'ll, 'tcx> { struct_in_context(self.llcx, elts, packed) } + fn const_vector(&self, elts: &[&'ll Value]) -> &'ll Value { + let len = c_uint::try_from(elts.len()).expect("LLVMConstVector elements len overflow"); + unsafe { llvm::LLVMConstVector(elts.as_ptr(), len) } + } + fn const_to_opt_uint(&self, v: &'ll Value) -> Option { try_as_const_integral(v).and_then(|v| unsafe { let mut i = 0u64; diff --git a/compiler/rustc_codegen_llvm/src/debuginfo/create_scope_map.rs b/compiler/rustc_codegen_llvm/src/debuginfo/create_scope_map.rs index efe616838bfe4..0a02c230cfcf8 100644 --- a/compiler/rustc_codegen_llvm/src/debuginfo/create_scope_map.rs +++ b/compiler/rustc_codegen_llvm/src/debuginfo/create_scope_map.rs @@ -88,7 +88,7 @@ fn make_mir_scope<'ll, 'tcx>( let loc = cx.lookup_debug_loc(scope_data.span.lo()); let file_metadata = file_metadata(cx, &loc.file); - let parent_dbg_scope = match scope_data.inlined { + let dbg_scope = match scope_data.inlined { Some((callee, _)) => { // FIXME(eddyb) this would be `self.monomorphize(&callee)` // if this is moved to `rustc_codegen_ssa::mir::debuginfo`. 
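The comment block above explains why `!nontemporal` metadata is now only emitted for a small allow-list of architectures: on x86 it lowers to MOVNT-style stores, which are weakly ordered and need an explicit `sfence` before other threads may rely on the data. Below is a hedged, x86_64-only sketch of that store/fence pairing using the stable `core::arch` intrinsics; it is illustrative only and not what rustc emits:

```rust
#[cfg(target_arch = "x86_64")]
fn nontemporal_fill(dst: &mut [i32]) {
    use core::arch::x86_64::{_mm_sfence, _mm_stream_si32};
    unsafe {
        for slot in dst.iter_mut() {
            // Non-temporal store: weakly ordered, bypasses the cache.
            _mm_stream_si32(slot as *mut i32, 42);
        }
        // Without this fence, later synchronizing operations do not order
        // the MOVNT stores for other threads.
        _mm_sfence();
    }
}

#[cfg(target_arch = "x86_64")]
fn main() {
    let mut buf = vec![0i32; 8];
    nontemporal_fill(&mut buf);
    assert!(buf.iter().all(|&x| x == 42));
}

#[cfg(not(target_arch = "x86_64"))]
fn main() {}
```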
@@ -102,17 +102,15 @@ fn make_mir_scope<'ll, 'tcx>( cx.dbg_scope_fn(callee, callee_fn_abi, None) }) } - None => parent_scope.dbg_scope, - }; - - let dbg_scope = unsafe { - llvm::LLVMRustDIBuilderCreateLexicalBlock( - DIB(cx), - parent_dbg_scope, - file_metadata, - loc.line, - loc.col, - ) + None => unsafe { + llvm::LLVMRustDIBuilderCreateLexicalBlock( + DIB(cx), + parent_scope.dbg_scope, + file_metadata, + loc.line, + loc.col, + ) + }, }; let inlined_at = scope_data.inlined.map(|(_, callsite_span)| { diff --git a/compiler/rustc_codegen_llvm/src/debuginfo/metadata.rs b/compiler/rustc_codegen_llvm/src/debuginfo/metadata.rs index ad63858861261..87bea22d8ddaf 100644 --- a/compiler/rustc_codegen_llvm/src/debuginfo/metadata.rs +++ b/compiler/rustc_codegen_llvm/src/debuginfo/metadata.rs @@ -456,7 +456,7 @@ pub fn type_di_node<'ll, 'tcx>(cx: &CodegenCx<'ll, 'tcx>, t: Ty<'tcx>) -> &'ll D { build_pointer_or_reference_di_node(cx, t, t.boxed_ty(), unique_type_id) } - ty::FnDef(..) | ty::FnPtr(_) => build_subroutine_type_di_node(cx, unique_type_id), + ty::FnDef(..) | ty::FnPtr(..) => build_subroutine_type_di_node(cx, unique_type_id), ty::Closure(..) => build_closure_env_di_node(cx, unique_type_id), ty::CoroutineClosure(..) => build_closure_env_di_node(cx, unique_type_id), ty::Coroutine(..) => enums::build_coroutine_di_node(cx, unique_type_id), diff --git a/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/cpp_like.rs b/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/cpp_like.rs index 13006638bb3a3..181022087f3bf 100644 --- a/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/cpp_like.rs +++ b/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/cpp_like.rs @@ -2,7 +2,7 @@ use std::borrow::Cow; use libc::c_uint; use rustc_codegen_ssa::debuginfo::type_names::compute_debuginfo_type_name; -use rustc_codegen_ssa::debuginfo::wants_c_like_enum_debuginfo; +use rustc_codegen_ssa::debuginfo::{tag_base_type, wants_c_like_enum_debuginfo}; use rustc_codegen_ssa::traits::ConstMethods; use rustc_index::IndexVec; use rustc_middle::bug; @@ -12,7 +12,7 @@ use rustc_target::abi::{Align, Endian, Size, TagEncoding, VariantIdx, Variants}; use smallvec::smallvec; use crate::common::CodegenCx; -use crate::debuginfo::metadata::enums::{tag_base_type, DiscrResult}; +use crate::debuginfo::metadata::enums::DiscrResult; use crate::debuginfo::metadata::type_map::{self, Stub, UniqueTypeId}; use crate::debuginfo::metadata::{ build_field_di_node, file_metadata, size_and_align_of, type_di_node, unknown_file_metadata, @@ -190,7 +190,7 @@ pub(super) fn build_enum_type_di_node<'ll, 'tcx>( let enum_type_and_layout = cx.layout_of(enum_type); let enum_type_name = compute_debuginfo_type_name(cx.tcx, enum_type, false); - assert!(!wants_c_like_enum_debuginfo(enum_type_and_layout)); + assert!(!wants_c_like_enum_debuginfo(cx.tcx, enum_type_and_layout)); type_map::build_type_with_children( cx, @@ -265,7 +265,7 @@ pub(super) fn build_coroutine_di_node<'ll, 'tcx>( let coroutine_type_and_layout = cx.layout_of(coroutine_type); let coroutine_type_name = compute_debuginfo_type_name(cx.tcx, coroutine_type, false); - assert!(!wants_c_like_enum_debuginfo(coroutine_type_and_layout)); + assert!(!wants_c_like_enum_debuginfo(cx.tcx, coroutine_type_and_layout)); type_map::build_type_with_children( cx, @@ -381,7 +381,7 @@ fn build_union_fields_for_enum<'ll, 'tcx>( tag_field: usize, untagged_variant_index: Option, ) -> SmallVec<&'ll DIType> { - let tag_base_type = super::tag_base_type(cx, enum_type_and_layout); + let tag_base_type = 
tag_base_type(cx.tcx, enum_type_and_layout); let variant_names_type_di_node = build_variant_names_type_di_node( cx, @@ -676,7 +676,7 @@ fn build_union_fields_for_direct_tag_coroutine<'ll, 'tcx>( let variant_range = coroutine_args.variant_range(coroutine_def_id, cx.tcx); let variant_count = (variant_range.start.as_u32()..variant_range.end.as_u32()).len(); - let tag_base_type = tag_base_type(cx, coroutine_type_and_layout); + let tag_base_type = tag_base_type(cx.tcx, coroutine_type_and_layout); let variant_names_type_di_node = build_variant_names_type_di_node( cx, @@ -803,7 +803,7 @@ fn build_union_fields_for_direct_tag_enum_or_coroutine<'ll, 'tcx>( assert_eq!( cx.size_and_align_of(enum_type_and_layout.field(cx, tag_field).ty), - cx.size_and_align_of(super::tag_base_type(cx, enum_type_and_layout)) + cx.size_and_align_of(self::tag_base_type(cx.tcx, enum_type_and_layout)) ); // ... and a field for the tag. If the tag is 128 bits wide, this will actually diff --git a/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/mod.rs b/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/mod.rs index fc3adaf068111..77cbcd86cdd10 100644 --- a/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/mod.rs +++ b/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/mod.rs @@ -1,17 +1,15 @@ use std::borrow::Cow; use rustc_codegen_ssa::debuginfo::type_names::{compute_debuginfo_type_name, cpp_like_debuginfo}; -use rustc_codegen_ssa::debuginfo::wants_c_like_enum_debuginfo; +use rustc_codegen_ssa::debuginfo::{tag_base_type, wants_c_like_enum_debuginfo}; use rustc_hir::def::CtorKind; use rustc_index::IndexSlice; use rustc_middle::bug; use rustc_middle::mir::CoroutineLayout; -use rustc_middle::ty::layout::{IntegerExt, LayoutOf, PrimitiveExt, TyAndLayout}; +use rustc_middle::ty::layout::{LayoutOf, TyAndLayout}; use rustc_middle::ty::{self, AdtDef, CoroutineArgs, CoroutineArgsExt, Ty, VariantDef}; use rustc_span::Symbol; -use rustc_target::abi::{ - FieldIdx, HasDataLayout, Integer, Primitive, TagEncoding, VariantIdx, Variants, -}; +use rustc_target::abi::{FieldIdx, TagEncoding, VariantIdx, Variants}; use super::type_map::{DINodeCreationResult, UniqueTypeId}; use super::{size_and_align_of, SmallVec}; @@ -39,7 +37,7 @@ pub(super) fn build_enum_type_di_node<'ll, 'tcx>( let enum_type_and_layout = cx.layout_of(enum_type); - if wants_c_like_enum_debuginfo(enum_type_and_layout) { + if wants_c_like_enum_debuginfo(cx.tcx, enum_type_and_layout) { return build_c_style_enum_di_node(cx, enum_adt_def, enum_type_and_layout); } @@ -74,7 +72,7 @@ fn build_c_style_enum_di_node<'ll, 'tcx>( di_node: build_enumeration_type_di_node( cx, &compute_debuginfo_type_name(cx.tcx, enum_type_and_layout.ty, false), - tag_base_type(cx, enum_type_and_layout), + tag_base_type(cx.tcx, enum_type_and_layout), enum_adt_def.discriminants(cx.tcx).map(|(variant_index, discr)| { let name = Cow::from(enum_adt_def.variant(variant_index).name.as_str()); (name, discr.val) @@ -85,48 +83,6 @@ fn build_c_style_enum_di_node<'ll, 'tcx>( } } -/// Extract the type with which we want to describe the tag of the given enum or coroutine. -fn tag_base_type<'ll, 'tcx>( - cx: &CodegenCx<'ll, 'tcx>, - enum_type_and_layout: TyAndLayout<'tcx>, -) -> Ty<'tcx> { - assert!(match enum_type_and_layout.ty.kind() { - ty::Coroutine(..) => true, - ty::Adt(adt_def, _) => adt_def.is_enum(), - _ => false, - }); - - match enum_type_and_layout.layout.variants() { - // A single-variant enum has no discriminant. - Variants::Single { .. 
} => { - bug!("tag_base_type() called for enum without tag: {:?}", enum_type_and_layout) - } - - Variants::Multiple { tag_encoding: TagEncoding::Niche { .. }, tag, .. } => { - // Niche tags are always normalized to unsized integers of the correct size. - match tag.primitive() { - Primitive::Int(t, _) => t, - Primitive::Float(f) => Integer::from_size(f.size()).unwrap(), - // FIXME(erikdesjardins): handle non-default addrspace ptr sizes - Primitive::Pointer(_) => { - // If the niche is the NULL value of a reference, then `discr_enum_ty` will be - // a RawPtr. CodeView doesn't know what to do with enums whose base type is a - // pointer so we fix this up to just be `usize`. - // DWARF might be able to deal with this but with an integer type we are on - // the safe side there too. - cx.data_layout().ptr_sized_integer() - } - } - .to_ty(cx.tcx, false) - } - - Variants::Multiple { tag_encoding: TagEncoding::Direct, tag, .. } => { - // Direct tags preserve the sign. - tag.primitive().to_ty(cx.tcx) - } - } -} - /// Build a DW_TAG_enumeration_type debuginfo node, with the given base type and variants. /// This is a helper function and does not register anything in the type map by itself. /// diff --git a/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/native.rs b/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/native.rs index d7e3b47e0bd5b..238fbad4dfd64 100644 --- a/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/native.rs +++ b/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/native.rs @@ -2,7 +2,7 @@ use std::borrow::Cow; use libc::c_uint; use rustc_codegen_ssa::debuginfo::type_names::compute_debuginfo_type_name; -use rustc_codegen_ssa::debuginfo::wants_c_like_enum_debuginfo; +use rustc_codegen_ssa::debuginfo::{tag_base_type, wants_c_like_enum_debuginfo}; use rustc_codegen_ssa::traits::ConstMethods; use rustc_middle::bug; use rustc_middle::ty::layout::{LayoutOf, TyAndLayout}; @@ -11,7 +11,6 @@ use rustc_target::abi::{Size, TagEncoding, VariantIdx, Variants}; use smallvec::smallvec; use crate::common::CodegenCx; -use crate::debuginfo::metadata::enums::tag_base_type; use crate::debuginfo::metadata::type_map::{self, Stub, StubInfo, UniqueTypeId}; use crate::debuginfo::metadata::{ file_metadata, size_and_align_of, type_di_node, unknown_file_metadata, visibility_di_flags, @@ -54,7 +53,7 @@ pub(super) fn build_enum_type_di_node<'ll, 'tcx>( let visibility_flags = visibility_di_flags(cx, enum_adt_def.did(), enum_adt_def.did()); - assert!(!wants_c_like_enum_debuginfo(enum_type_and_layout)); + assert!(!wants_c_like_enum_debuginfo(cx.tcx, enum_type_and_layout)); type_map::build_type_with_children( cx, @@ -131,7 +130,7 @@ pub(super) fn build_coroutine_di_node<'ll, 'tcx>( let containing_scope = get_namespace_for_item(cx, coroutine_def_id); let coroutine_type_and_layout = cx.layout_of(coroutine_type); - assert!(!wants_c_like_enum_debuginfo(coroutine_type_and_layout)); + assert!(!wants_c_like_enum_debuginfo(cx.tcx, coroutine_type_and_layout)); let coroutine_type_name = compute_debuginfo_type_name(cx.tcx, coroutine_type, false); @@ -321,7 +320,7 @@ fn build_discr_member_di_node<'ll, 'tcx>( &Variants::Single { .. } => None, &Variants::Multiple { tag_field, .. 
} => { - let tag_base_type = tag_base_type(cx, enum_or_coroutine_type_and_layout); + let tag_base_type = tag_base_type(cx.tcx, enum_or_coroutine_type_and_layout); let (size, align) = cx.size_and_align_of(tag_base_type); unsafe { diff --git a/compiler/rustc_codegen_llvm/src/intrinsic.rs b/compiler/rustc_codegen_llvm/src/intrinsic.rs index 57d5f6fdf503f..f5558723d11bf 100644 --- a/compiler/rustc_codegen_llvm/src/intrinsic.rs +++ b/compiler/rustc_codegen_llvm/src/intrinsic.rs @@ -1,3 +1,4 @@ +use std::assert_matches::assert_matches; use std::cmp::Ordering; use rustc_codegen_ssa::base::{compare_simd_types, wants_msvc_seh, wants_wasm_eh}; @@ -1142,7 +1143,7 @@ fn generic_simd_intrinsic<'ll, 'tcx>( if cfg!(debug_assertions) { for (ty, arg) in arg_tys.iter().zip(args) { if ty.is_simd() { - assert!(matches!(arg.val, OperandValue::Immediate(_))); + assert_matches!(arg.val, OperandValue::Immediate(_)); } } } diff --git a/compiler/rustc_codegen_llvm/src/lib.rs b/compiler/rustc_codegen_llvm/src/lib.rs index 518a86e0cb06d..43164390a1c6c 100644 --- a/compiler/rustc_codegen_llvm/src/lib.rs +++ b/compiler/rustc_codegen_llvm/src/lib.rs @@ -8,6 +8,7 @@ #![allow(internal_features)] #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] #![doc(rust_logo)] +#![feature(assert_matches)] #![feature(exact_size_is_empty)] #![feature(extern_types)] #![feature(hash_raw_entry)] diff --git a/compiler/rustc_codegen_llvm/src/llvm/ffi.rs b/compiler/rustc_codegen_llvm/src/llvm/ffi.rs index 80b13c0e1d4b9..faabbcb020d5f 100644 --- a/compiler/rustc_codegen_llvm/src/llvm/ffi.rs +++ b/compiler/rustc_codegen_llvm/src/llvm/ffi.rs @@ -1575,6 +1575,12 @@ extern "C" { pub fn LLVMRustCreateAllocSizeAttr(C: &Context, size_arg: u32) -> &Attribute; pub fn LLVMRustCreateAllocKindAttr(C: &Context, size_arg: u64) -> &Attribute; pub fn LLVMRustCreateMemoryEffectsAttr(C: &Context, effects: MemoryEffects) -> &Attribute; + pub fn LLVMRustCreateRangeAttribute( + C: &Context, + num_bits: c_uint, + lower_words: *const u64, + upper_words: *const u64, + ) -> &Attribute; // Operations on functions pub fn LLVMRustGetOrInsertFunction<'a>( diff --git a/compiler/rustc_codegen_llvm/src/llvm/mod.rs b/compiler/rustc_codegen_llvm/src/llvm/mod.rs index 72691907c0d91..d0db350a149e8 100644 --- a/compiler/rustc_codegen_llvm/src/llvm/mod.rs +++ b/compiler/rustc_codegen_llvm/src/llvm/mod.rs @@ -8,7 +8,7 @@ use std::string::FromUtf8Error; use libc::c_uint; use rustc_data_structures::small_c_str::SmallCStr; use rustc_llvm::RustString; -use rustc_target::abi::Align; +use rustc_target::abi::{Align, Size, WrappingRange}; pub use self::AtomicRmwBinOp::*; pub use self::CallConv::*; @@ -105,6 +105,21 @@ pub fn CreateAllocKindAttr(llcx: &Context, kind_arg: AllocKindFlags) -> &Attribu unsafe { LLVMRustCreateAllocKindAttr(llcx, kind_arg.bits()) } } +pub fn CreateRangeAttr(llcx: &Context, size: Size, range: WrappingRange) -> &Attribute { + let lower = range.start; + let upper = range.end.wrapping_add(1); + let lower_words = [lower as u64, (lower >> 64) as u64]; + let upper_words = [upper as u64, (upper >> 64) as u64]; + unsafe { + LLVMRustCreateRangeAttribute( + llcx, + size.bits().try_into().unwrap(), + lower_words.as_ptr(), + upper_words.as_ptr(), + ) + } +} + #[derive(Copy, Clone)] pub enum AttributePlace { ReturnValue, diff --git a/compiler/rustc_codegen_ssa/src/back/archive.rs b/compiler/rustc_codegen_ssa/src/back/archive.rs index ce55d99f506d5..8eb44d1201640 100644 --- a/compiler/rustc_codegen_ssa/src/back/archive.rs +++ 
b/compiler/rustc_codegen_ssa/src/back/archive.rs @@ -307,10 +307,15 @@ impl<'a> ArchiveBuilder for ArArchiveBuilder<'a> { let file_name = String::from_utf8(entry.name().to_vec()) .map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))?; if !skip(&file_name) { - self.entries.push(( - file_name.into_bytes(), - ArchiveEntry::FromArchive { archive_index, file_range: entry.file_range() }, - )); + if entry.is_thin() { + let member_path = archive_path.parent().unwrap().join(Path::new(&file_name)); + self.entries.push((file_name.into_bytes(), ArchiveEntry::File(member_path))); + } else { + self.entries.push(( + file_name.into_bytes(), + ArchiveEntry::FromArchive { archive_index, file_range: entry.file_range() }, + )); + } } } diff --git a/compiler/rustc_codegen_ssa/src/back/write.rs b/compiler/rustc_codegen_ssa/src/back/write.rs index bea12747a5199..70b45a852ca0e 100644 --- a/compiler/rustc_codegen_ssa/src/back/write.rs +++ b/compiler/rustc_codegen_ssa/src/back/write.rs @@ -1,4 +1,5 @@ use std::any::Any; +use std::assert_matches::assert_matches; use std::marker::PhantomData; use std::path::{Path, PathBuf}; use std::sync::mpsc::{channel, Receiver, Sender}; @@ -1963,7 +1964,7 @@ impl SharedEmitterMain { sess.dcx().abort_if_errors(); } Ok(SharedEmitterMessage::InlineAsmError(cookie, msg, level, source)) => { - assert!(matches!(level, Level::Error | Level::Warning | Level::Note)); + assert_matches!(level, Level::Error | Level::Warning | Level::Note); let msg = msg.strip_prefix("error: ").unwrap_or(&msg).to_string(); let mut err = Diag::<()>::new(sess.dcx(), level, msg); diff --git a/compiler/rustc_codegen_ssa/src/debuginfo/mod.rs b/compiler/rustc_codegen_ssa/src/debuginfo/mod.rs index 1eaf593a6d77b..0918660e6be88 100644 --- a/compiler/rustc_codegen_ssa/src/debuginfo/mod.rs +++ b/compiler/rustc_codegen_ssa/src/debuginfo/mod.rs @@ -1,6 +1,7 @@ -use rustc_middle::ty::layout::TyAndLayout; -use rustc_middle::ty::{self}; -use rustc_target::abi::Size; +use rustc_middle::bug; +use rustc_middle::ty::layout::{IntegerExt, PrimitiveExt, TyAndLayout}; +use rustc_middle::ty::{self, Ty, TyCtxt}; +use rustc_target::abi::{Integer, Primitive, Size, TagEncoding, Variants}; // FIXME(eddyb) find a place for this (or a way to replace it). pub mod type_names; @@ -11,13 +12,25 @@ pub mod type_names; /// NOTE: This is somewhat inconsistent right now: For empty enums and enums with a single /// fieldless variant, we generate DW_TAG_struct_type, although a /// DW_TAG_enumeration_type would be a better fit. -pub fn wants_c_like_enum_debuginfo(enum_type_and_layout: TyAndLayout<'_>) -> bool { +pub fn wants_c_like_enum_debuginfo<'tcx>( + tcx: TyCtxt<'tcx>, + enum_type_and_layout: TyAndLayout<'tcx>, +) -> bool { match enum_type_and_layout.ty.kind() { ty::Adt(adt_def, _) => { if !adt_def.is_enum() { return false; } + if type_names::cpp_like_debuginfo(tcx) + && tag_base_type_opt(tcx, enum_type_and_layout) + .map(|ty| ty.primitive_size(tcx).bits()) + == Some(128) + { + // C++-like debuginfo never uses the C-like representation for 128-bit enums. + return false; + } + match adt_def.variants().len() { 0 => false, 1 => { @@ -33,3 +46,51 @@ pub fn wants_c_like_enum_debuginfo(enum_type_and_layout: TyAndLayout<'_>) -> boo _ => false, } } + +/// Extract the type with which we want to describe the tag of the given enum or coroutine. 
+pub fn tag_base_type<'tcx>(tcx: TyCtxt<'tcx>, enum_type_and_layout: TyAndLayout<'tcx>) -> Ty<'tcx> { + tag_base_type_opt(tcx, enum_type_and_layout).unwrap_or_else(|| { + bug!("tag_base_type() called for enum without tag: {:?}", enum_type_and_layout) + }) +} + +pub fn tag_base_type_opt<'tcx>( + tcx: TyCtxt<'tcx>, + enum_type_and_layout: TyAndLayout<'tcx>, +) -> Option> { + assert!(match enum_type_and_layout.ty.kind() { + ty::Coroutine(..) => true, + ty::Adt(adt_def, _) => adt_def.is_enum(), + _ => false, + }); + + match enum_type_and_layout.layout.variants() { + // A single-variant enum has no discriminant. + Variants::Single { .. } => None, + + Variants::Multiple { tag_encoding: TagEncoding::Niche { .. }, tag, .. } => { + // Niche tags are always normalized to unsized integers of the correct size. + Some( + match tag.primitive() { + Primitive::Int(t, _) => t, + Primitive::Float(f) => Integer::from_size(f.size()).unwrap(), + // FIXME(erikdesjardins): handle non-default addrspace ptr sizes + Primitive::Pointer(_) => { + // If the niche is the NULL value of a reference, then `discr_enum_ty` will be + // a RawPtr. CodeView doesn't know what to do with enums whose base type is a + // pointer so we fix this up to just be `usize`. + // DWARF might be able to deal with this but with an integer type we are on + // the safe side there too. + tcx.data_layout.ptr_sized_integer() + } + } + .to_ty(tcx, false), + ) + } + + Variants::Multiple { tag_encoding: TagEncoding::Direct, tag, .. } => { + // Direct tags preserve the sign. + Some(tag.primitive().to_ty(tcx)) + } + } +} diff --git a/compiler/rustc_codegen_ssa/src/debuginfo/type_names.rs b/compiler/rustc_codegen_ssa/src/debuginfo/type_names.rs index 2755803892751..f0bc4354f9a75 100644 --- a/compiler/rustc_codegen_ssa/src/debuginfo/type_names.rs +++ b/compiler/rustc_codegen_ssa/src/debuginfo/type_names.rs @@ -85,7 +85,7 @@ fn push_debuginfo_type_name<'tcx>( let layout_for_cpp_like_fallback = if cpp_like_debuginfo && def.is_enum() { match tcx.layout_of(ParamEnv::reveal_all().and(t)) { Ok(layout) => { - if !wants_c_like_enum_debuginfo(layout) { + if !wants_c_like_enum_debuginfo(tcx, layout) { Some(layout) } else { // This is a C-like enum so we don't want to use the fallback encoding @@ -106,6 +106,7 @@ fn push_debuginfo_type_name<'tcx>( if let Some(ty_and_layout) = layout_for_cpp_like_fallback { msvc_enum_fallback( + tcx, ty_and_layout, &|output, visited| { push_item_name(tcx, def.did(), true, output); @@ -331,7 +332,7 @@ fn push_debuginfo_type_name<'tcx>( output.push(')'); } } - ty::FnDef(..) | ty::FnPtr(_) => { + ty::FnDef(..) | ty::FnPtr(..) => { // We've encountered a weird 'recursive type' // Currently, the only way to generate such a type // is by using 'impl trait': @@ -421,6 +422,7 @@ fn push_debuginfo_type_name<'tcx>( if cpp_like_debuginfo && t.is_coroutine() { let ty_and_layout = tcx.layout_of(ParamEnv::reveal_all().and(t)).unwrap(); msvc_enum_fallback( + tcx, ty_and_layout, &|output, visited| { push_closure_or_coroutine_name(tcx, def_id, args, true, output, visited); @@ -455,12 +457,13 @@ fn push_debuginfo_type_name<'tcx>( // debugger. For more information, look in // rustc_codegen_llvm/src/debuginfo/metadata/enums/cpp_like.rs. 
fn msvc_enum_fallback<'tcx>( + tcx: TyCtxt<'tcx>, ty_and_layout: TyAndLayout<'tcx>, push_inner: &dyn Fn(/*output*/ &mut String, /*visited*/ &mut FxHashSet>), output: &mut String, visited: &mut FxHashSet>, ) { - assert!(!wants_c_like_enum_debuginfo(ty_and_layout)); + assert!(!wants_c_like_enum_debuginfo(tcx, ty_and_layout)); output.push_str("enum2$<"); push_inner(output, visited); push_close_angle_bracket(true, output); diff --git a/compiler/rustc_codegen_ssa/src/lib.rs b/compiler/rustc_codegen_ssa/src/lib.rs index 1b029660433bd..cb6d9d6f66e78 100644 --- a/compiler/rustc_codegen_ssa/src/lib.rs +++ b/compiler/rustc_codegen_ssa/src/lib.rs @@ -4,6 +4,7 @@ #![allow(rustc::untranslatable_diagnostic)] #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] #![doc(rust_logo)] +#![feature(assert_matches)] #![feature(box_patterns)] #![feature(if_let_guard)] #![feature(let_chains)] diff --git a/compiler/rustc_codegen_ssa/src/mir/block.rs b/compiler/rustc_codegen_ssa/src/mir/block.rs index bc3076528da24..817e2ca72ec12 100644 --- a/compiler/rustc_codegen_ssa/src/mir/block.rs +++ b/compiler/rustc_codegen_ssa/src/mir/block.rs @@ -846,7 +846,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { ), None, ), - ty::FnPtr(_) => (None, Some(callee.immediate())), + ty::FnPtr(..) => (None, Some(callee.immediate())), _ => bug!("{} is not callable", callee.layout.ty), }; @@ -923,8 +923,12 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { // third argument must be constant. This is // checked by the type-checker. if i == 2 && intrinsic.name == sym::simd_shuffle { + // FIXME: the simd_shuffle argument is actually an array, + // not a vector, so we need this special hack to make sure + // it is passed as an immediate. We should pass the + // shuffle indices as a vector instead to avoid this hack. if let mir::Operand::Constant(constant) = &arg.node { - let (llval, ty) = self.simd_shuffle_indices(bx, constant); + let (llval, ty) = self.immediate_const_vector(bx, constant); return OperandRef { val: Immediate(llval), layout: bx.layout_of(ty), diff --git a/compiler/rustc_codegen_ssa/src/mir/constant.rs b/compiler/rustc_codegen_ssa/src/mir/constant.rs index f4d974f70362e..0aa85b8203825 100644 --- a/compiler/rustc_codegen_ssa/src/mir/constant.rs +++ b/compiler/rustc_codegen_ssa/src/mir/constant.rs @@ -1,6 +1,6 @@ use rustc_middle::mir::interpret::ErrorHandled; use rustc_middle::ty::layout::HasTyCtxt; -use rustc_middle::ty::{self, Ty}; +use rustc_middle::ty::{self, Ty, ValTree}; use rustc_middle::{bug, mir, span_bug}; use rustc_target::abi::Abi; @@ -28,7 +28,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { .expect("erroneous constant missed by mono item collection") } - /// This is a convenience helper for `simd_shuffle_indices`. It has the precondition + /// This is a convenience helper for `immediate_const_vector`. It has the precondition /// that the given `constant` is an `Const::Unevaluated` and must be convertible to /// a `ValTree`. If you want a more general version of this, talk to `wg-const-eval` on zulip. 
/// @@ -59,23 +59,42 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { self.cx.tcx().const_eval_resolve_for_typeck(ty::ParamEnv::reveal_all(), uv, constant.span) } - /// process constant containing SIMD shuffle indices - pub fn simd_shuffle_indices( + /// process constant containing SIMD shuffle indices & constant vectors + pub fn immediate_const_vector( &mut self, bx: &Bx, constant: &mir::ConstOperand<'tcx>, ) -> (Bx::Value, Ty<'tcx>) { let ty = self.monomorphize(constant.ty()); + let ty_is_simd = ty.is_simd(); + // FIXME: ideally we'd assert that this is a SIMD type, but simd_shuffle + // in its current form relies on a regular array being passed as an + // immediate argument. This hack can be removed once that is fixed. + let field_ty = if ty_is_simd { + ty.simd_size_and_type(bx.tcx()).1 + } else { + ty.builtin_index().unwrap() + }; + let val = self .eval_unevaluated_mir_constant_to_valtree(constant) .ok() .map(|x| x.ok()) .flatten() .map(|val| { - let field_ty = ty.builtin_index().unwrap(); - let values: Vec<_> = val - .unwrap_branch() - .iter() + // Depending on whether this is a SIMD type with an array field + // or a type with many fields (one for each elements), the valtree + // is either a single branch with N children, or a root node + // with exactly one child which then in turn has many children. + // So we look at the first child to determine whether it is a + // leaf or whether we have to go one more layer down. + let branch_or_leaf = val.unwrap_branch(); + let first = branch_or_leaf.get(0).unwrap(); + let field_iter = match first { + ValTree::Branch(_) => first.unwrap_branch().iter(), + ValTree::Leaf(_) => branch_or_leaf.iter(), + }; + let values: Vec<_> = field_iter .map(|field| { if let Some(prim) = field.try_to_scalar() { let layout = bx.layout_of(field_ty); @@ -84,11 +103,11 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { }; bx.scalar_to_backend(prim, scalar, bx.immediate_backend_type(layout)) } else { - bug!("simd shuffle field {:?}", field) + bug!("field is not a scalar {:?}", field) } }) .collect(); - bx.const_struct(&values, false) + if ty_is_simd { bx.const_vector(&values) } else { bx.const_struct(&values, false) } }) .unwrap_or_else(|| { bx.tcx().dcx().emit_err(errors::ShuffleIndicesEvaluation { span: constant.span }); diff --git a/compiler/rustc_codegen_ssa/src/mir/debuginfo.rs b/compiler/rustc_codegen_ssa/src/mir/debuginfo.rs index 0e495973a01d9..75692540c0345 100644 --- a/compiler/rustc_codegen_ssa/src/mir/debuginfo.rs +++ b/compiler/rustc_codegen_ssa/src/mir/debuginfo.rs @@ -1,3 +1,4 @@ +use std::collections::hash_map::Entry; use std::ops::Range; use rustc_data_structures::fx::FxHashMap; @@ -447,6 +448,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { } let mut per_local = IndexVec::from_elem(vec![], &self.mir.local_decls); + let mut params_seen: FxHashMap<_, Bx::DIVariable> = Default::default(); for var in &self.mir.var_debug_info { let dbg_scope_and_span = if full_debug_info { self.adjusted_span_and_dbg_scope(var.source_info) @@ -491,7 +493,18 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { VariableKind::LocalVariable }; - self.cx.create_dbg_var(var.name, var_ty, dbg_scope, var_kind, span) + if let VariableKind::ArgumentVariable(arg_index) = var_kind { + match params_seen.entry((dbg_scope, arg_index)) { + Entry::Occupied(o) => o.get().clone(), + Entry::Vacant(v) => v + .insert( + self.cx.create_dbg_var(var.name, var_ty, dbg_scope, var_kind, span), + ) + 
.clone(), + } + } else { + self.cx.create_dbg_var(var.name, var_ty, dbg_scope, var_kind, span) + } }); let fragment = if let Some(ref fragment) = var.composite { diff --git a/compiler/rustc_codegen_ssa/src/mir/mod.rs b/compiler/rustc_codegen_ssa/src/mir/mod.rs index 4ce07269cd2c1..de94d87bcea7a 100644 --- a/compiler/rustc_codegen_ssa/src/mir/mod.rs +++ b/compiler/rustc_codegen_ssa/src/mir/mod.rs @@ -106,7 +106,7 @@ pub struct FunctionCx<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> { locals: locals::Locals<'tcx, Bx::Value>, /// All `VarDebugInfo` from the MIR body, partitioned by `Local`. - /// This is `None` if no var`#[non_exhaustive]`iable debuginfo/names are needed. + /// This is `None` if no variable debuginfo/names are needed. per_local_var_debug_info: Option>>>, diff --git a/compiler/rustc_codegen_ssa/src/mir/operand.rs b/compiler/rustc_codegen_ssa/src/mir/operand.rs index 2bc2d0f70bfaf..1891de8c0eb37 100644 --- a/compiler/rustc_codegen_ssa/src/mir/operand.rs +++ b/compiler/rustc_codegen_ssa/src/mir/operand.rs @@ -1,3 +1,4 @@ +use std::assert_matches::assert_matches; use std::fmt; use arrayvec::ArrayVec; @@ -389,7 +390,7 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> { } // Newtype vector of array, e.g. #[repr(simd)] struct S([i32; 4]); (OperandValue::Immediate(llval), Abi::Aggregate { sized: true }) => { - assert!(matches!(self.layout.abi, Abi::Vector { .. })); + assert_matches!(self.layout.abi, Abi::Vector { .. }); let llfield_ty = bx.cx().backend_type(field); @@ -635,7 +636,24 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { self.codegen_consume(bx, place.as_ref()) } - mir::Operand::Constant(ref constant) => self.eval_mir_constant_to_operand(bx, constant), + mir::Operand::Constant(ref constant) => { + let constant_ty = self.monomorphize(constant.ty()); + // Most SIMD vector constants should be passed as immediates. + // (In particular, some intrinsics really rely on this.) + if constant_ty.is_simd() { + // However, some SIMD types do not actually use the vector ABI + // (in particular, packed SIMD types do not). Ensure we exclude those. + let layout = bx.layout_of(constant_ty); + if let Abi::Vector { .. 
} = layout.abi { + let (llval, ty) = self.immediate_const_vector(bx, constant); + return OperandRef { + val: OperandValue::Immediate(llval), + layout: bx.layout_of(ty), + }; + } + } + self.eval_mir_constant_to_operand(bx, constant) + } } } } diff --git a/compiler/rustc_codegen_ssa/src/mir/rvalue.rs b/compiler/rustc_codegen_ssa/src/mir/rvalue.rs index d91a118bc71a3..3c2c29ac7f7d2 100644 --- a/compiler/rustc_codegen_ssa/src/mir/rvalue.rs +++ b/compiler/rustc_codegen_ssa/src/mir/rvalue.rs @@ -1,3 +1,5 @@ +use std::assert_matches::assert_matches; + use arrayvec::ArrayVec; use rustc_middle::ty::adjustment::PointerCoercion; use rustc_middle::ty::layout::{HasTyCtxt, LayoutOf, TyAndLayout}; @@ -220,7 +222,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { match operand.val { OperandValue::Ref(source_place_val) => { assert_eq!(source_place_val.llextra, None); - assert!(matches!(operand_kind, OperandValueKind::Ref)); + assert_matches!(operand_kind, OperandValueKind::Ref); Some(bx.load_operand(source_place_val.with_type(cast)).val) } OperandValue::ZeroSized => { diff --git a/compiler/rustc_codegen_ssa/src/traits/backend.rs b/compiler/rustc_codegen_ssa/src/traits/backend.rs index 81e96413a9f34..c5e2d55be833d 100644 --- a/compiler/rustc_codegen_ssa/src/traits/backend.rs +++ b/compiler/rustc_codegen_ssa/src/traits/backend.rs @@ -1,4 +1,5 @@ use std::any::Any; +use std::hash::Hash; use rustc_ast::expand::allocator::AllocatorKind; use rustc_data_structures::fx::FxIndexMap; @@ -30,7 +31,7 @@ pub trait BackendTypes { // FIXME(eddyb) find a common convention for all of the debuginfo-related // names (choose between `Dbg`, `Debug`, `DebugInfo`, `DI` etc.). - type DIScope: Copy; + type DIScope: Copy + Hash + PartialEq + Eq; type DILocation: Copy; type DIVariable: Copy; } diff --git a/compiler/rustc_codegen_ssa/src/traits/builder.rs b/compiler/rustc_codegen_ssa/src/traits/builder.rs index 2b802240e03b9..6cf84a012f061 100644 --- a/compiler/rustc_codegen_ssa/src/traits/builder.rs +++ b/compiler/rustc_codegen_ssa/src/traits/builder.rs @@ -1,3 +1,5 @@ +use std::assert_matches::assert_matches; + use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrs; use rustc_middle::ty::layout::{HasParamEnv, TyAndLayout}; use rustc_middle::ty::{Instance, Ty}; @@ -254,10 +256,10 @@ pub trait BuilderMethods<'a, 'tcx>: } else { (in_ty, dest_ty) }; - assert!(matches!( + assert_matches!( self.cx().type_kind(float_ty), TypeKind::Half | TypeKind::Float | TypeKind::Double | TypeKind::FP128 - )); + ); assert_eq!(self.cx().type_kind(int_ty), TypeKind::Integer); if let Some(false) = self.cx().sess().opts.unstable_opts.saturating_float_casts { diff --git a/compiler/rustc_codegen_ssa/src/traits/consts.rs b/compiler/rustc_codegen_ssa/src/traits/consts.rs index d93b3e06ca6f1..c15f1fa8e564f 100644 --- a/compiler/rustc_codegen_ssa/src/traits/consts.rs +++ b/compiler/rustc_codegen_ssa/src/traits/consts.rs @@ -30,6 +30,7 @@ pub trait ConstMethods<'tcx>: BackendTypes { fn const_str(&self, s: &str) -> (Self::Value, Self::Value); fn const_struct(&self, elts: &[Self::Value], packed: bool) -> Self::Value; + fn const_vector(&self, elts: &[Self::Value]) -> Self::Value; fn const_to_opt_uint(&self, v: Self::Value) -> Option; fn const_to_opt_u128(&self, v: Self::Value, sign_ext: bool) -> Option; diff --git a/compiler/rustc_const_eval/messages.ftl b/compiler/rustc_const_eval/messages.ftl index c64c73b232340..1442f1832b9b5 100644 --- a/compiler/rustc_const_eval/messages.ftl +++ b/compiler/rustc_const_eval/messages.ftl @@ -41,6 
+41,8 @@ const_eval_const_context = {$kind -> *[other] {""} } +const_eval_const_stable = const-stable functions can only call other const-stable functions + const_eval_copy_nonoverlapping_overlapping = `copy_nonoverlapping` called on overlapping ranges @@ -201,6 +203,9 @@ const_eval_invalid_vtable_pointer = const_eval_invalid_vtable_trait = using vtable for trait `{$vtable_trait}` but trait `{$expected_trait}` was expected +const_eval_lazy_lock = + consider wrapping this expression in `std::sync::LazyLock::new(|| ...)` + const_eval_live_drop = destructor of `{$dropped_ty}` cannot be evaluated at compile-time .label = the destructor for this type cannot be evaluated in {const_eval_const_context}s diff --git a/compiler/rustc_const_eval/src/check_consts/check.rs b/compiler/rustc_const_eval/src/check_consts/check.rs index 3ded81b90ffc1..32a9247bcc70a 100644 --- a/compiler/rustc_const_eval/src/check_consts/check.rs +++ b/compiler/rustc_const_eval/src/check_consts/check.rs @@ -1,5 +1,6 @@ //! The `Visitor` responsible for actually checking a `mir::Body` for invalid operations. +use std::assert_matches::assert_matches; use std::mem; use std::ops::Deref; @@ -170,7 +171,7 @@ struct LocalReturnTyVisitor<'ck, 'mir, 'tcx> { impl<'ck, 'mir, 'tcx> TypeVisitor> for LocalReturnTyVisitor<'ck, 'mir, 'tcx> { fn visit_ty(&mut self, t: Ty<'tcx>) { match t.kind() { - ty::FnPtr(_) => {} + ty::FnPtr(..) => {} ty::Ref(_, _, hir::Mutability::Mut) => { self.checker.check_op(ops::mut_ref::MutRef(self.kind)); t.super_visit_with(self) @@ -590,7 +591,7 @@ impl<'tcx> Visitor<'tcx> for Checker<'_, 'tcx> { if is_int_bool_or_char(lhs_ty) && is_int_bool_or_char(rhs_ty) { // Int, bool, and char operations are fine. } else if lhs_ty.is_fn_ptr() || lhs_ty.is_unsafe_ptr() { - assert!(matches!( + assert_matches!( op, BinOp::Eq | BinOp::Ne @@ -599,7 +600,7 @@ impl<'tcx> Visitor<'tcx> for Checker<'_, 'tcx> { | BinOp::Ge | BinOp::Gt | BinOp::Offset - )); + ); self.check_op(ops::RawPtrComparison); } else if lhs_ty.is_floating_point() || rhs_ty.is_floating_point() { @@ -725,7 +726,7 @@ impl<'tcx> Visitor<'tcx> for Checker<'_, 'tcx> { let (mut callee, mut fn_args) = match *fn_ty.kind() { ty::FnDef(def_id, fn_args) => (def_id, fn_args), - ty::FnPtr(_) => { + ty::FnPtr(..) 
=> { self.check_op(ops::FnCallIndirect); return; } diff --git a/compiler/rustc_const_eval/src/check_consts/ops.rs b/compiler/rustc_const_eval/src/check_consts/ops.rs index f47a2ec8f7598..c6361710ac9ca 100644 --- a/compiler/rustc_const_eval/src/check_consts/ops.rs +++ b/compiler/rustc_const_eval/src/check_consts/ops.rs @@ -23,7 +23,7 @@ use rustc_trait_selection::traits::SelectionContext; use tracing::debug; use super::ConstCx; -use crate::errors; +use crate::{errors, fluent_generated}; #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum Status { @@ -310,7 +310,7 @@ impl<'tcx> NonConstOp<'tcx> for FnCallNonConst<'tcx> { } if let ConstContext::Static(_) = ccx.const_kind() { - err.note("consider wrapping this expression in `std::sync::LazyLock::new(|| ...)`"); + err.note(fluent_generated::const_eval_lazy_lock); } err @@ -334,7 +334,7 @@ impl<'tcx> NonConstOp<'tcx> for FnCallUnstable { // FIXME: make this translatable #[allow(rustc::untranslatable_diagnostic)] if ccx.is_const_stable_const_fn() { - err.help("const-stable functions can only call other const-stable functions"); + err.help(fluent_generated::const_eval_const_stable); } else if ccx.tcx.sess.is_nightly_build() { if let Some(feature) = feature { err.help(format!("add `#![feature({feature})]` to the crate attributes to enable")); @@ -605,8 +605,6 @@ impl<'tcx> NonConstOp<'tcx> for StaticAccess { span, format!("referencing statics in {}s is unstable", ccx.const_kind(),), ); - // FIXME: make this translatable - #[allow(rustc::untranslatable_diagnostic)] err .note("`static` and `const` variables can refer to other `const` variables. A `const` variable, however, cannot refer to a `static` variable.") .help("to fix this, the value can be extracted to a `const` and then used."); diff --git a/compiler/rustc_const_eval/src/const_eval/eval_queries.rs b/compiler/rustc_const_eval/src/const_eval/eval_queries.rs index ff27e4000163a..96b3ec6f18728 100644 --- a/compiler/rustc_const_eval/src/const_eval/eval_queries.rs +++ b/compiler/rustc_const_eval/src/const_eval/eval_queries.rs @@ -226,7 +226,7 @@ pub(super) fn op_to_const<'tcx>( let pointee_ty = imm.layout.ty.builtin_deref(false).unwrap(); // `false` = no raw ptrs debug_assert!( matches!( - ecx.tcx.struct_tail_without_normalization(pointee_ty).kind(), + ecx.tcx.struct_tail_for_codegen(pointee_ty, ecx.param_env).kind(), ty::Str | ty::Slice(..), ), "`ConstValue::Slice` is for slice-tailed types only, but got {}", diff --git a/compiler/rustc_const_eval/src/const_eval/valtrees.rs b/compiler/rustc_const_eval/src/const_eval/valtrees.rs index 8227c04594883..c96296eddb844 100644 --- a/compiler/rustc_const_eval/src/const_eval/valtrees.rs +++ b/compiler/rustc_const_eval/src/const_eval/valtrees.rs @@ -132,7 +132,7 @@ fn const_to_valtree_inner<'tcx>( // Technically we could allow function pointers (represented as `ty::Instance`), but this is not guaranteed to // agree with runtime equality tests. - ty::FnPtr(_) => Err(ValTreeCreationError::NonSupportedType(ty)), + ty::FnPtr(..) => Err(ValTreeCreationError::NonSupportedType(ty)), ty::Ref(_, _, _) => { let derefd_place = ecx.deref_pointer(place)?; @@ -195,7 +195,7 @@ fn reconstruct_place_meta<'tcx>( let mut last_valtree = valtree; // Traverse the type, and update `last_valtree` as we go. - let tail = tcx.struct_tail_with_normalize( + let tail = tcx.struct_tail_raw( layout.ty, |ty| ty, || { @@ -353,7 +353,7 @@ pub fn valtree_to_const_value<'tcx>( | ty::CoroutineClosure(..) | ty::Coroutine(..) | ty::CoroutineWitness(..) - | ty::FnPtr(_) + | ty::FnPtr(..) 
| ty::Str | ty::Slice(_) | ty::Dynamic(..) => bug!("no ValTree should have been created for type {:?}", ty.kind()), diff --git a/compiler/rustc_const_eval/src/interpret/call.rs b/compiler/rustc_const_eval/src/interpret/call.rs index fdbdfc7e1b8b3..917a2fa7c6dd4 100644 --- a/compiler/rustc_const_eval/src/interpret/call.rs +++ b/compiler/rustc_const_eval/src/interpret/call.rs @@ -1,5 +1,6 @@ //! Manages calling a concrete function (with known MIR body) with argument passing, //! and returning the return value to the caller. +use std::assert_matches::assert_matches; use std::borrow::Cow; use either::{Left, Right}; @@ -557,7 +558,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { unwind, )? { assert!(!self.tcx.intrinsic(fallback.def_id()).unwrap().must_be_overridden); - assert!(matches!(fallback.def, ty::InstanceKind::Item(_))); + assert_matches!(fallback.def, ty::InstanceKind::Item(_)); return self.init_fn_call( FnVal::Instance(fallback), (caller_abi, caller_fn_abi), diff --git a/compiler/rustc_const_eval/src/interpret/cast.rs b/compiler/rustc_const_eval/src/interpret/cast.rs index c3d7f2234ed95..4901e4b2a4158 100644 --- a/compiler/rustc_const_eval/src/interpret/cast.rs +++ b/compiler/rustc_const_eval/src/interpret/cast.rs @@ -97,7 +97,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { CastKind::PointerCoercion(PointerCoercion::UnsafeFnPointer) => { let src = self.read_immediate(src)?; match cast_ty.kind() { - ty::FnPtr(_) => { + ty::FnPtr(..) => { // No change to value self.write_immediate(*src, dest)?; } @@ -230,7 +230,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { src: &ImmTy<'tcx, M::Provenance>, cast_to: TyAndLayout<'tcx>, ) -> InterpResult<'tcx, ImmTy<'tcx, M::Provenance>> { - assert_matches!(src.layout.ty.kind(), ty::RawPtr(_, _) | ty::FnPtr(_)); + assert_matches!(src.layout.ty.kind(), ty::RawPtr(_, _) | ty::FnPtr(..)); assert!(cast_to.ty.is_integral()); let scalar = src.to_scalar(); @@ -400,6 +400,12 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { } (ty::Dynamic(data_a, _, ty::Dyn), ty::Dynamic(data_b, _, ty::Dyn)) => { let val = self.read_immediate(src)?; + // MIR building generates odd NOP casts, prevent them from causing unexpected trouble. + // See . + // FIXME: ideally we wouldn't have to do this. + if data_a == data_b { + return self.write_immediate(*val, dest); + } // Take apart the old pointer, and find the dynamic type. let (old_data, old_vptr) = val.to_scalar_pair(); let old_data = old_data.to_pointer(self)?; diff --git a/compiler/rustc_const_eval/src/interpret/intrinsics.rs b/compiler/rustc_const_eval/src/interpret/intrinsics.rs index 9210ec4e16fd0..aef39b9af2f1f 100644 --- a/compiler/rustc_const_eval/src/interpret/intrinsics.rs +++ b/compiler/rustc_const_eval/src/interpret/intrinsics.rs @@ -2,6 +2,8 @@ //! looking at their MIR. Intrinsics/functions supported here are shared by CTFE //! and miri. +use std::assert_matches::assert_matches; + use rustc_hir::def_id::DefId; use rustc_middle::mir::{self, BinOp, ConstValue, NonDivergingIntrinsic}; use rustc_middle::ty::layout::{LayoutOf as _, TyAndLayout, ValidityRequirement}; @@ -79,7 +81,7 @@ pub(crate) fn eval_nullary_intrinsic<'tcx>( | ty::RawPtr(_, _) | ty::Ref(_, _, _) | ty::FnDef(_, _) - | ty::FnPtr(_) + | ty::FnPtr(..) 
| ty::Dynamic(_, _, _) | ty::Closure(_, _) | ty::CoroutineClosure(_, _) @@ -510,7 +512,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { dest: &MPlaceTy<'tcx, M::Provenance>, ) -> InterpResult<'tcx> { assert_eq!(a.layout.ty, b.layout.ty); - assert!(matches!(a.layout.ty.kind(), ty::Int(..) | ty::Uint(..))); + assert_matches!(a.layout.ty.kind(), ty::Int(..) | ty::Uint(..)); // Performs an exact division, resulting in undefined behavior where // `x % y != 0` or `y == 0` or `x == T::MIN && y == -1`. @@ -536,8 +538,8 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { r: &ImmTy<'tcx, M::Provenance>, ) -> InterpResult<'tcx, Scalar> { assert_eq!(l.layout.ty, r.layout.ty); - assert!(matches!(l.layout.ty.kind(), ty::Int(..) | ty::Uint(..))); - assert!(matches!(mir_op, BinOp::Add | BinOp::Sub)); + assert_matches!(l.layout.ty.kind(), ty::Int(..) | ty::Uint(..)); + assert_matches!(mir_op, BinOp::Add | BinOp::Sub); let (val, overflowed) = self.binary_op(mir_op.wrapping_to_overflowing().unwrap(), l, r)?.to_scalar_pair(); diff --git a/compiler/rustc_const_eval/src/interpret/machine.rs b/compiler/rustc_const_eval/src/interpret/machine.rs index 7af4e0c285b0b..761ab81e22842 100644 --- a/compiler/rustc_const_eval/src/interpret/machine.rs +++ b/compiler/rustc_const_eval/src/interpret/machine.rs @@ -19,7 +19,7 @@ use rustc_target::spec::abi::Abi as CallAbi; use super::{ throw_unsup, throw_unsup_format, AllocBytes, AllocId, AllocKind, AllocRange, Allocation, ConstAllocation, CtfeProvenance, FnArg, Frame, ImmTy, InterpCx, InterpResult, MPlaceTy, - MemoryKind, Misalignment, OpTy, PlaceTy, Pointer, Provenance, + MemoryKind, Misalignment, OpTy, PlaceTy, Pointer, Provenance, CTFE_ALLOC_SALT, }; /// Data returned by [`Machine::after_stack_pop`], and consumed by @@ -575,6 +575,14 @@ pub trait Machine<'tcx>: Sized { { eval(ecx, val, span, layout) } + + /// Returns the salt to be used for a deduplicated global alloation. + /// If the allocation is for a function, the instance is provided as well + /// (this lets Miri ensure unique addresses for some functions). + fn get_global_alloc_salt( + ecx: &InterpCx<'tcx, Self>, + instance: Option>, + ) -> usize; } /// A lot of the flexibility above is just needed for `Miri`, but all "compile-time" machines @@ -677,4 +685,12 @@ pub macro compile_time_machine(<$tcx: lifetime>) { let (prov, offset) = ptr.into_parts(); Some((prov.alloc_id(), offset, prov.immutable())) } + + #[inline(always)] + fn get_global_alloc_salt( + _ecx: &InterpCx<$tcx, Self>, + _instance: Option>, + ) -> usize { + CTFE_ALLOC_SALT + } } diff --git a/compiler/rustc_const_eval/src/interpret/memory.rs b/compiler/rustc_const_eval/src/interpret/memory.rs index 2e5d0ae773654..910aec9b8e1de 100644 --- a/compiler/rustc_const_eval/src/interpret/memory.rs +++ b/compiler/rustc_const_eval/src/interpret/memory.rs @@ -195,7 +195,10 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { pub fn fn_ptr(&mut self, fn_val: FnVal<'tcx, M::ExtraFnVal>) -> Pointer { let id = match fn_val { - FnVal::Instance(instance) => self.tcx.reserve_and_set_fn_alloc(instance), + FnVal::Instance(instance) => { + let salt = M::get_global_alloc_salt(self, Some(instance)); + self.tcx.reserve_and_set_fn_alloc(instance, salt) + } FnVal::Other(extra) => { // FIXME(RalfJung): Should we have a cache here? 
let id = self.tcx.reserve_alloc_id(); diff --git a/compiler/rustc_const_eval/src/interpret/operand.rs b/compiler/rustc_const_eval/src/interpret/operand.rs index 4ced009ab39b6..ad87d6953d3f2 100644 --- a/compiler/rustc_const_eval/src/interpret/operand.rs +++ b/compiler/rustc_const_eval/src/interpret/operand.rs @@ -342,7 +342,7 @@ impl<'tcx, Prov: Provenance> ImmTy<'tcx, Prov> { } // extract fields from types with `ScalarPair` ABI (Immediate::ScalarPair(a_val, b_val), Abi::ScalarPair(a, b)) => { - assert!(matches!(layout.abi, Abi::Scalar(..))); + assert_matches!(layout.abi, Abi::Scalar(..)); Immediate::from(if offset.bytes() == 0 { debug_assert_eq!(layout.size, a.size(cx)); a_val diff --git a/compiler/rustc_const_eval/src/interpret/place.rs b/compiler/rustc_const_eval/src/interpret/place.rs index 470a62026b943..2afdd02c88035 100644 --- a/compiler/rustc_const_eval/src/interpret/place.rs +++ b/compiler/rustc_const_eval/src/interpret/place.rs @@ -1008,7 +1008,8 @@ where // Use cache for immutable strings. let ptr = if mutbl.is_not() { // Use dedup'd allocation function. - let id = tcx.allocate_bytes_dedup(str.as_bytes()); + let salt = M::get_global_alloc_salt(self, None); + let id = tcx.allocate_bytes_dedup(str.as_bytes(), salt); // Turn untagged "global" pointers (obtained via `tcx`) into the machine pointer to the allocation. M::adjust_alloc_root_pointer(&self, Pointer::from(id), Some(kind))? diff --git a/compiler/rustc_const_eval/src/interpret/stack.rs b/compiler/rustc_const_eval/src/interpret/stack.rs index 50dbced6a2a6a..0f6bf5c03364c 100644 --- a/compiler/rustc_const_eval/src/interpret/stack.rs +++ b/compiler/rustc_const_eval/src/interpret/stack.rs @@ -483,7 +483,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { | ty::Bool | ty::Float(_) | ty::FnDef(..) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::RawPtr(..) | ty::Char | ty::Ref(..) diff --git a/compiler/rustc_const_eval/src/interpret/step.rs b/compiler/rustc_const_eval/src/interpret/step.rs index 2527eca344620..aaee6f6d247f8 100644 --- a/compiler/rustc_const_eval/src/interpret/step.rs +++ b/compiler/rustc_const_eval/src/interpret/step.rs @@ -424,7 +424,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { self.tcx.mk_type_list_from_iter(extra_args.iter().map(|arg| arg.layout().ty)); let (callee, fn_abi, with_caller_location) = match *func.layout.ty.kind() { - ty::FnPtr(_sig) => { + ty::FnPtr(..) 
=> { let fn_ptr = self.read_pointer(&func)?; let fn_val = self.get_ptr_fn(fn_ptr)?; (fn_val, self.fn_abi_of_fn_ptr(fn_sig_binder, extra_args)?, false) diff --git a/compiler/rustc_const_eval/src/interpret/traits.rs b/compiler/rustc_const_eval/src/interpret/traits.rs index fb50661b8263d..cd4faf06655fe 100644 --- a/compiler/rustc_const_eval/src/interpret/traits.rs +++ b/compiler/rustc_const_eval/src/interpret/traits.rs @@ -28,7 +28,9 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { ensure_monomorphic_enough(*self.tcx, ty)?; ensure_monomorphic_enough(*self.tcx, poly_trait_ref)?; - let vtable_symbolic_allocation = self.tcx.reserve_and_set_vtable_alloc(ty, poly_trait_ref); + let salt = M::get_global_alloc_salt(self, None); + let vtable_symbolic_allocation = + self.tcx.reserve_and_set_vtable_alloc(ty, poly_trait_ref, salt); let vtable_ptr = self.global_root_pointer(Pointer::from(vtable_symbolic_allocation))?; Ok(vtable_ptr.into()) } diff --git a/compiler/rustc_const_eval/src/interpret/validity.rs b/compiler/rustc_const_eval/src/interpret/validity.rs index fadc4ee6c8a0c..26b7251f6dbc5 100644 --- a/compiler/rustc_const_eval/src/interpret/validity.rs +++ b/compiler/rustc_const_eval/src/interpret/validity.rs @@ -616,7 +616,7 @@ impl<'rt, 'tcx, M: Machine<'tcx>> ValidityVisitor<'rt, 'tcx, M> { self.check_safe_pointer(value, PointerKind::Ref(*mutbl))?; Ok(true) } - ty::FnPtr(_sig) => { + ty::FnPtr(..) => { let value = self.read_scalar(value, ExpectedKind::FnPtr)?; // If we check references recursively, also check that this points to a function. diff --git a/compiler/rustc_const_eval/src/lib.rs b/compiler/rustc_const_eval/src/lib.rs index 780404212c341..d825a47bfdf5d 100644 --- a/compiler/rustc_const_eval/src/lib.rs +++ b/compiler/rustc_const_eval/src/lib.rs @@ -6,7 +6,6 @@ #![feature(box_patterns)] #![feature(decl_macro)] #![feature(if_let_guard)] -#![feature(is_none_or)] #![feature(let_chains)] #![feature(never_type)] #![feature(rustdoc_internals)] diff --git a/compiler/rustc_const_eval/src/util/type_name.rs b/compiler/rustc_const_eval/src/util/type_name.rs index 3aa3b3b74e053..36c7bed5c1194 100644 --- a/compiler/rustc_const_eval/src/util/type_name.rs +++ b/compiler/rustc_const_eval/src/util/type_name.rs @@ -35,7 +35,7 @@ impl<'tcx> Printer<'tcx> for AbsolutePathPrinter<'tcx> { | ty::Slice(_) | ty::RawPtr(_, _) | ty::Ref(_, _, _) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::Never | ty::Tuple(_) | ty::Dynamic(_, _, _) => self.pretty_print_type(ty), diff --git a/compiler/rustc_data_structures/Cargo.toml b/compiler/rustc_data_structures/Cargo.toml index 3794a6e043c6d..69cbf8c416105 100644 --- a/compiler/rustc_data_structures/Cargo.toml +++ b/compiler/rustc_data_structures/Cargo.toml @@ -10,7 +10,7 @@ bitflags = "2.4.1" either = "1.0" elsa = "=1.7.1" ena = "0.14.3" -indexmap = { version = "2.0.0" } +indexmap = { version = "2.4.0" } jobserver_crate = { version = "0.1.28", package = "jobserver" } measureme = "11" rustc-hash = "1.1.0" diff --git a/compiler/rustc_data_structures/src/graph/scc/mod.rs b/compiler/rustc_data_structures/src/graph/scc/mod.rs index 96fc8ae3887f5..2a457ffb70b08 100644 --- a/compiler/rustc_data_structures/src/graph/scc/mod.rs +++ b/compiler/rustc_data_structures/src/graph/scc/mod.rs @@ -8,6 +8,7 @@ //! Typical examples would include: minimum element in SCC, maximum element //! reachable from it, etc. +use std::assert_matches::debug_assert_matches; use std::fmt::Debug; use std::ops::Range; @@ -569,7 +570,7 @@ where // This None marks that we still have the initialize this node's frame. 
debug!(?depth, ?node); - debug_assert!(matches!(self.node_states[node], NodeState::NotVisited)); + debug_assert_matches!(self.node_states[node], NodeState::NotVisited); // Push `node` onto the stack. self.node_states[node] = NodeState::BeingVisited { diff --git a/compiler/rustc_data_structures/src/lib.rs b/compiler/rustc_data_structures/src/lib.rs index 403136e78f4eb..a35f5b1f17db4 100644 --- a/compiler/rustc_data_structures/src/lib.rs +++ b/compiler/rustc_data_structures/src/lib.rs @@ -18,6 +18,7 @@ #![feature(array_windows)] #![feature(ascii_char)] #![feature(ascii_char_variants)] +#![feature(assert_matches)] #![feature(auto_traits)] #![feature(cfg_match)] #![feature(core_intrinsics)] diff --git a/compiler/rustc_data_structures/src/transitive_relation.rs b/compiler/rustc_data_structures/src/transitive_relation.rs index 26b00e0af3a8a..e81ebb9a4be22 100644 --- a/compiler/rustc_data_structures/src/transitive_relation.rs +++ b/compiler/rustc_data_structures/src/transitive_relation.rs @@ -203,9 +203,9 @@ impl TransitiveRelation { /// exists). See `postdom_upper_bound` for details. pub fn mutual_immediate_postdominator(&self, mut mubs: Vec) -> Option { loop { - match mubs.len() { - 0 => return None, - 1 => return Some(mubs[0]), + match mubs[..] { + [] => return None, + [mub] => return Some(mub), _ => { let m = mubs.pop().unwrap(); let n = mubs.pop().unwrap(); diff --git a/compiler/rustc_driver_impl/src/lib.rs b/compiler/rustc_driver_impl/src/lib.rs index b014fc2dc585e..2b7dc040f6411 100644 --- a/compiler/rustc_driver_impl/src/lib.rs +++ b/compiler/rustc_driver_impl/src/lib.rs @@ -338,12 +338,11 @@ fn run_compiler( config.input = input; true // has input: normal compilation } - Ok(None) => match matches.free.len() { - 0 => false, // no input: we will exit early - 1 => panic!("make_input should have provided valid inputs"), - _ => default_early_dcx.early_fatal(format!( - "multiple input filenames provided (first two filenames are `{}` and `{}`)", - matches.free[0], matches.free[1], + Ok(None) => match matches.free.as_slice() { + [] => false, // no input: we will exit early + [_] => panic!("make_input should have provided valid inputs"), + [fst, snd, ..] => default_early_dcx.early_fatal(format!( + "multiple input filenames provided (first two filenames are `{fst}` and `{snd}`)" )), }, }; @@ -491,34 +490,30 @@ fn make_input( early_dcx: &EarlyDiagCtxt, free_matches: &[String], ) -> Result, ErrorGuaranteed> { - if free_matches.len() == 1 { - let ifile = &free_matches[0]; - if ifile == "-" { - let mut src = String::new(); - if io::stdin().read_to_string(&mut src).is_err() { - // Immediately stop compilation if there was an issue reading - // the input (for example if the input stream is not UTF-8). - let reported = early_dcx - .early_err("couldn't read from stdin, as it did not contain valid UTF-8"); - return Err(reported); - } - if let Ok(path) = env::var("UNSTABLE_RUSTDOC_TEST_PATH") { - let line = env::var("UNSTABLE_RUSTDOC_TEST_LINE").expect( - "when UNSTABLE_RUSTDOC_TEST_PATH is set \ + let [ifile] = free_matches else { return Ok(None) }; + if ifile == "-" { + let mut src = String::new(); + if io::stdin().read_to_string(&mut src).is_err() { + // Immediately stop compilation if there was an issue reading + // the input (for example if the input stream is not UTF-8). 
+ let reported = + early_dcx.early_err("couldn't read from stdin, as it did not contain valid UTF-8"); + return Err(reported); + } + if let Ok(path) = env::var("UNSTABLE_RUSTDOC_TEST_PATH") { + let line = env::var("UNSTABLE_RUSTDOC_TEST_LINE").expect( + "when UNSTABLE_RUSTDOC_TEST_PATH is set \ UNSTABLE_RUSTDOC_TEST_LINE also needs to be set", - ); - let line = isize::from_str_radix(&line, 10) - .expect("UNSTABLE_RUSTDOC_TEST_LINE needs to be an number"); - let file_name = FileName::doc_test_source_code(PathBuf::from(path), line); - Ok(Some(Input::Str { name: file_name, input: src })) - } else { - Ok(Some(Input::Str { name: FileName::anon_source_code(&src), input: src })) - } + ); + let line = isize::from_str_radix(&line, 10) + .expect("UNSTABLE_RUSTDOC_TEST_LINE needs to be an number"); + let file_name = FileName::doc_test_source_code(PathBuf::from(path), line); + Ok(Some(Input::Str { name: file_name, input: src })) } else { - Ok(Some(Input::File(PathBuf::from(ifile)))) + Ok(Some(Input::Str { name: FileName::anon_source_code(&src), input: src })) } } else { - Ok(None) + Ok(Some(Input::File(PathBuf::from(ifile)))) } } diff --git a/compiler/rustc_errors/src/emitter.rs b/compiler/rustc_errors/src/emitter.rs index b8813ea4125fe..9ce5d77ef6c07 100644 --- a/compiler/rustc_errors/src/emitter.rs +++ b/compiler/rustc_errors/src/emitter.rs @@ -226,17 +226,17 @@ pub trait Emitter: Translate { ) { if let Some((sugg, rest)) = suggestions.split_first() { let msg = self.translate_message(&sugg.msg, fluent_args).map_err(Report::new).unwrap(); - if rest.is_empty() && + if rest.is_empty() // ^ if there is only one suggestion // don't display multi-suggestions as labels - sugg.substitutions.len() == 1 && + && let [substitution] = sugg.substitutions.as_slice() // don't display multipart suggestions as labels - sugg.substitutions[0].parts.len() == 1 && + && let [part] = substitution.parts.as_slice() // don't display long messages as labels - msg.split_whitespace().count() < 10 && + && msg.split_whitespace().count() < 10 // don't display multiline suggestions as labels - !sugg.substitutions[0].parts[0].snippet.contains('\n') && - ![ + && !part.snippet.contains('\n') + && ![ // when this style is set we want the suggestion to be a message, not inline SuggestionStyle::HideCodeAlways, // trivial suggestion for tooling's sake, never shown @@ -245,8 +245,8 @@ pub trait Emitter: Translate { SuggestionStyle::ShowAlways, ].contains(&sugg.style) { - let substitution = &sugg.substitutions[0].parts[0].snippet.trim(); - let msg = if substitution.is_empty() || sugg.style.hide_inline() { + let snippet = part.snippet.trim(); + let msg = if snippet.is_empty() || sugg.style.hide_inline() { // This substitution is only removal OR we explicitly don't want to show the // code inline (`hide_inline`). Therefore, we don't show the substitution. 
format!("help: {msg}") @@ -255,19 +255,18 @@ pub trait Emitter: Translate { format!( "help: {}{}: `{}`", msg, - if self.source_map().is_some_and(|sm| is_case_difference( - sm, - substitution, - sugg.substitutions[0].parts[0].span, - )) { + if self + .source_map() + .is_some_and(|sm| is_case_difference(sm, snippet, part.span,)) + { " (notice the capitalization)" } else { "" }, - substitution, + snippet, ) }; - primary_span.push_span_label(sugg.substitutions[0].parts[0].span, msg); + primary_span.push_span_label(part.span, msg); // We return only the modified primary_span suggestions.clear(); diff --git a/compiler/rustc_errors/src/lib.rs b/compiler/rustc_errors/src/lib.rs index ceebcd46a6f7f..fd203c3831897 100644 --- a/compiler/rustc_errors/src/lib.rs +++ b/compiler/rustc_errors/src/lib.rs @@ -10,6 +10,7 @@ #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] #![doc(rust_logo)] #![feature(array_windows)] +#![feature(assert_matches)] #![feature(associated_type_defaults)] #![feature(box_into_inner)] #![feature(box_patterns)] @@ -28,6 +29,7 @@ extern crate self as rustc_errors; +use std::assert_matches::assert_matches; use std::backtrace::{Backtrace, BacktraceStatus}; use std::borrow::Cow; use std::cell::Cell; @@ -1490,7 +1492,7 @@ impl DiagCtxtInner { // Future breakages aren't emitted if they're `Level::Allow` or // `Level::Expect`, but they still need to be constructed and // stashed below, so they'll trigger the must_produce_diag check. - assert!(matches!(diagnostic.level, Error | Warning | Allow | Expect(_))); + assert_matches!(diagnostic.level, Error | Warning | Allow | Expect(_)); self.future_breakage_diagnostics.push(diagnostic.clone()); } @@ -1835,23 +1837,23 @@ impl DelayedDiagInner { } } -/// Level is_error EmissionGuarantee Top-level Sub Used in lints? -/// ----- -------- ----------------- --------- --- -------------- -/// Bug yes BugAbort yes - - -/// Fatal yes FatalAbort/FatalError(*) yes - - -/// Error yes ErrorGuaranteed yes - yes -/// DelayedBug yes ErrorGuaranteed yes - - -/// ForceWarning - () yes - lint-only -/// Warning - () yes yes yes -/// Note - () rare yes - -/// OnceNote - () - yes lint-only -/// Help - () rare yes - -/// OnceHelp - () - yes lint-only -/// FailureNote - () rare - - -/// Allow - () yes - lint-only -/// Expect - () yes - lint-only +/// | Level | is_error | EmissionGuarantee | Top-level | Sub | Used in lints? +/// | ----- | -------- | ----------------- | --------- | --- | -------------- +/// | Bug | yes | BugAbort | yes | - | - +/// | Fatal | yes | FatalAbort/FatalError[^star] | yes | - | - +/// | Error | yes | ErrorGuaranteed | yes | - | yes +/// | DelayedBug | yes | ErrorGuaranteed | yes | - | - +/// | ForceWarning | - | () | yes | - | lint-only +/// | Warning | - | () | yes | yes | yes +/// | Note | - | () | rare | yes | - +/// | OnceNote | - | () | - | yes | lint-only +/// | Help | - | () | rare | yes | - +/// | OnceHelp | - | () | - | yes | lint-only +/// | FailureNote | - | () | rare | - | - +/// | Allow | - | () | yes | - | lint-only +/// | Expect | - | () | yes | - | lint-only /// -/// (*) `FatalAbort` normally, `FatalError` in the non-aborting "almost fatal" case that is +/// [^star]: `FatalAbort` normally, `FatalError` in the non-aborting "almost fatal" case that is /// occasionally used. 
/// #[derive(Copy, PartialEq, Eq, Clone, Hash, Debug, Encodable, Decodable)] @@ -2024,11 +2026,11 @@ pub fn a_or_an(s: &str) -> &'static str { /// /// Take a list ["a", "b", "c"] and output a display friendly version "a, b and c" pub fn display_list_with_comma_and(v: &[T]) -> String { - match v.len() { - 0 => "".to_string(), - 1 => v[0].to_string(), - 2 => format!("{} and {}", v[0], v[1]), - _ => format!("{}, {}", v[0], display_list_with_comma_and(&v[1..])), + match v { + [] => "".to_string(), + [a] => a.to_string(), + [a, b] => format!("{a} and {b}"), + [a, v @ ..] => format!("{a}, {}", display_list_with_comma_and(v)), } } diff --git a/compiler/rustc_expand/messages.ftl b/compiler/rustc_expand/messages.ftl index 18d95a398fd4f..766d96e268f04 100644 --- a/compiler/rustc_expand/messages.ftl +++ b/compiler/rustc_expand/messages.ftl @@ -129,6 +129,9 @@ expand_module_multiple_candidates = expand_must_repeat_once = this must repeat at least once +expand_non_inline_modules_in_proc_macro_input_are_unstable = + non-inline modules in proc macro input are unstable + expand_not_a_meta_item = not a meta item diff --git a/compiler/rustc_expand/src/base.rs b/compiler/rustc_expand/src/base.rs index c195d69258899..8f9104135cddd 100644 --- a/compiler/rustc_expand/src/base.rs +++ b/compiler/rustc_expand/src/base.rs @@ -1306,12 +1306,12 @@ pub fn parse_macro_name_and_helper_attrs( // that it's of the form `#[proc_macro_derive(Foo)]` or // `#[proc_macro_derive(Foo, attributes(A, ..))]` let list = attr.meta_item_list()?; - if list.len() != 1 && list.len() != 2 { + let ([trait_attr] | [trait_attr, _]) = list.as_slice() else { dcx.emit_err(errors::AttrNoArguments { span: attr.span }); return None; - } - let Some(trait_attr) = list[0].meta_item() else { - dcx.emit_err(errors::NotAMetaItem { span: list[0].span() }); + }; + let Some(trait_attr) = trait_attr.meta_item() else { + dcx.emit_err(errors::NotAMetaItem { span: trait_attr.span() }); return None; }; let trait_ident = match trait_attr.ident() { @@ -1398,8 +1398,6 @@ fn pretty_printing_compatibility_hack(item: &Item, sess: &Session) { }; if crate_matches { - // FIXME: make this translatable - #[allow(rustc::untranslatable_diagnostic)] sess.dcx().emit_fatal(errors::ProcMacroBackCompat { crate_name: "rental".to_string(), fixed_version: "0.5.6".to_string(), diff --git a/compiler/rustc_expand/src/config.rs b/compiler/rustc_expand/src/config.rs index f6bf9f5e89f2f..b0d3fecbb479b 100644 --- a/compiler/rustc_expand/src/config.rs +++ b/compiler/rustc_expand/src/config.rs @@ -265,12 +265,7 @@ impl<'a> StripUnconfigured<'a> { /// is in the original source file. Gives a compiler error if the syntax of /// the attribute is incorrect. 
     pub(crate) fn expand_cfg_attr(&self, cfg_attr: &Attribute, recursive: bool) -> Vec<Attribute> {
-        validate_attr::check_attribute_safety(
-            self.features.unwrap_or(&Features::default()),
-            &self.sess.psess,
-            AttributeSafety::Normal,
-            &cfg_attr,
-        );
+        validate_attr::check_attribute_safety(&self.sess.psess, AttributeSafety::Normal, &cfg_attr);
 
         let Some((cfg_predicate, expanded_attrs)) =
             rustc_parse::parse_cfg_attr(cfg_attr, &self.sess.psess)
@@ -395,11 +390,7 @@ impl<'a> StripUnconfigured<'a> {
             }
         };
 
-        validate_attr::deny_builtin_meta_unsafety(
-            self.features.unwrap_or(&Features::default()),
-            &self.sess.psess,
-            &meta_item,
-        );
+        validate_attr::deny_builtin_meta_unsafety(&self.sess.psess, &meta_item);
 
         (
             parse_cfg(&meta_item, self.sess).map_or(true, |meta_item| {
diff --git a/compiler/rustc_expand/src/expand.rs b/compiler/rustc_expand/src/expand.rs
index d8cb367e3face..cb6b13282a2ed 100644
--- a/compiler/rustc_expand/src/expand.rs
+++ b/compiler/rustc_expand/src/expand.rs
@@ -39,6 +39,7 @@ use crate::errors::{
     RecursionLimitReached, RemoveExprNotSupported, RemoveNodeNotSupported, UnsupportedKeyValue,
     WrongFragmentKind,
 };
+use crate::fluent_generated;
 use crate::mbe::diagnostics::annotate_err_with_kind;
 use crate::module::{mod_dir_path, parse_external_mod, DirOwnership, ParsedExternalMod};
 use crate::placeholders::{placeholder, PlaceholderExpander};
@@ -882,7 +883,6 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
         }
 
         impl<'ast, 'a> Visitor<'ast> for GateProcMacroInput<'a> {
-            #[allow(rustc::untranslatable_diagnostic)] // FIXME: make this translatable
             fn visit_item(&mut self, item: &'ast ast::Item) {
                 match &item.kind {
                     ItemKind::Mod(_, mod_kind)
@@ -892,7 +892,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                             self.sess,
                             sym::proc_macro_hygiene,
                             item.span,
-                            "non-inline modules in proc macro input are unstable",
+                            fluent_generated::expand_non_inline_modules_in_proc_macro_input_are_unstable,
                         )
                         .emit();
                     }
@@ -1876,14 +1876,13 @@ impl<'a, 'b> InvocationCollector<'a, 'b> {
 
     // Detect use of feature-gated or invalid attributes on macro invocations
     // since they will not be detected after macro expansion.
-    #[allow(rustc::untranslatable_diagnostic)] // FIXME: make this translatable
     fn check_attributes(&self, attrs: &[ast::Attribute], call: &ast::MacCall) {
         let features = self.cx.ecfg.features;
         let mut attrs = attrs.iter().peekable();
         let mut span: Option<Span> = None;
         while let Some(attr) = attrs.next() {
             rustc_ast_passes::feature_gate::check_attribute(attr, self.cx.sess, features);
-            validate_attr::check_attr(features, &self.cx.sess.psess, attr);
+            validate_attr::check_attr(&self.cx.sess.psess, attr);
 
             let current_span = if let Some(sp) = span { sp.to(attr.span) } else { attr.span };
             span = Some(current_span);
diff --git a/compiler/rustc_expand/src/mbe/macro_rules.rs b/compiler/rustc_expand/src/mbe/macro_rules.rs
index 6f177107e7025..256713ef73000 100644
--- a/compiler/rustc_expand/src/mbe/macro_rules.rs
+++ b/compiler/rustc_expand/src/mbe/macro_rules.rs
@@ -1154,7 +1154,7 @@ fn check_matcher_core<'tt>(
         && matches!(kind, NonterminalKind::Pat(PatParam { inferred: true }))
         && matches!(
             next_token,
-            TokenTree::Token(token) if token.kind == BinOp(token::BinOpToken::Or)
+            TokenTree::Token(token) if *token == BinOp(token::BinOpToken::Or)
        )
     {
         // It is suggestion to use pat_param, for example: $x:pat -> $x:pat_param.
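The macro_rules.rs hunk just above compares the token value itself against a kind (`*token == BinOp(...)`) instead of reaching through `.kind`; that call-site style relies on the token type implementing `PartialEq` against its kind type. A minimal self-contained sketch of the idiom, using invented `Token`/`TokenKind` types rather than rustc_ast's real ones:

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum TokenKind {
    Dollar,
    Or,
}

#[derive(Debug, Clone, Copy)]
struct Token {
    kind: TokenKind,
    byte_offset: usize,
}

// This impl is what lets call sites write `token == TokenKind::Or`
// rather than `token.kind == TokenKind::Or`.
impl PartialEq<TokenKind> for Token {
    fn eq(&self, other: &TokenKind) -> bool {
        self.kind == *other
    }
}

fn main() {
    let token = Token { kind: TokenKind::Or, byte_offset: 7 };
    println!("token at byte {}", token.byte_offset);
    assert!(token == TokenKind::Or);
    assert!(token != TokenKind::Dollar);
}

The gain is purely at the call site: comparisons read directly against the kind without repeating `.kind` everywhere.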
diff --git a/compiler/rustc_expand/src/mbe/quoted.rs b/compiler/rustc_expand/src/mbe/quoted.rs index e5a1c6c789928..b2f7c8f5183a2 100644 --- a/compiler/rustc_expand/src/mbe/quoted.rs +++ b/compiler/rustc_expand/src/mbe/quoted.rs @@ -54,18 +54,24 @@ pub(super) fn parse( // For each token tree in `input`, parse the token into a `self::TokenTree`, consuming // additional trees if need be. - let mut trees = input.trees(); + let mut trees = input.trees().peekable(); while let Some(tree) = trees.next() { // Given the parsed tree, if there is a metavar and we are expecting matchers, actually // parse out the matcher (i.e., in `$id:ident` this would parse the `:` and `ident`). let tree = parse_tree(tree, &mut trees, parsing_patterns, sess, node_id, features, edition); match tree { TokenTree::MetaVar(start_sp, ident) if parsing_patterns => { - let span = match trees.next() { + // Not consuming the next token immediately, as it may not be a colon + let span = match trees.peek() { Some(&tokenstream::TokenTree::Token( Token { kind: token::Colon, span: colon_span }, _, )) => { + // Consume the colon first + trees.next(); + + // It's ok to consume the next tree no matter how, + // since if it's not a token then it will be an invalid declaration. match trees.next() { Some(tokenstream::TokenTree::Token(token, _)) => match token.ident() { Some((fragment, _)) => { @@ -125,12 +131,13 @@ pub(super) fn parse( } _ => token.span, }, - Some(tree) => tree.span(), - None => colon_span, + // Invalid, return a nice source location + _ => colon_span.with_lo(start_sp.lo()), } } - Some(tree) => tree.span(), - None => start_sp, + // Whether it's none or some other tree, it doesn't belong to + // the current meta variable, returning the original span. + _ => start_sp, }; result.push(TokenTree::MetaVarDecl(span, ident, None)); diff --git a/compiler/rustc_feature/src/accepted.rs b/compiler/rustc_feature/src/accepted.rs index 44286cfeeefaa..3d5ecbaae32a2 100644 --- a/compiler/rustc_feature/src/accepted.rs +++ b/compiler/rustc_feature/src/accepted.rs @@ -60,6 +60,8 @@ declare_features! ( (accepted, adx_target_feature, "1.61.0", Some(44839)), /// Allows explicit discriminants on non-unit enum variants. (accepted, arbitrary_enum_discriminant, "1.66.0", Some(60553)), + /// Allows using `const` operands in inline assembly. + (accepted, asm_const, "CURRENT_RUSTC_VERSION", Some(93332)), /// Allows using `sym` operands in inline assembly. (accepted, asm_sym, "1.66.0", Some(93333)), /// Allows the definition of associated constants in `trait` or `impl` blocks. @@ -390,6 +392,8 @@ declare_features! ( (accepted, universal_impl_trait, "1.26.0", Some(34511)), /// Allows arbitrary delimited token streams in non-macro attributes. (accepted, unrestricted_attribute_tokens, "1.34.0", Some(55208)), + /// Allows unsafe attributes. + (accepted, unsafe_attributes, "CURRENT_RUSTC_VERSION", Some(123757)), /// The `unsafe_op_in_unsafe_fn` lint (allowed by default): no longer treat an unsafe function as an unsafe block. (accepted, unsafe_block_in_unsafe_fn, "1.52.0", Some(71668)), /// Allows unsafe on extern declarations and safety qualifiers over internal items. 
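The quoted.rs hunk above moves the matcher parser onto a peekable iterator so that the token after a `$name` metavariable is only consumed once it is known to be the `:` of a declaration. A small standalone sketch of the same peek-before-consume pattern with `std::iter::Peekable`; the `parse_key_value` function and its whitespace-separated `key = value` input format are invented for illustration:

fn parse_key_value(input: &str) -> Vec<(String, Option<String>)> {
    let mut out = Vec::new();
    let mut words = input.split_whitespace().peekable();
    while let Some(word) = words.next() {
        // Peek first: only advance past the separator once we know it is `=`,
        // otherwise the next word still belongs to the following entry.
        let value = if words.peek() == Some(&"=") {
            words.next(); // consume the `=`
            words.next().map(str::to_owned)
        } else {
            None
        };
        out.push((word.to_owned(), value));
    }
    out
}

fn main() {
    let parsed = parse_key_value("a = 1 b c = 3");
    assert_eq!(parsed.len(), 3);
    assert_eq!(parsed[0], ("a".to_owned(), Some("1".to_owned())));
    assert_eq!(parsed[1], ("b".to_owned(), None));
}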
diff --git a/compiler/rustc_feature/src/builtin_attrs.rs b/compiler/rustc_feature/src/builtin_attrs.rs index 72ea55d5999a2..d4c54b67f24d5 100644 --- a/compiler/rustc_feature/src/builtin_attrs.rs +++ b/compiler/rustc_feature/src/builtin_attrs.rs @@ -578,12 +578,6 @@ pub const BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[ EncodeCrossCrate::No, coroutines, experimental!(coroutines) ), - // `#[pointee]` attribute to designate the pointee type in SmartPointer derive-macro - gated!( - pointee, Normal, template!(Word), ErrorFollowing, - EncodeCrossCrate::No, derive_smart_pointer, experimental!(pointee) - ), - // RFC 3543 // `#[patchable_function_entry(prefix_nops = m, entry_nops = n)]` gated!( @@ -643,8 +637,8 @@ pub const BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[ through unstable paths" ), rustc_attr!( - rustc_deprecated_safe_2024, Normal, template!(Word), WarnFollowing, - EncodeCrossCrate::Yes, + rustc_deprecated_safe_2024, Normal, template!(List: r#"audit_that = "...""#), + ErrorFollowing, EncodeCrossCrate::Yes, "rustc_deprecated_safe_2024 is supposed to be used in libstd only", ), diff --git a/compiler/rustc_feature/src/unstable.rs b/compiler/rustc_feature/src/unstable.rs index 47810bc9165ef..459df9ea1b859 100644 --- a/compiler/rustc_feature/src/unstable.rs +++ b/compiler/rustc_feature/src/unstable.rs @@ -348,8 +348,6 @@ declare_features! ( (unstable, alloc_error_handler, "1.29.0", Some(51540)), /// Allows trait methods with arbitrary self types. (unstable, arbitrary_self_types, "1.23.0", Some(44874)), - /// Allows using `const` operands in inline assembly. - (unstable, asm_const, "1.58.0", Some(93332)), /// Enables experimental inline assembly support for additional architectures. (unstable, asm_experimental_arch, "1.58.0", Some(93335)), /// Allows using `label` operands in inline assembly. @@ -624,8 +622,6 @@ declare_features! ( (unstable, type_changing_struct_update, "1.58.0", Some(86555)), /// Allows unnamed fields of struct and union type (incomplete, unnamed_fields, "1.74.0", Some(49804)), - /// Allows unsafe attributes. - (unstable, unsafe_attributes, "1.80.0", Some(123757)), /// Allows const generic parameters to be defined with types that /// are not `Sized`, e.g. `fn foo() {`. (incomplete, unsized_const_params, "CURRENT_RUSTC_VERSION", Some(95174)), diff --git a/compiler/rustc_hir/src/hir.rs b/compiler/rustc_hir/src/hir.rs index 33e8432596be1..6599e70346117 100644 --- a/compiler/rustc_hir/src/hir.rs +++ b/compiler/rustc_hir/src/hir.rs @@ -3586,7 +3586,7 @@ impl ForeignItem<'_> { #[derive(Debug, Clone, Copy, HashStable_Generic)] pub enum ForeignItemKind<'hir> { /// A foreign function. - Fn(&'hir FnDecl<'hir>, &'hir [Ident], &'hir Generics<'hir>, Safety), + Fn(FnSig<'hir>, &'hir [Ident], &'hir Generics<'hir>), /// A foreign static item (`static ext: u8`). Static(&'hir Ty<'hir>, Mutability, Safety), /// A foreign type. @@ -3645,7 +3645,10 @@ impl<'hir> OwnerNode<'hir> { match self { OwnerNode::TraitItem(TraitItem { kind: TraitItemKind::Fn(fn_sig, _), .. }) | OwnerNode::ImplItem(ImplItem { kind: ImplItemKind::Fn(fn_sig, _), .. }) - | OwnerNode::Item(Item { kind: ItemKind::Fn(fn_sig, _, _), .. }) => Some(fn_sig), + | OwnerNode::Item(Item { kind: ItemKind::Fn(fn_sig, _, _), .. }) + | OwnerNode::ForeignItem(ForeignItem { + kind: ForeignItemKind::Fn(fn_sig, _, _), .. + }) => Some(fn_sig), _ => None, } } @@ -3654,11 +3657,10 @@ impl<'hir> OwnerNode<'hir> { match self { OwnerNode::TraitItem(TraitItem { kind: TraitItemKind::Fn(fn_sig, _), .. 
}) | OwnerNode::ImplItem(ImplItem { kind: ImplItemKind::Fn(fn_sig, _), .. }) - | OwnerNode::Item(Item { kind: ItemKind::Fn(fn_sig, _, _), .. }) => Some(fn_sig.decl), - OwnerNode::ForeignItem(ForeignItem { - kind: ForeignItemKind::Fn(fn_decl, _, _, _), - .. - }) => Some(fn_decl), + | OwnerNode::Item(Item { kind: ItemKind::Fn(fn_sig, _, _), .. }) + | OwnerNode::ForeignItem(ForeignItem { + kind: ForeignItemKind::Fn(fn_sig, _, _), .. + }) => Some(fn_sig.decl), _ => None, } } @@ -3846,11 +3848,13 @@ impl<'hir> Node<'hir> { match self { Node::TraitItem(TraitItem { kind: TraitItemKind::Fn(fn_sig, _), .. }) | Node::ImplItem(ImplItem { kind: ImplItemKind::Fn(fn_sig, _), .. }) - | Node::Item(Item { kind: ItemKind::Fn(fn_sig, _, _), .. }) => Some(fn_sig.decl), - Node::Expr(Expr { kind: ExprKind::Closure(Closure { fn_decl, .. }), .. }) - | Node::ForeignItem(ForeignItem { - kind: ForeignItemKind::Fn(fn_decl, _, _, _), .. - }) => Some(fn_decl), + | Node::Item(Item { kind: ItemKind::Fn(fn_sig, _, _), .. }) + | Node::ForeignItem(ForeignItem { kind: ForeignItemKind::Fn(fn_sig, _, _), .. }) => { + Some(fn_sig.decl) + } + Node::Expr(Expr { kind: ExprKind::Closure(Closure { fn_decl, .. }), .. }) => { + Some(fn_decl) + } _ => None, } } @@ -3874,7 +3878,10 @@ impl<'hir> Node<'hir> { match self { Node::TraitItem(TraitItem { kind: TraitItemKind::Fn(fn_sig, _), .. }) | Node::ImplItem(ImplItem { kind: ImplItemKind::Fn(fn_sig, _), .. }) - | Node::Item(Item { kind: ItemKind::Fn(fn_sig, _, _), .. }) => Some(fn_sig), + | Node::Item(Item { kind: ItemKind::Fn(fn_sig, _, _), .. }) + | Node::ForeignItem(ForeignItem { kind: ForeignItemKind::Fn(fn_sig, _, _), .. }) => { + Some(fn_sig) + } _ => None, } } @@ -3949,7 +3956,7 @@ impl<'hir> Node<'hir> { pub fn generics(self) -> Option<&'hir Generics<'hir>> { match self { Node::ForeignItem(ForeignItem { - kind: ForeignItemKind::Fn(_, _, generics, _), .. + kind: ForeignItemKind::Fn(_, _, generics), .. }) | Node::TraitItem(TraitItem { generics, .. }) | Node::ImplItem(ImplItem { generics, .. 
}) => Some(generics), @@ -4039,8 +4046,8 @@ mod size_asserts { static_assert_size!(Expr<'_>, 64); static_assert_size!(ExprKind<'_>, 48); static_assert_size!(FnDecl<'_>, 40); - static_assert_size!(ForeignItem<'_>, 72); - static_assert_size!(ForeignItemKind<'_>, 40); + static_assert_size!(ForeignItem<'_>, 88); + static_assert_size!(ForeignItemKind<'_>, 56); static_assert_size!(GenericArg<'_>, 16); static_assert_size!(GenericBound<'_>, 48); static_assert_size!(Generics<'_>, 56); diff --git a/compiler/rustc_hir/src/intravisit.rs b/compiler/rustc_hir/src/intravisit.rs index dd501f8417e64..a54596e3088dc 100644 --- a/compiler/rustc_hir/src/intravisit.rs +++ b/compiler/rustc_hir/src/intravisit.rs @@ -611,9 +611,9 @@ pub fn walk_foreign_item<'v, V: Visitor<'v>>( try_visit!(visitor.visit_ident(foreign_item.ident)); match foreign_item.kind { - ForeignItemKind::Fn(ref function_declaration, param_names, ref generics, _) => { + ForeignItemKind::Fn(ref sig, param_names, ref generics) => { try_visit!(visitor.visit_generics(generics)); - try_visit!(visitor.visit_fn_decl(function_declaration)); + try_visit!(visitor.visit_fn_decl(sig.decl)); walk_list!(visitor, visit_ident, param_names.iter().copied()); } ForeignItemKind::Static(ref typ, _, _) => { diff --git a/compiler/rustc_hir_analysis/src/check/check.rs b/compiler/rustc_hir_analysis/src/check/check.rs index 2e778fd375961..0135cdf1e9002 100644 --- a/compiler/rustc_hir_analysis/src/check/check.rs +++ b/compiler/rustc_hir_analysis/src/check/check.rs @@ -804,8 +804,8 @@ pub(crate) fn check_item_type(tcx: TyCtxt<'_>, def_id: LocalDefId) { let item = tcx.hir().foreign_item(item.id); match &item.kind { - hir::ForeignItemKind::Fn(fn_decl, _, _, _) => { - require_c_abi_if_c_variadic(tcx, fn_decl, abi, item.span); + hir::ForeignItemKind::Fn(sig, _, _) => { + require_c_abi_if_c_variadic(tcx, sig.decl, abi, item.span); } hir::ForeignItemKind::Static(..) => { check_static_inhabited(tcx, def_id); diff --git a/compiler/rustc_hir_analysis/src/check/intrinsic.rs b/compiler/rustc_hir_analysis/src/check/intrinsic.rs index 4b45ced30c579..c2b2f08132ed8 100644 --- a/compiler/rustc_hir_analysis/src/check/intrinsic.rs +++ b/compiler/rustc_hir_analysis/src/check/intrinsic.rs @@ -30,7 +30,7 @@ fn equate_intrinsic_type<'tcx>( let (generics, span) = match tcx.hir_node_by_def_id(def_id) { hir::Node::Item(hir::Item { kind: hir::ItemKind::Fn(_, generics, _), .. }) | hir::Node::ForeignItem(hir::ForeignItem { - kind: hir::ForeignItemKind::Fn(.., generics, _), + kind: hir::ForeignItemKind::Fn(_, _, generics), .. }) => (tcx.generics_of(def_id), generics.span), _ => { diff --git a/compiler/rustc_hir_analysis/src/check/intrinsicck.rs b/compiler/rustc_hir_analysis/src/check/intrinsicck.rs index 847a1e6470679..3d5a22fce85af 100644 --- a/compiler/rustc_hir_analysis/src/check/intrinsicck.rs +++ b/compiler/rustc_hir_analysis/src/check/intrinsicck.rs @@ -1,3 +1,5 @@ +use std::assert_matches::debug_assert_matches; + use rustc_ast::InlineAsmTemplatePiece; use rustc_data_structures::fx::FxIndexSet; use rustc_hir::{self as hir, LangItem}; @@ -66,7 +68,7 @@ impl<'a, 'tcx> InlineAsmCtxt<'a, 'tcx> { ty::Float(FloatTy::F32) => Some(InlineAsmType::F32), ty::Float(FloatTy::F64) => Some(InlineAsmType::F64), ty::Float(FloatTy::F128) => Some(InlineAsmType::F128), - ty::FnPtr(_) => Some(asm_ty_isize), + ty::FnPtr(..) 
=> Some(asm_ty_isize), ty::RawPtr(ty, _) if self.is_thin_ptr_ty(ty) => Some(asm_ty_isize), ty::Adt(adt, args) if adt.repr().simd() => { let fields = &adt.non_enum_variant().fields; @@ -457,17 +459,17 @@ impl<'a, 'tcx> InlineAsmCtxt<'a, 'tcx> { } // Typeck has checked that Const operands are integers. hir::InlineAsmOperand::Const { anon_const } => { - debug_assert!(matches!( + debug_assert_matches!( self.tcx.type_of(anon_const.def_id).instantiate_identity().kind(), ty::Error(_) | ty::Int(_) | ty::Uint(_) - )); + ); } // Typeck has checked that SymFn refers to a function. hir::InlineAsmOperand::SymFn { anon_const } => { - debug_assert!(matches!( + debug_assert_matches!( self.tcx.type_of(anon_const.def_id).instantiate_identity().kind(), ty::Error(_) | ty::FnDef(..) - )); + ); } // AST lowering guarantees that SymStatic points to a static. hir::InlineAsmOperand::SymStatic { .. } => {} diff --git a/compiler/rustc_hir_analysis/src/check/wfcheck.rs b/compiler/rustc_hir_analysis/src/check/wfcheck.rs index a9f8630741a81..bdf2914fc50c6 100644 --- a/compiler/rustc_hir_analysis/src/check/wfcheck.rs +++ b/compiler/rustc_hir_analysis/src/check/wfcheck.rs @@ -350,8 +350,8 @@ fn check_foreign_item<'tcx>( ); match item.kind { - hir::ForeignItemKind::Fn(decl, ..) => { - check_item_fn(tcx, def_id, item.ident, item.span, decl) + hir::ForeignItemKind::Fn(sig, ..) => { + check_item_fn(tcx, def_id, item.ident, item.span, sig.decl) } hir::ForeignItemKind::Static(ty, ..) => { check_item_type(tcx, def_id, ty.span, UnsizedHandling::AllowIfForeignTail) @@ -951,7 +951,7 @@ fn check_param_wf(tcx: TyCtxt<'_>, param: &hir::GenericParam<'_>) -> Result<(), } else { let mut diag = match ty.kind() { ty::Bool | ty::Char | ty::Int(_) | ty::Uint(_) | ty::Error(_) => return Ok(()), - ty::FnPtr(_) => tcx.dcx().struct_span_err( + ty::FnPtr(..) => tcx.dcx().struct_span_err( hir_ty.span, "using function pointers as const generic parameters is forbidden", ), diff --git a/compiler/rustc_hir_analysis/src/coherence/builtin.rs b/compiler/rustc_hir_analysis/src/coherence/builtin.rs index bdb5f5b720529..fecd78bc38f47 100644 --- a/compiler/rustc_hir_analysis/src/coherence/builtin.rs +++ b/compiler/rustc_hir_analysis/src/coherence/builtin.rs @@ -1,6 +1,7 @@ //! Check properties that are required by built-in traits and set //! up data structures required by type-checking/codegen. +use std::assert_matches::assert_matches; use std::collections::BTreeMap; use rustc_data_structures::fx::FxHashSet; @@ -129,7 +130,7 @@ fn visit_implementation_of_const_param_ty( checker: &Checker<'_>, kind: LangItem, ) -> Result<(), ErrorGuaranteed> { - assert!(matches!(kind, LangItem::ConstParamTy | LangItem::UnsizedConstParamTy)); + assert_matches!(kind, LangItem::ConstParamTy | LangItem::UnsizedConstParamTy); let tcx = checker.tcx; let header = checker.impl_header; diff --git a/compiler/rustc_hir_analysis/src/coherence/inherent_impls.rs b/compiler/rustc_hir_analysis/src/coherence/inherent_impls.rs index e2d3ff558cf7d..89acb778d6c29 100644 --- a/compiler/rustc_hir_analysis/src/coherence/inherent_impls.rs +++ b/compiler/rustc_hir_analysis/src/coherence/inherent_impls.rs @@ -172,7 +172,7 @@ impl<'tcx> InherentCollect<'tcx> { | ty::RawPtr(_, _) | ty::Ref(..) | ty::Never - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::Tuple(..) 
=> self.check_primitive_impl(id, self_ty), ty::Alias(ty::Projection | ty::Inherent | ty::Opaque, _) | ty::Param(_) => { Err(self.tcx.dcx().emit_err(errors::InherentNominal { span: item_span })) diff --git a/compiler/rustc_hir_analysis/src/collect.rs b/compiler/rustc_hir_analysis/src/collect.rs index 8c1e5e78b75dc..f75954c9edfb7 100644 --- a/compiler/rustc_hir_analysis/src/collect.rs +++ b/compiler/rustc_hir_analysis/src/collect.rs @@ -1440,11 +1440,9 @@ fn fn_sig(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::EarlyBinder<'_, ty::PolyFn icx.lowerer().lower_fn_ty(hir_id, header.safety, header.abi, decl, Some(generics), None) } - ForeignItem(&hir::ForeignItem { - kind: ForeignItemKind::Fn(fn_decl, _, _, safety), .. - }) => { + ForeignItem(&hir::ForeignItem { kind: ForeignItemKind::Fn(sig, _, _), .. }) => { let abi = tcx.hir().get_foreign_abi(hir_id); - compute_sig_of_foreign_fn_decl(tcx, def_id, fn_decl, abi, safety) + compute_sig_of_foreign_fn_decl(tcx, def_id, sig.decl, abi, sig.header.safety) } Ctor(data) | Variant(hir::Variant { data, .. }) if data.ctor().is_some() => { @@ -1700,6 +1698,8 @@ fn impl_trait_header(tcx: TyCtxt<'_>, def_id: LocalDefId) -> Option, def_id: LocalDefId) -> ty::Generics { .. }) => { if in_trait { - assert!(matches!(tcx.def_kind(fn_def_id), DefKind::AssocFn)) + assert_matches!(tcx.def_kind(fn_def_id), DefKind::AssocFn); } else { - assert!(matches!(tcx.def_kind(fn_def_id), DefKind::AssocFn | DefKind::Fn)) + assert_matches!(tcx.def_kind(fn_def_id), DefKind::AssocFn | DefKind::Fn); } Some(fn_def_id.to_def_id()) } @@ -218,9 +219,9 @@ pub(super) fn generics_of(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::Generics { .. }) => { if in_assoc_ty { - assert!(matches!(tcx.def_kind(parent), DefKind::AssocTy)); + assert_matches!(tcx.def_kind(parent), DefKind::AssocTy); } else { - assert!(matches!(tcx.def_kind(parent), DefKind::TyAlias)); + assert_matches!(tcx.def_kind(parent), DefKind::TyAlias); } debug!("generics_of: parent of opaque ty {:?} is {:?}", def_id, parent); // Opaque types are always nested within another item, and diff --git a/compiler/rustc_hir_analysis/src/collect/predicates_of.rs b/compiler/rustc_hir_analysis/src/collect/predicates_of.rs index a5a56cb845d79..6ac4802b19514 100644 --- a/compiler/rustc_hir_analysis/src/collect/predicates_of.rs +++ b/compiler/rustc_hir_analysis/src/collect/predicates_of.rs @@ -1,3 +1,5 @@ +use std::assert_matches::assert_matches; + use hir::{HirId, Node}; use rustc_data_structures::fx::FxIndexSet; use rustc_hir as hir; @@ -601,7 +603,7 @@ pub(super) fn implied_predicates_with_filter( let Some(trait_def_id) = trait_def_id.as_local() else { // if `assoc_name` is None, then the query should've been redirected to an // external provider - assert!(matches!(filter, PredicateFilter::SelfThatDefines(_))); + assert_matches!(filter, PredicateFilter::SelfThatDefines(_)); return tcx.explicit_super_predicates_of(trait_def_id); }; diff --git a/compiler/rustc_hir_analysis/src/collect/resolve_bound_vars.rs b/compiler/rustc_hir_analysis/src/collect/resolve_bound_vars.rs index e11d3c9c48b4e..ae0c70d232685 100644 --- a/compiler/rustc_hir_analysis/src/collect/resolve_bound_vars.rs +++ b/compiler/rustc_hir_analysis/src/collect/resolve_bound_vars.rs @@ -604,7 +604,7 @@ impl<'a, 'tcx> Visitor<'tcx> for BoundVarContext<'a, 'tcx> { fn visit_foreign_item(&mut self, item: &'tcx hir::ForeignItem<'tcx>) { match item.kind { - hir::ForeignItemKind::Fn(_, _, generics, _) => { + hir::ForeignItemKind::Fn(_, _, generics) => { self.visit_early_late(item.hir_id(), generics, 
|this| { intravisit::walk_foreign_item(this, item); }) diff --git a/compiler/rustc_hir_analysis/src/delegation.rs b/compiler/rustc_hir_analysis/src/delegation.rs index ca62ef92b83de..20aaa43219f34 100644 --- a/compiler/rustc_hir_analysis/src/delegation.rs +++ b/compiler/rustc_hir_analysis/src/delegation.rs @@ -1,3 +1,5 @@ +use std::assert_matches::debug_assert_matches; + use rustc_data_structures::fx::FxHashMap; use rustc_hir::def::DefKind; use rustc_hir::def_id::{DefId, LocalDefId}; @@ -63,7 +65,7 @@ enum FnKind { } fn fn_kind<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> FnKind { - debug_assert!(matches!(tcx.def_kind(def_id), DefKind::Fn | DefKind::AssocFn)); + debug_assert_matches!(tcx.def_kind(def_id), DefKind::Fn | DefKind::AssocFn); let parent = tcx.parent(def_id); match tcx.def_kind(parent) { diff --git a/compiler/rustc_hir_analysis/src/impl_wf_check.rs b/compiler/rustc_hir_analysis/src/impl_wf_check.rs index a8ae620f7a49d..ab441ed4cde99 100644 --- a/compiler/rustc_hir_analysis/src/impl_wf_check.rs +++ b/compiler/rustc_hir_analysis/src/impl_wf_check.rs @@ -8,6 +8,8 @@ //! specialization errors. These things can (and probably should) be //! fixed, but for the moment it's easier to do these checks early. +use std::assert_matches::debug_assert_matches; + use min_specialization::check_min_specialization; use rustc_data_structures::fx::FxHashSet; use rustc_errors::codes::*; @@ -54,7 +56,7 @@ mod min_specialization; pub fn check_impl_wf(tcx: TyCtxt<'_>, impl_def_id: LocalDefId) -> Result<(), ErrorGuaranteed> { let min_specialization = tcx.features().min_specialization; let mut res = Ok(()); - debug_assert!(matches!(tcx.def_kind(impl_def_id), DefKind::Impl { .. })); + debug_assert_matches!(tcx.def_kind(impl_def_id), DefKind::Impl { .. }); res = res.and(enforce_impl_params_are_constrained(tcx, impl_def_id)); if min_specialization { res = res.and(check_min_specialization(tcx, impl_def_id)); diff --git a/compiler/rustc_hir_analysis/src/lib.rs b/compiler/rustc_hir_analysis/src/lib.rs index 061db14ad0a61..291d57f2a176f 100644 --- a/compiler/rustc_hir_analysis/src/lib.rs +++ b/compiler/rustc_hir_analysis/src/lib.rs @@ -62,6 +62,7 @@ This API is completely unstable and subject to change. 
#![allow(rustc::untranslatable_diagnostic)] #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] #![doc(rust_logo)] +#![feature(assert_matches)] #![feature(control_flow_enum)] #![feature(if_let_guard)] #![feature(iter_intersperse)] diff --git a/compiler/rustc_hir_analysis/src/variance/constraints.rs b/compiler/rustc_hir_analysis/src/variance/constraints.rs index 92baa41e07f90..ce9e73bf24503 100644 --- a/compiler/rustc_hir_analysis/src/variance/constraints.rs +++ b/compiler/rustc_hir_analysis/src/variance/constraints.rs @@ -317,8 +317,8 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> { self.add_constraint(current, data.index, variance); } - ty::FnPtr(sig) => { - self.add_constraints_from_sig(current, sig, variance); + ty::FnPtr(sig_tys, hdr) => { + self.add_constraints_from_sig(current, sig_tys.with(hdr), variance); } ty::Error(_) => { diff --git a/compiler/rustc_hir_pretty/src/lib.rs b/compiler/rustc_hir_pretty/src/lib.rs index 089cee2fa0dec..cff21173f7982 100644 --- a/compiler/rustc_hir_pretty/src/lib.rs +++ b/compiler/rustc_hir_pretty/src/lib.rs @@ -352,16 +352,11 @@ impl<'a> State<'a> { self.maybe_print_comment(item.span.lo()); self.print_outer_attributes(self.attrs(item.hir_id())); match item.kind { - hir::ForeignItemKind::Fn(decl, arg_names, generics, safety) => { + hir::ForeignItemKind::Fn(sig, arg_names, generics) => { self.head(""); self.print_fn( - decl, - hir::FnHeader { - safety, - constness: hir::Constness::NotConst, - abi: Abi::Rust, - asyncness: hir::IsAsync::NotAsync, - }, + sig.decl, + sig.header, Some(item.ident.name), generics, arg_names, diff --git a/compiler/rustc_hir_typeck/src/callee.rs b/compiler/rustc_hir_typeck/src/callee.rs index 07f64ead6f636..44cb08e44eb81 100644 --- a/compiler/rustc_hir_typeck/src/callee.rs +++ b/compiler/rustc_hir_typeck/src/callee.rs @@ -137,7 +137,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // If the callee is a bare function or a closure, then we're all set. match *adjusted_ty.kind() { - ty::FnDef(..) | ty::FnPtr(_) => { + ty::FnDef(..) | ty::FnPtr(..) 
=> { let adjustments = self.adjust_steps(autoderef); self.apply_adjustments(callee_expr, adjustments); return Some(CallStep::Builtin(adjusted_ty)); @@ -467,7 +467,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { (fn_sig, Some(def_id)) } // FIXME(effects): these arms should error because we can't enforce them - ty::FnPtr(sig) => (sig, None), + ty::FnPtr(sig_tys, hdr) => (sig_tys.with(hdr), None), _ => { for arg in arg_exprs { self.check_expr(arg); diff --git a/compiler/rustc_hir_typeck/src/cast.rs b/compiler/rustc_hir_typeck/src/cast.rs index de70273390466..7cd97166ed1e9 100644 --- a/compiler/rustc_hir_typeck/src/cast.rs +++ b/compiler/rustc_hir_typeck/src/cast.rs @@ -97,6 +97,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { return Ok(Some(PointerKind::Thin)); } + let t = self.try_structurally_resolve_type(span, t); + Ok(match *t.kind() { ty::Slice(_) | ty::Str => Some(PointerKind::Length), ty::Dynamic(tty, _, ty::Dyn) => Some(PointerKind::VTable(tty)), diff --git a/compiler/rustc_hir_typeck/src/check.rs b/compiler/rustc_hir_typeck/src/check.rs index 89df464cca040..cd357e4a7adb5 100644 --- a/compiler/rustc_hir_typeck/src/check.rs +++ b/compiler/rustc_hir_typeck/src/check.rs @@ -160,15 +160,11 @@ pub(super) fn check_fn<'a, 'tcx>( fcx.demand_suptype(span, ret_ty, actual_return_ty); // Check that a function marked as `#[panic_handler]` has signature `fn(&PanicInfo) -> !` - if let Some(panic_impl_did) = tcx.lang_items().panic_impl() - && panic_impl_did == fn_def_id.to_def_id() - { - check_panic_info_fn(tcx, panic_impl_did.expect_local(), fn_sig); + if tcx.is_lang_item(fn_def_id.to_def_id(), LangItem::PanicImpl) { + check_panic_info_fn(tcx, fn_def_id, fn_sig); } - if let Some(lang_start_defid) = tcx.lang_items().start_fn() - && lang_start_defid == fn_def_id.to_def_id() - { + if tcx.is_lang_item(fn_def_id.to_def_id(), LangItem::Start) { check_lang_start_fn(tcx, fn_sig, fn_def_id); } diff --git a/compiler/rustc_hir_typeck/src/closure.rs b/compiler/rustc_hir_typeck/src/closure.rs index a7953acc95c80..6b813dc64cec2 100644 --- a/compiler/rustc_hir_typeck/src/closure.rs +++ b/compiler/rustc_hir_typeck/src/closure.rs @@ -14,7 +14,7 @@ use rustc_middle::span_bug; use rustc_middle::ty::visit::{TypeVisitable, TypeVisitableExt}; use rustc_middle::ty::{self, GenericArgs, Ty, TyCtxt, TypeSuperVisitable, TypeVisitor}; use rustc_span::def_id::LocalDefId; -use rustc_span::Span; +use rustc_span::{Span, DUMMY_SP}; use rustc_target::spec::abi::Abi; use rustc_trait_selection::error_reporting::traits::ArgKind; use rustc_trait_selection::traits; @@ -336,9 +336,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { .into_iter() .map(|obl| (obl.predicate, obl.cause.span)), ), - ty::FnPtr(sig) => match closure_kind { + ty::FnPtr(sig_tys, hdr) => match closure_kind { hir::ClosureKind::Closure => { - let expected_sig = ExpectedSig { cause_span: None, sig }; + let expected_sig = ExpectedSig { cause_span: None, sig: sig_tys.with(hdr) }; (Some(expected_sig), Some(ty::ClosureKind::Fn)) } hir::ClosureKind::Coroutine(_) | hir::ClosureKind::CoroutineClosure(_) => { @@ -539,6 +539,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { /// we identify the `FnOnce` bound, and if the output type is /// an inference variable `?Fut`, we check if that is bounded by a `Future` /// projection. + /// + /// This function is actually best-effort with the return type; if we don't find a + /// `Future` projection, we still will return arguments that we extracted from the `FnOnce` + /// projection, and the output will be an unconstrained type variable instead. 
fn extract_sig_from_projection_and_future_bound( &self, cause_span: Option, @@ -564,24 +568,43 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { }; // FIXME: We may want to elaborate here, though I assume this will be exceedingly rare. + let mut return_ty = None; for bound in self.obligations_for_self_ty(return_vid) { if let Some(ret_projection) = bound.predicate.as_projection_clause() && let Some(ret_projection) = ret_projection.no_bound_vars() && self.tcx.is_lang_item(ret_projection.def_id(), LangItem::FutureOutput) { - let sig = projection.rebind(self.tcx.mk_fn_sig( - input_tys, - ret_projection.term.expect_type(), - false, - hir::Safety::Safe, - Abi::Rust, - )); - - return Some(ExpectedSig { cause_span, sig }); + return_ty = Some(ret_projection.term.expect_type()); + break; } } - None + // SUBTLE: If we didn't find a `Future` bound for the return + // vid, we still want to attempt to provide inference guidance for the async + // closure's arguments. Instantiate a new vid to plug into the output type. + // + // You may be wondering, what if it's higher-ranked? Well, given that we + // found a type variable for the `FnOnce::Output` projection above, we know + // that the output can't mention any of the vars. + // + // Also note that we use a fresh var here for the signature since the signature + // records the output of the *future*, and `return_vid` above is the type + // variable of the future, not its output. + // + // FIXME: We probably should store this signature inference output in a way + // that does not misuse a `FnSig` type, but that can be done separately. + let return_ty = + return_ty.unwrap_or_else(|| self.next_ty_var(cause_span.unwrap_or(DUMMY_SP))); + + let sig = projection.rebind(self.tcx.mk_fn_sig( + input_tys, + return_ty, + false, + hir::Safety::Safe, + Abi::Rust, + )); + + return Some(ExpectedSig { cause_span, sig }); } fn sig_of_closure( diff --git a/compiler/rustc_hir_typeck/src/coercion.rs b/compiler/rustc_hir_typeck/src/coercion.rs index fcd3798eb48e6..d53df251a153d 100644 --- a/compiler/rustc_hir_typeck/src/coercion.rs +++ b/compiler/rustc_hir_typeck/src/coercion.rs @@ -137,7 +137,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { at.lub(DefineOpaqueTypes::Yes, b, a) } else { at.sup(DefineOpaqueTypes::Yes, b, a) - .map(|InferOk { value: (), obligations }| InferOk { value: a, obligations }) + .map(|InferOk { value: (), obligations }| InferOk { value: b, obligations }) }; // In the new solver, lazy norm may allow us to shallowly equate @@ -225,10 +225,10 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { // items to drop the unsafe qualifier. self.coerce_from_fn_item(a, b) } - ty::FnPtr(a_f) => { + ty::FnPtr(a_sig_tys, a_hdr) => { // We permit coercion of fn pointers to drop the // unsafe qualifier. 
- self.coerce_from_fn_pointer(a, a_f, b) + self.coerce_from_fn_pointer(a, a_sig_tys.with(a_hdr), b) } ty::Closure(closure_def_id_a, args_a) => { // Non-capturing closures are coercible to @@ -788,9 +788,8 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { self.commit_if_ok(|snapshot| { let outer_universe = self.infcx.universe(); - let result = if let ty::FnPtr(fn_ty_b) = b.kind() - && let (hir::Safety::Safe, hir::Safety::Unsafe) = - (fn_ty_a.safety(), fn_ty_b.safety()) + let result = if let ty::FnPtr(_, hdr_b) = b.kind() + && let (hir::Safety::Safe, hir::Safety::Unsafe) = (fn_ty_a.safety(), hdr_b.safety) { let unsafe_a = self.tcx.safe_to_unsafe_fn_ty(fn_ty_a); self.unify_and(unsafe_a, b, to_unsafe) @@ -842,7 +841,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { debug!("coerce_from_fn_item(a={:?}, b={:?})", a, b); match b.kind() { - ty::FnPtr(b_sig) => { + ty::FnPtr(_, b_hdr) => { let a_sig = a.fn_sig(self.tcx); if let ty::FnDef(def_id, _) = *a.kind() { // Intrinsics are not coercible to function pointers @@ -852,7 +851,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { // Safe `#[target_feature]` functions are not assignable to safe fn pointers (RFC 2396). - if b_sig.safety() == hir::Safety::Safe + if b_hdr.safety == hir::Safety::Safe && !self.tcx.codegen_fn_attrs(def_id).target_features.is_empty() { return Err(TypeError::TargetFeatureCast(def_id)); @@ -910,7 +909,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { // // All we care here is if any variable is being captured and not the exact paths, // so we check `upvars_mentioned` for root variables being captured. - ty::FnPtr(fn_ty) + ty::FnPtr(_, hdr) if self .tcx .upvars_mentioned(closure_def_id_a.expect_local()) @@ -923,7 +922,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { // or // `unsafe fn(arg0,arg1,...) -> _` let closure_sig = args_a.as_closure().sig(); - let safety = fn_ty.safety(); + let safety = hdr.safety; let pointer_ty = Ty::new_fn_ptr(self.tcx, self.tcx.signature_unclosure(closure_sig, safety)); debug!("coerce_closure_to_fn(a={:?}, b={:?}, pty={:?})", a, b, pointer_ty); diff --git a/compiler/rustc_hir_typeck/src/expectation.rs b/compiler/rustc_hir_typeck/src/expectation.rs index 91deae4174b00..76ae41db5c51c 100644 --- a/compiler/rustc_hir_typeck/src/expectation.rs +++ b/compiler/rustc_hir_typeck/src/expectation.rs @@ -70,7 +70,8 @@ impl<'a, 'tcx> Expectation<'tcx> { /// See the test case `test/ui/coerce-expect-unsized.rs` and #20169 /// for examples of where this comes up,. pub(super) fn rvalue_hint(fcx: &FnCtxt<'a, 'tcx>, ty: Ty<'tcx>) -> Expectation<'tcx> { - match fcx.tcx.struct_tail_without_normalization(ty).kind() { + // FIXME: This is not right, even in the old solver... + match fcx.tcx.struct_tail_raw(ty, |ty| ty, || {}).kind() { ty::Slice(_) | ty::Str | ty::Dynamic(..) 
=> ExpectRvalueLikeUnsized(ty), _ => ExpectHasType(ty), } diff --git a/compiler/rustc_hir_typeck/src/expr.rs b/compiler/rustc_hir_typeck/src/expr.rs index d6ff0cb9462f5..fec6efdc0f71b 100644 --- a/compiler/rustc_hir_typeck/src/expr.rs +++ b/compiler/rustc_hir_typeck/src/expr.rs @@ -2743,15 +2743,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } else if let ty::RawPtr(ptr_ty, _) = expr_t.kind() && let ty::Adt(adt_def, _) = ptr_ty.kind() && let ExprKind::Field(base_expr, _) = expr.kind - && adt_def.variants().len() == 1 - && adt_def - .variants() - .iter() - .next() - .unwrap() - .fields - .iter() - .any(|f| f.ident(self.tcx) == field) + && let [variant] = &adt_def.variants().raw + && variant.fields.iter().any(|f| f.ident(self.tcx) == field) { err.multipart_suggestion( "to access the field, dereference first", diff --git a/compiler/rustc_hir_typeck/src/fn_ctxt/_impl.rs b/compiler/rustc_hir_typeck/src/fn_ctxt/_impl.rs index 841d25b54cc88..b169f75796b3a 100644 --- a/compiler/rustc_hir_typeck/src/fn_ctxt/_impl.rs +++ b/compiler/rustc_hir_typeck/src/fn_ctxt/_impl.rs @@ -404,7 +404,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { code: traits::ObligationCauseCode<'tcx>, ) { if !ty.references_error() { - let tail = self.tcx.struct_tail_with_normalize( + let tail = self.tcx.struct_tail_raw( ty, |ty| { if self.next_trait_solver() { diff --git a/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs b/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs index 89e7227eda2c7..7720faddba376 100644 --- a/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs +++ b/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs @@ -1530,7 +1530,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { ty::Int(_) | ty::Uint(_) => Some(ty), ty::Char => Some(tcx.types.u8), ty::RawPtr(..) => Some(tcx.types.usize), - ty::FnDef(..) | ty::FnPtr(_) => Some(tcx.types.usize), + ty::FnDef(..) | ty::FnPtr(..) => Some(tcx.types.usize), _ => None, }); opt_ty.unwrap_or_else(|| self.next_int_var()) diff --git a/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs b/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs index 6b4edcd95d993..703273968c5bb 100644 --- a/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs +++ b/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs @@ -604,7 +604,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { expected: Ty<'tcx>, found: Ty<'tcx>, ) -> bool { - if let (ty::FnPtr(_), ty::Closure(def_id, _)) = (expected.kind(), found.kind()) { + if let (ty::FnPtr(..), ty::Closure(def_id, _)) = (expected.kind(), found.kind()) { if let Some(upvars) = self.tcx.upvars_mentioned(*def_id) { // Report upto four upvars being captured to reduce the amount error messages // reported back to the user. 
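Several hunks in this section (emitter.rs, base.rs, expr.rs above, and unused.rs further down) swap `len() == 1` checks plus indexing for slice patterns. A self-contained sketch of that idiom on plain `&[String]` data, independent of the compiler types involved above:

fn describe(names: &[String]) -> String {
    // Match on the shape of the slice instead of checking `len()` and indexing.
    match names {
        [] => "nobody".to_string(),
        [only] => only.clone(),
        [first, .., last] => format!("{first} .. {last} ({} total)", names.len()),
    }
}

fn single(names: &[String]) -> Option<&String> {
    // `if let [x] = slice` replaces `slice.len() == 1` followed by `slice[0]`.
    if let [only] = names { Some(only) } else { None }
}

fn main() {
    let names = vec!["a".to_string(), "b".to_string(), "c".to_string()];
    assert_eq!(describe(&names), "a .. c (3 total)");
    assert_eq!(single(&names[..1]), Some(&"a".to_string()));
    assert_eq!(single(&names), None);
}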
diff --git a/compiler/rustc_hir_typeck/src/lib.rs b/compiler/rustc_hir_typeck/src/lib.rs
index 758a1cefe6341..9ec101196a43f 100644
--- a/compiler/rustc_hir_typeck/src/lib.rs
+++ b/compiler/rustc_hir_typeck/src/lib.rs
@@ -5,7 +5,6 @@
 #![feature(box_patterns)]
 #![feature(control_flow_enum)]
 #![feature(if_let_guard)]
-#![feature(is_none_or)]
 #![feature(let_chains)]
 #![feature(never_type)]
 #![feature(try_blocks)]
diff --git a/compiler/rustc_hir_typeck/src/method/confirm.rs b/compiler/rustc_hir_typeck/src/method/confirm.rs
index f14159dc35a57..e0c0adac0767c 100644
--- a/compiler/rustc_hir_typeck/src/method/confirm.rs
+++ b/compiler/rustc_hir_typeck/src/method/confirm.rs
@@ -654,17 +654,17 @@ impl<'a, 'tcx> ConfirmContext<'a, 'tcx> {
             traits::upcast_choices(self.tcx, source_trait_ref, target_trait_def_id);
 
         // must be exactly one trait ref or we'd get an ambig error etc
-        if upcast_trait_refs.len() != 1 {
+        let [upcast_trait_ref] = upcast_trait_refs.as_slice() else {
             span_bug!(
                 self.span,
                 "cannot uniquely upcast `{:?}` to `{:?}`: `{:?}`",
                 source_trait_ref,
                 target_trait_def_id,
                 upcast_trait_refs
-            );
-        }
+            )
+        };
 
-        upcast_trait_refs.into_iter().next().unwrap()
+        *upcast_trait_ref
     }
 
     fn instantiate_binder_with_fresh_vars<T>(&self, value: ty::Binder<'tcx, T>) -> T
diff --git a/compiler/rustc_hir_typeck/src/method/suggest.rs b/compiler/rustc_hir_typeck/src/method/suggest.rs
index 94133357a3eb5..b3cf73bac1aa6 100644
--- a/compiler/rustc_hir_typeck/src/method/suggest.rs
+++ b/compiler/rustc_hir_typeck/src/method/suggest.rs
@@ -50,7 +50,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
             // Not all of these (e.g., unsafe fns) implement `FnOnce`,
             // so we look for these beforehand.
             // FIXME(async_closures): These don't impl `FnOnce` by default.
-            ty::Closure(..) | ty::FnDef(..) | ty::FnPtr(_) => true,
+            ty::Closure(..) | ty::FnDef(..) | ty::FnPtr(..) => true,
             // If it's not a simple function, look for things which implement `FnOnce`.
_ => { let Some(fn_once) = tcx.lang_items().fn_once_trait() else { @@ -321,19 +321,18 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { ); }; let suggest_for_privacy = - |err: &mut Diag<'_>, mut msg: String, sugg: Vec| { - if sugg.len() == 1 { - let msg = format!("\ + |err: &mut Diag<'_>, mut msg: String, suggs: Vec| { + if let [sugg] = suggs.as_slice() { + err.help(format!("\ trait `{}` provides `{item_name}` is implemented but not reachable", - sugg[0].trim() - ); - err.help(msg); + sugg.trim(), + )); } else { - msg += &format!(" but {} not reachable", pluralize!("is", sugg.len())); + msg += &format!(" but {} not reachable", pluralize!("is", suggs.len())); err.span_suggestions( span, msg, - sugg, + suggs, Applicability::MaybeIncorrect, ); } @@ -2988,11 +2987,11 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } } if local_spans.primary_span().is_some() { - let msg = if local_preds.len() == 1 { + let msg = if let [local_pred] = local_preds.as_slice() { format!( "an implementation of `{}` might be missing for `{}`", - local_preds[0].trait_ref.print_trait_sugared(), - local_preds[0].self_ty() + local_pred.trait_ref.print_trait_sugared(), + local_pred.self_ty() ) } else { format!( @@ -3034,11 +3033,11 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } } if foreign_spans.primary_span().is_some() { - let msg = if foreign_preds.len() == 1 { + let msg = if let [foreign_pred] = foreign_preds.as_slice() { format!( "the foreign item type `{}` doesn't implement `{}`", - foreign_preds[0].self_ty(), - foreign_preds[0].trait_ref.print_trait_sugared() + foreign_pred.self_ty(), + foreign_pred.trait_ref.print_trait_sugared() ) } else { format!( @@ -3388,26 +3387,26 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { ); self.suggest_use_candidates(candidates, |accessible_sugg, inaccessible_sugg, span| { - let suggest_for_access = |err: &mut Diag<'_>, mut msg: String, sugg: Vec<_>| { + let suggest_for_access = |err: &mut Diag<'_>, mut msg: String, suggs: Vec<_>| { msg += &format!( "; perhaps you want to import {one_of}", - one_of = if sugg.len() == 1 { "it" } else { "one of them" }, + one_of = if suggs.len() == 1 { "it" } else { "one of them" }, ); - err.span_suggestions(span, msg, sugg, Applicability::MaybeIncorrect); + err.span_suggestions(span, msg, suggs, Applicability::MaybeIncorrect); }; - let suggest_for_privacy = |err: &mut Diag<'_>, sugg: Vec| { + let suggest_for_privacy = |err: &mut Diag<'_>, suggs: Vec| { let msg = format!( "{this_trait_is} implemented but not reachable", - this_trait_is = if sugg.len() == 1 { - format!("trait `{}` which provides `{item_name}` is", sugg[0].trim()) + this_trait_is = if let [sugg] = suggs.as_slice() { + format!("trait `{}` which provides `{item_name}` is", sugg.trim()) } else { format!("the following traits which provide `{item_name}` are") } ); - if sugg.len() == 1 { + if suggs.len() == 1 { err.help(msg); } else { - err.span_suggestions(span, msg, sugg, Applicability::MaybeIncorrect); + err.span_suggestions(span, msg, suggs, Applicability::MaybeIncorrect); } }; if accessible_sugg.is_empty() { @@ -3449,6 +3448,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { trait_missing_method: bool, ) { let mut alt_rcvr_sugg = false; + let mut trait_in_other_version_found = false; if let (SelfSource::MethodCall(rcvr), false) = (source, unsatisfied_bounds) { debug!( "suggest_traits_to_import: span={:?}, item_name={:?}, rcvr_ty={:?}, rcvr={:?}", @@ -3490,8 +3490,17 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // self types and rely on the suggestion to `use` the trait from // `suggest_valid_traits`. 
let did = Some(pick.item.container_id(self.tcx)); - let skip = skippable.contains(&did); - if pick.autoderefs == 0 && !skip { + if skippable.contains(&did) { + continue; + } + trait_in_other_version_found = self + .detect_and_explain_multiple_crate_versions( + err, + pick.item.def_id, + rcvr.hir_id, + *rcvr_ty, + ); + if pick.autoderefs == 0 && !trait_in_other_version_found { err.span_label( pick.item.ident(self.tcx).span, format!("the method is available for `{rcvr_ty}` here"), @@ -3676,7 +3685,32 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } } } - if self.suggest_valid_traits(err, item_name, valid_out_of_scope_traits, true) { + + if let SelfSource::QPath(ty) = source + && !valid_out_of_scope_traits.is_empty() + && let hir::TyKind::Path(path) = ty.kind + && let hir::QPath::Resolved(_, path) = path + && let Some(def_id) = path.res.opt_def_id() + && let Some(assoc) = self + .tcx + .associated_items(valid_out_of_scope_traits[0]) + .filter_by_name_unhygienic(item_name.name) + .next() + { + // See if the `Type::function(val)` where `function` wasn't found corresponds to a + // `Trait` that is imported directly, but `Type` came from a different version of the + // same crate. + let rcvr_ty = self.tcx.type_of(def_id).instantiate_identity(); + trait_in_other_version_found = self.detect_and_explain_multiple_crate_versions( + err, + assoc.def_id, + ty.hir_id, + rcvr_ty, + ); + } + if !trait_in_other_version_found + && self.suggest_valid_traits(err, item_name, valid_out_of_scope_traits, true) + { return; } @@ -4041,6 +4075,62 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } } + fn detect_and_explain_multiple_crate_versions( + &self, + err: &mut Diag<'_>, + item_def_id: DefId, + hir_id: hir::HirId, + rcvr_ty: Ty<'_>, + ) -> bool { + let hir_id = self.tcx.parent_hir_id(hir_id); + let Some(traits) = self.tcx.in_scope_traits(hir_id) else { return false }; + if traits.is_empty() { + return false; + } + let trait_def_id = self.tcx.parent(item_def_id); + let krate = self.tcx.crate_name(trait_def_id.krate); + let name = self.tcx.item_name(trait_def_id); + let candidates: Vec<_> = traits + .iter() + .filter(|c| { + c.def_id.krate != trait_def_id.krate + && self.tcx.crate_name(c.def_id.krate) == krate + && self.tcx.item_name(c.def_id) == name + }) + .map(|c| (c.def_id, c.import_ids.get(0).cloned())) + .collect(); + if candidates.is_empty() { + return false; + } + let item_span = self.tcx.def_span(item_def_id); + let msg = format!( + "there are multiple different versions of crate `{krate}` in the dependency graph", + ); + let trait_span = self.tcx.def_span(trait_def_id); + let mut multi_span: MultiSpan = trait_span.into(); + multi_span.push_span_label(trait_span, format!("this is the trait that is needed")); + let descr = self.tcx.associated_item(item_def_id).descr(); + multi_span + .push_span_label(item_span, format!("the {descr} is available for `{rcvr_ty}` here")); + for (def_id, import_def_id) in candidates { + if let Some(import_def_id) = import_def_id { + multi_span.push_span_label( + self.tcx.def_span(import_def_id), + format!( + "`{name}` imported here doesn't correspond to the right version of crate \ + `{krate}`", + ), + ); + } + multi_span.push_span_label( + self.tcx.def_span(def_id), + format!("this is the trait that was imported"), + ); + } + err.span_note(multi_span, msg); + true + } + /// issue #102320, for `unwrap_or` with closure as argument, suggest `unwrap_or_else` /// FIXME: currently not working for suggesting `map_or_else`, see #102408 pub(crate) fn suggest_else_fn_with_closure( diff --git 
a/compiler/rustc_index/src/vec.rs b/compiler/rustc_index/src/vec.rs index 7438c97eb581c..1cb8bc861af98 100644 --- a/compiler/rustc_index/src/vec.rs +++ b/compiler/rustc_index/src/vec.rs @@ -188,6 +188,11 @@ impl IndexVec { let min_new_len = elem.index() + 1; self.raw.resize_with(min_new_len, fill_value); } + + #[inline] + pub fn append(&mut self, other: &mut Self) { + self.raw.append(&mut other.raw); + } } /// `IndexVec` is often used as a map, so it provides some map-like APIs. diff --git a/compiler/rustc_infer/src/infer/canonical/canonicalizer.rs b/compiler/rustc_infer/src/infer/canonical/canonicalizer.rs index 3bcb92d8029a2..db5139172b022 100644 --- a/compiler/rustc_infer/src/infer/canonical/canonicalizer.rs +++ b/compiler/rustc_infer/src/infer/canonical/canonicalizer.rs @@ -438,7 +438,7 @@ impl<'cx, 'tcx> TypeFolder> for Canonicalizer<'cx, 'tcx> { | ty::RawPtr(..) | ty::Ref(..) | ty::FnDef(..) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::Dynamic(..) | ty::Never | ty::Tuple(..) diff --git a/compiler/rustc_infer/src/infer/outlives/verify.rs b/compiler/rustc_infer/src/infer/outlives/verify.rs index c3d37d9986f6c..1908e1e09c3d5 100644 --- a/compiler/rustc_infer/src/infer/outlives/verify.rs +++ b/compiler/rustc_infer/src/infer/outlives/verify.rs @@ -1,3 +1,5 @@ +use std::assert_matches::assert_matches; + use rustc_middle::ty::{self, OutlivesPredicate, Ty, TyCtxt}; use rustc_type_ir::outlives::{compute_alias_components_recursive, Component}; use smallvec::smallvec; @@ -181,7 +183,7 @@ impl<'cx, 'tcx> VerifyBoundCx<'cx, 'tcx> { &self, generic_ty: Ty<'tcx>, ) -> Vec> { - assert!(matches!(generic_ty.kind(), ty::Param(_) | ty::Placeholder(_))); + assert_matches!(generic_ty.kind(), ty::Param(_) | ty::Placeholder(_)); self.declared_generic_bounds_from_env_for_erased_ty(generic_ty) } diff --git a/compiler/rustc_infer/src/lib.rs b/compiler/rustc_infer/src/lib.rs index b65ac8596675a..25ac8ba974bb6 100644 --- a/compiler/rustc_infer/src/lib.rs +++ b/compiler/rustc_infer/src/lib.rs @@ -18,6 +18,7 @@ #![allow(rustc::untranslatable_diagnostic)] #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")] #![doc(rust_logo)] +#![feature(assert_matches)] #![feature(box_patterns)] #![feature(control_flow_enum)] #![feature(extend_one)] diff --git a/compiler/rustc_interface/src/lib.rs b/compiler/rustc_interface/src/lib.rs index e37b30749ab30..3492df69b8d32 100644 --- a/compiler/rustc_interface/src/lib.rs +++ b/compiler/rustc_interface/src/lib.rs @@ -1,7 +1,6 @@ // tidy-alphabetical-start #![feature(decl_macro)] #![feature(let_chains)] -#![feature(thread_spawn_unchecked)] #![feature(try_blocks)] // tidy-alphabetical-end diff --git a/compiler/rustc_interface/src/util.rs b/compiler/rustc_interface/src/util.rs index 6f53b1c90313b..761d288a7c2eb 100644 --- a/compiler/rustc_interface/src/util.rs +++ b/compiler/rustc_interface/src/util.rs @@ -386,7 +386,6 @@ fn get_codegen_sysroot( } } -#[allow(rustc::untranslatable_diagnostic)] // FIXME: make this translatable pub(crate) fn check_attr_crate_type( sess: &Session, attrs: &[ast::Attribute], diff --git a/compiler/rustc_lint/src/builtin.rs b/compiler/rustc_lint/src/builtin.rs index 6b36944b20889..5f6e7fb314d9f 100644 --- a/compiler/rustc_lint/src/builtin.rs +++ b/compiler/rustc_lint/src/builtin.rs @@ -1853,7 +1853,7 @@ impl KeywordIdents { if !prev_dollar { self.check_ident_token(cx, UnderMacro(true), ident); } - } else if token.kind == TokenKind::Dollar { + } else if *token == TokenKind::Dollar { prev_dollar = true; continue; } diff --git 
a/compiler/rustc_lint/src/for_loops_over_fallibles.rs b/compiler/rustc_lint/src/for_loops_over_fallibles.rs index 6cb5263ac5432..2793d48dc512b 100644 --- a/compiler/rustc_lint/src/for_loops_over_fallibles.rs +++ b/compiler/rustc_lint/src/for_loops_over_fallibles.rs @@ -1,5 +1,5 @@ use hir::{Expr, Pat}; -use rustc_hir as hir; +use rustc_hir::{self as hir, LangItem}; use rustc_infer::infer::TyCtxtInferExt; use rustc_infer::traits::ObligationCause; use rustc_middle::ty; @@ -126,7 +126,10 @@ fn extract_iterator_next_call<'tcx>( ) -> Option<&'tcx Expr<'tcx>> { // This won't work for `Iterator::next(iter)`, is this an issue? if let hir::ExprKind::MethodCall(_, recv, _, _) = expr.kind - && cx.typeck_results().type_dependent_def_id(expr.hir_id) == cx.tcx.lang_items().next_fn() + && cx + .typeck_results() + .type_dependent_def_id(expr.hir_id) + .is_some_and(|def_id| cx.tcx.is_lang_item(def_id, LangItem::IteratorNext)) { Some(recv) } else { diff --git a/compiler/rustc_lint/src/levels.rs b/compiler/rustc_lint/src/levels.rs index 72920fd045fd4..44117e5d7a573 100644 --- a/compiler/rustc_lint/src/levels.rs +++ b/compiler/rustc_lint/src/levels.rs @@ -717,7 +717,6 @@ impl<'s, P: LintLevelsProvider> LintLevelsBuilder<'s, P> { }; } - #[allow(rustc::untranslatable_diagnostic)] // FIXME: make this translatable fn add(&mut self, attrs: &[ast::Attribute], is_crate_node: bool, source_hir_id: Option) { let sess = self.sess; for (attr_index, attr) in attrs.iter().enumerate() { @@ -1039,7 +1038,6 @@ impl<'s, P: LintLevelsProvider> LintLevelsBuilder<'s, P> { let (level, src) = self.lint_level(builtin::UNKNOWN_LINTS); // FIXME: make this translatable #[allow(rustc::diagnostic_outside_of_impl)] - #[allow(rustc::untranslatable_diagnostic)] lint_level(self.sess, lint, level, src, Some(span.into()), |lint| { lint.primary_message(fluent::lint_unknown_gated_lint); lint.arg("name", lint_id.lint.name_lower()); diff --git a/compiler/rustc_lint/src/nonstandard_style.rs b/compiler/rustc_lint/src/nonstandard_style.rs index ce7d203d8c05b..d7fd41c0ad7a3 100644 --- a/compiler/rustc_lint/src/nonstandard_style.rs +++ b/compiler/rustc_lint/src/nonstandard_style.rs @@ -527,11 +527,11 @@ impl<'tcx> LateLintPass<'tcx> for NonUpperCaseGlobals { // Lint for constants that look like binding identifiers (#7526) if let PatKind::Path(hir::QPath::Resolved(None, path)) = p.kind { if let Res::Def(DefKind::Const, _) = path.res { - if path.segments.len() == 1 { + if let [segment] = path.segments { NonUpperCaseGlobals::check_upper_case( cx, "constant in pattern", - &path.segments[0].ident, + &segment.ident, ); } } diff --git a/compiler/rustc_lint/src/traits.rs b/compiler/rustc_lint/src/traits.rs index fea96b5366e6c..a0fe4b5af7402 100644 --- a/compiler/rustc_lint/src/traits.rs +++ b/compiler/rustc_lint/src/traits.rs @@ -114,7 +114,7 @@ impl<'tcx> LateLintPass<'tcx> for DropTraitConstraints { let hir::TyKind::TraitObject(bounds, _lifetime, _syntax) = &ty.kind else { return }; for (bound, modifier) in &bounds[..] 
{ let def_id = bound.trait_ref.trait_def_id(); - if cx.tcx.lang_items().drop_trait() == def_id + if def_id.is_some_and(|def_id| cx.tcx.is_lang_item(def_id, LangItem::Drop)) && *modifier != hir::TraitBoundModifier::Maybe { let Some(def_id) = cx.tcx.get_diagnostic_item(sym::needs_drop) else { return }; diff --git a/compiler/rustc_lint/src/types.rs b/compiler/rustc_lint/src/types.rs index 5b17c0d718a44..cb7a07116ced8 100644 --- a/compiler/rustc_lint/src/types.rs +++ b/compiler/rustc_lint/src/types.rs @@ -211,15 +211,12 @@ fn lint_overflowing_range_endpoint<'tcx>( if !is_range_literal(struct_expr) { return false; }; - let ExprKind::Struct(_, eps, _) = &struct_expr.kind else { return false }; - if eps.len() != 2 { - return false; - } + let ExprKind::Struct(_, [start, end], _) = &struct_expr.kind else { return false }; // We can suggest using an inclusive range // (`..=`) instead only if it is the `end` that is // overflowing and only by 1. - if !(eps[1].expr.hir_id == expr.hir_id && lit_val - 1 == max) { + if !(end.expr.hir_id == expr.hir_id && lit_val - 1 == max) { return false; }; @@ -232,7 +229,7 @@ fn lint_overflowing_range_endpoint<'tcx>( }; let sub_sugg = if expr.span.lo() == lit_span.lo() { - let Ok(start) = cx.sess().source_map().span_to_snippet(eps[0].span) else { return false }; + let Ok(start) = cx.sess().source_map().span_to_snippet(start.span) else { return false }; UseInclusiveRange::WithoutParen { sugg: struct_expr.span.shrink_to_lo().to(lit_span.shrink_to_hi()), start, @@ -1025,7 +1022,7 @@ fn ty_is_known_nonnull<'tcx>( let ty = tcx.try_normalize_erasing_regions(param_env, ty).unwrap_or(ty); match ty.kind() { - ty::FnPtr(_) => true, + ty::FnPtr(..) => true, ty::Ref(..) => true, ty::Adt(def, _) if def.is_box() && matches!(mode, CItemKind::Definition) => true, ty::Adt(def, args) if def.repr().transparent() && !def.is_union() => { @@ -1476,7 +1473,8 @@ impl<'a, 'tcx> ImproperCTypesVisitor<'a, 'tcx> { ty::Array(inner_ty, _) => self.check_type_for_ffi(cache, inner_ty), - ty::FnPtr(sig) => { + ty::FnPtr(sig_tys, hdr) => { + let sig = sig_tys.with(hdr); if self.is_internal_abi(sig.abi()) { return FfiUnsafe { ty, @@ -1712,8 +1710,8 @@ impl<'a, 'tcx> ImproperCTypesVisitor<'a, 'tcx> { type Result = ControlFlow>; fn visit_ty(&mut self, ty: Ty<'tcx>) -> Self::Result { - if let ty::FnPtr(sig) = ty.kind() - && !self.visitor.is_internal_abi(sig.abi()) + if let ty::FnPtr(_, hdr) = ty.kind() + && !self.visitor.is_internal_abi(hdr.abi) { self.tys.push(ty); } @@ -1736,13 +1734,16 @@ impl<'tcx> LateLintPass<'tcx> for ImproperCTypesDeclarations { let abi = cx.tcx.hir().get_foreign_abi(it.hir_id()); match it.kind { - hir::ForeignItemKind::Fn(decl, _, _, _) if !vis.is_internal_abi(abi) => { - vis.check_foreign_fn(it.owner_id.def_id, decl); + hir::ForeignItemKind::Fn(sig, _, _) => { + if vis.is_internal_abi(abi) { + vis.check_fn(it.owner_id.def_id, sig.decl) + } else { + vis.check_foreign_fn(it.owner_id.def_id, sig.decl); + } } hir::ForeignItemKind::Static(ty, _, _) if !vis.is_internal_abi(abi) => { vis.check_foreign_static(it.owner_id, ty.span); } - hir::ForeignItemKind::Fn(decl, _, _, _) => vis.check_fn(it.owner_id.def_id, decl), hir::ForeignItemKind::Static(..) 
| hir::ForeignItemKind::Type => (), } } diff --git a/compiler/rustc_lint/src/unused.rs b/compiler/rustc_lint/src/unused.rs index e2c05129ee238..553d9db12c572 100644 --- a/compiler/rustc_lint/src/unused.rs +++ b/compiler/rustc_lint/src/unused.rs @@ -808,7 +808,7 @@ trait UnusedDelimLint { return; } let spans = match value.kind { - ast::ExprKind::Block(ref block, None) if block.stmts.len() == 1 => block.stmts[0] + ast::ExprKind::Block(ref block, None) if let [stmt] = block.stmts.as_slice() => stmt .span .find_ancestor_inside(value.span) .map(|span| (value.span.with_hi(span.lo()), value.span.with_lo(span.hi()))), @@ -1544,14 +1544,12 @@ impl UnusedImportBraces { } // Trigger the lint only if there is one nested item - if items.len() != 1 { - return; - } + let [(tree, _)] = items.as_slice() else { return }; // Trigger the lint if the nested item is a non-self single item - let node_name = match items[0].0.kind { + let node_name = match tree.kind { ast::UseTreeKind::Simple(rename) => { - let orig_ident = items[0].0.prefix.segments.last().unwrap().ident; + let orig_ident = tree.prefix.segments.last().unwrap().ident; if orig_ident.name == kw::SelfLower { return; } diff --git a/compiler/rustc_lint_defs/src/builtin.rs b/compiler/rustc_lint_defs/src/builtin.rs index c731b03a87590..56d77c9d1d04c 100644 --- a/compiler/rustc_lint_defs/src/builtin.rs +++ b/compiler/rustc_lint_defs/src/builtin.rs @@ -4971,7 +4971,6 @@ declare_lint! { /// ### Example /// /// ```rust - /// #![feature(unsafe_attributes)] /// #![warn(unsafe_attr_outside_unsafe)] /// /// #[no_mangle] diff --git a/compiler/rustc_llvm/llvm-wrapper/RustWrapper.cpp b/compiler/rustc_llvm/llvm-wrapper/RustWrapper.cpp index 2ff7335a0fc81..79a68b2ff0e67 100644 --- a/compiler/rustc_llvm/llvm-wrapper/RustWrapper.cpp +++ b/compiler/rustc_llvm/llvm-wrapper/RustWrapper.cpp @@ -397,6 +397,18 @@ LLVMRustCreateAllocSizeAttr(LLVMContextRef C, uint32_t ElementSizeArg) { std::nullopt)); } +extern "C" LLVMAttributeRef +LLVMRustCreateRangeAttribute(LLVMContextRef C, unsigned NumBits, + const uint64_t LowerWords[], + const uint64_t UpperWords[]) { +#if LLVM_VERSION_GE(19, 0) + return LLVMCreateConstantRangeAttribute(C, Attribute::Range, NumBits, + LowerWords, UpperWords); +#else + report_fatal_error("LLVM 19.0 is required for Range Attribute"); +#endif +} + // These values **must** match ffi::AllocKindFlags. 
// It _happens_ to match the LLVM values of llvm::AllocFnKind, // but that's happenstance and we do explicit conversions before diff --git a/compiler/rustc_llvm/llvm-wrapper/SymbolWrapper.cpp b/compiler/rustc_llvm/llvm-wrapper/SymbolWrapper.cpp index ccf1a5429e2b5..d625935d92594 100644 --- a/compiler/rustc_llvm/llvm-wrapper/SymbolWrapper.cpp +++ b/compiler/rustc_llvm/llvm-wrapper/SymbolWrapper.cpp @@ -77,22 +77,18 @@ LLVMRustGetSymbols(char *BufPtr, size_t BufLen, void *State, Expected> ObjOrErr = getSymbolicFile(Buf->getMemBufferRef(), Context); if (!ObjOrErr) { - Error E = ObjOrErr.takeError(); - SmallString<0> ErrorBuf; - auto Error = raw_svector_ostream(ErrorBuf); - Error << E << '\0'; - return ErrorCallback(Error.str().data()); + return ErrorCallback(toString(ObjOrErr.takeError()).c_str()); } std::unique_ptr Obj = std::move(*ObjOrErr); + if (Obj == nullptr) { + return 0; + } for (const object::BasicSymbolRef &S : Obj->symbols()) { if (!isArchiveSymbol(S)) continue; if (Error E = S.printName(SymName)) { - SmallString<0> ErrorBuf; - auto Error = raw_svector_ostream(ErrorBuf); - Error << E << '\0'; - return ErrorCallback(Error.str().data()); + return ErrorCallback(toString(std::move(E)).c_str()); } SymName << '\0'; if (void *E = Callback(State, SymNameBuf.str().data())) { diff --git a/compiler/rustc_metadata/messages.ftl b/compiler/rustc_metadata/messages.ftl index 415399ed06c7f..d80aa0cc4f414 100644 --- a/compiler/rustc_metadata/messages.ftl +++ b/compiler/rustc_metadata/messages.ftl @@ -41,6 +41,9 @@ metadata_crate_dep_multiple = metadata_crate_dep_not_static = `{$crate_name}` was unavailable as a static crate, preventing fully static linking +metadata_crate_dep_rustc_driver = + `feature(rustc_private)` is needed to link to the compiler's `rustc_driver` library + metadata_crate_location_unknown_type = extern location for {$crate_name} is of an unknown type: {$path} @@ -131,12 +134,18 @@ metadata_lib_framework_apple = metadata_lib_required = crate `{$crate_name}` required to be available in {$kind} format, but was not found in this form +metadata_link_arg_unstable = + link kind `link-arg` is unstable + metadata_link_cfg_form = link cfg must be of the form `cfg(/* predicate */)` metadata_link_cfg_single_predicate = link cfg must have a single predicate argument +metadata_link_cfg_unstable = + link cfg is unstable + metadata_link_framework_apple = link kind `framework` is only supported on Apple targets diff --git a/compiler/rustc_metadata/src/creader.rs b/compiler/rustc_metadata/src/creader.rs index 2fca443ffa035..14a1a7f67e56e 100644 --- a/compiler/rustc_metadata/src/creader.rs +++ b/compiler/rustc_metadata/src/creader.rs @@ -949,7 +949,6 @@ impl<'a, 'tcx> CrateLoader<'a, 'tcx> { } } - #[allow(rustc::untranslatable_diagnostic)] // FIXME: make this translatable fn report_unused_deps(&mut self, krate: &ast::Crate) { // Make a point span rather than covering the whole file let span = krate.spans.inner_span.shrink_to_lo(); diff --git a/compiler/rustc_metadata/src/dependency_format.rs b/compiler/rustc_metadata/src/dependency_format.rs index 17fd260fd794a..39fa23766b5d2 100644 --- a/compiler/rustc_metadata/src/dependency_format.rs +++ b/compiler/rustc_metadata/src/dependency_format.rs @@ -51,7 +51,7 @@ //! Additionally, the algorithm is geared towards finding *any* solution rather //! than finding a number of solutions (there are normally quite a few). 
-use rustc_data_structures::fx::FxHashMap; +use rustc_data_structures::fx::{FxHashMap, FxHashSet}; use rustc_hir::def_id::CrateNum; use rustc_middle::bug; use rustc_middle::middle::dependency_format::{Dependencies, DependencyList, Linkage}; @@ -59,12 +59,14 @@ use rustc_middle::ty::TyCtxt; use rustc_session::config::CrateType; use rustc_session::cstore::CrateDepKind; use rustc_session::cstore::LinkagePreference::{self, RequireDynamic, RequireStatic}; +use rustc_span::sym; use tracing::info; use crate::creader::CStore; use crate::errors::{ BadPanicStrategy, CrateDepMultiple, IncompatiblePanicInDropStrategy, LibRequired, - NonStaticCrateDep, RequiredPanicStrategy, RlibRequired, RustcLibRequired, TwoPanicRuntimes, + NonStaticCrateDep, RequiredPanicStrategy, RlibRequired, RustcDriverHelp, RustcLibRequired, + TwoPanicRuntimes, }; pub(crate) fn calculate(tcx: TyCtxt<'_>) -> Dependencies { @@ -160,25 +162,49 @@ fn calculate_type(tcx: TyCtxt<'_>, ty: CrateType) -> DependencyList { Linkage::Dynamic | Linkage::IncludedFromDylib => {} } + let all_dylibs = || { + tcx.crates(()).iter().filter(|&&cnum| { + !tcx.dep_kind(cnum).macros_only() && tcx.used_crate_source(cnum).dylib.is_some() + }) + }; + + let mut upstream_in_dylibs = FxHashSet::default(); + + if tcx.features().rustc_private { + // We need this to prevent users of `rustc_driver` from linking dynamically to `std` + // which does not work as `std` is also statically linked into `rustc_driver`. + + // Find all libraries statically linked to upstream dylibs. + for &cnum in all_dylibs() { + let deps = tcx.dylib_dependency_formats(cnum); + for &(depnum, style) in deps.iter() { + if let RequireStatic = style { + upstream_in_dylibs.insert(depnum); + } + } + } + } + let mut formats = FxHashMap::default(); // Sweep all crates for found dylibs. Add all dylibs, as well as their // dependencies, ensuring there are no conflicts. The only valid case for a // dependency to be relied upon twice is for both cases to rely on a dylib. - for &cnum in tcx.crates(()).iter() { - if tcx.dep_kind(cnum).macros_only() { + for &cnum in all_dylibs() { + if upstream_in_dylibs.contains(&cnum) { + info!("skipping dylib: {}", tcx.crate_name(cnum)); + // If this dylib is also available statically linked to another dylib + // we try to use that instead. continue; } + let name = tcx.crate_name(cnum); - let src = tcx.used_crate_source(cnum); - if src.dylib.is_some() { - info!("adding dylib: {}", name); - add_library(tcx, cnum, RequireDynamic, &mut formats, &mut unavailable_as_static); - let deps = tcx.dylib_dependency_formats(cnum); - for &(depnum, style) in deps.iter() { - info!("adding {:?}: {}", style, tcx.crate_name(depnum)); - add_library(tcx, depnum, style, &mut formats, &mut unavailable_as_static); - } + info!("adding dylib: {}", name); + add_library(tcx, cnum, RequireDynamic, &mut formats, &mut unavailable_as_static); + let deps = tcx.dylib_dependency_formats(cnum); + for &(depnum, style) in deps.iter() { + info!("adding {:?}: {}", style, tcx.crate_name(depnum)); + add_library(tcx, depnum, style, &mut formats, &mut unavailable_as_static); } } @@ -268,12 +294,15 @@ fn add_library( // This error is probably a little obscure, but I imagine that it // can be refined over time. 
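// ---------------------------------------------------------------------------
// Editor's illustrative sketch -- not part of this patch. The
// `upstream_in_dylibs` set computed above and the `RustcDriverHelp` note added
// below target nightly tools that link the compiler. A minimal such crate
// (hypothetical, not a crate in this repository) is sketched here; per the new
// `metadata_crate_dep_rustc_driver` message, it must opt in to `rustc_private`
// so that `std` resolves to the copy already statically linked into the
// `rustc_driver` dylib instead of being pulled in dynamically a second time.
// ---------------------------------------------------------------------------
#![feature(rustc_private)] // nightly-only opt-in
extern crate rustc_driver;

fn main() {
    // `std` used here is the copy baked into `rustc_driver`'s dylib.
    println!("minimal compiler-driver tool");
}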
if link2 != link || link == RequireStatic { + let linking_to_rustc_driver = tcx.sess.psess.unstable_features.is_nightly_build() + && tcx.crates(()).iter().any(|&cnum| tcx.crate_name(cnum) == sym::rustc_driver); tcx.dcx().emit_err(CrateDepMultiple { crate_name: tcx.crate_name(cnum), non_static_deps: unavailable_as_static .drain(..) .map(|cnum| NonStaticCrateDep { crate_name: tcx.crate_name(cnum) }) .collect(), + rustc_driver_help: linking_to_rustc_driver.then_some(RustcDriverHelp), }); } } diff --git a/compiler/rustc_metadata/src/errors.rs b/compiler/rustc_metadata/src/errors.rs index 89970dddf9fd4..42dec978b78cd 100644 --- a/compiler/rustc_metadata/src/errors.rs +++ b/compiler/rustc_metadata/src/errors.rs @@ -38,6 +38,8 @@ pub struct CrateDepMultiple { pub crate_name: Symbol, #[subdiagnostic] pub non_static_deps: Vec, + #[subdiagnostic] + pub rustc_driver_help: Option, } #[derive(Subdiagnostic)] @@ -46,6 +48,10 @@ pub struct NonStaticCrateDep { pub crate_name: Symbol, } +#[derive(Subdiagnostic)] +#[help(metadata_crate_dep_rustc_driver)] +pub struct RustcDriverHelp; + #[derive(Diagnostic)] #[diag(metadata_two_panic_runtimes)] pub struct TwoPanicRuntimes { diff --git a/compiler/rustc_metadata/src/native_libs.rs b/compiler/rustc_metadata/src/native_libs.rs index b19493d12ff33..34497f5ac53f8 100644 --- a/compiler/rustc_metadata/src/native_libs.rs +++ b/compiler/rustc_metadata/src/native_libs.rs @@ -17,7 +17,7 @@ use rustc_span::def_id::{DefId, LOCAL_CRATE}; use rustc_span::symbol::{sym, Symbol}; use rustc_target::spec::abi::Abi; -use crate::errors; +use crate::{errors, fluent_generated}; pub fn find_native_static_library(name: &str, verbatim: bool, sess: &Session) -> PathBuf { let formats = if verbatim { @@ -87,7 +87,6 @@ struct Collector<'tcx> { } impl<'tcx> Collector<'tcx> { - #[allow(rustc::untranslatable_diagnostic)] // FIXME: make this translatable fn process_module(&mut self, module: &ForeignModule) { let ForeignModule { def_id, abi, ref foreign_items } = *module; let def_id = def_id.expect_local(); @@ -161,7 +160,7 @@ impl<'tcx> Collector<'tcx> { sess, sym::link_arg_attribute, span, - "link kind `link-arg` is unstable", + fluent_generated::metadata_link_arg_unstable, ) .emit(); } @@ -201,8 +200,13 @@ impl<'tcx> Collector<'tcx> { continue; }; if !features.link_cfg { - feature_err(sess, sym::link_cfg, item.span(), "link cfg is unstable") - .emit(); + feature_err( + sess, + sym::link_cfg, + item.span(), + fluent_generated::metadata_link_cfg_unstable, + ) + .emit(); } cfg = Some(link_cfg.clone()); } @@ -266,6 +270,8 @@ impl<'tcx> Collector<'tcx> { macro report_unstable_modifier($feature: ident) { if !features.$feature { + // FIXME: make this translatable + #[expect(rustc::untranslatable_diagnostic)] feature_err( sess, sym::$feature, diff --git a/compiler/rustc_middle/src/arena.rs b/compiler/rustc_middle/src/arena.rs index e3d7dff3c66bb..37c10b14054c5 100644 --- a/compiler/rustc_middle/src/arena.rs +++ b/compiler/rustc_middle/src/arena.rs @@ -61,10 +61,6 @@ macro_rules! 
arena_types { [] dtorck_constraint: rustc_middle::traits::query::DropckConstraint<'tcx>, [] candidate_step: rustc_middle::traits::query::CandidateStep<'tcx>, [] autoderef_bad_ty: rustc_middle::traits::query::MethodAutoderefBadTy<'tcx>, - [] canonical_goal_evaluation: - rustc_type_ir::solve::inspect::CanonicalGoalEvaluationStep< - rustc_middle::ty::TyCtxt<'tcx> - >, [] query_region_constraints: rustc_middle::infer::canonical::QueryRegionConstraints<'tcx>, [] type_op_subtype: rustc_middle::infer::canonical::Canonical<'tcx, diff --git a/compiler/rustc_middle/src/hir/map/mod.rs b/compiler/rustc_middle/src/hir/map/mod.rs index edab6b5ebde84..0f85998204ccb 100644 --- a/compiler/rustc_middle/src/hir/map/mod.rs +++ b/compiler/rustc_middle/src/hir/map/mod.rs @@ -554,53 +554,43 @@ impl<'hir> Map<'hir> { /// } /// ``` pub fn get_fn_id_for_return_block(self, id: HirId) -> Option<HirId> { - let mut iter = self.parent_iter(id).peekable(); - let mut ignore_tail = false; - if let Node::Expr(Expr { kind: ExprKind::Ret(_), .. }) = self.tcx.hir_node(id) { - // When dealing with `return` statements, we don't care about climbing only tail - // expressions. - ignore_tail = true; - } + let enclosing_body_owner = self.tcx.local_def_id_to_hir_id(self.enclosing_body_owner(id)); + + // Return `None` if the `id` expression is not the returned value of the enclosing body + let mut iter = [id].into_iter().chain(self.parent_id_iter(id)).peekable(); + while let Some(cur_id) = iter.next() { + if enclosing_body_owner == cur_id { + break; + } - let mut prev_hir_id = None; - while let Some((hir_id, node)) = iter.next() { - if let (Some((_, next_node)), false) = (iter.peek(), ignore_tail) { - match next_node { - Node::Block(Block { expr: None, .. }) => return None, - // The current node is not the tail expression of its parent. - Node::Block(Block { expr: Some(e), .. }) if hir_id != e.hir_id => return None, + // A return statement is always the value returned from the enclosing body regardless of + // what the parent expressions are. + if let Node::Expr(Expr { kind: ExprKind::Ret(_), .. }) = self.tcx.hir_node(cur_id) { + break; + } + + // If the current expression's value doesn't get used as the parent expression's value then return `None` + if let Some(&parent_id) = iter.peek() { + match self.tcx.hir_node(parent_id) { + // The current node is not the tail expression of its parent block expression. + Node::Block(Block { expr: Some(e), .. }) if cur_id != e.hir_id => return None, Node::Block(Block { expr: Some(e), .. }) if matches!(e.kind, ExprKind::If(_, _, None)) => { return None; } + + // The current expression's value does not pass up through these parent expressions + Node::Block(Block { expr: None, .. }) + | Node::Expr(Expr { kind: ExprKind::Loop(..), .. }) + | Node::LetStmt(..) => return None, + _ => {} } } - match node { - Node::Item(_) - | Node::ForeignItem(_) - | Node::TraitItem(_) - | Node::Expr(Expr { kind: ExprKind::Closure(_), .. }) - | Node::ImplItem(_) - // The input node `id` must be enclosed in the method's body as opposed - // to some other place such as its return type (fixes #114918). - // We verify that indirectly by checking that the previous node is the - // current node's body if node.body_id().map(|b| b.hir_id) == prev_hir_id => { - return Some(hir_id) - } - // Ignore `return`s on the first iteration - Node::Expr(Expr { kind: ExprKind::Loop(..) | ExprKind::Ret(..), ..
}) - | Node::LetStmt(_) => { - return None; - } - _ => {} - } - - prev_hir_id = Some(hir_id); } - None + + Some(enclosing_body_owner) } /// Retrieves the `OwnerId` for `id`'s parent item, or `id` itself if no @@ -826,6 +816,11 @@ impl<'hir> Map<'hir> { }) | Node::ImplItem(ImplItem { kind: ImplItemKind::Fn(sig, ..), span: outer_span, .. + }) + | Node::ForeignItem(ForeignItem { + kind: ForeignItemKind::Fn(sig, ..), + span: outer_span, + .. }) => { // Ensure that the returned span has the item's SyntaxContext, and not the // SyntaxContext of the visibility. @@ -884,10 +879,7 @@ impl<'hir> Map<'hir> { }, Node::Variant(variant) => named_span(variant.span, variant.ident, None), Node::ImplItem(item) => named_span(item.span, item.ident, Some(item.generics)), - Node::ForeignItem(item) => match item.kind { - ForeignItemKind::Fn(decl, _, _, _) => until_within(item.span, decl.output.span()), - _ => named_span(item.span, item.ident, None), - }, + Node::ForeignItem(item) => named_span(item.span, item.ident, None), Node::Ctor(_) => return self.span(self.tcx.parent_hir_id(hir_id)), Node::Expr(Expr { kind: ExprKind::Closure(Closure { fn_decl_span, .. }), diff --git a/compiler/rustc_middle/src/hir/mod.rs b/compiler/rustc_middle/src/hir/mod.rs index fa521ab9f2fa9..596d9f077372e 100644 --- a/compiler/rustc_middle/src/hir/mod.rs +++ b/compiler/rustc_middle/src/hir/mod.rs @@ -202,7 +202,7 @@ pub fn provide(providers: &mut Providers) { .. }) | Node::ForeignItem(&ForeignItem { - kind: ForeignItemKind::Fn(_, idents, _, _), + kind: ForeignItemKind::Fn(_, idents, _), .. }) = tcx.hir_node(tcx.local_def_id_to_hir_id(def_id)) { diff --git a/compiler/rustc_middle/src/mir/interpret/mod.rs b/compiler/rustc_middle/src/mir/interpret/mod.rs index 1851a61d7533c..91e71c12cae45 100644 --- a/compiler/rustc_middle/src/mir/interpret/mod.rs +++ b/compiler/rustc_middle/src/mir/interpret/mod.rs @@ -13,7 +13,6 @@ use std::num::NonZero; use std::{fmt, io}; use rustc_ast::LitKind; -use rustc_attr::InlineAttr; use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::sync::Lock; use rustc_errors::ErrorGuaranteed; @@ -46,7 +45,7 @@ pub use self::pointer::{CtfeProvenance, Pointer, PointerArithmetic, Provenance}; pub use self::value::Scalar; use crate::mir; use crate::ty::codec::{TyDecoder, TyEncoder}; -use crate::ty::{self, GenericArgKind, Instance, Ty, TyCtxt}; +use crate::ty::{self, Instance, Ty, TyCtxt}; /// Uniquely identifies one of the following: /// - A constant @@ -126,11 +125,10 @@ pub fn specialized_encode_alloc_id<'tcx, E: TyEncoder>>( AllocDiscriminant::Alloc.encode(encoder); alloc.encode(encoder); } - GlobalAlloc::Function { instance, unique } => { + GlobalAlloc::Function { instance } => { trace!("encoding {:?} with {:#?}", alloc_id, instance); AllocDiscriminant::Fn.encode(encoder); instance.encode(encoder); - unique.encode(encoder); } GlobalAlloc::VTable(ty, poly_trait_ref) => { trace!("encoding {:?} with {ty:#?}, {poly_trait_ref:#?}", alloc_id); @@ -219,38 +217,32 @@ impl<'s> AllocDecodingSession<'s> { } // Now decode the actual data. 
- let alloc_id = decoder.with_position(pos, |decoder| { - match alloc_kind { - AllocDiscriminant::Alloc => { - trace!("creating memory alloc ID"); - let alloc = as Decodable<_>>::decode(decoder); - trace!("decoded alloc {:?}", alloc); - decoder.interner().reserve_and_set_memory_alloc(alloc) - } - AllocDiscriminant::Fn => { - trace!("creating fn alloc ID"); - let instance = ty::Instance::decode(decoder); - trace!("decoded fn alloc instance: {:?}", instance); - let unique = bool::decode(decoder); - // Here we cannot call `reserve_and_set_fn_alloc` as that would use a query, which - // is not possible in this context. That's why the allocation stores - // whether it is unique or not. - decoder.interner().reserve_and_set_fn_alloc_internal(instance, unique) - } - AllocDiscriminant::VTable => { - trace!("creating vtable alloc ID"); - let ty = as Decodable>::decode(decoder); - let poly_trait_ref = - > as Decodable>::decode(decoder); - trace!("decoded vtable alloc instance: {ty:?}, {poly_trait_ref:?}"); - decoder.interner().reserve_and_set_vtable_alloc(ty, poly_trait_ref) - } - AllocDiscriminant::Static => { - trace!("creating extern static alloc ID"); - let did = >::decode(decoder); - trace!("decoded static def-ID: {:?}", did); - decoder.interner().reserve_and_set_static_alloc(did) - } + let alloc_id = decoder.with_position(pos, |decoder| match alloc_kind { + AllocDiscriminant::Alloc => { + trace!("creating memory alloc ID"); + let alloc = as Decodable<_>>::decode(decoder); + trace!("decoded alloc {:?}", alloc); + decoder.interner().reserve_and_set_memory_alloc(alloc) + } + AllocDiscriminant::Fn => { + trace!("creating fn alloc ID"); + let instance = ty::Instance::decode(decoder); + trace!("decoded fn alloc instance: {:?}", instance); + decoder.interner().reserve_and_set_fn_alloc(instance, CTFE_ALLOC_SALT) + } + AllocDiscriminant::VTable => { + trace!("creating vtable alloc ID"); + let ty = as Decodable>::decode(decoder); + let poly_trait_ref = + > as Decodable>::decode(decoder); + trace!("decoded vtable alloc instance: {ty:?}, {poly_trait_ref:?}"); + decoder.interner().reserve_and_set_vtable_alloc(ty, poly_trait_ref, CTFE_ALLOC_SALT) + } + AllocDiscriminant::Static => { + trace!("creating extern static alloc ID"); + let did = >::decode(decoder); + trace!("decoded static def-ID: {:?}", did); + decoder.interner().reserve_and_set_static_alloc(did) } }); @@ -265,12 +257,7 @@ impl<'s> AllocDecodingSession<'s> { #[derive(Debug, Clone, Eq, PartialEq, Hash, TyDecodable, TyEncodable, HashStable)] pub enum GlobalAlloc<'tcx> { /// The alloc ID is used as a function pointer. - Function { - instance: Instance<'tcx>, - /// Stores whether this instance is unique, i.e. all pointers to this function use the same - /// alloc ID. - unique: bool, - }, + Function { instance: Instance<'tcx> }, /// This alloc ID points to a symbolic (not-reified) vtable. VTable(Ty<'tcx>, Option>), /// The alloc ID points to a "lazy" static variable that did not get computed (yet). @@ -323,14 +310,17 @@ impl<'tcx> GlobalAlloc<'tcx> { } } +pub const CTFE_ALLOC_SALT: usize = 0; + pub(crate) struct AllocMap<'tcx> { /// Maps `AllocId`s to their corresponding allocations. alloc_map: FxHashMap>, - /// Used to ensure that statics and functions only get one associated `AllocId`. - // - // FIXME: Should we just have two separate dedup maps for statics and functions each? - dedup: FxHashMap, AllocId>, + /// Used to deduplicate global allocations: functions, vtables, string literals, ... 
+ /// + /// The `usize` is a "salt" used by Miri to make deduplication imperfect, thus better emulating + /// the actual guarantees. + dedup: FxHashMap<(GlobalAlloc<'tcx>, usize), AllocId>, /// The `AllocId` to assign to the next requested ID. /// Always incremented; never gets smaller. @@ -368,74 +358,40 @@ impl<'tcx> TyCtxt<'tcx> { /// Reserves a new ID *if* this allocation has not been dedup-reserved before. /// Should not be used for mutable memory. - fn reserve_and_set_dedup(self, alloc: GlobalAlloc<'tcx>) -> AllocId { + fn reserve_and_set_dedup(self, alloc: GlobalAlloc<'tcx>, salt: usize) -> AllocId { let mut alloc_map = self.alloc_map.lock(); if let GlobalAlloc::Memory(mem) = alloc { if mem.inner().mutability.is_mut() { bug!("trying to dedup-reserve mutable memory"); } } - if let Some(&alloc_id) = alloc_map.dedup.get(&alloc) { + let alloc_salt = (alloc, salt); + if let Some(&alloc_id) = alloc_map.dedup.get(&alloc_salt) { return alloc_id; } let id = alloc_map.reserve(); - debug!("creating alloc {alloc:?} with id {id:?}"); - alloc_map.alloc_map.insert(id, alloc.clone()); - alloc_map.dedup.insert(alloc, id); + debug!("creating alloc {:?} with id {id:?}", alloc_salt.0); + alloc_map.alloc_map.insert(id, alloc_salt.0.clone()); + alloc_map.dedup.insert(alloc_salt, id); id } /// Generates an `AllocId` for a memory allocation. If the exact same memory has been /// allocated before, this will return the same `AllocId`. - pub fn reserve_and_set_memory_dedup(self, mem: ConstAllocation<'tcx>) -> AllocId { - self.reserve_and_set_dedup(GlobalAlloc::Memory(mem)) + pub fn reserve_and_set_memory_dedup(self, mem: ConstAllocation<'tcx>, salt: usize) -> AllocId { + self.reserve_and_set_dedup(GlobalAlloc::Memory(mem), salt) } /// Generates an `AllocId` for a static or return a cached one in case this function has been /// called on the same static before. pub fn reserve_and_set_static_alloc(self, static_id: DefId) -> AllocId { - self.reserve_and_set_dedup(GlobalAlloc::Static(static_id)) - } - - /// Generates an `AllocId` for a function. The caller must already have decided whether this - /// function obtains a unique AllocId or gets de-duplicated via the cache. - fn reserve_and_set_fn_alloc_internal(self, instance: Instance<'tcx>, unique: bool) -> AllocId { - let alloc = GlobalAlloc::Function { instance, unique }; - if unique { - // Deduplicate. - self.reserve_and_set_dedup(alloc) - } else { - // Get a fresh ID. - let mut alloc_map = self.alloc_map.lock(); - let id = alloc_map.reserve(); - alloc_map.alloc_map.insert(id, alloc); - id - } + let salt = 0; // Statics have a guaranteed unique address, no salt added. + self.reserve_and_set_dedup(GlobalAlloc::Static(static_id), salt) } - /// Generates an `AllocId` for a function. Depending on the function type, - /// this might get deduplicated or assigned a new ID each time. - pub fn reserve_and_set_fn_alloc(self, instance: Instance<'tcx>) -> AllocId { - // Functions cannot be identified by pointers, as asm-equal functions can get deduplicated - // by the linker (we set the "unnamed_addr" attribute for LLVM) and functions can be - // duplicated across crates. We thus generate a new `AllocId` for every mention of a - // function. This means that `main as fn() == main as fn()` is false, while `let x = main as - // fn(); x == x` is true. However, as a quality-of-life feature it can be useful to identify - // certain functions uniquely, e.g. for backtraces. So we identify whether codegen will - // actually emit duplicate functions. 
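// ---------------------------------------------------------------------------
// Editor's illustrative sketch -- not part of this patch. The comment being
// removed around here describes the user-visible behavior that the new
// salt-based deduplication has to keep intact: function pointers are not
// guaranteed a unique address, so the first comparison below may evaluate to
// `false`, while the second one always holds.
// ---------------------------------------------------------------------------
fn main() {
    let x = main as fn();
    println!("{}", (main as fn()) == (main as fn())); // may print `false`
    println!("{}", x == x); // always prints `true`
}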
It does that when they have non-lifetime generics, or - // when they can be inlined. All other functions are given a unique address. - // This is not a stable guarantee! The `inline` attribute is a hint and cannot be relied - // upon for anything. But if we don't do this, backtraces look terrible. - let is_generic = instance - .args - .into_iter() - .any(|kind| !matches!(kind.unpack(), GenericArgKind::Lifetime(_))); - let can_be_inlined = match self.codegen_fn_attrs(instance.def_id()).inline { - InlineAttr::Never => false, - _ => true, - }; - let unique = !is_generic && !can_be_inlined; - self.reserve_and_set_fn_alloc_internal(instance, unique) + /// Generates an `AllocId` for a function. Will get deduplicated. + pub fn reserve_and_set_fn_alloc(self, instance: Instance<'tcx>, salt: usize) -> AllocId { + self.reserve_and_set_dedup(GlobalAlloc::Function { instance }, salt) } /// Generates an `AllocId` for a (symbolic, not-reified) vtable. Will get deduplicated. @@ -443,8 +399,9 @@ impl<'tcx> TyCtxt<'tcx> { self, ty: Ty<'tcx>, poly_trait_ref: Option>, + salt: usize, ) -> AllocId { - self.reserve_and_set_dedup(GlobalAlloc::VTable(ty, poly_trait_ref)) + self.reserve_and_set_dedup(GlobalAlloc::VTable(ty, poly_trait_ref), salt) } /// Interns the `Allocation` and return a new `AllocId`, even if there's already an identical diff --git a/compiler/rustc_middle/src/mir/pretty.rs b/compiler/rustc_middle/src/mir/pretty.rs index f2d8781413096..5dd0e69cf1fe6 100644 --- a/compiler/rustc_middle/src/mir/pretty.rs +++ b/compiler/rustc_middle/src/mir/pretty.rs @@ -1418,21 +1418,19 @@ pub fn write_allocations<'tcx>( alloc.inner().provenance().ptrs().values().map(|p| p.alloc_id()) } - fn alloc_ids_from_const_val(val: ConstValue<'_>) -> impl Iterator + '_ { + fn alloc_id_from_const_val(val: ConstValue<'_>) -> Option { match val { - ConstValue::Scalar(interpret::Scalar::Ptr(ptr, _)) => { - Either::Left(std::iter::once(ptr.provenance.alloc_id())) - } - ConstValue::Scalar(interpret::Scalar::Int { .. }) => Either::Right(std::iter::empty()), - ConstValue::ZeroSized => Either::Right(std::iter::empty()), + ConstValue::Scalar(interpret::Scalar::Ptr(ptr, _)) => Some(ptr.provenance.alloc_id()), + ConstValue::Scalar(interpret::Scalar::Int { .. }) => None, + ConstValue::ZeroSized => None, ConstValue::Slice { .. } => { // `u8`/`str` slices, shouldn't contain pointers that we want to print. - Either::Right(std::iter::empty()) + None } ConstValue::Indirect { alloc_id, .. } => { // FIXME: we don't actually want to print all of these, since some are printed nicely directly as values inline in MIR. // Really we'd want `pretty_print_const_value` to decide which allocations to print, instead of having a separate visitor. - Either::Left(std::iter::once(alloc_id)) + Some(alloc_id) } } } @@ -1443,7 +1441,9 @@ pub fn write_allocations<'tcx>( match c.const_ { Const::Ty(_, _) | Const::Unevaluated(..) 
=> {} Const::Val(val, _) => { - self.0.extend(alloc_ids_from_const_val(val)); + if let Some(id) = alloc_id_from_const_val(val) { + self.0.insert(id); + } } } } diff --git a/compiler/rustc_middle/src/query/mod.rs b/compiler/rustc_middle/src/query/mod.rs index 075eae029041e..92c8d265c09ec 100644 --- a/compiler/rustc_middle/src/query/mod.rs +++ b/compiler/rustc_middle/src/query/mod.rs @@ -65,10 +65,9 @@ use crate::query::plumbing::{ }; use crate::traits::query::{ CanonicalAliasGoal, CanonicalPredicateGoal, CanonicalTyGoal, - CanonicalTypeOpAscribeUserTypeGoal, CanonicalTypeOpEqGoal, CanonicalTypeOpNormalizeGoal, - CanonicalTypeOpProvePredicateGoal, CanonicalTypeOpSubtypeGoal, DropckConstraint, - DropckOutlivesResult, MethodAutoderefStepsResult, NoSolution, NormalizationResult, - OutlivesBound, + CanonicalTypeOpAscribeUserTypeGoal, CanonicalTypeOpNormalizeGoal, + CanonicalTypeOpProvePredicateGoal, DropckConstraint, DropckOutlivesResult, + MethodAutoderefStepsResult, NoSolution, NormalizationResult, OutlivesBound, }; use crate::traits::{ specialization_graph, CodegenObligationError, EvaluationResult, ImplSource, @@ -2090,26 +2089,6 @@ rustc_queries! { desc { "evaluating `type_op_ascribe_user_type` `{:?}`", goal.value.value } } - /// Do not call this query directly: part of the `Eq` type-op - query type_op_eq( - goal: CanonicalTypeOpEqGoal<'tcx> - ) -> Result< - &'tcx Canonical<'tcx, canonical::QueryResponse<'tcx, ()>>, - NoSolution, - > { - desc { "evaluating `type_op_eq` `{:?}`", goal.value.value } - } - - /// Do not call this query directly: part of the `Subtype` type-op - query type_op_subtype( - goal: CanonicalTypeOpSubtypeGoal<'tcx> - ) -> Result< - &'tcx Canonical<'tcx, canonical::QueryResponse<'tcx, ()>>, - NoSolution, - > { - desc { "evaluating `type_op_subtype` `{:?}`", goal.value.value } - } - /// Do not call this query directly: part of the `ProvePredicate` type-op query type_op_prove_predicate( goal: CanonicalTypeOpProvePredicateGoal<'tcx> diff --git a/compiler/rustc_middle/src/ty/consts.rs b/compiler/rustc_middle/src/ty/consts.rs index c380019e63f47..e373292741b90 100644 --- a/compiler/rustc_middle/src/ty/consts.rs +++ b/compiler/rustc_middle/src/ty/consts.rs @@ -305,6 +305,10 @@ impl<'tcx> Const<'tcx> { // mir. 
match tcx.at(expr.span).lit_to_const(lit_input) { Ok(c) => return Some(c), + Err(_) if lit_input.ty.has_aliases() => { + // allow the `ty` to be an alias type, though we cannot handle it here + return None; + } Err(e) => { tcx.dcx().span_delayed_bug( expr.span, diff --git a/compiler/rustc_middle/src/ty/consts/kind.rs b/compiler/rustc_middle/src/ty/consts/kind.rs index 7f096dd36f8f2..c7c2e8afa1e71 100644 --- a/compiler/rustc_middle/src/ty/consts/kind.rs +++ b/compiler/rustc_middle/src/ty/consts/kind.rs @@ -1,3 +1,5 @@ +use std::assert_matches::assert_matches; + use rustc_macros::{extension, HashStable, TyDecodable, TyEncodable, TypeFoldable, TypeVisitable}; use super::Const; @@ -80,7 +82,7 @@ impl<'tcx> Expr<'tcx> { } pub fn binop_args(self) -> (Ty<'tcx>, Ty<'tcx>, Const<'tcx>, Const<'tcx>) { - assert!(matches!(self.kind, ExprKind::Binop(_))); + assert_matches!(self.kind, ExprKind::Binop(_)); match self.args().as_slice() { [lhs_ty, rhs_ty, lhs_ct, rhs_ct] => ( @@ -101,7 +103,7 @@ impl<'tcx> Expr<'tcx> { } pub fn unop_args(self) -> (Ty<'tcx>, Const<'tcx>) { - assert!(matches!(self.kind, ExprKind::UnOp(_))); + assert_matches!(self.kind, ExprKind::UnOp(_)); match self.args().as_slice() { [ty, ct] => (ty.expect_ty(), ct.expect_const()), @@ -125,7 +127,7 @@ impl<'tcx> Expr<'tcx> { } pub fn call_args(self) -> (Ty<'tcx>, Const<'tcx>, impl Iterator>) { - assert!(matches!(self.kind, ExprKind::FunctionCall)); + assert_matches!(self.kind, ExprKind::FunctionCall); match self.args().as_slice() { [func_ty, func, rest @ ..] => ( @@ -152,7 +154,7 @@ impl<'tcx> Expr<'tcx> { } pub fn cast_args(self) -> (Ty<'tcx>, Const<'tcx>, Ty<'tcx>) { - assert!(matches!(self.kind, ExprKind::Cast(_))); + assert_matches!(self.kind, ExprKind::Cast(_)); match self.args().as_slice() { [value_ty, value, to_ty] => { diff --git a/compiler/rustc_middle/src/ty/context.rs b/compiler/rustc_middle/src/ty/context.rs index 8f8fd09c9e4d9..9b39b84970420 100644 --- a/compiler/rustc_middle/src/ty/context.rs +++ b/compiler/rustc_middle/src/ty/context.rs @@ -107,8 +107,6 @@ impl<'tcx> Interner for TyCtxt<'tcx> { self.mk_predefined_opaques_in_body(data) } type DefiningOpaqueTypes = &'tcx ty::List; - type CanonicalGoalEvaluationStepRef = - &'tcx solve::inspect::CanonicalGoalEvaluationStep>; type CanonicalVars = CanonicalVarInfos<'tcx>; fn mk_canonical_var_infos(self, infos: &[ty::CanonicalVarInfo]) -> Self::CanonicalVars { self.mk_canonical_var_infos(infos) @@ -277,13 +275,6 @@ impl<'tcx> Interner for TyCtxt<'tcx> { self.debug_assert_args_compatible(def_id, args); } - fn intern_canonical_goal_evaluation_step( - self, - step: solve::inspect::CanonicalGoalEvaluationStep>, - ) -> &'tcx solve::inspect::CanonicalGoalEvaluationStep> { - self.arena.alloc(step) - } - fn mk_type_list_from_iter(self, args: I) -> T::Output where I: Iterator, @@ -427,7 +418,7 @@ impl<'tcx> Interner for TyCtxt<'tcx> { | ty::RawPtr(_, _) | ty::Ref(_, _, _) | ty::FnDef(_, _) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::Dynamic(_, _, _) | ty::Closure(..) | ty::CoroutineClosure(..) @@ -1438,11 +1429,11 @@ impl<'tcx> TyCtxt<'tcx> { /// Allocates a read-only byte or string literal for `mir::interpret`. /// Returns the same `AllocId` if called again with the same bytes. - pub fn allocate_bytes_dedup(self, bytes: &[u8]) -> interpret::AllocId { + pub fn allocate_bytes_dedup(self, bytes: &[u8], salt: usize) -> interpret::AllocId { // Create an allocation that just contains these bytes. 
let alloc = interpret::Allocation::from_bytes_byte_aligned_immutable(bytes); let alloc = self.mk_const_alloc(alloc); - self.reserve_and_set_memory_dedup(alloc) + self.reserve_and_set_memory_dedup(alloc, salt) } /// Returns a range of the start/end indices specified with the @@ -3176,6 +3167,12 @@ impl<'tcx> TyCtxt<'tcx> { pub fn impl_polarity(self, def_id: impl IntoQueryParam) -> ty::ImplPolarity { self.impl_trait_header(def_id).map_or(ty::ImplPolarity::Positive, |h| h.polarity) } + + /// Whether this is a trait implementation that has `#[diagnostic::do_not_recommend]` + pub fn do_not_recommend_impl(self, def_id: DefId) -> bool { + matches!(self.def_kind(def_id), DefKind::Impl { of_trait: true }) + && self.impl_trait_header(def_id).is_some_and(|header| header.do_not_recommend) + } } /// Parameter attributes that can only be determined by examining the body of a function instead diff --git a/compiler/rustc_middle/src/ty/diagnostics.rs b/compiler/rustc_middle/src/ty/diagnostics.rs index 5acc0b7ac7ff1..c14dadc68c903 100644 --- a/compiler/rustc_middle/src/ty/diagnostics.rs +++ b/compiler/rustc_middle/src/ty/diagnostics.rs @@ -6,10 +6,9 @@ use std::ops::ControlFlow; use rustc_data_structures::fx::FxHashMap; use rustc_errors::{into_diag_arg_using_display, Applicability, Diag, DiagArgValue, IntoDiagArg}; -use rustc_hir as hir; use rustc_hir::def::DefKind; use rustc_hir::def_id::DefId; -use rustc_hir::{PredicateOrigin, WherePredicate}; +use rustc_hir::{self as hir, LangItem, PredicateOrigin, WherePredicate}; use rustc_span::{BytePos, Span}; use rustc_type_ir::TyKind::*; @@ -290,8 +289,9 @@ pub fn suggest_constraining_type_params<'a>( let Some(param) = param else { return false }; { - let mut sized_constraints = - constraints.extract_if(|(_, def_id)| *def_id == tcx.lang_items().sized_trait()); + let mut sized_constraints = constraints.extract_if(|(_, def_id)| { + def_id.is_some_and(|def_id| tcx.is_lang_item(def_id, LangItem::Sized)) + }); if let Some((_, def_id)) = sized_constraints.next() { applicability = Applicability::MaybeIncorrect; diff --git a/compiler/rustc_middle/src/ty/error.rs b/compiler/rustc_middle/src/ty/error.rs index 2f9bdb16bb0ab..d974a86a3036f 100644 --- a/compiler/rustc_middle/src/ty/error.rs +++ b/compiler/rustc_middle/src/ty/error.rs @@ -130,7 +130,7 @@ impl<'tcx> Ty<'tcx> { DefKind::Ctor(CtorOf::Variant, _) => "enum constructor".into(), _ => "fn item".into(), }, - ty::FnPtr(_) => "fn pointer".into(), + ty::FnPtr(..) => "fn pointer".into(), ty::Dynamic(inner, ..) if let Some(principal) = inner.principal() => { format!("`dyn {}`", tcx.def_path_str(principal.def_id())).into() } @@ -194,7 +194,7 @@ impl<'tcx> Ty<'tcx> { DefKind::Ctor(CtorOf::Variant, _) => "enum constructor".into(), _ => "fn item".into(), }, - ty::FnPtr(_) => "fn pointer".into(), + ty::FnPtr(..) => "fn pointer".into(), ty::Dynamic(..) => "trait object".into(), ty::Closure(..) | ty::CoroutineClosure(..) => "closure".into(), ty::Coroutine(def_id, ..) 
=> { diff --git a/compiler/rustc_middle/src/ty/flags.rs b/compiler/rustc_middle/src/ty/flags.rs index c3430b5840692..fc079592583e6 100644 --- a/compiler/rustc_middle/src/ty/flags.rs +++ b/compiler/rustc_middle/src/ty/flags.rs @@ -250,9 +250,8 @@ impl FlagComputation { self.add_args(args); } - &ty::FnPtr(fn_sig) => self.bound_computation(fn_sig, |computation, fn_sig| { - computation.add_tys(fn_sig.inputs()); - computation.add_ty(fn_sig.output()); + &ty::FnPtr(sig_tys, _) => self.bound_computation(sig_tys, |computation, sig_tys| { + computation.add_tys(sig_tys.inputs_and_output); }), } } diff --git a/compiler/rustc_middle/src/ty/instance.rs b/compiler/rustc_middle/src/ty/instance.rs index 0496c571f5e0a..6f19739de45ff 100644 --- a/compiler/rustc_middle/src/ty/instance.rs +++ b/compiler/rustc_middle/src/ty/instance.rs @@ -838,7 +838,7 @@ impl<'tcx> Instance<'tcx> { return None; }; - if tcx.lang_items().get(coroutine_callable_item) == Some(trait_item_id) { + if tcx.is_lang_item(trait_item_id, coroutine_callable_item) { let ty::Coroutine(_, id_args) = *tcx.type_of(coroutine_def_id).skip_binder().kind() else { bug!() diff --git a/compiler/rustc_middle/src/ty/layout.rs b/compiler/rustc_middle/src/ty/layout.rs index 9204405d58f11..619981bf021e4 100644 --- a/compiler/rustc_middle/src/ty/layout.rs +++ b/compiler/rustc_middle/src/ty/layout.rs @@ -362,7 +362,7 @@ impl<'tcx> SizeSkeleton<'tcx> { ty::Ref(_, pointee, _) | ty::RawPtr(pointee, _) => { let non_zero = !ty.is_unsafe_ptr(); - let tail = tcx.struct_tail_with_normalize( + let tail = tcx.struct_tail_raw( pointee, |ty| match tcx.try_normalize_erasing_regions(param_env, ty) { Ok(ty) => ty, @@ -801,7 +801,7 @@ where | ty::Int(_) | ty::Uint(_) | ty::Float(_) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::Never | ty::FnDef(..) | ty::CoroutineWitness(..) @@ -986,9 +986,11 @@ where safe: None, }) } - ty::FnPtr(fn_sig) if offset.bytes() == 0 => { - tcx.layout_of(param_env.and(Ty::new_fn_ptr(tcx, fn_sig))).ok().map(|layout| { - PointeeInfo { size: layout.size, align: layout.align.abi, safe: None } + ty::FnPtr(..) if offset.bytes() == 0 => { + tcx.layout_of(param_env.and(this.ty)).ok().map(|layout| PointeeInfo { + size: layout.size, + align: layout.align.abi, + safe: None, }) } ty::Ref(_, ty, mt) if offset.bytes() == 0 => { diff --git a/compiler/rustc_middle/src/ty/mod.rs b/compiler/rustc_middle/src/ty/mod.rs index 9736428e6f7c7..1e3b5800cbac6 100644 --- a/compiler/rustc_middle/src/ty/mod.rs +++ b/compiler/rustc_middle/src/ty/mod.rs @@ -262,6 +262,7 @@ pub struct ImplTraitHeader<'tcx> { pub trait_ref: ty::EarlyBinder<'tcx, ty::TraitRef<'tcx>>, pub polarity: ImplPolarity, pub safety: hir::Safety, + pub do_not_recommend: bool, } #[derive(Copy, Clone, PartialEq, Eq, Debug, TypeFoldable, TypeVisitable)] @@ -2149,6 +2150,6 @@ mod size_asserts { use super::*; // tidy-alphabetical-start static_assert_size!(PredicateKind<'_>, 32); - static_assert_size!(WithCachedTypeInfo>, 56); + static_assert_size!(WithCachedTypeInfo>, 48); // tidy-alphabetical-end } diff --git a/compiler/rustc_middle/src/ty/print/mod.rs b/compiler/rustc_middle/src/ty/print/mod.rs index 6cce79dfdc1cf..cc7467467603a 100644 --- a/compiler/rustc_middle/src/ty/print/mod.rs +++ b/compiler/rustc_middle/src/ty/print/mod.rs @@ -290,7 +290,7 @@ fn characteristic_def_id_of_type_cached<'a>( | ty::Int(_) | ty::Uint(_) | ty::Str - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::Alias(..) | ty::Placeholder(..) 
| ty::Param(_) diff --git a/compiler/rustc_middle/src/ty/print/pretty.rs b/compiler/rustc_middle/src/ty/print/pretty.rs index 29d72183dd3ff..56ddf146636c8 100644 --- a/compiler/rustc_middle/src/ty/print/pretty.rs +++ b/compiler/rustc_middle/src/ty/print/pretty.rs @@ -696,7 +696,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { p!(print(sig), " {{", print_value_path(def_id, args), "}}"); } } - ty::FnPtr(ref bare_fn) => p!(print(bare_fn)), + ty::FnPtr(ref sig_tys, hdr) => p!(print(sig_tys.with(hdr))), ty::Infer(infer_ty) => { if self.should_print_verbose() { p!(write("{:?}", ty.kind())); @@ -1145,7 +1145,9 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { let term = if let Some(ty) = term.skip_binder().as_type() && let ty::Alias(ty::Projection, proj) = ty.kind() && let Some(assoc) = tcx.opt_associated_item(proj.def_id) - && assoc.trait_container(tcx) == tcx.lang_items().coroutine_trait() + && assoc + .trait_container(tcx) + .is_some_and(|def_id| tcx.is_lang_item(def_id, LangItem::Coroutine)) && assoc.name == rustc_span::sym::Return { if let ty::Coroutine(_, args) = args.type_at(0).kind() { @@ -1678,7 +1680,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { } } } - ty::FnPtr(_) => { + ty::FnPtr(..) => { // FIXME: We should probably have a helper method to share code with the "Byte strings" // printing above (which also has to handle pointers to all sorts of things). if let Some(GlobalAlloc::Function { instance, .. }) = @@ -1741,7 +1743,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write { p!(write("{:?}", char::try_from(int).unwrap())) } // Pointer types - ty::Ref(..) | ty::RawPtr(_, _) | ty::FnPtr(_) => { + ty::Ref(..) | ty::RawPtr(_, _) | ty::FnPtr(..) => { let data = int.to_bits(self.tcx().data_layout.pointer_size); self.typed_value( |this| { diff --git a/compiler/rustc_middle/src/ty/structural_impls.rs b/compiler/rustc_middle/src/ty/structural_impls.rs index 8fb44a5f0b1b1..80b33c2cda929 100644 --- a/compiler/rustc_middle/src/ty/structural_impls.rs +++ b/compiler/rustc_middle/src/ty/structural_impls.rs @@ -374,7 +374,7 @@ impl<'tcx> TypeSuperFoldable> for Ty<'tcx> { ), ty::Tuple(ts) => ty::Tuple(ts.try_fold_with(folder)?), ty::FnDef(def_id, args) => ty::FnDef(def_id, args.try_fold_with(folder)?), - ty::FnPtr(f) => ty::FnPtr(f.try_fold_with(folder)?), + ty::FnPtr(sig_tys, hdr) => ty::FnPtr(sig_tys.try_fold_with(folder)?, hdr), ty::Ref(r, ty, mutbl) => { ty::Ref(r.try_fold_with(folder)?, ty.try_fold_with(folder)?, mutbl) } @@ -424,7 +424,7 @@ impl<'tcx> TypeSuperVisitable> for Ty<'tcx> { } ty::Tuple(ts) => ts.visit_with(visitor), ty::FnDef(_, args) => args.visit_with(visitor), - ty::FnPtr(ref f) => f.visit_with(visitor), + ty::FnPtr(ref sig_tys, _) => sig_tys.visit_with(visitor), ty::Ref(r, ty, _) => { try_visit!(r.visit_with(visitor)); ty.visit_with(visitor) diff --git a/compiler/rustc_middle/src/ty/sty.rs b/compiler/rustc_middle/src/ty/sty.rs index 8c97de1c59b26..d60bfb9faa1aa 100644 --- a/compiler/rustc_middle/src/ty/sty.rs +++ b/compiler/rustc_middle/src/ty/sty.rs @@ -658,7 +658,8 @@ impl<'tcx> Ty<'tcx> { #[inline] pub fn new_fn_ptr(tcx: TyCtxt<'tcx>, fty: PolyFnSig<'tcx>) -> Ty<'tcx> { - Ty::new(tcx, FnPtr(fty)) + let (sig_tys, hdr) = fty.split(); + Ty::new(tcx, FnPtr(sig_tys, hdr)) } #[inline] @@ -969,6 +970,10 @@ impl<'tcx> rustc_type_ir::inherent::Ty> for Ty<'tcx> { /// Type utilities impl<'tcx> Ty<'tcx> { + // It would be nicer if this returned the value instead of a reference, + // like how `Predicate::kind` and `Region::kind` 
do. (It would result in + // many fewer subsequent dereferences.) But that gives a small but + // noticeable performance hit. See #126069 for details. #[inline(always)] pub fn kind(self) -> &'tcx TyKind<'tcx> { self.0.0 @@ -1182,7 +1187,7 @@ impl<'tcx> Ty<'tcx> { | Float(_) | Uint(_) | FnDef(..) - | FnPtr(_) + | FnPtr(..) | RawPtr(_, _) | Infer(IntVar(_) | FloatVar(_)) ) @@ -1333,7 +1338,7 @@ impl<'tcx> Ty<'tcx> { pub fn fn_sig(self, tcx: TyCtxt<'tcx>) -> PolyFnSig<'tcx> { match self.kind() { FnDef(def_id, args) => tcx.fn_sig(*def_id).instantiate(tcx, args), - FnPtr(f) => *f, + FnPtr(sig_tys, hdr) => sig_tys.with(*hdr), Error(_) => { // ignore errors (#54954) Binder::dummy(ty::FnSig { @@ -1352,12 +1357,12 @@ impl<'tcx> Ty<'tcx> { #[inline] pub fn is_fn(self) -> bool { - matches!(self.kind(), FnDef(..) | FnPtr(_)) + matches!(self.kind(), FnDef(..) | FnPtr(..)) } #[inline] pub fn is_fn_ptr(self) -> bool { - matches!(self.kind(), FnPtr(_)) + matches!(self.kind(), FnPtr(..)) } #[inline] @@ -1590,7 +1595,7 @@ impl<'tcx> Ty<'tcx> { tcx: TyCtxt<'tcx>, normalize: impl FnMut(Ty<'tcx>) -> Ty<'tcx>, ) -> Result, Ty<'tcx>> { - let tail = tcx.struct_tail_with_normalize(self, normalize, || {}); + let tail = tcx.struct_tail_raw(self, normalize, || {}); match tail.kind() { // Sized types ty::Infer(ty::IntVar(_) | ty::FloatVar(_)) @@ -1599,7 +1604,7 @@ impl<'tcx> Ty<'tcx> { | ty::Bool | ty::Float(_) | ty::FnDef(..) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::RawPtr(..) | ty::Char | ty::Ref(..) @@ -1614,10 +1619,10 @@ impl<'tcx> Ty<'tcx> { | ty::Foreign(..) // `dyn*` has metadata = (). | ty::Dynamic(_, _, ty::DynStar) - // If returned by `struct_tail_with_normalize` this is a unit struct + // If returned by `struct_tail_raw` this is a unit struct // without any fields, or not a struct, and therefore is Sized. | ty::Adt(..) - // If returned by `struct_tail_with_normalize` this is the empty tuple, + // If returned by `struct_tail_raw` this is the empty tuple, // a.k.a. unit type, which is Sized | ty::Tuple(..) => Ok(tcx.types.unit), @@ -1791,7 +1796,7 @@ impl<'tcx> Ty<'tcx> { | ty::Bool | ty::Float(_) | ty::FnDef(..) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::RawPtr(..) | ty::Char | ty::Ref(..) @@ -1915,7 +1920,7 @@ impl<'tcx> Ty<'tcx> { pub fn is_c_void(self, tcx: TyCtxt<'_>) -> bool { match self.kind() { - ty::Adt(adt, _) => tcx.lang_items().get(LangItem::CVoid) == Some(adt.did()), + ty::Adt(adt, _) => tcx.is_lang_item(adt.did(), LangItem::CVoid), _ => false, } } @@ -1941,7 +1946,7 @@ impl<'tcx> Ty<'tcx> { | RawPtr(_, _) | Ref(_, _, _) | FnDef(_, _) - | FnPtr(_) + | FnPtr(..) 
| Dynamic(_, _, _) | Closure(_, _) | CoroutineClosure(_, _) @@ -1955,9 +1960,12 @@ impl<'tcx> Ty<'tcx> { } impl<'tcx> rustc_type_ir::inherent::Tys> for &'tcx ty::List> { - fn split_inputs_and_output(self) -> (&'tcx [Ty<'tcx>], Ty<'tcx>) { - let (output, inputs) = self.split_last().unwrap(); - (inputs, *output) + fn inputs(self) -> &'tcx [Ty<'tcx>] { + self.split_last().unwrap().1 + } + + fn output(self) -> Ty<'tcx> { + *self.split_last().unwrap().0 } } @@ -1969,6 +1977,6 @@ mod size_asserts { use super::*; // tidy-alphabetical-start static_assert_size!(ty::RegionKind<'_>, 24); - static_assert_size!(ty::TyKind<'_>, 32); + static_assert_size!(ty::TyKind<'_>, 24); // tidy-alphabetical-end } diff --git a/compiler/rustc_middle/src/ty/util.rs b/compiler/rustc_middle/src/ty/util.rs index 365f434a264e2..b9bf17cbb5ce6 100644 --- a/compiler/rustc_middle/src/ty/util.rs +++ b/compiler/rustc_middle/src/ty/util.rs @@ -171,14 +171,6 @@ impl<'tcx> TyCtxt<'tcx> { } } - /// Attempts to returns the deeply last field of nested structures, but - /// does not apply any normalization in its search. Returns the same type - /// if input `ty` is not a structure at all. - pub fn struct_tail_without_normalization(self, ty: Ty<'tcx>) -> Ty<'tcx> { - let tcx = self; - tcx.struct_tail_with_normalize(ty, |ty| ty, || {}) - } - /// Returns the deeply last field of nested structures, or the same type if /// not a structure at all. Corresponds to the only possible unsized field, /// and its type can be used to determine unsizing strategy. @@ -188,7 +180,7 @@ impl<'tcx> TyCtxt<'tcx> { /// normalization attempt may cause compiler bugs. pub fn struct_tail_for_codegen(self, ty: Ty<'tcx>, param_env: ty::ParamEnv<'tcx>) -> Ty<'tcx> { let tcx = self; - tcx.struct_tail_with_normalize(ty, |ty| tcx.normalize_erasing_regions(param_env, ty), || {}) + tcx.struct_tail_raw(ty, |ty| tcx.normalize_erasing_regions(param_env, ty), || {}) } /// Returns the deeply last field of nested structures, or the same type if @@ -196,12 +188,14 @@ impl<'tcx> TyCtxt<'tcx> { /// and its type can be used to determine unsizing strategy. /// /// This is parameterized over the normalization strategy (i.e. how to - /// handle `::Assoc` and `impl Trait`); pass the identity - /// function to indicate no normalization should take place. + /// handle `::Assoc` and `impl Trait`). You almost certainly do + /// **NOT** want to pass the identity function here, unless you know what + /// you're doing, or you're within normalization code itself and will handle + /// an unnormalized tail recursively. /// /// See also `struct_tail_for_codegen`, which is suitable for use /// during codegen. - pub fn struct_tail_with_normalize( + pub fn struct_tail_raw( self, mut ty: Ty<'tcx>, mut normalize: impl FnMut(Ty<'tcx>) -> Ty<'tcx>, @@ -281,7 +275,7 @@ impl<'tcx> TyCtxt<'tcx> { param_env: ty::ParamEnv<'tcx>, ) -> (Ty<'tcx>, Ty<'tcx>) { let tcx = self; - tcx.struct_lockstep_tails_with_normalize(source, target, |ty| { + tcx.struct_lockstep_tails_raw(source, target, |ty| { tcx.normalize_erasing_regions(param_env, ty) }) } @@ -294,7 +288,7 @@ impl<'tcx> TyCtxt<'tcx> { /// /// See also `struct_lockstep_tails_for_codegen`, which is suitable for use /// during codegen. - pub fn struct_lockstep_tails_with_normalize( + pub fn struct_lockstep_tails_raw( self, source: Ty<'tcx>, target: Ty<'tcx>, @@ -1278,7 +1272,7 @@ impl<'tcx> Ty<'tcx> { | ty::RawPtr(_, _) | ty::FnDef(..) | ty::Error(_) - | ty::FnPtr(_) => true, + | ty::FnPtr(..) 
=> true, ty::Tuple(fields) => fields.iter().all(Self::is_trivially_freeze), ty::Pat(ty, _) | ty::Slice(ty) | ty::Array(ty, _) => ty.is_trivially_freeze(), ty::Adt(..) @@ -1318,7 +1312,7 @@ impl<'tcx> Ty<'tcx> { | ty::RawPtr(_, _) | ty::FnDef(..) | ty::Error(_) - | ty::FnPtr(_) => true, + | ty::FnPtr(..) => true, ty::Tuple(fields) => fields.iter().all(Self::is_trivially_unpin), ty::Pat(ty, _) | ty::Slice(ty) | ty::Array(ty, _) => ty.is_trivially_unpin(), ty::Adt(..) @@ -1357,7 +1351,7 @@ impl<'tcx> Ty<'tcx> { | ty::Ref(..) | ty::RawPtr(..) | ty::FnDef(..) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::Infer(ty::FreshIntTy(_)) | ty::Infer(ty::FreshFloatTy(_)) => AsyncDropGlueMorphology::Noop, @@ -1540,7 +1534,7 @@ impl<'tcx> Ty<'tcx> { ty::Pat(..) | ty::Ref(..) | ty::Array(..) | ty::Slice(_) | ty::Tuple(..) => true, // Raw pointers use bitwise comparison. - ty::RawPtr(_, _) | ty::FnPtr(_) => true, + ty::RawPtr(_, _) | ty::FnPtr(..) => true, // Floating point numbers are not `Eq`. ty::Float(_) => false, @@ -1671,7 +1665,7 @@ pub fn needs_drop_components_with_async<'tcx>( | ty::Float(_) | ty::Never | ty::FnDef(..) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::Char | ty::RawPtr(_, _) | ty::Ref(..) @@ -1738,7 +1732,7 @@ pub fn is_trivially_const_drop(ty: Ty<'_>) -> bool { | ty::RawPtr(_, _) | ty::Ref(..) | ty::FnDef(..) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::Never | ty::Foreign(_) => true, diff --git a/compiler/rustc_middle/src/ty/vtable.rs b/compiler/rustc_middle/src/ty/vtable.rs index f38f27b84f0b9..951112dfe858e 100644 --- a/compiler/rustc_middle/src/ty/vtable.rs +++ b/compiler/rustc_middle/src/ty/vtable.rs @@ -3,7 +3,7 @@ use std::fmt; use rustc_ast::Mutability; use rustc_macros::HashStable; -use crate::mir::interpret::{alloc_range, AllocId, Allocation, Pointer, Scalar}; +use crate::mir::interpret::{alloc_range, AllocId, Allocation, Pointer, Scalar, CTFE_ALLOC_SALT}; use crate::ty::{self, Instance, PolyTraitRef, Ty, TyCtxt}; #[derive(Clone, Copy, PartialEq, HashStable)] @@ -73,6 +73,11 @@ pub(crate) fn vtable_min_entries<'tcx>( /// Retrieves an allocation that represents the contents of a vtable. /// Since this is a query, allocations are cached and not duplicated. +/// +/// This is an "internal" `AllocId` that should never be used as a value in the interpreted program. +/// The interpreter should use `AllocId` that refer to a `GlobalAlloc::VTable` instead. +/// (This is similar to statics, which also have a similar "internal" `AllocId` storing their +/// initial contents.) pub(super) fn vtable_allocation_provider<'tcx>( tcx: TyCtxt<'tcx>, key: (Ty<'tcx>, Option>), @@ -114,7 +119,7 @@ pub(super) fn vtable_allocation_provider<'tcx>( VtblEntry::MetadataDropInPlace => { if ty.needs_drop(tcx, ty::ParamEnv::reveal_all()) { let instance = ty::Instance::resolve_drop_in_place(tcx, ty); - let fn_alloc_id = tcx.reserve_and_set_fn_alloc(instance); + let fn_alloc_id = tcx.reserve_and_set_fn_alloc(instance, CTFE_ALLOC_SALT); let fn_ptr = Pointer::from(fn_alloc_id); Scalar::from_pointer(fn_ptr, &tcx) } else { @@ -127,7 +132,7 @@ pub(super) fn vtable_allocation_provider<'tcx>( VtblEntry::Method(instance) => { // Prepare the fn ptr we write into the vtable. 
let instance = instance.polymorphize(tcx); - let fn_alloc_id = tcx.reserve_and_set_fn_alloc(instance); + let fn_alloc_id = tcx.reserve_and_set_fn_alloc(instance, CTFE_ALLOC_SALT); let fn_ptr = Pointer::from(fn_alloc_id); Scalar::from_pointer(fn_ptr, &tcx) } diff --git a/compiler/rustc_middle/src/ty/walk.rs b/compiler/rustc_middle/src/ty/walk.rs index 2dd7a96f19268..abd6df17514bb 100644 --- a/compiler/rustc_middle/src/ty/walk.rs +++ b/compiler/rustc_middle/src/ty/walk.rs @@ -189,9 +189,10 @@ fn push_inner<'tcx>(stack: &mut TypeWalkerStack<'tcx>, parent: GenericArg<'tcx>) stack.extend(args.iter().rev()); } ty::Tuple(ts) => stack.extend(ts.iter().rev().map(GenericArg::from)), - ty::FnPtr(sig) => { - stack.push(sig.skip_binder().output().into()); - stack.extend(sig.skip_binder().inputs().iter().copied().rev().map(|ty| ty.into())); + ty::FnPtr(sig_tys, _hdr) => { + stack.extend( + sig_tys.skip_binder().inputs_and_output.iter().rev().map(|ty| ty.into()), + ); } }, GenericArgKind::Lifetime(_) => {} diff --git a/compiler/rustc_mir_build/messages.ftl b/compiler/rustc_mir_build/messages.ftl index dda4debecec67..7baf0256dd890 100644 --- a/compiler/rustc_mir_build/messages.ftl +++ b/compiler/rustc_mir_build/messages.ftl @@ -30,7 +30,7 @@ mir_build_call_to_deprecated_safe_fn_requires_unsafe = call to deprecated safe function `{$function}` is unsafe and requires unsafe block .note = consult the function's documentation for information on how to avoid undefined behavior .label = call to unsafe function - .suggestion = you can wrap the call in an `unsafe` block if you can guarantee the code is only ever called from single-threaded code + .suggestion = you can wrap the call in an `unsafe` block if you can guarantee {$guarantee} mir_build_call_to_fn_with_requires_unsafe = call to function `{$function}` with `#[target_feature]` is unsafe and requires unsafe block diff --git a/compiler/rustc_mir_build/src/build/expr/as_constant.rs b/compiler/rustc_mir_build/src/build/expr/as_constant.rs index 10cf545f1b79d..4430aab73a819 100644 --- a/compiler/rustc_mir_build/src/build/expr/as_constant.rs +++ b/compiler/rustc_mir_build/src/build/expr/as_constant.rs @@ -2,7 +2,9 @@ use rustc_ast as ast; use rustc_hir::LangItem; -use rustc_middle::mir::interpret::{Allocation, LitToConstError, LitToConstInput, Scalar}; +use rustc_middle::mir::interpret::{ + Allocation, LitToConstError, LitToConstInput, Scalar, CTFE_ALLOC_SALT, +}; use rustc_middle::mir::*; use rustc_middle::thir::*; use rustc_middle::ty::{ @@ -140,7 +142,7 @@ fn lit_to_mir_constant<'tcx>( ConstValue::Slice { data: allocation, meta: allocation.inner().size().bytes() } } (ast::LitKind::ByteStr(data, _), ty::Ref(_, inner_ty, _)) if inner_ty.is_array() => { - let id = tcx.allocate_bytes_dedup(data); + let id = tcx.allocate_bytes_dedup(data, CTFE_ALLOC_SALT); ConstValue::Scalar(Scalar::from_pointer(id.into(), &tcx)) } (ast::LitKind::CStr(data, _), ty::Ref(_, inner_ty, _)) if matches!(inner_ty.kind(), ty::Adt(def, _) if tcx.is_lang_item(def.did(), LangItem::CStr)) => diff --git a/compiler/rustc_mir_build/src/check_unsafety.rs b/compiler/rustc_mir_build/src/check_unsafety.rs index 54a4204da71e8..9b85ad0ad0891 100644 --- a/compiler/rustc_mir_build/src/check_unsafety.rs +++ b/compiler/rustc_mir_build/src/check_unsafety.rs @@ -96,9 +96,32 @@ impl<'tcx> UnsafetyVisitor<'_, 'tcx> { // from an edition before 2024. 
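// ---------------------------------------------------------------------------
// Editor's illustrative sketch -- not part of this patch. The match arm that
// follows parses an internal, nightly-only attribute of roughly this shape:
// the `audit_that` string feeds both the `{$guarantee}` message and the
// inserted `// TODO: Audit that ...` suggestion line. The wording below is
// taken from the suggestion text removed elsewhere in this patch; the target
// function name is hypothetical, and whether the attribute is usable outside
// the standard library at all is an assumption here.
// ---------------------------------------------------------------------------
#![feature(rustc_attrs)] // the attribute below is internal to the compiler

#[rustc_deprecated_safe_2024(audit_that = "the environment access only happens in single-threaded code")]
pub unsafe fn set_var_example(_key: &str, _value: &str) {}

fn main() {
    // Calling `set_var_example` without an `unsafe` block on an edition
    // before 2024 would trigger the `DEPRECATED_SAFE_2024` lint handled above.
}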
&UnsafeOpKind::CallToUnsafeFunction(Some(id)) if !span.at_least_rust_2024() - && self.tcx.has_attr(id, sym::rustc_deprecated_safe_2024) => + && let Some(attr) = self.tcx.get_attr(id, sym::rustc_deprecated_safe_2024) => { + let suggestion = attr + .meta_item_list() + .unwrap_or_default() + .into_iter() + .find(|item| item.has_name(sym::audit_that)) + .map(|item| { + item.value_str().expect( + "`#[rustc_deprecated_safe_2024(audit_that)]` must have a string value", + ) + }); + let sm = self.tcx.sess.source_map(); + let guarantee = suggestion + .as_ref() + .map(|suggestion| format!("that {}", suggestion)) + .unwrap_or_else(|| String::from("its unsafe preconditions")); + let suggestion = suggestion + .and_then(|suggestion| { + sm.indentation_before(span).map(|indent| { + format!("{}// TODO: Audit that {}.\n", indent, suggestion) // ignore-tidy-todo + }) + }) + .unwrap_or_default(); + self.tcx.emit_node_span_lint( DEPRECATED_SAFE_2024, self.hir_context, @@ -106,8 +129,9 @@ impl<'tcx> UnsafetyVisitor<'_, 'tcx> { CallToDeprecatedSafeFnRequiresUnsafe { span, function: with_no_trimmed_paths!(self.tcx.def_path_str(id)), + guarantee, sub: CallToDeprecatedSafeFnRequiresUnsafeSub { - indent: sm.indentation_before(span).unwrap_or_default(), + start_of_line_suggestion: suggestion, start_of_line: sm.span_extend_to_line(span).shrink_to_lo(), left: span.shrink_to_lo(), right: span.shrink_to_hi(), diff --git a/compiler/rustc_mir_build/src/errors.rs b/compiler/rustc_mir_build/src/errors.rs index 42eca71ca3f30..34577f102d1c5 100644 --- a/compiler/rustc_mir_build/src/errors.rs +++ b/compiler/rustc_mir_build/src/errors.rs @@ -28,6 +28,7 @@ pub(crate) struct CallToDeprecatedSafeFnRequiresUnsafe { #[label] pub(crate) span: Span, pub(crate) function: String, + pub(crate) guarantee: String, #[subdiagnostic] pub(crate) sub: CallToDeprecatedSafeFnRequiresUnsafeSub, } @@ -35,10 +36,8 @@ pub(crate) struct CallToDeprecatedSafeFnRequiresUnsafe { #[derive(Subdiagnostic)] #[multipart_suggestion(mir_build_suggestion, applicability = "machine-applicable")] pub(crate) struct CallToDeprecatedSafeFnRequiresUnsafeSub { - pub(crate) indent: String, - #[suggestion_part( - code = "{indent}// TODO: Audit that the environment access only happens in single-threaded code.\n" // ignore-tidy-todo - )] + pub(crate) start_of_line_suggestion: String, + #[suggestion_part(code = "{start_of_line_suggestion}")] pub(crate) start_of_line: Span, #[suggestion_part(code = "unsafe {{ ")] pub(crate) left: Span, diff --git a/compiler/rustc_mir_build/src/thir/cx/mod.rs b/compiler/rustc_mir_build/src/thir/cx/mod.rs index 6120b1453cfab..5b5f97cb51410 100644 --- a/compiler/rustc_mir_build/src/thir/cx/mod.rs +++ b/compiler/rustc_mir_build/src/thir/cx/mod.rs @@ -8,7 +8,7 @@ use rustc_hir as hir; use rustc_hir::def::DefKind; use rustc_hir::def_id::{DefId, LocalDefId}; use rustc_hir::lang_items::LangItem; -use rustc_hir::{HirId, Node}; +use rustc_hir::HirId; use rustc_middle::bug; use rustc_middle::middle::region; use rustc_middle::thir::*; @@ -110,11 +110,7 @@ impl<'tcx> Cx<'tcx> { } #[instrument(level = "debug", skip(self))] - fn pattern_from_hir(&mut self, p: &hir::Pat<'_>) -> Box> { - let p = match self.tcx.hir_node(p.hir_id) { - Node::Pat(p) => p, - node => bug!("pattern became {:?}", node), - }; + fn pattern_from_hir(&mut self, p: &'tcx hir::Pat<'tcx>) -> Box> { pat_from_hir(self.tcx, self.param_env, self.typeck_results(), p) } diff --git a/compiler/rustc_mir_build/src/thir/pattern/check_match.rs b/compiler/rustc_mir_build/src/thir/pattern/check_match.rs 
index efaa75800fca4..85b9dacb1293c 100644 --- a/compiler/rustc_mir_build/src/thir/pattern/check_match.rs +++ b/compiler/rustc_mir_build/src/thir/pattern/check_match.rs @@ -483,9 +483,11 @@ impl<'p, 'tcx> MatchVisitor<'p, 'tcx> { // Check if the match is exhaustive. let witnesses = report.non_exhaustiveness_witnesses; if !witnesses.is_empty() { - if source == hir::MatchSource::ForLoopDesugar && arms.len() == 2 { + if source == hir::MatchSource::ForLoopDesugar + && let [_, snd_arm] = *arms + { // the for loop pattern is not irrefutable - let pat = &self.thir[arms[1]].pattern; + let pat = &self.thir[snd_arm].pattern; // `pat` should be `Some()` from a desugared for loop. debug_assert_eq!(pat.span.desugaring_kind(), Some(DesugaringKind::ForLoop)); let PatKind::Variant { ref subpatterns, .. } = pat.kind else { bug!() }; @@ -700,10 +702,12 @@ impl<'p, 'tcx> MatchVisitor<'p, 'tcx> { && adt.is_enum() && let Constructor::Variant(variant_index) = witness_1.ctor() { - let variant = adt.variant(*variant_index); - let inhabited = variant.inhabited_predicate(self.tcx, *adt).instantiate(self.tcx, args); - assert!(inhabited.apply(self.tcx, cx.param_env, cx.module)); - !inhabited.apply_ignore_module(self.tcx, cx.param_env) + let variant_inhabited = adt + .variant(*variant_index) + .inhabited_predicate(self.tcx, *adt) + .instantiate(self.tcx, args); + variant_inhabited.apply(self.tcx, cx.param_env, cx.module) + && !variant_inhabited.apply_ignore_module(self.tcx, cx.param_env) } else { false }; diff --git a/compiler/rustc_mir_dataflow/src/impls/initialized.rs b/compiler/rustc_mir_dataflow/src/impls/initialized.rs index e9e8ddefa020b..7822fb17f7298 100644 --- a/compiler/rustc_mir_dataflow/src/impls/initialized.rs +++ b/compiler/rustc_mir_dataflow/src/impls/initialized.rs @@ -1,3 +1,5 @@ +use std::assert_matches::assert_matches; + use rustc_index::bit_set::{BitSet, ChunkedBitSet}; use rustc_index::Idx; use rustc_middle::bug; @@ -496,7 +498,7 @@ impl<'tcx> GenKillAnalysis<'tcx> for MaybeUninitializedPlaces<'_, '_, 'tcx> { }); if self.skip_unreachable_unwind.contains(location.block) { let mir::TerminatorKind::Drop { target, unwind, .. } = terminator.kind else { bug!() }; - assert!(matches!(unwind, mir::UnwindAction::Cleanup(_))); + assert_matches!(unwind, mir::UnwindAction::Cleanup(_)); TerminatorEdges::Single(target) } else { terminator.edges() diff --git a/compiler/rustc_mir_dataflow/src/lib.rs b/compiler/rustc_mir_dataflow/src/lib.rs index b0808ba2067e3..8708bebeeb087 100644 --- a/compiler/rustc_mir_dataflow/src/lib.rs +++ b/compiler/rustc_mir_dataflow/src/lib.rs @@ -1,4 +1,5 @@ // tidy-alphabetical-start +#![feature(assert_matches)] #![feature(associated_type_defaults)] #![feature(box_patterns)] #![feature(exact_size_is_empty)] diff --git a/compiler/rustc_mir_dataflow/src/move_paths/builder.rs b/compiler/rustc_mir_dataflow/src/move_paths/builder.rs index c26a72e454382..86091379f5a92 100644 --- a/compiler/rustc_mir_dataflow/src/move_paths/builder.rs +++ b/compiler/rustc_mir_dataflow/src/move_paths/builder.rs @@ -158,7 +158,7 @@ impl<'b, 'a, 'tcx, F: Fn(Ty<'tcx>) -> bool> Gatherer<'b, 'a, 'tcx, F> { | ty::Pat(_, _) | ty::Slice(_) | ty::FnDef(_, _) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::Dynamic(_, _, _) | ty::Closure(..) | ty::CoroutineClosure(..) @@ -201,7 +201,7 @@ impl<'b, 'a, 'tcx, F: Fn(Ty<'tcx>) -> bool> Gatherer<'b, 'a, 'tcx, F> { | ty::RawPtr(_, _) | ty::Ref(_, _, _) | ty::FnDef(_, _) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::Dynamic(_, _, _) | ty::CoroutineWitness(..) 
| ty::Never diff --git a/compiler/rustc_mir_dataflow/src/value_analysis.rs b/compiler/rustc_mir_dataflow/src/value_analysis.rs index ca8a2777045f4..139fd592f69a3 100644 --- a/compiler/rustc_mir_dataflow/src/value_analysis.rs +++ b/compiler/rustc_mir_dataflow/src/value_analysis.rs @@ -32,6 +32,7 @@ //! Because of that, we can assume that the only way to change the value behind a tracked place is //! by direct assignment. +use std::assert_matches::assert_matches; use std::fmt::{Debug, Formatter}; use std::ops::Range; @@ -54,7 +55,7 @@ use crate::{Analysis, AnalysisDomain, JoinSemiLattice, SwitchIntEdgeEffects}; pub trait ValueAnalysis<'tcx> { /// For each place of interest, the analysis tracks a value of the given type. - type Value: Clone + JoinSemiLattice + HasBottom + HasTop; + type Value: Clone + JoinSemiLattice + HasBottom + HasTop + Debug; const NAME: &'static str; @@ -344,7 +345,7 @@ impl<'tcx, T: ValueAnalysis<'tcx>> AnalysisDomain<'tcx> for ValueAnalysisWrapper fn initialize_start_block(&self, body: &Body<'tcx>, state: &mut Self::Domain) { // The initial state maps all tracked places of argument projections to ⊤ and the rest to ⊥. - assert!(matches!(state, State::Unreachable)); + assert_matches!(state, State::Unreachable); *state = State::new_reachable(); for arg in body.args_iter() { state.flood(PlaceRef { local: arg, projection: &[] }, self.0.map()); diff --git a/compiler/rustc_mir_transform/src/abort_unwinding_calls.rs b/compiler/rustc_mir_transform/src/abort_unwinding_calls.rs index f52a4524d784b..edb6bc4fbea2f 100644 --- a/compiler/rustc_mir_transform/src/abort_unwinding_calls.rs +++ b/compiler/rustc_mir_transform/src/abort_unwinding_calls.rs @@ -64,7 +64,7 @@ impl<'tcx> MirPass<'tcx> for AbortUnwindingCalls { let ty = func.ty(body, tcx); let sig = ty.fn_sig(tcx); let fn_def_id = match ty.kind() { - ty::FnPtr(_) => None, + ty::FnPtr(..) => None, &ty::FnDef(def_id, _) => Some(def_id), _ => span_bug!(span, "invalid callee of type {:?}", ty), }; diff --git a/compiler/rustc_mir_transform/src/coroutine/by_move_body.rs b/compiler/rustc_mir_transform/src/coroutine/by_move_body.rs index 69d21a63f557b..7ea36f08232e2 100644 --- a/compiler/rustc_mir_transform/src/coroutine/by_move_body.rs +++ b/compiler/rustc_mir_transform/src/coroutine/by_move_body.rs @@ -78,6 +78,8 @@ use rustc_middle::mir::{self, dump_mir, MirPass}; use rustc_middle::ty::{self, InstanceKind, Ty, TyCtxt, TypeVisitableExt}; use rustc_target::abi::{FieldIdx, VariantIdx}; +use crate::pass_manager::validate_body; + pub struct ByMoveBody; impl<'tcx> MirPass<'tcx> for ByMoveBody { @@ -131,20 +133,40 @@ impl<'tcx> MirPass<'tcx> for ByMoveBody { |(parent_field_idx, parent_capture), (child_field_idx, child_capture)| { // Store this set of additional projections (fields and derefs). // We need to re-apply them later. - let child_precise_captures = - &child_capture.place.projections[parent_capture.place.projections.len()..]; + let mut child_precise_captures = child_capture.place.projections + [parent_capture.place.projections.len()..] + .to_vec(); - // If the parent captures by-move, and the child captures by-ref, then we - // need to peel an additional `deref` off of the body of the child. - let needs_deref = child_capture.is_by_ref() && !parent_capture.is_by_ref(); - if needs_deref { - assert_ne!( - coroutine_kind, - ty::ClosureKind::FnOnce, + // If the parent capture is by-ref, then we need to apply an additional + // deref before applying any further projections to this place. 
+ if parent_capture.is_by_ref() { + child_precise_captures.insert( + 0, + Projection { ty: parent_capture.place.ty(), kind: ProjectionKind::Deref }, + ); + } + // If the child capture is by-ref, then we need to apply a "ref" + // projection (i.e. `&`) at the end. But wait! We don't have that + // as a projection kind. So instead, we can apply its dual and + // *peel* a deref off of the place when it shows up in the MIR body. + // Luckily, by construction this is always possible. + let peel_deref = if child_capture.is_by_ref() { + assert!( + parent_capture.is_by_ref() || coroutine_kind != ty::ClosureKind::FnOnce, "`FnOnce` coroutine-closures return coroutines that capture from \ their body; it will always result in a borrowck error!" ); - } + true + } else { + false + }; + + // Regarding the behavior above, you may think that it's redundant to both + // insert a deref and then peel a deref if the parent and child are both + // captured by-ref. This would be correct, except for the case where we have + // precise capturing projections, since the inserted deref is to the *beginning* + // and the peeled deref is at the *end*. I cannot seem to actually find a + // case where this happens, though, but let's keep this code flexible. // Finally, store the type of the parent's captured place. We need // this when building the field projection in the MIR body later on. @@ -164,7 +186,7 @@ impl<'tcx> MirPass<'tcx> for ByMoveBody { ( FieldIdx::from_usize(parent_field_idx + num_args), parent_capture_ty, - needs_deref, + peel_deref, child_precise_captures, ), ) @@ -192,6 +214,10 @@ impl<'tcx> MirPass<'tcx> for ByMoveBody { let mut by_move_body = body.clone(); MakeByMoveBody { tcx, field_remapping, by_move_coroutine_ty }.visit_body(&mut by_move_body); dump_mir(tcx, false, "coroutine_by_move", &0, &by_move_body, |_, _| Ok(())); + + // Let's just always validate this body. + validate_body(tcx, &mut by_move_body, "Initial coroutine_by_move body".to_string()); + // FIXME: use query feeding to generate the body right here and then only store the `DefId` of the new body. by_move_body.source = mir::MirSource::from_instance(InstanceKind::CoroutineKindShim { coroutine_def_id: coroutine_def_id.to_def_id(), @@ -202,7 +228,7 @@ impl<'tcx> MirPass<'tcx> for ByMoveBody { struct MakeByMoveBody<'tcx> { tcx: TyCtxt<'tcx>, - field_remapping: UnordMap, bool, &'tcx [Projection<'tcx>])>, + field_remapping: UnordMap, bool, Vec>)>, by_move_coroutine_ty: Ty<'tcx>, } @@ -223,14 +249,14 @@ impl<'tcx> MutVisitor<'tcx> for MakeByMoveBody<'tcx> { if place.local == ty::CAPTURE_STRUCT_LOCAL && let Some((&mir::ProjectionElem::Field(idx, _), projection)) = place.projection.split_first() - && let Some(&(remapped_idx, remapped_ty, needs_deref, bridging_projections)) = + && let Some(&(remapped_idx, remapped_ty, peel_deref, ref bridging_projections)) = self.field_remapping.get(&idx) { // As noted before, if the parent closure captures a field by value, and // the child captures a field by ref, then for the by-move body we're // generating, we also are taking that field by value. Peel off a deref, // since a layer of ref'ing has now become redundant. 
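The `by_move_body.rs` hunk above prepends a synthetic deref projection with `insert(0, ..)` when the parent capture is by-ref, and records `peel_deref` so a leading deref can later be stripped with `split_first`. A loose standalone sketch of just that list surgery (plain strings instead of rustc's `Projection` type; it does not model the capture analysis itself):

```rust
// Prepend a synthetic "deref" projection, as done for a by-ref parent capture.
fn prepend_deref(mut projections: Vec<&'static str>) -> Vec<&'static str> {
    projections.insert(0, "deref");
    projections
}

// Peel a single leading "deref" off a projection list, as done when the child
// capture was by-ref but the by-move body should use the value directly.
fn peel_deref(projections: &[&'static str]) -> &[&'static str] {
    match projections.split_first() {
        Some((first, rest)) if *first == "deref" => rest,
        _ => panic!("expected a leading deref projection"),
    }
}

fn main() {
    let with_deref = prepend_deref(vec!["field(0)"]);
    assert_eq!(with_deref, ["deref", "field(0)"]);
    assert_eq!(peel_deref(&with_deref), &["field(0)"][..]);
}
```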
- let final_projections = if needs_deref { + let final_projections = if peel_deref { let Some((mir::ProjectionElem::Deref, projection)) = projection.split_first() else { bug!( diff --git a/compiler/rustc_mir_transform/src/coverage/graph.rs b/compiler/rustc_mir_transform/src/coverage/graph.rs index c3f4bbf1a6521..31b207751942b 100644 --- a/compiler/rustc_mir_transform/src/coverage/graph.rs +++ b/compiler/rustc_mir_transform/src/coverage/graph.rs @@ -350,8 +350,8 @@ fn bcb_filtered_successors<'a, 'tcx>(terminator: &'a Terminator<'tcx>) -> Covera // An inline asm terminator can normally be chained, except when it diverges or uses asm // goto. InlineAsm { ref targets, .. } => { - if targets.len() == 1 { - CoverageSuccessors::Chainable(targets[0]) + if let [target] = targets[..] { + CoverageSuccessors::Chainable(target) } else { CoverageSuccessors::NotChainable(targets) } diff --git a/compiler/rustc_mir_transform/src/early_otherwise_branch.rs b/compiler/rustc_mir_transform/src/early_otherwise_branch.rs index e4fec786814d6..49e41c35f1f24 100644 --- a/compiler/rustc_mir_transform/src/early_otherwise_branch.rs +++ b/compiler/rustc_mir_transform/src/early_otherwise_branch.rs @@ -309,11 +309,11 @@ fn verify_candidate_branch<'tcx>( ) -> bool { // In order for the optimization to be correct, the branch must... // ...have exactly one statement - if branch.statements.len() != 1 { + let [statement] = branch.statements.as_slice() else { return false; - } + }; // ...assign the discriminant of `place` in that statement - let StatementKind::Assign(boxed) = &branch.statements[0].kind else { return false }; + let StatementKind::Assign(boxed) = &statement.kind else { return false }; let (discr_place, Rvalue::Discriminant(from_place)) = &**boxed else { return false }; if *from_place != place { return false; diff --git a/compiler/rustc_mir_transform/src/ffi_unwind_calls.rs b/compiler/rustc_mir_transform/src/ffi_unwind_calls.rs index 4132e604f20e0..9a2cc057232f3 100644 --- a/compiler/rustc_mir_transform/src/ffi_unwind_calls.rs +++ b/compiler/rustc_mir_transform/src/ffi_unwind_calls.rs @@ -57,7 +57,7 @@ fn has_ffi_unwind_calls(tcx: TyCtxt<'_>, local_def_id: LocalDefId) -> bool { }; let fn_def_id = match ty.kind() { - ty::FnPtr(_) => None, + ty::FnPtr(..) => None, &ty::FnDef(def_id, _) => { // Rust calls cannot themselves create foreign unwinds (even if they use a non-Rust ABI). // So the leak of the foreign unwind into Rust can only be elsewhere, not here. diff --git a/compiler/rustc_mir_transform/src/inline.rs b/compiler/rustc_mir_transform/src/inline.rs index 324ddc5e799d5..61fc5fc881606 100644 --- a/compiler/rustc_mir_transform/src/inline.rs +++ b/compiler/rustc_mir_transform/src/inline.rs @@ -726,7 +726,7 @@ impl<'tcx> Inliner<'tcx> { // Insert all of the (mapped) parts of the callee body into the caller. caller_body.local_decls.extend(callee_body.drain_vars_and_temps()); - caller_body.source_scopes.extend(&mut callee_body.source_scopes.drain(..)); + caller_body.source_scopes.append(&mut callee_body.source_scopes); if self .tcx .sess @@ -740,7 +740,7 @@ impl<'tcx> Inliner<'tcx> { // still getting consistent results from the mir-opt tests. 
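Several hunks in this region (the coverage graph, `early_otherwise_branch`, `instsimplify`, and the MIR validator further down) replace `len() == 1` checks plus indexing with slice patterns. A small self-contained example of both spellings of that idiom, with illustrative names:

```rust
fn chainable_target(targets: &[u32]) -> Option<u32> {
    // One-element case without a separate `len()` check and `targets[0]` index.
    if let [target] = targets[..] {
        Some(target)
    } else {
        None
    }
}

fn verify_single_statement(statements: &[&str]) -> bool {
    // `let ... else` keeps the early return next to the pattern it guards.
    let [statement] = statements else {
        return false;
    };
    statement.starts_with("assign")
}

fn main() {
    assert_eq!(chainable_target(&[7]), Some(7));
    assert_eq!(chainable_target(&[1, 2]), None);
    assert!(verify_single_statement(&["assign x = 1"]));
    assert!(!verify_single_statement(&[]));
}
```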
caller_body.var_debug_info.append(&mut callee_body.var_debug_info); } - caller_body.basic_blocks_mut().extend(callee_body.basic_blocks_mut().drain(..)); + caller_body.basic_blocks_mut().append(callee_body.basic_blocks_mut()); caller_body[callsite.block].terminator = Some(Terminator { source_info: callsite.source_info, diff --git a/compiler/rustc_mir_transform/src/instsimplify.rs b/compiler/rustc_mir_transform/src/instsimplify.rs index 2fc5f7e536ba8..1589653968c2e 100644 --- a/compiler/rustc_mir_transform/src/instsimplify.rs +++ b/compiler/rustc_mir_transform/src/instsimplify.rs @@ -264,9 +264,7 @@ impl<'tcx> InstSimplifyContext<'tcx, '_> { }; // It's definitely not a clone if there are multiple arguments - if args.len() != 1 { - return; - } + let [arg] = &args[..] else { return }; let Some(destination_block) = *target else { return }; @@ -280,7 +278,7 @@ impl<'tcx> InstSimplifyContext<'tcx, '_> { // These types are easily available from locals, so check that before // doing DefId lookups to figure out what we're actually calling. - let arg_ty = args[0].node.ty(self.local_decls, self.tcx); + let arg_ty = arg.node.ty(self.local_decls, self.tcx); let ty::Ref(_region, inner_ty, Mutability::Not) = *arg_ty.kind() else { return }; diff --git a/compiler/rustc_mir_transform/src/promote_consts.rs b/compiler/rustc_mir_transform/src/promote_consts.rs index dc8e50ac8cd28..48a3266ae6f0d 100644 --- a/compiler/rustc_mir_transform/src/promote_consts.rs +++ b/compiler/rustc_mir_transform/src/promote_consts.rs @@ -468,7 +468,7 @@ impl<'tcx> Validator<'_, 'tcx> { if let ty::RawPtr(_, _) | ty::FnPtr(..) = lhs_ty.kind() { // Raw and fn pointer operations are not allowed inside consts and thus not promotable. - assert!(matches!( + assert_matches!( op, BinOp::Eq | BinOp::Ne @@ -477,7 +477,7 @@ impl<'tcx> Validator<'_, 'tcx> { | BinOp::Ge | BinOp::Gt | BinOp::Offset - )); + ); return Err(Unpromotable); } diff --git a/compiler/rustc_mir_transform/src/shim.rs b/compiler/rustc_mir_transform/src/shim.rs index f41f3ef656c51..09b4e5e07113b 100644 --- a/compiler/rustc_mir_transform/src/shim.rs +++ b/compiler/rustc_mir_transform/src/shim.rs @@ -437,7 +437,7 @@ fn build_clone_shim<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId, self_ty: Ty<'tcx>) - let src = tcx.mk_place_deref(Place::from(Local::new(1 + 0))); match self_ty.kind() { - ty::FnDef(..) | ty::FnPtr(_) => builder.copy_shim(), + ty::FnDef(..) | ty::FnPtr(..) 
=> builder.copy_shim(), ty::Closure(_, args) => builder.tuple_like_shim(dest, src, args.as_closure().upvar_tys()), ty::CoroutineClosure(_, args) => { builder.tuple_like_shim(dest, src, args.as_coroutine_closure().upvar_tys()) @@ -996,7 +996,7 @@ pub fn build_adt_ctor(tcx: TyCtxt<'_>, ctor_id: DefId) -> Body<'_> { /// } /// ``` fn build_fn_ptr_addr_shim<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId, self_ty: Ty<'tcx>) -> Body<'tcx> { - assert!(matches!(self_ty.kind(), ty::FnPtr(..)), "expected fn ptr, found {self_ty}"); + assert_matches!(self_ty.kind(), ty::FnPtr(..), "expected fn ptr, found {self_ty}"); let span = tcx.def_span(def_id); let Some(sig) = tcx.fn_sig(def_id).instantiate(tcx, &[self_ty.into()]).no_bound_vars() else { span_bug!(span, "FnPtr::addr with bound vars for `{self_ty}`"); diff --git a/compiler/rustc_mir_transform/src/validate.rs b/compiler/rustc_mir_transform/src/validate.rs index 746d423b7a98c..491ae1c0d083f 100644 --- a/compiler/rustc_mir_transform/src/validate.rs +++ b/compiler/rustc_mir_transform/src/validate.rs @@ -898,8 +898,8 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> { self.param_env, adt_def.non_enum_variant().fields[field].ty(self.tcx, args), ); - if fields.len() == 1 { - let src_ty = fields.raw[0].ty(self.body, self.tcx); + if let [field] = fields.raw.as_slice() { + let src_ty = field.ty(self.body, self.tcx); if !self.mir_assign_valid_types(src_ty, dest_ty) { self.fail(location, "union field has the wrong type"); } @@ -967,11 +967,9 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> { self.fail(location, "RawPtr should be in runtime MIR only"); } - if fields.len() != 2 { - self.fail(location, "raw pointer aggregate must have 2 fields"); - } else { - let data_ptr_ty = fields.raw[0].ty(self.body, self.tcx); - let metadata_ty = fields.raw[1].ty(self.body, self.tcx); + if let [data_ptr, metadata] = fields.raw.as_slice() { + let data_ptr_ty = data_ptr.ty(self.body, self.tcx); + let metadata_ty = metadata.ty(self.body, self.tcx); if let ty::RawPtr(in_pointee, in_mut) = data_ptr_ty.kind() { if *in_mut != mutability { self.fail(location, "input and output mutability must match"); @@ -998,6 +996,8 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> { self.fail(location, "metadata for pointer-to-thin must be unit"); } } + } else { + self.fail(location, "raw pointer aggregate must have 2 fields"); } } }, diff --git a/compiler/rustc_monomorphize/src/partitioning.rs b/compiler/rustc_monomorphize/src/partitioning.rs index 65a3d8d1742d9..610ad41ce5210 100644 --- a/compiler/rustc_monomorphize/src/partitioning.rs +++ b/compiler/rustc_monomorphize/src/partitioning.rs @@ -371,7 +371,7 @@ fn merge_codegen_units<'tcx>( // Move the items from `cgu_src` to `cgu_dst`. Some of them may be // duplicate inlined items, in which case the destination CGU is // unaffected. Recalculate size estimates afterwards. - cgu_dst.items_mut().extend(cgu_src.items_mut().drain(..)); + cgu_dst.items_mut().append(cgu_src.items_mut()); cgu_dst.compute_size_estimate(); // Record that `cgu_dst` now contains all the stuff that was in @@ -410,7 +410,7 @@ fn merge_codegen_units<'tcx>( // Move the items from `smallest` to `second_smallest`. Some of them // may be duplicate inlined items, in which case the destination CGU is // unaffected. Recalculate size estimates afterwards. 
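The inliner and the CGU-merging hunks switch from `dst.extend(src.drain(..))` to `dst.append(&mut src)`, which moves the whole buffer in one call and leaves the source empty. A minimal sketch of the difference, with illustrative variable names:

```rust
fn main() {
    let mut caller_scopes = vec!["caller_scope_0".to_string()];
    let mut callee_scopes = vec!["callee_scope_0".to_string(), "callee_scope_1".to_string()];

    // Equivalent in effect to `caller_scopes.extend(callee_scopes.drain(..))`,
    // but transfers the elements in bulk instead of one at a time.
    caller_scopes.append(&mut callee_scopes);

    assert_eq!(caller_scopes.len(), 3);
    assert!(callee_scopes.is_empty()); // like `drain(..)`, the source is left empty
}
```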
- second_smallest.items_mut().extend(smallest.items_mut().drain(..)); + second_smallest.items_mut().append(smallest.items_mut()); second_smallest.compute_size_estimate(); // Don't update `cgu_contents`, that's only for incremental builds. @@ -648,7 +648,9 @@ fn characteristic_def_id_of_mono_item<'tcx>( if let Some(impl_def_id) = tcx.impl_of_method(def_id) { if tcx.sess.opts.incremental.is_some() - && tcx.trait_id_of_impl(impl_def_id) == tcx.lang_items().drop_trait() + && tcx + .trait_id_of_impl(impl_def_id) + .is_some_and(|def_id| tcx.is_lang_item(def_id, LangItem::Drop)) { // Put `Drop::drop` into the same cgu as `drop_in_place` // since `drop_in_place` is the only thing that can diff --git a/compiler/rustc_next_trait_solver/src/canonicalizer.rs b/compiler/rustc_next_trait_solver/src/canonicalizer.rs index 82488088e30e6..394518daa4250 100644 --- a/compiler/rustc_next_trait_solver/src/canonicalizer.rs +++ b/compiler/rustc_next_trait_solver/src/canonicalizer.rs @@ -360,7 +360,7 @@ impl, I: Interner> TypeFolder for Canonicaliz | ty::Ref(_, _, _) | ty::Pat(_, _) | ty::FnDef(_, _) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::Dynamic(_, _, _) | ty::Closure(..) | ty::CoroutineClosure(..) diff --git a/compiler/rustc_next_trait_solver/src/coherence.rs b/compiler/rustc_next_trait_solver/src/coherence.rs index f22ea41c51222..2461ef0c0df57 100644 --- a/compiler/rustc_next_trait_solver/src/coherence.rs +++ b/compiler/rustc_next_trait_solver/src/coherence.rs @@ -334,7 +334,7 @@ where | ty::Str | ty::FnDef(..) | ty::Pat(..) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::Array(..) | ty::Slice(..) | ty::RawPtr(..) diff --git a/compiler/rustc_next_trait_solver/src/solve/assembly/mod.rs b/compiler/rustc_next_trait_solver/src/solve/assembly/mod.rs index 84921e87fb3f4..bb05eb4c25697 100644 --- a/compiler/rustc_next_trait_solver/src/solve/assembly/mod.rs +++ b/compiler/rustc_next_trait_solver/src/solve/assembly/mod.rs @@ -533,7 +533,7 @@ where | ty::RawPtr(_, _) | ty::Ref(_, _, _) | ty::FnDef(_, _) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::Dynamic(..) | ty::Closure(..) | ty::CoroutineClosure(..) @@ -620,7 +620,7 @@ where | ty::RawPtr(_, _) | ty::Ref(_, _, _) | ty::FnDef(_, _) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::Alias(..) | ty::Closure(..) | ty::CoroutineClosure(..) diff --git a/compiler/rustc_next_trait_solver/src/solve/assembly/structural_traits.rs b/compiler/rustc_next_trait_solver/src/solve/assembly/structural_traits.rs index 00837f7cdd87d..a57338acaab29 100644 --- a/compiler/rustc_next_trait_solver/src/solve/assembly/structural_traits.rs +++ b/compiler/rustc_next_trait_solver/src/solve/assembly/structural_traits.rs @@ -31,7 +31,7 @@ where | ty::Bool | ty::Float(_) | ty::FnDef(..) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::Error(_) | ty::Never | ty::Char => Ok(vec![]), @@ -117,7 +117,7 @@ where | ty::Bool | ty::Float(_) | ty::FnDef(..) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::RawPtr(..) | ty::Char | ty::Ref(..) @@ -178,7 +178,7 @@ where { match ty.kind() { // impl Copy/Clone for FnDef, FnPtr - ty::FnDef(..) | ty::FnPtr(_) | ty::Error(_) => Ok(vec![]), + ty::FnDef(..) | ty::FnPtr(..) 
| ty::Error(_) => Ok(vec![]), // Implementations are provided in core ty::Uint(_) @@ -269,7 +269,8 @@ pub(in crate::solve) fn extract_tupled_inputs_and_output_from_callable { + ty::FnPtr(sig_tys, hdr) => { + let sig = sig_tys.with(hdr); if sig.is_fn_trait_compatible() { Ok(Some( sig.map_bound(|sig| (Ty::new_tup(cx, sig.inputs().as_slice()), sig.output())), @@ -460,7 +461,7 @@ pub(in crate::solve) fn extract_tupled_inputs_and_output_from_async_callable { let sig = self_ty.fn_sig(cx); - if sig.skip_binder().is_fn_trait_compatible() && !cx.has_target_features(def_id) { + if sig.is_fn_trait_compatible() && !cx.has_target_features(def_id) { fn_item_to_async_callable(cx, sig) } else { Err(NoSolution) @@ -468,7 +469,7 @@ pub(in crate::solve) fn extract_tupled_inputs_and_output_from_async_callable { let sig = self_ty.fn_sig(cx); - if sig.skip_binder().is_fn_trait_compatible() { + if sig.is_fn_trait_compatible() { fn_item_to_async_callable(cx, sig) } else { Err(NoSolution) diff --git a/compiler/rustc_next_trait_solver/src/solve/inspect/build.rs b/compiler/rustc_next_trait_solver/src/solve/inspect/build.rs index a3c21666bd67c..86fb036cd3df8 100644 --- a/compiler/rustc_next_trait_solver/src/solve/inspect/build.rs +++ b/compiler/rustc_next_trait_solver/src/solve/inspect/build.rs @@ -5,11 +5,10 @@ //! see the comment on [ProofTreeBuilder]. use std::marker::PhantomData; -use std::mem; use derive_where::derive_where; use rustc_type_ir::inherent::*; -use rustc_type_ir::{self as ty, search_graph, Interner}; +use rustc_type_ir::{self as ty, Interner}; use crate::delegate::SolverDelegate; use crate::solve::eval_ctxt::canonical; @@ -94,31 +93,10 @@ impl WipGoalEvaluation { } } -#[derive_where(PartialEq, Eq; I: Interner)] -pub(in crate::solve) enum WipCanonicalGoalEvaluationKind { - Overflow, - CycleInStack, - ProvisionalCacheHit, - Interned { final_revision: I::CanonicalGoalEvaluationStepRef }, -} - -impl std::fmt::Debug for WipCanonicalGoalEvaluationKind { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - Self::Overflow => write!(f, "Overflow"), - Self::CycleInStack => write!(f, "CycleInStack"), - Self::ProvisionalCacheHit => write!(f, "ProvisionalCacheHit"), - Self::Interned { final_revision: _ } => { - f.debug_struct("Interned").finish_non_exhaustive() - } - } - } -} - #[derive_where(PartialEq, Eq, Debug; I: Interner)] struct WipCanonicalGoalEvaluation { goal: CanonicalInput, - kind: Option>, + encountered_overflow: bool, /// Only used for uncached goals. After we finished evaluating /// the goal, this is interned and moved into `kind`. final_revision: Option>, @@ -127,25 +105,17 @@ struct WipCanonicalGoalEvaluation { impl WipCanonicalGoalEvaluation { fn finalize(self) -> inspect::CanonicalGoalEvaluation { - // We've already interned the final revision in - // `fn finalize_canonical_goal_evaluation`. 
- assert!(self.final_revision.is_none()); - let kind = match self.kind.unwrap() { - WipCanonicalGoalEvaluationKind::Overflow => { + inspect::CanonicalGoalEvaluation { + goal: self.goal, + kind: if self.encountered_overflow { + assert!(self.final_revision.is_none()); inspect::CanonicalGoalEvaluationKind::Overflow - } - WipCanonicalGoalEvaluationKind::CycleInStack => { - inspect::CanonicalGoalEvaluationKind::CycleInStack - } - WipCanonicalGoalEvaluationKind::ProvisionalCacheHit => { - inspect::CanonicalGoalEvaluationKind::ProvisionalCacheHit - } - WipCanonicalGoalEvaluationKind::Interned { final_revision } => { + } else { + let final_revision = self.final_revision.unwrap().finalize(); inspect::CanonicalGoalEvaluationKind::Evaluation { final_revision } - } - }; - - inspect::CanonicalGoalEvaluation { goal: self.goal, kind, result: self.result.unwrap() } + }, + result: self.result.unwrap(), + } } } @@ -308,7 +278,7 @@ impl, I: Interner> ProofTreeBuilder { ) -> ProofTreeBuilder { self.nested(|| WipCanonicalGoalEvaluation { goal, - kind: None, + encountered_overflow: false, final_revision: None, result: None, }) @@ -329,11 +299,11 @@ impl, I: Interner> ProofTreeBuilder { } } - pub fn canonical_goal_evaluation_kind(&mut self, kind: WipCanonicalGoalEvaluationKind) { + pub fn canonical_goal_evaluation_overflow(&mut self) { if let Some(this) = self.as_mut() { match this { DebugSolver::CanonicalGoalEvaluation(canonical_goal_evaluation) => { - assert_eq!(canonical_goal_evaluation.kind.replace(kind), None); + canonical_goal_evaluation.encountered_overflow = true; } _ => unreachable!(), }; @@ -547,51 +517,3 @@ impl, I: Interner> ProofTreeBuilder { } } } - -impl search_graph::ProofTreeBuilder for ProofTreeBuilder -where - D: SolverDelegate, - I: Interner, -{ - fn try_apply_proof_tree( - &mut self, - proof_tree: Option, - ) -> bool { - if !self.is_noop() { - if let Some(final_revision) = proof_tree { - let kind = WipCanonicalGoalEvaluationKind::Interned { final_revision }; - self.canonical_goal_evaluation_kind(kind); - true - } else { - false - } - } else { - true - } - } - - fn on_provisional_cache_hit(&mut self) { - self.canonical_goal_evaluation_kind(WipCanonicalGoalEvaluationKind::ProvisionalCacheHit); - } - - fn on_cycle_in_stack(&mut self) { - self.canonical_goal_evaluation_kind(WipCanonicalGoalEvaluationKind::CycleInStack); - } - - fn finalize_canonical_goal_evaluation( - &mut self, - tcx: I, - ) -> Option { - self.as_mut().map(|this| match this { - DebugSolver::CanonicalGoalEvaluation(evaluation) => { - let final_revision = mem::take(&mut evaluation.final_revision).unwrap(); - let final_revision = - tcx.intern_canonical_goal_evaluation_step(final_revision.finalize()); - let kind = WipCanonicalGoalEvaluationKind::Interned { final_revision }; - assert_eq!(evaluation.kind.replace(kind), None); - final_revision - } - _ => unreachable!(), - }) - } -} diff --git a/compiler/rustc_next_trait_solver/src/solve/search_graph.rs b/compiler/rustc_next_trait_solver/src/solve/search_graph.rs index fe053a506e712..81c89fad8e8a0 100644 --- a/compiler/rustc_next_trait_solver/src/solve/search_graph.rs +++ b/compiler/rustc_next_trait_solver/src/solve/search_graph.rs @@ -1,12 +1,13 @@ +use std::convert::Infallible; use std::marker::PhantomData; use rustc_type_ir::inherent::*; -use rustc_type_ir::search_graph::{self, CycleKind, UsageKind}; +use rustc_type_ir::search_graph::{self, PathKind}; use rustc_type_ir::solve::{CanonicalInput, Certainty, QueryResult}; use rustc_type_ir::Interner; -use super::inspect::{self, 
ProofTreeBuilder}; -use super::FIXPOINT_STEP_LIMIT; +use super::inspect::ProofTreeBuilder; +use super::{has_no_inference_or_external_constraints, FIXPOINT_STEP_LIMIT}; use crate::delegate::SolverDelegate; /// This type is never constructed. We only use it to implement `search_graph::Delegate` @@ -22,43 +23,48 @@ where { type Cx = D::Interner; + const ENABLE_PROVISIONAL_CACHE: bool = true; + type ValidationScope = Infallible; + fn enter_validation_scope( + _cx: Self::Cx, + _input: CanonicalInput, + ) -> Option { + None + } + const FIXPOINT_STEP_LIMIT: usize = FIXPOINT_STEP_LIMIT; type ProofTreeBuilder = ProofTreeBuilder; + fn inspect_is_noop(inspect: &mut Self::ProofTreeBuilder) -> bool { + inspect.is_noop() + } + const DIVIDE_AVAILABLE_DEPTH_ON_OVERFLOW: usize = 4; fn recursion_limit(cx: I) -> usize { cx.recursion_limit() } fn initial_provisional_result( cx: I, - kind: CycleKind, + kind: PathKind, input: CanonicalInput, ) -> QueryResult { match kind { - CycleKind::Coinductive => response_no_constraints(cx, input, Certainty::Yes), - CycleKind::Inductive => response_no_constraints(cx, input, Certainty::overflow(false)), + PathKind::Coinductive => response_no_constraints(cx, input, Certainty::Yes), + PathKind::Inductive => response_no_constraints(cx, input, Certainty::overflow(false)), } } - fn reached_fixpoint( - cx: I, - kind: UsageKind, + fn is_initial_provisional_result( + cx: Self::Cx, + kind: PathKind, input: CanonicalInput, - provisional_result: Option>, result: QueryResult, ) -> bool { - if let Some(r) = provisional_result { - r == result - } else { - match kind { - UsageKind::Single(CycleKind::Coinductive) => { - response_no_constraints(cx, input, Certainty::Yes) == result - } - UsageKind::Single(CycleKind::Inductive) => { - response_no_constraints(cx, input, Certainty::overflow(false)) == result - } - UsageKind::Mixed => false, + match kind { + PathKind::Coinductive => response_no_constraints(cx, input, Certainty::Yes) == result, + PathKind::Inductive => { + response_no_constraints(cx, input, Certainty::overflow(false)) == result } } } @@ -68,7 +74,7 @@ where inspect: &mut ProofTreeBuilder, input: CanonicalInput, ) -> QueryResult { - inspect.canonical_goal_evaluation_kind(inspect::WipCanonicalGoalEvaluationKind::Overflow); + inspect.canonical_goal_evaluation_overflow(); response_no_constraints(cx, input, Certainty::overflow(true)) } @@ -76,6 +82,22 @@ where response_no_constraints(cx, input, Certainty::overflow(false)) } + fn is_ambiguous_result(result: QueryResult) -> bool { + result.is_ok_and(|response| { + has_no_inference_or_external_constraints(response) + && matches!(response.value.certainty, Certainty::Maybe(_)) + }) + } + + fn propagate_ambiguity( + cx: I, + for_input: CanonicalInput, + from_result: QueryResult, + ) -> QueryResult { + let certainty = from_result.unwrap().value.certainty; + response_no_constraints(cx, for_input, certainty) + } + fn step_is_coinductive(cx: I, input: CanonicalInput) -> bool { input.value.goal.predicate.is_coinductive(cx) } diff --git a/compiler/rustc_next_trait_solver/src/solve/trait_goals.rs b/compiler/rustc_next_trait_solver/src/solve/trait_goals.rs index b1dba712f797a..1314b7eb6ffc6 100644 --- a/compiler/rustc_next_trait_solver/src/solve/trait_goals.rs +++ b/compiler/rustc_next_trait_solver/src/solve/trait_goals.rs @@ -1145,7 +1145,7 @@ where | ty::RawPtr(_, _) | ty::Ref(_, _, _) | ty::FnDef(_, _) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::Closure(..) | ty::CoroutineClosure(..) 
| ty::Coroutine(_, _) diff --git a/compiler/rustc_parse/src/lexer/tokentrees.rs b/compiler/rustc_parse/src/lexer/tokentrees.rs index fb4ed5b93b27f..e7a7105803fcd 100644 --- a/compiler/rustc_parse/src/lexer/tokentrees.rs +++ b/compiler/rustc_parse/src/lexer/tokentrees.rs @@ -229,7 +229,7 @@ impl<'psess, 'src> TokenTreesReader<'psess, 'src> { } else { let this_spacing = if next_tok.is_punct() { Spacing::Joint - } else if next_tok.kind == token::Eof { + } else if next_tok == token::Eof { Spacing::Alone } else { Spacing::JointHidden diff --git a/compiler/rustc_parse/src/lib.rs b/compiler/rustc_parse/src/lib.rs index e6b04080c8d85..370792714936a 100644 --- a/compiler/rustc_parse/src/lib.rs +++ b/compiler/rustc_parse/src/lib.rs @@ -5,6 +5,7 @@ #![allow(rustc::diagnostic_outside_of_impl)] #![allow(rustc::untranslatable_diagnostic)] #![feature(array_windows)] +#![feature(assert_matches)] #![feature(box_patterns)] #![feature(debug_closure_helpers)] #![feature(if_let_guard)] diff --git a/compiler/rustc_parse/src/parser/attr.rs b/compiler/rustc_parse/src/parser/attr.rs index 8fdfbcee38546..6391ff901cb74 100644 --- a/compiler/rustc_parse/src/parser/attr.rs +++ b/compiler/rustc_parse/src/parser/attr.rs @@ -4,11 +4,14 @@ use rustc_ast::token::{self, Delimiter}; use rustc_errors::codes::*; use rustc_errors::{Diag, PResult}; use rustc_span::symbol::kw; -use rustc_span::{sym, BytePos, Span}; +use rustc_span::{BytePos, Span}; use thin_vec::ThinVec; use tracing::debug; -use super::{AttrWrapper, Capturing, FnParseMode, ForceCollect, Parser, ParserRange, PathStyle}; +use super::{ + AttrWrapper, Capturing, FnParseMode, ForceCollect, Parser, ParserRange, PathStyle, Trailing, + UsePreAttrPos, +}; use crate::{errors, fluent_generated as fluent, maybe_whole}; // Public for rustfmt usage @@ -162,7 +165,7 @@ impl<'a> Parser<'a> { } loop { // skip any other attributes, we want the item - if snapshot.token.kind == token::Pound { + if snapshot.token == token::Pound { if let Err(err) = snapshot.parse_attribute(InnerAttrPolicy::Permitted) { err.cancel(); return Some(replacement_span); @@ -257,11 +260,11 @@ impl<'a> Parser<'a> { pub fn parse_attr_item(&mut self, force_collect: ForceCollect) -> PResult<'a, ast::AttrItem> { maybe_whole!(self, NtMeta, |attr| attr.into_inner()); - let do_parse = |this: &mut Self, _empty_attrs| { + // Attr items don't have attributes. + self.collect_tokens(None, AttrWrapper::empty(), force_collect, |this, _empty_attrs| { let is_unsafe = this.eat_keyword(kw::Unsafe); let unsafety = if is_unsafe { let unsafe_span = this.prev_token.span; - this.psess.gated_spans.gate(sym::unsafe_attributes, unsafe_span); this.expect(&token::OpenDelim(Delimiter::Parenthesis))?; ast::Safety::Unsafe(unsafe_span) } else { @@ -273,10 +276,12 @@ impl<'a> Parser<'a> { if is_unsafe { this.expect(&token::CloseDelim(Delimiter::Parenthesis))?; } - Ok((ast::AttrItem { unsafety, path, args, tokens: None }, false)) - }; - // Attr items don't have attributes. - self.collect_tokens_trailing_token(AttrWrapper::empty(), force_collect, do_parse) + Ok(( + ast::AttrItem { unsafety, path, args, tokens: None }, + Trailing::No, + UsePreAttrPos::No, + )) + }) } /// Parses attributes that appear after the opening of an item. These should @@ -309,8 +314,8 @@ impl<'a> Parser<'a> { }; if let Some(attr) = attr { // If we are currently capturing tokens (i.e. we are within a call to - // `Parser::collect_tokens_trailing_tokens`) record the token positions of this - // inner attribute, for possible later processing in a `LazyAttrTokenStream`. 
+ // `Parser::collect_tokens`) record the token positions of this inner attribute, + // for possible later processing in a `LazyAttrTokenStream`. if let Capturing::Yes = self.capture_state.capturing { let end_pos = self.num_bump_calls; let parser_range = ParserRange(start_pos..end_pos); @@ -343,7 +348,7 @@ impl<'a> Parser<'a> { // Presumably, the majority of the time there will only be one attr. let mut expanded_attrs = Vec::with_capacity(1); - while self.token.kind != token::Eof { + while self.token != token::Eof { let lo = self.token.span; let item = self.parse_attr_item(ForceCollect::Yes)?; expanded_attrs.push((item, lo.to(self.prev_token.span))); @@ -359,7 +364,7 @@ impl<'a> Parser<'a> { pub(crate) fn parse_meta_seq_top(&mut self) -> PResult<'a, ThinVec> { // Presumably, the majority of the time there will only be one attr. let mut nmis = ThinVec::with_capacity(1); - while self.token.kind != token::Eof { + while self.token != token::Eof { nmis.push(self.parse_meta_item_inner()?); if !self.eat(&token::Comma) { break; @@ -400,7 +405,6 @@ impl<'a> Parser<'a> { }; let unsafety = if is_unsafe { let unsafe_span = self.prev_token.span; - self.psess.gated_spans.gate(sym::unsafe_attributes, unsafe_span); self.expect(&token::OpenDelim(Delimiter::Parenthesis))?; ast::Safety::Unsafe(unsafe_span) diff --git a/compiler/rustc_parse/src/parser/attr_wrapper.rs b/compiler/rustc_parse/src/parser/attr_wrapper.rs index abf61036c2deb..49df2811d525b 100644 --- a/compiler/rustc_parse/src/parser/attr_wrapper.rs +++ b/compiler/rustc_parse/src/parser/attr_wrapper.rs @@ -12,9 +12,23 @@ use rustc_span::{sym, Span, DUMMY_SP}; use super::{ Capturing, FlatToken, ForceCollect, NodeRange, NodeReplacement, Parser, ParserRange, - TokenCursor, + TokenCursor, Trailing, }; +// When collecting tokens, this fully captures the start point. Usually its +// just after outer attributes, but occasionally it's before. +#[derive(Clone, Debug)] +pub(super) struct CollectPos { + start_token: (Token, Spacing), + cursor_snapshot: TokenCursor, + start_pos: u32, +} + +pub(super) enum UsePreAttrPos { + No, + Yes, +} + /// A wrapper type to ensure that the parser handles outer attributes correctly. /// When we parse outer attributes, we need to ensure that we capture tokens /// for the attribute target. This allows us to perform cfg-expansion on @@ -22,30 +36,32 @@ use super::{ /// /// This wrapper prevents direct access to the underlying `ast::AttrVec`. /// Parsing code can only get access to the underlying attributes -/// by passing an `AttrWrapper` to `collect_tokens_trailing_token`. +/// by passing an `AttrWrapper` to `collect_tokens`. /// This makes it difficult to accidentally construct an AST node /// (which stores an `ast::AttrVec`) without first collecting tokens. /// /// This struct has its own module, to ensure that the parser code /// cannot directly access the `attrs` field. #[derive(Debug, Clone)] -pub struct AttrWrapper { +pub(super) struct AttrWrapper { attrs: AttrVec, // The start of the outer attributes in the parser's token stream. // This lets us create a `NodeReplacement` for the entire attribute - // target, including outer attributes. - start_pos: u32, + // target, including outer attributes. `None` if there are no outer + // attributes. 
+ start_pos: Option, } impl AttrWrapper { pub(super) fn new(attrs: AttrVec, start_pos: u32) -> AttrWrapper { - AttrWrapper { attrs, start_pos } + AttrWrapper { attrs, start_pos: Some(start_pos) } } - pub fn empty() -> AttrWrapper { - AttrWrapper { attrs: AttrVec::new(), start_pos: u32::MAX } + + pub(super) fn empty() -> AttrWrapper { + AttrWrapper { attrs: AttrVec::new(), start_pos: None } } - pub(crate) fn take_for_recovery(self, psess: &ParseSess) -> AttrVec { + pub(super) fn take_for_recovery(self, psess: &ParseSess) -> AttrVec { psess.dcx().span_delayed_bug( self.attrs.get(0).map(|attr| attr.span).unwrap_or(DUMMY_SP), "AttrVec is taken for recovery but no error is produced", @@ -56,12 +72,12 @@ impl AttrWrapper { /// Prepend `self.attrs` to `attrs`. // FIXME: require passing an NT to prevent misuse of this method - pub(crate) fn prepend_to_nt_inner(mut self, attrs: &mut AttrVec) { + pub(super) fn prepend_to_nt_inner(mut self, attrs: &mut AttrVec) { mem::swap(attrs, &mut self.attrs); attrs.extend(self.attrs); } - pub fn is_empty(&self) -> bool { + pub(super) fn is_empty(&self) -> bool { self.attrs.is_empty() } } @@ -77,7 +93,7 @@ fn has_cfg_or_cfg_attr(attrs: &[Attribute]) -> bool { } // From a value of this type we can reconstruct the `TokenStream` seen by the -// `f` callback passed to a call to `Parser::collect_tokens_trailing_token`, by +// `f` callback passed to a call to `Parser::collect_tokens`, by // replaying the getting of the tokens. This saves us producing a `TokenStream` // if it is never needed, e.g. a captured `macro_rules!` argument that is never // passed to a proc macro. In practice, token stream creation happens rarely @@ -166,16 +182,30 @@ impl ToAttrTokenStream for LazyAttrTokenStreamImpl { } impl<'a> Parser<'a> { + pub(super) fn collect_pos(&self) -> CollectPos { + CollectPos { + start_token: (self.token.clone(), self.token_spacing), + cursor_snapshot: self.token_cursor.clone(), + start_pos: self.num_bump_calls, + } + } + /// Parses code with `f`. If appropriate, it records the tokens (in /// `LazyAttrTokenStream` form) that were parsed in the result, accessible - /// via the `HasTokens` trait. The second (bool) part of the callback's + /// via the `HasTokens` trait. The `Trailing` part of the callback's /// result indicates if an extra token should be captured, e.g. a comma or - /// semicolon. + /// semicolon. The `UsePreAttrPos` part of the callback's result indicates + /// if we should use `pre_attr_pos` as the collection start position (only + /// required in a few cases). /// /// The `attrs` passed in are in `AttrWrapper` form, which is opaque. The /// `AttrVec` within is passed to `f`. See the comment on `AttrWrapper` for /// details. /// + /// `pre_attr_pos` is the position before the outer attributes (or the node + /// itself, if no outer attributes are present). It is only needed if `f` + /// can return `UsePreAttrPos::Yes`. + /// /// Note: If your callback consumes an opening delimiter (including the /// case where `self.token` is an opening delimiter on entry to this /// function), you must also consume the corresponding closing delimiter. 
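The `attr_wrapper.rs` changes have the `collect_tokens` callback return the dedicated two-variant enums `Trailing` and `UsePreAttrPos` rather than bare `bool`s, so the decision reads as named intent at each call site. A hedged sketch of that style with toy stand-ins (not the real parser API):

```rust
// Illustrative stand-ins for the parser's `Trailing` / `UsePreAttrPos` enums.
enum Trailing {
    No,
    Yes,
}

enum UsePreAttrPos {
    No,
    Yes,
}

fn collect<R>(f: impl FnOnce() -> (R, Trailing, UsePreAttrPos)) -> R {
    let (result, trailing, use_pre_attr_pos) = f();
    // The names document the decision; a `(true, false)` tuple would not.
    if matches!(trailing, Trailing::Yes) {
        println!("capture one extra trailing token");
    }
    if matches!(use_pre_attr_pos, UsePreAttrPos::Yes) {
        println!("start collection before the outer attributes");
    }
    result
}

fn main() {
    let expr = collect(|| ("1 + 2".to_string(), Trailing::No, UsePreAttrPos::Yes));
    let stmt = collect(|| ("x;".to_string(), Trailing::Yes, UsePreAttrPos::No));
    println!("{expr} / {stmt}");
}
```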
@@ -197,11 +227,12 @@ impl<'a> Parser<'a> { /// } // 32..33 /// } // 33..34 /// ``` - pub fn collect_tokens_trailing_token( + pub(super) fn collect_tokens( &mut self, + pre_attr_pos: Option, attrs: AttrWrapper, force_collect: ForceCollect, - f: impl FnOnce(&mut Self, ast::AttrVec) -> PResult<'a, (R, bool)>, + f: impl FnOnce(&mut Self, AttrVec) -> PResult<'a, (R, Trailing, UsePreAttrPos)>, ) -> PResult<'a, R> { // We must collect if anything could observe the collected tokens, i.e. // if any of the following conditions hold. @@ -220,23 +251,20 @@ impl<'a> Parser<'a> { return Ok(f(self, attrs.attrs)?.0); } - let start_token = (self.token.clone(), self.token_spacing); - let cursor_snapshot = self.token_cursor.clone(); - let start_pos = self.num_bump_calls; + let mut collect_pos = self.collect_pos(); let has_outer_attrs = !attrs.attrs.is_empty(); let parser_replacements_start = self.capture_state.parser_replacements.len(); // We set and restore `Capturing::Yes` on either side of the call to - // `f`, so we can distinguish the outermost call to - // `collect_tokens_trailing_token` (e.g. parsing `m` in the example - // above) from any inner (indirectly recursive) calls (e.g. parsing `g` - // in the example above). This distinction is used below and in - // `Parser::parse_inner_attributes`. - let (mut ret, capture_trailing) = { + // `f`, so we can distinguish the outermost call to `collect_tokens` + // (e.g. parsing `m` in the example above) from any inner (indirectly + // recursive) calls (e.g. parsing `g` in the example above). This + // distinction is used below and in `Parser::parse_inner_attributes`. + let (mut ret, capture_trailing, use_pre_attr_pos) = { let prev_capturing = mem::replace(&mut self.capture_state.capturing, Capturing::Yes); - let ret_and_trailing = f(self, attrs.attrs); + let res = f(self, attrs.attrs); self.capture_state.capturing = prev_capturing; - ret_and_trailing? + res? }; // When we're not in `capture_cfg` mode, then skip collecting and @@ -279,10 +307,18 @@ impl<'a> Parser<'a> { return Ok(ret); } + // Replace the post-attribute collection start position with the + // pre-attribute position supplied, if `f` indicated it is necessary. + // (The caller is responsible for providing a non-`None` `pre_attr_pos` + // if this is a possibility.) + if matches!(use_pre_attr_pos, UsePreAttrPos::Yes) { + collect_pos = pre_attr_pos.unwrap(); + } + let parser_replacements_end = self.capture_state.parser_replacements.len(); assert!( - !(self.break_last_token && capture_trailing), + !(self.break_last_token && matches!(capture_trailing, Trailing::Yes)), "Cannot set break_last_token and have trailing token" ); @@ -294,7 +330,7 @@ impl<'a> Parser<'a> { // `AttrTokenStream`, we will create the proper token. + self.break_last_token as u32; - let num_calls = end_pos - start_pos; + let num_calls = end_pos - collect_pos.start_pos; // Take the captured `ParserRange`s for any inner attributes that we parsed in // `Parser::parse_inner_attributes`, and pair them in a `ParserReplacement` with `None`, @@ -328,7 +364,9 @@ impl<'a> Parser<'a> { .iter() .cloned() .chain(inner_attr_parser_replacements.iter().cloned()) - .map(|(parser_range, data)| (NodeRange::new(parser_range, start_pos), data)) + .map(|(parser_range, data)| { + (NodeRange::new(parser_range, collect_pos.start_pos), data) + }) .collect() }; @@ -355,9 +393,9 @@ impl<'a> Parser<'a> { // - `tokens`: lazy tokens for `g` (with its inner attr deleted). 
let tokens = LazyAttrTokenStream::new(LazyAttrTokenStreamImpl { - start_token, + start_token: collect_pos.start_token, + cursor_snapshot: collect_pos.cursor_snapshot, num_calls, - cursor_snapshot, break_last_token: self.break_last_token, node_replacements, }); @@ -368,9 +406,9 @@ impl<'a> Parser<'a> { } // If `capture_cfg` is set and we're inside a recursive call to - // `collect_tokens_trailing_token`, then we need to register a replace range - // if we have `#[cfg]` or `#[cfg_attr]`. This allows us to run eager cfg-expansion - // on the captured token stream. + // `collect_tokens`, then we need to register a replace range if we + // have `#[cfg]` or `#[cfg_attr]`. This allows us to run eager + // cfg-expansion on the captured token stream. if self.capture_cfg && matches!(self.capture_state.capturing, Capturing::Yes) && has_cfg_or_cfg_attr(ret.attrs()) @@ -389,7 +427,8 @@ impl<'a> Parser<'a> { // Set things up so that the entire AST node that we just parsed, including attributes, // will be replaced with `target` in the lazy token stream. This will allow us to // cfg-expand this AST node. - let start_pos = if has_outer_attrs { attrs.start_pos } else { start_pos }; + let start_pos = + if has_outer_attrs { attrs.start_pos.unwrap() } else { collect_pos.start_pos }; let target = AttrsTarget { attrs: ret.attrs().iter().cloned().collect(), tokens }; self.capture_state .parser_replacements @@ -490,7 +529,6 @@ mod size_asserts { use super::*; // tidy-alphabetical-start - static_assert_size!(AttrWrapper, 16); static_assert_size!(LazyAttrTokenStreamImpl, 96); // tidy-alphabetical-end } diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs index 47ca85ba060b9..ef1387c50fa8c 100644 --- a/compiler/rustc_parse/src/parser/diagnostics.rs +++ b/compiler/rustc_parse/src/parser/diagnostics.rs @@ -474,8 +474,8 @@ impl<'a> Parser<'a> { // If this isn't the case however, and the suggestion is a token the // content of which is the same as the found token's, we remove it as well. if !eq { - if let TokenType::Token(kind) = &token { - if kind == &self.token.kind { + if let TokenType::Token(kind) = token { + if self.token == *kind { return false; } } @@ -506,7 +506,7 @@ impl<'a> Parser<'a> { } else if !sm.is_multiline(self.prev_token.span.until(self.token.span)) { // The current token is in the same line as the prior token, not recoverable. } else if [token::Comma, token::Colon].contains(&self.token.kind) - && self.prev_token.kind == token::CloseDelim(Delimiter::Parenthesis) + && self.prev_token == token::CloseDelim(Delimiter::Parenthesis) { // Likely typo: The current token is on a new line and is expected to be // `.`, `;`, `?`, or an operator after a close delimiter token. @@ -518,7 +518,7 @@ impl<'a> Parser<'a> { // https://github.com/rust-lang/rust/issues/72253 } else if self.look_ahead(1, |t| { t == &token::CloseDelim(Delimiter::Brace) - || t.can_begin_expr() && t.kind != token::Colon + || t.can_begin_expr() && *t != token::Colon }) && [token::Comma, token::Colon].contains(&self.token.kind) { // Likely typo: `,` → `;` or `:` → `;`. 
This is triggered if the current token is @@ -562,7 +562,7 @@ impl<'a> Parser<'a> { } } - if self.token.kind == TokenKind::EqEq + if self.token == TokenKind::EqEq && self.prev_token.is_ident() && expected.iter().any(|tok| matches!(tok, TokenType::Token(TokenKind::Eq))) { @@ -655,9 +655,9 @@ impl<'a> Parser<'a> { // positive for a `cr#` that wasn't intended to start a c-string literal, but identifying // that in the parser requires unbounded lookahead, so we only add a hint to the existing // error rather than replacing it entirely. - if ((self.prev_token.kind == TokenKind::Ident(sym::c, IdentIsRaw::No) + if ((self.prev_token == TokenKind::Ident(sym::c, IdentIsRaw::No) && matches!(&self.token.kind, TokenKind::Literal(token::Lit { kind: token::Str, .. }))) - || (self.prev_token.kind == TokenKind::Ident(sym::cr, IdentIsRaw::No) + || (self.prev_token == TokenKind::Ident(sym::cr, IdentIsRaw::No) && matches!( &self.token.kind, TokenKind::Literal(token::Lit { kind: token::Str, .. }) | token::Pound @@ -673,7 +673,7 @@ impl<'a> Parser<'a> { // `pub` may be used for an item or `pub(crate)` if self.prev_token.is_ident_named(sym::public) && (self.token.can_begin_item() - || self.token.kind == TokenKind::OpenDelim(Delimiter::Parenthesis)) + || self.token == TokenKind::OpenDelim(Delimiter::Parenthesis)) { err.span_suggestion_short( self.prev_token.span, @@ -772,7 +772,7 @@ impl<'a> Parser<'a> { ), ); if self.token == token::Pound - && self.look_ahead(1, |t| t.kind == token::OpenDelim(Delimiter::Bracket)) + && self.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Bracket)) { // We have // #[attr] @@ -867,7 +867,7 @@ impl<'a> Parser<'a> { let str_span = self.prev_token.span; let mut span = self.token.span; let mut count = 0; - while self.token.kind == TokenKind::Pound + while self.token == TokenKind::Pound && !sm.is_multiline(span.shrink_to_hi().until(self.token.span.shrink_to_lo())) { span = span.with_hi(self.token.span.hi()); @@ -1167,7 +1167,7 @@ impl<'a> Parser<'a> { return; } - if token::PathSep == self.token.kind && segment.args.is_none() { + if self.token == token::PathSep && segment.args.is_none() { let snapshot = self.create_snapshot_for_diagnostic(); self.bump(); let lo = self.token.span; @@ -1176,13 +1176,11 @@ impl<'a> Parser<'a> { let span = lo.to(self.prev_token.span); // Detect trailing `>` like in `x.collect::Vec<_>>()`. let mut trailing_span = self.prev_token.span.shrink_to_hi(); - while self.token.kind == token::BinOp(token::Shr) - || self.token.kind == token::Gt - { + while self.token == token::BinOp(token::Shr) || self.token == token::Gt { trailing_span = trailing_span.to(self.token.span); self.bump(); } - if self.token.kind == token::OpenDelim(Delimiter::Parenthesis) { + if self.token == token::OpenDelim(Delimiter::Parenthesis) { // Recover from bad turbofish: `foo.collect::Vec<_>()`. segment.args = Some(AngleBracketedArgs { args, span }.into()); @@ -1430,7 +1428,7 @@ impl<'a> Parser<'a> { self.restore_snapshot(snapshot); } } - return if token::PathSep == self.token.kind { + return if self.token == token::PathSep { // We have some certainty that this was a bad turbofish at this point. // `foo< bar >::` if let ExprKind::Binary(o, ..) = inner_op.kind @@ -1462,7 +1460,7 @@ impl<'a> Parser<'a> { Err(self.dcx().create_err(err)) } } - } else if token::OpenDelim(Delimiter::Parenthesis) == self.token.kind { + } else if self.token == token::OpenDelim(Delimiter::Parenthesis) { // We have high certainty that this was a bad turbofish at this point. 
// `foo< bar >(` if let ExprKind::Binary(o, ..) = inner_op.kind @@ -1528,7 +1526,7 @@ impl<'a> Parser<'a> { ]; self.consume_tts(1, &modifiers); - if self.token.kind == token::Eof { + if self.token == token::Eof { // Not entirely sure that what we consumed were fn arguments, rollback. self.restore_snapshot(snapshot); Err(()) @@ -1811,7 +1809,7 @@ impl<'a> Parser<'a> { /// This function gets called in places where a semicolon is NOT expected and if there's a /// semicolon it emits the appropriate error and returns true. pub fn maybe_consume_incorrect_semicolon(&mut self, previous_item: Option<&Item>) -> bool { - if self.token.kind != TokenKind::Semi { + if self.token != TokenKind::Semi { return false; } @@ -2405,10 +2403,10 @@ impl<'a> Parser<'a> { modifier: &[(token::TokenKind, i64)], ) { while acc > 0 { - if let Some((_, val)) = modifier.iter().find(|(t, _)| *t == self.token.kind) { + if let Some((_, val)) = modifier.iter().find(|(t, _)| self.token == *t) { acc += *val; } - if self.token.kind == token::Eof { + if self.token == token::Eof { break; } self.bump(); @@ -2489,13 +2487,14 @@ impl<'a> Parser<'a> { pub(super) fn handle_unambiguous_unbraced_const_arg(&mut self) -> PResult<'a, P> { let start = self.token.span; let attrs = self.parse_outer_attributes()?; - let expr = self.parse_expr_res(Restrictions::CONST_EXPR, attrs).map_err(|mut err| { - err.span_label( - start.shrink_to_lo(), - "while parsing a const generic argument starting here", - ); - err - })?; + let (expr, _) = + self.parse_expr_res(Restrictions::CONST_EXPR, attrs).map_err(|mut err| { + err.span_label( + start.shrink_to_lo(), + "while parsing a const generic argument starting here", + ); + err + })?; if !self.expr_is_valid_const_arg(&expr) { self.dcx().emit_err(ConstGenericWithoutBraces { span: expr.span, @@ -2598,7 +2597,7 @@ impl<'a> Parser<'a> { } }) .is_some() - || self.token.kind == TokenKind::Dot; + || self.token == TokenKind::Dot; // This will be true when a trait object type `Foo +` or a path which was a `const fn` with // type params has been parsed. let was_op = @@ -2615,9 +2614,9 @@ impl<'a> Parser<'a> { let attrs = self.parse_outer_attributes()?; self.parse_expr_res(Restrictions::CONST_EXPR, attrs) })() { - Ok(expr) => { + Ok((expr, _)) => { // Find a mistake like `MyTrait`. - if token::EqEq == snapshot.token.kind { + if snapshot.token == token::EqEq { err.span_suggestion( snapshot.token.span, "if you meant to use an associated type binding, replace `==` with `=`", @@ -2627,7 +2626,7 @@ impl<'a> Parser<'a> { let guar = err.emit(); let value = self.mk_expr_err(start.to(expr.span), guar); return Ok(GenericArg::Const(AnonConst { id: ast::DUMMY_NODE_ID, value })); - } else if token::Colon == snapshot.token.kind + } else if snapshot.token == token::Colon && expr.span.lo() == snapshot.token.span.hi() && matches!(expr.kind, ExprKind::Path(..)) { @@ -2642,8 +2641,7 @@ impl<'a> Parser<'a> { return Ok(GenericArg::Type( self.mk_ty(start.to(expr.span), TyKind::Err(guar)), )); - } else if token::Comma == self.token.kind || self.token.kind.should_end_const_arg() - { + } else if self.token == token::Comma || self.token.kind.should_end_const_arg() { // Avoid the following output by checking that we consumed a full const arg: // help: expressions must be enclosed in braces to be used as const generic // arguments @@ -2674,7 +2672,7 @@ impl<'a> Parser<'a> { })() { // Since we don't know the exact reason why we failed to parse the type or the // expression, employ a simple heuristic to weed out some pathological cases. 
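Many of the `diagnostics.rs` hunks above compare `self.token == token::Eof` directly instead of going through `.kind`, which relies on a cross-type `PartialEq` impl between the token and its kind. A toy sketch of how such an impl is shaped (illustrative types, not the real `rustc_ast` ones):

```rust
#[derive(Debug, PartialEq)]
enum TokenKind {
    Eof,
    Comma,
}

#[derive(Debug)]
struct Token {
    kind: TokenKind,
    span: (u32, u32),
}

// Comparing a `Token` against a bare `TokenKind` compares only the kind, so
// call sites can write `token == TokenKind::Eof` instead of `token.kind == ...`.
impl PartialEq<TokenKind> for Token {
    fn eq(&self, other: &TokenKind) -> bool {
        self.kind == *other
    }
}

fn main() {
    let token = Token { kind: TokenKind::Eof, span: (10, 11) };
    assert!(token == TokenKind::Eof);
    assert!(token != TokenKind::Comma);
    println!("eof token at {:?}", token.span);
}
```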
- Ok(expr) if let token::Comma | token::Gt = snapshot.token.kind => { + Ok((expr, _)) if let token::Comma | token::Gt = snapshot.token.kind => { self.restore_snapshot(snapshot); Some(expr) } @@ -2846,8 +2844,8 @@ impl<'a> Parser<'a> { pub(crate) fn maybe_recover_unexpected_block_label(&mut self) -> bool { // Check for `'a : {` if !(self.check_lifetime() - && self.look_ahead(1, |tok| tok.kind == token::Colon) - && self.look_ahead(2, |tok| tok.kind == token::OpenDelim(Delimiter::Brace))) + && self.look_ahead(1, |t| *t == token::Colon) + && self.look_ahead(2, |t| *t == token::OpenDelim(Delimiter::Brace))) { return false; } @@ -3001,7 +2999,7 @@ impl<'a> Parser<'a> { // >>>>>>> let mut end = None; loop { - if self.token.kind == TokenKind::Eof { + if self.token == TokenKind::Eof { break; } if let Some(span) = self.conflict_marker(&TokenKind::OrOr, &TokenKind::BinOp(token::Or)) diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs index ccf8dcdf0b6f2..e0917ba43e41c 100644 --- a/compiler/rustc_parse/src/parser/expr.rs +++ b/compiler/rustc_parse/src/parser/expr.rs @@ -36,7 +36,7 @@ use super::pat::{CommaRecoveryMode, Expected, RecoverColon, RecoverComma}; use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign}; use super::{ AttrWrapper, BlockMode, ClosureSpans, ForceCollect, Parser, PathStyle, Restrictions, - SemiColonMode, SeqSep, TokenType, Trailing, + SemiColonMode, SeqSep, TokenType, Trailing, UsePreAttrPos, }; use crate::{errors, maybe_recover_from_interpolated_ty_qpath}; @@ -59,15 +59,30 @@ impl<'a> Parser<'a> { self.current_closure.take(); let attrs = self.parse_outer_attributes()?; - self.parse_expr_res(Restrictions::empty(), attrs) + self.parse_expr_res(Restrictions::empty(), attrs).map(|res| res.0) } /// Parses an expression, forcing tokens to be collected. pub fn parse_expr_force_collect(&mut self) -> PResult<'a, P> { self.current_closure.take(); + // If the expression is associative (e.g. `1 + 2`), then any preceding + // outer attribute actually belongs to the first inner sub-expression. + // In which case we must use the pre-attr pos to include the attribute + // in the collected tokens for the outer expression. + let pre_attr_pos = self.collect_pos(); let attrs = self.parse_outer_attributes()?; - self.collect_tokens_no_attrs(|this| this.parse_expr_res(Restrictions::empty(), attrs)) + self.collect_tokens( + Some(pre_attr_pos), + AttrWrapper::empty(), + ForceCollect::Yes, + |this, _empty_attrs| { + let (expr, is_assoc) = this.parse_expr_res(Restrictions::empty(), attrs)?; + let use_pre_attr_pos = + if is_assoc { UsePreAttrPos::Yes } else { UsePreAttrPos::No }; + Ok((expr, Trailing::No, use_pre_attr_pos)) + }, + ) } pub fn parse_expr_anon_const(&mut self) -> PResult<'a, AnonConst> { @@ -77,7 +92,7 @@ impl<'a> Parser<'a> { fn parse_expr_catch_underscore(&mut self, restrictions: Restrictions) -> PResult<'a, P> { let attrs = self.parse_outer_attributes()?; match self.parse_expr_res(restrictions, attrs) { - Ok(expr) => Ok(expr), + Ok((expr, _)) => Ok(expr), Err(err) => match self.token.ident() { Some((Ident { name: kw::Underscore, .. }, IdentIsRaw::No)) if self.may_recover() && self.look_ahead(1, |t| t == &token::Comma) => @@ -104,18 +119,20 @@ impl<'a> Parser<'a> { &mut self, r: Restrictions, attrs: AttrWrapper, - ) -> PResult<'a, P> { + ) -> PResult<'a, (P, bool)> { self.with_res(r, |this| this.parse_expr_assoc_with(0, attrs)) } /// Parses an associative expression with operators of at least `min_prec` precedence. 
+ /// The `bool` in the return value indicates if it was an assoc expr, i.e. with an operator + /// followed by a subexpression (e.g. `1 + 2`). pub(super) fn parse_expr_assoc_with( &mut self, min_prec: usize, attrs: AttrWrapper, - ) -> PResult<'a, P> { + ) -> PResult<'a, (P, bool)> { let lhs = if self.token.is_range_separator() { - return self.parse_expr_prefix_range(attrs); + return self.parse_expr_prefix_range(attrs).map(|res| (res, false)); } else { self.parse_expr_prefix(attrs)? }; @@ -123,15 +140,17 @@ impl<'a> Parser<'a> { } /// Parses the rest of an associative expression (i.e. the part after the lhs) with operators - /// of at least `min_prec` precedence. + /// of at least `min_prec` precedence. The `bool` in the return value indicates if something + /// was actually parsed. pub(super) fn parse_expr_assoc_rest_with( &mut self, min_prec: usize, starts_stmt: bool, mut lhs: P, - ) -> PResult<'a, P> { + ) -> PResult<'a, (P, bool)> { + let mut parsed_something = false; if !self.should_continue_as_assoc_expr(&lhs) { - return Ok(lhs); + return Ok((lhs, parsed_something)); } self.expected_tokens.push(TokenType::Operator); @@ -156,16 +175,17 @@ impl<'a> Parser<'a> { self.err_larrow_operator(self.token.span); } + parsed_something = true; self.bump(); if op.node.is_comparison() { if let Some(expr) = self.check_no_chained_comparison(&lhs, &op)? { - return Ok(expr); + return Ok((expr, parsed_something)); } } // Look for JS' `===` and `!==` and recover if (op.node == AssocOp::Equal || op.node == AssocOp::NotEqual) - && self.token.kind == token::Eq + && self.token == token::Eq && self.prev_token.span.hi() == self.token.span.lo() { let sp = op.span.to(self.token.span); @@ -190,7 +210,7 @@ impl<'a> Parser<'a> { // Look for PHP's `<>` and recover if op.node == AssocOp::Less - && self.token.kind == token::Gt + && self.token == token::Gt && self.prev_token.span.hi() == self.token.span.lo() { let sp = op.span.to(self.token.span); @@ -208,7 +228,7 @@ impl<'a> Parser<'a> { // Look for C++'s `<=>` and recover if op.node == AssocOp::LessEqual - && self.token.kind == token::Gt + && self.token == token::Gt && self.prev_token.span.hi() == self.token.span.lo() { let sp = op.span.to(self.token.span); @@ -263,7 +283,7 @@ impl<'a> Parser<'a> { // the special cases. The code is here only for future convenience. Fixity::None => 1, }; - let rhs = self.with_res(restrictions - Restrictions::STMT_EXPR, |this| { + let (rhs, _) = self.with_res(restrictions - Restrictions::STMT_EXPR, |this| { let attrs = this.parse_outer_attributes()?; this.parse_expr_assoc_with(prec + prec_adjustment, attrs) })?; @@ -319,7 +339,7 @@ impl<'a> Parser<'a> { } } - Ok(lhs) + Ok((lhs, parsed_something)) } fn should_continue_as_assoc_expr(&mut self, lhs: &Expr) -> bool { @@ -441,7 +461,8 @@ impl<'a> Parser<'a> { let attrs = self.parse_outer_attributes()?; Some( self.parse_expr_assoc_with(prec + 1, attrs) - .map_err(|err| self.maybe_err_dotdotlt_syntax(maybe_lt, err))?, + .map_err(|err| self.maybe_err_dotdotlt_syntax(maybe_lt, err))? + .0, ) } else { None @@ -498,7 +519,7 @@ impl<'a> Parser<'a> { // RHS must be parsed with more associativity than the dots. let attrs = this.parse_outer_attributes()?; this.parse_expr_assoc_with(op.unwrap().precedence() + 1, attrs) - .map(|x| (lo.to(x.span), Some(x))) + .map(|(x, _)| (lo.to(x.span), Some(x))) .map_err(|err| this.maybe_err_dotdotlt_syntax(maybe_lt, err))? } else { (lo, None) @@ -694,12 +715,12 @@ impl<'a> Parser<'a> { // `foo: ` ExprKind::Path(None, ast::Path { segments, .. 
}), token::Ident(kw::For | kw::Loop | kw::While, IdentIsRaw::No), - ) if segments.len() == 1 => { + ) if let [segment] = segments.as_slice() => { let snapshot = self.create_snapshot_for_diagnostic(); let label = Label { ident: Ident::from_str_and_span( - &format!("'{}", segments[0].ident), - segments[0].ident.span, + &format!("'{}", segment.ident), + segment.ident.span, ), }; match self.parse_expr_labeled(label, false) { @@ -882,7 +903,7 @@ impl<'a> Parser<'a> { let mut res = ensure_sufficient_stack(|| { loop { let has_question = - if self.prev_token.kind == TokenKind::Ident(kw::Return, IdentIsRaw::No) { + if self.prev_token == TokenKind::Ident(kw::Return, IdentIsRaw::No) { // We are using noexpect here because we don't expect a `?` directly after // a `return` which could be suggested otherwise. self.eat_noexpect(&token::Question) @@ -894,20 +915,19 @@ impl<'a> Parser<'a> { e = self.mk_expr(lo.to(self.prev_token.span), ExprKind::Try(e)); continue; } - let has_dot = - if self.prev_token.kind == TokenKind::Ident(kw::Return, IdentIsRaw::No) { - // We are using noexpect here because we don't expect a `.` directly after - // a `return` which could be suggested otherwise. - self.eat_noexpect(&token::Dot) - } else if self.token.kind == TokenKind::RArrow && self.may_recover() { - // Recovery for `expr->suffix`. - self.bump(); - let span = self.prev_token.span; - self.dcx().emit_err(errors::ExprRArrowCall { span }); - true - } else { - self.eat(&token::Dot) - }; + let has_dot = if self.prev_token == TokenKind::Ident(kw::Return, IdentIsRaw::No) { + // We are using noexpect here because we don't expect a `.` directly after + // a `return` which could be suggested otherwise. + self.eat_noexpect(&token::Dot) + } else if self.token == TokenKind::RArrow && self.may_recover() { + // Recovery for `expr->suffix`. + self.bump(); + let span = self.prev_token.span; + self.dcx().emit_err(errors::ExprRArrowCall { span }); + true + } else { + self.eat(&token::Dot) + }; if has_dot { // expr.f e = self.parse_dot_suffix_expr(lo, e)?; @@ -1206,7 +1226,7 @@ impl<'a> Parser<'a> { } fn mk_expr_tuple_field_access( - &mut self, + &self, lo: Span, ident_span: Span, base: P, @@ -1221,7 +1241,7 @@ impl<'a> Parser<'a> { /// Parse a function call expression, `expr(...)`. fn parse_expr_fn_call(&mut self, lo: Span, fun: P) -> P { - let snapshot = if self.token.kind == token::OpenDelim(Delimiter::Parenthesis) { + let snapshot = if self.token == token::OpenDelim(Delimiter::Parenthesis) { Some((self.create_snapshot_for_diagnostic(), fun.kind.clone())) } else { None @@ -1585,7 +1605,7 @@ impl<'a> Parser<'a> { // Suggests using '<=' if there is an error parsing qpath when the previous token // is an '=' token. Only emits suggestion if the '<' token and '=' token are // directly adjacent (i.e. 
'=<') - if maybe_eq_tok.kind == TokenKind::Eq && maybe_eq_tok.span.hi() == lt_span.lo() { + if maybe_eq_tok == TokenKind::Eq && maybe_eq_tok.span.hi() == lt_span.lo() { let eq_lt = maybe_eq_tok.span.to(lt_span); err.span_suggestion(eq_lt, "did you mean", "<=", Applicability::Unspecified); } @@ -2230,7 +2250,7 @@ impl<'a> Parser<'a> { return Ok(()); } - if self.token.kind == token::Comma { + if self.token == token::Comma { if !self.psess.source_map().is_multiline(prev_span.until(self.token.span)) { return Ok(()); } @@ -2336,7 +2356,7 @@ impl<'a> Parser<'a> { let token = self.token.clone(); let attrs = self.parse_outer_attributes()?; match self.parse_expr_res(restrictions, attrs) { - Ok(expr) => expr, + Ok((expr, _)) => expr, Err(err) => self.recover_closure_body(err, before, prev, token, lo, decl_hi)?, } } @@ -2360,7 +2380,7 @@ impl<'a> Parser<'a> { None => {} } - if self.token.kind == TokenKind::Semi + if self.token == TokenKind::Semi && matches!(self.token_cursor.stack.last(), Some((.., Delimiter::Parenthesis))) && self.may_recover() { @@ -2446,7 +2466,7 @@ impl<'a> Parser<'a> { fn parse_fn_block_param(&mut self) -> PResult<'a, Param> { let lo = self.token.span; let attrs = self.parse_outer_attributes()?; - self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| { + self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| { let pat = this.parse_pat_no_top_alt(Some(Expected::ParameterName), None)?; let ty = if this.eat(&token::Colon) { this.parse_ty()? @@ -2463,7 +2483,8 @@ impl<'a> Parser<'a> { id: DUMMY_NODE_ID, is_placeholder: false, }, - this.token == token::Comma, + Trailing::from(this.token == token::Comma), + UsePreAttrPos::No, )) }) } @@ -2557,7 +2578,7 @@ impl<'a> Parser<'a> { ); } else { // Look for usages of '=>' where '>=' might be intended - if maybe_fatarrow.kind == token::FatArrow { + if maybe_fatarrow == token::FatArrow { err.span_suggestion( maybe_fatarrow.span, "you might have meant to write a \"greater than or equal to\" comparison", @@ -2584,7 +2605,7 @@ impl<'a> Parser<'a> { /// Parses the condition of a `if` or `while` expression. fn parse_expr_cond(&mut self) -> PResult<'a, P> { let attrs = self.parse_outer_attributes()?; - let mut cond = + let (mut cond, _) = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL | Restrictions::ALLOW_LET, attrs)?; CondChecker::new(self).visit_expr(&mut cond); @@ -2606,7 +2627,7 @@ impl<'a> Parser<'a> { missing_let: None, comparison: None, }; - if self.prev_token.kind == token::BinOp(token::Or) { + if self.prev_token == token::BinOp(token::Or) { // This was part of a closure, the that part of the parser recover. return Err(self.dcx().create_err(err)); } else { @@ -2633,7 +2654,7 @@ impl<'a> Parser<'a> { self.expect(&token::Eq)?; } let attrs = self.parse_outer_attributes()?; - let expr = self.parse_expr_assoc_with(1 + prec_let_scrutinee_needs_par(), attrs)?; + let (expr, _) = self.parse_expr_assoc_with(1 + prec_let_scrutinee_needs_par(), attrs)?; let span = lo.to(expr.span); Ok(self.mk_expr(span, ExprKind::Let(pat, expr, span, recovered))) } @@ -2742,7 +2763,7 @@ impl<'a> Parser<'a> { } fn parse_for_head(&mut self) -> PResult<'a, (P, P)> { - let begin_paren = if self.token.kind == token::OpenDelim(Delimiter::Parenthesis) { + let begin_paren = if self.token == token::OpenDelim(Delimiter::Parenthesis) { // Record whether we are about to parse `for (`. // This is used below for recovery in case of `for ( $stuff ) $block` // in which case we will suggest `for $stuff $block`. 
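The `=<`, `===`, `<>` and `<=>` recoveries above all rest on the same span test: two tokens are treated as one fused operator only when the first ends exactly where the second starts, i.e. `prev_token.span.hi() == self.token.span.lo()`. A minimal sketch of that check, with a toy `Span` standing in for rustc's:

/// Byte range of a token in the source (toy stand-in for rustc's `Span`).
#[derive(Clone, Copy, Debug)]
struct Span {
    lo: u32,
    hi: u32,
}

/// Two tokens look like one fused operator (e.g. `=` `<` written as `=<`)
/// only when there is no gap between them: the first ends exactly where the
/// second starts.
fn directly_adjacent(prev: Span, curr: Span) -> bool {
    prev.hi == curr.lo
}

fn main() {
    // "a =< b": `=` covers bytes 2..3 and `<` covers bytes 3..4.
    assert!(directly_adjacent(Span { lo: 2, hi: 3 }, Span { lo: 3, hi: 4 }));
    // "a = < b": a space separates them, so no `<=` suggestion is offered.
    assert!(!directly_adjacent(Span { lo: 2, hi: 3 }, Span { lo: 4, hi: 5 }));
}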
@@ -2767,7 +2788,7 @@ impl<'a> Parser<'a> { // We know for sure we have seen `for ($SOMETHING in`. In the happy path this would // happen right before the return of this method. let attrs = self.parse_outer_attributes()?; - let expr = match self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, attrs) { + let (expr, _) = match self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, attrs) { Ok(expr) => expr, Err(expr_err) => { // We don't know what followed the `in`, so cancel and bubble up the @@ -2776,7 +2797,7 @@ impl<'a> Parser<'a> { return Err(err); } }; - return if self.token.kind == token::CloseDelim(Delimiter::Parenthesis) { + return if self.token == token::CloseDelim(Delimiter::Parenthesis) { // We know for sure we have seen `for ($SOMETHING in $EXPR)`, so we recover the // parser state and emit a targeted suggestion. let span = vec![start_span, self.token.span]; @@ -2802,7 +2823,7 @@ impl<'a> Parser<'a> { } self.check_for_for_in_in_typo(self.prev_token.span); let attrs = self.parse_outer_attributes()?; - let expr = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, attrs)?; + let (expr, _) = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, attrs)?; Ok((pat, expr)) } @@ -2922,7 +2943,7 @@ impl<'a> Parser<'a> { fn parse_expr_match(&mut self) -> PResult<'a, P> { let match_span = self.prev_token.span; let attrs = self.parse_outer_attributes()?; - let scrutinee = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, attrs)?; + let (scrutinee, _) = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, attrs)?; self.parse_match_block(match_span, match_span, scrutinee, MatchKind::Prefix) } @@ -2995,7 +3016,7 @@ impl<'a> Parser<'a> { first_expr: &P, arrow_span: Span, ) -> Option<(Span, ErrorGuaranteed)> { - if self.token.kind != token::Semi { + if self.token != token::Semi { return None; } let start_snapshot = self.create_snapshot_for_diagnostic(); @@ -3024,18 +3045,18 @@ impl<'a> Parser<'a> { // We might have either a `,` -> `;` typo, or a block without braces. We need // a more subtle parsing strategy. loop { - if self.token.kind == token::CloseDelim(Delimiter::Brace) { + if self.token == token::CloseDelim(Delimiter::Brace) { // We have reached the closing brace of the `match` expression. return Some(err(self, stmts)); } - if self.token.kind == token::Comma { + if self.token == token::Comma { self.restore_snapshot(start_snapshot); return None; } let pre_pat_snapshot = self.create_snapshot_for_diagnostic(); match self.parse_pat_no_top_alt(None, None) { Ok(_pat) => { - if self.token.kind == token::FatArrow { + if self.token == token::FatArrow { // Reached arm end. 
self.restore_snapshot(pre_pat_snapshot); return Some(err(self, stmts)); @@ -3070,7 +3091,7 @@ impl<'a> Parser<'a> { pub(super) fn parse_arm(&mut self) -> PResult<'a, Arm> { let attrs = self.parse_outer_attributes()?; - self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| { + self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| { let lo = this.token.span; let (pat, guard) = this.parse_match_arm_pat_and_guard()?; @@ -3127,7 +3148,7 @@ impl<'a> Parser<'a> { let arm_start_span = this.token.span; let attrs = this.parse_outer_attributes()?; - let expr = + let (expr, _) = this.parse_expr_res(Restrictions::STMT_EXPR, attrs).map_err(|mut err| { err.span_label(arrow_span, "while parsing the `match` arm starting here"); err @@ -3244,7 +3265,8 @@ impl<'a> Parser<'a> { id: DUMMY_NODE_ID, is_placeholder: false, }, - false, + Trailing::No, + UsePreAttrPos::No, )) }) } @@ -3286,7 +3308,7 @@ impl<'a> Parser<'a> { } fn parse_match_arm_pat_and_guard(&mut self) -> PResult<'a, (P, Option>)> { - if self.token.kind == token::OpenDelim(Delimiter::Parenthesis) { + if self.token == token::OpenDelim(Delimiter::Parenthesis) { // Detect and recover from `($pat if $cond) => $arm`. let left = self.token.span; match self.parse_pat_allow_top_alt( @@ -3335,8 +3357,9 @@ impl<'a> Parser<'a> { fn parse_match_guard_condition(&mut self) -> PResult<'a, P> { let attrs = self.parse_outer_attributes()?; - self.parse_expr_res(Restrictions::ALLOW_LET | Restrictions::IN_IF_GUARD, attrs).map_err( - |mut err| { + match self.parse_expr_res(Restrictions::ALLOW_LET | Restrictions::IN_IF_GUARD, attrs) { + Ok((expr, _)) => Ok(expr), + Err(mut err) => { if self.prev_token == token::OpenDelim(Delimiter::Brace) { let sugg_sp = self.prev_token.span.shrink_to_lo(); // Consume everything within the braces, let's avoid further parse @@ -3344,7 +3367,7 @@ impl<'a> Parser<'a> { self.recover_stmt_(SemiColonMode::Ignore, BlockMode::Ignore); let msg = "you might have meant to start a match arm after the match guard"; if self.eat(&token::CloseDelim(Delimiter::Brace)) { - let applicability = if self.token.kind != token::FatArrow { + let applicability = if self.token != token::FatArrow { // We have high confidence that we indeed didn't have a struct // literal in the match guard, but rather we had some operation // that ended in a path, immediately followed by a block that was @@ -3356,9 +3379,9 @@ impl<'a> Parser<'a> { err.span_suggestion_verbose(sugg_sp, msg, "=> ", applicability); } } - err - }, - ) + Err(err) + } + } } pub(crate) fn is_builtin(&self) -> bool { @@ -3565,7 +3588,7 @@ impl<'a> Parser<'a> { && self.look_ahead(1, |t| { AssocOp::from_token(t).is_some() || matches!(t.kind, token::OpenDelim(_)) - || t.kind == token::Dot + || *t == token::Dot }) { // Looks like they tried to write a shorthand, complex expression. @@ -3709,7 +3732,7 @@ impl<'a> Parser<'a> { fn parse_expr_field(&mut self) -> PResult<'a, ExprField> { let attrs = self.parse_outer_attributes()?; self.recover_vcs_conflict_marker(); - self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| { + self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| { let lo = this.token.span; // Check if a colon exists one ahead. This means we're parsing a fieldname. 
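The field-name check noted in the comment above is a plain one-token lookahead: peek at the token after the identifier and, if it is `:`, commit to parsing `fieldname: expr` rather than the shorthand form. A toy cursor with the same `look_ahead` shape (all names here are illustrative, not rustc APIs):

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum Tok {
    Ident(&'static str),
    Lit(u32),
    Colon,
    Comma,
    Eof,
}

/// Toy cursor with the same "peek without consuming" shape as `look_ahead`.
struct Cursor {
    toks: Vec<Tok>,
    pos: usize,
}

impl Cursor {
    fn look_ahead(&self, n: usize) -> Tok {
        *self.toks.get(self.pos + n).unwrap_or(&Tok::Eof)
    }
}

fn main() {
    // `Foo { x: 1 }` vs the shorthand `Foo { x }`: seeing `:` one token ahead
    // of the identifier means we are parsing `fieldname: expr`.
    let explicit = Cursor { toks: vec![Tok::Ident("x"), Tok::Colon, Tok::Lit(1)], pos: 0 };
    let shorthand = Cursor { toks: vec![Tok::Ident("x"), Tok::Comma], pos: 0 };
    assert_eq!(explicit.look_ahead(1), Tok::Colon);
    assert_ne!(shorthand.look_ahead(1), Tok::Colon);
}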
@@ -3753,7 +3776,8 @@ impl<'a> Parser<'a> { id: DUMMY_NODE_ID, is_placeholder: false, }, - this.token == token::Comma, + Trailing::from(this.token == token::Comma), + UsePreAttrPos::No, )) }) } @@ -3847,15 +3871,17 @@ impl<'a> Parser<'a> { attrs: AttrWrapper, f: impl FnOnce(&mut Self, ast::AttrVec) -> PResult<'a, P>, ) -> PResult<'a, P> { - self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| { + self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| { let res = f(this, attrs)?; - let trailing = (this.restrictions.contains(Restrictions::STMT_EXPR) - && this.token.kind == token::Semi) - // FIXME: pass an additional condition through from the place - // where we know we need a comma, rather than assuming that - // `#[attr] expr,` always captures a trailing comma. - || this.token.kind == token::Comma; - Ok((res, trailing)) + let trailing = Trailing::from( + this.restrictions.contains(Restrictions::STMT_EXPR) + && this.token == token::Semi + // FIXME: pass an additional condition through from the place + // where we know we need a comma, rather than assuming that + // `#[attr] expr,` always captures a trailing comma. + || this.token == token::Comma, + ); + Ok((res, trailing, UsePreAttrPos::No)) }) } } diff --git a/compiler/rustc_parse/src/parser/generics.rs b/compiler/rustc_parse/src/parser/generics.rs index 9124c15707de5..af3b6f740e3d7 100644 --- a/compiler/rustc_parse/src/parser/generics.rs +++ b/compiler/rustc_parse/src/parser/generics.rs @@ -7,7 +7,7 @@ use rustc_span::symbol::{kw, Ident}; use rustc_span::Span; use thin_vec::ThinVec; -use super::{ForceCollect, Parser}; +use super::{ForceCollect, Parser, Trailing, UsePreAttrPos}; use crate::errors::{ self, MultipleWhereClauses, UnexpectedDefaultValueForLifetimeInGenericParameters, UnexpectedSelfInGenericParameters, WhereClauseBeforeTupleStructBody, @@ -169,94 +169,88 @@ impl<'a> Parser<'a> { let mut done = false; while !done { let attrs = self.parse_outer_attributes()?; - let param = - self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| { - if this.eat_keyword_noexpect(kw::SelfUpper) { - // `Self` as a generic param is invalid. Here we emit the diagnostic and continue parsing - // as if `Self` never existed. - this.dcx().emit_err(UnexpectedSelfInGenericParameters { - span: this.prev_token.span, - }); + let param = self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| { + if this.eat_keyword_noexpect(kw::SelfUpper) { + // `Self` as a generic param is invalid. Here we emit the diagnostic and continue parsing + // as if `Self` never existed. + this.dcx() + .emit_err(UnexpectedSelfInGenericParameters { span: this.prev_token.span }); + + // Eat a trailing comma, if it exists. + let _ = this.eat(&token::Comma); + } + + let param = if this.check_lifetime() { + let lifetime = this.expect_lifetime(); + // Parse lifetime parameter. + let (colon_span, bounds) = if this.eat(&token::Colon) { + (Some(this.prev_token.span), this.parse_lt_param_bounds()) + } else { + (None, Vec::new()) + }; - // Eat a trailing comma, if it exists. - let _ = this.eat(&token::Comma); + if this.check_noexpect(&token::Eq) && this.look_ahead(1, |t| t.is_lifetime()) { + let lo = this.token.span; + // Parse `= 'lifetime`. + this.bump(); // `=` + this.bump(); // `'lifetime` + let span = lo.to(this.prev_token.span); + this.dcx().emit_err(UnexpectedDefaultValueForLifetimeInGenericParameters { + span, + }); } - let param = if this.check_lifetime() { - let lifetime = this.expect_lifetime(); - // Parse lifetime parameter. 
- let (colon_span, bounds) = if this.eat(&token::Colon) { - (Some(this.prev_token.span), this.parse_lt_param_bounds()) - } else { - (None, Vec::new()) - }; - - if this.check_noexpect(&token::Eq) - && this.look_ahead(1, |t| t.is_lifetime()) - { - let lo = this.token.span; - // Parse `= 'lifetime`. - this.bump(); // `=` - this.bump(); // `'lifetime` - let span = lo.to(this.prev_token.span); - this.dcx().emit_err( - UnexpectedDefaultValueForLifetimeInGenericParameters { span }, - ); + Some(ast::GenericParam { + ident: lifetime.ident, + id: lifetime.id, + attrs, + bounds, + kind: ast::GenericParamKind::Lifetime, + is_placeholder: false, + colon_span, + }) + } else if this.check_keyword(kw::Const) { + // Parse const parameter. + Some(this.parse_const_param(attrs)?) + } else if this.check_ident() { + // Parse type parameter. + Some(this.parse_ty_param(attrs)?) + } else if this.token.can_begin_type() { + // Trying to write an associated type bound? (#26271) + let snapshot = this.create_snapshot_for_diagnostic(); + match this.parse_ty_where_predicate() { + Ok(where_predicate) => { + this.dcx().emit_err(errors::BadAssocTypeBounds { + span: where_predicate.span(), + }); + // FIXME - try to continue parsing other generics? } - - Some(ast::GenericParam { - ident: lifetime.ident, - id: lifetime.id, - attrs, - bounds, - kind: ast::GenericParamKind::Lifetime, - is_placeholder: false, - colon_span, - }) - } else if this.check_keyword(kw::Const) { - // Parse const parameter. - Some(this.parse_const_param(attrs)?) - } else if this.check_ident() { - // Parse type parameter. - Some(this.parse_ty_param(attrs)?) - } else if this.token.can_begin_type() { - // Trying to write an associated type bound? (#26271) - let snapshot = this.create_snapshot_for_diagnostic(); - match this.parse_ty_where_predicate() { - Ok(where_predicate) => { - this.dcx().emit_err(errors::BadAssocTypeBounds { - span: where_predicate.span(), - }); - // FIXME - try to continue parsing other generics? - return Ok((None, false)); - } - Err(err) => { - err.cancel(); - // FIXME - maybe we should overwrite 'self' outside of `collect_tokens`? - this.restore_snapshot(snapshot); - return Ok((None, false)); - } + Err(err) => { + err.cancel(); + // FIXME - maybe we should overwrite 'self' outside of `collect_tokens`? + this.restore_snapshot(snapshot); } - } else { - // Check for trailing attributes and stop parsing. - if !attrs.is_empty() { - if !params.is_empty() { - this.dcx() - .emit_err(errors::AttrAfterGeneric { span: attrs[0].span }); - } else { - this.dcx() - .emit_err(errors::AttrWithoutGenerics { span: attrs[0].span }); - } + } + return Ok((None, Trailing::No, UsePreAttrPos::No)); + } else { + // Check for trailing attributes and stop parsing. + if !attrs.is_empty() { + if !params.is_empty() { + this.dcx().emit_err(errors::AttrAfterGeneric { span: attrs[0].span }); + } else { + this.dcx() + .emit_err(errors::AttrWithoutGenerics { span: attrs[0].span }); } - return Ok((None, false)); - }; - - if !this.eat(&token::Comma) { - done = true; } - // We just ate the comma, so no need to capture the trailing token. - Ok((param, false)) - })?; + return Ok((None, Trailing::No, UsePreAttrPos::No)); + }; + + if !this.eat(&token::Comma) { + done = true; + } + // We just ate the comma, so no need to capture the trailing token. 
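The `Trailing::from(this.token == token::Comma)` pattern used throughout these closures leans on the `Trailing` enum and its `From<bool>` impl added in `parser/mod.rs` later in this diff. A standalone sketch of the same shape, with a toy list parser standing in for the real `collect_tokens` closures:

/// Did the parsed element end with a trailing separator? (Only `Trailing`
/// and its `From<bool>` impl mirror this patch; the rest below is a toy.)
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum Trailing {
    No,
    Yes,
}

impl From<bool> for Trailing {
    fn from(b: bool) -> Trailing {
        if b { Trailing::Yes } else { Trailing::No }
    }
}

/// Split a comma-separated list and report whether it ended with a comma,
/// the way each closure reports `Trailing::from(token == Comma)`.
fn parse_list(input: &str) -> (Vec<&str>, Trailing) {
    let trailing = Trailing::from(input.ends_with(','));
    let items = input
        .split(',')
        .map(str::trim)
        .filter(|s| !s.is_empty())
        .collect();
    (items, trailing)
}

fn main() {
    assert_eq!(parse_list("a, b, c,"), (vec!["a", "b", "c"], Trailing::Yes));
    assert_eq!(parse_list("a, b"), (vec!["a", "b"], Trailing::No));
}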
+ Ok((param, Trailing::No, UsePreAttrPos::No)) + })?; if let Some(param) = param { params.push(param); @@ -393,7 +387,7 @@ impl<'a> Parser<'a> { if let Some(struct_) = struct_ && self.may_recover() - && self.token.kind == token::OpenDelim(Delimiter::Parenthesis) + && self.token == token::OpenDelim(Delimiter::Parenthesis) { snapshot = Some((struct_, self.create_snapshot_for_diagnostic())); }; diff --git a/compiler/rustc_parse/src/parser/item.rs b/compiler/rustc_parse/src/parser/item.rs index baa5eb2df635d..47820e93c23d2 100644 --- a/compiler/rustc_parse/src/parser/item.rs +++ b/compiler/rustc_parse/src/parser/item.rs @@ -20,7 +20,9 @@ use tracing::debug; use super::diagnostics::{dummy_arg, ConsumeClosingDelim}; use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign}; -use super::{AttrWrapper, FollowedByType, ForceCollect, Parser, PathStyle, Trailing}; +use super::{ + AttrWrapper, FollowedByType, ForceCollect, Parser, PathStyle, Trailing, UsePreAttrPos, +}; use crate::errors::{self, MacroExpandsToAdtField}; use crate::{fluent_generated as fluent, maybe_whole}; @@ -127,7 +129,7 @@ impl<'a> Parser<'a> { Some(item.into_inner()) }); - self.collect_tokens_trailing_token(attrs, force_collect, |this, mut attrs| { + self.collect_tokens(None, attrs, force_collect, |this, mut attrs| { let lo = this.token.span; let vis = this.parse_visibility(FollowedByType::No)?; let mut def = this.parse_defaultness(); @@ -145,7 +147,7 @@ impl<'a> Parser<'a> { let span = lo.to(this.prev_token.span); let id = DUMMY_NODE_ID; let item = Item { ident, attrs, id, kind, vis, span, tokens: None }; - return Ok((Some(item), false)); + return Ok((Some(item), Trailing::No, UsePreAttrPos::No)); } // At this point, we have failed to parse an item. @@ -160,7 +162,7 @@ impl<'a> Parser<'a> { if !attrs_allowed { this.recover_attrs_no_item(&attrs)?; } - Ok((None, false)) + Ok((None, Trailing::No, UsePreAttrPos::No)) }) } @@ -354,7 +356,7 @@ impl<'a> Parser<'a> { fn is_reuse_path_item(&mut self) -> bool { // no: `reuse ::path` for compatibility reasons with macro invocations self.token.is_keyword(kw::Reuse) - && self.look_ahead(1, |t| t.is_path_start() && t.kind != token::PathSep) + && self.look_ahead(1, |t| t.is_path_start() && *t != token::PathSep) } /// Are we sure this could not possibly be a macro invocation? 
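The repeated cleanup in these hunks from `self.token.kind == ...` to `self.token == ...` presumably relies on a `PartialEq<TokenKind>` impl for `Token`, so a token can be compared directly against a bare kind while its span is ignored. The toy types below only sketch that shape and are not rustc's:

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum TokKind {
    Semi,
    Comma,
}

#[derive(Clone, Copy, Debug)]
struct Tok {
    kind: TokKind,
    span: (u32, u32),
}

// Comparing a token against a bare kind ignores the span, so call sites can
// write `token == TokKind::Semi` instead of `token.kind == TokKind::Semi`.
impl PartialEq<TokKind> for Tok {
    fn eq(&self, other: &TokKind) -> bool {
        self.kind == *other
    }
}

fn main() {
    let tok = Tok { kind: TokKind::Semi, span: (10, 11) };
    assert!(tok == TokKind::Semi);
    assert!(tok != TokKind::Comma);
    assert_eq!(tok.kind, TokKind::Semi); // the old spelling still works
    assert_eq!(tok.span, (10, 11));
}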
@@ -471,9 +473,8 @@ impl<'a> Parser<'a> { Err(mut err) => { // Maybe the user misspelled `macro_rules` (issue #91227) if self.token.is_ident() - && path.segments.len() == 1 - && edit_distance("macro_rules", &path.segments[0].ident.to_string(), 2) - .is_some() + && let [segment] = path.segments.as_slice() + && edit_distance("macro_rules", &segment.ident.to_string(), 2).is_some() { err.span_suggestion( path.span, @@ -500,7 +501,7 @@ impl<'a> Parser<'a> { let mut err = self.dcx().struct_span_err(end.span, msg); if end.is_doc_comment() { err.span_label(end.span, "this doc comment doesn't document anything"); - } else if self.token.kind == TokenKind::Semi { + } else if self.token == TokenKind::Semi { err.span_suggestion_verbose( self.token.span, "consider removing this semicolon", @@ -778,12 +779,12 @@ impl<'a> Parser<'a> { && self .span_to_snippet(self.prev_token.span) .is_ok_and(|snippet| snippet == "}") - && self.token.kind == token::Semi; + && self.token == token::Semi; let mut semicolon_span = self.token.span; if !is_unnecessary_semicolon { // #105369, Detect spurious `;` before assoc fn body is_unnecessary_semicolon = self.token == token::OpenDelim(Delimiter::Brace) - && self.prev_token.kind == token::Semi; + && self.prev_token == token::Semi; semicolon_span = self.prev_token.span; } // We have to bail or we'll potentially never make progress. @@ -1195,7 +1196,7 @@ impl<'a> Parser<'a> { // FIXME: This recovery should be tested better. if safety == Safety::Default && self.token.is_keyword(kw::Unsafe) - && self.look_ahead(1, |t| t.kind == token::OpenDelim(Delimiter::Brace)) + && self.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace)) { self.expect(&token::OpenDelim(Delimiter::Brace)).unwrap_err().emit(); safety = Safety::Unsafe(self.token.span); @@ -1259,7 +1260,7 @@ impl<'a> Parser<'a> { && self.is_keyword_ahead(1, &[kw::Extern]) && self.look_ahead( 2 + self.look_ahead(2, |t| t.can_begin_string_literal() as usize), - |t| t.kind == token::OpenDelim(Delimiter::Brace), + |t| *t == token::OpenDelim(Delimiter::Brace), ) } @@ -1344,7 +1345,7 @@ impl<'a> Parser<'a> { ) -> PResult<'a, (Ident, StaticItem)> { let ident = self.parse_ident()?; - if self.token.kind == TokenKind::Lt && self.may_recover() { + if self.token == TokenKind::Lt && self.may_recover() { let generics = self.parse_generics()?; self.dcx().emit_err(errors::StaticWithGenerics { span: generics.span }); } @@ -1547,86 +1548,82 @@ impl<'a> Parser<'a> { self.recover_vcs_conflict_marker(); let help = "enum variants can be `Variant`, `Variant = `, \ `Variant(Type, ..., TypeN)` or `Variant { fields: Types }`"; - self.collect_tokens_trailing_token( - variant_attrs, - ForceCollect::No, - |this, variant_attrs| { - let vlo = this.token.span; - - let vis = this.parse_visibility(FollowedByType::No)?; - if !this.recover_nested_adt_item(kw::Enum)? { - return Ok((None, false)); - } - let ident = this.parse_field_ident("enum", vlo)?; - - if this.token == token::Not { - if let Err(err) = this.unexpected() { - err.with_note(fluent::parse_macro_expands_to_enum_variant).emit(); - } + self.collect_tokens(None, variant_attrs, ForceCollect::No, |this, variant_attrs| { + let vlo = this.token.span; - this.bump(); - this.parse_delim_args()?; + let vis = this.parse_visibility(FollowedByType::No)?; + if !this.recover_nested_adt_item(kw::Enum)? 
{ + return Ok((None, Trailing::No, UsePreAttrPos::No)); + } + let ident = this.parse_field_ident("enum", vlo)?; - return Ok((None, this.token == token::Comma)); + if this.token == token::Not { + if let Err(err) = this.unexpected() { + err.with_note(fluent::parse_macro_expands_to_enum_variant).emit(); } - let struct_def = if this.check(&token::OpenDelim(Delimiter::Brace)) { - // Parse a struct variant. - let (fields, recovered) = - match this.parse_record_struct_body("struct", ident.span, false) { - Ok((fields, recovered)) => (fields, recovered), - Err(mut err) => { - if this.token == token::Colon { - // We handle `enum` to `struct` suggestion in the caller. - return Err(err); - } - this.eat_to_tokens(&[&token::CloseDelim(Delimiter::Brace)]); - this.bump(); // } - err.span_label(span, "while parsing this enum"); - err.help(help); - let guar = err.emit(); - (thin_vec![], Recovered::Yes(guar)) - } - }; - VariantData::Struct { fields, recovered: recovered.into() } - } else if this.check(&token::OpenDelim(Delimiter::Parenthesis)) { - let body = match this.parse_tuple_struct_body() { - Ok(body) => body, + this.bump(); + this.parse_delim_args()?; + + return Ok((None, Trailing::from(this.token == token::Comma), UsePreAttrPos::No)); + } + + let struct_def = if this.check(&token::OpenDelim(Delimiter::Brace)) { + // Parse a struct variant. + let (fields, recovered) = + match this.parse_record_struct_body("struct", ident.span, false) { + Ok((fields, recovered)) => (fields, recovered), Err(mut err) => { if this.token == token::Colon { // We handle `enum` to `struct` suggestion in the caller. return Err(err); } - this.eat_to_tokens(&[&token::CloseDelim(Delimiter::Parenthesis)]); - this.bump(); // ) + this.eat_to_tokens(&[&token::CloseDelim(Delimiter::Brace)]); + this.bump(); // } err.span_label(span, "while parsing this enum"); err.help(help); - err.emit(); - thin_vec![] + let guar = err.emit(); + (thin_vec![], Recovered::Yes(guar)) } }; - VariantData::Tuple(body, DUMMY_NODE_ID) - } else { - VariantData::Unit(DUMMY_NODE_ID) + VariantData::Struct { fields, recovered: recovered.into() } + } else if this.check(&token::OpenDelim(Delimiter::Parenthesis)) { + let body = match this.parse_tuple_struct_body() { + Ok(body) => body, + Err(mut err) => { + if this.token == token::Colon { + // We handle `enum` to `struct` suggestion in the caller. + return Err(err); + } + this.eat_to_tokens(&[&token::CloseDelim(Delimiter::Parenthesis)]); + this.bump(); // ) + err.span_label(span, "while parsing this enum"); + err.help(help); + err.emit(); + thin_vec![] + } }; + VariantData::Tuple(body, DUMMY_NODE_ID) + } else { + VariantData::Unit(DUMMY_NODE_ID) + }; - let disr_expr = - if this.eat(&token::Eq) { Some(this.parse_expr_anon_const()?) } else { None }; + let disr_expr = + if this.eat(&token::Eq) { Some(this.parse_expr_anon_const()?) 
} else { None }; - let vr = ast::Variant { - ident, - vis, - id: DUMMY_NODE_ID, - attrs: variant_attrs, - data: struct_def, - disr_expr, - span: vlo.to(this.prev_token.span), - is_placeholder: false, - }; + let vr = ast::Variant { + ident, + vis, + id: DUMMY_NODE_ID, + attrs: variant_attrs, + data: struct_def, + disr_expr, + span: vlo.to(this.prev_token.span), + is_placeholder: false, + }; - Ok((Some(vr), this.token == token::Comma)) - }, - ) + Ok((Some(vr), Trailing::from(this.token == token::Comma), UsePreAttrPos::No)) + }) .map_err(|mut err| { err.help(help); err @@ -1778,7 +1775,7 @@ impl<'a> Parser<'a> { // Unit like structs are handled in parse_item_struct function self.parse_paren_comma_seq(|p| { let attrs = p.parse_outer_attributes()?; - p.collect_tokens_trailing_token(attrs, ForceCollect::No, |p, attrs| { + p.collect_tokens(None, attrs, ForceCollect::No, |p, attrs| { let mut snapshot = None; if p.is_vcs_conflict_marker(&TokenKind::BinOp(token::Shl), &TokenKind::Lt) { // Account for `<<<<<<<` diff markers. We can't proactively error here because @@ -1816,7 +1813,8 @@ impl<'a> Parser<'a> { attrs, is_placeholder: false, }, - p.token == token::Comma, + Trailing::from(p.token == token::Comma), + UsePreAttrPos::No, )) }) }) @@ -1828,10 +1826,11 @@ impl<'a> Parser<'a> { self.recover_vcs_conflict_marker(); let attrs = self.parse_outer_attributes()?; self.recover_vcs_conflict_marker(); - self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| { + self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| { let lo = this.token.span; let vis = this.parse_visibility(FollowedByType::No)?; - this.parse_single_struct_field(adt_ty, lo, vis, attrs).map(|field| (field, false)) + this.parse_single_struct_field(adt_ty, lo, vis, attrs) + .map(|field| (field, Trailing::No, UsePreAttrPos::No)) }) } @@ -1915,7 +1914,7 @@ impl<'a> Parser<'a> { let mut err = self.dcx().struct_span_err(sp, msg); if self.token.is_ident() - || (self.token.kind == TokenKind::Pound + || (self.token == TokenKind::Pound && (self.look_ahead(1, |t| t == &token::OpenDelim(Delimiter::Bracket)))) { // This is likely another field, TokenKind::Pound is used for `#[..]` @@ -1938,8 +1937,8 @@ impl<'a> Parser<'a> { fn expect_field_ty_separator(&mut self) -> PResult<'a, ()> { if let Err(err) = self.expect(&token::Colon) { let sm = self.psess.source_map(); - let eq_typo = self.token.kind == token::Eq && self.look_ahead(1, |t| t.is_path_start()); - let semi_typo = self.token.kind == token::Semi + let eq_typo = self.token == token::Eq && self.look_ahead(1, |t| t.is_path_start()); + let semi_typo = self.token == token::Semi && self.look_ahead(1, |t| { t.is_path_start() // We check that we are in a situation like `foo; bar` to avoid bad suggestions @@ -1975,7 +1974,7 @@ impl<'a> Parser<'a> { attrs: AttrVec, ) -> PResult<'a, FieldDef> { let name = self.parse_field_ident(adt_ty, lo)?; - if self.token.kind == token::Not { + if self.token == token::Not { if let Err(mut err) = self.unexpected() { // Encounter the macro invocation err.subdiagnostic(MacroExpandsToAdtField { adt_ty }); @@ -1984,10 +1983,10 @@ impl<'a> Parser<'a> { } self.expect_field_ty_separator()?; let ty = self.parse_ty_for_field_def()?; - if self.token.kind == token::Colon && self.look_ahead(1, |tok| tok.kind != token::Colon) { + if self.token == token::Colon && self.look_ahead(1, |t| *t != token::Colon) { self.dcx().emit_err(errors::SingleColonStructType { span: self.token.span }); } - if self.token.kind == token::Eq { + if self.token == token::Eq { 
self.bump(); let const_expr = self.parse_expr_anon_const()?; let sp = ty.span.shrink_to_hi().to(const_expr.value.span); @@ -2065,7 +2064,7 @@ impl<'a> Parser<'a> { .parse_ident_common(false) // Cancel this error, we don't need it. .map_err(|err| err.cancel()) - && self.token.kind == TokenKind::Colon + && self.token == TokenKind::Colon { err.span_suggestion( removal_span, @@ -2368,12 +2367,12 @@ impl<'a> Parser<'a> { match self.expected_one_of_not_found(&[], expected) { Ok(error_guaranteed) => Ok(error_guaranteed), Err(mut err) => { - if self.token.kind == token::CloseDelim(Delimiter::Brace) { + if self.token == token::CloseDelim(Delimiter::Brace) { // The enclosing `mod`, `trait` or `impl` is being closed, so keep the `fn` in // the AST for typechecking. err.span_label(ident_span, "while parsing this `fn`"); Ok(err.emit()) - } else if self.token.kind == token::RArrow + } else if self.token == token::RArrow && let Some(fn_params_end) = fn_params_end { // Instead of a function body, the parser has encountered a right arrow @@ -2446,7 +2445,7 @@ impl<'a> Parser<'a> { fn_params_end: Option, ) -> PResult<'a, Option>> { let has_semi = if req_body { - self.token.kind == TokenKind::Semi + self.token == TokenKind::Semi } else { // Only include `;` in list of expected tokens if body is not required self.check(&TokenKind::Semi) @@ -2459,7 +2458,7 @@ impl<'a> Parser<'a> { } else if self.check(&token::OpenDelim(Delimiter::Brace)) || self.token.is_whole_block() { self.parse_block_common(self.token.span, BlockCheckMode::Default, false) .map(|(attrs, body)| (attrs, Some(body)))? - } else if self.token.kind == token::Eq { + } else if self.token == token::Eq { // Recover `fn foo() = $expr;`. self.bump(); // `=` let eq_sp = self.prev_token.span; @@ -2762,7 +2761,7 @@ impl<'a> Parser<'a> { pub(super) fn parse_fn_params(&mut self, req_name: ReqName) -> PResult<'a, ThinVec> { let mut first_param = true; // Parse the arguments, starting out with `self` being allowed... - if self.token.kind != TokenKind::OpenDelim(Delimiter::Parenthesis) + if self.token != TokenKind::OpenDelim(Delimiter::Parenthesis) // might be typo'd trait impl, handled elsewhere && !self.token.is_keyword(kw::For) { @@ -2806,12 +2805,12 @@ impl<'a> Parser<'a> { fn parse_param_general(&mut self, req_name: ReqName, first_param: bool) -> PResult<'a, Param> { let lo = self.token.span; let attrs = self.parse_outer_attributes()?; - self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| { + self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| { // Possibly parse `self`. Recover if we parsed it and it wasn't allowed here. if let Some(mut param) = this.parse_self_param()? 
{ param.attrs = attrs; let res = if first_param { Ok(param) } else { this.recover_bad_self_param(param) }; - return Ok((res?, false)); + return Ok((res?, Trailing::No, UsePreAttrPos::No)); } let is_name_required = match this.token.kind { @@ -2827,7 +2826,7 @@ impl<'a> Parser<'a> { this.parameter_without_type(&mut err, pat, is_name_required, first_param) { let guar = err.emit(); - Ok((dummy_arg(ident, guar), false)) + Ok((dummy_arg(ident, guar), Trailing::No, UsePreAttrPos::No)) } else { Err(err) }; @@ -2870,7 +2869,8 @@ impl<'a> Parser<'a> { Ok(( Param { attrs, id: ast::DUMMY_NODE_ID, is_placeholder: false, pat, span, ty }, - false, + Trailing::No, + UsePreAttrPos::No, )) }) } diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs index 4b8e4c25e16c2..61e3fa2e6c560 100644 --- a/compiler/rustc_parse/src/parser/mod.rs +++ b/compiler/rustc_parse/src/parser/mod.rs @@ -10,10 +10,11 @@ mod path; mod stmt; mod ty; +use std::assert_matches::debug_assert_matches; use std::ops::Range; use std::{fmt, mem, slice}; -use attr_wrapper::AttrWrapper; +use attr_wrapper::{AttrWrapper, UsePreAttrPos}; pub use diagnostics::AttemptLocalParseRecovery; pub(crate) use expr::ForbiddenLetReason; pub(crate) use item::FnParseMode; @@ -237,6 +238,7 @@ impl NodeRange { // is the position of the function's start token. This gives // `NodeRange(10..15)`. fn new(ParserRange(parser_range): ParserRange, start_pos: u32) -> NodeRange { + assert!(parser_range.start >= start_pos && parser_range.end >= start_pos); NodeRange((parser_range.start - start_pos)..(parser_range.end - start_pos)) } } @@ -252,7 +254,7 @@ enum Capturing { Yes, } -// This state is used by `Parser::collect_tokens_trailing_token`. +// This state is used by `Parser::collect_tokens`. #[derive(Clone, Debug)] struct CaptureState { capturing: Capturing, @@ -387,6 +389,12 @@ enum Trailing { Yes, } +impl From for Trailing { + fn from(b: bool) -> Trailing { + if b { Trailing::Yes } else { Trailing::No } + } +} + #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub(super) enum TokenDescription { ReservedIdentifier, @@ -458,8 +466,8 @@ impl<'a> Parser<'a> { parser.bump(); // Change this from 1 back to 0 after the bump. This eases debugging of - // `Parser::collect_tokens_trailing_token` nicer because it makes the - // token positions 0-indexed which is nicer than 1-indexed. + // `Parser::collect_tokens` because 0-indexed token positions are nicer + // than 1-indexed token positions. parser.num_bump_calls = 0; parser @@ -526,7 +534,7 @@ impl<'a> Parser<'a> { } else if inedible.contains(&self.token.kind) { // leave it in the input Ok(Recovered::No) - } else if self.token.kind != token::Eof + } else if self.token != token::Eof && self.last_unexpected_token_span == Some(self.token.span) { FatalError.raise(); @@ -755,7 +763,7 @@ impl<'a> Parser<'a> { /// compound tokens like multi-character operators in process. /// Returns `true` if the token was eaten. 
fn break_and_eat(&mut self, expected: TokenKind) -> bool { - if self.token.kind == expected { + if self.token == expected { self.bump(); return true; } @@ -881,7 +889,7 @@ impl<'a> Parser<'a> { let token_str = pprust::token_kind_to_string(t); match self.current_closure.take() { - Some(closure_spans) if self.token.kind == TokenKind::Semi => { + Some(closure_spans) if self.token == TokenKind::Semi => { // Finding a semicolon instead of a comma // after a closure body indicates that the // closure body may be a block but the user @@ -909,7 +917,7 @@ impl<'a> Parser<'a> { // If this was a missing `@` in a binding pattern // bail with a suggestion // https://github.com/rust-lang/rust/issues/72373 - if self.prev_token.is_ident() && self.token.kind == token::DotDot { + if self.prev_token.is_ident() && self.token == token::DotDot { let msg = format!( "if you meant to bind the contents of the rest of the array \ pattern into `{}`, use `@`", @@ -1166,10 +1174,12 @@ impl<'a> Parser<'a> { match self.token_cursor.tree_cursor.look_ahead(0) { Some(tree) => { // Indexing stayed within the current token tree. - return match tree { - TokenTree::Token(token, _) => looker(token), - TokenTree::Delimited(dspan, _, delim, _) => { - looker(&Token::new(token::OpenDelim(*delim), dspan.open)) + match tree { + TokenTree::Token(token, _) => return looker(token), + &TokenTree::Delimited(dspan, _, delim, _) => { + if delim != Delimiter::Invisible { + return looker(&Token::new(token::OpenDelim(delim), dspan.open)); + } } }; } @@ -1385,7 +1395,7 @@ impl<'a> Parser<'a> { // can capture these tokens if necessary. self.bump(); if self.token_cursor.stack.len() == target_depth { - debug_assert!(matches!(self.token.kind, token::CloseDelim(_))); + debug_assert_matches!(self.token.kind, token::CloseDelim(_)); break; } } @@ -1543,11 +1553,9 @@ impl<'a> Parser<'a> { ) -> PResult<'a, R> { // The only reason to call `collect_tokens_no_attrs` is if you want tokens, so use // `ForceCollect::Yes` - self.collect_tokens_trailing_token( - AttrWrapper::empty(), - ForceCollect::Yes, - |this, _attrs| Ok((f(this)?, false)), - ) + self.collect_tokens(None, AttrWrapper::empty(), ForceCollect::Yes, |this, _attrs| { + Ok((f(this)?, Trailing::No, UsePreAttrPos::No)) + }) } /// `::{` or `::*` diff --git a/compiler/rustc_parse/src/parser/pat.rs b/compiler/rustc_parse/src/parser/pat.rs index 5bfb8bdf776a0..eb9a957032f6c 100644 --- a/compiler/rustc_parse/src/parser/pat.rs +++ b/compiler/rustc_parse/src/parser/pat.rs @@ -13,7 +13,7 @@ use rustc_span::symbol::{kw, sym, Ident}; use rustc_span::{BytePos, ErrorGuaranteed, Span}; use thin_vec::{thin_vec, ThinVec}; -use super::{ForceCollect, Parser, PathStyle, Restrictions, Trailing}; +use super::{ForceCollect, Parser, PathStyle, Restrictions, Trailing, UsePreAttrPos}; use crate::errors::{ self, AmbiguousRangePattern, DotDotDotForRemainingFields, DotDotDotRangeToPatternNotAllowed, DotDotDotRestPattern, EnumPatternInsteadOfIdentifier, ExpectedBindingLeftOfAt, @@ -369,7 +369,7 @@ impl<'a> Parser<'a> { .and_then(|(ident, _)| ident.name.as_str().chars().next()) .is_some_and(char::is_lowercase) }) - && self.look_ahead(2, |tok| tok.kind == token::OpenDelim(Delimiter::Parenthesis)); + && self.look_ahead(2, |t| *t == token::OpenDelim(Delimiter::Parenthesis)); // Check for operators. 
// `|` is excluded as it is used in pattern alternatives and lambdas, @@ -377,9 +377,9 @@ impl<'a> Parser<'a> { // `[` is included for indexing operations, // `[]` is excluded as `a[]` isn't an expression and should be recovered as `a, []` (cf. `tests/ui/parser/pat-lt-bracket-7.rs`) let has_trailing_operator = matches!(self.token.kind, token::BinOp(op) if op != BinOpToken::Or) - || self.token.kind == token::Question - || (self.token.kind == token::OpenDelim(Delimiter::Bracket) - && self.look_ahead(1, |tok| tok.kind != token::CloseDelim(Delimiter::Bracket))); + || self.token == token::Question + || (self.token == token::OpenDelim(Delimiter::Bracket) + && self.look_ahead(1, |t| *t != token::CloseDelim(Delimiter::Bracket))); if !has_trailing_method && !has_trailing_operator { // Nothing to recover here. @@ -403,7 +403,7 @@ impl<'a> Parser<'a> { // Parse an associative expression such as `+ expr`, `% expr`, ... // Assignements, ranges and `|` are disabled by [`Restrictions::IS_PAT`]. - if let Ok(expr) = + if let Ok((expr, _)) = snapshot.parse_expr_assoc_rest_with(0, false, expr).map_err(|err| err.cancel()) { // We got a valid expression. @@ -413,7 +413,7 @@ impl<'a> Parser<'a> { let is_bound = is_end_bound // is_start_bound: either `..` or `)..` || self.token.is_range_separator() - || self.token.kind == token::CloseDelim(Delimiter::Parenthesis) + || self.token == token::CloseDelim(Delimiter::Parenthesis) && self.look_ahead(1, Token::is_range_separator); // Check that `parse_expr_assoc_with` didn't eat a rhs. @@ -450,7 +450,7 @@ impl<'a> Parser<'a> { lo = self.token.span; } - let pat = if self.check(&token::BinOp(token::And)) || self.token.kind == token::AndAnd { + let pat = if self.check(&token::BinOp(token::And)) || self.token == token::AndAnd { self.parse_pat_deref(expected)? } else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) { self.parse_pat_tuple_or_parens()? @@ -625,7 +625,7 @@ impl<'a> Parser<'a> { /// /// [and]: https://docs.microsoft.com/en-us/dotnet/fsharp/language-reference/pattern-matching fn recover_intersection_pat(&mut self, lhs: P) -> PResult<'a, P> { - if self.token.kind != token::At { + if self.token != token::At { // Next token is not `@` so it's not going to be an intersection pattern. return Ok(lhs); } @@ -958,14 +958,14 @@ impl<'a> Parser<'a> { self.check_inline_const(dist) || self.look_ahead(dist, |t| { t.is_path_start() // e.g. `MY_CONST`; - || t.kind == token::Dot // e.g. `.5` for recovery; + || *t == token::Dot // e.g. `.5` for recovery; || matches!(t.kind, token::Literal(..) 
| token::BinOp(token::Minus)) || t.is_bool_lit() || t.is_whole_expr() || t.is_lifetime() // recover `'a` instead of `'a'` || (self.may_recover() // recover leading `(` - && t.kind == token::OpenDelim(Delimiter::Parenthesis) - && self.look_ahead(dist + 1, |t| t.kind != token::OpenDelim(Delimiter::Parenthesis)) + && *t == token::OpenDelim(Delimiter::Parenthesis) + && self.look_ahead(dist + 1, |t| *t != token::OpenDelim(Delimiter::Parenthesis)) && self.is_pat_range_end_start(dist + 1)) }) } @@ -1302,24 +1302,23 @@ impl<'a> Parser<'a> { } } - let field = - self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| { - let field = match this.parse_pat_field(lo, attrs) { - Ok(field) => Ok(field), - Err(err) => { - if let Some(delayed_err) = delayed_err.take() { - delayed_err.emit(); - } - return Err(err); + let field = self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| { + let field = match this.parse_pat_field(lo, attrs) { + Ok(field) => Ok(field), + Err(err) => { + if let Some(delayed_err) = delayed_err.take() { + delayed_err.emit(); } - }?; - ate_comma = this.eat(&token::Comma); + return Err(err); + } + }?; + ate_comma = this.eat(&token::Comma); - last_non_comma_dotdot_span = Some(this.prev_token.span); + last_non_comma_dotdot_span = Some(this.prev_token.span); - // We just ate a comma, so there's no need to capture a trailing token. - Ok((field, false)) - })?; + // We just ate a comma, so there's no need to capture a trailing token. + Ok((field, Trailing::No, UsePreAttrPos::No)) + })?; fields.push(field) } diff --git a/compiler/rustc_parse/src/parser/path.rs b/compiler/rustc_parse/src/parser/path.rs index 70d2c98d4f1bd..b58f398efede2 100644 --- a/compiler/rustc_parse/src/parser/path.rs +++ b/compiler/rustc_parse/src/parser/path.rs @@ -358,9 +358,9 @@ impl<'a> Parser<'a> { })?; let span = lo.to(self.prev_token.span); AngleBracketedArgs { args, span }.into() - } else if self.token.kind == token::OpenDelim(Delimiter::Parenthesis) + } else if self.token == token::OpenDelim(Delimiter::Parenthesis) // FIXME(return_type_notation): Could also recover `...` here. - && self.look_ahead(1, |tok| tok.kind == token::DotDot) + && self.look_ahead(1, |t| *t == token::DotDot) { self.bump(); // ( self.bump(); // .. @@ -384,7 +384,7 @@ impl<'a> Parser<'a> { let token_before_parsing = self.token.clone(); let mut snapshot = None; if self.may_recover() - && prev_token_before_parsing.kind == token::PathSep + && prev_token_before_parsing == token::PathSep && (style == PathStyle::Expr && self.token.can_begin_expr() || style == PathStyle::Pat && self.token.can_begin_pattern()) { @@ -393,7 +393,7 @@ impl<'a> Parser<'a> { let (inputs, _) = match self.parse_paren_comma_seq(|p| p.parse_ty()) { Ok(output) => output, - Err(mut error) if prev_token_before_parsing.kind == token::PathSep => { + Err(mut error) if prev_token_before_parsing == token::PathSep => { error.span_label( prev_token_before_parsing.span.to(token_before_parsing.span), "while parsing this parenthesized list of type arguments starting here", @@ -826,7 +826,8 @@ impl<'a> Parser<'a> { // We can only resolve single-segment paths at the moment, because multi-segment paths // require type-checking: see `visit_generic_arg` in `src/librustc_resolve/late.rs`. 
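The single-segment checks in these hunks swap `segments.len() == 1` plus indexing for a slice pattern that proves the length and binds the element in one step. A self-contained sketch with toy `Path`/`Segment` types (not rustc's AST):

/// Toy stand-ins for `ast::Path` / `ast::PathSegment`.
struct Segment {
    ident: &'static str,
    args: Option<&'static str>,
}

struct Path {
    segments: Vec<Segment>,
}

/// `true` only for a bare single-segment path such as `N`, the shape the
/// surrounding code can resolve without type-checking.
fn is_trivial(path: &Path) -> bool {
    // The slice pattern replaces the old `len() == 1 && segments[0]...` form:
    // one check both proves there is exactly one segment and binds it.
    matches!(path.segments.as_slice(), [segment] if segment.args.is_none())
}

fn main() {
    let trivial = Path { segments: vec![Segment { ident: "N", args: None }] };
    let qualified = Path {
        segments: vec![
            Segment { ident: "usize", args: None },
            Segment { ident: "MAX", args: None },
        ],
    };
    assert_eq!(trivial.segments[0].ident, "N");
    assert!(is_trivial(&trivial));
    assert!(!is_trivial(&qualified));
}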
ast::ExprKind::Path(None, path) - if path.segments.len() == 1 && path.segments[0].args.is_none() => + if let [segment] = path.segments.as_slice() + && segment.args.is_none() => { true } @@ -912,7 +913,7 @@ impl<'a> Parser<'a> { let snapshot = self.create_snapshot_for_diagnostic(); let attrs = self.parse_outer_attributes()?; match self.parse_expr_res(Restrictions::CONST_EXPR, attrs) { - Ok(expr) => { + Ok((expr, _)) => { return Ok(Some(self.dummy_const_arg_needs_braces( self.dcx().struct_span_err(expr.span, "invalid const generic expression"), expr.span, diff --git a/compiler/rustc_parse/src/parser/stmt.rs b/compiler/rustc_parse/src/parser/stmt.rs index a3b782d651d3f..69044192780bc 100644 --- a/compiler/rustc_parse/src/parser/stmt.rs +++ b/compiler/rustc_parse/src/parser/stmt.rs @@ -21,6 +21,7 @@ use super::pat::{PatternLocation, RecoverComma}; use super::path::PathStyle; use super::{ AttrWrapper, BlockMode, FnParseMode, ForceCollect, Parser, Restrictions, SemiColonMode, + Trailing, UsePreAttrPos, }; use crate::errors::MalformedLoopLabel; use crate::{errors, maybe_whole}; @@ -45,6 +46,7 @@ impl<'a> Parser<'a> { capture_semi: bool, force_collect: ForceCollect, ) -> PResult<'a, Option> { + let pre_attr_pos = self.collect_pos(); let attrs = self.parse_outer_attributes()?; let lo = self.token.span; @@ -65,11 +67,15 @@ impl<'a> Parser<'a> { } Ok(Some(if self.token.is_keyword(kw::Let) { - self.collect_tokens_trailing_token(attrs, force_collect, |this, attrs| { + self.collect_tokens(None, attrs, force_collect, |this, attrs| { this.expect_keyword(kw::Let)?; let local = this.parse_local(attrs)?; - let trailing = capture_semi && this.token.kind == token::Semi; - Ok((this.mk_stmt(lo.to(this.prev_token.span), StmtKind::Let(local)), trailing)) + let trailing = Trailing::from(capture_semi && this.token == token::Semi); + Ok(( + this.mk_stmt(lo.to(this.prev_token.span), StmtKind::Let(local)), + trailing, + UsePreAttrPos::No, + )) })? } else if self.is_kw_followed_by_ident(kw::Mut) && self.may_recover() { self.recover_stmt_local_after_let( @@ -103,10 +109,18 @@ impl<'a> Parser<'a> { // or `auto trait` items. We aim to parse an arbitrary path `a::b` but not something // that starts like a path (1 token), but it fact not a path. // Also, we avoid stealing syntax from `parse_item_`. - let stmt = self.collect_tokens_trailing_token( + // + // `UsePreAttrPos::Yes` here means the attribute belongs unconditionally to the + // expression, not the statement. (But the statement attributes/tokens are obtained + // from the expression anyway, because `Stmt` delegates `HasAttrs`/`HasTokens` to + // the things within `StmtKind`.) + let stmt = self.collect_tokens( + Some(pre_attr_pos), AttrWrapper::empty(), force_collect, - |this, _empty_attrs| Ok((this.parse_stmt_path_start(lo, attrs)?, false)), + |this, _empty_attrs| { + Ok((this.parse_stmt_path_start(lo, attrs)?, Trailing::No, UsePreAttrPos::Yes)) + }, ); match stmt { Ok(stmt) => stmt, @@ -128,12 +142,15 @@ impl<'a> Parser<'a> { self.error_outer_attrs(attrs); self.mk_stmt(lo, StmtKind::Empty) } else if self.token != token::CloseDelim(Delimiter::Brace) { - // Remainder are line-expr stmts. - let e = self.collect_tokens_trailing_token( + // Remainder are line-expr stmts. This is similar to the `parse_stmt_path_start` case + // above. 
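The `UsePreAttrPos` plumbing above is about token collection ranges: when a preceding outer attribute belongs to the first operand of an associative expression, as in `#[attr] 1 + 2`, the outer expression's collected tokens must start at the position recorded before the attributes were parsed, otherwise the attribute would fall outside them. A toy illustration (only the name `UsePreAttrPos` mirrors this diff; the token buffer and helper are hypothetical):

/// Positions index into a toy token buffer for `#[a] 1 + 2`.
const TOKENS: [&str; 7] = ["#", "[", "a", "]", "1", "+", "2"];

enum UsePreAttrPos {
    No,
    Yes,
}

/// Pick the start of the collected token range for the outer expression.
/// For an associative expression the attribute really belongs to the first
/// operand, so the range must start back at the pre-attribute position.
fn collection_start(pre_attr_pos: usize, post_attr_pos: usize, use_pre: UsePreAttrPos) -> usize {
    match use_pre {
        UsePreAttrPos::Yes => pre_attr_pos,
        UsePreAttrPos::No => post_attr_pos,
    }
}

fn main() {
    let pre_attr_pos = 0; // recorded before the outer attributes are parsed
    let post_attr_pos = 4; // first token after `#[a]`, i.e. `1`
    let is_assoc = true; // `1 + 2` was parsed with a binary operator
    let use_pre = if is_assoc { UsePreAttrPos::Yes } else { UsePreAttrPos::No };
    let start = collection_start(pre_attr_pos, post_attr_pos, use_pre);
    // The collected tokens for the whole expression include `#[a]`.
    assert_eq!(TOKENS[start], "#");
    assert_eq!(TOKENS.len() - start, 7);
}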
+ let e = self.collect_tokens( + Some(pre_attr_pos), AttrWrapper::empty(), force_collect, |this, _empty_attrs| { - Ok((this.parse_expr_res(Restrictions::STMT_EXPR, attrs)?, false)) + let (expr, _) = this.parse_expr_res(Restrictions::STMT_EXPR, attrs)?; + Ok((expr, Trailing::No, UsePreAttrPos::Yes)) }, )?; if matches!(e.kind, ExprKind::Assign(..)) && self.eat_keyword(kw::Else) { @@ -150,12 +167,16 @@ impl<'a> Parser<'a> { } fn parse_stmt_path_start(&mut self, lo: Span, attrs: AttrWrapper) -> PResult<'a, Stmt> { - let stmt = self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| { + let stmt = self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| { let path = this.parse_path(PathStyle::Expr)?; if this.eat(&token::Not) { let stmt_mac = this.parse_stmt_mac(lo, attrs, path)?; - return Ok((stmt_mac, this.token == token::Semi)); + return Ok(( + stmt_mac, + Trailing::from(this.token == token::Semi), + UsePreAttrPos::No, + )); } let expr = if this.eat(&token::OpenDelim(Delimiter::Brace)) { @@ -169,13 +190,17 @@ impl<'a> Parser<'a> { this.parse_expr_dot_or_call_with(attrs, expr, lo) })?; // `DUMMY_SP` will get overwritten later in this function - Ok((this.mk_stmt(rustc_span::DUMMY_SP, StmtKind::Expr(expr)), false)) + Ok(( + this.mk_stmt(rustc_span::DUMMY_SP, StmtKind::Expr(expr)), + Trailing::No, + UsePreAttrPos::No, + )) })?; if let StmtKind::Expr(expr) = stmt.kind { - // Perform this outside of the `collect_tokens_trailing_token` closure, - // since our outer attributes do not apply to this part of the expression - let expr = self.with_res(Restrictions::STMT_EXPR, |this| { + // Perform this outside of the `collect_tokens` closure, since our + // outer attributes do not apply to this part of the expression. + let (expr, _) = self.with_res(Restrictions::STMT_EXPR, |this| { this.parse_expr_assoc_rest_with(0, true, expr) })?; Ok(self.mk_stmt(lo.to(self.prev_token.span), StmtKind::Expr(expr))) @@ -209,7 +234,7 @@ impl<'a> Parser<'a> { let e = self.mk_expr(lo.to(hi), ExprKind::MacCall(mac)); let e = self.maybe_recover_from_bad_qpath(e)?; let e = self.parse_expr_dot_or_call_with(attrs, e, lo)?; - let e = self.parse_expr_assoc_rest_with(0, false, e)?; + let (e, _) = self.parse_expr_assoc_rest_with(0, false, e)?; StmtKind::Expr(e) }; Ok(self.mk_stmt(lo.to(hi), kind)) @@ -239,10 +264,14 @@ impl<'a> Parser<'a> { subdiagnostic: fn(Span) -> errors::InvalidVariableDeclarationSub, force_collect: ForceCollect, ) -> PResult<'a, Stmt> { - let stmt = self.collect_tokens_trailing_token(attrs, force_collect, |this, attrs| { + let stmt = self.collect_tokens(None, attrs, force_collect, |this, attrs| { let local = this.parse_local(attrs)?; // FIXME - maybe capture semicolon in recovery? - Ok((this.mk_stmt(lo.to(this.prev_token.span), StmtKind::Let(local)), false)) + Ok(( + this.mk_stmt(lo.to(this.prev_token.span), StmtKind::Let(local)), + Trailing::No, + UsePreAttrPos::No, + )) })?; self.dcx() .emit_err(errors::InvalidVariableDeclaration { span: lo, sub: subdiagnostic(lo) }); @@ -676,7 +705,7 @@ impl<'a> Parser<'a> { match &expr.kind { ExprKind::Path(None, ast::Path { segments, .. 
}) - if segments.len() == 1 => + if let [segment] = segments.as_slice() => { if self.token == token::Colon && self.look_ahead(1, |token| { @@ -693,8 +722,8 @@ impl<'a> Parser<'a> { let snapshot = self.create_snapshot_for_diagnostic(); let label = Label { ident: Ident::from_str_and_span( - &format!("'{}", segments[0].ident), - segments[0].ident.span, + &format!("'{}", segment.ident), + segment.ident.span, ), }; match self.parse_expr_labeled(label, false) { @@ -760,7 +789,7 @@ impl<'a> Parser<'a> { ) ), ); - let suggest_eq = if self.token.kind == token::Dot + let suggest_eq = if self.token == token::Dot && let _ = self.bump() && let mut snapshot = self.create_snapshot_for_diagnostic() && let Ok(_) = snapshot diff --git a/compiler/rustc_parse/src/parser/tests.rs b/compiler/rustc_parse/src/parser/tests.rs index 2d82742f66c02..cb8e8d309887b 100644 --- a/compiler/rustc_parse/src/parser/tests.rs +++ b/compiler/rustc_parse/src/parser/tests.rs @@ -1,3 +1,4 @@ +use std::assert_matches::assert_matches; use std::io::prelude::*; use std::iter::Peekable; use std::path::{Path, PathBuf}; @@ -1747,7 +1748,7 @@ fn out_of_line_mod() { .unwrap(); let ast::ItemKind::Mod(_, mod_kind) = &item.kind else { panic!() }; - assert!(matches!(mod_kind, ast::ModKind::Loaded(items, ..) if items.len() == 2)); + assert_matches!(mod_kind, ast::ModKind::Loaded(items, ..) if items.len() == 2); }); } diff --git a/compiler/rustc_parse/src/parser/ty.rs b/compiler/rustc_parse/src/parser/ty.rs index 352ddd9eac4a5..a8e8270673afb 100644 --- a/compiler/rustc_parse/src/parser/ty.rs +++ b/compiler/rustc_parse/src/parser/ty.rs @@ -420,7 +420,7 @@ impl<'a> Parser<'a> { let mut trailing_plus = false; let (ts, trailing) = self.parse_paren_comma_seq(|p| { let ty = p.parse_ty()?; - trailing_plus = p.prev_token.kind == TokenKind::BinOp(token::Plus); + trailing_plus = p.prev_token == TokenKind::BinOp(token::Plus); Ok(ty) })?; @@ -499,8 +499,8 @@ impl<'a> Parser<'a> { let elt_ty = match self.parse_ty() { Ok(ty) => ty, Err(err) - if self.look_ahead(1, |t| t.kind == token::CloseDelim(Delimiter::Bracket)) - | self.look_ahead(1, |t| t.kind == token::Semi) => + if self.look_ahead(1, |t| *t == token::CloseDelim(Delimiter::Bracket)) + | self.look_ahead(1, |t| *t == token::Semi) => { // Recover from `[LIT; EXPR]` and `[LIT]` self.bump(); @@ -601,7 +601,7 @@ impl<'a> Parser<'a> { let span_start = self.token.span; let ast::FnHeader { ext, safety, constness, coroutine_kind } = self.parse_fn_front_matter(&inherited_vis, Case::Sensitive)?; - if self.may_recover() && self.token.kind == TokenKind::Lt { + if self.may_recover() && self.token == TokenKind::Lt { self.recover_fn_ptr_with_generics(lo, &mut params, param_insertion_point)?; } let decl = self.parse_fn_decl(|_| false, AllowPlus::No, recover_return_sign)?; @@ -681,7 +681,7 @@ impl<'a> Parser<'a> { // Always parse bounds greedily for better error recovery. 
let bounds = self.parse_generic_bounds()?; - *impl_dyn_multi = bounds.len() > 1 || self.prev_token.kind == TokenKind::BinOp(token::Plus); + *impl_dyn_multi = bounds.len() > 1 || self.prev_token == TokenKind::BinOp(token::Plus); Ok(TyKind::ImplTrait(ast::DUMMY_NODE_ID, bounds)) } @@ -727,8 +727,7 @@ impl<'a> Parser<'a> { self.check_keyword(kw::Dyn) && (self.token.uninterpolated_span().at_least_rust_2018() || self.look_ahead(1, |t| { - (can_begin_dyn_bound_in_edition_2015(t) - || t.kind == TokenKind::BinOp(token::Star)) + (can_begin_dyn_bound_in_edition_2015(t) || *t == TokenKind::BinOp(token::Star)) && !can_continue_type_after_non_fn_ident(t) })) } @@ -750,7 +749,7 @@ impl<'a> Parser<'a> { // Always parse bounds greedily for better error recovery. let bounds = self.parse_generic_bounds()?; - *impl_dyn_multi = bounds.len() > 1 || self.prev_token.kind == TokenKind::BinOp(token::Plus); + *impl_dyn_multi = bounds.len() > 1 || self.prev_token == TokenKind::BinOp(token::Plus); Ok(TyKind::TraitObject(bounds, syntax)) } @@ -1060,7 +1059,7 @@ impl<'a> Parser<'a> { } let mut path = if self.token.is_keyword(kw::Fn) - && self.look_ahead(1, |tok| tok.kind == TokenKind::OpenDelim(Delimiter::Parenthesis)) + && self.look_ahead(1, |t| *t == TokenKind::OpenDelim(Delimiter::Parenthesis)) && let Some(path) = self.recover_path_from_fn() { path diff --git a/compiler/rustc_parse/src/validate_attr.rs b/compiler/rustc_parse/src/validate_attr.rs index a64c00f3b6cbc..fce41bd90be7b 100644 --- a/compiler/rustc_parse/src/validate_attr.rs +++ b/compiler/rustc_parse/src/validate_attr.rs @@ -7,9 +7,7 @@ use rustc_ast::{ NestedMetaItem, Safety, }; use rustc_errors::{Applicability, FatalError, PResult}; -use rustc_feature::{ - AttributeSafety, AttributeTemplate, BuiltinAttribute, Features, BUILTIN_ATTRIBUTE_MAP, -}; +use rustc_feature::{AttributeSafety, AttributeTemplate, BuiltinAttribute, BUILTIN_ATTRIBUTE_MAP}; use rustc_session::errors::report_lit_error; use rustc_session::lint::builtin::{ILL_FORMED_ATTRIBUTE_INPUT, UNSAFE_ATTR_OUTSIDE_UNSAFE}; use rustc_session::lint::BuiltinLintDiag; @@ -18,7 +16,7 @@ use rustc_span::{sym, BytePos, Span, Symbol}; use crate::{errors, parse_in}; -pub fn check_attr(features: &Features, psess: &ParseSess, attr: &Attribute) { +pub fn check_attr(psess: &ParseSess, attr: &Attribute) { if attr.is_doc_comment() { return; } @@ -28,7 +26,7 @@ pub fn check_attr(features: &Features, psess: &ParseSess, attr: &Attribute) { // All non-builtin attributes are considered safe let safety = attr_info.map(|x| x.safety).unwrap_or(AttributeSafety::Normal); - check_attribute_safety(features, psess, safety, attr); + check_attribute_safety(psess, safety, attr); // Check input tokens for built-in and key-value attributes. match attr_info { @@ -36,9 +34,9 @@ pub fn check_attr(features: &Features, psess: &ParseSess, attr: &Attribute) { Some(BuiltinAttribute { name, template, .. 
}) if *name != sym::rustc_dummy => { match parse_meta(psess, attr) { // Don't check safety again, we just did that - Ok(meta) => check_builtin_meta_item( - features, psess, &meta, attr.style, *name, *template, false, - ), + Ok(meta) => { + check_builtin_meta_item(psess, &meta, attr.style, *name, *template, false) + } Err(err) => { err.emit(); } @@ -157,16 +155,7 @@ fn is_attr_template_compatible(template: &AttributeTemplate, meta: &ast::MetaIte } } -pub fn check_attribute_safety( - features: &Features, - psess: &ParseSess, - safety: AttributeSafety, - attr: &Attribute, -) { - if !features.unsafe_attributes { - return; - } - +pub fn check_attribute_safety(psess: &ParseSess, safety: AttributeSafety, attr: &Attribute) { let attr_item = attr.get_normal_item(); if safety == AttributeSafety::Unsafe { @@ -215,21 +204,18 @@ pub fn check_attribute_safety( // Called by `check_builtin_meta_item` and code that manually denies // `unsafe(...)` in `cfg` -pub fn deny_builtin_meta_unsafety(features: &Features, psess: &ParseSess, meta: &MetaItem) { +pub fn deny_builtin_meta_unsafety(psess: &ParseSess, meta: &MetaItem) { // This only supports denying unsafety right now - making builtin attributes // support unsafety will requite us to thread the actual `Attribute` through // for the nice diagnostics. - if features.unsafe_attributes { - if let Safety::Unsafe(unsafe_span) = meta.unsafety { - psess - .dcx() - .emit_err(errors::InvalidAttrUnsafe { span: unsafe_span, name: meta.path.clone() }); - } + if let Safety::Unsafe(unsafe_span) = meta.unsafety { + psess + .dcx() + .emit_err(errors::InvalidAttrUnsafe { span: unsafe_span, name: meta.path.clone() }); } } pub fn check_builtin_meta_item( - features: &Features, psess: &ParseSess, meta: &MetaItem, style: ast::AttrStyle, @@ -246,7 +232,7 @@ pub fn check_builtin_meta_item( } if deny_unsafety { - deny_builtin_meta_unsafety(features, psess, meta); + deny_builtin_meta_unsafety(psess, meta); } } diff --git a/compiler/rustc_passes/messages.ftl b/compiler/rustc_passes/messages.ftl index 59c9d1e49f5b3..1ea4ca375f156 100644 --- a/compiler/rustc_passes/messages.ftl +++ b/compiler/rustc_passes/messages.ftl @@ -223,6 +223,9 @@ passes_doc_masked_only_extern_crate = .not_an_extern_crate_label = not an `extern crate` item .note = read for more information +passes_doc_rust_logo = + the `#[doc(rust_logo)]` attribute is used for Rust branding + passes_doc_test_literal = `#![doc(test(...)]` does not take a literal passes_doc_test_takes_list = @@ -427,6 +430,10 @@ passes_link_section = .warn = {-passes_previously_accepted} .label = not a function or static +passes_linkage = + attribute should be applied to a function or static + .label = not a function definition or static + passes_macro_export = `#[macro_export]` only has an effect on macro definitions @@ -595,6 +602,9 @@ passes_remove_fields = *[other] fields } +passes_repr_align_function = + `repr(align)` attributes on functions are unstable + passes_repr_conflicting = conflicting representation hints diff --git a/compiler/rustc_passes/src/abi_test.rs b/compiler/rustc_passes/src/abi_test.rs index 839b96fb3de84..d0cc123c41a8a 100644 --- a/compiler/rustc_passes/src/abi_test.rs +++ b/compiler/rustc_passes/src/abi_test.rs @@ -127,14 +127,17 @@ fn dump_abi_of_fn_type(tcx: TyCtxt<'_>, item_def_id: LocalDefId, attr: &Attribut for meta_item in meta_items { match meta_item.name_or_empty() { sym::debug => { - let ty::FnPtr(sig) = ty.kind() else { + let ty::FnPtr(sig_tys, hdr) = ty.kind() else { span_bug!( meta_item.span(), 
"`#[rustc_abi(debug)]` on a type alias requires function pointer type" ); }; let abi = unwrap_fn_abi( - tcx.fn_abi_of_fn_ptr(param_env.and((*sig, /* extra_args */ ty::List::empty()))), + tcx.fn_abi_of_fn_ptr( + param_env + .and((sig_tys.with(*hdr), /* extra_args */ ty::List::empty())), + ), tcx, item_def_id, ); @@ -155,7 +158,7 @@ fn dump_abi_of_fn_type(tcx: TyCtxt<'_>, item_def_id: LocalDefId, attr: &Attribut "`#[rustc_abi(assert_eq)]` on a type alias requires pair type" ); }; - let ty::FnPtr(sig1) = field1.kind() else { + let ty::FnPtr(sig_tys1, hdr1) = field1.kind() else { span_bug!( meta_item.span(), "`#[rustc_abi(assert_eq)]` on a type alias requires pair of function pointer types" @@ -163,12 +166,13 @@ fn dump_abi_of_fn_type(tcx: TyCtxt<'_>, item_def_id: LocalDefId, attr: &Attribut }; let abi1 = unwrap_fn_abi( tcx.fn_abi_of_fn_ptr( - param_env.and((*sig1, /* extra_args */ ty::List::empty())), + param_env + .and((sig_tys1.with(*hdr1), /* extra_args */ ty::List::empty())), ), tcx, item_def_id, ); - let ty::FnPtr(sig2) = field2.kind() else { + let ty::FnPtr(sig_tys2, hdr2) = field2.kind() else { span_bug!( meta_item.span(), "`#[rustc_abi(assert_eq)]` on a type alias requires pair of function pointer types" @@ -176,7 +180,8 @@ fn dump_abi_of_fn_type(tcx: TyCtxt<'_>, item_def_id: LocalDefId, attr: &Attribut }; let abi2 = unwrap_fn_abi( tcx.fn_abi_of_fn_ptr( - param_env.and((*sig2, /* extra_args */ ty::List::empty())), + param_env + .and((sig_tys2.with(*hdr2), /* extra_args */ ty::List::empty())), ), tcx, item_def_id, diff --git a/compiler/rustc_passes/src/check_attr.rs b/compiler/rustc_passes/src/check_attr.rs index c8d4c190113b2..a47add929ebaa 100644 --- a/compiler/rustc_passes/src/check_attr.rs +++ b/compiler/rustc_passes/src/check_attr.rs @@ -243,6 +243,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> { [sym::coroutine, ..] => { self.check_coroutine(attr, target); } + [sym::linkage, ..] => self.check_linkage(attr, span, target), [ // ok sym::allow @@ -256,7 +257,6 @@ impl<'tcx> CheckAttrVisitor<'tcx> { | sym::cfi_encoding // FIXME(cfi_encoding) | sym::may_dangle // FIXME(dropck_eyepatch) | sym::pointee // FIXME(derive_smart_pointer) - | sym::linkage // FIXME(linkage) | sym::omit_gdb_pretty_printer_section // FIXME(omit_gdb_pretty_printer_section) | sym::used // handled elsewhere to restrict to static items | sym::repr // handled elsewhere to restrict to type decls items @@ -1142,7 +1142,6 @@ impl<'tcx> CheckAttrVisitor<'tcx> { /// of one item. Read the documentation of [`check_doc_inline`] for more information. /// /// [`check_doc_inline`]: Self::check_doc_inline - #[allow(rustc::untranslatable_diagnostic)] // FIXME: make this translatable fn check_doc_attrs( &self, attr: &Attribute, @@ -1220,7 +1219,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> { &self.tcx.sess, sym::rustdoc_internals, meta.span(), - "the `#[doc(rust_logo)]` attribute is used for Rust branding", + fluent::passes_doc_rust_logo, ) .emit(); } @@ -1736,7 +1735,6 @@ impl<'tcx> CheckAttrVisitor<'tcx> { } /// Checks if the `#[repr]` attributes on `item` are valid. 
- #[allow(rustc::untranslatable_diagnostic)] // FIXME: make this translatable fn check_repr( &self, attrs: &[Attribute], @@ -1793,7 +1791,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> { &self.tcx.sess, sym::fn_align, hint.span(), - "`repr(align)` attributes on functions are unstable", + fluent::passes_repr_align_function, ) .emit(); } @@ -2213,8 +2211,8 @@ impl<'tcx> CheckAttrVisitor<'tcx> { attr.name_or_empty(), sym::allow | sym::warn | sym::deny | sym::forbid | sym::expect ) && let Some(meta) = attr.meta_item_list() - && meta.len() == 1 - && let Some(item) = meta[0].meta_item() + && let [meta] = meta.as_slice() + && let Some(item) = meta.meta_item() && let MetaItemKind::NameValue(_) = &item.kind && item.path == sym::reason { @@ -2349,6 +2347,19 @@ impl<'tcx> CheckAttrVisitor<'tcx> { } } } + + fn check_linkage(&self, attr: &Attribute, span: Span, target: Target) { + match target { + Target::Fn + | Target::Method(..) + | Target::Static + | Target::ForeignStatic + | Target::ForeignFn => {} + _ => { + self.dcx().emit_err(errors::Linkage { attr_span: attr.span, span }); + } + } + } } impl<'tcx> Visitor<'tcx> for CheckAttrVisitor<'tcx> { diff --git a/compiler/rustc_passes/src/debugger_visualizer.rs b/compiler/rustc_passes/src/debugger_visualizer.rs index 0537d3a69f6dc..5d871bacb1dae 100644 --- a/compiler/rustc_passes/src/debugger_visualizer.rs +++ b/compiler/rustc_passes/src/debugger_visualizer.rs @@ -19,9 +19,7 @@ impl DebuggerVisualizerCollector<'_> { return; }; - let hint = if hints.len() == 1 { - &hints[0] - } else { + let [hint] = hints.as_slice() else { self.sess.dcx().emit_err(DebugVisualizerInvalid { span: attr.span }); return; }; diff --git a/compiler/rustc_passes/src/entry.rs b/compiler/rustc_passes/src/entry.rs index 48f55d4c3a0d5..ecb345bb51a49 100644 --- a/compiler/rustc_passes/src/entry.rs +++ b/compiler/rustc_passes/src/entry.rs @@ -106,7 +106,6 @@ fn check_and_search_item(id: ItemId, ctxt: &mut EntryContext<'_>) { } } -#[allow(rustc::untranslatable_diagnostic)] // FIXME: make this translatable fn configure_main(tcx: TyCtxt<'_>, visitor: &EntryContext<'_>) -> Option<(DefId, EntryFnType)> { if let Some((def_id, _)) = visitor.start_fn { Some((def_id.to_def_id(), EntryFnType::Start)) diff --git a/compiler/rustc_passes/src/errors.rs b/compiler/rustc_passes/src/errors.rs index 36dfc40e7628b..3a043e0e3c196 100644 --- a/compiler/rustc_passes/src/errors.rs +++ b/compiler/rustc_passes/src/errors.rs @@ -643,6 +643,15 @@ pub struct CoroutineOnNonClosure { pub span: Span, } +#[derive(Diagnostic)] +#[diag(passes_linkage)] +pub struct Linkage { + #[primary_span] + pub attr_span: Span, + #[label] + pub span: Span, +} + #[derive(Diagnostic)] #[diag(passes_empty_confusables)] pub(crate) struct EmptyConfusables { diff --git a/compiler/rustc_pattern_analysis/src/lib.rs b/compiler/rustc_pattern_analysis/src/lib.rs index e37fa072b6d65..6c9c848bb10ae 100644 --- a/compiler/rustc_pattern_analysis/src/lib.rs +++ b/compiler/rustc_pattern_analysis/src/lib.rs @@ -5,6 +5,7 @@ // tidy-alphabetical-start #![allow(rustc::diagnostic_outside_of_impl)] #![allow(rustc::untranslatable_diagnostic)] +#![cfg_attr(feature = "rustc", feature(let_chains))] // tidy-alphabetical-end pub mod constructor; diff --git a/compiler/rustc_pattern_analysis/src/rustc.rs b/compiler/rustc_pattern_analysis/src/rustc.rs index 25f7cc17c11ff..d7885e05a2ffc 100644 --- a/compiler/rustc_pattern_analysis/src/rustc.rs +++ b/compiler/rustc_pattern_analysis/src/rustc.rs @@ -23,6 +23,7 @@ use crate::constructor::{ }; use 
crate::lints::lint_nonexhaustive_missing_variants; use crate::pat_column::PatternColumn; +use crate::rustc::print::EnumInfo; use crate::usefulness::{compute_match_usefulness, PlaceValidity}; use crate::{errors, Captures, PatCx, PrivateUninhabitedField}; @@ -412,7 +413,7 @@ impl<'p, 'tcx: 'p> RustcPatCtxt<'p, 'tcx> { | ty::Foreign(_) | ty::RawPtr(_, _) | ty::FnDef(_, _) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::Pat(_, _) | ty::Dynamic(_, _, _) | ty::Closure(..) @@ -773,17 +774,16 @@ impl<'p, 'tcx: 'p> RustcPatCtxt<'p, 'tcx> { } } - /// Convert to a [`print::Pat`] for diagnostic purposes. - fn hoist_pat_range(&self, range: &IntRange, ty: RevealedTy<'tcx>) -> print::Pat<'tcx> { - use print::{Pat, PatKind}; + /// Prints an [`IntRange`] to a string for diagnostic purposes. + fn print_pat_range(&self, range: &IntRange, ty: RevealedTy<'tcx>) -> String { use MaybeInfiniteInt::*; let cx = self; - let kind = if matches!((range.lo, range.hi), (NegInfinity, PosInfinity)) { - PatKind::Wild + if matches!((range.lo, range.hi), (NegInfinity, PosInfinity)) { + "_".to_string() } else if range.is_singleton() { let lo = cx.hoist_pat_range_bdy(range.lo, ty); let value = lo.as_finite().unwrap(); - PatKind::Constant { value } + value.to_string() } else { // We convert to an inclusive range for diagnostics. let mut end = rustc_hir::RangeEnd::Included; @@ -806,99 +806,96 @@ impl<'p, 'tcx: 'p> RustcPatCtxt<'p, 'tcx> { range.hi }; let hi = cx.hoist_pat_range_bdy(hi, ty); - PatKind::Range(Box::new(PatRange { lo, hi, end, ty: ty.inner() })) - }; - - Pat { ty: ty.inner(), kind } + PatRange { lo, hi, end, ty: ty.inner() }.to_string() + } } /// Prints a [`WitnessPat`] to an owned string, for diagnostic purposes. + /// + /// This panics for patterns that don't appear in diagnostics, like float ranges. pub fn print_witness_pat(&self, pat: &WitnessPat<'p, 'tcx>) -> String { - // This works by converting the witness pattern to a `print::Pat` - // and then printing that, but callers don't need to know that. - self.hoist_witness_pat(pat).to_string() - } - - /// Convert to a [`print::Pat`] for diagnostic purposes. This panics for patterns that don't - /// appear in diagnostics, like float ranges. - fn hoist_witness_pat(&self, pat: &WitnessPat<'p, 'tcx>) -> print::Pat<'tcx> { - use print::{FieldPat, Pat, PatKind}; let cx = self; - let is_wildcard = |pat: &Pat<'_>| matches!(pat.kind, PatKind::Wild); - let mut subpatterns = pat.iter_fields().map(|p| Box::new(cx.hoist_witness_pat(p))); - let kind = match pat.ctor() { - Bool(b) => PatKind::Constant { value: mir::Const::from_bool(cx.tcx, *b) }, - IntRange(range) => return self.hoist_pat_range(range, *pat.ty()), - Struct | Variant(_) | UnionField => match pat.ty().kind() { - ty::Tuple(..) => PatKind::Leaf { - subpatterns: subpatterns - .enumerate() - .map(|(i, pattern)| FieldPat { field: FieldIdx::new(i), pattern }) - .collect(), - }, - ty::Adt(adt_def, _) if adt_def.is_box() => { - // Without `box_patterns`, the only legal pattern of type `Box` is `_` (outside - // of `std`). So this branch is only reachable when the feature is enabled and - // the pattern is a box pattern. 
- PatKind::Deref { subpattern: subpatterns.next().unwrap() } - } - ty::Adt(adt_def, _args) => { - let variant_index = RustcPatCtxt::variant_index_for_adt(&pat.ctor(), *adt_def); - let subpatterns = subpatterns - .enumerate() - .map(|(i, pattern)| FieldPat { field: FieldIdx::new(i), pattern }) - .collect(); + let print = |p| cx.print_witness_pat(p); + match pat.ctor() { + Bool(b) => b.to_string(), + Str(s) => s.to_string(), + IntRange(range) => return self.print_pat_range(range, *pat.ty()), + Struct if pat.ty().is_box() => { + // Outside of the `alloc` crate, the only way to create a struct pattern + // of type `Box` is to use a `box` pattern via #[feature(box_patterns)]. + format!("box {}", print(&pat.fields[0])) + } + Struct | Variant(_) | UnionField => { + let enum_info = match *pat.ty().kind() { + ty::Adt(adt_def, _) if adt_def.is_enum() => EnumInfo::Enum { + adt_def, + variant_index: RustcPatCtxt::variant_index_for_adt(pat.ctor(), adt_def), + }, + ty::Adt(..) | ty::Tuple(..) => EnumInfo::NotEnum, + _ => bug!("unexpected ctor for type {:?} {:?}", pat.ctor(), *pat.ty()), + }; - if adt_def.is_enum() { - PatKind::Variant { adt_def: *adt_def, variant_index, subpatterns } - } else { - PatKind::Leaf { subpatterns } - } - } - _ => bug!("unexpected ctor for type {:?} {:?}", pat.ctor(), *pat.ty()), - }, - // Note: given the expansion of `&str` patterns done in `expand_pattern`, we should - // be careful to reconstruct the correct constant pattern here. However a string - // literal pattern will never be reported as a non-exhaustiveness witness, so we - // ignore this issue. - Ref => PatKind::Deref { subpattern: subpatterns.next().unwrap() }, + let subpatterns = pat + .iter_fields() + .enumerate() + .map(|(i, pat)| print::FieldPat { + field: FieldIdx::new(i), + pattern: print(pat), + is_wildcard: would_print_as_wildcard(cx.tcx, pat), + }) + .collect::>(); + + let mut s = String::new(); + print::write_struct_like( + &mut s, + self.tcx, + pat.ty().inner(), + &enum_info, + &subpatterns, + ) + .unwrap(); + s + } + Ref => { + let mut s = String::new(); + print::write_ref_like(&mut s, pat.ty().inner(), &print(&pat.fields[0])).unwrap(); + s + } Slice(slice) => { - match slice.kind { - SliceKind::FixedLen(_) => PatKind::Slice { - prefix: subpatterns.collect(), - slice: None, - suffix: Box::new([]), - }, - SliceKind::VarLen(prefix, _) => { - let mut subpatterns = subpatterns.peekable(); - let mut prefix: Vec<_> = subpatterns.by_ref().take(prefix).collect(); - if slice.array_len.is_some() { - // Improves diagnostics a bit: if the type is a known-size array, instead - // of reporting `[x, _, .., _, y]`, we prefer to report `[x, .., y]`. - // This is incorrect if the size is not known, since `[_, ..]` captures - // arrays of lengths `>= 1` whereas `[..]` captures any length. 
- while !prefix.is_empty() && is_wildcard(prefix.last().unwrap()) { - prefix.pop(); - } - while subpatterns.peek().is_some() - && is_wildcard(subpatterns.peek().unwrap()) - { - subpatterns.next(); - } - } - let suffix: Box<[_]> = subpatterns.collect(); - let wild = Pat { ty: pat.ty().inner(), kind: PatKind::Wild }; - PatKind::Slice { - prefix: prefix.into_boxed_slice(), - slice: Some(Box::new(wild)), - suffix, - } + let (prefix_len, has_dot_dot) = match slice.kind { + SliceKind::FixedLen(len) => (len, false), + SliceKind::VarLen(prefix_len, _) => (prefix_len, true), + }; + + let (mut prefix, mut suffix) = pat.fields.split_at(prefix_len); + + // If the pattern contains a `..`, but is applied to values of statically-known + // length (arrays), then we can slightly simplify diagnostics by merging any + // adjacent wildcard patterns into the `..`: `[x, _, .., _, y]` => `[x, .., y]`. + // (This simplification isn't allowed for slice values, because in that case + // `[x, .., y]` would match some slices that `[x, _, .., _, y]` would not.) + if has_dot_dot && slice.array_len.is_some() { + while let [rest @ .., last] = prefix + && would_print_as_wildcard(cx.tcx, last) + { + prefix = rest; + } + while let [first, rest @ ..] = suffix + && would_print_as_wildcard(cx.tcx, first) + { + suffix = rest; } } + + let prefix = prefix.iter().map(print).collect::>(); + let suffix = suffix.iter().map(print).collect::>(); + + let mut s = String::new(); + print::write_slice_like(&mut s, &prefix, has_dot_dot, &suffix).unwrap(); + s } - &Str(value) => PatKind::Constant { value }, - Never if self.tcx.features().never_patterns => PatKind::Never, - Never | Wildcard | NonExhaustive | Hidden | PrivateUninhabited => PatKind::Wild, + Never if self.tcx.features().never_patterns => "!".to_string(), + Never | Wildcard | NonExhaustive | Hidden | PrivateUninhabited => "_".to_string(), Missing { .. } => bug!( "trying to convert a `Missing` constructor into a `Pat`; this is probably a bug, `Missing` should have been processed in `apply_constructors`" @@ -906,9 +903,23 @@ impl<'p, 'tcx: 'p> RustcPatCtxt<'p, 'tcx> { F16Range(..) | F32Range(..) | F64Range(..) | F128Range(..) | Opaque(..) | Or => { bug!("can't convert to pattern: {:?}", pat) } - }; + } + } +} - Pat { ty: pat.ty().inner(), kind } +/// Returns `true` if the given pattern would be printed as a wildcard (`_`). +fn would_print_as_wildcard(tcx: TyCtxt<'_>, p: &WitnessPat<'_, '_>) -> bool { + match p.ctor() { + Constructor::IntRange(IntRange { + lo: MaybeInfiniteInt::NegInfinity, + hi: MaybeInfiniteInt::PosInfinity, + }) + | Constructor::Wildcard + | Constructor::NonExhaustive + | Constructor::Hidden + | Constructor::PrivateUninhabited => true, + Constructor::Never if !tcx.features().never_patterns => true, + _ => false, } } @@ -968,7 +979,7 @@ impl<'p, 'tcx: 'p> PatCx for RustcPatCtxt<'p, 'tcx> { overlaps_on: IntRange, overlaps_with: &[&crate::pat::DeconstructedPat], ) { - let overlap_as_pat = self.hoist_pat_range(&overlaps_on, *pat.ty()); + let overlap_as_pat = self.print_pat_range(&overlaps_on, *pat.ty()); let overlaps: Vec<_> = overlaps_with .iter() .map(|pat| pat.data().span) @@ -1008,7 +1019,7 @@ impl<'p, 'tcx: 'p> PatCx for RustcPatCtxt<'p, 'tcx> { suggested_range.end = rustc_hir::RangeEnd::Included; suggested_range.to_string() }; - let gap_as_pat = self.hoist_pat_range(&gap, *pat.ty()); + let gap_as_pat = self.print_pat_range(&gap, *pat.ty()); if gapped_with.is_empty() { // If `gapped_with` is empty, `gap == T::MAX`. 
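Editor's note: the new slice-printing code above merges wildcards adjacent to `..` by shrinking the prefix and suffix subslices. A self-contained sketch of that trimming loop on plain string slices, illustrative only and written without `let_chains` so it runs on stable Rust:

fn trim_wildcards<'a>(mut prefix: &'a [&'a str], mut suffix: &'a [&'a str]) -> (Vec<&'a str>, Vec<&'a str>) {
    fn is_wild(s: &str) -> bool {
        s == "_"
    }
    // Drop wildcards sitting just before the `..`.
    while let [rest @ .., last] = prefix {
        if !is_wild(last) {
            break;
        }
        prefix = rest;
    }
    // Drop wildcards sitting just after the `..`.
    while let [first, rest @ ..] = suffix {
        if !is_wild(first) {
            break;
        }
        suffix = rest;
    }
    (prefix.to_vec(), suffix.to_vec())
}

fn main() {
    // `[x, _, .., _, y]` collapses to `[x, .., y]`.
    let (prefix, suffix) = trim_wildcards(&["x", "_", "_"], &["_", "y"]);
    assert_eq!(prefix, ["x"]);
    assert_eq!(suffix, ["y"]);
}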
self.tcx.emit_node_span_lint( diff --git a/compiler/rustc_pattern_analysis/src/rustc/print.rs b/compiler/rustc_pattern_analysis/src/rustc/print.rs index 4b76764e8b136..17e389df17ed2 100644 --- a/compiler/rustc_pattern_analysis/src/rustc/print.rs +++ b/compiler/rustc_pattern_analysis/src/rustc/print.rs @@ -11,183 +11,150 @@ use std::fmt; -use rustc_middle::thir::PatRange; -use rustc_middle::ty::{self, AdtDef, Ty}; -use rustc_middle::{bug, mir}; +use rustc_middle::bug; +use rustc_middle::ty::{self, AdtDef, Ty, TyCtxt}; use rustc_span::sym; use rustc_target::abi::{FieldIdx, VariantIdx}; #[derive(Clone, Debug)] -pub(crate) struct FieldPat<'tcx> { +pub(crate) struct FieldPat { pub(crate) field: FieldIdx, - pub(crate) pattern: Box>, + pub(crate) pattern: String, + pub(crate) is_wildcard: bool, } -#[derive(Clone, Debug)] -pub(crate) struct Pat<'tcx> { - pub(crate) ty: Ty<'tcx>, - pub(crate) kind: PatKind<'tcx>, +/// Returns a closure that will return `""` when called the first time, +/// and then return `", "` when called any subsequent times. +/// Useful for printing comma-separated lists. +fn start_or_comma() -> impl FnMut() -> &'static str { + let mut first = true; + move || { + if first { + first = false; + "" + } else { + ", " + } + } } #[derive(Clone, Debug)] -pub(crate) enum PatKind<'tcx> { - Wild, - - Variant { - adt_def: AdtDef<'tcx>, - variant_index: VariantIdx, - subpatterns: Vec>, - }, - - Leaf { - subpatterns: Vec>, - }, +pub(crate) enum EnumInfo<'tcx> { + Enum { adt_def: AdtDef<'tcx>, variant_index: VariantIdx }, + NotEnum, +} - Deref { - subpattern: Box>, - }, +pub(crate) fn write_struct_like<'tcx>( + f: &mut impl fmt::Write, + tcx: TyCtxt<'_>, + ty: Ty<'tcx>, + enum_info: &EnumInfo<'tcx>, + subpatterns: &[FieldPat], +) -> fmt::Result { + let variant_and_name = match *enum_info { + EnumInfo::Enum { adt_def, variant_index } => { + let variant = adt_def.variant(variant_index); + let adt_did = adt_def.did(); + let name = if tcx.is_diagnostic_item(sym::Option, adt_did) + || tcx.is_diagnostic_item(sym::Result, adt_did) + { + variant.name.to_string() + } else { + format!("{}::{}", tcx.def_path_str(adt_def.did()), variant.name) + }; + Some((variant, name)) + } + EnumInfo::NotEnum => ty.ty_adt_def().and_then(|adt_def| { + Some((adt_def.non_enum_variant(), tcx.def_path_str(adt_def.did()))) + }), + }; - Constant { - value: mir::Const<'tcx>, - }, + let mut start_or_comma = start_or_comma(); - Range(Box>), + if let Some((variant, name)) = &variant_and_name { + write!(f, "{name}")?; - Slice { - prefix: Box<[Box>]>, - slice: Option>>, - suffix: Box<[Box>]>, - }, + // Only for Adt we can have `S {...}`, + // which we handle separately here. + if variant.ctor.is_none() { + write!(f, " {{ ")?; - Never, -} + let mut printed = 0; + for &FieldPat { field, ref pattern, is_wildcard } in subpatterns { + if is_wildcard { + continue; + } + let field_name = variant.fields[field].name; + write!(f, "{}{field_name}: {pattern}", start_or_comma())?; + printed += 1; + } -impl<'tcx> fmt::Display for Pat<'tcx> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - // Printing lists is a chore. 
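Editor's note: the `start_or_comma` helper added above can be exercised in isolation. This small usage demo, not part of the patch, shows the returned closure producing an empty separator on the first call and ", " afterwards:

fn start_or_comma() -> impl FnMut() -> &'static str {
    let mut first = true;
    move || {
        if first {
            first = false;
            ""
        } else {
            ", "
        }
    }
}

fn main() {
    let mut sep = start_or_comma();
    let mut out = String::new();
    for item in ["a", "b", "c"] {
        // The separator is skipped only before the first item.
        out.push_str(sep());
        out.push_str(item);
    }
    assert_eq!(out, "a, b, c");
}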
- let mut first = true; - let mut start_or_continue = |s| { - if first { - first = false; - "" - } else { - s + let is_union = ty.ty_adt_def().is_some_and(|adt| adt.is_union()); + if printed < variant.fields.len() && (!is_union || printed == 0) { + write!(f, "{}..", start_or_comma())?; } - }; - let mut start_or_comma = || start_or_continue(", "); - - match self.kind { - PatKind::Wild => write!(f, "_"), - PatKind::Never => write!(f, "!"), - PatKind::Variant { ref subpatterns, .. } | PatKind::Leaf { ref subpatterns } => { - let variant_and_name = match self.kind { - PatKind::Variant { adt_def, variant_index, .. } => ty::tls::with(|tcx| { - let variant = adt_def.variant(variant_index); - let adt_did = adt_def.did(); - let name = if tcx.get_diagnostic_item(sym::Option) == Some(adt_did) - || tcx.get_diagnostic_item(sym::Result) == Some(adt_did) - { - variant.name.to_string() - } else { - format!("{}::{}", tcx.def_path_str(adt_def.did()), variant.name) - }; - Some((variant, name)) - }), - _ => self.ty.ty_adt_def().and_then(|adt_def| { - if !adt_def.is_enum() { - ty::tls::with(|tcx| { - Some((adt_def.non_enum_variant(), tcx.def_path_str(adt_def.did()))) - }) - } else { - None - } - }), - }; - - if let Some((variant, name)) = &variant_and_name { - write!(f, "{name}")?; - - // Only for Adt we can have `S {...}`, - // which we handle separately here. - if variant.ctor.is_none() { - write!(f, " {{ ")?; - - let mut printed = 0; - for p in subpatterns { - if let PatKind::Wild = p.pattern.kind { - continue; - } - let name = variant.fields[p.field].name; - write!(f, "{}{}: {}", start_or_comma(), name, p.pattern)?; - printed += 1; - } - - let is_union = self.ty.ty_adt_def().is_some_and(|adt| adt.is_union()); - if printed < variant.fields.len() && (!is_union || printed == 0) { - write!(f, "{}..", start_or_comma())?; - } - - return write!(f, " }}"); - } - } - let num_fields = - variant_and_name.as_ref().map_or(subpatterns.len(), |(v, _)| v.fields.len()); - if num_fields != 0 || variant_and_name.is_none() { - write!(f, "(")?; - for i in 0..num_fields { - write!(f, "{}", start_or_comma())?; - - // Common case: the field is where we expect it. - if let Some(p) = subpatterns.get(i) { - if p.field.index() == i { - write!(f, "{}", p.pattern)?; - continue; - } - } - - // Otherwise, we have to go looking for it. - if let Some(p) = subpatterns.iter().find(|p| p.field.index() == i) { - write!(f, "{}", p.pattern)?; - } else { - write!(f, "_")?; - } - } - write!(f, ")")?; - } + return write!(f, " }}"); + } + } - Ok(()) - } - PatKind::Deref { ref subpattern } => { - match self.ty.kind() { - ty::Adt(def, _) if def.is_box() => write!(f, "box ")?, - ty::Ref(_, _, mutbl) => { - write!(f, "&{}", mutbl.prefix_str())?; - } - _ => bug!("{} is a bad Deref pattern type", self.ty), + let num_fields = variant_and_name.as_ref().map_or(subpatterns.len(), |(v, _)| v.fields.len()); + if num_fields != 0 || variant_and_name.is_none() { + write!(f, "(")?; + for i in 0..num_fields { + write!(f, "{}", start_or_comma())?; + + // Common case: the field is where we expect it. 
+ if let Some(p) = subpatterns.get(i) { + if p.field.index() == i { + write!(f, "{}", p.pattern)?; + continue; } - write!(f, "{subpattern}") } - PatKind::Constant { value } => write!(f, "{value}"), - PatKind::Range(ref range) => write!(f, "{range}"), - PatKind::Slice { ref prefix, ref slice, ref suffix } => { - write!(f, "[")?; - for p in prefix.iter() { - write!(f, "{}{}", start_or_comma(), p)?; - } - if let Some(ref slice) = *slice { - write!(f, "{}", start_or_comma())?; - match slice.kind { - PatKind::Wild => {} - _ => write!(f, "{slice}")?, - } - write!(f, "..")?; - } - for p in suffix.iter() { - write!(f, "{}{}", start_or_comma(), p)?; - } - write!(f, "]") + + // Otherwise, we have to go looking for it. + if let Some(p) = subpatterns.iter().find(|p| p.field.index() == i) { + write!(f, "{}", p.pattern)?; + } else { + write!(f, "_")?; } } + write!(f, ")")?; + } + + Ok(()) +} + +pub(crate) fn write_ref_like<'tcx>( + f: &mut impl fmt::Write, + ty: Ty<'tcx>, + subpattern: &str, +) -> fmt::Result { + match ty.kind() { + ty::Ref(_, _, mutbl) => { + write!(f, "&{}", mutbl.prefix_str())?; + } + _ => bug!("{ty} is a bad ref pattern type"), + } + write!(f, "{subpattern}") +} + +pub(crate) fn write_slice_like( + f: &mut impl fmt::Write, + prefix: &[String], + has_dot_dot: bool, + suffix: &[String], +) -> fmt::Result { + let mut start_or_comma = start_or_comma(); + write!(f, "[")?; + for p in prefix.iter() { + write!(f, "{}{}", start_or_comma(), p)?; + } + if has_dot_dot { + write!(f, "{}..", start_or_comma())?; + } + for p in suffix.iter() { + write!(f, "{}{}", start_or_comma(), p)?; } + write!(f, "]") } diff --git a/compiler/rustc_query_system/src/dep_graph/debug.rs b/compiler/rustc_query_system/src/dep_graph/debug.rs index 4d009d63de59c..12ed574271162 100644 --- a/compiler/rustc_query_system/src/dep_graph/debug.rs +++ b/compiler/rustc_query_system/src/dep_graph/debug.rs @@ -46,15 +46,14 @@ pub struct EdgeFilter { impl EdgeFilter { pub fn new(test: &str) -> Result> { - let parts: Vec<_> = test.split("->").collect(); - if parts.len() != 2 { - Err(format!("expected a filter like `a&b -> c&d`, not `{test}`").into()) - } else { + if let [source, target] = *test.split("->").collect::>() { Ok(EdgeFilter { - source: DepNodeFilter::new(parts[0]), - target: DepNodeFilter::new(parts[1]), + source: DepNodeFilter::new(source), + target: DepNodeFilter::new(target), index_to_node: Lock::new(FxHashMap::default()), }) + } else { + Err(format!("expected a filter like `a&b -> c&d`, not `{test}`").into()) } } diff --git a/compiler/rustc_resolve/src/build_reduced_graph.rs b/compiler/rustc_resolve/src/build_reduced_graph.rs index 307625bcfb17c..d57dabdd78d91 100644 --- a/compiler/rustc_resolve/src/build_reduced_graph.rs +++ b/compiler/rustc_resolve/src/build_reduced_graph.rs @@ -374,7 +374,6 @@ impl<'a, 'b, 'tcx> BuildReducedGraphVisitor<'a, 'b, 'tcx> { root_span, root_id, vis, - used: Default::default(), }); self.r.indeterminate_imports.push(import); @@ -890,8 +889,10 @@ impl<'a, 'b, 'tcx> BuildReducedGraphVisitor<'a, 'b, 'tcx> { span: item.span, module_path: Vec::new(), vis, - used: Cell::new(used.then_some(Used::Other)), }); + if used { + self.r.import_use_map.insert(import, Used::Other); + } self.r.potentially_unused_imports.push(import); let imported_binding = self.r.import(binding, import); if parent == self.r.graph_root { @@ -1091,7 +1092,6 @@ impl<'a, 'b, 'tcx> BuildReducedGraphVisitor<'a, 'b, 'tcx> { span, module_path: Vec::new(), vis: ty::Visibility::Restricted(CRATE_DEF_ID), - used: Default::default(), }) 
}; @@ -1256,8 +1256,8 @@ impl<'a, 'b, 'tcx> BuildReducedGraphVisitor<'a, 'b, 'tcx> { span, module_path: Vec::new(), vis, - used: Cell::new(Some(Used::Other)), }); + self.r.import_use_map.insert(import, Used::Other); let import_binding = self.r.import(binding, import); self.r.define(self.r.graph_root, ident, MacroNS, import_binding); } else { diff --git a/compiler/rustc_resolve/src/check_unused.rs b/compiler/rustc_resolve/src/check_unused.rs index 75b8ecebdd914..1cee876b80fea 100644 --- a/compiler/rustc_resolve/src/check_unused.rs +++ b/compiler/rustc_resolve/src/check_unused.rs @@ -381,9 +381,9 @@ impl Resolver<'_, '_> { for import in self.potentially_unused_imports.iter() { match import.kind { - _ if import.used.get().is_some() - || import.vis.is_public() - || import.span.is_dummy() => + _ if import.vis.is_public() + || import.span.is_dummy() + || self.import_use_map.contains_key(import) => { if let ImportKind::MacroUse { .. } = import.kind { if !import.span.is_dummy() { diff --git a/compiler/rustc_resolve/src/imports.rs b/compiler/rustc_resolve/src/imports.rs index 4a891d12281c4..42171edf75749 100644 --- a/compiler/rustc_resolve/src/imports.rs +++ b/compiler/rustc_resolve/src/imports.rs @@ -176,7 +176,6 @@ pub(crate) struct ImportData<'a> { /// The resolution of `module_path`. pub imported_module: Cell>>, pub vis: ty::Visibility, - pub used: Cell>, } /// All imports are unique and allocated on a same arena, @@ -484,7 +483,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { }); self.record_use(target, dummy_binding, Used::Other); } else if import.imported_module.get().is_none() { - import.used.set(Some(Used::Other)); + self.import_use_map.insert(import, Used::Other); if let Some(id) = import.id() { self.used_imports.insert(id); } @@ -1347,7 +1346,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { // module defined by a block). // Skip if the import is public or was used through non scope-based resolution, // e.g. through a module-relative path. - if import.used.get() == Some(Used::Other) + if self.import_use_map.get(&import) == Some(&Used::Other) || self.effective_visibilities.is_exported(self.local_def_id(id)) { return false; diff --git a/compiler/rustc_resolve/src/late.rs b/compiler/rustc_resolve/src/late.rs index a5bd2fdd8f37b..4a70fc0f30847 100644 --- a/compiler/rustc_resolve/src/late.rs +++ b/compiler/rustc_resolve/src/late.rs @@ -445,8 +445,8 @@ impl<'a> PathSource<'a> { Some(ExprKind::Call(call_expr, _)) => match &call_expr.kind { // the case of `::some_crate()` ExprKind::Path(_, path) - if path.segments.len() == 2 - && path.segments[0].ident.name == kw::PathRoot => + if let [segment, _] = path.segments.as_slice() + && segment.ident.name == kw::PathRoot => { "external crate" } @@ -2396,15 +2396,14 @@ impl<'a: 'ast, 'b, 'ast, 'tcx> LateResolutionVisitor<'a, 'b, 'ast, 'tcx> { } fn future_proof_import(&mut self, use_tree: &UseTree) { - let segments = &use_tree.prefix.segments; - if !segments.is_empty() { - let ident = segments[0].ident; + if let [segment, rest @ ..] = use_tree.prefix.segments.as_slice() { + let ident = segment.ident; if ident.is_path_segment_keyword() || ident.span.is_rust_2015() { return; } let nss = match use_tree.kind { - UseTreeKind::Simple(..) if segments.len() == 1 => &[TypeNS, ValueNS][..], + UseTreeKind::Simple(..) 
if rest.is_empty() => &[TypeNS, ValueNS][..], _ => &[TypeNS], }; let report_error = |this: &Self, ns| { @@ -4009,16 +4008,15 @@ impl<'a: 'ast, 'b, 'ast, 'tcx> LateResolutionVisitor<'a, 'b, 'ast, 'tcx> { if this.should_report_errs() { if candidates.is_empty() { - if path.len() == 2 && prefix_path.len() == 1 { + if path.len() == 2 + && let [segment] = prefix_path + { // Delay to check whether methond name is an associated function or not // ``` // let foo = Foo {}; // foo::bar(); // possibly suggest to foo.bar(); //``` - err.stash( - prefix_path[0].ident.span, - rustc_errors::StashKey::CallAssocMethod, - ); + err.stash(segment.ident.span, rustc_errors::StashKey::CallAssocMethod); } else { // When there is no suggested imports, we can just emit the error // and suggestions immediately. Note that we bypass the usually error diff --git a/compiler/rustc_resolve/src/late/diagnostics.rs b/compiler/rustc_resolve/src/late/diagnostics.rs index f9896fb21964d..f778b0ee3acc0 100644 --- a/compiler/rustc_resolve/src/late/diagnostics.rs +++ b/compiler/rustc_resolve/src/late/diagnostics.rs @@ -650,14 +650,14 @@ impl<'a: 'ast, 'ast, 'tcx> LateResolutionVisitor<'a, '_, 'ast, 'tcx> { let typo_sugg = self .lookup_typo_candidate(path, following_seg, source.namespace(), is_expected) .to_opt_suggestion(); - if path.len() == 1 + if let [segment] = path && !matches!(source, PathSource::Delegation) && self.self_type_is_available() { if let Some(candidate) = self.lookup_assoc_candidate(ident, ns, is_expected, source.is_call()) { - let self_is_available = self.self_value_is_available(path[0].ident.span); + let self_is_available = self.self_value_is_available(segment.ident.span); // Account for `Foo { field }` when suggesting `self.field` so we result on // `Foo { field: self.field }`. let pre = match source { @@ -665,7 +665,7 @@ impl<'a: 'ast, 'ast, 'tcx> LateResolutionVisitor<'a, '_, 'ast, 'tcx> { if expr .fields .iter() - .any(|f| f.ident == path[0].ident && f.is_shorthand) => + .any(|f| f.ident == segment.ident && f.is_shorthand) => { format!("{path_str}: ") } @@ -1258,8 +1258,7 @@ impl<'a: 'ast, 'ast, 'tcx> LateResolutionVisitor<'a, '_, 'ast, 'tcx> { ) }) .collect(); - if targets.len() == 1 { - let target = targets[0]; + if let [target] = targets.as_slice() { return Some(TypoSuggestion::single_item_from_ident(target.0.ident, target.1)); } } @@ -2105,8 +2104,8 @@ impl<'a: 'ast, 'ast, 'tcx> LateResolutionVisitor<'a, '_, 'ast, 'tcx> { filter_fn: &impl Fn(Res) -> bool, ) -> TypoCandidate { let mut names = Vec::new(); - if path.len() == 1 { - let mut ctxt = path.last().unwrap().ident.span.ctxt(); + if let [segment] = path { + let mut ctxt = segment.ident.span.ctxt(); // Search in lexical scope. // Walk backwards up the ribs in scope and collect candidates. diff --git a/compiler/rustc_resolve/src/lib.rs b/compiler/rustc_resolve/src/lib.rs index 89ac839651fb1..02fdc1ae6685e 100644 --- a/compiler/rustc_resolve/src/lib.rs +++ b/compiler/rustc_resolve/src/lib.rs @@ -1016,6 +1016,8 @@ pub struct Resolver<'a, 'tcx> { partial_res_map: NodeMap, /// Resolutions for import nodes, which have multiple resolutions in different namespaces. import_res_map: NodeMap>>, + /// An import will be inserted into this map if it has been used. + import_use_map: FxHashMap, Used>, /// Resolutions for labels (node IDs of their corresponding blocks or loops). label_res_map: NodeMap, /// Resolutions for lifetimes. 
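Editor's note: the resolver change above drops the per-import interior-mutable `used` cell in favour of a side table, `import_use_map`, updated through the entry API and upgraded to the strongest `Used` value seen. A toy model of that pattern with hypothetical names (plain `u32` ids stand in for arena-allocated imports):

use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
enum Used {
    Scope,
    Other,
}

#[derive(Default)]
struct Resolver {
    // Side table keyed by import id, instead of a mutable flag on each node.
    import_use_map: HashMap<u32, Used>,
}

impl Resolver {
    fn record_use(&mut self, import_id: u32, used: Used) {
        // Mirrors `entry(import).or_insert(used)` plus the upgrade-if-greater step.
        let slot = self.import_use_map.entry(import_id).or_insert(used);
        if *slot < used {
            *slot = used;
        }
    }
}

fn main() {
    let mut r = Resolver::default();
    r.record_use(7, Used::Scope);
    r.record_use(7, Used::Other); // upgrades Scope -> Other
    assert_eq!(r.import_use_map.get(&7), Some(&Used::Other));
}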
@@ -1422,6 +1424,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { pat_span_map: Default::default(), partial_res_map: Default::default(), import_res_map: Default::default(), + import_use_map: Default::default(), label_res_map: Default::default(), lifetimes_res_map: Default::default(), extra_lifetime_params_map: Default::default(), @@ -1839,7 +1842,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { } /// Test if AmbiguityError ambi is any identical to any one inside ambiguity_errors - fn matches_previous_ambiguity_error(&mut self, ambi: &AmbiguityError<'_>) -> bool { + fn matches_previous_ambiguity_error(&self, ambi: &AmbiguityError<'_>) -> bool { for ambiguity_error in &self.ambiguity_errors { // if the span location and ident as well as its span are the same if ambiguity_error.kind == ambi.kind @@ -1900,10 +1903,9 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { } } } - let old_used = import.used.get(); - let new_used = Some(used); - if new_used > old_used { - import.used.set(new_used); + let old_used = self.import_use_map.entry(import).or_insert(used); + if *old_used < used { + *old_used = used; } if let Some(id) = import.id() { self.used_imports.insert(id); diff --git a/compiler/rustc_resolve/src/macros.rs b/compiler/rustc_resolve/src/macros.rs index da7278175e93a..7203fbe4a0c18 100644 --- a/compiler/rustc_resolve/src/macros.rs +++ b/compiler/rustc_resolve/src/macros.rs @@ -109,8 +109,8 @@ pub(crate) fn sub_namespace_match( // `format!("{}", path)`, because that tries to insert // line-breaks and is slow. fn fast_print_path(path: &ast::Path) -> Symbol { - if path.segments.len() == 1 { - path.segments[0].ident.name + if let [segment] = path.segments.as_slice() { + segment.ident.name } else { let mut path_str = String::with_capacity(64); for (i, segment) in path.segments.iter().enumerate() { @@ -738,10 +738,10 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { // Possibly apply the macro helper hack if deleg_impl.is_none() && kind == Some(MacroKind::Bang) - && path.len() == 1 - && path[0].ident.span.ctxt().outer_expn_data().local_inner_macros + && let [segment] = path.as_slice() + && segment.ident.span.ctxt().outer_expn_data().local_inner_macros { - let root = Ident::new(kw::DollarCrate, path[0].ident.span); + let root = Ident::new(kw::DollarCrate, segment.ident.span); path.insert(0, Segment::from_ident(root)); } diff --git a/compiler/rustc_sanitizers/src/cfi/typeid/itanium_cxx_abi/encode.rs b/compiler/rustc_sanitizers/src/cfi/typeid/itanium_cxx_abi/encode.rs index 61de338eab1c0..a4e4f50e8f2d4 100644 --- a/compiler/rustc_sanitizers/src/cfi/typeid/itanium_cxx_abi/encode.rs +++ b/compiler/rustc_sanitizers/src/cfi/typeid/itanium_cxx_abi/encode.rs @@ -607,10 +607,15 @@ pub fn encode_ty<'tcx>( typeid.push_str(&s); } - ty::FnPtr(fn_sig) => { + ty::FnPtr(sig_tys, hdr) => { // PFE let mut s = String::from("P"); - s.push_str(&encode_fnsig(tcx, &fn_sig.skip_binder(), dict, TypeIdOptions::empty())); + s.push_str(&encode_fnsig( + tcx, + &sig_tys.with(*hdr).skip_binder(), + dict, + TypeIdOptions::empty(), + )); compress(dict, DictKey::Ty(ty, TyQ::None), &mut s); typeid.push_str(&s); } diff --git a/compiler/rustc_session/src/options.rs b/compiler/rustc_session/src/options.rs index 4b87f5d62b21e..df72e2430fd50 100644 --- a/compiler/rustc_session/src/options.rs +++ b/compiler/rustc_session/src/options.rs @@ -912,16 +912,9 @@ mod parse { match v { None => false, Some(s) => { - let parts = s.split('=').collect::>(); - if parts.len() != 2 { - return false; - } - let crate_name = parts[0].to_string(); - let fuel = parts[1].parse::(); - if 
fuel.is_err() { - return false; - } - *slot = Some((crate_name, fuel.unwrap())); + let [crate_name, fuel] = *s.split('=').collect::>() else { return false }; + let Ok(fuel) = fuel.parse::() else { return false }; + *slot = Some((crate_name.to_string(), fuel)); true } } diff --git a/compiler/rustc_session/src/session.rs b/compiler/rustc_session/src/session.rs index 672dddf871e02..693867c3853da 100644 --- a/compiler/rustc_session/src/session.rs +++ b/compiler/rustc_session/src/session.rs @@ -1188,7 +1188,12 @@ fn validate_commandline_args_with_session_available(sess: &Session) { // Sanitizers can only be used on platforms that we know have working sanitizer codegen. let supported_sanitizers = sess.target.options.supported_sanitizers; - let unsupported_sanitizers = sess.opts.unstable_opts.sanitizer - supported_sanitizers; + let mut unsupported_sanitizers = sess.opts.unstable_opts.sanitizer - supported_sanitizers; + // Niche: if `fixed-x18`, or effectively switching on `reserved-x18` flag, is enabled + // we should allow Shadow Call Stack sanitizer. + if sess.opts.unstable_opts.fixed_x18 && sess.target.arch == "aarch64" { + unsupported_sanitizers -= SanitizerSet::SHADOWCALLSTACK; + } match unsupported_sanitizers.into_iter().count() { 0 => {} 1 => { diff --git a/compiler/rustc_smir/src/rustc_internal/internal.rs b/compiler/rustc_smir/src/rustc_internal/internal.rs index a4577461094bb..f52cb010a872f 100644 --- a/compiler/rustc_smir/src/rustc_internal/internal.rs +++ b/compiler/rustc_smir/src/rustc_internal/internal.rs @@ -131,7 +131,10 @@ impl RustcInternal for RigidTy { RigidTy::FnDef(def, args) => { rustc_ty::TyKind::FnDef(def.0.internal(tables, tcx), args.internal(tables, tcx)) } - RigidTy::FnPtr(sig) => rustc_ty::TyKind::FnPtr(sig.internal(tables, tcx)), + RigidTy::FnPtr(sig) => { + let (sig_tys, hdr) = sig.internal(tables, tcx).split(); + rustc_ty::TyKind::FnPtr(sig_tys, hdr) + } RigidTy::Closure(def, args) => { rustc_ty::TyKind::Closure(def.0.internal(tables, tcx), args.internal(tables, tcx)) } diff --git a/compiler/rustc_smir/src/rustc_smir/convert/ty.rs b/compiler/rustc_smir/src/rustc_smir/convert/ty.rs index ef2eb7d52eaef..332fe22d86955 100644 --- a/compiler/rustc_smir/src/rustc_smir/convert/ty.rs +++ b/compiler/rustc_smir/src/rustc_smir/convert/ty.rs @@ -352,7 +352,9 @@ impl<'tcx> Stable<'tcx> for ty::TyKind<'tcx> { ty::FnDef(def_id, generic_args) => { TyKind::RigidTy(RigidTy::FnDef(tables.fn_def(*def_id), generic_args.stable(tables))) } - ty::FnPtr(poly_fn_sig) => TyKind::RigidTy(RigidTy::FnPtr(poly_fn_sig.stable(tables))), + ty::FnPtr(sig_tys, hdr) => { + TyKind::RigidTy(RigidTy::FnPtr(sig_tys.with(*hdr).stable(tables))) + } ty::Dynamic(existential_predicates, region, dyn_kind) => { TyKind::RigidTy(RigidTy::Dynamic( existential_predicates diff --git a/compiler/rustc_span/src/symbol.rs b/compiler/rustc_span/src/symbol.rs index 32fca6733bb55..a2e94492f8c23 100644 --- a/compiler/rustc_span/src/symbol.rs +++ b/compiler/rustc_span/src/symbol.rs @@ -472,6 +472,7 @@ symbols! { attr, attr_literals, attributes, + audit_that, augmented_assignments, auto_traits, automatically_derived, @@ -1619,6 +1620,7 @@ symbols! 
{ rustc_dirty, rustc_do_not_const_check, rustc_doc_primitive, + rustc_driver, rustc_dummy, rustc_dump_def_parents, rustc_dump_item_bounds, diff --git a/compiler/rustc_symbol_mangling/src/v0.rs b/compiler/rustc_symbol_mangling/src/v0.rs index c2451c08d1158..3a606f244e37a 100644 --- a/compiler/rustc_symbol_mangling/src/v0.rs +++ b/compiler/rustc_symbol_mangling/src/v0.rs @@ -427,7 +427,8 @@ impl<'tcx> Printer<'tcx> for SymbolMangler<'tcx> { self.print_def_path(def_id, &[])?; } - ty::FnPtr(sig) => { + ty::FnPtr(sig_tys, hdr) => { + let sig = sig_tys.with(hdr); self.push("F"); self.in_binder(&sig, |cx, sig| { if sig.safety == hir::Safety::Unsafe { diff --git a/compiler/rustc_target/src/abi/call/powerpc64.rs b/compiler/rustc_target/src/abi/call/powerpc64.rs index 11a6cb52babc9..749eea0ef6350 100644 --- a/compiler/rustc_target/src/abi/call/powerpc64.rs +++ b/compiler/rustc_target/src/abi/call/powerpc64.rs @@ -41,64 +41,23 @@ where }) } -fn classify_ret<'a, Ty, C>(cx: &C, ret: &mut ArgAbi<'a, Ty>, abi: ABI) +fn classify<'a, Ty, C>(cx: &C, arg: &mut ArgAbi<'a, Ty>, abi: ABI, is_ret: bool) where Ty: TyAbiInterface<'a, C> + Copy, C: HasDataLayout, { - if !ret.layout.is_sized() { + if arg.is_ignore() || !arg.layout.is_sized() { // Not touching this... return; } - if !ret.layout.is_aggregate() { - ret.extend_integer_width_to(64); + if !arg.layout.is_aggregate() { + arg.extend_integer_width_to(64); return; } // The ELFv1 ABI doesn't return aggregates in registers - if abi == ELFv1 { - ret.make_indirect(); - return; - } - - if let Some(uniform) = is_homogeneous_aggregate(cx, ret, abi) { - ret.cast_to(uniform); - return; - } - - let size = ret.layout.size; - let bits = size.bits(); - if bits <= 128 { - let unit = if cx.data_layout().endian == Endian::Big { - Reg { kind: RegKind::Integer, size } - } else if bits <= 8 { - Reg::i8() - } else if bits <= 16 { - Reg::i16() - } else if bits <= 32 { - Reg::i32() - } else { - Reg::i64() - }; - - ret.cast_to(Uniform::new(unit, size)); - return; - } - - ret.make_indirect(); -} - -fn classify_arg<'a, Ty, C>(cx: &C, arg: &mut ArgAbi<'a, Ty>, abi: ABI) -where - Ty: TyAbiInterface<'a, C> + Copy, - C: HasDataLayout, -{ - if !arg.layout.is_sized() { - // Not touching this... - return; - } - if !arg.layout.is_aggregate() { - arg.extend_integer_width_to(64); + if is_ret && abi == ELFv1 { + arg.make_indirect(); return; } @@ -108,7 +67,10 @@ where } let size = arg.layout.size; - if size.bits() <= 64 { + if is_ret && size.bits() > 128 { + // Non-homogeneous aggregates larger than two doublewords are returned indirectly. + arg.make_indirect(); + } else if size.bits() <= 64 { // Aggregates smaller than a doubleword should appear in // the least-significant bits of the parameter doubleword. 
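Editor's note: the PowerPC64 ABI change above folds `classify_ret` and `classify_arg` into a single `classify` taking an `is_ret` flag, so the return-only rules become guarded early exits. A toy outline of that merged-function shape, not the real ABI logic:

#[derive(Debug, PartialEq)]
enum Pass {
    Direct,
    Indirect,
}

fn classify(size_bits: u64, is_elfv1_aggregate: bool, is_ret: bool) -> Pass {
    // Return values have two extra "go indirect" rules; arguments skip them.
    if is_ret && is_elfv1_aggregate {
        return Pass::Indirect;
    }
    if is_ret && size_bits > 128 {
        return Pass::Indirect;
    }
    Pass::Direct
}

fn main() {
    assert_eq!(classify(256, false, true), Pass::Indirect); // large return value
    assert_eq!(classify(256, false, false), Pass::Direct); // same size as an argument
}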
arg.cast_to(Reg { kind: RegKind::Integer, size }) @@ -138,14 +100,9 @@ where } }; - if !fn_abi.ret.is_ignore() { - classify_ret(cx, &mut fn_abi.ret, abi); - } + classify(cx, &mut fn_abi.ret, abi, true); for arg in fn_abi.args.iter_mut() { - if arg.is_ignore() { - continue; - } - classify_arg(cx, arg, abi); + classify(cx, arg, abi, false); } } diff --git a/compiler/rustc_target/src/spec/abi/tests.rs b/compiler/rustc_target/src/spec/abi/tests.rs index 251a12fe7aa6b..4823058dd6970 100644 --- a/compiler/rustc_target/src/spec/abi/tests.rs +++ b/compiler/rustc_target/src/spec/abi/tests.rs @@ -1,3 +1,5 @@ +use std::assert_matches::assert_matches; + use super::*; #[allow(non_snake_case)] @@ -16,7 +18,7 @@ fn lookup_cdecl() { #[test] fn lookup_baz() { let abi = lookup("baz"); - assert!(matches!(abi, Err(AbiUnsupported::Unrecognized))) + assert_matches!(abi, Err(AbiUnsupported::Unrecognized)); } #[test] diff --git a/compiler/rustc_target/src/spec/mod.rs b/compiler/rustc_target/src/spec/mod.rs index 946947124c6eb..80f89a0ab2be0 100644 --- a/compiler/rustc_target/src/spec/mod.rs +++ b/compiler/rustc_target/src/spec/mod.rs @@ -1561,6 +1561,7 @@ supported_targets! { ("powerpc-unknown-linux-gnu", powerpc_unknown_linux_gnu), ("powerpc-unknown-linux-gnuspe", powerpc_unknown_linux_gnuspe), ("powerpc-unknown-linux-musl", powerpc_unknown_linux_musl), + ("powerpc-unknown-linux-muslspe", powerpc_unknown_linux_muslspe), ("powerpc64-ibm-aix", powerpc64_ibm_aix), ("powerpc64-unknown-linux-gnu", powerpc64_unknown_linux_gnu), ("powerpc64-unknown-linux-musl", powerpc64_unknown_linux_musl), @@ -3149,11 +3150,10 @@ impl Target { if let Some(a) = o.as_array() { for o in a { if let Some(s) = o.as_str() { - let p = s.split('=').collect::>(); - if p.len() == 2 { - let k = p[0].to_string(); - let v = p[1].to_string(); - base.$key_name.to_mut().push((k.into(), v.into())); + if let [k, v] = *s.split('=').collect::>() { + base.$key_name + .to_mut() + .push((k.to_string().into(), v.to_string().into())) } } } diff --git a/compiler/rustc_target/src/spec/targets/aarch64_apple_darwin.rs b/compiler/rustc_target/src/spec/targets/aarch64_apple_darwin.rs index f37781c3f638a..65f2a1e306987 100644 --- a/compiler/rustc_target/src/spec/targets/aarch64_apple_darwin.rs +++ b/compiler/rustc_target/src/spec/targets/aarch64_apple_darwin.rs @@ -17,7 +17,7 @@ pub fn target() -> Target { llvm_target: macos_llvm_target(arch).into(), metadata: crate::spec::TargetMetadata { description: Some("ARM64 macOS (11.0+, Big Sur+)".into()), - tier: Some(2), + tier: Some(1), host_tools: Some(true), std: Some(true), }, diff --git a/compiler/rustc_target/src/spec/targets/powerpc_unknown_linux_muslspe.rs b/compiler/rustc_target/src/spec/targets/powerpc_unknown_linux_muslspe.rs new file mode 100644 index 0000000000000..d19015729ec19 --- /dev/null +++ b/compiler/rustc_target/src/spec/targets/powerpc_unknown_linux_muslspe.rs @@ -0,0 +1,28 @@ +use crate::abi::Endian; +use crate::spec::{base, Cc, LinkerFlavor, Lld, StackProbeType, Target, TargetOptions}; + +pub fn target() -> Target { + let mut base = base::linux_musl::opts(); + base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-mspe"]); + base.max_atomic_width = Some(32); + base.stack_probes = StackProbeType::Inline; + + Target { + llvm_target: "powerpc-unknown-linux-muslspe".into(), + metadata: crate::spec::TargetMetadata { + description: Some("PowerPC SPE Linux with musl".into()), + tier: Some(3), + host_tools: Some(false), + std: Some(true), + }, + pointer_width: 32, + data_layout: 
"E-m:e-p:32:32-Fn32-i64:64-n32".into(), + arch: "powerpc".into(), + options: TargetOptions { + abi: "spe".into(), + endian: Endian::Big, + mcount: "_mcount".into(), + ..base + }, + } +} diff --git a/compiler/rustc_trait_selection/src/error_reporting/infer/mod.rs b/compiler/rustc_trait_selection/src/error_reporting/infer/mod.rs index 8ccb2a8483ae3..5193333be8e90 100644 --- a/compiler/rustc_trait_selection/src/error_reporting/infer/mod.rs +++ b/compiler/rustc_trait_selection/src/error_reporting/infer/mod.rs @@ -1087,9 +1087,9 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { values } - (ty::FnDef(did1, args1), ty::FnPtr(sig2)) => { + (ty::FnDef(did1, args1), ty::FnPtr(sig_tys2, hdr2)) => { let sig1 = self.tcx.fn_sig(*did1).instantiate(self.tcx, args1); - let mut values = self.cmp_fn_sig(&sig1, sig2); + let mut values = self.cmp_fn_sig(&sig1, &sig_tys2.with(*hdr2)); values.0.push_highlighted(format!( " {{{}}}", self.tcx.def_path_str_with_args(*did1, args1) @@ -1097,16 +1097,18 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { values } - (ty::FnPtr(sig1), ty::FnDef(did2, args2)) => { + (ty::FnPtr(sig_tys1, hdr1), ty::FnDef(did2, args2)) => { let sig2 = self.tcx.fn_sig(*did2).instantiate(self.tcx, args2); - let mut values = self.cmp_fn_sig(sig1, &sig2); + let mut values = self.cmp_fn_sig(&sig_tys1.with(*hdr1), &sig2); values .1 .push_normal(format!(" {{{}}}", self.tcx.def_path_str_with_args(*did2, args2))); values } - (ty::FnPtr(sig1), ty::FnPtr(sig2)) => self.cmp_fn_sig(sig1, sig2), + (ty::FnPtr(sig_tys1, hdr1), ty::FnPtr(sig_tys2, hdr2)) => { + self.cmp_fn_sig(&sig_tys1.with(*hdr1), &sig_tys2.with(*hdr2)) + } _ => { let mut strs = (DiagStyledString::new(), DiagStyledString::new()); diff --git a/compiler/rustc_trait_selection/src/error_reporting/infer/note_and_explain.rs b/compiler/rustc_trait_selection/src/error_reporting/infer/note_and_explain.rs index 864510bb65047..05c79170902de 100644 --- a/compiler/rustc_trait_selection/src/error_reporting/infer/note_and_explain.rs +++ b/compiler/rustc_trait_selection/src/error_reporting/infer/note_and_explain.rs @@ -4,6 +4,7 @@ use rustc_hir as hir; use rustc_hir::def::DefKind; use rustc_middle::traits::{ObligationCause, ObligationCauseCode}; use rustc_middle::ty::error::{ExpectedFound, TypeError}; +use rustc_middle::ty::fast_reject::{DeepRejectCtxt, TreatParams}; use rustc_middle::ty::print::{FmtPrinter, Printer}; use rustc_middle::ty::{self, suggest_constraining_type_param, Ty}; use rustc_span::def_id::DefId; @@ -313,11 +314,15 @@ impl Trait for X { (ty::Dynamic(t, _, ty::DynKind::Dyn), _) if let Some(def_id) = t.principal_def_id() => { - let mut impl_def_ids = vec![]; + let mut has_matching_impl = false; tcx.for_each_relevant_impl(def_id, values.found, |did| { - impl_def_ids.push(did) + if DeepRejectCtxt::new(tcx, TreatParams::ForLookup) + .types_may_unify(values.found, tcx.type_of(did).skip_binder()) + { + has_matching_impl = true; + } }); - if let [_] = &impl_def_ids[..] 
{ + if has_matching_impl { let trait_name = tcx.item_name(def_id); diag.help(format!( "`{}` implements `{trait_name}` so you could box the found value \ @@ -330,11 +335,15 @@ impl Trait for X { (_, ty::Dynamic(t, _, ty::DynKind::Dyn)) if let Some(def_id) = t.principal_def_id() => { - let mut impl_def_ids = vec![]; + let mut has_matching_impl = false; tcx.for_each_relevant_impl(def_id, values.expected, |did| { - impl_def_ids.push(did) + if DeepRejectCtxt::new(tcx, TreatParams::ForLookup) + .types_may_unify(values.expected, tcx.type_of(did).skip_binder()) + { + has_matching_impl = true; + } }); - if let [_] = &impl_def_ids[..] { + if has_matching_impl { let trait_name = tcx.item_name(def_id); diag.help(format!( "`{}` implements `{trait_name}` so you could change the expected \ @@ -346,11 +355,15 @@ impl Trait for X { (ty::Dynamic(t, _, ty::DynKind::DynStar), _) if let Some(def_id) = t.principal_def_id() => { - let mut impl_def_ids = vec![]; + let mut has_matching_impl = false; tcx.for_each_relevant_impl(def_id, values.found, |did| { - impl_def_ids.push(did) + if DeepRejectCtxt::new(tcx, TreatParams::ForLookup) + .types_may_unify(values.found, tcx.type_of(did).skip_binder()) + { + has_matching_impl = true; + } }); - if let [_] = &impl_def_ids[..] { + if has_matching_impl { let trait_name = tcx.item_name(def_id); diag.help(format!( "`{}` implements `{trait_name}`, `#[feature(dyn_star)]` is likely \ @@ -441,9 +454,9 @@ impl Trait for X { } } } - (ty::FnPtr(sig), ty::FnDef(def_id, _)) - | (ty::FnDef(def_id, _), ty::FnPtr(sig)) => { - if tcx.fn_sig(def_id).skip_binder().safety() < sig.safety() { + (ty::FnPtr(_, hdr), ty::FnDef(def_id, _)) + | (ty::FnDef(def_id, _), ty::FnPtr(_, hdr)) => { + if tcx.fn_sig(def_id).skip_binder().safety() < hdr.safety { diag.note( "unsafe functions cannot be coerced into safe function pointers", ); diff --git a/compiler/rustc_trait_selection/src/error_reporting/infer/suggest.rs b/compiler/rustc_trait_selection/src/error_reporting/infer/suggest.rs index ee159aa0b77a6..35f68a56d2ddc 100644 --- a/compiler/rustc_trait_selection/src/error_reporting/infer/suggest.rs +++ b/compiler/rustc_trait_selection/src/error_reporting/infer/suggest.rs @@ -383,8 +383,9 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> { return; } match (&expected_inner.kind(), &found_inner.kind()) { - (ty::FnPtr(sig), ty::FnDef(did, args)) => { - let expected_sig = &(self.normalize_fn_sig)(*sig); + (ty::FnPtr(sig_tys, hdr), ty::FnDef(did, args)) => { + let sig = sig_tys.with(*hdr); + let expected_sig = &(self.normalize_fn_sig)(sig); let found_sig = &(self.normalize_fn_sig)(self.tcx.fn_sig(*did).instantiate(self.tcx, args)); @@ -402,11 +403,11 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> { (false, true) => FunctionPointerSuggestion::RemoveRef { span, fn_name }, (true, true) => { diag.subdiagnostic(FnItemsAreDistinct); - FunctionPointerSuggestion::CastRef { span, fn_name, sig: *sig } + FunctionPointerSuggestion::CastRef { span, fn_name, sig } } (false, false) => { diag.subdiagnostic(FnItemsAreDistinct); - FunctionPointerSuggestion::Cast { span, fn_name, sig: *sig } + FunctionPointerSuggestion::Cast { span, fn_name, sig } } }; diag.subdiagnostic(sugg); @@ -449,10 +450,10 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> { diag.subdiagnostic(sug); } - (ty::FnDef(did, args), ty::FnPtr(sig)) => { + (ty::FnDef(did, args), ty::FnPtr(sig_tys, hdr)) => { let expected_sig = &(self.normalize_fn_sig)(self.tcx.fn_sig(*did).instantiate(self.tcx, args)); - let found_sig = &(self.normalize_fn_sig)(*sig); + let found_sig = 
&(self.normalize_fn_sig)(sig_tys.with(*hdr)); if !self.same_type_modulo_infer(*found_sig, *expected_sig) { return; diff --git a/compiler/rustc_trait_selection/src/error_reporting/traits/fulfillment_errors.rs b/compiler/rustc_trait_selection/src/error_reporting/traits/fulfillment_errors.rs index 1cee82f04ea0e..adeabb3b77da2 100644 --- a/compiler/rustc_trait_selection/src/error_reporting/traits/fulfillment_errors.rs +++ b/compiler/rustc_trait_selection/src/error_reporting/traits/fulfillment_errors.rs @@ -230,8 +230,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { post_message, ); - let (err_msg, safe_transmute_explanation) = if Some(main_trait_ref.def_id()) - == self.tcx.lang_items().transmute_trait() + let (err_msg, safe_transmute_explanation) = if self.tcx.is_lang_item(main_trait_ref.def_id(), LangItem::TransmuteTrait) { // Recompute the safe transmute reason and use that for the error reporting match self.get_safe_transmute_error_and_reason( @@ -375,7 +374,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { let impl_candidates = self.find_similar_impl_candidates(leaf_trait_predicate); suggested = if let &[cand] = &impl_candidates[..] { let cand = cand.trait_ref; - if let (ty::FnPtr(_), ty::FnDef(..)) = + if let (ty::FnPtr(..), ty::FnDef(..)) = (cand.self_ty().kind(), main_trait_ref.self_ty().skip_binder().kind()) { err.span_suggestion( @@ -687,10 +686,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { let mut applied_do_not_recommend = false; loop { if let ObligationCauseCode::ImplDerived(ref c) = base_cause { - if self.tcx.has_attrs_with_path( - c.impl_or_alias_def_id, - &[sym::diagnostic, sym::do_not_recommend], - ) { + if self.tcx.do_not_recommend_impl(c.impl_or_alias_def_id) { let code = (*c.derived.parent_code).clone(); obligation.cause.map_code(|_| code); obligation.predicate = c.derived.parent_trait_pred.upcast(self.tcx); @@ -793,8 +789,8 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { // is unimplemented is because async closures don't implement `Fn`/`FnMut` // if they have captures. if let Some(by_ref_captures) = by_ref_captures - && let ty::FnPtr(sig) = by_ref_captures.kind() - && !sig.skip_binder().output().is_unit() + && let ty::FnPtr(sig_tys, _) = by_ref_captures.kind() + && !sig_tys.skip_binder().output().is_unit() { let mut err = self.dcx().create_err(AsyncClosureNotFn { span: self.tcx.def_span(closure_def_id), @@ -944,8 +940,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { // The current method call returns `Result<_, ()>` && self.can_eq(obligation.param_env, ty, found_ty) // There's a single argument in the method call and it is a closure - && args.len() == 1 - && let Some(arg) = args.get(0) + && let [arg] = args && let hir::ExprKind::Closure(closure) = arg.kind // The closure has a block for its body with no tail expression && let body = self.tcx.hir().body(closure.body) @@ -1061,7 +1056,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { "`{ty}` is forbidden as the type of a const generic parameter", ) } - ty::FnPtr(_) => { + ty::FnPtr(..) => { struct_span_code_err!( self.dcx(), span, @@ -1630,11 +1625,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { .tcx .all_impls(def_id) // ignore `do_not_recommend` items - .filter(|def_id| { - !self - .tcx - .has_attrs_with_path(*def_id, &[sym::diagnostic, sym::do_not_recommend]) - }) + .filter(|def_id| !self.tcx.do_not_recommend_impl(*def_id)) // Ignore automatically derived impls and `!Trait` impls. 
.filter_map(|def_id| self.tcx.impl_trait_header(def_id)) .filter_map(|header| { @@ -1698,11 +1689,11 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { err.highlighted_span_help( span, vec![ - StringPart::normal("you have ".to_string()), + StringPart::normal("there are ".to_string()), StringPart::highlighted("multiple different versions".to_string()), StringPart::normal(" of crate `".to_string()), StringPart::highlighted(format!("{name}")), - StringPart::normal("` in your dependency graph".to_string()), + StringPart::normal("` in the dependency graph".to_string()), ], ); let candidates = if impl_candidates.is_empty() { @@ -1844,10 +1835,10 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { if let &[cand] = &candidates[..] { let (desc, mention_castable) = match (cand.self_ty().kind(), trait_ref.self_ty().skip_binder().kind()) { - (ty::FnPtr(_), ty::FnDef(..)) => { + (ty::FnPtr(..), ty::FnDef(..)) => { (" implemented for fn pointer `", ", cast using `as`") } - (ty::FnPtr(_), _) => (" implemented for fn pointer `", ""), + (ty::FnPtr(..), _) => (" implemented for fn pointer `", ""), _ => (" implemented for `", ""), }; err.highlighted_help(vec![ @@ -1904,12 +1895,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { let impl_candidates = impl_candidates .into_iter() .cloned() - .filter(|cand| { - !self.tcx.has_attrs_with_path( - cand.impl_def_id, - &[sym::diagnostic, sym::do_not_recommend], - ) - }) + .filter(|cand| !self.tcx.do_not_recommend_impl(cand.impl_def_id)) .collect::>(); let def_id = trait_ref.def_id(); @@ -2743,6 +2729,10 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { | Node::ImplItem(&hir::ImplItem { kind: hir::ImplItemKind::Fn(ref sig, _), .. }) | Node::TraitItem(&hir::TraitItem { kind: hir::TraitItemKind::Fn(ref sig, _), .. + }) + | Node::ForeignItem(&hir::ForeignItem { + kind: hir::ForeignItemKind::Fn(ref sig, _, _), + .. }) => ( sig.span, None, diff --git a/compiler/rustc_trait_selection/src/error_reporting/traits/on_unimplemented.rs b/compiler/rustc_trait_selection/src/error_reporting/traits/on_unimplemented.rs index 3a082893c5c66..f656f9b0e383f 100644 --- a/compiler/rustc_trait_selection/src/error_reporting/traits/on_unimplemented.rs +++ b/compiler/rustc_trait_selection/src/error_reporting/traits/on_unimplemented.rs @@ -73,10 +73,10 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> { } }); - let impl_def_id_and_args = if self_match_impls.len() == 1 { - self_match_impls[0] - } else if fuzzy_match_impls.len() == 1 { - fuzzy_match_impls[0] + let impl_def_id_and_args = if let [impl_] = self_match_impls[..] { + impl_ + } else if let [impl_] = fuzzy_match_impls[..] { + impl_ } else { return None; }; diff --git a/compiler/rustc_trait_selection/src/error_reporting/traits/suggestions.rs b/compiler/rustc_trait_selection/src/error_reporting/traits/suggestions.rs index 0d15ef55e24e7..d0f76f0d50ea6 100644 --- a/compiler/rustc_trait_selection/src/error_reporting/traits/suggestions.rs +++ b/compiler/rustc_trait_selection/src/error_reporting/traits/suggestions.rs @@ -1077,10 +1077,10 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { let Some((def_id_or_name, output, inputs)) = (self.autoderef_steps)(found).into_iter().find_map(|(found, _)| { match *found.kind() { - ty::FnPtr(fn_sig) => Some(( + ty::FnPtr(sig_tys, _) => Some(( DefIdOrName::Name("function pointer"), - fn_sig.output(), - fn_sig.inputs(), + sig_tys.output(), + sig_tys.inputs(), )), ty::FnDef(def_id, _) => { let fn_sig = found.fn_sig(self.tcx); @@ -1977,20 +1977,22 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { let ObligationCauseCode::FunctionArg { arg_hir_id, ..
} = cause else { return; }; - let ty::FnPtr(expected) = expected.kind() else { + let ty::FnPtr(sig_tys, hdr) = expected.kind() else { return; }; - let ty::FnPtr(found) = found.kind() else { + let expected = sig_tys.with(*hdr); + let ty::FnPtr(sig_tys, hdr) = found.kind() else { return; }; + let found = sig_tys.with(*hdr); let Node::Expr(arg) = self.tcx.hir_node(*arg_hir_id) else { return; }; let hir::ExprKind::Path(path) = arg.kind else { return; }; - let expected_inputs = self.tcx.instantiate_bound_regions_with_erased(*expected).inputs(); - let found_inputs = self.tcx.instantiate_bound_regions_with_erased(*found).inputs(); + let expected_inputs = self.tcx.instantiate_bound_regions_with_erased(expected).inputs(); + let found_inputs = self.tcx.instantiate_bound_regions_with_erased(found).inputs(); let both_tys = expected_inputs.iter().copied().zip(found_inputs.iter().copied()); let arg_expr = |infcx: &InferCtxt<'tcx>, name, expected: Ty<'tcx>, found: Ty<'tcx>| { @@ -2829,7 +2831,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { // Do not suggest relaxing if there is an explicit `Sized` obligation. && !bounds.iter() .filter_map(|bound| bound.trait_ref()) - .any(|tr| tr.trait_def_id() == tcx.lang_items().sized_trait()) + .any(|tr| tr.trait_def_id().is_some_and(|def_id| tcx.is_lang_item(def_id, LangItem::Sized))) { let (span, separator) = if let [.., last] = bounds { (last.span().shrink_to_hi(), " +") @@ -4790,13 +4792,13 @@ fn hint_missing_borrow<'tcx>( } let found_args = match found.kind() { - ty::FnPtr(f) => infcx.enter_forall(*f, |f| f.inputs().iter()), + ty::FnPtr(sig_tys, _) => infcx.enter_forall(*sig_tys, |sig_tys| sig_tys.inputs().iter()), kind => { span_bug!(span, "found was converted to a FnPtr above but is now {:?}", kind) } }; let expected_args = match expected.kind() { - ty::FnPtr(f) => infcx.enter_forall(*f, |f| f.inputs().iter()), + ty::FnPtr(sig_tys, _) => infcx.enter_forall(*sig_tys, |sig_tys| sig_tys.inputs().iter()), kind => { span_bug!(span, "expected was converted to a FnPtr above but is now {:?}", kind) } @@ -5300,7 +5302,8 @@ impl<'v> Visitor<'v> for FindTypeParam { match ty.kind { hir::TyKind::Ptr(_) | hir::TyKind::Ref(..) | hir::TyKind::TraitObject(..) 
=> {} hir::TyKind::Path(hir::QPath::Resolved(None, path)) - if path.segments.len() == 1 && path.segments[0].ident.name == self.param => + if let [segment] = path.segments + && segment.ident.name == self.param => { if !self.nested { debug!(?ty, "FindTypeParam::visit_ty"); diff --git a/compiler/rustc_trait_selection/src/solve/fulfill.rs b/compiler/rustc_trait_selection/src/solve/fulfill.rs index 49fa775a0a191..de8951ef72046 100644 --- a/compiler/rustc_trait_selection/src/solve/fulfill.rs +++ b/compiler/rustc_trait_selection/src/solve/fulfill.rs @@ -13,7 +13,6 @@ use rustc_middle::bug; use rustc_middle::ty::error::{ExpectedFound, TypeError}; use rustc_middle::ty::{self, TyCtxt}; use rustc_next_trait_solver::solve::{GenerateProofTree, SolverDelegateEvalExt as _}; -use rustc_span::symbol::sym; use super::delegate::SolverDelegate; use super::inspect::{self, ProofTreeInferCtxtExt, ProofTreeVisitor}; @@ -440,10 +439,7 @@ impl<'tcx> ProofTreeVisitor<'tcx> for BestObligation<'tcx> { source: CandidateSource::Impl(impl_def_id), result: _, } = candidate.kind() - && goal - .infcx() - .tcx - .has_attrs_with_path(impl_def_id, &[sym::diagnostic, sym::do_not_recommend]) + && goal.infcx().tcx.do_not_recommend_impl(impl_def_id) { return ControlFlow::Break(self.obligation.clone()); } diff --git a/compiler/rustc_trait_selection/src/solve/inspect/analyse.rs b/compiler/rustc_trait_selection/src/solve/inspect/analyse.rs index e8de8457440ff..51dda25d8adcc 100644 --- a/compiler/rustc_trait_selection/src/solve/inspect/analyse.rs +++ b/compiler/rustc_trait_selection/src/solve/inspect/analyse.rs @@ -9,6 +9,8 @@ //! coherence right now and was annoying to implement, so I am leaving it //! as is until we start using it for something else. +use std::assert_matches::assert_matches; + use rustc_ast_ir::try_visit; use rustc_ast_ir::visit::VisitorResult; use rustc_infer::infer::{DefineOpaqueTypes, InferCtxt, InferOk}; @@ -273,10 +275,10 @@ impl<'a, 'tcx> InspectGoal<'a, 'tcx> { steps.push(step) } inspect::ProbeStep::MakeCanonicalResponse { shallow_certainty: c } => { - assert!(matches!( + assert_matches!( shallow_certainty.replace(c), None | Some(Certainty::Maybe(MaybeCause::Ambiguity)) - )); + ); } inspect::ProbeStep::NestedProbe(ref probe) => { match probe.kind { @@ -332,13 +334,9 @@ impl<'a, 'tcx> InspectGoal<'a, 'tcx> { pub fn candidates(&'a self) -> Vec> { let mut candidates = vec![]; - let last_eval_step = match self.evaluation_kind { - inspect::CanonicalGoalEvaluationKind::Overflow - | inspect::CanonicalGoalEvaluationKind::CycleInStack - | inspect::CanonicalGoalEvaluationKind::ProvisionalCacheHit => { - warn!("unexpected root evaluation: {:?}", self.evaluation_kind); - return vec![]; - } + let last_eval_step = match &self.evaluation_kind { + // An annoying edge case in case the recursion limit is 0. 
+ inspect::CanonicalGoalEvaluationKind::Overflow => return vec![], inspect::CanonicalGoalEvaluationKind::Evaluation { final_revision } => final_revision, }; diff --git a/compiler/rustc_trait_selection/src/solve/normalize.rs b/compiler/rustc_trait_selection/src/solve/normalize.rs index 419d7e704dee3..c93c40b482680 100644 --- a/compiler/rustc_trait_selection/src/solve/normalize.rs +++ b/compiler/rustc_trait_selection/src/solve/normalize.rs @@ -1,3 +1,4 @@ +use std::assert_matches::assert_matches; use std::fmt::Debug; use std::marker::PhantomData; @@ -63,7 +64,7 @@ where E: FromSolverError<'tcx, NextSolverError<'tcx>>, { fn normalize_alias_ty(&mut self, alias_ty: Ty<'tcx>) -> Result, Vec> { - assert!(matches!(alias_ty.kind(), ty::Alias(..))); + assert_matches!(alias_ty.kind(), ty::Alias(..)); let infcx = self.at.infcx; let tcx = infcx.tcx; diff --git a/compiler/rustc_trait_selection/src/traits/misc.rs b/compiler/rustc_trait_selection/src/traits/misc.rs index 9a127e752a61a..3e65194577e0d 100644 --- a/compiler/rustc_trait_selection/src/traits/misc.rs +++ b/compiler/rustc_trait_selection/src/traits/misc.rs @@ -1,5 +1,7 @@ //! Miscellaneous type-system utilities that are too small to deserve their own modules. +use std::assert_matches::assert_matches; + use hir::LangItem; use rustc_ast::Mutability; use rustc_data_structures::fx::FxIndexSet; @@ -92,7 +94,7 @@ pub fn type_allowed_to_implement_const_param_ty<'tcx>( lang_item: LangItem, parent_cause: ObligationCause<'tcx>, ) -> Result<(), ConstParamTyImplementationError<'tcx>> { - assert!(matches!(lang_item, LangItem::ConstParamTy | LangItem::UnsizedConstParamTy)); + assert_matches!(lang_item, LangItem::ConstParamTy | LangItem::UnsizedConstParamTy); let inner_tys: Vec<_> = match *self_type.kind() { // Trivially okay as these types are all: diff --git a/compiler/rustc_trait_selection/src/traits/project.rs b/compiler/rustc_trait_selection/src/traits/project.rs index 4b62a5c59b2f0..568214fe02298 100644 --- a/compiler/rustc_trait_selection/src/traits/project.rs +++ b/compiler/rustc_trait_selection/src/traits/project.rs @@ -1110,7 +1110,7 @@ fn assemble_candidates_from_impls<'cx, 'tcx>( | ty::Error(_) => false, } } else if tcx.is_lang_item(trait_ref.def_id, LangItem::PointeeTrait) { - let tail = selcx.tcx().struct_tail_with_normalize( + let tail = selcx.tcx().struct_tail_raw( self_ty, |ty| { // We throw away any obligations we get from this, since we normalize @@ -1149,10 +1149,10 @@ fn assemble_candidates_from_impls<'cx, 'tcx>( | ty::Never // Extern types have unit metadata, according to RFC 2850 | ty::Foreign(_) - // If returned by `struct_tail_without_normalization` this is a unit struct + // If returned by `struct_tail` this is a unit struct // without any fields, or not a struct, and therefore is Sized. | ty::Adt(..) - // If returned by `struct_tail_without_normalization` this is the empty tuple. + // If returned by `struct_tail` this is the empty tuple. | ty::Tuple(..) // Integers and floats are always Sized, and so have unit type metadata. | ty::Infer(ty::InferTy::IntVar(_) | ty::InferTy::FloatVar(..)) => true, @@ -1636,7 +1636,7 @@ fn confirm_fn_pointer_candidate<'cx, 'tcx>( .generics_of(def_id) .host_effect_index .map_or(tcx.consts.true_, |idx| args.const_at(idx)), - ty::FnPtr(_) => tcx.consts.true_, + ty::FnPtr(..) 
=> tcx.consts.true_, _ => unreachable!("only expected FnPtr or FnDef in `confirm_fn_pointer_candidate`"), }; diff --git a/compiler/rustc_trait_selection/src/traits/query/dropck_outlives.rs b/compiler/rustc_trait_selection/src/traits/query/dropck_outlives.rs index d3a1ed52d2e62..7d30e6524490c 100644 --- a/compiler/rustc_trait_selection/src/traits/query/dropck_outlives.rs +++ b/compiler/rustc_trait_selection/src/traits/query/dropck_outlives.rs @@ -33,7 +33,7 @@ pub fn trivial_dropck_outlives<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> bool { | ty::Float(_) | ty::Never | ty::FnDef(..) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::Char | ty::CoroutineWitness(..) | ty::RawPtr(_, _) @@ -224,7 +224,7 @@ pub fn dtorck_constraint_for_ty_inner<'tcx>( | ty::RawPtr(..) | ty::Ref(..) | ty::FnDef(..) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::CoroutineWitness(..) => { // these types never have a destructor } diff --git a/compiler/rustc_trait_selection/src/traits/query/type_op/eq.rs b/compiler/rustc_trait_selection/src/traits/query/type_op/eq.rs deleted file mode 100644 index 656130cda19eb..0000000000000 --- a/compiler/rustc_trait_selection/src/traits/query/type_op/eq.rs +++ /dev/null @@ -1,33 +0,0 @@ -pub use rustc_middle::traits::query::type_op::Eq; -use rustc_middle::traits::query::NoSolution; -use rustc_middle::traits::ObligationCause; -use rustc_middle::ty::{ParamEnvAnd, TyCtxt}; - -use crate::infer::canonical::{Canonical, CanonicalQueryResponse}; -use crate::traits::ObligationCtxt; - -impl<'tcx> super::QueryTypeOp<'tcx> for Eq<'tcx> { - type QueryResponse = (); - - fn try_fast_path( - _tcx: TyCtxt<'tcx>, - key: &ParamEnvAnd<'tcx, Eq<'tcx>>, - ) -> Option { - if key.value.a == key.value.b { Some(()) } else { None } - } - - fn perform_query( - tcx: TyCtxt<'tcx>, - canonicalized: Canonical<'tcx, ParamEnvAnd<'tcx, Self>>, - ) -> Result, NoSolution> { - tcx.type_op_eq(canonicalized) - } - - fn perform_locally_with_next_solver( - ocx: &ObligationCtxt<'_, 'tcx>, - key: ParamEnvAnd<'tcx, Self>, - ) -> Result { - ocx.eq(&ObligationCause::dummy(), key.param_env, key.value.a, key.value.b)?; - Ok(()) - } -} diff --git a/compiler/rustc_trait_selection/src/traits/query/type_op/mod.rs b/compiler/rustc_trait_selection/src/traits/query/type_op/mod.rs index 2f64ed963f967..a765de92afd55 100644 --- a/compiler/rustc_trait_selection/src/traits/query/type_op/mod.rs +++ b/compiler/rustc_trait_selection/src/traits/query/type_op/mod.rs @@ -16,12 +16,10 @@ use crate::traits::{ObligationCause, ObligationCtxt}; pub mod ascribe_user_type; pub mod custom; -pub mod eq; pub mod implied_outlives_bounds; pub mod normalize; pub mod outlives; pub mod prove_predicate; -pub mod subtype; pub use rustc_middle::traits::query::type_op::*; @@ -170,44 +168,12 @@ where // collecting region constraints via `region_constraints`. let (mut output, _) = scrape_region_constraints( infcx, - |_ocx| { - let (output, ei, mut obligations, _) = + |ocx| { + let (output, ei, obligations, _) = Q::fully_perform_into(self, infcx, &mut region_constraints, span)?; error_info = ei; - // Typically, instantiating NLL query results does not - // create obligations. However, in some cases there - // are unresolved type variables, and unify them *can* - // create obligations. In that case, we have to go - // fulfill them. We do this via a (recursive) query. 
- while !obligations.is_empty() { - trace!("{:#?}", obligations); - let mut progress = false; - for obligation in std::mem::take(&mut obligations) { - let obligation = infcx.resolve_vars_if_possible(obligation); - match ProvePredicate::fully_perform_into( - obligation.param_env.and(ProvePredicate::new(obligation.predicate)), - infcx, - &mut region_constraints, - span, - ) { - Ok(((), _, new, certainty)) => { - obligations.extend(new); - progress = true; - if let Certainty::Ambiguous = certainty { - obligations.push(obligation); - } - } - Err(_) => obligations.push(obligation), - } - } - if !progress { - infcx.dcx().span_bug( - span, - format!("ambiguity processing {obligations:?} from {self:?}"), - ); - } - } + ocx.register_obligations(obligations); Ok(output) }, "fully_perform", diff --git a/compiler/rustc_trait_selection/src/traits/query/type_op/prove_predicate.rs b/compiler/rustc_trait_selection/src/traits/query/type_op/prove_predicate.rs index 294c6bfc1243d..d6687c762c311 100644 --- a/compiler/rustc_trait_selection/src/traits/query/type_op/prove_predicate.rs +++ b/compiler/rustc_trait_selection/src/traits/query/type_op/prove_predicate.rs @@ -1,3 +1,4 @@ +use rustc_hir::LangItem; use rustc_infer::traits::Obligation; pub use rustc_middle::traits::query::type_op::ProvePredicate; use rustc_middle::traits::query::NoSolution; @@ -20,8 +21,7 @@ impl<'tcx> super::QueryTypeOp<'tcx> for ProvePredicate<'tcx> { // such cases. if let ty::PredicateKind::Clause(ty::ClauseKind::Trait(trait_ref)) = key.value.predicate.kind().skip_binder() - && let Some(sized_def_id) = tcx.lang_items().sized_trait() - && trait_ref.def_id() == sized_def_id + && tcx.is_lang_item(trait_ref.def_id(), LangItem::Sized) && trait_ref.self_ty().is_trivially_sized(tcx) { return Some(()); diff --git a/compiler/rustc_trait_selection/src/traits/query/type_op/subtype.rs b/compiler/rustc_trait_selection/src/traits/query/type_op/subtype.rs deleted file mode 100644 index 892c2a1f11309..0000000000000 --- a/compiler/rustc_trait_selection/src/traits/query/type_op/subtype.rs +++ /dev/null @@ -1,30 +0,0 @@ -pub use rustc_middle::traits::query::type_op::Subtype; -use rustc_middle::traits::query::NoSolution; -use rustc_middle::traits::ObligationCause; -use rustc_middle::ty::{ParamEnvAnd, TyCtxt}; - -use crate::infer::canonical::{Canonical, CanonicalQueryResponse}; -use crate::traits::ObligationCtxt; - -impl<'tcx> super::QueryTypeOp<'tcx> for Subtype<'tcx> { - type QueryResponse = (); - - fn try_fast_path(_tcx: TyCtxt<'tcx>, key: &ParamEnvAnd<'tcx, Self>) -> Option<()> { - if key.value.sub == key.value.sup { Some(()) } else { None } - } - - fn perform_query( - tcx: TyCtxt<'tcx>, - canonicalized: Canonical<'tcx, ParamEnvAnd<'tcx, Self>>, - ) -> Result, NoSolution> { - tcx.type_op_subtype(canonicalized) - } - - fn perform_locally_with_next_solver( - ocx: &ObligationCtxt<'_, 'tcx>, - key: ParamEnvAnd<'tcx, Self>, - ) -> Result { - ocx.sub(&ObligationCause::dummy(), key.param_env, key.value.sub, key.value.sup)?; - Ok(()) - } -} diff --git a/compiler/rustc_trait_selection/src/traits/select/candidate_assembly.rs b/compiler/rustc_trait_selection/src/traits/select/candidate_assembly.rs index 9de62031311b7..cb8deeaedb66f 100644 --- a/compiler/rustc_trait_selection/src/traits/select/candidate_assembly.rs +++ b/compiler/rustc_trait_selection/src/traits/select/candidate_assembly.rs @@ -468,8 +468,8 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { candidates.vec.push(AsyncClosureCandidate); } // Provide an impl, but only for suitable `fn` pointers. 
- ty::FnPtr(sig) => { - if sig.is_fn_trait_compatible() { + ty::FnPtr(sig_tys, hdr) => { + if sig_tys.with(hdr).is_fn_trait_compatible() { candidates.vec.push(AsyncClosureCandidate); } } @@ -535,8 +535,8 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { candidates.ambiguous = true; // Could wind up being a fn() type. } // Provide an impl, but only for suitable `fn` pointers. - ty::FnPtr(sig) => { - if sig.is_fn_trait_compatible() { + ty::FnPtr(sig_tys, hdr) => { + if sig_tys.with(hdr).is_fn_trait_compatible() { candidates .vec .push(FnPointerCandidate { fn_host_effect: self.tcx().consts.true_ }); @@ -819,7 +819,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { | ty::RawPtr(_, _) | ty::Ref(..) | ty::FnDef(..) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::Closure(..) | ty::CoroutineClosure(..) | ty::Coroutine(..) @@ -1207,7 +1207,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { | ty::RawPtr(_, _) | ty::Ref(..) | ty::FnDef(..) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::Never | ty::Foreign(_) | ty::Array(..) @@ -1290,7 +1290,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { | ty::Ref(_, _, _) | ty::FnDef(_, _) | ty::Pat(_, _) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::Dynamic(_, _, _) | ty::Closure(..) | ty::CoroutineClosure(..) @@ -1339,7 +1339,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { let self_ty = self.infcx.resolve_vars_if_possible(obligation.self_ty()); match self_ty.skip_binder().kind() { - ty::FnPtr(_) => candidates.vec.push(BuiltinCandidate { has_nested: false }), + ty::FnPtr(..) => candidates.vec.push(BuiltinCandidate { has_nested: false }), ty::Bool | ty::Char | ty::Int(_) diff --git a/compiler/rustc_trait_selection/src/traits/select/confirmation.rs b/compiler/rustc_trait_selection/src/traits/select/confirmation.rs index ddd8b970cc813..0d7ceca430174 100644 --- a/compiler/rustc_trait_selection/src/traits/select/confirmation.rs +++ b/compiler/rustc_trait_selection/src/traits/select/confirmation.rs @@ -1398,7 +1398,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { | ty::RawPtr(_, _) | ty::Ref(..) | ty::FnDef(..) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::Never | ty::Foreign(_) => {} diff --git a/compiler/rustc_trait_selection/src/traits/select/mod.rs b/compiler/rustc_trait_selection/src/traits/select/mod.rs index 1b2767a6a627f..f002fa27db27f 100644 --- a/compiler/rustc_trait_selection/src/traits/select/mod.rs +++ b/compiler/rustc_trait_selection/src/traits/select/mod.rs @@ -2113,7 +2113,7 @@ impl<'tcx> SelectionContext<'_, 'tcx> { | ty::Bool | ty::Float(_) | ty::FnDef(..) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::RawPtr(..) | ty::Char | ty::Ref(..) @@ -2170,7 +2170,7 @@ impl<'tcx> SelectionContext<'_, 'tcx> { use self::BuiltinImplConditions::{Ambiguous, None, Where}; match *self_ty.kind() { - ty::FnDef(..) | ty::FnPtr(_) | ty::Error(_) => Where(ty::Binder::dummy(Vec::new())), + ty::FnDef(..) | ty::FnPtr(..) | ty::Error(_) => Where(ty::Binder::dummy(Vec::new())), ty::Uint(_) | ty::Int(_) @@ -2332,7 +2332,7 @@ impl<'tcx> SelectionContext<'_, 'tcx> { | ty::Bool | ty::Float(_) | ty::FnDef(..) - | ty::FnPtr(_) + | ty::FnPtr(..) 
| ty::Error(_) | ty::Infer(ty::IntVar(_) | ty::FloatVar(_)) | ty::Never diff --git a/compiler/rustc_trait_selection/src/traits/wf.rs b/compiler/rustc_trait_selection/src/traits/wf.rs index 7e5fe7e3c942b..a3982c3d987ef 100644 --- a/compiler/rustc_trait_selection/src/traits/wf.rs +++ b/compiler/rustc_trait_selection/src/traits/wf.rs @@ -812,7 +812,7 @@ impl<'a, 'tcx> TypeVisitor> for WfPredicates<'a, 'tcx> { return upvars.visit_with(self); } - ty::FnPtr(_) => { + ty::FnPtr(..) => { // Let the visitor iterate into the argument/return // types appearing in the fn signature. } diff --git a/compiler/rustc_traits/src/type_op.rs b/compiler/rustc_traits/src/type_op.rs index 5affadaac38c3..f34adf8575565 100644 --- a/compiler/rustc_traits/src/type_op.rs +++ b/compiler/rustc_traits/src/type_op.rs @@ -10,18 +10,14 @@ use rustc_trait_selection::traits::query::normalize::QueryNormalizeExt; use rustc_trait_selection::traits::query::type_op::ascribe_user_type::{ type_op_ascribe_user_type_with_span, AscribeUserType, }; -use rustc_trait_selection::traits::query::type_op::eq::Eq; use rustc_trait_selection::traits::query::type_op::normalize::Normalize; use rustc_trait_selection::traits::query::type_op::prove_predicate::ProvePredicate; -use rustc_trait_selection::traits::query::type_op::subtype::Subtype; use rustc_trait_selection::traits::{Normalized, Obligation, ObligationCause, ObligationCtxt}; pub(crate) fn provide(p: &mut Providers) { *p = Providers { type_op_ascribe_user_type, - type_op_eq, type_op_prove_predicate, - type_op_subtype, type_op_normalize_ty, type_op_normalize_clause, type_op_normalize_fn_sig, @@ -39,16 +35,6 @@ fn type_op_ascribe_user_type<'tcx>( }) } -fn type_op_eq<'tcx>( - tcx: TyCtxt<'tcx>, - canonicalized: Canonical<'tcx, ParamEnvAnd<'tcx, Eq<'tcx>>>, -) -> Result<&'tcx Canonical<'tcx, QueryResponse<'tcx, ()>>, NoSolution> { - tcx.infer_ctxt().enter_canonical_trait_query(&canonicalized, |ocx, key| { - let (param_env, Eq { a, b }) = key.into_parts(); - Ok(ocx.eq(&ObligationCause::dummy(), param_env, a, b)?) - }) -} - fn type_op_normalize<'tcx, T>( ocx: &ObligationCtxt<'_, 'tcx>, key: ParamEnvAnd<'tcx, Normalize>, @@ -91,16 +77,6 @@ fn type_op_normalize_poly_fn_sig<'tcx>( tcx.infer_ctxt().enter_canonical_trait_query(&canonicalized, type_op_normalize) } -fn type_op_subtype<'tcx>( - tcx: TyCtxt<'tcx>, - canonicalized: Canonical<'tcx, ParamEnvAnd<'tcx, Subtype<'tcx>>>, -) -> Result<&'tcx Canonical<'tcx, QueryResponse<'tcx, ()>>, NoSolution> { - tcx.infer_ctxt().enter_canonical_trait_query(&canonicalized, |ocx, key| { - let (param_env, Subtype { sub, sup }) = key.into_parts(); - Ok(ocx.sup(&ObligationCause::dummy(), param_env, sup, sub)?) 
- }) -} - fn type_op_prove_predicate<'tcx>( tcx: TyCtxt<'tcx>, canonicalized: Canonical<'tcx, ParamEnvAnd<'tcx, ProvePredicate<'tcx>>>, diff --git a/compiler/rustc_ty_utils/src/abi.rs b/compiler/rustc_ty_utils/src/abi.rs index d90c3bedc7017..34c426f2aa6df 100644 --- a/compiler/rustc_ty_utils/src/abi.rs +++ b/compiler/rustc_ty_utils/src/abi.rs @@ -621,7 +621,7 @@ fn fn_abi_new_uncached<'tcx>( let rust_abi = matches!(sig.abi, RustIntrinsic | Rust | RustCall); let is_drop_in_place = - fn_def_id.is_some() && fn_def_id == cx.tcx.lang_items().drop_in_place_fn(); + fn_def_id.is_some_and(|def_id| cx.tcx.is_lang_item(def_id, LangItem::DropInPlace)); let arg_of = |ty: Ty<'tcx>, arg_idx: Option| -> Result<_, &'tcx FnAbiError<'tcx>> { let span = tracing::debug_span!("arg_of"); diff --git a/compiler/rustc_ty_utils/src/instance.rs b/compiler/rustc_ty_utils/src/instance.rs index 43e491387091e..c7ed6e6110fab 100644 --- a/compiler/rustc_ty_utils/src/instance.rs +++ b/compiler/rustc_ty_utils/src/instance.rs @@ -248,7 +248,7 @@ fn resolve_associated_item<'tcx>( if name == sym::clone { let self_ty = trait_ref.self_ty(); match self_ty.kind() { - ty::FnDef(..) | ty::FnPtr(_) => (), + ty::FnDef(..) | ty::FnPtr(..) => (), ty::Coroutine(..) | ty::CoroutineWitness(..) | ty::Closure(..) diff --git a/compiler/rustc_ty_utils/src/layout.rs b/compiler/rustc_ty_utils/src/layout.rs index 1eb03fc3bd6a1..234e1a6d55e65 100644 --- a/compiler/rustc_ty_utils/src/layout.rs +++ b/compiler/rustc_ty_utils/src/layout.rs @@ -184,7 +184,7 @@ fn layout_of_uncached<'tcx>( ty::Int(ity) => scalar(Int(Integer::from_int_ty(dl, ity), true)), ty::Uint(ity) => scalar(Int(Integer::from_uint_ty(dl, ity), false)), ty::Float(fty) => scalar(Float(Float::from_float_ty(fty))), - ty::FnPtr(_) => { + ty::FnPtr(..) => { let mut ptr = scalar_unit(Pointer(dl.instruction_address_space)); ptr.valid_range_mut().start = 1; tcx.mk_layout(LayoutS::scalar(cx, ptr)) @@ -219,9 +219,13 @@ fn layout_of_uncached<'tcx>( // its struct tail cannot be normalized either, so try to get a // more descriptive layout error here, which will lead to less confusing // diagnostics. + // + // We use the raw struct tail function here to get the first tail + // that is an alias, which is likely the cause of the normalization + // error. match tcx.try_normalize_erasing_regions( param_env, - tcx.struct_tail_without_normalization(pointee), + tcx.struct_tail_raw(pointee, |ty| ty, || {}), ) { Ok(_) => {} Err(better_err) => { diff --git a/compiler/rustc_ty_utils/src/layout_sanity_check.rs b/compiler/rustc_ty_utils/src/layout_sanity_check.rs index d8e0443c50b85..2223aca28d1f6 100644 --- a/compiler/rustc_ty_utils/src/layout_sanity_check.rs +++ b/compiler/rustc_ty_utils/src/layout_sanity_check.rs @@ -249,7 +249,7 @@ pub(super) fn sanity_check_layout<'tcx>( if let Variants::Multiple { variants, .. } = &layout.variants { for variant in variants.iter() { // No nested "multiple". - assert!(matches!(variant.variants, Variants::Single { .. })); + assert_matches!(variant.variants, Variants::Single { .. }); // Variants should have the same or a smaller size as the full thing, // and same for alignment. 
if variant.size > layout.size { diff --git a/compiler/rustc_type_ir/src/binder.rs b/compiler/rustc_type_ir/src/binder.rs index c1f6fb36324ed..d42efbc91e196 100644 --- a/compiler/rustc_type_ir/src/binder.rs +++ b/compiler/rustc_type_ir/src/binder.rs @@ -8,7 +8,7 @@ use derive_where::derive_where; use rustc_macros::{HashStable_NoContext, TyDecodable, TyEncodable}; #[cfg(feature = "nightly")] use rustc_serialize::Decodable; -use tracing::debug; +use tracing::instrument; use crate::data_structures::SsoHashSet; use crate::fold::{FallibleTypeFolder, TypeFoldable, TypeFolder, TypeSuperFoldable}; @@ -86,6 +86,7 @@ macro_rules! impl_binder_encode_decode { #[cfg(feature = "nightly")] impl_binder_encode_decode! { ty::FnSig, + ty::FnSigTys, ty::TraitPredicate, ty::ExistentialPredicate, ty::TraitRef, @@ -247,21 +248,6 @@ impl Binder { // `self.value` is equivalent to `self.skip_binder()` if self.value.has_escaping_bound_vars() { None } else { Some(self.skip_binder()) } } - - /// Splits the contents into two things that share the same binder - /// level as the original, returning two distinct binders. - /// - /// `f` should consider bound regions at depth 1 to be free, and - /// anything it produces with bound regions at depth 1 will be - /// bound in the resulting return values. - pub fn split(self, f: F) -> (Binder, Binder) - where - F: FnOnce(T) -> (U, V), - { - let Binder { value, bound_vars } = self; - let (u, v) = f(value); - (Binder { value: u, bound_vars }, Binder { value: v, bound_vars }) - } } impl Binder> { @@ -831,28 +817,20 @@ impl<'a, I: Interner> ArgFolder<'a, I> { /// As indicated in the diagram, here the same type `&'a i32` is instantiated once, but in the /// first case we do not increase the De Bruijn index and in the second case we do. The reason /// is that only in the second case have we passed through a fn binder. + #[instrument(level = "trace", skip(self), fields(binders_passed = self.binders_passed), ret)] fn shift_vars_through_binders>(&self, val: T) -> T { - debug!( - "shift_vars(val={:?}, binders_passed={:?}, has_escaping_bound_vars={:?})", - val, - self.binders_passed, - val.has_escaping_bound_vars() - ); - if self.binders_passed == 0 || !val.has_escaping_bound_vars() { - return val; + val + } else { + ty::fold::shift_vars(self.cx, val, self.binders_passed) } - - let result = ty::fold::shift_vars(TypeFolder::cx(self), val, self.binders_passed); - debug!("shift_vars: shifted result = {:?}", result); - - result } fn shift_region_through_binders(&self, region: I::Region) -> I::Region { if self.binders_passed == 0 || !region.has_escaping_bound_vars() { - return region; + region + } else { + ty::fold::shift_region(self.cx, region, self.binders_passed) } - ty::fold::shift_region(self.cx, region, self.binders_passed) } } diff --git a/compiler/rustc_type_ir/src/fast_reject.rs b/compiler/rustc_type_ir/src/fast_reject.rs index 456accd1a1b78..fab4a0991175d 100644 --- a/compiler/rustc_type_ir/src/fast_reject.rs +++ b/compiler/rustc_type_ir/src/fast_reject.rs @@ -135,7 +135,9 @@ pub fn simplify_type( ty::CoroutineWitness(def_id, _) => Some(SimplifiedType::CoroutineWitness(def_id)), ty::Never => Some(SimplifiedType::Never), ty::Tuple(tys) => Some(SimplifiedType::Tuple(tys.len())), - ty::FnPtr(f) => Some(SimplifiedType::Function(f.skip_binder().inputs().len())), + ty::FnPtr(sig_tys, _hdr) => { + Some(SimplifiedType::Function(sig_tys.skip_binder().inputs().len())) + } ty::Placeholder(..) 
=> Some(SimplifiedType::Placeholder), ty::Param(_) => match treat_params { TreatParams::ForLookup => Some(SimplifiedType::Placeholder), @@ -307,17 +309,14 @@ impl DeepRejectCtxt { obl_preds.principal_def_id() == impl_preds.principal_def_id() ) } - ty::FnPtr(obl_sig) => match k { - ty::FnPtr(impl_sig) => { - let ty::FnSig { inputs_and_output, c_variadic, safety, abi } = - obl_sig.skip_binder(); - let impl_sig = impl_sig.skip_binder(); + ty::FnPtr(obl_sig_tys, obl_hdr) => match k { + ty::FnPtr(impl_sig_tys, impl_hdr) => { + let obl_sig_tys = obl_sig_tys.skip_binder().inputs_and_output; + let impl_sig_tys = impl_sig_tys.skip_binder().inputs_and_output; - abi == impl_sig.abi - && c_variadic == impl_sig.c_variadic - && safety == impl_sig.safety - && inputs_and_output.len() == impl_sig.inputs_and_output.len() - && iter::zip(inputs_and_output.iter(), impl_sig.inputs_and_output.iter()) + obl_hdr == impl_hdr + && obl_sig_tys.len() == impl_sig_tys.len() + && iter::zip(obl_sig_tys.iter(), impl_sig_tys.iter()) .all(|(obl, imp)| self.types_may_unify(obl, imp)) } _ => false, diff --git a/compiler/rustc_type_ir/src/fold.rs b/compiler/rustc_type_ir/src/fold.rs index d37bacc7d359f..8e3534b0e9eb4 100644 --- a/compiler/rustc_type_ir/src/fold.rs +++ b/compiler/rustc_type_ir/src/fold.rs @@ -48,7 +48,7 @@ use std::mem; use rustc_index::{Idx, IndexVec}; -use tracing::debug; +use tracing::instrument; use crate::data_structures::Lrc; use crate::inherent::*; @@ -417,15 +417,14 @@ pub fn shift_region(cx: I, region: I::Region, amount: u32) -> I::Re } } +#[instrument(level = "trace", skip(cx), ret)] pub fn shift_vars(cx: I, value: T, amount: u32) -> T where T: TypeFoldable, { - debug!("shift_vars(value={:?}, amount={})", value, amount); - if amount == 0 || !value.has_escaping_bound_vars() { - return value; + value + } else { + value.fold_with(&mut Shifter::new(cx, amount)) } - - value.fold_with(&mut Shifter::new(cx, amount)) } diff --git a/compiler/rustc_type_ir/src/inherent.rs b/compiler/rustc_type_ir/src/inherent.rs index 263ba676427c5..958360faede57 100644 --- a/compiler/rustc_type_ir/src/inherent.rs +++ b/compiler/rustc_type_ir/src/inherent.rs @@ -133,12 +133,12 @@ pub trait Ty>: } fn is_fn_ptr(self) -> bool { - matches!(self.kind(), ty::FnPtr(_)) + matches!(self.kind(), ty::FnPtr(..)) } fn fn_sig(self, interner: I) -> ty::Binder> { match self.kind() { - ty::FnPtr(sig) => sig, + ty::FnPtr(sig_tys, hdr) => sig_tys.with(hdr), ty::FnDef(def_id, args) => interner.fn_sig(def_id).instantiate(interner, args), ty::Error(_) => { // ignore errors (#54954) @@ -181,7 +181,7 @@ pub trait Ty>: | ty::RawPtr(_, _) | ty::Ref(_, _, _) | ty::FnDef(_, _) - | ty::FnPtr(_) + | ty::FnPtr(..) 
| ty::Dynamic(_, _, _) | ty::Closure(_, _) | ty::CoroutineClosure(_, _) @@ -203,7 +203,9 @@ pub trait Ty>: pub trait Tys>: Copy + Debug + Hash + Eq + SliceLike + TypeFoldable + Default { - fn split_inputs_and_output(self) -> (I::FnInputTys, I::Ty); + fn inputs(self) -> I::FnInputTys; + + fn output(self) -> I::Ty; } pub trait Abi>: Copy + Debug + Hash + Eq + Relate { diff --git a/compiler/rustc_type_ir/src/interner.rs b/compiler/rustc_type_ir/src/interner.rs index c251540c0fc29..f2492ede4f5ea 100644 --- a/compiler/rustc_type_ir/src/interner.rs +++ b/compiler/rustc_type_ir/src/interner.rs @@ -11,7 +11,6 @@ use crate::inherent::*; use crate::ir_print::IrPrint; use crate::lang_items::TraitSolverLangItem; use crate::relate::Relate; -use crate::solve::inspect::CanonicalGoalEvaluationStep; use crate::solve::{ CanonicalInput, ExternalConstraintsData, PredefinedOpaquesData, QueryResult, SolverMode, }; @@ -65,11 +64,6 @@ pub trait Interner: + Eq + TypeVisitable + SliceLike; - type CanonicalGoalEvaluationStepRef: Copy - + Debug - + Hash - + Eq - + Deref>; type CanonicalVars: Copy + Debug @@ -177,11 +171,6 @@ pub trait Interner: fn debug_assert_args_compatible(self, def_id: Self::DefId, args: Self::GenericArgs); - fn intern_canonical_goal_evaluation_step( - self, - step: CanonicalGoalEvaluationStep, - ) -> Self::CanonicalGoalEvaluationStepRef; - fn mk_type_list_from_iter(self, args: I) -> T::Output where I: Iterator, @@ -390,7 +379,6 @@ impl CollectAndApply for Result { } impl search_graph::Cx for I { - type ProofTree = Option; type Input = CanonicalInput; type Result = QueryResult; diff --git a/compiler/rustc_type_ir/src/outlives.rs b/compiler/rustc_type_ir/src/outlives.rs index 2f26a43918383..bfcea6a81d313 100644 --- a/compiler/rustc_type_ir/src/outlives.rs +++ b/compiler/rustc_type_ir/src/outlives.rs @@ -186,7 +186,7 @@ impl TypeVisitor for OutlivesCollector<'_, I> { | ty::Slice(_) | ty::RawPtr(_, _) | ty::Ref(_, _, _) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::Dynamic(_, _, _) | ty::Tuple(_) => { ty.super_visit_with(self); diff --git a/compiler/rustc_type_ir/src/relate.rs b/compiler/rustc_type_ir/src/relate.rs index 9fd3534d1fa6b..578436b622a87 100644 --- a/compiler/rustc_type_ir/src/relate.rs +++ b/compiler/rustc_type_ir/src/relate.rs @@ -524,8 +524,8 @@ pub fn structurally_relate_tys>( Ok(Ty::new_fn_def(cx, a_def_id, args)) } - (ty::FnPtr(a_fty), ty::FnPtr(b_fty)) => { - let fty = relation.relate(a_fty, b_fty)?; + (ty::FnPtr(a_sig_tys, a_hdr), ty::FnPtr(b_sig_tys, b_hdr)) => { + let fty = relation.relate(a_sig_tys.with(a_hdr), b_sig_tys.with(b_hdr))?; Ok(Ty::new_fn_ptr(cx, fty)) } diff --git a/compiler/rustc_type_ir/src/search_graph/global_cache.rs b/compiler/rustc_type_ir/src/search_graph/global_cache.rs index be4f1069cd167..47f7cefac6ad1 100644 --- a/compiler/rustc_type_ir/src/search_graph/global_cache.rs +++ b/compiler/rustc_type_ir/src/search_graph/global_cache.rs @@ -1,18 +1,17 @@ use derive_where::derive_where; -use rustc_index::IndexVec; -use super::{AvailableDepth, Cx, StackDepth, StackEntry}; -use crate::data_structures::{HashMap, HashSet}; - -#[derive_where(Debug, Clone, Copy; X: Cx)] -struct QueryData { - result: X::Result, - proof_tree: X::ProofTree, -} +use super::{AvailableDepth, Cx, NestedGoals}; +use crate::data_structures::HashMap; struct Success { - data: X::Tracked>, additional_depth: usize, + nested_goals: NestedGoals, + result: X::Tracked, +} + +struct WithOverflow { + nested_goals: NestedGoals, + result: X::Tracked, } /// The cache entry for a given input. 
@@ -23,24 +22,15 @@ struct Success { #[derive_where(Default; X: Cx)] struct CacheEntry { success: Option>, - /// We have to be careful when caching roots of cycles. - /// - /// See the doc comment of `StackEntry::cycle_participants` for more - /// details. - nested_goals: HashSet, - with_overflow: HashMap>>, + with_overflow: HashMap>, } #[derive_where(Debug; X: Cx)] pub(super) struct CacheData<'a, X: Cx> { pub(super) result: X::Result, - pub(super) proof_tree: X::ProofTree, pub(super) additional_depth: usize, pub(super) encountered_overflow: bool, - // FIXME: This is currently unused, but impacts the design - // by requiring a closure for `Cx::with_global_cache`. - #[allow(dead_code)] - pub(super) nested_goals: &'a HashSet, + pub(super) nested_goals: &'a NestedGoals, } #[derive_where(Default; X: Cx)] pub struct GlobalCache { @@ -55,20 +45,21 @@ impl GlobalCache { input: X::Input, result: X::Result, - proof_tree: X::ProofTree, dep_node: X::DepNodeIndex, additional_depth: usize, encountered_overflow: bool, - nested_goals: &HashSet, + nested_goals: NestedGoals, ) { - let data = cx.mk_tracked(QueryData { result, proof_tree }, dep_node); + let result = cx.mk_tracked(result, dep_node); let entry = self.map.entry(input).or_default(); - entry.nested_goals.extend(nested_goals); if encountered_overflow { - entry.with_overflow.insert(additional_depth, data); + let with_overflow = WithOverflow { nested_goals, result }; + let prev = entry.with_overflow.insert(additional_depth, with_overflow); + assert!(prev.is_none()); } else { - entry.success = Some(Success { data, additional_depth }); + let prev = entry.success.replace(Success { additional_depth, nested_goals, result }); + assert!(prev.is_none()); } } @@ -80,36 +71,37 @@ impl GlobalCache { &'a self, cx: X, input: X::Input, - stack: &IndexVec>, available_depth: AvailableDepth, + mut candidate_is_applicable: impl FnMut(&NestedGoals) -> bool, ) -> Option> { let entry = self.map.get(&input)?; - if stack.iter().any(|e| entry.nested_goals.contains(&e.input)) { - return None; - } - - if let Some(ref success) = entry.success { - if available_depth.cache_entry_is_applicable(success.additional_depth) { - let QueryData { result, proof_tree } = cx.get_tracked(&success.data); + if let Some(Success { additional_depth, ref nested_goals, ref result }) = entry.success { + if available_depth.cache_entry_is_applicable(additional_depth) + && candidate_is_applicable(nested_goals) + { return Some(CacheData { - result, - proof_tree, - additional_depth: success.additional_depth, + result: cx.get_tracked(&result), + additional_depth, encountered_overflow: false, - nested_goals: &entry.nested_goals, + nested_goals, }); } } - entry.with_overflow.get(&available_depth.0).map(|e| { - let QueryData { result, proof_tree } = cx.get_tracked(e); - CacheData { - result, - proof_tree, - additional_depth: available_depth.0, - encountered_overflow: true, - nested_goals: &entry.nested_goals, + let additional_depth = available_depth.0; + if let Some(WithOverflow { nested_goals, result }) = + entry.with_overflow.get(&additional_depth) + { + if candidate_is_applicable(nested_goals) { + return Some(CacheData { + result: cx.get_tracked(result), + additional_depth, + encountered_overflow: true, + nested_goals, + }); } - }) + } + + None } } diff --git a/compiler/rustc_type_ir/src/search_graph/mod.rs b/compiler/rustc_type_ir/src/search_graph/mod.rs index 4abf99b1ded8a..d47c9e725f350 100644 --- a/compiler/rustc_type_ir/src/search_graph/mod.rs +++ b/compiler/rustc_type_ir/src/search_graph/mod.rs @@ 
-1,19 +1,32 @@ +/// The search graph is responsible for caching and cycle detection in the trait +/// solver. Making sure that caching doesn't result in soundness bugs or unstable +/// query results is very challenging and makes this one of the most-involved +/// self-contained components of the compiler. +/// +/// We added fuzzing support to test its correctness. The fuzzers used to verify +/// the current implementation can be found in https://github.com/lcnr/search_graph_fuzz. +/// +/// This is just a quick overview of the general design, please check out the relevant +/// [rustc-dev-guide chapter](https://rustc-dev-guide.rust-lang.org/solve/caching.html) for +/// more details. Caching is split between a global cache and the per-cycle `provisional_cache`. +/// The global cache has to be completely unobservable, while the per-cycle cache may impact +/// behavior as long as the resulting behavior is still correct. +use std::cmp::Ordering; +use std::collections::BTreeSet; use std::fmt::Debug; use std::hash::Hash; use std::marker::PhantomData; -use std::mem; use derive_where::derive_where; use rustc_index::{Idx, IndexVec}; use tracing::debug; -use crate::data_structures::{HashMap, HashSet}; +use crate::data_structures::HashMap; use crate::solve::SolverMode; mod global_cache; use global_cache::CacheData; pub use global_cache::GlobalCache; -mod validate; /// The search graph does not simply use `Interner` directly /// to enable its fuzzing without having to stub the rest of @@ -22,7 +35,6 @@ mod validate; /// about `Input` and `Result` as they are implementation details /// of the search graph. pub trait Cx: Copy { - type ProofTree: Debug + Copy; type Input: Debug + Eq + Hash + Copy; type Result: Debug + Eq + Hash + Copy; @@ -43,30 +55,41 @@ pub trait Cx: Copy { ) -> R; } -pub trait ProofTreeBuilder { - fn try_apply_proof_tree(&mut self, proof_tree: X::ProofTree) -> bool; - fn on_provisional_cache_hit(&mut self); - fn on_cycle_in_stack(&mut self); - fn finalize_canonical_goal_evaluation(&mut self, cx: X) -> X::ProofTree; -} - pub trait Delegate { type Cx: Cx; + /// Whether to use the provisional cache. Set to `false` by a fuzzer when + /// validating the search graph. + const ENABLE_PROVISIONAL_CACHE: bool; + type ValidationScope; + /// Returning `Some` disables the global cache for the current goal. + /// + /// The `ValidationScope` is used when fuzzing the search graph to track + /// for which goals the global cache has been disabled. This is necessary + /// as we may otherwise ignore the global cache entry for some goal `G` + /// only to later use it, failing to detect a cycle goal and potentially + /// changing the result. 
+ fn enter_validation_scope( + cx: Self::Cx, + input: ::Input, + ) -> Option; + const FIXPOINT_STEP_LIMIT: usize; - type ProofTreeBuilder: ProofTreeBuilder; + type ProofTreeBuilder; + fn inspect_is_noop(inspect: &mut Self::ProofTreeBuilder) -> bool; + + const DIVIDE_AVAILABLE_DEPTH_ON_OVERFLOW: usize; fn recursion_limit(cx: Self::Cx) -> usize; fn initial_provisional_result( cx: Self::Cx, - kind: CycleKind, + kind: PathKind, input: ::Input, ) -> ::Result; - fn reached_fixpoint( + fn is_initial_provisional_result( cx: Self::Cx, - kind: UsageKind, + kind: PathKind, input: ::Input, - provisional_result: Option<::Result>, result: ::Result, ) -> bool; fn on_stack_overflow( @@ -79,6 +102,13 @@ pub trait Delegate { input: ::Input, ) -> ::Result; + fn is_ambiguous_result(result: ::Result) -> bool; + fn propagate_ambiguity( + cx: Self::Cx, + for_input: ::Input, + from_result: ::Result, + ) -> ::Result; + fn step_is_coinductive(cx: Self::Cx, input: ::Input) -> bool; } @@ -86,19 +116,20 @@ pub trait Delegate { /// result. In the case we return an initial provisional result depending /// on the kind of cycle. #[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub enum CycleKind { +pub enum PathKind { Coinductive, Inductive, } #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum UsageKind { - Single(CycleKind), + Single(PathKind), Mixed, } impl UsageKind { fn merge(self, other: Self) -> Self { match (self, other) { + (UsageKind::Mixed, _) | (_, UsageKind::Mixed) => UsageKind::Mixed, (UsageKind::Single(lhs), UsageKind::Single(rhs)) => { if lhs == rhs { UsageKind::Single(lhs) @@ -106,11 +137,11 @@ impl UsageKind { UsageKind::Mixed } } - (UsageKind::Mixed, UsageKind::Mixed) - | (UsageKind::Mixed, UsageKind::Single(_)) - | (UsageKind::Single(_), UsageKind::Mixed) => UsageKind::Mixed, } } + fn and_merge(&mut self, other: Self) { + *self = self.merge(other); + } } #[derive(Debug, Clone, Copy)] @@ -132,7 +163,7 @@ impl AvailableDepth { } Some(if last.encountered_overflow { - AvailableDepth(last.available_depth.0 / 2) + AvailableDepth(last.available_depth.0 / D::DIVIDE_AVAILABLE_DEPTH_ON_OVERFLOW) } else { AvailableDepth(last.available_depth.0 - 1) }) @@ -148,97 +179,181 @@ impl AvailableDepth { } } +/// All cycle heads a given goal depends on, ordered by their stack depth. +/// +/// We therefore pop the cycle heads from highest to lowest. +#[derive(Clone, Debug, PartialEq, Eq, Default)] +struct CycleHeads { + heads: BTreeSet, +} + +impl CycleHeads { + fn is_empty(&self) -> bool { + self.heads.is_empty() + } + + fn highest_cycle_head(&self) -> StackDepth { + *self.heads.last().unwrap() + } + + fn opt_highest_cycle_head(&self) -> Option { + self.heads.last().copied() + } + + fn opt_lowest_cycle_head(&self) -> Option { + self.heads.first().copied() + } + + fn remove_highest_cycle_head(&mut self) { + let last = self.heads.pop_last(); + debug_assert_ne!(last, None); + } + + fn insert(&mut self, head: StackDepth) { + self.heads.insert(head); + } + + fn merge(&mut self, heads: &CycleHeads) { + for &head in heads.heads.iter() { + self.insert(head); + } + } + + /// Update the cycle heads of a goal at depth `this` given the cycle heads + /// of a nested goal. This merges the heads after filtering the parent goal + /// itself. 
+ fn extend_from_child(&mut self, this: StackDepth, child: &CycleHeads) { + for &head in child.heads.iter() { + match head.cmp(&this) { + Ordering::Less => {} + Ordering::Equal => continue, + Ordering::Greater => unreachable!(), + } + + self.insert(head); + } + } +} + +/// The nested goals of each stack entry and the path from the +/// stack entry to that nested goal. +/// +/// We only start tracking nested goals once we've either encountered +/// overflow or a solver cycle. This is a performance optimization to +/// avoid tracking nested goals on the happy path. +/// +/// We use nested goals for two reasons: +/// - when rebasing provisional cache entries +/// - when checking whether we have to ignore a global cache entry as reevaluating +/// it would encounter a cycle or use a provisional cache entry. +/// +/// We need to disable the global cache if using it would hide a cycle, as +/// cycles can impact behavior. The cycle ABA may have different final +/// results from the cycle BAB depending on the cycle root. +#[derive_where(Debug, Default; X: Cx)] +struct NestedGoals { + nested_goals: HashMap, +} +impl NestedGoals { + fn is_empty(&self) -> bool { + self.nested_goals.is_empty() + } + + fn insert(&mut self, input: X::Input, path_from_entry: UsageKind) { + self.nested_goals.entry(input).or_insert(path_from_entry).and_merge(path_from_entry); + } + + fn merge(&mut self, nested_goals: &NestedGoals) { + #[allow(rustc::potential_query_instability)] + for (input, path_from_entry) in nested_goals.iter() { + self.insert(input, path_from_entry); + } + } + + /// Adds the nested goals of a nested goal, given that the path from this goal + /// to the parent goal is `step_kind`. + /// + /// If the path from this goal to the nested goal is inductive, the paths from this goal + /// to all nested goals of that nested goal are also inductive. Otherwise the paths are + /// the same as for the child. + fn extend_from_child(&mut self, step_kind: PathKind, nested_goals: &NestedGoals) { + #[allow(rustc::potential_query_instability)] + for (input, path_from_entry) in nested_goals.iter() { + let path_from_entry = match step_kind { + PathKind::Coinductive => path_from_entry, + PathKind::Inductive => UsageKind::Single(PathKind::Inductive), + }; + self.insert(input, path_from_entry); + } + } + + #[rustc_lint_query_instability] + #[allow(rustc::potential_query_instability)] + fn iter(&self) -> impl Iterator + '_ { + self.nested_goals.iter().map(|(i, p)| (*i, *p)) + } + + fn get(&self, input: X::Input) -> Option { + self.nested_goals.get(&input).copied() + } + + fn contains(&self, input: X::Input) -> bool { + self.nested_goals.contains_key(&input) + } +} + rustc_index::newtype_index! { #[orderable] #[gate_rustc_only] pub struct StackDepth {} } +/// Stack entries of the evaluation stack. Their fields tend to be lazily +/// updated when popping a child goal or completely immutable. #[derive_where(Debug; X: Cx)] struct StackEntry { input: X::Input, + /// The available depth of a given goal, immutable. available_depth: AvailableDepth, /// The maximum depth reached by this stack entry, only up-to date /// for the top of the stack and lazily updated for the rest. reached_depth: StackDepth, - /// Whether this entry is a non-root cycle participant. - /// - /// We must not move the result of non-root cycle participants to the - /// global cache. We store the highest stack depth of a head of a cycle - /// this goal is involved in. This necessary to soundly cache its - /// provisional result.
- non_root_cycle_participant: Option, + /// All cycle heads this goal depends on. Lazily updated and only + /// up-to date for the top of the stack. + heads: CycleHeads, + /// Whether evaluating this goal encountered overflow. Lazily updated. encountered_overflow: bool, + /// Whether this goal has been used as the root of a cycle. This gets + /// eagerly updated when encountering a cycle. has_been_used: Option, - /// We put only the root goal of a coinductive cycle into the global cache. - /// - /// If we were to use that result when later trying to prove another cycle - /// participant, we can end up with unstable query results. - /// - /// See tests/ui/next-solver/coinduction/incompleteness-unstable-result.rs for - /// an example of where this is needed. - /// - /// There can be multiple roots on the same stack, so we need to track - /// cycle participants per root: - /// ```plain - /// A :- B - /// B :- A, C - /// C :- D - /// D :- C - /// ``` - nested_goals: HashSet, + /// The nested goals of this goal, see the doc comment of the type. + nested_goals: NestedGoals, + /// Starts out as `None` and gets set when rerunning this /// goal in case we encounter a cycle. provisional_result: Option, } -/// The provisional result for a goal which is not on the stack. -#[derive(Debug)] -struct DetachedEntry { - /// The head of the smallest non-trivial cycle involving this entry. - /// - /// Given the following rules, when proving `A` the head for - /// the provisional entry of `C` would be `B`. - /// ```plain - /// A :- B - /// B :- C - /// C :- A + B + C - /// ``` - head: StackDepth, - result: X::Result, -} - -/// Stores the stack depth of a currently evaluated goal *and* already -/// computed results for goals which depend on other goals still on the stack. -/// -/// The provisional result may depend on whether the stack above it is inductive -/// or coinductive. Because of this, we store separate provisional results for -/// each case. If an provisional entry is not applicable, it may be the case -/// that we already have provisional result while computing a goal. In this case -/// we prefer the provisional result to potentially avoid fixpoint iterations. -/// See tests/ui/traits/next-solver/cycles/mixed-cycles-2.rs for an example. -/// -/// The provisional cache can theoretically result in changes to the observable behavior, -/// see tests/ui/traits/next-solver/cycles/provisional-cache-impacts-behavior.rs. -#[derive_where(Default; X: Cx)] +/// A provisional result of an already computed goal which depends on other +/// goals still on the stack. +#[derive_where(Debug; X: Cx)] struct ProvisionalCacheEntry { - stack_depth: Option, - with_inductive_stack: Option>, - with_coinductive_stack: Option>, -} - -impl ProvisionalCacheEntry { - fn is_empty(&self) -> bool { - self.stack_depth.is_none() - && self.with_inductive_stack.is_none() - && self.with_coinductive_stack.is_none() - } + /// Whether evaluating the goal encountered overflow. This is used to + /// disable the cache entry except if the last goal on the stack is + /// already involved in this cycle. + encountered_overflow: bool, + /// All cycle heads this cache entry depends on. + heads: CycleHeads, + /// The path from the highest cycle head to this goal. + path_from_head: PathKind, + nested_goals: NestedGoals, + result: X::Result, } pub struct SearchGraph, X: Cx = ::Cx> { @@ -247,7 +362,11 @@ pub struct SearchGraph, X: Cx = ::Cx> { /// /// An element is *deeper* in the stack if its index is *lower*.
stack: IndexVec>, - provisional_cache: HashMap>, + /// The provisional cache contains entries for already computed goals which + /// still depend on goals higher-up in the stack. We don't move them to the + /// global cache and track them locally instead. A provisional cache entry + /// is only valid until the result of one of its cycle heads changes. + provisional_cache: HashMap>>, _marker: PhantomData, } @@ -266,77 +385,66 @@ impl, X: Cx> SearchGraph { self.mode } - fn update_parent_goal(&mut self, reached_depth: StackDepth, encountered_overflow: bool) { - if let Some(parent) = self.stack.raw.last_mut() { + /// Lazily update the stack entry for the parent goal. + /// This behavior is shared between actually evaluating goals + /// and using existing global cache entries to make sure they + /// have the same impact on the remaining evaluation. + fn update_parent_goal( + cx: X, + stack: &mut IndexVec>, + reached_depth: StackDepth, + heads: &CycleHeads, + encountered_overflow: bool, + nested_goals: &NestedGoals, + ) { + if let Some(parent_index) = stack.last_index() { + let parent = &mut stack[parent_index]; parent.reached_depth = parent.reached_depth.max(reached_depth); parent.encountered_overflow |= encountered_overflow; + + parent.heads.extend_from_child(parent_index, heads); + let step_kind = Self::step_kind(cx, parent.input); + parent.nested_goals.extend_from_child(step_kind, nested_goals); + // Once we've got goals which encountered overflow or a cycle, + // we track all goals whose behavior may depend depend on these + // goals as this change may cause them to now depend on additional + // goals, resulting in new cycles. See the dev-guide for examples. + if !nested_goals.is_empty() { + parent.nested_goals.insert(parent.input, UsageKind::Single(PathKind::Coinductive)) + } } } pub fn is_empty(&self) -> bool { - self.stack.is_empty() + if self.stack.is_empty() { + debug_assert!(self.provisional_cache.is_empty()); + true + } else { + false + } } - fn stack_coinductive_from( - cx: X, - stack: &IndexVec>, - head: StackDepth, - ) -> bool { - stack.raw[head.index()..].iter().all(|entry| D::step_is_coinductive(cx, entry.input)) - } - - // When encountering a solver cycle, the result of the current goal - // depends on goals lower on the stack. - // - // We have to therefore be careful when caching goals. Only the final result - // of the cycle root, i.e. the lowest goal on the stack involved in this cycle, - // is moved to the global cache while all others are stored in a provisional cache. - // - // We update both the head of this cycle to rerun its evaluation until - // we reach a fixpoint and all other cycle participants to make sure that - // their result does not get moved to the global cache. - fn tag_cycle_participants( - stack: &mut IndexVec>, - usage_kind: Option, - head: StackDepth, - ) { - if let Some(usage_kind) = usage_kind { - stack[head].has_been_used = - Some(stack[head].has_been_used.map_or(usage_kind, |prev| prev.merge(usage_kind))); - } - debug_assert!(stack[head].has_been_used.is_some()); - - // The current root of these cycles. Note that this may not be the final - // root in case a later goal depends on a goal higher up the stack. - let mut current_root = head; - while let Some(parent) = stack[current_root].non_root_cycle_participant { - current_root = parent; - debug_assert!(stack[current_root].has_been_used.is_some()); - } + /// The number of goals currently in the search graph. This should only be + /// used for debugging purposes. 
+ pub fn debug_current_depth(&self) -> usize { + self.stack.len() + } - let (stack, cycle_participants) = stack.raw.split_at_mut(head.index() + 1); - let current_cycle_root = &mut stack[current_root.as_usize()]; - for entry in cycle_participants { - entry.non_root_cycle_participant = entry.non_root_cycle_participant.max(Some(head)); - current_cycle_root.nested_goals.insert(entry.input); - current_cycle_root.nested_goals.extend(mem::take(&mut entry.nested_goals)); - } + fn step_kind(cx: X, input: X::Input) -> PathKind { + if D::step_is_coinductive(cx, input) { PathKind::Coinductive } else { PathKind::Inductive } } - fn clear_dependent_provisional_results( - provisional_cache: &mut HashMap>, + /// Whether the path from `head` to the current stack entry is inductive or coinductive. + fn stack_path_kind( + cx: X, + stack: &IndexVec>, head: StackDepth, - ) { - #[allow(rustc::potential_query_instability)] - provisional_cache.retain(|_, entry| { - if entry.with_coinductive_stack.as_ref().is_some_and(|p| p.head == head) { - entry.with_coinductive_stack.take(); - } - if entry.with_inductive_stack.as_ref().is_some_and(|p| p.head == head) { - entry.with_inductive_stack.take(); - } - !entry.is_empty() - }); + ) -> PathKind { + if stack.raw[head.index()..].iter().all(|entry| D::step_is_coinductive(cx, entry.input)) { + PathKind::Coinductive + } else { + PathKind::Inductive + } } /// Probably the most involved method of the whole solver. @@ -348,89 +456,65 @@ impl, X: Cx> SearchGraph { cx: X, input: X::Input, inspect: &mut D::ProofTreeBuilder, - mut prove_goal: impl FnMut(&mut Self, &mut D::ProofTreeBuilder) -> X::Result, + mut evaluate_goal: impl FnMut(&mut Self, &mut D::ProofTreeBuilder) -> X::Result, ) -> X::Result { - self.check_invariants(); - // Check for overflow. let Some(available_depth) = AvailableDepth::allowed_depth_for_nested::(cx, &self.stack) else { - if let Some(last) = self.stack.raw.last_mut() { - last.encountered_overflow = true; - } - - debug!("encountered stack overflow"); - return D::on_stack_overflow(cx, inspect, input); + return self.handle_overflow(cx, input, inspect); }; - if let Some(result) = self.lookup_global_cache(cx, input, available_depth, inspect) { + // We check the provisional cache before checking the global cache. This simplifies + // the implementation as we can avoid worrying about cases where both the global and + // provisional cache may apply, e.g. consider the following example + // + // - xxBA overflow + // - A + // - BA cycle + // - CB :x: + if let Some(result) = self.lookup_provisional_cache(cx, input) { return result; } - // Check whether the goal is in the provisional cache. - // The provisional result may rely on the path to its cycle roots, - // so we have to check the path of the current goal matches that of - // the cache entry. - let cache_entry = self.provisional_cache.entry(input).or_default(); - if let Some(entry) = cache_entry - .with_coinductive_stack - .as_ref() - .filter(|p| Self::stack_coinductive_from(cx, &self.stack, p.head)) - .or_else(|| { - cache_entry - .with_inductive_stack - .as_ref() - .filter(|p| !Self::stack_coinductive_from(cx, &self.stack, p.head)) - }) - { - debug!("provisional cache hit"); - // We have a nested goal which is already in the provisional cache, use - // its result. We do not provide any usage kind as that should have been - // already set correctly while computing the cache entry. 
- inspect.on_provisional_cache_hit(); - Self::tag_cycle_participants(&mut self.stack, None, entry.head); - return entry.result; - } else if let Some(stack_depth) = cache_entry.stack_depth { - debug!("encountered cycle with depth {stack_depth:?}"); - // We have a nested goal which directly relies on a goal deeper in the stack. - // - // We start by tagging all cycle participants, as that's necessary for caching. - // - // Finally we can return either the provisional response or the initial response - // in case we're in the first fixpoint iteration for this goal. - inspect.on_cycle_in_stack(); - - let is_coinductive_cycle = Self::stack_coinductive_from(cx, &self.stack, stack_depth); - let cycle_kind = - if is_coinductive_cycle { CycleKind::Coinductive } else { CycleKind::Inductive }; - Self::tag_cycle_participants( - &mut self.stack, - Some(UsageKind::Single(cycle_kind)), - stack_depth, - ); - - // Return the provisional result or, if we're in the first iteration, - // start with no constraints. - return if let Some(result) = self.stack[stack_depth].provisional_result { - result - } else { - D::initial_provisional_result(cx, cycle_kind, input) - }; + // Lookup the global cache unless we're building proof trees or are currently + // fuzzing. + let validate_cache = if !D::inspect_is_noop(inspect) { + None + } else if let Some(scope) = D::enter_validation_scope(cx, input) { + // When validating the global cache we need to track the goals for which the + // global cache has been disabled as it may otherwise change the result for + // cyclic goals. We don't care about goals which are not on the current stack + // so it's fine to drop their scope eagerly. + self.lookup_global_cache_untracked(cx, input, available_depth) + .inspect(|expected| debug!(?expected, "validate cache entry")) + .map(|r| (scope, r)) + } else if let Some(result) = self.lookup_global_cache(cx, input, available_depth) { + return result; } else { - // No entry, we push this goal on the stack and try to prove it. - let depth = self.stack.next_index(); - let entry = StackEntry { - input, - available_depth, - reached_depth: depth, - non_root_cycle_participant: None, - encountered_overflow: false, - has_been_used: None, - nested_goals: Default::default(), - provisional_result: None, - }; - assert_eq!(self.stack.push(entry), depth); - cache_entry.stack_depth = Some(depth); + None + }; + + // Detect cycles on the stack. We do this after the global cache lookup to + // avoid iterating over the stack in case a goal has already been computed. + // This may not have an actual performance impact and we could reorder them + // as it may reduce the number of `nested_goals` we need to track. + if let Some(result) = self.check_cycle_on_stack(cx, input) { + debug_assert!(validate_cache.is_none(), "global cache and cycle on stack"); + return result; + } + + // Unfortunately, it looks like we actually have to compute this goal. + let depth = self.stack.next_index(); + let entry = StackEntry { + input, + available_depth, + reached_depth: depth, + heads: Default::default(), + encountered_overflow: false, + has_been_used: None, + nested_goals: Default::default(), + provisional_result: None, }; + assert_eq!(self.stack.push(entry), depth); // This is for global caching, so we properly track query dependencies. // Everything that affects the `result` should be performed within this @@ -439,65 +523,320 @@ impl, X: Cx> SearchGraph { // must not be added to the global cache. Notably, this is the case for // trait solver cycles participants.
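Reviewer note on the hunk above: the old inline cycle handling is replaced by `check_cycle_on_stack` plus the fixpoint loop in `evaluate_goal_in_task` (driven by the `with_cached_task` closure below), which reruns a cycle head until its provisional result stops changing. The following is a minimal, self-contained sketch of that fixpoint loop, using toy types rather than the solver's `X::Result` and delegate hooks; it is an illustration, not the actual solver code.

    // Iterate a cycle head until its provisional result stabilizes.
    fn fixpoint_evaluate<R: PartialEq + Copy>(
        initial_provisional: R,
        mut evaluate: impl FnMut(R) -> R,
        step_limit: usize,
    ) -> Option<R> {
        // Start from the initial provisional result, mirroring the role of
        // `D::initial_provisional_result` in the first iteration.
        let mut provisional = initial_provisional;
        for _ in 0..step_limit {
            let result = evaluate(provisional);
            // A fixpoint is reached once rerunning the goal with the previous
            // provisional result produces that same result again.
            if result == provisional {
                return Some(result);
            }
            provisional = result;
        }
        // Analogous to exceeding `FIXPOINT_STEP_LIMIT`: treated as overflow.
        None
    }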
let ((final_entry, result), dep_node) = cx.with_cached_task(|| { - for _ in 0..D::FIXPOINT_STEP_LIMIT { - match self.fixpoint_step_in_task(cx, input, inspect, &mut prove_goal) { - StepResult::Done(final_entry, result) => return (final_entry, result), - StepResult::HasChanged => debug!("fixpoint changed provisional results"), - } + self.evaluate_goal_in_task(cx, input, inspect, &mut evaluate_goal) + }); + + // We've finished computing the goal and have popped it from the stack, + // lazily update its parent goal. + Self::update_parent_goal( + cx, + &mut self.stack, + final_entry.reached_depth, + &final_entry.heads, + final_entry.encountered_overflow, + &final_entry.nested_goals, + ); + + // We're now done with this goal. We only add the root of cycles to the global cache. + // In case this goal is involved in a larger cycle add it to the provisional cache. + if final_entry.heads.is_empty() { + if let Some((_scope, expected)) = validate_cache { + // Do not try to move a goal into the cache again if we're testing + // the global cache. + assert_eq!(result, expected, "input={input:?}"); + } else if D::inspect_is_noop(inspect) { + self.insert_global_cache(cx, input, final_entry, result, dep_node) } + } else if D::ENABLE_PROVISIONAL_CACHE { + debug_assert!(validate_cache.is_none()); + let entry = self.provisional_cache.entry(input).or_default(); + let StackEntry { heads, nested_goals, encountered_overflow, .. } = final_entry; + let path_from_head = Self::stack_path_kind(cx, &self.stack, heads.highest_cycle_head()); + entry.push(ProvisionalCacheEntry { + encountered_overflow, + heads, + path_from_head, + nested_goals, + result, + }); + } else { + debug_assert!(validate_cache.is_none()); + } + + result + } + + fn handle_overflow( + &mut self, + cx: X, + input: X::Input, + inspect: &mut D::ProofTreeBuilder, + ) -> X::Result { + if let Some(last) = self.stack.raw.last_mut() { + last.encountered_overflow = true; + // If computing a goal `B` depends on another goal `A` and + // `A` has a nested goal which overflows, then computing `B` + // at the same depth, but with `A` already on the stack, + // would encounter a solver cycle instead, potentially + // changing the result. + // + // We must therefore not use the global cache entry for `B` in that case. + // See tests/ui/traits/next-solver/cycles/hidden-by-overflow.rs + last.nested_goals.insert(last.input, UsageKind::Single(PathKind::Coinductive)); + } - debug!("canonical cycle overflow"); - let current_entry = self.stack.pop().unwrap(); - debug_assert!(current_entry.has_been_used.is_none()); - let result = D::on_fixpoint_overflow(cx, input); - (current_entry, result) + debug!("encountered stack overflow"); + D::on_stack_overflow(cx, inspect, input) + } + + /// When reevaluating a goal with a changed provisional result, all provisional cache entry + /// which depend on this goal get invalidated. + fn clear_dependent_provisional_results(&mut self) { + let head = self.stack.next_index(); + #[allow(rustc::potential_query_instability)] + self.provisional_cache.retain(|_, entries| { + entries.retain(|entry| entry.heads.highest_cycle_head() != head); + !entries.is_empty() }); + } - let proof_tree = inspect.finalize_canonical_goal_evaluation(cx); + /// A necessary optimization to handle complex solver cycles. A provisional cache entry + /// relies on a set of cycle heads and the path towards these heads. 
When popping a cycle + /// head from the stack after we've finished computing it, we can't be sure that the + /// provisional cache entry is still applicable. We need to keep the cache entries to + /// prevent hangs. + /// + /// What we therefore do is check whether the cycle kind of all cycles the goal of a + /// provisional cache entry is involved in would stay the same when computing the + /// goal without its cycle head on the stack. For more details, see the relevant + /// [rustc-dev-guide chapter](https://rustc-dev-guide.rust-lang.org/solve/caching.html). + /// + /// This can be thought of as rotating the sub-tree of this provisional result and changing + /// its entry point while making sure that all paths through this sub-tree stay the same. + /// + /// + /// In case the popped cycle head failed to reach a fixpoint, anything which depends on + /// its provisional result is invalid. Actually discarding provisional cache entries in + /// this case would cause hangs, so we instead change the result of dependent provisional + /// cache entries to also be ambiguous. This causes some undesirable ambiguity for nested + /// goals whose result doesn't actually depend on this cycle head, but that's acceptable + /// to me. + fn rebase_provisional_cache_entries( + &mut self, + cx: X, + stack_entry: &StackEntry, + mut mutate_result: impl FnMut(X::Input, X::Result) -> X::Result, + ) { + let head = self.stack.next_index(); + #[allow(rustc::potential_query_instability)] + self.provisional_cache.retain(|&input, entries| { + entries.retain_mut(|entry| { + let ProvisionalCacheEntry { + encountered_overflow: _, + heads, + path_from_head, + nested_goals, + result, + } = entry; + if heads.highest_cycle_head() != head { + return true; + } - // We don't try rebasing if the path from the current head + // to the cache entry is not coinductive or if the path from + // the cache entry to the current head is not coinductive. + // + // Both of these constraints could be weakened, but by only + // accepting coinductive paths we don't have to worry about + // changing the cycle kind of the remaining cycles. We can + // extend this in the future once there's a known issue + // caused by it. + if *path_from_head != PathKind::Coinductive + || nested_goals.get(stack_entry.input).unwrap() + != UsageKind::Single(PathKind::Coinductive) + { + return false; + } - // Merge the cycle heads of the provisional cache entry and the + // popped head. If the popped cycle head was a root, discard all + // provisional cache entries which depend on it.
+ heads.remove_highest_cycle_head(); + heads.merge(&stack_entry.heads); + let Some(head) = heads.opt_highest_cycle_head() else { + return false; + }; - let entry = self.provisional_cache.get_mut(&input).unwrap(); - entry.stack_depth = None; - if coinductive_stack { - entry.with_coinductive_stack = Some(DetachedEntry { head, result }); - } else { - entry.with_inductive_stack = Some(DetachedEntry { head, result }); + // As we've made sure that the path from the new highest cycle + // head to the uses of the popped cycle head are fully coinductive, + // we can be sure that the paths to all nested goals of the popped + // cycle head remain the same. We can simply merge them. + nested_goals.merge(&stack_entry.nested_goals); + // We now care about the path from the next highest cycle head to the + // provisional cache entry. + *path_from_head = Self::stack_path_kind(cx, &self.stack, head); + // Mutate the result of the provisional cache entry in case we did + // not reach a fixpoint. + *result = mutate_result(input, *result); + true + }); + !entries.is_empty() + }); + } + + fn lookup_provisional_cache(&mut self, cx: X, input: X::Input) -> Option { + if !D::ENABLE_PROVISIONAL_CACHE { + return None; + } + + let entries = self.provisional_cache.get(&input)?; + for &ProvisionalCacheEntry { + encountered_overflow, + ref heads, + path_from_head, + ref nested_goals, + result, + } in entries + { + let head = heads.highest_cycle_head(); + if encountered_overflow { + // This check is overly strict and very subtle. We need to make sure that if + // a global cache entry depends on some goal without adding it to its + // `nested_goals`, that goal must never have an applicable provisional + // cache entry to avoid incorrectly applying the cache entry. + // + // As we'd have to otherwise track literally all nested goals, we only + // apply provisional cache entries which encountered overflow once the + // current goal is already part of the same cycle. This check could be + // improved but seems to be good enough for now. + let last = self.stack.raw.last().unwrap(); + if !last.heads.opt_lowest_cycle_head().is_some_and(|lowest| lowest <= head) { + continue; + } } - } else { - // When encountering a cycle, both inductive and coinductive, we only - // move the root into the global cache. We also store all other cycle - // participants involved. - // - // We must not use the global cache entry of a root goal if a cycle - // participant is on the stack. This is necessary to prevent unstable - // results. See the comment of `StackEntry::nested_goals` for - // more details. - self.provisional_cache.remove(&input); - let additional_depth = final_entry.reached_depth.as_usize() - self.stack.len(); - cx.with_global_cache(self.mode, |cache| { - cache.insert( + + // A provisional cache entry is only valid if the current path from its + // highest cycle head to the goal is the same. 
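The validity condition stated in the comment above is checked right below via `stack_path_kind`: the path from a cycle head to the current goal counts as coinductive only if every step along that stack segment is coinductive. A toy version of that computation, independent of the solver's `Cx`/`Delegate` machinery and intended purely as an illustration:

    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    enum PathKind {
        Coinductive,
        Inductive,
    }

    // Coinductive only if every step from the cycle head to the top of the
    // stack is coinductive; a single inductive step makes the whole path
    // inductive. Mirrors the intent of `stack_path_kind`.
    fn path_kind_for(steps: &[PathKind]) -> PathKind {
        if steps.iter().all(|&s| s == PathKind::Coinductive) {
            PathKind::Coinductive
        } else {
            PathKind::Inductive
        }
    }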
+ if path_from_head == Self::stack_path_kind(cx, &self.stack, head) { + // While we don't have to track the full depth of the provisional cache entry, + // we do have to increment the required depth by one as we'd have already failed + // with overflow otherwise + let next_index = self.stack.next_index(); + let last = &mut self.stack.raw.last_mut().unwrap(); + let path_from_entry = Self::step_kind(cx, last.input); + last.nested_goals.insert(input, UsageKind::Single(path_from_entry)); + + Self::update_parent_goal( cx, - input, - result, - proof_tree, - dep_node, - additional_depth, - final_entry.encountered_overflow, - &final_entry.nested_goals, - ) - }) + &mut self.stack, + next_index, + heads, + false, + nested_goals, + ); + debug_assert!(self.stack[head].has_been_used.is_some()); + debug!(?head, ?path_from_head, "provisional cache hit"); + return Some(result); + } } - self.check_invariants(); + None + } - result + /// Even if there is a global cache entry for a given goal, we need to make sure + /// evaluating this entry would not have ended up depending on either a goal + /// already on the stack or a provisional cache entry. + fn candidate_is_applicable( + cx: X, + stack: &IndexVec>, + provisional_cache: &HashMap>>, + nested_goals: &NestedGoals, + ) -> bool { + // If the global cache entry didn't depend on any nested goals, it always + // applies. + if nested_goals.is_empty() { + return true; + } + + // If a nested goal of the global cache entry is on the stack, we would + // definitely encounter a cycle. + if stack.iter().any(|e| nested_goals.contains(e.input)) { + debug!("cache entry not applicable due to stack"); + return false; + } + + // The global cache entry is also invalid if there's a provisional cache entry + // would apply for any of its nested goals. + #[allow(rustc::potential_query_instability)] + for (input, path_from_global_entry) in nested_goals.iter() { + let Some(entries) = provisional_cache.get(&input) else { + continue; + }; + + debug!(?input, ?path_from_global_entry, ?entries, "candidate_is_applicable"); + // A provisional cache entry is applicable if the path to + // its highest cycle head is equal to the expected path. + for &ProvisionalCacheEntry { + encountered_overflow, + ref heads, + path_from_head, + nested_goals: _, + result: _, + } in entries.iter() + { + // We don't have to worry about provisional cache entries which encountered + // overflow, see the relevant comment in `lookup_provisional_cache`. + if encountered_overflow { + continue; + } + + // A provisional cache entry only applies if the path from its highest head + // matches the path when encountering the goal. + let head = heads.highest_cycle_head(); + let full_path = match Self::stack_path_kind(cx, stack, head) { + PathKind::Coinductive => path_from_global_entry, + PathKind::Inductive => UsageKind::Single(PathKind::Inductive), + }; + + match (full_path, path_from_head) { + (UsageKind::Mixed, _) + | (UsageKind::Single(PathKind::Coinductive), PathKind::Coinductive) + | (UsageKind::Single(PathKind::Inductive), PathKind::Inductive) => { + debug!( + ?full_path, + ?path_from_head, + "cache entry not applicable due to matching paths" + ); + return false; + } + _ => debug!(?full_path, ?path_from_head, "paths don't match"), + } + } + } + + true + } + + /// Used when fuzzing the global cache. Accesses the global cache without + /// updating the state of the search graph. 
+ fn lookup_global_cache_untracked( + &self, + cx: X, + input: X::Input, + available_depth: AvailableDepth, + ) -> Option { + cx.with_global_cache(self.mode, |cache| { + cache + .get(cx, input, available_depth, |nested_goals| { + Self::candidate_is_applicable( + cx, + &self.stack, + &self.provisional_cache, + nested_goals, + ) + }) + .map(|c| c.result) + }) } /// Try to fetch a previously computed result from the global cache, @@ -508,97 +847,206 @@ impl, X: Cx> SearchGraph { cx: X, input: X::Input, available_depth: AvailableDepth, - inspect: &mut D::ProofTreeBuilder, ) -> Option { cx.with_global_cache(self.mode, |cache| { - let CacheData { - result, - proof_tree, - additional_depth, - encountered_overflow, - nested_goals: _, // FIXME: consider nested goals here. - } = cache.get(cx, input, &self.stack, available_depth)?; - - // If we're building a proof tree and the current cache entry does not - // contain a proof tree, we do not use the entry but instead recompute - // the goal. We simply overwrite the existing entry once we're done, - // caching the proof tree. - if !inspect.try_apply_proof_tree(proof_tree) { - return None; - } + let CacheData { result, additional_depth, encountered_overflow, nested_goals } = cache + .get(cx, input, available_depth, |nested_goals| { + Self::candidate_is_applicable( + cx, + &self.stack, + &self.provisional_cache, + nested_goals, + ) + })?; // Update the reached depth of the current goal to make sure // its state is the same regardless of whether we've used the // global cache or not. let reached_depth = self.stack.next_index().plus(additional_depth); - self.update_parent_goal(reached_depth, encountered_overflow); + // We don't move cycle participants to the global cache, so the + // cycle heads are always empty. + let heads = Default::default(); + Self::update_parent_goal( + cx, + &mut self.stack, + reached_depth, + &heads, + encountered_overflow, + nested_goals, + ); - debug!("global cache hit"); + debug!(?additional_depth, "global cache hit"); Some(result) }) } -} -enum StepResult { - Done(StackEntry, X::Result), - HasChanged, -} + fn check_cycle_on_stack(&mut self, cx: X, input: X::Input) -> Option { + let (head, _stack_entry) = self.stack.iter_enumerated().find(|(_, e)| e.input == input)?; + debug!("encountered cycle with depth {head:?}"); + // We have a nested goal which directly relies on a goal deeper in the stack. + // + // We start by tagging all cycle participants, as that's necessary for caching. + // + // Finally we can return either the provisional response or the initial response + // in case we're in the first fixpoint iteration for this goal. + let path_kind = Self::stack_path_kind(cx, &self.stack, head); + let usage_kind = UsageKind::Single(path_kind); + self.stack[head].has_been_used = + Some(self.stack[head].has_been_used.map_or(usage_kind, |prev| prev.merge(usage_kind))); + + // Subtle: when encountering a cyclic goal, we still first checked for overflow, + // so we have to update the reached depth. 
+ let next_index = self.stack.next_index(); + let last_index = self.stack.last_index().unwrap(); + let last = &mut self.stack[last_index]; + last.reached_depth = last.reached_depth.max(next_index); + + let path_from_entry = Self::step_kind(cx, last.input); + last.nested_goals.insert(input, UsageKind::Single(path_from_entry)); + last.nested_goals.insert(last.input, UsageKind::Single(PathKind::Coinductive)); + if last_index != head { + last.heads.insert(head); + } + + // Return the provisional result or, if we're in the first iteration, + // start with no constraints. + if let Some(result) = self.stack[head].provisional_result { + Some(result) + } else { + Some(D::initial_provisional_result(cx, path_kind, input)) + } + } + + /// Whether we've reached a fixpoint when evaluating a cycle head. + fn reached_fixpoint( + &mut self, + cx: X, + stack_entry: &StackEntry, + usage_kind: UsageKind, + result: X::Result, + ) -> bool { + if let Some(prev) = stack_entry.provisional_result { + prev == result + } else if let UsageKind::Single(kind) = usage_kind { + D::is_initial_provisional_result(cx, kind, stack_entry.input, result) + } else { + false + } + } -impl, X: Cx> SearchGraph { /// When we encounter a coinductive cycle, we have to fetch the /// result of that cycle while we are still computing it. Because /// of this we continuously recompute the cycle until the result /// of the previous iteration is equal to the final result, at which /// point we are done. - fn fixpoint_step_in_task( + fn evaluate_goal_in_task( &mut self, cx: X, input: X::Input, inspect: &mut D::ProofTreeBuilder, - prove_goal: &mut F, - ) -> StepResult - where - F: FnMut(&mut Self, &mut D::ProofTreeBuilder) -> X::Result, - { - let result = prove_goal(self, inspect); - let stack_entry = self.stack.pop().unwrap(); - debug_assert_eq!(stack_entry.input, input); - - // If the current goal is not the root of a cycle, we are done. - let Some(usage_kind) = stack_entry.has_been_used else { - return StepResult::Done(stack_entry, result); - }; + mut evaluate_goal: impl FnMut(&mut Self, &mut D::ProofTreeBuilder) -> X::Result, + ) -> (StackEntry, X::Result) { + let mut i = 0; + loop { + let result = evaluate_goal(self, inspect); + let stack_entry = self.stack.pop().unwrap(); + debug_assert_eq!(stack_entry.input, input); - // If it is a cycle head, we have to keep trying to prove it until - // we reach a fixpoint. We need to do so for all cycle heads, - // not only for the root. - // - // See tests/ui/traits/next-solver/cycles/fixpoint-rerun-all-cycle-heads.rs - // for an example. - - // Start by clearing all provisional cache entries which depend on this - // the current goal. - Self::clear_dependent_provisional_results( - &mut self.provisional_cache, - self.stack.next_index(), - ); + // If the current goal is not the root of a cycle, we are done. + // + // There are no provisional cache entries which depend on this goal. + let Some(usage_kind) = stack_entry.has_been_used else { + return (stack_entry, result); + }; - // Check whether we reached a fixpoint, either because the final result - // is equal to the provisional result of the previous iteration, or because - // this was only the root of either coinductive or inductive cycles, and the - // final result is equal to the initial response for that case. - // - // If we did not reach a fixpoint, update the provisional result and reevaluate. 
- if D::reached_fixpoint(cx, usage_kind, input, stack_entry.provisional_result, result) { - StepResult::Done(stack_entry, result) - } else { - let depth = self.stack.push(StackEntry { + // If it is a cycle head, we have to keep trying to prove it until + // we reach a fixpoint. We need to do so for all cycle heads, + // not only for the root. + // + // See tests/ui/traits/next-solver/cycles/fixpoint-rerun-all-cycle-heads.rs + // for an example. + // + // Check whether we reached a fixpoint, either because the final result + // is equal to the provisional result of the previous iteration, or because + // this was only the root of either coinductive or inductive cycles, and the + // final result is equal to the initial response for that case. + if self.reached_fixpoint(cx, &stack_entry, usage_kind, result) { + self.rebase_provisional_cache_entries(cx, &stack_entry, |_, result| result); + return (stack_entry, result); + } + + // If computing this goal results in ambiguity with no constraints, + // we do not rerun it. It's incredibly difficult to get a different + // response in the next iteration in this case. These changes would + // likely either be caused by incompleteness or can change the maybe + // cause from ambiguity to overflow. Returning ambiguity always + // preserves soundness and completeness even if the goal is be known + // to succeed or fail. + // + // This prevents exponential blowup affecting multiple major crates. + // As we only get to this branch if we haven't yet reached a fixpoint, + // we also taint all provisional cache entries which depend on the + // current goal. + if D::is_ambiguous_result(result) { + self.rebase_provisional_cache_entries(cx, &stack_entry, |input, _| { + D::propagate_ambiguity(cx, input, result) + }); + return (stack_entry, result); + }; + + // If we've reached the fixpoint step limit, we bail with overflow and taint all + // provisional cache entries which depend on the current goal. + i += 1; + if i >= D::FIXPOINT_STEP_LIMIT { + debug!("canonical cycle overflow"); + let result = D::on_fixpoint_overflow(cx, input); + self.rebase_provisional_cache_entries(cx, &stack_entry, |input, _| { + D::on_fixpoint_overflow(cx, input) + }); + return (stack_entry, result); + } + + // Clear all provisional cache entries which depend on a previous provisional + // result of this goal and rerun. + self.clear_dependent_provisional_results(); + + debug!(?result, "fixpoint changed provisional results"); + self.stack.push(StackEntry { has_been_used: None, provisional_result: Some(result), ..stack_entry }); - debug_assert_eq!(self.provisional_cache[&input].stack_depth, Some(depth)); - StepResult::HasChanged } } + + /// When encountering a cycle, both inductive and coinductive, we only + /// move the root into the global cache. We also store all other cycle + /// participants involved. + /// + /// We must not use the global cache entry of a root goal if a cycle + /// participant is on the stack. This is necessary to prevent unstable + /// results. See the comment of `StackEntry::nested_goals` for + /// more details. 
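The doc comment above is the invariant that `candidate_is_applicable` (defined earlier in this file) enforces when reading the global cache: a cached result must not be used if re-evaluating the goal here would run into one of its recorded nested goals that is currently on the stack. Stripped of the path-kind and provisional-cache bookkeeping, the stack part of that check is essentially the following sketch (toy generics, not the solver's types):

    // Reject a cached result if any goal it depended on is currently being
    // evaluated, since recomputing it now would participate in a cycle and
    // could therefore produce a different result.
    fn global_cache_entry_usable<G: PartialEq>(stack: &[G], cached_nested_goals: &[G]) -> bool {
        !stack.iter().any(|on_stack| cached_nested_goals.contains(on_stack))
    }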
+ fn insert_global_cache( + &mut self, + cx: X, + input: X::Input, + final_entry: StackEntry, + result: X::Result, + dep_node: X::DepNodeIndex, + ) { + let additional_depth = final_entry.reached_depth.as_usize() - self.stack.len(); + debug!(?final_entry, ?result, "insert global cache"); + cx.with_global_cache(self.mode, |cache| { + cache.insert( + cx, + input, + result, + dep_node, + additional_depth, + final_entry.encountered_overflow, + final_entry.nested_goals, + ) + }) + } } diff --git a/compiler/rustc_type_ir/src/search_graph/validate.rs b/compiler/rustc_type_ir/src/search_graph/validate.rs deleted file mode 100644 index 1ae806834ba7d..0000000000000 --- a/compiler/rustc_type_ir/src/search_graph/validate.rs +++ /dev/null @@ -1,75 +0,0 @@ -use super::*; - -impl, X: Cx> SearchGraph { - #[allow(rustc::potential_query_instability)] - pub(super) fn check_invariants(&self) { - if !cfg!(debug_assertions) { - return; - } - - let SearchGraph { mode: _, stack, provisional_cache, _marker } = self; - if stack.is_empty() { - assert!(provisional_cache.is_empty()); - } - - for (depth, entry) in stack.iter_enumerated() { - let StackEntry { - input, - available_depth: _, - reached_depth: _, - non_root_cycle_participant, - encountered_overflow: _, - has_been_used, - ref nested_goals, - provisional_result, - } = *entry; - let cache_entry = provisional_cache.get(&entry.input).unwrap(); - assert_eq!(cache_entry.stack_depth, Some(depth)); - if let Some(head) = non_root_cycle_participant { - assert!(head < depth); - assert!(nested_goals.is_empty()); - assert_ne!(stack[head].has_been_used, None); - - let mut current_root = head; - while let Some(parent) = stack[current_root].non_root_cycle_participant { - current_root = parent; - } - assert!(stack[current_root].nested_goals.contains(&input)); - } - - if !nested_goals.is_empty() { - assert!(provisional_result.is_some() || has_been_used.is_some()); - for entry in stack.iter().take(depth.as_usize()) { - assert_eq!(nested_goals.get(&entry.input), None); - } - } - } - - for (&input, entry) in &self.provisional_cache { - let ProvisionalCacheEntry { stack_depth, with_coinductive_stack, with_inductive_stack } = - entry; - assert!( - stack_depth.is_some() - || with_coinductive_stack.is_some() - || with_inductive_stack.is_some() - ); - - if let &Some(stack_depth) = stack_depth { - assert_eq!(stack[stack_depth].input, input); - } - - let check_detached = |detached_entry: &DetachedEntry| { - let DetachedEntry { head, result: _ } = *detached_entry; - assert_ne!(stack[head].has_been_used, None); - }; - - if let Some(with_coinductive_stack) = with_coinductive_stack { - check_detached(with_coinductive_stack); - } - - if let Some(with_inductive_stack) = with_inductive_stack { - check_detached(with_inductive_stack); - } - } - } -} diff --git a/compiler/rustc_type_ir/src/solve/inspect.rs b/compiler/rustc_type_ir/src/solve/inspect.rs index 47d5e0dace71f..099c66f6bdc81 100644 --- a/compiler/rustc_type_ir/src/solve/inspect.rs +++ b/compiler/rustc_type_ir/src/solve/inspect.rs @@ -69,9 +69,7 @@ pub struct CanonicalGoalEvaluation { #[derive_where(PartialEq, Eq, Hash, Debug; I: Interner)] pub enum CanonicalGoalEvaluationKind { Overflow, - CycleInStack, - ProvisionalCacheHit, - Evaluation { final_revision: I::CanonicalGoalEvaluationStepRef }, + Evaluation { final_revision: CanonicalGoalEvaluationStep }, } #[derive_where(PartialEq, Eq, Hash, Debug; I: Interner)] diff --git a/compiler/rustc_type_ir/src/solve/mod.rs b/compiler/rustc_type_ir/src/solve/mod.rs index 
444fd01f01281..00fc6ba1c5c8f 100644 --- a/compiler/rustc_type_ir/src/solve/mod.rs +++ b/compiler/rustc_type_ir/src/solve/mod.rs @@ -340,11 +340,3 @@ impl MaybeCause { } } } - -#[derive_where(PartialEq, Eq, Debug; I: Interner)] -pub struct CacheData { - pub result: QueryResult, - pub proof_tree: Option, - pub additional_depth: usize, - pub encountered_overflow: bool, -} diff --git a/compiler/rustc_type_ir/src/ty_kind.rs b/compiler/rustc_type_ir/src/ty_kind.rs index 7e48f1b20a868..328b6739d9756 100644 --- a/compiler/rustc_type_ir/src/ty_kind.rs +++ b/compiler/rustc_type_ir/src/ty_kind.rs @@ -143,7 +143,12 @@ pub enum TyKind { /// fn foo() -> i32 { 1 } /// let bar: fn() -> i32 = foo; /// ``` - FnPtr(ty::Binder>), + /// + /// These two fields are equivalent to a `ty::Binder>`. But by + /// splitting that into two pieces, we get a more compact data layout that + /// reduces the size of `TyKind` by 8 bytes. It is a very hot type, so it's + /// worth the mild inconvenience. + FnPtr(ty::Binder>, FnHeader), /// A trait object. Written as `dyn for<'b> Trait<'b, Assoc = u32> + Send + 'a`. Dynamic(I::BoundExistentialPredicates, I::Region, DynKind), @@ -288,7 +293,7 @@ impl fmt::Debug for TyKind { RawPtr(ty, mutbl) => write!(f, "*{} {:?}", mutbl.ptr_str(), ty), Ref(r, t, m) => write!(f, "&{:?} {}{:?}", r, m.prefix_str(), t), FnDef(d, s) => f.debug_tuple("FnDef").field(d).field(&s).finish(), - FnPtr(s) => write!(f, "{s:?}"), + FnPtr(sig_tys, hdr) => write!(f, "{:?}", sig_tys.with(*hdr)), Dynamic(p, r, repr) => match repr { DynKind::Dyn => write!(f, "dyn {p:?} + {r:?}"), DynKind::DynStar => write!(f, "dyn* {p:?} + {r:?}"), @@ -868,16 +873,12 @@ pub struct FnSig { } impl FnSig { - pub fn split_inputs_and_output(self) -> (I::FnInputTys, I::Ty) { - self.inputs_and_output.split_inputs_and_output() - } - pub fn inputs(self) -> I::FnInputTys { - self.split_inputs_and_output().0 + self.inputs_and_output.inputs() } pub fn output(self) -> I::Ty { - self.split_inputs_and_output().1 + self.inputs_and_output.output() } pub fn is_fn_trait_compatible(self) -> bool { @@ -922,6 +923,13 @@ impl ty::Binder> { pub fn is_fn_trait_compatible(&self) -> bool { self.skip_binder().is_fn_trait_compatible() } + + // Used to split a single value into the two fields in `TyKind::FnPtr`. + pub fn split(self) -> (ty::Binder>, FnHeader) { + let hdr = + FnHeader { c_variadic: self.c_variadic(), safety: self.safety(), abi: self.abi() }; + (self.map_bound(|sig| FnSigTys { inputs_and_output: sig.inputs_and_output }), hdr) + } } impl fmt::Debug for FnSig { @@ -935,7 +943,7 @@ impl fmt::Debug for FnSig { } write!(f, "fn(")?; - let (inputs, output) = sig.split_inputs_and_output(); + let inputs = sig.inputs(); for (i, ty) in inputs.iter().enumerate() { if i > 0 { write!(f, ", ")?; @@ -951,9 +959,69 @@ impl fmt::Debug for FnSig { } write!(f, ")")?; + let output = sig.output(); match output.kind() { Tuple(list) if list.is_empty() => Ok(()), _ => write!(f, " -> {:?}", sig.output()), } } } + +// This is just a `FnSig` without the `FnHeader` fields. 
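Because `TyKind::FnPtr` now stores two fields, callers that previously matched a single binder have to reassemble it, as the `closure.rs` changes further below do with `sig_tys.with(hdr)`. The sketch below models that split/recombine pattern with stand-in types (`Header`, `SigTys`, `Sig` are illustrative names, not the compiler's); it only demonstrates the shape of the `split`/`with` round trip defined in this hunk.

    #[derive(Clone, Copy, Debug, PartialEq)]
    struct Header { c_variadic: bool, safety: bool }

    #[derive(Clone, Debug, PartialEq)]
    struct SigTys { inputs_and_output: Vec<u32> }

    #[derive(Clone, Debug, PartialEq)]
    struct Sig { inputs_and_output: Vec<u32>, c_variadic: bool, safety: bool }

    impl Sig {
        // Analogue of `split`: pull the small header fields out of the signature.
        fn split(self) -> (SigTys, Header) {
            let hdr = Header { c_variadic: self.c_variadic, safety: self.safety };
            (SigTys { inputs_and_output: self.inputs_and_output }, hdr)
        }
    }

    impl SigTys {
        // Analogue of `with`: recombine the two pieces into the full signature.
        fn with(self, hdr: Header) -> Sig {
            Sig {
                inputs_and_output: self.inputs_and_output,
                c_variadic: hdr.c_variadic,
                safety: hdr.safety,
            }
        }
    }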
+#[derive_where(Clone, Copy, Debug, PartialEq, Eq, Hash; I: Interner)] +#[cfg_attr(feature = "nightly", derive(TyEncodable, TyDecodable, HashStable_NoContext))] +#[derive(TypeVisitable_Generic, TypeFoldable_Generic, Lift_Generic)] +pub struct FnSigTys { + pub inputs_and_output: I::Tys, +} + +impl FnSigTys { + pub fn inputs(self) -> I::FnInputTys { + self.inputs_and_output.inputs() + } + + pub fn output(self) -> I::Ty { + self.inputs_and_output.output() + } +} + +impl ty::Binder> { + // Used to combine the two fields in `TyKind::FnPtr` into a single value. + pub fn with(self, hdr: FnHeader) -> ty::Binder> { + self.map_bound(|sig_tys| FnSig { + inputs_and_output: sig_tys.inputs_and_output, + c_variadic: hdr.c_variadic, + safety: hdr.safety, + abi: hdr.abi, + }) + } + + #[inline] + pub fn inputs(self) -> ty::Binder { + self.map_bound(|sig_tys| sig_tys.inputs()) + } + + #[inline] + #[track_caller] + pub fn input(self, index: usize) -> ty::Binder { + self.map_bound(|sig_tys| sig_tys.inputs().get(index).unwrap()) + } + + pub fn inputs_and_output(self) -> ty::Binder { + self.map_bound(|sig_tys| sig_tys.inputs_and_output) + } + + #[inline] + pub fn output(self) -> ty::Binder { + self.map_bound(|sig_tys| sig_tys.output()) + } +} + +#[derive_where(Clone, Copy, Debug, PartialEq, Eq, Hash; I: Interner)] +#[cfg_attr(feature = "nightly", derive(TyEncodable, TyDecodable, HashStable_NoContext))] +#[derive(TypeVisitable_Generic, TypeFoldable_Generic, Lift_Generic)] +pub struct FnHeader { + pub c_variadic: bool, + pub safety: I::Safety, + pub abi: I::Abi, +} diff --git a/compiler/rustc_type_ir/src/ty_kind/closure.rs b/compiler/rustc_type_ir/src/ty_kind/closure.rs index 81717ce4a2266..9d907baeeb302 100644 --- a/compiler/rustc_type_ir/src/ty_kind/closure.rs +++ b/compiler/rustc_type_ir/src/ty_kind/closure.rs @@ -197,7 +197,7 @@ impl ClosureArgs { /// Extracts the signature from the closure. 
pub fn sig(self) -> ty::Binder> { match self.sig_as_fn_ptr_ty().kind() { - ty::FnPtr(sig) => sig, + ty::FnPtr(sig_tys, hdr) => sig_tys.with(hdr), ty => panic!("closure_sig_as_fn_ptr_ty is not a fn-ptr: {ty:?}"), } } @@ -292,21 +292,23 @@ impl CoroutineClosureArgs { pub fn coroutine_closure_sig(self) -> ty::Binder> { let interior = self.coroutine_witness_ty(); - let ty::FnPtr(sig) = self.signature_parts_ty().kind() else { panic!() }; - sig.map_bound(|sig| { - let [resume_ty, tupled_inputs_ty] = *sig.inputs().as_slice() else { + let ty::FnPtr(sig_tys, hdr) = self.signature_parts_ty().kind() else { panic!() }; + sig_tys.map_bound(|sig_tys| { + let [resume_ty, tupled_inputs_ty] = *sig_tys.inputs().as_slice() else { panic!(); }; - let [yield_ty, return_ty] = *sig.output().tuple_fields().as_slice() else { panic!() }; + let [yield_ty, return_ty] = *sig_tys.output().tuple_fields().as_slice() else { + panic!() + }; CoroutineClosureSignature { interior, tupled_inputs_ty, resume_ty, yield_ty, return_ty, - c_variadic: sig.c_variadic, - safety: sig.safety, - abi: sig.abi, + c_variadic: hdr.c_variadic, + safety: hdr.safety, + abi: hdr.abi, } }) } @@ -321,7 +323,7 @@ impl CoroutineClosureArgs { pub fn has_self_borrows(&self) -> bool { match self.coroutine_captures_by_ref_ty().kind() { - ty::FnPtr(sig) => sig + ty::FnPtr(sig_tys, _) => sig_tys .skip_binder() .visit_with(&mut HasRegionsBoundAt { binder: ty::INNERMOST }) .is_break(), @@ -460,11 +462,11 @@ impl CoroutineClosureSignature { ) -> I::Ty { match kind { ty::ClosureKind::Fn | ty::ClosureKind::FnMut => { - let ty::FnPtr(sig) = coroutine_captures_by_ref_ty.kind() else { + let ty::FnPtr(sig_tys, _) = coroutine_captures_by_ref_ty.kind() else { panic!(); }; let coroutine_captures_by_ref_ty = - sig.output().skip_binder().fold_with(&mut FoldEscapingRegions { + sig_tys.output().skip_binder().fold_with(&mut FoldEscapingRegions { interner: cx, region: env_region, debruijn: ty::INNERMOST, diff --git a/library/Cargo.lock b/library/Cargo.lock index b36399d880e5d..815f5bb1385bf 100644 --- a/library/Cargo.lock +++ b/library/Cargo.lock @@ -339,6 +339,7 @@ dependencies = [ "std_detect", "unwind", "wasi", + "windows-targets 0.0.0", ] [[package]] @@ -421,9 +422,13 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" dependencies = [ - "windows-targets", + "windows-targets 0.52.5", ] +[[package]] +name = "windows-targets" +version = "0.0.0" + [[package]] name = "windows-targets" version = "0.52.5" diff --git a/library/Cargo.toml b/library/Cargo.toml index c4513b4c127d8..d8ece6b0ebd3e 100644 --- a/library/Cargo.toml +++ b/library/Cargo.toml @@ -8,6 +8,7 @@ members = [ exclude = [ # stdarch has its own Cargo workspace "stdarch", + "windows_targets" ] [profile.release.package.compiler_builtins] diff --git a/library/alloc/src/fmt.rs b/library/alloc/src/fmt.rs index 4b9b90fc1f157..571fcd177aae7 100644 --- a/library/alloc/src/fmt.rs +++ b/library/alloc/src/fmt.rs @@ -581,7 +581,7 @@ pub use core::fmt::Alignment; #[stable(feature = "rust1", since = "1.0.0")] pub use core::fmt::Error; #[unstable(feature = "debug_closure_helpers", issue = "117729")] -pub use core::fmt::FormatterFn; +pub use core::fmt::{from_fn, FromFn}; #[stable(feature = "rust1", since = "1.0.0")] pub use core::fmt::{write, Arguments}; #[stable(feature = "rust1", since = "1.0.0")] diff --git a/library/alloc/src/raw_vec.rs b/library/alloc/src/raw_vec.rs index 5b84df9ecef30..9c8fa7ceff4e5 100644 --- 
a/library/alloc/src/raw_vec.rs +++ b/library/alloc/src/raw_vec.rs @@ -1,7 +1,7 @@ #![unstable(feature = "raw_vec_internals", reason = "unstable const warnings", issue = "none")] -use core::alloc::LayoutError; -use core::mem::{self, ManuallyDrop, MaybeUninit, SizedTypeProperties}; +use core::marker::PhantomData; +use core::mem::{ManuallyDrop, MaybeUninit, SizedTypeProperties}; use core::ptr::{self, NonNull, Unique}; use core::{cmp, hint}; @@ -40,6 +40,13 @@ struct Cap(usize); impl Cap { const ZERO: Cap = unsafe { Cap(0) }; + + /// `Cap(cap)`, except if `T` is a ZST then `Cap::ZERO`. + /// + /// # Safety: cap must be <= `isize::MAX`. + unsafe fn new(cap: usize) -> Self { + if T::IS_ZST { Cap::ZERO } else { unsafe { Self(cap) } } + } } /// A low-level utility for more ergonomically allocating, reallocating, and deallocating @@ -66,7 +73,19 @@ impl Cap { /// `Box<[T]>`, since `capacity()` won't yield the length. #[allow(missing_debug_implementations)] pub(crate) struct RawVec { - ptr: Unique, + inner: RawVecInner, + _marker: PhantomData, +} + +/// Like a `RawVec`, but only generic over the allocator, not the type. +/// +/// As such, all the methods need the layout passed-in as a parameter. +/// +/// Having this separation reduces the amount of code we need to monomorphize, +/// as most operations don't need the actual type, just its layout. +#[allow(missing_debug_implementations)] +struct RawVecInner { + ptr: Unique, /// Never used for ZSTs; it's `capacity()`'s responsibility to return usize::MAX in that case. /// /// # Safety @@ -90,8 +109,9 @@ impl RawVec { /// `RawVec` with capacity `usize::MAX`. Useful for implementing /// delayed allocation. #[must_use] + #[rustc_const_stable(feature = "raw_vec_internals_const", since = "1.81")] pub const fn new() -> Self { - Self::new_in(Global) + Self { inner: RawVecInner::new::(), _marker: PhantomData } } /// Creates a `RawVec` (on the system heap) with exactly the @@ -113,10 +133,7 @@ impl RawVec { #[must_use] #[inline] pub fn with_capacity(capacity: usize) -> Self { - match Self::try_allocate_in(capacity, AllocInit::Uninitialized, Global) { - Ok(res) => res, - Err(err) => handle_error(err), - } + Self { inner: RawVecInner::with_capacity(capacity, T::LAYOUT), _marker: PhantomData } } /// Like `with_capacity`, but guarantees the buffer is zeroed. @@ -124,29 +141,56 @@ impl RawVec { #[must_use] #[inline] pub fn with_capacity_zeroed(capacity: usize) -> Self { - Self::with_capacity_zeroed_in(capacity, Global) + Self { + inner: RawVecInner::with_capacity_zeroed_in(capacity, Global, T::LAYOUT), + _marker: PhantomData, + } } } -impl RawVec { - // Tiny Vecs are dumb. Skip to: - // - 8 if the element size is 1, because any heap allocators is likely - // to round up a request of less than 8 bytes to at least 8 bytes. - // - 4 if elements are moderate-sized (<= 1 KiB). - // - 1 otherwise, to avoid wasting too much space for very short Vecs. - pub(crate) const MIN_NON_ZERO_CAP: usize = if mem::size_of::() == 1 { +impl RawVecInner { + #[must_use] + #[rustc_const_stable(feature = "raw_vec_internals_const", since = "1.81")] + const fn new() -> Self { + Self::new_in(Global, core::mem::align_of::()) + } + + #[cfg(not(any(no_global_oom_handling, test)))] + #[must_use] + #[inline] + fn with_capacity(capacity: usize, elem_layout: Layout) -> Self { + match Self::try_allocate_in(capacity, AllocInit::Uninitialized, Global, elem_layout) { + Ok(res) => res, + Err(err) => handle_error(err), + } + } +} + +// Tiny Vecs are dumb. 
Skip to: +// - 8 if the element size is 1, because any heap allocators is likely +// to round up a request of less than 8 bytes to at least 8 bytes. +// - 4 if elements are moderate-sized (<= 1 KiB). +// - 1 otherwise, to avoid wasting too much space for very short Vecs. +const fn min_non_zero_cap(size: usize) -> usize { + if size == 1 { 8 - } else if mem::size_of::() <= 1024 { + } else if size <= 1024 { 4 } else { 1 - }; + } +} + +impl RawVec { + #[cfg(not(no_global_oom_handling))] + pub(crate) const MIN_NON_ZERO_CAP: usize = min_non_zero_cap(size_of::()); /// Like `new`, but parameterized over the choice of allocator for /// the returned `RawVec`. + #[inline] + #[rustc_const_stable(feature = "raw_vec_internals_const", since = "1.81")] pub const fn new_in(alloc: A) -> Self { - // `cap: 0` means "unallocated". zero-sized types are ignored. - Self { ptr: Unique::dangling(), cap: Cap::ZERO, alloc } + Self { inner: RawVecInner::new_in(alloc, align_of::()), _marker: PhantomData } } /// Like `with_capacity`, but parameterized over the choice of @@ -154,9 +198,9 @@ impl RawVec { #[cfg(not(no_global_oom_handling))] #[inline] pub fn with_capacity_in(capacity: usize, alloc: A) -> Self { - match Self::try_allocate_in(capacity, AllocInit::Uninitialized, alloc) { - Ok(res) => res, - Err(err) => handle_error(err), + Self { + inner: RawVecInner::with_capacity_in(capacity, alloc, T::LAYOUT), + _marker: PhantomData, } } @@ -164,7 +208,10 @@ impl RawVec { /// allocator for the returned `RawVec`. #[inline] pub fn try_with_capacity_in(capacity: usize, alloc: A) -> Result { - Self::try_allocate_in(capacity, AllocInit::Uninitialized, alloc) + match RawVecInner::try_with_capacity_in(capacity, alloc, T::LAYOUT) { + Ok(inner) => Ok(Self { inner, _marker: PhantomData }), + Err(e) => Err(e), + } } /// Like `with_capacity_zeroed`, but parameterized over the choice @@ -172,9 +219,9 @@ impl RawVec { #[cfg(not(no_global_oom_handling))] #[inline] pub fn with_capacity_zeroed_in(capacity: usize, alloc: A) -> Self { - match Self::try_allocate_in(capacity, AllocInit::Zeroed, alloc) { - Ok(res) => res, - Err(err) => handle_error(err), + Self { + inner: RawVecInner::with_capacity_zeroed_in(capacity, alloc, T::LAYOUT), + _marker: PhantomData, } } @@ -200,45 +247,7 @@ impl RawVec { let me = ManuallyDrop::new(self); unsafe { let slice = ptr::slice_from_raw_parts_mut(me.ptr() as *mut MaybeUninit, len); - Box::from_raw_in(slice, ptr::read(&me.alloc)) - } - } - - fn try_allocate_in( - capacity: usize, - init: AllocInit, - alloc: A, - ) -> Result { - // Don't allocate here because `Drop` will not deallocate when `capacity` is 0. - - if T::IS_ZST || capacity == 0 { - Ok(Self::new_in(alloc)) - } else { - // We avoid `unwrap_or_else` here because it bloats the amount of - // LLVM IR generated. - let layout = match Layout::array::(capacity) { - Ok(layout) => layout, - Err(_) => return Err(CapacityOverflow.into()), - }; - - if let Err(err) = alloc_guard(layout.size()) { - return Err(err); - } - - let result = match init { - AllocInit::Uninitialized => alloc.allocate(layout), - #[cfg(not(no_global_oom_handling))] - AllocInit::Zeroed => alloc.allocate_zeroed(layout), - }; - let ptr = match result { - Ok(ptr) => ptr, - Err(_) => return Err(AllocError { layout, non_exhaustive: () }.into()), - }; - - // Allocators currently return a `NonNull<[u8]>` whose length - // matches the size requested. If that ever changes, the capacity - // here should change to `ptr.len() / mem::size_of::()`. 
- Ok(Self { ptr: Unique::from(ptr.cast()), cap: unsafe { Cap(capacity) }, alloc }) + Box::from_raw_in(slice, ptr::read(&me.inner.alloc)) } } @@ -254,8 +263,15 @@ impl RawVec { /// guaranteed. #[inline] pub unsafe fn from_raw_parts_in(ptr: *mut T, capacity: usize, alloc: A) -> Self { - let cap = if T::IS_ZST { Cap::ZERO } else { unsafe { Cap(capacity) } }; - Self { ptr: unsafe { Unique::new_unchecked(ptr) }, cap, alloc } + // SAFETY: Precondition passed to the caller + unsafe { + let ptr = ptr.cast(); + let capacity = Cap::new::(capacity); + Self { + inner: RawVecInner::from_raw_parts_in(ptr, capacity, alloc), + _marker: PhantomData, + } + } } /// A convenience method for hoisting the non-null precondition out of [`RawVec::from_raw_parts_in`]. @@ -264,9 +280,13 @@ impl RawVec { /// /// See [`RawVec::from_raw_parts_in`]. #[inline] - pub(crate) unsafe fn from_nonnull_in(ptr: NonNull, capacity: usize, alloc: A) -> Self { - let cap = if T::IS_ZST { Cap::ZERO } else { unsafe { Cap(capacity) } }; - Self { ptr: Unique::from(ptr), cap, alloc } + pub unsafe fn from_nonnull_in(ptr: NonNull, capacity: usize, alloc: A) -> Self { + // SAFETY: Precondition passed to the caller + unsafe { + let ptr = ptr.cast(); + let capacity = Cap::new::(capacity); + Self { inner: RawVecInner::from_nonnull_in(ptr, capacity, alloc), _marker: PhantomData } + } } /// Gets a raw pointer to the start of the allocation. Note that this is @@ -274,43 +294,26 @@ impl RawVec { /// be careful. #[inline] pub fn ptr(&self) -> *mut T { - self.ptr.as_ptr() + self.inner.ptr() } #[inline] pub fn non_null(&self) -> NonNull { - NonNull::from(self.ptr) + self.inner.non_null() } /// Gets the capacity of the allocation. /// /// This will always be `usize::MAX` if `T` is zero-sized. - #[inline(always)] + #[inline] pub fn capacity(&self) -> usize { - if T::IS_ZST { usize::MAX } else { self.cap.0 } + self.inner.capacity(size_of::()) } /// Returns a shared reference to the allocator backing this `RawVec`. + #[inline] pub fn allocator(&self) -> &A { - &self.alloc - } - - fn current_memory(&self) -> Option<(NonNull, Layout)> { - if T::IS_ZST || self.cap.0 == 0 { - None - } else { - // We could use Layout::array here which ensures the absence of isize and usize overflows - // and could hypothetically handle differences between stride and size, but this memory - // has already been allocated so we know it can't overflow and currently Rust does not - // support such types. So we can do better by skipping some checks and avoid an unwrap. - const { assert!(mem::size_of::() % mem::align_of::() == 0) }; - unsafe { - let align = mem::align_of::(); - let size = mem::size_of::().unchecked_mul(self.cap.0); - let layout = Layout::from_size_align_unchecked(size, align); - Some((self.ptr.cast().into(), layout)) - } - } + self.inner.allocator() } /// Ensures that the buffer contains at least enough space to hold `len + @@ -335,24 +338,7 @@ impl RawVec { #[cfg(not(no_global_oom_handling))] #[inline] pub fn reserve(&mut self, len: usize, additional: usize) { - // Callers expect this function to be very cheap when there is already sufficient capacity. - // Therefore, we move all the resizing and error-handling logic from grow_amortized and - // handle_reserve behind a call, while making sure that this function is likely to be - // inlined as just a comparison and a call if the comparison fails. 
- #[cold] - fn do_reserve_and_handle( - slf: &mut RawVec, - len: usize, - additional: usize, - ) { - if let Err(err) = slf.grow_amortized(len, additional) { - handle_error(err); - } - } - - if self.needs_to_grow(len, additional) { - do_reserve_and_handle(self, len, additional); - } + self.inner.reserve(len, additional, T::LAYOUT) } /// A specialized version of `self.reserve(len, 1)` which requires the @@ -360,21 +346,12 @@ impl RawVec { #[cfg(not(no_global_oom_handling))] #[inline(never)] pub fn grow_one(&mut self) { - if let Err(err) = self.grow_amortized(self.cap.0, 1) { - handle_error(err); - } + self.inner.grow_one(T::LAYOUT) } /// The same as `reserve`, but returns on errors instead of panicking or aborting. pub fn try_reserve(&mut self, len: usize, additional: usize) -> Result<(), TryReserveError> { - if self.needs_to_grow(len, additional) { - self.grow_amortized(len, additional)?; - } - unsafe { - // Inform the optimizer that the reservation has succeeded or wasn't needed - hint::assert_unchecked(!self.needs_to_grow(len, additional)); - } - Ok(()) + self.inner.try_reserve(len, additional, T::LAYOUT) } /// Ensures that the buffer contains at least enough space to hold `len + @@ -396,9 +373,7 @@ impl RawVec { /// Aborts on OOM. #[cfg(not(no_global_oom_handling))] pub fn reserve_exact(&mut self, len: usize, additional: usize) { - if let Err(err) = self.try_reserve_exact(len, additional) { - handle_error(err); - } + self.inner.reserve_exact(len, additional, T::LAYOUT) } /// The same as `reserve_exact`, but returns on errors instead of panicking or aborting. @@ -407,14 +382,7 @@ impl RawVec { len: usize, additional: usize, ) -> Result<(), TryReserveError> { - if self.needs_to_grow(len, additional) { - self.grow_exact(len, additional)?; - } - unsafe { - // Inform the optimizer that the reservation has succeeded or wasn't needed - hint::assert_unchecked(!self.needs_to_grow(len, additional)); - } - Ok(()) + self.inner.try_reserve_exact(len, additional, T::LAYOUT) } /// Shrinks the buffer down to the specified capacity. If the given amount @@ -430,22 +398,230 @@ impl RawVec { #[cfg(not(no_global_oom_handling))] #[inline] pub fn shrink_to_fit(&mut self, cap: usize) { - if let Err(err) = self.shrink(cap) { + self.inner.shrink_to_fit(cap, T::LAYOUT) + } +} + +unsafe impl<#[may_dangle] T, A: Allocator> Drop for RawVec { + /// Frees the memory owned by the `RawVec` *without* trying to drop its contents. + fn drop(&mut self) { + // SAFETY: We are in a Drop impl, self.inner will not be used again. + unsafe { self.inner.deallocate(T::LAYOUT) } + } +} + +impl RawVecInner { + #[inline] + #[rustc_const_stable(feature = "raw_vec_internals_const", since = "1.81")] + const fn new_in(alloc: A, align: usize) -> Self { + let ptr = unsafe { core::mem::transmute(align) }; + // `cap: 0` means "unallocated". zero-sized types are ignored. + Self { ptr, cap: Cap::ZERO, alloc } + } + + #[cfg(not(no_global_oom_handling))] + #[inline] + fn with_capacity_in(capacity: usize, alloc: A, elem_layout: Layout) -> Self { + match Self::try_allocate_in(capacity, AllocInit::Uninitialized, alloc, elem_layout) { + Ok(this) => { + unsafe { + // Make it more obvious that a subsquent Vec::reserve(capacity) will not allocate. 
+ hint::assert_unchecked(!this.needs_to_grow(0, capacity, elem_layout)); + } + this + } + Err(err) => handle_error(err), + } + } + + #[inline] + fn try_with_capacity_in( + capacity: usize, + alloc: A, + elem_layout: Layout, + ) -> Result { + Self::try_allocate_in(capacity, AllocInit::Uninitialized, alloc, elem_layout) + } + + #[cfg(not(no_global_oom_handling))] + #[inline] + fn with_capacity_zeroed_in(capacity: usize, alloc: A, elem_layout: Layout) -> Self { + match Self::try_allocate_in(capacity, AllocInit::Zeroed, alloc, elem_layout) { + Ok(res) => res, + Err(err) => handle_error(err), + } + } + + fn try_allocate_in( + capacity: usize, + init: AllocInit, + alloc: A, + elem_layout: Layout, + ) -> Result { + // We avoid `unwrap_or_else` here because it bloats the amount of + // LLVM IR generated. + let layout = match layout_array(capacity, elem_layout) { + Ok(layout) => layout, + Err(_) => return Err(CapacityOverflow.into()), + }; + + // Don't allocate here because `Drop` will not deallocate when `capacity` is 0. + if layout.size() == 0 { + return Ok(Self::new_in(alloc, elem_layout.align())); + } + + if let Err(err) = alloc_guard(layout.size()) { + return Err(err); + } + + let result = match init { + AllocInit::Uninitialized => alloc.allocate(layout), + #[cfg(not(no_global_oom_handling))] + AllocInit::Zeroed => alloc.allocate_zeroed(layout), + }; + let ptr = match result { + Ok(ptr) => ptr, + Err(_) => return Err(AllocError { layout, non_exhaustive: () }.into()), + }; + + // Allocators currently return a `NonNull<[u8]>` whose length + // matches the size requested. If that ever changes, the capacity + // here should change to `ptr.len() / mem::size_of::()`. + Ok(Self { ptr: Unique::from(ptr.cast()), cap: unsafe { Cap(capacity) }, alloc }) + } + + #[inline] + unsafe fn from_raw_parts_in(ptr: *mut u8, cap: Cap, alloc: A) -> Self { + Self { ptr: unsafe { Unique::new_unchecked(ptr) }, cap, alloc } + } + + #[inline] + unsafe fn from_nonnull_in(ptr: NonNull, cap: Cap, alloc: A) -> Self { + Self { ptr: Unique::from(ptr), cap, alloc } + } + + #[inline] + fn ptr(&self) -> *mut T { + self.non_null::().as_ptr() + } + + #[inline] + fn non_null(&self) -> NonNull { + self.ptr.cast().into() + } + + #[inline] + fn capacity(&self, elem_size: usize) -> usize { + if elem_size == 0 { usize::MAX } else { self.cap.0 } + } + + #[inline] + fn allocator(&self) -> &A { + &self.alloc + } + + #[inline] + fn current_memory(&self, elem_layout: Layout) -> Option<(NonNull, Layout)> { + if elem_layout.size() == 0 || self.cap.0 == 0 { + None + } else { + // We could use Layout::array here which ensures the absence of isize and usize overflows + // and could hypothetically handle differences between stride and size, but this memory + // has already been allocated so we know it can't overflow and currently Rust does not + // support such types. So we can do better by skipping some checks and avoid an unwrap. + unsafe { + let alloc_size = elem_layout.size().unchecked_mul(self.cap.0); + let layout = Layout::from_size_align_unchecked(alloc_size, elem_layout.align()); + Some((self.ptr.into(), layout)) + } + } + } + + #[cfg(not(no_global_oom_handling))] + #[inline] + fn reserve(&mut self, len: usize, additional: usize, elem_layout: Layout) { + // Callers expect this function to be very cheap when there is already sufficient capacity. 
+ // Therefore, we move all the resizing and error-handling logic from grow_amortized and + // handle_reserve behind a call, while making sure that this function is likely to be + // inlined as just a comparison and a call if the comparison fails. + #[cold] + fn do_reserve_and_handle( + slf: &mut RawVecInner, + len: usize, + additional: usize, + elem_layout: Layout, + ) { + if let Err(err) = slf.grow_amortized(len, additional, elem_layout) { + handle_error(err); + } + } + + if self.needs_to_grow(len, additional, elem_layout) { + do_reserve_and_handle(self, len, additional, elem_layout); + } + } + + #[cfg(not(no_global_oom_handling))] + #[inline] + fn grow_one(&mut self, elem_layout: Layout) { + if let Err(err) = self.grow_amortized(self.cap.0, 1, elem_layout) { handle_error(err); } } -} -impl RawVec { - /// Returns if the buffer needs to grow to fulfill the needed extra capacity. - /// Mainly used to make inlining reserve-calls possible without inlining `grow`. - fn needs_to_grow(&self, len: usize, additional: usize) -> bool { - additional > self.capacity().wrapping_sub(len) + fn try_reserve( + &mut self, + len: usize, + additional: usize, + elem_layout: Layout, + ) -> Result<(), TryReserveError> { + if self.needs_to_grow(len, additional, elem_layout) { + self.grow_amortized(len, additional, elem_layout)?; + } + unsafe { + // Inform the optimizer that the reservation has succeeded or wasn't needed + hint::assert_unchecked(!self.needs_to_grow(len, additional, elem_layout)); + } + Ok(()) } - /// # Safety: - /// - /// `cap` must not exceed `isize::MAX`. + #[cfg(not(no_global_oom_handling))] + fn reserve_exact(&mut self, len: usize, additional: usize, elem_layout: Layout) { + if let Err(err) = self.try_reserve_exact(len, additional, elem_layout) { + handle_error(err); + } + } + + fn try_reserve_exact( + &mut self, + len: usize, + additional: usize, + elem_layout: Layout, + ) -> Result<(), TryReserveError> { + if self.needs_to_grow(len, additional, elem_layout) { + self.grow_exact(len, additional, elem_layout)?; + } + unsafe { + // Inform the optimizer that the reservation has succeeded or wasn't needed + hint::assert_unchecked(!self.needs_to_grow(len, additional, elem_layout)); + } + Ok(()) + } + + #[cfg(not(no_global_oom_handling))] + #[inline] + fn shrink_to_fit(&mut self, cap: usize, elem_layout: Layout) { + if let Err(err) = self.shrink(cap, elem_layout) { + handle_error(err); + } + } + + #[inline] + fn needs_to_grow(&self, len: usize, additional: usize, elem_layout: Layout) -> bool { + additional > self.capacity(elem_layout.size()).wrapping_sub(len) + } + + #[inline] unsafe fn set_ptr_and_cap(&mut self, ptr: NonNull<[u8]>, cap: usize) { // Allocators currently return a `NonNull<[u8]>` whose length matches // the size requested. If that ever changes, the capacity here should @@ -454,18 +630,16 @@ impl RawVec { self.cap = unsafe { Cap(cap) }; } - // This method is usually instantiated many times. So we want it to be as - // small as possible, to improve compile times. But we also want as much of - // its contents to be statically computable as possible, to make the - // generated code run faster. Therefore, this method is carefully written - // so that all of the code that depends on `T` is within it, while as much - // of the code that doesn't depend on `T` as possible is in functions that - // are non-generic over `T`. 
- fn grow_amortized(&mut self, len: usize, additional: usize) -> Result<(), TryReserveError> { + fn grow_amortized( + &mut self, + len: usize, + additional: usize, + elem_layout: Layout, + ) -> Result<(), TryReserveError> { // This is ensured by the calling contexts. debug_assert!(additional > 0); - if T::IS_ZST { + if elem_layout.size() == 0 { // Since we return a capacity of `usize::MAX` when `elem_size` is // 0, getting to here necessarily means the `RawVec` is overfull. return Err(CapacityOverflow.into()); @@ -477,33 +651,34 @@ impl RawVec { // This guarantees exponential growth. The doubling cannot overflow // because `cap <= isize::MAX` and the type of `cap` is `usize`. let cap = cmp::max(self.cap.0 * 2, required_cap); - let cap = cmp::max(Self::MIN_NON_ZERO_CAP, cap); + let cap = cmp::max(min_non_zero_cap(elem_layout.size()), cap); - let new_layout = Layout::array::(cap); + let new_layout = layout_array(cap, elem_layout)?; - // `finish_grow` is non-generic over `T`. - let ptr = finish_grow(new_layout, self.current_memory(), &mut self.alloc)?; + let ptr = finish_grow(new_layout, self.current_memory(elem_layout), &mut self.alloc)?; // SAFETY: finish_grow would have resulted in a capacity overflow if we tried to allocate more than `isize::MAX` items + unsafe { self.set_ptr_and_cap(ptr, cap) }; Ok(()) } - // The constraints on this method are much the same as those on - // `grow_amortized`, but this method is usually instantiated less often so - // it's less critical. - fn grow_exact(&mut self, len: usize, additional: usize) -> Result<(), TryReserveError> { - if T::IS_ZST { + fn grow_exact( + &mut self, + len: usize, + additional: usize, + elem_layout: Layout, + ) -> Result<(), TryReserveError> { + if elem_layout.size() == 0 { // Since we return a capacity of `usize::MAX` when the type size is // 0, getting to here necessarily means the `RawVec` is overfull. return Err(CapacityOverflow.into()); } let cap = len.checked_add(additional).ok_or(CapacityOverflow)?; - let new_layout = Layout::array::(cap); + let new_layout = layout_array(cap, elem_layout)?; - // `finish_grow` is non-generic over `T`. - let ptr = finish_grow(new_layout, self.current_memory(), &mut self.alloc)?; - // SAFETY: `finish_grow` would have resulted in a capacity overflow if we tried to allocate more than `isize::MAX` items + let ptr = finish_grow(new_layout, self.current_memory(elem_layout), &mut self.alloc)?; + // SAFETY: finish_grow would have resulted in a capacity overflow if we tried to allocate more than `isize::MAX` items unsafe { self.set_ptr_and_cap(ptr, cap); } @@ -512,10 +687,10 @@ impl RawVec { #[cfg(not(no_global_oom_handling))] #[inline] - fn shrink(&mut self, cap: usize) -> Result<(), TryReserveError> { - assert!(cap <= self.capacity(), "Tried to shrink to a larger capacity"); + fn shrink(&mut self, cap: usize, elem_layout: Layout) -> Result<(), TryReserveError> { + assert!(cap <= self.capacity(elem_layout.size()), "Tried to shrink to a larger capacity"); // SAFETY: Just checked this isn't trying to grow - unsafe { self.shrink_unchecked(cap) } + unsafe { self.shrink_unchecked(cap, elem_layout) } } /// `shrink`, but without the capacity check. 
@@ -529,23 +704,27 @@ impl RawVec { /// # Safety /// `cap <= self.capacity()` #[cfg(not(no_global_oom_handling))] - unsafe fn shrink_unchecked(&mut self, cap: usize) -> Result<(), TryReserveError> { - let (ptr, layout) = if let Some(mem) = self.current_memory() { mem } else { return Ok(()) }; - // See current_memory() why this assert is here - const { assert!(mem::size_of::() % mem::align_of::() == 0) }; + unsafe fn shrink_unchecked( + &mut self, + cap: usize, + elem_layout: Layout, + ) -> Result<(), TryReserveError> { + let (ptr, layout) = + if let Some(mem) = self.current_memory(elem_layout) { mem } else { return Ok(()) }; // If shrinking to 0, deallocate the buffer. We don't reach this point // for the T::IS_ZST case since current_memory() will have returned // None. if cap == 0 { unsafe { self.alloc.deallocate(ptr, layout) }; - self.ptr = Unique::dangling(); + self.ptr = + unsafe { Unique::new_unchecked(ptr::without_provenance_mut(elem_layout.align())) }; self.cap = Cap::ZERO; } else { let ptr = unsafe { - // `Layout::array` cannot overflow here because it would have + // Layout cannot overflow here because it would have // overflowed earlier when capacity was larger. - let new_size = mem::size_of::().unchecked_mul(cap); + let new_size = elem_layout.size().unchecked_mul(cap); let new_layout = Layout::from_size_align_unchecked(new_size, layout.align()); self.alloc .shrink(ptr, layout, new_layout) @@ -558,24 +737,32 @@ impl RawVec { } Ok(()) } + + /// # Safety + /// + /// This function deallocates the owned allocation, but does not update `ptr` or `cap` to + /// prevent double-free or use-after-free. Essentially, do not do anything with the caller + /// after this function returns. + /// Ideally this function would take `self` by move, but it cannot because it exists to be + /// called from a `Drop` impl. + unsafe fn deallocate(&mut self, elem_layout: Layout) { + if let Some((ptr, layout)) = self.current_memory(elem_layout) { + unsafe { + self.alloc.deallocate(ptr, layout); + } + } + } } -// This function is outside `RawVec` to minimize compile times. See the comment -// above `RawVec::grow_amortized` for details. (The `A` parameter isn't -// significant, because the number of different `A` types seen in practice is -// much smaller than the number of `T` types.) #[inline(never)] fn finish_grow( - new_layout: Result, + new_layout: Layout, current_memory: Option<(NonNull, Layout)>, alloc: &mut A, ) -> Result, TryReserveError> where A: Allocator, { - // Check for the error here to minimize the size of `RawVec::grow_*`. - let new_layout = new_layout.map_err(|_| CapacityOverflow)?; - alloc_guard(new_layout.size())?; let memory = if let Some((ptr, old_layout)) = current_memory { @@ -592,15 +779,6 @@ where memory.map_err(|_| AllocError { layout: new_layout, non_exhaustive: () }.into()) } -unsafe impl<#[may_dangle] T, A: Allocator> Drop for RawVec { - /// Frees the memory owned by the `RawVec` *without* trying to drop its contents. - fn drop(&mut self) { - if let Some((ptr, layout)) = self.current_memory() { - unsafe { self.alloc.deallocate(ptr, layout) } - } - } -} - // Central function for reserve error handling. 
#[cfg(not(no_global_oom_handling))] #[cold] @@ -627,3 +805,8 @@ fn alloc_guard(alloc_size: usize) -> Result<(), TryReserveError> { Ok(()) } } + +#[inline] +fn layout_array(cap: usize, elem_layout: Layout) -> Result { + elem_layout.repeat(cap).map(|(layout, _pad)| layout).map_err(|_| CapacityOverflow.into()) +} diff --git a/library/alloc/src/raw_vec/tests.rs b/library/alloc/src/raw_vec/tests.rs index 48c6e5f46f8db..d78ded104fb09 100644 --- a/library/alloc/src/raw_vec/tests.rs +++ b/library/alloc/src/raw_vec/tests.rs @@ -43,9 +43,9 @@ fn allocator_param() { let a = BoundedAlloc { fuel: Cell::new(500) }; let mut v: RawVec = RawVec::with_capacity_in(50, a); - assert_eq!(v.alloc.fuel.get(), 450); + assert_eq!(v.inner.alloc.fuel.get(), 450); v.reserve(50, 150); // (causes a realloc, thus using 50 + 150 = 200 units of fuel) - assert_eq!(v.alloc.fuel.get(), 250); + assert_eq!(v.inner.alloc.fuel.get(), 250); } #[test] @@ -86,7 +86,7 @@ struct ZST; fn zst_sanity(v: &RawVec) { assert_eq!(v.capacity(), usize::MAX); assert_eq!(v.ptr(), core::ptr::Unique::::dangling().as_ptr()); - assert_eq!(v.current_memory(), None); + assert_eq!(v.inner.current_memory(T::LAYOUT), None); } #[test] @@ -106,22 +106,11 @@ fn zst() { let v: RawVec = RawVec::with_capacity_in(100, Global); zst_sanity(&v); - let v: RawVec = RawVec::try_allocate_in(0, AllocInit::Uninitialized, Global).unwrap(); - zst_sanity(&v); - - let v: RawVec = RawVec::try_allocate_in(100, AllocInit::Uninitialized, Global).unwrap(); - zst_sanity(&v); - - let mut v: RawVec = - RawVec::try_allocate_in(usize::MAX, AllocInit::Uninitialized, Global).unwrap(); + let mut v: RawVec = RawVec::with_capacity_in(usize::MAX, Global); zst_sanity(&v); // Check all these operations work as expected with zero-sized elements. - assert!(!v.needs_to_grow(100, usize::MAX - 100)); - assert!(v.needs_to_grow(101, usize::MAX - 100)); - zst_sanity(&v); - v.reserve(100, usize::MAX - 100); //v.reserve(101, usize::MAX - 100); // panics, in `zst_reserve_panic` below zst_sanity(&v); @@ -138,12 +127,12 @@ fn zst() { assert_eq!(v.try_reserve_exact(101, usize::MAX - 100), cap_err); zst_sanity(&v); - assert_eq!(v.grow_amortized(100, usize::MAX - 100), cap_err); - assert_eq!(v.grow_amortized(101, usize::MAX - 100), cap_err); + assert_eq!(v.inner.grow_amortized(100, usize::MAX - 100, ZST::LAYOUT), cap_err); + assert_eq!(v.inner.grow_amortized(101, usize::MAX - 100, ZST::LAYOUT), cap_err); zst_sanity(&v); - assert_eq!(v.grow_exact(100, usize::MAX - 100), cap_err); - assert_eq!(v.grow_exact(101, usize::MAX - 100), cap_err); + assert_eq!(v.inner.grow_exact(100, usize::MAX - 100, ZST::LAYOUT), cap_err); + assert_eq!(v.inner.grow_exact(101, usize::MAX - 100, ZST::LAYOUT), cap_err); zst_sanity(&v); } diff --git a/library/alloc/src/string.rs b/library/alloc/src/string.rs index 124230812df56..e628be1546f76 100644 --- a/library/alloc/src/string.rs +++ b/library/alloc/src/string.rs @@ -2643,14 +2643,54 @@ impl ToString for i8 { } } -#[doc(hidden)] +// Generic/generated code can sometimes have multiple, nested references +// for strings, including `&&&str`s that would never be written +// by hand. This macro generates twelve layers of nested `&`-impl +// for primitive strings. #[cfg(not(no_global_oom_handling))] -#[stable(feature = "str_to_string_specialization", since = "1.9.0")] -impl ToString for str { - #[inline] - fn to_string(&self) -> String { - String::from(self) - } +macro_rules! to_string_str_wrap_in_ref { + {x $($x:ident)*} => { + &to_string_str_wrap_in_ref! 
{ $($x)* } + }; + {} => { str }; +} +#[cfg(not(no_global_oom_handling))] +macro_rules! to_string_expr_wrap_in_deref { + {$self:expr ; x $($x:ident)*} => { + *(to_string_expr_wrap_in_deref! { $self ; $($x)* }) + }; + {$self:expr ;} => { $self }; +} +#[cfg(not(no_global_oom_handling))] +macro_rules! to_string_str { + {$($($x:ident)*),+} => { + $( + #[doc(hidden)] + #[stable(feature = "str_to_string_specialization", since = "1.9.0")] + impl ToString for to_string_str_wrap_in_ref!($($x)*) { + #[inline] + fn to_string(&self) -> String { + String::from(to_string_expr_wrap_in_deref!(self ; $($x)*)) + } + } + )+ + }; +} + +#[cfg(not(no_global_oom_handling))] +to_string_str! { + x x x x x x x x x x x x, + x x x x x x x x x x x, + x x x x x x x x x x, + x x x x x x x x x, + x x x x x x x x, + x x x x x x x, + x x x x x x, + x x x x x, + x x x x, + x x x, + x x, + x, } #[doc(hidden)] diff --git a/library/alloc/tests/task.rs b/library/alloc/tests/task.rs index 034039a1eae9d..390dec14484ba 100644 --- a/library/alloc/tests/task.rs +++ b/library/alloc/tests/task.rs @@ -4,7 +4,7 @@ use alloc::task::{LocalWake, Wake}; use core::task::{LocalWaker, Waker}; #[test] -#[cfg_attr(miri, should_panic)] // `will_wake` doesn't guarantee that this test will work, and indeed on Miri it fails +#[cfg_attr(miri, ignore)] // `will_wake` doesn't guarantee that this test will work, and indeed on Miri it can fail fn test_waker_will_wake_clone() { struct NoopWaker; @@ -20,7 +20,7 @@ fn test_waker_will_wake_clone() { } #[test] -#[cfg_attr(miri, should_panic)] // `will_wake` doesn't guarantee that this test will work, and indeed on Miri it fails +#[cfg_attr(miri, ignore)] // `will_wake` doesn't guarantee that this test will work, and indeed on Miri it can fail fn test_local_waker_will_wake_clone() { struct NoopWaker; diff --git a/library/core/src/array/mod.rs b/library/core/src/array/mod.rs index 5c826b9993f86..61c713c9e81cf 100644 --- a/library/core/src/array/mod.rs +++ b/library/core/src/array/mod.rs @@ -889,6 +889,7 @@ impl Guard<'_, T> { } impl Drop for Guard<'_, T> { + #[inline] fn drop(&mut self) { debug_assert!(self.initialized <= self.array_mut.len()); diff --git a/library/core/src/ascii/ascii_char.rs b/library/core/src/ascii/ascii_char.rs index 34a05ac38884d..375358dddf5c1 100644 --- a/library/core/src/ascii/ascii_char.rs +++ b/library/core/src/ascii/ascii_char.rs @@ -3,7 +3,7 @@ //! suggestions from rustc if you get anything slightly wrong in here, and overall //! helps with clarity as we're also referring to `char` intentionally in here. 
-use crate::fmt::{self, Write}; +use crate::fmt; use crate::mem::transmute; /// One of the 128 Unicode characters from U+0000 through U+007F, @@ -583,9 +583,10 @@ impl fmt::Display for AsciiChar { #[unstable(feature = "ascii_char", issue = "110998")] impl fmt::Debug for AsciiChar { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - #[inline] - fn backslash(a: AsciiChar) -> ([AsciiChar; 4], u8) { - ([AsciiChar::ReverseSolidus, a, AsciiChar::Null, AsciiChar::Null], 2) + use AsciiChar::{Apostrophe, Null, ReverseSolidus as Backslash}; + + fn backslash(a: AsciiChar) -> ([AsciiChar; 6], usize) { + ([Apostrophe, Backslash, a, Apostrophe, Null, Null], 4) } let (buf, len) = match self { @@ -595,24 +596,17 @@ impl fmt::Debug for AsciiChar { AsciiChar::LineFeed => backslash(AsciiChar::SmallN), AsciiChar::ReverseSolidus => backslash(AsciiChar::ReverseSolidus), AsciiChar::Apostrophe => backslash(AsciiChar::Apostrophe), - _ => { - let byte = self.to_u8(); - if !byte.is_ascii_control() { - ([*self, AsciiChar::Null, AsciiChar::Null, AsciiChar::Null], 1) - } else { - const HEX_DIGITS: [AsciiChar; 16] = *b"0123456789abcdef".as_ascii().unwrap(); + _ if self.to_u8().is_ascii_control() => { + const HEX_DIGITS: [AsciiChar; 16] = *b"0123456789abcdef".as_ascii().unwrap(); - let hi = HEX_DIGITS[usize::from(byte >> 4)]; - let lo = HEX_DIGITS[usize::from(byte & 0xf)]; - ([AsciiChar::ReverseSolidus, AsciiChar::SmallX, hi, lo], 4) - } + let byte = self.to_u8(); + let hi = HEX_DIGITS[usize::from(byte >> 4)]; + let lo = HEX_DIGITS[usize::from(byte & 0xf)]; + ([Apostrophe, Backslash, AsciiChar::SmallX, hi, lo, Apostrophe], 6) } + _ => ([Apostrophe, *self, Apostrophe, Null, Null, Null], 3), }; - f.write_char('\'')?; - for byte in &buf[..len as usize] { - f.write_str(byte.as_str())?; - } - f.write_char('\'') + f.write_str(buf[..len].as_str()) } } diff --git a/library/core/src/clone.rs b/library/core/src/clone.rs index 76a89eaaff86e..2150463067205 100644 --- a/library/core/src/clone.rs +++ b/library/core/src/clone.rs @@ -36,8 +36,7 @@ #![stable(feature = "rust1", since = "1.0.0")] -use crate::mem::{self, MaybeUninit}; -use crate::ptr; +mod uninit; /// A common trait for the ability to explicitly duplicate an object. /// @@ -248,7 +247,7 @@ pub unsafe trait CloneToUninit { /// * `dst` must be properly aligned. /// * `dst` must have the same [pointer metadata] (slice length or `dyn` vtable) as `self`. /// - /// [valid]: ptr#safety + /// [valid]: crate::ptr#safety /// [pointer metadata]: crate::ptr::metadata() /// /// # Panics @@ -272,124 +271,42 @@ pub unsafe trait CloneToUninit { #[unstable(feature = "clone_to_uninit", issue = "126799")] unsafe impl CloneToUninit for T { - default unsafe fn clone_to_uninit(&self, dst: *mut Self) { - // SAFETY: The safety conditions of clone_to_uninit() are a superset of those of - // ptr::write(). - unsafe { - // We hope the optimizer will figure out to create the cloned value in-place, - // skipping ever storing it on the stack and the copy to the destination. - ptr::write(dst, self.clone()); - } - } -} - -// Specialized implementation for types that are [`Copy`], not just [`Clone`], -// and can therefore be copied bitwise. -#[unstable(feature = "clone_to_uninit", issue = "126799")] -unsafe impl CloneToUninit for T { + #[inline] unsafe fn clone_to_uninit(&self, dst: *mut Self) { - // SAFETY: The safety conditions of clone_to_uninit() are a superset of those of - // ptr::copy_nonoverlapping(). 
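For reference, the observable output of the `Debug for AsciiChar` rewrite above stays the same; the apostrophes are simply written through the single buffer now instead of separate `write_char` calls. A small sketch of the expected formatting, assuming the nightly `ascii_char` and `ascii_char_variants` gates (the latter is not part of this diff):

#![feature(ascii_char, ascii_char_variants)]
use core::ascii::Char;

fn main() {
    // Printable characters are wrapped in apostrophes as-is, the escape-worthy
    // ones get a backslash escape, and other control characters fall back to
    // a \xNN hex escape.
    assert_eq!(format!("{:?}", Char::SmallA), "'a'");
    assert_eq!(format!("{:?}", Char::Apostrophe), r"'\''");
    assert_eq!(format!("{:?}", Char::Bell), r"'\x07'");
}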
- unsafe { - ptr::copy_nonoverlapping(self, dst, 1); - } + // SAFETY: we're calling a specialization with the same contract + unsafe { ::clone_one(self, dst) } } } #[unstable(feature = "clone_to_uninit", issue = "126799")] unsafe impl CloneToUninit for [T] { + #[inline] #[cfg_attr(debug_assertions, track_caller)] - default unsafe fn clone_to_uninit(&self, dst: *mut Self) { - let len = self.len(); - // This is the most likely mistake to make, so check it as a debug assertion. - debug_assert_eq!( - len, - dst.len(), - "clone_to_uninit() source and destination must have equal lengths", - ); - - // SAFETY: The produced `&mut` is valid because: - // * The caller is obligated to provide a pointer which is valid for writes. - // * All bytes pointed to are in MaybeUninit, so we don't care about the memory's - // initialization status. - let uninit_ref = unsafe { &mut *(dst as *mut [MaybeUninit]) }; - - // Copy the elements - let mut initializing = InitializingSlice::from_fully_uninit(uninit_ref); - for element_ref in self.iter() { - // If the clone() panics, `initializing` will take care of the cleanup. - initializing.push(element_ref.clone()); - } - // If we reach here, then the entire slice is initialized, and we've satisfied our - // responsibilities to the caller. Disarm the cleanup guard by forgetting it. - mem::forget(initializing); + unsafe fn clone_to_uninit(&self, dst: *mut Self) { + // SAFETY: we're calling a specialization with the same contract + unsafe { ::clone_slice(self, dst) } } } #[unstable(feature = "clone_to_uninit", issue = "126799")] -unsafe impl CloneToUninit for [T] { +unsafe impl CloneToUninit for str { + #[inline] #[cfg_attr(debug_assertions, track_caller)] unsafe fn clone_to_uninit(&self, dst: *mut Self) { - let len = self.len(); - // This is the most likely mistake to make, so check it as a debug assertion. - debug_assert_eq!( - len, - dst.len(), - "clone_to_uninit() source and destination must have equal lengths", - ); - - // SAFETY: The safety conditions of clone_to_uninit() are a superset of those of - // ptr::copy_nonoverlapping(). - unsafe { - ptr::copy_nonoverlapping(self.as_ptr(), dst.as_mut_ptr(), len); - } + // SAFETY: str is just a [u8] with UTF-8 invariant + unsafe { self.as_bytes().clone_to_uninit(dst as *mut [u8]) } } } -/// Ownership of a collection of values stored in a non-owned `[MaybeUninit]`, some of which -/// are not yet initialized. This is sort of like a `Vec` that doesn't own its allocation. -/// Its responsibility is to provide cleanup on unwind by dropping the values that *are* -/// initialized, unless disarmed by forgetting. -/// -/// This is a helper for `impl CloneToUninit for [T]`. -struct InitializingSlice<'a, T> { - data: &'a mut [MaybeUninit], - /// Number of elements of `*self.data` that are initialized. - initialized_len: usize, -} - -impl<'a, T> InitializingSlice<'a, T> { - #[inline] - fn from_fully_uninit(data: &'a mut [MaybeUninit]) -> Self { - Self { data, initialized_len: 0 } - } - - /// Push a value onto the end of the initialized part of the slice. - /// - /// # Panics - /// - /// Panics if the slice is already fully initialized. 
- #[inline] - fn push(&mut self, value: T) { - MaybeUninit::write(&mut self.data[self.initialized_len], value); - self.initialized_len += 1; - } -} - -impl<'a, T> Drop for InitializingSlice<'a, T> { - #[cold] // will only be invoked on unwind - fn drop(&mut self) { - let initialized_slice = ptr::slice_from_raw_parts_mut( - MaybeUninit::slice_as_mut_ptr(self.data), - self.initialized_len, - ); - // SAFETY: - // * the pointer is valid because it was made from a mutable reference - // * `initialized_len` counts the initialized elements as an invariant of this type, - // so each of the pointed-to elements is initialized and may be dropped. - unsafe { - ptr::drop_in_place::<[T]>(initialized_slice); - } +#[unstable(feature = "clone_to_uninit", issue = "126799")] +unsafe impl CloneToUninit for crate::ffi::CStr { + #[cfg_attr(debug_assertions, track_caller)] + unsafe fn clone_to_uninit(&self, dst: *mut Self) { + // SAFETY: For now, CStr is just a #[repr(transparent)] [c_char] with some invariants. + // And we can cast [c_char] to [u8] on all supported platforms (see: to_bytes_with_nul). + // The pointer metadata properly preserves the length (NUL included). + // See: `cstr_metadata_is_length_with_nul` in tests. + unsafe { self.to_bytes_with_nul().clone_to_uninit(dst as *mut [u8]) } } } diff --git a/library/core/src/clone/uninit.rs b/library/core/src/clone/uninit.rs new file mode 100644 index 0000000000000..8b738bec796de --- /dev/null +++ b/library/core/src/clone/uninit.rs @@ -0,0 +1,128 @@ +use crate::mem::{self, MaybeUninit}; +use crate::ptr; + +/// Private specialization trait used by CloneToUninit, as per +/// [the dev guide](https://std-dev-guide.rust-lang.org/policy/specialization.html). +pub(super) unsafe trait CopySpec: Clone { + unsafe fn clone_one(src: &Self, dst: *mut Self); + unsafe fn clone_slice(src: &[Self], dst: *mut [Self]); +} + +unsafe impl<T: Clone> CopySpec for T { + #[inline] + default unsafe fn clone_one(src: &Self, dst: *mut Self) { + // SAFETY: The safety conditions of clone_to_uninit() are a superset of those of + // ptr::write(). + unsafe { + // We hope the optimizer will figure out to create the cloned value in-place, + // skipping ever storing it on the stack and the copy to the destination. + ptr::write(dst, src.clone()); + } + } + + #[inline] + #[cfg_attr(debug_assertions, track_caller)] + default unsafe fn clone_slice(src: &[Self], dst: *mut [Self]) { + let len = src.len(); + // This is the most likely mistake to make, so check it as a debug assertion. + debug_assert_eq!( + len, + dst.len(), + "clone_to_uninit() source and destination must have equal lengths", + ); + + // SAFETY: The produced `&mut` is valid because: + // * The caller is obligated to provide a pointer which is valid for writes. + // * All bytes pointed to are in MaybeUninit, so we don't care about the memory's + // initialization status. + let uninit_ref = unsafe { &mut *(dst as *mut [MaybeUninit<T>]) }; + + // Copy the elements + let mut initializing = InitializingSlice::from_fully_uninit(uninit_ref); + for element_ref in src { + // If the clone() panics, `initializing` will take care of the cleanup. + initializing.push(element_ref.clone()); + } + // If we reach here, then the entire slice is initialized, and we've satisfied our + // responsibilities to the caller. Disarm the cleanup guard by forgetting it. + mem::forget(initializing); + } +} + +// Specialized implementation for types that are [`Copy`], not just [`Clone`], +// and can therefore be copied bitwise.
+unsafe impl CopySpec for T { + #[inline] + unsafe fn clone_one(src: &Self, dst: *mut Self) { + // SAFETY: The safety conditions of clone_to_uninit() are a superset of those of + // ptr::copy_nonoverlapping(). + unsafe { + ptr::copy_nonoverlapping(src, dst, 1); + } + } + + #[inline] + #[cfg_attr(debug_assertions, track_caller)] + unsafe fn clone_slice(src: &[Self], dst: *mut [Self]) { + let len = src.len(); + // This is the most likely mistake to make, so check it as a debug assertion. + debug_assert_eq!( + len, + dst.len(), + "clone_to_uninit() source and destination must have equal lengths", + ); + + // SAFETY: The safety conditions of clone_to_uninit() are a superset of those of + // ptr::copy_nonoverlapping(). + unsafe { + ptr::copy_nonoverlapping(src.as_ptr(), dst.as_mut_ptr(), len); + } + } +} + +/// Ownership of a collection of values stored in a non-owned `[MaybeUninit]`, some of which +/// are not yet initialized. This is sort of like a `Vec` that doesn't own its allocation. +/// Its responsibility is to provide cleanup on unwind by dropping the values that *are* +/// initialized, unless disarmed by forgetting. +/// +/// This is a helper for `impl CloneToUninit for [T]`. +struct InitializingSlice<'a, T> { + data: &'a mut [MaybeUninit], + /// Number of elements of `*self.data` that are initialized. + initialized_len: usize, +} + +impl<'a, T> InitializingSlice<'a, T> { + #[inline] + fn from_fully_uninit(data: &'a mut [MaybeUninit]) -> Self { + Self { data, initialized_len: 0 } + } + + /// Push a value onto the end of the initialized part of the slice. + /// + /// # Panics + /// + /// Panics if the slice is already fully initialized. + #[inline] + fn push(&mut self, value: T) { + MaybeUninit::write(&mut self.data[self.initialized_len], value); + self.initialized_len += 1; + } +} + +impl<'a, T> Drop for InitializingSlice<'a, T> { + #[cold] // will only be invoked on unwind + fn drop(&mut self) { + let initialized_slice = ptr::slice_from_raw_parts_mut( + MaybeUninit::slice_as_mut_ptr(self.data), + self.initialized_len, + ); + // SAFETY: + // * the pointer is valid because it was made from a mutable reference + // * `initialized_len` counts the initialized elements as an invariant of this type, + // so each of the pointed-to elements is initialized and may be dropped. + unsafe { + ptr::drop_in_place::<[T]>(initialized_slice); + } + } +} diff --git a/library/core/src/fmt/builders.rs b/library/core/src/fmt/builders.rs index 794ca1851b13d..467fa17a6f367 100644 --- a/library/core/src/fmt/builders.rs +++ b/library/core/src/fmt/builders.rs @@ -1018,7 +1018,8 @@ impl<'a, 'b: 'a> DebugMap<'a, 'b> { } } -/// Implements [`fmt::Debug`] and [`fmt::Display`] using a function. +/// Creates a type whose [`fmt::Debug`] and [`fmt::Display`] impls are provided with the function +/// `f`. /// /// # Examples /// @@ -1030,17 +1031,25 @@ impl<'a, 'b: 'a> DebugMap<'a, 'b> { /// assert_eq!(format!("{}", value), "a"); /// assert_eq!(format!("{:?}", value), "'a'"); /// -/// let wrapped = fmt::FormatterFn(|f| write!(f, "{value:?}")); +/// let wrapped = fmt::from_fn(|f| write!(f, "{value:?}")); /// assert_eq!(format!("{}", wrapped), "'a'"); /// assert_eq!(format!("{:?}", wrapped), "'a'"); /// ``` #[unstable(feature = "debug_closure_helpers", issue = "117729")] -pub struct FormatterFn(pub F) +pub fn from_fn) -> fmt::Result>(f: F) -> FromFn { + FromFn(f) +} + +/// Implements [`fmt::Debug`] and [`fmt::Display`] using a function. +/// +/// Created with [`from_fn`]. 
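A brief usage sketch of the renamed helper may help here. The full signature (with the generics restored) is `pub fn from_fn<F: Fn(&mut fmt::Formatter<'_>) -> fmt::Result>(f: F) -> FromFn<F>`, and a typical use is building a lazily formatted field without allocating a `String`; the `Matrix` type below is illustrative only (nightly, `debug_closure_helpers`):

#![feature(debug_closure_helpers)]
use core::fmt;

struct Matrix {
    rows: usize,
    cols: usize,
}

impl fmt::Debug for Matrix {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // The closure is only run when the field is actually formatted.
        let dims = fmt::from_fn(|f| write!(f, "{}x{}", self.rows, self.cols));
        f.debug_struct("Matrix").field("dims", &dims).finish()
    }
}

fn main() {
    println!("{:?}", Matrix { rows: 3, cols: 4 }); // Matrix { dims: 3x4 }
}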
+#[unstable(feature = "debug_closure_helpers", issue = "117729")] +pub struct FromFn(F) where F: Fn(&mut fmt::Formatter<'_>) -> fmt::Result; #[unstable(feature = "debug_closure_helpers", issue = "117729")] -impl fmt::Debug for FormatterFn +impl fmt::Debug for FromFn where F: Fn(&mut fmt::Formatter<'_>) -> fmt::Result, { @@ -1050,7 +1059,7 @@ where } #[unstable(feature = "debug_closure_helpers", issue = "117729")] -impl fmt::Display for FormatterFn +impl fmt::Display for FromFn where F: Fn(&mut fmt::Formatter<'_>) -> fmt::Result, { diff --git a/library/core/src/fmt/mod.rs b/library/core/src/fmt/mod.rs index 60c0dc7685253..45c2b6a6a0f73 100644 --- a/library/core/src/fmt/mod.rs +++ b/library/core/src/fmt/mod.rs @@ -34,7 +34,7 @@ pub enum Alignment { } #[unstable(feature = "debug_closure_helpers", issue = "117729")] -pub use self::builders::FormatterFn; +pub use self::builders::{from_fn, FromFn}; #[stable(feature = "debug_builders", since = "1.2.0")] pub use self::builders::{DebugList, DebugMap, DebugSet, DebugStruct, DebugTuple}; @@ -1626,6 +1626,11 @@ impl<'a> Formatter<'a> { self.buf.write_str(data) } + /// Glue for usage of the [`write!`] macro with implementors of this trait. + /// + /// This method should generally not be invoked manually, but rather through + /// the [`write!`] macro itself. + /// /// Writes some formatted information into this instance. /// /// # Examples diff --git a/library/core/src/future/ready.rs b/library/core/src/future/ready.rs index a07b63fb62b90..6f6da8ce51ddf 100644 --- a/library/core/src/future/ready.rs +++ b/library/core/src/future/ready.rs @@ -34,13 +34,12 @@ impl Ready { /// # Examples /// /// ``` - /// #![feature(ready_into_inner)] /// use std::future; /// /// let a = future::ready(1); /// assert_eq!(a.into_inner(), 1); /// ``` - #[unstable(feature = "ready_into_inner", issue = "101196")] + #[stable(feature = "ready_into_inner", since = "CURRENT_RUSTC_VERSION")] #[must_use] #[inline] pub fn into_inner(self) -> T { diff --git a/library/core/src/intrinsics.rs b/library/core/src/intrinsics.rs index 6f1e0cb747194..bd99e90376aa5 100644 --- a/library/core/src/intrinsics.rs +++ b/library/core/src/intrinsics.rs @@ -2675,12 +2675,12 @@ extern "rust-intrinsic" { #[rustc_nounwind] pub fn catch_unwind(try_fn: fn(*mut u8), data: *mut u8, catch_fn: fn(*mut u8, *mut u8)) -> i32; - /// Emits a `!nontemporal` store according to LLVM (see their docs). - /// Probably will never become stable. + /// Emits a `nontemporal` store, which gives a hint to the CPU that the data should not be held + /// in cache. Except for performance, this is fully equivalent to `ptr.write(val)`. /// - /// Do NOT use this intrinsic; "nontemporal" operations do not exist in our memory model! - /// It exists to support current stdarch, but the plan is to change stdarch and remove this intrinsic. - /// See for some more discussion. + /// Not all architectures provide such an operation. For instance, x86 does not: while `MOVNT` + /// exists, that operation is *not* equivalent to `ptr.write(val)` (`MOVNT` writes can be reordered + /// in ways that are not allowed for regular writes). 
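To make the reworded documentation concrete, here is a minimal sketch of how the intrinsic is called, assuming a nightly toolchain with `core_intrinsics` enabled (this is an internal, perma-unstable intrinsic, so the snippet is illustrative rather than a recommended API):

#![feature(core_intrinsics)]
#![allow(internal_features)]
use core::intrinsics::nontemporal_store;

// Zero a buffer while hinting that the stores need not stay resident in
// cache. Aside from that hint, each store is equivalent to `ptr.write(0)`.
fn zero_streaming(buf: &mut [u32]) {
    for slot in buf.iter_mut() {
        unsafe { nontemporal_store(slot as *mut u32, 0) };
    }
}

fn main() {
    let mut buf = vec![1u32; 8];
    zero_streaming(&mut buf);
    assert!(buf.iter().all(|&x| x == 0));
}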
#[rustc_nounwind] pub fn nontemporal_store(ptr: *mut T, val: T); diff --git a/library/core/src/iter/adapters/take.rs b/library/core/src/iter/adapters/take.rs index 297dd0acaddc1..4c8f9fe16da0f 100644 --- a/library/core/src/iter/adapters/take.rs +++ b/library/core/src/iter/adapters/take.rs @@ -317,3 +317,60 @@ impl SpecTake for Take { } } } + +#[stable(feature = "exact_size_take_repeat", since = "CURRENT_RUSTC_VERSION")] +impl DoubleEndedIterator for Take> { + #[inline] + fn next_back(&mut self) -> Option { + self.next() + } + + #[inline] + fn nth_back(&mut self, n: usize) -> Option { + self.nth(n) + } + + #[inline] + fn try_rfold(&mut self, init: Acc, fold: Fold) -> R + where + Self: Sized, + Fold: FnMut(Acc, Self::Item) -> R, + R: Try, + { + self.try_fold(init, fold) + } + + #[inline] + fn rfold(self, init: Acc, fold: Fold) -> Acc + where + Self: Sized, + Fold: FnMut(Acc, Self::Item) -> Acc, + { + self.fold(init, fold) + } + + #[inline] + #[rustc_inherit_overflow_checks] + fn advance_back_by(&mut self, n: usize) -> Result<(), NonZero> { + self.advance_by(n) + } +} + +// Note: It may be tempting to impl DoubleEndedIterator for Take. +// One must fight that temptation since such implementation wouldn’t be correct +// because we have no way to return value of nth invocation of repeater followed +// by n-1st without remembering all results. + +#[stable(feature = "exact_size_take_repeat", since = "CURRENT_RUSTC_VERSION")] +impl ExactSizeIterator for Take> { + fn len(&self) -> usize { + self.n + } +} + +#[stable(feature = "exact_size_take_repeat", since = "CURRENT_RUSTC_VERSION")] +impl A, A> ExactSizeIterator for Take> { + fn len(&self) -> usize { + self.n + } +} diff --git a/library/core/src/lib.rs b/library/core/src/lib.rs index 07daa32afa8a3..e3640627c562a 100644 --- a/library/core/src/lib.rs +++ b/library/core/src/lib.rs @@ -170,6 +170,7 @@ #![feature(internal_impls_macro)] #![feature(ip)] #![feature(is_ascii_octdigit)] +#![feature(is_val_statically_known)] #![feature(isqrt)] #![feature(link_cfg)] #![feature(offset_of_enum)] @@ -192,12 +193,12 @@ // // Language features: // tidy-alphabetical-start +#![cfg_attr(bootstrap, feature(asm_const))] #![cfg_attr(bootstrap, feature(min_exhaustive_patterns))] #![feature(abi_unadjusted)] #![feature(adt_const_params)] #![feature(allow_internal_unsafe)] #![feature(allow_internal_unstable)] -#![feature(asm_const)] #![feature(auto_traits)] #![feature(cfg_sanitize)] #![feature(cfg_target_has_atomic)] diff --git a/library/core/src/marker.rs b/library/core/src/marker.rs index 6a83ec2eb1e0e..374fa086aecb7 100644 --- a/library/core/src/marker.rs +++ b/library/core/src/marker.rs @@ -1060,7 +1060,7 @@ pub trait FnPtr: Copy + Clone { } /// Derive macro generating impls of traits related to smart pointers. 
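A hedged sketch of what the `attributes(pointee)` registration in the change below enables for users of the derive; the `MyRef` type is illustrative, and the exact requirements of the unstable derive (a single data field, `#[repr(transparent)]`) are assumptions rather than something stated in this diff:

#![feature(derive_smart_pointer)]
use core::marker::SmartPointer;

// `#[pointee]` marks which type parameter the pointer logically points to,
// so the derive can emit the unsizing impls (CoerceUnsized, DispatchFromDyn).
#[derive(SmartPointer)]
#[repr(transparent)]
struct MyRef<'a, #[pointee] T: ?Sized> {
    inner: &'a T,
}

fn main() {
    let concrete = MyRef { inner: &5i32 };
    // Unsized coercion derived for the `#[pointee]` parameter:
    let _erased: MyRef<'_, dyn std::fmt::Debug> = concrete;
}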
-#[rustc_builtin_macro] +#[rustc_builtin_macro(SmartPointer, attributes(pointee))] #[allow_internal_unstable(dispatch_from_dyn, coerce_unsized, unsize)] #[unstable(feature = "derive_smart_pointer", issue = "123430")] pub macro SmartPointer($item:item) { diff --git a/library/core/src/mem/mod.rs b/library/core/src/mem/mod.rs index ea2dcdce6e89e..7a9ca4011be84 100644 --- a/library/core/src/mem/mod.rs +++ b/library/core/src/mem/mod.rs @@ -5,6 +5,7 @@ #![stable(feature = "rust1", since = "1.0.0")] +use crate::alloc::Layout; use crate::marker::DiscriminantKind; use crate::{clone, cmp, fmt, hash, intrinsics, ptr}; @@ -1238,6 +1239,10 @@ pub trait SizedTypeProperties: Sized { #[doc(hidden)] #[unstable(feature = "sized_type_properties", issue = "none")] const IS_ZST: bool = size_of::() == 0; + + #[doc(hidden)] + #[unstable(feature = "sized_type_properties", issue = "none")] + const LAYOUT: Layout = Layout::new::(); } #[doc(hidden)] #[unstable(feature = "sized_type_properties", issue = "none")] diff --git a/library/core/src/net/ip_addr.rs b/library/core/src/net/ip_addr.rs index 3e036b88128c7..919f681f911f9 100644 --- a/library/core/src/net/ip_addr.rs +++ b/library/core/src/net/ip_addr.rs @@ -1,6 +1,7 @@ use super::display_buffer::DisplayBuffer; use crate::cmp::Ordering; use crate::fmt::{self, Write}; +use crate::hash::{Hash, Hasher}; use crate::iter; use crate::mem::transmute; use crate::ops::{BitAnd, BitAndAssign, BitOr, BitOrAssign, Not}; @@ -67,12 +68,22 @@ pub enum IpAddr { /// assert!("0000000.0.0.0".parse::().is_err()); // first octet is a zero in octal /// assert!("0xcb.0x0.0x71.0x00".parse::().is_err()); // all octets are in hex /// ``` -#[derive(Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Copy, Clone, PartialEq, Eq)] #[stable(feature = "rust1", since = "1.0.0")] pub struct Ipv4Addr { octets: [u8; 4], } +#[stable(feature = "rust1", since = "1.0.0")] +impl Hash for Ipv4Addr { + fn hash(&self, state: &mut H) { + // Hashers are often more efficient at hashing a fixed-width integer + // than a bytestring, so convert before hashing. We don't use to_bits() + // here as that may involve a byteswap which is unnecessary. + u32::from_ne_bytes(self.octets).hash(state); + } +} + /// An IPv6 address. /// /// IPv6 addresses are defined as 128-bit integers in [IETF RFC 4291]. @@ -149,12 +160,22 @@ pub struct Ipv4Addr { /// assert_eq!("::1".parse(), Ok(localhost)); /// assert_eq!(localhost.is_loopback(), true); /// ``` -#[derive(Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Copy, Clone, PartialEq, Eq)] #[stable(feature = "rust1", since = "1.0.0")] pub struct Ipv6Addr { octets: [u8; 16], } +#[stable(feature = "rust1", since = "1.0.0")] +impl Hash for Ipv6Addr { + fn hash(&self, state: &mut H) { + // Hashers are often more efficient at hashing a fixed-width integer + // than a bytestring, so convert before hashing. We don't use to_bits() + // here as that may involve unnecessary byteswaps. + u128::from_ne_bytes(self.octets).hash(state); + } +} + /// Scope of an [IPv6 multicast address] as defined in [IETF RFC 7346 section 2]. /// /// # Stability Guarantees diff --git a/library/core/src/num/f128.rs b/library/core/src/num/f128.rs index 0c04f47fe7df1..38e69e7641ab4 100644 --- a/library/core/src/num/f128.rs +++ b/library/core/src/num/f128.rs @@ -290,7 +290,7 @@ impl f128 { #[inline] #[rustc_const_unstable(feature = "const_float_classify", issue = "72505")] pub(crate) const fn abs_private(self) -> f128 { - // SAFETY: This transmutation is fine. Probably. For the reasons std is using it. 
+ // SAFETY: This transmutation is fine just like in `to_bits`/`from_bits`. unsafe { mem::transmute::(mem::transmute::(self) & !Self::SIGN_MASK) } @@ -439,22 +439,12 @@ impl f128 { #[unstable(feature = "f128", issue = "116909")] #[rustc_const_unstable(feature = "const_float_classify", issue = "72505")] pub const fn classify(self) -> FpCategory { - // Other float types cannot use a bitwise classify because they may suffer a variety - // of errors if the backend chooses to cast to different float types (x87). `f128` cannot - // fit into any other float types so this is not a concern, and we rely on bit patterns. + // Other float types suffer from various platform bugs that violate the usual IEEE semantics + // and also make bitwise classification not always work reliably. However, `f128` cannot fit + // into any other float types so this is not a concern, and we can rely on bit patterns. - // SAFETY: POD bitcast, same as in `to_bits`. - let bits = unsafe { mem::transmute::(self) }; - Self::classify_bits(bits) - } - - /// This operates on bits, and only bits, so it can ignore concerns about weird FPUs. - /// FIXME(jubilee): In a just world, this would be the entire impl for classify, - /// plus a transmute. We do not live in a just world, but we can make it more so. - #[inline] - #[rustc_const_unstable(feature = "const_float_classify", issue = "72505")] - const fn classify_bits(b: u128) -> FpCategory { - match (b & Self::MAN_MASK, b & Self::EXP_MASK) { + let bits = self.to_bits(); + match (bits & Self::MAN_MASK, bits & Self::EXP_MASK) { (0, Self::EXP_MASK) => FpCategory::Infinite, (_, Self::EXP_MASK) => FpCategory::Nan, (0, 0) => FpCategory::Zero, @@ -922,48 +912,7 @@ impl f128 { #[must_use = "this returns the result of the operation, without modifying the original"] pub const fn to_bits(self) -> u128 { // SAFETY: `u128` is a plain old datatype so we can always transmute to it. - // ...sorta. - // - // It turns out that at runtime, it is possible for a floating point number - // to be subject to a floating point mode that alters nonzero subnormal numbers - // to zero on reads and writes, aka "denormals are zero" and "flush to zero". - // - // And, of course evaluating to a NaN value is fairly nondeterministic. - // More precisely: when NaN should be returned is knowable, but which NaN? - // So far that's defined by a combination of LLVM and the CPU, not Rust. - // This function, however, allows observing the bitstring of a NaN, - // thus introspection on CTFE. - // - // In order to preserve, at least for the moment, const-to-runtime equivalence, - // we reject any of these possible situations from happening. - #[inline] - #[rustc_const_unstable(feature = "const_float_bits_conv", issue = "72447")] - const fn ct_f128_to_u128(ct: f128) -> u128 { - // FIXME(f16_f128): we should use `.classify()` like `f32` and `f64`, but that - // is not available on all platforms (needs `netf2` and `unordtf2`). So classify - // the bits instead. 
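Since classification now relies purely on bit patterns, the same decision table can be written out explicitly. The sketch below does so for `f32` with the standard IEEE-754 single-precision masks (the constants are spelled out here for illustration, not taken from this diff):

use std::num::FpCategory;

const EXP_MASK: u32 = 0x7f80_0000;
const MAN_MASK: u32 = 0x007f_ffff;

fn classify_bits(bits: u32) -> FpCategory {
    match (bits & MAN_MASK, bits & EXP_MASK) {
        (0, EXP_MASK) => FpCategory::Infinite,
        (_, EXP_MASK) => FpCategory::Nan,
        (0, 0) => FpCategory::Zero,
        (_, 0) => FpCategory::Subnormal,
        _ => FpCategory::Normal,
    }
}

fn main() {
    assert_eq!(classify_bits(1.0f32.to_bits()), FpCategory::Normal);
    assert_eq!(classify_bits(f32::INFINITY.to_bits()), FpCategory::Infinite);
    assert_eq!(classify_bits(0u32), FpCategory::Zero);
    assert_eq!(classify_bits(1u32), FpCategory::Subnormal);
}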
- - // SAFETY: this is a POD transmutation - let bits = unsafe { mem::transmute::(ct) }; - match f128::classify_bits(bits) { - FpCategory::Nan => { - panic!("const-eval error: cannot use f128::to_bits on a NaN") - } - FpCategory::Subnormal => { - panic!("const-eval error: cannot use f128::to_bits on a subnormal number") - } - FpCategory::Infinite | FpCategory::Normal | FpCategory::Zero => bits, - } - } - - #[inline(always)] // See https://github.com/rust-lang/compiler-builtins/issues/491 - fn rt_f128_to_u128(x: f128) -> u128 { - // SAFETY: `u128` is a plain old datatype so we can always... uh... - // ...look, just pretend you forgot what you just read. - // Stability concerns. - unsafe { mem::transmute(x) } - } - intrinsics::const_eval_select((self,), ct_f128_to_u128, rt_f128_to_u128) + unsafe { mem::transmute(self) } } /// Raw transmutation from `u128`. @@ -1011,49 +960,8 @@ impl f128 { #[rustc_const_unstable(feature = "const_float_bits_conv", issue = "72447")] pub const fn from_bits(v: u128) -> Self { // It turns out the safety issues with sNaN were overblown! Hooray! - // SAFETY: `u128` is a plain old datatype so we can always transmute from it - // ...sorta. - // - // It turns out that at runtime, it is possible for a floating point number - // to be subject to floating point modes that alter nonzero subnormal numbers - // to zero on reads and writes, aka "denormals are zero" and "flush to zero". - // This is not a problem usually, but at least one tier2 platform for Rust - // actually exhibits this behavior by default: thumbv7neon - // aka "the Neon FPU in AArch32 state" - // - // And, of course evaluating to a NaN value is fairly nondeterministic. - // More precisely: when NaN should be returned is knowable, but which NaN? - // So far that's defined by a combination of LLVM and the CPU, not Rust. - // This function, however, allows observing the bitstring of a NaN, - // thus introspection on CTFE. - // - // In order to preserve, at least for the moment, const-to-runtime equivalence, - // reject any of these possible situations from happening. - #[inline] - #[rustc_const_unstable(feature = "const_float_bits_conv", issue = "72447")] - const fn ct_u128_to_f128(ct: u128) -> f128 { - match f128::classify_bits(ct) { - FpCategory::Subnormal => { - panic!("const-eval error: cannot use f128::from_bits on a subnormal number") - } - FpCategory::Nan => { - panic!("const-eval error: cannot use f128::from_bits on NaN") - } - FpCategory::Infinite | FpCategory::Normal | FpCategory::Zero => { - // SAFETY: It's not a frumious number - unsafe { mem::transmute::(ct) } - } - } - } - - #[inline(always)] // See https://github.com/rust-lang/compiler-builtins/issues/491 - fn rt_u128_to_f128(x: u128) -> f128 { - // SAFETY: `u128` is a plain old datatype so we can always... uh... - // ...look, just pretend you forgot what you just read. - // Stability concerns. - unsafe { mem::transmute(x) } - } - intrinsics::const_eval_select((v,), ct_u128_to_f128, rt_u128_to_f128) + // SAFETY: `u128` is a plain old datatype so we can always transmute from it. 
+ unsafe { mem::transmute(v) } } /// Returns the memory representation of this floating point number as a byte array in diff --git a/library/core/src/num/f16.rs b/library/core/src/num/f16.rs index e5b1148e19215..41bd34a270238 100644 --- a/library/core/src/num/f16.rs +++ b/library/core/src/num/f16.rs @@ -284,7 +284,7 @@ impl f16 { #[inline] #[rustc_const_unstable(feature = "const_float_classify", issue = "72505")] pub(crate) const fn abs_private(self) -> f16 { - // SAFETY: This transmutation is fine. Probably. For the reasons std is using it. + // SAFETY: This transmutation is fine just like in `to_bits`/`from_bits`. unsafe { mem::transmute::(mem::transmute::(self) & !Self::SIGN_MASK) } } @@ -426,15 +426,15 @@ impl f16 { pub const fn classify(self) -> FpCategory { // A previous implementation for f32/f64 tried to only use bitmask-based checks, // using `to_bits` to transmute the float to its bit repr and match on that. - // Unfortunately, floating point numbers can be much worse than that. - // This also needs to not result in recursive evaluations of `to_bits`. + // If we only cared about being "technically" correct, that's an entirely legit + // implementation. // - - // Platforms without native support generally convert to `f32` to perform operations, - // and most of these platforms correctly round back to `f16` after each operation. - // However, some platforms have bugs where they keep the excess `f32` precision (e.g. - // WASM, see llvm/llvm-project#96437). This implementation makes a best-effort attempt - // to account for that excess precision. + // Unfortunately, there are platforms out there that do not correctly implement the IEEE + // float semantics Rust relies on: some hardware flushes denormals to zero, and some + // platforms convert to `f32` to perform operations without properly rounding back (e.g. + // WASM, see llvm/llvm-project#96437). These are platforms bugs, and Rust will misbehave on + // such platforms, but we can at least try to make things seem as sane as possible by being + // careful here. if self.is_infinite() { // Thus, a value may compare unequal to infinity, despite having a "full" exponent mask. FpCategory::Infinite @@ -446,49 +446,20 @@ impl f16 { // as correctness requires avoiding equality tests that may be Subnormal == -0.0 // because it may be wrong under "denormals are zero" and "flush to zero" modes. // Most of std's targets don't use those, but they are used for thumbv7neon. - // So, this does use bitpattern matching for the rest. - - // SAFETY: f16 to u16 is fine. Usually. - // If classify has gotten this far, the value is definitely in one of these categories. - unsafe { f16::partial_classify(self) } - } - } - - /// This doesn't actually return a right answer for NaN on purpose, - /// seeing as how it cannot correctly discern between a floating point NaN, - /// and some normal floating point numbers truncated from an x87 FPU. - /// - /// # Safety - /// - /// This requires making sure you call this function for values it answers correctly on, - /// otherwise it returns a wrong answer. This is not important for memory safety per se, - /// but getting floats correct is important for not accidentally leaking const eval - /// runtime-deviating logic which may or may not be acceptable. - #[inline] - #[rustc_const_unstable(feature = "const_float_classify", issue = "72505")] - const unsafe fn partial_classify(self) -> FpCategory { - // SAFETY: The caller is not asking questions for which this will tell lies. 
- let b = unsafe { mem::transmute::(self) }; - match (b & Self::MAN_MASK, b & Self::EXP_MASK) { - (0, Self::EXP_MASK) => FpCategory::Infinite, - (0, 0) => FpCategory::Zero, - (_, 0) => FpCategory::Subnormal, - _ => FpCategory::Normal, - } - } - - /// This operates on bits, and only bits, so it can ignore concerns about weird FPUs. - /// FIXME(jubilee): In a just world, this would be the entire impl for classify, - /// plus a transmute. We do not live in a just world, but we can make it more so. - #[inline] - #[rustc_const_unstable(feature = "const_float_classify", issue = "72505")] - const fn classify_bits(b: u16) -> FpCategory { - match (b & Self::MAN_MASK, b & Self::EXP_MASK) { - (0, Self::EXP_MASK) => FpCategory::Infinite, - (_, Self::EXP_MASK) => FpCategory::Nan, - (0, 0) => FpCategory::Zero, - (_, 0) => FpCategory::Subnormal, - _ => FpCategory::Normal, + // So, this does use bitpattern matching for the rest. On x87, due to the incorrect + // float codegen on this hardware, this doesn't actually return a right answer for NaN + // because it cannot correctly discern between a floating point NaN, and some normal + // floating point numbers truncated from an x87 FPU -- but we took care of NaN above, so + // we are fine. + // FIXME(jubilee): This probably could at least answer things correctly for Infinity, + // like the f64 version does, but I need to run more checks on how things go on x86. + // I fear losing mantissa data that would have answered that differently. + let b = self.to_bits(); + match (b & Self::MAN_MASK, b & Self::EXP_MASK) { + (0, 0) => FpCategory::Zero, + (_, 0) => FpCategory::Subnormal, + _ => FpCategory::Normal, + } } } @@ -952,48 +923,7 @@ impl f16 { #[must_use = "this returns the result of the operation, without modifying the original"] pub const fn to_bits(self) -> u16 { // SAFETY: `u16` is a plain old datatype so we can always transmute to it. - // ...sorta. - // - // It turns out that at runtime, it is possible for a floating point number - // to be subject to a floating point mode that alters nonzero subnormal numbers - // to zero on reads and writes, aka "denormals are zero" and "flush to zero". - // - // And, of course evaluating to a NaN value is fairly nondeterministic. - // More precisely: when NaN should be returned is knowable, but which NaN? - // So far that's defined by a combination of LLVM and the CPU, not Rust. - // This function, however, allows observing the bitstring of a NaN, - // thus introspection on CTFE. - // - // In order to preserve, at least for the moment, const-to-runtime equivalence, - // we reject any of these possible situations from happening. - #[inline] - #[rustc_const_unstable(feature = "const_float_bits_conv", issue = "72447")] - const fn ct_f16_to_u16(ct: f16) -> u16 { - // FIXME(f16_f128): we should use `.classify()` like `f32` and `f64`, but we don't yet - // want to rely on that on all platforms because it is nondeterministic (e.g. x86 has - // convention discrepancies calling intrinsics). So just classify the bits instead. 
- - // SAFETY: this is a POD transmutation - let bits = unsafe { mem::transmute::(ct) }; - match f16::classify_bits(bits) { - FpCategory::Nan => { - panic!("const-eval error: cannot use f16::to_bits on a NaN") - } - FpCategory::Subnormal => { - panic!("const-eval error: cannot use f16::to_bits on a subnormal number") - } - FpCategory::Infinite | FpCategory::Normal | FpCategory::Zero => bits, - } - } - - #[inline(always)] // See https://github.com/rust-lang/compiler-builtins/issues/491 - fn rt_f16_to_u16(x: f16) -> u16 { - // SAFETY: `u16` is a plain old datatype so we can always... uh... - // ...look, just pretend you forgot what you just read. - // Stability concerns. - unsafe { mem::transmute(x) } - } - intrinsics::const_eval_select((self,), ct_f16_to_u16, rt_f16_to_u16) + unsafe { mem::transmute(self) } } /// Raw transmutation from `u16`. @@ -1040,49 +970,8 @@ impl f16 { #[rustc_const_unstable(feature = "const_float_bits_conv", issue = "72447")] pub const fn from_bits(v: u16) -> Self { // It turns out the safety issues with sNaN were overblown! Hooray! - // SAFETY: `u16` is a plain old datatype so we can always transmute from it - // ...sorta. - // - // It turns out that at runtime, it is possible for a floating point number - // to be subject to floating point modes that alter nonzero subnormal numbers - // to zero on reads and writes, aka "denormals are zero" and "flush to zero". - // This is not a problem usually, but at least one tier2 platform for Rust - // actually exhibits this behavior by default: thumbv7neon - // aka "the Neon FPU in AArch32 state" - // - // And, of course evaluating to a NaN value is fairly nondeterministic. - // More precisely: when NaN should be returned is knowable, but which NaN? - // So far that's defined by a combination of LLVM and the CPU, not Rust. - // This function, however, allows observing the bitstring of a NaN, - // thus introspection on CTFE. - // - // In order to preserve, at least for the moment, const-to-runtime equivalence, - // reject any of these possible situations from happening. - #[inline] - #[rustc_const_unstable(feature = "const_float_bits_conv", issue = "72447")] - const fn ct_u16_to_f16(ct: u16) -> f16 { - match f16::classify_bits(ct) { - FpCategory::Subnormal => { - panic!("const-eval error: cannot use f16::from_bits on a subnormal number") - } - FpCategory::Nan => { - panic!("const-eval error: cannot use f16::from_bits on NaN") - } - FpCategory::Infinite | FpCategory::Normal | FpCategory::Zero => { - // SAFETY: It's not a frumious number - unsafe { mem::transmute::(ct) } - } - } - } - - #[inline(always)] // See https://github.com/rust-lang/compiler-builtins/issues/491 - fn rt_u16_to_f16(x: u16) -> f16 { - // SAFETY: `u16` is a plain old datatype so we can always... uh... - // ...look, just pretend you forgot what you just read. - // Stability concerns. - unsafe { mem::transmute(x) } - } - intrinsics::const_eval_select((v,), ct_u16_to_f16, rt_u16_to_f16) + // SAFETY: `u16` is a plain old datatype so we can always transmute from it. + unsafe { mem::transmute(v) } } /// Returns the memory representation of this floating point number as a byte array in diff --git a/library/core/src/num/f32.rs b/library/core/src/num/f32.rs index 7710e23edf0ba..719727e2f1e0a 100644 --- a/library/core/src/num/f32.rs +++ b/library/core/src/num/f32.rs @@ -529,7 +529,7 @@ impl f32 { #[inline] #[rustc_const_unstable(feature = "const_float_classify", issue = "72505")] pub(crate) const fn abs_private(self) -> f32 { - // SAFETY: This transmutation is fine. 
Probably. For the reasons std is using it. + // SAFETY: This transmutation is fine just like in `to_bits`/`from_bits`. unsafe { mem::transmute::<u32, f32>(mem::transmute::<f32, u32>(self) & !Self::SIGN_MASK) } } @@ -654,18 +654,20 @@ impl f32 { pub const fn classify(self) -> FpCategory { // A previous implementation tried to only use bitmask-based checks, // using f32::to_bits to transmute the float to its bit repr and match on that. - // Unfortunately, floating point numbers can be much worse than that. - // This also needs to not result in recursive evaluations of f64::to_bits. + // If we only cared about being "technically" correct, that's an entirely legit + // implementation. + // + // Unfortunately, there is hardware out there that does not correctly implement the IEEE + // float semantics Rust relies on: x87 uses a too-large mantissa and exponent, and some + // hardware flushes subnormals to zero. These are platform bugs, and Rust will misbehave on + // such hardware, but we can at least try to make things seem as sane as possible by being + // careful here. // - // On some processors, in some cases, LLVM will "helpfully" lower floating point ops, - // in spite of a request for them using f32 and f64, to things like x87 operations. - // These have an f64's mantissa, but can have a larger than normal exponent. // FIXME(jubilee): Using x87 operations is never necessary in order to function // on x86 processors for Rust-to-Rust calls, so this issue should not happen. // Code generation should be adjusted to use non-C calling conventions, avoiding this. - // if self.is_infinite() { - // Thus, a value may compare unequal to infinity, despite having a "full" exponent mask. + // A value may compare unequal to infinity, despite having a "full" exponent mask. FpCategory::Infinite } else if self.is_nan() { // And it may not be NaN, as it can simply be an "overextended" finite value. @@ -675,48 +677,20 @@ // as correctness requires avoiding equality tests that may be Subnormal == -0.0 // because it may be wrong under "denormals are zero" and "flush to zero" modes. // Most of std's targets don't use those, but they are used for thumbv7neon. - // So, this does use bitpattern matching for the rest. - - // SAFETY: f32 to u32 is fine. Usually. - // If classify has gotten this far, the value is definitely in one of these categories. - unsafe { f32::partial_classify(self) } - } - } - - // This doesn't actually return a right answer for NaN on purpose, - // seeing as how it cannot correctly discern between a floating point NaN, - // and some normal floating point numbers truncated from an x87 FPU. - // FIXME(jubilee): This probably could at least answer things correctly for Infinity, - // like the f64 version does, but I need to run more checks on how things go on x86. - // I fear losing mantissa data that would have answered that differently. - // - // # Safety - // This requires making sure you call this function for values it answers correctly on, - // otherwise it returns a wrong answer. This is not important for memory safety per se, - // but getting floats correct is important for not accidentally leaking const eval - // runtime-deviating logic which may or may not be acceptable. - #[rustc_const_unstable(feature = "const_float_classify", issue = "72505")] - const unsafe fn partial_classify(self) -> FpCategory { - // SAFETY: The caller is not asking questions for which this will tell lies.
- let b = unsafe { mem::transmute::(self) }; - match (b & Self::MAN_MASK, b & Self::EXP_MASK) { - (0, 0) => FpCategory::Zero, - (_, 0) => FpCategory::Subnormal, - _ => FpCategory::Normal, - } - } - - // This operates on bits, and only bits, so it can ignore concerns about weird FPUs. - // FIXME(jubilee): In a just world, this would be the entire impl for classify, - // plus a transmute. We do not live in a just world, but we can make it more so. - #[rustc_const_unstable(feature = "const_float_classify", issue = "72505")] - const fn classify_bits(b: u32) -> FpCategory { - match (b & Self::MAN_MASK, b & Self::EXP_MASK) { - (0, Self::EXP_MASK) => FpCategory::Infinite, - (_, Self::EXP_MASK) => FpCategory::Nan, - (0, 0) => FpCategory::Zero, - (_, 0) => FpCategory::Subnormal, - _ => FpCategory::Normal, + // So, this does use bitpattern matching for the rest. On x87, due to the incorrect + // float codegen on this hardware, this doesn't actually return a right answer for NaN + // because it cannot correctly discern between a floating point NaN, and some normal + // floating point numbers truncated from an x87 FPU -- but we took care of NaN above, so + // we are fine. + // FIXME(jubilee): This probably could at least answer things correctly for Infinity, + // like the f64 version does, but I need to run more checks on how things go on x86. + // I fear losing mantissa data that would have answered that differently. + let b = self.to_bits(); + match (b & Self::MAN_MASK, b & Self::EXP_MASK) { + (0, 0) => FpCategory::Zero, + (_, 0) => FpCategory::Subnormal, + _ => FpCategory::Normal, + } } } @@ -1143,51 +1117,7 @@ impl f32 { #[inline] pub const fn to_bits(self) -> u32 { // SAFETY: `u32` is a plain old datatype so we can always transmute to it. - // ...sorta. - // - // It turns out that at runtime, it is possible for a floating point number - // to be subject to a floating point mode that alters nonzero subnormal numbers - // to zero on reads and writes, aka "denormals are zero" and "flush to zero". - // This is not a problem per se, but at least one tier2 platform for Rust - // actually exhibits this behavior by default. - // - // In addition, on x86 targets with SSE or SSE2 disabled and the x87 FPU enabled, - // i.e. not soft-float, the way Rust does parameter passing can actually alter - // a number that is "not infinity" to have the same exponent as infinity, - // in a slightly unpredictable manner. - // - // And, of course evaluating to a NaN value is fairly nondeterministic. - // More precisely: when NaN should be returned is knowable, but which NaN? - // So far that's defined by a combination of LLVM and the CPU, not Rust. - // This function, however, allows observing the bitstring of a NaN, - // thus introspection on CTFE. - // - // In order to preserve, at least for the moment, const-to-runtime equivalence, - // we reject any of these possible situations from happening. - #[rustc_const_unstable(feature = "const_float_bits_conv", issue = "72447")] - const fn ct_f32_to_u32(ct: f32) -> u32 { - match ct.classify() { - FpCategory::Nan => { - panic!("const-eval error: cannot use f32::to_bits on a NaN") - } - FpCategory::Subnormal => { - panic!("const-eval error: cannot use f32::to_bits on a subnormal number") - } - FpCategory::Infinite | FpCategory::Normal | FpCategory::Zero => { - // SAFETY: We have a normal floating point number. Now we transmute, i.e. do a bitcopy. 
- unsafe { mem::transmute::(ct) } - } - } - } - - #[inline(always)] // See https://github.com/rust-lang/compiler-builtins/issues/491 - fn rt_f32_to_u32(x: f32) -> u32 { - // SAFETY: `u32` is a plain old datatype so we can always... uh... - // ...look, just pretend you forgot what you just read. - // Stability concerns. - unsafe { mem::transmute(x) } - } - intrinsics::const_eval_select((self,), ct_f32_to_u32, rt_f32_to_u32) + unsafe { mem::transmute(self) } } /// Raw transmutation from `u32`. @@ -1232,53 +1162,8 @@ impl f32 { #[inline] pub const fn from_bits(v: u32) -> Self { // It turns out the safety issues with sNaN were overblown! Hooray! - // SAFETY: `u32` is a plain old datatype so we can always transmute from it - // ...sorta. - // - // It turns out that at runtime, it is possible for a floating point number - // to be subject to floating point modes that alter nonzero subnormal numbers - // to zero on reads and writes, aka "denormals are zero" and "flush to zero". - // This is not a problem usually, but at least one tier2 platform for Rust - // actually exhibits this behavior by default: thumbv7neon - // aka "the Neon FPU in AArch32 state" - // - // In addition, on x86 targets with SSE or SSE2 disabled and the x87 FPU enabled, - // i.e. not soft-float, the way Rust does parameter passing can actually alter - // a number that is "not infinity" to have the same exponent as infinity, - // in a slightly unpredictable manner. - // - // And, of course evaluating to a NaN value is fairly nondeterministic. - // More precisely: when NaN should be returned is knowable, but which NaN? - // So far that's defined by a combination of LLVM and the CPU, not Rust. - // This function, however, allows observing the bitstring of a NaN, - // thus introspection on CTFE. - // - // In order to preserve, at least for the moment, const-to-runtime equivalence, - // reject any of these possible situations from happening. - #[rustc_const_unstable(feature = "const_float_bits_conv", issue = "72447")] - const fn ct_u32_to_f32(ct: u32) -> f32 { - match f32::classify_bits(ct) { - FpCategory::Subnormal => { - panic!("const-eval error: cannot use f32::from_bits on a subnormal number") - } - FpCategory::Nan => { - panic!("const-eval error: cannot use f32::from_bits on NaN") - } - FpCategory::Infinite | FpCategory::Normal | FpCategory::Zero => { - // SAFETY: It's not a frumious number - unsafe { mem::transmute::(ct) } - } - } - } - - #[inline(always)] // See https://github.com/rust-lang/compiler-builtins/issues/491 - fn rt_u32_to_f32(x: u32) -> f32 { - // SAFETY: `u32` is a plain old datatype so we can always... uh... - // ...look, just pretend you forgot what you just read. - // Stability concerns. - unsafe { mem::transmute(x) } - } - intrinsics::const_eval_select((v,), ct_u32_to_f32, rt_u32_to_f32) + // SAFETY: `u32` is a plain old datatype so we can always transmute from it. + unsafe { mem::transmute(v) } } /// Returns the memory representation of this floating point number as a byte array in diff --git a/library/core/src/num/f64.rs b/library/core/src/num/f64.rs index a89859be7efe8..85eb152ad1f19 100644 --- a/library/core/src/num/f64.rs +++ b/library/core/src/num/f64.rs @@ -528,7 +528,7 @@ impl f64 { #[inline] #[rustc_const_unstable(feature = "const_float_classify", issue = "72505")] pub(crate) const fn abs_private(self) -> f64 { - // SAFETY: This transmutation is fine. Probably. For the reasons std is using it. + // SAFETY: This transmutation is fine just like in `to_bits`/`from_bits`. 
unsafe { mem::transmute::<u64, f64>(mem::transmute::<f64, u64>(self) & !Self::SIGN_MASK) } } @@ -653,12 +653,14 @@ impl f64 { pub const fn classify(self) -> FpCategory { // A previous implementation tried to only use bitmask-based checks, // using f64::to_bits to transmute the float to its bit repr and match on that. - // Unfortunately, floating point numbers can be much worse than that. - // This also needs to not result in recursive evaluations of f64::to_bits. + // If we only cared about being "technically" correct, that's an entirely legit + // implementation. + // + // Unfortunately, there is hardware out there that does not correctly implement the IEEE + // float semantics Rust relies on: x87 uses a too-large exponent, and some hardware flushes + // subnormals to zero. These are platform bugs, and Rust will misbehave on such hardware, + // but we can at least try to make things seem as sane as possible by being careful here. // - // On some processors, in some cases, LLVM will "helpfully" lower floating point ops, - // in spite of a request for them using f32 and f64, to things like x87 operations. - // These have an f64's mantissa, but can have a larger than normal exponent. // FIXME(jubilee): Using x87 operations is never necessary in order to function // on x86 processors for Rust-to-Rust calls, so this issue should not happen. // Code generation should be adjusted to use non-C calling conventions, avoiding this. @@ -672,41 +674,18 @@ // as correctness requires avoiding equality tests that may be Subnormal == -0.0 // because it may be wrong under "denormals are zero" and "flush to zero" modes. // Most of std's targets don't use those, but they are used for thumbv7neon. - // So, this does use bitpattern matching for the rest. - - // SAFETY: f64 to u64 is fine. Usually. - // If control flow has gotten this far, the value is definitely in one of the categories - // that f64::partial_classify can correctly analyze. - unsafe { f64::partial_classify(self) } - } - } - - // This doesn't actually return a right answer for NaN on purpose, - // seeing as how it cannot correctly discern between a floating point NaN, - // and some normal floating point numbers truncated from an x87 FPU. - #[rustc_const_unstable(feature = "const_float_classify", issue = "72505")] - const unsafe fn partial_classify(self) -> FpCategory { - // SAFETY: The caller is not asking questions for which this will tell lies. - let b = unsafe { mem::transmute::<f64, u64>(self) }; - match (b & Self::MAN_MASK, b & Self::EXP_MASK) { - (0, Self::EXP_MASK) => FpCategory::Infinite, - (0, 0) => FpCategory::Zero, - (_, 0) => FpCategory::Subnormal, - _ => FpCategory::Normal, - } - } - - // This operates on bits, and only bits, so it can ignore concerns about weird FPUs. - // FIXME(jubilee): In a just world, this would be the entire impl for classify, - // plus a transmute. We do not live in a just world, but we can make it more so. - #[rustc_const_unstable(feature = "const_float_classify", issue = "72505")] - const fn classify_bits(b: u64) -> FpCategory { - match (b & Self::MAN_MASK, b & Self::EXP_MASK) { - (0, Self::EXP_MASK) => FpCategory::Infinite, - (_, Self::EXP_MASK) => FpCategory::Nan, - (0, 0) => FpCategory::Zero, - (_, 0) => FpCategory::Subnormal, - _ => FpCategory::Normal, + // So, this does use bitpattern matching for the rest.
On x87, due to the incorrect + // float codegen on this hardware, this doesn't actually return a right answer for NaN + // because it cannot correctly discern between a floating point NaN, and some normal + // floating point numbers truncated from an x87 FPU -- but we took care of NaN above, so + // we are fine. + let b = self.to_bits(); + match (b & Self::MAN_MASK, b & Self::EXP_MASK) { + (0, Self::EXP_MASK) => FpCategory::Infinite, + (0, 0) => FpCategory::Zero, + (_, 0) => FpCategory::Subnormal, + _ => FpCategory::Normal, + } } } @@ -1134,33 +1113,7 @@ impl f64 { #[inline] pub const fn to_bits(self) -> u64 { // SAFETY: `u64` is a plain old datatype so we can always transmute to it. - // ...sorta. - // - // See the SAFETY comment in f64::from_bits for more. - #[rustc_const_unstable(feature = "const_float_bits_conv", issue = "72447")] - const fn ct_f64_to_u64(ct: f64) -> u64 { - match ct.classify() { - FpCategory::Nan => { - panic!("const-eval error: cannot use f64::to_bits on a NaN") - } - FpCategory::Subnormal => { - panic!("const-eval error: cannot use f64::to_bits on a subnormal number") - } - FpCategory::Infinite | FpCategory::Normal | FpCategory::Zero => { - // SAFETY: We have a normal floating point number. Now we transmute, i.e. do a bitcopy. - unsafe { mem::transmute::(ct) } - } - } - } - - #[inline(always)] // See https://github.com/rust-lang/compiler-builtins/issues/491 - fn rt_f64_to_u64(rt: f64) -> u64 { - // SAFETY: `u64` is a plain old datatype so we can always... uh... - // ...look, just pretend you forgot what you just read. - // Stability concerns. - unsafe { mem::transmute::(rt) } - } - intrinsics::const_eval_select((self,), ct_f64_to_u64, rt_f64_to_u64) + unsafe { mem::transmute(self) } } /// Raw transmutation from `u64`. @@ -1205,58 +1158,8 @@ impl f64 { #[inline] pub const fn from_bits(v: u64) -> Self { // It turns out the safety issues with sNaN were overblown! Hooray! - // SAFETY: `u64` is a plain old datatype so we can always transmute from it - // ...sorta. - // - // It turns out that at runtime, it is possible for a floating point number - // to be subject to floating point modes that alter nonzero subnormal numbers - // to zero on reads and writes, aka "denormals are zero" and "flush to zero". - // This is not a problem usually, but at least one tier2 platform for Rust - // actually exhibits an FTZ behavior by default: thumbv7neon - // aka "the Neon FPU in AArch32 state" - // - // Even with this, not all instructions exhibit the FTZ behaviors on thumbv7neon, - // so this should load the same bits if LLVM emits the "correct" instructions, - // but LLVM sometimes makes interesting choices about float optimization, - // and other FPUs may do similar. Thus, it is wise to indulge luxuriously in caution. - // - // In addition, on x86 targets with SSE or SSE2 disabled and the x87 FPU enabled, - // i.e. not soft-float, the way Rust does parameter passing can actually alter - // a number that is "not infinity" to have the same exponent as infinity, - // in a slightly unpredictable manner. - // - // And, of course evaluating to a NaN value is fairly nondeterministic. - // More precisely: when NaN should be returned is knowable, but which NaN? - // So far that's defined by a combination of LLVM and the CPU, not Rust. - // This function, however, allows observing the bitstring of a NaN, - // thus introspection on CTFE. - // - // In order to preserve, at least for the moment, const-to-runtime equivalence, - // reject any of these possible situations from happening. 
- #[rustc_const_unstable(feature = "const_float_bits_conv", issue = "72447")] - const fn ct_u64_to_f64(ct: u64) -> f64 { - match f64::classify_bits(ct) { - FpCategory::Subnormal => { - panic!("const-eval error: cannot use f64::from_bits on a subnormal number") - } - FpCategory::Nan => { - panic!("const-eval error: cannot use f64::from_bits on NaN") - } - FpCategory::Infinite | FpCategory::Normal | FpCategory::Zero => { - // SAFETY: It's not a frumious number - unsafe { mem::transmute::(ct) } - } - } - } - - #[inline(always)] // See https://github.com/rust-lang/compiler-builtins/issues/491 - fn rt_u64_to_f64(rt: u64) -> f64 { - // SAFETY: `u64` is a plain old datatype so we can always... uh... - // ...look, just pretend you forgot what you just read. - // Stability concerns. - unsafe { mem::transmute::(rt) } - } - intrinsics::const_eval_select((v,), ct_u64_to_f64, rt_u64_to_f64) + // SAFETY: `u64` is a plain old datatype so we can always transmute from it. + unsafe { mem::transmute(v) } } /// Returns the memory representation of this floating point number as a byte array in diff --git a/library/core/src/num/int_macros.rs b/library/core/src/num/int_macros.rs index dd88e859b30e7..17cf2a7b261fd 100644 --- a/library/core/src/num/int_macros.rs +++ b/library/core/src/num/int_macros.rs @@ -1496,18 +1496,17 @@ macro_rules! int_impl { let mut base = self; let mut acc: Self = 1; - while exp > 1 { + loop { if (exp & 1) == 1 { acc = try_opt!(acc.checked_mul(base)); + // since exp!=0, finally the exp must be 1. + if exp == 1 { + return Some(acc); + } } exp /= 2; base = try_opt!(base.checked_mul(base)); } - // since exp!=0, finally the exp must be 1. - // Deal with the final bit of the exponent separately, since - // squaring the base afterwards is not necessary and may cause a - // needless overflow. - acc.checked_mul(base) } /// Strict exponentiation. Computes `self.pow(exp)`, panicking if @@ -1547,18 +1546,17 @@ macro_rules! int_impl { let mut base = self; let mut acc: Self = 1; - while exp > 1 { + loop { if (exp & 1) == 1 { acc = acc.strict_mul(base); + // since exp!=0, finally the exp must be 1. + if exp == 1 { + return acc; + } } exp /= 2; base = base.strict_mul(base); } - // since exp!=0, finally the exp must be 1. - // Deal with the final bit of the exponent separately, since - // squaring the base afterwards is not necessary and may cause a - // needless overflow. - acc.strict_mul(base) } /// Returns the square root of the number, rounded down. @@ -2175,6 +2173,7 @@ macro_rules! int_impl { #[must_use = "this returns the result of the operation, \ without modifying the original"] #[inline] + #[rustc_allow_const_fn_unstable(is_val_statically_known)] pub const fn wrapping_pow(self, mut exp: u32) -> Self { if exp == 0 { return 1; @@ -2182,19 +2181,36 @@ macro_rules! int_impl { let mut base = self; let mut acc: Self = 1; - while exp > 1 { - if (exp & 1) == 1 { - acc = acc.wrapping_mul(base); + if intrinsics::is_val_statically_known(exp) { + while exp > 1 { + if (exp & 1) == 1 { + acc = acc.wrapping_mul(base); + } + exp /= 2; + base = base.wrapping_mul(base); } - exp /= 2; - base = base.wrapping_mul(base); - } - // since exp!=0, finally the exp must be 1. - // Deal with the final bit of the exponent separately, since - // squaring the base afterwards is not necessary and may cause a - // needless overflow. - acc.wrapping_mul(base) + // since exp!=0, finally the exp must be 1. + // Deal with the final bit of the exponent separately, since + // squaring the base afterwards is not necessary. 
+ acc.wrapping_mul(base) + } else { + // This is faster than the above when the exponent is not known + // at compile time. We can't use the same code for the constant + // exponent case because LLVM is currently unable to unroll + // this loop. + loop { + if (exp & 1) == 1 { + acc = acc.wrapping_mul(base); + // since exp!=0, finally the exp must be 1. + if exp == 1 { + return acc; + } + } + exp /= 2; + base = base.wrapping_mul(base); + } + } } /// Calculates `self` + `rhs`. @@ -2690,9 +2706,14 @@ macro_rules! int_impl { // Scratch space for storing results of overflowing_mul. let mut r; - while exp > 1 { + loop { if (exp & 1) == 1 { r = acc.overflowing_mul(base); + // since exp!=0, finally the exp must be 1. + if exp == 1 { + r.1 |= overflown; + return r; + } acc = r.0; overflown |= r.1; } @@ -2701,14 +2722,6 @@ macro_rules! int_impl { base = r.0; overflown |= r.1; } - - // since exp!=0, finally the exp must be 1. - // Deal with the final bit of the exponent separately, since - // squaring the base afterwards is not necessary and may cause a - // needless overflow. - r = acc.overflowing_mul(base); - r.1 |= overflown; - r } /// Raises self to the power of `exp`, using exponentiation by squaring. @@ -2728,6 +2741,7 @@ macro_rules! int_impl { without modifying the original"] #[inline] #[rustc_inherit_overflow_checks] + #[rustc_allow_const_fn_unstable(is_val_statically_known)] pub const fn pow(self, mut exp: u32) -> Self { if exp == 0 { return 1; @@ -2735,19 +2749,37 @@ macro_rules! int_impl { let mut base = self; let mut acc = 1; - while exp > 1 { - if (exp & 1) == 1 { - acc = acc * base; + if intrinsics::is_val_statically_known(exp) { + while exp > 1 { + if (exp & 1) == 1 { + acc = acc * base; + } + exp /= 2; + base = base * base; } - exp /= 2; - base = base * base; - } - // since exp!=0, finally the exp must be 1. - // Deal with the final bit of the exponent separately, since - // squaring the base afterwards is not necessary and may cause a - // needless overflow. - acc * base + // since exp!=0, finally the exp must be 1. + // Deal with the final bit of the exponent separately, since + // squaring the base afterwards is not necessary and may cause a + // needless overflow. + acc * base + } else { + // This is faster than the above when the exponent is not known + // at compile time. We can't use the same code for the constant + // exponent case because LLVM is currently unable to unroll + // this loop. + loop { + if (exp & 1) == 1 { + acc = acc * base; + // since exp!=0, finally the exp must be 1. + if exp == 1 { + return acc; + } + } + exp /= 2; + base = base * base; + } + } } /// Returns the square root of the number, rounded down. diff --git a/library/core/src/num/uint_macros.rs b/library/core/src/num/uint_macros.rs index a2e17fae76873..719a6a55940dc 100644 --- a/library/core/src/num/uint_macros.rs +++ b/library/core/src/num/uint_macros.rs @@ -1622,20 +1622,17 @@ macro_rules! uint_impl { let mut base = self; let mut acc: Self = 1; - while exp > 1 { + loop { if (exp & 1) == 1 { acc = try_opt!(acc.checked_mul(base)); + // since exp!=0, finally the exp must be 1. + if exp == 1 { + return Some(acc); + } } exp /= 2; base = try_opt!(base.checked_mul(base)); } - - // since exp!=0, finally the exp must be 1. - // Deal with the final bit of the exponent separately, since - // squaring the base afterwards is not necessary and may cause a - // needless overflow. - - acc.checked_mul(base) } /// Strict exponentiation. Computes `self.pow(exp)`, panicking if @@ -1675,18 +1672,17 @@ macro_rules! 
uint_impl { let mut base = self; let mut acc: Self = 1; - while exp > 1 { + loop { if (exp & 1) == 1 { acc = acc.strict_mul(base); + // since exp!=0, finally the exp must be 1. + if exp == 1 { + return acc; + } } exp /= 2; base = base.strict_mul(base); } - // since exp!=0, finally the exp must be 1. - // Deal with the final bit of the exponent separately, since - // squaring the base afterwards is not necessary and may cause a - // needless overflow. - acc.strict_mul(base) } /// Saturating integer addition. Computes `self + rhs`, saturating at @@ -2138,6 +2134,7 @@ macro_rules! uint_impl { #[must_use = "this returns the result of the operation, \ without modifying the original"] #[inline] + #[rustc_allow_const_fn_unstable(is_val_statically_known)] pub const fn wrapping_pow(self, mut exp: u32) -> Self { if exp == 0 { return 1; @@ -2145,19 +2142,36 @@ macro_rules! uint_impl { let mut base = self; let mut acc: Self = 1; - while exp > 1 { - if (exp & 1) == 1 { - acc = acc.wrapping_mul(base); + if intrinsics::is_val_statically_known(exp) { + while exp > 1 { + if (exp & 1) == 1 { + acc = acc.wrapping_mul(base); + } + exp /= 2; + base = base.wrapping_mul(base); } - exp /= 2; - base = base.wrapping_mul(base); - } - // since exp!=0, finally the exp must be 1. - // Deal with the final bit of the exponent separately, since - // squaring the base afterwards is not necessary and may cause a - // needless overflow. - acc.wrapping_mul(base) + // since exp!=0, finally the exp must be 1. + // Deal with the final bit of the exponent separately, since + // squaring the base afterwards is not necessary. + acc.wrapping_mul(base) + } else { + // This is faster than the above when the exponent is not known + // at compile time. We can't use the same code for the constant + // exponent case because LLVM is currently unable to unroll + // this loop. + loop { + if (exp & 1) == 1 { + acc = acc.wrapping_mul(base); + // since exp!=0, finally the exp must be 1. + if exp == 1 { + return acc; + } + } + exp /= 2; + base = base.wrapping_mul(base); + } + } } /// Calculates `self` + `rhs`. @@ -2603,9 +2617,14 @@ macro_rules! uint_impl { // Scratch space for storing results of overflowing_mul. let mut r; - while exp > 1 { + loop { if (exp & 1) == 1 { r = acc.overflowing_mul(base); + // since exp!=0, finally the exp must be 1. + if exp == 1 { + r.1 |= overflown; + return r; + } acc = r.0; overflown |= r.1; } @@ -2614,15 +2633,6 @@ macro_rules! uint_impl { base = r.0; overflown |= r.1; } - - // since exp!=0, finally the exp must be 1. - // Deal with the final bit of the exponent separately, since - // squaring the base afterwards is not necessary and may cause a - // needless overflow. - r = acc.overflowing_mul(base); - r.1 |= overflown; - - r } /// Raises self to the power of `exp`, using exponentiation by squaring. @@ -2640,6 +2650,7 @@ macro_rules! uint_impl { without modifying the original"] #[inline] #[rustc_inherit_overflow_checks] + #[rustc_allow_const_fn_unstable(is_val_statically_known)] pub const fn pow(self, mut exp: u32) -> Self { if exp == 0 { return 1; @@ -2647,19 +2658,37 @@ macro_rules! uint_impl { let mut base = self; let mut acc = 1; - while exp > 1 { - if (exp & 1) == 1 { - acc = acc * base; + if intrinsics::is_val_statically_known(exp) { + while exp > 1 { + if (exp & 1) == 1 { + acc = acc * base; + } + exp /= 2; + base = base * base; } - exp /= 2; - base = base * base; - } - // since exp!=0, finally the exp must be 1. 
- // Deal with the final bit of the exponent separately, since - // squaring the base afterwards is not necessary and may cause a - // needless overflow. - acc * base + // since exp!=0, finally the exp must be 1. + // Deal with the final bit of the exponent separately, since + // squaring the base afterwards is not necessary and may cause a + // needless overflow. + acc * base + } else { + // This is faster than the above when the exponent is not known + // at compile time. We can't use the same code for the constant + // exponent case because LLVM is currently unable to unroll + // this loop. + loop { + if (exp & 1) == 1 { + acc = acc * base; + // since exp!=0, finally the exp must be 1. + if exp == 1 { + return acc; + } + } + exp /= 2; + base = base * base; + } + } } /// Returns the square root of the number, rounded down. diff --git a/library/core/src/ops/control_flow.rs b/library/core/src/ops/control_flow.rs index a2709c66b06ad..ab73dc19fcc73 100644 --- a/library/core/src/ops/control_flow.rs +++ b/library/core/src/ops/control_flow.rs @@ -116,7 +116,9 @@ impl ops::Try for ControlFlow { } #[unstable(feature = "try_trait_v2", issue = "84277")] -impl ops::FromResidual for ControlFlow { +// Note: manually specifying the residual type instead of using the default to work around +// https://github.com/rust-lang/rust/issues/99940 +impl ops::FromResidual> for ControlFlow { #[inline] fn from_residual(residual: ControlFlow) -> Self { match residual { diff --git a/library/core/src/ops/coroutine.rs b/library/core/src/ops/coroutine.rs index 13df888d24c5c..c7d596d74c383 100644 --- a/library/core/src/ops/coroutine.rs +++ b/library/core/src/ops/coroutine.rs @@ -69,6 +69,7 @@ pub enum CoroutineState { #[lang = "coroutine"] #[unstable(feature = "coroutine_trait", issue = "43122")] #[fundamental] +#[must_use = "coroutines are lazy and do nothing unless resumed"] pub trait Coroutine { /// The type of value this coroutine yields. /// diff --git a/library/core/src/option.rs b/library/core/src/option.rs index 6c89c81018038..50cb22b7eb3f5 100644 --- a/library/core/src/option.rs +++ b/library/core/src/option.rs @@ -656,8 +656,6 @@ impl Option { /// # Examples /// /// ``` - /// #![feature(is_none_or)] - /// /// let x: Option = Some(2); /// assert_eq!(x.is_none_or(|x| x > 1), true); /// @@ -669,7 +667,7 @@ impl Option { /// ``` #[must_use] #[inline] - #[unstable(feature = "is_none_or", issue = "126383")] + #[stable(feature = "is_none_or", since = "CURRENT_RUSTC_VERSION")] pub fn is_none_or(self, f: impl FnOnce(T) -> bool) -> bool { match self { None => true, @@ -2495,7 +2493,9 @@ impl ops::Try for Option { } #[unstable(feature = "try_trait_v2", issue = "84277")] -impl ops::FromResidual for Option { +// Note: manually specifying the residual type instead of using the default to work around +// https://github.com/rust-lang/rust/issues/99940 +impl ops::FromResidual> for Option { #[inline] fn from_residual(residual: Option) -> Self { match residual { diff --git a/library/core/src/panic/location.rs b/library/core/src/panic/location.rs index 8c04994ac0fc4..930edffd50517 100644 --- a/library/core/src/panic/location.rs +++ b/library/core/src/panic/location.rs @@ -44,7 +44,7 @@ impl<'a> Location<'a> { /// /// # Examples /// - /// ``` + /// ```standalone /// use std::panic::Location; /// /// /// Returns the [`Location`] at which it is called. 
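The `pow`-family hunks above (in `int_macros.rs` and `uint_macros.rs`) all replace the old `while exp > 1` loop plus trailing multiply with a single `loop` that returns as soon as the exponent reaches 1, and `pow`/`wrapping_pow` additionally keep the old shape behind `is_val_statically_known` so LLVM can still unroll constant exponents. A minimal standalone sketch of the new loop shape, using a made-up `checked_pow_u32` helper rather than the macro-generated std methods:

```rust
// Exponentiation by squaring with the early return used in the diff above:
// multiply into `acc` on each set bit of `exp`, and return right after the
// multiply once `exp` reaches 1, so `base` is never squared an extra time.
fn checked_pow_u32(mut base: u32, mut exp: u32) -> Option<u32> {
    if exp == 0 {
        return Some(1);
    }
    let mut acc: u32 = 1;
    loop {
        if (exp & 1) == 1 {
            acc = acc.checked_mul(base)?;
            // Since exp != 0, it eventually reaches 1 on this branch.
            if exp == 1 {
                return Some(acc);
            }
        }
        exp /= 2;
        base = base.checked_mul(base)?;
    }
}

fn main() {
    assert_eq!(checked_pow_u32(3, 5), Some(243));
    assert_eq!(checked_pow_u32(2, 31), Some(2_147_483_648));
    assert_eq!(checked_pow_u32(2, 32), None); // would overflow u32
}
```

The early return preserves the property the removed trailing-multiply comments called out: the base is never squared after the last multiply that actually contributes to the result, so no needless overflow can be reported.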
diff --git a/library/core/src/task/wake.rs b/library/core/src/task/wake.rs index 8ce3eb2ea3921..7e5c1574f5367 100644 --- a/library/core/src/task/wake.rs +++ b/library/core/src/task/wake.rs @@ -502,6 +502,8 @@ impl Waker { #[must_use] #[stable(feature = "futures_api", since = "1.36.0")] pub fn will_wake(&self, other: &Waker) -> bool { + // We optimize this by comparing vtable addresses instead of vtable contents. + // This is permitted since the function is documented as best-effort. let RawWaker { data: a_data, vtable: a_vtable } = self.waker; let RawWaker { data: b_data, vtable: b_vtable } = other.waker; a_data == b_data && ptr::eq(a_vtable, b_vtable) @@ -528,10 +530,18 @@ impl Waker { /// Returns a reference to a `Waker` that does nothing when used. /// + // Note! Much of the documentation for this method is duplicated + // in the docs for `LocalWaker::noop`. + // If you edit it, consider editing the other copy too. + // /// This is mostly useful for writing tests that need a [`Context`] to poll /// some futures, but are not expecting those futures to wake the waker or /// do not need to do anything specific if it happens. /// + /// More generally, using `Waker::noop()` to poll a future + /// means discarding the notification of when the future should be polled again. + /// So it should only be used when such a notification will not be needed to make progress. + /// /// If an owned `Waker` is needed, `clone()` this one. /// /// # Examples @@ -761,7 +771,11 @@ impl LocalWaker { #[must_use] #[unstable(feature = "local_waker", issue = "118959")] pub fn will_wake(&self, other: &LocalWaker) -> bool { - self.waker == other.waker + // We optimize this by comparing vtable addresses instead of vtable contents. + // This is permitted since the function is documented as best-effort. + let RawWaker { data: a_data, vtable: a_vtable } = self.waker; + let RawWaker { data: b_data, vtable: b_vtable } = other.waker; + a_data == b_data && ptr::eq(a_vtable, b_vtable) } /// Creates a new `LocalWaker` from [`RawWaker`]. @@ -777,12 +791,22 @@ impl LocalWaker { Self { waker } } - /// Creates a new `LocalWaker` that does nothing when `wake` is called. + /// Returns a reference to a `LocalWaker` that does nothing when used. /// + // Note! Much of the documentation for this method is duplicated + // in the docs for `Waker::noop`. + // If you edit it, consider editing the other copy too. + // /// This is mostly useful for writing tests that need a [`Context`] to poll /// some futures, but are not expecting those futures to wake the waker or /// do not need to do anything specific if it happens. /// + /// More generally, using `LocalWaker::noop()` to poll a future + /// means discarding the notification of when the future should be polled again, + /// So it should only be used when such a notification will not be needed to make progress. + /// + /// If an owned `LocalWaker` is needed, `clone()` this one. + /// /// # Examples /// /// ``` diff --git a/library/core/tests/ascii_char.rs b/library/core/tests/ascii_char.rs new file mode 100644 index 0000000000000..75b5fd4b9e61d --- /dev/null +++ b/library/core/tests/ascii_char.rs @@ -0,0 +1,28 @@ +use core::ascii::Char; +use core::fmt::Write; + +/// Tests Display implementation for ascii::Char. 
+#[test] +fn test_display() { + let want = (0..128u8).map(|b| b as char).collect::(); + let mut got = String::with_capacity(128); + for byte in 0..128 { + write!(&mut got, "{}", Char::from_u8(byte).unwrap()).unwrap(); + } + assert_eq!(want, got); +} + +/// Tests Debug implementation for ascii::Char. +#[test] +fn test_debug_control() { + for byte in 0..128u8 { + let mut want = format!("{:?}", byte as char); + // `char` uses `'\u{#}'` representation where ascii::char uses `'\x##'`. + // Transform former into the latter. + if let Some(rest) = want.strip_prefix("'\\u{") { + want = format!("'\\x{:0>2}'", rest.strip_suffix("}'").unwrap()); + } + let chr = core::ascii::Char::from_u8(byte).unwrap(); + assert_eq!(want, format!("{chr:?}"), "byte: {byte}"); + } +} diff --git a/library/core/tests/clone.rs b/library/core/tests/clone.rs index b7130f16f8795..71a328733b7c4 100644 --- a/library/core/tests/clone.rs +++ b/library/core/tests/clone.rs @@ -1,5 +1,7 @@ use core::clone::CloneToUninit; +use core::ffi::CStr; use core::mem::MaybeUninit; +use core::ptr; #[test] #[allow(suspicious_double_ref_op)] @@ -81,3 +83,41 @@ fn test_clone_to_uninit_slice_drops_on_panic() { drop(a); assert_eq!(COUNTER.load(Relaxed), 0); } + +#[test] +fn test_clone_to_uninit_str() { + let a = "hello"; + + let mut storage: MaybeUninit<[u8; 5]> = MaybeUninit::uninit(); + unsafe { a.clone_to_uninit(storage.as_mut_ptr() as *mut [u8] as *mut str) }; + assert_eq!(a.as_bytes(), unsafe { storage.assume_init() }.as_slice()); + + let mut b: Box = "world".into(); + assert_eq!(a.len(), b.len()); + assert_ne!(a, &*b); + unsafe { a.clone_to_uninit(ptr::from_mut::(&mut b)) }; + assert_eq!(a, &*b); +} + +#[test] +fn test_clone_to_uninit_cstr() { + let a = c"hello"; + + let mut storage: MaybeUninit<[u8; 6]> = MaybeUninit::uninit(); + unsafe { a.clone_to_uninit(storage.as_mut_ptr() as *mut [u8] as *mut CStr) }; + assert_eq!(a.to_bytes_with_nul(), unsafe { storage.assume_init() }.as_slice()); + + let mut b: Box = c"world".into(); + assert_eq!(a.count_bytes(), b.count_bytes()); + assert_ne!(a, &*b); + unsafe { a.clone_to_uninit(ptr::from_mut::(&mut b)) }; + assert_eq!(a, &*b); +} + +#[test] +fn cstr_metadata_is_length_with_nul() { + let s: &CStr = c"abcdef"; + let p: *const CStr = ptr::from_ref(s); + let bytes: *const [u8] = p as *const [u8]; + assert_eq!(s.to_bytes_with_nul().len(), bytes.len()); +} diff --git a/library/core/tests/iter/adapters/take.rs b/library/core/tests/iter/adapters/take.rs index 39afa2cbfcaf2..65a8a93b4a916 100644 --- a/library/core/tests/iter/adapters/take.rs +++ b/library/core/tests/iter/adapters/take.rs @@ -170,3 +170,93 @@ fn test_byref_take_consumed_items() { assert_eq!(count, 70); assert_eq!(inner, 90..90); } + +#[test] +fn test_exact_size_take_repeat() { + let mut iter = core::iter::repeat(42).take(40); + assert_eq!((40, Some(40)), iter.size_hint()); + assert_eq!(40, iter.len()); + + assert_eq!(Some(42), iter.next()); + assert_eq!((39, Some(39)), iter.size_hint()); + assert_eq!(39, iter.len()); + + assert_eq!(Some(42), iter.next_back()); + assert_eq!((38, Some(38)), iter.size_hint()); + assert_eq!(38, iter.len()); + + assert_eq!(Some(42), iter.nth(3)); + assert_eq!((34, Some(34)), iter.size_hint()); + assert_eq!(34, iter.len()); + + assert_eq!(Some(42), iter.nth_back(3)); + assert_eq!((30, Some(30)), iter.size_hint()); + assert_eq!(30, iter.len()); + + assert_eq!(Ok(()), iter.advance_by(10)); + assert_eq!((20, Some(20)), iter.size_hint()); + assert_eq!(20, iter.len()); + + assert_eq!(Ok(()), iter.advance_back_by(10)); + 
assert_eq!((10, Some(10)), iter.size_hint()); + assert_eq!(10, iter.len()); +} + +#[test] +fn test_exact_size_take_repeat_with() { + let mut counter = 0; + let mut iter = core::iter::repeat_with(move || { + counter += 1; + counter + }) + .take(40); + assert_eq!((40, Some(40)), iter.size_hint()); + assert_eq!(40, iter.len()); + + assert_eq!(Some(1), iter.next()); + assert_eq!((39, Some(39)), iter.size_hint()); + assert_eq!(39, iter.len()); + + assert_eq!(Some(5), iter.nth(3)); + assert_eq!((35, Some(35)), iter.size_hint()); + assert_eq!(35, iter.len()); + + assert_eq!(Ok(()), iter.advance_by(10)); + assert_eq!((25, Some(25)), iter.size_hint()); + assert_eq!(25, iter.len()); + + assert_eq!(Some(16), iter.next()); + assert_eq!((24, Some(24)), iter.size_hint()); + assert_eq!(24, iter.len()); +} + +// This is https://github.com/rust-lang/rust/issues/104729 with all uses of +// repeat(0) replaced by repeat(0).take(20). +#[test] +fn test_reverse_on_zip() { + let vec_1 = [1; 10]; + + let zipped_iter = vec_1.iter().copied().zip(core::iter::repeat(0).take(20)); + + // Forward + for (one, zero) in zipped_iter { + assert_eq!((1, 0), (one, zero)); + } + + let rev_vec_iter = vec_1.iter().rev(); + let rev_repeat_iter = std::iter::repeat(0).take(20).rev(); + + // Manual reversed zip + let rev_zipped_iter = rev_vec_iter.zip(rev_repeat_iter); + + for (&one, zero) in rev_zipped_iter { + assert_eq!((1, 0), (one, zero)); + } + + let zipped_iter = vec_1.iter().zip(core::iter::repeat(0).take(20)); + + // Cannot call rev here for automatic reversed zip construction + for (&one, zero) in zipped_iter.rev() { + assert_eq!((1, 0), (one, zero)); + } +} diff --git a/library/core/tests/lib.rs b/library/core/tests/lib.rs index 1e336bf96b8fa..8872b4cbfd5ba 100644 --- a/library/core/tests/lib.rs +++ b/library/core/tests/lib.rs @@ -122,6 +122,7 @@ mod alloc; mod any; mod array; mod ascii; +mod ascii_char; mod asserting; mod async_iter; mod atomic; diff --git a/library/core/tests/ops.rs b/library/core/tests/ops.rs index 2ee0abd399bb6..501e0f33fe4cc 100644 --- a/library/core/tests/ops.rs +++ b/library/core/tests/ops.rs @@ -1,4 +1,5 @@ mod control_flow; +mod from_residual; use core::ops::{ Bound, Deref, DerefMut, Range, RangeFrom, RangeFull, RangeInclusive, RangeTo, RangeToInclusive, diff --git a/library/core/tests/ops/from_residual.rs b/library/core/tests/ops/from_residual.rs new file mode 100644 index 0000000000000..d5c86ccbcd317 --- /dev/null +++ b/library/core/tests/ops/from_residual.rs @@ -0,0 +1,26 @@ +//! Regression test that Option and ControlFlow can have downstream FromResidual impls. +//! cc https://github.com/rust-lang/rust/issues/99940, +//! This does NOT test that issue in general; Option and ControlFlow's FromResidual +//! impls in core were changed to not be affected by that issue.
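For context on what these `FromResidual` impls do at a use site: `?` on a `ControlFlow` funnels a `Break` residual through core's `FromResidual<ControlFlow<B, Infallible>>` impl, the one whose residual type is spelled out explicitly earlier in this diff. A small self-contained sketch with invented `check`/`first_negative` helpers (not part of the test file above):

```rust
use std::ops::ControlFlow;

// Hypothetical helper: signal an early exit when a negative number is seen.
fn check(x: i32) -> ControlFlow<i32> {
    if x < 0 { ControlFlow::Break(x) } else { ControlFlow::Continue(()) }
}

fn first_negative(xs: &[i32]) -> ControlFlow<i32> {
    for &x in xs {
        // `?` uses core's `FromResidual` impl for `ControlFlow` to propagate
        // the `Break` value out of this function.
        check(x)?;
    }
    ControlFlow::Continue(())
}

fn main() {
    assert_eq!(first_negative(&[1, 2, -3, 4]), ControlFlow::Break(-3));
    assert_eq!(first_negative(&[5, 6]), ControlFlow::Continue(()));
}
```

The regression test introduced above checks that downstream `FromResidual` impls for a local residual type, like the ones it defines, still coexist with these core impls.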
+ +use core::ops::{ControlFlow, FromResidual}; + +struct Local; + +impl FromResidual for Option { + fn from_residual(_: Local) -> Option { + unimplemented!() + } +} + +impl FromResidual for ControlFlow { + fn from_residual(_: Local) -> ControlFlow { + unimplemented!() + } +} + +impl FromResidual for Result { + fn from_residual(_: Local) -> Result { + unimplemented!() + } +} diff --git a/library/core/tests/ptr.rs b/library/core/tests/ptr.rs index bc1940ebf32b5..78d1b137e63f5 100644 --- a/library/core/tests/ptr.rs +++ b/library/core/tests/ptr.rs @@ -810,9 +810,12 @@ fn ptr_metadata() { assert_ne!(address_1, address_2); // Different erased type => different vtable pointer assert_ne!(address_2, address_3); - // Same erased type and same trait => same vtable pointer - assert_eq!(address_3, address_4); - assert_eq!(address_3, address_5); + // Same erased type and same trait => same vtable pointer. + // This is *not guaranteed*, so we skip it in Miri. + if !cfg!(miri) { + assert_eq!(address_3, address_4); + assert_eq!(address_3, address_5); + } } } diff --git a/library/std/Cargo.toml b/library/std/Cargo.toml index 2ce284c85e2d6..7f23681ee45fc 100644 --- a/library/std/Cargo.toml +++ b/library/std/Cargo.toml @@ -57,6 +57,9 @@ object = { version = "0.36.0", default-features = false, optional = true, featur 'archive', ] } +[target.'cfg(windows)'.dependencies.windows-targets] +path = "../windows_targets" + [dev-dependencies] rand = { version = "0.8.5", default-features = false, features = ["alloc"] } rand_xorshift = "0.3.0" @@ -116,7 +119,7 @@ std_detect_env_override = ["std_detect/std_detect_env_override"] # Enable using raw-dylib for Windows imports. # This will eventually be the default. -windows_raw_dylib = [] +windows_raw_dylib = ["windows-targets/windows_raw_dylib"] [package.metadata.fortanix-sgx] # Maximum possible number of threads when testing diff --git a/library/std/src/env.rs b/library/std/src/env.rs index 50ae83090c7e1..80890e61471c6 100644 --- a/library/std/src/env.rs +++ b/library/std/src/env.rs @@ -355,7 +355,13 @@ impl Error for VarError { /// } /// assert_eq!(env::var(key), Ok("VALUE".to_string())); /// ``` -#[rustc_deprecated_safe_2024] +#[cfg_attr(bootstrap, rustc_deprecated_safe_2024)] +#[cfg_attr( + not(bootstrap), + rustc_deprecated_safe_2024( + audit_that = "the environment access only happens in single-threaded code" + ) +)] #[stable(feature = "env", since = "1.0.0")] pub unsafe fn set_var, V: AsRef>(key: K, value: V) { let (key, value) = (key.as_ref(), value.as_ref()); @@ -419,7 +425,13 @@ pub unsafe fn set_var, V: AsRef>(key: K, value: V) { /// } /// assert!(env::var(key).is_err()); /// ``` -#[rustc_deprecated_safe_2024] +#[cfg_attr(bootstrap, rustc_deprecated_safe_2024)] +#[cfg_attr( + not(bootstrap), + rustc_deprecated_safe_2024( + audit_that = "the environment access only happens in single-threaded code" + ) +)] #[stable(feature = "env", since = "1.0.0")] pub unsafe fn remove_var>(key: K) { let key = key.as_ref(); diff --git a/library/std/src/ffi/os_str.rs b/library/std/src/ffi/os_str.rs index a501bcc98cf38..918eec2d0d8ef 100644 --- a/library/std/src/ffi/os_str.rs +++ b/library/std/src/ffi/os_str.rs @@ -3,10 +3,13 @@ #[cfg(test)] mod tests; +use core::clone::CloneToUninit; + use crate::borrow::{Borrow, Cow}; use crate::collections::TryReserveError; use crate::hash::{Hash, Hasher}; use crate::ops::{self, Range}; +use crate::ptr::addr_of_mut; use crate::rc::Rc; use crate::str::FromStr; use crate::sync::Arc; @@ -1261,6 +1264,16 @@ impl Clone for Box { } } +#[unstable(feature 
= "clone_to_uninit", issue = "126799")] +unsafe impl CloneToUninit for OsStr { + #[inline] + #[cfg_attr(debug_assertions, track_caller)] + unsafe fn clone_to_uninit(&self, dst: *mut Self) { + // SAFETY: we're just a wrapper around a platform-specific Slice + unsafe { self.inner.clone_to_uninit(addr_of_mut!((*dst).inner)) } + } +} + #[stable(feature = "shared_from_slice2", since = "1.24.0")] impl From for Arc { /// Converts an [`OsString`] into an [Arc]<[OsStr]> by moving the [`OsString`] diff --git a/library/std/src/ffi/os_str/tests.rs b/library/std/src/ffi/os_str/tests.rs index 5b39b9e34d8c7..67147934b4db3 100644 --- a/library/std/src/ffi/os_str/tests.rs +++ b/library/std/src/ffi/os_str/tests.rs @@ -1,4 +1,6 @@ use super::*; +use crate::mem::MaybeUninit; +use crate::ptr; #[test] fn test_os_string_with_capacity() { @@ -286,3 +288,18 @@ fn slice_surrogate_edge() { assert_eq!(post_crab.slice_encoded_bytes(..4), "🦀"); assert_eq!(post_crab.slice_encoded_bytes(4..), surrogate); } + +#[test] +fn clone_to_uninit() { + let a = OsStr::new("hello.txt"); + + let mut storage = vec![MaybeUninit::::uninit(); size_of_val::(a)]; + unsafe { a.clone_to_uninit(ptr::from_mut::<[_]>(storage.as_mut_slice()) as *mut OsStr) }; + assert_eq!(a.as_encoded_bytes(), unsafe { MaybeUninit::slice_assume_init_ref(&storage) }); + + let mut b: Box = OsStr::new("world.exe").into(); + assert_eq!(size_of_val::(a), size_of_val::(&b)); + assert_ne!(a, &*b); + unsafe { a.clone_to_uninit(ptr::from_mut::(&mut b)) }; + assert_eq!(a, &*b); +} diff --git a/library/std/src/lib.rs b/library/std/src/lib.rs index 93a74ef739b90..2530a37638757 100644 --- a/library/std/src/lib.rs +++ b/library/std/src/lib.rs @@ -319,6 +319,7 @@ // tidy-alphabetical-start #![feature(c_str_module)] #![feature(char_internals)] +#![feature(clone_to_uninit)] #![feature(core_intrinsics)] #![feature(core_io_borrowed_buf)] #![feature(duration_constants)] diff --git a/library/std/src/os/unix/process.rs b/library/std/src/os/unix/process.rs index c53423675bd00..46202441d4e38 100644 --- a/library/std/src/os/unix/process.rs +++ b/library/std/src/os/unix/process.rs @@ -109,13 +109,21 @@ pub trait CommandExt: Sealed { /// Schedules a closure to be run just before the `exec` function is /// invoked. /// - /// This method is stable and usable, but it should be unsafe. To fix - /// that, it got deprecated in favor of the unsafe [`pre_exec`]. + /// `before_exec` used to be a safe method, but it needs to be unsafe since the closure may only + /// perform operations that are *async-signal-safe*. Hence it got deprecated in favor of the + /// unsafe [`pre_exec`]. Meanwhile, Rust gained the ability to make an existing safe method + /// fully unsafe in a new edition, which is how `before_exec` became `unsafe`. It still also + /// remains deprecated; `pre_exec` should be used instead. 
/// /// [`pre_exec`]: CommandExt::pre_exec #[stable(feature = "process_exec", since = "1.15.0")] #[deprecated(since = "1.37.0", note = "should be unsafe, use `pre_exec` instead")] - fn before_exec(&mut self, f: F) -> &mut process::Command + #[cfg_attr(bootstrap, rustc_deprecated_safe_2024)] + #[cfg_attr( + not(bootstrap), + rustc_deprecated_safe_2024(audit_that = "the closure is async-signal-safe") + )] + unsafe fn before_exec(&mut self, f: F) -> &mut process::Command where F: FnMut() -> io::Result<()> + Send + Sync + 'static, { diff --git a/library/std/src/panic.rs b/library/std/src/panic.rs index 4c496ade81cda..6f0952c41ede5 100644 --- a/library/std/src/panic.rs +++ b/library/std/src/panic.rs @@ -440,13 +440,12 @@ impl BacktraceStyle { } fn from_u8(s: u8) -> Option { - Some(match s { - 0 => return None, - 1 => BacktraceStyle::Short, - 2 => BacktraceStyle::Full, - 3 => BacktraceStyle::Off, - _ => unreachable!(), - }) + match s { + 1 => Some(BacktraceStyle::Short), + 2 => Some(BacktraceStyle::Full), + 3 => Some(BacktraceStyle::Off), + _ => None, + } } } @@ -465,7 +464,7 @@ static SHOULD_CAPTURE: AtomicU8 = AtomicU8::new(0); pub fn set_backtrace_style(style: BacktraceStyle) { if cfg!(feature = "backtrace") { // If the `backtrace` feature of this crate is enabled, set the backtrace style. - SHOULD_CAPTURE.store(style.as_u8(), Ordering::Release); + SHOULD_CAPTURE.store(style.as_u8(), Ordering::Relaxed); } } @@ -498,7 +497,9 @@ pub fn get_backtrace_style() -> Option { // to optimize away callers. return None; } - if let Some(style) = BacktraceStyle::from_u8(SHOULD_CAPTURE.load(Ordering::Acquire)) { + + let current = SHOULD_CAPTURE.load(Ordering::Relaxed); + if let Some(style) = BacktraceStyle::from_u8(current) { return Some(style); } @@ -509,8 +510,11 @@ pub fn get_backtrace_style() -> Option { None if crate::sys::FULL_BACKTRACE_DEFAULT => BacktraceStyle::Full, None => BacktraceStyle::Off, }; - set_backtrace_style(format); - Some(format) + + match SHOULD_CAPTURE.compare_exchange(0, format.as_u8(), Ordering::Relaxed, Ordering::Relaxed) { + Ok(_) => Some(format), + Err(new) => BacktraceStyle::from_u8(new), + } } #[cfg(test)] diff --git a/library/std/src/path.rs b/library/std/src/path.rs index 80163667636ae..9eaa0e01c2c00 100644 --- a/library/std/src/path.rs +++ b/library/std/src/path.rs @@ -70,6 +70,8 @@ #[cfg(test)] mod tests; +use core::clone::CloneToUninit; + use crate::borrow::{Borrow, Cow}; use crate::collections::TryReserveError; use crate::error::Error; @@ -3109,6 +3111,16 @@ impl Path { } } +#[unstable(feature = "clone_to_uninit", issue = "126799")] +unsafe impl CloneToUninit for Path { + #[inline] + #[cfg_attr(debug_assertions, track_caller)] + unsafe fn clone_to_uninit(&self, dst: *mut Self) { + // SAFETY: Path is just a wrapper around OsStr + unsafe { self.inner.clone_to_uninit(core::ptr::addr_of_mut!((*dst).inner)) } + } +} + #[stable(feature = "rust1", since = "1.0.0")] impl AsRef for Path { #[inline] diff --git a/library/std/src/path/tests.rs b/library/std/src/path/tests.rs index a12e42cba0c5c..6436872087d6c 100644 --- a/library/std/src/path/tests.rs +++ b/library/std/src/path/tests.rs @@ -3,6 +3,8 @@ use core::hint::black_box; use super::*; use crate::collections::{BTreeSet, HashSet}; use crate::hash::DefaultHasher; +use crate::mem::MaybeUninit; +use crate::ptr; #[allow(unknown_lints, unused_macro_rules)] macro_rules! 
t ( @@ -2054,3 +2056,20 @@ fn bench_hash_path_long(b: &mut test::Bencher) { black_box(hasher.finish()); } + +#[test] +fn clone_to_uninit() { + let a = Path::new("hello.txt"); + + let mut storage = vec![MaybeUninit::::uninit(); size_of_val::(a)]; + unsafe { a.clone_to_uninit(ptr::from_mut::<[_]>(storage.as_mut_slice()) as *mut Path) }; + assert_eq!(a.as_os_str().as_encoded_bytes(), unsafe { + MaybeUninit::slice_assume_init_ref(&storage) + }); + + let mut b: Box = Path::new("world.exe").into(); + assert_eq!(size_of_val::(a), size_of_val::(&b)); + assert_ne!(a, &*b); + unsafe { a.clone_to_uninit(ptr::from_mut::(&mut b)) }; + assert_eq!(a, &*b); +} diff --git a/library/std/src/sys/os_str/bytes.rs b/library/std/src/sys/os_str/bytes.rs index 0f8bd6453528e..992767211d083 100644 --- a/library/std/src/sys/os_str/bytes.rs +++ b/library/std/src/sys/os_str/bytes.rs @@ -1,6 +1,9 @@ //! The underlying OsString/OsStr implementation on Unix and many other //! systems: just a `Vec`/`[u8]`. +use core::clone::CloneToUninit; +use core::ptr::addr_of_mut; + use crate::borrow::Cow; use crate::collections::TryReserveError; use crate::fmt::Write; @@ -345,3 +348,13 @@ impl Slice { self.inner.eq_ignore_ascii_case(&other.inner) } } + +#[unstable(feature = "clone_to_uninit", issue = "126799")] +unsafe impl CloneToUninit for Slice { + #[inline] + #[cfg_attr(debug_assertions, track_caller)] + unsafe fn clone_to_uninit(&self, dst: *mut Self) { + // SAFETY: we're just a wrapper around [u8] + unsafe { self.inner.clone_to_uninit(addr_of_mut!((*dst).inner)) } + } +} diff --git a/library/std/src/sys/os_str/wtf8.rs b/library/std/src/sys/os_str/wtf8.rs index ed975ba58b5e2..433237aa6e7bf 100644 --- a/library/std/src/sys/os_str/wtf8.rs +++ b/library/std/src/sys/os_str/wtf8.rs @@ -1,5 +1,8 @@ //! The underlying OsString/OsStr implementation on Windows is a //! wrapper around the "WTF-8" encoding; see the `wtf8` module for more. 
+use core::clone::CloneToUninit; +use core::ptr::addr_of_mut; + use crate::borrow::Cow; use crate::collections::TryReserveError; use crate::rc::Rc; @@ -268,3 +271,13 @@ impl Slice { self.inner.eq_ignore_ascii_case(&other.inner) } } + +#[unstable(feature = "clone_to_uninit", issue = "126799")] +unsafe impl CloneToUninit for Slice { + #[inline] + #[cfg_attr(debug_assertions, track_caller)] + unsafe fn clone_to_uninit(&self, dst: *mut Self) { + // SAFETY: we're just a wrapper around Wtf8 + unsafe { self.inner.clone_to_uninit(addr_of_mut!((*dst).inner)) } + } +} diff --git a/library/std/src/sys/pal/unix/fs.rs b/library/std/src/sys/pal/unix/fs.rs index bdb83f0785784..be13e1ae9b32f 100644 --- a/library/std/src/sys/pal/unix/fs.rs +++ b/library/std/src/sys/pal/unix/fs.rs @@ -1552,17 +1552,6 @@ impl fmt::Debug for File { None } - #[cfg(any( - target_os = "linux", - target_os = "freebsd", - target_os = "hurd", - target_os = "netbsd", - target_os = "openbsd", - target_os = "vxworks", - target_os = "solaris", - target_os = "illumos", - target_vendor = "apple", - ))] fn get_mode(fd: c_int) -> Option<(bool, bool)> { let mode = unsafe { libc::fcntl(fd, libc::F_GETFL) }; if mode == -1 { @@ -1576,22 +1565,6 @@ impl fmt::Debug for File { } } - #[cfg(not(any( - target_os = "linux", - target_os = "freebsd", - target_os = "hurd", - target_os = "netbsd", - target_os = "openbsd", - target_os = "vxworks", - target_os = "solaris", - target_os = "illumos", - target_vendor = "apple", - )))] - fn get_mode(_fd: c_int) -> Option<(bool, bool)> { - // FIXME(#24570): implement this for other Unix platforms - None - } - let fd = self.as_raw_fd(); let mut b = f.debug_struct("File"); b.field("fd", &fd); diff --git a/library/std/src/sys/pal/unix/rand.rs b/library/std/src/sys/pal/unix/rand.rs index 8a78ea8e7ccc7..cc0852aab4396 100644 --- a/library/std/src/sys/pal/unix/rand.rs +++ b/library/std/src/sys/pal/unix/rand.rs @@ -2,7 +2,9 @@ pub fn hashmap_random_keys() -> (u64, u64) { const KEY_LEN: usize = core::mem::size_of::(); let mut v = [0u8; KEY_LEN * 2]; - imp::fill_bytes(&mut v); + if let Err(err) = read(&mut v) { + panic!("failed to retrieve random hash map seed: {err}"); + } let key1 = v[0..KEY_LEN].try_into().unwrap(); let key2 = v[KEY_LEN..].try_into().unwrap(); @@ -10,27 +12,78 @@ pub fn hashmap_random_keys() -> (u64, u64) { (u64::from_ne_bytes(key1), u64::from_ne_bytes(key2)) } -#[cfg(all( - unix, - not(target_os = "openbsd"), - not(target_os = "netbsd"), - not(target_os = "fuchsia"), - not(target_os = "redox"), - not(target_os = "vxworks"), - not(target_os = "emscripten"), - not(target_os = "vita"), - not(target_vendor = "apple"), +cfg_if::cfg_if! { + if #[cfg(any( + target_vendor = "apple", + target_os = "openbsd", + target_os = "emscripten", + target_os = "vita", + all(target_os = "netbsd", not(netbsd10)), + target_os = "fuchsia", + target_os = "vxworks", + ))] { + // Some systems have a syscall that directly retrieves random data. + // If that is guaranteed to be available, use it. + use imp::syscall as read; + } else { + // Otherwise, try the syscall to see if it exists only on some systems + // and fall back to reading from the random device otherwise. 
+ fn read(bytes: &mut [u8]) -> crate::io::Result<()> { + use crate::fs::File; + use crate::io::Read; + use crate::sync::OnceLock; + + #[cfg(any( + target_os = "linux", + target_os = "android", + target_os = "espidf", + target_os = "horizon", + target_os = "freebsd", + target_os = "dragonfly", + target_os = "solaris", + target_os = "illumos", + netbsd10, + ))] + if let Some(res) = imp::syscall(bytes) { + return res; + } + + const PATH: &'static str = if cfg!(target_os = "redox") { + "/scheme/rand" + } else { + "/dev/urandom" + }; + + static FILE: OnceLock<File> = OnceLock::new(); + + FILE.get_or_try_init(|| File::open(PATH))?.read_exact(bytes) + } + } +} + +// All these systems have a `getrandom` syscall. +// +// It is not guaranteed to be available, so return None to fall back to the file +// implementation. +#[cfg(any( + target_os = "linux", + target_os = "android", + target_os = "espidf", + target_os = "horizon", + target_os = "freebsd", + target_os = "dragonfly", + target_os = "solaris", + target_os = "illumos", + netbsd10, +))] mod imp { - use crate::fs::File; - use crate::io::Read; - #[cfg(any(target_os = "linux", target_os = "android"))] - use crate::sys::weak::syscall; + use crate::io::{Error, Result}; + use crate::sync::atomic::{AtomicBool, Ordering}; + use crate::sys::os::errno; #[cfg(any(target_os = "linux", target_os = "android"))] fn getrandom(buf: &mut [u8]) -> libc::ssize_t { - use crate::sync::atomic::{AtomicBool, Ordering}; - use crate::sys::os::errno; + use crate::sys::weak::syscall; // A weak symbol allows interposition, e.g. for perf measurements that want to // disable randomness for consistency. Otherwise, we'll try a raw syscall. @@ -59,6 +112,7 @@ mod imp { } #[cfg(any( + target_os = "dragonfly", target_os = "espidf", target_os = "horizon", target_os = "freebsd", @@ -70,51 +124,11 @@ mod imp { unsafe { libc::getrandom(buf.as_mut_ptr().cast(), buf.len(), 0) } } - #[cfg(target_os = "dragonfly")] - fn getrandom(buf: &mut [u8]) -> libc::ssize_t { - extern "C" { - fn getrandom( - buf: *mut libc::c_void, - buflen: libc::size_t, - flags: libc::c_uint, - ) -> libc::ssize_t; - } - unsafe { getrandom(buf.as_mut_ptr().cast(), buf.len(), 0) } - } - - #[cfg(not(any( - target_os = "linux", - target_os = "android", - target_os = "espidf", - target_os = "horizon", - target_os = "freebsd", - target_os = "dragonfly", - target_os = "solaris", - target_os = "illumos", - netbsd10 - )))] - fn getrandom_fill_bytes(_buf: &mut [u8]) -> bool { - false - } - - #[cfg(any( - target_os = "linux", - target_os = "android", - target_os = "espidf", - target_os = "horizon", - target_os = "freebsd", - target_os = "dragonfly", - target_os = "solaris", - target_os = "illumos", - netbsd10 - ))] - fn getrandom_fill_bytes(v: &mut [u8]) -> bool { - use crate::sync::atomic::{AtomicBool, Ordering}; - use crate::sys::os::errno; - + pub fn syscall(v: &mut [u8]) -> Option<Result<()>> { static GETRANDOM_UNAVAILABLE: AtomicBool = AtomicBool::new(false); + if GETRANDOM_UNAVAILABLE.load(Ordering::Relaxed) { - return false; + return None; } let mut read = 0; @@ -125,8 +139,7 @@ mod imp { if err == libc::EINTR { continue; } else if err == libc::ENOSYS || err == libc::EPERM { - // Fall back to reading /dev/urandom if `getrandom` is not - // supported on the current kernel. + // `getrandom` is not supported on the current system. // // Also fall back in case it is disabled by something like // seccomp or inside of docker.
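The new `read` helper above captures the overall strategy of this refactor: try the platform's `getrandom`-style syscall first, and only fall back to the random device when the syscall is missing or blocked. A rough Linux-only approximation of that flow, written against the `libc` crate instead of std's private `sys` modules (the `fill_random` name is invented, error handling is simplified, and the caching std does with `OnceLock<File>` and the `GETRANDOM_UNAVAILABLE` flag is omitted):

```rust
use std::fs::File;
use std::io::{self, Read};

/// Fill `buf` with random bytes: `getrandom(2)` first, `/dev/urandom` as fallback.
fn fill_random(buf: &mut [u8]) -> io::Result<()> {
    // GRND_NONBLOCK: fail with EAGAIN instead of blocking if the kernel's
    // entropy pool has not been initialized yet.
    let ret =
        unsafe { libc::getrandom(buf.as_mut_ptr().cast(), buf.len(), libc::GRND_NONBLOCK) };
    if ret == buf.len() as libc::ssize_t {
        return Ok(());
    }
    let err = io::Error::last_os_error();
    match err.raw_os_error() {
        // Syscall missing (ENOSYS), blocked by seccomp/containers (EPERM), or
        // entropy pool not ready yet (EAGAIN): fall back to the device.
        Some(libc::ENOSYS) | Some(libc::EPERM) | Some(libc::EAGAIN) => {
            File::open("/dev/urandom")?.read_exact(buf)
        }
        _ => Err(err),
    }
}

fn main() -> io::Result<()> {
    let mut key = [0u8; 16];
    fill_random(&mut key)?;
    println!("{key:02x?}");
    Ok(())
}
```

As in the diff's comments, ENOSYS/EPERM cover kernels or sandboxes (e.g. seccomp, Docker) without a usable `getrandom`, while EAGAIN covers an entropy pool that has not been initialized yet.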
@@ -142,123 +155,83 @@ mod imp { // https://github.com/moby/moby/issues/42680 // GETRANDOM_UNAVAILABLE.store(true, Ordering::Relaxed); - return false; + return None; } else if err == libc::EAGAIN { - return false; + // getrandom has failed because it would have blocked as the + // non-blocking pool (urandom) has not been initialized in + // the kernel yet due to a lack of entropy. Fallback to + // reading from `/dev/urandom` which will return potentially + // insecure random data to avoid blocking applications which + // could depend on this call without ever knowing they do and + // don't have a work around. + return None; } else { - panic!("unexpected getrandom error: {err}"); + return Some(Err(Error::from_raw_os_error(err))); } } else { read += result as usize; } } - true - } - - pub fn fill_bytes(v: &mut [u8]) { - // getrandom_fill_bytes here can fail if getrandom() returns EAGAIN, - // meaning it would have blocked because the non-blocking pool (urandom) - // has not initialized in the kernel yet due to a lack of entropy. The - // fallback we do here is to avoid blocking applications which could - // depend on this call without ever knowing they do and don't have a - // work around. The PRNG of /dev/urandom will still be used but over a - // possibly predictable entropy pool. - if getrandom_fill_bytes(v) { - return; - } - // getrandom failed because it is permanently or temporarily (because - // of missing entropy) unavailable. Open /dev/urandom, read from it, - // and close it again. - let mut file = File::open("/dev/urandom").expect("failed to open /dev/urandom"); - file.read_exact(v).expect("failed to read /dev/urandom") + Some(Ok(())) } } -#[cfg(target_vendor = "apple")] +#[cfg(any( + target_os = "macos", // Supported since macOS 10.12+. + target_os = "openbsd", + target_os = "emscripten", + target_os = "vita", +))] mod imp { - use libc::{c_int, c_void, size_t}; - - use crate::io; - - #[inline(always)] - fn random_failure() -> ! { - panic!("unexpected random generation error: {}", io::Error::last_os_error()); - } - - #[cfg(target_os = "macos")] - fn getentropy_fill_bytes(v: &mut [u8]) { - extern "C" { - fn getentropy(bytes: *mut c_void, count: size_t) -> c_int; - } + use crate::io::{Error, Result}; + pub fn syscall(v: &mut [u8]) -> Result<()> { // getentropy(2) permits a maximum buffer size of 256 bytes for s in v.chunks_mut(256) { - let ret = unsafe { getentropy(s.as_mut_ptr().cast(), s.len()) }; + let ret = unsafe { libc::getentropy(s.as_mut_ptr().cast(), s.len()) }; if ret == -1 { - random_failure() + return Err(Error::last_os_error()); } } - } - #[cfg(not(target_os = "macos"))] - fn ccrandom_fill_bytes(v: &mut [u8]) { - extern "C" { - fn CCRandomGenerateBytes(bytes: *mut c_void, count: size_t) -> c_int; - } - - let ret = unsafe { CCRandomGenerateBytes(v.as_mut_ptr().cast(), v.len()) }; - if ret == -1 { - random_failure() - } - } - - pub fn fill_bytes(v: &mut [u8]) { - // All supported versions of macOS (10.12+) support getentropy. - // - // `getentropy` is measurably faster (via Divan) then the other alternatives so its preferred - // when usable. - #[cfg(target_os = "macos")] - getentropy_fill_bytes(v); - - // On Apple platforms, `CCRandomGenerateBytes` and `SecRandomCopyBytes` simply - // call into `CCRandomCopyBytes` with `kCCRandomDefault`. `CCRandomCopyBytes` - // manages a CSPRNG which is seeded from the kernel's CSPRNG and which runs on - // its own thread accessed via GCD. 
This seems needlessly heavyweight for our purposes - // so we only use it on non-Mac OSes where the better entrypoints are blocked. - // - // `CCRandomGenerateBytes` is used instead of `SecRandomCopyBytes` because the former is accessible - // via `libSystem` (libc) while the other needs to link to `Security.framework`. - // - // Note that while `getentropy` has a available attribute in the macOS headers, the lack - // of a header in the iOS (and others) SDK means that its can cause app store rejections. - // Just use `CCRandomGenerateBytes` instead. - #[cfg(not(target_os = "macos"))] - ccrandom_fill_bytes(v); + Ok(()) } } -#[cfg(any(target_os = "openbsd", target_os = "emscripten", target_os = "vita"))] +// On Apple platforms, `CCRandomGenerateBytes` and `SecRandomCopyBytes` simply +// call into `CCRandomCopyBytes` with `kCCRandomDefault`. `CCRandomCopyBytes` +// manages a CSPRNG which is seeded from the kernel's CSPRNG and which runs on +// its own thread accessed via GCD. This seems needlessly heavyweight for our purposes +// so we only use it when `getentropy` is blocked, which appears to be the case +// on all platforms except macOS (see #102643). +// +// `CCRandomGenerateBytes` is used instead of `SecRandomCopyBytes` because the former is accessible +// via `libSystem` (libc) while the other needs to link to `Security.framework`. +#[cfg(all(target_vendor = "apple", not(target_os = "macos")))] mod imp { - use crate::sys::os::errno; + use libc::size_t; - pub fn fill_bytes(v: &mut [u8]) { - // getentropy(2) permits a maximum buffer size of 256 bytes - for s in v.chunks_mut(256) { - let ret = unsafe { libc::getentropy(s.as_mut_ptr() as *mut libc::c_void, s.len()) }; - if ret == -1 { - panic!("unexpected getentropy error: {}", errno()); - } + use crate::ffi::{c_int, c_void}; + use crate::io::{Error, Result}; + + pub fn syscall(v: &mut [u8]) -> Result<()> { + extern "C" { + fn CCRandomGenerateBytes(bytes: *mut c_void, count: size_t) -> c_int; } + + let ret = unsafe { CCRandomGenerateBytes(v.as_mut_ptr().cast(), v.len()) }; + if ret != -1 { Ok(()) } else { Err(Error::last_os_error()) } } } // FIXME: once the 10.x release becomes the minimum, this can be dropped for simplification. #[cfg(all(target_os = "netbsd", not(netbsd10)))] mod imp { + use crate::io::{Error, Result}; use crate::ptr; - pub fn fill_bytes(v: &mut [u8]) { + pub fn syscall(v: &mut [u8]) -> Result<()> { let mib = [libc::CTL_KERN, libc::KERN_ARND]; // kern.arandom permits a maximum buffer size of 256 bytes for s in v.chunks_mut(256) { @@ -273,39 +246,30 @@ mod imp { 0, ) }; - if ret == -1 || s_len != s.len() { - panic!( - "kern.arandom sysctl failed! (returned {}, s.len() {}, oldlenp {})", - ret, - s.len(), - s_len - ); + if ret == -1 { + return Err(Error::last_os_error()); + } else if s_len != s.len() { + // FIXME(joboet): this can't actually happen, can it? + panic!("read less bytes than requested from kern.arandom"); } } + + Ok(()) } } #[cfg(target_os = "fuchsia")] mod imp { + use crate::io::Result; + #[link(name = "zircon")] extern "C" { fn zx_cprng_draw(buffer: *mut u8, len: usize); } - pub fn fill_bytes(v: &mut [u8]) { - unsafe { zx_cprng_draw(v.as_mut_ptr(), v.len()) } - } -} - -#[cfg(target_os = "redox")] -mod imp { - use crate::fs::File; - use crate::io::Read; - - pub fn fill_bytes(v: &mut [u8]) { - // Open rand:, read from it, and close it again. 
- let mut file = File::open("rand:").expect("failed to open rand:"); - file.read_exact(v).expect("failed to read rand:") + pub fn syscall(v: &mut [u8]) -> Result<()> { + unsafe { zx_cprng_draw(v.as_mut_ptr(), v.len()) }; + Ok(()) } } @@ -314,25 +278,25 @@ mod imp { use core::sync::atomic::AtomicBool; use core::sync::atomic::Ordering::Relaxed; - use crate::io; + use crate::io::{Error, Result}; - pub fn fill_bytes(v: &mut [u8]) { + pub fn syscall(v: &mut [u8]) -> Result<()> { static RNG_INIT: AtomicBool = AtomicBool::new(false); while !RNG_INIT.load(Relaxed) { let ret = unsafe { libc::randSecure() }; if ret < 0 { - panic!("couldn't generate random bytes: {}", io::Error::last_os_error()); + return Err(Error::last_os_error()); } else if ret > 0 { RNG_INIT.store(true, Relaxed); break; } + unsafe { libc::usleep(10) }; } + let ret = unsafe { libc::randABytes(v.as_mut_ptr() as *mut libc::c_uchar, v.len() as libc::c_int) }; - if ret < 0 { - panic!("couldn't generate random bytes: {}", io::Error::last_os_error()); - } + if ret >= 0 { Ok(()) } else { Err(Error::last_os_error()) } } } diff --git a/library/std/src/sys/pal/windows/alloc.rs b/library/std/src/sys/pal/windows/alloc.rs index 92b68b26032c6..2205885687dea 100644 --- a/library/std/src/sys/pal/windows/alloc.rs +++ b/library/std/src/sys/pal/windows/alloc.rs @@ -4,7 +4,7 @@ use crate::alloc::{GlobalAlloc, Layout, System}; use crate::ffi::c_void; use crate::ptr; use crate::sync::atomic::{AtomicPtr, Ordering}; -use crate::sys::c::{self, windows_targets}; +use crate::sys::c; use crate::sys::common::alloc::{realloc_fallback, MIN_ALIGN}; #[cfg(test)] diff --git a/library/std/src/sys/pal/windows/c.rs b/library/std/src/sys/pal/windows/c.rs index 08b75186aef90..2f5d75dc4bc23 100644 --- a/library/std/src/sys/pal/windows/c.rs +++ b/library/std/src/sys/pal/windows/c.rs @@ -8,8 +8,6 @@ use core::ffi::{c_uint, c_ulong, c_ushort, c_void, CStr}; use core::{mem, ptr}; -pub(super) mod windows_targets; - mod windows_sys; pub use windows_sys::*; diff --git a/library/std/src/sys/pal/windows/c/windows_sys.rs b/library/std/src/sys/pal/windows/c/windows_sys.rs index 9f22f54819509..529c96a0e1e6b 100644 --- a/library/std/src/sys/pal/windows/c/windows_sys.rs +++ b/library/std/src/sys/pal/windows/c/windows_sys.rs @@ -3317,4 +3317,3 @@ pub struct WSADATA { #[cfg(target_arch = "arm")] pub enum CONTEXT {} // ignore-tidy-filelength -use super::windows_targets; diff --git a/library/std/src/sys_common/wtf8.rs b/library/std/src/sys_common/wtf8.rs index 277c9506febbb..063451ad54e1c 100644 --- a/library/std/src/sys_common/wtf8.rs +++ b/library/std/src/sys_common/wtf8.rs @@ -19,12 +19,14 @@ mod tests; use core::char::{encode_utf16_raw, encode_utf8_raw}; +use core::clone::CloneToUninit; use core::str::next_code_point; use crate::borrow::Cow; use crate::collections::TryReserveError; use crate::hash::{Hash, Hasher}; use crate::iter::FusedIterator; +use crate::ptr::addr_of_mut; use crate::rc::Rc; use crate::sync::Arc; use crate::sys_common::AsInner; @@ -1046,3 +1048,13 @@ impl Hash for Wtf8 { 0xfeu8.hash(state) } } + +#[unstable(feature = "clone_to_uninit", issue = "126799")] +unsafe impl CloneToUninit for Wtf8 { + #[inline] + #[cfg_attr(debug_assertions, track_caller)] + unsafe fn clone_to_uninit(&self, dst: *mut Self) { + // SAFETY: we're just a wrapper around [u8] + unsafe { self.bytes.clone_to_uninit(addr_of_mut!((*dst).bytes)) } + } +} diff --git a/library/std/src/thread/mod.rs b/library/std/src/thread/mod.rs index 59720f77465e1..e29c28f3c7ec2 100644 --- 
a/library/std/src/thread/mod.rs +++ b/library/std/src/thread/mod.rs @@ -412,7 +412,6 @@ impl Builder { /// # Examples /// /// ``` - /// #![feature(thread_spawn_unchecked)] /// use std::thread; /// /// let builder = thread::Builder::new(); @@ -433,26 +432,25 @@ impl Builder { /// ``` /// /// [`io::Result`]: crate::io::Result - #[unstable(feature = "thread_spawn_unchecked", issue = "55132")] - pub unsafe fn spawn_unchecked<'a, F, T>(self, f: F) -> io::Result> + #[stable(feature = "thread_spawn_unchecked", since = "CURRENT_RUSTC_VERSION")] + pub unsafe fn spawn_unchecked(self, f: F) -> io::Result> where F: FnOnce() -> T, - F: Send + 'a, - T: Send + 'a, + F: Send, + T: Send, { Ok(JoinHandle(unsafe { self.spawn_unchecked_(f, None) }?)) } - unsafe fn spawn_unchecked_<'a, 'scope, F, T>( + unsafe fn spawn_unchecked_<'scope, F, T>( self, f: F, scope_data: Option>, ) -> io::Result> where F: FnOnce() -> T, - F: Send + 'a, - T: Send + 'a, - 'scope: 'a, + F: Send, + T: Send, { let Builder { name, stack_size } = self; @@ -532,7 +530,7 @@ impl Builder { // will call `decrement_num_running_threads` and therefore signal that this thread is // done. drop(their_packet); - // Here, the lifetime `'a` and even `'scope` can end. `main` keeps running for a bit + // Here, the lifetime `'scope` can end. `main` keeps running for a bit // after that before returning itself. }; diff --git a/library/test/src/types.rs b/library/test/src/types.rs index c3be3466cb928..802cab989c6a9 100644 --- a/library/test/src/types.rs +++ b/library/test/src/types.rs @@ -250,3 +250,37 @@ pub struct TestDescAndFn { pub desc: TestDesc, pub testfn: TestFn, } + +impl TestDescAndFn { + pub const fn new_doctest( + test_name: &'static str, + ignore: bool, + source_file: &'static str, + start_line: usize, + no_run: bool, + should_panic: bool, + testfn: TestFn, + ) -> Self { + Self { + desc: TestDesc { + name: StaticTestName(test_name), + ignore, + ignore_message: None, + source_file, + start_line, + start_col: 0, + end_line: 0, + end_col: 0, + compile_fail: false, + no_run, + should_panic: if should_panic { + options::ShouldPanic::Yes + } else { + options::ShouldPanic::No + }, + test_type: TestType::DocTest, + }, + testfn, + } + } +} diff --git a/library/windows_targets/Cargo.toml b/library/windows_targets/Cargo.toml new file mode 100644 index 0000000000000..94d7c8210647c --- /dev/null +++ b/library/windows_targets/Cargo.toml @@ -0,0 +1,10 @@ +[package] +name = "windows-targets" +description = "A drop-in replacement for the real windows-targets crate for use in std only." +version = "0.0.0" +edition = "2021" + +[features] +# Enable using raw-dylib for Windows imports. +# This will eventually be the default. +windows_raw_dylib = [] diff --git a/library/std/src/sys/pal/windows/c/windows_targets.rs b/library/windows_targets/src/lib.rs similarity index 95% rename from library/std/src/sys/pal/windows/c/windows_targets.rs rename to library/windows_targets/src/lib.rs index 252bceb70942b..1965b6cf4ce8f 100644 --- a/library/std/src/sys/pal/windows/c/windows_targets.rs +++ b/library/windows_targets/src/lib.rs @@ -2,6 +2,10 @@ //! //! This is a simple wrapper around an `extern` block with a `#[link]` attribute. //! It's very roughly equivalent to the windows-targets crate. 
+#![no_std] +#![no_core] +#![feature(decl_macro)] +#![feature(no_core)] #[cfg(feature = "windows_raw_dylib")] pub macro link { diff --git a/src/bootstrap/src/bin/main.rs b/src/bootstrap/src/bin/main.rs index dc8b5487a61e8..f03f03e2d9398 100644 --- a/src/bootstrap/src/bin/main.rs +++ b/src/bootstrap/src/bin/main.rs @@ -11,13 +11,19 @@ use std::str::FromStr; use std::{env, process}; use bootstrap::{ - find_recent_config_change_ids, human_readable_changes, t, Build, Config, Subcommand, + find_recent_config_change_ids, human_readable_changes, t, Build, Config, Flags, Subcommand, CONFIG_CHANGE_HISTORY, }; fn main() { let args = env::args().skip(1).collect::>(); - let config = Config::parse(&args); + + if Flags::try_parse_verbose_help(&args) { + return; + } + + let flags = Flags::parse(&args); + let config = Config::parse(flags); let mut build_lock; let _build_lock_guard; diff --git a/src/bootstrap/src/bin/rustc.rs b/src/bootstrap/src/bin/rustc.rs index 011c289d932db..d04e2fbeb7853 100644 --- a/src/bootstrap/src/bin/rustc.rs +++ b/src/bootstrap/src/bin/rustc.rs @@ -89,6 +89,25 @@ fn main() { rustc_real }; + // Get the name of the crate we're compiling, if any. + let crate_name = parse_value_from_args(&orig_args, "--crate-name"); + + // When statically linking `std` into `rustc_driver`, remove `-C prefer-dynamic` + if env::var("RUSTC_LINK_STD_INTO_RUSTC_DRIVER").unwrap() == "1" + && crate_name == Some("rustc_driver") + && stage != "0" + { + if let Some(pos) = args.iter().enumerate().position(|(i, a)| { + a == "-C" && args.get(i + 1).map(|a| a == "prefer-dynamic").unwrap_or(false) + }) { + args.remove(pos); + args.remove(pos); + } + if let Some(pos) = args.iter().position(|a| a == "-Cprefer-dynamic") { + args.remove(pos); + } + } + let mut cmd = match env::var_os("RUSTC_WRAPPER_REAL") { Some(wrapper) if !wrapper.is_empty() => { let mut cmd = Command::new(wrapper); @@ -99,9 +118,6 @@ fn main() { }; cmd.args(&args).env(dylib_path_var(), env::join_paths(&dylib_path).unwrap()); - // Get the name of the crate we're compiling, if any. - let crate_name = parse_value_from_args(&orig_args, "--crate-name"); - if let Some(crate_name) = crate_name { if let Some(target) = env::var_os("RUSTC_TIME") { if target == "all" diff --git a/src/bootstrap/src/core/build_steps/clean.rs b/src/bootstrap/src/core/build_steps/clean.rs index 4931036718227..f608e5d715e49 100644 --- a/src/bootstrap/src/core/build_steps/clean.rs +++ b/src/bootstrap/src/core/build_steps/clean.rs @@ -121,7 +121,7 @@ fn clean(build: &Build, all: bool, stage: Option) { fn clean_specific_stage(build: &Build, stage: u32) { for host in &build.hosts { - let entries = match build.out.join(host.triple).read_dir() { + let entries = match build.out.join(host).read_dir() { Ok(iter) => iter, Err(_) => continue, }; @@ -148,7 +148,7 @@ fn clean_default(build: &Build) { rm_rf(&build.out.join("bootstrap-shims-dump")); rm_rf(&build.out.join("rustfmt.stamp")); - let mut hosts: Vec<_> = build.hosts.iter().map(|t| build.out.join(t.triple)).collect(); + let mut hosts: Vec<_> = build.hosts.iter().map(|t| build.out.join(t)).collect(); // After cross-compilation, artifacts of the host architecture (which may differ from build.host) // might not get removed. // Adding its path (linked one for easier accessibility) will solve this problem. 
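The `build.out.join(host)` and `build.out.join(t)` calls in this hunk (and the many similar ones in the files that follow) rely on the `AsRef<Path>` impl for `TargetSelection` added later in `config.rs`. A minimal sketch of that pattern with a stand-in `Target` type, assuming only the standard library:

use std::path::{Path, PathBuf};

// Stand-in for bootstrap's `TargetSelection`; only the part relevant to path
// joining is reproduced here.
struct Target {
    triple: String,
}

impl AsRef<Path> for Target {
    fn as_ref(&self) -> &Path {
        self.triple.as_ref()
    }
}

fn main() {
    let out = PathBuf::from("build");
    let host = Target { triple: "x86_64-unknown-linux-gnu".to_string() };
    // Call sites previously wrote `out.join(&host.triple)`; with the impl in
    // place the value can be joined directly.
    let stage0_bin = out.join(&host).join("stage0").join("bin");
    println!("{}", stage0_bin.display());
}

Dropping the explicit `.triple` at every call site is exactly the mechanical change made throughout the bootstrap hunks.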
diff --git a/src/bootstrap/src/core/build_steps/compile.rs b/src/bootstrap/src/core/build_steps/compile.rs index 7cf804528a4b3..af3e71b005fa7 100644 --- a/src/bootstrap/src/core/build_steps/compile.rs +++ b/src/bootstrap/src/core/build_steps/compile.rs @@ -246,7 +246,7 @@ impl Step for Std { .rustc_snapshot_sysroot() .join("lib") .join("rustlib") - .join(compiler.host.triple) + .join(compiler.host) .join("bin"); if src_sysroot_bin.exists() { let target_sysroot_bin = @@ -432,7 +432,7 @@ fn copy_self_contained_objects( DependencyType::TargetSelfContained, ); } - } else if target.ends_with("windows-gnu") { + } else if target.is_windows_gnu() { for obj in ["crt2.o", "dllcrt2.o"].iter() { let src = compiler_file(builder, &builder.cc(target), target, CLang::C, obj); let target = libdir_self_contained.join(obj); @@ -651,8 +651,8 @@ impl Step for StdLink { compiler: self.compiler, force_recompile: self.force_recompile, }); - let libdir = sysroot.join(lib).join("rustlib").join(target.triple).join("lib"); - let hostdir = sysroot.join(lib).join("rustlib").join(compiler.host.triple).join("lib"); + let libdir = sysroot.join(lib).join("rustlib").join(target).join("lib"); + let hostdir = sysroot.join(lib).join("rustlib").join(compiler.host).join("lib"); (libdir, hostdir) } else { let libdir = builder.sysroot_libdir(target_compiler, target); @@ -670,12 +670,12 @@ impl Step for StdLink { .build .config .initial_rustc - .starts_with(builder.out.join(compiler.host.triple).join("stage0/bin")) + .starts_with(builder.out.join(compiler.host).join("stage0/bin")) { // Copy bin files from stage0/bin to stage0-sysroot/bin - let sysroot = builder.out.join(compiler.host.triple).join("stage0-sysroot"); + let sysroot = builder.out.join(compiler.host).join("stage0-sysroot"); - let host = compiler.host.triple; + let host = compiler.host; let stage0_bin_dir = builder.out.join(host).join("stage0/bin"); let sysroot_bin_dir = sysroot.join("bin"); t!(fs::create_dir_all(&sysroot_bin_dir)); @@ -793,7 +793,7 @@ impl Step for StartupObjects { fn run(self, builder: &Builder<'_>) -> Vec<(PathBuf, DependencyType)> { let for_compiler = self.compiler; let target = self.target; - if !target.ends_with("windows-gnu") { + if !target.is_windows_gnu() { return vec![]; } @@ -1558,7 +1558,7 @@ impl Step for Sysroot { /// For all other stages, it's the same stage directory that the compiler lives in. 
fn run(self, builder: &Builder<'_>) -> PathBuf { let compiler = self.compiler; - let host_dir = builder.out.join(compiler.host.triple); + let host_dir = builder.out.join(compiler.host); let sysroot_dir = |stage| { if stage == 0 { diff --git a/src/bootstrap/src/core/build_steps/dist.rs b/src/bootstrap/src/core/build_steps/dist.rs index 59359bf80de67..74fd83a8bd638 100644 --- a/src/bootstrap/src/core/build_steps/dist.rs +++ b/src/bootstrap/src/core/build_steps/dist.rs @@ -275,12 +275,8 @@ fn make_win_dist( } //Copy platform tools to platform-specific bin directory - let target_bin_dir = plat_root - .join("lib") - .join("rustlib") - .join(target.triple) - .join("bin") - .join("self-contained"); + let target_bin_dir = + plat_root.join("lib").join("rustlib").join(target).join("bin").join("self-contained"); fs::create_dir_all(&target_bin_dir).expect("creating target_bin_dir failed"); for src in target_tools { builder.copy_link_to_folder(&src, &target_bin_dir); @@ -295,12 +291,8 @@ fn make_win_dist( ); //Copy platform libs to platform-specific lib directory - let target_lib_dir = plat_root - .join("lib") - .join("rustlib") - .join(target.triple) - .join("lib") - .join("self-contained"); + let target_lib_dir = + plat_root.join("lib").join("rustlib").join(target).join("lib").join("self-contained"); fs::create_dir_all(&target_lib_dir).expect("creating target_lib_dir failed"); for src in target_libs { builder.copy_link_to_folder(&src, &target_lib_dir); @@ -450,7 +442,7 @@ impl Step for Rustc { // component for now. maybe_install_llvm_runtime(builder, host, image); - let dst_dir = image.join("lib/rustlib").join(&*host.triple).join("bin"); + let dst_dir = image.join("lib/rustlib").join(host).join("bin"); t!(fs::create_dir_all(&dst_dir)); // Copy over lld if it's there @@ -607,7 +599,7 @@ fn verify_uefi_rlib_format(builder: &Builder<'_>, target: TargetSelection, stamp /// Copy stamped files into an image's `target/lib` directory. 
fn copy_target_libs(builder: &Builder<'_>, target: TargetSelection, image: &Path, stamp: &Path) { - let dst = image.join("lib/rustlib").join(target.triple).join("lib"); + let dst = image.join("lib/rustlib").join(target).join("lib"); let self_contained_dst = dst.join("self-contained"); t!(fs::create_dir_all(&dst)); t!(fs::create_dir_all(&self_contained_dst)); @@ -769,7 +761,7 @@ impl Step for Analysis { let src = builder .stage_out(compiler, Mode::Std) - .join(target.triple) + .join(target) .join(builder.cargo_dir()) .join("deps") .join("save-analysis"); @@ -1509,7 +1501,7 @@ impl Step for Extended { tarballs.push(builder.ensure(Rustc { compiler: builder.compiler(stage, target) })); tarballs.push(builder.ensure(Std { compiler, target }).expect("missing std")); - if target.ends_with("windows-gnu") { + if target.is_windows_gnu() { tarballs.push(builder.ensure(Mingw { host: target }).expect("missing mingw")); } @@ -1684,7 +1676,7 @@ impl Step for Extended { prepare(tool); } } - if target.ends_with("windows-gnu") { + if target.is_windows_gnu() { prepare("rust-mingw"); } @@ -1831,7 +1823,7 @@ impl Step for Extended { .arg("-t") .arg(etc.join("msi/remove-duplicates.xsl")) .run(builder); - if target.ends_with("windows-gnu") { + if target.is_windows_gnu() { command(&heat) .current_dir(&exe) .arg("dir") @@ -1877,7 +1869,7 @@ impl Step for Extended { if built_tools.contains("miri") { cmd.arg("-dMiriDir=miri"); } - if target.ends_with("windows-gnu") { + if target.is_windows_gnu() { cmd.arg("-dGccDir=rust-mingw"); } cmd.run(builder); @@ -1902,7 +1894,7 @@ impl Step for Extended { } candle("AnalysisGroup.wxs".as_ref()); - if target.ends_with("windows-gnu") { + if target.is_windows_gnu() { candle("GccGroup.wxs".as_ref()); } @@ -1942,7 +1934,7 @@ impl Step for Extended { cmd.arg("DocsGroup.wixobj"); } - if target.ends_with("windows-gnu") { + if target.is_windows_gnu() { cmd.arg("GccGroup.wixobj"); } // ICE57 wrongly complains about the shortcuts @@ -1974,7 +1966,7 @@ fn add_env(builder: &Builder<'_>, cmd: &mut BootstrapCommand, target: TargetSele if target.contains("windows-gnullvm") { cmd.env("CFG_MINGW", "1").env("CFG_ABI", "LLVM"); - } else if target.contains("windows-gnu") { + } else if target.is_windows_gnu() { cmd.env("CFG_MINGW", "1").env("CFG_ABI", "GNU"); } else { cmd.env("CFG_MINGW", "0").env("CFG_ABI", "MSVC"); @@ -2088,7 +2080,7 @@ fn maybe_install_llvm( /// Maybe add libLLVM.so to the target lib-dir for linking. pub fn maybe_install_llvm_target(builder: &Builder<'_>, target: TargetSelection, sysroot: &Path) { - let dst_libdir = sysroot.join("lib/rustlib").join(&*target.triple).join("lib"); + let dst_libdir = sysroot.join("lib/rustlib").join(target).join("lib"); // We do not need to copy LLVM files into the sysroot if it is not // dynamically linked; it is already included into librustc_llvm // statically. 
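Several hunks in `compile.rs` and `dist.rs` swap the repeated `ends_with("windows-gnu")` string check for the new `is_windows_gnu()` helper defined in `config.rs`. A small sketch of the same refactor with a stand-in type; the `needs_mingw_component` call site is hypothetical and only shows how the predicate reads:

// Stand-in for `TargetSelection`; the real type keeps the triple interned and
// forwards `ends_with` to it.
struct Target {
    triple: String,
}

impl Target {
    fn is_windows_gnu(&self) -> bool {
        self.triple.ends_with("windows-gnu")
    }
}

// Hypothetical call site: the check reads as intent instead of a string match.
fn needs_mingw_component(target: &Target) -> bool {
    target.is_windows_gnu()
}

fn main() {
    let gnu = Target { triple: "x86_64-pc-windows-gnu".to_string() };
    let msvc = Target { triple: "x86_64-pc-windows-msvc".to_string() };
    assert!(needs_mingw_component(&gnu));
    assert!(!needs_mingw_component(&msvc));
    println!("gnu: {}, msvc: {}", gnu.is_windows_gnu(), msvc.is_windows_gnu());
}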
diff --git a/src/bootstrap/src/core/build_steps/doc.rs b/src/bootstrap/src/core/build_steps/doc.rs index 2cd5db706c26a..301633559fe71 100644 --- a/src/bootstrap/src/core/build_steps/doc.rs +++ b/src/bootstrap/src/core/build_steps/doc.rs @@ -699,13 +699,12 @@ fn doc_std( let compiler = builder.compiler(stage, builder.config.build); let target_doc_dir_name = if format == DocumentationFormat::Json { "json-doc" } else { "doc" }; - let target_dir = - builder.stage_out(compiler, Mode::Std).join(target.triple).join(target_doc_dir_name); + let target_dir = builder.stage_out(compiler, Mode::Std).join(target).join(target_doc_dir_name); // This is directory where the compiler will place the output of the command. // We will then copy the files from this directory into the final `out` directory, the specified // as a function parameter. - let out_dir = target_dir.join(target.triple).join("doc"); + let out_dir = target_dir.join(target).join("doc"); let mut cargo = builder::Cargo::new(builder, compiler, Mode::Std, SourceType::InTree, target, Kind::Doc); @@ -846,7 +845,7 @@ impl Step for Rustc { let mut to_open = None; - let out_dir = builder.stage_out(compiler, Mode::Rustc).join(target.triple).join("doc"); + let out_dir = builder.stage_out(compiler, Mode::Rustc).join(target).join("doc"); for krate in &*self.crates { // Create all crate output directories first to make sure rustdoc uses // relative links. @@ -992,7 +991,7 @@ macro_rules! tool_doc { // see https://github.com/rust-lang/rust/pull/122066#issuecomment-1983049222 // cargo.rustdocflag("--generate-link-to-definition"); - let out_dir = builder.stage_out(compiler, Mode::ToolRustc).join(target.triple).join("doc"); + let out_dir = builder.stage_out(compiler, Mode::ToolRustc).join(target).join("doc"); $(for krate in $crates { let dir_name = krate.replace("-", "_"); t!(fs::create_dir_all(out_dir.join(&*dir_name))); diff --git a/src/bootstrap/src/core/build_steps/llvm.rs b/src/bootstrap/src/core/build_steps/llvm.rs index 0b755d1753c54..0376b183a1c07 100644 --- a/src/bootstrap/src/core/build_steps/llvm.rs +++ b/src/bootstrap/src/core/build_steps/llvm.rs @@ -194,6 +194,7 @@ pub(crate) fn is_ci_llvm_available(config: &Config, asserts: bool) -> bool { let supported_platforms = [ // tier 1 ("aarch64-unknown-linux-gnu", false), + ("aarch64-apple-darwin", false), ("i686-pc-windows-gnu", false), ("i686-pc-windows-msvc", false), ("i686-unknown-linux-gnu", false), @@ -202,7 +203,6 @@ pub(crate) fn is_ci_llvm_available(config: &Config, asserts: bool) -> bool { ("x86_64-pc-windows-gnu", true), ("x86_64-pc-windows-msvc", true), // tier 2 with host tools - ("aarch64-apple-darwin", false), ("aarch64-pc-windows-msvc", false), ("aarch64-unknown-linux-musl", false), ("arm-unknown-linux-gnueabi", false), diff --git a/src/bootstrap/src/core/build_steps/test.rs b/src/bootstrap/src/core/build_steps/test.rs index 597d7733abe75..cc01afd4c18c6 100644 --- a/src/bootstrap/src/core/build_steps/test.rs +++ b/src/bootstrap/src/core/build_steps/test.rs @@ -149,7 +149,7 @@ You can skip linkcheck with --skip src/tools/linkchecker" let _guard = builder.msg(Kind::Test, compiler.stage, "Linkcheck", bootstrap_host, bootstrap_host); let _time = helpers::timeit(builder); - linkchecker.delay_failure().arg(builder.out.join(host.triple).join("doc")).run(builder); + linkchecker.delay_failure().arg(builder.out.join(host).join("doc")).run(builder); } fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { @@ -434,8 +434,8 @@ impl Miri { builder: &Builder<'_>, compiler: Compiler, target: 
TargetSelection, - ) -> String { - let miri_sysroot = builder.out.join(compiler.host.triple).join("miri-sysroot"); + ) -> PathBuf { + let miri_sysroot = builder.out.join(compiler.host).join("miri-sysroot"); let mut cargo = builder::Cargo::new( builder, compiler, @@ -467,7 +467,7 @@ impl Miri { // Output is "\n". let sysroot = stdout.trim_end(); builder.verbose(|| println!("`cargo miri setup --print-sysroot` said: {sysroot:?}")); - sysroot.to_owned() + PathBuf::from(sysroot) } } @@ -520,12 +520,14 @@ impl Step for Miri { builder.ensure(compile::Std::new(target_compiler, host)); let host_sysroot = builder.sysroot(target_compiler); - // Miri has its own "target dir" for ui test dependencies. Make sure it gets cleared - // properly when rustc changes. Similar to `Builder::cargo`, we skip this in dry runs to - // make sure the relevant compiler has been set up properly. + // Miri has its own "target dir" for ui test dependencies. Make sure it gets cleared when + // the sysroot gets rebuilt, to avoid "found possibly newer version of crate `std`" errors. if !builder.config.dry_run() { let ui_test_dep_dir = builder.stage_out(host_compiler, Mode::ToolStd).join("miri_ui"); - builder.clear_if_dirty(&ui_test_dep_dir, &builder.rustc(host_compiler)); + // The mtime of `miri_sysroot` changes when the sysroot gets rebuilt (also see + // ). + // We can hence use that directly as a signal to clear the ui test dir. + builder.clear_if_dirty(&ui_test_dep_dir, &miri_sysroot); } // Run `cargo test`. @@ -1113,7 +1115,7 @@ HELP: to skip test's attempt to check tidiness, pass `--skip src/tools/tidy` to } fn testdir(builder: &Builder<'_>, host: TargetSelection) -> PathBuf { - builder.out.join(host.triple).join("test") + builder.out.join(host).join("test") } macro_rules! default_test { @@ -1815,7 +1817,7 @@ NOTE: if you're sure you want to do this, please open an issue as to why. In the let mut flags = if is_rustdoc { Vec::new() } else { vec!["-Crpath".to_string()] }; flags.push(format!("-Cdebuginfo={}", builder.config.rust_debuginfo_level_tests)); - flags.extend(builder.config.cmd.rustc_args().iter().map(|s| s.to_string())); + flags.extend(builder.config.cmd.compiletest_rustc_args().iter().map(|s| s.to_string())); if suite != "mir-opt" { if let Some(linker) = builder.linker(target) { @@ -2683,7 +2685,7 @@ impl Step for Crate { if builder.download_rustc() && compiler.stage > 0 { let sysroot = builder .out - .join(compiler.host.triple) + .join(compiler.host) .join(format!("stage{}-test-sysroot", compiler.stage)); cargo.env("RUSTC_SYSROOT", sysroot); } diff --git a/src/bootstrap/src/core/builder.rs b/src/bootstrap/src/core/builder.rs index 71fddefb4eb1f..522c14fc15649 100644 --- a/src/bootstrap/src/core/builder.rs +++ b/src/bootstrap/src/core/builder.rs @@ -1107,6 +1107,12 @@ impl<'a> Builder<'a> { StepDescription::run(v, self, paths); } + /// Returns if `std` should be statically linked into `rustc_driver`. + /// It's currently not done on `windows-gnu` due to linker bugs. + pub fn link_std_into_rustc_driver(&self, target: TargetSelection) -> bool { + !target.triple.ends_with("-windows-gnu") + } + /// Obtain a compiler at a given stage and for a given host (i.e., this is the target that the /// compiler will run on, *not* the target it will build code for). 
Explicitly does not take /// `Compiler` since all `Compiler` instances are meant to be obtained through this function, @@ -1166,7 +1172,7 @@ impl<'a> Builder<'a> { .sysroot(self.compiler) .join(lib) .join("rustlib") - .join(self.target.triple) + .join(self.target) .join("lib"); // Avoid deleting the rustlib/ directory we just copied // (in `impl Step for Sysroot`). @@ -1249,7 +1255,7 @@ impl<'a> Builder<'a> { // Ensure that the downloaded LLVM libraries can be found. if self.config.llvm_from_ci { - let ci_llvm_lib = self.out.join(&*compiler.host.triple).join("ci-llvm").join("lib"); + let ci_llvm_lib = self.out.join(compiler.host).join("ci-llvm").join("lib"); dylib_dirs.push(ci_llvm_lib); } @@ -1499,9 +1505,9 @@ impl<'a> Builder<'a> { Mode::Rustc | Mode::ToolRustc => self.compiler_doc_out(target), Mode::Std => { if self.config.cmd.json() { - out_dir.join(target.triple).join("json-doc") + out_dir.join(target).join("json-doc") } else { - out_dir.join(target.triple).join("doc") + out_dir.join(target).join("doc") } } _ => panic!("doc mode {mode:?} not expected"), @@ -2169,10 +2175,18 @@ impl<'a> Builder<'a> { // When we build Rust dylibs they're all intended for intermediate // usage, so make sure we pass the -Cprefer-dynamic flag instead of // linking all deps statically into the dylib. - if matches!(mode, Mode::Std | Mode::Rustc) { + if matches!(mode, Mode::Std) { + rustflags.arg("-Cprefer-dynamic"); + } + if matches!(mode, Mode::Rustc) && !self.link_std_into_rustc_driver(target) { rustflags.arg("-Cprefer-dynamic"); } + cargo.env( + "RUSTC_LINK_STD_INTO_RUSTC_DRIVER", + if self.link_std_into_rustc_driver(target) { "1" } else { "0" }, + ); + // When building incrementally we default to a lower ThinLTO import limit // (unless explicitly specified otherwise). This will produce a somewhat // slower code but give way better compile times. @@ -2219,11 +2233,6 @@ impl<'a> Builder<'a> { rustdocflags.arg("--cfg=parallel_compiler"); } - // Pass the value of `--rustc-args` from test command. If it's not a test command, this won't set anything. 
- self.config.cmd.rustc_args().iter().for_each(|v| { - rustflags.arg(v); - }); - Cargo { command: cargo, compiler, diff --git a/src/bootstrap/src/core/builder/tests.rs b/src/bootstrap/src/core/builder/tests.rs index f19a4dd6d490f..c554684d5a75b 100644 --- a/src/bootstrap/src/core/builder/tests.rs +++ b/src/bootstrap/src/core/builder/tests.rs @@ -3,13 +3,14 @@ use std::thread; use super::*; use crate::core::build_steps::doc::DocumentationFormat; use crate::core::config::Config; +use crate::Flags; fn configure(cmd: &str, host: &[&str], target: &[&str]) -> Config { configure_with_args(&[cmd.to_owned()], host, target) } fn configure_with_args(cmd: &[String], host: &[&str], target: &[&str]) -> Config { - let mut config = Config::parse(cmd); + let mut config = Config::parse(Flags::parse(cmd)); // don't save toolstates config.save_toolstates = None; config.dry_run = DryRun::SelfCheck; @@ -23,7 +24,7 @@ fn configure_with_args(cmd: &[String], host: &[&str], target: &[&str]) -> Config let submodule_build = Build::new(Config { // don't include LLVM, so CI doesn't require ninja/cmake to be installed rust_codegen_backends: vec![], - ..Config::parse(&["check".to_owned()]) + ..Config::parse(Flags::parse(&["check".to_owned()])) }); submodule_build.require_submodule("src/doc/book", None); config.submodules = Some(false); @@ -632,7 +633,7 @@ mod dist { config.paths = vec!["library/std".into()]; config.cmd = Subcommand::Test { test_args: vec![], - rustc_args: vec![], + compiletest_rustc_args: vec![], no_fail_fast: false, no_doc: true, doc: false, @@ -703,7 +704,7 @@ mod dist { let mut config = configure(&["A-A"], &["A-A"]); config.cmd = Subcommand::Test { test_args: vec![], - rustc_args: vec![], + compiletest_rustc_args: vec![], no_fail_fast: false, doc: true, no_doc: false, diff --git a/src/bootstrap/src/core/config/config.rs b/src/bootstrap/src/core/config/config.rs index fa9d36e9672e2..ba2cb57a7849a 100644 --- a/src/bootstrap/src/core/config/config.rs +++ b/src/bootstrap/src/core/config/config.rs @@ -36,6 +36,14 @@ macro_rules! check_ci_llvm { }; } +/// This file is embedded in the overlay directory of the tarball sources. It is +/// useful in scenarios where developers want to see how the tarball sources were +/// generated. +/// +/// We also use this file to compare the host's config.toml against the CI rustc builder +/// configuration to detect any incompatible options. +pub(crate) const BUILDER_CONFIG_FILENAME: &str = "builder-config"; + #[derive(Clone, Default)] pub enum DryRun { /// This isn't a dry run. @@ -47,7 +55,7 @@ pub enum DryRun { UserSelected, } -#[derive(Copy, Clone, Default, PartialEq, Eq)] +#[derive(Copy, Clone, Default, Debug, Eq, PartialEq)] pub enum DebuginfoLevel { #[default] None, @@ -117,7 +125,7 @@ impl Display for DebuginfoLevel { /// 2) MSVC /// - Self-contained: `-Clinker=` /// - External: `-Clinker=lld` -#[derive(Default, Copy, Clone)] +#[derive(Copy, Clone, Default, Debug, PartialEq)] pub enum LldMode { /// Do not use LLD #[default] @@ -515,6 +523,10 @@ impl TargetSelection { self.contains("windows") } + pub fn is_windows_gnu(&self) -> bool { + self.ends_with("windows-gnu") + } + /// Path to the file defining the custom target, if any. pub fn filepath(&self) -> Option<&Path> { self.file.as_ref().map(Path::new) @@ -543,6 +555,14 @@ impl PartialEq<&str> for TargetSelection { } } +// Targets are often used as directory names throughout bootstrap. +// This impl makes it more ergonomics to use them as such. 
+impl AsRef for TargetSelection { + fn as_ref(&self) -> &Path { + self.triple.as_ref() + } +} + /// Per-target configuration stored in the global configuration structure. #[derive(Debug, Default, Clone, PartialEq, Eq)] pub struct Target { @@ -1193,41 +1213,42 @@ impl Config { } } - pub fn parse(args: &[String]) -> Config { - #[cfg(test)] - fn get_toml(_: &Path) -> TomlConfig { - TomlConfig::default() - } + #[cfg(test)] + fn get_toml(_: &Path) -> Result { + Ok(TomlConfig::default()) + } - #[cfg(not(test))] - fn get_toml(file: &Path) -> TomlConfig { - let contents = - t!(fs::read_to_string(file), format!("config file {} not found", file.display())); - // Deserialize to Value and then TomlConfig to prevent the Deserialize impl of - // TomlConfig and sub types to be monomorphized 5x by toml. - toml::from_str(&contents) - .and_then(|table: toml::Value| TomlConfig::deserialize(table)) - .unwrap_or_else(|err| { - if let Ok(Some(changes)) = toml::from_str(&contents) - .and_then(|table: toml::Value| ChangeIdWrapper::deserialize(table)).map(|change_id| change_id.inner.map(crate::find_recent_config_change_ids)) - { - if !changes.is_empty() { - println!( - "WARNING: There have been changes to x.py since you last updated:\n{}", - crate::human_readable_changes(&changes) - ); - } + #[cfg(not(test))] + fn get_toml(file: &Path) -> Result { + let contents = + t!(fs::read_to_string(file), format!("config file {} not found", file.display())); + // Deserialize to Value and then TomlConfig to prevent the Deserialize impl of + // TomlConfig and sub types to be monomorphized 5x by toml. + toml::from_str(&contents) + .and_then(|table: toml::Value| TomlConfig::deserialize(table)) + .inspect_err(|_| { + if let Ok(Some(changes)) = toml::from_str(&contents) + .and_then(|table: toml::Value| ChangeIdWrapper::deserialize(table)) + .map(|change_id| change_id.inner.map(crate::find_recent_config_change_ids)) + { + if !changes.is_empty() { + println!( + "WARNING: There have been changes to x.py since you last updated:\n{}", + crate::human_readable_changes(&changes) + ); } + } + }) + } - eprintln!("failed to parse TOML configuration '{}': {err}", file.display()); - exit!(2); - }) - } - Self::parse_inner(args, get_toml) + pub fn parse(flags: Flags) -> Config { + Self::parse_inner(flags, Self::get_toml) } - pub(crate) fn parse_inner(args: &[String], get_toml: impl Fn(&Path) -> TomlConfig) -> Config { - let mut flags = Flags::parse(args); + pub(crate) fn parse_inner( + mut flags: Flags, + get_toml: impl Fn(&Path) -> Result, + ) -> Config { let mut config = Config::default_opts(); // Set flags. 
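The `Config::parse` refactor above takes an already-parsed `Flags` value and makes the TOML loader a fallible, injectable function, which is what lets the tests further down pass `|&_| toml::from_str(...)` directly. A rough sketch of that injection pattern with simplified stand-in types; bootstrap's real loader returns a `toml::de::Error` rather than the `String` used here:

use std::path::Path;

// Simplified stand-in: bootstrap's `TomlConfig` is much larger and its loader
// is generated differently for tests and for real builds.
struct TomlConfig {
    change_id: Option<usize>,
}

fn parse_inner(
    path: &Path,
    get_toml: impl Fn(&Path) -> Result<TomlConfig, String>,
) -> TomlConfig {
    get_toml(path).unwrap_or_else(|e| {
        eprintln!("ERROR: Failed to parse '{}': {e}", path.display());
        std::process::exit(2);
    })
}

fn main() {
    // Production code would read and deserialize the file; a test can inject a
    // canned value without touching the filesystem.
    let config = parse_inner(Path::new("/does/not/exist"), |_| {
        Ok(TomlConfig { change_id: Some(1) })
    });
    println!("change_id = {:?}", config.change_id);
}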
@@ -1335,7 +1356,10 @@ impl Config { } else { toml_path.clone() }); - get_toml(&toml_path) + get_toml(&toml_path).unwrap_or_else(|e| { + eprintln!("ERROR: Failed to parse '{}': {e}", toml_path.display()); + exit!(2); + }) } else { config.config = None; TomlConfig::default() @@ -1366,7 +1390,13 @@ impl Config { include_path.push("bootstrap"); include_path.push("defaults"); include_path.push(format!("config.{include}.toml")); - let included_toml = get_toml(&include_path); + let included_toml = get_toml(&include_path).unwrap_or_else(|e| { + eprintln!( + "ERROR: Failed to parse default config profile at '{}': {e}", + include_path.display() + ); + exit!(2); + }); toml.merge(included_toml, ReplaceOpt::IgnoreDuplicate); } @@ -1472,7 +1502,7 @@ impl Config { config.download_beta_toolchain(); config .out - .join(config.build.triple) + .join(config.build) .join("stage0") .join("bin") .join(exe("rustc", config.build)) @@ -1487,7 +1517,7 @@ impl Config { config.download_beta_toolchain(); config .out - .join(config.build.triple) + .join(config.build) .join("stage0") .join("bin") .join(exe("cargo", config.build)) @@ -1583,24 +1613,6 @@ impl Config { let mut is_user_configured_rust_channel = false; if let Some(rust) = toml.rust { - if let Some(commit) = config.download_ci_rustc_commit(rust.download_rustc.clone()) { - // Primarily used by CI runners to avoid handling download-rustc incompatible - // options one by one on shell scripts. - let disable_ci_rustc_if_incompatible = - env::var_os("DISABLE_CI_RUSTC_IF_INCOMPATIBLE") - .is_some_and(|s| s == "1" || s == "true"); - - if let Err(e) = check_incompatible_options_for_ci_rustc(&rust) { - if disable_ci_rustc_if_incompatible { - config.download_rustc_commit = None; - } else { - panic!("{}", e); - } - } else { - config.download_rustc_commit = Some(commit); - } - } - let Rust { optimize: optimize_toml, debug: debug_toml, @@ -1648,7 +1660,7 @@ impl Config { new_symbol_mangling, profile_generate, profile_use, - download_rustc: _, + download_rustc, lto, validate_mir_opts, frame_pointers, @@ -1660,6 +1672,8 @@ impl Config { is_user_configured_rust_channel = channel.is_some(); set(&mut config.channel, channel.clone()); + config.download_rustc_commit = config.download_ci_rustc_commit(download_rustc); + debug = debug_toml; debug_assertions = debug_assertions_toml; debug_assertions_std = debug_assertions_std_toml; @@ -2286,13 +2300,13 @@ impl Config { /// The absolute path to the downloaded LLVM artifacts. pub(crate) fn ci_llvm_root(&self) -> PathBuf { assert!(self.llvm_from_ci); - self.out.join(&*self.build.triple).join("ci-llvm") + self.out.join(self.build).join("ci-llvm") } /// Directory where the extracted `rustc-dev` component is stored. pub(crate) fn ci_rustc_dir(&self) -> PathBuf { assert!(self.download_rustc()); - self.out.join(self.build.triple).join("ci-rustc") + self.out.join(self.build).join("ci-rustc") } /// Determine whether llvm should be linked dynamically. 
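The `DISABLE_CI_RUSTC_IF_INCOMPATIBLE` escape hatch removed above (and re-added in the hunk that follows) is a plain boolean-ish environment variable. A tiny sketch of the check as it appears in the diff; the surrounding `main` is only for demonstration:

use std::env;

// Treat "1" or "true" as enabling the override, matching the check in the diff.
fn disable_ci_rustc_if_incompatible() -> bool {
    env::var_os("DISABLE_CI_RUSTC_IF_INCOMPATIBLE").is_some_and(|s| s == "1" || s == "true")
}

fn main() {
    if disable_ci_rustc_if_incompatible() {
        println!("download-rustc would be disabled instead of erroring out");
    } else {
        println!("download-rustc compatibility errors stay fatal");
    }
}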
@@ -2342,6 +2356,45 @@ impl Config { None => None, Some(commit) => { self.download_ci_rustc(commit); + + if let Some(config_path) = &self.config { + let builder_config_path = + self.out.join(self.build.triple).join("ci-rustc").join(BUILDER_CONFIG_FILENAME); + + let ci_config_toml = match Self::get_toml(&builder_config_path) { + Ok(ci_config_toml) => ci_config_toml, + Err(e) if e.to_string().contains("unknown field") => { + println!("WARNING: CI rustc has some fields that are no longer supported in bootstrap; download-rustc will be disabled."); + println!("HELP: Consider rebasing to a newer commit if available."); + return None; + }, + Err(e) => { + eprintln!("ERROR: Failed to parse CI rustc config at '{}': {e}", builder_config_path.display()); + exit!(2); + }, + }; + + let current_config_toml = Self::get_toml(config_path).unwrap(); + + // Check the config compatibility + // FIXME: this doesn't cover `--set` flags yet. + let res = check_incompatible_options_for_ci_rustc( + current_config_toml, + ci_config_toml, + ); + + let disable_ci_rustc_if_incompatible = + env::var_os("DISABLE_CI_RUSTC_IF_INCOMPATIBLE") + .is_some_and(|s| s == "1" || s == "true"); + + if disable_ci_rustc_if_incompatible && res.is_err() { + println!("WARNING: download-rustc is disabled with `DISABLE_CI_RUSTC_IF_INCOMPATIBLE` env."); + return None; + } + + res.unwrap(); + } + Some(commit.clone()) } }) @@ -2659,31 +2712,52 @@ impl Config { } } -/// Checks the CI rustc incompatible options by destructuring the `Rust` instance -/// and makes sure that no rust options from config.toml are missed. -fn check_incompatible_options_for_ci_rustc(rust: &Rust) -> Result<(), String> { +/// Compares the current Rust options against those in the CI rustc builder and detects any incompatible options. +/// It does this by destructuring the `Rust` instance to make sure every `Rust` field is covered and not missing. +fn check_incompatible_options_for_ci_rustc( + current_config_toml: TomlConfig, + ci_config_toml: TomlConfig, +) -> Result<(), String> { macro_rules! err { - ($name:expr) => { - if $name.is_some() { - return Err(format!( - "ERROR: Setting `rust.{}` is incompatible with `rust.download-rustc`.", - stringify!($name).replace("_", "-") - )); - } + ($current:expr, $expected:expr) => { + if let Some(current) = &$current { + if Some(current) != $expected.as_ref() { + return Err(format!( + "ERROR: Setting `rust.{}` is incompatible with `rust.download-rustc`. \ + Current value: {:?}, Expected value(s): {}{:?}", + stringify!($expected).replace("_", "-"), + $current, + if $expected.is_some() { "None/" } else { "" }, + $expected, + )); + }; + }; }; } macro_rules! warn { - ($name:expr) => { - if $name.is_some() { - println!( - "WARNING: `rust.{}` has no effect with `rust.download-rustc`.", - stringify!($name).replace("_", "-") - ); - } + ($current:expr, $expected:expr) => { + if let Some(current) = &$current { + if Some(current) != $expected.as_ref() { + println!( + "WARNING: `rust.{}` has no effect with `rust.download-rustc`. \ + Current value: {:?}, Expected value(s): {}{:?}", + stringify!($expected).replace("_", "-"), + $current, + if $expected.is_some() { "None/" } else { "" }, + $expected, + ); + }; + }; }; } + let (Some(current_rust_config), Some(ci_rust_config)) = + (current_config_toml.rust, ci_config_toml.rust) + else { + return Ok(()); + }; + let Rust { // Following options are the CI rustc incompatible ones. 
optimize, @@ -2741,7 +2815,7 @@ fn check_incompatible_options_for_ci_rustc(rust: &Rust) -> Result<(), String> { download_rustc: _, validate_mir_opts: _, frame_pointers: _, - } = rust; + } = ci_rust_config; // There are two kinds of checks for CI rustc incompatible options: // 1. Checking an option that may change the compiler behaviour/output. @@ -2749,22 +2823,23 @@ fn check_incompatible_options_for_ci_rustc(rust: &Rust) -> Result<(), String> { // // If the option belongs to the first category, we call `err` macro for a hard error; // otherwise, we just print a warning with `warn` macro. - err!(optimize); - err!(debug_logging); - err!(debuginfo_level_rustc); - err!(default_linker); - err!(rpath); - err!(strip); - err!(stack_protector); - err!(lld_mode); - err!(llvm_tools); - err!(llvm_bitcode_linker); - err!(jemalloc); - err!(lto); - - warn!(channel); - warn!(description); - warn!(incremental); + + err!(current_rust_config.optimize, optimize); + err!(current_rust_config.debug_logging, debug_logging); + err!(current_rust_config.debuginfo_level_rustc, debuginfo_level_rustc); + err!(current_rust_config.rpath, rpath); + err!(current_rust_config.strip, strip); + err!(current_rust_config.lld_mode, lld_mode); + err!(current_rust_config.llvm_tools, llvm_tools); + err!(current_rust_config.llvm_bitcode_linker, llvm_bitcode_linker); + err!(current_rust_config.jemalloc, jemalloc); + err!(current_rust_config.default_linker, default_linker); + err!(current_rust_config.stack_protector, stack_protector); + err!(current_rust_config.lto, lto); + + warn!(current_rust_config.channel, channel); + warn!(current_rust_config.description, description); + warn!(current_rust_config.incremental, incremental); Ok(()) } diff --git a/src/bootstrap/src/core/config/flags.rs b/src/bootstrap/src/core/config/flags.rs index 19f752da81c13..c3f1740281492 100644 --- a/src/bootstrap/src/core/config/flags.rs +++ b/src/bootstrap/src/core/config/flags.rs @@ -183,9 +183,9 @@ pub struct Flags { } impl Flags { - pub fn parse(args: &[String]) -> Self { - let first = String::from("x.py"); - let it = std::iter::once(&first).chain(args.iter()); + /// Check if ` -h -v` was passed. + /// If yes, print the available paths and return `true`. 
+ pub fn try_parse_verbose_help(args: &[String]) -> bool { // We need to check for ` -h -v`, in which case we list the paths #[derive(Parser)] #[command(disable_help_flag(true))] @@ -198,10 +198,10 @@ impl Flags { cmd: Kind, } if let Ok(HelpVerboseOnly { help: true, verbose: 1.., cmd: subcommand }) = - HelpVerboseOnly::try_parse_from(it.clone()) + HelpVerboseOnly::try_parse_from(normalize_args(args)) { println!("NOTE: updating submodules before printing available paths"); - let config = Config::parse(&[String::from("build")]); + let config = Config::parse(Self::parse(&[String::from("build")])); let build = Build::new(config); let paths = Builder::get_help(&build, subcommand); if let Some(s) = paths { @@ -209,13 +209,23 @@ impl Flags { } else { panic!("No paths available for subcommand `{}`", subcommand.as_str()); } - crate::exit!(0); + true + } else { + false } + } - Flags::parse_from(it) + pub fn parse(args: &[String]) -> Self { + Flags::parse_from(normalize_args(args)) } } +fn normalize_args(args: &[String]) -> Vec { + let first = String::from("x.py"); + let it = std::iter::once(first).chain(args.iter().cloned()); + it.collect() +} + #[derive(Debug, Clone, Default, clap::Subcommand)] pub enum Subcommand { #[command(aliases = ["b"], long_about = "\n @@ -347,9 +357,9 @@ pub enum Subcommand { /// extra arguments to be passed for the test tool being used /// (e.g. libtest, compiletest or rustdoc) test_args: Vec, - /// extra options to pass the compiler when running tests + /// extra options to pass the compiler when running compiletest tests #[arg(long, value_name = "ARGS", allow_hyphen_values(true))] - rustc_args: Vec, + compiletest_rustc_args: Vec, #[arg(long)] /// do not run doc tests no_doc: bool, @@ -392,9 +402,6 @@ pub enum Subcommand { /// extra arguments to be passed for the test tool being used /// (e.g. libtest, compiletest or rustdoc) test_args: Vec, - /// extra options to pass the compiler when running tests - #[arg(long, value_name = "ARGS", allow_hyphen_values(true))] - rustc_args: Vec, #[arg(long)] /// do not run doc tests no_doc: bool, @@ -499,10 +506,10 @@ impl Subcommand { } } - pub fn rustc_args(&self) -> Vec<&str> { + pub fn compiletest_rustc_args(&self) -> Vec<&str> { match *self { - Subcommand::Test { ref rustc_args, .. } | Subcommand::Miri { ref rustc_args, .. } => { - rustc_args.iter().flat_map(|s| s.split_whitespace()).collect() + Subcommand::Test { ref compiletest_rustc_args, .. 
} => { + compiletest_rustc_args.iter().flat_map(|s| s.split_whitespace()).collect() } _ => vec![], } diff --git a/src/bootstrap/src/core/config/mod.rs b/src/bootstrap/src/core/config/mod.rs index 23556e8bc5d38..9f09dd13f2985 100644 --- a/src/bootstrap/src/core/config/mod.rs +++ b/src/bootstrap/src/core/config/mod.rs @@ -1,6 +1,6 @@ #[allow(clippy::module_inception)] mod config; -pub(crate) mod flags; +pub mod flags; #[cfg(test)] mod tests; diff --git a/src/bootstrap/src/core/config/tests.rs b/src/bootstrap/src/core/config/tests.rs index 6e695d269cf2c..378d069672f5d 100644 --- a/src/bootstrap/src/core/config/tests.rs +++ b/src/bootstrap/src/core/config/tests.rs @@ -12,9 +12,10 @@ use crate::core::build_steps::clippy::get_clippy_rules_in_order; use crate::core::config::{LldMode, Target, TargetSelection, TomlConfig}; fn parse(config: &str) -> Config { - Config::parse_inner(&["check".to_string(), "--config=/does/not/exist".to_string()], |&_| { - toml::from_str(&config).unwrap() - }) + Config::parse_inner( + Flags::parse(&["check".to_string(), "--config=/does/not/exist".to_string()]), + |&_| toml::from_str(&config), + ) } #[test] @@ -108,7 +109,7 @@ fn clap_verify() { #[test] fn override_toml() { let config = Config::parse_inner( - &[ + Flags::parse(&[ "check".to_owned(), "--config=/does/not/exist".to_owned(), "--set=change-id=1".to_owned(), @@ -121,7 +122,7 @@ fn override_toml() { "--set=target.x86_64-unknown-linux-gnu.rpath=false".to_owned(), "--set=target.aarch64-unknown-linux-gnu.sanitizers=false".to_owned(), "--set=target.aarch64-apple-darwin.runner=apple".to_owned(), - ], + ]), |&_| { toml::from_str( r#" @@ -150,7 +151,6 @@ runner = "x86_64-runner" "#, ) - .unwrap() }, ); assert_eq!(config.change_id, Some(1), "setting top-level value"); @@ -201,19 +201,19 @@ runner = "x86_64-runner" #[should_panic] fn override_toml_duplicate() { Config::parse_inner( - &[ + Flags::parse(&[ "check".to_owned(), "--config=/does/not/exist".to_string(), "--set=change-id=1".to_owned(), "--set=change-id=2".to_owned(), - ], - |&_| toml::from_str("change-id = 0").unwrap(), + ]), + |&_| toml::from_str("change-id = 0"), ); } #[test] fn profile_user_dist() { - fn get_toml(file: &Path) -> TomlConfig { + fn get_toml(file: &Path) -> Result { let contents = if file.ends_with("config.toml") || env::var_os("RUST_BOOTSTRAP_CONFIG").is_some() { "profile = \"user\"".to_owned() @@ -222,11 +222,9 @@ fn profile_user_dist() { std::fs::read_to_string(file).unwrap() }; - toml::from_str(&contents) - .and_then(|table: toml::Value| TomlConfig::deserialize(table)) - .unwrap() + toml::from_str(&contents).and_then(|table: toml::Value| TomlConfig::deserialize(table)) } - Config::parse_inner(&["check".to_owned()], get_toml); + Config::parse_inner(Flags::parse(&["check".to_owned()]), get_toml); } #[test] @@ -301,7 +299,7 @@ fn order_of_clippy_rules() { "-Aclippy::foo1".to_string(), "-Aclippy::foo2".to_string(), ]; - let config = Config::parse(&args); + let config = Config::parse(Flags::parse(&args)); let actual = match &config.cmd { crate::Subcommand::Clippy { allow, deny, warn, forbid, .. 
} => { diff --git a/src/bootstrap/src/core/download.rs b/src/bootstrap/src/core/download.rs index 4d1aea3cd956a..ae39afa1fa270 100644 --- a/src/bootstrap/src/core/download.rs +++ b/src/bootstrap/src/core/download.rs @@ -9,6 +9,7 @@ use std::sync::OnceLock; use build_helper::ci::CiEnv; use xz2::bufread::XzDecoder; +use crate::core::config::BUILDER_CONFIG_FILENAME; use crate::utils::exec::{command, BootstrapCommand}; use crate::utils::helpers::{check_run, exe, hex_encode, move_file, program_out_of_date}; use crate::{t, Config}; @@ -273,11 +274,12 @@ impl Config { let mut tar = tar::Archive::new(decompressor); + let is_ci_rustc = dst.ends_with("ci-rustc"); + // `compile::Sysroot` needs to know the contents of the `rustc-dev` tarball to avoid adding // it to the sysroot unless it was explicitly requested. But parsing the 100 MB tarball is slow. // Cache the entries when we extract it so we only have to read it once. - let mut recorded_entries = - if dst.ends_with("ci-rustc") { recorded_entries(dst, pattern) } else { None }; + let mut recorded_entries = if is_ci_rustc { recorded_entries(dst, pattern) } else { None }; for member in t!(tar.entries()) { let mut member = t!(member); @@ -287,10 +289,12 @@ impl Config { continue; } let mut short_path = t!(original_path.strip_prefix(directory_prefix)); - if !short_path.starts_with(pattern) { + let is_builder_config = short_path.to_str() == Some(BUILDER_CONFIG_FILENAME); + + if !(short_path.starts_with(pattern) || (is_ci_rustc && is_builder_config)) { continue; } - short_path = t!(short_path.strip_prefix(pattern)); + short_path = short_path.strip_prefix(pattern).unwrap_or(short_path); let dst_path = dst.join(short_path); self.verbose(|| { println!("extracting {} to {}", original_path.display(), dst.display()) @@ -379,7 +383,7 @@ impl Config { let version = &self.stage0_metadata.compiler.version; let host = self.build; - let bin_root = self.out.join(host.triple).join("stage0"); + let bin_root = self.out.join(host).join("stage0"); let clippy_stamp = bin_root.join(".clippy-stamp"); let cargo_clippy = bin_root.join("bin").join(exe("cargo-clippy", host)); if cargo_clippy.exists() && !program_out_of_date(&clippy_stamp, date) { @@ -412,7 +416,7 @@ impl Config { let channel = format!("{version}-{date}"); let host = self.build; - let bin_root = self.out.join(host.triple).join("rustfmt"); + let bin_root = self.out.join(host).join("rustfmt"); let rustfmt_path = bin_root.join("bin").join(exe("rustfmt", host)); let rustfmt_stamp = bin_root.join(".rustfmt-stamp"); if rustfmt_path.exists() && !program_out_of_date(&rustfmt_stamp, &channel) { @@ -592,7 +596,7 @@ impl Config { t!(fs::create_dir_all(&cache_dir)); } - let bin_root = self.out.join(self.build.triple).join(destination); + let bin_root = self.out.join(self.build).join(destination); let tarball = cache_dir.join(&filename); let (base_url, url, should_verify) = match mode { DownloadSource::CI => { @@ -703,9 +707,7 @@ download-rustc = false let file_times = fs::FileTimes::new().set_accessed(now).set_modified(now); let llvm_config = llvm_root.join("bin").join(exe("llvm-config", self.build)); - let llvm_config_file = t!(File::options().write(true).open(llvm_config)); - - t!(llvm_config_file.set_times(file_times)); + t!(crate::utils::helpers::set_file_times(llvm_config, file_times)); if self.should_fix_bins_and_dylibs() { let llvm_lib = llvm_root.join("lib"); diff --git a/src/bootstrap/src/lib.rs b/src/bootstrap/src/lib.rs index 7768814a62e9c..5747a77b0ba8c 100644 --- a/src/bootstrap/src/lib.rs +++ 
b/src/bootstrap/src/lib.rs @@ -37,13 +37,15 @@ use crate::core::builder; use crate::core::builder::{Builder, Kind}; use crate::core::config::{flags, DryRun, LldMode, LlvmLibunwind, Target, TargetSelection}; use crate::utils::exec::{command, BehaviorOnFailure, BootstrapCommand, CommandOutput, OutputMode}; -use crate::utils::helpers::{self, dir_is_empty, exe, libdir, mtime, output, symlink_dir}; +use crate::utils::helpers::{ + self, dir_is_empty, exe, libdir, mtime, output, set_file_times, symlink_dir, +}; mod core; mod utils; pub use core::builder::PathSet; -pub use core::config::flags::Subcommand; +pub use core::config::flags::{Flags, Subcommand}; pub use core::config::Config; pub use utils::change_tracker::{ @@ -458,7 +460,7 @@ impl Build { } // Make a symbolic link so we can use a consistent directory in the documentation. - let build_triple = build.out.join(build.build.triple); + let build_triple = build.out.join(build.build); t!(fs::create_dir_all(&build_triple)); let host = build.out.join("host"); if host.is_symlink() { @@ -813,10 +815,7 @@ impl Build { } fn tools_dir(&self, compiler: Compiler) -> PathBuf { - let out = self - .out - .join(&*compiler.host.triple) - .join(format!("stage{}-tools-bin", compiler.stage)); + let out = self.out.join(compiler.host).join(format!("stage{}-tools-bin", compiler.stage)); t!(fs::create_dir_all(&out)); out } @@ -833,14 +832,14 @@ impl Build { Mode::ToolBootstrap => "-bootstrap-tools", Mode::ToolStd | Mode::ToolRustc => "-tools", }; - self.out.join(&*compiler.host.triple).join(format!("stage{}{}", compiler.stage, suffix)) + self.out.join(compiler.host).join(format!("stage{}{}", compiler.stage, suffix)) } /// Returns the root output directory for all Cargo output in a given stage, /// running a particular compiler, whether or not we're building the /// standard library, and targeting the specified architecture. 
fn cargo_out(&self, compiler: Compiler, mode: Mode, target: TargetSelection) -> PathBuf { - self.stage_out(compiler, mode).join(&*target.triple).join(self.cargo_dir()) + self.stage_out(compiler, mode).join(target).join(self.cargo_dir()) } /// Root output directory of LLVM for `target` @@ -851,7 +850,7 @@ impl Build { if self.config.llvm_from_ci && self.config.build == target { self.config.ci_llvm_root() } else { - self.out.join(&*target.triple).join("llvm") + self.out.join(target).join("llvm") } } @@ -860,31 +859,31 @@ impl Build { } fn lld_out(&self, target: TargetSelection) -> PathBuf { - self.out.join(&*target.triple).join("lld") + self.out.join(target).join("lld") } /// Output directory for all documentation for a target fn doc_out(&self, target: TargetSelection) -> PathBuf { - self.out.join(&*target.triple).join("doc") + self.out.join(target).join("doc") } /// Output directory for all JSON-formatted documentation for a target fn json_doc_out(&self, target: TargetSelection) -> PathBuf { - self.out.join(&*target.triple).join("json-doc") + self.out.join(target).join("json-doc") } fn test_out(&self, target: TargetSelection) -> PathBuf { - self.out.join(&*target.triple).join("test") + self.out.join(target).join("test") } /// Output directory for all documentation for a target fn compiler_doc_out(&self, target: TargetSelection) -> PathBuf { - self.out.join(&*target.triple).join("compiler-doc") + self.out.join(target).join("compiler-doc") } /// Output directory for some generated md crate documentation for a target (temporary) fn md_doc_out(&self, target: TargetSelection) -> PathBuf { - self.out.join(&*target.triple).join("md-doc") + self.out.join(target).join("md-doc") } /// Returns `true` if this is an external version of LLVM not managed by bootstrap. @@ -964,7 +963,7 @@ impl Build { /// Directory for libraries built from C/C++ code and shared between stages. fn native_dir(&self, target: TargetSelection) -> PathBuf { - self.out.join(&*target.triple).join("native") + self.out.join(target).join("native") } /// Root output directory for rust_test_helpers library compiled for @@ -1069,11 +1068,29 @@ Executed at: {executed_at}"#, } }; - let fail = |message: &str| { + let fail = |message: &str, output: CommandOutput| -> ! { if self.is_verbose() { println!("{message}"); } else { - println!("Command has failed. Rerun with -v to see more details."); + let (stdout, stderr) = (output.stdout_if_present(), output.stderr_if_present()); + // If the command captures output, the user would not see any indication that + // it has failed. In this case, print a more verbose error, since to provide more + // context. + if stdout.is_some() || stderr.is_some() { + if let Some(stdout) = + output.stdout_if_present().take_if(|s| !s.trim().is_empty()) + { + println!("STDOUT:\n{stdout}\n"); + } + if let Some(stderr) = + output.stderr_if_present().take_if(|s| !s.trim().is_empty()) + { + println!("STDERR:\n{stderr}\n"); + } + println!("Command {command:?} has failed. Rerun with -v to see more details."); + } else { + println!("Command has failed. 
Rerun with -v to see more details."); + } } exit!(1); }; @@ -1082,14 +1099,14 @@ Executed at: {executed_at}"#, match command.failure_behavior { BehaviorOnFailure::DelayFail => { if self.fail_fast { - fail(&message); + fail(&message, output); } let mut failures = self.delayed_failures.borrow_mut(); failures.push(message); } BehaviorOnFailure::Exit => { - fail(&message); + fail(&message, output); } BehaviorOnFailure::Ignore => { // If failures are allowed, either the error has been printed already @@ -1787,21 +1804,20 @@ Executed at: {executed_at}"#, } } if let Ok(()) = fs::hard_link(&src, dst) { - // Attempt to "easy copy" by creating a hard link - // (symlinks don't work on windows), but if that fails - // just fall back to a slow `copy` operation. + // Attempt to "easy copy" by creating a hard link (symlinks are privileged on windows), + // but if that fails just fall back to a slow `copy` operation. } else { if let Err(e) = fs::copy(&src, dst) { panic!("failed to copy `{}` to `{}`: {}", src.display(), dst.display(), e) } t!(fs::set_permissions(dst, metadata.permissions())); + // Restore file times because changing permissions on e.g. Linux using `chmod` can cause + // file access time to change. let file_times = fs::FileTimes::new() .set_accessed(t!(metadata.accessed())) .set_modified(t!(metadata.modified())); - - let dst_file = t!(fs::File::open(dst)); - t!(dst_file.set_times(file_times)); + t!(set_file_times(dst, file_times)); } } diff --git a/src/bootstrap/src/utils/change_tracker.rs b/src/bootstrap/src/utils/change_tracker.rs index c0212b318bbdf..0b0791a7d62c5 100644 --- a/src/bootstrap/src/utils/change_tracker.rs +++ b/src/bootstrap/src/utils/change_tracker.rs @@ -225,6 +225,11 @@ pub const CONFIG_CHANGE_HISTORY: &[ChangeInfo] = &[ severity: ChangeSeverity::Info, summary: "New option `llvm.libzstd` to control whether llvm is built with zstd support.", }, + ChangeInfo { + change_id: 128841, + severity: ChangeSeverity::Warning, + summary: "./x test --rustc-args was renamed to --compiletest-rustc-args as it only applies there. ./x miri --rustc-args was also removed.", + }, ChangeInfo { change_id: 129176, severity: ChangeSeverity::Info, diff --git a/src/bootstrap/src/utils/exec.rs b/src/bootstrap/src/utils/exec.rs index 9f0d0b7e9691f..530d760a584c8 100644 --- a/src/bootstrap/src/utils/exec.rs +++ b/src/bootstrap/src/utils/exec.rs @@ -291,6 +291,11 @@ impl CommandOutput { .expect("Cannot parse process stdout as UTF-8") } + #[must_use] + pub fn stdout_if_present(&self) -> Option<String> { + self.stdout.as_ref().and_then(|s| String::from_utf8(s.clone()).ok()) + } + #[must_use] pub fn stdout_if_ok(&self) -> Option<String> { if self.is_success() { Some(self.stdout()) } else { None } @@ -303,6 +308,11 @@ impl CommandOutput { ) .expect("Cannot parse process stderr as UTF-8") } + + #[must_use] + pub fn stderr_if_present(&self) -> Option<String> { + self.stderr.as_ref().and_then(|s| String::from_utf8(s.clone()).ok()) + } } impl Default for CommandOutput { diff --git a/src/bootstrap/src/utils/helpers.rs b/src/bootstrap/src/utils/helpers.rs index 65e75f114bbeb..a856c99ff55a5 100644 --- a/src/bootstrap/src/utils/helpers.rs +++ b/src/bootstrap/src/utils/helpers.rs @@ -544,3 +544,15 @@ pub fn get_closest_merge_base_commit( Ok(output_result(git.as_command_mut())?.trim().to_owned()) } + +/// Sets the file times for a given file at `path`. +pub fn set_file_times<P: AsRef<Path>>(path: P, times: fs::FileTimes) -> io::Result<()> { + // Windows requires file to be writable to modify file times.
But on Linux CI the file does not + // need to be writable to modify file times and might be read-only. + let f = if cfg!(windows) { + fs::File::options().write(true).open(path)? + } else { + fs::File::open(path)? + }; + f.set_times(times) +} diff --git a/src/bootstrap/src/utils/helpers/tests.rs b/src/bootstrap/src/utils/helpers/tests.rs index f0cb324674fc6..86016a91e49b1 100644 --- a/src/bootstrap/src/utils/helpers/tests.rs +++ b/src/bootstrap/src/utils/helpers/tests.rs @@ -3,9 +3,10 @@ use std::io::Write; use std::path::PathBuf; use crate::utils::helpers::{ - check_cfg_arg, extract_beta_rev, hex_encode, make, program_out_of_date, symlink_dir, + check_cfg_arg, extract_beta_rev, hex_encode, make, program_out_of_date, set_file_times, + symlink_dir, }; -use crate::Config; +use crate::{Config, Flags}; #[test] fn test_make() { @@ -58,7 +59,8 @@ fn test_check_cfg_arg() { #[test] fn test_program_out_of_date() { - let config = Config::parse(&["check".to_owned(), "--config=/does/not/exist".to_owned()]); + let config = + Config::parse(Flags::parse(&["check".to_owned(), "--config=/does/not/exist".to_owned()])); let tempfile = config.tempdir().join(".tmp-stamp-file"); File::create(&tempfile).unwrap().write_all(b"dummy value").unwrap(); assert!(tempfile.exists()); @@ -73,7 +75,8 @@ fn test_program_out_of_date() { #[test] fn test_symlink_dir() { - let config = Config::parse(&["check".to_owned(), "--config=/does/not/exist".to_owned()]); + let config = + Config::parse(Flags::parse(&["check".to_owned(), "--config=/does/not/exist".to_owned()])); let tempdir = config.tempdir().join(".tmp-dir"); let link_path = config.tempdir().join(".tmp-link"); @@ -90,3 +93,25 @@ fn test_symlink_dir() { #[cfg(not(windows))] fs::remove_file(link_path).unwrap(); } + +#[test] +fn test_set_file_times_sanity_check() { + let config = + Config::parse(Flags::parse(&["check".to_owned(), "--config=/does/not/exist".to_owned()])); + let tempfile = config.tempdir().join(".tmp-file"); + + { + File::create(&tempfile).unwrap().write_all(b"dummy value").unwrap(); + assert!(tempfile.exists()); + } + + // This might only fail on Windows (if file is default read-only then we try to modify file + // times). + let unix_epoch = std::time::SystemTime::UNIX_EPOCH; + let target_time = fs::FileTimes::new().set_accessed(unix_epoch).set_modified(unix_epoch); + set_file_times(&tempfile, target_time).unwrap(); + + let found_metadata = fs::metadata(tempfile).unwrap(); + assert_eq!(found_metadata.accessed().unwrap(), unix_epoch); + assert_eq!(found_metadata.modified().unwrap(), unix_epoch) +} diff --git a/src/bootstrap/src/utils/tarball.rs b/src/bootstrap/src/utils/tarball.rs index ef8b401201560..381aa0c309a84 100644 --- a/src/bootstrap/src/utils/tarball.rs +++ b/src/bootstrap/src/utils/tarball.rs @@ -9,6 +9,7 @@ use std::path::{Path, PathBuf}; use crate::core::build_steps::dist::distdir; use crate::core::builder::{Builder, Kind}; +use crate::core::config::BUILDER_CONFIG_FILENAME; use crate::utils::exec::BootstrapCommand; use crate::utils::helpers::{move_file, t}; use crate::utils::{channel, helpers}; @@ -325,7 +326,7 @@ impl<'a> Tarball<'a> { // Add config file if present. 
if let Some(config) = &self.builder.config.config { - self.add_renamed_file(config, &self.overlay_dir, "builder-config"); + self.add_renamed_file(config, &self.overlay_dir, BUILDER_CONFIG_FILENAME); } for file in self.overlay.legal_and_readme() { diff --git a/src/ci/docker/scripts/x86_64-gnu-llvm.sh b/src/ci/docker/scripts/x86_64-gnu-llvm.sh index b3921f114217d..98290f5a72cd8 100755 --- a/src/ci/docker/scripts/x86_64-gnu-llvm.sh +++ b/src/ci/docker/scripts/x86_64-gnu-llvm.sh @@ -18,9 +18,9 @@ if [[ -z "${PR_CI_JOB}" ]]; then # compiler, and is sensitive to the addition of new flags. ../x.py --stage 1 test tests/ui-fulldeps - # The tests are run a second time with the size optimizations enabled. - ../x.py --stage 1 test library/std library/alloc library/core \ - --rustc-args "--cfg feature=\"optimize_for_size\"" + # Rebuild the stdlib with the size optimizations enabled and run tests again. + RUSTFLAGS_NOT_BOOTSTRAP="--cfg feature=\"optimize_for_size\"" ../x.py --stage 1 test \ + library/std library/alloc library/core fi # NOTE: intentionally uses all of `x.py`, `x`, and `x.ps1` to make sure they all work on Linux. diff --git a/src/doc/book b/src/doc/book index 67fa536768013..04bc1396bb857 160000 --- a/src/doc/book +++ b/src/doc/book @@ -1 +1 @@ -Subproject commit 67fa536768013d9d5a13f3a06790521d511ef711 +Subproject commit 04bc1396bb857f35b5dda1d773c9571e1f253304 diff --git a/src/doc/edition-guide b/src/doc/edition-guide index 5454de3d12b9c..aeeb287d41a03 160000 --- a/src/doc/edition-guide +++ b/src/doc/edition-guide @@ -1 +1 @@ -Subproject commit 5454de3d12b9ccc6375b629cf7ccda8264640aac +Subproject commit aeeb287d41a0332c210da122bea8e0e91844ab3e diff --git a/src/doc/nomicon b/src/doc/nomicon index 0ebdacadbda8c..6ecf95c5f2bfa 160000 --- a/src/doc/nomicon +++ b/src/doc/nomicon @@ -1 +1 @@ -Subproject commit 0ebdacadbda8ce2cd8fbf93985e15af61a7ab895 +Subproject commit 6ecf95c5f2bfa0e6314dfe282bf775fd1405f7e9 diff --git a/src/doc/reference b/src/doc/reference index 2e191814f163e..62cd0df95061b 160000 --- a/src/doc/reference +++ b/src/doc/reference @@ -1 +1 @@ -Subproject commit 2e191814f163ee1e77e2d6094eee4dd78a289c5b +Subproject commit 62cd0df95061ba0ac886333f5cd7f3012f149da1 diff --git a/src/doc/rust-by-example b/src/doc/rust-by-example index 89aecb6951b77..8f94061936e49 160000 --- a/src/doc/rust-by-example +++ b/src/doc/rust-by-example @@ -1 +1 @@ -Subproject commit 89aecb6951b77bc746da73df8c9f2b2ceaad494a +Subproject commit 8f94061936e492159f4f6c09c0f917a7521893ff diff --git a/src/doc/rustc-dev-guide b/src/doc/rustc-dev-guide index 0c4d55cb59fe4..43d83780db545 160000 --- a/src/doc/rustc-dev-guide +++ b/src/doc/rustc-dev-guide @@ -1 +1 @@ -Subproject commit 0c4d55cb59fe440d1a630e4e5774d043968edb3f +Subproject commit 43d83780db545a1ed6d45773312fc578987e3968 diff --git a/src/doc/rustc/src/SUMMARY.md b/src/doc/rustc/src/SUMMARY.md index 467fd6f43e48f..cb0b2e63452de 100644 --- a/src/doc/rustc/src/SUMMARY.md +++ b/src/doc/rustc/src/SUMMARY.md @@ -61,6 +61,7 @@ - [mipsisa\*r6\*-unknown-linux-gnu\*](platform-support/mips-release-6.md) - [nvptx64-nvidia-cuda](platform-support/nvptx64-nvidia-cuda.md) - [powerpc-unknown-openbsd](platform-support/powerpc-unknown-openbsd.md) + - [powerpc-unknown-linux-muslspe](platform-support/powerpc-unknown-linux-muslspe.md) - [powerpc64-ibm-aix](platform-support/aix.md) - [riscv32im-risc0-zkvm-elf](platform-support/riscv32im-risc0-zkvm-elf.md) - [riscv32imac-unknown-xous-elf](platform-support/riscv32imac-unknown-xous-elf.md) diff --git 
a/src/doc/rustc/src/platform-support.md b/src/doc/rustc/src/platform-support.md index 90e30f85bd6fd..c3f73a8367a30 100644 --- a/src/doc/rustc/src/platform-support.md +++ b/src/doc/rustc/src/platform-support.md @@ -33,6 +33,7 @@ All tier 1 targets with host tools support the full standard library. target | notes -------|------- `aarch64-unknown-linux-gnu` | ARM64 Linux (kernel 4.1, glibc 2.17+) +[`aarch64-apple-darwin`](platform-support/apple-darwin.md) | ARM64 macOS (11.0+, Big Sur+) `i686-pc-windows-gnu` | 32-bit MinGW (Windows 10+, Windows Server 2016+) [^x86_32-floats-return-ABI] `i686-pc-windows-msvc` | 32-bit MSVC (Windows 10+, Windows Server 2016+) [^x86_32-floats-return-ABI] `i686-unknown-linux-gnu` | 32-bit Linux (kernel 3.2+, glibc 2.17+) [^x86_32-floats-return-ABI] @@ -86,7 +87,6 @@ so Rustup may install the documentation for a similar tier 1 target instead. target | notes -------|------- -[`aarch64-apple-darwin`](platform-support/apple-darwin.md) | ARM64 macOS (11.0+, Big Sur+) `aarch64-pc-windows-msvc` | ARM64 Windows MSVC `aarch64-unknown-linux-musl` | ARM64 Linux with musl 1.2.3 `arm-unknown-linux-gnueabi` | Armv6 Linux (kernel 3.2, glibc 2.17) @@ -332,6 +332,7 @@ target | std | host | notes `msp430-none-elf` | * | | 16-bit MSP430 microcontrollers `powerpc-unknown-linux-gnuspe` | ✓ | | PowerPC SPE Linux `powerpc-unknown-linux-musl` | ? | | PowerPC Linux with musl 1.2.3 +[`powerpc-unknown-linux-muslspe`](platform-support/powerpc-unknown-linux-muslspe.md) | ? | | PowerPC SPE Linux [`powerpc-unknown-netbsd`](platform-support/netbsd.md) | ✓ | ✓ | NetBSD 32-bit powerpc systems [`powerpc-unknown-openbsd`](platform-support/powerpc-unknown-openbsd.md) | * | | [`powerpc-wrs-vxworks-spe`](platform-support/vxworks.md) | ✓ | | diff --git a/src/doc/rustc/src/platform-support/android.md b/src/doc/rustc/src/platform-support/android.md index 9ddf00e3a5053..96499b0d80148 100644 --- a/src/doc/rustc/src/platform-support/android.md +++ b/src/doc/rustc/src/platform-support/android.md @@ -61,3 +61,8 @@ Currently the `riscv64-linux-android` target requires the following architecture * `Zba` (address calculation instructions) * `Zbb` (base instructions) * `Zbs` (single-bit instructions) + +### aarch64-linux-android on Nightly compilers + +As soon as the `-Zfixed-x18` compiler flag is supplied, the [`ShadowCallStack` sanitizer](https://releases.llvm.org/7.0.1/tools/clang/docs/ShadowCallStack.html) +instrumentation is also made available by supplying the second compiler flag `-Zsanitizer=shadow-call-stack`. diff --git a/src/doc/rustc/src/platform-support/apple-darwin.md b/src/doc/rustc/src/platform-support/apple-darwin.md index 0fb86949a4b79..c3a7b81f411e8 100644 --- a/src/doc/rustc/src/platform-support/apple-darwin.md +++ b/src/doc/rustc/src/platform-support/apple-darwin.md @@ -5,9 +5,6 @@ Apple macOS targets. **Tier: 1** - `x86_64-apple-darwin`: macOS on 64-bit x86. - -**Tier: 2 (with Host Tools)** - - `aarch64-apple-darwin`: macOS on ARM64 (M1-family or later Apple Silicon CPUs). ## Target maintainers diff --git a/src/doc/rustc/src/platform-support/powerpc-unknown-linux-muslspe.md b/src/doc/rustc/src/platform-support/powerpc-unknown-linux-muslspe.md new file mode 100644 index 0000000000000..4c416b5192994 --- /dev/null +++ b/src/doc/rustc/src/platform-support/powerpc-unknown-linux-muslspe.md @@ -0,0 +1,32 @@ +# powerpc-unknown-linux-muslspe + +**Tier: 3** + +This target is very similar to already existing ones like `powerpc_unknown_linux_musl` and `powerpc_unknown_linux_gnuspe`. 
+This one has PowerPC SPE support for musl. Unfortunately, the last supported gcc version with PowerPC SPE is 8.4.0. + +## Target maintainers + +- [@BKPepe](https://github.com/BKPepe) + +## Requirements + +This target is cross-compiled. There is no support for `std`. There is no +default allocator, but it's possible to use `alloc` by supplying an allocator. + +This target generates binaries in the ELF format. + +## Building the target + +This target was tested and used within the `OpenWrt` build system for CZ.NIC Turris 1.x routers using Freescale P2020. + +## Building Rust programs + +Rust does not yet ship pre-compiled artifacts for this target. To compile for +this target, you will either need to build Rust with the target enabled (see +"Building the target" above), or build your own copy of `core` by using +`build-std` or similar. + +## Testing + +This is a cross-compiled target and there is no support for running the rustc test suite. diff --git a/src/doc/rustdoc/src/unstable-features.md b/src/doc/rustdoc/src/unstable-features.md index 39f24a13143c8..ebbe141b6f540 100644 --- a/src/doc/rustdoc/src/unstable-features.md +++ b/src/doc/rustdoc/src/unstable-features.md @@ -515,6 +515,9 @@ pub fn no_documentation() {} Note that the third item is the crate root, which in this case is undocumented. +If you want the JSON output to be displayed on `stdout` instead of having a file generated, you can +use `-o -`. + ### `-w`/`--output-format`: output format `--output-format json` emits documentation in the experimental diff --git a/src/doc/rustdoc/src/write-documentation/documentation-tests.md b/src/doc/rustdoc/src/write-documentation/documentation-tests.md index 9526f33359e4b..7ed2e9720fed5 100644 --- a/src/doc/rustdoc/src/write-documentation/documentation-tests.md +++ b/src/doc/rustdoc/src/write-documentation/documentation-tests.md @@ -376,6 +376,57 @@ that the code sample should be compiled using the respective edition of Rust. # fn foo() {} ``` +Starting in the 2024 edition[^edition-note], compatible doctests are merged as one before being +run. We combine doctests for performance reasons: the slowest part of doctests is compiling them. +Merging all of them into one file and compiling this new file, then running the doctests is much +faster. Whether doctests are merged or not, they are run in their own process. + +An example of time spent when running doctests: + +[sysinfo crate](https://crates.io/crates/sysinfo): + +```text +wall-time duration: 4.59s +total compile time: 27.067s +total runtime: 3.969s +``` + +Rust core library: + +```text +wall-time duration: 102s +total compile time: 775.204s +total runtime: 15.487s +``` + +[^edition-note]: This is based on the edition of the whole crate, not the edition of the individual +test case that may be specified in its code attribute. + +In some cases, doctests cannot be merged. For example, if you have: + +```rust +//! ``` +//! let location = std::panic::Location::caller(); +//! assert_eq!(location.line(), 4); +//! ``` +``` + +The problem with this code is that, if you change any other doctests, it'll likely break when +running `rustdoc --test`, making it tricky to maintain. + +This is where the `standalone` attribute comes in: it tells `rustdoc` that a doctest +should not be merged with the others. So the previous code should use it: + +```rust +//! ```standalone +//! let location = std::panic::Location::caller(); +//! assert_eq!(location.line(), 4); +//! 
``` +``` + +In this case, it means that the line information will not change if you add/remove other +doctests. + ### Custom CSS classes for code blocks ```rust diff --git a/src/doc/unstable-book/src/compiler-flags/fixed-x18.md b/src/doc/unstable-book/src/compiler-flags/fixed-x18.md index 8c8bff5fa296d..b215bc84fbb71 100644 --- a/src/doc/unstable-book/src/compiler-flags/fixed-x18.md +++ b/src/doc/unstable-book/src/compiler-flags/fixed-x18.md @@ -1,7 +1,7 @@ # `fixed-x18` This option prevents the compiler from using the x18 register. It is only -supported on aarch64. +supported on `aarch64`. From the [ABI spec][arm-abi]: @@ -23,6 +23,11 @@ Currently, the `-Zsanitizer=shadow-call-stack` flag is only supported on platforms that always treat x18 as a reserved register, and the `-Zfixed-x18` flag is not required to use the sanitizer on such platforms. However, the sanitizer may be supported on targets where this is not the case in the future. +One way to do so now on Nightly compilers is to explicitly supply this `-Zfixed-x18` +flag with `aarch64` targets, so that the sanitizer is available for instrumentation +on targets like `aarch64-unknown-none`, for instance. However, discretion is still +required to make sure that the runtime support is in place for this sanitizer +to be effective. It is undefined behavior for `-Zsanitizer=shadow-call-stack` code to call into code where x18 is a temporary register. On the other hand, when you are *not* diff --git a/src/doc/unstable-book/src/compiler-flags/sanitizer.md b/src/doc/unstable-book/src/compiler-flags/sanitizer.md index 72b44e002b4d4..edc63a25ac1be 100644 --- a/src/doc/unstable-book/src/compiler-flags/sanitizer.md +++ b/src/doc/unstable-book/src/compiler-flags/sanitizer.md @@ -787,6 +787,10 @@ A runtime must be provided by the application or operating system. See the [Clang ShadowCallStack documentation][clang-scs] for more details. +* `aarch64-unknown-none` + +In addition to support from a runtime by the application or operating system, the `-Zfixed-x18` flag is also mandatory. + # ThreadSanitizer ThreadSanitizer is a data race detection tool. It is supported on the following diff --git a/src/doc/unstable-book/src/language-features/asm-const.md b/src/doc/unstable-book/src/language-features/asm-const.md deleted file mode 100644 index 670c4df414f31..0000000000000 --- a/src/doc/unstable-book/src/language-features/asm-const.md +++ /dev/null @@ -1,11 +0,0 @@ -# `asm_const` - -The tracking issue for this feature is: [#93332] - -[#93332]: https://github.com/rust-lang/rust/issues/93332 - ------------------------- - -This feature adds a `const ` operand type to `asm!` and `global_asm!`. -- `` must be an integer constant expression. -- The value of the expression is formatted as a string and substituted directly into the asm template string. diff --git a/src/doc/unstable-book/src/language-features/rustc-private.md b/src/doc/unstable-book/src/language-features/rustc-private.md new file mode 100644 index 0000000000000..97fce5980e40b --- /dev/null +++ b/src/doc/unstable-book/src/language-features/rustc-private.md @@ -0,0 +1,11 @@ +# `rustc_private` + +The tracking issue for this feature is: [#27812] + +[#27812]: https://github.com/rust-lang/rust/issues/27812 + +------------------------ + +This feature allows access to unstable internal compiler crates. + +Additionally it changes the linking behavior of crates which have this feature enabled. 
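For orientation, here is a minimal, hypothetical sketch (an editor's illustration, not code from this patch) of the kind of crate that needs the feature: the crate-level attribute is the opt-in, after which private compiler crates such as `rustc_driver` become nameable with `extern crate`. It assumes a nightly toolchain with the `rustc-dev` component installed.

```rust
// Hypothetical example, not part of this diff: a trivial wrapper around the
// compiler driver. Requires nightly Rust plus the `rustc-dev` component so
// that the `rustc_driver` dylib is present in the sysroot.
#![feature(rustc_private)]

// Only resolvable once `rustc_private` is enabled.
extern crate rustc_driver;

fn main() {
    // Hand control to the compiler driver; this call never returns.
    rustc_driver::main();
}
```

The linking change described next is what allows a binary like this to link cleanly: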
It will prevent linking to a dylib if there's a static variant of it already statically linked into another dylib dependency. This is required to successfully link to `rustc_driver`. diff --git a/src/etc/completions/x.py.fish b/src/etc/completions/x.py.fish index 805fc8aa8ccd0..297dc11cd6145 100644 --- a/src/etc/completions/x.py.fish +++ b/src/etc/completions/x.py.fish @@ -266,7 +266,7 @@ complete -c x.py -n "__fish_seen_subcommand_from doc" -l enable-bolt-settings -d complete -c x.py -n "__fish_seen_subcommand_from doc" -l skip-stage0-validation -d 'Skip stage0 compiler validation' complete -c x.py -n "__fish_seen_subcommand_from doc" -s h -l help -d 'Print help (see more with \'--help\')' complete -c x.py -n "__fish_seen_subcommand_from test" -l test-args -d 'extra arguments to be passed for the test tool being used (e.g. libtest, compiletest or rustdoc)' -r -complete -c x.py -n "__fish_seen_subcommand_from test" -l rustc-args -d 'extra options to pass the compiler when running tests' -r +complete -c x.py -n "__fish_seen_subcommand_from test" -l compiletest-rustc-args -d 'extra options to pass the compiler when running compiletest tests' -r complete -c x.py -n "__fish_seen_subcommand_from test" -l extra-checks -d 'comma-separated list of other files types to check (accepts py, py:lint, py:fmt, shell)' -r complete -c x.py -n "__fish_seen_subcommand_from test" -l compare-mode -d 'mode describing what file the actual ui output will be compared to' -r complete -c x.py -n "__fish_seen_subcommand_from test" -l pass -d 'force {check,build,run}-pass tests to this mode' -r @@ -313,7 +313,6 @@ complete -c x.py -n "__fish_seen_subcommand_from test" -l enable-bolt-settings - complete -c x.py -n "__fish_seen_subcommand_from test" -l skip-stage0-validation -d 'Skip stage0 compiler validation' complete -c x.py -n "__fish_seen_subcommand_from test" -s h -l help -d 'Print help (see more with \'--help\')' complete -c x.py -n "__fish_seen_subcommand_from miri" -l test-args -d 'extra arguments to be passed for the test tool being used (e.g. libtest, compiletest or rustdoc)' -r -complete -c x.py -n "__fish_seen_subcommand_from miri" -l rustc-args -d 'extra options to pass the compiler when running tests' -r complete -c x.py -n "__fish_seen_subcommand_from miri" -l config -d 'TOML configuration file for build' -r -F complete -c x.py -n "__fish_seen_subcommand_from miri" -l build-dir -d 'Build directory, overrides `build.build-dir` in `config.toml`' -r -f -a "(__fish_complete_directories)" complete -c x.py -n "__fish_seen_subcommand_from miri" -l build -d 'build target of the stage0 compiler' -r -f diff --git a/src/etc/completions/x.py.ps1 b/src/etc/completions/x.py.ps1 index ce590d2fa4897..4b424471a2672 100644 --- a/src/etc/completions/x.py.ps1 +++ b/src/etc/completions/x.py.ps1 @@ -338,7 +338,7 @@ Register-ArgumentCompleter -Native -CommandName 'x.py' -ScriptBlock { } 'x.py;test' { [CompletionResult]::new('--test-args', 'test-args', [CompletionResultType]::ParameterName, 'extra arguments to be passed for the test tool being used (e.g. 
libtest, compiletest or rustdoc)') - [CompletionResult]::new('--rustc-args', 'rustc-args', [CompletionResultType]::ParameterName, 'extra options to pass the compiler when running tests') + [CompletionResult]::new('--compiletest-rustc-args', 'compiletest-rustc-args', [CompletionResultType]::ParameterName, 'extra options to pass the compiler when running compiletest tests') [CompletionResult]::new('--extra-checks', 'extra-checks', [CompletionResultType]::ParameterName, 'comma-separated list of other files types to check (accepts py, py:lint, py:fmt, shell)') [CompletionResult]::new('--compare-mode', 'compare-mode', [CompletionResultType]::ParameterName, 'mode describing what file the actual ui output will be compared to') [CompletionResult]::new('--pass', 'pass', [CompletionResultType]::ParameterName, 'force {check,build,run}-pass tests to this mode') @@ -392,7 +392,6 @@ Register-ArgumentCompleter -Native -CommandName 'x.py' -ScriptBlock { } 'x.py;miri' { [CompletionResult]::new('--test-args', 'test-args', [CompletionResultType]::ParameterName, 'extra arguments to be passed for the test tool being used (e.g. libtest, compiletest or rustdoc)') - [CompletionResult]::new('--rustc-args', 'rustc-args', [CompletionResultType]::ParameterName, 'extra options to pass the compiler when running tests') [CompletionResult]::new('--config', 'config', [CompletionResultType]::ParameterName, 'TOML configuration file for build') [CompletionResult]::new('--build-dir', 'build-dir', [CompletionResultType]::ParameterName, 'Build directory, overrides `build.build-dir` in `config.toml`') [CompletionResult]::new('--build', 'build', [CompletionResultType]::ParameterName, 'build target of the stage0 compiler') diff --git a/src/etc/completions/x.py.sh b/src/etc/completions/x.py.sh index a4234905476ad..60ba8d3ba7032 100644 --- a/src/etc/completions/x.py.sh +++ b/src/etc/completions/x.py.sh @@ -1300,7 +1300,7 @@ _x.py() { return 0 ;; x.py__miri) - opts="-v -i -j -h --no-fail-fast --test-args --rustc-args --no-doc --doc --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --dump-bootstrap-shims --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." + opts="-v -i -j -h --no-fail-fast --test-args --no-doc --doc --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --dump-bootstrap-shims --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." 
if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -1310,10 +1310,6 @@ _x.py() { COMPREPLY=($(compgen -f "${cur}")) return 0 ;; - --rustc-args) - COMPREPLY=($(compgen -f "${cur}")) - return 0 - ;; --config) COMPREPLY=($(compgen -f "${cur}")) return 0 @@ -1862,7 +1858,7 @@ _x.py() { return 0 ;; x.py__test) - opts="-v -i -j -h --no-fail-fast --test-args --rustc-args --no-doc --doc --bless --extra-checks --force-rerun --only-modified --compare-mode --pass --run --rustfix-coverage --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --dump-bootstrap-shims --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." + opts="-v -i -j -h --no-fail-fast --test-args --compiletest-rustc-args --no-doc --doc --bless --extra-checks --force-rerun --only-modified --compare-mode --pass --run --rustfix-coverage --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --dump-bootstrap-shims --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..." if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) return 0 @@ -1872,7 +1868,7 @@ _x.py() { COMPREPLY=($(compgen -f "${cur}")) return 0 ;; - --rustc-args) + --compiletest-rustc-args) COMPREPLY=($(compgen -f "${cur}")) return 0 ;; diff --git a/src/etc/completions/x.py.zsh b/src/etc/completions/x.py.zsh index fc8be4f788127..688f692da2414 100644 --- a/src/etc/completions/x.py.zsh +++ b/src/etc/completions/x.py.zsh @@ -337,7 +337,7 @@ _arguments "${_arguments_options[@]}" \ (test) _arguments "${_arguments_options[@]}" \ '*--test-args=[extra arguments to be passed for the test tool being used (e.g. libtest, compiletest or rustdoc)]:ARGS: ' \ -'*--rustc-args=[extra options to pass the compiler when running tests]:ARGS: ' \ +'*--compiletest-rustc-args=[extra options to pass the compiler when running compiletest tests]:ARGS: ' \ '--extra-checks=[comma-separated list of other files types to check (accepts py, py\:lint, py\:fmt, shell)]:EXTRA_CHECKS: ' \ '--compare-mode=[mode describing what file the actual ui output will be compared to]:COMPARE MODE: ' \ '--pass=[force {check,build,run}-pass tests to this mode]:check | build | run: ' \ @@ -393,7 +393,6 @@ _arguments "${_arguments_options[@]}" \ (miri) _arguments "${_arguments_options[@]}" \ '*--test-args=[extra arguments to be passed for the test tool being used (e.g. 
libtest, compiletest or rustdoc)]:ARGS: ' \ -'*--rustc-args=[extra options to pass the compiler when running tests]:ARGS: ' \ '--config=[TOML configuration file for build]:FILE:_files' \ '--build-dir=[Build directory, overrides \`build.build-dir\` in \`config.toml\`]:DIR:_files -/' \ '--build=[build target of the stage0 compiler]:BUILD:( )' \ diff --git a/src/etc/gdb_providers.py b/src/etc/gdb_providers.py index 227695cdadd58..e8f9dee07d3e9 100644 --- a/src/etc/gdb_providers.py +++ b/src/etc/gdb_providers.py @@ -56,7 +56,7 @@ def __init__(self, valobj): self._valobj = valobj vec = valobj["vec"] self._length = int(vec["len"]) - self._data_ptr = unwrap_unique_or_non_null(vec["buf"]["ptr"]) + self._data_ptr = unwrap_unique_or_non_null(vec["buf"]["inner"]["ptr"]) def to_string(self): return self._data_ptr.lazy_string(encoding="utf-8", length=self._length) @@ -74,7 +74,7 @@ def __init__(self, valobj): vec = buf[ZERO_FIELD] if is_windows else buf self._length = int(vec["len"]) - self._data_ptr = unwrap_unique_or_non_null(vec["buf"]["ptr"]) + self._data_ptr = unwrap_unique_or_non_null(vec["buf"]["inner"]["ptr"]) def to_string(self): return self._data_ptr.lazy_string(encoding="utf-8", length=self._length) @@ -96,6 +96,7 @@ def to_string(self): def display_hint(): return "string" + def _enumerate_array_elements(element_ptrs): for (i, element_ptr) in enumerate(element_ptrs): key = "[{}]".format(i) @@ -112,6 +113,7 @@ def _enumerate_array_elements(element_ptrs): yield key, element + class StdSliceProvider(printer_base): def __init__(self, valobj): self._valobj = valobj @@ -130,11 +132,14 @@ def children(self): def display_hint(): return "array" + class StdVecProvider(printer_base): def __init__(self, valobj): self._valobj = valobj self._length = int(valobj["len"]) - self._data_ptr = unwrap_unique_or_non_null(valobj["buf"]["ptr"]) + self._data_ptr = unwrap_unique_or_non_null(valobj["buf"]["inner"]["ptr"]) + ptr_ty = gdb.Type.pointer(valobj.type.template_argument(0)) + self._data_ptr = self._data_ptr.reinterpret_cast(ptr_ty) def to_string(self): return "Vec(size={})".format(self._length) @@ -155,11 +160,13 @@ def __init__(self, valobj): self._head = int(valobj["head"]) self._size = int(valobj["len"]) # BACKCOMPAT: rust 1.75 - cap = valobj["buf"]["cap"] + cap = valobj["buf"]["inner"]["cap"] if cap.type.code != gdb.TYPE_CODE_INT: cap = cap[ZERO_FIELD] self._cap = int(cap) - self._data_ptr = unwrap_unique_or_non_null(valobj["buf"]["ptr"]) + self._data_ptr = unwrap_unique_or_non_null(valobj["buf"]["inner"]["ptr"]) + ptr_ty = gdb.Type.pointer(valobj.type.template_argument(0)) + self._data_ptr = self._data_ptr.reinterpret_cast(ptr_ty) def to_string(self): return "VecDeque(size={})".format(self._size) diff --git a/src/etc/lldb_providers.py b/src/etc/lldb_providers.py index c6330117380b1..8750d7682d136 100644 --- a/src/etc/lldb_providers.py +++ b/src/etc/lldb_providers.py @@ -389,11 +389,11 @@ def get_child_at_index(self, index): def update(self): # type: () -> None self.length = self.valobj.GetChildMemberWithName("len").GetValueAsUnsigned() - self.buf = self.valobj.GetChildMemberWithName("buf") + self.buf = self.valobj.GetChildMemberWithName("buf").GetChildMemberWithName("inner") self.data_ptr = unwrap_unique_or_non_null(self.buf.GetChildMemberWithName("ptr")) - self.element_type = self.data_ptr.GetType().GetPointeeType() + self.element_type = self.valobj.GetType().GetTemplateArgumentType(0) self.element_type_size = self.element_type.GetByteSize() def has_children(self): @@ -474,7 +474,7 @@ def update(self): 
# type: () -> None self.head = self.valobj.GetChildMemberWithName("head").GetValueAsUnsigned() self.size = self.valobj.GetChildMemberWithName("len").GetValueAsUnsigned() - self.buf = self.valobj.GetChildMemberWithName("buf") + self.buf = self.valobj.GetChildMemberWithName("buf").GetChildMemberWithName("inner") cap = self.buf.GetChildMemberWithName("cap") if cap.GetType().num_fields == 1: cap = cap.GetChildAtIndex(0) @@ -482,7 +482,7 @@ def update(self): self.data_ptr = unwrap_unique_or_non_null(self.buf.GetChildMemberWithName("ptr")) - self.element_type = self.data_ptr.GetType().GetPointeeType() + self.element_type = self.valobj.GetType().GetTemplateArgumentType(0) self.element_type_size = self.element_type.GetByteSize() def has_children(self): diff --git a/src/etc/natvis/liballoc.natvis b/src/etc/natvis/liballoc.natvis index da307809f7b04..49d82dfad820b 100644 --- a/src/etc/natvis/liballoc.natvis +++ b/src/etc/natvis/liballoc.natvis @@ -4,10 +4,10 @@ {{ len={len} }} len - buf.cap.__0 + buf.inner.cap.__0 len - buf.ptr.pointer.pointer + ($T1*)buf.inner.ptr.pointer.pointer @@ -15,7 +15,7 @@ {{ len={len} }} len - buf.cap.__0 + buf.inner.cap.__0 len @@ -23,7 +23,7 @@ - buf.ptr.pointer.pointer[(i + head) % buf.cap.__0] + (($T1*)buf.inner.ptr.pointer.pointer)[(i + head) % buf.inner.cap.__0] i = i + 1 @@ -41,17 +41,17 @@ - {(char*)vec.buf.ptr.pointer.pointer,[vec.len]s8} - (char*)vec.buf.ptr.pointer.pointer,[vec.len]s8 + {(char*)vec.buf.inner.ptr.pointer.pointer,[vec.len]s8} + (char*)vec.buf.inner.ptr.pointer.pointer,[vec.len]s8 vec.len - vec.buf.cap.__0 + vec.buf.inner.cap.__0 - {(char*)vec.buf.ptr.pointer.pointer,[vec.len]s8} + {(char*)vec.buf.inner.ptr.pointer.pointer,[vec.len]s8} vec.len - (char*)vec.buf.ptr.pointer.pointer + (char*)vec.buf.inner.ptr.pointer.pointer diff --git a/src/etc/natvis/libstd.natvis b/src/etc/natvis/libstd.natvis index 4371b99531817..4719a479c476b 100644 --- a/src/etc/natvis/libstd.natvis +++ b/src/etc/natvis/libstd.natvis @@ -104,14 +104,14 @@ - {(char*)inner.inner.bytes.buf.ptr.pointer.pointer,[inner.inner.bytes.len]} + {(char*)inner.inner.bytes.buf.inner.ptr.pointer.pointer,[inner.inner.bytes.len]} - {(char*)inner.inner.bytes.buf.ptr.pointer.pointer,[inner.inner.bytes.len]} + {(char*)inner.inner.bytes.buf.inner.ptr.pointer.pointer,[inner.inner.bytes.len]} inner.inner.bytes.len - (char*)inner.inner.bytes.buf.ptr.pointer.pointer + (char*)inner.inner.bytes.buf.inner.ptr.pointer.pointer diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs index cffadc7c10a91..db81b4c4282a1 100644 --- a/src/librustdoc/clean/mod.rs +++ b/src/librustdoc/clean/mod.rs @@ -2069,7 +2069,7 @@ pub(crate) fn clean_middle_ty<'tcx>( Some(ContainerTy::Ref(r)), )), }, - ty::FnDef(..) | ty::FnPtr(_) => { + ty::FnDef(..) | ty::FnPtr(..) => { // FIXME: should we merge the outer and inner binders somehow? 
let sig = bound_ty.skip_binder().fn_sig(cx.tcx); let decl = clean_poly_fn_sig(cx, None, sig); @@ -3094,16 +3094,10 @@ fn clean_maybe_renamed_foreign_item<'tcx>( let def_id = item.owner_id.to_def_id(); cx.with_param_env(def_id, |cx| { let kind = match item.kind { - hir::ForeignItemKind::Fn(decl, names, generics, safety) => { - let (generics, decl) = enter_impl_trait(cx, |cx| { - // NOTE: generics must be cleaned before args - let generics = clean_generics(generics, cx); - let args = clean_args_from_types_and_names(cx, decl.inputs, names); - let decl = clean_fn_decl_with_args(cx, decl, None, args); - (generics, decl) - }); - ForeignFunctionItem(Box::new(Function { decl, generics }), safety) - } + hir::ForeignItemKind::Fn(sig, names, generics) => ForeignFunctionItem( + clean_function(cx, &sig, generics, FunctionArgs::Names(names)), + sig.header.safety, + ), hir::ForeignItemKind::Static(ty, mutability, safety) => ForeignStaticItem( Static { type_: Box::new(clean_ty(ty, cx)), mutability, expr: None }, safety, diff --git a/src/librustdoc/config.rs b/src/librustdoc/config.rs index 9c7a9f8467f5a..9e9d8f02a2ece 100644 --- a/src/librustdoc/config.rs +++ b/src/librustdoc/config.rs @@ -286,6 +286,9 @@ pub(crate) struct RenderOptions { pub(crate) no_emit_shared: bool, /// If `true`, HTML source code pages won't be generated. pub(crate) html_no_source: bool, + /// This field is only used for the JSON output. If it's set to true, no file will be created + /// and content will be displayed in stdout directly. + pub(crate) output_to_stdout: bool, } #[derive(Copy, Clone, Debug, PartialEq, Eq)] @@ -548,16 +551,17 @@ impl Options { dcx.fatal("the `--test` flag must be passed to enable `--no-run`"); } + let mut output_to_stdout = false; let test_builder_wrappers = matches.opt_strs("test-builder-wrapper").iter().map(PathBuf::from).collect(); - let out_dir = matches.opt_str("out-dir").map(|s| PathBuf::from(&s)); - let output = matches.opt_str("output").map(|s| PathBuf::from(&s)); - let output = match (out_dir, output) { + let output = match (matches.opt_str("out-dir"), matches.opt_str("output")) { (Some(_), Some(_)) => { dcx.fatal("cannot use both 'out-dir' and 'output' at once"); } - (Some(out_dir), None) => out_dir, - (None, Some(output)) => output, + (Some(out_dir), None) | (None, Some(out_dir)) => { + output_to_stdout = out_dir == "-"; + PathBuf::from(out_dir) + } (None, None) => PathBuf::from("doc"), }; @@ -733,9 +737,11 @@ impl Options { let html_no_source = matches.opt_present("html-no-source"); if generate_link_to_definition && (show_coverage || output_format != OutputFormat::Html) { - dcx.fatal( - "--generate-link-to-definition option can only be used with HTML output format", - ); + dcx.struct_warn( + "`--generate-link-to-definition` option can only be used with HTML output format", + ) + .with_note("`--generate-link-to-definition` option will be ignored") + .emit(); } let scrape_examples_options = ScrapeExamplesOptions::new(matches, dcx); @@ -816,6 +822,7 @@ impl Options { call_locations, no_emit_shared: false, html_no_source, + output_to_stdout, }; Some((options, render_options)) } diff --git a/src/librustdoc/doctest.rs b/src/librustdoc/doctest.rs index 08d6a5a52b21a..743c1ed507eeb 100644 --- a/src/librustdoc/doctest.rs +++ b/src/librustdoc/doctest.rs @@ -1,5 +1,6 @@ mod make; mod markdown; +mod runner; mod rust; use std::fs::File; @@ -10,7 +11,7 @@ use std::sync::atomic::{AtomicUsize, Ordering}; use std::sync::{Arc, Mutex}; use std::{panic, str}; -pub(crate) use make::make_test; +pub(crate) use 
make::DocTestBuilder; pub(crate) use markdown::test as test_markdown; use rustc_ast as ast; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; @@ -150,8 +151,6 @@ pub(crate) fn run(dcx: DiagCtxtHandle<'_>, options: RustdocOptions) -> Result<() expanded_args: options.expanded_args.clone(), }; - let test_args = options.test_args.clone(); - let nocapture = options.nocapture; let externs = options.externs.clone(); let json_unused_externs = options.json_unused_externs; @@ -164,39 +163,46 @@ pub(crate) fn run(dcx: DiagCtxtHandle<'_>, options: RustdocOptions) -> Result<() let args_path = temp_dir.path().join("rustdoc-cfgs"); crate::wrap_return(dcx, generate_args_file(&args_path, &options))?; - let (tests, unused_extern_reports, compiling_test_count) = - interface::run_compiler(config, |compiler| { - compiler.enter(|queries| { - let collector = queries.global_ctxt()?.enter(|tcx| { - let crate_name = tcx.crate_name(LOCAL_CRATE).to_string(); - let crate_attrs = tcx.hir().attrs(CRATE_HIR_ID); - let opts = scrape_test_config(crate_name, crate_attrs, args_path); - let enable_per_target_ignores = options.enable_per_target_ignores; - - let mut collector = CreateRunnableDoctests::new(options, opts); - let hir_collector = HirCollector::new( - &compiler.sess, - tcx.hir(), - ErrorCodes::from(compiler.sess.opts.unstable_features.is_nightly_build()), - enable_per_target_ignores, - tcx, - ); - let tests = hir_collector.collect_crate(); - tests.into_iter().for_each(|t| collector.add_test(t)); - - collector - }); - if compiler.sess.dcx().has_errors().is_some() { - FatalError.raise(); - } + let CreateRunnableDocTests { + standalone_tests, + mergeable_tests, + rustdoc_options, + opts, + unused_extern_reports, + compiling_test_count, + .. + } = interface::run_compiler(config, |compiler| { + compiler.enter(|queries| { + let collector = queries.global_ctxt()?.enter(|tcx| { + let crate_name = tcx.crate_name(LOCAL_CRATE).to_string(); + let crate_attrs = tcx.hir().attrs(CRATE_HIR_ID); + let opts = scrape_test_config(crate_name, crate_attrs, args_path); + let enable_per_target_ignores = options.enable_per_target_ignores; + + let mut collector = CreateRunnableDocTests::new(options, opts); + let hir_collector = HirCollector::new( + &compiler.sess, + tcx.hir(), + ErrorCodes::from(compiler.sess.opts.unstable_features.is_nightly_build()), + enable_per_target_ignores, + tcx, + ); + let tests = hir_collector.collect_crate(); + tests.into_iter().for_each(|t| collector.add_test(t)); + + collector + }); + if compiler.sess.dcx().has_errors().is_some() { + FatalError.raise(); + } + + Ok(collector) + }) + })?; - let unused_extern_reports = collector.unused_extern_reports.clone(); - let compiling_test_count = collector.compiling_test_count.load(Ordering::SeqCst); - Ok((collector.tests, unused_extern_reports, compiling_test_count)) - }) - })?; + run_tests(opts, &rustdoc_options, &unused_extern_reports, standalone_tests, mergeable_tests); - run_tests(test_args, nocapture, tests); + let compiling_test_count = compiling_test_count.load(Ordering::SeqCst); // Collect and warn about unused externs, but only if we've gotten // reports for each doctest @@ -240,16 +246,83 @@ pub(crate) fn run(dcx: DiagCtxtHandle<'_>, options: RustdocOptions) -> Result<() } pub(crate) fn run_tests( - mut test_args: Vec, - nocapture: bool, - mut tests: Vec, + opts: GlobalTestOptions, + rustdoc_options: &Arc, + unused_extern_reports: &Arc>>, + mut standalone_tests: Vec, + mergeable_tests: FxHashMap>, ) { + let mut test_args = 
Vec::with_capacity(rustdoc_options.test_args.len() + 1); test_args.insert(0, "rustdoctest".to_string()); - if nocapture { + test_args.extend_from_slice(&rustdoc_options.test_args); + if rustdoc_options.nocapture { test_args.push("--nocapture".to_string()); } - tests.sort_by(|a, b| a.desc.name.as_slice().cmp(&b.desc.name.as_slice())); - test::test_main(&test_args, tests, None); + + let mut nb_errors = 0; + let mut ran_edition_tests = 0; + let target_str = rustdoc_options.target.to_string(); + + for (edition, mut doctests) in mergeable_tests { + if doctests.is_empty() { + continue; + } + doctests.sort_by(|(_, a), (_, b)| a.name.cmp(&b.name)); + + let mut tests_runner = runner::DocTestRunner::new(); + + let rustdoc_test_options = IndividualTestOptions::new( + &rustdoc_options, + &Some(format!("merged_doctest_{edition}")), + PathBuf::from(format!("doctest_{edition}.rs")), + ); + + for (doctest, scraped_test) in &doctests { + tests_runner.add_test(doctest, scraped_test, &target_str); + } + if let Ok(success) = tests_runner.run_merged_tests( + rustdoc_test_options, + edition, + &opts, + &test_args, + rustdoc_options, + ) { + ran_edition_tests += 1; + if !success { + nb_errors += 1; + } + continue; + } + // We failed to compile all compatible tests as one so we push them into the + // `standalone_tests` doctests. + debug!("Failed to compile compatible doctests for edition {} all at once", edition); + for (doctest, scraped_test) in doctests { + doctest.generate_unique_doctest( + &scraped_test.text, + scraped_test.langstr.test_harness, + &opts, + Some(&opts.crate_name), + ); + standalone_tests.push(generate_test_desc_and_fn( + doctest, + scraped_test, + opts.clone(), + Arc::clone(&rustdoc_options), + unused_extern_reports.clone(), + )); + } + } + + // We need to call `test_main` even if there is no doctest to run to get the output + // `running 0 tests...`. + if ran_edition_tests == 0 || !standalone_tests.is_empty() { + standalone_tests.sort_by(|a, b| a.desc.name.as_slice().cmp(&b.desc.name.as_slice())); + test::test_main(&test_args, standalone_tests, None); + } + if nb_errors != 0 { + // libtest::ERROR_EXIT_CODE is not public but it's the same value. + std::process::exit(101); + } } // Look for `#![doc(test(no_crate_inject))]`, used by crates in the std facade. @@ -330,7 +403,7 @@ impl DirState { // We could unify this struct the one in rustc but they have different // ownership semantics, so doing so would create wasteful allocations. #[derive(serde::Serialize, serde::Deserialize)] -struct UnusedExterns { +pub(crate) struct UnusedExterns { /// Lint level of the unused_crate_dependencies lint lint_level: String, /// List of unused externs by their names. @@ -359,22 +432,41 @@ fn wrapped_rustc_command(rustc_wrappers: &[PathBuf], rustc_binary: &Path) -> Com command } -struct RunnableDoctest { +/// Information needed for running a bundle of doctests. +/// +/// This data structure contains the "full" test code, including the wrappers +/// (if multiple doctests are merged), `main` function, +/// and everything needed to calculate the compiler's command-line arguments. +/// The `# ` prefix on boring lines has also been stripped. 
+pub(crate) struct RunnableDocTest { full_test_code: String, full_test_line_offset: usize, test_opts: IndividualTestOptions, global_opts: GlobalTestOptions, - scraped_test: ScrapedDoctest, + langstr: LangString, + line: usize, + edition: Edition, + no_run: bool, + is_multiple_tests: bool, } +impl RunnableDocTest { + fn path_for_merged_doctest(&self) -> PathBuf { + self.test_opts.outdir.path().join(&format!("doctest_{}.rs", self.edition)) + } +} + +/// Execute a `RunnableDoctest`. +/// +/// This is the function that calculates the compiler command line, invokes the compiler, then +/// invokes the test or tests in a separate executable (if applicable). fn run_test( - doctest: RunnableDoctest, + doctest: RunnableDocTest, rustdoc_options: &RustdocOptions, supports_color: bool, report_unused_externs: impl Fn(UnusedExterns), ) -> Result<(), TestFailure> { - let scraped_test = &doctest.scraped_test; - let langstr = &scraped_test.langstr; + let langstr = &doctest.langstr; // Make sure we emit well-formed executable names for our target. let rust_out = add_exe_suffix("rust_out".to_owned(), &rustdoc_options.target); let output_file = doctest.test_opts.outdir.path().join(rust_out); @@ -391,12 +483,15 @@ fn run_test( compiler.arg(format!("--sysroot={}", sysroot.display())); } - compiler.arg("--edition").arg(&scraped_test.edition(rustdoc_options).to_string()); - compiler.env("UNSTABLE_RUSTDOC_TEST_PATH", &doctest.test_opts.path); - compiler.env( - "UNSTABLE_RUSTDOC_TEST_LINE", - format!("{}", scraped_test.line as isize - doctest.full_test_line_offset as isize), - ); + compiler.arg("--edition").arg(&doctest.edition.to_string()); + if !doctest.is_multiple_tests { + // Setting these environment variables is unneeded if this is a merged doctest. + compiler.env("UNSTABLE_RUSTDOC_TEST_PATH", &doctest.test_opts.path); + compiler.env( + "UNSTABLE_RUSTDOC_TEST_LINE", + format!("{}", doctest.line as isize - doctest.full_test_line_offset as isize), + ); + } compiler.arg("-o").arg(&output_file); if langstr.test_harness { compiler.arg("--test"); @@ -408,10 +503,7 @@ fn run_test( compiler.arg("-Z").arg("unstable-options"); } - if scraped_test.no_run(rustdoc_options) - && !langstr.compile_fail - && rustdoc_options.persist_doctests.is_none() - { + if doctest.no_run && !langstr.compile_fail && rustdoc_options.persist_doctests.is_none() { // FIXME: why does this code check if it *shouldn't* persist doctests // -- shouldn't it be the negation? compiler.arg("--emit=metadata"); @@ -442,18 +534,40 @@ fn run_test( } } - compiler.arg("-"); - compiler.stdin(Stdio::piped()); - compiler.stderr(Stdio::piped()); + // If this is a merged doctest, we need to write it into a file instead of using stdin + // because if the size of the merged doctests is too big, it'll simply break stdin. + if doctest.is_multiple_tests { + // It makes the compilation failure much faster if it is for a combined doctest. + compiler.arg("--error-format=short"); + let input_file = doctest.path_for_merged_doctest(); + if std::fs::write(&input_file, &doctest.full_test_code).is_err() { + // If we cannot write this file for any reason, we leave. All combined tests will be + // tested as standalone tests. + return Err(TestFailure::CompileError); + } + compiler.arg(input_file); + if !rustdoc_options.nocapture { + // If `nocapture` is disabled, then we don't display rustc's output when compiling + // the merged doctests. 
+ compiler.stderr(Stdio::null()); + } + } else { + compiler.arg("-"); + compiler.stdin(Stdio::piped()); + compiler.stderr(Stdio::piped()); + } debug!("compiler invocation for doctest: {compiler:?}"); let mut child = compiler.spawn().expect("Failed to spawn rustc process"); - { + let output = if doctest.is_multiple_tests { + let status = child.wait().expect("Failed to wait"); + process::Output { status, stdout: Vec::new(), stderr: Vec::new() } + } else { let stdin = child.stdin.as_mut().expect("Failed to open stdin"); stdin.write_all(doctest.full_test_code.as_bytes()).expect("could write out test sources"); - } - let output = child.wait_with_output().expect("Failed to read stdout"); + child.wait_with_output().expect("Failed to read stdout") + }; struct Bomb<'a>(&'a str); impl Drop for Bomb<'_> { @@ -492,8 +606,7 @@ fn run_test( // We used to check if the output contained "error[{}]: " but since we added the // colored output, we can't anymore because of the color escape characters before // the ":". - let missing_codes: Vec = scraped_test - .langstr + let missing_codes: Vec = langstr .error_codes .iter() .filter(|err| !out.contains(&format!("error[{err}]"))) @@ -510,7 +623,7 @@ fn run_test( } } - if scraped_test.no_run(rustdoc_options) { + if doctest.no_run { return Ok(()); } @@ -522,15 +635,19 @@ fn run_test( let tool = make_maybe_absolute_path(tool.into()); cmd = Command::new(tool); cmd.args(&rustdoc_options.runtool_args); - cmd.arg(output_file); + cmd.arg(&output_file); } else { - cmd = Command::new(output_file); + cmd = Command::new(&output_file); + if doctest.is_multiple_tests { + cmd.arg("*doctest-bin-path"); + cmd.arg(&output_file); + } } if let Some(run_directory) = &rustdoc_options.test_run_directory { cmd.current_dir(run_directory); } - let result = if rustdoc_options.nocapture { + let result = if doctest.is_multiple_tests || rustdoc_options.nocapture { cmd.status().map(|status| process::Output { status, stdout: Vec::new(), @@ -568,15 +685,14 @@ fn make_maybe_absolute_path(path: PathBuf) -> PathBuf { } struct IndividualTestOptions { outdir: DirState, - test_id: String, path: PathBuf, } impl IndividualTestOptions { - fn new(options: &RustdocOptions, test_id: String, test_path: PathBuf) -> Self { + fn new(options: &RustdocOptions, test_id: &Option, test_path: PathBuf) -> Self { let outdir = if let Some(ref path) = options.persist_doctests { let mut path = path.clone(); - path.push(&test_id); + path.push(&test_id.as_deref().unwrap_or_else(|| "")); if let Err(err) = std::fs::create_dir_all(&path) { eprintln!("Couldn't create directory for doctest executables: {err}"); @@ -588,20 +704,45 @@ impl IndividualTestOptions { DirState::Temp(get_doctest_dir().expect("rustdoc needs a tempdir")) }; - Self { outdir, test_id, path: test_path } + Self { outdir, path: test_path } } } /// A doctest scraped from the code, ready to be turned into a runnable test. -struct ScrapedDoctest { +/// +/// The pipeline goes: [`clean`] AST -> `ScrapedDoctest` -> `RunnableDoctest`. +/// [`run_merged_tests`] converts a bunch of scraped doctests to a single runnable doctest, +/// while [`generate_unique_doctest`] does the standalones. 
+/// +/// [`clean`]: crate::clean +/// [`run_merged_tests`]: crate::doctest::runner::DocTestRunner::run_merged_tests +/// [`generate_unique_doctest`]: crate::doctest::make::DocTestBuilder::generate_unique_doctest +pub(crate) struct ScrapedDocTest { filename: FileName, line: usize, - logical_path: Vec, langstr: LangString, text: String, + name: String, } -impl ScrapedDoctest { +impl ScrapedDocTest { + fn new( + filename: FileName, + line: usize, + logical_path: Vec, + langstr: LangString, + text: String, + ) -> Self { + let mut item_path = logical_path.join("::"); + item_path.retain(|c| c != ' '); + if !item_path.is_empty() { + item_path.push(' '); + } + let name = + format!("{} - {item_path}(line {line})", filename.prefer_remapped_unconditionaly()); + + Self { filename, line, langstr, text, name } + } fn edition(&self, opts: &RustdocOptions) -> Edition { self.langstr.edition.unwrap_or(opts.edition) } @@ -609,67 +750,56 @@ impl ScrapedDoctest { fn no_run(&self, opts: &RustdocOptions) -> bool { self.langstr.no_run || opts.no_run } + fn path(&self) -> PathBuf { + match &self.filename { + FileName::Real(path) => { + if let Some(local_path) = path.local_path() { + local_path.to_path_buf() + } else { + // Somehow we got the filename from the metadata of another crate, should never happen + unreachable!("doctest from a different crate"); + } + } + _ => PathBuf::from(r"doctest.rs"), + } + } } -pub(crate) trait DoctestVisitor { +pub(crate) trait DocTestVisitor { fn visit_test(&mut self, test: String, config: LangString, rel_line: MdRelLine); fn visit_header(&mut self, _name: &str, _level: u32) {} } -struct CreateRunnableDoctests { - tests: Vec, +struct CreateRunnableDocTests { + standalone_tests: Vec, + mergeable_tests: FxHashMap>, rustdoc_options: Arc, opts: GlobalTestOptions, visited_tests: FxHashMap<(String, usize), usize>, unused_extern_reports: Arc>>, compiling_test_count: AtomicUsize, + can_merge_doctests: bool, } -impl CreateRunnableDoctests { - fn new(rustdoc_options: RustdocOptions, opts: GlobalTestOptions) -> CreateRunnableDoctests { - CreateRunnableDoctests { - tests: Vec::new(), +impl CreateRunnableDocTests { + fn new(rustdoc_options: RustdocOptions, opts: GlobalTestOptions) -> CreateRunnableDocTests { + let can_merge_doctests = rustdoc_options.edition >= Edition::Edition2024; + CreateRunnableDocTests { + standalone_tests: Vec::new(), + mergeable_tests: FxHashMap::default(), rustdoc_options: Arc::new(rustdoc_options), opts, visited_tests: FxHashMap::default(), unused_extern_reports: Default::default(), compiling_test_count: AtomicUsize::new(0), + can_merge_doctests, } } - fn generate_name(&self, filename: &FileName, line: usize, logical_path: &[String]) -> String { - let mut item_path = logical_path.join("::"); - item_path.retain(|c| c != ' '); - if !item_path.is_empty() { - item_path.push(' '); - } - format!("{} - {item_path}(line {line})", filename.prefer_remapped_unconditionaly()) - } - - fn add_test(&mut self, test: ScrapedDoctest) { - let name = self.generate_name(&test.filename, test.line, &test.logical_path); - let opts = self.opts.clone(); - let target_str = self.rustdoc_options.target.to_string(); - let unused_externs = self.unused_extern_reports.clone(); - if !test.langstr.compile_fail { - self.compiling_test_count.fetch_add(1, Ordering::SeqCst); - } - - let path = match &test.filename { - FileName::Real(path) => { - if let Some(local_path) = path.local_path() { - local_path.to_path_buf() - } else { - // Somehow we got the filename from the metadata of another crate, should 
never happen - unreachable!("doctest from a different crate"); - } - } - _ => PathBuf::from(r"doctest.rs"), - }; - + fn add_test(&mut self, scraped_test: ScrapedDocTest) { // For example `module/file.rs` would become `module_file_rs` - let file = test + let file = scraped_test .filename .prefer_local() .to_string_lossy() @@ -679,75 +809,134 @@ impl CreateRunnableDoctests { let test_id = format!( "{file}_{line}_{number}", file = file, - line = test.line, + line = scraped_test.line, number = { // Increases the current test number, if this file already // exists or it creates a new entry with a test number of 0. self.visited_tests - .entry((file.clone(), test.line)) + .entry((file.clone(), scraped_test.line)) .and_modify(|v| *v += 1) .or_insert(0) }, ); - let rustdoc_options = self.rustdoc_options.clone(); - let rustdoc_test_options = IndividualTestOptions::new(&self.rustdoc_options, test_id, path); - - debug!("creating test {name}: {}", test.text); - self.tests.push(test::TestDescAndFn { - desc: test::TestDesc { - name: test::DynTestName(name), - ignore: match test.langstr.ignore { - Ignore::All => true, - Ignore::None => false, - Ignore::Some(ref ignores) => ignores.iter().any(|s| target_str.contains(s)), - }, - ignore_message: None, - source_file: "", - start_line: 0, - start_col: 0, - end_line: 0, - end_col: 0, - // compiler failures are test failures - should_panic: test::ShouldPanic::No, - compile_fail: test.langstr.compile_fail, - no_run: test.no_run(&rustdoc_options), - test_type: test::TestType::DocTest, + let edition = scraped_test.edition(&self.rustdoc_options); + let doctest = DocTestBuilder::new( + &scraped_test.text, + Some(&self.opts.crate_name), + edition, + self.can_merge_doctests, + Some(test_id), + Some(&scraped_test.langstr), + ); + let is_standalone = !doctest.can_be_merged + || scraped_test.langstr.compile_fail + || scraped_test.langstr.test_harness + || scraped_test.langstr.standalone + || self.rustdoc_options.nocapture + || self.rustdoc_options.test_args.iter().any(|arg| arg == "--show-output"); + if is_standalone { + let test_desc = self.generate_test_desc_and_fn(doctest, scraped_test); + self.standalone_tests.push(test_desc); + } else { + self.mergeable_tests.entry(edition).or_default().push((doctest, scraped_test)); + } + } + + fn generate_test_desc_and_fn( + &mut self, + test: DocTestBuilder, + scraped_test: ScrapedDocTest, + ) -> test::TestDescAndFn { + if !scraped_test.langstr.compile_fail { + self.compiling_test_count.fetch_add(1, Ordering::SeqCst); + } + + generate_test_desc_and_fn( + test, + scraped_test, + self.opts.clone(), + Arc::clone(&self.rustdoc_options), + self.unused_extern_reports.clone(), + ) + } +} + +fn generate_test_desc_and_fn( + test: DocTestBuilder, + scraped_test: ScrapedDocTest, + opts: GlobalTestOptions, + rustdoc_options: Arc, + unused_externs: Arc>>, +) -> test::TestDescAndFn { + let target_str = rustdoc_options.target.to_string(); + let rustdoc_test_options = + IndividualTestOptions::new(&rustdoc_options, &test.test_id, scraped_test.path()); + + debug!("creating test {}: {}", scraped_test.name, scraped_test.text); + test::TestDescAndFn { + desc: test::TestDesc { + name: test::DynTestName(scraped_test.name.clone()), + ignore: match scraped_test.langstr.ignore { + Ignore::All => true, + Ignore::None => false, + Ignore::Some(ref ignores) => ignores.iter().any(|s| target_str.contains(s)), }, - testfn: test::DynTestFn(Box::new(move || { - doctest_run_fn(rustdoc_test_options, opts, test, rustdoc_options, unused_externs) - })), - }); + 
ignore_message: None, + source_file: "", + start_line: 0, + start_col: 0, + end_line: 0, + end_col: 0, + // compiler failures are test failures + should_panic: test::ShouldPanic::No, + compile_fail: scraped_test.langstr.compile_fail, + no_run: scraped_test.no_run(&rustdoc_options), + test_type: test::TestType::DocTest, + }, + testfn: test::DynTestFn(Box::new(move || { + doctest_run_fn( + rustdoc_test_options, + opts, + test, + scraped_test, + rustdoc_options, + unused_externs, + ) + })), } } fn doctest_run_fn( test_opts: IndividualTestOptions, global_opts: GlobalTestOptions, - scraped_test: ScrapedDoctest, + doctest: DocTestBuilder, + scraped_test: ScrapedDocTest, rustdoc_options: Arc, unused_externs: Arc>>, ) -> Result<(), String> { let report_unused_externs = |uext| { unused_externs.lock().unwrap().push(uext); }; - let edition = scraped_test.edition(&rustdoc_options); - let (full_test_code, full_test_line_offset, supports_color) = make_test( + let (full_test_code, full_test_line_offset) = doctest.generate_unique_doctest( &scraped_test.text, - Some(&global_opts.crate_name), scraped_test.langstr.test_harness, &global_opts, - edition, - Some(&test_opts.test_id), + Some(&global_opts.crate_name), ); - let runnable_test = RunnableDoctest { + let runnable_test = RunnableDocTest { full_test_code, full_test_line_offset, test_opts, global_opts, - scraped_test, + langstr: scraped_test.langstr.clone(), + line: scraped_test.line, + edition: scraped_test.edition(&rustdoc_options), + no_run: scraped_test.no_run(&rustdoc_options), + is_multiple_tests: false, }; - let res = run_test(runnable_test, &rustdoc_options, supports_color, report_unused_externs); + let res = + run_test(runnable_test, &rustdoc_options, doctest.supports_color, report_unused_externs); if let Err(err) = res { match err { @@ -804,7 +993,7 @@ fn doctest_run_fn( } #[cfg(test)] // used in tests -impl DoctestVisitor for Vec { +impl DocTestVisitor for Vec { fn visit_test(&mut self, _test: String, _config: LangString, rel_line: MdRelLine) { self.push(1 + rel_line.offset()); } diff --git a/src/librustdoc/doctest/make.rs b/src/librustdoc/doctest/make.rs index 74833c113623b..aed079e5887b0 100644 --- a/src/librustdoc/doctest/make.rs +++ b/src/librustdoc/doctest/make.rs @@ -16,250 +16,428 @@ use rustc_span::symbol::sym; use rustc_span::FileName; use super::GlobalTestOptions; +use crate::html::markdown::LangString; + +/// This struct contains information about the doctest itself which is then used to generate +/// doctest source code appropriately. +pub(crate) struct DocTestBuilder { + pub(crate) supports_color: bool, + pub(crate) already_has_extern_crate: bool, + pub(crate) has_main_fn: bool, + pub(crate) crate_attrs: String, + /// If this is a merged doctest, it will be put into `everything_else`, otherwise it will + /// put into `crate_attrs`. + pub(crate) maybe_crate_attrs: String, + pub(crate) crates: String, + pub(crate) everything_else: String, + pub(crate) test_id: Option, + pub(crate) failed_ast: bool, + pub(crate) can_be_merged: bool, +} -/// Transforms a test into code that can be compiled into a Rust binary, and returns the number of -/// lines before the test code begins as well as if the output stream supports colors or not. 
-pub(crate) fn make_test( - s: &str, - crate_name: Option<&str>, - dont_insert_main: bool, - opts: &GlobalTestOptions, - edition: Edition, - test_id: Option<&str>, -) -> (String, usize, bool) { - let (crate_attrs, everything_else, crates) = partition_source(s, edition); - let everything_else = everything_else.trim(); - let mut line_offset = 0; - let mut prog = String::new(); - let mut supports_color = false; - - if opts.attrs.is_empty() { - // If there aren't any attributes supplied by #![doc(test(attr(...)))], then allow some - // lints that are commonly triggered in doctests. The crate-level test attributes are - // commonly used to make tests fail in case they trigger warnings, so having this there in - // that case may cause some tests to pass when they shouldn't have. - prog.push_str("#![allow(unused)]\n"); - line_offset += 1; +impl DocTestBuilder { + pub(crate) fn new( + source: &str, + crate_name: Option<&str>, + edition: Edition, + can_merge_doctests: bool, + // If `test_id` is `None`, it means we're generating code for a code example "run" link. + test_id: Option, + lang_str: Option<&LangString>, + ) -> Self { + let can_merge_doctests = can_merge_doctests + && lang_str.is_some_and(|lang_str| { + !lang_str.compile_fail && !lang_str.test_harness && !lang_str.standalone + }); + + let SourceInfo { crate_attrs, maybe_crate_attrs, crates, everything_else } = + partition_source(source, edition); + + // Uses librustc_ast to parse the doctest and find if there's a main fn and the extern + // crate already is included. + let Ok(( + ParseSourceInfo { + has_main_fn, + found_extern_crate, + supports_color, + has_global_allocator, + has_macro_def, + .. + }, + failed_ast, + )) = check_for_main_and_extern_crate( + crate_name, + source, + &everything_else, + &crates, + edition, + can_merge_doctests, + ) + else { + // If the parser panicked due to a fatal error, pass the test code through unchanged. + // The error will be reported during compilation. + return Self { + supports_color: false, + has_main_fn: false, + crate_attrs, + maybe_crate_attrs, + crates, + everything_else, + already_has_extern_crate: false, + test_id, + failed_ast: true, + can_be_merged: false, + }; + }; + // If the AST returned an error, we don't want this doctest to be merged with the + // others. Same if it contains `#[feature]` or `#[no_std]`. + let can_be_merged = can_merge_doctests + && !failed_ast + && !has_global_allocator + && crate_attrs.is_empty() + // If this is a merged doctest and a defined macro uses `$crate`, then the path will + // not work, so better not put it into merged doctests. + && !(has_macro_def && everything_else.contains("$crate")); + Self { + supports_color, + has_main_fn, + crate_attrs, + maybe_crate_attrs, + crates, + everything_else, + already_has_extern_crate: found_extern_crate, + test_id, + failed_ast: false, + can_be_merged, + } } - // Next, any attributes that came from the crate root via #![doc(test(attr(...)))]. - for attr in &opts.attrs { - prog.push_str(&format!("#![{attr}]\n")); - line_offset += 1; - } + /// Transforms a test into code that can be compiled into a Rust binary, and returns the number of + /// lines before the test code begins. + pub(crate) fn generate_unique_doctest( + &self, + test_code: &str, + dont_insert_main: bool, + opts: &GlobalTestOptions, + crate_name: Option<&str>, + ) -> (String, usize) { + if self.failed_ast { + // If the AST failed to compile, no need to go generate a complete doctest, the error + // will be better this way. 
+ return (test_code.to_string(), 0); + } + let mut line_offset = 0; + let mut prog = String::new(); + let everything_else = self.everything_else.trim(); + if opts.attrs.is_empty() { + // If there aren't any attributes supplied by #![doc(test(attr(...)))], then allow some + // lints that are commonly triggered in doctests. The crate-level test attributes are + // commonly used to make tests fail in case they trigger warnings, so having this there in + // that case may cause some tests to pass when they shouldn't have. + prog.push_str("#![allow(unused)]\n"); + line_offset += 1; + } - // Now push any outer attributes from the example, assuming they - // are intended to be crate attributes. - prog.push_str(&crate_attrs); - prog.push_str(&crates); - - // Uses librustc_ast to parse the doctest and find if there's a main fn and the extern - // crate already is included. - let Ok((already_has_main, already_has_extern_crate)) = - check_for_main_and_extern_crate(crate_name, s.to_owned(), edition, &mut supports_color) - else { - // If the parser panicked due to a fatal error, pass the test code through unchanged. - // The error will be reported during compilation. - return (s.to_owned(), 0, false); - }; + // Next, any attributes that came from the crate root via #![doc(test(attr(...)))]. + for attr in &opts.attrs { + prog.push_str(&format!("#![{attr}]\n")); + line_offset += 1; + } - // Don't inject `extern crate std` because it's already injected by the - // compiler. - if !already_has_extern_crate && !opts.no_crate_inject && crate_name != Some("std") { - if let Some(crate_name) = crate_name { + // Now push any outer attributes from the example, assuming they + // are intended to be crate attributes. + prog.push_str(&self.crate_attrs); + prog.push_str(&self.maybe_crate_attrs); + prog.push_str(&self.crates); + + // Don't inject `extern crate std` because it's already injected by the + // compiler. + if !self.already_has_extern_crate && + !opts.no_crate_inject && + let Some(crate_name) = crate_name && + crate_name != "std" && // Don't inject `extern crate` if the crate is never used. // NOTE: this is terribly inaccurate because it doesn't actually // parse the source, but only has false positives, not false // negatives. - if s.contains(crate_name) { - // rustdoc implicitly inserts an `extern crate` item for the own crate - // which may be unused, so we need to allow the lint. - prog.push_str("#[allow(unused_extern_crates)]\n"); - - prog.push_str(&format!("extern crate r#{crate_name};\n")); - line_offset += 1; - } + test_code.contains(crate_name) + { + // rustdoc implicitly inserts an `extern crate` item for the own crate + // which may be unused, so we need to allow the lint. + prog.push_str("#[allow(unused_extern_crates)]\n"); + + prog.push_str(&format!("extern crate r#{crate_name};\n")); + line_offset += 1; } - } - // FIXME: This code cannot yet handle no_std test cases yet - if dont_insert_main || already_has_main || prog.contains("![no_std]") { - prog.push_str(everything_else); - } else { - let returns_result = everything_else.trim_end().ends_with("(())"); - // Give each doctest main function a unique name. - // This is for example needed for the tooling around `-C instrument-coverage`. 
- let inner_fn_name = if let Some(test_id) = test_id { - format!("_doctest_main_{test_id}") + // FIXME: This code cannot yet handle no_std test cases yet + if dont_insert_main || self.has_main_fn || prog.contains("![no_std]") { + prog.push_str(everything_else); } else { - "_inner".into() - }; - let inner_attr = if test_id.is_some() { "#[allow(non_snake_case)] " } else { "" }; - let (main_pre, main_post) = if returns_result { - ( - format!( - "fn main() {{ {inner_attr}fn {inner_fn_name}() -> Result<(), impl core::fmt::Debug> {{\n", - ), - format!("\n}} {inner_fn_name}().unwrap() }}"), - ) - } else if test_id.is_some() { - ( - format!("fn main() {{ {inner_attr}fn {inner_fn_name}() {{\n",), - format!("\n}} {inner_fn_name}() }}"), - ) - } else { - ("fn main() {\n".into(), "\n}".into()) - }; - // Note on newlines: We insert a line/newline *before*, and *after* - // the doctest and adjust the `line_offset` accordingly. - // In the case of `-C instrument-coverage`, this means that the generated - // inner `main` function spans from the doctest opening codeblock to the - // closing one. For example - // /// ``` <- start of the inner main - // /// <- code under doctest - // /// ``` <- end of the inner main - line_offset += 1; - - // add extra 4 spaces for each line to offset the code block - let content = if opts.insert_indent_space { - everything_else - .lines() - .map(|line| format!(" {}", line)) - .collect::>() - .join("\n") - } else { - everything_else.to_string() - }; - prog.extend([&main_pre, content.as_str(), &main_post].iter().cloned()); - } + let returns_result = everything_else.ends_with("(())"); + // Give each doctest main function a unique name. + // This is for example needed for the tooling around `-C instrument-coverage`. + let inner_fn_name = if let Some(ref test_id) = self.test_id { + format!("_doctest_main_{test_id}") + } else { + "_inner".into() + }; + let inner_attr = if self.test_id.is_some() { "#[allow(non_snake_case)] " } else { "" }; + let (main_pre, main_post) = if returns_result { + ( + format!( + "fn main() {{ {inner_attr}fn {inner_fn_name}() -> Result<(), impl core::fmt::Debug> {{\n", + ), + format!("\n}} {inner_fn_name}().unwrap() }}"), + ) + } else if self.test_id.is_some() { + ( + format!("fn main() {{ {inner_attr}fn {inner_fn_name}() {{\n",), + format!("\n}} {inner_fn_name}() }}"), + ) + } else { + ("fn main() {\n".into(), "\n}".into()) + }; + // Note on newlines: We insert a line/newline *before*, and *after* + // the doctest and adjust the `line_offset` accordingly. + // In the case of `-C instrument-coverage`, this means that the generated + // inner `main` function spans from the doctest opening codeblock to the + // closing one. 
For example + // /// ``` <- start of the inner main + // /// <- code under doctest + // /// ``` <- end of the inner main + line_offset += 1; + + prog.push_str(&main_pre); + + // add extra 4 spaces for each line to offset the code block + if opts.insert_indent_space { + prog.push_str( + &everything_else + .lines() + .map(|line| format!(" {}", line)) + .collect::>() + .join("\n"), + ); + } else { + prog.push_str(everything_else); + }; + prog.push_str(&main_post); + } - debug!("final doctest:\n{prog}"); + debug!("final doctest:\n{prog}"); - (prog, line_offset, supports_color) + (prog, line_offset) + } } -fn check_for_main_and_extern_crate( - crate_name: Option<&str>, - source: String, - edition: Edition, - supports_color: &mut bool, -) -> Result<(bool, bool), FatalError> { - let result = rustc_driver::catch_fatal_errors(|| { - rustc_span::create_session_if_not_set_then(edition, |_| { - use rustc_errors::emitter::{Emitter, HumanEmitter}; - use rustc_errors::DiagCtxt; - use rustc_parse::parser::ForceCollect; - use rustc_span::source_map::FilePathMapping; - - let filename = FileName::anon_source_code(&source); - - // Any errors in parsing should also appear when the doctest is compiled for real, so just - // send all the errors that librustc_ast emits directly into a `Sink` instead of stderr. - let sm = Lrc::new(SourceMap::new(FilePathMapping::empty())); - let fallback_bundle = rustc_errors::fallback_fluent_bundle( - rustc_driver::DEFAULT_LOCALE_RESOURCES.to_vec(), - false, - ); - *supports_color = - HumanEmitter::new(stderr_destination(ColorConfig::Auto), fallback_bundle.clone()) - .supports_color(); +#[derive(PartialEq, Eq, Debug)] +enum ParsingResult { + Failed, + AstError, + Ok, +} - let emitter = HumanEmitter::new(Box::new(io::sink()), fallback_bundle); +fn cancel_error_count(psess: &ParseSess) { + // Reset errors so that they won't be reported as compiler bugs when dropping the + // dcx. Any errors in the tests will be reported when the test file is compiled, + // Note that we still need to cancel the errors above otherwise `Diag` will panic on + // drop. + psess.dcx().reset_err_count(); +} - // FIXME(misdreavus): pass `-Z treat-err-as-bug` to the doctest parser - let dcx = DiagCtxt::new(Box::new(emitter)).disable_warnings(); - let psess = ParseSess::with_dcx(dcx, sm); +fn parse_source( + source: String, + info: &mut ParseSourceInfo, + crate_name: &Option<&str>, +) -> ParsingResult { + use rustc_errors::emitter::{Emitter, HumanEmitter}; + use rustc_errors::DiagCtxt; + use rustc_parse::parser::ForceCollect; + use rustc_span::source_map::FilePathMapping; + + let filename = FileName::anon_source_code(&source); + + // Any errors in parsing should also appear when the doctest is compiled for real, so just + // send all the errors that librustc_ast emits directly into a `Sink` instead of stderr. 
+ let sm = Lrc::new(SourceMap::new(FilePathMapping::empty())); + let fallback_bundle = rustc_errors::fallback_fluent_bundle( + rustc_driver::DEFAULT_LOCALE_RESOURCES.to_vec(), + false, + ); + info.supports_color = + HumanEmitter::new(stderr_destination(ColorConfig::Auto), fallback_bundle.clone()) + .supports_color(); + + let emitter = HumanEmitter::new(Box::new(io::sink()), fallback_bundle); + + // FIXME(misdreavus): pass `-Z treat-err-as-bug` to the doctest parser + let dcx = DiagCtxt::new(Box::new(emitter)).disable_warnings(); + let psess = ParseSess::with_dcx(dcx, sm); + + let mut parser = match new_parser_from_source_str(&psess, filename, source) { + Ok(p) => p, + Err(errs) => { + errs.into_iter().for_each(|err| err.cancel()); + cancel_error_count(&psess); + return ParsingResult::Failed; + } + }; + let mut parsing_result = ParsingResult::Ok; + + // Recurse through functions body. It is necessary because the doctest source code is + // wrapped in a function to limit the number of AST errors. If we don't recurse into + // functions, we would thing all top-level items (so basically nothing). + fn check_item(item: &ast::Item, info: &mut ParseSourceInfo, crate_name: &Option<&str>) { + if !info.has_global_allocator + && item.attrs.iter().any(|attr| attr.name_or_empty() == sym::global_allocator) + { + info.has_global_allocator = true; + } + match item.kind { + ast::ItemKind::Fn(ref fn_item) if !info.has_main_fn => { + if item.ident.name == sym::main { + info.has_main_fn = true; + } + if let Some(ref body) = fn_item.body { + for stmt in &body.stmts { + match stmt.kind { + ast::StmtKind::Item(ref item) => check_item(item, info, crate_name), + ast::StmtKind::MacCall(..) => info.found_macro = true, + _ => {} + } + } + } + } + ast::ItemKind::ExternCrate(original) => { + if !info.found_extern_crate + && let Some(ref crate_name) = crate_name + { + info.found_extern_crate = match original { + Some(name) => name.as_str() == *crate_name, + None => item.ident.as_str() == *crate_name, + }; + } + } + ast::ItemKind::MacCall(..) => info.found_macro = true, + ast::ItemKind::MacroDef(..) => info.has_macro_def = true, + _ => {} + } + } - let mut found_main = false; - let mut found_extern_crate = crate_name.is_none(); - let mut found_macro = false; + loop { + match parser.parse_item(ForceCollect::No) { + Ok(Some(item)) => { + check_item(&item, info, crate_name); - let mut parser = match new_parser_from_source_str(&psess, filename, source.clone()) { - Ok(p) => p, - Err(errs) => { - errs.into_iter().for_each(|err| err.cancel()); - return (found_main, found_extern_crate, found_macro); + if info.has_main_fn && info.found_extern_crate { + break; } - }; - - loop { - match parser.parse_item(ForceCollect::No) { - Ok(Some(item)) => { - if !found_main - && let ast::ItemKind::Fn(..) = item.kind - && item.ident.name == sym::main - { - found_main = true; - } + } + Ok(None) => break, + Err(e) => { + parsing_result = ParsingResult::AstError; + e.cancel(); + break; + } + } - if !found_extern_crate - && let ast::ItemKind::ExternCrate(original) = item.kind - { - // This code will never be reached if `crate_name` is none because - // `found_extern_crate` is initialized to `true` if it is none. - let crate_name = crate_name.unwrap(); - - match original { - Some(name) => found_extern_crate = name.as_str() == crate_name, - None => found_extern_crate = item.ident.as_str() == crate_name, - } - } + // The supplied item is only used for diagnostics, + // which are swallowed here anyway. 
+ parser.maybe_consume_incorrect_semicolon(None); + } - if !found_macro && let ast::ItemKind::MacCall(..) = item.kind { - found_macro = true; - } + cancel_error_count(&psess); + parsing_result +} - if found_main && found_extern_crate { - break; - } - } - Ok(None) => break, - Err(e) => { - e.cancel(); - break; - } - } +#[derive(Default)] +struct ParseSourceInfo { + has_main_fn: bool, + found_extern_crate: bool, + found_macro: bool, + supports_color: bool, + has_global_allocator: bool, + has_macro_def: bool, +} - // The supplied item is only used for diagnostics, - // which are swallowed here anyway. - parser.maybe_consume_incorrect_semicolon(None); +fn check_for_main_and_extern_crate( + crate_name: Option<&str>, + original_source_code: &str, + everything_else: &str, + crates: &str, + edition: Edition, + can_merge_doctests: bool, +) -> Result<(ParseSourceInfo, bool), FatalError> { + let result = rustc_driver::catch_fatal_errors(|| { + rustc_span::create_session_if_not_set_then(edition, |_| { + let mut info = + ParseSourceInfo { found_extern_crate: crate_name.is_none(), ..Default::default() }; + + let mut parsing_result = + parse_source(format!("{crates}{everything_else}"), &mut info, &crate_name); + // No need to double-check this if the "merged doctests" feature isn't enabled (so + // before the 2024 edition). + if can_merge_doctests && parsing_result != ParsingResult::Ok { + // If we found an AST error, we want to ensure it's because of an expression being + // used outside of a function. + // + // To do so, we wrap in a function in order to make sure that the doctest AST is + // correct. For example, if your doctest is `foo::bar()`, if we don't wrap it in a + // block, it would emit an AST error, which would be problematic for us since we + // want to filter out such errors which aren't "real" errors. + // + // The end goal is to be able to merge as many doctests as possible as one for much + // faster doctests run time. + parsing_result = parse_source( + format!("{crates}\nfn __doctest_wrap(){{{everything_else}\n}}"), + &mut info, + &crate_name, + ); } - // Reset errors so that they won't be reported as compiler bugs when dropping the - // dcx. Any errors in the tests will be reported when the test file is compiled, - // Note that we still need to cancel the errors above otherwise `Diag` will panic on - // drop. - psess.dcx().reset_err_count(); - - (found_main, found_extern_crate, found_macro) + (info, parsing_result) }) }); - let (already_has_main, already_has_extern_crate, found_macro) = result?; + let (mut info, parsing_result) = match result { + Err(..) | Ok((_, ParsingResult::Failed)) => return Err(FatalError), + Ok((info, parsing_result)) => (info, parsing_result), + }; // If a doctest's `fn main` is being masked by a wrapper macro, the parsing loop above won't // see it. In that case, run the old text-based scan to see if they at least have a main // function written inside a macro invocation. 
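(Illustrative aside, not part of this patch: what the second parse attempt described above sees for a doctest whose body is a bare expression. `foo::bar()` is the example the comment itself uses; the wrapped source below is reconstructed from the `format!` call above.)

    // Original doctest body, as scraped from the doc comment:
    //     foo::bar()
    // Parsed directly, a bare call at item position is an AST error, even though
    // the doctest compiles fine once rustdoc wraps it in `fn main`.
    //
    // Wrapped form handed to the second `parse_source` call, so that only
    // "real" syntax errors survive the check:
    fn __doctest_wrap() {
        foo::bar()
    }

Only if this wrapped form still fails to parse is the doctest treated as having a real AST error and kept out of the merged run (`can_be_merged` requires `!failed_ast`).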
See // https://github.com/rust-lang/rust/issues/56898 - let already_has_main = if found_macro && !already_has_main { - source + if info.found_macro + && !info.has_main_fn + && original_source_code .lines() .map(|line| { let comment = line.find("//"); if let Some(comment_begins) = comment { &line[0..comment_begins] } else { line } }) .any(|code| code.contains("fn main")) - } else { - already_has_main - }; + { + info.has_main_fn = true; + } - Ok((already_has_main, already_has_extern_crate)) + Ok((info, parsing_result != ParsingResult::Ok)) } -fn check_if_attr_is_complete(source: &str, edition: Edition) -> bool { +enum AttrKind { + CrateAttr, + Attr, +} + +/// Returns `Some` if the attribute is complete and `Some(true)` if it is an attribute that can be +/// placed at the crate root. +fn check_if_attr_is_complete(source: &str, edition: Edition) -> Option { if source.is_empty() { // Empty content so nothing to check in here... - return true; + return None; } + let not_crate_attrs = [sym::forbid, sym::allow, sym::warn, sym::deny]; + rustc_driver::catch_fatal_errors(|| { rustc_span::create_session_if_not_set_then(edition, |_| { use rustc_errors::emitter::HumanEmitter; @@ -285,32 +463,75 @@ fn check_if_attr_is_complete(source: &str, edition: Edition) -> bool { errs.into_iter().for_each(|err| err.cancel()); // If there is an unclosed delimiter, an error will be returned by the // tokentrees. - return false; + return None; } }; // If a parsing error happened, it's very likely that the attribute is incomplete. - if let Err(e) = parser.parse_attribute(InnerAttrPolicy::Permitted) { - e.cancel(); - return false; - } - true + let ret = match parser.parse_attribute(InnerAttrPolicy::Permitted) { + Ok(attr) => { + let attr_name = attr.name_or_empty(); + + if not_crate_attrs.contains(&attr_name) { + // There is one exception to these attributes: + // `#![allow(internal_features)]`. If this attribute is used, we need to + // consider it only as a crate-level attribute. + if attr_name == sym::allow + && let Some(list) = attr.meta_item_list() + && list.iter().any(|sub_attr| { + sub_attr.name_or_empty().as_str() == "internal_features" + }) + { + Some(AttrKind::CrateAttr) + } else { + Some(AttrKind::Attr) + } + } else { + Some(AttrKind::CrateAttr) + } + } + Err(e) => { + e.cancel(); + None + } + }; + ret }) }) - .unwrap_or(false) + .unwrap_or(None) +} + +fn handle_attr(mod_attr_pending: &mut String, source_info: &mut SourceInfo, edition: Edition) { + if let Some(attr_kind) = check_if_attr_is_complete(mod_attr_pending, edition) { + let push_to = match attr_kind { + AttrKind::CrateAttr => &mut source_info.crate_attrs, + AttrKind::Attr => &mut source_info.maybe_crate_attrs, + }; + push_to.push_str(mod_attr_pending); + push_to.push('\n'); + // If it's complete, then we can clear the pending content. 
+ mod_attr_pending.clear(); + } else if mod_attr_pending.ends_with('\\') { + mod_attr_pending.push('n'); + } +} + +#[derive(Default)] +struct SourceInfo { + crate_attrs: String, + maybe_crate_attrs: String, + crates: String, + everything_else: String, } -fn partition_source(s: &str, edition: Edition) -> (String, String, String) { +fn partition_source(s: &str, edition: Edition) -> SourceInfo { #[derive(Copy, Clone, PartialEq)] enum PartitionState { Attrs, Crates, Other, } + let mut source_info = SourceInfo::default(); let mut state = PartitionState::Attrs; - let mut before = String::new(); - let mut crates = String::new(); - let mut after = String::new(); - let mut mod_attr_pending = String::new(); for line in s.lines() { @@ -321,12 +542,9 @@ fn partition_source(s: &str, edition: Edition) -> (String, String, String) { match state { PartitionState::Attrs => { state = if trimline.starts_with("#![") { - if !check_if_attr_is_complete(line, edition) { - mod_attr_pending = line.to_owned(); - } else { - mod_attr_pending.clear(); - } - PartitionState::Attrs + mod_attr_pending = line.to_owned(); + handle_attr(&mut mod_attr_pending, &mut source_info, edition); + continue; } else if trimline.chars().all(|c| c.is_whitespace()) || (trimline.starts_with("//") && !trimline.starts_with("///")) { @@ -341,15 +559,10 @@ fn partition_source(s: &str, edition: Edition) -> (String, String, String) { // If not, then we append the new line into the pending attribute to check // if this time it's complete... mod_attr_pending.push_str(line); - if !trimline.is_empty() - && check_if_attr_is_complete(&mod_attr_pending, edition) - { - // If it's complete, then we can clear the pending content. - mod_attr_pending.clear(); + if !trimline.is_empty() { + handle_attr(&mut mod_attr_pending, &mut source_info, edition); } - // In any case, this is considered as `PartitionState::Attrs` so it's - // prepended before rustdoc's inserts. - PartitionState::Attrs + continue; } else { PartitionState::Other } @@ -371,23 +584,25 @@ fn partition_source(s: &str, edition: Edition) -> (String, String, String) { match state { PartitionState::Attrs => { - before.push_str(line); - before.push('\n'); + source_info.crate_attrs.push_str(line); + source_info.crate_attrs.push('\n'); } PartitionState::Crates => { - crates.push_str(line); - crates.push('\n'); + source_info.crates.push_str(line); + source_info.crates.push('\n'); } PartitionState::Other => { - after.push_str(line); - after.push('\n'); + source_info.everything_else.push_str(line); + source_info.everything_else.push('\n'); } } } - debug!("before:\n{before}"); - debug!("crates:\n{crates}"); - debug!("after:\n{after}"); + source_info.everything_else = source_info.everything_else.trim().to_string(); + + debug!("crate_attrs:\n{}{}", source_info.crate_attrs, source_info.maybe_crate_attrs); + debug!("crates:\n{}", source_info.crates); + debug!("after:\n{}", source_info.everything_else); - (before, after, crates) + source_info } diff --git a/src/librustdoc/doctest/markdown.rs b/src/librustdoc/doctest/markdown.rs index b8ab7adb36e8c..4806d86558997 100644 --- a/src/librustdoc/doctest/markdown.rs +++ b/src/librustdoc/doctest/markdown.rs @@ -1,34 +1,29 @@ //! Doctest functionality used only for doctests in `.md` Markdown files. 
use std::fs::read_to_string; +use std::sync::{Arc, Mutex}; use rustc_span::FileName; use tempfile::tempdir; use super::{ - generate_args_file, CreateRunnableDoctests, DoctestVisitor, GlobalTestOptions, ScrapedDoctest, + generate_args_file, CreateRunnableDocTests, DocTestVisitor, GlobalTestOptions, ScrapedDocTest, }; use crate::config::Options; use crate::html::markdown::{find_testable_code, ErrorCodes, LangString, MdRelLine}; struct MdCollector { - tests: Vec, + tests: Vec, cur_path: Vec, filename: FileName, } -impl DoctestVisitor for MdCollector { +impl DocTestVisitor for MdCollector { fn visit_test(&mut self, test: String, config: LangString, rel_line: MdRelLine) { let filename = self.filename.clone(); // First line of Markdown is line 1. let line = 1 + rel_line.offset(); - self.tests.push(ScrapedDoctest { - filename, - line, - logical_path: self.cur_path.clone(), - langstr: config, - text: test, - }); + self.tests.push(ScrapedDocTest::new(filename, line, self.cur_path.clone(), config, test)); } fn visit_header(&mut self, name: &str, level: u32) { @@ -118,8 +113,16 @@ pub(crate) fn test(options: Options) -> Result<(), String> { None, ); - let mut collector = CreateRunnableDoctests::new(options.clone(), opts); + let mut collector = CreateRunnableDocTests::new(options.clone(), opts); md_collector.tests.into_iter().for_each(|t| collector.add_test(t)); - crate::doctest::run_tests(options.test_args, options.nocapture, collector.tests); + let CreateRunnableDocTests { opts, rustdoc_options, standalone_tests, mergeable_tests, .. } = + collector; + crate::doctest::run_tests( + opts, + &rustdoc_options, + &Arc::new(Mutex::new(Vec::new())), + standalone_tests, + mergeable_tests, + ); Ok(()) } diff --git a/src/librustdoc/doctest/runner.rs b/src/librustdoc/doctest/runner.rs new file mode 100644 index 0000000000000..d49fa3ac5ac5a --- /dev/null +++ b/src/librustdoc/doctest/runner.rs @@ -0,0 +1,265 @@ +use std::fmt::Write; + +use rustc_data_structures::fx::FxHashSet; +use rustc_span::edition::Edition; + +use crate::doctest::{ + run_test, DocTestBuilder, GlobalTestOptions, IndividualTestOptions, RunnableDocTest, + RustdocOptions, ScrapedDocTest, TestFailure, UnusedExterns, +}; +use crate::html::markdown::{Ignore, LangString}; + +/// Convenient type to merge compatible doctests into one. 
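(Illustrative aside, not part of this patch: a minimal sketch of how a caller is expected to drive `DocTestRunner`, inferred from the method signatures below; the per-edition grouping, variable names, and error handling are assumptions, not code from this diff.)

    // One runner per edition bucket of mergeable doctests.
    let mut runner = DocTestRunner::new();
    for (doctest, scraped_test) in &doctests {
        // `doctest` is a `DocTestBuilder`, `scraped_test` a `ScrapedDocTest`.
        runner.add_test(doctest, scraped_test, &target_str);
    }
    // Builds one crate out of every added doctest, compiles it, and runs it.
    // `Ok(true)` means the merged run passed; `Err(())` means the merged
    // crate itself failed to compile.
    let result = runner.run_merged_tests(test_options, edition, &opts, &test_args, &rustdoc_options);

Standalone doctests keep the existing one-process-per-test path via `generate_unique_doctest`.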
+pub(crate) struct DocTestRunner { + crate_attrs: FxHashSet, + ids: String, + output: String, + supports_color: bool, + nb_tests: usize, +} + +impl DocTestRunner { + pub(crate) fn new() -> Self { + Self { + crate_attrs: FxHashSet::default(), + ids: String::new(), + output: String::new(), + supports_color: true, + nb_tests: 0, + } + } + + pub(crate) fn add_test( + &mut self, + doctest: &DocTestBuilder, + scraped_test: &ScrapedDocTest, + target_str: &str, + ) { + let ignore = match scraped_test.langstr.ignore { + Ignore::All => true, + Ignore::None => false, + Ignore::Some(ref ignores) => ignores.iter().any(|s| target_str.contains(s)), + }; + if !ignore { + for line in doctest.crate_attrs.split('\n') { + self.crate_attrs.insert(line.to_string()); + } + } + if !self.ids.is_empty() { + self.ids.push(','); + } + self.ids.push_str(&format!( + "{}::TEST", + generate_mergeable_doctest( + doctest, + scraped_test, + ignore, + self.nb_tests, + &mut self.output + ), + )); + self.supports_color &= doctest.supports_color; + self.nb_tests += 1; + } + + pub(crate) fn run_merged_tests( + &mut self, + test_options: IndividualTestOptions, + edition: Edition, + opts: &GlobalTestOptions, + test_args: &[String], + rustdoc_options: &RustdocOptions, + ) -> Result { + let mut code = "\ +#![allow(unused_extern_crates)] +#![allow(internal_features)] +#![feature(test)] +#![feature(rustc_attrs)] +" + .to_string(); + + for crate_attr in &self.crate_attrs { + code.push_str(crate_attr); + code.push('\n'); + } + + if opts.attrs.is_empty() { + // If there aren't any attributes supplied by #![doc(test(attr(...)))], then allow some + // lints that are commonly triggered in doctests. The crate-level test attributes are + // commonly used to make tests fail in case they trigger warnings, so having this there in + // that case may cause some tests to pass when they shouldn't have. + code.push_str("#![allow(unused)]\n"); + } + + // Next, any attributes that came from the crate root via #![doc(test(attr(...)))]. 
+ for attr in &opts.attrs { + code.push_str(&format!("#![{attr}]\n")); + } + + code.push_str("extern crate test;\n"); + + let test_args = + test_args.iter().map(|arg| format!("{arg:?}.to_string(),")).collect::(); + write!( + code, + "\ +{output} + +mod __doctest_mod {{ + use std::sync::OnceLock; + use std::path::PathBuf; + + pub static BINARY_PATH: OnceLock = OnceLock::new(); + pub const RUN_OPTION: &str = \"*doctest-inner-test\"; + pub const BIN_OPTION: &str = \"*doctest-bin-path\"; + + #[allow(unused)] + pub fn doctest_path() -> Option<&'static PathBuf> {{ + self::BINARY_PATH.get() + }} + + #[allow(unused)] + pub fn doctest_runner(bin: &std::path::Path, test_nb: usize) -> Result<(), String> {{ + let out = std::process::Command::new(bin) + .arg(self::RUN_OPTION) + .arg(test_nb.to_string()) + .output() + .expect(\"failed to run command\"); + if !out.status.success() {{ + Err(String::from_utf8_lossy(&out.stderr).to_string()) + }} else {{ + Ok(()) + }} + }} +}} + +#[rustc_main] +fn main() -> std::process::ExitCode {{ +const TESTS: [test::TestDescAndFn; {nb_tests}] = [{ids}]; +let bin_marker = std::ffi::OsStr::new(__doctest_mod::BIN_OPTION); +let test_marker = std::ffi::OsStr::new(__doctest_mod::RUN_OPTION); +let test_args = &[{test_args}]; + +let mut args = std::env::args_os().skip(1); +while let Some(arg) = args.next() {{ + if arg == bin_marker {{ + let Some(binary) = args.next() else {{ + panic!(\"missing argument after `{{}}`\", __doctest_mod::BIN_OPTION); + }}; + if crate::__doctest_mod::BINARY_PATH.set(binary.into()).is_err() {{ + panic!(\"`{{}}` option was used more than once\", bin_marker.to_string_lossy()); + }} + return std::process::Termination::report(test::test_main(test_args, Vec::from(TESTS), None)); + }} else if arg == test_marker {{ + let Some(nb_test) = args.next() else {{ + panic!(\"missing argument after `{{}}`\", __doctest_mod::RUN_OPTION); + }}; + if let Some(nb_test) = nb_test.to_str().and_then(|nb| nb.parse::().ok()) {{ + if let Some(test) = TESTS.get(nb_test) {{ + if let test::StaticTestFn(f) = test.testfn {{ + return std::process::Termination::report(f()); + }} + }} + }} + panic!(\"Unexpected value after `{{}}`\", __doctest_mod::RUN_OPTION); + }} +}} + +eprintln!(\"WARNING: No argument provided so doctests will be run in the same process\"); +std::process::Termination::report(test::test_main(test_args, Vec::from(TESTS), None)) +}}", + nb_tests = self.nb_tests, + output = self.output, + ids = self.ids, + ) + .expect("failed to generate test code"); + let runnable_test = RunnableDocTest { + full_test_code: code, + full_test_line_offset: 0, + test_opts: test_options, + global_opts: opts.clone(), + langstr: LangString::default(), + line: 0, + edition, + no_run: false, + is_multiple_tests: true, + }; + let ret = + run_test(runnable_test, rustdoc_options, self.supports_color, |_: UnusedExterns| {}); + if let Err(TestFailure::CompileError) = ret { Err(()) } else { Ok(ret.is_ok()) } + } +} + +/// Push new doctest content into `output`. Returns the test ID for this doctest. +fn generate_mergeable_doctest( + doctest: &DocTestBuilder, + scraped_test: &ScrapedDocTest, + ignore: bool, + id: usize, + output: &mut String, +) -> String { + let test_id = format!("__doctest_{id}"); + + if ignore { + // We generate nothing else. 
+ writeln!(output, "mod {test_id} {{\n").unwrap(); + } else { + writeln!(output, "mod {test_id} {{\n{}{}", doctest.crates, doctest.maybe_crate_attrs) + .unwrap(); + if scraped_test.langstr.no_run { + // To prevent having warnings about unused items since they're not called. + writeln!(output, "#![allow(unused)]").unwrap(); + } + if doctest.has_main_fn { + output.push_str(&doctest.everything_else); + } else { + let returns_result = if doctest.everything_else.trim_end().ends_with("(())") { + "-> Result<(), impl core::fmt::Debug>" + } else { + "" + }; + write!( + output, + "\ +fn main() {returns_result} {{ +{} +}}", + doctest.everything_else + ) + .unwrap(); + } + } + let not_running = ignore || scraped_test.langstr.no_run; + writeln!( + output, + " +pub const TEST: test::TestDescAndFn = test::TestDescAndFn::new_doctest( +{test_name:?}, {ignore}, {file:?}, {line}, {no_run}, {should_panic}, +test::StaticTestFn( + || {{{runner}}}, +)); +}}", + test_name = scraped_test.name, + file = scraped_test.path(), + line = scraped_test.line, + no_run = scraped_test.langstr.no_run, + should_panic = !scraped_test.langstr.no_run && scraped_test.langstr.should_panic, + // Setting `no_run` to `true` in `TestDesc` still makes the test run, so we simply + // don't give it the function to run. + runner = if not_running { + "test::assert_test_result(Ok::<(), String>(()))".to_string() + } else { + format!( + " +if let Some(bin_path) = crate::__doctest_mod::doctest_path() {{ + test::assert_test_result(crate::__doctest_mod::doctest_runner(bin_path, {id})) +}} else {{ + test::assert_test_result(self::main()) +}} +", + ) + }, + ) + .unwrap(); + test_id +} diff --git a/src/librustdoc/doctest/rust.rs b/src/librustdoc/doctest/rust.rs index f179f3aa1c99b..abd66f15dc0c1 100644 --- a/src/librustdoc/doctest/rust.rs +++ b/src/librustdoc/doctest/rust.rs @@ -14,14 +14,14 @@ use rustc_session::Session; use rustc_span::source_map::SourceMap; use rustc_span::{BytePos, FileName, Pos, Span, DUMMY_SP}; -use super::{DoctestVisitor, ScrapedDoctest}; +use super::{DocTestVisitor, ScrapedDocTest}; use crate::clean::types::AttributesExt; use crate::clean::Attributes; use crate::html::markdown::{self, ErrorCodes, LangString, MdRelLine}; struct RustCollector { source_map: Lrc, - tests: Vec, + tests: Vec, cur_path: Vec, position: Span, } @@ -48,16 +48,16 @@ impl RustCollector { } } -impl DoctestVisitor for RustCollector { +impl DocTestVisitor for RustCollector { fn visit_test(&mut self, test: String, config: LangString, rel_line: MdRelLine) { let line = self.get_base_line() + rel_line.offset(); - self.tests.push(ScrapedDoctest { - filename: self.get_filename(), + self.tests.push(ScrapedDocTest::new( + self.get_filename(), line, - logical_path: self.cur_path.clone(), - langstr: config, - text: test, - }); + self.cur_path.clone(), + config, + test, + )); } fn visit_header(&mut self, _name: &str, _level: u32) {} @@ -89,7 +89,7 @@ impl<'a, 'tcx> HirCollector<'a, 'tcx> { Self { sess, map, codes, enable_per_target_ignores, tcx, collector } } - pub fn collect_crate(mut self) -> Vec { + pub fn collect_crate(mut self) -> Vec { let tcx = self.tcx; self.visit_testable("".to_string(), CRATE_DEF_ID, tcx.hir().span(CRATE_HIR_ID), |this| { tcx.hir().walk_toplevel_module(this) diff --git a/src/librustdoc/doctest/tests.rs b/src/librustdoc/doctest/tests.rs index 0f13ee404c682..160d0f222b4e0 100644 --- a/src/librustdoc/doctest/tests.rs +++ b/src/librustdoc/doctest/tests.rs @@ -2,7 +2,27 @@ use std::path::PathBuf; use rustc_span::edition::DEFAULT_EDITION; -use 
super::{make_test, GlobalTestOptions}; +use super::{DocTestBuilder, GlobalTestOptions}; + +fn make_test( + test_code: &str, + crate_name: Option<&str>, + dont_insert_main: bool, + opts: &GlobalTestOptions, + test_id: Option<&str>, +) -> (String, usize) { + let doctest = DocTestBuilder::new( + test_code, + crate_name, + DEFAULT_EDITION, + false, + test_id.map(|s| s.to_string()), + None, + ); + let (code, line_offset) = + doctest.generate_unique_doctest(test_code, dont_insert_main, opts, crate_name); + (code, line_offset) +} /// Default [`GlobalTestOptions`] for these unit tests. fn default_global_opts(crate_name: impl Into) -> GlobalTestOptions { @@ -25,7 +45,7 @@ fn main() { assert_eq!(2+2, 4); }" .to_string(); - let (output, len, _) = make_test(input, None, false, &opts, DEFAULT_EDITION, None); + let (output, len) = make_test(input, None, false, &opts, None); assert_eq!((output, len), (expected, 2)); } @@ -40,7 +60,7 @@ fn main() { assert_eq!(2+2, 4); }" .to_string(); - let (output, len, _) = make_test(input, Some("asdf"), false, &opts, DEFAULT_EDITION, None); + let (output, len) = make_test(input, Some("asdf"), false, &opts, None); assert_eq!((output, len), (expected, 2)); } @@ -59,7 +79,7 @@ use asdf::qwop; assert_eq!(2+2, 4); }" .to_string(); - let (output, len, _) = make_test(input, Some("asdf"), false, &opts, DEFAULT_EDITION, None); + let (output, len) = make_test(input, Some("asdf"), false, &opts, None); assert_eq!((output, len), (expected, 3)); } @@ -76,7 +96,7 @@ use asdf::qwop; assert_eq!(2+2, 4); }" .to_string(); - let (output, len, _) = make_test(input, Some("asdf"), false, &opts, DEFAULT_EDITION, None); + let (output, len) = make_test(input, Some("asdf"), false, &opts, None); assert_eq!((output, len), (expected, 2)); } @@ -94,7 +114,7 @@ use std::*; assert_eq!(2+2, 4); }" .to_string(); - let (output, len, _) = make_test(input, Some("std"), false, &opts, DEFAULT_EDITION, None); + let (output, len) = make_test(input, Some("std"), false, &opts, None); assert_eq!((output, len), (expected, 2)); } @@ -113,7 +133,7 @@ use asdf::qwop; assert_eq!(2+2, 4); }" .to_string(); - let (output, len, _) = make_test(input, Some("asdf"), false, &opts, DEFAULT_EDITION, None); + let (output, len) = make_test(input, Some("asdf"), false, &opts, None); assert_eq!((output, len), (expected, 2)); } @@ -130,7 +150,7 @@ use asdf::qwop; assert_eq!(2+2, 4); }" .to_string(); - let (output, len, _) = make_test(input, Some("asdf"), false, &opts, DEFAULT_EDITION, None); + let (output, len) = make_test(input, Some("asdf"), false, &opts, None); assert_eq!((output, len), (expected, 2)); } @@ -150,7 +170,7 @@ use asdf::qwop; assert_eq!(2+2, 4); }" .to_string(); - let (output, len, _) = make_test(input, Some("asdf"), false, &opts, DEFAULT_EDITION, None); + let (output, len) = make_test(input, Some("asdf"), false, &opts, None); assert_eq!((output, len), (expected, 3)); // Adding more will also bump the returned line offset. 
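(Illustrative aside, not part of this patch: the new `standalone` code-block attribute, parsed further down in this diff in src/librustdoc/html/markdown.rs, opts a single example out of merging. A doc comment using it might look like the following; the function and crate name are invented for the example.)

    /// Returns the answer.
    ///
    /// ```standalone
    /// // Compiled and run as its own crate, even on edition 2024 where
    /// // compatible doctests are otherwise merged into one test binary.
    /// assert_eq!(my_crate::answer(), 42);
    /// ```
    pub fn answer() -> u32 {
        42
    }

Compile-fail, test-harness, `--nocapture`, and `--show-output` runs are likewise kept standalone, as the `is_standalone` check earlier in this diff shows.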
@@ -164,7 +184,7 @@ use asdf::qwop; assert_eq!(2+2, 4); }" .to_string(); - let (output, len, _) = make_test(input, Some("asdf"), false, &opts, DEFAULT_EDITION, None); + let (output, len) = make_test(input, Some("asdf"), false, &opts, None); assert_eq!((output, len), (expected, 4)); } @@ -181,7 +201,7 @@ fn main() { assert_eq!(2+2, 4); }" .to_string(); - let (output, len, _) = make_test(input, None, false, &opts, DEFAULT_EDITION, None); + let (output, len) = make_test(input, None, false, &opts, None); assert_eq!((output, len), (expected, 2)); } @@ -197,7 +217,7 @@ fn main() { assert_eq!(2+2, 4); }" .to_string(); - let (output, len, _) = make_test(input, None, false, &opts, DEFAULT_EDITION, None); + let (output, len) = make_test(input, None, false, &opts, None); assert_eq!((output, len), (expected, 1)); } @@ -213,7 +233,7 @@ fn main() { assert_eq!(2+2, 4); }" .to_string(); - let (output, len, _) = make_test(input, None, false, &opts, DEFAULT_EDITION, None); + let (output, len) = make_test(input, None, false, &opts, None); assert_eq!((output, len), (expected, 2)); } @@ -227,7 +247,7 @@ assert_eq!(2+2, 4);"; //Ceci n'est pas une `fn main` assert_eq!(2+2, 4);" .to_string(); - let (output, len, _) = make_test(input, None, true, &opts, DEFAULT_EDITION, None); + let (output, len) = make_test(input, None, true, &opts, None); assert_eq!((output, len), (expected, 1)); } @@ -245,7 +265,7 @@ assert_eq!(2+2, 4); }" .to_string(); - let (output, len, _) = make_test(input, None, false, &opts, DEFAULT_EDITION, None); + let (output, len) = make_test(input, None, false, &opts, None); assert_eq!((output, len), (expected, 2)); } @@ -265,7 +285,7 @@ assert_eq!(asdf::foo, 4); }" .to_string(); - let (output, len, _) = make_test(input, Some("asdf"), false, &opts, DEFAULT_EDITION, None); + let (output, len) = make_test(input, Some("asdf"), false, &opts, None); assert_eq!((output, len), (expected, 3)); } @@ -283,7 +303,7 @@ test_wrapper! 
{ }" .to_string(); - let (output, len, _) = make_test(input, Some("my_crate"), false, &opts, DEFAULT_EDITION, None); + let (output, len) = make_test(input, Some("my_crate"), false, &opts, None); assert_eq!((output, len), (expected, 1)); } @@ -303,7 +323,7 @@ io::stdin().read_line(&mut input)?; Ok::<(), io:Error>(()) } _inner().unwrap() }" .to_string(); - let (output, len, _) = make_test(input, None, false, &opts, DEFAULT_EDITION, None); + let (output, len) = make_test(input, None, false, &opts, None); assert_eq!((output, len), (expected, 2)); } @@ -317,8 +337,7 @@ fn main() { #[allow(non_snake_case)] fn _doctest_main__some_unique_name() { assert_eq!(2+2, 4); } _doctest_main__some_unique_name() }" .to_string(); - let (output, len, _) = - make_test(input, None, false, &opts, DEFAULT_EDITION, Some("_some_unique_name")); + let (output, len) = make_test(input, None, false, &opts, Some("_some_unique_name")); assert_eq!((output, len), (expected, 2)); } @@ -337,7 +356,7 @@ fn main() { eprintln!(\"hello anan\"); }" .to_string(); - let (output, len, _) = make_test(input, None, false, &opts, DEFAULT_EDITION, None); + let (output, len) = make_test(input, None, false, &opts, None); assert_eq!((output, len), (expected, 2)); } @@ -357,6 +376,6 @@ fn main() { eprintln!(\"hello anan\"); }" .to_string(); - let (output, len, _) = make_test(input, None, false, &opts, DEFAULT_EDITION, None); + let (output, len) = make_test(input, None, false, &opts, None); assert_eq!((output, len), (expected, 1)); } diff --git a/src/librustdoc/html/markdown.rs b/src/librustdoc/html/markdown.rs index bb8d39aaf1dcc..7bfe5d87d399f 100644 --- a/src/librustdoc/html/markdown.rs +++ b/src/librustdoc/html/markdown.rs @@ -297,14 +297,16 @@ impl<'a, I: Iterator>> Iterator for CodeBlocks<'_, 'a, I> { attrs: vec![], args_file: PathBuf::new(), }; - let (test, _, _) = doctest::make_test(&test, krate, false, &opts, edition, None); + let doctest = doctest::DocTestBuilder::new(&test, krate, edition, false, None, None); + let (test, _) = doctest.generate_unique_doctest(&test, false, &opts, krate); let channel = if test.contains("#![feature(") { "&version=nightly" } else { "" }; let test_escaped = small_url_encode(test); Some(format!( "Run", + title=\"Run code\" \ + href=\"{url}?code={test_escaped}{channel}&edition={edition}\">", )) }); @@ -736,7 +738,7 @@ impl MdRelLine { } } -pub(crate) fn find_testable_code( +pub(crate) fn find_testable_code( doc: &str, tests: &mut T, error_codes: ErrorCodes, @@ -746,7 +748,7 @@ pub(crate) fn find_testable_code( find_codes(doc, tests, error_codes, enable_per_target_ignores, extra_info, false) } -pub(crate) fn find_codes( +pub(crate) fn find_codes( doc: &str, tests: &mut T, error_codes: ErrorCodes, @@ -867,6 +869,7 @@ pub(crate) struct LangString { pub(crate) rust: bool, pub(crate) test_harness: bool, pub(crate) compile_fail: bool, + pub(crate) standalone: bool, pub(crate) error_codes: Vec, pub(crate) edition: Option, pub(crate) added_classes: Vec, @@ -1189,6 +1192,7 @@ impl Default for LangString { rust: true, test_harness: false, compile_fail: false, + standalone: false, error_codes: Vec::new(), edition: None, added_classes: Vec::new(), @@ -1258,6 +1262,10 @@ impl LangString { seen_rust_tags = !seen_other_tags || seen_rust_tags; data.no_run = true; } + LangStringToken::LangToken("standalone") => { + data.standalone = true; + seen_rust_tags = !seen_other_tags || seen_rust_tags; + } LangStringToken::LangToken(x) if x.starts_with("edition") => { data.edition = x[7..].parse::().ok(); } diff --git 
a/src/librustdoc/html/static/css/noscript.css b/src/librustdoc/html/static/css/noscript.css index 6e10cf21537bc..86e8edad7032a 100644 --- a/src/librustdoc/html/static/css/noscript.css +++ b/src/librustdoc/html/static/css/noscript.css @@ -104,10 +104,6 @@ nav.sub { --code-highlight-doc-comment-color: #4d4d4c; --src-line-numbers-span-color: #c67e2d; --src-line-number-highlighted-background-color: #fdffd3; - --test-arrow-color: #f5f5f5; - --test-arrow-background-color: rgba(78, 139, 202, 0.2); - --test-arrow-hover-color: #f5f5f5; - --test-arrow-hover-background-color: rgb(78, 139, 202); --target-background-color: #fdffd3; --target-border-color: #ad7c37; --kbd-color: #000; @@ -210,10 +206,6 @@ nav.sub { --code-highlight-doc-comment-color: #8ca375; --src-line-numbers-span-color: #3b91e2; --src-line-number-highlighted-background-color: #0a042f; - --test-arrow-color: #dedede; - --test-arrow-background-color: rgba(78, 139, 202, 0.2); - --test-arrow-hover-color: #dedede; - --test-arrow-hover-background-color: #4e8bca; --target-background-color: #494a3d; --target-border-color: #bb7410; --kbd-color: #000; diff --git a/src/librustdoc/html/static/css/rustdoc.css b/src/librustdoc/html/static/css/rustdoc.css index 02a6bb8f5487a..eb5a5d935e202 100644 --- a/src/librustdoc/html/static/css/rustdoc.css +++ b/src/librustdoc/html/static/css/rustdoc.css @@ -353,7 +353,7 @@ details:not(.toggle) summary { margin-bottom: .6em; } -code, pre, a.test-arrow, .code-header { +code, pre, .code-header { font-family: "Source Code Pro", monospace; } .docblock code, .docblock-short code { @@ -946,8 +946,8 @@ because of the `[-]` element which would overlap with it. */ .main-heading a:hover, .example-wrap .rust a:hover, .all-items a:hover, -.docblock a:not(.test-arrow):not(.scrape-help):not(.tooltip):hover:not(.doc-anchor), -.docblock-short a:not(.test-arrow):not(.scrape-help):not(.tooltip):hover, +.docblock a:not(.scrape-help):not(.tooltip):hover:not(.doc-anchor), +.docblock-short a:not(.scrape-help):not(.tooltip):hover, .item-info a { text-decoration: underline; } @@ -1461,22 +1461,17 @@ documentation. */ z-index: 1; } a.test-arrow { - padding: 5px 7px; - border-radius: var(--button-border-radius); - font-size: 1rem; - color: var(--test-arrow-color); - background-color: var(--test-arrow-background-color); + height: var(--copy-path-height); + padding: 6px 4px 0 11px; } -a.test-arrow:hover { - color: var(--test-arrow-hover-color); - background-color: var(--test-arrow-hover-background-color); +a.test-arrow::before { + content: url('data:image/svg+xml,'); } .example-wrap .button-holder { display: flex; } -.example-wrap:hover > .test-arrow { - padding: 2px 7px; -} + /* On iPad, the ":hover" state sticks around, making things work not greatly. Do work around it, we move it into this media query. More information can be found at: @@ -1486,29 +1481,34 @@ However, using `@media (hover: hover)` makes this rule never to be applied in GU instead, we check that it's not a "finger" cursor. 
*/ @media not (pointer: coarse) { - .example-wrap:hover > .test-arrow, .example-wrap:hover > .button-holder { + .example-wrap:hover > a.test-arrow, .example-wrap:hover > .button-holder { visibility: visible; } } .example-wrap .button-holder.keep-visible { visibility: visible; } -.example-wrap .button-holder .copy-button { - color: var(--copy-path-button-color); +.example-wrap .button-holder .copy-button, .example-wrap .test-arrow { background: var(--main-background-color); + cursor: pointer; + border-radius: var(--button-border-radius); height: var(--copy-path-height); width: var(--copy-path-width); +} +.example-wrap .button-holder .copy-button { margin-left: var(--button-left-margin); padding: 2px 0 0 4px; border: 0; - cursor: pointer; - border-radius: var(--button-border-radius); } -.example-wrap .button-holder .copy-button::before { +.example-wrap .button-holder .copy-button::before, +.example-wrap .test-arrow::before { filter: var(--copy-path-img-filter); +} +.example-wrap .button-holder .copy-button::before { content: var(--clipboard-image); } -.example-wrap .button-holder .copy-button:hover::before { +.example-wrap .button-holder .copy-button:hover::before, +.example-wrap .test-arrow:hover::before { filter: var(--copy-path-img-hover-filter); } .example-wrap .button-holder .copy-button.clicked::before { @@ -2552,10 +2552,6 @@ by default. --code-highlight-doc-comment-color: #4d4d4c; --src-line-numbers-span-color: #c67e2d; --src-line-number-highlighted-background-color: #fdffd3; - --test-arrow-color: #f5f5f5; - --test-arrow-background-color: rgba(78, 139, 202, 0.2); - --test-arrow-hover-color: #f5f5f5; - --test-arrow-hover-background-color: rgb(78, 139, 202); --target-background-color: #fdffd3; --target-border-color: #ad7c37; --kbd-color: #000; @@ -2658,10 +2654,6 @@ by default. 
--code-highlight-doc-comment-color: #8ca375; --src-line-numbers-span-color: #3b91e2; --src-line-number-highlighted-background-color: #0a042f; - --test-arrow-color: #dedede; - --test-arrow-background-color: rgba(78, 139, 202, 0.2); - --test-arrow-hover-color: #dedede; - --test-arrow-hover-background-color: #4e8bca; --target-background-color: #494a3d; --target-border-color: #bb7410; --kbd-color: #000; @@ -2771,10 +2763,6 @@ Original by Dempfi (https://github.com/dempfi/ayu) --code-highlight-doc-comment-color: #a1ac88; --src-line-numbers-span-color: #5c6773; --src-line-number-highlighted-background-color: rgba(255, 236, 164, 0.06); - --test-arrow-color: #788797; - --test-arrow-background-color: rgba(57, 175, 215, 0.09); - --test-arrow-hover-color: #c5c5c5; - --test-arrow-hover-background-color: rgba(57, 175, 215, 0.368); --target-background-color: rgba(255, 236, 164, 0.06); --target-border-color: rgba(255, 180, 76, 0.85); --kbd-color: #c5c5c5; diff --git a/src/librustdoc/html/static/js/main.js b/src/librustdoc/html/static/js/main.js index 62fbde6f225b1..75f2a1418cd8f 100644 --- a/src/librustdoc/html/static/js/main.js +++ b/src/librustdoc/html/static/js/main.js @@ -1835,10 +1835,14 @@ href="https://doc.rust-lang.org/${channel}/rustdoc/read-documentation/search.htm function getExampleWrap(event) { let elem = event.target; while (!hasClass(elem, "example-wrap")) { - elem = elem.parentElement; - if (elem === document.body || hasClass(elem, "docblock")) { + if (elem === document.body || + elem.tagName === "A" || + elem.tagName === "BUTTON" || + hasClass(elem, "docblock") + ) { return null; } + elem = elem.parentElement; } return elem; } diff --git a/src/librustdoc/json/mod.rs b/src/librustdoc/json/mod.rs index ea191dc89cfde..e2860292aa3ba 100644 --- a/src/librustdoc/json/mod.rs +++ b/src/librustdoc/json/mod.rs @@ -9,16 +9,19 @@ mod import_finder; use std::cell::RefCell; use std::fs::{create_dir_all, File}; -use std::io::{BufWriter, Write}; +use std::io::{stdout, BufWriter, Write}; use std::path::PathBuf; use std::rc::Rc; -use rustc_data_structures::fx::FxHashMap; use rustc_hir::def_id::{DefId, DefIdSet}; use rustc_middle::ty::TyCtxt; use rustc_session::Session; use rustc_span::def_id::LOCAL_CRATE; use rustdoc_json_types as types; +// It's important to use the FxHashMap from rustdoc_json_types here, instead of +// the one from rustc_data_structures, as they're different types due to sysroots. +// See #110051 and #127456 for details +use rustdoc_json_types::FxHashMap; use crate::clean::types::{ExternalCrate, ExternalLocation}; use crate::clean::ItemKind; @@ -36,8 +39,10 @@ pub(crate) struct JsonRenderer<'tcx> { /// A mapping of IDs that contains all local items for this crate which gets output as a top /// level field of the JSON blob. index: Rc>>, - /// The directory where the blob will be written to. - out_path: PathBuf, + /// The directory where the JSON blob should be written to. + /// + /// If this is `None`, the blob will be printed to `stdout` instead. 
+ out_dir: Option, cache: Rc, imported_items: DefIdSet, } @@ -97,6 +102,22 @@ impl<'tcx> JsonRenderer<'tcx> { }) .unwrap_or_default() } + + fn serialize_and_write( + &self, + output_crate: types::Crate, + mut writer: BufWriter, + path: &str, + ) -> Result<(), Error> { + self.sess().time("rustdoc_json_serialize_and_write", || { + try_err!( + serde_json::ser::to_writer(&mut writer, &output_crate).map_err(|e| e.to_string()), + path + ); + try_err!(writer.flush(), path); + Ok(()) + }) + } } impl<'tcx> FormatRenderer<'tcx> for JsonRenderer<'tcx> { @@ -120,7 +141,7 @@ impl<'tcx> FormatRenderer<'tcx> for JsonRenderer<'tcx> { JsonRenderer { tcx, index: Rc::new(RefCell::new(FxHashMap::default())), - out_path: options.output, + out_dir: if options.output_to_stdout { None } else { Some(options.output) }, cache: Rc::new(cache), imported_items, }, @@ -220,14 +241,11 @@ impl<'tcx> FormatRenderer<'tcx> for JsonRenderer<'tcx> { let index = (*self.index).clone().into_inner(); debug!("Constructing Output"); - // This needs to be the default HashMap for compatibility with the public interface for - // rustdoc-json-types - #[allow(rustc::default_hash_types)] - let output = types::Crate { + let output_crate = types::Crate { root: types::Id(format!("0:0:{}", e.name(self.tcx).as_u32())), crate_version: self.cache.crate_version.clone(), includes_private: self.cache.document_private, - index: index.into_iter().collect(), + index, paths: self .cache .paths @@ -264,20 +282,21 @@ impl<'tcx> FormatRenderer<'tcx> for JsonRenderer<'tcx> { .collect(), format_version: types::FORMAT_VERSION, }; - let out_dir = self.out_path.clone(); - try_err!(create_dir_all(&out_dir), out_dir); + if let Some(ref out_dir) = self.out_dir { + try_err!(create_dir_all(&out_dir), out_dir); - let mut p = out_dir; - p.push(output.index.get(&output.root).unwrap().name.clone().unwrap()); - p.set_extension("json"); - let mut file = BufWriter::new(try_err!(File::create(&p), p)); - self.tcx - .sess - .time("rustdoc_json_serialization", || serde_json::ser::to_writer(&mut file, &output)) - .unwrap(); - try_err!(file.flush(), p); + let mut p = out_dir.clone(); + p.push(output_crate.index.get(&output_crate.root).unwrap().name.clone().unwrap()); + p.set_extension("json"); - Ok(()) + self.serialize_and_write( + output_crate, + BufWriter::new(try_err!(File::create(&p), p)), + &p.display().to_string(), + ) + } else { + self.serialize_and_write(output_crate, BufWriter::new(stdout().lock()), "") + } } fn cache(&self) -> &Cache { diff --git a/src/librustdoc/passes/check_doc_test_visibility.rs b/src/librustdoc/passes/check_doc_test_visibility.rs index d78afdffc626d..5015d66595504 100644 --- a/src/librustdoc/passes/check_doc_test_visibility.rs +++ b/src/librustdoc/passes/check_doc_test_visibility.rs @@ -45,7 +45,7 @@ pub(crate) struct Tests { pub(crate) found_tests: usize, } -impl crate::doctest::DoctestVisitor for Tests { +impl crate::doctest::DocTestVisitor for Tests { fn visit_test(&mut self, _: String, config: LangString, _: MdRelLine) { if config.rust && config.ignore == Ignore::None { self.found_tests += 1; diff --git a/src/librustdoc/passes/collect_intra_doc_links.rs b/src/librustdoc/passes/collect_intra_doc_links.rs index 9c7eee4040bdf..5b96529fed70d 100644 --- a/src/librustdoc/passes/collect_intra_doc_links.rs +++ b/src/librustdoc/passes/collect_intra_doc_links.rs @@ -495,7 +495,7 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> { ty::RawPtr(_, _) => Res::Primitive(RawPointer), ty::Ref(..) => Res::Primitive(Reference), ty::FnDef(..) 
=> panic!("type alias to a function definition"), - ty::FnPtr(_) => Res::Primitive(Fn), + ty::FnPtr(..) => Res::Primitive(Fn), ty::Never => Res::Primitive(Never), ty::Adt(ty::AdtDef(Interned(&ty::AdtDefData { did, .. }, _)), _) | ty::Foreign(did) => { Res::from_def_id(self.cx.tcx, did) diff --git a/src/llvm-project b/src/llvm-project index 57ae1a3474057..ccf4c38bdd73f 160000 --- a/src/llvm-project +++ b/src/llvm-project @@ -1 +1 @@ -Subproject commit 57ae1a3474057fead2c438928ed368b3740bf0ec +Subproject commit ccf4c38bdd73f1a37ec266c73bdaef80e39f8cf6 diff --git a/src/rustdoc-json-types/lib.rs b/src/rustdoc-json-types/lib.rs index 999134a40909d..40a90c1a56504 100644 --- a/src/rustdoc-json-types/lib.rs +++ b/src/rustdoc-json-types/lib.rs @@ -5,7 +5,7 @@ use std::path::PathBuf; -use rustc_hash::FxHashMap; +pub use rustc_hash::FxHashMap; use serde::{Deserialize, Serialize}; /// The version of JSON output that this crate represents. diff --git a/src/tools/cargo b/src/tools/cargo index 0d8d22f83b066..ba8b39413c74d 160000 --- a/src/tools/cargo +++ b/src/tools/cargo @@ -1 +1 @@ -Subproject commit 0d8d22f83b066503f6b2b755925197e959e58b4f +Subproject commit ba8b39413c74d08494f94a7542fe79aa636e1661 diff --git a/src/tools/clippy/clippy_dev/src/new_lint.rs b/src/tools/clippy/clippy_dev/src/new_lint.rs index de91233d196c8..87117832fb952 100644 --- a/src/tools/clippy/clippy_dev/src/new_lint.rs +++ b/src/tools/clippy/clippy_dev/src/new_lint.rs @@ -470,7 +470,7 @@ fn setup_mod_file(path: &Path, lint: &LintData<'_>) -> io::Result<&'static str> }); // Find both the last lint declaration (declare_clippy_lint!) and the lint pass impl - while let Some(LintDeclSearchResult { content, .. }) = iter.find(|result| result.token_kind == TokenKind::Ident) { + while let Some(LintDeclSearchResult { content, .. }) = iter.find(|result| result.token == TokenKind::Ident) { let mut iter = iter .by_ref() .filter(|t| !matches!(t.token_kind, TokenKind::Whitespace | TokenKind::LineComment { .. })); @@ -480,7 +480,7 @@ fn setup_mod_file(path: &Path, lint: &LintData<'_>) -> io::Result<&'static str> // matches `!{` match_tokens!(iter, Bang OpenBrace); if let Some(LintDeclSearchResult { range, .. }) = - iter.find(|result| result.token_kind == TokenKind::CloseBrace) + iter.find(|result| result.token == TokenKind::CloseBrace) { last_decl_curly_offset = Some(range.end); } diff --git a/src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast.rs b/src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast.rs index f263bec1576d0..dbe03e4ae8095 100644 --- a/src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast.rs +++ b/src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast.rs @@ -15,7 +15,7 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, cast_expr: &Expr<'_>, } match cast_from.kind() { - ty::FnDef(..) | ty::FnPtr(_) => { + ty::FnDef(..) | ty::FnPtr(..) 
=> { let mut applicability = Applicability::MaybeIncorrect; let from_snippet = snippet_with_applicability(cx, cast_expr.span, "x", &mut applicability); let to_nbits = utils::int_ty_to_nbits(cast_to, cx.tcx); diff --git a/src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast_any.rs b/src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast_any.rs index 75de53f73ee7a..5dc6df1e907d0 100644 --- a/src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast_any.rs +++ b/src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast_any.rs @@ -14,7 +14,7 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, cast_expr: &Expr<'_>, _ => { /* continue to checks */ }, } - if let ty::FnDef(..) | ty::FnPtr(_) = cast_from.kind() { + if let ty::FnDef(..) | ty::FnPtr(..) = cast_from.kind() { let mut applicability = Applicability::MaybeIncorrect; let from_snippet = snippet_with_applicability(cx, cast_expr.span, "..", &mut applicability); diff --git a/src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast_with_truncation.rs b/src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast_with_truncation.rs index 0e11bcfb8ecdb..dfbae1618ac62 100644 --- a/src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast_with_truncation.rs +++ b/src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast_with_truncation.rs @@ -14,7 +14,7 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, cast_expr: &Expr<'_>, _ => return, } match cast_from.kind() { - ty::FnDef(..) | ty::FnPtr(_) => { + ty::FnDef(..) | ty::FnPtr(..) => { let mut applicability = Applicability::MaybeIncorrect; let from_snippet = snippet_with_applicability(cx, cast_expr.span, "x", &mut applicability); diff --git a/src/tools/clippy/clippy_lints/src/default_numeric_fallback.rs b/src/tools/clippy/clippy_lints/src/default_numeric_fallback.rs index a74b3a8c8362c..05c3cd3c81407 100644 --- a/src/tools/clippy/clippy_lints/src/default_numeric_fallback.rs +++ b/src/tools/clippy/clippy_lints/src/default_numeric_fallback.rs @@ -236,7 +236,7 @@ fn fn_sig_opt<'tcx>(cx: &LateContext<'tcx>, hir_id: HirId) -> Option Some(cx.tcx.fn_sig(*def_id).instantiate_identity()), - ty::FnPtr(fn_sig) => Some(*fn_sig), + ty::FnPtr(sig_tys, hdr) => Some(sig_tys.with(*hdr)), _ => None, } } diff --git a/src/tools/clippy/clippy_lints/src/dereference.rs b/src/tools/clippy/clippy_lints/src/dereference.rs index d0cb24884686a..0e55d3db469ae 100644 --- a/src/tools/clippy/clippy_lints/src/dereference.rs +++ b/src/tools/clippy/clippy_lints/src/dereference.rs @@ -872,7 +872,7 @@ impl TyCoercionStability { | ty::Pat(..) | ty::Float(_) | ty::RawPtr(..) - | ty::FnPtr(_) + | ty::FnPtr(..) | ty::Str | ty::Slice(..) | ty::Adt(..) 
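
A minimal, self-contained sketch of the migration pattern behind the repeated `ty::FnPtr(_)` → `ty::FnPtr(..)` and `ty::FnPtr(sig_tys, hdr) => sig_tys.with(*hdr)` edits above. The `Ty`, `SigTys`, `Header`, and `Sig` types here are hypothetical stand-ins, not the compiler's real `ty` API; the point is only that `(..)` keeps matching after a tuple variant gains a field, while consumers that need the data destructure both fields and recombine them.

// Hypothetical stand-ins for the compiler's signature types.
#[derive(Clone, Copy, Debug)]
struct SigTys(u32);

#[derive(Clone, Copy, Debug)]
struct Header { unsafety: bool }

#[derive(Debug)]
struct Sig { tys: SigTys, header: Header }

impl SigTys {
    // Stand-in for the `sig_tys.with(hdr)` recombination used in the patch.
    fn with(self, header: Header) -> Sig {
        Sig { tys: self, header }
    }
}

enum Ty {
    FnDef(u32),
    // Was a single-field variant; the signature types and header are now separate fields.
    FnPtr(SigTys, Header),
}

fn fn_sig(ty: &Ty) -> Option<Sig> {
    match ty {
        // `(..)` matches any arity, so this arm survives the field split unchanged.
        Ty::FnDef(..) => None,
        // Callers that need the signature destructure both fields and rebuild it.
        Ty::FnPtr(sig_tys, hdr) => Some(sig_tys.with(*hdr)),
    }
}

fn main() {
    let ty = Ty::FnPtr(SigTys(1), Header { unsafety: false });
    println!("{:?}", fn_sig(&ty));
}
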
diff --git a/src/tools/clippy/clippy_lints/src/eta_reduction.rs b/src/tools/clippy/clippy_lints/src/eta_reduction.rs index 5a7226d590c4d..a7e831fdc42a8 100644 --- a/src/tools/clippy/clippy_lints/src/eta_reduction.rs +++ b/src/tools/clippy/clippy_lints/src/eta_reduction.rs @@ -158,7 +158,7 @@ fn check_clousure<'tcx>(cx: &LateContext<'tcx>, outer_receiver: Option<&Expr<'tc cx.tcx.fn_sig(def).skip_binder().skip_binder() }, - ty::FnPtr(sig) => sig.skip_binder(), + ty::FnPtr(sig_tys, hdr) => sig_tys.with(*hdr).skip_binder(), ty::Closure(_, subs) => cx .tcx .signature_unclosure(subs.as_closure().sig(), Safety::Safe) diff --git a/src/tools/clippy/clippy_lints/src/methods/map_flatten.rs b/src/tools/clippy/clippy_lints/src/methods/map_flatten.rs index def8be2ef73dc..22a03825194e0 100644 --- a/src/tools/clippy/clippy_lints/src/methods/map_flatten.rs +++ b/src/tools/clippy/clippy_lints/src/methods/map_flatten.rs @@ -58,7 +58,7 @@ fn try_get_caller_ty_name_and_method_name( fn is_map_to_option(cx: &LateContext<'_>, map_arg: &Expr<'_>) -> bool { let map_closure_ty = cx.typeck_results().expr_ty(map_arg); match map_closure_ty.kind() { - ty::Closure(_, _) | ty::FnDef(_, _) | ty::FnPtr(_) => { + ty::Closure(_, _) | ty::FnDef(_, _) | ty::FnPtr(..) => { let map_closure_sig = match map_closure_ty.kind() { ty::Closure(_, args) => args.as_closure().sig(), _ => map_closure_ty.fn_sig(cx.tcx), diff --git a/src/tools/clippy/clippy_lints/src/mixed_read_write_in_expression.rs b/src/tools/clippy/clippy_lints/src/mixed_read_write_in_expression.rs index 6964d8c8dbb33..0b3769ecb7cc4 100644 --- a/src/tools/clippy/clippy_lints/src/mixed_read_write_in_expression.rs +++ b/src/tools/clippy/clippy_lints/src/mixed_read_write_in_expression.rs @@ -166,7 +166,7 @@ impl<'a, 'tcx> Visitor<'tcx> for DivergenceVisitor<'a, 'tcx> { ExprKind::Call(func, _) => { let typ = self.cx.typeck_results().expr_ty(func); match typ.kind() { - ty::FnDef(..) | ty::FnPtr(_) => { + ty::FnDef(..) | ty::FnPtr(..) => { let sig = typ.fn_sig(self.cx.tcx); if self.cx.tcx.instantiate_bound_regions_with_erased(sig).output().kind() == &ty::Never { self.report_diverging_sub_expr(e); diff --git a/src/tools/clippy/clippy_lints/src/multiple_unsafe_ops_per_block.rs b/src/tools/clippy/clippy_lints/src/multiple_unsafe_ops_per_block.rs index da74a7c7145a6..0bde0da3cd814 100644 --- a/src/tools/clippy/clippy_lints/src/multiple_unsafe_ops_per_block.rs +++ b/src/tools/clippy/clippy_lints/src/multiple_unsafe_ops_per_block.rs @@ -130,7 +130,7 @@ fn collect_unsafe_exprs<'tcx>( ExprKind::Call(path_expr, _) => { let sig = match *cx.typeck_results().expr_ty(path_expr).kind() { ty::FnDef(id, _) => cx.tcx.fn_sig(id).skip_binder(), - ty::FnPtr(sig) => sig, + ty::FnPtr(sig_tys, hdr) => sig_tys.with(hdr), _ => return Continue(Descend::Yes), }; if sig.safety() == Safety::Unsafe { diff --git a/src/tools/clippy/clippy_lints/src/mut_reference.rs b/src/tools/clippy/clippy_lints/src/mut_reference.rs index 0a3b769c3e604..3c0f06f66d105 100644 --- a/src/tools/clippy/clippy_lints/src/mut_reference.rs +++ b/src/tools/clippy/clippy_lints/src/mut_reference.rs @@ -79,7 +79,7 @@ fn check_arguments<'tcx>( fn_kind: &str, ) { match type_definition.kind() { - ty::FnDef(..) | ty::FnPtr(_) => { + ty::FnDef(..) | ty::FnPtr(..) 
=> { let parameters = type_definition.fn_sig(cx.tcx).skip_binder().inputs(); for (argument, parameter) in iter::zip(arguments, parameters) { match parameter.kind() { diff --git a/src/tools/clippy/clippy_utils/src/ty.rs b/src/tools/clippy/clippy_utils/src/ty.rs index bd48990aea95f..2f6faba073e95 100644 --- a/src/tools/clippy/clippy_utils/src/ty.rs +++ b/src/tools/clippy/clippy_utils/src/ty.rs @@ -541,7 +541,7 @@ pub fn peel_mid_ty_refs_is_mutable(ty: Ty<'_>) -> (Ty<'_>, usize, Mutability) { /// Returns `true` if the given type is an `unsafe` function. pub fn type_is_unsafe_function<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> bool { match ty.kind() { - ty::FnDef(..) | ty::FnPtr(_) => ty.fn_sig(cx.tcx).safety() == Safety::Unsafe, + ty::FnDef(..) | ty::FnPtr(..) => ty.fn_sig(cx.tcx).safety() == Safety::Unsafe, _ => false, } } @@ -721,7 +721,7 @@ pub fn ty_sig<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> Option Some(ExprFnSig::Sig(sig, None)), + ty::FnPtr(sig_tys, hdr) => Some(ExprFnSig::Sig(sig_tys.with(hdr), None)), ty::Dynamic(bounds, _, _) => { let lang_items = cx.tcx.lang_items(); match bounds.principal() { diff --git a/src/tools/clippy/clippy_utils/src/visitors.rs b/src/tools/clippy/clippy_utils/src/visitors.rs index 2a5d3536ff6b7..e5b6d3965e930 100644 --- a/src/tools/clippy/clippy_utils/src/visitors.rs +++ b/src/tools/clippy/clippy_utils/src/visitors.rs @@ -441,7 +441,7 @@ pub fn is_expr_unsafe<'tcx>(cx: &LateContext<'tcx>, e: &'tcx Expr<'_>) -> bool { ty::FnDef(id, _) if self.cx.tcx.fn_sig(id).skip_binder().safety() == Safety::Unsafe => { self.is_unsafe = true; }, - ty::FnPtr(sig) if sig.safety() == Safety::Unsafe => self.is_unsafe = true, + ty::FnPtr(_, hdr) if hdr.safety == Safety::Unsafe => self.is_unsafe = true, _ => walk_expr(self, e), }, ExprKind::Path(ref p) diff --git a/src/tools/clippy/src/main.rs b/src/tools/clippy/src/main.rs index c9af2138a7233..c9853e53f3b38 100644 --- a/src/tools/clippy/src/main.rs +++ b/src/tools/clippy/src/main.rs @@ -1,3 +1,6 @@ +// We need this feature as it changes `dylib` linking behavior and allows us to link to +// `rustc_driver`. +#![feature(rustc_private)] // warn on lints, that are included in `rust-lang/rust`s bootstrap #![warn(rust_2018_idioms, unused_lifetimes)] diff --git a/src/tools/clippy/tests/compile-test.rs b/src/tools/clippy/tests/compile-test.rs index c7080e5dcdc9e..64253514fbe67 100644 --- a/src/tools/clippy/tests/compile-test.rs +++ b/src/tools/clippy/tests/compile-test.rs @@ -1,3 +1,6 @@ +// We need this feature as it changes `dylib` linking behavior and allows us to link to +// `rustc_driver`. 
+#![feature(rustc_private)] #![warn(rust_2018_idioms, unused_lifetimes)] #![allow(unused_extern_crates)] diff --git a/src/tools/clippy/tests/ui/borrow_interior_mutable_const/others.rs b/src/tools/clippy/tests/ui/borrow_interior_mutable_const/others.rs index 0ea93dd84625c..452d1b198133e 100644 --- a/src/tools/clippy/tests/ui/borrow_interior_mutable_const/others.rs +++ b/src/tools/clippy/tests/ui/borrow_interior_mutable_const/others.rs @@ -10,7 +10,7 @@ use std::sync::Once; const ATOMIC: AtomicUsize = AtomicUsize::new(5); const CELL: Cell = Cell::new(6); -const ATOMIC_TUPLE: ([AtomicUsize; 1], Vec, u8) = ([ATOMIC], Vec::new(), 7); +const ATOMIC_TUPLE: ([AtomicUsize; 1], Option>, u8) = ([ATOMIC], None, 7); const INTEGER: u8 = 8; const STRING: String = String::new(); const STR: &str = "012345"; @@ -74,7 +74,6 @@ fn main() { let _ = &(&&&&ATOMIC_TUPLE).0; //~ ERROR: interior mutability let _ = &ATOMIC_TUPLE.0[0]; //~ ERROR: interior mutability let _ = ATOMIC_TUPLE.0[0].load(Ordering::SeqCst); //~ ERROR: interior mutability - let _ = &*ATOMIC_TUPLE.1; let _ = &ATOMIC_TUPLE.2; let _ = (&&&&ATOMIC_TUPLE).0; let _ = (&&&&ATOMIC_TUPLE).2; diff --git a/src/tools/clippy/tests/ui/borrow_interior_mutable_const/others.stderr b/src/tools/clippy/tests/ui/borrow_interior_mutable_const/others.stderr index 33c774667f942..9a9028c864986 100644 --- a/src/tools/clippy/tests/ui/borrow_interior_mutable_const/others.stderr +++ b/src/tools/clippy/tests/ui/borrow_interior_mutable_const/others.stderr @@ -92,7 +92,7 @@ LL | let _ = ATOMIC_TUPLE.0[0].load(Ordering::SeqCst); = help: assign this const to a local or static variable, and use the variable here error: a `const` item with interior mutability should not be borrowed - --> tests/ui/borrow_interior_mutable_const/others.rs:82:13 + --> tests/ui/borrow_interior_mutable_const/others.rs:81:13 | LL | let _ = ATOMIC_TUPLE.0[0]; | ^^^^^^^^^^^^ @@ -100,7 +100,7 @@ LL | let _ = ATOMIC_TUPLE.0[0]; = help: assign this const to a local or static variable, and use the variable here error: a `const` item with interior mutability should not be borrowed - --> tests/ui/borrow_interior_mutable_const/others.rs:87:5 + --> tests/ui/borrow_interior_mutable_const/others.rs:86:5 | LL | CELL.set(2); | ^^^^ @@ -108,7 +108,7 @@ LL | CELL.set(2); = help: assign this const to a local or static variable, and use the variable here error: a `const` item with interior mutability should not be borrowed - --> tests/ui/borrow_interior_mutable_const/others.rs:88:16 + --> tests/ui/borrow_interior_mutable_const/others.rs:87:16 | LL | assert_eq!(CELL.get(), 6); | ^^^^ diff --git a/src/tools/compiletest/src/json.rs b/src/tools/compiletest/src/json.rs index 76b83f02b149a..0da93dcafa20b 100644 --- a/src/tools/compiletest/src/json.rs +++ b/src/tools/compiletest/src/json.rs @@ -1,5 +1,4 @@ //! These structs are a subset of the ones found in `rustc_errors::json`. -//! They are only used for deserialization of JSON output provided by libtest. use std::path::{Path, PathBuf}; use std::str::FromStr; @@ -127,11 +126,10 @@ pub fn extract_rendered(output: &str) -> String { // Ignore the notification. None } else { - print!( - "failed to decode compiler output as json: line: {}\noutput: {}", - line, output - ); - panic!() + // This function is called for both compiler and non-compiler output, + // so if the line isn't recognized as JSON from the compiler then + // just print it as-is. 
+ Some(format!("{line}\n")) } } else { // preserve non-JSON lines, such as ICEs diff --git a/src/tools/compiletest/src/runtest.rs b/src/tools/compiletest/src/runtest.rs index b1b6d6fc8eb18..59fce44d1c725 100644 --- a/src/tools/compiletest/src/runtest.rs +++ b/src/tools/compiletest/src/runtest.rs @@ -3027,11 +3027,17 @@ impl<'test> TestCx<'test> { const PREFIX: &str = "MONO_ITEM "; const CGU_MARKER: &str = "@@"; + // Some MonoItems can contain {closure@/path/to/checkout/tests/codgen-units/test.rs} + // To prevent the current dir from leaking, we just replace the entire path to the test + // file with TEST_PATH. let actual: Vec = proc_res .stdout .lines() .filter(|line| line.starts_with(PREFIX)) - .map(|line| str_to_mono_item(line, true)) + .map(|line| { + line.replace(&self.testpaths.file.display().to_string(), "TEST_PATH").to_string() + }) + .map(|line| str_to_mono_item(&line, true)) .collect(); let expected: Vec = errors::load_errors(&self.testpaths.file, None) @@ -3729,15 +3735,14 @@ impl<'test> TestCx<'test> { } if self.config.bless { - cmd.env("RUSTC_BLESS_TEST", "--bless"); - // Assume this option is active if the environment variable is "defined", with _any_ value. - // As an example, a `Makefile` can use this option by: + // If we're running in `--bless` mode, set an environment variable to tell + // `run_make_support` to bless snapshot files instead of checking them. // - // ifdef RUSTC_BLESS_TEST - // cp "$(TMPDIR)"/actual_something.ext expected_something.ext - // else - // $(DIFF) expected_something.ext "$(TMPDIR)"/actual_something.ext - // endif + // The value is this test's source directory, because the support code + // will need that path in order to bless the _original_ snapshot files, + // not the copies in `rmake_out`. + // (See .) + cmd.env("RUSTC_BLESS_TEST", &self.testpaths.file); } if self.config.target.contains("msvc") && !self.config.cc.is_empty() { diff --git a/src/tools/generate-windows-sys/src/main.rs b/src/tools/generate-windows-sys/src/main.rs index fe1b1bd5ceb14..6dbf29d957f12 100644 --- a/src/tools/generate-windows-sys/src/main.rs +++ b/src/tools/generate-windows-sys/src/main.rs @@ -35,7 +35,6 @@ fn main() -> Result<(), Box> { let mut f = std::fs::File::options().append(true).open("windows_sys.rs")?; f.write_all(ARM32_SHIM.as_bytes())?; writeln!(&mut f, "// ignore-tidy-filelength")?; - writeln!(&mut f, "use super::windows_targets;")?; Ok(()) } diff --git a/src/tools/miri/README.md b/src/tools/miri/README.md index 499d0fe2f1c93..a0bff386a71dd 100644 --- a/src/tools/miri/README.md +++ b/src/tools/miri/README.md @@ -212,7 +212,6 @@ degree documented below): - All Rust [Tier 1 targets](https://doc.rust-lang.org/rustc/platform-support.html) are supported by Miri. They are all checked on Miri's CI, and some (at least one per OS) are even checked on every Rust PR, so the shipped Miri should always work on these targets. -- `aarch64-apple-darwin` is supported. - `s390x-unknown-linux-gnu` is supported as our "big-endian target of choice". - For every other target with OS `linux`, `macos`, or `windows`, Miri should generally work, but we make no promises and we don't run tests for such targets. 
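
As a rough illustration of the compiletest change above that normalizes codegen-units output, the sketch below performs the same plain substring replacement of the test file's path with the literal `TEST_PATH`, so `{closure@...}` mono items compare equal no matter where the checkout lives. `normalize_mono_item_line` and the sample line are hypothetical, standing in for the `line.replace(&self.testpaths.file.display().to_string(), "TEST_PATH")` call in `runtest.rs`.

use std::path::Path;

// Replace any occurrence of the test file's path with a fixed placeholder.
fn normalize_mono_item_line(line: &str, test_file: &Path) -> String {
    line.replace(&test_file.display().to_string(), "TEST_PATH")
}

fn main() {
    let test_file = Path::new("/checkout/tests/codegen-units/example.rs");
    // A made-up line in the shape compiletest sees; only the embedded path matters here.
    let line = "MONO_ITEM fn helper::{closure@/checkout/tests/codegen-units/example.rs:3:5}";
    assert_eq!(
        normalize_mono_item_line(line, test_file),
        "MONO_ITEM fn helper::{closure@TEST_PATH:3:5}"
    );
    println!("ok");
}
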
diff --git a/src/tools/miri/ci/ci.sh b/src/tools/miri/ci/ci.sh index 5e75638f46709..3e90ecc5c0397 100755 --- a/src/tools/miri/ci/ci.sh +++ b/src/tools/miri/ci/ci.sh @@ -137,7 +137,7 @@ case $HOST_TARGET in MANY_SEEDS=16 TEST_TARGET=x86_64-pc-windows-gnu run_tests ;; aarch64-apple-darwin) - # Host (tier 2) + # Host GC_STRESS=1 MIR_OPT=1 MANY_SEEDS=64 TEST_BENCH=1 CARGO_MIRI_ENV=1 run_tests # Extra tier 1 MANY_SEEDS=64 TEST_TARGET=i686-pc-windows-gnu run_tests diff --git a/src/tools/miri/src/lib.rs b/src/tools/miri/src/lib.rs index 2b3ae6df5de8a..7a11e353f9d98 100644 --- a/src/tools/miri/src/lib.rs +++ b/src/tools/miri/src/lib.rs @@ -12,7 +12,6 @@ #![feature(let_chains)] #![feature(trait_upcasting)] #![feature(strict_overflow_ops)] -#![feature(is_none_or)] // Configure clippy and other lints #![allow( clippy::collapsible_else_if, @@ -56,6 +55,7 @@ extern crate either; extern crate tracing; // The rustc crates we need +extern crate rustc_attr; extern crate rustc_apfloat; extern crate rustc_ast; extern crate rustc_const_eval; diff --git a/src/tools/miri/src/machine.rs b/src/tools/miri/src/machine.rs index 94598e7d2e3cc..754f8cf13814a 100644 --- a/src/tools/miri/src/machine.rs +++ b/src/tools/miri/src/machine.rs @@ -11,6 +11,7 @@ use rand::rngs::StdRng; use rand::Rng; use rand::SeedableRng; +use rustc_attr::InlineAttr; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; #[allow(unused)] use rustc_data_structures::static_assert_size; @@ -23,6 +24,7 @@ use rustc_middle::{ Instance, Ty, TyCtxt, }, }; +use rustc_session::config::InliningThreshold; use rustc_span::def_id::{CrateNum, DefId}; use rustc_span::{Span, SpanData, Symbol}; use rustc_target::abi::{Align, Size}; @@ -47,10 +49,10 @@ pub const SIGRTMIN: i32 = 34; /// `SIGRTMAX` - `SIGRTMIN` >= 8 (which is the value of `_POSIX_RTSIG_MAX`) pub const SIGRTMAX: i32 = 42; -/// Each const has multiple addresses, but only this many. Since const allocations are never -/// deallocated, choosing a new [`AllocId`] and thus base address for each evaluation would -/// produce unbounded memory usage. -const ADDRS_PER_CONST: usize = 16; +/// Each anonymous global (constant, vtable, function pointer, ...) has multiple addresses, but only +/// this many. Since const allocations are never deallocated, choosing a new [`AllocId`] and thus +/// base address for each evaluation would produce unbounded memory usage. +const ADDRS_PER_ANON_GLOBAL: usize = 32; /// Extra data stored with each stack frame pub struct FrameExtra<'tcx> { @@ -1372,7 +1374,7 @@ impl<'tcx> Machine<'tcx> for MiriMachine<'tcx> { catch_unwind: None, timing, is_user_relevant: ecx.machine.is_user_relevant(&frame), - salt: ecx.machine.rng.borrow_mut().gen::() % ADDRS_PER_CONST, + salt: ecx.machine.rng.borrow_mut().gen::() % ADDRS_PER_ANON_GLOBAL, }; Ok(frame.with_extra(extra)) @@ -1518,4 +1520,45 @@ impl<'tcx> Machine<'tcx> for MiriMachine<'tcx> { Entry::Occupied(oe) => Ok(oe.get().clone()), } } + + fn get_global_alloc_salt( + ecx: &InterpCx<'tcx, Self>, + instance: Option>, + ) -> usize { + let unique = if let Some(instance) = instance { + // Functions cannot be identified by pointers, as asm-equal functions can get + // deduplicated by the linker (we set the "unnamed_addr" attribute for LLVM) and + // functions can be duplicated across crates. We thus generate a new `AllocId` for every + // mention of a function. This means that `main as fn() == main as fn()` is false, while + // `let x = main as fn(); x == x` is true. 
However, as a quality-of-life feature it can + // be useful to identify certain functions uniquely, e.g. for backtraces. So we identify + // whether codegen will actually emit duplicate functions. It does that when they have + // non-lifetime generics, or when they can be inlined. All other functions are given a + // unique address. This is not a stable guarantee! The `inline` attribute is a hint and + // cannot be relied upon for anything. But if we don't do this, the + // `__rust_begin_short_backtrace`/`__rust_end_short_backtrace` logic breaks and panic + // backtraces look terrible. + let is_generic = instance + .args + .into_iter() + .any(|kind| !matches!(kind.unpack(), ty::GenericArgKind::Lifetime(_))); + let can_be_inlined = matches!( + ecx.tcx.sess.opts.unstable_opts.cross_crate_inline_threshold, + InliningThreshold::Always + ) || !matches!( + ecx.tcx.codegen_fn_attrs(instance.def_id()).inline, + InlineAttr::Never + ); + !is_generic && !can_be_inlined + } else { + // Non-functions are never unique. + false + }; + // Always use the same salt if the allocation is unique. + if unique { + CTFE_ALLOC_SALT + } else { + ecx.machine.rng.borrow_mut().gen::() % ADDRS_PER_ANON_GLOBAL + } + } } diff --git a/src/tools/miri/tests/pass/dyn-traits.rs b/src/tools/miri/tests/pass/dyn-traits.rs index 908d521a0d816..f6220120968f7 100644 --- a/src/tools/miri/tests/pass/dyn-traits.rs +++ b/src/tools/miri/tests/pass/dyn-traits.rs @@ -141,7 +141,17 @@ fn unsized_dyn_autoderef() { } */ +fn vtable_ptr_eq() { + use std::{fmt, ptr}; + + // We don't always get the same vtable when casting this to a wide pointer. + let x = &2; + let x_wide = x as &dyn fmt::Display; + assert!((0..256).any(|_| !ptr::eq(x as &dyn fmt::Display, x_wide))); +} + fn main() { ref_box_dyn(); box_box_trait(); + vtable_ptr_eq(); } diff --git a/src/tools/miri/tests/pass/function_pointers.rs b/src/tools/miri/tests/pass/function_pointers.rs index 2aa3ebf2dd0b4..a5c4bc5e0d95b 100644 --- a/src/tools/miri/tests/pass/function_pointers.rs +++ b/src/tools/miri/tests/pass/function_pointers.rs @@ -82,7 +82,8 @@ fn main() { assert!(return_fn_ptr(i) == i); assert!(return_fn_ptr(i) as unsafe fn() -> i32 == i as fn() -> i32 as unsafe fn() -> i32); // Miri gives different addresses to different reifications of a generic function. - assert!(return_fn_ptr(f) != f); + // at least if we try often enough. + assert!((0..256).any(|_| return_fn_ptr(f) != f)); // However, if we only turn `f` into a function pointer and use that pointer, // it is equal to itself. let f2 = f as fn() -> i32; diff --git a/src/tools/miri/tests/pass/rc.rs b/src/tools/miri/tests/pass/rc.rs index 6dd1b3aff9e72..b1470dabc26bd 100644 --- a/src/tools/miri/tests/pass/rc.rs +++ b/src/tools/miri/tests/pass/rc.rs @@ -75,7 +75,8 @@ fn rc_fat_ptr_eq() { let p = Rc::new(1) as Rc; let a: *const dyn Debug = &*p; let r = Rc::into_raw(p); - assert!(a == r); + // Only compare the pointer parts, as the vtable might differ. + assert!(a as *const () == r as *const ()); drop(unsafe { Rc::from_raw(r) }); } diff --git a/src/tools/miri/tests/pass/tree_borrows/vec_unique.rs b/src/tools/miri/tests/pass/tree_borrows/vec_unique.rs index 05090d685ab2e..9debe224d4583 100644 --- a/src/tools/miri/tests/pass/tree_borrows/vec_unique.rs +++ b/src/tools/miri/tests/pass/tree_borrows/vec_unique.rs @@ -1,3 +1,5 @@ +// FIXME: This test is broken since https://github.com/rust-lang/rust/pull/126793, +// possibly related to the additional struct between Vec and Unique. 
//@revisions: default uniq // We disable the GC for this test because it would change what is printed. //@compile-flags: -Zmiri-tree-borrows -Zmiri-provenance-gc=0 diff --git a/src/tools/miri/tests/pass/tree_borrows/vec_unique.uniq.stderr b/src/tools/miri/tests/pass/tree_borrows/vec_unique.uniq.stderr index 7942e9884f401..e3796a742e9ae 100644 --- a/src/tools/miri/tests/pass/tree_borrows/vec_unique.uniq.stderr +++ b/src/tools/miri/tests/pass/tree_borrows/vec_unique.uniq.stderr @@ -2,7 +2,9 @@ Warning: this tree is indicative only. Some tags may have been hidden. 0.. 2 | Act | └─┬── -|-----| └─┬── -|-----| └─┬── -|-----| └──── +|-----| ├──── +|-----| ├──── +|-----| └─┬── +|-----| ├──── +|-----| └──── ────────────────────────────────────────────────── diff --git a/src/tools/run-make-support/Cargo.toml b/src/tools/run-make-support/Cargo.toml index c4d5446a2481e..eae6022b803fd 100644 --- a/src/tools/run-make-support/Cargo.toml +++ b/src/tools/run-make-support/Cargo.toml @@ -11,3 +11,4 @@ wasmparser = { version = "0.214", default-features = false, features = ["std"] } regex = "1.8" # 1.8 to avoid memchr 2.6.0, as 2.5.0 is pinned in the workspace gimli = "0.31.0" build_helper = { path = "../build_helper" } +serde_json = "1.0" diff --git a/src/tools/run-make-support/src/assertion_helpers.rs b/src/tools/run-make-support/src/assertion_helpers.rs index 6d256fc594d8e..b4da65aff4ab1 100644 --- a/src/tools/run-make-support/src/assertion_helpers.rs +++ b/src/tools/run-make-support/src/assertion_helpers.rs @@ -77,6 +77,20 @@ pub fn assert_not_contains_regex, N: AsRef>(haystack: H, need } } +/// Assert that `haystack` contains `needle` a `count` number of times. +#[track_caller] +pub fn assert_count_is, N: AsRef>(count: usize, haystack: H, needle: N) { + let haystack = haystack.as_ref(); + let needle = needle.as_ref(); + if count != haystack.matches(needle).count() { + eprintln!("=== HAYSTACK ==="); + eprintln!("{}", haystack); + eprintln!("=== NEEDLE ==="); + eprintln!("{}", needle); + panic!("needle did not appear {count} times in haystack"); + } +} + /// Assert that all files in `dir1` exist and have the same content in `dir2` pub fn assert_dirs_are_equal(dir1: impl AsRef, dir2: impl AsRef) { let dir2 = dir2.as_ref(); diff --git a/src/tools/run-make-support/src/diff/mod.rs b/src/tools/run-make-support/src/diff/mod.rs index b0ed4d5445c48..ee48e3733668d 100644 --- a/src/tools/run-make-support/src/diff/mod.rs +++ b/src/tools/run-make-support/src/diff/mod.rs @@ -112,14 +112,8 @@ impl Diff { let (expected_name, actual_name, output, actual) = self.run_common(); if !output.is_empty() { - // If we can bless (meaning we have a file to write into and the `RUSTC_BLESS_TEST` - // environment variable set), then we write into the file and return. - if let Some(ref expected_file) = self.expected_file { - if std::env::var("RUSTC_BLESS_TEST").is_ok() { - println!("Blessing `{}`", expected_file.display()); - fs::write(expected_file, actual); - return; - } + if self.maybe_bless_expected_file(&actual) { + return; } panic!( "test failed: `{}` is different from `{}`\n\n{}", @@ -134,14 +128,8 @@ impl Diff { let (expected_name, actual_name, output, actual) = self.run_common(); if output.is_empty() { - // If we can bless (meaning we have a file to write into and the `RUSTC_BLESS_TEST` - // environment variable set), then we write into the file and return. 
- if let Some(ref expected_file) = self.expected_file { - if std::env::var("RUSTC_BLESS_TEST").is_ok() { - println!("Blessing `{}`", expected_file.display()); - fs::write(expected_file, actual); - return; - } + if self.maybe_bless_expected_file(&actual) { + return; } panic!( "test failed: `{}` is not different from `{}`\n\n{}", @@ -149,4 +137,24 @@ impl Diff { ) } } + + /// If we have an expected file to write into, and `RUSTC_BLESS_TEST` is + /// set, then write the actual output into the file and return `true`. + /// + /// We assume that `RUSTC_BLESS_TEST` contains the path to the original test's + /// source directory. That lets us bless the original snapshot file in the + /// source tree, not the copy in `rmake_out` that we would normally use. + fn maybe_bless_expected_file(&self, actual: &str) -> bool { + let Some(ref expected_file) = self.expected_file else { + return false; + }; + let Ok(bless_dir) = std::env::var("RUSTC_BLESS_TEST") else { + return false; + }; + + let bless_file = Path::new(&bless_dir).join(expected_file); + println!("Blessing `{}`", bless_file.display()); + fs::write(bless_file, actual); + true + } } diff --git a/src/tools/run-make-support/src/external_deps/llvm.rs b/src/tools/run-make-support/src/external_deps/llvm.rs index dc651fdd8205a..e9315856cd775 100644 --- a/src/tools/run-make-support/src/external_deps/llvm.rs +++ b/src/tools/run-make-support/src/external_deps/llvm.rs @@ -48,6 +48,12 @@ pub fn llvm_bcanalyzer() -> LlvmBcanalyzer { LlvmBcanalyzer::new() } +/// Construct a new `llvm-dwarfdump` invocation. This assumes that `llvm-dwarfdump` is available +/// at `$LLVM_BIN_DIR/llvm-dwarfdump`. +pub fn llvm_dwarfdump() -> LlvmDwarfdump { + LlvmDwarfdump::new() +} + /// A `llvm-readobj` invocation builder. #[derive(Debug)] #[must_use] @@ -97,6 +103,13 @@ pub struct LlvmBcanalyzer { cmd: Command, } +/// A `llvm-dwarfdump` invocation builder. +#[derive(Debug)] +#[must_use] +pub struct LlvmDwarfdump { + cmd: Command, +} + crate::macros::impl_common_helpers!(LlvmReadobj); crate::macros::impl_common_helpers!(LlvmProfdata); crate::macros::impl_common_helpers!(LlvmFilecheck); @@ -104,6 +117,7 @@ crate::macros::impl_common_helpers!(LlvmObjdump); crate::macros::impl_common_helpers!(LlvmAr); crate::macros::impl_common_helpers!(LlvmNm); crate::macros::impl_common_helpers!(LlvmBcanalyzer); +crate::macros::impl_common_helpers!(LlvmDwarfdump); /// Generate the path to the bin directory of LLVM. #[must_use] @@ -271,12 +285,24 @@ impl LlvmAr { self } + /// Like `obj_to_ar` except creating a thin archive. + pub fn obj_to_thin_ar(&mut self) -> &mut Self { + self.cmd.arg("rcus").arg("--thin"); + self + } + /// Extract archive members back to files. pub fn extract(&mut self) -> &mut Self { self.cmd.arg("x"); self } + /// Print the table of contents. + pub fn table_of_contents(&mut self) -> &mut Self { + self.cmd.arg("t"); + self + } + /// Provide an output, then an input file. Bundled in one function, as llvm-ar has /// no "--output"-style flag. pub fn output_input(&mut self, out: impl AsRef, input: impl AsRef) -> &mut Self { @@ -317,3 +343,19 @@ impl LlvmBcanalyzer { self } } + +impl LlvmDwarfdump { + /// Construct a new `llvm-dwarfdump` invocation. This assumes that `llvm-dwarfdump` is available + /// at `$LLVM_BIN_DIR/llvm-dwarfdump`. + pub fn new() -> Self { + let llvm_dwarfdump = llvm_bin_dir().join("llvm-dwarfdump"); + let cmd = Command::new(llvm_dwarfdump); + Self { cmd } + } + + /// Provide an input file. 
+ pub fn input<P: AsRef<Path>>(&mut self, path: P) -> &mut Self { + self.cmd.arg(path.as_ref()); + self + } +} diff --git a/src/tools/run-make-support/src/external_deps/rustdoc.rs index 96c2218a563e6..96b1c719e2e3e 100644 --- a/src/tools/run-make-support/src/external_deps/rustdoc.rs +++ b/src/tools/run-make-support/src/external_deps/rustdoc.rs @@ -71,14 +71,8 @@ impl Rustdoc { self } - /// Specify path to the output folder. - pub fn output<P: AsRef<Path>>(&mut self, path: P) -> &mut Self { - self.cmd.arg("-o"); - self.cmd.arg(path.as_ref()); - self - } - /// Specify output directory. + #[doc(alias = "output")] pub fn out_dir<P: AsRef<Path>>(&mut self, path: P) -> &mut Self { self.cmd.arg("--out-dir").arg(path.as_ref()); self diff --git a/src/tools/run-make-support/src/lib.rs index a44dd00ad7986..fc20fd3b2e86a 100644 --- a/src/tools/run-make-support/src/lib.rs +++ b/src/tools/run-make-support/src/lib.rs @@ -38,6 +38,7 @@ pub use bstr; pub use gimli; pub use object; pub use regex; +pub use serde_json; pub use wasmparser; // Re-exports of external dependencies. @@ -49,8 +50,9 @@ pub use c_build::{build_native_dynamic_lib, build_native_static_lib, build_nativ pub use clang::{clang, Clang}; pub use htmldocck::htmldocck; pub use llvm::{ - llvm_ar, llvm_bcanalyzer, llvm_filecheck, llvm_nm, llvm_objdump, llvm_profdata, llvm_readobj, - LlvmAr, LlvmBcanalyzer, LlvmFilecheck, LlvmNm, LlvmObjdump, LlvmProfdata, LlvmReadobj, + llvm_ar, llvm_bcanalyzer, llvm_dwarfdump, llvm_filecheck, llvm_nm, llvm_objdump, llvm_profdata, + llvm_readobj, LlvmAr, LlvmBcanalyzer, LlvmDwarfdump, LlvmFilecheck, LlvmNm, LlvmObjdump, + LlvmProfdata, LlvmReadobj, }; pub use python::python_command; pub use rustc::{aux_build, bare_rustc, rustc, Rustc}; @@ -86,7 +88,7 @@ pub use path_helpers::{ pub use scoped_run::{run_in_tmpdir, test_while_readonly}; pub use assertion_helpers::{ - assert_contains, assert_contains_regex, assert_dirs_are_equal, assert_equals, + assert_contains, assert_contains_regex, assert_count_is, assert_dirs_are_equal, assert_equals, assert_not_contains, assert_not_contains_regex, }; diff --git a/src/tools/run-make-support/src/path_helpers.rs index b788bc6ef302c..1e6e44c458423 100644 --- a/src/tools/run-make-support/src/path_helpers.rs +++ b/src/tools/run-make-support/src/path_helpers.rs @@ -84,3 +84,18 @@ pub fn has_suffix<P: AsRef<Path>>(path: P, suffix: &str) -> bool { pub fn filename_contains<P: AsRef<Path>>(path: P, needle: &str) -> bool { path.as_ref().file_name().is_some_and(|name| name.to_str().unwrap().contains(needle)) } + +/// Helper for reading entries in a given directory and its children.
+pub fn read_dir_entries_recursive, F: FnMut(&Path)>(dir: P, mut callback: F) { + fn read_dir_entries_recursive_inner, F: FnMut(&Path)>(dir: P, callback: &mut F) { + for entry in rfs::read_dir(dir) { + let path = entry.unwrap().path(); + callback(&path); + if path.is_dir() { + read_dir_entries_recursive_inner(path, callback); + } + } + } + + read_dir_entries_recursive_inner(dir, &mut callback); +} diff --git a/src/tools/rust-analyzer/.github/workflows/publish-libs.yaml b/src/tools/rust-analyzer/.github/workflows/publish-libs.yaml index 34ca53e2e53ad..f1533bf26e52b 100644 --- a/src/tools/rust-analyzer/.github/workflows/publish-libs.yaml +++ b/src/tools/rust-analyzer/.github/workflows/publish-libs.yaml @@ -30,7 +30,6 @@ jobs: run: | git config --global user.email "runner@gha.local" git config --global user.name "GitHub Action" - # Remove r-a crates from the workspaces so we don't auto-publish them as well - sed -i 's/ "crates\/\*"//' ./Cargo.toml - sed -i 's/ "xtask\/"//' ./Cargo.toml + # Only publish the crates under lib/ + sed -i 's|^members = .*$|members = ["lib/*"]|' Cargo.toml cargo workspaces publish --yes --exact --from-git --no-git-commit --allow-dirty diff --git a/src/tools/rust-analyzer/Cargo.lock b/src/tools/rust-analyzer/Cargo.lock index b98a1195d8bb7..41dc440509934 100644 --- a/src/tools/rust-analyzer/Cargo.lock +++ b/src/tools/rust-analyzer/Cargo.lock @@ -52,16 +52,16 @@ checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" [[package]] name = "backtrace" -version = "0.3.72" +version = "0.3.73" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17c6a35df3749d2e8bb1b7b21a976d82b15548788d2735b9d82f329268f71a11" +checksum = "5cc23269a4f8976d0a4d2e7109211a419fe30e8d88d677cd60b6bc79c5732e0a" dependencies = [ "addr2line", "cc", "cfg-if", "libc", "miniz_oxide", - "object 0.35.0", + "object 0.36.3", "rustc-demangle", ] @@ -92,9 +92,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.5.0" +version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf4b9d6a944f767f8e5e0db018570623c85f3d925ac718db4e06d0187adb21c1" +checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de" [[package]] name = "byteorder" @@ -136,9 +136,9 @@ dependencies = [ [[package]] name = "cc" -version = "1.0.98" +version = "1.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41c270e7540d725e65ac7f1b212ac8ce349719624d7bcff99f8e2e488e8cf03f" +checksum = "e9e8aabfac534be767c909e0690571677d49f41bd8465ae876fe043d52ba5292" [[package]] name = "cfg" @@ -148,10 +148,10 @@ dependencies = [ "derive_arbitrary", "expect-test", "intern", - "mbe", "oorandom", "rustc-hash", "syntax", + "syntax-bridge", "tt", ] @@ -185,7 +185,7 @@ version = "0.98.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d5f2eb1cd6054da221bd1ac0197fb2fe5e2caf3dcb93619398fc1433f8f09093" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "chalk-derive", ] @@ -226,9 +226,9 @@ checksum = "7704b5fdd17b18ae31c4c1da5a2e0305a2bf17b5249300a9ee9ed7b72114c636" [[package]] name = "cov-mark" -version = "2.0.0-pre.1" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d48d8f76bd9331f19fe2aaf3821a9f9fb32c3963e1e3d6ce82a8c09cef7444a" +checksum = "0570650661aa447e7335f1d5e4f499d8e58796e617bedc9267d971e51c8b49d4" [[package]] name = "crc32fast" @@ -366,9 +366,9 @@ checksum = 
"9bda8e21c04aca2ae33ffc2fd8c23134f3cac46db123ba97bd9d3f3b8a4a85e1" [[package]] name = "either" -version = "1.12.0" +version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3dca9240753cf90908d7e4aac30f630662b02aebaa1b58a3cadabdb23385b58b" +checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" [[package]] name = "ena" @@ -397,14 +397,14 @@ dependencies = [ [[package]] name = "filetime" -version = "0.2.23" +version = "0.2.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ee447700ac8aa0b2f2bd7bc4462ad686ba06baa6727ac149a2d6277f0d240fd" +checksum = "bf401df4a4e3872c4fe8151134cf483738e74b67fc934d6532c882b3d24a4550" dependencies = [ "cfg-if", "libc", - "redox_syscall 0.4.1", - "windows-sys 0.52.0", + "libredox", + "windows-sys 0.59.0", ] [[package]] @@ -415,31 +415,14 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" [[package]] name = "flate2" -version = "1.0.30" +version = "1.0.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f54427cfd1c7829e2a139fcefea601bf088ebca651d2bf53ebc600eac295dae" +checksum = "7f211bbe8e69bbd0cfdea405084f128ae8b4aaa6b0b522fc8f2b009084797920" dependencies = [ "crc32fast", "miniz_oxide", ] -[[package]] -name = "flycheck" -version = "0.0.0" -dependencies = [ - "cargo_metadata", - "crossbeam-channel", - "paths", - "process-wrap", - "project-model", - "rustc-hash", - "serde", - "serde_json", - "stdx", - "toolchain", - "tracing", -] - [[package]] name = "form_urlencoded" version = "1.2.1" @@ -529,7 +512,7 @@ version = "0.0.0" dependencies = [ "arrayvec", "base-db", - "bitflags 2.5.0", + "bitflags 2.6.0", "cfg", "cov-mark", "dashmap", @@ -554,6 +537,7 @@ dependencies = [ "span", "stdx", "syntax", + "syntax-bridge", "test-fixture", "test-utils", "tracing", @@ -582,6 +566,7 @@ dependencies = [ "span", "stdx", "syntax", + "syntax-bridge", "tracing", "triomphe", "tt", @@ -593,7 +578,7 @@ version = "0.0.0" dependencies = [ "arrayvec", "base-db", - "bitflags 2.5.0", + "bitflags 2.6.0", "chalk-derive", "chalk-ir", "chalk-recursive", @@ -722,7 +707,7 @@ version = "0.0.0" dependencies = [ "arrayvec", "base-db", - "bitflags 2.5.0", + "bitflags 2.6.0", "cov-mark", "crossbeam-channel", "either", @@ -803,9 +788,9 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.2.6" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26" +checksum = "de3fc2e30ba82dd1b3911c8de1ffc143c74a914a14e99514d7637e3099df5ea0" dependencies = [ "equivalent", "hashbrown", @@ -895,9 +880,9 @@ checksum = "3752f229dcc5a481d60f385fa479ff46818033d881d2d801aa27dffcfb5e8306" [[package]] name = "lazy_static" -version = "1.4.0" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" +checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" [[package]] name = "libc" @@ -907,19 +892,19 @@ checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c" [[package]] name = "libloading" -version = "0.8.3" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c2a198fb6b0eada2a8df47933734e6d35d350665a33a3593d7164fa52c75c19" +checksum = "4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4" dependencies = [ "cfg-if", - "windows-targets 0.52.5", + "windows-targets 
0.52.6", ] [[package]] name = "libmimalloc-sys" -version = "0.1.38" +version = "0.1.39" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e7bb23d733dfcc8af652a78b7bf232f0e967710d044732185e561e47c0336b6" +checksum = "23aa6811d3bd4deb8a84dde645f943476d13b248d818edcf8ce0b2f37f036b44" dependencies = [ "cc", "libc", @@ -931,8 +916,9 @@ version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "libc", + "redox_syscall", ] [[package]] @@ -996,9 +982,9 @@ dependencies = [ [[package]] name = "log" -version = "0.4.21" +version = "0.4.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90ed8c1e510134f979dbc4f070f87d4313098b704861a105fe34231c70a3901c" +checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" [[package]] name = "lsp-server" @@ -1056,6 +1042,7 @@ dependencies = [ "span", "stdx", "syntax", + "syntax-bridge", "test-utils", "tracing", "tt", @@ -1063,9 +1050,9 @@ dependencies = [ [[package]] name = "memchr" -version = "2.7.2" +version = "2.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c8640c5d730cb13ebd907d8d04b52f55ac9a2eec55b440c8892f40d56c76c1d" +checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" [[package]] name = "memmap2" @@ -1087,18 +1074,18 @@ dependencies = [ [[package]] name = "mimalloc" -version = "0.1.42" +version = "0.1.43" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e9186d86b79b52f4a77af65604b51225e8db1d6ee7e3f41aec1e40829c71a176" +checksum = "68914350ae34959d83f732418d51e2427a794055d0b9529f48259ac07af65633" dependencies = [ "libmimalloc-sys", ] [[package]] name = "miniz_oxide" -version = "0.7.3" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87dfd01fe195c66b572b37921ad8803d010623c0aca821bea2302239d155cdae" +checksum = "b8a240ddb74feaf34a79a7add65a741f3167852fba007066dcac1ca548d89c08" dependencies = [ "adler", ] @@ -1130,7 +1117,7 @@ version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ab2156c4fce2f8df6c499cc1c763e4394b7482525bf2a9701c9d79d215f519e4" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "cfg-if", "cfg_aliases", "libc", @@ -1148,7 +1135,7 @@ version = "6.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6205bd8bb1e454ad2e27422015fb5e4f2bcc7e08fa8f27058670d208324a4d2d" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "crossbeam-channel", "filetime", "fsevent-sys", @@ -1163,11 +1150,11 @@ dependencies = [ [[package]] name = "nu-ansi-term" -version = "0.50.0" +version = "0.50.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd2800e1520bdc966782168a627aa5d1ad92e33b984bf7c7615d31280c83ff14" +checksum = "d4a28e057d01f97e61255210fcff094d74ed0466038633e95017f5beb68e4399" dependencies = [ - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] @@ -1197,9 +1184,9 @@ dependencies = [ [[package]] name = "object" -version = "0.35.0" +version = "0.36.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8ec7ab813848ba4522158d5517a6093db1ded27575b070f4177b8d12b41db5e" +checksum = "27b64972346851a39438c60b341ebc01bba47464ae329e55cf343eb93964efd9" dependencies = [ "memchr", ] @@ -1212,9 +1199,9 @@ checksum = 
"3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" [[package]] name = "oorandom" -version = "11.1.3" +version = "11.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ab1bc2a289d34bd04a330323ac98a1b4bc82c9d9fcb1e66b63caa84da26b575" +checksum = "b410bbe7e14ab526a0e86877eb47c6996a2bd7746f027ba551028c925390e4e9" [[package]] name = "option-ext" @@ -1240,9 +1227,9 @@ checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" dependencies = [ "cfg-if", "libc", - "redox_syscall 0.5.1", + "redox_syscall", "smallvec", - "windows-targets 0.52.5", + "windows-targets 0.52.6", ] [[package]] @@ -1268,6 +1255,7 @@ name = "paths" version = "0.0.0" dependencies = [ "camino", + "serde", ] [[package]] @@ -1319,9 +1307,12 @@ checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" [[package]] name = "ppv-lite86" -version = "0.2.17" +version = "0.2.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" +checksum = "77957b295656769bb8ad2b6a6b09d897d94f05c41b069aede1fcdaa675eaea04" +dependencies = [ + "zerocopy", +] [[package]] name = "proc-macro-api" @@ -1330,14 +1321,12 @@ dependencies = [ "base-db", "indexmap", "intern", - "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "paths", "rustc-hash", "serde", "serde_json", "span", "stdx", - "text-size", "tracing", "tt", ] @@ -1350,7 +1339,6 @@ dependencies = [ "expect-test", "intern", "libloading", - "mbe", "memmap2", "object 0.33.0", "paths", @@ -1360,6 +1348,7 @@ dependencies = [ "snap", "span", "stdx", + "syntax-bridge", "tt", ] @@ -1380,9 +1369,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.85" +version = "1.0.86" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22244ce15aa966053a896d1accb3a6e68469b97c7f33f284b99f0d576879fc23" +checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77" dependencies = [ "unicode-ident", ] @@ -1460,7 +1449,7 @@ version = "0.9.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "57206b407293d2bcd3af849ce869d52068623f19e1b5ff8e8778e3309439682b" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "memchr", "unicase", ] @@ -1485,20 +1474,20 @@ dependencies = [ [[package]] name = "ra-ap-rustc_abi" -version = "0.53.0" +version = "0.63.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "80b1d613eee933486c0613a7bc26e515e46f43adf479d1edd5e537f983e9ce46" +checksum = "b011c39d409940a890414e3a7b239762ac16d88029ad71b050a8374831b93790" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "ra-ap-rustc_index", "tracing", ] [[package]] name = "ra-ap-rustc_index" -version = "0.53.0" +version = "0.63.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f072060ac77e9e1a02cc20028095993af7e72cc0804779c68bcbf47b16de49c9" +checksum = "9027acdee649b0b27eb10b7db5be833efee3362d394935c5eed8f0745a9d43ce" dependencies = [ "arrayvec", "ra-ap-rustc_index_macros", @@ -1507,21 +1496,20 @@ dependencies = [ [[package]] name = "ra-ap-rustc_index_macros" -version = "0.53.0" +version = "0.63.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "82f3d6dcb30a66905388e14756b8f2216131d9f8004922c07f13335840e058d1" +checksum = "540b86dc0384141ac8e825fc2874cd44bffd4277d99d8ec63ee416f1a98d5997" dependencies = [ "proc-macro2", "quote", "syn", - "synstructure", ] [[package]] name = 
"ra-ap-rustc_lexer" -version = "0.53.0" +version = "0.63.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dbd8a2b0bdcba9892cbce0b25f6c953d31b0febc1f3420fc692884fce5a23ad8" +checksum = "3bdf98bb457b47b9ae4aeebf867d0ca440c86925e0b6381658c4a02589748c9d" dependencies = [ "unicode-properties", "unicode-xid", @@ -1529,9 +1517,9 @@ dependencies = [ [[package]] name = "ra-ap-rustc_parse_format" -version = "0.53.0" +version = "0.63.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70dad7a491c2554590222e0c9212dcb7c2e7aceb668875075012a35ea780d135" +checksum = "e8fe3556ab6311bb775220563a300e2bf62ec56404521fe0c511a583937683d5" dependencies = [ "ra-ap-rustc_index", "ra-ap-rustc_lexer", @@ -1539,9 +1527,9 @@ dependencies = [ [[package]] name = "ra-ap-rustc_pattern_analysis" -version = "0.53.0" +version = "0.63.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34768e1faf88c31f2e9ad57b48318a52b507dafac0cddbf01b5d63bfc0b0a365" +checksum = "1709080fdeb5db630e1c2644026c2962aaa32416cd92f0190c04b0c21e114b91" dependencies = [ "ra-ap-rustc_index", "rustc-hash", @@ -1602,20 +1590,11 @@ dependencies = [ [[package]] name = "redox_syscall" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa" -dependencies = [ - "bitflags 1.3.2", -] - -[[package]] -name = "redox_syscall" -version = "0.5.1" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "469052894dcb553421e483e4209ee581a45100d31b4018de03e5a7ad86374a7e" +checksum = "2a908a6e00f1fdd0dfd9c0eb08ce85126f6d8bbda50017e74bc4a4b7d4a926a4" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", ] [[package]] @@ -1648,12 +1627,12 @@ version = "0.0.0" dependencies = [ "always-assert", "anyhow", + "cargo_metadata", "cfg", "crossbeam-channel", "dirs", "dissimilar", "expect-test", - "flycheck", "hir", "hir-def", "hir-ty", @@ -1665,7 +1644,6 @@ dependencies = [ "load-cargo", "lsp-server 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)", "lsp-types", - "mbe", "memchr", "mimalloc", "nohash-hasher", @@ -1675,6 +1653,7 @@ dependencies = [ "parser", "paths", "proc-macro-api", + "process-wrap", "profile", "project-model", "rayon", @@ -1685,6 +1664,7 @@ dependencies = [ "serde_json", "stdx", "syntax", + "syntax-bridge", "test-fixture", "test-utils", "tikv-jemallocator", @@ -1716,9 +1696,9 @@ checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" [[package]] name = "rustc_apfloat" -version = "0.2.0+llvm-462a31f5a5ab" +version = "0.2.1+llvm-462a31f5a5ab" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "465187772033a5ee566f69fe008df03628fce549a0899aae76f0a0c2e34696be" +checksum = "886d94c63c812a8037c4faca2607453a0fa4cf82f734665266876b022244543f" dependencies = [ "bitflags 1.3.2", "smallvec", @@ -1801,18 +1781,18 @@ dependencies = [ [[package]] name = "serde" -version = "1.0.203" +version = "1.0.206" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7253ab4de971e72fb7be983802300c30b5a7f0c2e56fab8abfc6a214307c0094" +checksum = "5b3e4cd94123dd520a128bcd11e34d9e9e423e7e3e50425cb1b4b1e3549d0284" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.203" +version = "1.0.206" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "500cbc0ebeb6f46627f50f3f5811ccf6bf00643be300b4c3eabc0ef55dc5b5ba" +checksum = 
"fabfb6138d2383ea8208cf98ccf69cdfb1aff4088460681d84189aa259762f97" dependencies = [ "proc-macro2", "quote", @@ -1821,12 +1801,13 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.117" +version = "1.0.124" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "455182ea6142b14f93f4bc5320a2b31c1f266b66a4a5c858b013302a5d8cbfc3" +checksum = "66ad62847a56b3dba58cc891acd13884b9c61138d330c0d7b6181713d4fce38d" dependencies = [ "indexmap", "itoa", + "memchr", "ryu", "serde", ] @@ -1844,9 +1825,9 @@ dependencies = [ [[package]] name = "serde_spanned" -version = "0.6.6" +version = "0.6.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79e674e01f999af37c49f70a6ede167a8a60b2503e56c5599532a65baa5969a0" +checksum = "eb5b1b31579f3811bf615c144393417496f152e12ac8b7663bf664f4a815306d" dependencies = [ "serde", ] @@ -1923,9 +1904,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.66" +version = "2.0.74" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c42f3f41a2de00b01c0aaad383c5a45241efc8b2d1eda5661812fda5f3cdcff5" +checksum = "1fceb41e3d546d0bd83421d3409b1460cc7444cd389341a4c880fe7a042cb3d7" dependencies = [ "proc-macro2", "quote", @@ -1967,6 +1948,21 @@ dependencies = [ "triomphe", ] +[[package]] +name = "syntax-bridge" +version = "0.0.0" +dependencies = [ + "intern", + "parser", + "rustc-hash", + "span", + "stdx", + "syntax", + "test-utils", + "tracing", + "tt", +] + [[package]] name = "test-fixture" version = "0.0.0" @@ -1987,6 +1983,7 @@ name = "test-utils" version = "0.0.0" dependencies = [ "dissimilar", + "paths", "profile", "rustc-hash", "stdx", @@ -2010,18 +2007,18 @@ checksum = "f18aa187839b2bdb1ad2fa35ead8c4c2976b64e4363c386d45ac0f7ee85c9233" [[package]] name = "thiserror" -version = "1.0.61" +version = "1.0.63" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c546c80d6be4bc6a00c0f01730c08df82eaa7a7a61f11d656526506112cc1709" +checksum = "c0342370b38b6a11b6cc11d6a805569958d54cfa061a29969c3b5ce2ea405724" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.61" +version = "1.0.63" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46c3384250002a6d5af4d114f2845d37b57521033f30d5c3f46c4d70e1197533" +checksum = "a4558b58466b9ad7ca0f102865eccc95938dca1a74a856f2b57b6629050da261" dependencies = [ "proc-macro2", "quote", @@ -2090,9 +2087,9 @@ checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" [[package]] name = "tinyvec" -version = "1.6.0" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50" +checksum = "445e881f4f6d382d5f27c034e25eb92edd7c784ceab92a0937db7f2e9471b938" dependencies = [ "tinyvec_macros", ] @@ -2105,9 +2102,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "toml" -version = "0.8.14" +version = "0.8.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f49eb2ab21d2f26bd6db7bf383edc527a7ebaee412d17af4d40fdccd442f335" +checksum = "a1ed1f98e3fdc28d6d910e6737ae6ab1a93bf1985935a1193e68f93eeb68d24e" dependencies = [ "serde", "serde_spanned", @@ -2117,18 +2114,18 @@ dependencies = [ [[package]] name = "toml_datetime" -version = "0.6.6" +version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"4badfd56924ae69bcc9039335b2e017639ce3f9b001c393c1b2d1ef846ce2cbf" +checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41" dependencies = [ "serde", ] [[package]] name = "toml_edit" -version = "0.22.14" +version = "0.22.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f21c7aaf97f1bd9ca9d4f9e73b0a6c74bd5afef56f2bc931943a6e1c37e04e38" +checksum = "583c44c02ad26b0c3f3066fe629275e50627026c51ac2e595cca4c230ce1ce1d" dependencies = [ "indexmap", "serde", @@ -2214,9 +2211,9 @@ dependencies = [ [[package]] name = "triomphe" -version = "0.1.12" +version = "0.1.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b2cb4fbb9995eeb36ac86fadf24031ccd58f99d6b4b2d7b911db70bddb80d90" +checksum = "e6631e42e10b40c0690bf92f404ebcfe6e1fdb480391d15f17cc8e96eeed5369" dependencies = [ "serde", "stable_deref_trait", @@ -2289,9 +2286,9 @@ checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" [[package]] name = "url" -version = "2.5.0" +version = "2.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31e6302e3bb753d46e83516cae55ae196fc0c309407cf11ab35cc51a4c2a4633" +checksum = "22784dbdf76fdde8af1aeda5622b546b422b6fc585325248a2bf9f5e41e94d6c" dependencies = [ "form_urlencoded", "idna", @@ -2307,14 +2304,15 @@ checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" [[package]] name = "version_check" -version = "0.9.4" +version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" +checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" [[package]] name = "vfs" version = "0.0.0" dependencies = [ + "crossbeam-channel", "fst", "indexmap", "nohash-hasher", @@ -2331,6 +2329,8 @@ dependencies = [ "crossbeam-channel", "notify", "paths", + "rayon", + "rustc-hash", "stdx", "tracing", "vfs", @@ -2355,11 +2355,11 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "winapi-util" -version = "0.1.8" +version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d4cc384e1e73b93bafa6fb4f1df8c41695c8a91cf9c4c64358067d15a7b6c6b" +checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" dependencies = [ - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -2369,7 +2369,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1de69df01bdf1ead2f4ac895dc77c9351aefff65b2f3db429a343f9cbf05e132" dependencies = [ "windows-core", - "windows-targets 0.52.5", + "windows-targets 0.52.6", ] [[package]] @@ -2381,7 +2381,7 @@ dependencies = [ "windows-implement", "windows-interface", "windows-result", - "windows-targets 0.52.5", + "windows-targets 0.52.6", ] [[package]] @@ -2408,11 +2408,11 @@ dependencies = [ [[package]] name = "windows-result" -version = "0.1.1" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "749f0da9cc72d82e600d8d2e44cadd0b9eedb9038f71a1c58556ac1c5791813b" +checksum = "5e383302e8ec8515204254685643de10811af0ed97ea37210dc26fb0032647f8" dependencies = [ - "windows-targets 0.52.5", + "windows-targets 0.52.6", ] [[package]] @@ -2430,7 +2430,16 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" dependencies = [ - "windows-targets 0.52.5", + "windows-targets 
0.52.6", +] + +[[package]] +name = "windows-sys" +version = "0.59.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +dependencies = [ + "windows-targets 0.52.6", ] [[package]] @@ -2450,18 +2459,18 @@ dependencies = [ [[package]] name = "windows-targets" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f0713a46559409d202e70e28227288446bf7841d3211583a4b53e3f6d96e7eb" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" dependencies = [ - "windows_aarch64_gnullvm 0.52.5", - "windows_aarch64_msvc 0.52.5", - "windows_i686_gnu 0.52.5", + "windows_aarch64_gnullvm 0.52.6", + "windows_aarch64_msvc 0.52.6", + "windows_i686_gnu 0.52.6", "windows_i686_gnullvm", - "windows_i686_msvc 0.52.5", - "windows_x86_64_gnu 0.52.5", - "windows_x86_64_gnullvm 0.52.5", - "windows_x86_64_msvc 0.52.5", + "windows_i686_msvc 0.52.6", + "windows_x86_64_gnu 0.52.6", + "windows_x86_64_gnullvm 0.52.6", + "windows_x86_64_msvc 0.52.6", ] [[package]] @@ -2472,9 +2481,9 @@ checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" [[package]] name = "windows_aarch64_gnullvm" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7088eed71e8b8dda258ecc8bac5fb1153c5cffaf2578fc8ff5d61e23578d3263" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" [[package]] name = "windows_aarch64_msvc" @@ -2484,9 +2493,9 @@ checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" [[package]] name = "windows_aarch64_msvc" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9985fd1504e250c615ca5f281c3f7a6da76213ebd5ccc9561496568a2752afb6" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" [[package]] name = "windows_i686_gnu" @@ -2496,15 +2505,15 @@ checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" [[package]] name = "windows_i686_gnu" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88ba073cf16d5372720ec942a8ccbf61626074c6d4dd2e745299726ce8b89670" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" [[package]] name = "windows_i686_gnullvm" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87f4261229030a858f36b459e748ae97545d6f1ec60e5e0d6a3d32e0dc232ee9" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" [[package]] name = "windows_i686_msvc" @@ -2514,9 +2523,9 @@ checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" [[package]] name = "windows_i686_msvc" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db3c2bf3d13d5b658be73463284eaf12830ac9a26a90c717b7f771dfe97487bf" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" [[package]] name = "windows_x86_64_gnu" @@ -2526,9 +2535,9 @@ checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" [[package]] name = "windows_x86_64_gnu" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4e4246f76bdeff09eb48875a0fd3e2af6aada79d409d33011886d3e1581517d9" +checksum = 
"147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" [[package]] name = "windows_x86_64_gnullvm" @@ -2538,9 +2547,9 @@ checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" [[package]] name = "windows_x86_64_gnullvm" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "852298e482cd67c356ddd9570386e2862b5673c85bd5f88df9ab6802b334c596" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" [[package]] name = "windows_x86_64_msvc" @@ -2550,15 +2559,15 @@ checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" [[package]] name = "windows_x86_64_msvc" -version = "0.52.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bec47e5bfd1bff0eeaf6d8b485cc1074891a197ab4225d504cb7a1ab88b02bf0" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "winnow" -version = "0.6.11" +version = "0.6.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56c52728401e1dc672a56e81e593e912aa54c78f40246869f78359a2bf24d29d" +checksum = "68a9bda4691f099d435ad181000724da8e5899daa10713c2d432552b9ccd3a6f" dependencies = [ "memchr", ] @@ -2618,6 +2627,27 @@ dependencies = [ "zip", ] +[[package]] +name = "zerocopy" +version = "0.7.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" +dependencies = [ + "byteorder", + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.7.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "zip" version = "0.6.6" diff --git a/src/tools/rust-analyzer/Cargo.toml b/src/tools/rust-analyzer/Cargo.toml index c2f601a91bc7a..56e80e11e774f 100644 --- a/src/tools/rust-analyzer/Cargo.toml +++ b/src/tools/rust-analyzer/Cargo.toml @@ -4,10 +4,11 @@ exclude = ["crates/proc-macro-srv/proc-macro-test/imp"] resolver = "2" [workspace.package] -rust-version = "1.78" +rust-version = "1.80" edition = "2021" license = "MIT OR Apache-2.0" authors = ["rust-analyzer team"] +repository = "https://github.com/rust-lang/rust-analyzer" [profile.dev] debug = 1 @@ -51,7 +52,6 @@ debug = 2 # local crates base-db = { path = "./crates/base-db", version = "0.0.0" } cfg = { path = "./crates/cfg", version = "0.0.0", features = ["tt"] } -flycheck = { path = "./crates/flycheck", version = "0.0.0" } hir = { path = "./crates/hir", version = "0.0.0" } hir-def = { path = "./crates/hir-def", version = "0.0.0" } hir-expand = { path = "./crates/hir-expand", version = "0.0.0" } @@ -77,17 +77,18 @@ salsa = { path = "./crates/salsa", version = "0.0.0" } span = { path = "./crates/span", version = "0.0.0" } stdx = { path = "./crates/stdx", version = "0.0.0" } syntax = { path = "./crates/syntax", version = "0.0.0" } +syntax-bridge = { path = "./crates/syntax-bridge", version = "0.0.0" } text-edit = { path = "./crates/text-edit", version = "0.0.0" } toolchain = { path = "./crates/toolchain", version = "0.0.0" } tt = { path = "./crates/tt", version = "0.0.0" } vfs-notify = { path = "./crates/vfs-notify", version = "0.0.0" } vfs = { path = "./crates/vfs", version = "0.0.0" } -ra-ap-rustc_lexer = { version = "0.53.0", default-features = false } -ra-ap-rustc_parse_format = { version = "0.53.0", 
default-features = false } -ra-ap-rustc_index = { version = "0.53.0", default-features = false } -ra-ap-rustc_abi = { version = "0.53.0", default-features = false } -ra-ap-rustc_pattern_analysis = { version = "0.53.0", default-features = false } +ra-ap-rustc_lexer = { version = "0.63.0", default-features = false } +ra-ap-rustc_parse_format = { version = "0.63.0", default-features = false } +ra-ap-rustc_index = { version = "0.63.0", default-features = false } +ra-ap-rustc_abi = { version = "0.63.0", default-features = false } +ra-ap-rustc_pattern_analysis = { version = "0.63.0", default-features = false } # local crates that aren't published to crates.io. These should not have versions. test-fixture = { path = "./crates/test-fixture" } @@ -124,11 +125,11 @@ memmap2 = "0.5.4" nohash-hasher = "0.2.0" oorandom = "11.1.3" object = { version = "0.33.0", default-features = false, features = [ - "std", - "read_core", - "elf", - "macho", - "pe", + "std", + "read_core", + "elf", + "macho", + "pe", ] } process-wrap = { version = "8.0.2", features = ["std"] } pulldown-cmark-to-cmark = "10.0.4" @@ -158,7 +159,6 @@ url = "2.3.1" xshell = "0.2.5" - # We need to freeze the version of the crate, as the raw-api feature is considered unstable dashmap = { version = "=5.5.3", features = ["raw-api"] } diff --git a/src/tools/rust-analyzer/crates/base-db/Cargo.toml b/src/tools/rust-analyzer/crates/base-db/Cargo.toml index 1b1ee034cac2f..b17b08a720c99 100644 --- a/src/tools/rust-analyzer/crates/base-db/Cargo.toml +++ b/src/tools/rust-analyzer/crates/base-db/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "base-db" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "Basic database traits for rust-analyzer. The concrete DB is defined by `ide` (aka `ra_ap_ide`)." authors.workspace = true edition.workspace = true diff --git a/src/tools/rust-analyzer/crates/base-db/src/change.rs b/src/tools/rust-analyzer/crates/base-db/src/change.rs index 0fd54e1211cd9..a9d91d64ceb6d 100644 --- a/src/tools/rust-analyzer/crates/base-db/src/change.rs +++ b/src/tools/rust-analyzer/crates/base-db/src/change.rs @@ -7,7 +7,7 @@ use salsa::Durability; use triomphe::Arc; use vfs::FileId; -use crate::{CrateGraph, SourceDatabaseExt, SourceDatabaseExt2, SourceRoot, SourceRootId}; +use crate::{CrateGraph, SourceDatabaseFileInputExt, SourceRoot, SourceRootDatabase, SourceRootId}; /// Encapsulate a bunch of raw `.set` calls on the database. 
#[derive(Default)] @@ -50,7 +50,7 @@ impl FileChange { self.crate_graph = Some(graph); } - pub fn apply(self, db: &mut dyn SourceDatabaseExt) { + pub fn apply(self, db: &mut dyn SourceRootDatabase) { let _p = tracing::info_span!("FileChange::apply").entered(); if let Some(roots) = self.roots { for (idx, root) in roots.into_iter().enumerate() { diff --git a/src/tools/rust-analyzer/crates/base-db/src/input.rs b/src/tools/rust-analyzer/crates/base-db/src/input.rs index 460581f4a6c06..3616fa9fd8684 100644 --- a/src/tools/rust-analyzer/crates/base-db/src/input.rs +++ b/src/tools/rust-analyzer/crates/base-db/src/input.rs @@ -690,6 +690,14 @@ impl Env { pub fn extend_from_other(&mut self, other: &Env) { self.entries.extend(other.entries.iter().map(|(x, y)| (x.to_owned(), y.to_owned()))); } + + pub fn is_empty(&self) -> bool { + self.entries.is_empty() + } + + pub fn insert(&mut self, k: impl Into, v: impl Into) -> Option { + self.entries.insert(k.into(), v.into()) + } } impl From for Vec<(String, String)> { @@ -700,6 +708,15 @@ impl From for Vec<(String, String)> { } } +impl<'a> IntoIterator for &'a Env { + type Item = (&'a String, &'a String); + type IntoIter = std::collections::hash_map::Iter<'a, String, String>; + + fn into_iter(self) -> Self::IntoIter { + self.entries.iter() + } +} + #[derive(Debug)] pub struct CyclicDependenciesError { path: Vec<(CrateId, Option)>, diff --git a/src/tools/rust-analyzer/crates/base-db/src/lib.rs b/src/tools/rust-analyzer/crates/base-db/src/lib.rs index f319f98537b60..20ef45d0b3729 100644 --- a/src/tools/rust-analyzer/crates/base-db/src/lib.rs +++ b/src/tools/rust-analyzer/crates/base-db/src/lib.rs @@ -1,5 +1,5 @@ //! base_db defines basic database traits. The concrete DB is defined by ide. - +// FIXME: Rename this crate, base db is non descriptive mod change; mod input; @@ -47,8 +47,6 @@ pub const DEFAULT_PARSE_LRU_CAP: u16 = 128; pub const DEFAULT_BORROWCK_LRU_CAP: u16 = 2024; pub trait FileLoader { - /// Text of the file. - fn file_text(&self, file_id: FileId) -> Arc; fn resolve_path(&self, path: AnchoredPath<'_>) -> Option; /// Crates whose root's source root is the same as the source root of `file_id` fn relevant_crates(&self, file_id: FileId) -> Arc<[CrateId]>; @@ -58,6 +56,13 @@ pub trait FileLoader { /// model. Everything else in rust-analyzer is derived from these queries. #[salsa::query_group(SourceDatabaseStorage)] pub trait SourceDatabase: FileLoader + std::fmt::Debug { + #[salsa::input] + fn compressed_file_text(&self, file_id: FileId) -> Arc<[u8]>; + + /// Text of the file. + #[salsa::lru] + fn file_text(&self, file_id: FileId) -> Arc; + /// Parses the file into the syntax tree. #[salsa::lru] fn parse(&self, file_id: EditionedFileId) -> Parse; @@ -99,16 +104,18 @@ fn parse_errors(db: &dyn SourceDatabase, file_id: EditionedFileId) -> Option Arc { + let bytes = db.compressed_file_text(file_id); + let bytes = + lz4_flex::decompress_size_prepended(&bytes).expect("lz4 decompression should not fail"); + let text = std::str::from_utf8(&bytes).expect("file contents should be valid UTF-8"); + Arc::from(text) +} + /// We don't want to give HIR knowledge of source roots, hence we extract these /// methods into a separate DB. 
-#[salsa::query_group(SourceDatabaseExtStorage)] -pub trait SourceDatabaseExt: SourceDatabase { - #[salsa::input] - fn compressed_file_text(&self, file_id: FileId) -> Arc<[u8]>; - - #[salsa::lru] - fn file_text(&self, file_id: FileId) -> Arc; - +#[salsa::query_group(SourceRootDatabaseStorage)] +pub trait SourceRootDatabase: SourceDatabase { /// Path to a file, relative to the root of its source root. /// Source root of the file. #[salsa::input] @@ -121,15 +128,7 @@ pub trait SourceDatabaseExt: SourceDatabase { fn source_root_crates(&self, id: SourceRootId) -> Arc<[CrateId]>; } -fn file_text(db: &dyn SourceDatabaseExt, file_id: FileId) -> Arc { - let bytes = db.compressed_file_text(file_id); - let bytes = - lz4_flex::decompress_size_prepended(&bytes).expect("lz4 decompression should not fail"); - let text = std::str::from_utf8(&bytes).expect("file contents should be valid UTF-8"); - Arc::from(text) -} - -pub trait SourceDatabaseExt2 { +pub trait SourceDatabaseFileInputExt { fn set_file_text(&mut self, file_id: FileId, text: &str) { self.set_file_text_with_durability(file_id, text, Durability::LOW); } @@ -142,7 +141,7 @@ pub trait SourceDatabaseExt2 { ); } -impl SourceDatabaseExt2 for Db { +impl SourceDatabaseFileInputExt for Db { fn set_file_text_with_durability( &mut self, file_id: FileId, @@ -159,7 +158,7 @@ impl SourceDatabaseExt2 for Db { } } -fn source_root_crates(db: &dyn SourceDatabaseExt, id: SourceRootId) -> Arc<[CrateId]> { +fn source_root_crates(db: &dyn SourceRootDatabase, id: SourceRootId) -> Arc<[CrateId]> { let graph = db.crate_graph(); let mut crates = graph .iter() @@ -173,13 +172,12 @@ fn source_root_crates(db: &dyn SourceDatabaseExt, id: SourceRootId) -> Arc<[Crat crates.into_iter().collect() } -/// Silly workaround for cyclic deps between the traits +// FIXME: Would be nice to get rid of this somehow +/// Silly workaround for cyclic deps due to the SourceRootDatabase and SourceDatabase split +/// regarding FileLoader pub struct FileLoaderDelegate(pub T); -impl FileLoader for FileLoaderDelegate<&'_ T> { - fn file_text(&self, file_id: FileId) -> Arc { - SourceDatabaseExt::file_text(self.0, file_id) - } +impl FileLoader for FileLoaderDelegate<&'_ T> { fn resolve_path(&self, path: AnchoredPath<'_>) -> Option { // FIXME: this *somehow* should be platform agnostic... let source_root = self.0.file_source_root(path.anchor); diff --git a/src/tools/rust-analyzer/crates/cfg/Cargo.toml b/src/tools/rust-analyzer/crates/cfg/Cargo.toml index faf93f62c6afb..29b7ad6f8fe2a 100644 --- a/src/tools/rust-analyzer/crates/cfg/Cargo.toml +++ b/src/tools/rust-analyzer/crates/cfg/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "cfg" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "Conditional compiling options, `cfg` attribute parser and evaluator for rust-analyzer." 
authors.workspace = true edition.workspace = true @@ -28,7 +29,7 @@ arbitrary = "1.3.2" derive_arbitrary = "1.3.2" # local deps -mbe.workspace = true +syntax-bridge.workspace = true syntax.workspace = true [lints] diff --git a/src/tools/rust-analyzer/crates/cfg/src/lib.rs b/src/tools/rust-analyzer/crates/cfg/src/lib.rs index 6d46dfb99949b..e9daaf7de3c3b 100644 --- a/src/tools/rust-analyzer/crates/cfg/src/lib.rs +++ b/src/tools/rust-analyzer/crates/cfg/src/lib.rs @@ -108,6 +108,14 @@ impl<'a> IntoIterator for &'a CfgOptions { } } +impl FromIterator for CfgOptions { + fn from_iter>(iter: T) -> Self { + let mut options = CfgOptions::default(); + options.extend(iter); + options + } +} + #[derive(Default, Clone, Debug, PartialEq, Eq)] pub struct CfgDiff { // Invariants: No duplicates, no atom that's both in `enable` and `disable`. diff --git a/src/tools/rust-analyzer/crates/cfg/src/tests.rs b/src/tools/rust-analyzer/crates/cfg/src/tests.rs index 597023a792bb7..6d87d83ad9300 100644 --- a/src/tools/rust-analyzer/crates/cfg/src/tests.rs +++ b/src/tools/rust-analyzer/crates/cfg/src/tests.rs @@ -1,8 +1,11 @@ use arbitrary::{Arbitrary, Unstructured}; use expect_test::{expect, Expect}; use intern::Symbol; -use mbe::{syntax_node_to_token_tree, DocCommentDesugarMode, DummyTestSpanMap, DUMMY}; use syntax::{ast, AstNode, Edition}; +use syntax_bridge::{ + dummy_test_span_utils::{DummyTestSpanMap, DUMMY}, + syntax_node_to_token_tree, DocCommentDesugarMode, +}; use crate::{CfgAtom, CfgExpr, CfgOptions, DnfExpr}; diff --git a/src/tools/rust-analyzer/crates/flycheck/Cargo.toml b/src/tools/rust-analyzer/crates/flycheck/Cargo.toml deleted file mode 100644 index bb3a94c8da678..0000000000000 --- a/src/tools/rust-analyzer/crates/flycheck/Cargo.toml +++ /dev/null @@ -1,30 +0,0 @@ -[package] -name = "flycheck" -version = "0.0.0" -description = "TBD" - -authors.workspace = true -edition.workspace = true -license.workspace = true -rust-version.workspace = true - -[lib] -doctest = false - -[dependencies] -cargo_metadata.workspace = true -crossbeam-channel.workspace = true -tracing.workspace = true -rustc-hash.workspace = true -serde_json.workspace = true -serde.workspace = true -process-wrap.workspace = true - -# local deps -paths.workspace = true -stdx.workspace = true -toolchain.workspace = true -project-model.workspace = true - -[lints] -workspace = true \ No newline at end of file diff --git a/src/tools/rust-analyzer/crates/hir-def/Cargo.toml b/src/tools/rust-analyzer/crates/hir-def/Cargo.toml index 8ac2d00313778..5b9d227e3152c 100644 --- a/src/tools/rust-analyzer/crates/hir-def/Cargo.toml +++ b/src/tools/rust-analyzer/crates/hir-def/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "hir-def" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "RPC Api for the `proc-macro-srv` crate of rust-analyzer." 
authors.workspace = true edition.workspace = true @@ -52,7 +53,7 @@ expect-test.workspace = true # local deps test-utils.workspace = true test-fixture.workspace = true - +syntax-bridge.workspace = true [features] in-rust-tree = ["hir-expand/in-rust-tree"] diff --git a/src/tools/rust-analyzer/crates/hir-def/src/attr.rs b/src/tools/rust-analyzer/crates/hir-def/src/attr.rs index ba88495e14d30..198dc93f6b141 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/attr.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/attr.rs @@ -657,9 +657,9 @@ mod tests { use triomphe::Arc; use hir_expand::span_map::{RealSpanMap, SpanMap}; - use mbe::{syntax_node_to_token_tree, DocCommentDesugarMode}; use span::FileId; use syntax::{ast, AstNode, TextRange}; + use syntax_bridge::{syntax_node_to_token_tree, DocCommentDesugarMode}; use crate::attr::{DocAtom, DocExpr}; diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body.rs b/src/tools/rust-analyzer/crates/hir-def/src/body.rs index d3c134f326604..a988317e046ef 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/body.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/body.rs @@ -118,6 +118,7 @@ pub enum BodyDiagnostic { MacroError { node: InFile>, err: ExpandError }, UnresolvedMacroCall { node: InFile>, path: ModPath }, UnreachableLabel { node: InFile>, name: Name }, + AwaitOutsideOfAsync { node: InFile>, location: String }, UndeclaredLabel { node: InFile>, name: Name }, } @@ -157,7 +158,7 @@ impl Body { }), ) }); - is_async_fn = data.has_async_kw(); + is_async_fn = data.is_async(); src.map(|it| it.body().map(ast::Expr::from)) } DefWithBodyId::ConstId(c) => { diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs index 9e30aff8fe9b5..abf7895829206 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs @@ -72,6 +72,7 @@ pub(super) fn lower( is_lowering_coroutine: false, label_ribs: Vec::new(), current_binding_owner: None, + awaitable_context: None, } .collect(params, body, is_async_fn) } @@ -100,6 +101,8 @@ struct ExprCollector<'a> { // resolution label_ribs: Vec, current_binding_owner: Option, + + awaitable_context: Option, } #[derive(Clone, Debug)] @@ -135,6 +138,11 @@ impl RibKind { } } +enum Awaitable { + Yes, + No(&'static str), +} + #[derive(Debug, Default)] struct BindingList { map: FxHashMap, @@ -180,6 +188,18 @@ impl ExprCollector<'_> { body: Option, is_async_fn: bool, ) -> (Body, BodySourceMap) { + self.awaitable_context.replace(if is_async_fn { + Awaitable::Yes + } else { + match self.owner { + DefWithBodyId::FunctionId(..) => Awaitable::No("non-async function"), + DefWithBodyId::StaticId(..) => Awaitable::No("static"), + DefWithBodyId::ConstId(..) | DefWithBodyId::InTypeConstId(..) => { + Awaitable::No("constant") + } + DefWithBodyId::VariantId(..) 
=> Awaitable::No("enum variant"), + } + }); if let Some((param_list, mut attr_enabled)) = param_list { let mut params = vec![]; if let Some(self_param) = @@ -280,31 +300,40 @@ impl ExprCollector<'_> { } Some(ast::BlockModifier::Async(_)) => { self.with_label_rib(RibKind::Closure, |this| { - this.collect_block_(e, |id, statements, tail| Expr::Async { - id, - statements, - tail, + this.with_awaitable_block(Awaitable::Yes, |this| { + this.collect_block_(e, |id, statements, tail| Expr::Async { + id, + statements, + tail, + }) }) }) } Some(ast::BlockModifier::Const(_)) => { self.with_label_rib(RibKind::Constant, |this| { - let (result_expr_id, prev_binding_owner) = - this.initialize_binding_owner(syntax_ptr); - let inner_expr = this.collect_block(e); - let it = this.db.intern_anonymous_const(ConstBlockLoc { - parent: this.owner, - root: inner_expr, - }); - this.body.exprs[result_expr_id] = Expr::Const(it); - this.current_binding_owner = prev_binding_owner; - result_expr_id + this.with_awaitable_block(Awaitable::No("constant block"), |this| { + let (result_expr_id, prev_binding_owner) = + this.initialize_binding_owner(syntax_ptr); + let inner_expr = this.collect_block(e); + let it = this.db.intern_anonymous_const(ConstBlockLoc { + parent: this.owner, + root: inner_expr, + }); + this.body.exprs[result_expr_id] = Expr::Const(it); + this.current_binding_owner = prev_binding_owner; + result_expr_id + }) }) } // FIXME - Some(ast::BlockModifier::AsyncGen(_)) | Some(ast::BlockModifier::Gen(_)) | None => { - self.collect_block(e) + Some(ast::BlockModifier::AsyncGen(_)) => { + self.with_awaitable_block(Awaitable::Yes, |this| this.collect_block(e)) } + Some(ast::BlockModifier::Gen(_)) => self + .with_awaitable_block(Awaitable::No("non-async gen block"), |this| { + this.collect_block(e) + }), + None => self.collect_block(e), }, ast::Expr::LoopExpr(e) => { let label = e.label().map(|label| self.collect_label(label)); @@ -469,6 +498,12 @@ impl ExprCollector<'_> { } ast::Expr::AwaitExpr(e) => { let expr = self.collect_expr_opt(e.expr()); + if let Awaitable::No(location) = self.is_lowering_awaitable_block() { + self.source_map.diagnostics.push(BodyDiagnostic::AwaitOutsideOfAsync { + node: InFile::new(self.expander.current_file_id(), AstPtr::new(&e)), + location: location.to_string(), + }); + } self.alloc_expr(Expr::Await { expr }, syntax_ptr) } ast::Expr::TryExpr(e) => self.collect_try_operator(syntax_ptr, e), @@ -527,7 +562,13 @@ impl ExprCollector<'_> { let prev_is_lowering_coroutine = mem::take(&mut this.is_lowering_coroutine); let prev_try_block_label = this.current_try_block_label.take(); - let body = this.collect_expr_opt(e.body()); + let awaitable = if e.async_token().is_some() { + Awaitable::Yes + } else { + Awaitable::No("non-async closure") + }; + let body = + this.with_awaitable_block(awaitable, |this| this.collect_expr_opt(e.body())); let closure_kind = if this.is_lowering_coroutine { let movability = if e.static_token().is_some() { @@ -2082,6 +2123,21 @@ impl ExprCollector<'_> { fn alloc_label_desugared(&mut self, label: Label) -> LabelId { self.body.labels.alloc(label) } + + fn is_lowering_awaitable_block(&self) -> &Awaitable { + self.awaitable_context.as_ref().unwrap_or(&Awaitable::No("unknown")) + } + + fn with_awaitable_block( + &mut self, + awaitable: Awaitable, + f: impl FnOnce(&mut Self) -> T, + ) -> T { + let orig = self.awaitable_context.replace(awaitable); + let res = f(self); + self.awaitable_context = orig; + res + } } fn comma_follows_token(t: Option) -> bool { diff --git 
a/src/tools/rust-analyzer/crates/hir-def/src/data.rs b/src/tools/rust-analyzer/crates/hir-def/src/data.rs index c3c2e51fd0385..d17ebd7ff920b 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/data.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/data.rs @@ -94,6 +94,12 @@ impl FunctionData { .filter(|it| !it.is_empty()) .map(Box::new); let rustc_allow_incoherent_impl = attrs.by_key(&sym::rustc_allow_incoherent_impl).exists(); + if flags.contains(FnFlags::HAS_UNSAFE_KW) + && !crate_graph[krate].edition.at_least_2024() + && attrs.by_key(&sym::rustc_deprecated_safe_2024).exists() + { + flags.remove(FnFlags::HAS_UNSAFE_KW); + } Arc::new(FunctionData { name: func.name.clone(), @@ -126,19 +132,19 @@ impl FunctionData { self.flags.contains(FnFlags::HAS_SELF_PARAM) } - pub fn has_default_kw(&self) -> bool { + pub fn is_default(&self) -> bool { self.flags.contains(FnFlags::HAS_DEFAULT_KW) } - pub fn has_const_kw(&self) -> bool { + pub fn is_const(&self) -> bool { self.flags.contains(FnFlags::HAS_CONST_KW) } - pub fn has_async_kw(&self) -> bool { + pub fn is_async(&self) -> bool { self.flags.contains(FnFlags::HAS_ASYNC_KW) } - pub fn has_unsafe_kw(&self) -> bool { + pub fn is_unsafe(&self) -> bool { self.flags.contains(FnFlags::HAS_UNSAFE_KW) } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/db.rs b/src/tools/rust-analyzer/crates/hir-def/src/db.rs index 56feb0163e13a..b1103d35cab16 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/db.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/db.rs @@ -160,7 +160,7 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + Upcast Arc; #[salsa::invoke(StaticData::static_data_query)] - fn static_data(&self, konst: StaticId) -> Arc; + fn static_data(&self, statik: StaticId) -> Arc; #[salsa::invoke(Macro2Data::macro2_data_query)] fn macro2_data(&self, makro: Macro2Id) -> Arc; @@ -240,14 +240,14 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + Upcast bool; - fn include_macro_invoc(&self, crate_id: CrateId) -> Vec<(MacroCallId, EditionedFileId)>; + fn include_macro_invoc(&self, crate_id: CrateId) -> Arc<[(MacroCallId, EditionedFileId)]>; } // return: macro call id and include file id fn include_macro_invoc( db: &dyn DefDatabase, krate: CrateId, -) -> Vec<(MacroCallId, EditionedFileId)> { +) -> Arc<[(MacroCallId, EditionedFileId)]> { db.crate_def_map(krate) .modules .values() diff --git a/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs b/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs index 91594aecd0441..5a3a3e9189790 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs @@ -50,13 +50,13 @@ pub fn find_path( prefix: prefix_kind, cfg, ignore_local_imports, + is_std_item: db.crate_graph()[item_module.krate()].origin.is_lang(), from, from_def_map: &from.def_map(db), fuel: Cell::new(FIND_PATH_FUEL), }, item, MAX_PATH_LEN, - db.crate_graph()[item_module.krate()].origin.is_lang(), ) } @@ -98,20 +98,16 @@ struct FindPathCtx<'db> { prefix: PrefixKind, cfg: ImportPathConfig, ignore_local_imports: bool, + is_std_item: bool, from: ModuleId, from_def_map: &'db DefMap, fuel: Cell, } /// Attempts to find a path to refer to the given `item` visible from the `from` ModuleId -fn find_path_inner( - ctx: &FindPathCtx<'_>, - item: ItemInNs, - max_len: usize, - is_std_item: bool, -) -> Option { +fn find_path_inner(ctx: &FindPathCtx<'_>, item: ItemInNs, max_len: usize) -> Option { // - if the item is a module, jump straight to module search - if !is_std_item { 
+ if !ctx.is_std_item { if let ItemInNs::Types(ModuleDefId::ModuleId(module_id)) = item { return find_path_for_module(ctx, &mut FxHashSet::default(), module_id, true, max_len) .map(|choice| choice.path); @@ -138,12 +134,9 @@ fn find_path_inner( if let Some(ModuleDefId::EnumVariantId(variant)) = item.as_module_def_id() { // - if the item is an enum variant, refer to it via the enum - if let Some(mut path) = find_path_inner( - ctx, - ItemInNs::Types(variant.lookup(ctx.db).parent.into()), - max_len, - is_std_item, - ) { + if let Some(mut path) = + find_path_inner(ctx, ItemInNs::Types(variant.lookup(ctx.db).parent.into()), max_len) + { path.push_segment(ctx.db.enum_variant_data(variant).name.clone()); return Some(path); } @@ -152,16 +145,6 @@ fn find_path_inner( // variant somewhere } - if is_std_item { - // The item we are searching for comes from the sysroot libraries, so skip prefer looking in - // the sysroot libraries directly. - // We do need to fallback as the item in question could be re-exported by another crate - // while not being a transitive dependency of the current crate. - if let Some(choice) = find_in_sysroot(ctx, &mut FxHashSet::default(), item, max_len) { - return Some(choice.path); - } - } - let mut best_choice = None; calculate_best_path(ctx, &mut FxHashSet::default(), item, max_len, &mut best_choice); best_choice.map(|choice| choice.path) @@ -366,6 +349,12 @@ fn calculate_best_path( // Item was defined in the same crate that wants to import it. It cannot be found in any // dependency in this case. calculate_best_path_local(ctx, visited_modules, item, max_len, best_choice) + } else if ctx.is_std_item { + // The item we are searching for comes from the sysroot libraries, so skip prefer looking in + // the sysroot libraries directly. + // We do need to fallback as the item in question could be re-exported by another crate + // while not being a transitive dependency of the current crate. + find_in_sysroot(ctx, visited_modules, item, max_len, best_choice) } else { // Item was defined in some upstream crate. This means that it must be exported from one, // too (unless we can't name it at all). It could *also* be (re)exported by the same crate @@ -382,10 +371,10 @@ fn find_in_sysroot( visited_modules: &mut FxHashSet<(ItemInNs, ModuleId)>, item: ItemInNs, max_len: usize, -) -> Option { + best_choice: &mut Option, +) { let crate_graph = ctx.db.crate_graph(); let dependencies = &crate_graph[ctx.from.krate].dependencies; - let mut best_choice = None; let mut search = |lang, best_choice: &mut _| { if let Some(dep) = dependencies.iter().filter(|it| it.is_sysroot()).find(|dep| { match crate_graph[dep.crate_id].origin { @@ -397,29 +386,31 @@ fn find_in_sysroot( } }; if ctx.cfg.prefer_no_std { - search(LangCrateOrigin::Core, &mut best_choice); + search(LangCrateOrigin::Core, best_choice); if matches!(best_choice, Some(Choice { stability: Stable, .. })) { - return best_choice; + return; } - search(LangCrateOrigin::Std, &mut best_choice); + search(LangCrateOrigin::Std, best_choice); if matches!(best_choice, Some(Choice { stability: Stable, .. })) { - return best_choice; + return; } } else { - search(LangCrateOrigin::Std, &mut best_choice); + search(LangCrateOrigin::Std, best_choice); if matches!(best_choice, Some(Choice { stability: Stable, .. })) { - return best_choice; + return; } - search(LangCrateOrigin::Core, &mut best_choice); + search(LangCrateOrigin::Core, best_choice); if matches!(best_choice, Some(Choice { stability: Stable, .. 
})) { - return best_choice; + return; } } - let mut best_choice = None; - dependencies.iter().filter(|it| it.is_sysroot()).for_each(|dep| { - find_in_dep(ctx, visited_modules, item, max_len, &mut best_choice, dep.crate_id); - }); - best_choice + dependencies + .iter() + .filter(|it| it.is_sysroot()) + .chain(dependencies.iter().filter(|it| !it.is_sysroot())) + .for_each(|dep| { + find_in_dep(ctx, visited_modules, item, max_len, best_choice, dep.crate_id); + }); } fn find_in_dep( @@ -491,6 +482,7 @@ fn calculate_best_path_local( ); } +#[derive(Debug)] struct Choice { path: ModPath, /// The length in characters of the path @@ -676,6 +668,7 @@ mod tests { path: &str, prefer_prelude: bool, prefer_absolute: bool, + prefer_no_std: bool, expect: Expect, ) { let (db, pos) = TestDB::with_position(ra_fixture); @@ -717,7 +710,7 @@ mod tests { module, prefix, ignore_local_imports, - ImportPathConfig { prefer_no_std: false, prefer_prelude, prefer_absolute }, + ImportPathConfig { prefer_no_std, prefer_prelude, prefer_absolute }, ); format_to!( res, @@ -732,15 +725,19 @@ mod tests { } fn check_found_path(ra_fixture: &str, path: &str, expect: Expect) { - check_found_path_(ra_fixture, path, false, false, expect); + check_found_path_(ra_fixture, path, false, false, false, expect); } fn check_found_path_prelude(ra_fixture: &str, path: &str, expect: Expect) { - check_found_path_(ra_fixture, path, true, false, expect); + check_found_path_(ra_fixture, path, true, false, false, expect); } fn check_found_path_absolute(ra_fixture: &str, path: &str, expect: Expect) { - check_found_path_(ra_fixture, path, false, true, expect); + check_found_path_(ra_fixture, path, false, true, false, expect); + } + + fn check_found_path_prefer_no_std(ra_fixture: &str, path: &str, expect: Expect) { + check_found_path_(ra_fixture, path, false, false, true, expect); } #[test] @@ -1361,9 +1358,66 @@ pub mod sync { "#]], ); } + #[test] + fn prefer_core_paths_over_std_for_mod_reexport() { + check_found_path_prefer_no_std( + r#" +//- /main.rs crate:main deps:core,std + +$0 + +//- /stdlib.rs crate:std deps:core + +pub use core::pin; + +//- /corelib.rs crate:core + +pub mod pin { + pub struct Pin; +} + "#, + "std::pin::Pin", + expect![[r#" + Plain (imports ✔): core::pin::Pin + Plain (imports ✖): core::pin::Pin + ByCrate(imports ✔): core::pin::Pin + ByCrate(imports ✖): core::pin::Pin + BySelf (imports ✔): core::pin::Pin + BySelf (imports ✖): core::pin::Pin + "#]], + ); + } #[test] fn prefer_core_paths_over_std() { + check_found_path_prefer_no_std( + r#" +//- /main.rs crate:main deps:core,std + +$0 + +//- /std.rs crate:std deps:core + +pub mod fmt { + pub use core::fmt::Error; +} + +//- /zzz.rs crate:core + +pub mod fmt { + pub struct Error; +} + "#, + "core::fmt::Error", + expect![[r#" + Plain (imports ✔): core::fmt::Error + Plain (imports ✖): core::fmt::Error + ByCrate(imports ✔): core::fmt::Error + ByCrate(imports ✖): core::fmt::Error + BySelf (imports ✔): core::fmt::Error + BySelf (imports ✖): core::fmt::Error + "#]], + ); check_found_path( r#" //- /main.rs crate:main deps:core,std @@ -1878,10 +1932,9 @@ pub mod ops { #[test] fn respect_unstable_modules() { - check_found_path( + check_found_path_prefer_no_std( r#" //- /main.rs crate:main deps:std,core -#![no_std] extern crate std; $0 //- /longer.rs crate:std deps:core diff --git a/src/tools/rust-analyzer/crates/hir-def/src/lib.rs b/src/tools/rust-analyzer/crates/hir-def/src/lib.rs index 66412b26a0085..4ced30c81dced 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/lib.rs +++ 
b/src/tools/rust-analyzer/crates/hir-def/src/lib.rs @@ -105,7 +105,7 @@ use crate::{ type FxIndexMap = indexmap::IndexMap>; -/// A wrapper around two booleans, [`ImportPathConfig::prefer_no_std`] and [`ImportPathConfig::prefer_prelude`]. +/// A wrapper around three booleans #[derive(Debug, Clone, PartialEq, Eq, Hash, Copy)] pub struct ImportPathConfig { /// If true, prefer to unconditionally use imports of the `core` and `alloc` crate diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs index 64b37d2d06508..fc1460870ceaf 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs @@ -1201,7 +1201,6 @@ macro_rules! m { #[test] fn test_meta_doc_comments() { - cov_mark::check!(test_meta_doc_comments); check( r#" macro_rules! m { diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs index d34f0afc3ef8f..b430e2cefb35d 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs @@ -317,9 +317,9 @@ impl ProcMacroExpander for IdentityWhenValidProcMacroExpander { _: Span, _: Span, ) -> Result { - let (parse, _) = ::mbe::token_tree_to_syntax_node( + let (parse, _) = syntax_bridge::token_tree_to_syntax_node( subtree, - ::mbe::TopEntryPoint::MacroItems, + syntax_bridge::TopEntryPoint::MacroItems, span::Edition::CURRENT, ); if parse.errors().is_empty() { diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs index 483bffc4b290b..debc5a44326ef 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs @@ -56,7 +56,6 @@ use crate::{ }; static GLOB_RECURSION_LIMIT: Limit = Limit::new(100); -static EXPANSION_DEPTH_LIMIT: Limit = Limit::new(128); static FIXED_POINT_LIMIT: Limit = Limit::new(8192); pub(super) fn collect_defs(db: &dyn DefDatabase, def_map: DefMap, tree_id: TreeId) -> DefMap { @@ -1440,7 +1439,14 @@ impl DefCollector<'_> { depth: usize, container: ItemContainerId, ) { - if EXPANSION_DEPTH_LIMIT.check(depth).is_err() { + let recursion_limit = self.def_map.recursion_limit() as usize; + let recursion_limit = Limit::new(if cfg!(test) { + // Without this, `body::tests::your_stack_belongs_to_me` stack-overflows in debug + std::cmp::min(32, recursion_limit) + } else { + recursion_limit + }); + if recursion_limit.check(depth).is_err() { cov_mark::hit!(macro_expansion_overflow); tracing::warn!("macro expansion is too deep"); return; @@ -2003,7 +2009,7 @@ impl ModCollector<'_, '_> { Err(cfg) => { self.emit_unconfigured_diagnostic( self.tree_id, - AttrOwner::TopLevel, + AttrOwner::ModItem(module_id.into()), &cfg, ); } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs index e82af318501ac..d319831867c0e 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs @@ -1,4 +1,4 @@ -use base_db::{SourceDatabase, SourceDatabaseExt2 as _}; +use base_db::{SourceDatabase, SourceDatabaseFileInputExt as _}; use 
test_fixture::WithFixture; use crate::{db::DefDatabase, nameres::tests::TestDB, AdtId, ModuleDefId}; diff --git a/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs index 7c39773aa6874..70918a9358e8e 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs @@ -194,6 +194,11 @@ pub(super) fn lower_generic_args( match generic_arg { ast::GenericArg::TypeArg(type_arg) => { let type_ref = TypeRef::from_ast_opt(lower_ctx, type_arg.ty()); + type_ref.walk(&mut |tr| { + if let TypeRef::ImplTrait(bounds) = tr { + lower_ctx.update_impl_traits_bounds(bounds.clone()); + } + }); args.push(GenericArg::Type(type_ref)); } ast::GenericArg::AssocTypeArg(assoc_type_arg) => { diff --git a/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs b/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs index f44472eae5b0f..df9dec69d464e 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs @@ -19,7 +19,7 @@ use crate::{ }; #[salsa::database( - base_db::SourceDatabaseExtStorage, + base_db::SourceRootDatabaseStorage, base_db::SourceDatabaseStorage, hir_expand::db::ExpandDatabaseStorage, crate::db::InternDatabaseStorage, @@ -69,9 +69,6 @@ impl fmt::Debug for TestDB { impl panic::RefUnwindSafe for TestDB {} impl FileLoader for TestDB { - fn file_text(&self, file_id: FileId) -> Arc { - FileLoaderDelegate(self).file_text(file_id) - } fn resolve_path(&self, path: AnchoredPath<'_>) -> Option { FileLoaderDelegate(self).resolve_path(path) } diff --git a/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml b/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml index ca05618aecd90..03a9d54d2e652 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml +++ b/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "hir-expand" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "Macro expansion for rust-analyzer." 
authors.workspace = true edition.workspace = true @@ -33,6 +34,7 @@ mbe.workspace = true limit.workspace = true span.workspace = true parser.workspace = true +syntax-bridge.workspace = true [dev-dependencies] expect-test = "1.4.0" diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs b/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs index 777e415418662..79cfeb4cf1844 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs @@ -6,14 +6,12 @@ use cfg::CfgExpr; use either::Either; use intern::{sym, Interned, Symbol}; -use mbe::{ - desugar_doc_comment_text, syntax_node_to_token_tree, DelimiterKind, DocCommentDesugarMode, - Punct, -}; +use mbe::{DelimiterKind, Punct}; use smallvec::{smallvec, SmallVec}; use span::{Span, SyntaxContextId}; use syntax::unescape; use syntax::{ast, match_ast, AstNode, AstToken, SyntaxNode}; +use syntax_bridge::{desugar_doc_comment_text, syntax_node_to_token_tree, DocCommentDesugarMode}; use triomphe::ThinArc; use crate::name::Name; diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/derive_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/derive_macro.rs index f560d3bfd1db3..7d3e8deaf08ee 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/derive_macro.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/derive_macro.rs @@ -2,10 +2,10 @@ use intern::sym; use itertools::izip; -use mbe::DocCommentDesugarMode; use rustc_hash::FxHashSet; use span::{MacroCallId, Span}; use stdx::never; +use syntax_bridge::DocCommentDesugarMode; use tracing::debug; use crate::{ @@ -209,9 +209,9 @@ struct BasicAdtInfo { } fn parse_adt(tt: &tt::Subtree, call_site: Span) -> Result { - let (parsed, tm) = &mbe::token_tree_to_syntax_node( + let (parsed, tm) = &syntax_bridge::token_tree_to_syntax_node( tt, - mbe::TopEntryPoint::MacroItems, + syntax_bridge::TopEntryPoint::MacroItems, parser::Edition::CURRENT_FIXME, ); let macro_items = ast::MacroItems::cast(parsed.syntax_node()) @@ -268,7 +268,7 @@ fn parse_adt(tt: &tt::Subtree, call_site: Span) -> Result { param_type_set.insert(it.as_name()); - mbe::syntax_node_to_token_tree( + syntax_bridge::syntax_node_to_token_tree( it.syntax(), tm, call_site, @@ -282,7 +282,7 @@ fn parse_adt(tt: &tt::Subtree, call_site: Span) -> Result it.type_bound_list().map(|it| { - mbe::syntax_node_to_token_tree( + syntax_bridge::syntax_node_to_token_tree( it.syntax(), tm, call_site, @@ -295,7 +295,7 @@ fn parse_adt(tt: &tt::Subtree, call_site: Span) -> Result Result Result ExpandResult { let call_site_span = span_with_call_site_ctxt(db, span, id); - let args = parse_exprs_with_sep(tt, ',', call_site_span, Edition::CURRENT_FIXME); + + let mut iter = ::tt::iter::TtIter::new(tt); + + let cond = expect_fragment( + &mut iter, + parser::PrefixEntryPoint::Expr, + db.crate_graph()[id.lookup(db).krate].edition, + tt::DelimSpan { open: tt.delimiter.open, close: tt.delimiter.close }, + ); + _ = iter.expect_char(','); + let rest = iter.as_slice(); + let dollar_crate = dollar_crate(span); - let expanded = match &*args { - [cond, panic_args @ ..] 
=> { - let comma = tt::Subtree { - delimiter: tt::Delimiter::invisible_spanned(call_site_span), - token_trees: Box::new([tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { - char: ',', - spacing: tt::Spacing::Alone, - span: call_site_span, - }))]), - }; - let cond = cond.clone(); - let panic_args = itertools::Itertools::intersperse(panic_args.iter().cloned(), comma); + let expanded = match cond.value { + Some(cond) => { + let panic_args = rest.iter().cloned(); let mac = if use_panic_2021(db, span) { quote! {call_site_span => #dollar_crate::panic::panic_2021!(##panic_args) } } else { @@ -253,10 +256,13 @@ fn assert_expand( } }} } - [] => quote! {call_site_span =>{}}, + None => quote! {call_site_span =>{}}, }; - ExpandResult::ok(expanded) + match cond.err { + Some(err) => ExpandResult::new(expanded, err.into()), + None => ExpandResult::ok(expanded), + } } fn file_expand( diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/change.rs b/src/tools/rust-analyzer/crates/hir-expand/src/change.rs index 1a3dd0e7ddbd6..8b3f69db0277c 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/change.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/change.rs @@ -1,7 +1,7 @@ //! Defines a unit of change that can applied to the database to get the next //! state. Changes are transactional. use base_db::{ - salsa::Durability, CrateGraph, CrateId, FileChange, SourceDatabaseExt, SourceRoot, + salsa::Durability, CrateGraph, CrateId, FileChange, SourceRoot, SourceRootDatabase, TargetLayoutLoadResult, Version, }; use la_arena::RawIdx; @@ -23,7 +23,7 @@ impl ChangeWithProcMacros { Self::default() } - pub fn apply(self, db: &mut (impl ExpandDatabase + SourceDatabaseExt)) { + pub fn apply(self, db: &mut (impl ExpandDatabase + SourceRootDatabase)) { self.source_change.apply(db); if let Some(proc_macros) = self.proc_macros { db.set_proc_macros_with_durability(Arc::new(proc_macros), Durability::HIGH); diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/db.rs b/src/tools/rust-analyzer/crates/hir-expand/src/db.rs index 01a35660a904b..584f9631e3445 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/db.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/db.rs @@ -3,10 +3,11 @@ use base_db::{salsa, CrateId, SourceDatabase}; use either::Either; use limit::Limit; -use mbe::{syntax_node_to_token_tree, DocCommentDesugarMode, MatchedArmIndex}; +use mbe::MatchedArmIndex; use rustc_hash::FxHashSet; use span::{AstIdMap, EditionedFileId, Span, SyntaxContextData, SyntaxContextId}; use syntax::{ast, AstNode, Parse, SyntaxElement, SyntaxError, SyntaxNode, SyntaxToken, T}; +use syntax_bridge::{syntax_node_to_token_tree, DocCommentDesugarMode}; use triomphe::Arc; use crate::{ @@ -165,7 +166,7 @@ pub fn expand_speculative( // Build the subtree and token mapping for the speculative args let (mut tt, undo_info) = match loc.kind { MacroCallKind::FnLike { .. } => ( - mbe::syntax_node_to_token_tree( + syntax_bridge::syntax_node_to_token_tree( speculative_args, span_map, span, @@ -178,7 +179,7 @@ pub fn expand_speculative( SyntaxFixupUndoInfo::NONE, ), MacroCallKind::Attr { .. 
} if loc.def.is_attribute_derive() => ( - mbe::syntax_node_to_token_tree( + syntax_bridge::syntax_node_to_token_tree( speculative_args, span_map, span, @@ -213,7 +214,7 @@ pub fn expand_speculative( fixups.remove.extend(censor_cfg); ( - mbe::syntax_node_to_token_tree_modified( + syntax_bridge::syntax_node_to_token_tree_modified( speculative_args, span_map, fixups.append, @@ -459,7 +460,7 @@ fn macro_arg(db: &dyn ExpandDatabase, id: MacroCallId) -> MacroArgResult { return dummy_tt(kind); } - let mut tt = mbe::syntax_node_to_token_tree( + let mut tt = syntax_bridge::syntax_node_to_token_tree( tt.syntax(), map.as_ref(), span, @@ -515,7 +516,7 @@ fn macro_arg(db: &dyn ExpandDatabase, id: MacroCallId) -> MacroArgResult { fixups.remove.extend(censor_cfg); ( - mbe::syntax_node_to_token_tree_modified( + syntax_bridge::syntax_node_to_token_tree_modified( syntax, map, fixups.append, @@ -720,13 +721,13 @@ fn token_tree_to_syntax_node( edition: parser::Edition, ) -> (Parse, ExpansionSpanMap) { let entry_point = match expand_to { - ExpandTo::Statements => mbe::TopEntryPoint::MacroStmts, - ExpandTo::Items => mbe::TopEntryPoint::MacroItems, - ExpandTo::Pattern => mbe::TopEntryPoint::Pattern, - ExpandTo::Type => mbe::TopEntryPoint::Type, - ExpandTo::Expr => mbe::TopEntryPoint::Expr, + ExpandTo::Statements => syntax_bridge::TopEntryPoint::MacroStmts, + ExpandTo::Items => syntax_bridge::TopEntryPoint::MacroItems, + ExpandTo::Pattern => syntax_bridge::TopEntryPoint::Pattern, + ExpandTo::Type => syntax_bridge::TopEntryPoint::Type, + ExpandTo::Expr => syntax_bridge::TopEntryPoint::Expr, }; - mbe::token_tree_to_syntax_node(tt, entry_point, edition) + syntax_bridge::token_tree_to_syntax_node(tt, entry_point, edition) } fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<()>> { diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/declarative.rs b/src/tools/rust-analyzer/crates/hir-expand/src/declarative.rs index 48851af3fd160..b1a6eed2fbc71 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/declarative.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/declarative.rs @@ -2,10 +2,10 @@ use base_db::CrateId; use intern::sym; -use mbe::DocCommentDesugarMode; use span::{Edition, MacroCallId, Span, SyntaxContextId}; use stdx::TupleExt; use syntax::{ast, AstNode}; +use syntax_bridge::DocCommentDesugarMode; use triomphe::Arc; use crate::{ @@ -112,7 +112,7 @@ impl DeclarativeMacroExpander { ast::Macro::MacroRules(macro_rules) => ( match macro_rules.token_tree() { Some(arg) => { - let tt = mbe::syntax_node_to_token_tree( + let tt = syntax_bridge::syntax_node_to_token_tree( arg.syntax(), map.as_ref(), map.span_for_range( @@ -135,14 +135,14 @@ impl DeclarativeMacroExpander { let span = map.span_for_range(macro_def.macro_token().unwrap().text_range()); let args = macro_def.args().map(|args| { - mbe::syntax_node_to_token_tree( + syntax_bridge::syntax_node_to_token_tree( args.syntax(), map.as_ref(), span, DocCommentDesugarMode::Mbe, ) }); - let body = mbe::syntax_node_to_token_tree( + let body = syntax_bridge::syntax_node_to_token_tree( body.syntax(), map.as_ref(), span, diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs b/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs index 5385b44532b05..3528b2dde73e7 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs @@ -19,9 +19,9 @@ //! //! 
See the full discussion : use base_db::CrateId; -use mbe::DocCommentDesugarMode; use span::SyntaxContextId; use syntax::{ted, Parse, SyntaxElement, SyntaxNode, TextSize, WalkEvent}; +use syntax_bridge::DocCommentDesugarMode; use triomphe::Arc; use crate::{ @@ -82,7 +82,7 @@ pub fn expand_eager_macro_input( return ExpandResult { value: None, err }; }; - let mut subtree = mbe::syntax_node_to_token_tree( + let mut subtree = syntax_bridge::syntax_node_to_token_tree( &expanded_eager_input, arg_map, span, diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs b/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs index 71579d2f87f4b..b6d5828da9641 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs @@ -2,7 +2,6 @@ //! fix up syntax errors in the code we're passing to them. use intern::sym; -use mbe::DocCommentDesugarMode; use rustc_hash::{FxHashMap, FxHashSet}; use smallvec::SmallVec; use span::{ @@ -14,6 +13,7 @@ use syntax::{ ast::{self, AstNode, HasLoopBody}, match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange, TextSize, }; +use syntax_bridge::DocCommentDesugarMode; use triomphe::Arc; use tt::Spacing; @@ -76,7 +76,8 @@ pub(crate) fn fixup_syntax( if can_handle_error(&node) && has_error_to_handle(&node) { remove.insert(node.clone().into()); // the node contains an error node, we have to completely replace it by something valid - let original_tree = mbe::syntax_node_to_token_tree(&node, span_map, call_site, mode); + let original_tree = + syntax_bridge::syntax_node_to_token_tree(&node, span_map, call_site, mode); let idx = original.len() as u32; original.push(original_tree); let span = span_map.span_for_range(node_range); @@ -434,9 +435,9 @@ fn reverse_fixups_(tt: &mut Subtree, undo_info: &[Subtree]) { #[cfg(test)] mod tests { use expect_test::{expect, Expect}; - use mbe::DocCommentDesugarMode; use span::{Edition, EditionedFileId, FileId}; use syntax::TextRange; + use syntax_bridge::DocCommentDesugarMode; use triomphe::Arc; use crate::{ @@ -483,7 +484,7 @@ mod tests { span_map.span_for_range(TextRange::empty(0.into())), DocCommentDesugarMode::Mbe, ); - let mut tt = mbe::syntax_node_to_token_tree_modified( + let mut tt = syntax_bridge::syntax_node_to_token_tree_modified( &parsed.syntax_node(), span_map.as_ref(), fixups.append, @@ -498,9 +499,9 @@ mod tests { expect.assert_eq(&actual); // the fixed-up tree should be syntactically valid - let (parse, _) = mbe::token_tree_to_syntax_node( + let (parse, _) = syntax_bridge::token_tree_to_syntax_node( &tt, - ::mbe::TopEntryPoint::MacroItems, + syntax_bridge::TopEntryPoint::MacroItems, parser::Edition::CURRENT, ); assert!( @@ -513,7 +514,7 @@ mod tests { // the fixed-up + reversed version should be equivalent to the original input // modulo token IDs and `Punct`s' spacing. 
- let original_as_tt = mbe::syntax_node_to_token_tree( + let original_as_tt = syntax_bridge::syntax_node_to_token_tree( &parsed.syntax_node(), span_map.as_ref(), span_map.span_for_range(TextRange::empty(0.into())), diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs index 18da77d6caa9b..2bea9026265a6 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs @@ -176,7 +176,12 @@ impl ExpandErrorKind { &ExpandErrorKind::MissingProcMacroExpander(def_crate) => { match db.proc_macros().get_error_for_crate(def_crate) { Some((e, hard_err)) => (e.to_owned(), hard_err), - None => ("missing expander".to_owned(), true), + None => ( + format!( + "internal error: proc-macro map is missing error entry for crate {def_crate:?}" + ), + true, + ), } } ExpandErrorKind::MacroDefinition => { diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/span_map.rs b/src/tools/rust-analyzer/crates/hir-expand/src/span_map.rs index 3be88ee9daef5..740c27b89cea1 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/span_map.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/span_map.rs @@ -28,13 +28,13 @@ pub enum SpanMapRef<'a> { RealSpanMap(&'a RealSpanMap), } -impl mbe::SpanMapper for SpanMap { +impl syntax_bridge::SpanMapper for SpanMap { fn span_for(&self, range: TextRange) -> Span { self.span_for_range(range) } } -impl mbe::SpanMapper for SpanMapRef<'_> { +impl syntax_bridge::SpanMapper for SpanMapRef<'_> { fn span_for(&self, range: TextRange) -> Span { self.span_for_range(range) } diff --git a/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml b/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml index b6c33683ff6d9..b079b5675bd9e 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml +++ b/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "hir-ty" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "The type system for rust-analyzer." 
authors.workspace = true edition.workspace = true diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs index d506e00ca1239..a151ee01e6455 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs @@ -275,7 +275,7 @@ impl chalk_solve::RustIrDatabase for ChalkContext<'_> { }; chalk_ir::Binders::new(binders, bound) } - crate::ImplTraitId::AssociatedTypeImplTrait(alias, idx) => { + crate::ImplTraitId::TypeAliasImplTrait(alias, idx) => { let datas = self .db .type_alias_impl_traits(alias) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs index 5765262b08bea..302558162ac96 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs @@ -276,7 +276,7 @@ impl TyExt for Ty { data.substitute(Interner, &subst).into_value_and_skipped_binders().0 }) } - ImplTraitId::AssociatedTypeImplTrait(alias, idx) => { + ImplTraitId::TypeAliasImplTrait(alias, idx) => { db.type_alias_impl_traits(alias).map(|it| { let data = (*it).as_ref().map(|rpit| rpit.impl_traits[idx].bounds.clone()); @@ -295,7 +295,7 @@ impl TyExt for Ty { data.substitute(Interner, &opaque_ty.substitution) }) } - ImplTraitId::AssociatedTypeImplTrait(alias, idx) => { + ImplTraitId::TypeAliasImplTrait(alias, idx) => { db.type_alias_impl_traits(alias).map(|it| { let data = (*it).as_ref().map(|rpit| rpit.impl_traits[idx].bounds.clone()); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs index e52fae06d7f73..6e5a7cce9c976 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs @@ -4,20 +4,25 @@ use std::fmt; +use chalk_solve::rust_ir::AdtKind; use either::Either; -use hir_def::lang_item::LangItem; -use hir_def::{resolver::HasResolver, AdtId, AssocItemId, DefWithBodyId, HasModule}; -use hir_def::{ItemContainerId, Lookup}; +use hir_def::{ + lang_item::LangItem, + resolver::{HasResolver, ValueNs}, + AdtId, AssocItemId, DefWithBodyId, HasModule, ItemContainerId, Lookup, +}; use intern::sym; use itertools::Itertools; use rustc_hash::FxHashSet; use rustc_pattern_analysis::constructor::Constructor; -use syntax::{ast, AstNode}; +use syntax::{ + ast::{self, UnaryOp}, + AstNode, +}; use tracing::debug; use triomphe::Arc; use typed_arena::Arena; -use crate::Interner; use crate::{ db::HirDatabase, diagnostics::match_check::{ @@ -25,7 +30,7 @@ use crate::{ pat_analysis::{self, DeconstructedPat, MatchCheckCtx, WitnessPat}, }, display::HirDisplay, - InferenceResult, Ty, TyExt, + Adjust, InferenceResult, Interner, Ty, TyExt, TyKind, }; pub(crate) use hir_def::{ @@ -117,7 +122,7 @@ impl ExprValidator { Expr::If { .. } => { self.check_for_unnecessary_else(id, expr, db); } - Expr::Block { .. } => { + Expr::Block { .. } | Expr::Async { .. } | Expr::Unsafe { .. 
} => { self.validate_block(db, expr); } _ => {} @@ -236,7 +241,12 @@ impl ExprValidator { return; } - let report = match cx.compute_match_usefulness(m_arms.as_slice(), scrut_ty.clone()) { + let known_valid_scrutinee = Some(self.is_known_valid_scrutinee(scrutinee_expr, db)); + let report = match cx.compute_match_usefulness( + m_arms.as_slice(), + scrut_ty.clone(), + known_valid_scrutinee, + ) { Ok(report) => report, Err(()) => return, }; @@ -253,8 +263,52 @@ impl ExprValidator { } } + // [rustc's `is_known_valid_scrutinee`](https://github.com/rust-lang/rust/blob/c9bd03cb724e13cca96ad320733046cbdb16fbbe/compiler/rustc_mir_build/src/thir/pattern/check_match.rs#L288) + // + // While the above function in rustc uses thir exprs, r-a doesn't have them. + // So, the logic here is getting same result as "hir lowering + match with lowered thir" + // with "hir only" + fn is_known_valid_scrutinee(&self, scrutinee_expr: ExprId, db: &dyn HirDatabase) -> bool { + if self + .infer + .expr_adjustments + .get(&scrutinee_expr) + .is_some_and(|adjusts| adjusts.iter().any(|a| matches!(a.kind, Adjust::Deref(..)))) + { + return false; + } + + match &self.body[scrutinee_expr] { + Expr::UnaryOp { op: UnaryOp::Deref, .. } => false, + Expr::Path(path) => { + let value_or_partial = self + .owner + .resolver(db.upcast()) + .resolve_path_in_value_ns_fully(db.upcast(), path); + value_or_partial.map_or(true, |v| !matches!(v, ValueNs::StaticId(_))) + } + Expr::Field { expr, .. } => match self.infer.type_of_expr[*expr].kind(Interner) { + TyKind::Adt(adt, ..) + if db.adt_datum(self.owner.krate(db.upcast()), *adt).kind == AdtKind::Union => + { + false + } + _ => self.is_known_valid_scrutinee(*expr, db), + }, + Expr::Index { base, .. } => self.is_known_valid_scrutinee(*base, db), + Expr::Cast { expr, .. } => self.is_known_valid_scrutinee(*expr, db), + Expr::Missing => false, + _ => true, + } + } + fn validate_block(&mut self, db: &dyn HirDatabase, expr: &Expr) { - let Expr::Block { statements, .. } = expr else { return }; + let (Expr::Block { statements, .. } + | Expr::Async { statements, .. } + | Expr::Unsafe { statements, .. 
}) = expr + else { + return; + }; let pattern_arena = Arena::new(); let cx = MatchCheckCtx::new(self.owner.module(db.upcast()), self.owner, db); for stmt in &**statements { @@ -280,7 +334,7 @@ impl ExprValidator { has_guard: false, arm_data: (), }; - let report = match cx.compute_match_usefulness(&[match_arm], ty.clone()) { + let report = match cx.compute_match_usefulness(&[match_arm], ty.clone(), None) { Ok(v) => v, Err(e) => { debug!(?e, "match usefulness error"); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs index a12e201cf3d97..7b3abf501d293 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs @@ -69,22 +69,20 @@ pub(crate) struct MatchCheckCtx<'db> { body: DefWithBodyId, pub(crate) db: &'db dyn HirDatabase, exhaustive_patterns: bool, - min_exhaustive_patterns: bool, } impl<'db> MatchCheckCtx<'db> { pub(crate) fn new(module: ModuleId, body: DefWithBodyId, db: &'db dyn HirDatabase) -> Self { let def_map = db.crate_def_map(module.krate()); let exhaustive_patterns = def_map.is_unstable_feature_enabled(&sym::exhaustive_patterns); - let min_exhaustive_patterns = - def_map.is_unstable_feature_enabled(&sym::min_exhaustive_patterns); - Self { module, body, db, exhaustive_patterns, min_exhaustive_patterns } + Self { module, body, db, exhaustive_patterns } } pub(crate) fn compute_match_usefulness( &self, arms: &[MatchArm<'db>], scrut_ty: Ty, + known_valid_scrutinee: Option, ) -> Result, ()> { if scrut_ty.contains_unknown() { return Err(()); @@ -95,8 +93,7 @@ impl<'db> MatchCheckCtx<'db> { } } - // FIXME: Determine place validity correctly. For now, err on the safe side. - let place_validity = PlaceValidity::MaybeInvalid; + let place_validity = PlaceValidity::from_bool(known_valid_scrutinee.unwrap_or(true)); // Measured to take ~100ms on modern hardware. let complexity_limit = Some(500000); compute_match_usefulness(self, arms, scrut_ty, place_validity, complexity_limit) @@ -307,7 +304,8 @@ impl<'db> MatchCheckCtx<'db> { &Str(void) => match void {}, Wildcard | NonExhaustive | Hidden | PrivateUninhabited => PatKind::Wild, Never => PatKind::Never, - Missing | F32Range(..) | F64Range(..) | Opaque(..) | Or => { + Missing | F16Range(..) | F32Range(..) | F64Range(..) | F128Range(..) | Opaque(..) + | Or => { never!("can't convert to pattern: {:?}", pat.ctor()); PatKind::Wild } @@ -327,9 +325,6 @@ impl<'db> PatCx for MatchCheckCtx<'db> { fn is_exhaustive_patterns_feature_on(&self) -> bool { self.exhaustive_patterns } - fn is_min_exhaustive_patterns_feature_on(&self) -> bool { - self.min_exhaustive_patterns - } fn ctor_arity( &self, @@ -356,8 +351,9 @@ impl<'db> PatCx for MatchCheckCtx<'db> { }, Ref => 1, Slice(..) => unimplemented!(), - Never | Bool(..) | IntRange(..) | F32Range(..) | F64Range(..) | Str(..) - | Opaque(..) | NonExhaustive | PrivateUninhabited | Hidden | Missing | Wildcard => 0, + Never | Bool(..) | IntRange(..) | F16Range(..) | F32Range(..) | F64Range(..) + | F128Range(..) | Str(..) | Opaque(..) | NonExhaustive | PrivateUninhabited + | Hidden | Missing | Wildcard => 0, Or => { never!("The `Or` constructor doesn't have a fixed arity"); 0 @@ -419,8 +415,9 @@ impl<'db> PatCx for MatchCheckCtx<'db> { } }, Slice(_) => unreachable!("Found a `Slice` constructor in match checking"), - Never | Bool(..) | IntRange(..) | F32Range(..) 
| F64Range(..) | Str(..) - | Opaque(..) | NonExhaustive | PrivateUninhabited | Hidden | Missing | Wildcard => { + Never | Bool(..) | IntRange(..) | F16Range(..) | F32Range(..) | F64Range(..) + | F128Range(..) | Str(..) | Opaque(..) | NonExhaustive | PrivateUninhabited + | Hidden | Missing | Wildcard => { smallvec![] } Or => { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs index 22aa5c69bb03d..3f54cdd20cee3 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs @@ -17,7 +17,7 @@ pub fn missing_unsafe(db: &dyn HirDatabase, def: DefWithBodyId) -> Vec { let mut res = Vec::new(); let is_unsafe = match def { - DefWithBodyId::FunctionId(it) => db.function_data(it).has_unsafe_kw(), + DefWithBodyId::FunctionId(it) => db.function_data(it).is_unsafe(), DefWithBodyId::StaticId(_) | DefWithBodyId::ConstId(_) | DefWithBodyId::VariantId(_) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs index a433ecfd778b9..f406666ae5a76 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs @@ -1022,16 +1022,16 @@ impl HirDisplay for Ty { // We print all params except implicit impl Trait params. Still a bit weird; should we leave out parent and self? if parameters.len() - impl_ > 0 { // `parameters` are in the order of fn's params (including impl traits), fn's lifetimes + let without_impl = self_param as usize + type_ + const_ + lifetime; // parent's params (those from enclosing impl or trait, if any). - let (fn_params, other) = - parameters.split_at(self_param as usize + type_ + const_ + lifetime); - let (_impl, parent_params) = other.split_at(impl_); + let (fn_params, parent_params) = parameters.split_at(without_impl + impl_); debug_assert_eq!(parent_params.len(), parent_len); let parent_params = generic_args_sans_defaults(f, Some(generic_def_id), parent_params); let fn_params = - generic_args_sans_defaults(f, Some(generic_def_id), fn_params); + &generic_args_sans_defaults(f, Some(generic_def_id), fn_params) + [0..without_impl]; write!(f, "<")?; hir_fmt_generic_arguments(f, parent_params, None)?; @@ -1069,6 +1069,7 @@ impl HirDisplay for Ty { module_id, PrefixKind::Plain, false, + // FIXME: no_std Cfg? 
ImportPathConfig { prefer_no_std: false, prefer_prelude: true, @@ -1151,11 +1152,10 @@ impl HirDisplay for Ty { )?; // FIXME: it would maybe be good to distinguish this from the alias type (when debug printing), and to show the substitution } - ImplTraitId::AssociatedTypeImplTrait(alias, idx) => { + ImplTraitId::TypeAliasImplTrait(alias, idx) => { let datas = db.type_alias_impl_traits(alias).expect("impl trait id without data"); - let data = - (*datas).as_ref().map(|rpit| rpit.impl_traits[idx].bounds.clone()); + let data = (*datas).as_ref().map(|it| it.impl_traits[idx].bounds.clone()); let bounds = data.substitute(Interner, ¶meters); let krate = alias.krate(db.upcast()); write_bounds_like_dyn_trait_with_prefix( @@ -1338,7 +1338,7 @@ impl HirDisplay for Ty { SizedByDefault::Sized { anchor: krate }, )?; } - ImplTraitId::AssociatedTypeImplTrait(alias, idx) => { + ImplTraitId::TypeAliasImplTrait(alias, idx) => { let datas = db.type_alias_impl_traits(alias).expect("impl trait id without data"); let data = @@ -1462,7 +1462,7 @@ fn generic_args_sans_defaults<'ga>( // otherwise, if the arg is equal to the param default, hide it (unless the // default is an error which can happen for the trait Self type) #[allow(unstable_name_collisions)] - default_parameters.get(i).is_none_or(|default_parameter| { + IsNoneOr::is_none_or(default_parameters.get(i), |default_parameter| { // !is_err(default_parameter.skip_binders()) // && arg != &default_parameter.clone().substitute(Interner, ¶meters) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs index 804bc53905ad7..45d423d03c021 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs @@ -36,15 +36,14 @@ use hir_def::{ body::Body, builtin_type::{BuiltinInt, BuiltinType, BuiltinUint}, data::{ConstData, StaticData}, - hir::LabelId, - hir::{BindingAnnotation, BindingId, ExprId, ExprOrPatId, PatId}, + hir::{BindingAnnotation, BindingId, ExprId, ExprOrPatId, LabelId, PatId}, lang_item::{LangItem, LangItemTarget}, layout::Integer, path::{ModPath, Path}, resolver::{HasResolver, ResolveValueResult, Resolver, TypeNs, ValueNs}, type_ref::{LifetimeRef, TypeRef}, - AdtId, AssocItemId, DefWithBodyId, FieldId, FunctionId, ItemContainerId, Lookup, TraitId, - TupleFieldId, TupleId, TypeAliasId, VariantId, + AdtId, AssocItemId, DefWithBodyId, FieldId, FunctionId, ImplId, ItemContainerId, Lookup, + TraitId, TupleFieldId, TupleId, TypeAliasId, VariantId, }; use hir_expand::name::Name; use indexmap::IndexSet; @@ -785,14 +784,19 @@ impl<'a> InferenceContext<'a> { fn collect_const(&mut self, data: &ConstData) { let return_ty = self.make_ty(&data.type_ref); - // Constants might be associated items that define ATPITs. - self.insert_atpit_coercion_table(iter::once(&return_ty)); + // Constants might be defining usage sites of TAITs. + self.make_tait_coercion_table(iter::once(&return_ty)); self.return_ty = return_ty; } fn collect_static(&mut self, data: &StaticData) { - self.return_ty = self.make_ty(&data.type_ref); + let return_ty = self.make_ty(&data.type_ref); + + // Statics might be defining usage sites of TAITs. 
+ self.make_tait_coercion_table(iter::once(&return_ty)); + + self.return_ty = return_ty; } fn collect_fn(&mut self, func: FunctionId) { @@ -857,11 +861,11 @@ impl<'a> InferenceContext<'a> { self.return_ty = self.normalize_associated_types_in(return_ty); self.return_coercion = Some(CoerceMany::new(self.return_ty.clone())); - // Functions might be associated items that define ATPITs. - // To define an ATPITs, that ATPIT must appear in the function's signatures. + // Functions might be defining usage sites of TAITs. + // To define an TAITs, that TAIT must appear in the function's signatures. // So, it suffices to check for params and return types. params_and_ret_tys.push(self.return_ty.clone()); - self.insert_atpit_coercion_table(params_and_ret_tys.iter()); + self.make_tait_coercion_table(params_and_ret_tys.iter()); } fn insert_inference_vars_for_impl_trait(&mut self, t: T, placeholders: Substitution) -> T @@ -880,7 +884,7 @@ impl<'a> InferenceContext<'a> { ImplTraitId::ReturnTypeImplTrait(def, idx) => { (self.db.return_type_impl_traits(def), idx) } - ImplTraitId::AssociatedTypeImplTrait(def, idx) => { + ImplTraitId::TypeAliasImplTrait(def, idx) => { (self.db.type_alias_impl_traits(def), idx) } _ => unreachable!(), @@ -909,23 +913,25 @@ impl<'a> InferenceContext<'a> { } /// The coercion of a non-inference var into an opaque type should fail, - /// but not in the defining sites of the ATPITs. - /// In such cases, we insert an proxy inference var for each ATPIT, - /// and coerce into it instead of ATPIT itself. + /// but not in the defining sites of the TAITs. + /// In such cases, we insert an proxy inference var for each TAIT, + /// and coerce into it instead of TAIT itself. /// /// The inference var stretagy is effective because; /// - /// - It can still unify types that coerced into ATPIT + /// - It can still unify types that coerced into TAITs /// - We are pushing `impl Trait` bounds into it /// /// This function inserts a map that maps the opaque type to that proxy inference var. - fn insert_atpit_coercion_table<'b>(&mut self, tys: impl Iterator) { - struct OpaqueTyCollector<'a, 'b> { + fn make_tait_coercion_table<'b>(&mut self, tait_candidates: impl Iterator) { + struct TypeAliasImplTraitCollector<'a, 'b> { + db: &'b dyn HirDatabase, table: &'b mut InferenceTable<'a>, - opaque_tys: FxHashMap, + assocs: FxHashMap, + non_assocs: FxHashMap, } - impl<'a, 'b> TypeVisitor for OpaqueTyCollector<'a, 'b> { + impl<'a, 'b> TypeVisitor for TypeAliasImplTraitCollector<'a, 'b> { type BreakTy = (); fn as_dyn(&mut self) -> &mut dyn TypeVisitor { @@ -944,59 +950,105 @@ impl<'a> InferenceContext<'a> { let ty = self.table.resolve_ty_shallow(ty); if let TyKind::OpaqueType(id, _) = ty.kind(Interner) { - self.opaque_tys.insert(*id, ty.clone()); + if let ImplTraitId::TypeAliasImplTrait(alias_id, _) = + self.db.lookup_intern_impl_trait_id((*id).into()) + { + let loc = self.db.lookup_intern_type_alias(alias_id); + match loc.container { + ItemContainerId::ImplId(impl_id) => { + self.assocs.insert(*id, (impl_id, ty.clone())); + } + ItemContainerId::ModuleId(..) | ItemContainerId::ExternBlockId(..) 
=> { + self.non_assocs.insert(*id, ty.clone()); + } + _ => {} + } + } } ty.super_visit_with(self, outer_binder) } } - // Early return if this is not happening inside the impl block - let impl_id = if let Some(impl_id) = self.resolver.impl_def() { - impl_id - } else { - return; + let mut collector = TypeAliasImplTraitCollector { + db: self.db, + table: &mut self.table, + assocs: FxHashMap::default(), + non_assocs: FxHashMap::default(), }; - - let assoc_tys: FxHashSet<_> = self - .db - .impl_data(impl_id) - .items - .iter() - .filter_map(|item| match item { - AssocItemId::TypeAliasId(alias) => Some(*alias), - _ => None, - }) - .collect(); - if assoc_tys.is_empty() { - return; + for ty in tait_candidates { + ty.visit_with(collector.as_dyn(), DebruijnIndex::INNERMOST); } - let mut collector = - OpaqueTyCollector { table: &mut self.table, opaque_tys: FxHashMap::default() }; - for ty in tys { - ty.visit_with(collector.as_dyn(), DebruijnIndex::INNERMOST); + // Non-assoc TAITs can be define-used everywhere as long as they are + // in function signatures or const types, etc + let mut taits = collector.non_assocs; + + // assoc TAITs(ATPITs) can be only define-used inside their impl block. + // They cannot be define-used in inner items like in the following; + // + // ``` + // impl Trait for Struct { + // type Assoc = impl Default; + // + // fn assoc_fn() -> Self::Assoc { + // let foo: Self::Assoc = true; // Allowed here + // + // fn inner() -> Self::Assoc { + // false // Not allowed here + // } + // + // foo + // } + // } + // ``` + let impl_id = match self.owner { + DefWithBodyId::FunctionId(it) => { + let loc = self.db.lookup_intern_function(it); + if let ItemContainerId::ImplId(impl_id) = loc.container { + Some(impl_id) + } else { + None + } + } + DefWithBodyId::ConstId(it) => { + let loc = self.db.lookup_intern_const(it); + if let ItemContainerId::ImplId(impl_id) = loc.container { + Some(impl_id) + } else { + None + } + } + _ => None, + }; + + if let Some(impl_id) = impl_id { + taits.extend(collector.assocs.into_iter().filter_map(|(id, (impl_, ty))| { + if impl_ == impl_id { + Some((id, ty)) + } else { + None + } + })); } - let atpit_coercion_table: FxHashMap<_, _> = collector - .opaque_tys + + let tait_coercion_table: FxHashMap<_, _> = taits .into_iter() - .filter_map(|(opaque_ty_id, ty)| { - if let ImplTraitId::AssociatedTypeImplTrait(alias_id, _) = - self.db.lookup_intern_impl_trait_id(opaque_ty_id.into()) + .filter_map(|(id, ty)| { + if let ImplTraitId::TypeAliasImplTrait(alias_id, _) = + self.db.lookup_intern_impl_trait_id(id.into()) { - if assoc_tys.contains(&alias_id) { - let alias_placeholders = TyBuilder::placeholder_subst(self.db, alias_id); - let ty = self.insert_inference_vars_for_impl_trait(ty, alias_placeholders); - return Some((opaque_ty_id, ty)); - } + let subst = TyBuilder::placeholder_subst(self.db, alias_id); + let ty = self.insert_inference_vars_for_impl_trait(ty, subst); + Some((id, ty)) + } else { + None } - - None }) .collect(); - if !atpit_coercion_table.is_empty() { - self.table.atpit_coercion_table = Some(atpit_coercion_table); + if !tait_coercion_table.is_empty() { + self.table.tait_coercion_table = Some(tait_coercion_table); } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs index 72928851f123c..6f85a4a4247c4 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs @@ -276,16 +276,16 @@ impl 
InferenceTable<'_> { return success(simple(Adjust::NeverToAny)(to_ty.clone()), to_ty.clone(), vec![]); } - // If we are coercing into an ATPIT, coerce into its proxy inference var, instead. + // If we are coercing into a TAIT, coerce into its proxy inference var, instead. let mut to_ty = to_ty; let _to; - if let Some(atpit_table) = &self.atpit_coercion_table { + if let Some(tait_table) = &self.tait_coercion_table { if let TyKind::OpaqueType(opaque_ty_id, _) = to_ty.kind(Interner) { if !matches!( from_ty.kind(Interner), TyKind::InferenceVar(..) | TyKind::OpaqueType(..) ) { - if let Some(ty) = atpit_table.get(opaque_ty_id) { + if let Some(ty) = tait_table.get(opaque_ty_id) { _to = ty.clone(); to_ty = &_to; } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs index 3d762b174ace0..f5eb37f427876 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs @@ -1759,13 +1759,14 @@ impl InferenceContext<'_> { skip_indices: &[u32], is_varargs: bool, ) { - if args.len() != param_tys.len() + skip_indices.len() && !is_varargs { + let arg_count_mismatch = args.len() != param_tys.len() + skip_indices.len() && !is_varargs; + if arg_count_mismatch { self.push_diagnostic(InferenceDiagnostic::MismatchedArgCount { call_expr: expr, expected: param_tys.len() + skip_indices.len(), found: args.len(), }); - } + }; // Quoting https://github.com/rust-lang/rust/blob/6ef275e6c3cb1384ec78128eceeb4963ff788dca/src/librustc_typeck/check/mod.rs#L3325 -- // We do this in a pretty awful way: first we type-check any arguments @@ -1819,7 +1820,7 @@ impl InferenceContext<'_> { // The function signature may contain some unknown types, so we need to insert // type vars here to avoid type mismatch false positive. 
let coercion_target = self.insert_type_vars(coercion_target); - if self.coerce(Some(arg), &ty, &coercion_target).is_err() { + if self.coerce(Some(arg), &ty, &coercion_target).is_err() && !arg_count_mismatch { self.result.type_mismatches.insert( arg.into(), TypeMismatch { expected: coercion_target, actual: ty.clone() }, diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs index 7ee63af1c229e..3e3578b9f9b87 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs @@ -224,7 +224,7 @@ type ChalkInferenceTable = chalk_solve::infer::InferenceTable; pub(crate) struct InferenceTable<'a> { pub(crate) db: &'a dyn HirDatabase, pub(crate) trait_env: Arc, - pub(crate) atpit_coercion_table: Option>, + pub(crate) tait_coercion_table: Option>, var_unification_table: ChalkInferenceTable, type_variable_table: SmallVec<[TypeVariableFlags; 16]>, pending_obligations: Vec>>, @@ -244,7 +244,7 @@ impl<'a> InferenceTable<'a> { InferenceTable { db, trait_env, - atpit_coercion_table: None, + tait_coercion_table: None, var_unification_table: ChalkInferenceTable::new(), type_variable_table: SmallVec::new(), pending_obligations: Vec::new(), diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs index 034b9c773c216..47cc2a2f1e6bd 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs @@ -391,7 +391,7 @@ pub fn layout_of_ty_query( let infer = db.infer(func.into()); return db.layout_of_ty(infer.type_of_rpit[idx].clone(), trait_env); } - crate::ImplTraitId::AssociatedTypeImplTrait(..) => { + crate::ImplTraitId::TypeAliasImplTrait(..) => { return Err(LayoutError::NotImplemented); } crate::ImplTraitId::AsyncBlockTypeImplTrait(_, _) => { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs index 2f93ce31816c3..21c84511dc35e 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs @@ -15,8 +15,10 @@ extern crate rustc_abi; #[cfg(not(feature = "in-rust-tree"))] extern crate ra_ap_rustc_abi as rustc_abi; -// Use the crates.io version unconditionally until the API settles enough that we can switch to -// using the in-tree one. 
+#[cfg(feature = "in-rust-tree")] +extern crate rustc_pattern_analysis; + +#[cfg(not(feature = "in-rust-tree"))] extern crate ra_ap_rustc_pattern_analysis as rustc_pattern_analysis; mod builder; @@ -595,7 +597,7 @@ impl TypeFoldable for CallableSig { #[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)] pub enum ImplTraitId { ReturnTypeImplTrait(hir_def::FunctionId, ImplTraitIdx), - AssociatedTypeImplTrait(hir_def::TypeAliasId, ImplTraitIdx), + TypeAliasImplTrait(hir_def::TypeAliasId, ImplTraitIdx), AsyncBlockTypeImplTrait(hir_def::DefWithBodyId, ExprId), } impl InternValueTrivial for ImplTraitId {} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs index 444628ff521d6..67cdb99744ab7 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs @@ -341,7 +341,7 @@ impl<'a> TyLoweringContext<'a> { let impl_trait_id = origin.either( |f| ImplTraitId::ReturnTypeImplTrait(f, idx), - |a| ImplTraitId::AssociatedTypeImplTrait(a, idx), + |a| ImplTraitId::TypeAliasImplTrait(a, idx), ); let opaque_ty_id = self.db.intern_impl_trait_id(impl_trait_id).into(); let generics = @@ -1857,7 +1857,7 @@ fn fn_sig_for_fn(db: &dyn HirDatabase, def: FunctionId) -> PolyFnSig { params, ret, data.is_varargs(), - if data.has_unsafe_kw() { Safety::Unsafe } else { Safety::Safe }, + if data.is_unsafe() { Safety::Unsafe } else { Safety::Safe }, data.abi.as_ref().map_or(FnAbi::Rust, FnAbi::from_symbol), ); make_binders(db, &generics, sig) @@ -2131,7 +2131,6 @@ pub(crate) fn type_alias_impl_traits( if let Some(type_ref) = &data.type_ref { let _ty = ctx.lower_ty(type_ref); } - let generics = generics(db.upcast(), def.into()); let type_alias_impl_traits = ImplTraits { impl_traits: match ctx.impl_trait_mode { ImplTraitLoweringState::Opaque(x) => x.into_inner(), @@ -2141,6 +2140,7 @@ pub(crate) fn type_alias_impl_traits( if type_alias_impl_traits.impl_traits.is_empty() { None } else { + let generics = generics(db.upcast(), def.into()); Some(Arc::new(make_binders(db, &generics, type_alias_impl_traits))) } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs index 172dea02e6194..8f6582b7f8022 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs @@ -82,8 +82,8 @@ impl FallibleTypeFolder for Filler<'_> { }; filler.try_fold_ty(infer.type_of_rpit[idx].clone(), outer_binder) } - crate::ImplTraitId::AssociatedTypeImplTrait(..) => { - not_supported!("associated type impl trait"); + crate::ImplTraitId::TypeAliasImplTrait(..) 
=> { + not_supported!("type alias impl trait"); } crate::ImplTraitId::AsyncBlockTypeImplTrait(_, _) => { not_supported!("async block impl trait"); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs index 108ae198d50ee..0efb9c52fb647 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs @@ -15,7 +15,7 @@ use test_utils::extract_annotations; use triomphe::Arc; #[salsa::database( - base_db::SourceDatabaseExtStorage, + base_db::SourceRootDatabaseStorage, base_db::SourceDatabaseStorage, hir_expand::db::ExpandDatabaseStorage, hir_def::db::InternDatabaseStorage, @@ -75,9 +75,6 @@ impl salsa::ParallelDatabase for TestDB { impl panic::RefUnwindSafe for TestDB {} impl FileLoader for TestDB { - fn file_text(&self, file_id: FileId) -> Arc { - FileLoaderDelegate(self).file_text(file_id) - } fn resolve_path(&self, path: AnchoredPath<'_>) -> Option { FileLoaderDelegate(self).resolve_path(path) } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs index e67124d57a2cb..0fcd789f761c3 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs @@ -9,10 +9,11 @@ mod patterns; mod regression; mod simple; mod traits; +mod type_alias_impl_traits; use std::env; -use base_db::SourceDatabaseExt2 as _; +use base_db::SourceDatabaseFileInputExt as _; use expect_test::Expect; use hir_def::{ body::{Body, BodySourceMap, SyntheticSyntax}, diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs index e9c62d3416954..0a24eeb1fe82d 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs @@ -1,4 +1,4 @@ -use base_db::SourceDatabaseExt2 as _; +use base_db::SourceDatabaseFileInputExt as _; use test_fixture::WithFixture; use crate::{db::HirDatabase, test_db::TestDB}; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs index ac2dfea1010d4..281983861207b 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs @@ -2122,3 +2122,22 @@ fn test() { "#, ) } + +#[test] +fn issue_17191() { + check_types( + r#" +trait A { + type Item; +} + +trait B {} + +fn foo>() {} + +fn test() { + let f = foo; + //^ fn foo<{unknown}>() +}"#, + ); +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs index fb07e718d102c..a98cff2a08b26 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs @@ -4691,119 +4691,6 @@ fn f() { ); } -#[test] -fn associated_type_impl_trait() { - check_types( - r#" -trait Foo {} -struct S1; -impl Foo for S1 {} - -trait Bar { - type Item; - fn bar(&self) -> Self::Item; -} -struct S2; -impl Bar for S2 { - type Item = impl Foo; - fn bar(&self) -> Self::Item { - S1 - } -} - -fn test() { - let x = S2.bar(); - //^ impl Foo + ?Sized -} - "#, - ); -} - -#[test] -fn associated_type_impl_traits_complex() { - check_types( - r#" -struct Unary(T); -struct Binary(T, U); - -trait Foo {} -struct S1; -impl Foo for S1 {} - -trait Bar { - type Item; - fn bar(&self) -> Unary; -} -struct S2; 
-impl Bar for S2 { - type Item = Unary; - fn bar(&self) -> Unary<::Item> { - Unary(Unary(S1)) - } -} - -trait Baz { - type Target1; - type Target2; - fn baz(&self) -> Binary; -} -struct S3; -impl Baz for S3 { - type Target1 = impl Foo; - type Target2 = Unary; - fn baz(&self) -> Binary { - Binary(S1, Unary(S2)) - } -} - -fn test() { - let x = S3.baz(); - //^ Binary> - let y = x.1.0.bar(); - //^ Unary> -} - "#, - ); -} - -#[test] -fn associated_type_with_impl_trait_in_tuple() { - check_no_mismatches( - r#" -pub trait Iterator { - type Item; -} - -pub trait Value {} - -fn bar>() {} - -fn foo() { - bar(); -} -"#, - ); -} - -#[test] -fn associated_type_with_impl_trait_in_nested_tuple() { - check_no_mismatches( - r#" -pub trait Iterator { - type Item; -} - -pub trait Value {} - -fn bar>() {} - -fn foo() { - bar(); -} -"#, - ); -} - #[test] fn dyn_trait_with_lifetime_in_rpit() { check_types( diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/type_alias_impl_traits.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/type_alias_impl_traits.rs new file mode 100644 index 0000000000000..e2b7bf379cc3b --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/type_alias_impl_traits.rs @@ -0,0 +1,161 @@ +use expect_test::expect; + +use super::{check_infer_with_mismatches, check_no_mismatches, check_types}; + +#[test] +fn associated_type_impl_trait() { + check_types( + r#" +trait Foo {} +struct S1; +impl Foo for S1 {} + +trait Bar { + type Item; + fn bar(&self) -> Self::Item; +} +struct S2; +impl Bar for S2 { + type Item = impl Foo; + fn bar(&self) -> Self::Item { + S1 + } +} + +fn test() { + let x = S2.bar(); + //^ impl Foo + ?Sized +} + "#, + ); +} + +#[test] +fn associated_type_impl_traits_complex() { + check_types( + r#" +struct Unary(T); +struct Binary(T, U); + +trait Foo {} +struct S1; +impl Foo for S1 {} + +trait Bar { + type Item; + fn bar(&self) -> Unary; +} +struct S2; +impl Bar for S2 { + type Item = Unary; + fn bar(&self) -> Unary<::Item> { + Unary(Unary(S1)) + } +} + +trait Baz { + type Target1; + type Target2; + fn baz(&self) -> Binary; +} +struct S3; +impl Baz for S3 { + type Target1 = impl Foo; + type Target2 = Unary; + fn baz(&self) -> Binary { + Binary(S1, Unary(S2)) + } +} + +fn test() { + let x = S3.baz(); + //^ Binary> + let y = x.1.0.bar(); + //^ Unary> +} + "#, + ); +} + +#[test] +fn associated_type_with_impl_trait_in_tuple() { + check_no_mismatches( + r#" +pub trait Iterator { + type Item; +} + +pub trait Value {} + +fn bar>() {} + +fn foo() { + bar(); +} +"#, + ); +} + +#[test] +fn associated_type_with_impl_trait_in_nested_tuple() { + check_no_mismatches( + r#" +pub trait Iterator { + type Item; +} + +pub trait Value {} + +fn bar>() {} + +fn foo() { + bar(); +} +"#, + ); +} + +#[test] +fn type_alias_impl_trait_simple() { + check_no_mismatches( + r#" +trait Trait {} + +struct Struct; + +impl Trait for Struct {} + +type AliasTy = impl Trait; + +static ALIAS: AliasTy = { + let res: AliasTy = Struct; + res +}; +"#, + ); + + check_infer_with_mismatches( + r#" +trait Trait {} + +struct Struct; + +impl Trait for Struct {} + +type AliasTy = impl Trait; + +static ALIAS: i32 = { + // TATIs cannot be define-used if not in signature or type annotations + let _a: AliasTy = Struct; + 5 +}; +"#, + expect![[r#" + 106..220 '{ ... 
5 }': i32 + 191..193 '_a': impl Trait + ?Sized + 205..211 'Struct': Struct + 217..218 '5': i32 + 205..211: expected impl Trait + ?Sized, got Struct + "#]], + ) +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs b/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs index fbec332885dfa..d1ce68da6d6d8 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs @@ -253,12 +253,7 @@ impl<'a> ClosureSubst<'a> { pub fn is_fn_unsafe_to_call(db: &dyn HirDatabase, func: FunctionId) -> bool { let data = db.function_data(func); - if data.has_unsafe_kw() { - // Functions that are `#[rustc_deprecated_safe_2024]` are safe to call before 2024. - if db.attrs(func.into()).by_key(&sym::rustc_deprecated_safe_2024).exists() { - // FIXME: Properly check the caller span and mark it as unsafe after 2024. - return false; - } + if data.is_unsafe() { return true; } diff --git a/src/tools/rust-analyzer/crates/hir/Cargo.toml b/src/tools/rust-analyzer/crates/hir/Cargo.toml index edf26a07a7442..324fa1c6a8571 100644 --- a/src/tools/rust-analyzer/crates/hir/Cargo.toml +++ b/src/tools/rust-analyzer/crates/hir/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "hir" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "A high-level object-oriented access to Rust code for rust-analyzer." authors.workspace = true edition.workspace = true diff --git a/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs b/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs index 4bb8c140a1f20..ffb972475f8f7 100644 --- a/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs +++ b/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs @@ -48,6 +48,7 @@ macro_rules! diagnostics { // ] diagnostics![ + AwaitOutsideOfAsync, BreakOutsideOfLoop, ExpectedFunction, InactiveCode, @@ -135,6 +136,12 @@ pub struct UnreachableLabel { pub name: Name, } +#[derive(Debug)] +pub struct AwaitOutsideOfAsync { + pub node: InFile>, + pub location: String, +} + #[derive(Debug, Clone, Eq, PartialEq)] pub struct UndeclaredLabel { pub node: InFile>, diff --git a/src/tools/rust-analyzer/crates/hir/src/display.rs b/src/tools/rust-analyzer/crates/hir/src/display.rs index 7def828e95f3c..12dd8b5bf4f5d 100644 --- a/src/tools/rust-analyzer/crates/hir/src/display.rs +++ b/src/tools/rust-analyzer/crates/hir/src/display.rs @@ -69,13 +69,13 @@ impl HirDisplay for Function { write_visibility(module_id, self.visibility(db), f)?; - if data.has_default_kw() { + if data.is_default() { f.write_str("default ")?; } - if data.has_const_kw() { + if data.is_const() { f.write_str("const ")?; } - if data.has_async_kw() { + if data.is_async() { f.write_str("async ")?; } if self.is_unsafe_to_call(db) { @@ -125,7 +125,7 @@ impl HirDisplay for Function { // `FunctionData::ret_type` will be `::core::future::Future` for async fns. // Use ugly pattern match to strip the Future trait. // Better way? - let ret_type = if !data.has_async_kw() { + let ret_type = if !data.is_async() { &data.ret_type } else { match &*data.ret_type { diff --git a/src/tools/rust-analyzer/crates/hir/src/lib.rs b/src/tools/rust-analyzer/crates/hir/src/lib.rs index 266ef2a55c520..1a3becdf50e9b 100644 --- a/src/tools/rust-analyzer/crates/hir/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir/src/lib.rs @@ -1,4 +1,4 @@ -//! HIR (previously known as descriptors) provides a high-level object oriented +//! HIR (previously known as descriptors) provides a high-level object-oriented //! access to Rust code. //! //! 
The principal difference between HIR and syntax trees is that HIR is bound @@ -1828,6 +1828,9 @@ impl DefWithBody { is_bang: true, } .into(), + BodyDiagnostic::AwaitOutsideOfAsync { node, location } => { + AwaitOutsideOfAsync { node: *node, location: location.clone() }.into() + } BodyDiagnostic::UnreachableLabel { node, name } => { UnreachableLabel { node: *node, name: name.clone() }.into() } @@ -2186,11 +2189,11 @@ impl Function { } pub fn is_const(self, db: &dyn HirDatabase) -> bool { - db.function_data(self.id).has_const_kw() + db.function_data(self.id).is_const() } pub fn is_async(self, db: &dyn HirDatabase) -> bool { - db.function_data(self.id).has_async_kw() + db.function_data(self.id).is_async() } /// Does this function have `#[test]` attribute? diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics.rs b/src/tools/rust-analyzer/crates/hir/src/semantics.rs index 29f98972dcd82..a377163162c2c 100644 --- a/src/tools/rust-analyzer/crates/hir/src/semantics.rs +++ b/src/tools/rust-analyzer/crates/hir/src/semantics.rs @@ -28,7 +28,7 @@ use hir_expand::{ use itertools::Itertools; use rustc_hash::{FxHashMap, FxHashSet}; use smallvec::{smallvec, SmallVec}; -use span::{EditionedFileId, FileId, Span, SyntaxContextId, ROOT_ERASED_FILE_AST_ID}; +use span::{EditionedFileId, FileId}; use stdx::TupleExt; use syntax::{ algo::skip_trivia_token, @@ -757,81 +757,9 @@ impl<'db> SemanticsImpl<'db> { res } - // return: - // SourceAnalyzer(file_id that original call include!) - // macro file id - // token in include! macro mapped from token in params - // span for the mapped token - fn is_from_include_file( - &self, - token: SyntaxToken, - ) -> Option<(SourceAnalyzer, HirFileId, SyntaxToken, Span)> { - let parent = token.parent()?; - let file_id = self.find_file(&parent).file_id.file_id()?; - - // iterate related crates and find all include! invocations that include_file_id matches - for (invoc, _) in self - .db - .relevant_crates(file_id.file_id()) - .iter() - .flat_map(|krate| self.db.include_macro_invoc(*krate)) - .filter(|&(_, include_file_id)| include_file_id == file_id) - { - let macro_file = invoc.as_macro_file(); - let expansion_info = { - self.with_ctx(|ctx| { - ctx.cache - .expansion_info_cache - .entry(macro_file) - .or_insert_with(|| { - let exp_info = macro_file.expansion_info(self.db.upcast()); - - let InMacroFile { file_id, value } = exp_info.expanded(); - if let InFile { file_id, value: Some(value) } = exp_info.arg() { - self.cache(value.ancestors().last().unwrap(), file_id); - } - self.cache(value, file_id.into()); - - exp_info - }) - .clone() - }) - }; - - // FIXME: uncached parse - // Create the source analyzer for the macro call scope - let Some(sa) = expansion_info - .arg() - .value - .and_then(|it| self.analyze_no_infer(&it.ancestors().last().unwrap())) - else { - continue; - }; - - // get mapped token in the include! 
macro file - let span = span::Span { - range: token.text_range(), - anchor: span::SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID }, - ctx: SyntaxContextId::ROOT, - }; - let Some(InMacroFile { file_id, value: mut mapped_tokens }) = - expansion_info.map_range_down_exact(span) - else { - continue; - }; - - // if we find one, then return - if let Some(t) = mapped_tokens.next() { - return Some((sa, file_id.into(), t, span)); - } - } - - None - } - fn descend_into_macros_impl( &self, - mut token: SyntaxToken, + token: SyntaxToken, f: &mut dyn FnMut(InFile) -> ControlFlow<()>, ) { let _p = tracing::info_span!("descend_into_macros_impl").entered(); @@ -848,17 +776,7 @@ impl<'db> SemanticsImpl<'db> { return; } }, - None => { - // if we cannot find a source analyzer for this token, then we try to find out - // whether this file is an included file and treat that as the include input - let Some((it, macro_file_id, mapped_token, s)) = - self.is_from_include_file(token) - else { - return; - }; - token = mapped_token; - (it, s, macro_file_id) - } + None => return, }; let mut m_cache = self.macro_call_cache.borrow_mut(); diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs b/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs index 1376dddf67156..09df639d4a810 100644 --- a/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs +++ b/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs @@ -94,8 +94,9 @@ use hir_def::{ }, hir::{BindingId, LabelId}, AdtId, BlockId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId, ExternCrateId, - FieldId, FunctionId, GenericDefId, GenericParamId, ImplId, LifetimeParamId, MacroId, ModuleId, - StaticId, StructId, TraitAliasId, TraitId, TypeAliasId, TypeParamId, UnionId, UseId, VariantId, + FieldId, FunctionId, GenericDefId, GenericParamId, ImplId, LifetimeParamId, Lookup, MacroId, + ModuleId, StaticId, StructId, TraitAliasId, TraitId, TypeAliasId, TypeParamId, UnionId, UseId, + VariantId, }; use hir_expand::{ attrs::AttrId, name::AsName, ExpansionInfo, HirFileId, HirFileIdExt, MacroCallId, @@ -131,11 +132,30 @@ impl SourceToDefCtx<'_, '_> { for &crate_id in self.db.relevant_crates(file).iter() { // Note: `mod` declarations in block modules cannot be supported here let crate_def_map = self.db.crate_def_map(crate_id); - mods.extend( + let n_mods = mods.len(); + let modules = |file| { crate_def_map .modules_for_file(file) - .map(|local_id| crate_def_map.module_id(local_id)), - ) + .map(|local_id| crate_def_map.module_id(local_id)) + }; + mods.extend(modules(file)); + if mods.len() == n_mods { + mods.extend( + self.db + .include_macro_invoc(crate_id) + .iter() + .filter(|&&(_, file_id)| file_id == file) + .flat_map(|(call, _)| { + modules( + call.lookup(self.db.upcast()) + .kind + .file_id() + .original_file(self.db.upcast()) + .file_id(), + ) + }), + ); + } } if mods.is_empty() { // FIXME: detached file diff --git a/src/tools/rust-analyzer/crates/ide-assists/Cargo.toml b/src/tools/rust-analyzer/crates/ide-assists/Cargo.toml index b1e7609afef41..df52562b6a102 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/Cargo.toml +++ b/src/tools/rust-analyzer/crates/ide-assists/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "ide-assists" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "Code assists for rust-analyzer." 
authors.workspace = true edition.workspace = true diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs index ebfbb83bb9183..4cd15f1c755e1 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs @@ -2280,4 +2280,29 @@ impl b::LocalTrait for B { "#, ) } + + #[test] + fn impl_with_type_param_with_former_param_as_default() { + check_assist( + add_missing_impl_members, + r#" +pub trait Test<'a, T, U = T> { + fn test(item: &'a T) -> U; +} +impl<'a> Test<'a, i32> for bool { + $0 +} +"#, + r#" +pub trait Test<'a, T, U = T> { + fn test(item: &'a T) -> U; +} +impl<'a> Test<'a, i32> for bool { + fn test(item: &'a i32) -> i32 { + ${0:todo!()} + } +} +"#, + ); + } } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/bind_unused_param.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/bind_unused_param.rs index 839ffa2614b05..8f053f4df949f 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/bind_unused_param.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/bind_unused_param.rs @@ -43,7 +43,7 @@ pub(crate) fn bind_unused_param(acc: &mut Assists, ctx: &AssistContext<'_>) -> O acc.add( AssistId("bind_unused_param", AssistKind::QuickFix), - &format!("Bind as `let _ = {ident_pat};`"), + format!("Bind as `let _ = {ident_pat};`"), param.syntax().text_range(), |builder| { let line_index = ctx.db().line_index(ctx.file_id().into()); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs index 44f31dcb8496a..c72bd411d644e 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs @@ -2,7 +2,7 @@ use either::Either; use ide_db::defs::{Definition, NameRefClass}; use syntax::{ ast::{self, AstNode, HasGenericParams, HasVisibility}, - match_ast, SyntaxNode, + match_ast, SyntaxKind, SyntaxNode, }; use crate::{assist_context::SourceChangeBuilder, AssistContext, AssistId, AssistKind, Assists}; @@ -100,7 +100,9 @@ fn edit_struct_def( ast::make::tokens::single_newline().text(), ); edit.insert(tuple_fields_text_range.start(), w.syntax().text()); - edit.insert(tuple_fields_text_range.start(), ","); + if !w.syntax().last_token().is_some_and(|t| t.kind() == SyntaxKind::COMMA) { + edit.insert(tuple_fields_text_range.start(), ","); + } edit.insert( tuple_fields_text_range.start(), ast::make::tokens::single_newline().text(), @@ -879,6 +881,29 @@ use crate::{A::Variant, Inner}; fn f() { let a = Variant { field1: Inner }; } +"#, + ); + } + + #[test] + fn where_clause_with_trailing_comma() { + check_assist( + convert_tuple_struct_to_named_struct, + r#" +trait Foo {} + +struct Bar$0(pub T) +where + T: Foo,; +"#, + r#" +trait Foo {} + +struct Bar +where + T: Foo, +{ pub field1: T } + "#, ); } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs b/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs index e42be636d71de..6469957fe16b5 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs @@ -3,7 +3,7 @@ mod generated; use 
expect_test::expect; use hir::{FileRange, Semantics}; use ide_db::{ - base_db::SourceDatabaseExt, + base_db::{SourceDatabase, SourceRootDatabase}, imports::insert_use::{ImportGranularity, InsertUseConfig}, source_change::FileSystemEdit, EditionedFileId, RootDatabase, SnippetCap, diff --git a/src/tools/rust-analyzer/crates/ide-completion/Cargo.toml b/src/tools/rust-analyzer/crates/ide-completion/Cargo.toml index 6a4c70d460f2c..035b2fc0db0c4 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/Cargo.toml +++ b/src/tools/rust-analyzer/crates/ide-completion/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "ide-completion" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "Utilities for generating completions of user input for rust-analyzer." authors.workspace = true edition.workspace = true diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs index 414b096ad475c..58e9b724df2a0 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions.rs @@ -24,7 +24,7 @@ pub(crate) mod vis; use std::iter; -use hir::{sym, HasAttrs, ImportPathConfig, Name, ScopeDef, Variant}; +use hir::{sym, HasAttrs, Name, ScopeDef, Variant}; use ide_db::{imports::import_assets::LocatedImport, RootDatabase, SymbolKind}; use syntax::{ast, SmolStr, ToSmolStr}; @@ -645,11 +645,7 @@ fn enum_variants_with_paths( if let Some(path) = ctx.module.find_path( ctx.db, hir::ModuleDef::from(variant), - ImportPathConfig { - prefer_no_std: ctx.config.prefer_no_std, - prefer_prelude: ctx.config.prefer_prelude, - prefer_absolute: ctx.config.prefer_absolute, - }, + ctx.config.import_path_config(), ) { // Variants with trivial paths are already added by the existing completion logic, // so we should avoid adding these twice diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/expr.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/expr.rs index 71ff6b5aea37a..ff2c8da421306 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/expr.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/expr.rs @@ -1,6 +1,6 @@ //! Completion of names from the current scope in expression position. -use hir::{sym, ImportPathConfig, Name, ScopeDef}; +use hir::{sym, Name, ScopeDef}; use syntax::ast; use crate::{ @@ -174,11 +174,7 @@ pub(crate) fn complete_expr_path( .find_path( ctx.db, hir::ModuleDef::from(strukt), - ImportPathConfig { - prefer_no_std: ctx.config.prefer_no_std, - prefer_prelude: ctx.config.prefer_prelude, - prefer_absolute: ctx.config.prefer_absolute, - }, + ctx.config.import_path_config(), ) .filter(|it| it.len() > 1); @@ -200,11 +196,7 @@ pub(crate) fn complete_expr_path( .find_path( ctx.db, hir::ModuleDef::from(un), - ImportPathConfig { - prefer_no_std: ctx.config.prefer_no_std, - prefer_prelude: ctx.config.prefer_prelude, - prefer_absolute: ctx.config.prefer_absolute, - }, + ctx.config.import_path_config(), ) .filter(|it| it.len() > 1); diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs index e803072fa8f0a..fdce7c547a490 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/flyimport.rs @@ -1,5 +1,5 @@ //! See [`import_on_the_fly`]. 
-use hir::{ImportPathConfig, ItemInNs, ModuleDef}; +use hir::{ItemInNs, ModuleDef}; use ide_db::imports::{ import_assets::{ImportAssets, LocatedImport}, insert_use::ImportScope, @@ -256,11 +256,7 @@ fn import_on_the_fly( }; let user_input_lowercased = potential_import_name.to_lowercase(); - let import_cfg = ImportPathConfig { - prefer_no_std: ctx.config.prefer_no_std, - prefer_prelude: ctx.config.prefer_prelude, - prefer_absolute: ctx.config.prefer_absolute, - }; + let import_cfg = ctx.config.import_path_config(); import_assets .search_for_imports(&ctx.sema, import_cfg, ctx.config.insert_use.prefix_kind) @@ -306,12 +302,7 @@ fn import_on_the_fly_pat_( ItemInNs::Values(def) => matches!(def, hir::ModuleDef::Const(_)), }; let user_input_lowercased = potential_import_name.to_lowercase(); - - let cfg = ImportPathConfig { - prefer_no_std: ctx.config.prefer_no_std, - prefer_prelude: ctx.config.prefer_prelude, - prefer_absolute: ctx.config.prefer_absolute, - }; + let cfg = ctx.config.import_path_config(); import_assets .search_for_imports(&ctx.sema, cfg, ctx.config.insert_use.prefix_kind) @@ -353,11 +344,7 @@ fn import_on_the_fly_method( let user_input_lowercased = potential_import_name.to_lowercase(); - let cfg = ImportPathConfig { - prefer_no_std: ctx.config.prefer_no_std, - prefer_prelude: ctx.config.prefer_prelude, - prefer_absolute: ctx.config.prefer_absolute, - }; + let cfg = ctx.config.import_path_config(); import_assets .search_for_imports(&ctx.sema, cfg, ctx.config.insert_use.prefix_kind) diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs index 713968c1caf97..d9a10893bf52b 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs @@ -4,7 +4,7 @@ use std::iter; use hir::{HirFileIdExt, Module}; use ide_db::{ - base_db::{SourceDatabaseExt, VfsPath}, + base_db::{SourceRootDatabase, VfsPath}, FxHashSet, RootDatabase, SymbolKind, }; use stdx::IsNoneOr; diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs index d919609237a6b..977e0d80a4d46 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs @@ -2,7 +2,7 @@ mod format_like; -use hir::{ImportPathConfig, ItemInNs}; +use hir::ItemInNs; use ide_db::{ documentation::{Documentation, HasDocs}, imports::insert_use::ImportScope, @@ -60,11 +60,7 @@ pub(crate) fn complete_postfix( None => return, }; - let cfg = ImportPathConfig { - prefer_no_std: ctx.config.prefer_no_std, - prefer_prelude: ctx.config.prefer_prelude, - prefer_absolute: ctx.config.prefer_absolute, - }; + let cfg = ctx.config.import_path_config(); if let Some(drop_trait) = ctx.famous_defs().core_ops_Drop() { if receiver_ty.impls_trait(ctx.db, drop_trait, &[]) { diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/config.rs b/src/tools/rust-analyzer/crates/ide-completion/src/config.rs index 7d062cb23e5ed..d885b82ec90df 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/config.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/config.rs @@ -4,6 +4,7 @@ //! module, and we use to statically check that we only produce snippet //! completions if we are allowed to. 
+use hir::ImportPathConfig; use ide_db::{imports::insert_use::InsertUseConfig, SnippetCap}; use crate::snippet::Snippet; @@ -45,4 +46,12 @@ impl CompletionConfig { .iter() .flat_map(|snip| snip.prefix_triggers.iter().map(move |trigger| (&**trigger, snip))) } + + pub fn import_path_config(&self) -> ImportPathConfig { + ImportPathConfig { + prefer_no_std: self.prefer_no_std, + prefer_prelude: self.prefer_prelude, + prefer_absolute: self.prefer_absolute, + } + } } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs b/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs index 424f94457e3bf..90c1728074d4d 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs @@ -10,7 +10,6 @@ mod snippet; #[cfg(test)] mod tests; -use hir::ImportPathConfig; use ide_db::{ helpers::mod_path_to_ast, imports::{ @@ -249,11 +248,7 @@ pub fn resolve_completion_edits( let new_ast = scope.clone_for_update(); let mut import_insert = TextEdit::builder(); - let cfg = ImportPathConfig { - prefer_no_std: config.prefer_no_std, - prefer_prelude: config.prefer_prelude, - prefer_absolute: config.prefer_absolute, - }; + let cfg = config.import_path_config(); imports.into_iter().for_each(|(full_import_path, imported_name)| { let items_with_name = items_locator::items_with_name( diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render.rs index abcff62341b59..02d667c52056a 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/render.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/render.rs @@ -10,7 +10,7 @@ pub(crate) mod type_alias; pub(crate) mod union_literal; pub(crate) mod variant; -use hir::{sym, AsAssocItem, HasAttrs, HirDisplay, ImportPathConfig, ModuleDef, ScopeDef, Type}; +use hir::{sym, AsAssocItem, HasAttrs, HirDisplay, ModuleDef, ScopeDef, Type}; use ide_db::{ documentation::{Documentation, HasDocs}, helpers::item_name, @@ -294,11 +294,7 @@ pub(crate) fn render_expr( .unwrap_or_else(|| String::from("...")) }; - let cfg = ImportPathConfig { - prefer_no_std: ctx.config.prefer_no_std, - prefer_prelude: ctx.config.prefer_prelude, - prefer_absolute: ctx.config.prefer_absolute, - }; + let cfg = ctx.config.import_path_config(); let label = expr.gen_source_code(&ctx.scope, &mut label_formatter, cfg).ok()?; diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/snippet.rs b/src/tools/rust-analyzer/crates/ide-completion/src/snippet.rs index 1eb8c574bd1c4..5265aa8515b6a 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/snippet.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/snippet.rs @@ -100,7 +100,6 @@ // } // ---- -use hir::ImportPathConfig; use ide_db::imports::import_assets::LocatedImport; use itertools::Itertools; use syntax::{ast, AstNode, GreenNode, SyntaxNode}; @@ -169,11 +168,7 @@ impl Snippet { } fn import_edits(ctx: &CompletionContext<'_>, requires: &[GreenNode]) -> Option> { - let import_cfg = ImportPathConfig { - prefer_no_std: ctx.config.prefer_no_std, - prefer_prelude: ctx.config.prefer_prelude, - prefer_absolute: ctx.config.prefer_absolute, - }; + let import_cfg = ctx.config.import_path_config(); let resolve = |import: &GreenNode| { let path = ast::Path::cast(SyntaxNode::new_root(import.clone()))?; diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs index f6274cf537605..415f2afeebbea 100644 --- 
a/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs @@ -23,10 +23,10 @@ mod type_pos; mod use_tree; mod visibility; +use base_db::SourceDatabase; use expect_test::Expect; use hir::PrefixKind; use ide_db::{ - base_db::FileLoader, imports::insert_use::{ImportGranularity, InsertUseConfig}, FilePosition, RootDatabase, SnippetCap, }; diff --git a/src/tools/rust-analyzer/crates/ide-db/Cargo.toml b/src/tools/rust-analyzer/crates/ide-db/Cargo.toml index 9a6826a5c4263..8f3cae2fa1703 100644 --- a/src/tools/rust-analyzer/crates/ide-db/Cargo.toml +++ b/src/tools/rust-analyzer/crates/ide-db/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "ide-db" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "Core data structure representing IDE state for rust-analyzer." authors.workspace = true edition.workspace = true @@ -13,7 +14,7 @@ doctest = false [dependencies] cov-mark = "2.0.0-pre.1" -crossbeam-channel = "0.5.5" +crossbeam-channel.workspace = true tracing.workspace = true rayon.workspace = true fst = { version = "0.4.7", default-features = false } diff --git a/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs b/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs index f6a781907db46..e6638dde5d403 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs @@ -2,7 +2,7 @@ use std::collections::VecDeque; -use base_db::SourceDatabaseExt; +use base_db::SourceRootDatabase; use hir::{Crate, DescendPreference, ItemInNs, ModuleDef, Name, Semantics}; use span::FileId; use syntax::{ diff --git a/src/tools/rust-analyzer/crates/ide-db/src/lib.rs b/src/tools/rust-analyzer/crates/ide-db/src/lib.rs index 4c52ba39dec41..3cf29987fa14c 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/lib.rs @@ -1,4 +1,4 @@ -//! This crate defines the core datastructure representing IDE state -- `RootDatabase`. +//! This crate defines the core data structure representing IDE state -- `RootDatabase`. //! //! It is mainly a `HirDatabase` for semantic analysis, plus a `SymbolsDatabase`, for fuzzy search. 
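Many hunks in this section are fallout from two base-db changes: `SourceDatabaseExt` is renamed to `SourceRootDatabase`, and `file_text` now lives on `SourceDatabase` rather than on `FileLoader`, so fully qualified calls such as `FileLoader::file_text(db, file_id)` become `SourceDatabase::file_text(db, file_id)` (the `RootDatabase` impl just below drops its `file_text` delegation for the same reason). A toy sketch of that kind of method move between traits — the trait and type shapes here are invented for illustration, not the real salsa-backed traits:

```rust
use std::sync::Arc;

// After the change, the text accessor sits on the core database trait and the
// loader trait only keeps path resolution. Names below are illustrative only.
trait SourceDatabase {
    fn file_text(&self, file_id: u32) -> Arc<str>;
}

trait FileLoader {
    fn resolve_path(&self, anchor: u32, path: &str) -> Option<u32>;
}

struct RootDatabase {
    files: Vec<Arc<str>>,
}

impl SourceDatabase for RootDatabase {
    fn file_text(&self, file_id: u32) -> Arc<str> {
        self.files[file_id as usize].clone()
    }
}

impl FileLoader for RootDatabase {
    fn resolve_path(&self, _anchor: u32, _path: &str) -> Option<u32> {
        None // path resolution elided
    }
}

fn main() {
    let db = RootDatabase { files: vec![Arc::from("fn main() {}")] };
    // Call sites that used `FileLoader::file_text(&db, 0)` are rewritten to:
    let text = SourceDatabase::file_text(&db, 0);
    assert_eq!(&*text, "fn main() {}");
    assert!(FileLoader::resolve_path(&db, 0, "foo.rs").is_none());
}
```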
@@ -74,7 +74,7 @@ pub type FilePosition = FilePositionWrapper; pub type FileRange = FileRangeWrapper; #[salsa::database( - base_db::SourceDatabaseExtStorage, + base_db::SourceRootDatabaseStorage, base_db::SourceDatabaseStorage, hir::db::ExpandDatabaseStorage, hir::db::DefDatabaseStorage, @@ -125,9 +125,6 @@ impl Upcast for RootDatabase { } impl FileLoader for RootDatabase { - fn file_text(&self, file_id: FileId) -> Arc { - FileLoaderDelegate(self).file_text(file_id) - } fn resolve_path(&self, path: AnchoredPath<'_>) -> Option { FileLoaderDelegate(self).resolve_path(path) } diff --git a/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs b/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs index 84a388a460b82..0afa9163e31d2 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs @@ -7,7 +7,7 @@ use itertools::Itertools; use rustc_hash::FxHashMap; use syntax::{ ast::{self, make, AstNode, HasGenericArgs}, - ted, SyntaxNode, + ted, NodeOrToken, SyntaxNode, }; #[derive(Default)] @@ -328,10 +328,26 @@ impl Ctx<'_> { let qualified = make::path_from_segments(std::iter::once(segment), false); ted::replace(path.syntax(), qualified.clone_for_update().syntax()); } else if let Some(path_ty) = ast::PathType::cast(parent) { - ted::replace( - path_ty.syntax(), - subst.clone_subtree().clone_for_update().syntax(), - ); + let old = path_ty.syntax(); + + if old.parent().is_some() { + ted::replace(old, subst.clone_subtree().clone_for_update().syntax()); + } else { + // Some `path_ty` has no parent, especially ones made for default value + // of type parameters. + // In this case, `ted` cannot replace `path_ty` with `subst` directly. + // So, just replace its children as long as the `subst` is the same type. 
+ let new = subst.clone_subtree().clone_for_update(); + if !matches!(new, ast::Type::PathType(..)) { + return None; + } + let start = path_ty.syntax().first_child().map(NodeOrToken::Node)?; + let end = path_ty.syntax().last_child().map(NodeOrToken::Node)?; + ted::replace_all( + start..=end, + new.syntax().children().map(NodeOrToken::Node).collect::>(), + ); + } } else { ted::replace( path.syntax(), diff --git a/src/tools/rust-analyzer/crates/ide-db/src/prime_caches.rs b/src/tools/rust-analyzer/crates/ide-db/src/prime_caches.rs index 62104fb7dce66..bb121f4a80ae2 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/prime_caches.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/prime_caches.rs @@ -11,7 +11,7 @@ use hir::db::DefDatabase; use crate::{ base_db::{ salsa::{Database, ParallelDatabase, Snapshot}, - Cancelled, CrateId, SourceDatabase, SourceDatabaseExt, + Cancelled, CrateId, SourceDatabase, SourceRootDatabase, }, FxIndexMap, RootDatabase, }; diff --git a/src/tools/rust-analyzer/crates/ide-db/src/search.rs b/src/tools/rust-analyzer/crates/ide-db/src/search.rs index 05b32e2a85487..9e01a6d44083c 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/search.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/search.rs @@ -6,7 +6,7 @@ use std::mem; -use base_db::{salsa::Database, SourceDatabase, SourceDatabaseExt}; +use base_db::{salsa::Database, SourceDatabase, SourceRootDatabase}; use hir::{ sym, AsAssocItem, DefWithBody, DescendPreference, FileRange, HasAttrs, HasSource, HirFileIdExt, InFile, InRealFile, ModuleSource, PathResolution, Semantics, Visibility, @@ -663,9 +663,16 @@ impl<'a> FindUsages<'a> { name_ref: &ast::NameRef, sink: &mut dyn FnMut(EditionedFileId, FileReference) -> bool, ) -> bool { + // See https://github.com/rust-lang/rust-analyzer/pull/15864/files/e0276dc5ddc38c65240edb408522bb869f15afb4#r1389848845 + let ty_eq = |ty: hir::Type| match (ty.as_adt(), self_ty.as_adt()) { + (Some(ty), Some(self_ty)) => ty == self_ty, + (None, None) => ty == *self_ty, + _ => false, + }; + match NameRefClass::classify(self.sema, name_ref) { Some(NameRefClass::Definition(Definition::SelfType(impl_))) - if impl_.self_ty(self.sema.db).as_adt() == self_ty.as_adt() => + if ty_eq(impl_.self_ty(self.sema.db)) => { let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax()); let reference = FileReference { diff --git a/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs b/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs index c70aed4c43bb8..209b1477bace4 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs @@ -29,7 +29,7 @@ use std::{ use base_db::{ salsa::{self, ParallelDatabase}, - SourceDatabaseExt, SourceRootId, Upcast, + SourceRootDatabase, SourceRootId, Upcast, }; use fst::{raw::IndexedValue, Automaton, Streamer}; use hir::{ @@ -100,7 +100,7 @@ impl Query { } #[salsa::query_group(SymbolsDatabaseStorage)] -pub trait SymbolsDatabase: HirDatabase + SourceDatabaseExt + Upcast { +pub trait SymbolsDatabase: HirDatabase + SourceRootDatabase + Upcast { /// The symbol index for a given module. These modules should only be in source roots that /// are inside local_roots. 
fn module_symbols(&self, module: Module) -> Arc; diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml b/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml index edd05009332e5..9c3a279a945dd 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "ide-diagnostics" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "Diagnostics rendering and fixits for rust-analyzer." authors.workspace = true edition.workspace = true diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/await_outside_of_async.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/await_outside_of_async.rs new file mode 100644 index 0000000000000..92b6e748ca5ed --- /dev/null +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/await_outside_of_async.rs @@ -0,0 +1,101 @@ +use crate::{adjusted_display_range, Diagnostic, DiagnosticsContext}; + +// Diagnostic: await-outside-of-async +// +// This diagnostic is triggered if the `await` keyword is used outside of an async function or block +pub(crate) fn await_outside_of_async( + ctx: &DiagnosticsContext<'_>, + d: &hir::AwaitOutsideOfAsync, +) -> Diagnostic { + let display_range = + adjusted_display_range(ctx, d.node, &|node| Some(node.await_token()?.text_range())); + Diagnostic::new( + crate::DiagnosticCode::RustcHardError("E0728"), + format!("`await` is used inside {}, which is not an `async` context", d.location), + display_range, + ) +} + +#[cfg(test)] +mod tests { + use crate::tests::check_diagnostics; + + #[test] + fn await_inside_non_async_fn() { + check_diagnostics( + r#" +async fn foo() {} + +fn bar() { + foo().await; + //^^^^^ error: `await` is used inside non-async function, which is not an `async` context +} +"#, + ); + } + + #[test] + fn await_inside_async_fn() { + check_diagnostics( + r#" +async fn foo() {} + +async fn bar() { + foo().await; +} +"#, + ); + } + + #[test] + fn await_inside_closure() { + check_diagnostics( + r#" +async fn foo() {} + +async fn bar() { + let _a = || { foo().await }; + //^^^^^ error: `await` is used inside non-async closure, which is not an `async` context +} +"#, + ); + } + + #[test] + fn await_inside_async_block() { + check_diagnostics( + r#" +async fn foo() {} + +fn bar() { + let _a = async { foo().await }; +} +"#, + ); + } + + #[test] + fn await_in_complex_context() { + check_diagnostics( + r#" +async fn foo() {} + +fn bar() { + async fn baz() { + let a = foo().await; + } + + let x = || { + let y = async { + baz().await; + let z = || { + baz().await; + //^^^^^ error: `await` is used inside non-async closure, which is not an `async` context + }; + }; + }; +} +"#, + ); + } +} diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs index acff811696103..1f8f805a1e296 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs @@ -42,7 +42,10 @@ mod tests { use crate::{tests::check_diagnostics_with_config, DiagnosticsConfig}; pub(crate) fn check(ra_fixture: &str) { - let config = DiagnosticsConfig::test_sample(); + let config = DiagnosticsConfig { + disabled: std::iter::once("unlinked-file".to_owned()).collect(), + ..DiagnosticsConfig::test_sample() + }; check_diagnostics_with_config(config, ra_fixture) } @@ 
-168,6 +171,27 @@ union FooBar { #[cfg(a)] baz: u32, //^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: a is disabled } +"#, + ); + } + + #[test] + fn modules() { + check( + r#" +//- /main.rs + #[cfg(outline)] mod outline; +//^^^^^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: outline is disabled + + mod outline_inner; +//^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: outline_inner is disabled + + #[cfg(inline)] mod inline {} +//^^^^^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: inline is disabled + +//- /outline_inner.rs +#![cfg(outline_inner)] +//- /outline.rs "#, ); } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs index e59b63f288dde..6a976697c8057 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs @@ -273,11 +273,7 @@ fn f() { #[test] fn include_does_not_break_diagnostics() { - let mut config = DiagnosticsConfig::test_sample(); - config.disabled.insert("inactive-code".to_owned()); - config.disabled.insert("unlinked-file".to_owned()); - check_diagnostics_with_config( - config, + check_diagnostics( r#" //- minicore: include //- /lib.rs crate:lib diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs index 56ec45c898409..7126617cdeed3 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs @@ -472,4 +472,18 @@ fn f( "#, ) } + + #[test] + fn no_type_mismatches_when_arg_count_mismatch() { + check_diagnostics( + r#" +fn foo((): (), (): ()) { + foo(1, 2, 3); + // ^^ error: expected 2 arguments, found 3 + foo(1); + // ^ error: expected 2 arguments, found 1 +} +"#, + ); + } } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs index 97296278c3a3a..f39738f2fb80c 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs @@ -1032,6 +1032,44 @@ fn f() { check_diagnostics_no_bails(&code); } + #[test] + fn min_exhaustive() { + check_diagnostics( + r#" +//- minicore: result +fn test(x: Result) { + match x { + Ok(_y) => {} + } +} +"#, + ); + check_diagnostics( + r#" +//- minicore: result +fn test(ptr: *const Result) { + unsafe { + match *ptr { + //^^^^ error: missing match arm: `Err(!)` not covered + Ok(_x) => {} + } + } +} +"#, + ); + check_diagnostics( + r#" +//- minicore: result +fn test(x: Result) { + match x { + //^ error: missing match arm: `Err(_)` not covered + Ok(_y) => {} + } +} +"#, + ); + } + mod rust_unstable { use super::*; diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs index 30dd26a118d27..af8ac6005d7a7 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs @@ -483,6 +483,30 @@ unsafe fn foo() -> u8 { fn main() { let x = 
format!("foo: {}", foo$0()); +} + "#, + ) + } + + #[test] + fn rustc_deprecated_safe_2024() { + check_diagnostics( + r#" +//- /ed2021.rs crate:ed2021 edition:2021 +#[rustc_deprecated_safe_2024] +unsafe fn safe() -> u8 { + 0 +} +//- /ed2024.rs crate:ed2024 edition:2024 +#[rustc_deprecated_safe_2024] +unsafe fn not_safe() -> u8 { + 0 +} +//- /main.rs crate:main deps:ed2021,ed2024 +fn main() { + ed2021::safe(); + ed2024::not_safe(); + //^^^^^^^^^^^^^^^^^^💡 error: this operation is unsafe and requires an unsafe function or block } "#, ) diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/non_exhaustive_let.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/non_exhaustive_let.rs index 1a4d2877ef25f..ff1eeb0516a9e 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/non_exhaustive_let.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/non_exhaustive_let.rs @@ -41,6 +41,67 @@ fn main() { fn main() { let Some(_) | None = Some(5); } +"#, + ); + } + + #[test] + fn option_nonexhaustive_inside_blocks() { + check_diagnostics( + r#" +//- minicore: option +fn main() { + '_a: { + let None = Some(5); + //^^^^ error: non-exhaustive pattern: `Some(_)` not covered + } +} +"#, + ); + + check_diagnostics( + r#" +//- minicore: future, option +fn main() { + let _ = async { + let None = Some(5); + //^^^^ error: non-exhaustive pattern: `Some(_)` not covered + }; +} +"#, + ); + + check_diagnostics( + r#" +//- minicore: option +fn main() { + unsafe { + let None = Some(5); + //^^^^ error: non-exhaustive pattern: `Some(_)` not covered + } +} +"#, + ); + } + + #[test] + fn min_exhaustive() { + check_diagnostics( + r#" +//- minicore: result +fn test(x: Result) { + let Ok(_y) = x; +} +"#, + ); + + check_diagnostics( + r#" +//- minicore: result +fn test(x: Result) { + let Ok(_y) = x; + //^^^^^^ error: non-exhaustive pattern: `Err(_)` not covered +} "#, ); } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs index 4e52d28051bd1..6f5c68d4b5c86 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs @@ -2,8 +2,12 @@ use either::Either; use hir::{db::ExpandDatabase, ClosureStyle, HirDisplay, HirFileIdExt, InFile, Type}; use ide_db::{famous_defs::FamousDefs, source_change::SourceChange}; use syntax::{ - ast::{self, BlockExpr, ExprStmt}, - AstNode, AstPtr, + ast::{ + self, + edit::{AstNodeEdit, IndentLevel}, + BlockExpr, Expr, ExprStmt, + }, + AstNode, AstPtr, TextSize, }; use text_edit::TextEdit; @@ -119,6 +123,38 @@ fn add_missing_ok_or_some( return None; } + if d.actual.is_unit() { + if let Expr::BlockExpr(block) = &expr { + if block.tail_expr().is_none() { + let mut builder = TextEdit::builder(); + let block_indent = block.indent_level(); + + if block.statements().count() == 0 { + // Empty block + let indent = block_indent + 1; + builder.insert( + block.syntax().text_range().start() + TextSize::from(1), + format!("\n{indent}{variant_name}(())\n{block_indent}"), + ); + } else { + let indent = IndentLevel::from(1); + builder.insert( + block.syntax().text_range().end() - TextSize::from(1), + format!("{indent}{variant_name}(())\n{block_indent}"), + ); + } + + let source_change = SourceChange::from_text_edit( + expr_ptr.file_id.original_file(ctx.sema.db), + builder.finish(), + ); + let name = format!("Insert {variant_name}(()) 
as the tail of this block"); + acc.push(fix("insert_wrapped_unit", &name, source_change, expr_range)); + } + return Some(()); + } + } + let mut builder = TextEdit::builder(); builder.insert(expr.syntax().text_range().start(), format!("{variant_name}(")); builder.insert(expr.syntax().text_range().end(), ")".to_owned()); @@ -533,6 +569,36 @@ fn div(x: i32, y: i32) -> MyResult { ); } + #[test] + fn test_wrapped_unit_as_block_tail_expr() { + check_fix( + r#" +//- minicore: result +fn foo() -> Result<(), ()> { + foo(); +}$0 + "#, + r#" +fn foo() -> Result<(), ()> { + foo(); + Ok(()) +} + "#, + ); + + check_fix( + r#" +//- minicore: result +fn foo() -> Result<(), ()> {}$0 + "#, + r#" +fn foo() -> Result<(), ()> { + Ok(()) +} + "#, + ); + } + #[test] fn test_in_const_and_static() { check_fix( diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs index 1b71a3a3e69b3..a1573bab8ae9b 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs @@ -4,7 +4,7 @@ use std::iter; use hir::{db::DefDatabase, DefMap, InFile, ModuleSource}; use ide_db::{ - base_db::{FileLoader, SourceDatabaseExt}, + base_db::{FileLoader, SourceDatabase, SourceRootDatabase}, source_change::SourceChange, FileId, FileRange, LineIndexDatabase, }; @@ -47,7 +47,7 @@ pub(crate) fn unlinked_file( // // Only show this diagnostic on the first three characters of // the file, to avoid overwhelming the user during startup. - range = FileLoader::file_text(ctx.sema.db, file_id) + range = SourceDatabase::file_text(ctx.sema.db, file_id) .char_indices() .take(3) .last() @@ -499,6 +499,18 @@ $0 mod bar { mod foo; } +"#, + ); + } + + #[test] + fn include_macro_works() { + check_diagnostics( + r#" +//- minicore: include +//- /main.rs +include!("bar/foo/mod.rs"); +//- /bar/foo/mod.rs "#, ); } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs index 263ab74755999..a61c5f0cd4db2 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs @@ -24,6 +24,7 @@ //! don't yet have a great pattern for how to do them properly. 
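The `type_mismatch` hunk above extends the existing wrap-in-`Ok`/`Some` fix: when the block's actual type is `()` and it has no tail expression, the fix inserts `{variant}(())` as a new tail instead of wrapping the whole block, placing the text right after the opening brace for an empty block and just before the closing brace otherwise. A plain-string sketch of that offset arithmetic, with the `TextEdit` modeled as direct string insertion and the inner indent approximated as four spaces (assumptions of the sketch, not the real code):

```rust
/// Insert `{variant}(())` as the tail expression of `block_src`, the source
/// text of a block including its braces. `block_indent` is the indentation of
/// the line containing the opening brace, in spaces.
fn insert_wrapped_unit(block_src: &str, variant: &str, block_indent: usize) -> String {
    let outer = " ".repeat(block_indent);
    let inner = " ".repeat(block_indent + 4);
    let mut out = block_src.to_owned();
    let body = &block_src[1..block_src.len() - 1];
    if body.trim().is_empty() {
        // Empty block: insert right after the `{` (offset start + 1 in the diff).
        out.insert_str(1, &format!("\n{inner}{variant}(())\n{outer}"));
    } else {
        // Non-empty block: insert just before the `}` (offset end - 1 in the diff).
        out.insert_str(out.len() - 1, &format!("{inner}{variant}(())\n{outer}"));
    }
    out
}

fn main() {
    assert_eq!(insert_wrapped_unit("{}", "Ok", 0), "{\n    Ok(())\n}");
    assert_eq!(
        insert_wrapped_unit("{\n    foo();\n}", "Ok", 0),
        "{\n    foo();\n    Ok(())\n}"
    );
}
```

This matches the two `check_fix` tests added in the same hunk: an empty function body and a body whose last statement ends in `;`.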
mod handlers { + pub(crate) mod await_outside_of_async; pub(crate) mod break_outside_of_loop; pub(crate) mod expected_function; pub(crate) mod inactive_code; @@ -96,6 +97,7 @@ use syntax::{ #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] pub enum DiagnosticCode { RustcHardError(&'static str), + SyntaxError, RustcLint(&'static str), Clippy(&'static str), Ra(&'static str, Severity), @@ -107,6 +109,9 @@ impl DiagnosticCode { DiagnosticCode::RustcHardError(e) => { format!("https://doc.rust-lang.org/stable/error_codes/{e}.html") } + DiagnosticCode::SyntaxError => { + String::from("https://doc.rust-lang.org/stable/reference/") + } DiagnosticCode::RustcLint(e) => { format!("https://doc.rust-lang.org/rustc/?search={e}") } @@ -125,6 +130,7 @@ impl DiagnosticCode { | DiagnosticCode::RustcLint(r) | DiagnosticCode::Clippy(r) | DiagnosticCode::Ra(r, _) => r, + DiagnosticCode::SyntaxError => "syntax-error", } } } @@ -154,7 +160,7 @@ impl Diagnostic { message, range: range.into(), severity: match code { - DiagnosticCode::RustcHardError(_) => Severity::Error, + DiagnosticCode::RustcHardError(_) | DiagnosticCode::SyntaxError => Severity::Error, // FIXME: Rustc lints are not always warning, but the ones that are currently implemented are all warnings. DiagnosticCode::RustcLint(_) => Severity::Warning, // FIXME: We can make this configurable, and if the user uses `cargo clippy` on flycheck, we can @@ -297,31 +303,54 @@ impl DiagnosticsContext<'_> { } } -/// Request diagnostics for the given [`FileId`]. The produced diagnostics may point to other files +/// Request parser level diagnostics for the given [`FileId`]. +pub fn syntax_diagnostics( + db: &RootDatabase, + config: &DiagnosticsConfig, + file_id: FileId, +) -> Vec { + let _p = tracing::info_span!("syntax_diagnostics").entered(); + + if config.disabled.contains("syntax-error") { + return Vec::new(); + } + + let sema = Semantics::new(db); + let file_id = sema + .attach_first_edition(file_id) + .unwrap_or_else(|| EditionedFileId::current_edition(file_id)); + + // [#3434] Only take first 128 errors to prevent slowing down editor/ide, the number 128 is chosen arbitrarily. + db.parse_errors(file_id) + .as_deref() + .into_iter() + .flatten() + .take(128) + .map(|err| { + Diagnostic::new( + DiagnosticCode::SyntaxError, + format!("Syntax Error: {err}"), + FileRange { file_id: file_id.into(), range: err.range() }, + ) + }) + .collect() +} + +/// Request semantic diagnostics for the given [`FileId`]. The produced diagnostics may point to other files /// due to macros. -pub fn diagnostics( +pub fn semantic_diagnostics( db: &RootDatabase, config: &DiagnosticsConfig, resolve: &AssistResolveStrategy, file_id: FileId, ) -> Vec { - let _p = tracing::info_span!("diagnostics").entered(); + let _p = tracing::info_span!("semantic_diagnostics").entered(); let sema = Semantics::new(db); let file_id = sema .attach_first_edition(file_id) .unwrap_or_else(|| EditionedFileId::current_edition(file_id)); let mut res = Vec::new(); - // [#34344] Only take first 128 errors to prevent slowing down editor/ide, the number 128 is chosen arbitrarily. - res.extend(db.parse_errors(file_id).as_deref().into_iter().flatten().take(128).map(|err| { - Diagnostic::new( - DiagnosticCode::RustcHardError("syntax-error"), - format!("Syntax Error: {err}"), - FileRange { file_id: file_id.into(), range: err.range() }, - ) - })); - let parse_errors = res.len(); - let parse = sema.parse(file_id); // FIXME: This iterates the entire file which is a rather expensive operation. 
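The ide-diagnostics hunks here split the old all-in-one `diagnostics` entry point into `syntax_diagnostics` (parser errors only, capped at 128) and `semantic_diagnostics`, with `full_diagnostics` concatenating the two; syntax errors also move from `RustcHardError("syntax-error")` to a dedicated `DiagnosticCode::SyntaxError` variant. A stripped-down sketch of that API shape — the database, config, and error code values below are placeholders, not the real types:

```rust
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum DiagnosticCode {
    SyntaxError,
    RustcHardError(&'static str),
}

#[derive(Debug)]
struct Diagnostic {
    code: DiagnosticCode,
    message: String,
}

// Placeholder for the analysis database; it only exposes what the sketch needs.
struct Db {
    parse_errors: Vec<String>,
    semantic_errors: Vec<String>,
}

/// Parser-level diagnostics only; cheap and independent of name resolution.
fn syntax_diagnostics(db: &Db) -> Vec<Diagnostic> {
    db.parse_errors
        .iter()
        .take(128) // same arbitrary cap as the real code
        .map(|e| Diagnostic {
            code: DiagnosticCode::SyntaxError,
            message: format!("Syntax Error: {e}"),
        })
        .collect()
}

/// Semantic diagnostics; in the real code this walks HIR and is much slower.
fn semantic_diagnostics(db: &Db) -> Vec<Diagnostic> {
    db.semantic_errors
        .iter()
        .map(|e| Diagnostic { code: DiagnosticCode::RustcHardError("E0000"), message: e.clone() })
        .collect()
}

/// Both kinds, in the order the editor wants to show them.
fn full_diagnostics(db: &Db) -> Vec<Diagnostic> {
    let mut res = syntax_diagnostics(db);
    res.extend(semantic_diagnostics(db));
    res
}

fn main() {
    let db = Db {
        parse_errors: vec!["expected `;`".into()],
        semantic_errors: vec!["cannot find value `x`".into()],
    };
    for d in full_diagnostics(&db) {
        println!("{:?}: {}", d.code, d.message);
    }
}
```

Splitting the two lets a client surface syntax errors immediately and run the expensive semantic pass separately; the `Analysis::syntax_diagnostics`/`semantic_diagnostics`/`full_diagnostics` wrappers further down in `ide/src/lib.rs` expose exactly that.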
@@ -341,13 +370,17 @@ pub fn diagnostics( match module { // A bunch of parse errors in a file indicate some bigger structural parse changes in the // file, so we skip semantic diagnostics so we can show these faster. - Some(m) if parse_errors < 16 => m.diagnostics(db, &mut diags, config.style_lints), - Some(_) => (), + Some(m) => { + if !db.parse_errors(file_id).as_deref().is_some_and(|es| es.len() >= 16) { + m.diagnostics(db, &mut diags, config.style_lints); + } + } None => handlers::unlinked_file::unlinked_file(&ctx, &mut res, file_id.file_id()), } for diag in diags { let d = match diag { + AnyDiagnostic::AwaitOutsideOfAsync(d) => handlers::await_outside_of_async::await_outside_of_async(&ctx, &d), AnyDiagnostic::ExpectedFunction(d) => handlers::expected_function::expected_function(&ctx, &d), AnyDiagnostic::InactiveCode(d) => match handlers::inactive_code::inactive_code(&ctx, &d) { Some(it) => it, @@ -363,7 +396,7 @@ pub fn diagnostics( res.extend(d.errors.iter().take(16).map(|err| { { Diagnostic::new( - DiagnosticCode::RustcHardError("syntax-error"), + DiagnosticCode::SyntaxError, format!("Syntax Error in Expansion: {err}"), ctx.resolve_precise_location(&d.node.clone(), d.precise_location), ) @@ -464,6 +497,19 @@ pub fn diagnostics( res } +/// Request both syntax and semantic diagnostics for the given [`FileId`]. +pub fn full_diagnostics( + db: &RootDatabase, + config: &DiagnosticsConfig, + resolve: &AssistResolveStrategy, + file_id: FileId, +) -> Vec { + let mut res = syntax_diagnostics(db, config, file_id); + let sema = semantic_diagnostics(db, config, resolve, file_id); + res.extend(sema); + res +} + // `__RA_EVERY_LINT` is a fake lint group to allow every lint in proc macros static RUSTC_LINT_GROUPS_DICT: Lazy>> = diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs index e56fca1e50082..ec74640a54d9a 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs @@ -1,7 +1,7 @@ #![allow(clippy::print_stderr)] use ide_db::{ - assists::AssistResolveStrategy, base_db::SourceDatabaseExt, LineIndexDatabase, RootDatabase, + assists::AssistResolveStrategy, base_db::SourceDatabase, LineIndexDatabase, RootDatabase, }; use itertools::Itertools; use stdx::trim_indent; @@ -59,10 +59,14 @@ fn check_nth_fix_with_config( let after = trim_indent(ra_fixture_after); let (db, file_position) = RootDatabase::with_position(ra_fixture_before); - let diagnostic = - super::diagnostics(&db, &config, &AssistResolveStrategy::All, file_position.file_id.into()) - .pop() - .expect("no diagnostics"); + let diagnostic = super::full_diagnostics( + &db, + &config, + &AssistResolveStrategy::All, + file_position.file_id.into(), + ) + .pop() + .expect("no diagnostics"); let fix = &diagnostic .fixes .unwrap_or_else(|| panic!("{:?} diagnostic misses fixes", diagnostic.code))[nth]; @@ -102,37 +106,39 @@ pub(crate) fn check_has_fix(ra_fixture_before: &str, ra_fixture_after: &str) { let (db, file_position) = RootDatabase::with_position(ra_fixture_before); let mut conf = DiagnosticsConfig::test_sample(); conf.expr_fill_default = ExprFillDefaultMode::Default; - let fix = - super::diagnostics(&db, &conf, &AssistResolveStrategy::All, file_position.file_id.into()) - .into_iter() - .find(|d| { - d.fixes - .as_ref() - .and_then(|fixes| { - fixes.iter().find(|fix| { - if !fix.target.contains_inclusive(file_position.offset) { - return false; - } - let actual = { - let 
source_change = fix.source_change.as_ref().unwrap(); - let file_id = - *source_change.source_file_edits.keys().next().unwrap(); - let mut actual = db.file_text(file_id).to_string(); + let fix = super::full_diagnostics( + &db, + &conf, + &AssistResolveStrategy::All, + file_position.file_id.into(), + ) + .into_iter() + .find(|d| { + d.fixes + .as_ref() + .and_then(|fixes| { + fixes.iter().find(|fix| { + if !fix.target.contains_inclusive(file_position.offset) { + return false; + } + let actual = { + let source_change = fix.source_change.as_ref().unwrap(); + let file_id = *source_change.source_file_edits.keys().next().unwrap(); + let mut actual = db.file_text(file_id).to_string(); - for (edit, snippet_edit) in source_change.source_file_edits.values() - { - edit.apply(&mut actual); - if let Some(snippet_edit) = snippet_edit { - snippet_edit.apply(&mut actual); - } - } - actual - }; - after == actual - }) - }) - .is_some() - }); + for (edit, snippet_edit) in source_change.source_file_edits.values() { + edit.apply(&mut actual); + if let Some(snippet_edit) = snippet_edit { + snippet_edit.apply(&mut actual); + } + } + actual + }; + after == actual + }) + }) + .is_some() + }); assert!(fix.is_some(), "no diagnostic with desired fix"); } @@ -144,38 +150,40 @@ pub(crate) fn check_has_single_fix(ra_fixture_before: &str, ra_fixture_after: &s let mut conf = DiagnosticsConfig::test_sample(); conf.expr_fill_default = ExprFillDefaultMode::Default; let mut n_fixes = 0; - let fix = - super::diagnostics(&db, &conf, &AssistResolveStrategy::All, file_position.file_id.into()) - .into_iter() - .find(|d| { - d.fixes - .as_ref() - .and_then(|fixes| { - n_fixes += fixes.len(); - fixes.iter().find(|fix| { - if !fix.target.contains_inclusive(file_position.offset) { - return false; - } - let actual = { - let source_change = fix.source_change.as_ref().unwrap(); - let file_id = - *source_change.source_file_edits.keys().next().unwrap(); - let mut actual = db.file_text(file_id).to_string(); + let fix = super::full_diagnostics( + &db, + &conf, + &AssistResolveStrategy::All, + file_position.file_id.into(), + ) + .into_iter() + .find(|d| { + d.fixes + .as_ref() + .and_then(|fixes| { + n_fixes += fixes.len(); + fixes.iter().find(|fix| { + if !fix.target.contains_inclusive(file_position.offset) { + return false; + } + let actual = { + let source_change = fix.source_change.as_ref().unwrap(); + let file_id = *source_change.source_file_edits.keys().next().unwrap(); + let mut actual = db.file_text(file_id).to_string(); - for (edit, snippet_edit) in source_change.source_file_edits.values() - { - edit.apply(&mut actual); - if let Some(snippet_edit) = snippet_edit { - snippet_edit.apply(&mut actual); - } - } - actual - }; - after == actual - }) - }) - .is_some() - }); + for (edit, snippet_edit) in source_change.source_file_edits.values() { + edit.apply(&mut actual); + if let Some(snippet_edit) = snippet_edit { + snippet_edit.apply(&mut actual); + } + } + actual + }; + after == actual + }) + }) + .is_some() + }); assert!(fix.is_some(), "no diagnostic with desired fix"); assert!(n_fixes == 1, "Too many fixes suggested"); } @@ -183,7 +191,7 @@ pub(crate) fn check_has_single_fix(ra_fixture_before: &str, ra_fixture_after: &s /// Checks that there's a diagnostic *without* fix at `$0`. 
pub(crate) fn check_no_fix(ra_fixture: &str) { let (db, file_position) = RootDatabase::with_position(ra_fixture); - let diagnostic = super::diagnostics( + let diagnostic = super::full_diagnostics( &db, &DiagnosticsConfig::test_sample(), &AssistResolveStrategy::All, @@ -215,7 +223,7 @@ pub(crate) fn check_diagnostics_with_config(config: DiagnosticsConfig, ra_fixtur .iter() .copied() .flat_map(|file_id| { - super::diagnostics(&db, &config, &AssistResolveStrategy::All, file_id.into()) + super::full_diagnostics(&db, &config, &AssistResolveStrategy::All, file_id.into()) .into_iter() .map(|d| { let mut annotation = String::new(); @@ -243,6 +251,12 @@ pub(crate) fn check_diagnostics_with_config(config: DiagnosticsConfig, ra_fixtur let mut actual = annotations.remove(&file_id).unwrap_or_default(); let expected = extract_annotations(&db.file_text(file_id)); actual.sort_by_key(|(range, _)| range.start()); + // FIXME: We should panic on duplicates instead, but includes currently cause us to report + // diagnostics twice for the calling module when both files are queried. + actual.dedup(); + // actual.iter().duplicates().for_each(|(range, msg)| { + // panic!("duplicate diagnostic at {:?}: {msg:?}", line_index.line_col(range.start())) + // }); if expected.is_empty() { // makes minicore smoke test debuggable for (e, _) in &actual { @@ -277,10 +291,10 @@ fn test_disabled_diagnostics() { let (db, file_id) = RootDatabase::with_single_file(r#"mod foo;"#); let file_id = file_id.into(); - let diagnostics = super::diagnostics(&db, &config, &AssistResolveStrategy::All, file_id); + let diagnostics = super::full_diagnostics(&db, &config, &AssistResolveStrategy::All, file_id); assert!(diagnostics.is_empty()); - let diagnostics = super::diagnostics( + let diagnostics = super::full_diagnostics( &db, &DiagnosticsConfig::test_sample(), &AssistResolveStrategy::All, diff --git a/src/tools/rust-analyzer/crates/ide-ssr/Cargo.toml b/src/tools/rust-analyzer/crates/ide-ssr/Cargo.toml index 57b1f9465ad3e..fad62fa3b96b8 100644 --- a/src/tools/rust-analyzer/crates/ide-ssr/Cargo.toml +++ b/src/tools/rust-analyzer/crates/ide-ssr/Cargo.toml @@ -1,8 +1,8 @@ [package] name = "ide-ssr" version = "0.0.0" -description = "Structural search and replace of Rust code" -repository = "https://github.com/rust-lang/rust-analyzer" +repository.workspace = true +description = "Structural search and replace of Rust code for rust-analyzer." authors.workspace = true edition.workspace = true diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs index e62ef60433649..54236ea8bc4e9 100644 --- a/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs @@ -84,7 +84,7 @@ pub use crate::{errors::SsrError, from_comment::ssr_from_comment, matching::Matc use crate::{errors::bail, matching::MatchFailureReason}; use hir::{FileRange, Semantics}; -use ide_db::{EditionedFileId, FileId, FxHashMap, RootDatabase}; +use ide_db::{base_db::SourceDatabase, EditionedFileId, FileId, FxHashMap, RootDatabase}; use resolving::ResolvedRule; use syntax::{ast, AstNode, SyntaxNode, TextRange}; use text_edit::TextEdit; @@ -141,7 +141,7 @@ impl<'db> MatchFinder<'db> { /// Constructs an instance using the start of the first file in `db` as the lookup context. 
pub fn at_first_file(db: &'db ide_db::RootDatabase) -> Result, SsrError> { - use ide_db::base_db::SourceDatabaseExt; + use ide_db::base_db::SourceRootDatabase; use ide_db::symbol_index::SymbolsDatabase; if let Some(first_file_id) = db.local_roots().iter().next().and_then(|root| db.source_root(*root).iter().next()) @@ -172,7 +172,6 @@ impl<'db> MatchFinder<'db> { /// Finds matches for all added rules and returns edits for all found matches. pub fn edits(&self) -> FxHashMap { - use ide_db::base_db::SourceDatabaseExt; let mut matches_by_file = FxHashMap::default(); for m in self.matches().matches { matches_by_file @@ -228,7 +227,6 @@ impl<'db> MatchFinder<'db> { file_id: EditionedFileId, snippet: &str, ) -> Vec { - use ide_db::base_db::SourceDatabaseExt; let file = self.sema.parse(file_id); let mut res = Vec::new(); let file_text = self.sema.db.file_text(file_id.into()); diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs index 832386685d72c..241de10b44dc3 100644 --- a/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs +++ b/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs @@ -156,7 +156,7 @@ impl MatchFinder<'_> { fn search_files_do(&self, mut callback: impl FnMut(FileId)) { if self.restrict_ranges.is_empty() { // Unrestricted search. - use ide_db::base_db::SourceDatabaseExt; + use ide_db::base_db::SourceRootDatabase; use ide_db::symbol_index::SymbolsDatabase; for &root in self.sema.db.local_roots().iter() { let sr = self.sema.db.source_root(root); diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/tests.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/tests.rs index 4477a268b2912..42930889d757a 100644 --- a/src/tools/rust-analyzer/crates/ide-ssr/src/tests.rs +++ b/src/tools/rust-analyzer/crates/ide-ssr/src/tests.rs @@ -1,7 +1,7 @@ use expect_test::{expect, Expect}; use hir::{FilePosition, FileRange}; use ide_db::{ - base_db::{salsa::Durability, SourceDatabaseExt}, + base_db::{salsa::Durability, SourceDatabase}, EditionedFileId, FxHashSet, }; use test_utils::RangeOrOffset; diff --git a/src/tools/rust-analyzer/crates/ide/Cargo.toml b/src/tools/rust-analyzer/crates/ide/Cargo.toml index 9d8400ba3adc5..d976d604f1aa9 100644 --- a/src/tools/rust-analyzer/crates/ide/Cargo.toml +++ b/src/tools/rust-analyzer/crates/ide/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "ide" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "IDE-centric APIs for rust-analyzer." 
authors.workspace = true edition.workspace = true diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs index d0701a45b1019..8a8bc07945f1a 100644 --- a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs +++ b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs @@ -10,7 +10,7 @@ use hir::{ Semantics, }; use ide_db::{ - base_db::{AnchoredPath, FileLoader}, + base_db::{AnchoredPath, FileLoader, SourceDatabase}, defs::{Definition, IdentClass}, helpers::pick_best_token, RootDatabase, SymbolKind, diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs index 3257305184e91..516e32ef91725 100644 --- a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs +++ b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs @@ -1,5 +1,5 @@ use expect_test::{expect, Expect}; -use ide_db::{base_db::FileLoader, FileRange}; +use ide_db::{base_db::SourceDatabase, FileRange}; use syntax::TextRange; use crate::{ @@ -8579,3 +8579,26 @@ fn main(a$0: T) {} "#]], ); } + +#[test] +fn hover_fn_with_impl_trait_arg() { + check( + r#" +trait Foo {} +impl Foo for bool {} +fn bar(_: impl Foo) {} +fn test() { + let f = bar::<3>; + f$0(true); +} +"#, + expect![[r#" + *f* + + ```rust + // size = 0, align = 1 + let f: fn bar<3>(bool) + ``` + "#]], + ); +} diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs index 5775abaeb18c6..7310852b8ed57 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs @@ -1144,6 +1144,21 @@ async fn main() { //^^ impl Future return 8_i32; }; +}"#, + ); + } + + #[test] + fn works_in_included_file() { + check_types( + r#" +//- minicore: include +//- /main.rs +include!("foo.rs"); +//- /foo.rs +fn main() { + let _x = 42; + //^^ i32 }"#, ); } diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closing_brace.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closing_brace.rs index ea96c9504e55c..8f2777f3928d1 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closing_brace.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closing_brace.rs @@ -7,7 +7,7 @@ use hir::{HirDisplay, Semantics}; use ide_db::{FileRange, RootDatabase}; use span::EditionedFileId; use syntax::{ - ast::{self, AstNode, HasName}, + ast::{self, AstNode, HasLoopBody, HasName}, match_ast, SyntaxKind, SyntaxNode, T, }; @@ -57,9 +57,24 @@ pub(super) fn hints( // the actual number of lines in this case should be the line count of the parent BlockExpr, // which the `min_lines` config cares about node = node.parent()?; - let block = label.syntax().parent().and_then(ast::BlockExpr::cast)?; - closing_token = block.stmt_list()?.r_curly_token()?; + + let parent = label.syntax().parent()?; + let block; + match_ast! 
{ + match parent { + ast::BlockExpr(block_expr) => { + block = block_expr.stmt_list()?; + }, + ast::AnyHasLoopBody(loop_expr) => { + block = loop_expr.loop_body()?.stmt_list()?; + }, + _ => return None, + } + } + closing_token = block.r_curly_token()?; + let lifetime = label.lifetime().map_or_else(String::new, |it| it.to_string()); + (lifetime, Some(label.syntax().text_range())) } else if let Some(block) = ast::BlockExpr::cast(node.clone()) { closing_token = block.stmt_list()?.r_curly_token()?; @@ -219,6 +234,19 @@ fn test() { //^ 'do_a } //^ 'end + + 'a: loop { + 'b: for i in 0..5 { + 'c: while true { + + + } + //^ 'c + } + //^ 'b + } + //^ 'a + } //^ fn test "#, diff --git a/src/tools/rust-analyzer/crates/ide/src/interpret_function.rs b/src/tools/rust-analyzer/crates/ide/src/interpret_function.rs index aeb3c8c1ee5ac..ff1317d135c9d 100644 --- a/src/tools/rust-analyzer/crates/ide/src/interpret_function.rs +++ b/src/tools/rust-analyzer/crates/ide/src/interpret_function.rs @@ -1,5 +1,5 @@ use hir::Semantics; -use ide_db::{base_db::SourceDatabaseExt, FilePosition, LineIndexDatabase, RootDatabase}; +use ide_db::{base_db::SourceRootDatabase, FilePosition, LineIndexDatabase, RootDatabase}; use std::{fmt::Write, time::Instant}; use syntax::{algo::ancestors_at_offset, ast, AstNode, TextRange}; diff --git a/src/tools/rust-analyzer/crates/ide/src/lib.rs b/src/tools/rust-analyzer/crates/ide/src/lib.rs index 8cb81a9cc452c..eff4bc3d3766d 100644 --- a/src/tools/rust-analyzer/crates/ide/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide/src/lib.rs @@ -65,7 +65,7 @@ use hir::{sym, ChangeWithProcMacros}; use ide_db::{ base_db::{ salsa::{self, ParallelDatabase}, - CrateOrigin, Env, FileLoader, FileSet, SourceDatabase, SourceDatabaseExt, VfsPath, + CrateOrigin, Env, FileLoader, FileSet, SourceDatabase, SourceRootDatabase, VfsPath, }, prime_caches, symbol_index, FxHashMap, FxIndexSet, LineIndexDatabase, }; @@ -286,7 +286,7 @@ impl Analysis { /// Gets the text of the source file. pub fn file_text(&self, file_id: FileId) -> Cancellable> { - self.with_db(|db| SourceDatabaseExt::file_text(db, file_id)) + self.with_db(|db| SourceDatabase::file_text(db, file_id)) } /// Gets the syntax tree of the file. @@ -672,14 +672,33 @@ impl Analysis { .unwrap_or_default()) } - /// Computes the set of diagnostics for the given file. - pub fn diagnostics( + /// Computes the set of parser level diagnostics for the given file. + pub fn syntax_diagnostics( + &self, + config: &DiagnosticsConfig, + file_id: FileId, + ) -> Cancellable> { + self.with_db(|db| ide_diagnostics::syntax_diagnostics(db, config, file_id)) + } + + /// Computes the set of semantic diagnostics for the given file. + pub fn semantic_diagnostics( + &self, + config: &DiagnosticsConfig, + resolve: AssistResolveStrategy, + file_id: FileId, + ) -> Cancellable> { + self.with_db(|db| ide_diagnostics::semantic_diagnostics(db, config, &resolve, file_id)) + } + + /// Computes the set of both syntax and semantic diagnostics for the given file. 
+ pub fn full_diagnostics( &self, config: &DiagnosticsConfig, resolve: AssistResolveStrategy, file_id: FileId, ) -> Cancellable> { - self.with_db(|db| ide_diagnostics::diagnostics(db, config, &resolve, file_id)) + self.with_db(|db| ide_diagnostics::full_diagnostics(db, config, &resolve, file_id)) } /// Convenience function to return assists + quick fixes for diagnostics @@ -697,7 +716,7 @@ impl Analysis { self.with_db(|db| { let diagnostic_assists = if diagnostics_config.enabled && include_fixes { - ide_diagnostics::diagnostics(db, diagnostics_config, &resolve, frange.file_id) + ide_diagnostics::full_diagnostics(db, diagnostics_config, &resolve, frange.file_id) .into_iter() .flat_map(|it| it.fixes.unwrap_or_default()) .filter(|it| it.target.intersect(frange.range).is_some()) diff --git a/src/tools/rust-analyzer/crates/ide/src/references.rs b/src/tools/rust-analyzer/crates/ide/src/references.rs index 46c2d47ee827f..64b82b31c7417 100644 --- a/src/tools/rust-analyzer/crates/ide/src/references.rs +++ b/src/tools/rust-analyzer/crates/ide/src/references.rs @@ -60,7 +60,6 @@ pub(crate) fn find_all_refs( move |def: Definition| { let mut usages = def.usages(sema).set_scope(search_scope.as_ref()).include_self_refs().all(); - if literal_search { retain_adt_literal_usages(&mut usages, def, sema); } @@ -817,6 +816,30 @@ impl S { ) } + #[test] + fn test_self_inside_not_adt_impl() { + check( + r#" +pub trait TestTrait { + type Assoc; + fn stuff() -> Self; +} +impl TestTrait for () { + type Assoc$0 = u8; + fn stuff() -> Self { + let me: Self = (); + me + } +} +"#, + expect![[r#" + Assoc TypeAlias FileId(0) 92..108 97..102 + + FileId(0) 31..36 + "#]], + ) + } + #[test] fn test_find_all_refs_two_modules() { check( diff --git a/src/tools/rust-analyzer/crates/ide/src/static_index.rs b/src/tools/rust-analyzer/crates/ide/src/static_index.rs index cd9b7ae2f6209..eaccee08e8c01 100644 --- a/src/tools/rust-analyzer/crates/ide/src/static_index.rs +++ b/src/tools/rust-analyzer/crates/ide/src/static_index.rs @@ -3,9 +3,12 @@ use hir::{db::HirDatabase, Crate, HirFileIdExt, Module, Semantics}; use ide_db::{ - base_db::SourceDatabaseExt, defs::Definition, documentation::Documentation, - famous_defs::FamousDefs, helpers::get_definition, FileId, FileRange, FxHashMap, FxHashSet, - RootDatabase, + base_db::{SourceRootDatabase, VfsPath}, + defs::Definition, + documentation::Documentation, + famous_defs::FamousDefs, + helpers::get_definition, + FileId, FileRange, FxHashMap, FxHashSet, RootDatabase, }; use syntax::{AstNode, SyntaxKind::*, SyntaxNode, TextRange, T}; @@ -227,13 +230,16 @@ impl StaticIndex<'_> { self.files.push(result); } - pub fn compute(analysis: &Analysis) -> StaticIndex<'_> { + pub fn compute<'a>(analysis: &'a Analysis, workspace_root: &VfsPath) -> StaticIndex<'a> { let db = &*analysis.db; let work = all_modules(db).into_iter().filter(|module| { let file_id = module.definition_source_file_id(db).original_file(db); let source_root = db.file_source_root(file_id.into()); let source_root = db.source_root(source_root); - !source_root.is_library + let is_vendored = source_root + .path_for_file(&file_id.into()) + .is_some_and(|module_path| module_path.starts_with(workspace_root)); + !source_root.is_library || is_vendored }); let mut this = StaticIndex { files: vec![], @@ -259,12 +265,13 @@ impl StaticIndex<'_> { #[cfg(test)] mod tests { use crate::{fixture, StaticIndex}; - use ide_db::{FileRange, FxHashSet}; + use ide_db::{base_db::VfsPath, FileRange, FxHashSet}; use syntax::TextSize; fn check_all_ranges(ra_fixture: 
&str) { let (analysis, ranges) = fixture::annotations_without_marker(ra_fixture); - let s = StaticIndex::compute(&analysis); + let s = + StaticIndex::compute(&analysis, &VfsPath::new_virtual_path("/workspace".to_owned())); let mut range_set: FxHashSet<_> = ranges.iter().map(|it| it.0).collect(); for f in s.files { for (range, _) in f.tokens { @@ -283,7 +290,8 @@ mod tests { #[track_caller] fn check_definitions(ra_fixture: &str) { let (analysis, ranges) = fixture::annotations_without_marker(ra_fixture); - let s = StaticIndex::compute(&analysis); + let s = + StaticIndex::compute(&analysis, &VfsPath::new_virtual_path("/workspace".to_owned())); let mut range_set: FxHashSet<_> = ranges.iter().map(|it| it.0).collect(); for (_, t) in s.tokens.iter() { if let Some(t) = t.definition { @@ -326,7 +334,7 @@ enum E { X(Foo) } fn multi_crate() { check_definitions( r#" -//- /main.rs crate:main deps:foo +//- /workspace/main.rs crate:main deps:foo use foo::func; @@ -335,7 +343,7 @@ fn main() { //^^^^ func(); } -//- /foo/lib.rs crate:foo +//- /workspace/foo/lib.rs crate:foo pub func() { @@ -344,6 +352,24 @@ pub func() { ); } + #[test] + fn vendored_crate() { + check_all_ranges( + r#" +//- /workspace/main.rs crate:main deps:external,vendored +struct Main(i32); + //^^^^ ^^^ + +//- /external/lib.rs new_source_root:library crate:external@0.1.0,https://a.b/foo.git library +struct ExternalLibrary(i32); + +//- /workspace/vendored/lib.rs new_source_root:library crate:vendored@0.1.0,https://a.b/bar.git library +struct VendoredLibrary(i32); + //^^^^^^^^^^^^^^^ ^^^ +"#, + ); + } + #[test] fn derives() { check_all_ranges( diff --git a/src/tools/rust-analyzer/crates/ide/src/view_crate_graph.rs b/src/tools/rust-analyzer/crates/ide/src/view_crate_graph.rs index 727012112ebd4..9ff099f479e7b 100644 --- a/src/tools/rust-analyzer/crates/ide/src/view_crate_graph.rs +++ b/src/tools/rust-analyzer/crates/ide/src/view_crate_graph.rs @@ -1,6 +1,6 @@ use dot::{Id, LabelText}; use ide_db::{ - base_db::{CrateGraph, CrateId, Dependency, SourceDatabase, SourceDatabaseExt}, + base_db::{CrateGraph, CrateId, Dependency, SourceDatabase, SourceRootDatabase}, FxHashSet, RootDatabase, }; use triomphe::Arc; diff --git a/src/tools/rust-analyzer/crates/intern/Cargo.toml b/src/tools/rust-analyzer/crates/intern/Cargo.toml index c08ecb5c307b1..5e7ee54c6af77 100644 --- a/src/tools/rust-analyzer/crates/intern/Cargo.toml +++ b/src/tools/rust-analyzer/crates/intern/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "intern" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "Global `Arc`-based object interning infrastructure for rust-analyzer." authors.workspace = true edition.workspace = true diff --git a/src/tools/rust-analyzer/crates/limit/Cargo.toml b/src/tools/rust-analyzer/crates/limit/Cargo.toml index c1a768833b99e..30666f5219af3 100644 --- a/src/tools/rust-analyzer/crates/limit/Cargo.toml +++ b/src/tools/rust-analyzer/crates/limit/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "limit" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "A struct to enforce limits for rust-analyzer." 
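The `static_index.rs` change above makes `StaticIndex::compute` take the workspace root and keep modules from library source roots as long as their files live under that root, so vendored dependencies get indexed while registry and sysroot crates are still skipped. A sketch of just that filter, with paths modeled as plain strings (the real code goes through `SourceRootDatabase` and `VfsPath`):

```rust
struct ModuleInfo<'a> {
    path: &'a str,
    is_library: bool,
}

/// Keep a module if it is workspace code, or a "library" that is actually
/// vendored inside the workspace (e.g. `/workspace/vendored/...`).
fn should_index(module: &ModuleInfo<'_>, workspace_root: &str) -> bool {
    let is_vendored = module.path.starts_with(workspace_root);
    !module.is_library || is_vendored
}

fn main() {
    let workspace_root = "/workspace";
    // Mirrors the fixtures in the new `vendored_crate` test above.
    let modules = [
        ModuleInfo { path: "/workspace/main.rs", is_library: false },
        ModuleInfo { path: "/workspace/vendored/lib.rs", is_library: true },
        ModuleInfo { path: "/registry/external/lib.rs", is_library: true },
    ];
    let kept: Vec<_> = modules
        .iter()
        .filter(|m| should_index(m, workspace_root))
        .map(|m| m.path)
        .collect();
    assert_eq!(kept, ["/workspace/main.rs", "/workspace/vendored/lib.rs"]);
}
```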
authors.workspace = true edition.workspace = true diff --git a/src/tools/rust-analyzer/crates/load-cargo/Cargo.toml b/src/tools/rust-analyzer/crates/load-cargo/Cargo.toml index 64ed93bbb1640..23fd50a05644c 100644 --- a/src/tools/rust-analyzer/crates/load-cargo/Cargo.toml +++ b/src/tools/rust-analyzer/crates/load-cargo/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "load-cargo" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "Loads a Cargo project into a static instance of rust-analyzer for analysis." rust-version.workspace = true edition.workspace = true diff --git a/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs b/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs index 8737f2246be59..abad7e9f7d22a 100644 --- a/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs +++ b/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs @@ -16,11 +16,16 @@ use ide_db::{ use itertools::Itertools; use proc_macro_api::{MacroDylib, ProcMacroServer}; use project_model::{ - CargoConfig, ManifestPath, PackageRoot, ProjectManifest, ProjectWorkspace, ProjectWorkspaceKind, + CargoConfig, PackageRoot, ProjectManifest, ProjectWorkspace, ProjectWorkspaceKind, }; use span::Span; -use vfs::{file_set::FileSetConfig, loader::Handle, AbsPath, AbsPathBuf, VfsPath}; +use vfs::{ + file_set::FileSetConfig, + loader::{Handle, LoadingProgress}, + AbsPath, AbsPathBuf, VfsPath, +}; +#[derive(Debug)] pub struct LoadCargoConfig { pub load_out_dirs_from_check: bool, pub with_proc_macro_server: ProcMacroServerChoice, @@ -60,11 +65,11 @@ pub fn load_workspace( let (sender, receiver) = unbounded(); let mut vfs = vfs::Vfs::default(); let mut loader = { - let loader = - vfs_notify::NotifyHandle::spawn(Box::new(move |msg| sender.send(msg).unwrap())); + let loader = vfs_notify::NotifyHandle::spawn(sender); Box::new(loader) }; + tracing::debug!(?load_config, "LoadCargoConfig"); let proc_macro_server = match &load_config.with_proc_macro_server { ProcMacroServerChoice::Sysroot => ws .find_sysroot_proc_macro_srv() @@ -77,6 +82,14 @@ pub fn load_workspace( Err((anyhow::format_err!("proc macro server disabled"), false)) } }; + match &proc_macro_server { + Ok(server) => { + tracing::info!(path=%server.path(), "Proc-macro server started") + } + Err((e, _)) => { + tracing::info!(%e, "Failed to start proc-macro server") + } + } let (crate_graph, proc_macros) = ws.to_crate_graph( &mut |path: &AbsPath| { @@ -247,7 +260,7 @@ impl ProjectFolders { let mut file_set_roots: Vec = vec![]; let mut entries = vec![]; - if let Some(manifest) = ws.manifest().map(ManifestPath::as_ref) { + if let Some(manifest) = ws.manifest().map(|it| it.to_path_buf()) { file_set_roots.push(VfsPath::from(manifest.to_owned())); entries.push(manifest.to_owned()); } @@ -409,8 +422,8 @@ fn load_crate_graph( // wait until Vfs has loaded all roots for task in receiver { match task { - vfs::loader::Message::Progress { n_done, n_total, .. } => { - if n_done == Some(n_total) { + vfs::loader::Message::Progress { n_done, .. } => { + if n_done == LoadingProgress::Finished { break; } } diff --git a/src/tools/rust-analyzer/crates/mbe/Cargo.toml b/src/tools/rust-analyzer/crates/mbe/Cargo.toml index 57834623e84fa..756d42ef573fc 100644 --- a/src/tools/rust-analyzer/crates/mbe/Cargo.toml +++ b/src/tools/rust-analyzer/crates/mbe/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "mbe" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "Handling of `macro_rules` macros for rust-analyzer." 
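In `load_crate_graph` above, the VFS loader's progress message now carries an explicit `LoadingProgress` state and the wait loop breaks on `LoadingProgress::Finished` instead of comparing `n_done == Some(n_total)`. A toy model of waiting on such a progress channel — only the `Finished` variant appears in the diff; the other variant and the message shape here are assumptions for illustration:

```rust
use std::sync::mpsc;
use std::thread;

// Illustrative stand-in for vfs::loader::LoadingProgress; only `Finished` is
// taken from the diff, the rest is assumed.
#[derive(Debug, PartialEq, Eq)]
enum LoadingProgress {
    Progress(usize),
    Finished,
}

enum Message {
    Progress { n_done: LoadingProgress },
    Loaded { file: String },
}

fn main() {
    let (sender, receiver) = mpsc::channel();
    thread::spawn(move || {
        sender.send(Message::Loaded { file: "src/lib.rs".to_owned() }).unwrap();
        sender.send(Message::Progress { n_done: LoadingProgress::Progress(1) }).unwrap();
        sender.send(Message::Progress { n_done: LoadingProgress::Finished }).unwrap();
    });

    // Mirrors the rewritten wait loop: an explicit terminal state replaces the
    // `n_done == Some(n_total)` arithmetic.
    for msg in receiver {
        match msg {
            Message::Loaded { file } => println!("loaded {file}"),
            Message::Progress { n_done } => {
                if n_done == LoadingProgress::Finished {
                    break;
                }
            }
        }
    }
}
```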
authors.workspace = true edition.workspace = true @@ -25,6 +26,7 @@ tt.workspace = true stdx.workspace = true span.workspace = true intern.workspace = true +syntax-bridge.workspace = true [dev-dependencies] test-utils.workspace = true diff --git a/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs b/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs index b6db4d2e76c40..43604eb232dc9 100644 --- a/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs +++ b/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs @@ -7,11 +7,15 @@ use syntax::{ ast::{self, HasName}, AstNode, }; +use syntax_bridge::{ + dummy_test_span_utils::{DummyTestSpanMap, DUMMY}, + syntax_node_to_token_tree, DocCommentDesugarMode, +}; use test_utils::{bench, bench_fixture, skip_slow_tests}; use crate::{ parser::{MetaVarKind, Op, RepeatKind, Separator}, - syntax_node_to_token_tree, DeclarativeMacro, DocCommentDesugarMode, DummyTestSpanMap, DUMMY, + DeclarativeMacro, }; #[test] diff --git a/src/tools/rust-analyzer/crates/mbe/src/lib.rs b/src/tools/rust-analyzer/crates/mbe/src/lib.rs index 568490d57345f..88785537c7d20 100644 --- a/src/tools/rust-analyzer/crates/mbe/src/lib.rs +++ b/src/tools/rust-analyzer/crates/mbe/src/lib.rs @@ -8,13 +8,12 @@ mod expander; mod parser; -mod syntax_bridge; -mod to_parser_input; #[cfg(test)] mod benchmark; use span::{Edition, Span, SyntaxContextId}; +use syntax_bridge::to_parser_input; use tt::iter::TtIter; use tt::DelimSpan; @@ -23,18 +22,8 @@ use std::sync::Arc; use crate::parser::{MetaTemplate, MetaVarKind, Op}; -// FIXME: we probably should re-think `token_tree_to_syntax_node` interfaces -pub use ::parser::TopEntryPoint; pub use tt::{Delimiter, DelimiterKind, Punct}; -pub use crate::syntax_bridge::{ - desugar_doc_comment_text, parse_exprs_with_sep, parse_to_token_tree, - parse_to_token_tree_static_span, syntax_node_to_token_tree, syntax_node_to_token_tree_modified, - token_tree_to_syntax_node, DocCommentDesugarMode, SpanMapper, -}; - -pub use crate::syntax_bridge::dummy_test_span_utils::*; - #[derive(Debug, PartialEq, Eq, Clone)] pub enum ParseError { UnexpectedToken(Box), @@ -361,7 +350,7 @@ impl From> for ValueResult { } } -fn expect_fragment( +pub fn expect_fragment( tt_iter: &mut TtIter<'_, Span>, entry_point: ::parser::PrefixEntryPoint, edition: ::parser::Edition, @@ -369,7 +358,7 @@ fn expect_fragment( ) -> ExpandResult>> { use ::parser; let buffer = tt::buffer::TokenBuffer::from_tokens(tt_iter.as_slice()); - let parser_input = to_parser_input::to_parser_input(edition, &buffer); + let parser_input = to_parser_input(edition, &buffer); let tree_traversal = entry_point.parse(&parser_input, edition); let mut cursor = buffer.begin(); let mut error = false; diff --git a/src/tools/rust-analyzer/crates/parser/Cargo.toml b/src/tools/rust-analyzer/crates/parser/Cargo.toml index 54b57c201be90..d5255665b46a1 100644 --- a/src/tools/rust-analyzer/crates/parser/Cargo.toml +++ b/src/tools/rust-analyzer/crates/parser/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "parser" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "The Rust parser for rust-analyzer." 
authors.workspace = true edition.workspace = true diff --git a/src/tools/rust-analyzer/crates/paths/Cargo.toml b/src/tools/rust-analyzer/crates/paths/Cargo.toml index 59a4ad9a25587..d4b0a54ed6446 100644 --- a/src/tools/rust-analyzer/crates/paths/Cargo.toml +++ b/src/tools/rust-analyzer/crates/paths/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "paths" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "Path wrappers for absolute and relative paths used in rust-analyzer." authors.workspace = true edition.workspace = true @@ -13,13 +14,10 @@ doctest = false [dependencies] camino.workspace = true -# Adding this dep sadly puts a lot of rust-analyzer crates after the -# serde-derive crate. Even though we don't activate the derive feature here, -# someone else in the crate graph certainly does! -# serde.workspace = true +serde = { workspace = true, optional = true } [features] -serde1 = ["camino/serde1"] +serde1 = ["camino/serde1", "dep:serde"] [lints] workspace = true diff --git a/src/tools/rust-analyzer/crates/paths/src/lib.rs b/src/tools/rust-analyzer/crates/paths/src/lib.rs index 885f071889eca..0084244161692 100644 --- a/src/tools/rust-analyzer/crates/paths/src/lib.rs +++ b/src/tools/rust-analyzer/crates/paths/src/lib.rs @@ -1,4 +1,4 @@ -//! Thin wrappers around `std::path`/`camino::path`, distinguishing between absolute and +//! Thin wrappers around [`camino::path`], distinguishing between absolute and //! relative paths. use std::{ @@ -8,9 +8,9 @@ use std::{ path::{Path, PathBuf}, }; -pub use camino::*; +pub use camino::{Utf8Component, Utf8Components, Utf8Path, Utf8PathBuf, Utf8Prefix}; -/// Wrapper around an absolute [`Utf8PathBuf`]. +/// A [`Utf8PathBuf`] that is guaranteed to be absolute. #[derive(Debug, Clone, Ord, PartialOrd, Eq, Hash)] pub struct AbsPathBuf(Utf8PathBuf); @@ -73,16 +73,6 @@ impl TryFrom for AbsPathBuf { } } -impl TryFrom for AbsPathBuf { - type Error = PathBuf; - fn try_from(path_buf: PathBuf) -> Result { - if !path_buf.is_absolute() { - return Err(path_buf); - } - Ok(AbsPathBuf(Utf8PathBuf::from_path_buf(path_buf)?)) - } -} - impl TryFrom<&str> for AbsPathBuf { type Error = Utf8PathBuf; fn try_from(path: &str) -> Result { @@ -151,6 +141,10 @@ impl AbsPathBuf { pub fn push>(&mut self, suffix: P) { self.0.push(suffix) } + + pub fn join(&self, path: impl AsRef) -> Self { + Self(self.0.join(path)) + } } impl fmt::Display for AbsPathBuf { diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml index 345fb9f8ae973..84b877f026b77 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml +++ b/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "proc-macro-api" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "RPC API for the `proc-macro-srv` crate of rust-analyzer."
authors.workspace = true edition.workspace = true @@ -22,12 +23,11 @@ indexmap.workspace = true paths = { workspace = true, features = ["serde1"] } tt.workspace = true stdx.workspace = true -text-size.workspace = true -span.workspace = true # Ideally this crate would not depend on salsa things, but we need span information here which wraps # InternIds for the syntax context +span.workspace = true +# only here due to the `Env` newtype :/ base-db.workspace = true -la-arena.workspace = true intern.workspace = true [lints] diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/msg.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/msg.rs index 6a99b5ed1cc84..883528558d95d 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-api/src/msg.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/msg.rs @@ -158,9 +158,7 @@ type ProtocolWrite = for<'o, 'msg> fn(out: &'o mut W, msg: &'msg str) #[cfg(test)] mod tests { use intern::{sym, Symbol}; - use la_arena::RawIdx; - use span::{ErasedFileAstId, Span, SpanAnchor, SyntaxContextId}; - use text_size::{TextRange, TextSize}; + use span::{ErasedFileAstId, Span, SpanAnchor, SyntaxContextId, TextRange, TextSize}; use tt::{Delimiter, DelimiterKind, Ident, Leaf, Literal, Punct, Spacing, Subtree, TokenTree}; use super::*; @@ -171,7 +169,7 @@ mod tests { span::FileId::from_raw(0xe4e4e), span::Edition::CURRENT, ), - ast_id: ErasedFileAstId::from_raw(RawIdx::from(0)), + ast_id: ErasedFileAstId::from_raw(0), }; let token_trees = Box::new([ diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/msg/flat.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/msg/flat.rs index a8661f59b2887..88256e98b5887 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-api/src/msg/flat.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/msg/flat.rs @@ -38,11 +38,9 @@ use std::collections::VecDeque; use intern::Symbol; -use la_arena::RawIdx; use rustc_hash::FxHashMap; use serde::{Deserialize, Serialize}; -use span::{EditionedFileId, ErasedFileAstId, Span, SpanAnchor, SyntaxContextId}; -use text_size::TextRange; +use span::{EditionedFileId, ErasedFileAstId, Span, SpanAnchor, SyntaxContextId, TextRange}; use crate::msg::{ENCODE_CLOSE_SPAN_VERSION, EXTENDED_LEAF_DATA}; @@ -54,7 +52,7 @@ pub fn serialize_span_data_index_map(map: &SpanDataIndexMap) -> Vec { .flat_map(|span| { [ span.anchor.file_id.as_u32(), - span.anchor.ast_id.into_raw().into_u32(), + span.anchor.ast_id.into_raw(), span.range.start().into(), span.range.end().into(), span.ctx.into_u32(), @@ -71,7 +69,7 @@ pub fn deserialize_span_data_index_map(map: &[u32]) -> SpanDataIndexMap { Span { anchor: SpanAnchor { file_id: EditionedFileId::from_raw(file_id), - ast_id: ErasedFileAstId::from_raw(RawIdx::from_u32(ast_id)), + ast_id: ErasedFileAstId::from_raw(ast_id), }, range: TextRange::new(start.into(), end.into()), ctx: SyntaxContextId::from_u32(e), diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/Cargo.toml index a559ba0175565..1c394513c459d 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/Cargo.toml +++ b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "proc-macro-srv-cli" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "A standalone binary for the `proc-macro-srv` crate of rust-analyzer." 
authors.workspace = true edition.workspace = true diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml index 673b5bd78a8e9..e8d9677c92863 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "proc-macro-srv" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "Proc-macro server for rust-analyzer." authors.workspace = true edition.workspace = true @@ -19,23 +20,24 @@ snap.workspace = true stdx.workspace = true tt.workspace = true -mbe.workspace = true +syntax-bridge.workspace = true paths.workspace = true base-db.workspace = true span.workspace = true proc-macro-api.workspace = true -ra-ap-rustc_lexer.workspace = true intern.workspace = true +ra-ap-rustc_lexer.workspace = true + [dev-dependencies] -expect-test = "1.4.0" +expect-test.workspace = true # used as proc macro test targets proc-macro-test.path = "./proc-macro-test" [features] sysroot-abi = [] -in-rust-tree = ["mbe/in-rust-tree", "tt/in-rust-tree","sysroot-abi"] +in-rust-tree = ["syntax-bridge/in-rust-tree", "tt/in-rust-tree", "sysroot-abi"] [lints] workspace = true diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs index 8b9eb3beb6e88..552d99f51ba12 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs @@ -479,7 +479,7 @@ mod tests { range: TextRange::empty(TextSize::new(0)), anchor: span::SpanAnchor { file_id: EditionedFileId::current_edition(FileId::from_raw(0)), - ast_id: span::ErasedFileAstId::from_raw(0.into()), + ast_id: span::ErasedFileAstId::from_raw(0), }, ctx: SyntaxContextId::ROOT, }; @@ -515,7 +515,7 @@ mod tests { range: TextRange::empty(TextSize::new(0)), anchor: span::SpanAnchor { file_id: EditionedFileId::current_edition(FileId::from_raw(0)), - ast_id: span::ErasedFileAstId::from_raw(0.into()), + ast_id: span::ErasedFileAstId::from_raw(0), }, ctx: SyntaxContextId::ROOT, }; diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_stream.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_stream.rs index cdf93fa4251d5..4d8d496418bf8 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_stream.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_stream.rs @@ -126,9 +126,12 @@ pub(super) mod token_stream { /// change these errors into `LexError`s later. 
impl TokenStream { pub(crate) fn from_str(src: &str, call_site: S) -> Result, String> { - let subtree = - mbe::parse_to_token_tree_static_span(span::Edition::CURRENT_FIXME, call_site, src) - .ok_or("lexing error")?; + let subtree = syntax_bridge::parse_to_token_tree_static_span( + span::Edition::CURRENT_FIXME, + call_site, + src, + ) + .ok_or("lexing error")?; Ok(TokenStream::with_subtree(subtree)) } diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs index 70eff51cadea8..4f1a18c03fc6a 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs @@ -9,7 +9,8 @@ use crate::{dylib, proc_macro_test_dylib_path, EnvSnapshot, ProcMacroSrv}; fn parse_string(call_site: TokenId, src: &str) -> crate::server_impl::TokenStream { crate::server_impl::TokenStream::with_subtree( - mbe::parse_to_token_tree_static_span(span::Edition::CURRENT, call_site, src).unwrap(), + syntax_bridge::parse_to_token_tree_static_span(span::Edition::CURRENT, call_site, src) + .unwrap(), ) } @@ -19,7 +20,7 @@ fn parse_string_spanned( src: &str, ) -> crate::server_impl::TokenStream { crate::server_impl::TokenStream::with_subtree( - mbe::parse_to_token_tree(span::Edition::CURRENT, anchor, call_site, src).unwrap(), + syntax_bridge::parse_to_token_tree(span::Edition::CURRENT, anchor, call_site, src).unwrap(), ) } @@ -69,7 +70,7 @@ fn assert_expand_impl( range: TextRange::new(0.into(), 150.into()), anchor: SpanAnchor { file_id: EditionedFileId::current_edition(FileId::from_raw(41)), - ast_id: ErasedFileAstId::from_raw(From::from(1)), + ast_id: ErasedFileAstId::from_raw(1), }, ctx: SyntaxContextId::ROOT, }; @@ -77,7 +78,7 @@ fn assert_expand_impl( range: TextRange::new(0.into(), 100.into()), anchor: SpanAnchor { file_id: EditionedFileId::current_edition(FileId::from_raw(42)), - ast_id: ErasedFileAstId::from_raw(From::from(2)), + ast_id: ErasedFileAstId::from_raw(2), }, ctx: SyntaxContextId::ROOT, }; diff --git a/src/tools/rust-analyzer/crates/profile/Cargo.toml b/src/tools/rust-analyzer/crates/profile/Cargo.toml index 5989dc6c96291..2e3413f339b63 100644 --- a/src/tools/rust-analyzer/crates/profile/Cargo.toml +++ b/src/tools/rust-analyzer/crates/profile/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "profile" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "A collection of tools for profiling rust-analyzer." authors.workspace = true edition.workspace = true diff --git a/src/tools/rust-analyzer/crates/project-model/Cargo.toml b/src/tools/rust-analyzer/crates/project-model/Cargo.toml index 8b34bd3fad1a1..68e0e1ba55430 100644 --- a/src/tools/rust-analyzer/crates/project-model/Cargo.toml +++ b/src/tools/rust-analyzer/crates/project-model/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "project-model" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "A representation for a Cargo project for rust-analyzer." 
authors.workspace = true edition.workspace = true diff --git a/src/tools/rust-analyzer/crates/project-model/src/build_scripts.rs b/src/tools/rust-analyzer/crates/project-model/src/build_dependencies.rs similarity index 89% rename from src/tools/rust-analyzer/crates/project-model/src/build_scripts.rs rename to src/tools/rust-analyzer/crates/project-model/src/build_dependencies.rs index 839d8e569fe3f..e7a4b8f39f7a5 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/build_scripts.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/build_dependencies.rs @@ -1,28 +1,25 @@ -//! Workspace information we get from cargo consists of two pieces. The first is -//! the output of `cargo metadata`. The second is the output of running -//! `build.rs` files (`OUT_DIR` env var, extra cfg flags) and compiling proc -//! macro. +//! Logic to invoke `cargo` for building build-dependencies (build scripts and proc-macros) as well as +//! executing the build scripts to fetch required dependency information (`OUT_DIR` env var, extra +//! cfg flags, etc). //! -//! This module implements this second part. We use "build script" terminology -//! here, but it covers procedural macros as well. - -use std::{ - cell::RefCell, - io, mem, - path::{self, PathBuf}, - process::Command, -}; +//! In essence this just invokes `cargo` with the appropriate output format which we consume, +//! but if enabled we will also use `RUSTC_WRAPPER` to only compile the build scripts and +//! proc-macros and skip everything else. + +use std::{cell::RefCell, io, mem, process::Command}; +use base_db::Env; use cargo_metadata::{camino::Utf8Path, Message}; +use cfg::CfgAtom; use itertools::Itertools; use la_arena::ArenaMap; -use paths::{AbsPath, AbsPathBuf}; +use paths::{AbsPath, AbsPathBuf, Utf8PathBuf}; use rustc_hash::{FxHashMap, FxHashSet}; use serde::Deserialize; use toolchain::Tool; use crate::{ - cfg::CfgFlag, utf8_stdout, CargoConfig, CargoFeatures, CargoWorkspace, InvocationLocation, + utf8_stdout, CargoConfig, CargoFeatures, CargoWorkspace, InvocationLocation, InvocationStrategy, ManifestPath, Package, Sysroot, TargetKind, }; @@ -37,12 +34,12 @@ pub struct WorkspaceBuildScripts { #[derive(Debug, Clone, Default, PartialEq, Eq)] pub(crate) struct BuildScriptOutput { /// List of config flags defined by this package's build script. - pub(crate) cfgs: Vec, + pub(crate) cfgs: Vec, /// List of cargo-related environment variables with their value. /// /// If the package has a build script which defines environment variables, /// they can also be found here. - pub(crate) envs: Vec<(String, String)>, + pub(crate) envs: Env, /// Directory where a build script might place its output. pub(crate) out_dir: Option, /// Path to the proc-macro library file if this package exposes proc-macros. 
@@ -50,7 +47,7 @@ pub(crate) struct BuildScriptOutput { } impl BuildScriptOutput { - fn is_unchanged(&self) -> bool { + fn is_empty(&self) -> bool { self.cfgs.is_empty() && self.envs.is_empty() && self.out_dir.is_none() @@ -59,85 +56,6 @@ impl BuildScriptOutput { } impl WorkspaceBuildScripts { - fn build_command( - config: &CargoConfig, - allowed_features: &FxHashSet, - manifest_path: &ManifestPath, - sysroot: &Sysroot, - ) -> io::Result { - let mut cmd = match config.run_build_script_command.as_deref() { - Some([program, args @ ..]) => { - let mut cmd = Command::new(program); - cmd.args(args); - cmd - } - _ => { - let mut cmd = sysroot.tool(Tool::Cargo); - - cmd.args(["check", "--quiet", "--workspace", "--message-format=json"]); - cmd.args(&config.extra_args); - - cmd.arg("--manifest-path"); - cmd.arg(manifest_path.as_ref()); - - if let Some(target_dir) = &config.target_dir { - cmd.arg("--target-dir").arg(target_dir); - } - - // --all-targets includes tests, benches and examples in addition to the - // default lib and bins. This is an independent concept from the --target - // flag below. - if config.all_targets { - cmd.arg("--all-targets"); - } - - if let Some(target) = &config.target { - cmd.args(["--target", target]); - } - - match &config.features { - CargoFeatures::All => { - cmd.arg("--all-features"); - } - CargoFeatures::Selected { features, no_default_features } => { - if *no_default_features { - cmd.arg("--no-default-features"); - } - if !features.is_empty() { - cmd.arg("--features"); - cmd.arg( - features - .iter() - .filter(|&feat| allowed_features.contains(feat)) - .join(","), - ); - } - } - } - - if manifest_path.is_rust_manifest() { - cmd.arg("-Zscript"); - } - - cmd.arg("--keep-going"); - - cmd - } - }; - - cmd.envs(&config.extra_env); - if config.wrap_rustc_in_build_scripts { - // Setup RUSTC_WRAPPER to point to `rust-analyzer` binary itself. We use - // that to compile only proc macros and build scripts during the initial - // `cargo check`. - let myself = std::env::current_exe()?; - cmd.env("RUSTC_WRAPPER", myself); - cmd.env("RA_RUSTC_WRAPPER", "1"); - } - - Ok(cmd) - } - /// Runs the build scripts for the given workspace pub(crate) fn run_for_workspace( config: &CargoConfig, @@ -146,17 +64,19 @@ impl WorkspaceBuildScripts { sysroot: &Sysroot, ) -> io::Result { let current_dir = match &config.invocation_location { - InvocationLocation::Root(root) if config.run_build_script_command.is_some() => { - root.as_path() - } + InvocationLocation::Root(root) if config.run_build_script_command.is_some() => root, _ => workspace.workspace_root(), - } - .as_ref(); + }; let allowed_features = workspace.workspace_features(); - let cmd = - Self::build_command(config, &allowed_features, workspace.manifest_path(), sysroot)?; - Self::run_per_ws(cmd, workspace, current_dir, progress) + let cmd = Self::build_command( + config, + &allowed_features, + workspace.manifest_path(), + current_dir, + sysroot, + )?; + Self::run_per_ws(cmd, workspace, progress) } /// Runs the build scripts by invoking the configured command *once*. 
@@ -183,6 +103,7 @@ impl WorkspaceBuildScripts { &Default::default(), // This is not gonna be used anyways, so just construct a dummy here &ManifestPath::try_from(workspace_root.clone()).unwrap(), + current_dir, &Sysroot::empty(), )?; // NB: Cargo.toml could have been modified between `cargo metadata` and @@ -211,7 +132,6 @@ impl WorkspaceBuildScripts { let errors = Self::run_command( cmd, - current_dir.as_path().as_ref(), |package, cb| { if let Some(&(package, workspace)) = by_id.get(package) { cb(&workspaces[workspace][package].name, &mut res[workspace].outputs[package]); @@ -230,7 +150,7 @@ impl WorkspaceBuildScripts { for (idx, workspace) in workspaces.iter().enumerate() { for package in workspace.packages() { let package_build_data = &mut res[idx].outputs[package]; - if !package_build_data.is_unchanged() { + if !package_build_data.is_empty() { tracing::info!( "{}: {package_build_data:?}", workspace[package].manifest.parent(), @@ -243,10 +163,100 @@ impl WorkspaceBuildScripts { Ok(res) } + pub fn error(&self) -> Option<&str> { + self.error.as_deref() + } + + pub(crate) fn get_output(&self, idx: Package) -> Option<&BuildScriptOutput> { + self.outputs.get(idx) + } + + /// Assembles build script outputs for the rustc crates via `--print target-libdir`. + pub(crate) fn rustc_crates( + rustc: &CargoWorkspace, + current_dir: &AbsPath, + extra_env: &FxHashMap, + sysroot: &Sysroot, + ) -> Self { + let mut bs = WorkspaceBuildScripts::default(); + for p in rustc.packages() { + bs.outputs.insert(p, BuildScriptOutput::default()); + } + let res = (|| { + let target_libdir = (|| { + let mut cargo_config = sysroot.tool(Tool::Cargo); + cargo_config.envs(extra_env); + cargo_config + .current_dir(current_dir) + .args(["rustc", "-Z", "unstable-options", "--print", "target-libdir"]) + .env("RUSTC_BOOTSTRAP", "1"); + if let Ok(it) = utf8_stdout(cargo_config) { + return Ok(it); + } + let mut cmd = sysroot.tool(Tool::Rustc); + cmd.envs(extra_env); + cmd.args(["--print", "target-libdir"]); + utf8_stdout(cmd) + })()?; + + let target_libdir = AbsPathBuf::try_from(Utf8PathBuf::from(target_libdir)) + .map_err(|_| anyhow::format_err!("target-libdir was not an absolute path"))?; + tracing::info!("Loading rustc proc-macro paths from {target_libdir}"); + + let proc_macro_dylibs: Vec<(String, AbsPathBuf)> = std::fs::read_dir(target_libdir)? + .filter_map(|entry| { + let dir_entry = entry.ok()?; + if dir_entry.file_type().ok()?.is_file() { + let path = dir_entry.path(); + let extension = path.extension()?; + if extension == std::env::consts::DLL_EXTENSION { + let name = path.file_stem()?.to_str()?.split_once('-')?.0.to_owned(); + let path = AbsPathBuf::try_from(Utf8PathBuf::from_path_buf(path).ok()?) 
+ .ok()?; + return Some((name, path)); + } + } + None + }) + .collect(); + for p in rustc.packages() { + let package = &rustc[p]; + if package + .targets + .iter() + .any(|&it| matches!(rustc[it].kind, TargetKind::Lib { is_proc_macro: true })) + { + if let Some((_, path)) = proc_macro_dylibs + .iter() + .find(|(name, _)| *name.trim_start_matches("lib") == package.name) + { + bs.outputs[p].proc_macro_dylib_path = Some(path.clone()); + } + } + } + + if tracing::enabled!(tracing::Level::INFO) { + for package in rustc.packages() { + let package_build_data = &bs.outputs[package]; + if !package_build_data.is_empty() { + tracing::info!( + "{}: {package_build_data:?}", + rustc[package].manifest.parent(), + ); + } + } + } + Ok(()) + })(); + if let Err::<_, anyhow::Error>(e) = res { + bs.error = Some(e.to_string()); + } + bs + } + fn run_per_ws( cmd: Command, workspace: &CargoWorkspace, - current_dir: &path::Path, progress: &dyn Fn(String), ) -> io::Result { let mut res = WorkspaceBuildScripts::default(); @@ -262,7 +272,6 @@ impl WorkspaceBuildScripts { res.error = Self::run_command( cmd, - current_dir, |package, cb| { if let Some(&package) = by_id.get(package) { cb(&workspace[package].name, &mut outputs[package]); @@ -274,7 +283,7 @@ impl WorkspaceBuildScripts { if tracing::enabled!(tracing::Level::INFO) { for package in workspace.packages() { let package_build_data = &outputs[package]; - if !package_build_data.is_unchanged() { + if !package_build_data.is_empty() { tracing::info!( "{}: {package_build_data:?}", workspace[package].manifest.parent(), @@ -287,8 +296,7 @@ impl WorkspaceBuildScripts { } fn run_command( - mut cmd: Command, - current_dir: &path::Path, + cmd: Command, // ideally this would be something like: // with_output_for: impl FnMut(&str, dyn FnOnce(&mut BuildScriptOutput)), // but owned trait objects aren't a thing @@ -302,8 +310,7 @@ impl WorkspaceBuildScripts { e.push('\n'); }; - tracing::info!("Running build scripts in {}: {:?}", current_dir.display(), cmd); - cmd.current_dir(current_dir); + tracing::info!("Running build scripts: {:?}", cmd); let output = stdx::process::spawn_with_streaming_output( cmd, &mut |line| { @@ -321,7 +328,7 @@ impl WorkspaceBuildScripts { let cfgs = { let mut acc = Vec::new(); for cfg in &message.cfgs { - match cfg.parse::() { + match crate::parse_cfg(cfg) { Ok(it) => acc.push(it), Err(err) => { push_err(&format!( @@ -333,16 +340,14 @@ impl WorkspaceBuildScripts { } acc }; - if !message.env.is_empty() { - data.envs = mem::take(&mut message.env); - } + data.envs.extend(message.env.drain(..)); // cargo_metadata crate returns default (empty) path for // older cargos, which is not absolute, so work around that. 
let out_dir = mem::take(&mut message.out_dir); if !out_dir.as_str().is_empty() { let out_dir = AbsPathBuf::assert(out_dir); // inject_cargo_env(package, package_build_data); - data.envs.push(("OUT_DIR".to_owned(), out_dir.as_str().to_owned())); + data.envs.insert("OUT_DIR", out_dir.as_str()); data.out_dir = Some(out_dir); data.cfgs = cfgs; } @@ -354,7 +359,7 @@ impl WorkspaceBuildScripts { if message.target.kind.iter().any(|k| k == "proc-macro") { // Skip rmeta file if let Some(filename) = - message.filenames.iter().find(|name| is_dylib(name)) + message.filenames.iter().find(|file| is_dylib(file)) { let filename = AbsPath::assert(filename); data.proc_macro_dylib_path = Some(filename.to_owned()); @@ -388,93 +393,85 @@ impl WorkspaceBuildScripts { Ok(errors) } - pub fn error(&self) -> Option<&str> { - self.error.as_deref() - } - - pub(crate) fn get_output(&self, idx: Package) -> Option<&BuildScriptOutput> { - self.outputs.get(idx) - } - - pub(crate) fn rustc_crates( - rustc: &CargoWorkspace, + fn build_command( + config: &CargoConfig, + allowed_features: &FxHashSet, + manifest_path: &ManifestPath, current_dir: &AbsPath, - extra_env: &FxHashMap, sysroot: &Sysroot, - ) -> Self { - let mut bs = WorkspaceBuildScripts::default(); - for p in rustc.packages() { - bs.outputs.insert(p, BuildScriptOutput::default()); - } - let res = (|| { - let target_libdir = (|| { - let mut cargo_config = sysroot.tool(Tool::Cargo); - cargo_config.envs(extra_env); - cargo_config - .current_dir(current_dir) - .args(["rustc", "-Z", "unstable-options", "--print", "target-libdir"]) - .env("RUSTC_BOOTSTRAP", "1"); - if let Ok(it) = utf8_stdout(cargo_config) { - return Ok(it); + ) -> io::Result { + let mut cmd = match config.run_build_script_command.as_deref() { + Some([program, args @ ..]) => { + let mut cmd = Command::new(program); + cmd.args(args); + cmd + } + _ => { + let mut cmd = sysroot.tool(Tool::Cargo); + + cmd.args(["check", "--quiet", "--workspace", "--message-format=json"]); + cmd.args(&config.extra_args); + + cmd.arg("--manifest-path"); + cmd.arg(manifest_path); + + if let Some(target_dir) = &config.target_dir { + cmd.arg("--target-dir").arg(target_dir); } - let mut cmd = sysroot.tool(Tool::Rustc); - cmd.envs(extra_env); - cmd.args(["--print", "target-libdir"]); - utf8_stdout(cmd) - })()?; - let target_libdir = AbsPathBuf::try_from(PathBuf::from(target_libdir)) - .map_err(|_| anyhow::format_err!("target-libdir was not an absolute path"))?; - tracing::info!("Loading rustc proc-macro paths from {target_libdir}"); + // --all-targets includes tests, benches and examples in addition to the + // default lib and bins. This is an independent concept from the --target + // flag below. + if config.all_targets { + cmd.arg("--all-targets"); + } - let proc_macro_dylibs: Vec<(String, AbsPathBuf)> = std::fs::read_dir(target_libdir)? 
- .filter_map(|entry| { - let dir_entry = entry.ok()?; - if dir_entry.file_type().ok()?.is_file() { - let path = dir_entry.path(); - let extension = path.extension()?; - if extension == std::env::consts::DLL_EXTENSION { - let name = path.file_stem()?.to_str()?.split_once('-')?.0.to_owned(); - let path = AbsPathBuf::try_from(path).ok()?; - return Some((name, path)); - } + if let Some(target) = &config.target { + cmd.args(["--target", target]); + } + + match &config.features { + CargoFeatures::All => { + cmd.arg("--all-features"); } - None - }) - .collect(); - for p in rustc.packages() { - let package = &rustc[p]; - if package - .targets - .iter() - .any(|&it| matches!(rustc[it].kind, TargetKind::Lib { is_proc_macro: true })) - { - if let Some((_, path)) = proc_macro_dylibs - .iter() - .find(|(name, _)| *name.trim_start_matches("lib") == package.name) - { - bs.outputs[p].proc_macro_dylib_path = Some(path.clone()); + CargoFeatures::Selected { features, no_default_features } => { + if *no_default_features { + cmd.arg("--no-default-features"); + } + if !features.is_empty() { + cmd.arg("--features"); + cmd.arg( + features + .iter() + .filter(|&feat| allowed_features.contains(feat)) + .join(","), + ); + } } } - } - if tracing::enabled!(tracing::Level::INFO) { - for package in rustc.packages() { - let package_build_data = &bs.outputs[package]; - if !package_build_data.is_unchanged() { - tracing::info!( - "{}: {package_build_data:?}", - rustc[package].manifest.parent(), - ); - } + if manifest_path.is_rust_manifest() { + cmd.arg("-Zscript"); } + + cmd.arg("--keep-going"); + + cmd } - Ok(()) - })(); - if let Err::<_, anyhow::Error>(e) = res { - bs.error = Some(e.to_string()); + }; + + cmd.current_dir(current_dir); + cmd.envs(&config.extra_env); + if config.wrap_rustc_in_build_scripts { + // Setup RUSTC_WRAPPER to point to `rust-analyzer` binary itself. We use + // that to compile only proc macros and build scripts during the initial + // `cargo check`. + let myself = std::env::current_exe()?; + cmd.env("RUSTC_WRAPPER", myself); + cmd.env("RA_RUSTC_WRAPPER", "1"); } - bs + + Ok(cmd) } } diff --git a/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs index 632ba1cacf213..38eeedec6217f 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs @@ -85,8 +85,6 @@ pub struct CargoConfig { pub target: Option, /// Sysroot loading behavior pub sysroot: Option, - /// Whether to invoke `cargo metadata` on the sysroot crate. - pub sysroot_query_metadata: bool, pub sysroot_src: Option, /// rustc private crate source pub rustc_source: Option, @@ -259,6 +257,7 @@ impl CargoWorkspace { current_dir: &AbsPath, config: &CargoConfig, sysroot: &Sysroot, + locked: bool, progress: &dyn Fn(String), ) -> anyhow::Result { let targets = find_list_of_build_targets(config, cargo_toml, sysroot); @@ -312,6 +311,9 @@ impl CargoWorkspace { // opt into it themselves. other_options.push("-Zscript".to_owned()); } + if locked { + other_options.push("--locked".to_owned()); + } meta.other_options(other_options); // FIXME: Fetching metadata is a slow process, as it might require diff --git a/src/tools/rust-analyzer/crates/project-model/src/cfg.rs b/src/tools/rust-analyzer/crates/project-model/src/cfg.rs deleted file mode 100644 index e921e3de722d1..0000000000000 --- a/src/tools/rust-analyzer/crates/project-model/src/cfg.rs +++ /dev/null @@ -1,100 +0,0 @@ -//! 
Parsing of CfgFlags as command line arguments, as in -//! -//! rustc main.rs --cfg foo --cfg 'feature="bar"' -use std::{fmt, str::FromStr}; - -use cfg::{CfgDiff, CfgOptions}; -use intern::Symbol; -use rustc_hash::FxHashMap; -use serde::Serialize; - -#[derive(Clone, Eq, PartialEq, Debug, Serialize)] -pub enum CfgFlag { - Atom(String), - KeyValue { key: String, value: String }, -} - -impl FromStr for CfgFlag { - type Err = String; - fn from_str(s: &str) -> Result { - let res = match s.split_once('=') { - Some((key, value)) => { - if !(value.starts_with('"') && value.ends_with('"')) { - return Err(format!("Invalid cfg ({s:?}), value should be in quotes")); - } - let key = key.to_owned(); - let value = value[1..value.len() - 1].to_string(); - CfgFlag::KeyValue { key, value } - } - None => CfgFlag::Atom(s.into()), - }; - Ok(res) - } -} - -impl<'de> serde::Deserialize<'de> for CfgFlag { - fn deserialize(deserializer: D) -> Result - where - D: serde::Deserializer<'de>, - { - String::deserialize(deserializer)?.parse().map_err(serde::de::Error::custom) - } -} - -impl Extend for CfgOptions { - fn extend>(&mut self, iter: T) { - for cfg_flag in iter { - match cfg_flag { - CfgFlag::Atom(it) => self.insert_atom(Symbol::intern(&it)), - CfgFlag::KeyValue { key, value } => { - self.insert_key_value(Symbol::intern(&key), Symbol::intern(&value)) - } - } - } - } -} - -impl FromIterator for CfgOptions { - fn from_iter>(iter: T) -> Self { - let mut this = CfgOptions::default(); - this.extend(iter); - this - } -} - -impl fmt::Display for CfgFlag { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - CfgFlag::Atom(atom) => f.write_str(atom), - CfgFlag::KeyValue { key, value } => { - f.write_str(key)?; - f.write_str("=")?; - f.write_str(value) - } - } - } -} - -/// A set of cfg-overrides per crate. -#[derive(Default, Debug, Clone, Eq, PartialEq)] -pub struct CfgOverrides { - /// A global set of overrides matching all crates. - pub global: CfgDiff, - /// A set of overrides matching specific crates. 
- pub selective: FxHashMap, -} - -impl CfgOverrides { - pub fn len(&self) -> usize { - self.global.len() + self.selective.values().map(|it| it.len()).sum::() - } - - pub fn apply(&self, cfg_options: &mut CfgOptions, name: &str) { - if !self.global.is_empty() { - cfg_options.apply_diff(self.global.clone()); - }; - if let Some(diff) = self.selective.get(name) { - cfg_options.apply_diff(diff.clone()); - }; - } -} diff --git a/src/tools/rust-analyzer/crates/project-model/src/env.rs b/src/tools/rust-analyzer/crates/project-model/src/env.rs index 049acc290bbfe..ac7246acc592f 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/env.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/env.rs @@ -90,10 +90,13 @@ fn parse_output_cargo_config_env(stdout: String) -> FxHashMap { stdout .lines() .filter_map(|l| l.strip_prefix("env.")) - .filter_map(|l| { - l.split_once(" = ") - // cargo used to report it with this, keep it for a couple releases around - .or_else(|| l.split_once(".value = ")) + .filter_map(|l| l.split_once(" = ")) + .filter_map(|(k, v)| { + if k.contains('.') { + k.strip_suffix(".value").zip(Some(v)) + } else { + Some((k, v)) + } }) .map(|(key, value)| (key.to_owned(), value.trim_matches('"').to_owned())) .collect() diff --git a/src/tools/rust-analyzer/crates/project-model/src/lib.rs b/src/tools/rust-analyzer/crates/project-model/src/lib.rs index 92bf6a08f870d..4fa70508bbd4d 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/lib.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/lib.rs @@ -15,9 +15,8 @@ //! procedural macros). //! * Lowering of concrete model to a [`base_db::CrateGraph`] -mod build_scripts; +mod build_dependencies; mod cargo_workspace; -mod cfg; mod env; mod manifest_path; pub mod project_json; @@ -37,16 +36,15 @@ use std::{ }; use anyhow::{bail, format_err, Context}; -use paths::{AbsPath, AbsPathBuf}; +use paths::{AbsPath, AbsPathBuf, Utf8PathBuf}; use rustc_hash::FxHashSet; pub use crate::{ - build_scripts::WorkspaceBuildScripts, + build_dependencies::WorkspaceBuildScripts, cargo_workspace::{ CargoConfig, CargoFeatures, CargoWorkspace, Package, PackageData, PackageDependency, RustLibSource, Target, TargetData, TargetKind, }, - cfg::CfgOverrides, manifest_path::ManifestPath, project_json::{ProjectJson, ProjectJsonData}, sysroot::Sysroot, @@ -68,6 +66,9 @@ impl ProjectManifest { if path.file_name().unwrap_or_default() == "rust-project.json" { return Ok(ProjectManifest::ProjectJson(path)); } + if path.file_name().unwrap_or_default() == ".rust-project.json" { + return Ok(ProjectManifest::ProjectJson(path)); + } if path.file_name().unwrap_or_default() == "Cargo.toml" { return Ok(ProjectManifest::CargoToml(path)); } @@ -94,6 +95,9 @@ impl ProjectManifest { if let Some(project_json) = find_in_parent_dirs(path, "rust-project.json") { return Ok(vec![ProjectManifest::ProjectJson(project_json)]); } + if let Some(project_json) = find_in_parent_dirs(path, ".rust-project.json") { + return Ok(vec![ProjectManifest::ProjectJson(project_json)]); + } return find_cargo_toml(path) .map(|paths| paths.into_iter().map(ProjectManifest::CargoToml).collect()); @@ -132,8 +136,11 @@ impl ProjectManifest { .filter_map(Result::ok) .map(|it| it.path().join("Cargo.toml")) .filter(|it| it.exists()) + .map(Utf8PathBuf::from_path_buf) + .filter_map(Result::ok) .map(AbsPathBuf::try_from) - .filter_map(|it| it.ok()?.try_into().ok()) + .filter_map(Result::ok) + .filter_map(|it| it.try_into().ok()) .collect() } } @@ -192,3 +199,42 @@ pub enum InvocationLocation { #[default] 
Workspace, } + +/// A set of cfg-overrides per crate. +#[derive(Default, Debug, Clone, Eq, PartialEq)] +pub struct CfgOverrides { + /// A global set of overrides matching all crates. + pub global: cfg::CfgDiff, + /// A set of overrides matching specific crates. + pub selective: rustc_hash::FxHashMap, +} + +impl CfgOverrides { + pub fn len(&self) -> usize { + self.global.len() + self.selective.values().map(|it| it.len()).sum::() + } + + pub fn apply(&self, cfg_options: &mut cfg::CfgOptions, name: &str) { + if !self.global.is_empty() { + cfg_options.apply_diff(self.global.clone()); + }; + if let Some(diff) = self.selective.get(name) { + cfg_options.apply_diff(diff.clone()); + }; + } +} + +fn parse_cfg(s: &str) -> Result { + let res = match s.split_once('=') { + Some((key, value)) => { + if !(value.starts_with('"') && value.ends_with('"')) { + return Err(format!("Invalid cfg ({s:?}), value should be in quotes")); + } + let key = intern::Symbol::intern(key); + let value = intern::Symbol::intern(&value[1..value.len() - 1]); + cfg::CfgAtom::KeyValue { key, value } + } + None => cfg::CfgAtom::Flag(intern::Symbol::intern(s)), + }; + Ok(res) +} diff --git a/src/tools/rust-analyzer/crates/project-model/src/manifest_path.rs b/src/tools/rust-analyzer/crates/project-model/src/manifest_path.rs index 2331c0c36c3ad..a8be5dff7b690 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/manifest_path.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/manifest_path.rs @@ -64,6 +64,18 @@ impl AsRef for ManifestPath { } } +impl AsRef for ManifestPath { + fn as_ref(&self) -> &std::path::Path { + self.file.as_ref() + } +} + +impl AsRef for ManifestPath { + fn as_ref(&self) -> &std::ffi::OsStr { + self.file.as_ref() + } +} + impl Borrow for ManifestPath { fn borrow(&self) -> &AbsPath { self.file.borrow() diff --git a/src/tools/rust-analyzer/crates/project-model/src/project_json.rs b/src/tools/rust-analyzer/crates/project-model/src/project_json.rs index cf0a6ad402508..7dea0c3839841 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/project_json.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/project_json.rs @@ -50,12 +50,13 @@ //! rust-project.json over time via configuration request!) use base_db::{CrateDisplayName, CrateName}; +use cfg::CfgAtom; use paths::{AbsPath, AbsPathBuf, Utf8PathBuf}; use rustc_hash::FxHashMap; use serde::{de, Deserialize, Serialize}; use span::Edition; -use crate::{cfg::CfgFlag, ManifestPath, TargetKind}; +use crate::{ManifestPath, TargetKind}; /// Roots and crates that compose this Rust project. #[derive(Clone, Debug, Eq, PartialEq)] @@ -73,106 +74,6 @@ pub struct ProjectJson { runnables: Vec, } -/// A crate points to the root module of a crate and lists the dependencies of the crate. This is -/// useful in creating the crate graph. -#[derive(Clone, Debug, Eq, PartialEq)] -pub struct Crate { - pub(crate) display_name: Option, - pub root_module: AbsPathBuf, - pub(crate) edition: Edition, - pub(crate) version: Option, - pub(crate) deps: Vec, - pub(crate) cfg: Vec, - pub(crate) target: Option, - pub(crate) env: FxHashMap, - pub(crate) proc_macro_dylib_path: Option, - pub(crate) is_workspace_member: bool, - pub(crate) include: Vec, - pub(crate) exclude: Vec, - pub(crate) is_proc_macro: bool, - pub(crate) repository: Option, - pub build: Option, -} - -/// Additional, build-specific data about a crate. -#[derive(Clone, Debug, Eq, PartialEq)] -pub struct Build { - /// The name associated with this crate. 
- /// - /// This is determined by the build system that produced - /// the `rust-project.json` in question. For instance, if buck were used, - /// the label might be something like `//ide/rust/rust-analyzer:rust-analyzer`. - /// - /// Do not attempt to parse the contents of this string; it is a build system-specific - /// identifier similar to [`Crate::display_name`]. - pub label: String, - /// Path corresponding to the build system-specific file defining the crate. - /// - /// It is roughly analogous to [`ManifestPath`], but it should *not* be used with - /// [`crate::ProjectManifest::from_manifest_file`], as the build file may not be - /// be in the `rust-project.json`. - pub build_file: Utf8PathBuf, - /// The kind of target. - /// - /// Examples (non-exhaustively) include [`TargetKind::Bin`], [`TargetKind::Lib`], - /// and [`TargetKind::Test`]. This information is used to determine what sort - /// of runnable codelens to provide, if any. - pub target_kind: TargetKind, -} - -/// A template-like structure for describing runnables. -/// -/// These are used for running and debugging binaries and tests without encoding -/// build system-specific knowledge into rust-analyzer. -/// -/// # Example -/// -/// Below is an example of a test runnable. `{label}` and `{test_id}` -/// are explained in [`Runnable::args`]'s documentation. -/// -/// ```json -/// { -/// "program": "buck", -/// "args": [ -/// "test", -/// "{label}", -/// "--", -/// "{test_id}", -/// "--print-passing-details" -/// ], -/// "cwd": "/home/user/repo-root/", -/// "kind": "testOne" -/// } -/// ``` -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct Runnable { - /// The program invoked by the runnable. - /// - /// For example, this might be `cargo`, `buck`, or `bazel`. - pub program: String, - /// The arguments passed to [`Runnable::program`]. - /// - /// The args can contain two template strings: `{label}` and `{test_id}`. - /// rust-analyzer will find and replace `{label}` with [`Build::label`] and - /// `{test_id}` with the test name. - pub args: Vec, - /// The current working directory of the runnable. - pub cwd: Utf8PathBuf, - pub kind: RunnableKind, -} - -/// The kind of runnable. -#[derive(Debug, Clone, PartialEq, Eq)] -pub enum RunnableKind { - Check, - - /// Can run a binary. - Run, - - /// Run a single test. - TestOne, -} - impl ProjectJson { /// Create a new ProjectJson instance. /// @@ -301,6 +202,106 @@ impl ProjectJson { } } +/// A crate points to the root module of a crate and lists the dependencies of the crate. This is +/// useful in creating the crate graph. +#[derive(Clone, Debug, Eq, PartialEq)] +pub struct Crate { + pub(crate) display_name: Option, + pub root_module: AbsPathBuf, + pub(crate) edition: Edition, + pub(crate) version: Option, + pub(crate) deps: Vec, + pub(crate) cfg: Vec, + pub(crate) target: Option, + pub(crate) env: FxHashMap, + pub(crate) proc_macro_dylib_path: Option, + pub(crate) is_workspace_member: bool, + pub(crate) include: Vec, + pub(crate) exclude: Vec, + pub(crate) is_proc_macro: bool, + pub(crate) repository: Option, + pub build: Option, +} + +/// Additional, build-specific data about a crate. +#[derive(Clone, Debug, Eq, PartialEq)] +pub struct Build { + /// The name associated with this crate. + /// + /// This is determined by the build system that produced + /// the `rust-project.json` in question. For instance, if buck were used, + /// the label might be something like `//ide/rust/rust-analyzer:rust-analyzer`. 
+ /// + /// Do not attempt to parse the contents of this string; it is a build system-specific + /// identifier similar to [`Crate::display_name`]. + pub label: String, + /// Path corresponding to the build system-specific file defining the crate. + /// + /// It is roughly analogous to [`ManifestPath`], but it should *not* be used with + /// [`crate::ProjectManifest::from_manifest_file`], as the build file may not be + /// be in the `rust-project.json`. + pub build_file: Utf8PathBuf, + /// The kind of target. + /// + /// Examples (non-exhaustively) include [`TargetKind::Bin`], [`TargetKind::Lib`], + /// and [`TargetKind::Test`]. This information is used to determine what sort + /// of runnable codelens to provide, if any. + pub target_kind: TargetKind, +} + +/// A template-like structure for describing runnables. +/// +/// These are used for running and debugging binaries and tests without encoding +/// build system-specific knowledge into rust-analyzer. +/// +/// # Example +/// +/// Below is an example of a test runnable. `{label}` and `{test_id}` +/// are explained in [`Runnable::args`]'s documentation. +/// +/// ```json +/// { +/// "program": "buck", +/// "args": [ +/// "test", +/// "{label}", +/// "--", +/// "{test_id}", +/// "--print-passing-details" +/// ], +/// "cwd": "/home/user/repo-root/", +/// "kind": "testOne" +/// } +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct Runnable { + /// The program invoked by the runnable. + /// + /// For example, this might be `cargo`, `buck`, or `bazel`. + pub program: String, + /// The arguments passed to [`Runnable::program`]. + /// + /// The args can contain two template strings: `{label}` and `{test_id}`. + /// rust-analyzer will find and replace `{label}` with [`Build::label`] and + /// `{test_id}` with the test name. + pub args: Vec, + /// The current working directory of the runnable. + pub cwd: Utf8PathBuf, + pub kind: RunnableKind, +} + +/// The kind of runnable. +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum RunnableKind { + Check, + + /// Can run a binary. + Run, + + /// Run a single test. + TestOne, +} + #[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)] pub struct ProjectJsonData { sysroot: Option, @@ -319,7 +320,8 @@ struct CrateData { version: Option, deps: Vec, #[serde(default)] - cfg: Vec, + #[serde(with = "cfg_")] + cfg: Vec, target: Option, #[serde(default)] env: FxHashMap, @@ -334,6 +336,33 @@ struct CrateData { build: Option, } +mod cfg_ { + use cfg::CfgAtom; + use serde::{Deserialize, Serialize}; + + pub(super) fn deserialize<'de, D>(deserializer: D) -> Result, D::Error> + where + D: serde::Deserializer<'de>, + { + let cfg: Vec = Vec::deserialize(deserializer)?; + cfg.into_iter().map(|it| crate::parse_cfg(&it).map_err(serde::de::Error::custom)).collect() + } + pub(super) fn serialize(cfg: &[CfgAtom], serializer: S) -> Result + where + S: serde::Serializer, + { + cfg.iter() + .map(|cfg| match cfg { + CfgAtom::Flag(flag) => flag.as_str().to_owned(), + CfgAtom::KeyValue { key, value } => { + format!("{}=\"{}\"", key.as_str(), value.as_str()) + } + }) + .collect::>() + .serialize(serializer) + } +} + #[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)] #[serde(rename = "edition")] enum EditionData { @@ -378,6 +407,29 @@ pub enum TargetKindData { Lib, Test, } +/// Identifies a crate by position in the crates array. +/// +/// This will differ from `CrateId` when multiple `ProjectJson` +/// workspaces are loaded. 
+#[derive(Serialize, Deserialize, Debug, Clone, Copy, Eq, PartialEq, Hash)] +#[serde(transparent)] +pub struct CrateArrayIdx(pub usize); + +#[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)] +pub(crate) struct Dep { + /// Identifies a crate by position in the crates array. + #[serde(rename = "crate")] + pub(crate) krate: CrateArrayIdx, + #[serde(serialize_with = "serialize_crate_name")] + #[serde(deserialize_with = "deserialize_crate_name")] + pub(crate) name: CrateName, +} + +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] +struct CrateSource { + include_dirs: Vec, + exclude_dirs: Vec, +} impl From for TargetKind { fn from(data: TargetKindData) -> Self { @@ -416,30 +468,6 @@ impl From for RunnableKind { } } -/// Identifies a crate by position in the crates array. -/// -/// This will differ from `CrateId` when multiple `ProjectJson` -/// workspaces are loaded. -#[derive(Serialize, Deserialize, Debug, Clone, Copy, Eq, PartialEq, Hash)] -#[serde(transparent)] -pub struct CrateArrayIdx(pub usize); - -#[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)] -pub(crate) struct Dep { - /// Identifies a crate by position in the crates array. - #[serde(rename = "crate")] - pub(crate) krate: CrateArrayIdx, - #[serde(serialize_with = "serialize_crate_name")] - #[serde(deserialize_with = "deserialize_crate_name")] - pub(crate) name: CrateName, -} - -#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] -struct CrateSource { - include_dirs: Vec, - exclude_dirs: Vec, -} - fn deserialize_crate_name<'de, D>(de: D) -> std::result::Result where D: de::Deserializer<'de>, diff --git a/src/tools/rust-analyzer/crates/project-model/src/rustc_cfg.rs b/src/tools/rust-analyzer/crates/project-model/src/rustc_cfg.rs index 599897f84a082..aa73ff891004b 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/rustc_cfg.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/rustc_cfg.rs @@ -1,10 +1,12 @@ //! Runs `rustc --print cfg` to get built-in cfg flags. use anyhow::Context; +use cfg::CfgAtom; +use intern::Symbol; use rustc_hash::FxHashMap; use toolchain::Tool; -use crate::{cfg::CfgFlag, utf8_stdout, ManifestPath, Sysroot}; +use crate::{utf8_stdout, ManifestPath, Sysroot}; /// Determines how `rustc --print cfg` is discovered and invoked. 
pub(crate) enum RustcCfgConfig<'a> { @@ -20,15 +22,15 @@ pub(crate) fn get( target: Option<&str>, extra_env: &FxHashMap, config: RustcCfgConfig<'_>, -) -> Vec { +) -> Vec { let _p = tracing::info_span!("rustc_cfg::get").entered(); - let mut res = Vec::with_capacity(6 * 2 + 1); + let mut res: Vec<_> = Vec::with_capacity(6 * 2 + 1); // Some nightly-only cfgs, which are required for stdlib - res.push(CfgFlag::Atom("target_thread_local".into())); + res.push(CfgAtom::Flag(Symbol::intern("target_thread_local"))); for ty in ["8", "16", "32", "64", "cas", "ptr"] { for key in ["target_has_atomic", "target_has_atomic_load_store"] { - res.push(CfgFlag::KeyValue { key: key.to_owned(), value: ty.into() }); + res.push(CfgAtom::KeyValue { key: Symbol::intern(key), value: Symbol::intern(ty) }); } } @@ -42,8 +44,7 @@ pub(crate) fn get( } }; - let rustc_cfgs = - rustc_cfgs.lines().map(|it| it.parse::()).collect::, _>>(); + let rustc_cfgs = rustc_cfgs.lines().map(crate::parse_cfg).collect::, _>>(); match rustc_cfgs { Ok(rustc_cfgs) => { diff --git a/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs b/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs index 1eeec4cedeba1..419fac3f41f75 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs @@ -123,32 +123,27 @@ impl Sysroot { // FIXME: Expose a builder api as loading the sysroot got way too modular and complicated. impl Sysroot { /// Attempts to discover the toolchain's sysroot from the given `dir`. - pub fn discover( - dir: &AbsPath, - extra_env: &FxHashMap, - metadata: bool, - ) -> Sysroot { + pub fn discover(dir: &AbsPath, extra_env: &FxHashMap) -> Sysroot { let sysroot_dir = discover_sysroot_dir(dir, extra_env); let sysroot_src_dir = sysroot_dir.as_ref().ok().map(|sysroot_dir| { discover_sysroot_src_dir_or_add_component(sysroot_dir, dir, extra_env) }); - Sysroot::load_core_check(Some(sysroot_dir), sysroot_src_dir, metadata) + Sysroot::load_core_check(Some(sysroot_dir), sysroot_src_dir) } pub fn discover_with_src_override( current_dir: &AbsPath, extra_env: &FxHashMap, sysroot_src_dir: AbsPathBuf, - metadata: bool, ) -> Sysroot { let sysroot_dir = discover_sysroot_dir(current_dir, extra_env); - Sysroot::load_core_check(Some(sysroot_dir), Some(Ok(sysroot_src_dir)), metadata) + Sysroot::load_core_check(Some(sysroot_dir), Some(Ok(sysroot_src_dir))) } - pub fn discover_sysroot_src_dir(sysroot_dir: AbsPathBuf, metadata: bool) -> Sysroot { + pub fn discover_sysroot_src_dir(sysroot_dir: AbsPathBuf) -> Sysroot { let sysroot_src_dir = discover_sysroot_src_dir(&sysroot_dir) .ok_or_else(|| format_err!("can't find standard library sources in {sysroot_dir}")); - Sysroot::load_core_check(Some(Ok(sysroot_dir)), Some(sysroot_src_dir), metadata) + Sysroot::load_core_check(Some(Ok(sysroot_dir)), Some(sysroot_src_dir)) } pub fn discover_rustc_src(&self) -> Option { @@ -191,20 +186,15 @@ impl Sysroot { }) } - pub fn load( - sysroot_dir: Option, - sysroot_src_dir: Option, - metadata: bool, - ) -> Sysroot { - Self::load_core_check(sysroot_dir.map(Ok), sysroot_src_dir.map(Ok), metadata) + pub fn load(sysroot_dir: Option, sysroot_src_dir: Option) -> Sysroot { + Self::load_core_check(sysroot_dir.map(Ok), sysroot_src_dir.map(Ok)) } fn load_core_check( sysroot_dir: Option>, sysroot_src_dir: Option>, - metadata: bool, ) -> Sysroot { - let mut sysroot = Self::load_(sysroot_dir, sysroot_src_dir, metadata); + let mut sysroot = Self::load_(sysroot_dir, sysroot_src_dir); if 
sysroot.error.is_none() { if let Some(src_root) = &sysroot.src_root { let has_core = match &sysroot.mode { @@ -230,7 +220,6 @@ impl Sysroot { fn load_( sysroot_dir: Option>, sysroot_src_dir: Option>, - metadata: bool, ) -> Sysroot { let sysroot_dir = match sysroot_dir { Some(Ok(sysroot_dir)) => Some(sysroot_dir), @@ -263,119 +252,16 @@ impl Sysroot { } } }; - if metadata { - let sysroot: Option<_> = (|| { - let sysroot_cargo_toml = ManifestPath::try_from( - AbsPathBuf::try_from(&*format!("{sysroot_src_dir}/sysroot/Cargo.toml")).ok()?, - ) - .ok()?; - let current_dir = - AbsPathBuf::try_from(&*format!("{sysroot_src_dir}/sysroot")).ok()?; - - let mut cargo_config = CargoConfig::default(); - // the sysroot uses `public-dependency`, so we make cargo think it's a nightly - cargo_config.extra_env.insert( - "__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS".to_owned(), - "nightly".to_owned(), - ); - - let res = CargoWorkspace::fetch_metadata( - &sysroot_cargo_toml, - ¤t_dir, - &cargo_config, - &Sysroot::empty(), - &|_| (), - ) - .map_err(|e| { - tracing::error!( - "failed to load sysroot `{sysroot_src_dir}/sysroot/Cargo.toml`: {}", - e - ); - e - }); - if let Err(e) = - std::fs::remove_file(format!("{sysroot_src_dir}/sysroot/Cargo.lock")) - { - tracing::error!( - "failed to remove sysroot `{sysroot_src_dir}/sysroot/Cargo.lock`: {}", - e - ) - } - let mut res = res.ok()?; - - // Patch out `rustc-std-workspace-*` crates to point to the real crates. - // This is done prior to `CrateGraph` construction to avoid having duplicate `std` targets. - - let mut fake_core = None; - let mut fake_alloc = None; - let mut fake_std = None; - let mut real_core = None; - let mut real_alloc = None; - let mut real_std = None; - res.packages.iter().enumerate().for_each(|(idx, package)| { - match package.name.strip_prefix("rustc-std-workspace-") { - Some("core") => fake_core = Some((idx, package.id.clone())), - Some("alloc") => fake_alloc = Some((idx, package.id.clone())), - Some("std") => fake_std = Some((idx, package.id.clone())), - Some(_) => { - tracing::warn!("unknown rustc-std-workspace-* crate: {}", package.name) - } - None => match &*package.name { - "core" => real_core = Some(package.id.clone()), - "alloc" => real_alloc = Some(package.id.clone()), - "std" => real_std = Some(package.id.clone()), - _ => (), - }, - } - }); - - let patches = - [fake_core.zip(real_core), fake_alloc.zip(real_alloc), fake_std.zip(real_std)] - .into_iter() - .flatten(); - - let resolve = res.resolve.as_mut().expect("metadata executed with deps"); - let mut remove_nodes = vec![]; - for (idx, node) in resolve.nodes.iter_mut().enumerate() { - // Replace them in the dependency list - node.deps.iter_mut().for_each(|dep| { - if let Some((_, real)) = - patches.clone().find(|((_, fake_id), _)| *fake_id == dep.pkg) - { - dep.pkg = real; - } - }); - if patches.clone().any(|((_, fake), _)| fake == node.id) { - remove_nodes.push(idx); - } - } - // Remove the fake ones from the resolve data - remove_nodes.into_iter().rev().for_each(|r| { - resolve.nodes.remove(r); - }); - // Remove the fake ones from the packages - patches.map(|((r, _), _)| r).sorted().rev().for_each(|r| { - res.packages.remove(r); - }); - - res.workspace_members = res - .packages - .iter() - .filter(|&package| RELEVANT_SYSROOT_CRATES.contains(&&*package.name)) - .map(|package| package.id.clone()) - .collect(); - let cargo_workspace = CargoWorkspace::new(res, sysroot_cargo_toml); - Some(Sysroot { - root: sysroot_dir.clone(), - src_root: Some(sysroot_src_dir.clone()), - mode: 
SysrootMode::Workspace(cargo_workspace), - error: None, - }) - })(); - if let Some(sysroot) = sysroot { + let library_manifest = ManifestPath::try_from(sysroot_src_dir.join("Cargo.toml")).unwrap(); + if fs::metadata(&library_manifest).is_ok() { + if let Some(sysroot) = + Self::load_library_via_cargo(library_manifest, &sysroot_dir, &sysroot_src_dir) + { return sysroot; } } + tracing::debug!("Stitching sysroot library: {sysroot_src_dir}"); + let mut stitched = Stitched { crates: Arena::default() }; for path in SYSROOT_CRATES.trim().lines() { @@ -384,7 +270,7 @@ impl Sysroot { .into_iter() .map(|it| sysroot_src_dir.join(it)) .filter_map(|it| ManifestPath::try_from(it).ok()) - .find(|it| fs::metadata(it.as_ref()).is_ok()); + .find(|it| fs::metadata(it).is_ok()); if let Some(root) = root { stitched.crates.alloc(SysrootCrateData { @@ -425,6 +311,92 @@ impl Sysroot { error: None, } } + + fn load_library_via_cargo( + library_manifest: ManifestPath, + sysroot_dir: &Option, + sysroot_src_dir: &AbsPathBuf, + ) -> Option { + tracing::debug!("Loading library metadata: {library_manifest}"); + let mut cargo_config = CargoConfig::default(); + // the sysroot uses `public-dependency`, so we make cargo think it's a nightly + cargo_config.extra_env.insert( + "__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS".to_owned(), + "nightly".to_owned(), + ); + + let mut res = match CargoWorkspace::fetch_metadata( + &library_manifest, + sysroot_src_dir, + &cargo_config, + &Sysroot::empty(), + // Make sure we never attempt to write to the sysroot + true, + &|_| (), + ) { + Ok(it) => it, + Err(e) => { + tracing::error!("`cargo metadata` failed on `{library_manifest}` : {e}"); + return None; + } + }; + + // Patch out `rustc-std-workspace-*` crates to point to the real crates. + // This is done prior to `CrateGraph` construction to prevent de-duplication logic from failing. 
+ let patches = { + let mut fake_core = None; + let mut fake_alloc = None; + let mut fake_std = None; + let mut real_core = None; + let mut real_alloc = None; + let mut real_std = None; + res.packages.iter().enumerate().for_each(|(idx, package)| { + match package.name.strip_prefix("rustc-std-workspace-") { + Some("core") => fake_core = Some((idx, package.id.clone())), + Some("alloc") => fake_alloc = Some((idx, package.id.clone())), + Some("std") => fake_std = Some((idx, package.id.clone())), + Some(_) => { + tracing::warn!("unknown rustc-std-workspace-* crate: {}", package.name) + } + None => match &*package.name { + "core" => real_core = Some(package.id.clone()), + "alloc" => real_alloc = Some(package.id.clone()), + "std" => real_std = Some(package.id.clone()), + _ => (), + }, + } + }); + + [fake_core.zip(real_core), fake_alloc.zip(real_alloc), fake_std.zip(real_std)] + .into_iter() + .flatten() + }; + + let resolve = res.resolve.as_mut().expect("metadata executed with deps"); + resolve.nodes.retain_mut(|node| { + // Replace `rustc-std-workspace` crate with the actual one in the dependency list + node.deps.iter_mut().for_each(|dep| { + let real_pkg = patches.clone().find(|((_, fake_id), _)| *fake_id == dep.pkg); + if let Some((_, real)) = real_pkg { + dep.pkg = real; + } + }); + // Remove this node if it's a fake one + !patches.clone().any(|((_, fake), _)| fake == node.id) + }); + // Remove the fake ones from the package list + patches.map(|((idx, _), _)| idx).sorted().rev().for_each(|idx| { + res.packages.remove(idx); + }); + + let cargo_workspace = CargoWorkspace::new(res, library_manifest); + Some(Sysroot { + root: sysroot_dir.clone(), + src_root: Some(sysroot_src_dir.clone()), + mode: SysrootMode::Workspace(cargo_workspace), + error: None, + }) + } } fn discover_sysroot_dir( @@ -471,13 +443,13 @@ fn discover_sysroot_src_dir_or_add_component( get_rust_src(sysroot_path) }) .ok_or_else(|| { - format_err!( - "\ + let error = "\ can't load standard library from sysroot {sysroot_path} (discovered via `rustc --print sysroot`) -try installing the Rust source the same way you installed rustc", - ) +try installing the Rust source the same way you installed rustc"; + tracing::error!(error); + format_err!(error) }) } @@ -485,7 +457,7 @@ fn get_rustc_src(sysroot_path: &AbsPath) -> Option { let rustc_src = sysroot_path.join("lib/rustlib/rustc-src/rust/compiler/rustc/Cargo.toml"); let rustc_src = ManifestPath::try_from(rustc_src).ok()?; tracing::debug!("checking for rustc source code: {rustc_src}"); - if fs::metadata(rustc_src.as_ref()).is_ok() { + if fs::metadata(&rustc_src).is_ok() { Some(rustc_src) } else { None @@ -531,5 +503,3 @@ test"; const PROC_MACRO_DEPS: &str = " std core"; - -const RELEVANT_SYSROOT_CRATES: &[&str] = &["core", "alloc", "std", "test", "proc_macro"]; diff --git a/src/tools/rust-analyzer/crates/project-model/src/tests.rs b/src/tools/rust-analyzer/crates/project-model/src/tests.rs index 8f5457bf99af9..e3bc81e1963d9 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/tests.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/tests.rs @@ -12,8 +12,8 @@ use span::FileId; use triomphe::Arc; use crate::{ - workspace::ProjectWorkspaceKind, CargoWorkspace, CfgOverrides, ManifestPath, ProjectJson, - ProjectJsonData, ProjectWorkspace, Sysroot, WorkspaceBuildScripts, + sysroot::SysrootMode, workspace::ProjectWorkspaceKind, CargoWorkspace, CfgOverrides, + ManifestPath, ProjectJson, ProjectJsonData, ProjectWorkspace, Sysroot, WorkspaceBuildScripts, }; fn load_cargo(file: 
&str) -> (CrateGraph, ProcMacroPaths) { @@ -146,7 +146,7 @@ fn get_fake_sysroot() -> Sysroot { // fake sysroot, so we give them both the same path: let sysroot_dir = AbsPathBuf::assert(sysroot_path); let sysroot_src_dir = sysroot_dir.clone(); - Sysroot::load(Some(sysroot_dir), Some(sysroot_src_dir), false) + Sysroot::load(Some(sysroot_dir), Some(sysroot_src_dir)) } fn rooted_project_json(data: ProjectJsonData) -> ProjectJson { @@ -274,10 +274,9 @@ fn crate_graph_dedup() { } #[test] +// FIXME Remove the ignore +#[ignore = "requires nightly until the sysroot ships a cargo workspace for library on stable"] fn smoke_test_real_sysroot_cargo() { - if std::env::var("SYSROOT_CARGO_METADATA").is_err() { - return; - } let file_map = &mut FxHashMap::::default(); let meta: Metadata = get_test_json_file("hello-world-metadata.json"); let manifest_path = @@ -286,8 +285,8 @@ fn smoke_test_real_sysroot_cargo() { let sysroot = Sysroot::discover( AbsPath::assert(Utf8Path::new(env!("CARGO_MANIFEST_DIR"))), &Default::default(), - true, ); + assert!(matches!(sysroot.mode(), SysrootMode::Workspace(_))); let project_workspace = ProjectWorkspace { kind: ProjectWorkspaceKind::Cargo { diff --git a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs index 31d1c77fd07c4..5620dfade2d22 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs @@ -20,16 +20,15 @@ use tracing::instrument; use triomphe::Arc; use crate::{ - build_scripts::BuildScriptOutput, + build_dependencies::BuildScriptOutput, cargo_workspace::{DepKind, PackageData, RustLibSource}, - cfg::{CfgFlag, CfgOverrides}, env::{cargo_config_env, inject_cargo_env, inject_cargo_package_env, inject_rustc_tool_env}, project_json::{Crate, CrateArrayIdx}, rustc_cfg::{self, RustcCfgConfig}, sysroot::{SysrootCrate, SysrootMode}, target_data_layout::{self, RustcDataLayoutConfig}, - utf8_stdout, CargoConfig, CargoWorkspace, InvocationStrategy, ManifestPath, Package, - ProjectJson, ProjectManifest, Sysroot, TargetData, TargetKind, WorkspaceBuildScripts, + utf8_stdout, CargoConfig, CargoWorkspace, CfgOverrides, InvocationStrategy, ManifestPath, + Package, ProjectJson, ProjectManifest, Sysroot, TargetData, TargetKind, WorkspaceBuildScripts, }; use tracing::{debug, error, info}; @@ -55,7 +54,7 @@ pub struct ProjectWorkspace { /// `rustc --print cfg`. // FIXME: make this a per-crate map, as, eg, build.rs might have a // different target. - pub rustc_cfg: Vec, + pub rustc_cfg: Vec, /// The toolchain version used by this workspace. pub toolchain: Option, /// The target data layout queried for workspace. 
@@ -194,7 +193,7 @@ impl ProjectWorkspace { ) -> anyhow::Result { let res = match manifest { ProjectManifest::ProjectJson(project_json) => { - let file = fs::read_to_string(project_json.as_ref()) + let file = fs::read_to_string(project_json) .with_context(|| format!("Failed to read json file {project_json}"))?; let data = serde_json::from_str(&file) .with_context(|| format!("Failed to deserialize json file {project_json}"))?; @@ -213,28 +212,22 @@ impl ProjectWorkspace { } ProjectManifest::CargoToml(cargo_toml) => { let sysroot = match (&config.sysroot, &config.sysroot_src) { - (Some(RustLibSource::Discover), None) => Sysroot::discover( - cargo_toml.parent(), - &config.extra_env, - config.sysroot_query_metadata, - ), + (Some(RustLibSource::Discover), None) => { + Sysroot::discover(cargo_toml.parent(), &config.extra_env) + } (Some(RustLibSource::Discover), Some(sysroot_src)) => { Sysroot::discover_with_src_override( cargo_toml.parent(), &config.extra_env, sysroot_src.clone(), - config.sysroot_query_metadata, ) } - (Some(RustLibSource::Path(path)), None) => Sysroot::discover_sysroot_src_dir( - path.clone(), - config.sysroot_query_metadata, - ), - (Some(RustLibSource::Path(sysroot)), Some(sysroot_src)) => Sysroot::load( - Some(sysroot.clone()), - Some(sysroot_src.clone()), - config.sysroot_query_metadata, - ), + (Some(RustLibSource::Path(path)), None) => { + Sysroot::discover_sysroot_src_dir(path.clone()) + } + (Some(RustLibSource::Path(sysroot)), Some(sysroot_src)) => { + Sysroot::load(Some(sysroot.clone()), Some(sysroot_src.clone())) + } (None, _) => Sysroot::empty(), }; tracing::info!(workspace = %cargo_toml, src_root = ?sysroot.src_root(), root = ?sysroot.root(), "Using sysroot"); @@ -260,6 +253,7 @@ impl ProjectWorkspace { ..config.clone() }, &sysroot, + false, progress, ) { Ok(meta) => { @@ -312,7 +306,8 @@ impl ProjectWorkspace { cargo_toml.parent(), config, &sysroot, - progress, + false, + progress, ) .with_context(|| { format!( @@ -350,8 +345,7 @@ impl ProjectWorkspace { extra_env: &FxHashMap, cfg_overrides: &CfgOverrides, ) -> ProjectWorkspace { - let sysroot = - Sysroot::load(project_json.sysroot.clone(), project_json.sysroot_src.clone(), false); + let sysroot = Sysroot::load(project_json.sysroot.clone(), project_json.sysroot_src.clone()); let cfg_config = RustcCfgConfig::Rustc(&sysroot); let data_layout_config = RustcDataLayoutConfig::Rustc(&sysroot); let toolchain = match get_toolchain_version( @@ -386,12 +380,8 @@ impl ProjectWorkspace { ) -> anyhow::Result { let dir = detached_file.parent(); let sysroot = match &config.sysroot { - Some(RustLibSource::Path(path)) => { - Sysroot::discover_sysroot_src_dir(path.clone(), config.sysroot_query_metadata) - } - Some(RustLibSource::Discover) => { - Sysroot::discover(dir, &config.extra_env, config.sysroot_query_metadata) - } + Some(RustLibSource::Path(path)) => Sysroot::discover_sysroot_src_dir(path.clone()), + Some(RustLibSource::Discover) => Sysroot::discover(dir, &config.extra_env), None => Sysroot::empty(), }; @@ -412,14 +402,14 @@ impl ProjectWorkspace { ); let cargo_script = - CargoWorkspace::fetch_metadata(detached_file, dir, config, &sysroot, &|_| ()).ok().map( - |ws| { + CargoWorkspace::fetch_metadata(detached_file, dir, config, &sysroot, false, &|_| ()) + .ok() + .map(|ws| { ( CargoWorkspace::new(ws, detached_file.clone()), WorkspaceBuildScripts::default(), ) - }, - ); + }); let cargo_config_extra_env = cargo_config_env(detached_file, &config.extra_env, &sysroot); Ok(ProjectWorkspace { @@ -651,7 +641,7 @@ impl ProjectWorkspace { 
ProjectWorkspaceKind::DetachedFile { file, cargo: cargo_script, .. } => { iter::once(PackageRoot { is_local: true, - include: vec![file.as_ref().to_owned()], + include: vec![file.to_path_buf()], exclude: Vec::new(), }) .chain(cargo_script.iter().flat_map(|(cargo, build_scripts)| { @@ -851,7 +841,7 @@ impl ProjectWorkspace { #[instrument(skip_all)] fn project_json_to_crate_graph( - rustc_cfg: Vec, + rustc_cfg: Vec, load: FileLoader<'_>, project: &ProjectJson, sysroot: &Sysroot, @@ -863,8 +853,8 @@ fn project_json_to_crate_graph( let (public_deps, libproc_macro) = sysroot_to_crate_graph(crate_graph, sysroot, rustc_cfg.clone(), load); - let r_a_cfg_flag = CfgFlag::Atom("rust_analyzer".to_owned()); - let mut cfg_cache: FxHashMap<&str, Vec> = FxHashMap::default(); + let r_a_cfg_flag = CfgAtom::Flag(sym::rust_analyzer.clone()); + let mut cfg_cache: FxHashMap<&str, Vec> = FxHashMap::default(); let idx_to_crate_id: FxHashMap = project .crates() @@ -971,7 +961,7 @@ fn cargo_to_crate_graph( rustc: Option<&(CargoWorkspace, WorkspaceBuildScripts)>, cargo: &CargoWorkspace, sysroot: &Sysroot, - rustc_cfg: Vec, + rustc_cfg: Vec, override_cfg: &CfgOverrides, build_scripts: &WorkspaceBuildScripts, ) -> (CrateGraph, ProcMacroPaths) { @@ -1154,7 +1144,7 @@ fn cargo_to_crate_graph( } fn detached_file_to_crate_graph( - rustc_cfg: Vec, + rustc_cfg: Vec, load: FileLoader<'_>, detached_file: &ManifestPath, sysroot: &Sysroot, @@ -1317,11 +1307,10 @@ fn add_target_crate_root( None } else { let mut potential_cfg_options = cfg_options.clone(); - potential_cfg_options.extend( - pkg.features - .iter() - .map(|feat| CfgFlag::KeyValue { key: "feature".into(), value: feat.0.into() }), - ); + potential_cfg_options.extend(pkg.features.iter().map(|feat| CfgAtom::KeyValue { + key: sym::feature.clone(), + value: Symbol::intern(feat.0), + })); Some(potential_cfg_options) }; let cfg_options = { @@ -1358,12 +1347,13 @@ fn add_target_crate_root( ); if let TargetKind::Lib { is_proc_macro: true } = kind { let proc_macro = match build_data.as_ref().map(|it| it.proc_macro_dylib_path.as_ref()) { - Some(it) => it.cloned().map(|path| Ok((cargo_name.to_owned(), path))), - None => Some(Err("proc-macro crate is missing its build data".to_owned())), + Some(it) => match it { + Some(path) => Ok((cargo_name.to_owned(), path.clone())), + None => Err("proc-macro crate build data is missing dylib path".to_owned()), + }, + None => Err("proc-macro crate is missing its build data".to_owned()), }; - if let Some(proc_macro) = proc_macro { - proc_macros.insert(crate_id, proc_macro); - } + proc_macros.insert(crate_id, proc_macro); } crate_id @@ -1386,7 +1376,7 @@ impl SysrootPublicDeps { fn sysroot_to_crate_graph( crate_graph: &mut CrateGraph, sysroot: &Sysroot, - rustc_cfg: Vec, + rustc_cfg: Vec, load: FileLoader<'_>, ) -> (SysrootPublicDeps, Option) { let _p = tracing::info_span!("sysroot_to_crate_graph").entered(); diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml b/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml index bc1b13a649700..eb95f42d755c9 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml +++ b/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml @@ -2,7 +2,7 @@ name = "rust-analyzer" version = "0.0.0" homepage = "https://rust-analyzer.github.io/" -repository = "https://github.com/rust-analyzer/rust-analyzer" +repository.workspace = true description = "A language server for the Rust programming language" documentation = "https://rust-analyzer.github.io/manual.html" autobins = false @@ -21,7 
+21,7 @@ path = "src/bin/main.rs" [dependencies] anyhow.workspace = true -crossbeam-channel = "0.5.5" +crossbeam-channel.workspace = true dirs = "5.0.1" dissimilar.workspace = true itertools.workspace = true @@ -47,9 +47,10 @@ always-assert = "0.2.0" walkdir = "2.3.2" semver.workspace = true memchr = "2.7.1" +cargo_metadata.workspace = true +process-wrap.workspace = true cfg.workspace = true -flycheck.workspace = true hir-def.workspace = true hir-ty.workspace = true hir.workspace = true @@ -82,20 +83,20 @@ xshell.workspace = true test-utils.workspace = true test-fixture.workspace = true -mbe.workspace = true +syntax-bridge.workspace = true [features] jemalloc = ["jemallocator", "profile/jemalloc"] force-always-assert = ["always-assert/force"] sysroot-abi = [] in-rust-tree = [ - "sysroot-abi", - "syntax/in-rust-tree", - "parser/in-rust-tree", - "hir/in-rust-tree", - "hir-def/in-rust-tree", - "hir-ty/in-rust-tree", - "load-cargo/in-rust-tree", + "sysroot-abi", + "syntax/in-rust-tree", + "parser/in-rust-tree", + "hir/in-rust-tree", + "hir-def/in-rust-tree", + "hir-ty/in-rust-tree", + "load-cargo/in-rust-tree", ] [lints] diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs index 6a980a153c991..42953d3b833c2 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs @@ -14,6 +14,7 @@ use std::{env, fs, path::PathBuf, process::ExitCode, sync::Arc}; use anyhow::Context; use lsp_server::Connection; +use paths::Utf8PathBuf; use rust_analyzer::{ cli::flags, config::{Config, ConfigChange, ConfigErrors}, @@ -189,6 +190,7 @@ fn run_server() -> anyhow::Result<()> { let root_path = match root_uri .and_then(|it| it.to_file_path().ok()) .map(patch_path_prefix) + .and_then(|it| Utf8PathBuf::from_path_buf(it).ok()) .and_then(|it| AbsPathBuf::try_from(it).ok()) { Some(it) => it, @@ -218,6 +220,7 @@ fn run_server() -> anyhow::Result<()> { .into_iter() .filter_map(|it| it.uri.to_file_path().ok()) .map(patch_path_prefix) + .filter_map(|it| Utf8PathBuf::from_path_buf(it).ok()) .filter_map(|it| AbsPathBuf::try_from(it).ok()) .collect::>() }) diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs index 380105d2c2141..06f4ba815d0dd 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs @@ -23,7 +23,7 @@ use ide::{ use ide_db::{ base_db::{ salsa::{self, debug::DebugQueryTable, ParallelDatabase}, - SourceDatabase, SourceDatabaseExt, + SourceDatabase, SourceRootDatabase, }, EditionedFileId, LineIndexDatabase, SnippetCap, }; @@ -64,7 +64,6 @@ impl flags::AnalysisStats { true => None, false => Some(RustLibSource::Discover), }, - sysroot_query_metadata: self.query_sysroot_metadata, ..Default::default() }; let no_progress = &|_| (); @@ -977,7 +976,7 @@ impl flags::AnalysisStats { let mut sw = self.stop_watch(); for &file_id in &file_ids { - _ = analysis.diagnostics( + _ = analysis.full_diagnostics( &DiagnosticsConfig { enabled: true, proc_macros_enabled: true, diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs index 4ddeb4ab1b0c3..cdac0e5ef5cca 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs +++ 
b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs @@ -6,7 +6,7 @@ use rustc_hash::FxHashSet; use hir::{db::HirDatabase, Crate, HirFileIdExt, Module}; use ide::{AnalysisHost, AssistResolveStrategy, Diagnostic, DiagnosticsConfig, Severity}; -use ide_db::{base_db::SourceDatabaseExt, LineIndexDatabase}; +use ide_db::{base_db::SourceRootDatabase, LineIndexDatabase}; use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice}; use crate::cli::flags; @@ -63,7 +63,7 @@ impl flags::Diagnostics { _vfs.file_path(file_id.into()) ); for diagnostic in analysis - .diagnostics( + .full_diagnostics( &DiagnosticsConfig::test_sample(), AssistResolveStrategy::None, file_id.into(), diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs index b3b8ab9a40499..2a3e74c680b92 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs @@ -71,9 +71,6 @@ xflags::xflags! { optional --with-deps /// Don't load sysroot crates (`std`, `core` & friends). optional --no-sysroot - /// Run cargo metadata on the sysroot to analyze its third-party dependencies. - /// Requires --no-sysroot to not be set. - optional --query-sysroot-metadata /// Don't run build scripts or load `OUT_DIR` values by running `cargo check` before analysis. optional --disable-build-scripts @@ -214,7 +211,6 @@ pub struct AnalysisStats { pub only: Option, pub with_deps: bool, pub no_sysroot: bool, - pub query_sysroot_metadata: bool, pub disable_build_scripts: bool, pub disable_proc_macros: bool, pub skip_lowering: bool, diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs index 3ff9be7102fbc..016f0edc2dd3c 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs @@ -276,7 +276,7 @@ impl flags::Lsif { eprintln!("Generating LSIF started..."); let now = Instant::now(); let cargo_config = - CargoConfig { sysroot: Some(RustLibSource::Discover), ..Default::default() }; + &CargoConfig { sysroot: Some(RustLibSource::Discover), ..Default::default() }; let no_progress = &|_| (); let load_cargo_config = LoadCargoConfig { load_out_dirs_from_check: true, @@ -284,9 +284,11 @@ impl flags::Lsif { prefill_caches: false, }; let path = AbsPathBuf::assert_utf8(env::current_dir()?.join(self.path)); - let manifest = ProjectManifest::discover_single(&path)?; + let root = ProjectManifest::discover_single(&path)?; + let mut workspace = ProjectWorkspace::load(root, cargo_config, no_progress)?; - let workspace = ProjectWorkspace::load(manifest, &cargo_config, no_progress)?; + let build_scripts = workspace.run_build_scripts(cargo_config, no_progress)?; + workspace.set_build_scripts(build_scripts); let (db, vfs, _proc_macro) = load_workspace(workspace, &cargo_config.extra_env, &load_cargo_config)?; @@ -294,7 +296,7 @@ impl flags::Lsif { let db = host.raw_database(); let analysis = host.analysis(); - let si = StaticIndex::compute(&analysis); + let si = StaticIndex::compute(&analysis, &path.clone().into()); let mut lsif = LsifManager::new(&analysis, db, &vfs); lsif.add_vertex(lsif::Vertex::MetaData(lsif::MetaData { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/run_tests.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/run_tests.rs index 10cb2d5ad6e3d..157ef43dd0a27 100644 --- 
a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/run_tests.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/run_tests.rs @@ -2,7 +2,7 @@ use hir::{Crate, Module}; use hir_ty::db::HirDatabase; -use ide_db::{base_db::SourceDatabaseExt, LineIndexDatabase}; +use ide_db::{base_db::SourceRootDatabase, LineIndexDatabase}; use profile::StopWatch; use project_model::{CargoConfig, RustLibSource}; use syntax::TextRange; diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs index 31565878d845c..75efdfd7dd8fe 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs @@ -8,6 +8,7 @@ use std::{cell::RefCell, fs::read_to_string, panic::AssertUnwindSafe, path::Path use hir::{ChangeWithProcMacros, Crate}; use ide::{AnalysisHost, DiagnosticCode, DiagnosticsConfig}; use itertools::Either; +use paths::Utf8PathBuf; use profile::StopWatch; use project_model::target_data_layout::RustcDataLayoutConfig; use project_model::{ @@ -64,12 +65,12 @@ impl Tester { fn new() -> Result { let mut path = std::env::temp_dir(); path.push("ra-rustc-test.rs"); - let tmp_file = AbsPathBuf::try_from(path).unwrap(); + let tmp_file = AbsPathBuf::try_from(Utf8PathBuf::from_path_buf(path).unwrap()).unwrap(); std::fs::write(&tmp_file, "")?; let cargo_config = CargoConfig { sysroot: Some(RustLibSource::Discover), ..Default::default() }; - let sysroot = Sysroot::discover(tmp_file.parent().unwrap(), &cargo_config.extra_env, false); + let sysroot = Sysroot::discover(tmp_file.parent().unwrap(), &cargo_config.extra_env); let data_layout = target_data_layout::get( RustcDataLayoutConfig::Rustc(&sysroot), None, @@ -154,7 +155,7 @@ impl Tester { let root_file = self.root_file; move || { let res = std::panic::catch_unwind(move || { - analysis.diagnostics( + analysis.full_diagnostics( diagnostic_config, ide::AssistResolveStrategy::None, root_file, diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs index ee134b6c507e8..2fc0ef8d4daac 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs @@ -63,7 +63,7 @@ impl flags::Scip { let db = host.raw_database(); let analysis = host.analysis(); - let si = StaticIndex::compute(&analysis); + let si = StaticIndex::compute(&analysis, &root.clone().into()); let metadata = scip_types::Metadata { version: scip_types::ProtocolVersion::UnspecifiedProtocolVersion.into(), @@ -334,6 +334,7 @@ mod test { use ide::{FilePosition, TextSize}; use scip::symbol::format_symbol; use test_fixture::ChangeFixture; + use vfs::VfsPath; fn position(ra_fixture: &str) -> (AnalysisHost, FilePosition) { let mut host = AnalysisHost::default(); @@ -351,7 +352,8 @@ mod test { let (host, position) = position(ra_fixture); let analysis = host.analysis(); - let si = StaticIndex::compute(&analysis); + let si = + StaticIndex::compute(&analysis, &VfsPath::new_virtual_path("/workspace".to_owned())); let FilePosition { file_id, offset } = position; @@ -384,7 +386,7 @@ mod test { fn basic() { check_symbol( r#" -//- /lib.rs crate:main deps:foo +//- /workspace/lib.rs crate:main deps:foo use foo::example_mod::func; fn main() { func$0(); @@ -485,7 +487,7 @@ pub mod module { fn symbol_for_field() { check_symbol( r#" - //- /lib.rs crate:main deps:foo + //- /workspace/lib.rs 
crate:main deps:foo use foo::St; fn main() { let x = St { a$0: 2 }; @@ -503,7 +505,7 @@ pub mod module { fn symbol_for_param() { check_symbol( r#" -//- /lib.rs crate:main deps:foo +//- /workspace/lib.rs crate:main deps:foo use foo::example_mod::func; fn main() { func(42); @@ -521,7 +523,7 @@ pub mod example_mod { fn symbol_for_closure_param() { check_symbol( r#" -//- /lib.rs crate:main deps:foo +//- /workspace/lib.rs crate:main deps:foo use foo::example_mod::func; fn main() { func(); @@ -541,7 +543,7 @@ pub mod example_mod { fn local_symbol_for_local() { check_symbol( r#" - //- /lib.rs crate:main deps:foo + //- /workspace/lib.rs crate:main deps:foo use foo::module::func; fn main() { func(); @@ -561,13 +563,13 @@ pub mod example_mod { fn global_symbol_for_pub_struct() { check_symbol( r#" - //- /lib.rs crate:main + //- /workspace/lib.rs crate:main mod foo; fn main() { let _bar = foo::Bar { i: 0 }; } - //- /foo.rs + //- /workspace/foo.rs pub struct Bar$0 { pub i: i32, } @@ -580,13 +582,13 @@ pub mod example_mod { fn global_symbol_for_pub_struct_reference() { check_symbol( r#" - //- /lib.rs crate:main + //- /workspace/lib.rs crate:main mod foo; fn main() { let _bar = foo::Bar$0 { i: 0 }; } - //- /foo.rs + //- /workspace/foo.rs pub struct Bar { pub i: i32, } @@ -599,7 +601,7 @@ pub mod example_mod { fn symbol_for_for_type_alias() { check_symbol( r#" - //- /lib.rs crate:main + //- /workspace/lib.rs crate:main pub type MyTypeAlias$0 = u8; "#, "rust-analyzer cargo main . MyTypeAlias#", @@ -615,7 +617,8 @@ pub mod example_mod { host.raw_database_mut().apply_change(change_fixture.change); let analysis = host.analysis(); - let si = StaticIndex::compute(&analysis); + let si = + StaticIndex::compute(&analysis, &VfsPath::new_virtual_path("/workspace".to_owned())); let file = si.files.first().unwrap(); let (_, token_id) = file.tokens.first().unwrap(); diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs index 7f24fa2835e93..3caa487988744 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs @@ -1,7 +1,7 @@ //! Applies structured search replace rules from the command line. use anyhow::Context; -use ide_db::EditionedFileId; +use ide_db::{base_db::SourceDatabase, EditionedFileId}; use ide_ssr::MatchFinder; use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice}; use project_model::{CargoConfig, RustLibSource}; @@ -10,7 +10,6 @@ use crate::cli::flags; impl flags::Ssr { pub fn run(self) -> anyhow::Result<()> { - use ide_db::base_db::SourceDatabaseExt; let cargo_config = CargoConfig { sysroot: Some(RustLibSource::Discover), ..Default::default() }; let load_cargo_config = LoadCargoConfig { @@ -46,7 +45,7 @@ impl flags::Search { /// `debug_snippet`. This is intended for debugging and probably isn't in it's current form useful /// for much else. 
pub fn run(self) -> anyhow::Result<()> { - use ide_db::base_db::SourceDatabaseExt; + use ide_db::base_db::SourceRootDatabase; use ide_db::symbol_index::SymbolsDatabase; let cargo_config = CargoConfig::default(); let load_cargo_config = LoadCargoConfig { diff --git a/src/tools/rust-analyzer/crates/flycheck/src/command.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/command.rs similarity index 98% rename from src/tools/rust-analyzer/crates/flycheck/src/command.rs rename to src/tools/rust-analyzer/crates/rust-analyzer/src/command.rs index 38c7c81f57a15..f1009eb46602f 100644 --- a/src/tools/rust-analyzer/crates/flycheck/src/command.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/command.rs @@ -1,5 +1,5 @@ -//! Utilities for running a cargo command like `cargo check` or `cargo test` in a separate thread and -//! parse its stdout/stderr. +//! Utilities for running a cargo command like `cargo check` or `cargo test` in a separate thread +//! and parse its stdout/stderr. use std::{ ffi::OsString, diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs index b9b8cfdfc9eab..02f5d75136e9e 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs @@ -7,7 +7,6 @@ use std::{fmt, iter, ops::Not, sync::OnceLock}; use cfg::{CfgAtom, CfgDiff}; use dirs::config_dir; -use flycheck::{CargoOptions, FlycheckConfig}; use hir::Symbol; use ide::{ AssistConfig, CallableSnippets, CompletionConfig, DiagnosticsConfig, ExprFillDefaultMode, @@ -35,8 +34,9 @@ use triomphe::Arc; use vfs::{AbsPath, AbsPathBuf, VfsPath}; use crate::{ - capabilities::ClientCapabilities, diagnostics::DiagnosticsMapConfig, + flycheck::{CargoOptions, FlycheckConfig}, + lsp::capabilities::ClientCapabilities, lsp_ext::{WorkspaceSymbolSearchKind, WorkspaceSymbolSearchScope}, }; @@ -143,13 +143,6 @@ config_data! { /// /// This option does not take effect until rust-analyzer is restarted. cargo_sysroot: Option = Some("discover".to_owned()), - /// Whether to run cargo metadata on the sysroot library allowing rust-analyzer to analyze - /// third-party dependencies of the standard libraries. - /// - /// This will cause `cargo` to create a lockfile in your sysroot directory. rust-analyzer - /// will attempt to clean up afterwards, but nevertheless requires the location to be - /// writable to. - cargo_sysrootQueryMetadata: bool = false, /// Relative path to the sysroot library sources. If left unset, this will default to /// `{cargo.sysroot}/lib/rustlib/src/rust/library`. 
/// @@ -1839,7 +1832,6 @@ impl Config { }); let sysroot_src = self.cargo_sysrootSrc(None).as_ref().map(|sysroot| self.root_path.join(sysroot)); - let sysroot_query_metadata = self.cargo_sysrootQueryMetadata(None); CargoConfig { all_targets: *self.cargo_allTargets(None), @@ -1852,7 +1844,6 @@ impl Config { }, target: self.cargo_target(None).clone(), sysroot, - sysroot_query_metadata: *sysroot_query_metadata, sysroot_src, rustc_source, cfg_overrides: project_model::CfgOverrides { @@ -1908,7 +1899,7 @@ impl Config { *self.check_workspace(None) } - pub fn cargo_test_options(&self) -> CargoOptions { + pub(crate) fn cargo_test_options(&self) -> CargoOptions { CargoOptions { target_triples: self.cargo_target(None).clone().into_iter().collect(), all_targets: false, @@ -1924,7 +1915,7 @@ impl Config { } } - pub fn flycheck(&self) -> FlycheckConfig { + pub(crate) fn flycheck(&self) -> FlycheckConfig { match &self.check_overrideCommand(None) { Some(args) if !args.is_empty() => { let mut args = args.clone(); @@ -1934,16 +1925,18 @@ impl Config { args, extra_env: self.check_extra_env(), invocation_strategy: match self.check_invocationStrategy(None) { - InvocationStrategy::Once => flycheck::InvocationStrategy::Once, + InvocationStrategy::Once => crate::flycheck::InvocationStrategy::Once, InvocationStrategy::PerWorkspace => { - flycheck::InvocationStrategy::PerWorkspace + crate::flycheck::InvocationStrategy::PerWorkspace } }, invocation_location: match self.check_invocationLocation(None) { InvocationLocation::Root => { - flycheck::InvocationLocation::Root(self.root_path.clone()) + crate::flycheck::InvocationLocation::Root(self.root_path.clone()) + } + InvocationLocation::Workspace => { + crate::flycheck::InvocationLocation::Workspace } - InvocationLocation::Workspace => flycheck::InvocationLocation::Workspace, }, } } @@ -3450,7 +3443,7 @@ mod tests { let s = remove_ws(&schema); if !p.contains(&s) { package_json.replace_range(start..end, &schema); - ensure_file_contents(&package_json_path, &package_json) + ensure_file_contents(package_json_path.as_std_path(), &package_json) } } @@ -3458,7 +3451,7 @@ mod tests { fn generate_config_documentation() { let docs_path = project_root().join("docs/user/generated_config.adoc"); let expected = FullConfigInput::manual(); - ensure_file_contents(&docs_path, &expected); + ensure_file_contents(docs_path.as_std_path(), &expected); } fn remove_ws(text: &str) -> String { @@ -3467,13 +3460,8 @@ mod tests { #[test] fn proc_macro_srv_null() { - let mut config = Config::new( - AbsPathBuf::try_from(project_root()).unwrap(), - Default::default(), - vec![], - None, - None, - ); + let mut config = + Config::new(AbsPathBuf::assert(project_root()), Default::default(), vec![], None, None); let mut change = ConfigChange::default(); change.change_client_config(serde_json::json!({ @@ -3487,32 +3475,22 @@ mod tests { #[test] fn proc_macro_srv_abs() { - let mut config = Config::new( - AbsPathBuf::try_from(project_root()).unwrap(), - Default::default(), - vec![], - None, - None, - ); + let mut config = + Config::new(AbsPathBuf::assert(project_root()), Default::default(), vec![], None, None); let mut change = ConfigChange::default(); change.change_client_config(serde_json::json!({ "procMacro" : { - "server": project_root().display().to_string(), + "server": project_root().to_string(), }})); (config, _, _) = config.apply_change(change); - assert_eq!(config.proc_macro_srv(), Some(AbsPathBuf::try_from(project_root()).unwrap())); + assert_eq!(config.proc_macro_srv(), 
Some(AbsPathBuf::assert(project_root()))); } #[test] fn proc_macro_srv_rel() { - let mut config = Config::new( - AbsPathBuf::try_from(project_root()).unwrap(), - Default::default(), - vec![], - None, - None, - ); + let mut config = + Config::new(AbsPathBuf::assert(project_root()), Default::default(), vec![], None, None); let mut change = ConfigChange::default(); @@ -3531,13 +3509,8 @@ mod tests { #[test] fn cargo_target_dir_unset() { - let mut config = Config::new( - AbsPathBuf::try_from(project_root()).unwrap(), - Default::default(), - vec![], - None, - None, - ); + let mut config = + Config::new(AbsPathBuf::assert(project_root()), Default::default(), vec![], None, None); let mut change = ConfigChange::default(); @@ -3554,13 +3527,8 @@ mod tests { #[test] fn cargo_target_dir_subdir() { - let mut config = Config::new( - AbsPathBuf::try_from(project_root()).unwrap(), - Default::default(), - vec![], - None, - None, - ); + let mut config = + Config::new(AbsPathBuf::assert(project_root()), Default::default(), vec![], None, None); let mut change = ConfigChange::default(); change.change_client_config(serde_json::json!({ @@ -3577,13 +3545,8 @@ mod tests { #[test] fn cargo_target_dir_relative_dir() { - let mut config = Config::new( - AbsPathBuf::try_from(project_root()).unwrap(), - Default::default(), - vec![], - None, - None, - ); + let mut config = + Config::new(AbsPathBuf::assert(project_root()), Default::default(), vec![], None, None); let mut change = ConfigChange::default(); change.change_client_config(serde_json::json!({ @@ -3603,13 +3566,8 @@ mod tests { #[test] fn toml_unknown_key() { - let config = Config::new( - AbsPathBuf::try_from(project_root()).unwrap(), - Default::default(), - vec![], - None, - None, - ); + let config = + Config::new(AbsPathBuf::assert(project_root()), Default::default(), vec![], None, None); let mut change = ConfigChange::default(); diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics.rs index b23e7b7e98c69..034c49c3d5c69 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics.rs @@ -11,7 +11,7 @@ use rustc_hash::FxHashSet; use stdx::iter_eq_by; use triomphe::Arc; -use crate::{global_state::GlobalStateSnapshot, lsp, lsp_ext}; +use crate::{global_state::GlobalStateSnapshot, lsp, lsp_ext, main_loop::DiagnosticsTaskKind}; pub(crate) type CheckFixes = Arc>>>; @@ -28,7 +28,8 @@ pub(crate) type DiagnosticsGeneration = usize; #[derive(Debug, Default, Clone)] pub(crate) struct DiagnosticCollection { // FIXME: should be IntMap> - pub(crate) native: IntMap)>, + pub(crate) native_syntax: IntMap)>, + pub(crate) native_semantic: IntMap)>, // FIXME: should be Vec pub(crate) check: IntMap>>, pub(crate) check_fixes: CheckFixes, @@ -64,7 +65,8 @@ impl DiagnosticCollection { } pub(crate) fn clear_native_for(&mut self, file_id: FileId) { - self.native.remove(&file_id); + self.native_syntax.remove(&file_id); + self.native_semantic.remove(&file_id); self.changes.insert(file_id); } @@ -88,43 +90,51 @@ impl DiagnosticCollection { self.changes.insert(file_id); } - pub(crate) fn set_native_diagnostics( - &mut self, - generation: DiagnosticsGeneration, - file_id: FileId, - mut diagnostics: Vec, - ) { - diagnostics.sort_by_key(|it| (it.range.start, it.range.end)); - if let Some((old_gen, existing_diagnostics)) = self.native.get_mut(&file_id) { - if existing_diagnostics.len() == diagnostics.len() - && 
iter_eq_by(&diagnostics, &*existing_diagnostics, |new, existing| { - are_diagnostics_equal(new, existing) - }) - { - // don't signal an update if the diagnostics are the same - return; + pub(crate) fn set_native_diagnostics(&mut self, kind: DiagnosticsTaskKind) { + let (generation, diagnostics, target) = match kind { + DiagnosticsTaskKind::Syntax(generation, diagnostics) => { + (generation, diagnostics, &mut self.native_syntax) + } + DiagnosticsTaskKind::Semantic(generation, diagnostics) => { + (generation, diagnostics, &mut self.native_semantic) } - if *old_gen < generation || generation == 0 { - self.native.insert(file_id, (generation, diagnostics)); + }; + + for (file_id, mut diagnostics) in diagnostics { + diagnostics.sort_by_key(|it| (it.range.start, it.range.end)); + + if let Some((old_gen, existing_diagnostics)) = target.get_mut(&file_id) { + if existing_diagnostics.len() == diagnostics.len() + && iter_eq_by(&diagnostics, &*existing_diagnostics, |new, existing| { + are_diagnostics_equal(new, existing) + }) + { + // don't signal an update if the diagnostics are the same + continue; + } + if *old_gen < generation || generation == 0 { + target.insert(file_id, (generation, diagnostics)); + } else { + existing_diagnostics.extend(diagnostics); + // FIXME: Doing the merge step of a merge sort here would be a bit more performant + // but eh + existing_diagnostics.sort_by_key(|it| (it.range.start, it.range.end)) + } } else { - existing_diagnostics.extend(diagnostics); - // FIXME: Doing the merge step of a merge sort here would be a bit more performant - // but eh - existing_diagnostics.sort_by_key(|it| (it.range.start, it.range.end)) + target.insert(file_id, (generation, diagnostics)); } - } else { - self.native.insert(file_id, (generation, diagnostics)); + self.changes.insert(file_id); } - self.changes.insert(file_id); } pub(crate) fn diagnostics_for( &self, file_id: FileId, ) -> impl Iterator { - let native = self.native.get(&file_id).into_iter().flat_map(|(_, d)| d); + let native_syntax = self.native_syntax.get(&file_id).into_iter().flat_map(|(_, d)| d); + let native_semantic = self.native_semantic.get(&file_id).into_iter().flat_map(|(_, d)| d); let check = self.check.values().filter_map(move |it| it.get(&file_id)).flatten(); - native.chain(check) + native_syntax.chain(native_semantic).chain(check) } pub(crate) fn take_changes(&mut self) -> Option> { @@ -147,10 +157,16 @@ fn are_diagnostics_equal(left: &lsp_types::Diagnostic, right: &lsp_types::Diagno && left.message == right.message } +pub(crate) enum NativeDiagnosticsFetchKind { + Syntax, + Semantic, +} + pub(crate) fn fetch_native_diagnostics( - snapshot: GlobalStateSnapshot, + snapshot: &GlobalStateSnapshot, subscriptions: std::sync::Arc<[FileId]>, slice: std::ops::Range, + kind: NativeDiagnosticsFetchKind, ) -> Vec<(FileId, Vec)> { let _p = tracing::info_span!("fetch_native_diagnostics").entered(); let _ctx = stdx::panic_context::enter("fetch_native_diagnostics".to_owned()); @@ -180,14 +196,17 @@ pub(crate) fn fetch_native_diagnostics( let line_index = snapshot.file_line_index(file_id).ok()?; let source_root = snapshot.analysis.source_root_id(file_id).ok()?; - let diagnostics = snapshot - .analysis - .diagnostics( - &snapshot.config.diagnostics(Some(source_root)), - ide::AssistResolveStrategy::None, - file_id, - ) - .ok()? + let config = &snapshot.config.diagnostics(Some(source_root)); + let diagnostics = match kind { + NativeDiagnosticsFetchKind::Syntax => { + snapshot.analysis.syntax_diagnostics(config, file_id).ok()? 
+ } + NativeDiagnosticsFetchKind::Semantic => snapshot + .analysis + .semantic_diagnostics(config, ide::AssistResolveStrategy::None, file_id) + .ok()?, + }; + let diagnostics = diagnostics .into_iter() .filter_map(|d| { if d.range.file_id == file_id { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/to_proto.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/to_proto.rs index defa464f2ba1e..208a70bc02ad9 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/to_proto.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/to_proto.rs @@ -1,7 +1,7 @@ //! This module provides the functionality needed to convert diagnostics from //! `cargo check` json format to the LSP diagnostic format. -use flycheck::{Applicability, DiagnosticLevel, DiagnosticSpan}; +use crate::flycheck::{Applicability, DiagnosticLevel, DiagnosticSpan}; use itertools::Itertools; use rustc_hash::FxHashMap; use stdx::format_to; @@ -17,8 +17,8 @@ use super::{DiagnosticsMapConfig, Fix}; /// Determines the LSP severity from a diagnostic fn diagnostic_severity( config: &DiagnosticsMapConfig, - level: flycheck::DiagnosticLevel, - code: Option, + level: crate::flycheck::DiagnosticLevel, + code: Option, ) -> Option { let res = match level { DiagnosticLevel::Ice => lsp_types::DiagnosticSeverity::ERROR, @@ -181,7 +181,7 @@ enum MappedRustChildDiagnostic { fn map_rust_child_diagnostic( config: &DiagnosticsMapConfig, workspace_root: &AbsPath, - rd: &flycheck::Diagnostic, + rd: &crate::flycheck::Diagnostic, snap: &GlobalStateSnapshot, ) -> MappedRustChildDiagnostic { let spans: Vec<&DiagnosticSpan> = rd.spans.iter().filter(|s| s.is_primary).collect(); @@ -284,7 +284,7 @@ pub(crate) struct MappedRustDiagnostic { /// If the diagnostic has no primary span this will return `None` pub(crate) fn map_rust_diagnostic_to_lsp( config: &DiagnosticsMapConfig, - rd: &flycheck::Diagnostic, + rd: &crate::flycheck::Diagnostic, workspace_root: &AbsPath, snap: &GlobalStateSnapshot, ) -> Vec { @@ -537,7 +537,8 @@ mod tests { } fn check_with_config(config: DiagnosticsMapConfig, diagnostics_json: &str, expect: ExpectFile) { - let diagnostic: flycheck::Diagnostic = serde_json::from_str(diagnostics_json).unwrap(); + let diagnostic: crate::flycheck::Diagnostic = + serde_json::from_str(diagnostics_json).unwrap(); let workspace_root: &AbsPath = Utf8Path::new("/test/").try_into().unwrap(); let (sender, _) = crossbeam_channel::unbounded(); let state = GlobalState::new( diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/diff.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/diff.rs deleted file mode 100644 index 3fcfb4a1b08aa..0000000000000 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/diff.rs +++ /dev/null @@ -1,53 +0,0 @@ -//! 
Generate minimal `TextEdit`s from different text versions -use dissimilar::Chunk; -use ide::{TextEdit, TextRange, TextSize}; - -pub(crate) fn diff(left: &str, right: &str) -> TextEdit { - let chunks = dissimilar::diff(left, right); - textedit_from_chunks(chunks) -} - -fn textedit_from_chunks(chunks: Vec>) -> TextEdit { - let mut builder = TextEdit::builder(); - let mut pos = TextSize::default(); - - let mut chunks = chunks.into_iter().peekable(); - while let Some(chunk) = chunks.next() { - if let (Chunk::Delete(deleted), Some(&Chunk::Insert(inserted))) = (chunk, chunks.peek()) { - chunks.next().unwrap(); - let deleted_len = TextSize::of(deleted); - builder.replace(TextRange::at(pos, deleted_len), inserted.into()); - pos += deleted_len; - continue; - } - - match chunk { - Chunk::Equal(text) => { - pos += TextSize::of(text); - } - Chunk::Delete(deleted) => { - let deleted_len = TextSize::of(deleted); - builder.delete(TextRange::at(pos, deleted_len)); - pos += deleted_len; - } - Chunk::Insert(inserted) => { - builder.insert(pos, inserted.into()); - } - } - } - builder.finish() -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn diff_applies() { - let mut original = String::from("fn foo(a:u32){\n}"); - let result = "fn foo(a: u32) {}"; - let edit = diff(&original, result); - edit.apply(&mut original); - assert_eq!(original, result); - } -} diff --git a/src/tools/rust-analyzer/crates/flycheck/src/project_json.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/discover.rs similarity index 86% rename from src/tools/rust-analyzer/crates/flycheck/src/project_json.rs rename to src/tools/rust-analyzer/crates/rust-analyzer/src/discover.rs index b6e4495bc6d6e..7e9162eee6e58 100644 --- a/src/tools/rust-analyzer/crates/flycheck/src/project_json.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/discover.rs @@ -1,4 +1,5 @@ -//! A `cargo-metadata`-equivalent for non-Cargo build systems. +//! Infrastructure for lazy project discovery. Currently only support rust-project.json discovery +//! via a custom discover command. use std::{io, process::Command}; use crossbeam_channel::Sender; @@ -9,19 +10,19 @@ use serde_json::Value; use crate::command::{CommandHandle, ParseFromLine}; -pub const ARG_PLACEHOLDER: &str = "{arg}"; +pub(crate) const ARG_PLACEHOLDER: &str = "{arg}"; /// A command wrapper for getting a `rust-project.json`. /// -/// This is analogous to `cargo-metadata`, but for non-Cargo build systems. -pub struct Discover { +/// This is analogous to discovering a cargo project + running `cargo-metadata` on it, but for non-Cargo build systems. +pub(crate) struct DiscoverCommand { command: Vec, sender: Sender, } #[derive(PartialEq, Clone, Debug, Serialize)] #[serde(rename_all = "camelCase")] -pub enum DiscoverArgument { +pub(crate) enum DiscoverArgument { Path(#[serde(serialize_with = "serialize_abs_pathbuf")] AbsPathBuf), Buildfile(#[serde(serialize_with = "serialize_abs_pathbuf")] AbsPathBuf), } @@ -34,14 +35,14 @@ where se.serialize_str(path.as_str()) } -impl Discover { - /// Create a new [Discover]. - pub fn new(sender: Sender, command: Vec) -> Self { +impl DiscoverCommand { + /// Create a new [DiscoverCommand]. + pub(crate) fn new(sender: Sender, command: Vec) -> Self { Self { sender, command } } /// Spawn the command inside [Discover] and report progress, if any. 
- pub fn spawn(&self, discover_arg: DiscoverArgument) -> io::Result { + pub(crate) fn spawn(&self, discover_arg: DiscoverArgument) -> io::Result { let command = &self.command[0]; let args = &self.command[1..]; @@ -65,7 +66,7 @@ impl Discover { /// A handle to a spawned [Discover]. #[derive(Debug)] -pub struct DiscoverHandle { +pub(crate) struct DiscoverHandle { _handle: CommandHandle, } @@ -81,7 +82,7 @@ enum DiscoverProjectData { } #[derive(Debug, PartialEq, Clone)] -pub enum DiscoverProjectMessage { +pub(crate) enum DiscoverProjectMessage { Finished { project: ProjectJsonData, buildfile: AbsPathBuf }, Error { error: String, source: Option }, Progress { message: String }, diff --git a/src/tools/rust-analyzer/crates/flycheck/src/lib.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs similarity index 81% rename from src/tools/rust-analyzer/crates/flycheck/src/lib.rs rename to src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs index 3dd2a91d8fd96..8f2e7d1ca26b9 100644 --- a/src/tools/rust-analyzer/crates/flycheck/src/lib.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs @@ -1,59 +1,48 @@ -//! Flycheck provides the functionality needed to run `cargo check` or -//! another compatible command (f.x. clippy) in a background thread and provide +//! Flycheck provides the functionality needed to run `cargo check` to provide //! LSP diagnostics based on the output of the command. -// FIXME: This crate now handles running `cargo test` needed in the test explorer in -// addition to `cargo check`. Either split it into 3 crates (one for test, one for check -// and one common utilities) or change its name and docs to reflect the current state. - use std::{fmt, io, process::Command, time::Duration}; -use crossbeam_channel::{never, select, unbounded, Receiver, Sender}; +use crossbeam_channel::{select_biased, unbounded, Receiver, Sender}; use paths::{AbsPath, AbsPathBuf, Utf8PathBuf}; use rustc_hash::FxHashMap; use serde::Deserialize; -pub use cargo_metadata::diagnostic::{ +pub(crate) use cargo_metadata::diagnostic::{ Applicability, Diagnostic, DiagnosticCode, DiagnosticLevel, DiagnosticSpan, - DiagnosticSpanMacroExpansion, }; use toolchain::Tool; -mod command; -pub mod project_json; -mod test_runner; - -use command::{CommandHandle, ParseFromLine}; -pub use test_runner::{CargoTestHandle, CargoTestMessage, TestState, TestTarget}; +use crate::command::{CommandHandle, ParseFromLine}; #[derive(Copy, Clone, Debug, Default, PartialEq, Eq)] -pub enum InvocationStrategy { +pub(crate) enum InvocationStrategy { Once, #[default] PerWorkspace, } #[derive(Clone, Debug, Default, PartialEq, Eq)] -pub enum InvocationLocation { +pub(crate) enum InvocationLocation { Root(AbsPathBuf), #[default] Workspace, } #[derive(Clone, Debug, PartialEq, Eq)] -pub struct CargoOptions { - pub target_triples: Vec, - pub all_targets: bool, - pub no_default_features: bool, - pub all_features: bool, - pub features: Vec, - pub extra_args: Vec, - pub extra_env: FxHashMap, - pub target_dir: Option, +pub(crate) struct CargoOptions { + pub(crate) target_triples: Vec, + pub(crate) all_targets: bool, + pub(crate) no_default_features: bool, + pub(crate) all_features: bool, + pub(crate) features: Vec, + pub(crate) extra_args: Vec, + pub(crate) extra_env: FxHashMap, + pub(crate) target_dir: Option, } impl CargoOptions { - fn apply_on_command(&self, cmd: &mut Command) { + pub(crate) fn apply_on_command(&self, cmd: &mut Command) { for target in &self.target_triples { cmd.args(["--target", target.as_str()]); } @@ 
-79,7 +68,7 @@ impl CargoOptions { } #[derive(Clone, Debug, PartialEq, Eq)] -pub enum FlycheckConfig { +pub(crate) enum FlycheckConfig { CargoCommand { command: String, options: CargoOptions, @@ -110,7 +99,7 @@ impl fmt::Display for FlycheckConfig { /// diagnostics based on the output. /// The spawned thread is shut down when this struct is dropped. #[derive(Debug)] -pub struct FlycheckHandle { +pub(crate) struct FlycheckHandle { // XXX: drop order is significant sender: Sender, _thread: stdx::thread::JoinHandle, @@ -118,9 +107,9 @@ pub struct FlycheckHandle { } impl FlycheckHandle { - pub fn spawn( + pub(crate) fn spawn( id: usize, - sender: Box, + sender: Sender, config: FlycheckConfig, sysroot_root: Option, workspace_root: AbsPathBuf, @@ -137,28 +126,28 @@ impl FlycheckHandle { } /// Schedule a re-start of the cargo check worker to do a workspace wide check. - pub fn restart_workspace(&self, saved_file: Option) { + pub(crate) fn restart_workspace(&self, saved_file: Option) { self.sender.send(StateChange::Restart { package: None, saved_file }).unwrap(); } /// Schedule a re-start of the cargo check worker to do a package wide check. - pub fn restart_for_package(&self, package: String) { + pub(crate) fn restart_for_package(&self, package: String) { self.sender .send(StateChange::Restart { package: Some(package), saved_file: None }) .unwrap(); } /// Stop this cargo check worker. - pub fn cancel(&self) { + pub(crate) fn cancel(&self) { self.sender.send(StateChange::Cancel).unwrap(); } - pub fn id(&self) -> usize { + pub(crate) fn id(&self) -> usize { self.id } } -pub enum Message { +pub(crate) enum FlycheckMessage { /// Request adding a diagnostic with fixes included to a file AddDiagnostic { id: usize, workspace_root: AbsPathBuf, diagnostic: Diagnostic }, @@ -173,19 +162,19 @@ pub enum Message { }, } -impl fmt::Debug for Message { +impl fmt::Debug for FlycheckMessage { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { - Message::AddDiagnostic { id, workspace_root, diagnostic } => f + FlycheckMessage::AddDiagnostic { id, workspace_root, diagnostic } => f .debug_struct("AddDiagnostic") .field("id", id) .field("workspace_root", workspace_root) .field("diagnostic_code", &diagnostic.code.as_ref().map(|it| &it.code)) .finish(), - Message::ClearDiagnostics { id } => { + FlycheckMessage::ClearDiagnostics { id } => { f.debug_struct("ClearDiagnostics").field("id", id).finish() } - Message::Progress { id, progress } => { + FlycheckMessage::Progress { id, progress } => { f.debug_struct("Progress").field("id", id).field("progress", progress).finish() } } @@ -193,7 +182,7 @@ impl fmt::Debug for Message { } #[derive(Debug)] -pub enum Progress { +pub(crate) enum Progress { DidStart, DidCheckCrate(String), DidFinish(io::Result<()>), @@ -210,7 +199,7 @@ enum StateChange { struct FlycheckActor { /// The workspace id of this flycheck instance. 
id: usize, - sender: Box, + sender: Sender, config: FlycheckConfig, manifest_path: Option, /// Either the workspace root of the workspace we are flychecking, @@ -241,12 +230,12 @@ enum FlycheckStatus { Finished, } -pub const SAVED_FILE_PLACEHOLDER: &str = "$saved_file"; +pub(crate) const SAVED_FILE_PLACEHOLDER: &str = "$saved_file"; impl FlycheckActor { fn new( id: usize, - sender: Box, + sender: Sender, config: FlycheckConfig, sysroot_root: Option, workspace_root: AbsPathBuf, @@ -267,17 +256,18 @@ impl FlycheckActor { } fn report_progress(&self, progress: Progress) { - self.send(Message::Progress { id: self.id, progress }); + self.send(FlycheckMessage::Progress { id: self.id, progress }); } fn next_event(&self, inbox: &Receiver) -> Option { - if let Ok(msg) = inbox.try_recv() { - // give restarts a preference so check outputs don't block a restart or stop - return Some(Event::RequestStateChange(msg)); - } - select! { + let Some(command_receiver) = &self.command_receiver else { + return inbox.recv().ok().map(Event::RequestStateChange); + }; + + // Biased to give restarts a preference so check outputs don't block a restart or stop + select_biased! { recv(inbox) -> msg => msg.ok().map(Event::RequestStateChange), - recv(self.command_receiver.as_ref().unwrap_or(&never())) -> msg => Some(Event::CheckEvent(msg.ok())), + recv(command_receiver) -> msg => Some(Event::CheckEvent(msg.ok())), } } @@ -340,7 +330,7 @@ impl FlycheckActor { ); } if self.status == FlycheckStatus::Started { - self.send(Message::ClearDiagnostics { id: self.id }); + self.send(FlycheckMessage::ClearDiagnostics { id: self.id }); } self.report_progress(Progress::DidFinish(res)); self.status = FlycheckStatus::Finished; @@ -362,9 +352,9 @@ impl FlycheckActor { "diagnostic received" ); if self.status == FlycheckStatus::Started { - self.send(Message::ClearDiagnostics { id: self.id }); + self.send(FlycheckMessage::ClearDiagnostics { id: self.id }); } - self.send(Message::AddDiagnostic { + self.send(FlycheckMessage::AddDiagnostic { id: self.id, workspace_root: self.root.clone(), diagnostic: msg, @@ -399,7 +389,7 @@ impl FlycheckActor { package: Option<&str>, saved_file: Option<&AbsPath>, ) -> Option { - let (mut cmd, args) = match &self.config { + match &self.config { FlycheckConfig::CargoCommand { command, options, ansi_color_output } => { let mut cmd = Command::new(Tool::Cargo.path()); if let Some(sysroot_root) = &self.sysroot_root { @@ -430,7 +420,8 @@ impl FlycheckActor { cmd.arg("--keep-going"); options.apply_on_command(&mut cmd); - (cmd, options.extra_args.clone()) + cmd.args(&options.extra_args); + Some(cmd) } FlycheckConfig::CustomCommand { command, @@ -459,38 +450,36 @@ impl FlycheckActor { } } - if args.contains(&SAVED_FILE_PLACEHOLDER.to_owned()) { - // If the custom command has a $saved_file placeholder, and - // we're saving a file, replace the placeholder in the arguments. - if let Some(saved_file) = saved_file { - let args = args - .iter() - .map(|arg| { - if arg == SAVED_FILE_PLACEHOLDER { - saved_file.to_string() - } else { - arg.clone() - } - }) - .collect(); - (cmd, args) - } else { - // The custom command has a $saved_file placeholder, - // but we had an IDE event that wasn't a file save. Do nothing. - return None; + // If the custom command has a $saved_file placeholder, and + // we're saving a file, replace the placeholder in the arguments. 
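The rewritten argument handling follows below, streaming arguments directly onto the `Command`. A minimal, self-contained sketch of the same substitution rule, assuming plain `String` paths in place of the real `AbsPath` type:

const SAVED_FILE_PLACEHOLDER: &str = "$saved_file";

// Returns the final argument list, or `None` when the command should not run at all
// (placeholder present but the triggering event was not a file save).
fn substitute_saved_file(args: &[String], saved_file: Option<&str>) -> Option<Vec<String>> {
    let mut out = Vec::with_capacity(args.len());
    for arg in args {
        if arg == SAVED_FILE_PLACEHOLDER {
            match saved_file {
                Some(path) => out.push(path.to_owned()),
                None => return None,
            }
        } else {
            out.push(arg.clone());
        }
    }
    Some(out)
}

fn main() {
    let args = vec!["check".to_owned(), SAVED_FILE_PLACEHOLDER.to_owned()];
    assert_eq!(
        substitute_saved_file(&args, Some("src/lib.rs")),
        Some(vec!["check".to_owned(), "src/lib.rs".to_owned()])
    );
    assert_eq!(substitute_saved_file(&args, None), None);
}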
+ if let Some(saved_file) = saved_file { + for arg in args { + if arg == SAVED_FILE_PLACEHOLDER { + cmd.arg(saved_file); + } else { + cmd.arg(arg); + } } } else { - (cmd, args.clone()) + for arg in args { + if arg == SAVED_FILE_PLACEHOLDER { + // The custom command has a $saved_file placeholder, + // but we had an IDE event that wasn't a file save. Do nothing. + return None; + } + + cmd.arg(arg); + } } - } - }; - cmd.args(args); - Some(cmd) + Some(cmd) + } + } } - fn send(&self, check_task: Message) { - (self.sender)(check_task); + #[track_caller] + fn send(&self, check_task: FlycheckMessage) { + self.sender.send(check_task).unwrap(); } } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs index f1dde104fce4a..7a7ec1d77e09b 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs @@ -6,10 +6,9 @@ use std::{ops::Not as _, time::Instant}; use crossbeam_channel::{unbounded, Receiver, Sender}; -use flycheck::{project_json, FlycheckHandle}; use hir::ChangeWithProcMacros; use ide::{Analysis, AnalysisHost, Cancellable, FileId, SourceRootId}; -use ide_db::base_db::{CrateId, ProcMacroPaths, SourceDatabaseExt}; +use ide_db::base_db::{CrateId, ProcMacroPaths, SourceDatabase, SourceRootDatabase}; use itertools::Itertools; use load_cargo::SourceRootConfig; use lsp_types::{SemanticTokens, Url}; @@ -28,15 +27,18 @@ use vfs::{AbsPathBuf, AnchoredPathBuf, ChangeKind, Vfs, VfsPath}; use crate::{ config::{Config, ConfigChange, ConfigErrors, RatomlFileKind}, diagnostics::{CheckFixes, DiagnosticCollection}, + discover, + flycheck::{FlycheckHandle, FlycheckMessage}, line_index::{LineEndings, LineIndex}, lsp::{from_proto, to_proto::url_from_abs_path}, lsp_ext, main_loop::Task, mem_docs::MemDocs, - op_queue::OpQueue, + op_queue::{Cause, OpQueue}, reload, target_spec::{CargoTargetSpec, ProjectJsonTargetSpec, TargetSpec}, task_pool::{TaskPool, TaskQueue}, + test_runner::{CargoTestHandle, CargoTestMessage}, }; pub(crate) struct FetchWorkspaceRequest { @@ -88,28 +90,28 @@ pub(crate) struct GlobalState { // Flycheck pub(crate) flycheck: Arc<[FlycheckHandle]>, - pub(crate) flycheck_sender: Sender, - pub(crate) flycheck_receiver: Receiver, + pub(crate) flycheck_sender: Sender, + pub(crate) flycheck_receiver: Receiver, pub(crate) last_flycheck_error: Option, // Test explorer - pub(crate) test_run_session: Option>, - pub(crate) test_run_sender: Sender, - pub(crate) test_run_receiver: Receiver, + pub(crate) test_run_session: Option>, + pub(crate) test_run_sender: Sender, + pub(crate) test_run_receiver: Receiver, pub(crate) test_run_remaining_jobs: usize, // Project loading - pub(crate) discover_handle: Option, - pub(crate) discover_sender: Sender, - pub(crate) discover_receiver: Receiver, + pub(crate) discover_handle: Option, + pub(crate) discover_sender: Sender, + pub(crate) discover_receiver: Receiver, // VFS pub(crate) loader: Handle, Receiver>, pub(crate) vfs: Arc)>>, pub(crate) vfs_config_version: u32, pub(crate) vfs_progress_config_version: u32, - pub(crate) vfs_progress_n_total: usize, - pub(crate) vfs_progress_n_done: usize, + pub(crate) vfs_done: bool, + pub(crate) wants_to_switch: Option, /// `workspaces` field stores the data we actually use, while the `OpQueue` /// stores the result of the last fetch. 
@@ -183,8 +185,7 @@ impl GlobalState { pub(crate) fn new(sender: Sender, config: Config) -> GlobalState { let loader = { let (sender, receiver) = unbounded::(); - let handle: vfs_notify::NotifyHandle = - vfs::loader::Handle::spawn(Box::new(move |msg| sender.send(msg).unwrap())); + let handle: vfs_notify::NotifyHandle = vfs::loader::Handle::spawn(sender); let handle = Box::new(handle) as Box; Handle { handle, receiver } }; @@ -252,8 +253,8 @@ impl GlobalState { vfs: Arc::new(RwLock::new((vfs::Vfs::default(), IntMap::default()))), vfs_config_version: 0, vfs_progress_config_version: 0, - vfs_progress_n_total: 0, - vfs_progress_n_done: 0, + vfs_done: true, + wants_to_switch: None, workspaces: Arc::from(Vec::new()), crate_graph_file_dependencies: FxHashSet::default(), @@ -458,6 +459,11 @@ impl GlobalState { } } + // FIXME: `workspace_structure_change` is computed from `should_refresh_for_change` which is + // path syntax based. That is not sufficient for all cases so we should lift that check out + // into a `QueuedTask`, see `handle_did_save_text_document`. + // Or maybe instead of replacing that check, kick off a semantic one if the syntactic one + // didn't find anything (to make up for the lack of precision). { if !matches!(&workspace_structure_change, Some((.., true))) { _ = self @@ -557,8 +563,52 @@ impl GlobalState { self.req_queue.incoming.is_completed(&request.id) } + #[track_caller] fn send(&self, message: lsp_server::Message) { - self.sender.send(message).unwrap() + self.sender.send(message).unwrap(); + } + + pub(crate) fn publish_diagnostics( + &mut self, + uri: Url, + version: Option, + mut diagnostics: Vec, + ) { + // We put this on a separate thread to avoid blocking the main thread with serialization work + self.task_pool.handle.spawn_with_sender(stdx::thread::ThreadIntent::Worker, { + let sender = self.sender.clone(); + move |_| { + // VSCode assumes diagnostic messages to be non-empty strings, so we need to patch + // empty diagnostics. Neither the docs of VSCode nor the LSP spec say whether + // diagnostic messages are actually allowed to be empty or not and patching this + // in the VSCode client does not work as the assertion happens in the protocol + // conversion. So this hack is here to stay, and will be considered a hack + // until the LSP decides to state that empty messages are allowed. 
+ + // See https://github.com/rust-lang/rust-analyzer/issues/11404 + // See https://github.com/rust-lang/rust-analyzer/issues/13130 + let patch_empty = |message: &mut String| { + if message.is_empty() { + " ".clone_into(message); + } + }; + + for d in &mut diagnostics { + patch_empty(&mut d.message); + if let Some(dri) = &mut d.related_information { + for dri in dri { + patch_empty(&mut dri.message); + } + } + } + + let not = lsp_server::Notification::new( + ::METHOD.to_owned(), + lsp_types::PublishDiagnosticsParams { uri, diagnostics, version }, + ); + _ = sender.send(not.into()); + } + }); } } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/dispatch.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/dispatch.rs similarity index 87% rename from src/tools/rust-analyzer/crates/rust-analyzer/src/dispatch.rs rename to src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/dispatch.rs index ebdc196a658e4..a105ec638203b 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/dispatch.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/dispatch.rs @@ -97,16 +97,45 @@ impl RequestDispatcher<'_> { self } - /// Dispatches a non-latency-sensitive request onto the thread pool. + /// Dispatches a non-latency-sensitive request onto the thread pool. When the VFS is marked not + /// ready this will return a default constructed [`R::Result`]. pub(crate) fn on( &mut self, f: fn(GlobalStateSnapshot, R::Params) -> anyhow::Result, ) -> &mut Self where - R: lsp_types::request::Request + 'static, - R::Params: DeserializeOwned + panic::UnwindSafe + Send + fmt::Debug, - R::Result: Serialize, + R: lsp_types::request::Request< + Params: DeserializeOwned + panic::UnwindSafe + Send + fmt::Debug, + Result: Serialize + Default, + > + 'static, + { + if !self.global_state.vfs_done { + if let Some(lsp_server::Request { id, .. }) = + self.req.take_if(|it| it.method == R::METHOD) + { + self.global_state.respond(lsp_server::Response::new_ok(id, R::Result::default())); + } + return self; + } + self.on_with_thread_intent::(ThreadIntent::Worker, f) + } + + /// Dispatches a non-latency-sensitive request onto the thread pool. When the VFS is marked not + /// ready this will return the parameter as is. 
+ pub(crate) fn on_identity( + &mut self, + f: fn(GlobalStateSnapshot, Params) -> anyhow::Result, + ) -> &mut Self + where + R: lsp_types::request::Request + 'static, + Params: Serialize + DeserializeOwned + panic::UnwindSafe + Send + fmt::Debug, { + if !self.global_state.vfs_done { + if let Some((request, params, _)) = self.parse::() { + self.global_state.respond(lsp_server::Response::new_ok(request.id, &params)) + } + return self; + } self.on_with_thread_intent::(ThreadIntent::Worker, f) } @@ -198,11 +227,7 @@ impl RequestDispatcher<'_> { R: lsp_types::request::Request, R::Params: DeserializeOwned + fmt::Debug, { - let req = match &self.req { - Some(req) if req.method == R::METHOD => self.req.take()?, - _ => return None, - }; - + let req = self.req.take_if(|it| it.method == R::METHOD)?; let res = crate::from_json(R::METHOD, &req.params); match res { Ok(params) => { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs index 4b14dcfc3721b..de5d1f231368b 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs @@ -9,6 +9,7 @@ use lsp_types::{ DidChangeWatchedFilesParams, DidChangeWorkspaceFoldersParams, DidCloseTextDocumentParams, DidOpenTextDocumentParams, DidSaveTextDocumentParams, WorkDoneProgressCancelParams, }; +use paths::Utf8PathBuf; use triomphe::Arc; use vfs::{AbsPathBuf, ChangeKind, VfsPath}; @@ -157,6 +158,8 @@ pub(crate) fn handle_did_save_text_document( .map(|cfg| cfg.files_to_watch.iter().map(String::as_str).collect::>()) .unwrap_or_default(); + // FIXME: We should move this check into a QueuedTask and do semantic resolution of + // the files. There is only so much we can tell syntactically from the path.
if reload::should_refresh_for_change(path, ChangeKind::Modify, additional_files) { state.fetch_workspaces_queue.request_op( format!("workspace vfs file change saved {path}"), @@ -240,6 +243,7 @@ pub(crate) fn handle_did_change_workspace_folders( for workspace in params.event.removed { let Ok(path) = workspace.uri.to_file_path() else { continue }; + let Ok(path) = Utf8PathBuf::from_path_buf(path) else { continue }; let Ok(path) = AbsPathBuf::try_from(path) else { continue }; config.remove_workspace(&path); } @@ -249,6 +253,7 @@ pub(crate) fn handle_did_change_workspace_folders( .added .into_iter() .filter_map(|it| it.uri.to_file_path().ok()) + .filter_map(|it| Utf8PathBuf::from_path_buf(it).ok()) .filter_map(|it| AbsPathBuf::try_from(it).ok()); config.add_workspaces(added); diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs index eca139d79ae6a..34325ac7a93aa 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs @@ -36,7 +36,6 @@ use vfs::{AbsPath, AbsPathBuf, FileId, VfsPath}; use crate::{ config::{Config, RustfmtConfig, WorkspaceSymbolConfig}, - diff::diff, global_state::{FetchWorkspaceRequest, GlobalState, GlobalStateSnapshot}, hack_recover_crate_name, line_index::LineEndings, @@ -51,6 +50,7 @@ use crate::{ FetchDependencyListResult, PositionOrRange, ViewCrateGraphParams, WorkspaceSymbolParams, }, target_spec::{CargoTargetSpec, TargetSpec}, + test_runner::{CargoTestHandle, TestTarget}, }; pub(crate) fn handle_workspace_reload(state: &mut GlobalState, _: ()) -> anyhow::Result<()> { @@ -246,15 +246,15 @@ pub(crate) fn handle_run_test( if let ProjectWorkspaceKind::Cargo { cargo, .. 
} = &ws.kind { let test_target = if let Some(namespace_root) = namespace_root { if let Some(package_name) = find_package_name(namespace_root, cargo) { - flycheck::TestTarget::Package(package_name) + TestTarget::Package(package_name) } else { - flycheck::TestTarget::Workspace + TestTarget::Workspace } } else { - flycheck::TestTarget::Workspace + TestTarget::Workspace }; - let handle = flycheck::CargoTestHandle::new( + let handle = CargoTestHandle::new( test_path, state.config.cargo_test_options(), cargo.workspace_root(), @@ -781,9 +781,12 @@ pub(crate) fn handle_parent_module( if let Ok(file_path) = ¶ms.text_document.uri.to_file_path() { if file_path.file_name().unwrap_or_default() == "Cargo.toml" { // search workspaces for parent packages or fallback to workspace root - let abs_path_buf = match AbsPathBuf::try_from(file_path.to_path_buf()).ok() { - Some(abs_path_buf) => abs_path_buf, - None => return Ok(None), + let abs_path_buf = match Utf8PathBuf::from_path_buf(file_path.to_path_buf()) + .ok() + .map(AbsPathBuf::try_from) + { + Some(Ok(abs_path_buf)) => abs_path_buf, + _ => return Ok(None), }; let manifest_path = match ManifestPath::try_from(abs_path_buf).ok() { @@ -2366,3 +2369,47 @@ fn resolve_resource_op(op: &ResourceOp) -> ResourceOperationKind { ResourceOp::Delete(_) => ResourceOperationKind::Delete, } } + +pub(crate) fn diff(left: &str, right: &str) -> TextEdit { + use dissimilar::Chunk; + + let chunks = dissimilar::diff(left, right); + + let mut builder = TextEdit::builder(); + let mut pos = TextSize::default(); + + let mut chunks = chunks.into_iter().peekable(); + while let Some(chunk) = chunks.next() { + if let (Chunk::Delete(deleted), Some(&Chunk::Insert(inserted))) = (chunk, chunks.peek()) { + chunks.next().unwrap(); + let deleted_len = TextSize::of(deleted); + builder.replace(TextRange::at(pos, deleted_len), inserted.into()); + pos += deleted_len; + continue; + } + + match chunk { + Chunk::Equal(text) => { + pos += TextSize::of(text); + } + Chunk::Delete(deleted) => { + let deleted_len = TextSize::of(deleted); + builder.delete(TextRange::at(pos, deleted_len)); + pos += deleted_len; + } + Chunk::Insert(inserted) => { + builder.insert(pos, inserted.into()); + } + } + } + builder.finish() +} + +#[test] +fn diff_smoke_test() { + let mut original = String::from("fn foo(a:u32){\n}"); + let result = "fn foo(a: u32) {}"; + let edit = diff(&original, result); + edit.apply(&mut original); + assert_eq!(original, result); +} diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs index ff8eb6c8612e0..28f4b809d6c82 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs @@ -46,13 +46,19 @@ fn integrated_highlighting_benchmark() { let (db, vfs, _proc_macro) = { let _it = stdx::timeit("workspace loading"); - load_workspace_at(&workspace_to_load, &cargo_config, &load_cargo_config, &|_| {}).unwrap() + load_workspace_at( + workspace_to_load.as_std_path(), + &cargo_config, + &load_cargo_config, + &|_| {}, + ) + .unwrap() }; let mut host = AnalysisHost::with_database(db); let file_id = { let file = workspace_to_load.join(file); - let path = VfsPath::from(AbsPathBuf::assert_utf8(file)); + let path = VfsPath::from(AbsPathBuf::assert(file)); vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {path}")) }; @@ -106,13 +112,19 @@ fn integrated_completion_benchmark() 
{ let (db, vfs, _proc_macro) = { let _it = stdx::timeit("workspace loading"); - load_workspace_at(&workspace_to_load, &cargo_config, &load_cargo_config, &|_| {}).unwrap() + load_workspace_at( + workspace_to_load.as_std_path(), + &cargo_config, + &load_cargo_config, + &|_| {}, + ) + .unwrap() }; let mut host = AnalysisHost::with_database(db); let file_id = { let file = workspace_to_load.join(file); - let path = VfsPath::from(AbsPathBuf::assert_utf8(file)); + let path = VfsPath::from(AbsPathBuf::assert(file)); vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {path}")) }; @@ -274,13 +286,19 @@ fn integrated_diagnostics_benchmark() { let (db, vfs, _proc_macro) = { let _it = stdx::timeit("workspace loading"); - load_workspace_at(&workspace_to_load, &cargo_config, &load_cargo_config, &|_| {}).unwrap() + load_workspace_at( + workspace_to_load.as_std_path(), + &cargo_config, + &load_cargo_config, + &|_| {}, + ) + .unwrap() }; let mut host = AnalysisHost::with_database(db); let file_id = { let file = workspace_to_load.join(file); - let path = VfsPath::from(AbsPathBuf::assert_utf8(file)); + let path = VfsPath::from(AbsPathBuf::assert(file)); vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {path}")) }; @@ -307,7 +325,7 @@ fn integrated_diagnostics_benchmark() { term_search_borrowck: true, }; host.analysis() - .diagnostics(&diagnostics_config, ide::AssistResolveStrategy::None, file_id) + .full_diagnostics(&diagnostics_config, ide::AssistResolveStrategy::None, file_id) .unwrap(); let _g = crate::tracing::hprof::init("*"); @@ -325,7 +343,7 @@ fn integrated_diagnostics_benchmark() { let _p = tracing::info_span!("diagnostics").entered(); let _span = profile::cpu_span(); host.analysis() - .diagnostics(&diagnostics_config, ide::AssistResolveStrategy::None, file_id) + .full_diagnostics(&diagnostics_config, ide::AssistResolveStrategy::None, file_id) .unwrap(); } } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs index 174979ededed8..714991e81167a 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs @@ -11,10 +11,10 @@ pub mod cli; -mod capabilities; +mod command; mod diagnostics; -mod diff; -mod dispatch; +mod discover; +mod flycheck; mod hack_recover_crate_name; mod line_index; mod main_loop; @@ -23,9 +23,11 @@ mod op_queue; mod reload; mod target_spec; mod task_pool; +mod test_runner; mod version; mod handlers { + pub(crate) mod dispatch; pub(crate) mod notification; pub(crate) mod request; } @@ -47,7 +49,7 @@ mod integrated_benchmarks; use serde::de::DeserializeOwned; pub use crate::{ - capabilities::server_capabilities, main_loop::main_loop, reload::ws_to_crate_graph, + lsp::capabilities::server_capabilities, main_loop::main_loop, reload::ws_to_crate_graph, version::version, }; diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp.rs index 9e0d42faed43f..122ad20d65ee6 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp.rs @@ -3,6 +3,8 @@ use core::fmt; pub mod ext; + +pub(crate) mod capabilities; pub(crate) mod from_proto; pub(crate) mod semantic_tokens; pub(crate) mod to_proto; diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/capabilities.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/capabilities.rs similarity index 100% rename from 
src/tools/rust-analyzer/crates/rust-analyzer/src/capabilities.rs rename to src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/capabilities.rs diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/ext.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/ext.rs index 1fcb636f85606..8d1a686dc4d41 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/ext.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/ext.rs @@ -61,7 +61,7 @@ impl Request for FetchDependencyList { #[serde(rename_all = "camelCase")] pub struct FetchDependencyListParams {} -#[derive(Deserialize, Serialize, Debug)] +#[derive(Deserialize, Serialize, Debug, Default)] #[serde(rename_all = "camelCase")] pub struct FetchDependencyListResult { pub crates: Vec, @@ -194,7 +194,7 @@ pub struct TestItem { pub runnable: Option, } -#[derive(Deserialize, Serialize, Debug)] +#[derive(Deserialize, Serialize, Debug, Default)] #[serde(rename_all = "camelCase")] pub struct DiscoverTestResults { pub tests: Vec, @@ -690,6 +690,12 @@ pub enum ExternalDocsResponse { WithLocal(ExternalDocsPair), } +impl Default for ExternalDocsResponse { + fn default() -> Self { + ExternalDocsResponse::Simple(None) + } +} + #[derive(Debug, Default, PartialEq, Serialize, Deserialize, Clone)] #[serde(rename_all = "camelCase")] pub struct ExternalDocsPair { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/from_proto.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/from_proto.rs index aea424298f8c8..1f4544887f062 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/from_proto.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/from_proto.rs @@ -2,6 +2,7 @@ use anyhow::format_err; use ide::{Annotation, AnnotationKind, AssistKind, LineCol}; use ide_db::{line_index::WideLineCol, FileId, FilePosition, FileRange}; +use paths::Utf8PathBuf; use syntax::{TextRange, TextSize}; use vfs::AbsPathBuf; @@ -13,7 +14,7 @@ use crate::{ pub(crate) fn abs_path(url: &lsp_types::Url) -> anyhow::Result { let path = url.to_file_path().map_err(|()| anyhow::format_err!("url is not a file"))?; - Ok(AbsPathBuf::try_from(path).unwrap()) + Ok(AbsPathBuf::try_from(Utf8PathBuf::from_path_buf(path).unwrap()).unwrap()) } pub(crate) fn vfs_path(url: &lsp_types::Url) -> anyhow::Result { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs index 9c820749ece6d..1d4ee71e5c1a0 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs @@ -9,26 +9,28 @@ use std::{ use always_assert::always; use crossbeam_channel::{select, Receiver}; -use flycheck::project_json; -use ide_db::base_db::{SourceDatabase, SourceDatabaseExt, VfsPath}; +use ide_db::base_db::{SourceDatabase, SourceRootDatabase, VfsPath}; use lsp_server::{Connection, Notification, Request}; use lsp_types::{notification::Notification as _, TextDocumentIdentifier}; use stdx::thread::ThreadIntent; use tracing::{error, span, Level}; -use vfs::{AbsPathBuf, FileId}; +use vfs::{loader::LoadingProgress, AbsPathBuf, FileId}; use crate::{ config::Config, - diagnostics::{fetch_native_diagnostics, DiagnosticsGeneration}, - dispatch::{NotificationDispatcher, RequestDispatcher}, + diagnostics::{fetch_native_diagnostics, DiagnosticsGeneration, NativeDiagnosticsFetchKind}, + discover::{DiscoverArgument, DiscoverCommand, DiscoverProjectMessage}, + flycheck::{self, FlycheckMessage}, global_state::{file_id_to_url, 
url_to_file_id, FetchWorkspaceRequest, GlobalState}, hack_recover_crate_name, + handlers::dispatch::{NotificationDispatcher, RequestDispatcher}, lsp::{ from_proto, to_proto, utils::{notification_is, Progress}, }, lsp_ext, reload::{BuildDataProgress, ProcMacroProgress, ProjectWorkspaceProgress}, + test_runner::{CargoTestMessage, TestState}, }; pub fn main_loop(config: Config, connection: Connection) -> anyhow::Result<()> { @@ -61,9 +63,9 @@ enum Event { Task(Task), QueuedTask(QueuedTask), Vfs(vfs::loader::Message), - Flycheck(flycheck::Message), - TestResult(flycheck::CargoTestMessage), - DiscoverProject(project_json::DiscoverProjectMessage), + Flycheck(FlycheckMessage), + TestResult(CargoTestMessage), + DiscoverProject(DiscoverProjectMessage), } impl fmt::Display for Event { @@ -86,17 +88,24 @@ pub(crate) enum QueuedTask { CheckProcMacroSources(Vec), } +#[derive(Debug)] +pub(crate) enum DiagnosticsTaskKind { + Syntax(DiagnosticsGeneration, Vec<(FileId, Vec)>), + Semantic(DiagnosticsGeneration, Vec<(FileId, Vec)>), +} + #[derive(Debug)] pub(crate) enum Task { Response(lsp_server::Response), DiscoverLinkedProjects(DiscoverProjectParam), Retry(lsp_server::Request), - Diagnostics(DiagnosticsGeneration, Vec<(FileId, Vec)>), + Diagnostics(DiagnosticsTaskKind), DiscoverTest(lsp_ext::DiscoverTestResults), PrimeCaches(PrimeCachesProgress), FetchWorkspace(ProjectWorkspaceProgress), FetchBuildData(BuildDataProgress), LoadProcMacros(ProcMacroProgress), + // FIXME: Remove this in favor of a more general QueuedTask, see `handle_did_save_text_document` BuildDepsHaveChanged, } @@ -164,8 +173,10 @@ impl GlobalState { } if self.config.discover_workspace_config().is_none() { - let req = FetchWorkspaceRequest { path: None, force_crate_graph_reload: false }; - self.fetch_workspaces_queue.request_op("startup".to_owned(), req); + self.fetch_workspaces_queue.request_op( + "startup".to_owned(), + FetchWorkspaceRequest { path: None, force_crate_graph_reload: false }, + ); if let Some((cause, FetchWorkspaceRequest { path, force_crate_graph_reload })) = self.fetch_workspaces_queue.should_start_op() { @@ -173,7 +184,10 @@ impl GlobalState { } } - while let Some(event) = self.next_event(&inbox) { + while let Ok(event) = self.next_event(&inbox) { + let Some(event) = event else { + anyhow::bail!("client exited without proper shutdown sequence"); + }; if matches!( &event, Event::Lsp(lsp_server::Message::Notification(Notification { method, .. })) @@ -184,7 +198,7 @@ impl GlobalState { self.handle_event(event)?; } - anyhow::bail!("client exited without proper shutdown sequence") + Err(anyhow::anyhow!("A receiver has been dropped, something panicked!")) } fn register_did_save_capability(&mut self, additional_patterns: impl Iterator) { @@ -231,37 +245,40 @@ impl GlobalState { ); } - fn next_event(&self, inbox: &Receiver) -> Option { + fn next_event( + &self, + inbox: &Receiver, + ) -> Result, crossbeam_channel::RecvError> { select! 
{ recv(inbox) -> msg => - msg.ok().map(Event::Lsp), + return Ok(msg.ok().map(Event::Lsp)), recv(self.task_pool.receiver) -> task => - Some(Event::Task(task.unwrap())), + task.map(Event::Task), recv(self.deferred_task_queue.receiver) -> task => - Some(Event::QueuedTask(task.unwrap())), + task.map(Event::QueuedTask), recv(self.fmt_pool.receiver) -> task => - Some(Event::Task(task.unwrap())), + task.map(Event::Task), recv(self.loader.receiver) -> task => - Some(Event::Vfs(task.unwrap())), + task.map(Event::Vfs), recv(self.flycheck_receiver) -> task => - Some(Event::Flycheck(task.unwrap())), + task.map(Event::Flycheck), recv(self.test_run_receiver) -> task => - Some(Event::TestResult(task.unwrap())), + task.map(Event::TestResult), recv(self.discover_receiver) -> task => - Some(Event::DiscoverProject(task.unwrap())), + task.map(Event::DiscoverProject), } + .map(Some) } fn handle_event(&mut self, event: Event) -> anyhow::Result<()> { let loop_start = Instant::now(); - // NOTE: don't count blocking select! call as a loop-turn time let _p = tracing::info_span!("GlobalState::handle_event", event = %event).entered(); let event_dbg_msg = format!("{event:?}"); @@ -375,9 +392,14 @@ impl GlobalState { } } let event_handling_duration = loop_start.elapsed(); - - let state_changed = self.process_changes(); - let memdocs_added_or_removed = self.mem_docs.take_changes(); + let (state_changed, memdocs_added_or_removed) = if self.vfs_done { + if let Some(cause) = self.wants_to_switch.take() { + self.switch_workspaces(cause); + } + (self.process_changes(), self.mem_docs.take_changes()) + } else { + (false, false) + }; if self.is_quiescent() { let became_quiescent = !was_quiescent; @@ -423,40 +445,13 @@ impl GlobalState { if let Some(diagnostic_changes) = self.diagnostics.take_changes() { for file_id in diagnostic_changes { let uri = file_id_to_url(&self.vfs.read().0, file_id); - let mut diagnostics = - self.diagnostics.diagnostics_for(file_id).cloned().collect::>(); - - // VSCode assumes diagnostic messages to be non-empty strings, so we need to patch - // empty diagnostics. Neither the docs of VSCode nor the LSP spec say whether - // diagnostic messages are actually allowed to be empty or not and patching this - // in the VSCode client does not work as the assertion happens in the protocol - // conversion. So this hack is here to stay, and will be considered a hack - // until the LSP decides to state that empty messages are allowed. - - // See https://github.com/rust-lang/rust-analyzer/issues/11404 - // See https://github.com/rust-lang/rust-analyzer/issues/13130 - let patch_empty = |message: &mut String| { - if message.is_empty() { - " ".clone_into(message); - } - }; - - for d in &mut diagnostics { - patch_empty(&mut d.message); - if let Some(dri) = &mut d.related_information { - for dri in dri { - patch_empty(&mut dri.message); - } - } - } - let version = from_proto::vfs_path(&uri) - .map(|path| self.mem_docs.get(&path).map(|it| it.version)) - .unwrap_or_default(); + .ok() + .and_then(|path| self.mem_docs.get(&path).map(|it| it.version)); - self.send_notification::( - lsp_types::PublishDiagnosticsParams { uri, diagnostics, version }, - ); + let diagnostics = + self.diagnostics.diagnostics_for(file_id).cloned().collect::>(); + self.publish_diagnostics(uri, version, diagnostics); } } @@ -549,14 +544,37 @@ impl GlobalState { } // Diagnostics are triggered by the user typing // so we run them on a latency sensitive thread. 
- self.task_pool.handle.spawn(ThreadIntent::LatencySensitive, { - let snapshot = self.snapshot(); + let snapshot = self.snapshot(); + self.task_pool.handle.spawn_with_sender(ThreadIntent::LatencySensitive, { let subscriptions = subscriptions.clone(); - move || { - Task::Diagnostics( - generation, - fetch_native_diagnostics(snapshot, subscriptions, slice), - ) + // Do not fetch semantic diagnostics (and populate query results) if we haven't even + // loaded the initial workspace yet. + let fetch_semantic = + self.vfs_done && self.fetch_workspaces_queue.last_op_result().is_some(); + move |sender| { + let diags = fetch_native_diagnostics( + &snapshot, + subscriptions.clone(), + slice.clone(), + NativeDiagnosticsFetchKind::Syntax, + ); + sender + .send(Task::Diagnostics(DiagnosticsTaskKind::Syntax(generation, diags))) + .unwrap(); + + if fetch_semantic { + let diags = fetch_native_diagnostics( + &snapshot, + subscriptions, + slice, + NativeDiagnosticsFetchKind::Semantic, + ); + sender + .send(Task::Diagnostics(DiagnosticsTaskKind::Semantic( + generation, diags, + ))) + .unwrap(); + } } }); start = end; @@ -564,6 +582,9 @@ impl GlobalState { } fn update_tests(&mut self) { + if !self.vfs_done { + return; + } let db = self.analysis_host.raw_database(); let subscriptions = self .mem_docs @@ -644,10 +665,8 @@ impl GlobalState { // Only retry requests that haven't been cancelled. Otherwise we do unnecessary work. Task::Retry(req) if !self.is_completed(&req) => self.on_request(req), Task::Retry(_) => (), - Task::Diagnostics(generation, diagnostics_per_file) => { - for (file_id, diagnostics) in diagnostics_per_file { - self.diagnostics.set_native_diagnostics(generation, file_id, diagnostics) - } + Task::Diagnostics(kind) => { + self.diagnostics.set_native_diagnostics(kind); } Task::PrimeCaches(progress) => match progress { PrimeCachesProgress::Begin => prime_caches_progress.push(progress), @@ -672,7 +691,7 @@ impl GlobalState { if let Err(e) = self.fetch_workspace_error() { error!("FetchWorkspaceError:\n{e}"); } - self.switch_workspaces("fetched workspace".to_owned()); + self.wants_to_switch = Some("fetched workspace".to_owned()); (Progress::End, None) } }; @@ -686,8 +705,7 @@ impl GlobalState { // `self.report_progress` is called later let title = &cfg.progress_label.clone(); let command = cfg.command.clone(); - let discover = - project_json::Discover::new(self.discover_sender.clone(), command); + let discover = DiscoverCommand::new(self.discover_sender.clone(), command); self.report_progress(title, Progress::Begin, None, None, None); self.discover_workspace_queue @@ -695,12 +713,8 @@ impl GlobalState { let _ = self.discover_workspace_queue.should_start_op(); let arg = match arg { - DiscoverProjectParam::Buildfile(it) => { - project_json::DiscoverArgument::Buildfile(it) - } - DiscoverProjectParam::Path(it) => { - project_json::DiscoverArgument::Path(it) - } + DiscoverProjectParam::Buildfile(it) => DiscoverArgument::Buildfile(it), + DiscoverProjectParam::Path(it) => DiscoverArgument::Path(it), }; let handle = discover.spawn(arg).unwrap(); @@ -718,8 +732,9 @@ impl GlobalState { error!("FetchBuildDataError:\n{e}"); } - self.switch_workspaces("fetched build data".to_owned()); - + if self.wants_to_switch.is_none() { + self.wants_to_switch = Some("fetched build data".to_owned()); + } (Some(Progress::End), None) } }; @@ -772,16 +787,14 @@ impl GlobalState { let _p = tracing::info_span!("GlobalState::handle_vfs_mgs/progress").entered(); always!(config_version <= self.vfs_config_version); - let state = match 
n_done { - None => Progress::Begin, - Some(done) if done == n_total => Progress::End, - Some(_) => Progress::Report, + let (n_done, state) = match n_done { + LoadingProgress::Started => (0, Progress::Begin), + LoadingProgress::Progress(n_done) => (n_done.min(n_total), Progress::Report), + LoadingProgress::Finished => (n_total, Progress::End), }; - let n_done = n_done.unwrap_or_default(); self.vfs_progress_config_version = config_version; - self.vfs_progress_n_total = n_total; - self.vfs_progress_n_done = n_done; + self.vfs_done = state == Progress::End; let mut message = format!("{n_done}/{n_total}"); if let Some(dir) = dir { @@ -850,14 +863,14 @@ impl GlobalState { } } - fn handle_discover_msg(&mut self, message: project_json::DiscoverProjectMessage) { + fn handle_discover_msg(&mut self, message: DiscoverProjectMessage) { let title = self .config .discover_workspace_config() .map(|cfg| cfg.progress_label.clone()) .expect("No title could be found; this is a bug"); match message { - project_json::DiscoverProjectMessage::Finished { project, buildfile } => { + DiscoverProjectMessage::Finished { project, buildfile } => { self.report_progress(&title, Progress::End, None, None, None); self.discover_workspace_queue.op_completed(()); @@ -865,10 +878,10 @@ impl GlobalState { config.add_linked_projects(project, buildfile); self.update_configuration(config); } - project_json::DiscoverProjectMessage::Progress { message } => { + DiscoverProjectMessage::Progress { message } => { self.report_progress(&title, Progress::Report, Some(message), None, None) } - project_json::DiscoverProjectMessage::Error { error, source } => { + DiscoverProjectMessage::Error { error, source } => { let message = format!("Project discovery failed: {error}"); self.discover_workspace_queue.op_completed(()); self.show_and_log_error(message.clone(), source); @@ -877,16 +890,14 @@ impl GlobalState { } } - fn handle_cargo_test_msg(&mut self, message: flycheck::CargoTestMessage) { + fn handle_cargo_test_msg(&mut self, message: CargoTestMessage) { match message { - flycheck::CargoTestMessage::Test { name, state } => { + CargoTestMessage::Test { name, state } => { let state = match state { - flycheck::TestState::Started => lsp_ext::TestState::Started, - flycheck::TestState::Ignored => lsp_ext::TestState::Skipped, - flycheck::TestState::Ok => lsp_ext::TestState::Passed, - flycheck::TestState::Failed { stdout } => { - lsp_ext::TestState::Failed { message: stdout } - } + TestState::Started => lsp_ext::TestState::Started, + TestState::Ignored => lsp_ext::TestState::Skipped, + TestState::Ok => lsp_ext::TestState::Passed, + TestState::Failed { stdout } => lsp_ext::TestState::Failed { message: stdout }, }; let Some(test_id) = hack_recover_crate_name::lookup_name(name) else { return; @@ -895,23 +906,23 @@ impl GlobalState { lsp_ext::ChangeTestStateParams { test_id, state }, ); } - flycheck::CargoTestMessage::Suite => (), - flycheck::CargoTestMessage::Finished => { + CargoTestMessage::Suite => (), + CargoTestMessage::Finished => { self.test_run_remaining_jobs = self.test_run_remaining_jobs.saturating_sub(1); if self.test_run_remaining_jobs == 0 { self.send_notification::(()); self.test_run_session = None; } } - flycheck::CargoTestMessage::Custom { text } => { + CargoTestMessage::Custom { text } => { self.send_notification::(text); } } } - fn handle_flycheck_msg(&mut self, message: flycheck::Message) { + fn handle_flycheck_msg(&mut self, message: FlycheckMessage) { match message { - flycheck::Message::AddDiagnostic { id, workspace_root, 
diagnostic } => { + FlycheckMessage::AddDiagnostic { id, workspace_root, diagnostic } => { let snap = self.snapshot(); let diagnostics = crate::diagnostics::to_proto::map_rust_diagnostic_to_lsp( &self.config.diagnostics_map(), @@ -937,9 +948,9 @@ impl GlobalState { } } - flycheck::Message::ClearDiagnostics { id } => self.diagnostics.clear_check(id), + FlycheckMessage::ClearDiagnostics { id } => self.diagnostics.clear_check(id), - flycheck::Message::Progress { id, progress } => { + FlycheckMessage::Progress { id, progress } => { let (state, message) = match progress { flycheck::Progress::DidStart => (Progress::Begin, None), flycheck::Progress::DidCheckCrate(target) => (Progress::Report, Some(target)), @@ -1054,9 +1065,9 @@ impl GlobalState { .on::(handlers::handle_goto_implementation) .on::(handlers::handle_goto_type_definition) .on::(handlers::handle_inlay_hints) - .on::(handlers::handle_inlay_hints_resolve) + .on_identity::(handlers::handle_inlay_hints_resolve) .on::(handlers::handle_code_lens) - .on::(handlers::handle_code_lens_resolve) + .on_identity::(handlers::handle_code_lens_resolve) .on::(handlers::handle_prepare_rename) .on::(handlers::handle_rename) .on::(handlers::handle_references) @@ -1083,7 +1094,7 @@ impl GlobalState { .on::(handlers::handle_runnables) .on::(handlers::handle_related_tests) .on::(handlers::handle_code_action) - .on::(handlers::handle_code_action_resolve) + .on_identity::(handlers::handle_code_action_resolve) .on::(handlers::handle_hover) .on::(handlers::handle_open_docs) .on::(handlers::handle_open_cargo_toml) diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs index 5c95ccd4b826f..dee34b1b393d2 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs @@ -15,7 +15,6 @@ // FIXME: This is a mess that needs some untangling work use std::{iter, mem}; -use flycheck::{FlycheckConfig, FlycheckHandle}; use hir::{db::DefDatabase, ChangeWithProcMacros, ProcMacros, ProcMacrosBuilder}; use ide_db::{ base_db::{salsa::Durability, CrateGraph, ProcMacroPaths, Version}, @@ -32,6 +31,7 @@ use vfs::{AbsPath, AbsPathBuf, ChangeKind}; use crate::{ config::{Config, FilesWatcher, LinkedProject}, + flycheck::{FlycheckConfig, FlycheckHandle}, global_state::{FetchWorkspaceRequest, GlobalState}, lsp_ext, main_loop::{DiscoverProjectParam, Task}, @@ -62,13 +62,13 @@ pub(crate) enum ProcMacroProgress { impl GlobalState { pub(crate) fn is_quiescent(&self) -> bool { - !(self.last_reported_status.is_none() - || self.fetch_workspaces_queue.op_in_progress() - || self.fetch_build_data_queue.op_in_progress() - || self.fetch_proc_macros_queue.op_in_progress() - || self.discover_workspace_queue.op_in_progress() - || self.vfs_progress_config_version < self.vfs_config_version - || self.vfs_progress_n_done < self.vfs_progress_n_total) + self.vfs_done + && self.last_reported_status.is_some() + && !self.fetch_workspaces_queue.op_in_progress() + && !self.fetch_build_data_queue.op_in_progress() + && !self.fetch_proc_macros_queue.op_in_progress() + && !self.discover_workspace_queue.op_in_progress() + && self.vfs_progress_config_version >= self.vfs_config_version } pub(crate) fn update_configuration(&mut self, config: Config) { @@ -102,15 +102,13 @@ impl GlobalState { } pub(crate) fn current_status(&self) -> lsp_ext::ServerStatusParams { - let mut status = lsp_ext::ServerStatusParams { - health: lsp_ext::Health::Ok, - quiescent: 
self.is_quiescent(), - message: None, - }; + let quiescent = self.is_quiescent(); + let mut status = + lsp_ext::ServerStatusParams { health: lsp_ext::Health::Ok, quiescent, message: None }; let mut message = String::new(); if !self.config.cargo_autoreload(None) - && self.is_quiescent() + && quiescent && self.fetch_workspaces_queue.op_requested() && self.config.discover_workspace_config().is_none() { @@ -242,7 +240,7 @@ impl GlobalState { let discover_command = self.config.discover_workspace_config().cloned(); let is_quiescent = !(self.discover_workspace_queue.op_in_progress() || self.vfs_progress_config_version < self.vfs_config_version - || self.vfs_progress_n_done < self.vfs_progress_n_total); + || !self.vfs_done); move |sender| { let progress = { @@ -751,20 +749,22 @@ impl GlobalState { let config = self.config.flycheck(); let sender = self.flycheck_sender.clone(); let invocation_strategy = match config { - FlycheckConfig::CargoCommand { .. } => flycheck::InvocationStrategy::PerWorkspace, + FlycheckConfig::CargoCommand { .. } => { + crate::flycheck::InvocationStrategy::PerWorkspace + } FlycheckConfig::CustomCommand { invocation_strategy, .. } => invocation_strategy, }; self.flycheck = match invocation_strategy { - flycheck::InvocationStrategy::Once => vec![FlycheckHandle::spawn( + crate::flycheck::InvocationStrategy::Once => vec![FlycheckHandle::spawn( 0, - Box::new(move |msg| sender.send(msg).unwrap()), + sender, config, None, self.config.root_path().clone(), None, )], - flycheck::InvocationStrategy::PerWorkspace => { + crate::flycheck::InvocationStrategy::PerWorkspace => { self.workspaces .iter() .enumerate() @@ -793,10 +793,9 @@ impl GlobalState { )) }) .map(|(id, (root, manifest_path), sysroot_root)| { - let sender = sender.clone(); FlycheckHandle::spawn( id, - Box::new(move |msg| sender.send(msg).unwrap()), + sender.clone(), config.clone(), sysroot_root, root.to_path_buf(), diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/target_spec.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/target_spec.rs index 67e1bad5281d0..965fd415e9967 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/target_spec.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/target_spec.rs @@ -263,11 +263,14 @@ mod tests { use super::*; use ide::Edition; - use mbe::{syntax_node_to_token_tree, DocCommentDesugarMode, DummyTestSpanMap, DUMMY}; use syntax::{ ast::{self, AstNode}, SmolStr, }; + use syntax_bridge::{ + dummy_test_span_utils::{DummyTestSpanMap, DUMMY}, + syntax_node_to_token_tree, DocCommentDesugarMode, + }; fn check(cfg: &str, expected_features: &[&str]) { let cfg_expr = { diff --git a/src/tools/rust-analyzer/crates/flycheck/src/test_runner.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/test_runner.rs similarity index 93% rename from src/tools/rust-analyzer/crates/flycheck/src/test_runner.rs rename to src/tools/rust-analyzer/crates/rust-analyzer/src/test_runner.rs index 74ebca34103aa..293cff474337d 100644 --- a/src/tools/rust-analyzer/crates/flycheck/src/test_runner.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/test_runner.rs @@ -10,12 +10,12 @@ use toolchain::Tool; use crate::{ command::{CommandHandle, ParseFromLine}, - CargoOptions, + flycheck::CargoOptions, }; #[derive(Debug, Deserialize)] #[serde(tag = "event", rename_all = "camelCase")] -pub enum TestState { +pub(crate) enum TestState { Started, Ok, Ignored, @@ -24,7 +24,7 @@ pub enum TestState { #[derive(Debug, Deserialize)] #[serde(tag = "type", rename_all = "camelCase")] -pub enum 
CargoTestMessage { +pub(crate) enum CargoTestMessage { Test { name: String, #[serde(flatten)] @@ -54,7 +54,7 @@ impl ParseFromLine for CargoTestMessage { } #[derive(Debug)] -pub struct CargoTestHandle { +pub(crate) struct CargoTestHandle { _handle: CommandHandle, } @@ -64,13 +64,13 @@ pub struct CargoTestHandle { // cargo test --package my-package --no-fail-fast -- module::func -Z unstable-options --format=json #[derive(Debug)] -pub enum TestTarget { +pub(crate) enum TestTarget { Workspace, Package(String), } impl CargoTestHandle { - pub fn new( + pub(crate) fn new( path: Option<&str>, options: CargoOptions, root: &AbsPath, diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/tests/crate_graph.rs b/src/tools/rust-analyzer/crates/rust-analyzer/tests/crate_graph.rs index 66481d3d7f5e1..b8a82fd6a72ee 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/tests/crate_graph.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/tests/crate_graph.rs @@ -69,7 +69,7 @@ fn get_fake_sysroot() -> Sysroot { // fake sysroot, so we give them both the same path: let sysroot_dir = AbsPathBuf::assert_utf8(sysroot_path); let sysroot_src_dir = sysroot_dir.clone(); - Sysroot::load(Some(sysroot_dir), Some(sysroot_src_dir), false) + Sysroot::load(Some(sysroot_dir), Some(sysroot_src_dir)) } #[test] diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs index b1ef48377178b..54cd27f4b3bc3 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs @@ -750,7 +750,7 @@ fn test_missing_module_code_action_in_json_project() { let code = format!( r#" -//- /rust-project.json +//- /.rust-project.json {project} //- /src/lib.rs @@ -909,7 +909,7 @@ version = \"0.0.0\" fn out_dirs_check_impl(root_contains_symlink: bool) { if skip_slow_tests() { - // return; + return; } let mut server = Project::with_fixture( @@ -1084,7 +1084,6 @@ fn resolve_proc_macro() { let sysroot = project_model::Sysroot::discover( &AbsPathBuf::assert_utf8(std::env::current_dir().unwrap()), &Default::default(), - false, ); let proc_macro_server_path = sysroot.discover_proc_macro_srv().unwrap(); @@ -1125,7 +1124,6 @@ edition = "2021" proc-macro = true //- /bar/src/lib.rs -extern crate proc_macro; use proc_macro::{Delimiter, Group, Ident, Span, TokenStream, TokenTree}; macro_rules! t { ($n:literal) => { diff --git a/src/tools/rust-analyzer/crates/span/Cargo.toml b/src/tools/rust-analyzer/crates/span/Cargo.toml index 9f85f0107cc18..3381dac0b4255 100644 --- a/src/tools/rust-analyzer/crates/span/Cargo.toml +++ b/src/tools/rust-analyzer/crates/span/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "span" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "File and span related types for rust-analyzer." rust-version.workspace = true edition.workspace = true diff --git a/src/tools/rust-analyzer/crates/span/src/ast_id.rs b/src/tools/rust-analyzer/crates/span/src/ast_id.rs index b61baa224465f..0ebd72e1514c6 100644 --- a/src/tools/rust-analyzer/crates/span/src/ast_id.rs +++ b/src/tools/rust-analyzer/crates/span/src/ast_id.rs @@ -18,7 +18,28 @@ use syntax::{ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr}; /// See crates\hir-expand\src\ast_id_map.rs /// This is a type erased FileAstId. 
-pub type ErasedFileAstId = la_arena::Idx; +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct ErasedFileAstId(u32); + +impl ErasedFileAstId { + pub const fn into_raw(self) -> u32 { + self.0 + } + pub const fn from_raw(u32: u32) -> Self { + Self(u32) + } +} + +impl fmt::Display for ErasedFileAstId { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.0.fmt(f) + } +} +impl fmt::Debug for ErasedFileAstId { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.0.fmt(f) + } +} /// `AstId` points to an AST node in a specific file. pub struct FileAstId { @@ -47,7 +68,7 @@ impl Hash for FileAstId { impl fmt::Debug for FileAstId { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "FileAstId::<{}>({})", type_name::(), self.raw.into_raw()) + write!(f, "FileAstId::<{}>({})", type_name::(), self.raw) } } @@ -176,7 +197,10 @@ impl AstIdMap { let ptr = ptr.syntax_node_ptr(); let hash = hash_ptr(&ptr); match self.map.raw_entry().from_hash(hash, |&idx| self.arena[idx] == ptr) { - Some((&raw, &())) => FileAstId { raw, covariant: PhantomData }, + Some((&raw, &())) => FileAstId { + raw: ErasedFileAstId(raw.into_raw().into_u32()), + covariant: PhantomData, + }, None => panic!( "Can't find {:?} in AstIdMap:\n{:?}", ptr, @@ -186,18 +210,19 @@ impl AstIdMap { } pub fn get(&self, id: FileAstId) -> AstPtr { - AstPtr::try_from_raw(self.arena[id.raw]).unwrap() + AstPtr::try_from_raw(self.arena[Idx::from_raw(RawIdx::from_u32(id.raw.into_raw()))]) + .unwrap() } pub fn get_erased(&self, id: ErasedFileAstId) -> SyntaxNodePtr { - self.arena[id] + self.arena[Idx::from_raw(RawIdx::from_u32(id.into_raw()))] } fn erased_ast_id(&self, item: &SyntaxNode) -> ErasedFileAstId { let ptr = SyntaxNodePtr::new(item); let hash = hash_ptr(&ptr); match self.map.raw_entry().from_hash(hash, |&idx| self.arena[idx] == ptr) { - Some((&idx, &())) => idx, + Some((&idx, &())) => ErasedFileAstId(idx.into_raw().into_u32()), None => panic!( "Can't find {:?} in AstIdMap:\n{:?}", item, @@ -207,7 +232,7 @@ impl AstIdMap { } fn alloc(&mut self, item: &SyntaxNode) -> ErasedFileAstId { - self.arena.alloc(SyntaxNodePtr::new(item)) + ErasedFileAstId(self.arena.alloc(SyntaxNodePtr::new(item)).into_raw().into_u32()) } } diff --git a/src/tools/rust-analyzer/crates/span/src/hygiene.rs b/src/tools/rust-analyzer/crates/span/src/hygiene.rs index e8c558355c965..874480c59fbf3 100644 --- a/src/tools/rust-analyzer/crates/span/src/hygiene.rs +++ b/src/tools/rust-analyzer/crates/span/src/hygiene.rs @@ -79,6 +79,10 @@ impl SyntaxContextId { #[derive(Copy, Clone, Hash, PartialEq, Eq)] pub struct SyntaxContextData { /// Invariant: Only [`SyntaxContextId::ROOT`] has a [`None`] outer expansion. + // FIXME: The None case needs to encode the context crate id. We can encode that as the MSB of + // MacroCallId is reserved anyways so we can do bit tagging here just fine. + // The bigger issue is that that will cause interning to now create completely separate chains + // per crate. Though that is likely not a problem as `MacroCallId`s are already crate calling dependent. 
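The `ErasedFileAstId` change above swaps the `la_arena::Idx`-based alias for a plain `u32` newtype with `const` `from_raw`/`into_raw` accessors and `Debug`/`Display` forwarded to the integer, which is what lets the `ROOT_ERASED_FILE_AST_ID` and `FIXUP_ERASED_FILE_AST_ID_MARKER` constants further down be declared directly from raw values. A small standalone sketch of that newtype-index pattern (the `RawAstId` name and the `main` driver are illustrative, not part of the codebase):

```rust
// Sketch of a u32-backed index newtype in the style of `ErasedFileAstId`: the
// arena/index implementation detail stays private, while callers get a stable,
// const-constructible raw representation.
use std::fmt;

#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct RawAstId(u32);

impl RawAstId {
    pub const fn from_raw(raw: u32) -> Self {
        Self(raw)
    }
    pub const fn into_raw(self) -> u32 {
        self.0
    }
}

// Forward `Debug` to the inner integer so debug dumps stay as compact as the
// old arena index printed; the real type forwards `Display` the same way.
impl fmt::Debug for RawAstId {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.0.fmt(f)
    }
}

fn main() {
    // Because the constructors are `const fn`, sentinel ids can be constants,
    // mirroring ROOT (0) and the fixup marker near u32::MAX in the span crate.
    const ROOT: RawAstId = RawAstId::from_raw(0);
    const FIXUP_MARKER: RawAstId = RawAstId::from_raw(!0 - 1);

    assert_eq!(ROOT.into_raw(), 0);
    assert_eq!(FIXUP_MARKER.into_raw(), u32::MAX - 1);
    println!("root = {:?}, fixup marker = {:?}", ROOT, FIXUP_MARKER);
}
```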
pub outer_expn: Option, pub outer_transparency: Transparency, pub parent: SyntaxContextId, diff --git a/src/tools/rust-analyzer/crates/span/src/lib.rs b/src/tools/rust-analyzer/crates/span/src/lib.rs index b4e21d64f81e3..61e4c98128a57 100644 --- a/src/tools/rust-analyzer/crates/span/src/lib.rs +++ b/src/tools/rust-analyzer/crates/span/src/lib.rs @@ -21,15 +21,14 @@ pub use vfs::FileId; /// The root ast id always points to the encompassing file, using this in spans is discouraged as /// any range relative to it will be effectively absolute, ruining the entire point of anchored /// relative text ranges. -pub const ROOT_ERASED_FILE_AST_ID: ErasedFileAstId = - la_arena::Idx::from_raw(la_arena::RawIdx::from_u32(0)); +pub const ROOT_ERASED_FILE_AST_ID: ErasedFileAstId = ErasedFileAstId::from_raw(0); /// FileId used as the span for syntax node fixups. Any Span containing this file id is to be /// considered fake. pub const FIXUP_ERASED_FILE_AST_ID_MARKER: ErasedFileAstId = - // we pick the second to last for this in case we every consider making this a NonMaxU32, this + // we pick the second to last for this in case we ever consider making this a NonMaxU32, this // is required to be stable for the proc-macro-server - la_arena::Idx::from_raw(la_arena::RawIdx::from_u32(!0 - 1)); + ErasedFileAstId::from_raw(!0 - 1); pub type Span = SpanData; diff --git a/src/tools/rust-analyzer/crates/span/src/map.rs b/src/tools/rust-analyzer/crates/span/src/map.rs index 6269f4c30c7d2..c539754979d6a 100644 --- a/src/tools/rust-analyzer/crates/span/src/map.rs +++ b/src/tools/rust-analyzer/crates/span/src/map.rs @@ -119,7 +119,7 @@ impl fmt::Display for RealSpanMap { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { writeln!(f, "RealSpanMap({:?}):", self.file_id)?; for span in self.pairs.iter() { - writeln!(f, "{}: {}", u32::from(span.0), span.1.into_raw().into_u32())?; + writeln!(f, "{}: {}", u32::from(span.0), span.1.into_raw())?; } Ok(()) } diff --git a/src/tools/rust-analyzer/crates/stdx/Cargo.toml b/src/tools/rust-analyzer/crates/stdx/Cargo.toml index 99824df1f69d1..bf0d6df9ad810 100644 --- a/src/tools/rust-analyzer/crates/stdx/Cargo.toml +++ b/src/tools/rust-analyzer/crates/stdx/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "stdx" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "Missing batteries for standard libraries for rust-analyzer." authors.workspace = true edition.workspace = true @@ -16,7 +17,7 @@ backtrace = { version = "0.3.67", optional = true } always-assert = { version = "0.2.0", features = ["tracing"] } jod-thread = "0.1.2" libc.workspace = true -crossbeam-channel = "0.5.5" +crossbeam-channel.workspace = true itertools.workspace = true # Think twice before adding anything here diff --git a/src/tools/rust-analyzer/crates/syntax-bridge/Cargo.toml b/src/tools/rust-analyzer/crates/syntax-bridge/Cargo.toml new file mode 100644 index 0000000000000..e995ff3b55b44 --- /dev/null +++ b/src/tools/rust-analyzer/crates/syntax-bridge/Cargo.toml @@ -0,0 +1,34 @@ +[package] +name = "syntax-bridge" +version = "0.0.0" +repository.workspace = true +description = "Conversions between syntax nodes and token trees for rust-analyzer." 
+ +authors.workspace = true +edition.workspace = true +license.workspace = true +rust-version.workspace = true + +[lib] +doctest = false + +[dependencies] +rustc-hash.workspace = true +tracing.workspace = true + +# local deps +syntax.workspace = true +parser.workspace = true +tt.workspace = true +stdx.workspace = true +span.workspace = true +intern.workspace = true + +[dev-dependencies] +test-utils.workspace = true + +[features] +in-rust-tree = ["parser/in-rust-tree", "tt/in-rust-tree", "syntax/in-rust-tree"] + +[lints] +workspace = true diff --git a/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs b/src/tools/rust-analyzer/crates/syntax-bridge/src/lib.rs similarity index 96% rename from src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs rename to src/tools/rust-analyzer/crates/syntax-bridge/src/lib.rs index a29efdd4ef77e..b0afd245c52e3 100644 --- a/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs +++ b/src/tools/rust-analyzer/crates/syntax-bridge/src/lib.rs @@ -14,11 +14,13 @@ use syntax::{ }; use tt::{ buffer::{Cursor, TokenBuffer}, - iter::TtIter, token_to_literal, }; -use crate::to_parser_input::to_parser_input; +mod to_parser_input; +pub use to_parser_input::to_parser_input; +// FIXME: we probably should re-think `token_tree_to_syntax_node` interfaces +pub use ::parser::TopEntryPoint; #[cfg(test)] mod tests; @@ -43,7 +45,7 @@ impl> SpanMapper for &SM { } /// Dummy things for testing where spans don't matter. -pub(crate) mod dummy_test_span_utils { +pub mod dummy_test_span_utils { use span::{Span, SyntaxContextId}; @@ -211,50 +213,6 @@ where Some(convert_tokens(&mut conv)) } -/// Split token tree with separate expr: $($e:expr)SEP* -pub fn parse_exprs_with_sep( - tt: &tt::Subtree, - sep: char, - span: span::Span, - edition: Edition, -) -> Vec> { - if tt.token_trees.is_empty() { - return Vec::new(); - } - - let mut iter = TtIter::new(tt); - let mut res = Vec::new(); - - while iter.peek_n(0).is_some() { - let expanded = crate::expect_fragment( - &mut iter, - parser::PrefixEntryPoint::Expr, - edition, - tt::DelimSpan { open: tt.delimiter.open, close: tt.delimiter.close }, - ); - - res.push(match expanded.value { - None => break, - Some(tt) => tt.subtree_or_wrap(tt::DelimSpan { open: span, close: span }), - }); - - let mut fork = iter.clone(); - if fork.expect_char(sep).is_err() { - break; - } - iter = fork; - } - - if iter.peek_n(0).is_some() { - res.push(tt::Subtree { - delimiter: tt::Delimiter::invisible_spanned(span), - token_trees: iter.cloned().collect(), - }); - } - - res -} - fn convert_tokens(conv: &mut C) -> tt::Subtree where C: TokenConverter, @@ -479,7 +437,6 @@ fn convert_doc_comment( span: S, mode: DocCommentDesugarMode, ) -> Option>> { - cov_mark::hit!(test_meta_doc_comments); let comment = ast::Comment::cast(token.clone())?; let doc = comment.kind().doc?; diff --git a/src/tools/rust-analyzer/crates/syntax-bridge/src/tests.rs b/src/tools/rust-analyzer/crates/syntax-bridge/src/tests.rs new file mode 100644 index 0000000000000..7b8e3f2b49c20 --- /dev/null +++ b/src/tools/rust-analyzer/crates/syntax-bridge/src/tests.rs @@ -0,0 +1,104 @@ +use rustc_hash::FxHashMap; +use span::Span; +use syntax::{ast, AstNode}; +use test_utils::extract_annotations; +use tt::{ + buffer::{TokenBuffer, TokenTreeRef}, + Leaf, Punct, Spacing, +}; + +use crate::{ + dummy_test_span_utils::{DummyTestSpanMap, DUMMY}, + syntax_node_to_token_tree, DocCommentDesugarMode, +}; + +fn check_punct_spacing(fixture: &str) { + let source_file = ast::SourceFile::parse(fixture, 
span::Edition::CURRENT).ok().unwrap(); + let subtree = syntax_node_to_token_tree( + source_file.syntax(), + DummyTestSpanMap, + DUMMY, + DocCommentDesugarMode::Mbe, + ); + let mut annotations: FxHashMap<_, _> = extract_annotations(fixture) + .into_iter() + .map(|(range, annotation)| { + let spacing = match annotation.as_str() { + "Alone" => Spacing::Alone, + "Joint" => Spacing::Joint, + a => panic!("unknown annotation: {a}"), + }; + (range, spacing) + }) + .collect(); + + let buf = TokenBuffer::from_subtree(&subtree); + let mut cursor = buf.begin(); + while !cursor.eof() { + while let Some(token_tree) = cursor.token_tree() { + if let TokenTreeRef::Leaf( + Leaf::Punct(Punct { spacing, span: Span { range, .. }, .. }), + _, + ) = token_tree + { + if let Some(expected) = annotations.remove(range) { + assert_eq!(expected, *spacing); + } + } + cursor = cursor.bump_subtree(); + } + cursor = cursor.bump(); + } + + assert!(annotations.is_empty(), "unchecked annotations: {annotations:?}"); +} + +#[test] +fn punct_spacing() { + check_punct_spacing( + r#" +fn main() { + 0+0; + //^ Alone + 0+(0); + //^ Alone + 0<=0; + //^ Joint + // ^ Alone + 0<=(0); + // ^ Alone + a=0; + //^ Alone + a=(0); + //^ Alone + a+=0; + //^ Joint + // ^ Alone + a+=(0); + // ^ Alone + a&&b; + //^ Joint + // ^ Alone + a&&(b); + // ^ Alone + foo::bar; + // ^ Joint + // ^ Alone + use foo::{bar,baz,}; + // ^ Alone + // ^ Alone + // ^ Alone + struct Struct<'a> {}; + // ^ Joint + // ^ Joint + Struct::<0>; + // ^ Alone + Struct::<{0}>; + // ^ Alone + ;; + //^ Joint + // ^ Alone +} + "#, + ); +} diff --git a/src/tools/rust-analyzer/crates/mbe/src/to_parser_input.rs b/src/tools/rust-analyzer/crates/syntax-bridge/src/to_parser_input.rs similarity index 98% rename from src/tools/rust-analyzer/crates/mbe/src/to_parser_input.rs rename to src/tools/rust-analyzer/crates/syntax-bridge/src/to_parser_input.rs index c35b28527a09b..2c54899268834 100644 --- a/src/tools/rust-analyzer/crates/mbe/src/to_parser_input.rs +++ b/src/tools/rust-analyzer/crates/syntax-bridge/src/to_parser_input.rs @@ -8,7 +8,7 @@ use syntax::{SyntaxKind, SyntaxKind::*, T}; use tt::buffer::TokenBuffer; -pub(crate) fn to_parser_input( +pub fn to_parser_input( edition: Edition, buffer: &TokenBuffer<'_, S>, ) -> parser::Input { diff --git a/src/tools/rust-analyzer/crates/syntax/Cargo.toml b/src/tools/rust-analyzer/crates/syntax/Cargo.toml index b371ec6ebd532..994c21469ff45 100644 --- a/src/tools/rust-analyzer/crates/syntax/Cargo.toml +++ b/src/tools/rust-analyzer/crates/syntax/Cargo.toml @@ -1,8 +1,8 @@ [package] name = "syntax" version = "0.0.0" -description = "Comment and whitespace preserving parser for the Rust language" -repository = "https://github.com/rust-lang/rust-analyzer" +repository.workspace = true +description = "Concrete syntax tree definitions for rust-analyzer." 
authors.workspace = true edition.workspace = true diff --git a/src/tools/rust-analyzer/crates/syntax/src/tests.rs b/src/tools/rust-analyzer/crates/syntax/src/tests.rs index f0d58efc01ed8..b50489c6f0f2a 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/tests.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/tests.rs @@ -79,7 +79,7 @@ fn self_hosting_parsing() { let crates_dir = project_root().join("crates"); let mut files = Vec::new(); - let mut work = vec![crates_dir.to_path_buf()]; + let mut work = vec![crates_dir.into_std_path_buf()]; while let Some(dir) = work.pop() { for entry in dir.read_dir().unwrap() { let entry = entry.unwrap(); @@ -127,7 +127,7 @@ fn self_hosting_parsing() { } fn test_data_dir() -> PathBuf { - project_root().join("crates/syntax/test_data") + project_root().into_std_path_buf().join("crates/syntax/test_data") } fn assert_errors_are_present(errors: &[SyntaxError], path: &Path) { diff --git a/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs b/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs index e1f40f5da0116..03e85a898ab0c 100644 --- a/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs +++ b/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs @@ -3,7 +3,7 @@ use std::{iter, mem, str::FromStr, sync}; use base_db::{ CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, Env, FileChange, - FileSet, LangCrateOrigin, SourceDatabaseExt, SourceRoot, Version, VfsPath, + FileSet, LangCrateOrigin, SourceRoot, SourceRootDatabase, Version, VfsPath, }; use cfg::CfgOptions; use hir_expand::{ @@ -26,7 +26,7 @@ use tt::{Leaf, Subtree, TokenTree}; pub const WORKSPACE: base_db::SourceRootId = base_db::SourceRootId(0); -pub trait WithFixture: Default + ExpandDatabase + SourceDatabaseExt + 'static { +pub trait WithFixture: Default + ExpandDatabase + SourceRootDatabase + 'static { #[track_caller] fn with_single_file(ra_fixture: &str) -> (Self, EditionedFileId) { let fixture = ChangeFixture::parse(ra_fixture); @@ -101,7 +101,7 @@ pub trait WithFixture: Default + ExpandDatabase + SourceDatabaseExt + 'static { } } -impl WithFixture for DB {} +impl WithFixture for DB {} pub struct ChangeFixture { pub file_position: Option<(EditionedFileId, RangeOrOffset)>, diff --git a/src/tools/rust-analyzer/crates/test-utils/Cargo.toml b/src/tools/rust-analyzer/crates/test-utils/Cargo.toml index 2ff1fad6c29d9..b1457722a922e 100644 --- a/src/tools/rust-analyzer/crates/test-utils/Cargo.toml +++ b/src/tools/rust-analyzer/crates/test-utils/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "test-utils" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "Assorted testing utilities for rust-analyzer." authors.workspace = true edition.workspace = true @@ -18,6 +19,7 @@ text-size.workspace = true tracing.workspace = true rustc-hash.workspace = true +paths.workspace = true stdx.workspace = true profile.workspace = true diff --git a/src/tools/rust-analyzer/crates/test-utils/src/lib.rs b/src/tools/rust-analyzer/crates/test-utils/src/lib.rs index 088817b8357ce..36be9937d3fea 100644 --- a/src/tools/rust-analyzer/crates/test-utils/src/lib.rs +++ b/src/tools/rust-analyzer/crates/test-utils/src/lib.rs @@ -18,6 +18,7 @@ use std::{ path::{Path, PathBuf}, }; +use paths::Utf8PathBuf; use profile::StopWatch; use stdx::is_ci; use text_size::{TextRange, TextSize}; @@ -402,9 +403,10 @@ pub fn skip_slow_tests() -> bool { } /// Returns the path to the root directory of `rust-analyzer` project. 
-pub fn project_root() -> PathBuf { +pub fn project_root() -> Utf8PathBuf { let dir = env!("CARGO_MANIFEST_DIR"); - PathBuf::from(dir).parent().unwrap().parent().unwrap().to_owned() + Utf8PathBuf::from_path_buf(PathBuf::from(dir).parent().unwrap().parent().unwrap().to_owned()) + .unwrap() } pub fn format_diff(chunks: Vec>) -> String { diff --git a/src/tools/rust-analyzer/crates/text-edit/Cargo.toml b/src/tools/rust-analyzer/crates/text-edit/Cargo.toml index f745674794c99..dc6b3d31a09f4 100644 --- a/src/tools/rust-analyzer/crates/text-edit/Cargo.toml +++ b/src/tools/rust-analyzer/crates/text-edit/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "text-edit" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "Representation of a `TextEdit` for rust-analyzer." authors.workspace = true edition.workspace = true diff --git a/src/tools/rust-analyzer/crates/toolchain/Cargo.toml b/src/tools/rust-analyzer/crates/toolchain/Cargo.toml index c85efd432b00d..87e8efb20fc6f 100644 --- a/src/tools/rust-analyzer/crates/toolchain/Cargo.toml +++ b/src/tools/rust-analyzer/crates/toolchain/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "toolchain" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "Discovery of `cargo` & `rustc` executables for rust-analyzer." authors.workspace = true edition.workspace = true diff --git a/src/tools/rust-analyzer/crates/tt/Cargo.toml b/src/tools/rust-analyzer/crates/tt/Cargo.toml index cea1519c2dd46..82e7c24668fe6 100644 --- a/src/tools/rust-analyzer/crates/tt/Cargo.toml +++ b/src/tools/rust-analyzer/crates/tt/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "tt" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "A `TokenTree` data structure for rust-analyzer." authors.workspace = true edition.workspace = true diff --git a/src/tools/rust-analyzer/crates/vfs-notify/Cargo.toml b/src/tools/rust-analyzer/crates/vfs-notify/Cargo.toml index a6d5027c3a60a..09296dc6dd533 100644 --- a/src/tools/rust-analyzer/crates/vfs-notify/Cargo.toml +++ b/src/tools/rust-analyzer/crates/vfs-notify/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "vfs-notify" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "Implementation of `loader::Handle` for rust-analyzer." 
authors.workspace = true edition.workspace = true @@ -14,12 +15,14 @@ doctest = false [dependencies] tracing.workspace = true walkdir = "2.3.2" -crossbeam-channel = "0.5.5" +crossbeam-channel.workspace = true notify = "6.1.1" +rayon = "1.10.0" stdx.workspace = true vfs.workspace = true paths.workspace = true +rustc-hash.workspace = true [lints] -workspace = true \ No newline at end of file +workspace = true diff --git a/src/tools/rust-analyzer/crates/vfs-notify/src/lib.rs b/src/tools/rust-analyzer/crates/vfs-notify/src/lib.rs index 7e0f9af7af89f..0ae8b7baf464a 100644 --- a/src/tools/rust-analyzer/crates/vfs-notify/src/lib.rs +++ b/src/tools/rust-analyzer/crates/vfs-notify/src/lib.rs @@ -10,12 +10,15 @@ use std::{ fs, path::{Component, Path}, + sync::atomic::AtomicUsize, }; -use crossbeam_channel::{never, select, unbounded, Receiver, Sender}; -use notify::{Config, RecommendedWatcher, RecursiveMode, Watcher}; -use paths::{AbsPath, AbsPathBuf}; -use vfs::loader; +use crossbeam_channel::{select, unbounded, Receiver, Sender}; +use notify::{Config, EventKind, RecommendedWatcher, RecursiveMode, Watcher}; +use paths::{AbsPath, AbsPathBuf, Utf8PathBuf}; +use rayon::iter::{IndexedParallelIterator as _, IntoParallelIterator as _, ParallelIterator}; +use rustc_hash::FxHashSet; +use vfs::loader::{self, LoadingProgress}; use walkdir::WalkDir; #[derive(Debug)] @@ -59,7 +62,8 @@ type NotifyEvent = notify::Result; struct NotifyActor { sender: loader::Sender, - watched_entries: Vec, + watched_file_entries: FxHashSet, + watched_dir_entries: Vec, // Drop order is significant. watcher: Option<(RecommendedWatcher, Receiver)>, } @@ -72,14 +76,22 @@ enum Event { impl NotifyActor { fn new(sender: loader::Sender) -> NotifyActor { - NotifyActor { sender, watched_entries: Vec::new(), watcher: None } + NotifyActor { + sender, + watched_dir_entries: Vec::new(), + watched_file_entries: FxHashSet::default(), + watcher: None, + } } fn next_event(&self, receiver: &Receiver) -> Option { - let watcher_receiver = self.watcher.as_ref().map(|(_, receiver)| receiver); + let Some((_, watcher_receiver)) = &self.watcher else { + return receiver.recv().ok().map(Event::Message); + }; + select! { recv(receiver) -> it => it.ok().map(Event::Message), - recv(watcher_receiver.unwrap_or(&never())) -> it => Some(Event::NotifyEvent(it.unwrap())), + recv(watcher_receiver) -> it => Some(Event::NotifyEvent(it.unwrap())), } } @@ -94,7 +106,10 @@ impl NotifyActor { let (watcher_sender, watcher_receiver) = unbounded(); let watcher = log_notify_error(RecommendedWatcher::new( move |event| { - watcher_sender.send(event).unwrap(); + // we don't care about the error. If sending fails that usually + // means we were dropped, so unwrapping will just add to the + // panic noise. 
+ _ = watcher_sender.send(event); }, Config::default(), )); @@ -104,35 +119,74 @@ impl NotifyActor { let config_version = config.version; let n_total = config.load.len(); + self.watched_dir_entries.clear(); + self.watched_file_entries.clear(); + self.send(loader::Message::Progress { n_total, - n_done: None, + n_done: LoadingProgress::Started, config_version, dir: None, }); - self.watched_entries.clear(); + let (entry_tx, entry_rx) = unbounded(); + let (watch_tx, watch_rx) = unbounded(); + let processed = AtomicUsize::new(0); - for (i, entry) in config.load.into_iter().enumerate() { - let watch = config.watch.contains(&i); - if watch { - self.watched_entries.push(entry.clone()); + config.load.into_par_iter().enumerate().for_each(|(i, entry)| { + let do_watch = config.watch.contains(&i); + if do_watch { + _ = entry_tx.send(entry.clone()); } - let files = - self.load_entry(entry, watch, |file| loader::Message::Progress { - n_total, - n_done: Some(i), - dir: Some(file), - config_version, - }); + let files = Self::load_entry( + |f| _ = watch_tx.send(f.to_owned()), + entry, + do_watch, + |file| { + self.send(loader::Message::Progress { + n_total, + n_done: LoadingProgress::Progress( + processed.load(std::sync::atomic::Ordering::Relaxed), + ), + dir: Some(file), + config_version, + }); + }, + ); self.send(loader::Message::Loaded { files }); self.send(loader::Message::Progress { n_total, - n_done: Some(i + 1), + n_done: LoadingProgress::Progress( + processed.fetch_add(1, std::sync::atomic::Ordering::AcqRel) + 1, + ), config_version, dir: None, }); + }); + + drop(watch_tx); + for path in watch_rx { + self.watch(&path); } + + drop(entry_tx); + for entry in entry_rx { + match entry { + loader::Entry::Files(files) => { + self.watched_file_entries.extend(files) + } + loader::Entry::Directories(dir) => { + self.watched_dir_entries.push(dir) + } + } + } + + self.send(loader::Message::Progress { + n_total, + n_done: LoadingProgress::Finished, + config_version, + dir: None, + }); } Message::Invalidate(path) => { let contents = read(path.as_path()); @@ -142,55 +196,69 @@ impl NotifyActor { }, Event::NotifyEvent(event) => { if let Some(event) = log_notify_error(event) { - let files = event - .paths - .into_iter() - .map(|path| AbsPathBuf::try_from(path).unwrap()) - .filter_map(|path| { - let meta = fs::metadata(&path).ok()?; - if meta.file_type().is_dir() - && self - .watched_entries - .iter() - .any(|entry| entry.contains_dir(&path)) - { - self.watch(path); - return None; - } + if let EventKind::Create(_) | EventKind::Modify(_) | EventKind::Remove(_) = + event.kind + { + let files = event + .paths + .into_iter() + .filter_map(|path| { + Some( + AbsPathBuf::try_from( + Utf8PathBuf::from_path_buf(path).ok()?, + ) + .expect("path is absolute"), + ) + }) + .filter_map(|path| -> Option<(AbsPathBuf, Option>)> { + let meta = fs::metadata(&path).ok()?; + if meta.file_type().is_dir() + && self + .watched_dir_entries + .iter() + .any(|dir| dir.contains_dir(&path)) + { + self.watch(path.as_ref()); + return None; + } - if !meta.file_type().is_file() { - return None; - } - if !self - .watched_entries - .iter() - .any(|entry| entry.contains_file(&path)) - { - return None; - } + if !meta.file_type().is_file() { + return None; + } - let contents = read(&path); - Some((path, contents)) - }) - .collect(); - self.send(loader::Message::Changed { files }); + if !(self.watched_file_entries.contains(&path) + || self + .watched_dir_entries + .iter() + .any(|dir| dir.contains_file(&path))) + { + return None; + } + + let contents = 
read(&path); + Some((path, contents)) + }) + .collect(); + self.send(loader::Message::Changed { files }); + } } } } } } + fn load_entry( - &mut self, + mut watch: impl FnMut(&Path), entry: loader::Entry, - watch: bool, - make_message: impl Fn(AbsPathBuf) -> loader::Message, + do_watch: bool, + send_message: impl Fn(AbsPathBuf), ) -> Vec<(AbsPathBuf, Option>)> { match entry { loader::Entry::Files(files) => files .into_iter() .map(|file| { - if watch { - self.watch(file.clone()); + if do_watch { + watch(file.as_ref()); } let contents = read(file.as_path()); (file, contents) @@ -200,7 +268,7 @@ impl NotifyActor { let mut res = Vec::new(); for root in &dirs.include { - self.send(make_message(root.clone())); + send_message(root.clone()); let walkdir = WalkDir::new(root).follow_links(true).into_iter().filter_entry(|entry| { if !entry.file_type().is_dir() { @@ -208,7 +276,7 @@ impl NotifyActor { } let path = entry.path(); - if path_is_parent_symlink(path) { + if path_might_be_cyclic(path) { return false; } @@ -220,12 +288,15 @@ impl NotifyActor { let depth = entry.depth(); let is_dir = entry.file_type().is_dir(); let is_file = entry.file_type().is_file(); - let abs_path = AbsPathBuf::try_from(entry.into_path()).ok()?; + let abs_path = AbsPathBuf::try_from( + Utf8PathBuf::from_path_buf(entry.into_path()).ok()?, + ) + .ok()?; if depth < 2 && is_dir { - self.send(make_message(abs_path.clone())); + send_message(abs_path.clone()); } - if is_dir && watch { - self.watch(abs_path.clone()); + if is_dir && do_watch { + watch(abs_path.as_ref()); } if !is_file { return None; @@ -247,13 +318,15 @@ impl NotifyActor { } } - fn watch(&mut self, path: AbsPathBuf) { + fn watch(&mut self, path: &Path) { if let Some((watcher, _)) = &mut self.watcher { - log_notify_error(watcher.watch(path.as_ref(), RecursiveMode::NonRecursive)); + log_notify_error(watcher.watch(path, RecursiveMode::NonRecursive)); } } - fn send(&mut self, msg: loader::Message) { - (self.sender)(msg); + + #[track_caller] + fn send(&self, msg: loader::Message) { + self.sender.send(msg).unwrap(); } } @@ -271,7 +344,7 @@ fn log_notify_error(res: notify::Result) -> Option { /// heuristic is not sufficient to catch all symlink cycles (it's /// possible to construct cycle using two or more symlinks), but it /// catches common cases. -fn path_is_parent_symlink(path: &Path) -> bool { +fn path_might_be_cyclic(path: &Path) -> bool { let Ok(destination) = std::fs::read_link(path) else { return false; }; diff --git a/src/tools/rust-analyzer/crates/vfs/Cargo.toml b/src/tools/rust-analyzer/crates/vfs/Cargo.toml index 84f2110ebad15..e8a6195036ed7 100644 --- a/src/tools/rust-analyzer/crates/vfs/Cargo.toml +++ b/src/tools/rust-analyzer/crates/vfs/Cargo.toml @@ -1,7 +1,8 @@ [package] name = "vfs" version = "0.0.0" -description = "TBD" +repository.workspace = true +description = "A virtual file system for rust-analyzer." 
authors.workspace = true edition.workspace = true @@ -17,6 +18,7 @@ tracing.workspace = true fst = "0.4.7" indexmap.workspace = true nohash-hasher.workspace = true +crossbeam-channel.workspace = true paths.workspace = true stdx.workspace = true diff --git a/src/tools/rust-analyzer/crates/vfs/src/lib.rs b/src/tools/rust-analyzer/crates/vfs/src/lib.rs index 77f890fd7e0de..bc40e03c5a246 100644 --- a/src/tools/rust-analyzer/crates/vfs/src/lib.rs +++ b/src/tools/rust-analyzer/crates/vfs/src/lib.rs @@ -201,8 +201,8 @@ impl Vfs { pub fn set_file_contents(&mut self, path: VfsPath, contents: Option>) -> bool { let _p = span!(Level::INFO, "Vfs::set_file_contents").entered(); let file_id = self.alloc_file_id(path); - let state = self.get(file_id); - let change_kind = match (state, contents) { + let state: FileState = self.get(file_id); + let change = match (state, contents) { (FileState::Deleted, None) => return false, (FileState::Deleted, Some(v)) => { let hash = hash_once::(&*v); @@ -225,7 +225,7 @@ impl Vfs { }; }; - let changed_file = ChangedFile { file_id, change: change_kind }; + let changed_file = ChangedFile { file_id, change }; match self.changes.entry(file_id) { // two changes to the same file in one cycle, merge them appropriately Entry::Occupied(mut o) => { diff --git a/src/tools/rust-analyzer/crates/vfs/src/loader.rs b/src/tools/rust-analyzer/crates/vfs/src/loader.rs index 3af91b1af815b..f24354cb493ca 100644 --- a/src/tools/rust-analyzer/crates/vfs/src/loader.rs +++ b/src/tools/rust-analyzer/crates/vfs/src/loader.rs @@ -43,6 +43,13 @@ pub struct Config { pub watch: Vec, } +#[derive(Debug, Copy, Clone, PartialEq, Eq)] +pub enum LoadingProgress { + Started, + Progress(usize), + Finished, +} + /// Message about an action taken by a [`Handle`]. pub enum Message { /// Indicate a gradual progress. @@ -52,7 +59,7 @@ pub enum Message { /// The total files to be loaded. n_total: usize, /// The files that have been loaded successfully. - n_done: Option, + n_done: LoadingProgress, /// The dir being loaded, `None` if its for a file. dir: Option, /// The [`Config`] version. @@ -65,7 +72,7 @@ pub enum Message { } /// Type that will receive [`Messages`](Message) from a [`Handle`]. -pub type Sender = Box; +pub type Sender = crossbeam_channel::Sender; /// Interface for reading and watching files. 
pub trait Handle: fmt::Debug { diff --git a/src/tools/rust-analyzer/docs/dev/lsp-extensions.md b/src/tools/rust-analyzer/docs/dev/lsp-extensions.md index e559f88e23353..4786bd54d59be 100644 --- a/src/tools/rust-analyzer/docs/dev/lsp-extensions.md +++ b/src/tools/rust-analyzer/docs/dev/lsp-extensions.md @@ -1,5 +1,5 @@ multiple-dep-versions.rs:7:18 + | +7 | do_something(Type); + | ------------ ^^^^ the trait `Trait` is not implemented for `dep_2_reexport::Type` + | | + | required by a bound introduced by this call + | +help: there are multiple different versions of crate `dependency` the your dependency graph + --> multiple-dep-versions.rs:1:1 + | +1 | extern crate dep_2_reexport; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ one version of crate `dependency` is used here, as a dependency of crate `foo` +2 | extern crate dependency; + | ^^^^^^^^^^^^^^^^^^^^^^^^ one version of crate `dependency` is used here, as a direct dependency of the current crate"#, ) .assert_stderr_contains( - "two types coming from two different versions of the same crate are different types \ - even if they look the same", + r#" +3 | pub struct Type(pub i32); + | ^^^^^^^^^^^^^^^ this type implements the required trait +4 | pub trait Trait { + | --------------- this is the required trait"#, ) - .assert_stderr_contains("this type doesn't implement the required trait") - .assert_stderr_contains("this type implements the required trait") - .assert_stderr_contains("this is the required trait"); + .assert_stderr_contains( + r#" +3 | pub struct Type; + | ^^^^^^^^^^^^^^^ this type doesn't implement the required trait"#, + ) + .assert_stderr_contains( + r#" +error[E0599]: no method named `foo` found for struct `dep_2_reexport::Type` in the current scope + --> multiple-dep-versions.rs:8:10 + | +8 | Type.foo(); + | ^^^ method not found in `Type` + | +note: there are multiple different versions of crate `dependency` in the dependency graph"#, + ) + .assert_stderr_contains( + r#" +4 | pub trait Trait { + | ^^^^^^^^^^^^^^^ this is the trait that is needed +5 | fn foo(&self); + | -------------- the method is available for `dep_2_reexport::Type` here + | + ::: multiple-dep-versions.rs:4:32 + | +4 | use dependency::{do_something, Trait}; + | ----- `Trait` imported here doesn't correspond to the right version of crate `dependency`"#, + ) + .assert_stderr_contains( + r#" +4 | pub trait Trait { + | --------------- this is the trait that was imported"#, + ) + .assert_stderr_contains( + r#" +error[E0599]: no function or associated item named `bar` found for struct `dep_2_reexport::Type` in the current scope + --> multiple-dep-versions.rs:9:11 + | +9 | Type::bar(); + | ^^^ function or associated item not found in `Type` + | +note: there are multiple different versions of crate `dependency` in the dependency graph"#, + ) + .assert_stderr_contains( + r#" +4 | pub trait Trait { + | ^^^^^^^^^^^^^^^ this is the trait that is needed +5 | fn foo(&self); +6 | fn bar(); + | --------- the associated function is available for `dep_2_reexport::Type` here + | + ::: multiple-dep-versions.rs:4:32 + | +4 | use dependency::{do_something, Trait}; + | ----- `Trait` imported here doesn't correspond to the right version of crate `dependency`"#, + ); } diff --git a/tests/run-make/dep-info-doesnt-run-much/Makefile b/tests/run-make/dep-info-doesnt-run-much/Makefile deleted file mode 100644 index b4dc44ad2be84..0000000000000 --- a/tests/run-make/dep-info-doesnt-run-much/Makefile +++ /dev/null @@ -1,4 +0,0 @@ -include ../tools.mk - -all: - $(RUSTC) foo.rs --emit dep-info diff 
--git a/tests/run-make/dep-info-spaces/Makefile b/tests/run-make/dep-info-spaces/Makefile deleted file mode 100644 index 0cfe513e490a7..0000000000000 --- a/tests/run-make/dep-info-spaces/Makefile +++ /dev/null @@ -1,19 +0,0 @@ -include ../tools.mk - -# ignore-windows -# ignore-freebsd -# FIXME: (windows: see `../dep-info/Makefile`) - -all: - cp lib.rs $(TMPDIR)/ - cp 'foo foo.rs' $(TMPDIR)/ - cp bar.rs $(TMPDIR)/ - $(RUSTC) --emit link,dep-info --crate-type=lib $(TMPDIR)/lib.rs - sleep 1 - touch $(TMPDIR)/'foo foo.rs' - -rm -f $(TMPDIR)/done - $(MAKE) -drf Makefile.foo - rm $(TMPDIR)/done - pwd - $(MAKE) -drf Makefile.foo - rm $(TMPDIR)/done && exit 1 || exit 0 diff --git a/tests/run-make/dep-info-spaces/Makefile.foo b/tests/run-make/dep-info-spaces/Makefile.foo deleted file mode 100644 index 80a5d4333cdc2..0000000000000 --- a/tests/run-make/dep-info-spaces/Makefile.foo +++ /dev/null @@ -1,7 +0,0 @@ -LIB := $(shell $(RUSTC) --print file-names --crate-type=lib $(TMPDIR)/lib.rs) - -$(TMPDIR)/$(LIB): - $(RUSTC) --emit link,dep-info --crate-type=lib $(TMPDIR)/lib.rs - touch $(TMPDIR)/done - --include $(TMPDIR)/lib.d diff --git a/tests/run-make/dep-info-spaces/bar.rs b/tests/run-make/dep-info-spaces/bar.rs deleted file mode 100644 index c5c0bc606cd69..0000000000000 --- a/tests/run-make/dep-info-spaces/bar.rs +++ /dev/null @@ -1 +0,0 @@ -pub fn bar() {} diff --git a/tests/run-make/dep-info/Makefile b/tests/run-make/dep-info/Makefile deleted file mode 100644 index c76f43a8eed51..0000000000000 --- a/tests/run-make/dep-info/Makefile +++ /dev/null @@ -1,25 +0,0 @@ -include ../tools.mk - -# ignore-windows -# ignore-freebsd -# FIXME: on windows `rustc --dep-info` produces Makefile dependency with -# windows native paths (e.g. `c:\path\to\libfoo.a`) -# but msys make seems to fail to recognize such paths, so test fails. 
- -all: - cp *.rs $(TMPDIR) - $(RUSTC) --emit dep-info,link --crate-type=lib $(TMPDIR)/lib.rs - sleep 2 - touch $(TMPDIR)/foo.rs - -rm -f $(TMPDIR)/done - $(MAKE) -drf Makefile.foo - sleep 2 - rm $(TMPDIR)/done - pwd - $(MAKE) -drf Makefile.foo - rm $(TMPDIR)/done && exit 1 || exit 0 - - # When a source file is deleted `make` should still work - rm $(TMPDIR)/bar.rs - cp $(TMPDIR)/lib2.rs $(TMPDIR)/lib.rs - $(MAKE) -drf Makefile.foo diff --git a/tests/run-make/dep-info/Makefile.foo b/tests/run-make/dep-info/Makefile.foo deleted file mode 100644 index e5df31f88c1e1..0000000000000 --- a/tests/run-make/dep-info/Makefile.foo +++ /dev/null @@ -1,7 +0,0 @@ -LIB := $(shell $(RUSTC) --print file-names --crate-type=lib lib.rs) - -$(TMPDIR)/$(LIB): - $(RUSTC) --emit dep-info,link --crate-type=lib lib.rs - touch $(TMPDIR)/done - --include $(TMPDIR)/foo.d diff --git a/tests/run-make/dep-info-doesnt-run-much/foo.rs b/tests/run-make/dep-info/erroneous.rs similarity index 100% rename from tests/run-make/dep-info-doesnt-run-much/foo.rs rename to tests/run-make/dep-info/erroneous.rs diff --git a/tests/run-make/dep-info-spaces/foo foo.rs b/tests/run-make/dep-info/foo foo.rs similarity index 100% rename from tests/run-make/dep-info-spaces/foo foo.rs rename to tests/run-make/dep-info/foo foo.rs diff --git a/tests/run-make/dep-info-spaces/lib.rs b/tests/run-make/dep-info/lib_foofoo.rs similarity index 100% rename from tests/run-make/dep-info-spaces/lib.rs rename to tests/run-make/dep-info/lib_foofoo.rs diff --git a/tests/run-make/dep-info/rmake.rs b/tests/run-make/dep-info/rmake.rs new file mode 100644 index 0000000000000..508569b7671c1 --- /dev/null +++ b/tests/run-make/dep-info/rmake.rs @@ -0,0 +1,37 @@ +// This is a simple smoke test for rustc's `--emit dep-info` feature. It prints out +// information about dependencies in a Makefile-compatible format, as a `.d` file. +// Note that this test does not check that the `.d` file is Makefile-compatible. + +// This test first checks that emitting dep-info disables static analysis, preventing +// compilation of `erroneous.rs` from causing a compilation failure. +// Then, it checks that compilation using the flag is successful in general, even with +// empty source files or source files that contain a whitespace character. + +// Finally, it removes one dependency and checks that compilation is still successful. +// See https://github.com/rust-lang/rust/pull/10698 + +use run_make_support::{rfs, rustc}; + +fn main() { + // We're only emitting dep info, so we shouldn't be running static analysis to + // figure out that this program is erroneous. + rustc().input("erroneous.rs").emit("dep-info").run(); + + rustc().input("lib.rs").emit("dep-info,link").crate_type("lib").run(); + rfs::remove_file("foo.rs"); + rfs::create_file("foo.rs"); + // Compilation should succeed even if `foo.rs` is empty. + rustc().input("lib.rs").emit("dep-info,link").crate_type("lib").run(); + + // Again, with a space in the filename this time around. + rustc().input("lib_foofoo.rs").emit("dep-info,link").crate_type("lib").run(); + rfs::remove_file("foo foo.rs"); + rfs::create_file("foo foo.rs"); + // Compilation should succeed even if `foo foo.rs` is empty. + rustc().input("lib_foofoo.rs").emit("dep-info,link").crate_type("lib").run(); + + // When a source file is deleted, compilation should still succeed if the library + // also loses this source file dependency. 
+ rfs::remove_file("bar.rs"); + rustc().input("lib2.rs").emit("dep-info,link").crate_type("lib").run(); +} diff --git a/tests/run-make/deref-impl-rustdoc-ice/rmake.rs b/tests/run-make/deref-impl-rustdoc-ice/rmake.rs index 91fc0a9025fad..0ad5934f62e18 100644 --- a/tests/run-make/deref-impl-rustdoc-ice/rmake.rs +++ b/tests/run-make/deref-impl-rustdoc-ice/rmake.rs @@ -12,5 +12,5 @@ use run_make_support::{cwd, rustc, rustdoc}; fn main() { rustc().input("foo.rs").run(); rustc().input("bar.rs").run(); - rustdoc().input("baz.rs").library_search_path(cwd()).output(cwd()).run(); + rustdoc().input("baz.rs").library_search_path(cwd()).out_dir(cwd()).run(); } diff --git a/tests/run-make/doctests-keep-binaries-2024/rmake.rs b/tests/run-make/doctests-keep-binaries-2024/rmake.rs new file mode 100644 index 0000000000000..3e8ffcbf24457 --- /dev/null +++ b/tests/run-make/doctests-keep-binaries-2024/rmake.rs @@ -0,0 +1,67 @@ +// Check that valid binaries are persisted by running them, regardless of whether the +// --run or --no-run option is used. + +//@ ignore-cross-compile + +use std::path::Path; + +use run_make_support::{rfs, run, rustc, rustdoc}; + +fn setup_test_env(callback: F) { + let out_dir = Path::new("doctests"); + rfs::create_dir(&out_dir); + rustc().input("t.rs").crate_type("rlib").run(); + callback(&out_dir, Path::new("libt.rlib")); + rfs::remove_dir_all(out_dir); +} + +fn check_generated_binaries() { + run("doctests/merged_doctest_2024/rust_out"); +} + +fn main() { + setup_test_env(|out_dir, extern_path| { + rustdoc() + .input("t.rs") + .arg("-Zunstable-options") + .arg("--test") + .arg("--persist-doctests") + .arg(out_dir) + .extern_("t", extern_path) + .edition("2024") + .run(); + check_generated_binaries(); + }); + setup_test_env(|out_dir, extern_path| { + rustdoc() + .input("t.rs") + .arg("-Zunstable-options") + .arg("--test") + .arg("--persist-doctests") + .arg(out_dir) + .extern_("t", extern_path) + .arg("--no-run") + .edition("2024") + .run(); + check_generated_binaries(); + }); + // Behavior with --test-run-directory with relative paths. + setup_test_env(|_, _| { + let run_dir_path = Path::new("rundir"); + rfs::create_dir(&run_dir_path); + + rustdoc() + .input("t.rs") + .arg("-Zunstable-options") + .arg("--test") + .arg("--persist-doctests") + .arg("doctests") + .arg("--test-run-directory") + .arg(run_dir_path) + .extern_("t", "libt.rlib") + .edition("2024") + .run(); + + rfs::remove_dir_all(run_dir_path); + }); +} diff --git a/tests/run-make/doctests-keep-binaries-2024/t.rs b/tests/run-make/doctests-keep-binaries-2024/t.rs new file mode 100644 index 0000000000000..c38cf0a0b25d4 --- /dev/null +++ b/tests/run-make/doctests-keep-binaries-2024/t.rs @@ -0,0 +1,11 @@ +/// Fungle the foople. +/// ``` +/// t::foople(); +/// ``` +pub fn foople() {} + +/// Flomble the florp +/// ``` +/// t::florp(); +/// ``` +pub fn florp() {} diff --git a/tests/run-make/doctests-merge/doctest-2021.stdout b/tests/run-make/doctests-merge/doctest-2021.stdout new file mode 100644 index 0000000000000..7da08d68faae3 --- /dev/null +++ b/tests/run-make/doctests-merge/doctest-2021.stdout @@ -0,0 +1,7 @@ + +running 2 tests +test doctest.rs - (line 4) ... ok +test doctest.rs - init (line 8) ... ok + +test result: ok. 
2 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in $TIME + diff --git a/tests/run-make/doctests-merge/doctest-2024.stdout b/tests/run-make/doctests-merge/doctest-2024.stdout new file mode 100644 index 0000000000000..7da08d68faae3 --- /dev/null +++ b/tests/run-make/doctests-merge/doctest-2024.stdout @@ -0,0 +1,7 @@ + +running 2 tests +test doctest.rs - (line 4) ... ok +test doctest.rs - init (line 8) ... ok + +test result: ok. 2 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in $TIME + diff --git a/tests/run-make/doctests-merge/doctest-standalone.rs b/tests/run-make/doctests-merge/doctest-standalone.rs new file mode 100644 index 0000000000000..134ffb58285e8 --- /dev/null +++ b/tests/run-make/doctests-merge/doctest-standalone.rs @@ -0,0 +1,18 @@ +#![crate_name = "foo"] +#![crate_type = "lib"] + +//! ```standalone +//! foo::init(); +//! ``` + +/// ```standalone +/// foo::init(); +/// ``` +pub fn init() { + static mut IS_INIT: bool = false; + + unsafe { + assert!(!IS_INIT); + IS_INIT = true; + } +} diff --git a/tests/run-make/doctests-merge/doctest-standalone.stdout b/tests/run-make/doctests-merge/doctest-standalone.stdout new file mode 100644 index 0000000000000..ee9f62326ab02 --- /dev/null +++ b/tests/run-make/doctests-merge/doctest-standalone.stdout @@ -0,0 +1,7 @@ + +running 2 tests +test doctest-standalone.rs - (line 4) ... ok +test doctest-standalone.rs - init (line 8) ... ok + +test result: ok. 2 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in $TIME + diff --git a/tests/run-make/doctests-merge/doctest.rs b/tests/run-make/doctests-merge/doctest.rs new file mode 100644 index 0000000000000..66a5d88db67f4 --- /dev/null +++ b/tests/run-make/doctests-merge/doctest.rs @@ -0,0 +1,18 @@ +#![crate_name = "foo"] +#![crate_type = "lib"] + +//! ``` +//! foo::init(); +//! ``` + +/// ``` +/// foo::init(); +/// ``` +pub fn init() { + static mut IS_INIT: bool = false; + + unsafe { + assert!(!IS_INIT); + IS_INIT = true; + } +} diff --git a/tests/run-make/doctests-merge/rmake.rs b/tests/run-make/doctests-merge/rmake.rs new file mode 100644 index 0000000000000..a25da7403e24b --- /dev/null +++ b/tests/run-make/doctests-merge/rmake.rs @@ -0,0 +1,39 @@ +use std::path::Path; + +use run_make_support::{cwd, diff, rustc, rustdoc}; + +fn test_and_compare(input_file: &str, stdout_file: &str, edition: &str, dep: &Path) { + let mut cmd = rustdoc(); + + let output = cmd + .input(input_file) + .arg("--test") + .arg("-Zunstable-options") + .edition(edition) + .arg("--test-args=--test-threads=1") + .extern_("foo", dep.display().to_string()) + .env("RUST_BACKTRACE", "short") + .run(); + + diff() + .expected_file(stdout_file) + .actual_text("output", output.stdout_utf8()) + .normalize(r#"finished in \d+\.\d+s"#, "finished in $$TIME") + .run(); +} + +fn main() { + let out_file = cwd().join("libfoo.rlib"); + + rustc().input("doctest.rs").crate_type("rlib").output(&out_file).run(); + + // First we ensure that running with the 2024 edition will not fail at runtime. + test_and_compare("doctest.rs", "doctest-2024.stdout", "2024", &out_file); + + // Then we ensure that running with an edition < 2024 will not fail at runtime. + test_and_compare("doctest.rs", "doctest-2021.stdout", "2021", &out_file); + + // Now we check with the standalone attribute which should succeed in all cases. 
+ test_and_compare("doctest-standalone.rs", "doctest-standalone.stdout", "2024", &out_file); + test_and_compare("doctest-standalone.rs", "doctest-standalone.stdout", "2021", &out_file); +} diff --git a/tests/run-make/dump-ice-to-disk/rmake.rs b/tests/run-make/dump-ice-to-disk/rmake.rs index 260abf3aa6fdc..08767246b90d0 100644 --- a/tests/run-make/dump-ice-to-disk/rmake.rs +++ b/tests/run-make/dump-ice-to-disk/rmake.rs @@ -1,137 +1,200 @@ -// This test checks if internal compilation error (ICE) log files work as expected. -// - Get the number of lines from the log files without any configuration options, -// then check that the line count doesn't change if the backtrace gets configured to be short -// or full. -// - Check that disabling ICE logging results in zero files created. -// - Check that the ICE files contain some of the expected strings. -// - exercise the -Zmetrics-dir nightly flag -// - verify what happens when both the nightly flag and env variable are set -// - test the RUST_BACKTRACE=0 behavior against the file creation +//! This test checks if Internal Compilation Error (ICE) dump files `rustc-ice*.txt` work as +//! expected. +//! +//! - Basic sanity checks on a default ICE dump. +//! - Get the number of lines from the dump files without any `RUST_BACKTRACE` options, then check +//! ICE dump file (line count) is not affected by `RUSTC_BACKTRACE` settings. +//! - Check that disabling ICE dumping results in zero dump files created. +//! - Check that the ICE dump contain some of the expected strings. +//! - Check that `RUST_BACKTRACE=0` prevents ICE dump from created. +//! - Exercise the `-Zmetrics-dir` nightly flag (#128914): +//! - When `-Zmetrics=dir=PATH` is present but `RUSTC_ICE` is not set, check that the ICE dump +//! is placed under `PATH`. +//! - When `RUSTC_ICE=RUSTC_ICE_PATH` and `-Zmetrics-dir=METRICS_PATH` are both provided, check +//! that `RUSTC_ICE_PATH` takes precedence and no ICE dump is emitted under `METRICS_PATH`. +//! +//! See . +//! +//! # Test history +//! +//! - The previous rmake.rs iteration of this test was flakey for unknown reason on `i686-mingw` +//! *specifically*, so assertion failures in this test was made extremely verbose to help +//! diagnose why the ICE messages was different *specifically* on `i686-mingw`. +//! - An attempt is made to re-enable this test on `i686-mingw` (by removing `ignore-windows`). If +//! this test is still flakey, please restore the `ignore-windows` directive. 
-// See https://github.com/rust-lang/rust/pull/108714 +use std::cell::OnceCell; +use std::path::{Path, PathBuf}; -use run_make_support::{cwd, has_extension, has_prefix, rfs, rustc, shallow_find_files}; +use run_make_support::{ + cwd, filename_contains, has_extension, has_prefix, rfs, run_in_tmpdir, rustc, + shallow_find_files, +}; -fn main() { - rustc().env("RUSTC_ICE", cwd()).input("lib.rs").arg("-Ztreat-err-as-bug=1").run_fail(); - let default = get_text_from_ice(".").lines().count(); - - clear_ice_files(); - rustc().env("RUSTC_ICE", cwd()).input("lib.rs").arg("-Ztreat-err-as-bug=1").run_fail(); - let ice_text = get_text_from_ice(cwd()); - let default_set = ice_text.lines().count(); - let content = ice_text; - let ice_files = shallow_find_files(cwd(), |path| { - has_prefix(path, "rustc-ice") && has_extension(path, "txt") - }); +#[derive(Debug)] +struct IceDump { + name: &'static str, + path: PathBuf, + message: String, +} + +impl IceDump { + fn lines_count(&self) -> usize { + self.message.lines().count() + } +} + +#[track_caller] +fn assert_ice_len_equals(left: &IceDump, right: &IceDump) { + let left_len = left.lines_count(); + let right_len = right.lines_count(); + + if left_len != right_len { + eprintln!("=== {} ICE MESSAGE ({} lines) ====", left.name, left_len); + eprintln!("{}", left.message); + + eprintln!("=== {} ICE MESSAGE ({} lines) ====", right.name, right_len); + eprintln!("{}", right.message); + + eprintln!("===================================="); + panic!( + "ICE message length mismatch: {} has {} lines but {} has {} lines", + left.name, left_len, right.name, right_len + ); + } +} + +fn find_ice_dumps_in_dir>(dir: P) -> Vec { + shallow_find_files(dir, |path| has_prefix(path, "rustc-ice") && has_extension(path, "txt")) +} + +// Assert only one `rustc-ice*.txt` ICE file exists, and extract the ICE message from the ICE file. +#[track_caller] +fn extract_exactly_one_ice_file>(name: &'static str, dir: P) -> IceDump { + let ice_files = find_ice_dumps_in_dir(dir); assert_eq!(ice_files.len(), 1); // There should only be 1 ICE file. - let ice_file_name = - ice_files.first().and_then(|f| f.file_name()).and_then(|n| n.to_str()).unwrap(); - // Ensure that the ICE dump path doesn't contain `:`, because they cause problems on Windows. - assert!(!ice_file_name.contains(":"), "{ice_file_name}"); - assert_eq!(default, default_set); - assert!(default > 0); - // Some of the expected strings in an ICE file should appear. - assert!(content.contains("thread 'rustc' panicked at")); - assert!(content.contains("stack backtrace:")); - - test_backtrace_short(default); - test_backtrace_full(default); - test_backtrace_disabled(default); - - clear_ice_files(); - // The ICE dump is explicitly disabled. Therefore, this should produce no files. - rustc().env("RUSTC_ICE", "0").input("lib.rs").arg("-Ztreat-err-as-bug=1").run_fail(); - let ice_files = shallow_find_files(cwd(), |path| { - has_prefix(path, "rustc-ice") && has_extension(path, "txt") + let path = ice_files.get(0).unwrap(); + let message = rfs::read_to_string(path); + IceDump { name, path: path.to_path_buf(), message } +} + +fn main() { + // Establish baseline ICE message. + let mut default_ice_dump = OnceCell::new(); + run_in_tmpdir(|| { + rustc().env("RUSTC_ICE", cwd()).input("lib.rs").arg("-Ztreat-err-as-bug=1").run_fail(); + let dump = extract_exactly_one_ice_file("baseline", cwd()); + // Ensure that the ICE dump path doesn't contain `:`, because they cause problems on + // Windows. 
+ assert!(!filename_contains(&dump.path, ":"), "{} contains `:`", dump.path.display()); + // Some of the expected strings in an ICE file should appear. + assert!(dump.message.contains("thread 'rustc' panicked at")); + assert!(dump.message.contains("stack backtrace:")); + default_ice_dump.set(dump).unwrap(); }); - assert!(ice_files.is_empty()); // There should be 0 ICE files. + let default_ice_dump = default_ice_dump.get().unwrap(); - metrics_dir(default); -} + test_backtrace_short(default_ice_dump); + test_backtrace_full(default_ice_dump); + test_backtrace_disabled(default_ice_dump); + test_ice_dump_disabled(); -fn test_backtrace_short(baseline: usize) { - clear_ice_files(); - rustc() - .env("RUSTC_ICE", cwd()) - .input("lib.rs") - .env("RUST_BACKTRACE", "short") - .arg("-Ztreat-err-as-bug=1") - .run_fail(); - let short = get_text_from_ice(cwd()).lines().count(); - // backtrace length in dump shouldn't be changed by RUST_BACKTRACE - assert_eq!(short, baseline); + test_metrics_dir(default_ice_dump); } -fn test_backtrace_full(baseline: usize) { - clear_ice_files(); - rustc() - .env("RUSTC_ICE", cwd()) - .input("lib.rs") - .env("RUST_BACKTRACE", "full") - .arg("-Ztreat-err-as-bug=1") - .run_fail(); - let full = get_text_from_ice(cwd()).lines().count(); - // backtrace length in dump shouldn't be changed by RUST_BACKTRACE - assert_eq!(full, baseline); +#[track_caller] +fn test_backtrace_short(baseline: &IceDump) { + run_in_tmpdir(|| { + rustc() + .env("RUSTC_ICE", cwd()) + .input("lib.rs") + .env("RUST_BACKTRACE", "short") + .arg("-Ztreat-err-as-bug=1") + .run_fail(); + let dump = extract_exactly_one_ice_file("RUST_BACKTRACE=short", cwd()); + // Backtrace length in dump shouldn't be changed by `RUST_BACKTRACE`. + assert_ice_len_equals(baseline, &dump); + }); } -fn test_backtrace_disabled(baseline: usize) { - clear_ice_files(); - rustc() - .env("RUSTC_ICE", cwd()) - .input("lib.rs") - .env("RUST_BACKTRACE", "0") - .arg("-Ztreat-err-as-bug=1") - .run_fail(); - let disabled = get_text_from_ice(cwd()).lines().count(); - // backtrace length in dump shouldn't be changed by RUST_BACKTRACE - assert_eq!(disabled, baseline); +#[track_caller] +fn test_backtrace_full(baseline: &IceDump) { + run_in_tmpdir(|| { + rustc() + .env("RUSTC_ICE", cwd()) + .input("lib.rs") + .env("RUST_BACKTRACE", "full") + .arg("-Ztreat-err-as-bug=1") + .run_fail(); + let dump = extract_exactly_one_ice_file("RUST_BACKTRACE=full", cwd()); + // Backtrace length in dump shouldn't be changed by `RUST_BACKTRACE`. + assert_ice_len_equals(baseline, &dump); + }); } -fn metrics_dir(baseline: usize) { - test_flag_only(baseline); - test_flag_and_env(baseline); +#[track_caller] +fn test_backtrace_disabled(baseline: &IceDump) { + run_in_tmpdir(|| { + rustc() + .env("RUSTC_ICE", cwd()) + .input("lib.rs") + .env("RUST_BACKTRACE", "0") + .arg("-Ztreat-err-as-bug=1") + .run_fail(); + let dump = extract_exactly_one_ice_file("RUST_BACKTRACE=disabled", cwd()); + // Backtrace length in dump shouldn't be changed by `RUST_BACKTRACE`. + assert_ice_len_equals(baseline, &dump); + }); } -fn test_flag_only(baseline: usize) { - clear_ice_files(); - let metrics_arg = format!("-Zmetrics-dir={}", cwd().display()); - rustc().input("lib.rs").arg("-Ztreat-err-as-bug=1").arg(metrics_arg).run_fail(); - let output = get_text_from_ice(cwd()).lines().count(); - assert_eq!(output, baseline); +#[track_caller] +fn test_ice_dump_disabled() { + // The ICE dump is explicitly disabled. Therefore, this should produce no files. 
+ run_in_tmpdir(|| { + rustc().env("RUSTC_ICE", "0").input("lib.rs").arg("-Ztreat-err-as-bug=1").run_fail(); + let ice_files = find_ice_dumps_in_dir(cwd()); + assert!(ice_files.is_empty(), "there should be no ICE files if `RUSTC_ICE=0` is set"); + }); } -fn test_flag_and_env(baseline: usize) { - clear_ice_files(); - let metrics_arg = format!("-Zmetrics-dir={}", cwd().display()); - let real_dir = cwd().join("actually_put_ice_here"); - rfs::create_dir(real_dir.clone()); - rustc() - .input("lib.rs") - .env("RUSTC_ICE", real_dir.clone()) - .arg("-Ztreat-err-as-bug=1") - .arg(metrics_arg) - .run_fail(); - let output = get_text_from_ice(real_dir).lines().count(); - assert_eq!(output, baseline); +#[track_caller] +fn test_metrics_dir(baseline: &IceDump) { + test_flag_only(baseline); + test_flag_and_env(baseline); } -fn clear_ice_files() { - let ice_files = shallow_find_files(cwd(), |path| { - has_prefix(path, "rustc-ice") && has_extension(path, "txt") +#[track_caller] +fn test_flag_only(baseline: &IceDump) { + run_in_tmpdir(|| { + let metrics_arg = format!("-Zmetrics-dir={}", cwd().display()); + rustc() + .env_remove("RUSTC_ICE") // prevent interference from environment + .input("lib.rs") + .arg("-Ztreat-err-as-bug=1") + .arg(metrics_arg) + .run_fail(); + let dump = extract_exactly_one_ice_file("-Zmetrics-dir only", cwd()); + assert_ice_len_equals(baseline, &dump); }); - for file in ice_files { - rfs::remove_file(file); - } } #[track_caller] -fn get_text_from_ice(dir: impl AsRef) -> String { - let ice_files = - shallow_find_files(dir, |path| has_prefix(path, "rustc-ice") && has_extension(path, "txt")); - assert_eq!(ice_files.len(), 1); // There should only be 1 ICE file. - let ice_file = ice_files.get(0).unwrap(); - let output = rfs::read_to_string(ice_file); - output +fn test_flag_and_env(baseline: &IceDump) { + run_in_tmpdir(|| { + let metrics_arg = format!("-Zmetrics-dir={}", cwd().display()); + let real_dir = cwd().join("actually_put_ice_here"); + rfs::create_dir(&real_dir); + rustc() + .input("lib.rs") + .env("RUSTC_ICE", &real_dir) + .arg("-Ztreat-err-as-bug=1") + .arg(metrics_arg) + .run_fail(); + + let cwd_ice_files = find_ice_dumps_in_dir(cwd()); + assert!(cwd_ice_files.is_empty(), "RUSTC_ICE should override -Zmetrics-dir"); + + let dump = extract_exactly_one_ice_file("RUSTC_ICE overrides -Zmetrics-dir", real_dir); + assert_ice_len_equals(baseline, &dump); + }); } diff --git a/tests/run-make/dylib-soname/rmake.rs b/tests/run-make/dylib-soname/rmake.rs index a0215a6906e47..cec0d46384246 100644 --- a/tests/run-make/dylib-soname/rmake.rs +++ b/tests/run-make/dylib-soname/rmake.rs @@ -4,7 +4,6 @@ //@ only-linux //@ ignore-cross-compile -use run_make_support::regex::Regex; use run_make_support::{cmd, run_in_tmpdir, rustc}; fn main() { diff --git a/tests/run-make/emit-shared-files/rmake.rs b/tests/run-make/emit-shared-files/rmake.rs index 8ac9073e993ba..e5482af10bb94 100644 --- a/tests/run-make/emit-shared-files/rmake.rs +++ b/tests/run-make/emit-shared-files/rmake.rs @@ -13,7 +13,7 @@ fn main() { rustdoc() .arg("-Zunstable-options") .arg("--emit=invocation-specific") - .output("invocation-only") + .out_dir("invocation-only") .arg("--resource-suffix=-xxx") .args(&["--theme", "y.css"]) .args(&["--extend-css", "z.css"]) @@ -34,7 +34,7 @@ fn main() { rustdoc() .arg("-Zunstable-options") .arg("--emit=toolchain-shared-resources") - .output("toolchain-only") + .out_dir("toolchain-only") .arg("--resource-suffix=-xxx") .args(&["--extend-css", "z.css"]) .input("x.rs") @@ -68,7 +68,7 @@ fn main() { 
rustdoc() .arg("-Zunstable-options") .arg("--emit=toolchain-shared-resources,unversioned-shared-resources") - .output("all-shared") + .out_dir("all-shared") .arg("--resource-suffix=-xxx") .args(&["--extend-css", "z.css"]) .input("x.rs") diff --git a/tests/run-make/exit-code/rmake.rs b/tests/run-make/exit-code/rmake.rs index f290554831db7..d3dcc04428cb6 100644 --- a/tests/run-make/exit-code/rmake.rs +++ b/tests/run-make/exit-code/rmake.rs @@ -16,7 +16,7 @@ fn main() { .run_fail() .assert_exit_code(101); - rustdoc().arg("success.rs").output("exit-code").run(); + rustdoc().arg("success.rs").out_dir("exit-code").run(); rustdoc().arg("--invalid-arg-foo").run_fail().assert_exit_code(1); diff --git a/tests/run-make/extern-flag-disambiguates/rmake.rs b/tests/run-make/extern-flag-disambiguates/rmake.rs index 2d7d7f69f66fa..89cda02eaeae6 100644 --- a/tests/run-make/extern-flag-disambiguates/rmake.rs +++ b/tests/run-make/extern-flag-disambiguates/rmake.rs @@ -1,6 +1,6 @@ //@ ignore-cross-compile -use run_make_support::{cwd, run, rustc}; +use run_make_support::{run, rustc}; // Attempt to build this dependency tree: // diff --git a/tests/run-make/git_clone_sha1.sh b/tests/run-make/git_clone_sha1.sh deleted file mode 100644 index 626e4e4276121..0000000000000 --- a/tests/run-make/git_clone_sha1.sh +++ /dev/null @@ -1,23 +0,0 @@ -#!/bin/bash -x - -# Usage: $0 project_name url sha1 -# Get the crate with the specified sha1. -# -# all arguments are required. -# -# See below link for git usage: -# https://stackoverflow.com/questions/3489173#14091182 - -# Mandatory arguments: -PROJECT_NAME=$1 -URL=$2 -SHA1=$3 - -function err_exit() { - echo "ERROR:" $* - exit 1 -} - -git clone $URL $PROJECT_NAME || err_exit -cd $PROJECT_NAME || err_exit -git reset --hard $SHA1 || err_exit diff --git a/tests/run-make/ice-dep-cannot-find-dep/rmake.rs b/tests/run-make/ice-dep-cannot-find-dep/rmake.rs index 8ba3a0366b002..1c136773f0155 100644 --- a/tests/run-make/ice-dep-cannot-find-dep/rmake.rs +++ b/tests/run-make/ice-dep-cannot-find-dep/rmake.rs @@ -16,7 +16,7 @@ // If we used `rustc` the additional '-L rmake_out' option would allow rustc to // actually find the crate. -use run_make_support::{bare_rustc, rfs, rust_lib_name, rustc}; +use run_make_support::{bare_rustc, rust_lib_name, rustc}; fn main() { rustc().crate_name("a").crate_type("rlib").input("a.rs").arg("--verbose").run(); diff --git a/tests/run-make/incr-test-moved-file/rmake.rs b/tests/run-make/incr-test-moved-file/rmake.rs index f314e110a8c33..0ba1b0d58feeb 100644 --- a/tests/run-make/incr-test-moved-file/rmake.rs +++ b/tests/run-make/incr-test-moved-file/rmake.rs @@ -14,7 +14,7 @@ //@ ignore-nvptx64-nvidia-cuda // FIXME: can't find crate for 'std' -use run_make_support::{rfs, rust_lib_name, rustc}; +use run_make_support::{rfs, rustc}; fn main() { rfs::create_dir("incr"); diff --git a/tests/run-make/incremental-debugger-visualizer/rmake.rs b/tests/run-make/incremental-debugger-visualizer/rmake.rs index 58a7e415bc58b..07c920cc04a6f 100644 --- a/tests/run-make/incremental-debugger-visualizer/rmake.rs +++ b/tests/run-make/incremental-debugger-visualizer/rmake.rs @@ -2,8 +2,6 @@ // (in this case, foo.py and foo.natvis) are picked up when compiling incrementally. 
// See https://github.com/rust-lang/rust/pull/111641 -use std::io::Read; - use run_make_support::{invalid_utf8_contains, invalid_utf8_not_contains, rfs, rustc}; fn main() { diff --git a/tests/run-make/libtest-json/Makefile b/tests/run-make/libtest-json/Makefile deleted file mode 100644 index c8bc7b5dd4a4c..0000000000000 --- a/tests/run-make/libtest-json/Makefile +++ /dev/null @@ -1,20 +0,0 @@ -# ignore-cross-compile -# needs-unwind -include ../tools.mk - -# Test expected libtest's JSON output - -OUTPUT_FILE_DEFAULT := $(TMPDIR)/libtest-json-output-default.json -OUTPUT_FILE_STDOUT_SUCCESS := $(TMPDIR)/libtest-json-output-stdout-success.json - -all: f.rs validate_json.py output-default.json output-stdout-success.json - $(RUSTC) --test f.rs - RUST_BACKTRACE=0 $(call RUN,f) -Z unstable-options --test-threads=1 --format=json > $(OUTPUT_FILE_DEFAULT) || true - RUST_BACKTRACE=0 $(call RUN,f) -Z unstable-options --test-threads=1 --format=json --show-output > $(OUTPUT_FILE_STDOUT_SUCCESS) || true - - cat $(OUTPUT_FILE_DEFAULT) | "$(PYTHON)" validate_json.py - cat $(OUTPUT_FILE_STDOUT_SUCCESS) | "$(PYTHON)" validate_json.py - - # Normalize the actual output and compare to expected output file - cat $(OUTPUT_FILE_DEFAULT) | sed 's/"exec_time": [0-9.]*/"exec_time": $$TIME/' | diff output-default.json - - cat $(OUTPUT_FILE_STDOUT_SUCCESS) | sed 's/"exec_time": [0-9.]*/"exec_time": $$TIME/' | diff output-stdout-success.json - diff --git a/tests/run-make/libtest-json/output-default.json b/tests/run-make/libtest-json/output-default.json index 01710f59e5d74..a2293a032d012 100644 --- a/tests/run-make/libtest-json/output-default.json +++ b/tests/run-make/libtest-json/output-default.json @@ -7,4 +7,4 @@ { "type": "test", "name": "c", "event": "ok" } { "type": "test", "event": "started", "name": "d" } { "type": "test", "name": "d", "event": "ignored", "message": "msg" } -{ "type": "suite", "event": "failed", "passed": 2, "failed": 1, "ignored": 1, "measured": 0, "filtered_out": 0, "exec_time": $TIME } +{ "type": "suite", "event": "failed", "passed": 2, "failed": 1, "ignored": 1, "measured": 0, "filtered_out": 0, "exec_time": "$EXEC_TIME" } diff --git a/tests/run-make/libtest-json/output-stdout-success.json b/tests/run-make/libtest-json/output-stdout-success.json index 878eb6c7c260d..cf92f01f63aca 100644 --- a/tests/run-make/libtest-json/output-stdout-success.json +++ b/tests/run-make/libtest-json/output-stdout-success.json @@ -7,4 +7,4 @@ { "type": "test", "name": "c", "event": "ok", "stdout": "thread 'c' panicked at f.rs:15:5:\nassertion failed: false\n" } { "type": "test", "event": "started", "name": "d" } { "type": "test", "name": "d", "event": "ignored", "message": "msg" } -{ "type": "suite", "event": "failed", "passed": 2, "failed": 1, "ignored": 1, "measured": 0, "filtered_out": 0, "exec_time": $TIME } +{ "type": "suite", "event": "failed", "passed": 2, "failed": 1, "ignored": 1, "measured": 0, "filtered_out": 0, "exec_time": "$EXEC_TIME" } diff --git a/tests/run-make/libtest-json/rmake.rs b/tests/run-make/libtest-json/rmake.rs new file mode 100644 index 0000000000000..acbd88dc46cb9 --- /dev/null +++ b/tests/run-make/libtest-json/rmake.rs @@ -0,0 +1,31 @@ +// Check libtest's JSON output against snapshots. 
+ +//@ ignore-cross-compile +//@ needs-unwind (test file contains #[should_panic] test) + +use run_make_support::{cmd, diff, python_command, rustc}; + +fn main() { + rustc().arg("--test").input("f.rs").run(); + + run_tests(&[], "output-default.json"); + run_tests(&["--show-output"], "output-stdout-success.json"); +} + +#[track_caller] +fn run_tests(extra_args: &[&str], expected_file: &str) { + let cmd_out = cmd("./f") + .env("RUST_BACKTRACE", "0") + .args(&["-Zunstable-options", "--test-threads=1", "--format=json"]) + .args(extra_args) + .run_fail(); + let test_stdout = &cmd_out.stdout_utf8(); + + python_command().arg("validate_json.py").stdin(test_stdout).run(); + + diff() + .expected_file(expected_file) + .actual_text("stdout", test_stdout) + .normalize(r#"(?"exec_time": )[0-9.]+"#, r#"${prefix}"$$EXEC_TIME""#) + .run(); +} diff --git a/tests/run-make/libtest-junit/Makefile b/tests/run-make/libtest-junit/Makefile deleted file mode 100644 index 26e56242dd239..0000000000000 --- a/tests/run-make/libtest-junit/Makefile +++ /dev/null @@ -1,20 +0,0 @@ -# ignore-cross-compile -# needs-unwind contains should_panic test -include ../tools.mk - -# Test expected libtest's junit output - -OUTPUT_FILE_DEFAULT := $(TMPDIR)/libtest-junit-output-default.xml -OUTPUT_FILE_STDOUT_SUCCESS := $(TMPDIR)/libtest-junit-output-stdout-success.xml - -all: f.rs validate_junit.py output-default.xml output-stdout-success.xml - $(RUSTC) --test f.rs - RUST_BACKTRACE=0 $(call RUN,f) -Z unstable-options --test-threads=1 --format=junit > $(OUTPUT_FILE_DEFAULT) || true - RUST_BACKTRACE=0 $(call RUN,f) -Z unstable-options --test-threads=1 --format=junit --show-output > $(OUTPUT_FILE_STDOUT_SUCCESS) || true - - cat $(OUTPUT_FILE_DEFAULT) | "$(PYTHON)" validate_junit.py - cat $(OUTPUT_FILE_STDOUT_SUCCESS) | "$(PYTHON)" validate_junit.py - - # Normalize the actual output and compare to expected output file - cat $(OUTPUT_FILE_DEFAULT) | sed 's/time="[0-9.]*"/time="$$TIME"/g' | diff output-default.xml - - cat $(OUTPUT_FILE_STDOUT_SUCCESS) | sed 's/time="[0-9.]*"/time="$$TIME"/g' | diff output-stdout-success.xml - diff --git a/tests/run-make/libtest-junit/rmake.rs b/tests/run-make/libtest-junit/rmake.rs new file mode 100644 index 0000000000000..d631313ed9210 --- /dev/null +++ b/tests/run-make/libtest-junit/rmake.rs @@ -0,0 +1,31 @@ +// Check libtest's JUnit (XML) output against snapshots. 
+ +//@ ignore-cross-compile +//@ needs-unwind (test file contains #[should_panic] test) + +use run_make_support::{cmd, diff, python_command, rustc}; + +fn main() { + rustc().arg("--test").input("f.rs").run(); + + run_tests(&[], "output-default.xml"); + run_tests(&["--show-output"], "output-stdout-success.xml"); +} + +#[track_caller] +fn run_tests(extra_args: &[&str], expected_file: &str) { + let cmd_out = cmd("./f") + .env("RUST_BACKTRACE", "0") + .args(&["-Zunstable-options", "--test-threads=1", "--format=junit"]) + .args(extra_args) + .run_fail(); + let test_stdout = &cmd_out.stdout_utf8(); + + python_command().arg("validate_junit.py").stdin(test_stdout).run(); + + diff() + .expected_file(expected_file) + .actual_text("stdout", test_stdout) + .normalize(r#"\btime="[0-9.]+""#, r#"time="$$TIME""#) + .run(); +} diff --git a/tests/run-make/lto-readonly-lib/rmake.rs b/tests/run-make/lto-readonly-lib/rmake.rs index b9f0dc21dafc2..78f2b580e0df2 100644 --- a/tests/run-make/lto-readonly-lib/rmake.rs +++ b/tests/run-make/lto-readonly-lib/rmake.rs @@ -7,7 +7,7 @@ //@ ignore-cross-compile -use run_make_support::{rfs, run, rust_lib_name, rustc, test_while_readonly}; +use run_make_support::{run, rust_lib_name, rustc, test_while_readonly}; fn main() { rustc().input("lib.rs").run(); diff --git a/tests/run-make/min-global-align/Makefile b/tests/run-make/min-global-align/Makefile deleted file mode 100644 index 82f38749e0092..0000000000000 --- a/tests/run-make/min-global-align/Makefile +++ /dev/null @@ -1,22 +0,0 @@ -include ../tools.mk - -# only-linux - -# This tests ensure that global variables respect the target minimum alignment. -# The three bools `STATIC_BOOL`, `STATIC_MUT_BOOL`, and `CONST_BOOL` all have -# type-alignment of 1, but some targets require greater global alignment. - -SRC = min_global_align.rs -LL = $(TMPDIR)/min_global_align.ll - -all: -# Most targets are happy with default alignment -- take i686 for example. -ifeq ($(filter x86,$(LLVM_COMPONENTS)),x86) - $(RUSTC) --target=i686-unknown-linux-gnu --emit=llvm-ir $(SRC) - [ "$$(grep -c 'align 1' "$(LL)")" -eq "3" ] -endif -# SystemZ requires even alignment for PC-relative addressing. -ifeq ($(filter systemz,$(LLVM_COMPONENTS)),systemz) - $(RUSTC) --target=s390x-unknown-linux-gnu --emit=llvm-ir $(SRC) - [ "$$(grep -c 'align 2' "$(LL)")" -eq "3" ] -endif diff --git a/tests/run-make/min-global-align/rmake.rs b/tests/run-make/min-global-align/rmake.rs new file mode 100644 index 0000000000000..2adaaf172f470 --- /dev/null +++ b/tests/run-make/min-global-align/rmake.rs @@ -0,0 +1,27 @@ +// This test checks that global variables respect the target minimum alignment. +// The three bools `STATIC_BOOL`, `STATIC_MUT_BOOL`, and `CONST_BOOL` all have +// type-alignment of 1, but some targets require greater global alignment. +// See https://github.com/rust-lang/rust/pull/44440 + +//@ only-linux +// Reason: this test is specific to linux, considering compilation is targeted +// towards linux architectures only. + +use run_make_support::{assert_count_is, llvm_components_contain, rfs, rustc}; + +fn main() { + // Most targets are happy with default alignment -- take i686 for example. + if llvm_components_contain("x86") { + rustc().target("i686-unknown-linux-gnu").emit("llvm-ir").input("min_global_align.rs").run(); + assert_count_is(3, rfs::read_to_string("min_global_align.ll"), "align 1"); + } + // SystemZ requires even alignment for PC-relative addressing. 
+ if llvm_components_contain("systemz") { + rustc() + .target("s390x-unknown-linux-gnu") + .emit("llvm-ir") + .input("min_global_align.rs") + .run(); + assert_count_is(3, rfs::read_to_string("min_global_align.ll"), "align 2"); + } +} diff --git a/tests/run-make/multiple-emits/rmake.rs b/tests/run-make/multiple-emits/rmake.rs index 67c0ebb9864a5..8a5eb1d9d856a 100644 --- a/tests/run-make/multiple-emits/rmake.rs +++ b/tests/run-make/multiple-emits/rmake.rs @@ -1,4 +1,4 @@ -use run_make_support::{cwd, path, rustc}; +use run_make_support::{path, rustc}; fn main() { rustc().input("foo.rs").emit("asm,llvm-ir").output("out").run(); diff --git a/tests/run-make/naked-symbol-visibility/rmake.rs b/tests/run-make/naked-symbol-visibility/rmake.rs index a32e62326eb32..07ff253788d0a 100644 --- a/tests/run-make/naked-symbol-visibility/rmake.rs +++ b/tests/run-make/naked-symbol-visibility/rmake.rs @@ -3,7 +3,7 @@ use run_make_support::object::read::{File, Object, Symbol}; use run_make_support::object::ObjectSymbol; use run_make_support::targets::is_windows; -use run_make_support::{dynamic_lib_name, env_var, rfs, rustc}; +use run_make_support::{dynamic_lib_name, rfs, rustc}; fn main() { let rdylib_name = dynamic_lib_name("a_rust_dylib"); diff --git a/tests/run-make/native-link-modifier-bundle/Makefile b/tests/run-make/native-link-modifier-bundle/Makefile deleted file mode 100644 index 527720922fe58..0000000000000 --- a/tests/run-make/native-link-modifier-bundle/Makefile +++ /dev/null @@ -1,38 +0,0 @@ -# ignore-cross-compile -# ignore-windows-msvc - -include ../tools.mk - -# We're using the llvm-nm instead of the system nm to ensure it is compatible -# with the LLVM bitcode generated by rustc. -# Except on Windows where piping/IO redirection under MSYS2 is wonky with llvm-nm. 
-ifndef IS_WINDOWS -NM = "$(LLVM_BIN_DIR)"/llvm-nm -else -NM = nm -endif - -all: $(call NATIVE_STATICLIB,native-staticlib) - # Build a staticlib and a rlib, the `native_func` symbol will be bundled into them - $(RUSTC) bundled.rs --crate-type=staticlib --crate-type=rlib - $(NM) $(TMPDIR)/libbundled.a | $(CGREP) -e "T _*native_func" - $(NM) $(TMPDIR)/libbundled.a | $(CGREP) -e "U _*native_func" - $(NM) $(TMPDIR)/libbundled.rlib | $(CGREP) -e "T _*native_func" - $(NM) $(TMPDIR)/libbundled.rlib | $(CGREP) -e "U _*native_func" - - # Build a staticlib and a rlib, the `native_func` symbol will not be bundled into it - $(RUSTC) non-bundled.rs --crate-type=staticlib --crate-type=rlib - $(NM) $(TMPDIR)/libnon_bundled.a | $(CGREP) -ve "T _*native_func" - $(NM) $(TMPDIR)/libnon_bundled.a | $(CGREP) -e "U _*native_func" - $(NM) $(TMPDIR)/libnon_bundled.rlib | $(CGREP) -ve "T _*native_func" - $(NM) $(TMPDIR)/libnon_bundled.rlib | $(CGREP) -e "U _*native_func" - - # Build a cdylib, `native-staticlib` will not appear on the linker line because it was bundled previously - # The cdylib will contain the `native_func` symbol in the end - $(RUSTC) cdylib-bundled.rs --crate-type=cdylib --print link-args | $(CGREP) -ve '-l[" ]*native-staticlib' - $(NM) $(call DYLIB,cdylib_bundled) | $(CGREP) -e "[Tt] _*native_func" - - # Build a cdylib, `native-staticlib` will appear on the linker line because it was not bundled previously - # The cdylib will contain the `native_func` symbol in the end - $(RUSTC) cdylib-non-bundled.rs --crate-type=cdylib --print link-args | $(CGREP) -e '-l[" ]*native-staticlib' - $(NM) $(call DYLIB,cdylib_non_bundled) | $(CGREP) -e "[Tt] _*native_func" diff --git a/tests/run-make/native-link-modifier-bundle/rmake.rs b/tests/run-make/native-link-modifier-bundle/rmake.rs new file mode 100644 index 0000000000000..058b66b15f12f --- /dev/null +++ b/tests/run-make/native-link-modifier-bundle/rmake.rs @@ -0,0 +1,90 @@ +// This test exercises the `bundle` link argument, which can be turned on or off. + +// When building a rlib or staticlib, +bundle means that all object files from the native static +// library will be added to the rlib or staticlib archive, and then used from it during linking of +// the final binary. + +// When building a rlib -bundle means that the native static library is registered as a dependency +// of that rlib "by name", and object files from it are included only during linking of the final +// binary, the file search by that name is also performed during final linking. +// When building a staticlib -bundle means that the native static library is simply not included +// into the archive and some higher level build system will need to add it later during linking of +// the final binary. + +// This modifier has no effect when building other targets like executables or dynamic libraries. + +// The default for this modifier is +bundle. 
+// See https://github.com/rust-lang/rust/pull/95818 + +//@ ignore-cross-compile +// Reason: cross-compilation fails to export native symbols + +use run_make_support::{ + build_native_static_lib, dynamic_lib_name, is_msvc, llvm_nm, rust_lib_name, rustc, + static_lib_name, +}; + +fn main() { + build_native_static_lib("native-staticlib"); + // Build a staticlib and a rlib, the `native_func` symbol will be bundled into them + rustc().input("bundled.rs").crate_type("staticlib").crate_type("rlib").run(); + llvm_nm() + .input(static_lib_name("bundled")) + .run() + .assert_stdout_contains_regex("T _*native_func"); + llvm_nm() + .input(static_lib_name("bundled")) + .run() + .assert_stdout_contains_regex("U _*native_func"); + llvm_nm().input(rust_lib_name("bundled")).run().assert_stdout_contains_regex("T _*native_func"); + llvm_nm().input(rust_lib_name("bundled")).run().assert_stdout_contains_regex("U _*native_func"); + + // Build a staticlib and a rlib, the `native_func` symbol will not be bundled into it + build_native_static_lib("native-staticlib"); + rustc().input("non-bundled.rs").crate_type("staticlib").crate_type("rlib").run(); + llvm_nm() + .input(static_lib_name("non_bundled")) + .run() + .assert_stdout_not_contains_regex("T _*native_func"); + llvm_nm() + .input(static_lib_name("non_bundled")) + .run() + .assert_stdout_contains_regex("U _*native_func"); + llvm_nm() + .input(rust_lib_name("non_bundled")) + .run() + .assert_stdout_not_contains_regex("T _*native_func"); + llvm_nm() + .input(rust_lib_name("non_bundled")) + .run() + .assert_stdout_contains_regex("U _*native_func"); + + // This part of the test does not function on Windows MSVC - no symbols are printed. + if !is_msvc() { + // Build a cdylib, `native-staticlib` will not appear on the linker line because it was + // bundled previously. The cdylib will contain the `native_func` symbol in the end. + rustc() + .input("cdylib-bundled.rs") + .crate_type("cdylib") + .print("link-args") + .run() + .assert_stdout_not_contains(r#"-l[" ]*native-staticlib"#); + llvm_nm() + .input(dynamic_lib_name("cdylib_bundled")) + .run() + .assert_stdout_contains_regex("[Tt] _*native_func"); + + // Build a cdylib, `native-staticlib` will appear on the linker line because it was not + // bundled previously. 
The cdylib will contain the `native_func` symbol in the end + rustc() + .input("cdylib-non-bundled.rs") + .crate_type("cdylib") + .print("link-args") + .run() + .assert_stdout_contains_regex(r#"-l[" ]*native-staticlib"#); + llvm_nm() + .input(dynamic_lib_name("cdylib_non_bundled")) + .run() + .assert_stdout_contains_regex("[Tt] _*native_func"); + } +} diff --git a/tests/run-make/no-alloc-shim/Makefile b/tests/run-make/no-alloc-shim/Makefile deleted file mode 100644 index 568e3f9ba1d68..0000000000000 --- a/tests/run-make/no-alloc-shim/Makefile +++ /dev/null @@ -1,24 +0,0 @@ -include ../tools.mk - -# ignore-cross-compile -# ignore-msvc FIXME(bjorn3) can't figure out how to link with the MSVC toolchain - -TARGET_LIBDIR = $$($(RUSTC) --print target-libdir) - -all: - $(RUSTC) foo.rs --crate-type bin --emit obj -Cpanic=abort -ifdef IS_MSVC - $(CC) $(CFLAGS) $(TMPDIR)/foo.o $(call OUT_EXE,foo) /link $(TARGET_LIBDIR)/liballoc-*.rlib $(TARGET_LIBDIR)/libcore-*.rlib $(TARGET_LIBDIR)/libcompiler_builtins-*.rlib - $(call OUT_EXE,foo) -else - $(CC) $(CFLAGS) $(TMPDIR)/foo.o $(TARGET_LIBDIR)/liballoc-*.rlib $(TARGET_LIBDIR)/libcore-*.rlib $(TARGET_LIBDIR)/libcompiler_builtins-*.rlib -o $(call RUN_BINFILE,foo) - $(call RUN_BINFILE,foo) -endif - - # Check that linking without __rust_no_alloc_shim_is_unstable defined fails - $(RUSTC) foo.rs --crate-type bin --emit obj -Cpanic=abort --cfg check_feature_gate -ifdef IS_MSVC - $(CC) $(CFLAGS) $(TMPDIR)/foo.o $(call OUT_EXE,foo) /link $(TARGET_LIBDIR)/liballoc-*.rlib $(TARGET_LIBDIR)/libcore-*.rlib $(TARGET_LIBDIR)/libcompiler_builtins-*.rlib || exit 0 && exit 1 -else - $(CC) $(CFLAGS) $(TMPDIR)/foo.o $(TARGET_LIBDIR)/liballoc-*.rlib $(TARGET_LIBDIR)/libcore-*.rlib $(TARGET_LIBDIR)/libcompiler_builtins-*.rlib -o $(call RUN_BINFILE,foo) || exit 0 && exit 1 -endif diff --git a/tests/run-make/no-alloc-shim/rmake.rs b/tests/run-make/no-alloc-shim/rmake.rs new file mode 100644 index 0000000000000..c398a3177df63 --- /dev/null +++ b/tests/run-make/no-alloc-shim/rmake.rs @@ -0,0 +1,55 @@ +// This test checks the compatibility of the interaction between `--emit obj` and +// `#[global_allocator]`, as it is now possible to invoke the latter without the +// allocator shim since #86844. As this feature is unstable, it should fail if +// --cfg check_feature_gate is passed. +// See https://github.com/rust-lang/rust/pull/86844 + +//@ ignore-cross-compile +// Reason: the compiled binary is executed + +//@ ignore-msvc +//FIXME(Oneirical): Getting this to work on MSVC requires passing libcmt.lib to CC, +// which is not trivial to do. 
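The header above describes linking `--emit obj` output that uses `#[global_allocator]` without the allocator shim. The test's actual `foo.rs` (which also provides the `__rust_no_alloc_shim_is_unstable` symbol behind `--cfg check_feature_gate`) is not shown in this diff; as a reminder of the feature itself, a minimal, self-contained `#[global_allocator]` program looks like this:

```rust
use std::alloc::{GlobalAlloc, Layout, System};

// A trivial allocator that just forwards to the system allocator.
struct Forwarding;

unsafe impl GlobalAlloc for Forwarding {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        System.alloc(layout)
    }
    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        System.dealloc(ptr, layout)
    }
}

#[global_allocator]
static GLOBAL: Forwarding = Forwarding;

fn main() {
    // Heap allocation now goes through `GLOBAL`.
    let v = vec![1, 2, 3];
    assert_eq!(v.iter().sum::<i32>(), 6);
}
```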
+// Tracking issue: https://github.com/rust-lang/rust/issues/128602 +// Discussion: https://github.com/rust-lang/rust/pull/128407#discussion_r1702439172 + +use run_make_support::{cc, cwd, has_extension, has_prefix, run, rustc, shallow_find_files}; + +fn main() { + rustc().input("foo.rs").crate_type("bin").emit("obj").panic("abort").run(); + let libdir = rustc().print("target-libdir").run().stdout_utf8(); + let libdir = libdir.trim(); + + let alloc_libs = shallow_find_files(&libdir, |path| { + has_prefix(path, "liballoc-") && has_extension(path, "rlib") + }); + let core_libs = shallow_find_files(&libdir, |path| { + has_prefix(path, "libcore-") && has_extension(path, "rlib") + }); + let compiler_builtins_libs = shallow_find_files(libdir, |path| { + has_prefix(path, "libcompiler_builtins") && has_extension(path, "rlib") + }); + + cc().input("foo.o") + .out_exe("foo") + .args(&alloc_libs) + .args(&core_libs) + .args(&compiler_builtins_libs) + .run(); + run("foo"); + + // Check that linking without __rust_no_alloc_shim_is_unstable defined fails + rustc() + .input("foo.rs") + .crate_type("bin") + .emit("obj") + .panic("abort") + .cfg("check_feature_gate") + .run(); + cc().input("foo.o") + .out_exe("foo") + .args(&alloc_libs) + .args(&core_libs) + .args(&compiler_builtins_libs) + .run_fail(); +} diff --git a/tests/run-make/non-unicode-in-incremental-dir/rmake.rs b/tests/run-make/non-unicode-in-incremental-dir/rmake.rs index ef316f1103638..42bd4b1b79990 100644 --- a/tests/run-make/non-unicode-in-incremental-dir/rmake.rs +++ b/tests/run-make/non-unicode-in-incremental-dir/rmake.rs @@ -8,7 +8,7 @@ fn main() { match std::fs::create_dir(&non_unicode) { // If an error occurs, check if creating a directory with a valid Unicode name would // succeed. - Err(e) if std::fs::create_dir("valid_unicode").is_ok() => { + Err(_) if std::fs::create_dir("valid_unicode").is_ok() => { // Filesystem doesn't appear support non-Unicode paths. 
return; } diff --git a/tests/run-make/output-type-permutations/rmake.rs b/tests/run-make/output-type-permutations/rmake.rs index 76fd8c1c2603a..c0569af6e84a3 100644 --- a/tests/run-make/output-type-permutations/rmake.rs +++ b/tests/run-make/output-type-permutations/rmake.rs @@ -113,7 +113,7 @@ fn main() { assert_expected_output_files( Expectations { expected_files: s!["foo"], - allowed_files: s![], + allowed_files: vec![], test_dir: "asm-emit".to_string(), }, || { @@ -123,7 +123,7 @@ fn main() { assert_expected_output_files( Expectations { expected_files: s!["foo"], - allowed_files: s![], + allowed_files: vec![], test_dir: "asm-emit2".to_string(), }, || { @@ -133,7 +133,7 @@ fn main() { assert_expected_output_files( Expectations { expected_files: s!["foo"], - allowed_files: s![], + allowed_files: vec![], test_dir: "asm-emit3".to_string(), }, || { @@ -144,7 +144,7 @@ fn main() { assert_expected_output_files( Expectations { expected_files: s!["foo"], - allowed_files: s![], + allowed_files: vec![], test_dir: "llvm-ir-emit".to_string(), }, || { @@ -154,7 +154,7 @@ fn main() { assert_expected_output_files( Expectations { expected_files: s!["foo"], - allowed_files: s![], + allowed_files: vec![], test_dir: "llvm-ir-emit2".to_string(), }, || { @@ -164,7 +164,7 @@ fn main() { assert_expected_output_files( Expectations { expected_files: s!["foo"], - allowed_files: s![], + allowed_files: vec![], test_dir: "llvm-ir-emit3".to_string(), }, || { @@ -175,7 +175,7 @@ fn main() { assert_expected_output_files( Expectations { expected_files: s!["foo"], - allowed_files: s![], + allowed_files: vec![], test_dir: "llvm-bc-emit".to_string(), }, || { @@ -185,7 +185,7 @@ fn main() { assert_expected_output_files( Expectations { expected_files: s!["foo"], - allowed_files: s![], + allowed_files: vec![], test_dir: "llvm-bc-emit2".to_string(), }, || { @@ -195,7 +195,7 @@ fn main() { assert_expected_output_files( Expectations { expected_files: s!["foo"], - allowed_files: s![], + allowed_files: vec![], test_dir: "llvm-bc-emit3".to_string(), }, || { @@ -206,7 +206,7 @@ fn main() { assert_expected_output_files( Expectations { expected_files: s!["foo"], - allowed_files: s![], + allowed_files: vec![], test_dir: "obj-emit".to_string(), }, || { @@ -216,7 +216,7 @@ fn main() { assert_expected_output_files( Expectations { expected_files: s!["foo"], - allowed_files: s![], + allowed_files: vec![], test_dir: "obj-emit2".to_string(), }, || { @@ -226,7 +226,7 @@ fn main() { assert_expected_output_files( Expectations { expected_files: s!["foo"], - allowed_files: s![], + allowed_files: vec![], test_dir: "obj-emit3".to_string(), }, || { @@ -268,7 +268,7 @@ fn main() { assert_expected_output_files( Expectations { expected_files: s!["foo"], - allowed_files: s![], + allowed_files: vec![], test_dir: "rlib".to_string(), }, || { @@ -278,7 +278,7 @@ fn main() { assert_expected_output_files( Expectations { expected_files: s!["foo"], - allowed_files: s![], + allowed_files: vec![], test_dir: "rlib2".to_string(), }, || { @@ -288,7 +288,7 @@ fn main() { assert_expected_output_files( Expectations { expected_files: s!["foo"], - allowed_files: s![], + allowed_files: vec![], test_dir: "rlib3".to_string(), }, || { @@ -375,7 +375,7 @@ fn main() { assert_expected_output_files( Expectations { expected_files: s!["foo"], - allowed_files: s![], + allowed_files: vec![], test_dir: "staticlib".to_string(), }, || { @@ -385,7 +385,7 @@ fn main() { assert_expected_output_files( Expectations { expected_files: s!["foo"], - allowed_files: s![], + allowed_files: vec![], 
test_dir: "staticlib2".to_string(), }, || { @@ -395,7 +395,7 @@ fn main() { assert_expected_output_files( Expectations { expected_files: s!["foo"], - allowed_files: s![], + allowed_files: vec![], test_dir: "staticlib3".to_string(), }, || { @@ -449,7 +449,7 @@ fn main() { assert_expected_output_files( Expectations { expected_files: s!["ir", rust_lib_name("bar")], - allowed_files: s![], + allowed_files: vec![], test_dir: "rlib-ir".to_string(), }, || { @@ -466,7 +466,7 @@ fn main() { assert_expected_output_files( Expectations { expected_files: s!["ir", "asm", "bc", "obj", "link"], - allowed_files: s![], + allowed_files: vec![], test_dir: "staticlib-all".to_string(), }, || { @@ -484,7 +484,7 @@ fn main() { assert_expected_output_files( Expectations { expected_files: s!["ir", "asm", "bc", "obj", "link"], - allowed_files: s![], + allowed_files: vec![], test_dir: "staticlib-all2".to_string(), }, || { @@ -523,7 +523,7 @@ fn main() { assert_expected_output_files( Expectations { expected_files: s!["bar.bc", rust_lib_name("bar"), "foo.bc"], - allowed_files: s![], + allowed_files: vec![], test_dir: "rlib-emits".to_string(), }, || { diff --git a/tests/run-make/print-check-cfg/rmake.rs b/tests/run-make/print-check-cfg/rmake.rs index b10336f88c649..f6f7f7cece6e7 100644 --- a/tests/run-make/print-check-cfg/rmake.rs +++ b/tests/run-make/print-check-cfg/rmake.rs @@ -4,7 +4,6 @@ extern crate run_make_support; use std::collections::HashSet; use std::iter::FromIterator; -use std::ops::Deref; use run_make_support::rustc; diff --git a/tests/run-make/print-native-static-libs/rmake.rs b/tests/run-make/print-native-static-libs/rmake.rs index b4e7c0e17ff8e..a51ac934c722e 100644 --- a/tests/run-make/print-native-static-libs/rmake.rs +++ b/tests/run-make/print-native-static-libs/rmake.rs @@ -12,8 +12,6 @@ //@ ignore-cross-compile //@ ignore-wasm -use std::io::BufRead; - use run_make_support::{is_msvc, rustc}; fn main() { diff --git a/tests/run-make/redundant-libs/rmake.rs b/tests/run-make/redundant-libs/rmake.rs index 43bb30bde702d..e984dee8df5ba 100644 --- a/tests/run-make/redundant-libs/rmake.rs +++ b/tests/run-make/redundant-libs/rmake.rs @@ -11,9 +11,7 @@ //@ ignore-cross-compile // Reason: the compiled binary is executed -use run_make_support::{ - build_native_dynamic_lib, build_native_static_lib, cwd, is_msvc, rfs, run, rustc, -}; +use run_make_support::{build_native_dynamic_lib, build_native_static_lib, run, rustc}; fn main() { build_native_dynamic_lib("foo"); diff --git a/tests/run-make/remap-path-prefix-dwarf/Makefile b/tests/run-make/remap-path-prefix-dwarf/Makefile deleted file mode 100644 index 8905a00ea2878..0000000000000 --- a/tests/run-make/remap-path-prefix-dwarf/Makefile +++ /dev/null @@ -1,112 +0,0 @@ -# This test makes sure that --remap-path-prefix has the expected effects on paths in debuginfo. -# It tests several cases, each of them has a detailed description attached to it. - -# ignore-windows - -include ../tools.mk - -SRC_DIR := $(abspath .) -SRC_DIR_PARENT := $(abspath ..) 
- -ifeq ($(UNAME),Darwin) - DEBUGINFOOPTS := -Csplit-debuginfo=off -else - DEBUGINFOOPTS := -endif - -all: \ - abs_input_outside_working_dir \ - rel_input_remap_working_dir \ - rel_input_remap_working_dir_scope \ - rel_input_remap_working_dir_parent \ - rel_input_remap_working_dir_child \ - rel_input_remap_working_dir_diagnostics \ - abs_input_inside_working_dir \ - abs_input_inside_working_dir_scope \ - abs_input_outside_working_dir - -# The compiler is called with an *ABSOLUTE PATH* as input, and that absolute path *is* within -# the working directory of the compiler. We are remapping the path that contains `src`. -abs_input_inside_working_dir: - # We explicitly switch to a directory that *is* a prefix of the directory our - # source code is contained in. - cd $(SRC_DIR) && $(RUSTC) $(SRC_DIR)/src/quux.rs -o "$(TMPDIR)/abs_input_inside_working_dir.rlib" -Cdebuginfo=2 --remap-path-prefix $(SRC_DIR)=REMAPPED - # We expect the path to the main source file to be remapped. - "$(LLVM_BIN_DIR)"/llvm-dwarfdump $(TMPDIR)/abs_input_inside_working_dir.rlib | $(CGREP) "REMAPPED/src/quux.rs" - # No weird duplication of remapped components (see #78479) - "$(LLVM_BIN_DIR)"/llvm-dwarfdump $(TMPDIR)/abs_input_inside_working_dir.rlib | $(CGREP) -v "REMAPPED/REMAPPED" - -# The compiler is called with an *ABSOLUTE PATH* as input, and that absolute path *is* within -# the working directory of the compiler. We are remapping the path that contains `src`. -abs_input_inside_working_dir_scope: - # We explicitly switch to a directory that *is* a prefix of the directory our - # source code is contained in. - cd $(SRC_DIR) && $(RUSTC) $(SRC_DIR)/src/quux.rs -o "$(TMPDIR)/abs_input_inside_working_dir_scope.rlib" -Cdebuginfo=2 --remap-path-prefix $(SRC_DIR)=REMAPPED -Zremap-path-scope=object $(DEBUGINFOOPTS) - # We expect the path to the main source file to be remapped. - "$(LLVM_BIN_DIR)"/llvm-dwarfdump $(TMPDIR)/abs_input_inside_working_dir_scope.rlib | $(CGREP) "REMAPPED/src/quux.rs" - # No weird duplication of remapped components (see #78479) - "$(LLVM_BIN_DIR)"/llvm-dwarfdump $(TMPDIR)/abs_input_inside_working_dir_scope.rlib | $(CGREP) -v "REMAPPED/REMAPPED" - -# The compiler is called with an *ABSOLUTE PATH* as input, and that absolute path is *not* within -# the working directory of the compiler. We are remapping both the path that contains `src` and -# the working directory to the same thing. This setup corresponds to a workaround that is needed -# when trying to remap everything to something that looks like a local path. -# Relative paths are interpreted as relative to the compiler's working directory (e.g. in -# debuginfo). If we also remap the working directory, the compiler strip it from other paths so -# that the final outcome is the desired one again. -abs_input_outside_working_dir: - # We explicitly switch to a directory that is *not* a prefix of the directory our - # source code is contained in. - cd $(TMPDIR) && $(RUSTC) $(SRC_DIR)/src/quux.rs -o "$(TMPDIR)/abs_input_outside_working_dir.rlib" -Cdebuginfo=2 --remap-path-prefix $(SRC_DIR)=REMAPPED --remap-path-prefix $(TMPDIR)=REMAPPED - "$(LLVM_BIN_DIR)"/llvm-dwarfdump $(TMPDIR)/abs_input_outside_working_dir.rlib | $(CGREP) "REMAPPED/src/quux.rs" - # No weird duplication of remapped components (see #78479) - "$(LLVM_BIN_DIR)"/llvm-dwarfdump $(TMPDIR)/abs_input_outside_working_dir.rlib | $(CGREP) -v "REMAPPED/REMAPPED" - -# The compiler is called with a *RELATIVE PATH* as input. 
We are remapping the working directory of -# the compiler, which naturally is an implicit prefix of our relative input path. Debuginfo will -# expand the relative path to an absolute path and we expect the working directory to be remapped -# in that expansion. -rel_input_remap_working_dir: - cd $(SRC_DIR) && $(RUSTC) src/quux.rs -o "$(TMPDIR)/rel_input_remap_working_dir.rlib" -Cdebuginfo=2 --remap-path-prefix "$(SRC_DIR)=REMAPPED" - "$(LLVM_BIN_DIR)"/llvm-dwarfdump "$(TMPDIR)/rel_input_remap_working_dir.rlib" | $(CGREP) "REMAPPED/src/quux.rs" - # No weird duplication of remapped components (see #78479) - "$(LLVM_BIN_DIR)"/llvm-dwarfdump "$(TMPDIR)/rel_input_remap_working_dir.rlib" | $(CGREP) -v "REMAPPED/REMAPPED" - -# The compiler is called with a *RELATIVE PATH* as input. We are remapping the working directory of -# the compiler, which naturally is an implicit prefix of our relative input path. Debuginfo will -# expand the relative path to an absolute path and we expect the working directory to be remapped -# in that expansion. -rel_input_remap_working_dir_scope: - cd $(SRC_DIR) && $(RUSTC) src/quux.rs -o "$(TMPDIR)/rel_input_remap_working_dir_scope.rlib" -Cdebuginfo=2 --remap-path-prefix "$(SRC_DIR)=REMAPPED" -Zremap-path-scope=object $(DEBUGINFOOPTS) - "$(LLVM_BIN_DIR)"/llvm-dwarfdump "$(TMPDIR)/rel_input_remap_working_dir_scope.rlib" | $(CGREP) "REMAPPED/src/quux.rs" - # No weird duplication of remapped components (see #78479) - "$(LLVM_BIN_DIR)"/llvm-dwarfdump "$(TMPDIR)/rel_input_remap_working_dir_scope.rlib" | $(CGREP) -v "REMAPPED/REMAPPED" - -rel_input_remap_working_dir_diagnostics: - cd $(SRC_DIR) && $(RUSTC) src/quux.rs -o "$(TMPDIR)/rel_input_remap_working_dir_scope.rlib" -Cdebuginfo=2 --remap-path-prefix "$(SRC_DIR)=REMAPPED" -Zremap-path-scope=diagnostics $(DEBUGINFOOPTS) - "$(LLVM_BIN_DIR)"/llvm-dwarfdump "$(TMPDIR)/rel_input_remap_working_dir_scope.rlib" | $(CGREP) -v "REMAPPED/src/quux.rs" - "$(LLVM_BIN_DIR)"/llvm-dwarfdump "$(TMPDIR)/rel_input_remap_working_dir_scope.rlib" | $(CGREP) -v "REMAPPED/REMAPPED" - -# The compiler is called with a *RELATIVE PATH* as input. We are remapping a *SUB-DIRECTORY* of the -# compiler's working directory. This test makes sure that that directory is remapped even though it -# won't actually show up in this form in the compiler's SourceMap and instead is only constructed -# on demand during debuginfo generation. -rel_input_remap_working_dir_child: - cd $(SRC_DIR) && $(RUSTC) src/quux.rs -o "$(TMPDIR)/rel_input_remap_working_dir_child.rlib" -Cdebuginfo=2 --remap-path-prefix "$(SRC_DIR)/src=REMAPPED" - # We expect `src/quux.rs` to have been remapped to `REMAPPED/quux.rs`. - "$(LLVM_BIN_DIR)"/llvm-dwarfdump "$(TMPDIR)/rel_input_remap_working_dir_child.rlib" | $(CGREP) "REMAPPED/quux.rs" - # We don't want to find the path that we just remapped anywhere in the DWARF - "$(LLVM_BIN_DIR)"/llvm-dwarfdump "$(TMPDIR)/rel_input_remap_working_dir_child.rlib" | $(CGREP) -v "$(SRC_DIR)/src" - # No weird duplication of remapped components (see #78479) - "$(LLVM_BIN_DIR)"/llvm-dwarfdump "$(TMPDIR)/rel_input_remap_working_dir_child.rlib" | $(CGREP) -v "REMAPPED/REMAPPED" - -# The compiler is called with a *RELATIVE PATH* as input. We are remapping a *PARENT DIRECTORY* of -# the compiler's working directory. 
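Both the deleted Makefile and the replacement rmake.rs below assert on how `--remap-path-prefix OLD=NEW` rewrites the paths recorded in DWARF. A tiny standalone model of the substitution, using made-up paths (this is not the compiler's implementation), may help when reading those assertions:

```rust
// Illustrative model of prefix remapping only.
fn remap(path: &str, from: &str, to: &str) -> String {
    match path.strip_prefix(from) {
        Some(rest) => format!("{to}{rest}"),
        None => path.to_owned(),
    }
}

fn main() {
    // e.g. --remap-path-prefix /work/project=REMAPPED
    assert_eq!(
        remap("/work/project/src/quux.rs", "/work/project", "REMAPPED"),
        "REMAPPED/src/quux.rs"
    );
    // Paths outside the prefix are left untouched.
    assert_eq!(remap("/elsewhere/main.rs", "/work/project", "REMAPPED"), "/elsewhere/main.rs");
}
```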
-rel_input_remap_working_dir_parent: - cd $(SRC_DIR) && $(RUSTC) src/quux.rs -o "$(TMPDIR)/rel_input_remap_working_dir_parent.rlib" -Cdebuginfo=2 --remap-path-prefix "$(SRC_DIR_PARENT)=REMAPPED" - # We expect `src/quux.rs` to have been remapped to `REMAPPED/remap-path-prefix-dwarf/src/quux.rs`. - "$(LLVM_BIN_DIR)"/llvm-dwarfdump "$(TMPDIR)/rel_input_remap_working_dir_parent.rlib" | $(CGREP) "REMAPPED/remap-path-prefix-dwarf/src/quux.rs" - # We don't want to find the path that we just remapped anywhere in the DWARF - "$(LLVM_BIN_DIR)"/llvm-dwarfdump "$(TMPDIR)/rel_input_remap_working_dir_parent.rlib" | $(CGREP) -v "$(SRC_DIR_PARENT)" - # No weird duplication of remapped components (see #78479) - "$(LLVM_BIN_DIR)"/llvm-dwarfdump "$(TMPDIR)/rel_input_remap_working_dir_parent.rlib" | $(CGREP) -v "REMAPPED/REMAPPED" diff --git a/tests/run-make/remap-path-prefix-dwarf/rmake.rs b/tests/run-make/remap-path-prefix-dwarf/rmake.rs new file mode 100644 index 0000000000000..ede1d61574257 --- /dev/null +++ b/tests/run-make/remap-path-prefix-dwarf/rmake.rs @@ -0,0 +1,202 @@ +// This test makes sure that --remap-path-prefix has the expected effects on paths in debuginfo. +// We explicitly switch to a directory that *is* a prefix of the directory our +// source code is contained in. +// It tests several cases, each of them has a detailed description attached to it. +// See https://github.com/rust-lang/rust/pull/96867 + +//@ ignore-windows +// Reason: the remap path prefix is not printed in the dwarf dump. + +use run_make_support::{cwd, is_darwin, llvm_dwarfdump, rust_lib_name, rustc}; + +fn main() { + // The compiler is called with an *ABSOLUTE PATH* as input, and that absolute path *is* within + // the working directory of the compiler. We are remapping the path that contains `src`. + check_dwarf(DwarfTest { + lib_name: "abs_input_inside_working_dir", + input_path: PathType::Absolute, + scope: None, + remap_path_prefix: PrefixType::Regular(format!("{}=REMAPPED", cwd().display())), + dwarf_test: DwarfDump::ContainsSrcPath, + }); + check_dwarf(DwarfTest { + lib_name: "abs_input_inside_working_dir_scope", + input_path: PathType::Absolute, + scope: Some(ScopeType::Object), + remap_path_prefix: PrefixType::Regular(format!("{}=REMAPPED", cwd().display())), + dwarf_test: DwarfDump::ContainsSrcPath, + }); + // The compiler is called with an *ABSOLUTE PATH* as input, and that absolute path is *not* + // within the working directory of the compiler. We are remapping both the path that contains + // `src` and the working directory to the same thing. This setup corresponds to a workaround + // that is needed when trying to remap everything to something that looks like a local + // path. Relative paths are interpreted as relative to the compiler's working directory (e.g. + // in debuginfo). If we also remap the working directory, the compiler strip it from other + // paths so that the final outcome is the desired one again. + check_dwarf(DwarfTest { + lib_name: "abs_input_outside_working_dir", + input_path: PathType::Absolute, + scope: None, + remap_path_prefix: PrefixType::Dual(( + format!("{}=REMAPPED", cwd().display()), + "rmake_out=REMAPPED".to_owned(), + )), + dwarf_test: DwarfDump::ContainsSrcPath, + }); + // The compiler is called with a *RELATIVE PATH* as input. We are remapping the working + // directory of the compiler, which naturally is an implicit prefix of our relative input path. 
+ // Debuginfo will expand the relative path to an absolute path and we expect the working + // directory to be remapped in that expansion. + check_dwarf(DwarfTest { + lib_name: "rel_input_remap_working_dir", + input_path: PathType::Relative, + scope: None, + remap_path_prefix: PrefixType::Regular(format!("{}=REMAPPED", cwd().display())), + dwarf_test: DwarfDump::ContainsSrcPath, + }); + check_dwarf(DwarfTest { + lib_name: "rel_input_remap_working_dir_scope", + input_path: PathType::Relative, + scope: Some(ScopeType::Object), + remap_path_prefix: PrefixType::Regular(format!("{}=REMAPPED", cwd().display())), + dwarf_test: DwarfDump::ContainsSrcPath, + }); + check_dwarf(DwarfTest { + lib_name: "rel_input_remap_working_dir_scope", + input_path: PathType::Relative, + scope: Some(ScopeType::Diagnostics), + remap_path_prefix: PrefixType::Regular(format!("{}=REMAPPED", cwd().display())), + dwarf_test: DwarfDump::AvoidSrcPath, + }); + // The compiler is called with a *RELATIVE PATH* as input. We are remapping a *SUB-DIRECTORY* + // of the compiler's working directory. This test makes sure that that directory is remapped + // even though it won't actually show up in this form in the compiler's SourceMap and instead + // is only constructed on demand during debuginfo generation. + check_dwarf(DwarfTest { + lib_name: "rel_input_remap_working_dir_child", + input_path: PathType::Relative, + scope: None, + remap_path_prefix: PrefixType::Regular(format!("{}=REMAPPED", cwd().join("src").display())), + dwarf_test: DwarfDump::ChildTest, + }); + // The compiler is called with a *RELATIVE PATH* as input. We are remapping a + // *PARENT DIRECTORY* of the compiler's working directory. + check_dwarf(DwarfTest { + lib_name: "rel_input_remap_working_dir_parent", + input_path: PathType::Relative, + scope: None, + remap_path_prefix: PrefixType::Regular(format!( + "{}=REMAPPED", + cwd().parent().unwrap().display() + )), + dwarf_test: DwarfDump::ParentTest, + }); +} + +#[track_caller] +fn check_dwarf(test: DwarfTest) { + let mut rustc = rustc(); + match test.input_path { + PathType::Absolute => rustc.input(cwd().join("src/quux.rs")), + PathType::Relative => rustc.input("src/quux.rs"), + }; + rustc.output(rust_lib_name(test.lib_name)); + rustc.arg("-Cdebuginfo=2"); + if let Some(scope) = test.scope { + match scope { + ScopeType::Object => rustc.arg("-Zremap-path-scope=object"), + ScopeType::Diagnostics => rustc.arg("-Zremap-path-scope=diagnostics"), + }; + if is_darwin() { + rustc.arg("-Csplit-debuginfo=off"); + } + } + match test.remap_path_prefix { + PrefixType::Regular(prefix) => { + // We explicitly switch to a directory that *is* a prefix of the directory our + // source code is contained in. + rustc.arg("--remap-path-prefix"); + rustc.arg(prefix); + } + PrefixType::Dual((prefix1, prefix2)) => { + // We explicitly switch to a directory that is *not* a prefix of the directory our + // source code is contained in. + rustc.arg("--remap-path-prefix"); + rustc.arg(prefix1); + rustc.arg("--remap-path-prefix"); + rustc.arg(prefix2); + } + } + rustc.run(); + match test.dwarf_test { + DwarfDump::ContainsSrcPath => { + llvm_dwarfdump() + .input(rust_lib_name(test.lib_name)) + .run() + // We expect the path to the main source file to be remapped. 
+ .assert_stdout_contains("REMAPPED/src/quux.rs") + // No weird duplication of remapped components (see #78479) + .assert_stdout_not_contains("REMAPPED/REMAPPED"); + } + DwarfDump::AvoidSrcPath => { + llvm_dwarfdump() + .input(rust_lib_name(test.lib_name)) + .run() + .assert_stdout_not_contains("REMAPPED/src/quux.rs") + .assert_stdout_not_contains("REMAPPED/REMAPPED"); + } + DwarfDump::ChildTest => { + llvm_dwarfdump() + .input(rust_lib_name(test.lib_name)) + .run() + // We expect `src/quux.rs` to have been remapped to `REMAPPED/quux.rs`. + .assert_stdout_contains("REMAPPED/quux.rs") + // We don't want to find the path that we just remapped anywhere in the DWARF + .assert_stdout_not_contains(cwd().join("src").to_str().unwrap()) + // No weird duplication of remapped components (see #78479) + .assert_stdout_not_contains("REMAPPED/REMAPPED"); + } + DwarfDump::ParentTest => { + llvm_dwarfdump() + .input(rust_lib_name(test.lib_name)) + .run() + // We expect `src/quux.rs` to have been remapped to + // `REMAPPED/remap-path-prefix-dwarf/src/quux.rs`. + .assert_stdout_contains("REMAPPED/rmake_out/src/quux.rs") + // We don't want to find the path that we just remapped anywhere in the DWARF + .assert_stdout_not_contains(cwd().parent().unwrap().to_str().unwrap()) + // No weird duplication of remapped components (see #78479) + .assert_stdout_not_contains("REMAPPED/REMAPPED"); + } + }; +} + +struct DwarfTest { + lib_name: &'static str, + input_path: PathType, + scope: Option, + remap_path_prefix: PrefixType, + dwarf_test: DwarfDump, +} + +enum PathType { + Absolute, + Relative, +} + +enum ScopeType { + Object, + Diagnostics, +} + +enum DwarfDump { + ContainsSrcPath, + AvoidSrcPath, + ChildTest, + ParentTest, +} + +enum PrefixType { + Regular(String), + Dual((String, String)), +} diff --git a/tests/run-make/reproducible-build-2/rmake.rs b/tests/run-make/reproducible-build-2/rmake.rs index c500c4238b088..8b5825cad3026 100644 --- a/tests/run-make/reproducible-build-2/rmake.rs +++ b/tests/run-make/reproducible-build-2/rmake.rs @@ -13,7 +13,7 @@ // 2. When the sysroot gets copied, some symlinks must be re-created, // which is a privileged action on Windows. -use run_make_support::{bin_name, rfs, rust_lib_name, rustc}; +use run_make_support::{rfs, rust_lib_name, rustc}; fn main() { // test 1: fat lto diff --git a/tests/run-make/reproducible-build/Makefile b/tests/run-make/reproducible-build/Makefile deleted file mode 100644 index f5d17a234c0ac..0000000000000 --- a/tests/run-make/reproducible-build/Makefile +++ /dev/null @@ -1,140 +0,0 @@ -# ignore-cross-compile -include ../tools.mk - -# ignore-musl -# Objects are reproducible but their path is not. - -all: \ - smoke \ - debug \ - opt \ - link_paths \ - remap_paths \ - different_source_dirs_rlib \ - remap_cwd_rlib \ - remap_cwd_to_empty \ - extern_flags - -# TODO: Builds of `bin` crate types are not deterministic with debuginfo=2 on -# Windows. 
-# See: https://github.com/rust-lang/rust/pull/87320#issuecomment-920105533 -# Issue: https://github.com/rust-lang/rust/issues/88982 -# -# different_source_dirs_bin \ -# remap_cwd_bin \ - -smoke: - rm -rf $(TMPDIR) && mkdir $(TMPDIR) - $(RUSTC) linker.rs -O - $(RUSTC) reproducible-build-aux.rs - $(RUSTC) reproducible-build.rs -C linker=$(call RUN_BINFILE,linker) - $(RUSTC) reproducible-build.rs -C linker=$(call RUN_BINFILE,linker) - diff -u "$(TMPDIR)/linker-arguments1" "$(TMPDIR)/linker-arguments2" - -debug: - rm -rf $(TMPDIR) && mkdir $(TMPDIR) - $(RUSTC) linker.rs -O - $(RUSTC) reproducible-build-aux.rs -g - $(RUSTC) reproducible-build.rs -C linker=$(call RUN_BINFILE,linker) -g - $(RUSTC) reproducible-build.rs -C linker=$(call RUN_BINFILE,linker) -g - diff -u "$(TMPDIR)/linker-arguments1" "$(TMPDIR)/linker-arguments2" - -opt: - rm -rf $(TMPDIR) && mkdir $(TMPDIR) - $(RUSTC) linker.rs -O - $(RUSTC) reproducible-build-aux.rs -O - $(RUSTC) reproducible-build.rs -C linker=$(call RUN_BINFILE,linker) -O - $(RUSTC) reproducible-build.rs -C linker=$(call RUN_BINFILE,linker) -O - diff -u "$(TMPDIR)/linker-arguments1" "$(TMPDIR)/linker-arguments2" - -link_paths: - rm -rf $(TMPDIR) && mkdir $(TMPDIR) - $(RUSTC) reproducible-build-aux.rs - $(RUSTC) reproducible-build.rs --crate-type rlib -L /b - cp $(TMPDIR)/libreproducible_build.rlib $(TMPDIR)/libfoo.rlib - $(RUSTC) reproducible-build.rs --crate-type rlib -L /a - cmp "$(TMPDIR)/libreproducible_build.rlib" "$(TMPDIR)/libfoo.rlib" || exit 1 - -remap_paths: - rm -rf $(TMPDIR) && mkdir $(TMPDIR) - $(RUSTC) reproducible-build-aux.rs - $(RUSTC) reproducible-build.rs --crate-type rlib --remap-path-prefix=/a=/c - cp $(TMPDIR)/libreproducible_build.rlib $(TMPDIR)/libfoo.rlib - $(RUSTC) reproducible-build.rs --crate-type rlib --remap-path-prefix=/b=/c - cmp "$(TMPDIR)/libreproducible_build.rlib" "$(TMPDIR)/libfoo.rlib" || exit 1 - -different_source_dirs_bin: - rm -rf $(TMPDIR) && mkdir $(TMPDIR) - $(RUSTC) reproducible-build-aux.rs - mkdir $(TMPDIR)/test - cp reproducible-build.rs $(TMPDIR)/test - $(RUSTC) reproducible-build.rs --crate-type bin --remap-path-prefix=$$PWD=/b - cp $(TMPDIR)/reproducible-build $(TMPDIR)/foo - (cd $(TMPDIR)/test && $(RUSTC) reproducible-build.rs \ - --remap-path-prefix=$(TMPDIR)/test=/b \ - --crate-type bin) - cmp "$(TMPDIR)/reproducible-build" "$(TMPDIR)/foo" || exit 1 - -different_source_dirs_rlib: - rm -rf $(TMPDIR) && mkdir $(TMPDIR) - $(RUSTC) reproducible-build-aux.rs - mkdir $(TMPDIR)/test - cp reproducible-build.rs $(TMPDIR)/test - $(RUSTC) reproducible-build.rs --crate-type rlib --remap-path-prefix=$$PWD=/b - cp $(TMPDIR)/libreproducible_build.rlib $(TMPDIR)/libfoo.rlib - (cd $(TMPDIR)/test && $(RUSTC) reproducible-build.rs \ - --remap-path-prefix=$(TMPDIR)/test=/b \ - --crate-type rlib) - cmp "$(TMPDIR)/libreproducible_build.rlib" "$(TMPDIR)/libfoo.rlib" || exit 1 - -remap_cwd_bin: - rm -rf $(TMPDIR) && mkdir $(TMPDIR) - $(RUSTC) reproducible-build-aux.rs - mkdir $(TMPDIR)/test - cp reproducible-build.rs $(TMPDIR)/test - $(RUSTC) reproducible-build.rs --crate-type bin -C debuginfo=2 \ - -Z remap-cwd-prefix=. - cp $(TMPDIR)/reproducible-build $(TMPDIR)/first - (cd $(TMPDIR)/test && \ - $(RUSTC) reproducible-build.rs --crate-type bin -C debuginfo=2 \ - -Z remap-cwd-prefix=.) 
- cmp "$(TMPDIR)/first" "$(TMPDIR)/reproducible-build" || exit 1 - -remap_cwd_rlib: - rm -rf $(TMPDIR) && mkdir $(TMPDIR) - $(RUSTC) reproducible-build-aux.rs - mkdir $(TMPDIR)/test - cp reproducible-build.rs $(TMPDIR)/test - $(RUSTC) reproducible-build.rs --crate-type rlib -C debuginfo=2 \ - -Z remap-cwd-prefix=. - cp $(TMPDIR)/libreproducible_build.rlib $(TMPDIR)/libfirst.rlib - (cd $(TMPDIR)/test && \ - $(RUSTC) reproducible-build.rs --crate-type rlib -C debuginfo=2 \ - -Z remap-cwd-prefix=.) - cmp "$(TMPDIR)/libfirst.rlib" "$(TMPDIR)/libreproducible_build.rlib" || exit 1 - -remap_cwd_to_empty: - rm -rf $(TMPDIR) && mkdir $(TMPDIR) - $(RUSTC) reproducible-build-aux.rs - mkdir $(TMPDIR)/test - cp reproducible-build.rs $(TMPDIR)/test - $(RUSTC) reproducible-build.rs --crate-type rlib -C debuginfo=2 \ - -Z remap-cwd-prefix= - cp $(TMPDIR)/libreproducible_build.rlib $(TMPDIR)/libfirst.rlib - (cd $(TMPDIR)/test && \ - $(RUSTC) reproducible-build.rs --crate-type rlib -C debuginfo=2 \ - -Z remap-cwd-prefix=) - cmp "$(TMPDIR)/libfirst.rlib" "$(TMPDIR)/libreproducible_build.rlib" || exit 1 - -extern_flags: - rm -rf $(TMPDIR) && mkdir $(TMPDIR) - $(RUSTC) reproducible-build-aux.rs - $(RUSTC) reproducible-build.rs \ - --extern reproducible_build_aux=$(TMPDIR)/libreproducible_build_aux.rlib \ - --crate-type rlib - cp $(TMPDIR)/libreproducible_build_aux.rlib $(TMPDIR)/libbar.rlib - cp $(TMPDIR)/libreproducible_build.rlib $(TMPDIR)/libfoo.rlib - $(RUSTC) reproducible-build.rs \ - --extern reproducible_build_aux=$(TMPDIR)/libbar.rlib \ - --crate-type rlib - cmp "$(TMPDIR)/libreproducible_build.rlib" "$(TMPDIR)/libfoo.rlib" || exit 1 diff --git a/tests/run-make/reproducible-build/rmake.rs b/tests/run-make/reproducible-build/rmake.rs new file mode 100644 index 0000000000000..34410d224fbc4 --- /dev/null +++ b/tests/run-make/reproducible-build/rmake.rs @@ -0,0 +1,240 @@ +// This test case makes sure that two identical invocations of the compiler +// (i.e. same code base, same compile-flags, same compiler-versions, etc.) +// produce the same output. In the past, symbol names of monomorphized functions +// were not deterministic (which we want to avoid). +// +// The test tries to exercise as many different paths into symbol name +// generation as possible: +// +// - regular functions +// - generic functions +// - methods +// - statics +// - closures +// - enum variant constructors +// - tuple struct constructors +// - drop glue +// - FnOnce adapters +// - Trait object shims +// - Fn Pointer shims +// See https://github.com/rust-lang/rust/pull/32293 +// Tracking Issue: https://github.com/rust-lang/rust/issues/129080 + +use run_make_support::{ + bin_name, cwd, diff, is_darwin, is_windows, rfs, run_in_tmpdir, rust_lib_name, rustc, +}; + +fn main() { + // Smoke tests. Simple flags, build should be reproducible. + eprintln!("smoke_test => None"); + smoke_test(None); + eprintln!("smoke_test => SmokeFlag::Debug"); + smoke_test(Some(SmokeFlag::Debug)); + eprintln!("smoke_test => SmokeFlag::Opt"); + smoke_test(Some(SmokeFlag::Opt)); + + // Builds should be reproducible even through custom library search paths + // or remap path prefixes. + eprintln!("paths_test => PathsFlag::Link"); + paths_test(PathsFlag::Link); + eprintln!("paths_test => PathsFlag::Remap"); + paths_test(PathsFlag::Remap); + + // Builds should be reproducible even if each build is done in a different directory, + // with both --remap-path-prefix and -Z remap-cwd-prefix. 
+ + // FIXME(Oneirical): Building with crate type set to `bin` AND having -Cdebuginfo=2 + // (or `-g`, the shorthand form) enabled will cause reproducibility failures. + // See https://github.com/rust-lang/rust/issues/89911 + + if !is_darwin() && !is_windows() { + // FIXME(Oneirical): Bin builds are not reproducible on non-Linux targets. + eprintln!("diff_dir_test => Bin, Path"); + diff_dir_test(CrateType::Bin, RemapType::Path); + } + + eprintln!("diff_dir_test => Rlib, Path"); + diff_dir_test(CrateType::Rlib, RemapType::Path); + + // FIXME(Oneirical): This specific case would fail on Linux, should -Cdebuginfo=2 + // be added. + // FIXME(Oneirical): Bin builds are not reproducible on non-Linux targets. + // See https://github.com/rust-lang/rust/issues/89911 + if !is_darwin() && !is_windows() { + eprintln!("diff_dir_test => Bin, Cwd false"); + diff_dir_test(CrateType::Bin, RemapType::Cwd { is_empty: false }); + } + + eprintln!("diff_dir_test => Rlib, Cwd false"); + diff_dir_test(CrateType::Rlib, RemapType::Cwd { is_empty: false }); + eprintln!("diff_dir_test => Rlib, Cwd true"); + diff_dir_test(CrateType::Rlib, RemapType::Cwd { is_empty: true }); + + eprintln!("final extern test"); + // Builds should be reproducible when using the --extern flag. + run_in_tmpdir(|| { + rustc().input("reproducible-build-aux.rs").run(); + rustc() + .input("reproducible-build.rs") + .crate_type("rlib") + .extern_("reproducible_build_aux", rust_lib_name("reproducible_build_aux")) + .run(); + rfs::copy(rust_lib_name("reproducible_build"), rust_lib_name("foo")); + rfs::copy(rust_lib_name("reproducible_build_aux"), rust_lib_name("bar")); + rustc() + .input("reproducible-build.rs") + .crate_type("rlib") + .extern_("reproducible_build_aux", rust_lib_name("bar")) + .run(); + assert!(rfs::read(rust_lib_name("foo")) == rfs::read(rust_lib_name("reproducible_build"))) + }); +} + +#[track_caller] +fn smoke_test(flag: Option) { + run_in_tmpdir(|| { + rustc().input("linker.rs").opt().run(); + rustc().input("reproducible-build-aux.rs").run(); + let mut compiler1 = rustc(); + let mut compiler2 = rustc(); + if let Some(flag) = flag { + match flag { + SmokeFlag::Debug => { + compiler1.arg("-g"); + compiler2.arg("-g"); + } + SmokeFlag::Opt => { + compiler1.opt(); + compiler2.opt(); + } + }; + }; + compiler1 + .input("reproducible-build.rs") + .linker(&cwd().join(bin_name("linker")).display().to_string()) + .run(); + compiler2 + .input("reproducible-build.rs") + .linker(&cwd().join(bin_name("linker")).display().to_string()) + .run(); + diff().actual_file("linker-arguments1").expected_file("linker-arguments2").run(); + }); +} + +#[track_caller] +fn paths_test(flag: PathsFlag) { + run_in_tmpdir(|| { + rustc().input("reproducible-build-aux.rs").run(); + let mut compiler1 = rustc(); + let mut compiler2 = rustc(); + match flag { + PathsFlag::Link => { + compiler1.library_search_path("a"); + compiler2.library_search_path("b"); + } + PathsFlag::Remap => { + compiler1.arg("--remap-path-prefix=/a=/c"); + compiler2.arg("--remap-path-prefix=/b=/c"); + } + } + compiler1.input("reproducible-build.rs").crate_type("rlib").run(); + rfs::rename(rust_lib_name("reproducible_build"), rust_lib_name("foo")); + compiler2.input("reproducible-build.rs").crate_type("rlib").run(); + assert!(rfs::read(rust_lib_name("foo")) == rfs::read(rust_lib_name("reproducible_build"))) + }); +} + +#[track_caller] +fn diff_dir_test(crate_type: CrateType, remap_type: RemapType) { + run_in_tmpdir(|| { + let base_dir = cwd(); + 
rustc().input("reproducible-build-aux.rs").run(); + rfs::create_dir("test"); + rfs::copy("reproducible-build.rs", "test/reproducible-build.rs"); + let mut compiler1 = rustc(); + let mut compiler2 = rustc(); + match crate_type { + CrateType::Bin => { + compiler1.crate_type("bin"); + compiler2.crate_type("bin"); + } + CrateType::Rlib => { + compiler1.crate_type("rlib"); + compiler2.crate_type("rlib"); + } + } + match remap_type { + RemapType::Path => { + compiler1.arg(&format!("--remap-path-prefix={}=/b", cwd().display())); + compiler2 + .arg(format!("--remap-path-prefix={}=/b", base_dir.join("test").display())); + } + RemapType::Cwd { is_empty } => { + // FIXME(Oneirical): Building with crate type set to `bin` AND having -Cdebuginfo=2 + // (or `-g`, the shorthand form) enabled will cause reproducibility failures + // for multiple platforms. + // See https://github.com/rust-lang/rust/issues/89911 + // FIXME(#129117): Windows rlib + `-Cdebuginfo=2` + `-Z remap-cwd-prefix=.` seems + // to be unreproducible. + if !matches!(crate_type, CrateType::Bin) && !is_windows() { + compiler1.arg("-Cdebuginfo=2"); + compiler2.arg("-Cdebuginfo=2"); + } + if is_empty { + compiler1.arg("-Zremap-cwd-prefix="); + compiler2.arg("-Zremap-cwd-prefix="); + } else { + compiler1.arg("-Zremap-cwd-prefix=."); + compiler2.arg("-Zremap-cwd-prefix=."); + } + } + } + compiler1.input("reproducible-build.rs").run(); + match crate_type { + CrateType::Bin => { + rfs::rename(bin_name("reproducible-build"), bin_name("foo")); + } + CrateType::Rlib => { + rfs::rename(rust_lib_name("reproducible_build"), rust_lib_name("foo")); + } + } + std::env::set_current_dir("test").unwrap(); + compiler2 + .input("reproducible-build.rs") + .library_search_path(&base_dir) + .out_dir(&base_dir) + .run(); + std::env::set_current_dir(&base_dir).unwrap(); + match crate_type { + CrateType::Bin => { + assert!(rfs::read(bin_name("reproducible-build")) == rfs::read(bin_name("foo"))); + } + CrateType::Rlib => { + assert!( + rfs::read(rust_lib_name("foo")) + == rfs::read(rust_lib_name("reproducible_build")) + ); + } + } + }); +} + +enum SmokeFlag { + Debug, + Opt, +} + +enum PathsFlag { + Link, + Remap, +} + +enum CrateType { + Bin, + Rlib, +} + +enum RemapType { + Path, + Cwd { is_empty: bool }, +} diff --git a/tests/run-make/reset-codegen-1/rmake.rs b/tests/run-make/reset-codegen-1/rmake.rs index d19907a675e02..118b3a666ad86 100644 --- a/tests/run-make/reset-codegen-1/rmake.rs +++ b/tests/run-make/reset-codegen-1/rmake.rs @@ -9,7 +9,7 @@ use std::path::Path; -use run_make_support::{bin_name, rfs, rustc}; +use run_make_support::{bin_name, rustc}; fn compile(output_file: &str, emit: Option<&str>) { let mut rustc = rustc(); diff --git a/tests/run-make/rlib-format-packed-bundled-libs-3/rmake.rs b/tests/run-make/rlib-format-packed-bundled-libs-3/rmake.rs index d152047600f0c..67e839bec703d 100644 --- a/tests/run-make/rlib-format-packed-bundled-libs-3/rmake.rs +++ b/tests/run-make/rlib-format-packed-bundled-libs-3/rmake.rs @@ -6,7 +6,7 @@ // See https://github.com/rust-lang/rust/pull/105601 use run_make_support::{ - build_native_static_lib, is_msvc, llvm_ar, regex, rfs, rust_lib_name, rustc, static_lib_name, + build_native_static_lib, llvm_ar, regex, rfs, rust_lib_name, rustc, static_lib_name, }; //@ ignore-cross-compile diff --git a/tests/run-make/rlib-format-packed-bundled-libs/Makefile b/tests/run-make/rlib-format-packed-bundled-libs/Makefile deleted file mode 100644 index f454da67893d0..0000000000000 --- 
a/tests/run-make/rlib-format-packed-bundled-libs/Makefile +++ /dev/null @@ -1,39 +0,0 @@ -include ../tools.mk - -# ignore-cross-compile - -# Make sure rlib format with -Zpacked_bundled_libs is correct. - -# We're using the llvm-nm instead of the system nm to ensure it is compatible -# with the LLVM bitcode generated by rustc. -# Except on Windows where piping/IO redirection under MSYS2 is wonky with llvm-nm. -ifndef IS_WINDOWS -NM = "$(LLVM_BIN_DIR)"/llvm-nm -else -NM = nm -endif - -all: $(call NATIVE_STATICLIB,native_dep_1) $(call NATIVE_STATICLIB,native_dep_2) $(call NATIVE_STATICLIB,native_dep_3) - $(RUSTC) rust_dep_up.rs --crate-type=rlib -Zpacked_bundled_libs - $(NM) $(TMPDIR)/librust_dep_up.rlib | $(CGREP) -e "U.*native_f2" - $(NM) $(TMPDIR)/librust_dep_up.rlib | $(CGREP) -e "U.*native_f3" - $(NM) $(TMPDIR)/librust_dep_up.rlib | $(CGREP) -e "T.*rust_dep_up" - $(AR) t $(TMPDIR)/librust_dep_up.rlib | $(CGREP) "native_dep_2" - $(AR) t $(TMPDIR)/librust_dep_up.rlib | $(CGREP) "native_dep_3" - $(RUSTC) rust_dep_local.rs --extern rlib=$(TMPDIR)/librust_dep_up.rlib -Zpacked_bundled_libs --crate-type=rlib - $(NM) $(TMPDIR)/librust_dep_local.rlib | $(CGREP) -e "U.*native_f1" - $(NM) $(TMPDIR)/librust_dep_local.rlib | $(CGREP) -e "T.*rust_dep_local" - $(AR) t $(TMPDIR)/librust_dep_local.rlib | $(CGREP) "native_dep_1" - - # Make sure compiler doesn't use files, that it shouldn't know about. - rm $(TMPDIR)/*native_dep_* - - $(RUSTC) main.rs --extern lib=$(TMPDIR)/librust_dep_local.rlib -o $(TMPDIR)/main.exe -Zpacked_bundled_libs --print link-args | $(CGREP) -e "native_dep_1.*native_dep_2.*native_dep_3" - -ifndef IS_MSVC - $(NM) $(TMPDIR)/main.exe | $(CGREP) -e "T.*native_f1" - $(NM) $(TMPDIR)/main.exe | $(CGREP) -e "T.*native_f2" - $(NM) $(TMPDIR)/main.exe | $(CGREP) -e "T.*native_f3" - $(NM) $(TMPDIR)/main.exe | $(CGREP) -e "T.*rust_dep_local" - $(NM) $(TMPDIR)/main.exe | $(CGREP) -e "T.*rust_dep_up" -endif diff --git a/tests/run-make/rlib-format-packed-bundled-libs/rmake.rs b/tests/run-make/rlib-format-packed-bundled-libs/rmake.rs new file mode 100644 index 0000000000000..ff0438a6b72be --- /dev/null +++ b/tests/run-make/rlib-format-packed-bundled-libs/rmake.rs @@ -0,0 +1,84 @@ +// `-Z packed_bundled_libs` is an unstable rustc flag that makes the compiler +// only require a native library and no supplementary object files to compile. +// Output files compiled with this flag should still contain all expected symbols - +// that is what this test checks. 
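The `rust_dep_up.rs` and `rust_dep_local.rs` sources exercised below are not part of this diff. A hedged guess at the shape of one of them, consistent with the symbols the test asserts on (`native_f2`, `native_f3`, `rust_dep_up`):

```rust
// Hypothetical sketch of a dependency such as `rust_dep_up.rs`.
#[link(name = "native_dep_2", kind = "static")]
extern "C" {
    fn native_f2() -> i32;
}

#[link(name = "native_dep_3", kind = "static")]
extern "C" {
    fn native_f3() -> i32;
}

#[no_mangle]
pub extern "C" fn rust_dep_up() -> i32 {
    unsafe { native_f2() + native_f3() }
}
```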
+// See https://github.com/rust-lang/rust/pull/100101 + +//@ ignore-cross-compile +// Reason: cross-compilation fails to export native symbols + +use run_make_support::{ + bin_name, build_native_static_lib, cwd, filename_contains, is_msvc, llvm_ar, llvm_nm, rfs, + rust_lib_name, rustc, shallow_find_files, +}; + +fn main() { + build_native_static_lib("native_dep_1"); + build_native_static_lib("native_dep_2"); + build_native_static_lib("native_dep_3"); + rustc().input("rust_dep_up.rs").crate_type("rlib").arg("-Zpacked_bundled_libs").run(); + llvm_nm() + .input(rust_lib_name("rust_dep_up")) + .run() + .assert_stdout_contains_regex("U.*native_f2"); + llvm_nm() + .input(rust_lib_name("rust_dep_up")) + .run() + .assert_stdout_contains_regex("U.*native_f3"); + llvm_nm() + .input(rust_lib_name("rust_dep_up")) + .run() + .assert_stdout_contains_regex("T.*rust_dep_up"); + llvm_ar() + .table_of_contents() + .arg(rust_lib_name("rust_dep_up")) + .run() + .assert_stdout_contains("native_dep_2"); + llvm_ar() + .table_of_contents() + .arg(rust_lib_name("rust_dep_up")) + .run() + .assert_stdout_contains("native_dep_3"); + rustc() + .input("rust_dep_local.rs") + .extern_("rlib", rust_lib_name("rust_dep_up")) + .arg("-Zpacked_bundled_libs") + .crate_type("rlib") + .run(); + llvm_nm() + .input(rust_lib_name("rust_dep_local")) + .run() + .assert_stdout_contains_regex("U.*native_f1"); + llvm_nm() + .input(rust_lib_name("rust_dep_local")) + .run() + .assert_stdout_contains_regex("T.*rust_dep_local"); + llvm_ar() + .table_of_contents() + .arg(rust_lib_name("rust_dep_local")) + .run() + .assert_stdout_contains("native_dep_1"); + + // Ensure the compiler will not use files it should not know about. + for file in shallow_find_files(cwd(), |path| filename_contains(path, "native_dep_")) { + rfs::remove_file(file); + } + + rustc() + .input("main.rs") + .extern_("lib", rust_lib_name("rust_dep_local")) + .output(bin_name("main")) + .arg("-Zpacked_bundled_libs") + .print("link-args") + .run() + .assert_stdout_contains_regex("native_dep_1.*native_dep_2.*native_dep_3"); + + // The binary "main" will not contain any symbols on MSVC. + if !is_msvc() { + llvm_nm().input(bin_name("main")).run().assert_stdout_contains_regex("T.*native_f1"); + llvm_nm().input(bin_name("main")).run().assert_stdout_contains_regex("T.*native_f2"); + llvm_nm().input(bin_name("main")).run().assert_stdout_contains_regex("T.*native_f3"); + llvm_nm().input(bin_name("main")).run().assert_stdout_contains_regex("T.*rust_dep_local"); + llvm_nm().input(bin_name("main")).run().assert_stdout_contains_regex("T.*rust_dep_up"); + } +} diff --git a/tests/run-make/run-in-tmpdir-self-test/rmake.rs b/tests/run-make/run-in-tmpdir-self-test/rmake.rs index 83b99bfe86325..dba036f88669e 100644 --- a/tests/run-make/run-in-tmpdir-self-test/rmake.rs +++ b/tests/run-make/run-in-tmpdir-self-test/rmake.rs @@ -3,7 +3,7 @@ //! when returning from the closure. 
use std::fs; -use std::path::{Path, PathBuf}; +use std::path::PathBuf; use run_make_support::{cwd, run_in_tmpdir}; diff --git a/tests/run-make/rust-lld-by-default-nightly/rmake.rs b/tests/run-make/rust-lld-by-default-nightly/rmake.rs index 79424984c2139..02bbe8227f032 100644 --- a/tests/run-make/rust-lld-by-default-nightly/rmake.rs +++ b/tests/run-make/rust-lld-by-default-nightly/rmake.rs @@ -6,8 +6,6 @@ //@ ignore-stable //@ only-x86_64-unknown-linux-gnu -use std::process::Output; - use run_make_support::regex::Regex; use run_make_support::rustc; diff --git a/tests/run-make/rust-lld-compress-debug-sections/rmake.rs b/tests/run-make/rust-lld-compress-debug-sections/rmake.rs index df9691ccbcf39..ea4997fab8099 100644 --- a/tests/run-make/rust-lld-compress-debug-sections/rmake.rs +++ b/tests/run-make/rust-lld-compress-debug-sections/rmake.rs @@ -6,7 +6,7 @@ // FIXME: This test isn't comprehensive and isn't covering all possible combinations. -use run_make_support::{assert_contains, cmd, llvm_readobj, run_in_tmpdir, rustc}; +use run_make_support::{assert_contains, llvm_readobj, run_in_tmpdir, rustc}; fn check_compression(compression: &str, to_find: &str) { run_in_tmpdir(|| { diff --git a/tests/run-make/rust-lld-custom-target/rmake.rs b/tests/run-make/rust-lld-custom-target/rmake.rs index 17247125521ce..a6f7c33793afe 100644 --- a/tests/run-make/rust-lld-custom-target/rmake.rs +++ b/tests/run-make/rust-lld-custom-target/rmake.rs @@ -8,8 +8,6 @@ //@ needs-rust-lld //@ only-x86_64-unknown-linux-gnu -use std::process::Output; - use run_make_support::regex::Regex; use run_make_support::rustc; diff --git a/tests/run-make/rust-lld/rmake.rs b/tests/run-make/rust-lld/rmake.rs index d0bc19130d7cb..1f311af1ed591 100644 --- a/tests/run-make/rust-lld/rmake.rs +++ b/tests/run-make/rust-lld/rmake.rs @@ -4,8 +4,6 @@ //@ needs-rust-lld //@ ignore-s390x lld does not yet support s390x as target -use std::process::Output; - use run_make_support::regex::Regex; use run_make_support::{is_msvc, rustc}; diff --git a/tests/run-make/rustdoc-determinism/rmake.rs b/tests/run-make/rustdoc-determinism/rmake.rs index aa8090174d9c3..ce0885081783b 100644 --- a/tests/run-make/rustdoc-determinism/rmake.rs +++ b/tests/run-make/rustdoc-determinism/rmake.rs @@ -7,12 +7,12 @@ use run_make_support::{diff, rustdoc}; fn main() { let foo_first = Path::new("foo_first"); - rustdoc().input("foo.rs").output(&foo_first).run(); - rustdoc().input("bar.rs").output(&foo_first).run(); + rustdoc().input("foo.rs").out_dir(&foo_first).run(); + rustdoc().input("bar.rs").out_dir(&foo_first).run(); let bar_first = Path::new("bar_first"); - rustdoc().input("bar.rs").output(&bar_first).run(); - rustdoc().input("foo.rs").output(&bar_first).run(); + rustdoc().input("bar.rs").out_dir(&bar_first).run(); + rustdoc().input("foo.rs").out_dir(&bar_first).run(); diff() .expected_file(foo_first.join("search-index.js")) diff --git a/tests/run-make/rustdoc-io-error/rmake.rs b/tests/run-make/rustdoc-io-error/rmake.rs index d64c5106ffb67..31441d7ebc5c7 100644 --- a/tests/run-make/rustdoc-io-error/rmake.rs +++ b/tests/run-make/rustdoc-io-error/rmake.rs @@ -14,22 +14,20 @@ // `mkfs.ext4 -d`, as well as mounting a loop device for the rootfs. //@ ignore-windows - the `set_readonly` functions doesn't work on folders. 
-use std::fs; - -use run_make_support::{path, rustdoc}; +use run_make_support::{path, rfs, rustdoc}; fn main() { let out_dir = path("rustdoc-io-error"); - let output = fs::create_dir(&out_dir).unwrap(); - let mut permissions = fs::metadata(&out_dir).unwrap().permissions(); + rfs::create_dir(&out_dir); + let mut permissions = rfs::metadata(&out_dir).permissions(); let original_permissions = permissions.clone(); permissions.set_readonly(true); - fs::set_permissions(&out_dir, permissions).unwrap(); + rfs::set_permissions(&out_dir, permissions); - let output = rustdoc().input("foo.rs").output(&out_dir).env("RUST_BACKTRACE", "1").run_fail(); + let output = rustdoc().input("foo.rs").out_dir(&out_dir).env("RUST_BACKTRACE", "1").run_fail(); - fs::set_permissions(&out_dir, original_permissions).unwrap(); + rfs::set_permissions(&out_dir, original_permissions); output .assert_exit_code(1) diff --git a/tests/run-make/rustdoc-map-file/rmake.rs b/tests/run-make/rustdoc-map-file/rmake.rs index 08f9595ef9f6a..d7e3510fe31e0 100644 --- a/tests/run-make/rustdoc-map-file/rmake.rs +++ b/tests/run-make/rustdoc-map-file/rmake.rs @@ -1,13 +1,54 @@ -use run_make_support::{python_command, rustdoc}; +// This test ensures that all items from `foo` are correctly generated into the `redirect-map.json` +// file with `--generate-redirect-map` rustdoc option. + +use std::path::Path; + +use run_make_support::rfs::read_to_string; +use run_make_support::{path, rustdoc, serde_json}; fn main() { let out_dir = "out"; + let crate_name = "foo"; rustdoc() .input("foo.rs") + .crate_name(crate_name) .arg("-Zunstable-options") .arg("--generate-redirect-map") - .output(&out_dir) + .out_dir(&out_dir) .run(); - // FIXME (GuillaumeGomez): Port the python script to Rust as well. - python_command().arg("validate_json.py").arg(&out_dir).run(); + + let generated = read_to_string(path(out_dir).join(crate_name).join("redirect-map.json")); + let expected = read_to_string("expected.json"); + let generated: serde_json::Value = + serde_json::from_str(&generated).expect("failed to parse JSON"); + let expected: serde_json::Value = + serde_json::from_str(&expected).expect("failed to parse JSON"); + let expected = expected.as_object().unwrap(); + + let mut differences = Vec::new(); + for (key, expected_value) in expected.iter() { + match generated.get(key) { + Some(value) => { + if expected_value != value { + differences.push(format!( + "values for key `{key}` don't match: `{expected_value:?}` != `{value:?}`" + )); + } + } + None => differences.push(format!("missing key `{key}`")), + } + } + for (key, data) in generated.as_object().unwrap().iter() { + if !expected.contains_key(key) { + differences.push(format!("Extra data not expected: key: `{key}`, data: `{data}`")); + } + } + + if !differences.is_empty() { + eprintln!("Found differences in JSON files:"); + for diff in differences { + eprintln!("=> {diff}"); + } + panic!("Found differences in JSON files"); + } } diff --git a/tests/run-make/rustdoc-map-file/validate_json.py b/tests/run-make/rustdoc-map-file/validate_json.py deleted file mode 100755 index 912dea3791bbf..0000000000000 --- a/tests/run-make/rustdoc-map-file/validate_json.py +++ /dev/null @@ -1,41 +0,0 @@ -#!/usr/bin/env python - -import os -import sys -import json - - -def find_redirect_map_file(folder, errors): - for root, _dirs, files in os.walk(folder): - for name in files: - if not name.endswith("redirect-map.json"): - continue - with open(os.path.join(root, name)) as f: - data = json.load(f) - with open("expected.json") as f: - 
expected = json.load(f) - for key in expected: - if expected[key] != data.get(key): - errors.append("Expected `{}` for key `{}`, found: `{}`".format( - expected[key], key, data.get(key))) - else: - del data[key] - for key in data: - errors.append("Extra data not expected: key: `{}`, data: `{}`".format( - key, data[key])) - return True - return False - - -if len(sys.argv) != 2: - print("Expected doc directory to check!") - sys.exit(1) - -errors = [] -if not find_redirect_map_file(sys.argv[1], errors): - print("Didn't find the map file in `{}`...".format(sys.argv[1])) - sys.exit(1) -for err in errors: - print("=> {}".format(err)) -if len(errors) != 0: - sys.exit(1) diff --git a/tests/run-make/rustdoc-output-path/rmake.rs b/tests/run-make/rustdoc-output-path/rmake.rs index 3c1ccd3a06936..181239eac4dd3 100644 --- a/tests/run-make/rustdoc-output-path/rmake.rs +++ b/tests/run-make/rustdoc-output-path/rmake.rs @@ -6,6 +6,6 @@ use run_make_support::rustdoc; fn main() { let out_dir = Path::new("foo/bar/doc"); - rustdoc().input("foo.rs").output(&out_dir).run(); + rustdoc().input("foo.rs").out_dir(&out_dir).run(); assert!(out_dir.exists()); } diff --git a/tests/run-make/rustdoc-output-stdout/foo.rs b/tests/run-make/rustdoc-output-stdout/foo.rs new file mode 100644 index 0000000000000..4a835673a596b --- /dev/null +++ b/tests/run-make/rustdoc-output-stdout/foo.rs @@ -0,0 +1 @@ +pub struct Foo; diff --git a/tests/run-make/rustdoc-output-stdout/rmake.rs b/tests/run-make/rustdoc-output-stdout/rmake.rs new file mode 100644 index 0000000000000..dbc9892f3f59a --- /dev/null +++ b/tests/run-make/rustdoc-output-stdout/rmake.rs @@ -0,0 +1,25 @@ +// This test verifies that rustdoc `-o -` prints JSON on stdout and doesn't generate +// a JSON file. + +use std::path::PathBuf; + +use run_make_support::path_helpers::{cwd, has_extension, read_dir_entries_recursive}; +use run_make_support::rustdoc; + +fn main() { + // First we check that we generate the JSON in the stdout. + rustdoc() + .input("foo.rs") + .out_dir("-") + .arg("-Zunstable-options") + .output_format("json") + .run() + .assert_stdout_contains("{\""); + + // Then we check it didn't generate any JSON file. 
+ read_dir_entries_recursive(cwd(), |path| { + if path.is_file() && has_extension(path, "json") { + panic!("Found a JSON file {path:?}"); + } + }); +} diff --git a/tests/run-make/rustdoc-scrape-examples-macros/rmake.rs b/tests/run-make/rustdoc-scrape-examples-macros/rmake.rs index b77df7adc8d9e..546a0685b4ee5 100644 --- a/tests/run-make/rustdoc-scrape-examples-macros/rmake.rs +++ b/tests/run-make/rustdoc-scrape-examples-macros/rmake.rs @@ -35,7 +35,7 @@ fn main() { .input("examples/ex.rs") .crate_name("ex") .crate_type("bin") - .output(&out_dir) + .out_dir(&out_dir) .extern_(crate_name, rust_lib_name(crate_name)) .extern_(proc_crate_name, dylib_name.trim()) .arg("-Zunstable-options") @@ -49,7 +49,7 @@ fn main() { .input("src/lib.rs") .crate_name(crate_name) .crate_type("lib") - .output(&out_dir) + .out_dir(&out_dir) .arg("-Zunstable-options") .arg("--with-examples") .arg(&ex_dir) diff --git a/tests/run-make/rustdoc-scrape-examples-remap/scrape.rs b/tests/run-make/rustdoc-scrape-examples-remap/scrape.rs index 4badc68845e96..c4d7814c3c831 100644 --- a/tests/run-make/rustdoc-scrape-examples-remap/scrape.rs +++ b/tests/run-make/rustdoc-scrape-examples-remap/scrape.rs @@ -1,6 +1,6 @@ use std::path::Path; -use run_make_support::{htmldocck, rfs, rustc, rustdoc, source_root}; +use run_make_support::{htmldocck, rfs, rustc, rustdoc}; pub fn scrape(extra_args: &[&str]) { let out_dir = Path::new("rustdoc"); @@ -20,7 +20,7 @@ pub fn scrape(extra_args: &[&str]) { .input(&dep) .crate_name(&dep_stem) .crate_type("bin") - .output(&out_dir) + .out_dir(&out_dir) .extern_(crate_name, format!("lib{crate_name}.rmeta")) .arg("-Zunstable-options") .arg("--scrape-examples-output-path") @@ -35,7 +35,7 @@ pub fn scrape(extra_args: &[&str]) { let mut rustdoc = rustdoc(); rustdoc .input("src/lib.rs") - .output(&out_dir) + .out_dir(&out_dir) .crate_name(crate_name) .crate_type("lib") .arg("-Zunstable-options"); diff --git a/tests/run-make/rustdoc-target-spec-json-path/rmake.rs b/tests/run-make/rustdoc-target-spec-json-path/rmake.rs index 3246fc5650646..fe9587f502228 100644 --- a/tests/run-make/rustdoc-target-spec-json-path/rmake.rs +++ b/tests/run-make/rustdoc-target-spec-json-path/rmake.rs @@ -7,7 +7,7 @@ fn main() { rustc().crate_type("lib").input("dummy_core.rs").target("target.json").run(); rustdoc() .input("my_crate.rs") - .output(out_dir) + .out_dir(out_dir) .library_search_path(cwd()) .target("target.json") .run(); diff --git a/tests/run-make/rustdoc-themes/rmake.rs b/tests/run-make/rustdoc-themes/rmake.rs index 8a961beb9f742..4577e47d47e72 100644 --- a/tests/run-make/rustdoc-themes/rmake.rs +++ b/tests/run-make/rustdoc-themes/rmake.rs @@ -27,6 +27,6 @@ fn main() { rfs::create_dir_all(&out_dir); rfs::write(&test_css, test_content); - rustdoc().output(&out_dir).input("foo.rs").arg("--theme").arg(&test_css).run(); + rustdoc().out_dir(&out_dir).input("foo.rs").arg("--theme").arg(&test_css).run(); htmldocck().arg(out_dir).arg("foo.rs").run(); } diff --git a/tests/run-make/rustdoc-with-out-dir-option/rmake.rs b/tests/run-make/rustdoc-with-out-dir-option/rmake.rs index ded89c9ae7913..a82a1965a9c9d 100644 --- a/tests/run-make/rustdoc-with-out-dir-option/rmake.rs +++ b/tests/run-make/rustdoc-with-out-dir-option/rmake.rs @@ -2,6 +2,6 @@ use run_make_support::{htmldocck, rustdoc}; fn main() { let out_dir = "rustdoc"; - rustdoc().input("src/lib.rs").crate_name("foobar").crate_type("lib").output(&out_dir).run(); + rustdoc().input("src/lib.rs").crate_name("foobar").crate_type("lib").out_dir(&out_dir).run(); 
htmldocck().arg(out_dir).arg("src/lib.rs").run(); } diff --git a/tests/run-make/sepcomp-cci-copies/rmake.rs b/tests/run-make/sepcomp-cci-copies/rmake.rs index e244f54658073..a66cc2872b434 100644 --- a/tests/run-make/sepcomp-cci-copies/rmake.rs +++ b/tests/run-make/sepcomp-cci-copies/rmake.rs @@ -4,10 +4,7 @@ // created for each source module (see `rustc_const_eval::monomorphize::partitioning`). // See https://github.com/rust-lang/rust/pull/16367 -use run_make_support::{ - count_regex_matches_in_files_with_extension, cwd, has_extension, regex, rfs, rustc, - shallow_find_files, -}; +use run_make_support::{count_regex_matches_in_files_with_extension, regex, rustc}; fn main() { rustc().input("cci_lib.rs").run(); diff --git a/tests/run-make/sepcomp-inlining/rmake.rs b/tests/run-make/sepcomp-inlining/rmake.rs index b7a6bed05b846..ea4a4d210cc3e 100644 --- a/tests/run-make/sepcomp-inlining/rmake.rs +++ b/tests/run-make/sepcomp-inlining/rmake.rs @@ -5,10 +5,7 @@ // in only one compilation unit. // See https://github.com/rust-lang/rust/pull/16367 -use run_make_support::{ - count_regex_matches_in_files_with_extension, cwd, has_extension, regex, rfs, rustc, - shallow_find_files, -}; +use run_make_support::{count_regex_matches_in_files_with_extension, regex, rustc}; fn main() { rustc().input("foo.rs").emit("llvm-ir").codegen_units(3).arg("-Zinline-in-all-cgus").run(); diff --git a/tests/run-make/sepcomp-separate/rmake.rs b/tests/run-make/sepcomp-separate/rmake.rs index 90017572c4c18..49958044a612a 100644 --- a/tests/run-make/sepcomp-separate/rmake.rs +++ b/tests/run-make/sepcomp-separate/rmake.rs @@ -3,10 +3,7 @@ // wind up in three different compilation units. // See https://github.com/rust-lang/rust/pull/16367 -use run_make_support::{ - count_regex_matches_in_files_with_extension, cwd, has_extension, regex, rfs, rustc, - shallow_find_files, -}; +use run_make_support::{count_regex_matches_in_files_with_extension, regex, rustc}; fn main() { rustc().input("foo.rs").emit("llvm-ir").codegen_units(3).run(); diff --git a/tests/run-make/staticlib-thin-archive/bin.rs b/tests/run-make/staticlib-thin-archive/bin.rs new file mode 100644 index 0000000000000..97a2751f20bcc --- /dev/null +++ b/tests/run-make/staticlib-thin-archive/bin.rs @@ -0,0 +1,5 @@ +fn main() { + unsafe { + rust_lib::simple_fn(); + } +} diff --git a/tests/run-make/staticlib-thin-archive/rmake.rs b/tests/run-make/staticlib-thin-archive/rmake.rs new file mode 100644 index 0000000000000..955c50da2011c --- /dev/null +++ b/tests/run-make/staticlib-thin-archive/rmake.rs @@ -0,0 +1,23 @@ +// Regression test for https://github.com/rust-lang/rust/issues/107407, which +// checks that rustc can read thin archives. Before the object crate added thin +// archive support, rustc would add the emitted object files to the staticlib; +// after the object crate added thin archive support, it would crash the +// compiler due to a missing special case for thin archive members.
+use run_make_support::{llvm_ar, path, rfs, rust_lib_name, rustc, static_lib_name}; + +fn main() { + rfs::create_dir("archive"); + + // Build a thin archive + rustc().input("simple_obj.rs").emit("obj").output("archive/simple_obj.o").run(); + llvm_ar() + .obj_to_thin_ar() + .output_input(path("archive").join(static_lib_name("thin_archive")), "archive/simple_obj.o") + .run(); + + // Build an rlib which includes the members of this thin archive + rustc().input("rust_lib.rs").library_search_path("archive").run(); + + // Build a binary which requires a symbol from the thin archive + rustc().input("bin.rs").extern_("rust_lib", rust_lib_name("rust_lib")).run(); +} diff --git a/tests/run-make/staticlib-thin-archive/rust_lib.rs b/tests/run-make/staticlib-thin-archive/rust_lib.rs new file mode 100644 index 0000000000000..c76b0f2543376 --- /dev/null +++ b/tests/run-make/staticlib-thin-archive/rust_lib.rs @@ -0,0 +1,6 @@ +#![crate_type = "rlib"] + +#[link(name = "thin_archive", kind = "static")] +extern "C" { + pub fn simple_fn(); +} diff --git a/tests/run-make/staticlib-thin-archive/simple_obj.rs b/tests/run-make/staticlib-thin-archive/simple_obj.rs new file mode 100644 index 0000000000000..a120c9b3e676b --- /dev/null +++ b/tests/run-make/staticlib-thin-archive/simple_obj.rs @@ -0,0 +1,4 @@ +#![crate_type = "staticlib"] + +#[no_mangle] +extern "C" fn simple_fn() {} diff --git a/tests/run-make/sysroot-crates-are-unstable/Makefile b/tests/run-make/sysroot-crates-are-unstable/Makefile deleted file mode 100644 index 1e267fb9576ba..0000000000000 --- a/tests/run-make/sysroot-crates-are-unstable/Makefile +++ /dev/null @@ -1,2 +0,0 @@ -all: - '$(PYTHON)' test.py diff --git a/tests/run-make/sysroot-crates-are-unstable/rmake.rs b/tests/run-make/sysroot-crates-are-unstable/rmake.rs new file mode 100644 index 0000000000000..2240d87237b1a --- /dev/null +++ b/tests/run-make/sysroot-crates-are-unstable/rmake.rs @@ -0,0 +1,102 @@ +// Check that crates in the sysroot are treated as unstable, unless they are +// on a list of known-stable sysroot crates. + +use std::path::{Path, PathBuf}; +use std::str; + +use run_make_support::{rfs, rustc, target}; + +fn is_stable_crate(name: &str) -> bool { + matches!(name, "std" | "alloc" | "core" | "proc_macro") +} + +fn main() { + for cr in get_unstable_sysroot_crates() { + check_crate_is_unstable(&cr); + } + println!("Done"); +} + +#[derive(Debug)] +struct Crate { + name: String, + path: PathBuf, +} + +fn check_crate_is_unstable(cr: &Crate) { + let Crate { name, path } = cr; + + print!("- Verifying that sysroot crate '{name}' is an unstable crate ..."); + + // Trying to use this crate from a user program should fail. + let output = rustc() + .crate_type("rlib") + .target(target()) + .extern_(name, path) + .input("-") + .stdin(format!("extern crate {name};")) + .run_fail(); + + // Make sure it failed for the intended reason, not some other reason. + // (The actual feature required varies between crates.) + output.assert_stderr_contains("use of unstable library feature"); + + println!(" OK"); +} + +fn get_unstable_sysroot_crates() -> Vec<Crate> { + let sysroot = PathBuf::from(rustc().print("sysroot").run().stdout_utf8().trim()); + let sysroot_libs_dir = sysroot.join("lib").join("rustlib").join(target()).join("lib"); + println!("Sysroot libs dir: {sysroot_libs_dir:?}"); + + // Generate a list of all library crates in the sysroot.
+ let sysroot_crates = get_all_crates_in_dir(&sysroot_libs_dir); + println!( + "Found {} sysroot crates: {:?}", + sysroot_crates.len(), + sysroot_crates.iter().map(|cr| &cr.name).collect::<Vec<_>>() + ); + + // Self-check: If we didn't find `core`, we probably checked the wrong directory. + assert!( + sysroot_crates.iter().any(|cr| cr.name == "core"), + "Couldn't find `core` in {sysroot_libs_dir:?}" + ); + + let unstable_sysroot_crates = + sysroot_crates.into_iter().filter(|cr| !is_stable_crate(&cr.name)).collect::<Vec<_>>(); + // Self-check: There should be at least one unstable crate in the directory. + assert!( + !unstable_sysroot_crates.is_empty(), + "Couldn't find any unstable crates in {sysroot_libs_dir:?}" + ); + unstable_sysroot_crates +} + +fn get_all_crates_in_dir(libs_dir: &Path) -> Vec<Crate> { + let mut libs = vec![]; + rfs::read_dir_entries(libs_dir, |path| { + if !path.is_file() { + return; + } + if let Some(name) = crate_name_from_path(path) { + libs.push(Crate { name, path: path.to_owned() }); + } + }); + libs.sort_by(|a, b| a.name.cmp(&b.name)); + libs +} + +/// Treat a file as a crate if its name begins with `lib` and ends with `.rlib`. +/// The crate name is the part before the first hyphen (if any). +fn crate_name_from_path(path: &Path) -> Option<String> { + let name = path + .file_name()? + .to_str()? + .strip_prefix("lib")? + .strip_suffix(".rlib")? + .split('-') + .next() + .expect("split always yields at least one string"); + Some(name.to_owned()) +} diff --git a/tests/run-make/sysroot-crates-are-unstable/test.py b/tests/run-make/sysroot-crates-are-unstable/test.py deleted file mode 100644 index 45cfdd195b4e2..0000000000000 --- a/tests/run-make/sysroot-crates-are-unstable/test.py +++ /dev/null @@ -1,75 +0,0 @@ -import sys -import os -from os import listdir -from os.path import isfile, join -from subprocess import PIPE, Popen - - -# This is n list of files which are stable crates or simply are not crates, -we don't check for the instability of these crates as they're all stable! -STABLE_CRATES = ['std', 'alloc', 'core', 'proc_macro', - 'rsbegin.o', 'rsend.o', 'dllcrt2.o', 'crt2.o', 'clang_rt'] - - -def convert_to_string(s): - if s.__class__.__name__ == 'bytes': - return s.decode('utf-8') - return s - - -def set_ld_lib_path(): - var = os.environ.get("LD_LIB_PATH_ENVVAR") - rpath = os.environ.get("HOST_RPATH_DIR") - if var and rpath: - path = os.environ.get(var) - if path: - os.environ[var] = rpath + os.pathsep + path - else: - os.environ[var] = rpath - - -def exec_command(command, to_input=None): - child = None - if to_input is None: - child = Popen(command, stdout=PIPE, stderr=PIPE) - else: - child = Popen(command, stdout=PIPE, stderr=PIPE, stdin=PIPE) - stdout, stderr = child.communicate(input=to_input) - return (convert_to_string(stdout), convert_to_string(stderr)) - - -def check_lib(lib): - if lib['name'] in STABLE_CRATES: - return True - print('verifying if {} is an unstable crate'.format(lib['name'])) - stdout, stderr = exec_command([os.environ['RUSTC'], '-', '--crate-type', 'rlib', - '--target', os.environ['TARGET'], - '--extern', '{}={}'.format(lib['name'], lib['path'])], - to_input=('extern crate {};'.format(lib['name'])).encode('utf-8')) - if 'use of unstable library feature' not in '{}{}'.format(stdout, stderr): - print('crate {} "{}" is not unstable'.format(lib['name'], lib['path'])) - print('{}{}'.format(stdout, stderr)) - print('') - return False - return True - -# Generate a list of all crates in the sysroot.
To do this we list all files in -# rustc's sysroot, look at the filename, strip everything after the `-`, and -# strip the leading `lib` (if present) -def get_all_libs(dir_path): - return [{ 'path': join(dir_path, f), 'name': f[3:].split('-')[0] } - for f in listdir(dir_path) - if isfile(join(dir_path, f)) and f.endswith('.rlib') and f not in STABLE_CRATES] - - -set_ld_lib_path() -sysroot = exec_command([os.environ['RUSTC'], '--print', 'sysroot'])[0].replace('\n', '') -assert sysroot, "Could not read the rustc sysroot!" -libs = get_all_libs(join(sysroot, 'lib/rustlib/{}/lib'.format(os.environ['TARGET']))) - -ret = 0 -for lib in libs: - if not check_lib(lib): - # We continue so users can see all the not unstable crates. - ret = 1 -sys.exit(ret) diff --git a/tests/rustdoc-gui/code-example-buttons.goml b/tests/rustdoc-gui/code-example-buttons.goml index 57ea2970072e6..4f037ec79f5e2 100644 --- a/tests/rustdoc-gui/code-example-buttons.goml +++ b/tests/rustdoc-gui/code-example-buttons.goml @@ -1,5 +1,6 @@ // This test ensures that code blocks buttons are displayed on hover and when you click on them. go-to: "file://" + |DOC_PATH| + "/test_docs/fn.foo.html" +include: "utils.goml" // First we check we "hover". move-cursor-to: ".example-wrap" @@ -19,3 +20,77 @@ click: ".example-wrap" move-cursor-to: ".search-input" assert-count: (".example-wrap:not(:hover) .button-holder.keep-visible", 0) assert-css: (".example-wrap .copy-button", { "visibility": "hidden" }) + +// Clicking on the "copy code" button shouldn't make the buttons stick. +click: ".example-wrap .copy-button" +move-cursor-to: ".search-input" +assert-count: (".example-wrap:not(:hover) .button-holder.keep-visible", 0) +assert-css: (".example-wrap .copy-button", { "visibility": "hidden" }) + +define-function: ( + "check-buttons", + [theme, background, filter, filter_hover], + block { + call-function: ("switch-theme", {"theme": |theme|}) + + assert-css: (".example-wrap .test-arrow", {"visibility": "hidden"}) + assert-css: (".example-wrap .copy-button", {"visibility": "hidden"}) + + move-cursor-to: ".example-wrap" + assert-css: (".example-wrap .test-arrow", { + "visibility": "visible", + "background-color": |background|, + "border-radius": "2px", + }) + assert-css: (".example-wrap .test-arrow::before", { + "filter": |filter|, + }) + assert-css: (".example-wrap .copy-button", { + "visibility": "visible", + "background-color": |background|, + "border-radius": "2px", + }) + assert-css: (".example-wrap .copy-button::before", { + "filter": |filter|, + }) + + move-cursor-to: ".example-wrap .test-arrow" + assert-css: (".example-wrap .test-arrow:hover", { + "visibility": "visible", + "background-color": |background|, + "border-radius": "2px", + }) + assert-css: (".example-wrap .test-arrow:hover::before", { + "filter": |filter_hover|, + }) + + move-cursor-to: ".example-wrap .copy-button" + assert-css: (".example-wrap .copy-button:hover", { + "visibility": "visible", + "background-color": |background|, + "border-radius": "2px", + }) + assert-css: (".example-wrap .copy-button:hover::before", { + "filter": |filter_hover|, + }) + }, +) + +call-function: ("check-buttons",{ + "theme": "ayu", + "background": "#0f1419", + "filter": "invert(0.7)", + "filter_hover": "invert(1)", +}) +call-function: ("check-buttons",{ + "theme": "dark", + "background": "#353535", + "filter": "invert(0.5)", + "filter_hover": "invert(0.65)", +}) +call-function: ("check-buttons",{ + "theme": "light", + "background": "#fff", + "filter": "invert(0.5)", + "filter_hover": "invert(0.35)", 
+}) diff --git a/tests/rustdoc-gui/copy-code.goml b/tests/rustdoc-gui/copy-code.goml index 72a5bece1758c..9cc717bc67a3d 100644 --- a/tests/rustdoc-gui/copy-code.goml +++ b/tests/rustdoc-gui/copy-code.goml @@ -24,11 +24,11 @@ define-function: ( ) call-function: ("check-copy-button", {}) -// Checking that the run button and the copy button have the same height. +// Checking that the run button and the copy button have the same height and same width. compare-elements-size: ( ".example-wrap:nth-of-type(1) .test-arrow", ".example-wrap:nth-of-type(1) .copy-button", - ["height"], + ["height", "width"], ) // ... and the same y position. compare-elements-position: ( diff --git a/tests/rustdoc-gui/run-on-hover.goml b/tests/rustdoc-gui/run-on-hover.goml deleted file mode 100644 index b62da79b780e9..0000000000000 --- a/tests/rustdoc-gui/run-on-hover.goml +++ /dev/null @@ -1,54 +0,0 @@ -// Example code blocks sometimes have a "Run" button to run them on the -// Playground. That button is hidden until the user hovers over the code block. -// This test checks that it is hidden, and that it shows on hover. It also -// checks for its color. -include: "utils.goml" -go-to: "file://" + |DOC_PATH| + "/test_docs/fn.foo.html" -show-text: true - -define-function: ( - "check-run-button", - [theme, color, background, hover_color, hover_background], - block { - call-function: ("switch-theme", {"theme": |theme|}) - assert-css: (".test-arrow", {"visibility": "hidden"}) - move-cursor-to: ".example-wrap" - assert-css: (".test-arrow", { - "visibility": "visible", - "color": |color|, - "background-color": |background|, - "font-size": "16px", - "border-radius": "2px", - }) - move-cursor-to: ".test-arrow" - assert-css: (".test-arrow:hover", { - "visibility": "visible", - "color": |hover_color|, - "background-color": |hover_background|, - "font-size": "16px", - "border-radius": "2px", - }) - }, -) - -call-function: ("check-run-button", { - "theme": "ayu", - "color": "#788797", - "background": "rgba(57, 175, 215, 0.09)", - "hover_color": "#c5c5c5", - "hover_background": "rgba(57, 175, 215, 0.37)", -}) -call-function: ("check-run-button", { - "theme": "dark", - "color": "#dedede", - "background": "rgba(78, 139, 202, 0.2)", - "hover_color": "#dedede", - "hover_background": "rgb(78, 139, 202)", -}) -call-function: ("check-run-button", { - "theme": "light", - "color": "#f5f5f5", - "background": "rgba(78, 139, 202, 0.2)", - "hover_color": "#f5f5f5", - "hover_background": "rgb(78, 139, 202)", -}) diff --git a/tests/rustdoc-ui/2024-doctests-checks.rs b/tests/rustdoc-ui/2024-doctests-checks.rs new file mode 100644 index 0000000000000..464cf5b200df0 --- /dev/null +++ b/tests/rustdoc-ui/2024-doctests-checks.rs @@ -0,0 +1,27 @@ +//@ check-pass +//@ compile-flags: --test --test-args=--test-threads=1 -Zunstable-options --edition 2024 +//@ normalize-stdout-test: "tests/rustdoc-ui" -> "$$DIR" +//@ normalize-stdout-test: "finished in \d+\.\d+s" -> "finished in $$TIME" +//@ normalize-stdout-test: ".rs:\d+:\d+" -> ".rs:$$LINE:$$COL" + +/// ``` +/// let x = 12; +/// ``` +/// +/// This one should not be a merged doctest (because of `$crate`). The output +/// will confirm it by displaying both merged and standalone doctest passes. +/// +/// ``` +/// macro_rules! 
bla { +/// () => {{ +/// $crate::foo(); +/// }} +/// } +/// +/// fn foo() {} +/// +/// fn main() { +/// bla!(); +/// } +/// ``` +pub struct Foo; diff --git a/tests/rustdoc-ui/2024-doctests-checks.stdout b/tests/rustdoc-ui/2024-doctests-checks.stdout new file mode 100644 index 0000000000000..d1064084a8564 --- /dev/null +++ b/tests/rustdoc-ui/2024-doctests-checks.stdout @@ -0,0 +1,12 @@ + +running 1 test +test $DIR/2024-doctests-checks.rs - Foo (line 7) ... ok + +test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in $TIME + + +running 1 test +test $DIR/2024-doctests-checks.rs - Foo (line 14) ... ok + +test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in $TIME + diff --git a/tests/rustdoc-ui/2024-doctests-crate-attribute.rs b/tests/rustdoc-ui/2024-doctests-crate-attribute.rs new file mode 100644 index 0000000000000..4984fdfe1949a --- /dev/null +++ b/tests/rustdoc-ui/2024-doctests-crate-attribute.rs @@ -0,0 +1,22 @@ +//@ check-pass +//@ compile-flags: --test --test-args=--test-threads=1 -Zunstable-options --edition 2024 +//@ normalize-stdout-test: "tests/rustdoc-ui" -> "$$DIR" +//@ normalize-stdout-test: "finished in \d+\.\d+s" -> "finished in $$TIME" +//@ normalize-stdout-test: ".rs:\d+:\d+" -> ".rs:$$LINE:$$COL" + +/// This doctest is used to ensure that if a crate attribute is present, +/// it will not be part of the merged doctests. +/// +/// ``` +/// #![doc(html_playground_url = "foo")] +/// +/// pub struct Bar; +/// ``` +/// +/// This one will allow us to confirm that the doctest above will be a +/// standalone one (there will be two separate doctests passes). +/// +/// ``` +/// let x = 12; +/// ``` +pub struct Foo; diff --git a/tests/rustdoc-ui/2024-doctests-crate-attribute.stdout b/tests/rustdoc-ui/2024-doctests-crate-attribute.stdout new file mode 100644 index 0000000000000..29702ce8929bd --- /dev/null +++ b/tests/rustdoc-ui/2024-doctests-crate-attribute.stdout @@ -0,0 +1,12 @@ + +running 1 test +test $DIR/2024-doctests-crate-attribute.rs - Foo (line 19) ... ok + +test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in $TIME + + +running 1 test +test $DIR/2024-doctests-crate-attribute.rs - Foo (line 10) ... ok + +test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in $TIME + diff --git a/tests/rustdoc-ui/doctest/failed-doctest-should-panic-2021.rs b/tests/rustdoc-ui/doctest/failed-doctest-should-panic-2021.rs new file mode 100644 index 0000000000000..4fe513b406691 --- /dev/null +++ b/tests/rustdoc-ui/doctest/failed-doctest-should-panic-2021.rs @@ -0,0 +1,12 @@ +// FIXME: if/when the output of the test harness can be tested on its own, this test should be +// adapted to use that, and that normalize line can go away + +//@ compile-flags:--test --edition 2021 +//@ normalize-stdout-test: "tests/rustdoc-ui/doctest" -> "$$DIR" +//@ normalize-stdout-test: "finished in \d+\.\d+s" -> "finished in $$TIME" +//@ failure-status: 101 + +/// ```should_panic +/// println!("Hello, world!"); +/// ``` +pub struct Foo; diff --git a/tests/rustdoc-ui/doctest/failed-doctest-should-panic-2021.stdout b/tests/rustdoc-ui/doctest/failed-doctest-should-panic-2021.stdout new file mode 100644 index 0000000000000..63d987de8a9fa --- /dev/null +++ b/tests/rustdoc-ui/doctest/failed-doctest-should-panic-2021.stdout @@ -0,0 +1,14 @@ + +running 1 test +test $DIR/failed-doctest-should-panic-2021.rs - Foo (line 9) ... 
FAILED + +failures: + +---- $DIR/failed-doctest-should-panic-2021.rs - Foo (line 9) stdout ---- +Test executable succeeded, but it's marked `should_panic`. + +failures: + $DIR/failed-doctest-should-panic-2021.rs - Foo (line 9) + +test result: FAILED. 0 passed; 1 failed; 0 ignored; 0 measured; 0 filtered out; finished in $TIME + diff --git a/tests/rustdoc-ui/doctest/failed-doctest-should-panic.rs b/tests/rustdoc-ui/doctest/failed-doctest-should-panic.rs index b24687993e5dc..4018e37105ff1 100644 --- a/tests/rustdoc-ui/doctest/failed-doctest-should-panic.rs +++ b/tests/rustdoc-ui/doctest/failed-doctest-should-panic.rs @@ -1,7 +1,7 @@ // FIXME: if/when the output of the test harness can be tested on its own, this test should be // adapted to use that, and that normalize line can go away -//@ compile-flags:--test +//@ compile-flags:--test -Z unstable-options --edition 2024 //@ normalize-stdout-test: "tests/rustdoc-ui/doctest" -> "$$DIR" //@ normalize-stdout-test: "finished in \d+\.\d+s" -> "finished in $$TIME" //@ failure-status: 101 diff --git a/tests/rustdoc-ui/doctest/failed-doctest-should-panic.stdout b/tests/rustdoc-ui/doctest/failed-doctest-should-panic.stdout index 57a20092a5d6c..cb3456e087ebe 100644 --- a/tests/rustdoc-ui/doctest/failed-doctest-should-panic.stdout +++ b/tests/rustdoc-ui/doctest/failed-doctest-should-panic.stdout @@ -1,11 +1,11 @@ running 1 test -test $DIR/failed-doctest-should-panic.rs - Foo (line 9) ... FAILED +test $DIR/failed-doctest-should-panic.rs - Foo (line 9) - should panic ... FAILED failures: ---- $DIR/failed-doctest-should-panic.rs - Foo (line 9) stdout ---- -Test executable succeeded, but it's marked `should_panic`. +note: test did not panic as expected failures: $DIR/failed-doctest-should-panic.rs - Foo (line 9) diff --git a/tests/rustdoc-ui/doctest/merged-ignore-no_run.rs b/tests/rustdoc-ui/doctest/merged-ignore-no_run.rs new file mode 100644 index 0000000000000..4c21d54295106 --- /dev/null +++ b/tests/rustdoc-ui/doctest/merged-ignore-no_run.rs @@ -0,0 +1,14 @@ +//@ compile-flags:--test --test-args=--test-threads=1 -Zunstable-options --edition 2024 +//@ normalize-stdout-test: "tests/rustdoc-ui/doctest" -> "$$DIR" +//@ normalize-stdout-test: "finished in \d+\.\d+s" -> "finished in $$TIME" +//@ check-pass + +/// ```ignore (test) +/// let x = 12; +/// ``` +pub fn ignored() {} + +/// ```no_run +/// panic!("blob"); +/// ``` +pub fn no_run() {} diff --git a/tests/rustdoc-ui/doctest/merged-ignore-no_run.stdout b/tests/rustdoc-ui/doctest/merged-ignore-no_run.stdout new file mode 100644 index 0000000000000..f2cb1e7e72f70 --- /dev/null +++ b/tests/rustdoc-ui/doctest/merged-ignore-no_run.stdout @@ -0,0 +1,7 @@ + +running 2 tests +test $DIR/merged-ignore-no_run.rs - ignored (line 6) ... ignored +test $DIR/merged-ignore-no_run.rs - no_run (line 11) - compile ... ok + +test result: ok. 
1 passed; 0 failed; 1 ignored; 0 measured; 0 filtered out; finished in $TIME + diff --git a/tests/rustdoc-ui/doctest/wrong-ast-2024.rs b/tests/rustdoc-ui/doctest/wrong-ast-2024.rs new file mode 100644 index 0000000000000..7b4fa8fd2c9b9 --- /dev/null +++ b/tests/rustdoc-ui/doctest/wrong-ast-2024.rs @@ -0,0 +1,20 @@ +//@ compile-flags:--test --test-args=--test-threads=1 -Zunstable-options --edition 2024 +//@ normalize-stdout-test: "tests/rustdoc-ui/doctest" -> "$$DIR" +//@ normalize-stdout-test: "finished in \d+\.\d+s" -> "finished in $$TIME" +//@ normalize-stdout-test: ".rs:\d+:\d+" -> ".rs:$$LINE:$$COL" +//@ failure-status: 101 + +/// ``` +/// /* plop +/// ``` +pub fn one() {} + +/// ``` +/// } mod __doctest_1 { fn main() { +/// ``` +pub fn two() {} + +/// ```should_panic +/// panic!() +/// ``` +pub fn three() {} diff --git a/tests/rustdoc-ui/doctest/wrong-ast-2024.stdout b/tests/rustdoc-ui/doctest/wrong-ast-2024.stdout new file mode 100644 index 0000000000000..22c8ce468fd7c --- /dev/null +++ b/tests/rustdoc-ui/doctest/wrong-ast-2024.stdout @@ -0,0 +1,41 @@ + +running 1 test +test $DIR/wrong-ast-2024.rs - three (line 17) - should panic ... ok + +test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in $TIME + + +running 2 tests +test $DIR/wrong-ast-2024.rs - one (line 7) ... FAILED +test $DIR/wrong-ast-2024.rs - two (line 12) ... FAILED + +failures: + +---- $DIR/wrong-ast-2024.rs - one (line 7) stdout ---- +error[E0758]: unterminated block comment + --> $DIR/wrong-ast-2024.rs:$LINE:$COL + | +LL | /* plop + | ^^^^^^^ + +error: aborting due to 1 previous error + +For more information about this error, try `rustc --explain E0758`. +Couldn't compile the test. +---- $DIR/wrong-ast-2024.rs - two (line 12) stdout ---- +error: unexpected closing delimiter: `}` + --> $DIR/wrong-ast-2024.rs:$LINE:$COL + | +LL | } mod __doctest_1 { fn main() { + | ^ unexpected closing delimiter + +error: aborting due to 1 previous error + +Couldn't compile the test. + +failures: + $DIR/wrong-ast-2024.rs - one (line 7) + $DIR/wrong-ast-2024.rs - two (line 12) + +test result: FAILED. 0 passed; 2 failed; 0 ignored; 0 measured; 0 filtered out; finished in $TIME + diff --git a/tests/rustdoc-ui/doctest/wrong-ast.rs b/tests/rustdoc-ui/doctest/wrong-ast.rs new file mode 100644 index 0000000000000..92286b33dcfb3 --- /dev/null +++ b/tests/rustdoc-ui/doctest/wrong-ast.rs @@ -0,0 +1,19 @@ +//@ compile-flags:--test --test-args=--test-threads=1 +//@ normalize-stdout-test: "tests/rustdoc-ui/doctest" -> "$$DIR" +//@ normalize-stdout-test: "finished in \d+\.\d+s" -> "finished in $$TIME" +//@ failure-status: 101 + +/// ``` +/// /* plop +/// ``` +pub fn one() {} + +/// ``` +/// } mod __doctest_1 { fn main() { +/// ``` +pub fn two() {} + +/// ```should_panic +/// panic!() +/// ``` +pub fn three() {} diff --git a/tests/rustdoc-ui/doctest/wrong-ast.stdout b/tests/rustdoc-ui/doctest/wrong-ast.stdout new file mode 100644 index 0000000000000..15494706c1643 --- /dev/null +++ b/tests/rustdoc-ui/doctest/wrong-ast.stdout @@ -0,0 +1,36 @@ + +running 3 tests +test $DIR/wrong-ast.rs - one (line 6) ... FAILED +test $DIR/wrong-ast.rs - three (line 16) ... ok +test $DIR/wrong-ast.rs - two (line 11) ... FAILED + +failures: + +---- $DIR/wrong-ast.rs - one (line 6) stdout ---- +error[E0758]: unterminated block comment + --> $DIR/wrong-ast.rs:7:1 + | +LL | /* plop + | ^^^^^^^ + +error: aborting due to 1 previous error + +For more information about this error, try `rustc --explain E0758`. +Couldn't compile the test. 
+---- $DIR/wrong-ast.rs - two (line 11) stdout ---- +error: unexpected closing delimiter: `}` + --> $DIR/wrong-ast.rs:12:1 + | +LL | } mod __doctest_1 { fn main() { + | ^ unexpected closing delimiter + +error: aborting due to 1 previous error + +Couldn't compile the test. + +failures: + $DIR/wrong-ast.rs - one (line 6) + $DIR/wrong-ast.rs - two (line 11) + +test result: FAILED. 1 passed; 2 failed; 0 ignored; 0 measured; 0 filtered out; finished in $TIME + diff --git a/tests/rustdoc-ui/generate-link-to-definition/generate-link-to-definition-opt.rs b/tests/rustdoc-ui/generate-link-to-definition/generate-link-to-definition-opt.rs index f11b94bb036c6..babdbd0a69216 100644 --- a/tests/rustdoc-ui/generate-link-to-definition/generate-link-to-definition-opt.rs +++ b/tests/rustdoc-ui/generate-link-to-definition/generate-link-to-definition-opt.rs @@ -2,5 +2,6 @@ // option can only be used with HTML generation. //@ compile-flags: -Zunstable-options --generate-link-to-definition --output-format json +//@ check-pass pub fn f() {} diff --git a/tests/rustdoc-ui/generate-link-to-definition/generate-link-to-definition-opt.stderr b/tests/rustdoc-ui/generate-link-to-definition/generate-link-to-definition-opt.stderr index 4c8c607e7da23..62b0e3ce408b7 100644 --- a/tests/rustdoc-ui/generate-link-to-definition/generate-link-to-definition-opt.stderr +++ b/tests/rustdoc-ui/generate-link-to-definition/generate-link-to-definition-opt.stderr @@ -1,2 +1,4 @@ -error: --generate-link-to-definition option can only be used with HTML output format +warning: `--generate-link-to-definition` option can only be used with HTML output format + | + = note: `--generate-link-to-definition` option will be ignored diff --git a/tests/rustdoc-ui/generate-link-to-definition/generate-link-to-definition-opt2.rs b/tests/rustdoc-ui/generate-link-to-definition/generate-link-to-definition-opt2.rs deleted file mode 100644 index 718522059799e..0000000000000 --- a/tests/rustdoc-ui/generate-link-to-definition/generate-link-to-definition-opt2.rs +++ /dev/null @@ -1,6 +0,0 @@ -// This test purpose is to check that the "--generate-link-to-definition" -// option can only be used with HTML generation. 
- -//@ compile-flags: -Zunstable-options --generate-link-to-definition --show-coverage - -pub fn f() {} diff --git a/tests/rustdoc-ui/generate-link-to-definition/generate-link-to-definition-opt2.stderr b/tests/rustdoc-ui/generate-link-to-definition/generate-link-to-definition-opt2.stderr deleted file mode 100644 index 4c8c607e7da23..0000000000000 --- a/tests/rustdoc-ui/generate-link-to-definition/generate-link-to-definition-opt2.stderr +++ /dev/null @@ -1,2 +0,0 @@ -error: --generate-link-to-definition option can only be used with HTML output format - diff --git a/tests/rustdoc/playground-arg.rs b/tests/rustdoc/playground-arg.rs index 5875451a85922..e10a31017efc0 100644 --- a/tests/rustdoc/playground-arg.rs +++ b/tests/rustdoc/playground-arg.rs @@ -10,4 +10,4 @@ pub fn dummy() {} // ensure that `extern crate foo;` was inserted into code snips automatically: -//@ matches foo/index.html '//a[@class="test-arrow"][@href="https://example.com/?code=%23!%5Ballow(unused)%5D%0A%23%5Ballow(unused_extern_crates)%5D%0Aextern+crate+r%23foo;%0Afn+main()+%7B%0A++++use+foo::dummy;%0A++++dummy();%0A%7D&edition=2015"]' "Run" +//@ matches foo/index.html '//a[@class="test-arrow"][@href="https://example.com/?code=%23!%5Ballow(unused)%5D%0A%23%5Ballow(unused_extern_crates)%5D%0Aextern+crate+r%23foo;%0Afn+main()+%7B%0A++++use+foo::dummy;%0A++++dummy();%0A%7D&edition=2015"]' "" diff --git a/tests/rustdoc/playground-syntax-error.rs b/tests/rustdoc/playground-syntax-error.rs index f5067145e028e..d989570bf58ea 100644 --- a/tests/rustdoc/playground-syntax-error.rs +++ b/tests/rustdoc/playground-syntax-error.rs @@ -17,5 +17,5 @@ pub fn bar() {} //@ has foo/fn.bar.html -//@ has - '//a[@class="test-arrow"]' "Run" +//@ has - '//a[@class="test-arrow"]' "" //@ has - '//*[@class="docblock"]' 'foo_recursive' diff --git a/tests/rustdoc/playground.rs b/tests/rustdoc/playground.rs index 7880f7790678b..db2d1669df604 100644 --- a/tests/rustdoc/playground.rs +++ b/tests/rustdoc/playground.rs @@ -22,6 +22,6 @@ //! } //! 
``` -//@ matches foo/index.html '//a[@class="test-arrow"][@href="https://www.example.com/?code=%23!%5Ballow(unused)%5D%0Afn+main()+%7B%0A++++println!(%22Hello,+world!%22);%0A%7D&edition=2015"]' "Run" -//@ matches foo/index.html '//a[@class="test-arrow"][@href="https://www.example.com/?code=%23!%5Ballow(unused)%5D%0Afn+main()+%7B%0A++++println!(%22Hello,+world!%22);%0A%7D&edition=2015"]' "Run" -//@ matches foo/index.html '//a[@class="test-arrow"][@href="https://www.example.com/?code=%23!%5Ballow(unused)%5D%0A%23!%5Bfeature(something)%5D%0A%0Afn+main()+%7B%0A++++println!(%22Hello,+world!%22);%0A%7D&version=nightly&edition=2015"]' "Run" +//@ matches foo/index.html '//a[@class="test-arrow"][@href="https://www.example.com/?code=%23!%5Ballow(unused)%5D%0Afn+main()+%7B%0A++++println!(%22Hello,+world!%22);%0A%7D&edition=2015"]' "" +//@ matches foo/index.html '//a[@class="test-arrow"][@href="https://www.example.com/?code=%23!%5Ballow(unused)%5D%0Afn+main()+%7B%0A++++println!(%22Hello,+world!%22);%0A%7D&edition=2015"]' "" +//@ matches foo/index.html '//a[@class="test-arrow"][@href="https://www.example.com/?code=%23!%5Ballow(unused)%5D%0A%23!%5Bfeature(something)%5D%0A%0Afn+main()+%7B%0A++++println!(%22Hello,+world!%22);%0A%7D&version=nightly&edition=2015"]' "" diff --git a/tests/ui/abi/shadow-call-stack-without-fixed-x18.rs b/tests/ui/abi/shadow-call-stack-without-fixed-x18.rs new file mode 100644 index 0000000000000..d758c903087b6 --- /dev/null +++ b/tests/ui/abi/shadow-call-stack-without-fixed-x18.rs @@ -0,0 +1,15 @@ +//@ compile-flags: --target aarch64-unknown-none -Zsanitizer=shadow-call-stack +//@ error-pattern: shadow-call-stack sanitizer is not supported for this target +//@ dont-check-compiler-stderr +//@ needs-llvm-components: aarch64 + +#![allow(internal_features)] +#![crate_type = "rlib"] +#![feature(no_core, lang_items)] +#![no_core] + +#[lang = "sized"] +trait Sized {} + +#[no_mangle] +pub fn foo() {} diff --git a/tests/ui/asm/aarch64/bad-reg.rs b/tests/ui/asm/aarch64/bad-reg.rs index 1e54b6505db87..b99e5fe4b9e33 100644 --- a/tests/ui/asm/aarch64/bad-reg.rs +++ b/tests/ui/asm/aarch64/bad-reg.rs @@ -1,8 +1,6 @@ //@ only-aarch64 //@ compile-flags: -C target-feature=+neon -#![feature(asm_const)] - use std::arch::asm; fn main() { diff --git a/tests/ui/asm/aarch64/bad-reg.stderr b/tests/ui/asm/aarch64/bad-reg.stderr index 717a788caf67d..370752ad0f11c 100644 --- a/tests/ui/asm/aarch64/bad-reg.stderr +++ b/tests/ui/asm/aarch64/bad-reg.stderr @@ -1,17 +1,17 @@ error: invalid register class `foo`: unknown register class - --> $DIR/bad-reg.rs:14:20 + --> $DIR/bad-reg.rs:12:20 | LL | asm!("{}", in(foo) foo); | ^^^^^^^^^^^ error: invalid register `foo`: unknown register - --> $DIR/bad-reg.rs:16:18 + --> $DIR/bad-reg.rs:14:18 | LL | asm!("", in("foo") foo); | ^^^^^^^^^^^^^ error: invalid asm template modifier for this register class - --> $DIR/bad-reg.rs:18:15 + --> $DIR/bad-reg.rs:16:15 | LL | asm!("{:z}", in(reg) foo); | ^^^^ ----------- argument @@ -21,7 +21,7 @@ LL | asm!("{:z}", in(reg) foo); = note: the `reg` register class supports the following template modifiers: `w`, `x` error: invalid asm template modifier for this register class - --> $DIR/bad-reg.rs:20:15 + --> $DIR/bad-reg.rs:18:15 | LL | asm!("{:r}", in(vreg) foo); | ^^^^ ------------ argument @@ -31,7 +31,7 @@ LL | asm!("{:r}", in(vreg) foo); = note: the `vreg` register class supports the following template modifiers: `b`, `h`, `s`, `d`, `q`, `v` error: invalid asm template modifier for this register class - --> $DIR/bad-reg.rs:22:15 
+ --> $DIR/bad-reg.rs:20:15 | LL | asm!("{:r}", in(vreg_low16) foo); | ^^^^ ------------------ argument @@ -41,7 +41,7 @@ LL | asm!("{:r}", in(vreg_low16) foo); = note: the `vreg_low16` register class supports the following template modifiers: `b`, `h`, `s`, `d`, `q`, `v` error: asm template modifiers are not allowed for `const` arguments - --> $DIR/bad-reg.rs:24:15 + --> $DIR/bad-reg.rs:22:15 | LL | asm!("{:a}", const 0); | ^^^^ ------- argument @@ -49,7 +49,7 @@ LL | asm!("{:a}", const 0); | template modifier error: asm template modifiers are not allowed for `sym` arguments - --> $DIR/bad-reg.rs:26:15 + --> $DIR/bad-reg.rs:24:15 | LL | asm!("{:a}", sym main); | ^^^^ -------- argument @@ -57,49 +57,49 @@ LL | asm!("{:a}", sym main); | template modifier error: invalid register `x29`: the frame pointer cannot be used as an operand for inline asm - --> $DIR/bad-reg.rs:28:18 + --> $DIR/bad-reg.rs:26:18 | LL | asm!("", in("x29") foo); | ^^^^^^^^^^^^^ error: invalid register `sp`: the stack pointer cannot be used as an operand for inline asm - --> $DIR/bad-reg.rs:30:18 + --> $DIR/bad-reg.rs:28:18 | LL | asm!("", in("sp") foo); | ^^^^^^^^^^^^ error: invalid register `xzr`: the zero register cannot be used as an operand for inline asm - --> $DIR/bad-reg.rs:32:18 + --> $DIR/bad-reg.rs:30:18 | LL | asm!("", in("xzr") foo); | ^^^^^^^^^^^^^ error: invalid register `x19`: x19 is used internally by LLVM and cannot be used as an operand for inline asm - --> $DIR/bad-reg.rs:34:18 + --> $DIR/bad-reg.rs:32:18 | LL | asm!("", in("x19") foo); | ^^^^^^^^^^^^^ error: register class `preg` can only be used as a clobber, not as an input or output - --> $DIR/bad-reg.rs:37:18 + --> $DIR/bad-reg.rs:35:18 | LL | asm!("", in("p0") foo); | ^^^^^^^^^^^^ error: register class `preg` can only be used as a clobber, not as an input or output - --> $DIR/bad-reg.rs:41:20 + --> $DIR/bad-reg.rs:39:20 | LL | asm!("{}", in(preg) foo); | ^^^^^^^^^^^^ error: register class `preg` can only be used as a clobber, not as an input or output - --> $DIR/bad-reg.rs:44:20 + --> $DIR/bad-reg.rs:42:20 | LL | asm!("{}", out(preg) _); | ^^^^^^^^^^^ error: register `w0` conflicts with register `x0` - --> $DIR/bad-reg.rs:50:32 + --> $DIR/bad-reg.rs:48:32 | LL | asm!("", in("x0") foo, in("w0") bar); | ------------ ^^^^^^^^^^^^ register `w0` @@ -107,7 +107,7 @@ LL | asm!("", in("x0") foo, in("w0") bar); | register `x0` error: register `x0` conflicts with register `x0` - --> $DIR/bad-reg.rs:52:32 + --> $DIR/bad-reg.rs:50:32 | LL | asm!("", in("x0") foo, out("x0") bar); | ------------ ^^^^^^^^^^^^^ register `x0` @@ -115,13 +115,13 @@ LL | asm!("", in("x0") foo, out("x0") bar); | register `x0` | help: use `lateout` instead of `out` to avoid conflict - --> $DIR/bad-reg.rs:52:18 + --> $DIR/bad-reg.rs:50:18 | LL | asm!("", in("x0") foo, out("x0") bar); | ^^^^^^^^^^^^ error: register `q0` conflicts with register `v0` - --> $DIR/bad-reg.rs:55:32 + --> $DIR/bad-reg.rs:53:32 | LL | asm!("", in("v0") foo, in("q0") bar); | ------------ ^^^^^^^^^^^^ register `q0` @@ -129,7 +129,7 @@ LL | asm!("", in("v0") foo, in("q0") bar); | register `v0` error: register `q0` conflicts with register `v0` - --> $DIR/bad-reg.rs:57:32 + --> $DIR/bad-reg.rs:55:32 | LL | asm!("", in("v0") foo, out("q0") bar); | ------------ ^^^^^^^^^^^^^ register `q0` @@ -137,13 +137,13 @@ LL | asm!("", in("v0") foo, out("q0") bar); | register `v0` | help: use `lateout` instead of `out` to avoid conflict - --> $DIR/bad-reg.rs:57:18 + --> $DIR/bad-reg.rs:55:18 | LL | asm!("", in("v0") foo, 
out("q0") bar); | ^^^^^^^^^^^^ error: type `i32` cannot be used with this register class - --> $DIR/bad-reg.rs:37:27 + --> $DIR/bad-reg.rs:35:27 | LL | asm!("", in("p0") foo); | ^^^ @@ -151,7 +151,7 @@ LL | asm!("", in("p0") foo); = note: register class `preg` supports these types: error: type `i32` cannot be used with this register class - --> $DIR/bad-reg.rs:41:29 + --> $DIR/bad-reg.rs:39:29 | LL | asm!("{}", in(preg) foo); | ^^^ diff --git a/tests/ui/asm/aarch64/const.rs b/tests/ui/asm/aarch64/const.rs index a1fadb2115ba6..3eab5138d7d79 100644 --- a/tests/ui/asm/aarch64/const.rs +++ b/tests/ui/asm/aarch64/const.rs @@ -2,8 +2,6 @@ //@ run-pass //@ needs-asm-support -#![feature(asm_const)] - use std::arch::{asm, global_asm}; fn const_generic() -> usize { diff --git a/tests/ui/asm/aarch64/parse-error.rs b/tests/ui/asm/aarch64/parse-error.rs index ac73bbf99c9a3..aa731c35dda88 100644 --- a/tests/ui/asm/aarch64/parse-error.rs +++ b/tests/ui/asm/aarch64/parse-error.rs @@ -1,7 +1,5 @@ //@ only-aarch64 -#![feature(asm_const)] - use std::arch::{asm, global_asm}; fn main() { diff --git a/tests/ui/asm/aarch64/parse-error.stderr b/tests/ui/asm/aarch64/parse-error.stderr index e2c798c798ee6..7b273282ee6a3 100644 --- a/tests/ui/asm/aarch64/parse-error.stderr +++ b/tests/ui/asm/aarch64/parse-error.stderr @@ -1,107 +1,107 @@ error: requires at least a template string argument - --> $DIR/parse-error.rs:11:9 + --> $DIR/parse-error.rs:9:9 | LL | asm!(); | ^^^^^^ error: asm template must be a string literal - --> $DIR/parse-error.rs:13:14 + --> $DIR/parse-error.rs:11:14 | LL | asm!(foo); | ^^^ error: expected token: `,` - --> $DIR/parse-error.rs:15:19 + --> $DIR/parse-error.rs:13:19 | LL | asm!("{}" foo); | ^^^ expected `,` error: expected operand, clobber_abi, options, or additional template string - --> $DIR/parse-error.rs:17:20 + --> $DIR/parse-error.rs:15:20 | LL | asm!("{}", foo); | ^^^ expected operand, clobber_abi, options, or additional template string error: expected `(`, found `foo` - --> $DIR/parse-error.rs:19:23 + --> $DIR/parse-error.rs:17:23 | LL | asm!("{}", in foo); | ^^^ expected `(` error: expected `)`, found `foo` - --> $DIR/parse-error.rs:21:27 + --> $DIR/parse-error.rs:19:27 | LL | asm!("{}", in(reg foo)); | ^^^ expected `)` error: expected expression, found end of macro arguments - --> $DIR/parse-error.rs:23:27 + --> $DIR/parse-error.rs:21:27 | LL | asm!("{}", in(reg)); | ^ expected expression error: expected register class or explicit register - --> $DIR/parse-error.rs:25:26 + --> $DIR/parse-error.rs:23:26 | LL | asm!("{}", inout(=) foo => bar); | ^ error: expected expression, found end of macro arguments - --> $DIR/parse-error.rs:27:37 + --> $DIR/parse-error.rs:25:37 | LL | asm!("{}", inout(reg) foo =>); | ^ expected expression error: expected one of `!`, `,`, `.`, `::`, `?`, `{`, or an operator, found `=>` - --> $DIR/parse-error.rs:29:32 + --> $DIR/parse-error.rs:27:32 | LL | asm!("{}", in(reg) foo => bar); | ^^ expected one of 7 possible tokens error: expected a path for argument to `sym` - --> $DIR/parse-error.rs:31:24 + --> $DIR/parse-error.rs:29:24 | LL | asm!("{}", sym foo + bar); | ^^^^^^^^^ error: expected one of `)`, `att_syntax`, `may_unwind`, `nomem`, `noreturn`, `nostack`, `preserves_flags`, `pure`, `raw`, or `readonly`, found `foo` - --> $DIR/parse-error.rs:33:26 + --> $DIR/parse-error.rs:31:26 | LL | asm!("", options(foo)); | ^^^ expected one of 10 possible tokens error: expected one of `)` or `,`, found `foo` - --> $DIR/parse-error.rs:35:32 + --> 
$DIR/parse-error.rs:33:32 | LL | asm!("", options(nomem foo)); | ^^^ expected one of `)` or `,` error: expected one of `)`, `att_syntax`, `may_unwind`, `nomem`, `noreturn`, `nostack`, `preserves_flags`, `pure`, `raw`, or `readonly`, found `foo` - --> $DIR/parse-error.rs:37:33 + --> $DIR/parse-error.rs:35:33 | LL | asm!("", options(nomem, foo)); | ^^^ expected one of 10 possible tokens error: expected string literal - --> $DIR/parse-error.rs:41:30 + --> $DIR/parse-error.rs:39:30 | LL | asm!("", clobber_abi(foo)); | ^^^ not a string literal error: expected one of `)` or `,`, found `foo` - --> $DIR/parse-error.rs:43:34 + --> $DIR/parse-error.rs:41:34 | LL | asm!("", clobber_abi("C" foo)); | ^^^ expected one of `)` or `,` error: expected string literal - --> $DIR/parse-error.rs:45:35 + --> $DIR/parse-error.rs:43:35 | LL | asm!("", clobber_abi("C", foo)); | ^^^ not a string literal error: duplicate argument named `a` - --> $DIR/parse-error.rs:52:36 + --> $DIR/parse-error.rs:50:36 | LL | asm!("{a}", a = const foo, a = const bar); | ------------- ^^^^^^^^^^^^^ duplicate argument @@ -109,7 +109,7 @@ LL | asm!("{a}", a = const foo, a = const bar); | previously here error: argument never used - --> $DIR/parse-error.rs:52:36 + --> $DIR/parse-error.rs:50:36 | LL | asm!("{a}", a = const foo, a = const bar); | ^^^^^^^^^^^^^ argument never used @@ -117,13 +117,13 @@ LL | asm!("{a}", a = const foo, a = const bar); = help: if this argument is intentionally unused, consider using it in an asm comment: `"/* {1} */"` error: explicit register arguments cannot have names - --> $DIR/parse-error.rs:57:18 + --> $DIR/parse-error.rs:55:18 | LL | asm!("", a = in("x0") foo); | ^^^^^^^^^^^^^^^^ error: positional arguments cannot follow named arguments or explicit register arguments - --> $DIR/parse-error.rs:63:35 + --> $DIR/parse-error.rs:61:35 | LL | asm!("{1}", in("x0") foo, const bar); | ------------ ^^^^^^^^^ positional argument @@ -131,19 +131,19 @@ LL | asm!("{1}", in("x0") foo, const bar); | explicit register argument error: expected one of `clobber_abi`, `const`, `in`, `inlateout`, `inout`, `label`, `lateout`, `options`, `out`, or `sym`, found `""` - --> $DIR/parse-error.rs:66:29 + --> $DIR/parse-error.rs:64:29 | LL | asm!("", options(), ""); | ^^ expected one of 10 possible tokens error: expected one of `clobber_abi`, `const`, `in`, `inlateout`, `inout`, `label`, `lateout`, `options`, `out`, or `sym`, found `"{}"` - --> $DIR/parse-error.rs:68:33 + --> $DIR/parse-error.rs:66:33 | LL | asm!("{}", in(reg) foo, "{}", out(reg) foo); | ^^^^ expected one of 10 possible tokens error: asm template must be a string literal - --> $DIR/parse-error.rs:70:14 + --> $DIR/parse-error.rs:68:14 | LL | asm!(format!("{{{}}}", 0), in(reg) foo); | ^^^^^^^^^^^^^^^^^^^^ @@ -151,7 +151,7 @@ LL | asm!(format!("{{{}}}", 0), in(reg) foo); = note: this error originates in the macro `format` (in Nightly builds, run with -Z macro-backtrace for more info) error: asm template must be a string literal - --> $DIR/parse-error.rs:72:21 + --> $DIR/parse-error.rs:70:21 | LL | asm!("{1}", format!("{{{}}}", 0), in(reg) foo, out(reg) bar); | ^^^^^^^^^^^^^^^^^^^^ @@ -159,127 +159,127 @@ LL | asm!("{1}", format!("{{{}}}", 0), in(reg) foo, out(reg) bar); = note: this error originates in the macro `format` (in Nightly builds, run with -Z macro-backtrace for more info) error: _ cannot be used for input operands - --> $DIR/parse-error.rs:74:28 + --> $DIR/parse-error.rs:72:28 | LL | asm!("{}", in(reg) _); | ^ error: _ cannot be used for input operands - --> 
$DIR/parse-error.rs:76:31 + --> $DIR/parse-error.rs:74:31 | LL | asm!("{}", inout(reg) _); | ^ error: _ cannot be used for input operands - --> $DIR/parse-error.rs:78:35 + --> $DIR/parse-error.rs:76:35 | LL | asm!("{}", inlateout(reg) _); | ^ error: requires at least a template string argument - --> $DIR/parse-error.rs:85:1 + --> $DIR/parse-error.rs:83:1 | LL | global_asm!(); | ^^^^^^^^^^^^^ error: asm template must be a string literal - --> $DIR/parse-error.rs:87:13 + --> $DIR/parse-error.rs:85:13 | LL | global_asm!(FOO); | ^^^ error: expected token: `,` - --> $DIR/parse-error.rs:89:18 + --> $DIR/parse-error.rs:87:18 | LL | global_asm!("{}" FOO); | ^^^ expected `,` error: expected operand, options, or additional template string - --> $DIR/parse-error.rs:91:19 + --> $DIR/parse-error.rs:89:19 | LL | global_asm!("{}", FOO); | ^^^ expected operand, options, or additional template string error: expected expression, found end of macro arguments - --> $DIR/parse-error.rs:93:24 + --> $DIR/parse-error.rs:91:24 | LL | global_asm!("{}", const); | ^ expected expression error: expected one of `,`, `.`, `?`, or an operator, found `FOO` - --> $DIR/parse-error.rs:95:30 + --> $DIR/parse-error.rs:93:30 | LL | global_asm!("{}", const(reg) FOO); | ^^^ expected one of `,`, `.`, `?`, or an operator error: expected one of `)`, `att_syntax`, or `raw`, found `FOO` - --> $DIR/parse-error.rs:97:25 + --> $DIR/parse-error.rs:95:25 | LL | global_asm!("", options(FOO)); | ^^^ expected one of `)`, `att_syntax`, or `raw` error: the `nomem` option cannot be used with `global_asm!` - --> $DIR/parse-error.rs:99:25 + --> $DIR/parse-error.rs:97:25 | LL | global_asm!("", options(nomem FOO)); | ^^^^^ the `nomem` option is not meaningful for global-scoped inline assembly error: expected one of `)` or `,`, found `FOO` - --> $DIR/parse-error.rs:99:31 + --> $DIR/parse-error.rs:97:31 | LL | global_asm!("", options(nomem FOO)); | ^^^ expected one of `)` or `,` error: the `nomem` option cannot be used with `global_asm!` - --> $DIR/parse-error.rs:102:25 + --> $DIR/parse-error.rs:100:25 | LL | global_asm!("", options(nomem, FOO)); | ^^^^^ the `nomem` option is not meaningful for global-scoped inline assembly error: expected one of `)`, `att_syntax`, or `raw`, found `FOO` - --> $DIR/parse-error.rs:102:32 + --> $DIR/parse-error.rs:100:32 | LL | global_asm!("", options(nomem, FOO)); | ^^^ expected one of `)`, `att_syntax`, or `raw` error: expected string literal - --> $DIR/parse-error.rs:106:29 + --> $DIR/parse-error.rs:104:29 | LL | global_asm!("", clobber_abi(FOO)); | ^^^ not a string literal error: expected one of `)` or `,`, found `FOO` - --> $DIR/parse-error.rs:108:33 + --> $DIR/parse-error.rs:106:33 | LL | global_asm!("", clobber_abi("C" FOO)); | ^^^ expected one of `)` or `,` error: expected string literal - --> $DIR/parse-error.rs:110:34 + --> $DIR/parse-error.rs:108:34 | LL | global_asm!("", clobber_abi("C", FOO)); | ^^^ not a string literal error: `clobber_abi` cannot be used with `global_asm!` - --> $DIR/parse-error.rs:112:19 + --> $DIR/parse-error.rs:110:19 | LL | global_asm!("{}", clobber_abi("C"), const FOO); | ^^^^^^^^^^^^^^^^ error: `clobber_abi` cannot be used with `global_asm!` - --> $DIR/parse-error.rs:114:28 + --> $DIR/parse-error.rs:112:28 | LL | global_asm!("", options(), clobber_abi("C")); | ^^^^^^^^^^^^^^^^ error: `clobber_abi` cannot be used with `global_asm!` - --> $DIR/parse-error.rs:116:30 + --> $DIR/parse-error.rs:114:30 | LL | global_asm!("{}", options(), clobber_abi("C"), const FOO); | ^^^^^^^^^^^^^^^^ error: 
duplicate argument named `a` - --> $DIR/parse-error.rs:118:35 + --> $DIR/parse-error.rs:116:35 | LL | global_asm!("{a}", a = const FOO, a = const BAR); | ------------- ^^^^^^^^^^^^^ duplicate argument @@ -287,7 +287,7 @@ LL | global_asm!("{a}", a = const FOO, a = const BAR); | previously here error: argument never used - --> $DIR/parse-error.rs:118:35 + --> $DIR/parse-error.rs:116:35 | LL | global_asm!("{a}", a = const FOO, a = const BAR); | ^^^^^^^^^^^^^ argument never used @@ -295,19 +295,19 @@ LL | global_asm!("{a}", a = const FOO, a = const BAR); = help: if this argument is intentionally unused, consider using it in an asm comment: `"/* {1} */"` error: expected one of `clobber_abi`, `const`, `options`, or `sym`, found `""` - --> $DIR/parse-error.rs:121:28 + --> $DIR/parse-error.rs:119:28 | LL | global_asm!("", options(), ""); | ^^ expected one of `clobber_abi`, `const`, `options`, or `sym` error: expected one of `clobber_abi`, `const`, `options`, or `sym`, found `"{}"` - --> $DIR/parse-error.rs:123:30 + --> $DIR/parse-error.rs:121:30 | LL | global_asm!("{}", const FOO, "{}", const FOO); | ^^^^ expected one of `clobber_abi`, `const`, `options`, or `sym` error: asm template must be a string literal - --> $DIR/parse-error.rs:125:13 + --> $DIR/parse-error.rs:123:13 | LL | global_asm!(format!("{{{}}}", 0), const FOO); | ^^^^^^^^^^^^^^^^^^^^ @@ -315,7 +315,7 @@ LL | global_asm!(format!("{{{}}}", 0), const FOO); = note: this error originates in the macro `format` (in Nightly builds, run with -Z macro-backtrace for more info) error: asm template must be a string literal - --> $DIR/parse-error.rs:127:20 + --> $DIR/parse-error.rs:125:20 | LL | global_asm!("{1}", format!("{{{}}}", 0), const FOO, const BAR); | ^^^^^^^^^^^^^^^^^^^^ @@ -323,7 +323,7 @@ LL | global_asm!("{1}", format!("{{{}}}", 0), const FOO, const BAR); = note: this error originates in the macro `format` (in Nightly builds, run with -Z macro-backtrace for more info) error[E0435]: attempt to use a non-constant value in a constant - --> $DIR/parse-error.rs:39:37 + --> $DIR/parse-error.rs:37:37 | LL | asm!("{}", options(), const foo); | ^^^ non-constant value @@ -334,7 +334,7 @@ LL | const foo: /* Type */ = 0; | ~~~~~ ++++++++++++ error[E0435]: attempt to use a non-constant value in a constant - --> $DIR/parse-error.rs:47:44 + --> $DIR/parse-error.rs:45:44 | LL | asm!("{}", clobber_abi("C"), const foo); | ^^^ non-constant value @@ -345,7 +345,7 @@ LL | const foo: /* Type */ = 0; | ~~~~~ ++++++++++++ error[E0435]: attempt to use a non-constant value in a constant - --> $DIR/parse-error.rs:50:55 + --> $DIR/parse-error.rs:48:55 | LL | asm!("{}", options(), clobber_abi("C"), const foo); | ^^^ non-constant value @@ -356,7 +356,7 @@ LL | const foo: /* Type */ = 0; | ~~~~~ ++++++++++++ error[E0435]: attempt to use a non-constant value in a constant - --> $DIR/parse-error.rs:52:31 + --> $DIR/parse-error.rs:50:31 | LL | asm!("{a}", a = const foo, a = const bar); | ^^^ non-constant value @@ -367,7 +367,7 @@ LL | const foo: /* Type */ = 0; | ~~~~~ ++++++++++++ error[E0435]: attempt to use a non-constant value in a constant - --> $DIR/parse-error.rs:52:46 + --> $DIR/parse-error.rs:50:46 | LL | asm!("{a}", a = const foo, a = const bar); | ^^^ non-constant value @@ -378,7 +378,7 @@ LL | const bar: /* Type */ = 0; | ~~~~~ ++++++++++++ error[E0435]: attempt to use a non-constant value in a constant - --> $DIR/parse-error.rs:59:45 + --> $DIR/parse-error.rs:57:45 | LL | asm!("{a}", in("x0") foo, a = const bar); | ^^^ non-constant value @@ -389,7 +389,7 
@@ LL | const bar: /* Type */ = 0; | ~~~~~ ++++++++++++ error[E0435]: attempt to use a non-constant value in a constant - --> $DIR/parse-error.rs:61:45 + --> $DIR/parse-error.rs:59:45 | LL | asm!("{a}", in("x0") foo, a = const bar); | ^^^ non-constant value @@ -400,7 +400,7 @@ LL | const bar: /* Type */ = 0; | ~~~~~ ++++++++++++ error[E0435]: attempt to use a non-constant value in a constant - --> $DIR/parse-error.rs:63:41 + --> $DIR/parse-error.rs:61:41 | LL | asm!("{1}", in("x0") foo, const bar); | ^^^ non-constant value diff --git a/tests/ui/asm/aarch64/type-check-3.rs b/tests/ui/asm/aarch64/type-check-3.rs index 3fc8e5060693a..b64473f98c090 100644 --- a/tests/ui/asm/aarch64/type-check-3.rs +++ b/tests/ui/asm/aarch64/type-check-3.rs @@ -1,7 +1,7 @@ //@ only-aarch64 //@ compile-flags: -C target-feature=+neon -#![feature(repr_simd, asm_const)] +#![feature(repr_simd)] use std::arch::aarch64::float64x2_t; use std::arch::{asm, global_asm}; diff --git a/tests/ui/asm/aarch64/type-check-4.rs b/tests/ui/asm/aarch64/type-check-4.rs index f00b4d4c46fa5..41eb9de5669f6 100644 --- a/tests/ui/asm/aarch64/type-check-4.rs +++ b/tests/ui/asm/aarch64/type-check-4.rs @@ -1,7 +1,7 @@ //@ only-aarch64 //@ compile-flags: -C target-feature=+neon -#![feature(repr_simd, asm_const)] +#![feature(repr_simd)] use std::arch::aarch64::float64x2_t; use std::arch::{asm, global_asm}; @@ -10,8 +10,7 @@ use std::arch::{asm, global_asm}; #[derive(Copy, Clone)] struct Simd256bit(f64, f64, f64, f64); -fn main() { -} +fn main() {} // Constants must be... constant diff --git a/tests/ui/asm/aarch64/type-check-4.stderr b/tests/ui/asm/aarch64/type-check-4.stderr index 3e675f69e842b..89eb8467cdefd 100644 --- a/tests/ui/asm/aarch64/type-check-4.stderr +++ b/tests/ui/asm/aarch64/type-check-4.stderr @@ -1,5 +1,5 @@ error[E0658]: referencing statics in constants is unstable - --> $DIR/type-check-4.rs:25:25 + --> $DIR/type-check-4.rs:24:25 | LL | global_asm!("{}", const S); | ^ @@ -11,7 +11,7 @@ LL | global_asm!("{}", const S); = help: to fix this, the value can be extracted to a `const` and then used. error[E0658]: referencing statics in constants is unstable - --> $DIR/type-check-4.rs:28:35 + --> $DIR/type-check-4.rs:27:35 | LL | global_asm!("{}", const const_foo(S)); | ^ @@ -23,7 +23,7 @@ LL | global_asm!("{}", const const_foo(S)); = help: to fix this, the value can be extracted to a `const` and then used. 
error[E0658]: referencing statics in constants is unstable - --> $DIR/type-check-4.rs:31:35 + --> $DIR/type-check-4.rs:30:35 | LL | global_asm!("{}", const const_bar(S)); | ^ diff --git a/tests/ui/asm/bad-template.rs b/tests/ui/asm/bad-template.rs index 41a906e32a4ba..6b00905a39326 100644 --- a/tests/ui/asm/bad-template.rs +++ b/tests/ui/asm/bad-template.rs @@ -6,7 +6,7 @@ //@ [x86_64] needs-llvm-components: x86 //@ [aarch64] needs-llvm-components: aarch64 -#![feature(no_core, lang_items, rustc_attrs, asm_const)] +#![feature(no_core, lang_items, rustc_attrs)] #![no_core] #[rustc_builtin_macro] diff --git a/tests/ui/asm/const-error.rs b/tests/ui/asm/const-error.rs index f2cead399b6fb..40d0590c33e9d 100644 --- a/tests/ui/asm/const-error.rs +++ b/tests/ui/asm/const-error.rs @@ -1,15 +1,15 @@ //@ only-x86_64 //@ needs-asm-support -#![feature(asm_const)] - // Test to make sure that we emit const errors eagerly for inline asm use std::arch::asm; fn test() { - unsafe { asm!("/* {} */", const 1 / 0); } - //~^ ERROR evaluation of + unsafe { + asm!("/* {} */", const 1 / 0); + //~^ ERROR evaluation of + } } fn main() {} diff --git a/tests/ui/asm/const-error.stderr b/tests/ui/asm/const-error.stderr index fe31183217726..02e54457e89b9 100644 --- a/tests/ui/asm/const-error.stderr +++ b/tests/ui/asm/const-error.stderr @@ -1,8 +1,8 @@ error[E0080]: evaluation of `test::::{constant#0}` failed - --> $DIR/const-error.rs:11:37 + --> $DIR/const-error.rs:10:32 | -LL | unsafe { asm!("/* {} */", const 1 / 0); } - | ^^^^^ attempt to divide `1_i32` by zero +LL | asm!("/* {} */", const 1 / 0); + | ^^^^^ attempt to divide `1_i32` by zero error: aborting due to 1 previous error diff --git a/tests/ui/asm/fail-const-eval-issue-121099.rs b/tests/ui/asm/fail-const-eval-issue-121099.rs index bed6fc9b39f9d..36d00b1e5d23c 100644 --- a/tests/ui/asm/fail-const-eval-issue-121099.rs +++ b/tests/ui/asm/fail-const-eval-issue-121099.rs @@ -1,6 +1,5 @@ //@ build-fail //@ needs-asm-support -#![feature(asm_const)] use std::arch::global_asm; diff --git a/tests/ui/asm/fail-const-eval-issue-121099.stderr b/tests/ui/asm/fail-const-eval-issue-121099.stderr index 51d283218d227..5d86c3a5f7bdd 100644 --- a/tests/ui/asm/fail-const-eval-issue-121099.stderr +++ b/tests/ui/asm/fail-const-eval-issue-121099.stderr @@ -1,11 +1,11 @@ error[E0080]: evaluation of constant value failed - --> $DIR/fail-const-eval-issue-121099.rs:9:31 + --> $DIR/fail-const-eval-issue-121099.rs:8:31 | LL | global_asm!("/* {} */", const 1 << 500); | ^^^^^^^^ attempt to shift left by `500_i32`, which would overflow error[E0080]: evaluation of constant value failed - --> $DIR/fail-const-eval-issue-121099.rs:11:31 + --> $DIR/fail-const-eval-issue-121099.rs:10:31 | LL | global_asm!("/* {} */", const 1 / 0); | ^^^^^ attempt to divide `1_i32` by zero diff --git a/tests/ui/asm/generic-const.rs b/tests/ui/asm/generic-const.rs index 133d093d200ec..3b69a4e86e3e8 100644 --- a/tests/ui/asm/generic-const.rs +++ b/tests/ui/asm/generic-const.rs @@ -1,8 +1,6 @@ //@ needs-asm-support //@ build-pass -#![feature(asm_const)] - use std::arch::asm; fn foofoo() {} diff --git a/tests/ui/asm/invalid-const-operand.rs b/tests/ui/asm/invalid-const-operand.rs index eff335ff6aaa3..a688f5042db5c 100644 --- a/tests/ui/asm/invalid-const-operand.rs +++ b/tests/ui/asm/invalid-const-operand.rs @@ -2,8 +2,6 @@ //@ ignore-nvptx64 //@ ignore-spirv -#![feature(asm_const)] - use std::arch::{asm, global_asm}; // Const operands must be integers and must be constants. 
diff --git a/tests/ui/asm/invalid-const-operand.stderr b/tests/ui/asm/invalid-const-operand.stderr index a6d742b53c217..bda4b0355b7c4 100644 --- a/tests/ui/asm/invalid-const-operand.stderr +++ b/tests/ui/asm/invalid-const-operand.stderr @@ -1,5 +1,5 @@ error[E0435]: attempt to use a non-constant value in a constant - --> $DIR/invalid-const-operand.rs:42:26 + --> $DIR/invalid-const-operand.rs:40:26 | LL | asm!("{}", const x); | ^ non-constant value @@ -10,7 +10,7 @@ LL | const x: /* Type */ = 0; | ~~~~~ ++++++++++++ error[E0435]: attempt to use a non-constant value in a constant - --> $DIR/invalid-const-operand.rs:45:36 + --> $DIR/invalid-const-operand.rs:43:36 | LL | asm!("{}", const const_foo(x)); | ^ non-constant value @@ -21,7 +21,7 @@ LL | const x: /* Type */ = 0; | ~~~~~ ++++++++++++ error[E0435]: attempt to use a non-constant value in a constant - --> $DIR/invalid-const-operand.rs:48:36 + --> $DIR/invalid-const-operand.rs:46:36 | LL | asm!("{}", const const_bar(x)); | ^ non-constant value @@ -32,7 +32,7 @@ LL | const x: /* Type */ = 0; | ~~~~~ ++++++++++++ error: invalid type for `const` operand - --> $DIR/invalid-const-operand.rs:14:19 + --> $DIR/invalid-const-operand.rs:12:19 | LL | global_asm!("{}", const 0f32); | ^^^^^^---- @@ -42,7 +42,7 @@ LL | global_asm!("{}", const 0f32); = help: `const` operands must be of an integer type error: invalid type for `const` operand - --> $DIR/invalid-const-operand.rs:16:19 + --> $DIR/invalid-const-operand.rs:14:19 | LL | global_asm!("{}", const 0 as *mut u8); | ^^^^^^------------ @@ -52,7 +52,7 @@ LL | global_asm!("{}", const 0 as *mut u8); = help: `const` operands must be of an integer type error: invalid type for `const` operand - --> $DIR/invalid-const-operand.rs:26:20 + --> $DIR/invalid-const-operand.rs:24:20 | LL | asm!("{}", const 0f32); | ^^^^^^---- @@ -62,7 +62,7 @@ LL | asm!("{}", const 0f32); = help: `const` operands must be of an integer type error: invalid type for `const` operand - --> $DIR/invalid-const-operand.rs:28:20 + --> $DIR/invalid-const-operand.rs:26:20 | LL | asm!("{}", const 0 as *mut u8); | ^^^^^^------------ @@ -72,7 +72,7 @@ LL | asm!("{}", const 0 as *mut u8); = help: `const` operands must be of an integer type error: invalid type for `const` operand - --> $DIR/invalid-const-operand.rs:30:20 + --> $DIR/invalid-const-operand.rs:28:20 | LL | asm!("{}", const &0); | ^^^^^^-- diff --git a/tests/ui/asm/naked-functions.rs b/tests/ui/asm/naked-functions.rs index cb1e5c325c239..116a84506c570 100644 --- a/tests/ui/asm/naked-functions.rs +++ b/tests/ui/asm/naked-functions.rs @@ -3,7 +3,7 @@ //@ ignore-spirv #![feature(naked_functions)] -#![feature(asm_const, asm_unwind, linkage)] +#![feature(asm_unwind, linkage)] #![crate_type = "lib"] use std::arch::asm; diff --git a/tests/ui/asm/named-asm-labels.rs b/tests/ui/asm/named-asm-labels.rs index d2ca6fe8808bc..043aab9029d7d 100644 --- a/tests/ui/asm/named-asm-labels.rs +++ b/tests/ui/asm/named-asm-labels.rs @@ -10,7 +10,7 @@ // which causes less readable LLVM errors and in the worst cases causes ICEs // or segfaults based on system dependent behavior and codegen flags. -#![feature(naked_functions, asm_const)] +#![feature(naked_functions)] use std::arch::{asm, global_asm}; @@ -128,6 +128,7 @@ fn main() { // Tests usage of colons in non-label positions asm!(":lo12:FOO"); // this is apparently valid aarch64 + // is there an example that is valid x86 for this test? 
asm!(":bbb nop"); @@ -176,7 +177,8 @@ fn main() { // label or LTO can cause labels to break #[naked] pub extern "C" fn foo() -> i32 { - unsafe { asm!(".Lfoo: mov rax, {}; ret;", "nop", const 1, options(noreturn)) } //~ ERROR avoid using named labels + unsafe { asm!(".Lfoo: mov rax, {}; ret;", "nop", const 1, options(noreturn)) } + //~^ ERROR avoid using named labels } // Make sure that non-naked attributes *do* still let the lint happen diff --git a/tests/ui/asm/named-asm-labels.stderr b/tests/ui/asm/named-asm-labels.stderr index 20b7d64f9e752..e5e177fb8b83e 100644 --- a/tests/ui/asm/named-asm-labels.stderr +++ b/tests/ui/asm/named-asm-labels.stderr @@ -328,7 +328,7 @@ LL | ab: nop // ab: does foo = note: see the asm section of Rust By Example for more information error: avoid using named labels in inline assembly - --> $DIR/named-asm-labels.rs:143:19 + --> $DIR/named-asm-labels.rs:144:19 | LL | asm!("test_{}: nop", in(reg) 10); | ^^^^^^^ @@ -338,7 +338,7 @@ LL | asm!("test_{}: nop", in(reg) 10); = note: see the asm section of Rust By Example for more information error: avoid using named labels in inline assembly - --> $DIR/named-asm-labels.rs:145:15 + --> $DIR/named-asm-labels.rs:146:15 | LL | asm!("test_{}: nop", const 10); | ^^^^^^^ @@ -348,7 +348,7 @@ LL | asm!("test_{}: nop", const 10); = note: see the asm section of Rust By Example for more information error: avoid using named labels in inline assembly - --> $DIR/named-asm-labels.rs:146:15 + --> $DIR/named-asm-labels.rs:147:15 | LL | asm!("test_{}: nop", sym main); | ^^^^^^^ @@ -358,7 +358,7 @@ LL | asm!("test_{}: nop", sym main); = note: see the asm section of Rust By Example for more information error: avoid using named labels in inline assembly - --> $DIR/named-asm-labels.rs:147:15 + --> $DIR/named-asm-labels.rs:148:15 | LL | asm!("{}_test: nop", const 10); | ^^^^^^^ @@ -368,7 +368,7 @@ LL | asm!("{}_test: nop", const 10); = note: see the asm section of Rust By Example for more information error: avoid using named labels in inline assembly - --> $DIR/named-asm-labels.rs:148:15 + --> $DIR/named-asm-labels.rs:149:15 | LL | asm!("test_{}_test: nop", const 10); | ^^^^^^^^^^^^ @@ -378,7 +378,7 @@ LL | asm!("test_{}_test: nop", const 10); = note: see the asm section of Rust By Example for more information error: avoid using named labels in inline assembly - --> $DIR/named-asm-labels.rs:149:15 + --> $DIR/named-asm-labels.rs:150:15 | LL | asm!("{}: nop", const 10); | ^^ @@ -388,7 +388,7 @@ LL | asm!("{}: nop", const 10); = note: see the asm section of Rust By Example for more information error: avoid using named labels in inline assembly - --> $DIR/named-asm-labels.rs:151:15 + --> $DIR/named-asm-labels.rs:152:15 | LL | asm!("{uwu}: nop", uwu = const 10); | ^^^^^ @@ -398,7 +398,7 @@ LL | asm!("{uwu}: nop", uwu = const 10); = note: see the asm section of Rust By Example for more information error: avoid using named labels in inline assembly - --> $DIR/named-asm-labels.rs:152:15 + --> $DIR/named-asm-labels.rs:153:15 | LL | asm!("{0}: nop", const 10); | ^^^ @@ -408,7 +408,7 @@ LL | asm!("{0}: nop", const 10); = note: see the asm section of Rust By Example for more information error: avoid using named labels in inline assembly - --> $DIR/named-asm-labels.rs:153:15 + --> $DIR/named-asm-labels.rs:154:15 | LL | asm!("{1}: nop", "/* {0} */", const 10, const 20); | ^^^ @@ -418,7 +418,7 @@ LL | asm!("{1}: nop", "/* {0} */", const 10, const 20); = note: see the asm section of Rust By Example for more information error: avoid using named labels in 
inline assembly - --> $DIR/named-asm-labels.rs:156:14 + --> $DIR/named-asm-labels.rs:157:14 | LL | asm!(include_str!("named-asm-labels.s")); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -428,7 +428,7 @@ LL | asm!(include_str!("named-asm-labels.s")); = note: the label may be declared in the expansion of a macro error: avoid using named labels in inline assembly - --> $DIR/named-asm-labels.rs:156:14 + --> $DIR/named-asm-labels.rs:157:14 | LL | asm!(include_str!("named-asm-labels.s")); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -439,7 +439,7 @@ LL | asm!(include_str!("named-asm-labels.s")); = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` error: avoid using named labels in inline assembly - --> $DIR/named-asm-labels.rs:156:14 + --> $DIR/named-asm-labels.rs:157:14 | LL | asm!(include_str!("named-asm-labels.s")); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -450,7 +450,7 @@ LL | asm!(include_str!("named-asm-labels.s")); = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` error: avoid using named labels in inline assembly - --> $DIR/named-asm-labels.rs:156:14 + --> $DIR/named-asm-labels.rs:157:14 | LL | asm!(include_str!("named-asm-labels.s")); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -461,7 +461,7 @@ LL | asm!(include_str!("named-asm-labels.s")); = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` warning: avoid using named labels in inline assembly - --> $DIR/named-asm-labels.rs:170:19 + --> $DIR/named-asm-labels.rs:171:19 | LL | asm!("warned: nop"); | ^^^^^^ @@ -469,13 +469,13 @@ LL | asm!("warned: nop"); = help: only local labels of the form `:` should be used in inline asm = note: see the asm section of Rust By Example for more information note: the lint level is defined here - --> $DIR/named-asm-labels.rs:168:16 + --> $DIR/named-asm-labels.rs:169:16 | LL | #[warn(named_asm_labels)] | ^^^^^^^^^^^^^^^^ error: avoid using named labels in inline assembly - --> $DIR/named-asm-labels.rs:179:20 + --> $DIR/named-asm-labels.rs:180:20 | LL | unsafe { asm!(".Lfoo: mov rax, {}; ret;", "nop", const 1, options(noreturn)) } | ^^^^^ @@ -484,7 +484,7 @@ LL | unsafe { asm!(".Lfoo: mov rax, {}; ret;", "nop", const 1, options(noret = note: see the asm section of Rust By Example for more information error: avoid using named labels in inline assembly - --> $DIR/named-asm-labels.rs:185:20 + --> $DIR/named-asm-labels.rs:187:20 | LL | unsafe { asm!(".Lbar: mov rax, {}; ret;", "nop", const 1, options(noreturn)) } | ^^^^^ @@ -493,7 +493,7 @@ LL | unsafe { asm!(".Lbar: mov rax, {}; ret;", "nop", const 1, options(noret = note: see the asm section of Rust By Example for more information error: avoid using named labels in inline assembly - --> $DIR/named-asm-labels.rs:193:20 + --> $DIR/named-asm-labels.rs:195:20 | LL | unsafe { asm!(".Laaa: nop; ret;", options(noreturn)) } | ^^^^^ @@ -502,7 +502,7 @@ LL | unsafe { asm!(".Laaa: nop; ret;", options(noreturn)) } = note: see the asm section of Rust By Example for more information error: avoid using named labels in inline assembly - --> $DIR/named-asm-labels.rs:203:24 + --> $DIR/named-asm-labels.rs:205:24 | LL | unsafe { asm!(".Lbbb: nop; ret;", options(noreturn)) } | ^^^^^ @@ -511,7 +511,7 @@ LL | unsafe { asm!(".Lbbb: nop; ret;", options(noreturn)) } = note: see the asm section of Rust By Example for more information error: avoid using named labels in inline assembly - --> $DIR/named-asm-labels.rs:212:15 + --> $DIR/named-asm-labels.rs:214:15 | LL | asm!("closure1: nop"); | ^^^^^^^^ @@ -520,7 +520,7 @@ LL | 
asm!("closure1: nop"); = note: see the asm section of Rust By Example for more information error: avoid using named labels in inline assembly - --> $DIR/named-asm-labels.rs:216:15 + --> $DIR/named-asm-labels.rs:218:15 | LL | asm!("closure2: nop"); | ^^^^^^^^ @@ -529,7 +529,7 @@ LL | asm!("closure2: nop"); = note: see the asm section of Rust By Example for more information error: avoid using named labels in inline assembly - --> $DIR/named-asm-labels.rs:226:19 + --> $DIR/named-asm-labels.rs:228:19 | LL | asm!("closure3: nop"); | ^^^^^^^^ diff --git a/tests/ui/asm/parse-error.rs b/tests/ui/asm/parse-error.rs index 16ae02828642d..4d7b522f5fc5b 100644 --- a/tests/ui/asm/parse-error.rs +++ b/tests/ui/asm/parse-error.rs @@ -1,7 +1,5 @@ //@ needs-asm-support -#![feature(asm_const)] - use std::arch::{asm, global_asm}; fn main() { diff --git a/tests/ui/asm/parse-error.stderr b/tests/ui/asm/parse-error.stderr index f5f8d537d86b8..6d0e629b93775 100644 --- a/tests/ui/asm/parse-error.stderr +++ b/tests/ui/asm/parse-error.stderr @@ -1,167 +1,167 @@ error: requires at least a template string argument - --> $DIR/parse-error.rs:11:9 + --> $DIR/parse-error.rs:9:9 | LL | asm!(); | ^^^^^^ error: asm template must be a string literal - --> $DIR/parse-error.rs:13:14 + --> $DIR/parse-error.rs:11:14 | LL | asm!(foo); | ^^^ error: expected token: `,` - --> $DIR/parse-error.rs:15:19 + --> $DIR/parse-error.rs:13:19 | LL | asm!("{}" foo); | ^^^ expected `,` error: expected operand, clobber_abi, options, or additional template string - --> $DIR/parse-error.rs:17:20 + --> $DIR/parse-error.rs:15:20 | LL | asm!("{}", foo); | ^^^ expected operand, clobber_abi, options, or additional template string error: expected `(`, found `foo` - --> $DIR/parse-error.rs:19:23 + --> $DIR/parse-error.rs:17:23 | LL | asm!("{}", in foo); | ^^^ expected `(` error: expected `)`, found `foo` - --> $DIR/parse-error.rs:21:27 + --> $DIR/parse-error.rs:19:27 | LL | asm!("{}", in(reg foo)); | ^^^ expected `)` error: expected expression, found end of macro arguments - --> $DIR/parse-error.rs:23:27 + --> $DIR/parse-error.rs:21:27 | LL | asm!("{}", in(reg)); | ^ expected expression error: expected register class or explicit register - --> $DIR/parse-error.rs:25:26 + --> $DIR/parse-error.rs:23:26 | LL | asm!("{}", inout(=) foo => bar); | ^ error: expected expression, found end of macro arguments - --> $DIR/parse-error.rs:27:37 + --> $DIR/parse-error.rs:25:37 | LL | asm!("{}", inout(reg) foo =>); | ^ expected expression error: expected one of `!`, `,`, `.`, `::`, `?`, `{`, or an operator, found `=>` - --> $DIR/parse-error.rs:29:32 + --> $DIR/parse-error.rs:27:32 | LL | asm!("{}", in(reg) foo => bar); | ^^ expected one of 7 possible tokens error: expected a path for argument to `sym` - --> $DIR/parse-error.rs:31:24 + --> $DIR/parse-error.rs:29:24 | LL | asm!("{}", sym foo + bar); | ^^^^^^^^^ error: expected one of `)`, `att_syntax`, `may_unwind`, `nomem`, `noreturn`, `nostack`, `preserves_flags`, `pure`, `raw`, or `readonly`, found `foo` - --> $DIR/parse-error.rs:33:26 + --> $DIR/parse-error.rs:31:26 | LL | asm!("", options(foo)); | ^^^ expected one of 10 possible tokens error: expected one of `)` or `,`, found `foo` - --> $DIR/parse-error.rs:35:32 + --> $DIR/parse-error.rs:33:32 | LL | asm!("", options(nomem foo)); | ^^^ expected one of `)` or `,` error: expected one of `)`, `att_syntax`, `may_unwind`, `nomem`, `noreturn`, `nostack`, `preserves_flags`, `pure`, `raw`, or `readonly`, found `foo` - --> $DIR/parse-error.rs:37:33 + --> 
$DIR/parse-error.rs:35:33 | LL | asm!("", options(nomem, foo)); | ^^^ expected one of 10 possible tokens error: at least one abi must be provided as an argument to `clobber_abi` - --> $DIR/parse-error.rs:44:30 + --> $DIR/parse-error.rs:42:30 | LL | asm!("", clobber_abi()); | ^ error: expected string literal - --> $DIR/parse-error.rs:46:30 + --> $DIR/parse-error.rs:44:30 | LL | asm!("", clobber_abi(foo)); | ^^^ not a string literal error: expected one of `)` or `,`, found `foo` - --> $DIR/parse-error.rs:48:34 + --> $DIR/parse-error.rs:46:34 | LL | asm!("", clobber_abi("C" foo)); | ^^^ expected one of `)` or `,` error: expected string literal - --> $DIR/parse-error.rs:50:35 + --> $DIR/parse-error.rs:48:35 | LL | asm!("", clobber_abi("C", foo)); | ^^^ not a string literal error: expected string literal - --> $DIR/parse-error.rs:52:30 + --> $DIR/parse-error.rs:50:30 | LL | asm!("", clobber_abi(1)); | ^ not a string literal error: expected string literal - --> $DIR/parse-error.rs:54:30 + --> $DIR/parse-error.rs:52:30 | LL | asm!("", clobber_abi(())); | ^ not a string literal error: expected string literal - --> $DIR/parse-error.rs:56:30 + --> $DIR/parse-error.rs:54:30 | LL | asm!("", clobber_abi(uwu)); | ^^^ not a string literal error: expected string literal - --> $DIR/parse-error.rs:58:30 + --> $DIR/parse-error.rs:56:30 | LL | asm!("", clobber_abi({})); | ^ not a string literal error: expected string literal - --> $DIR/parse-error.rs:60:30 + --> $DIR/parse-error.rs:58:30 | LL | asm!("", clobber_abi(loop {})); | ^^^^ not a string literal error: expected string literal - --> $DIR/parse-error.rs:62:30 + --> $DIR/parse-error.rs:60:30 | LL | asm!("", clobber_abi(if)); | ^^ not a string literal error: expected string literal - --> $DIR/parse-error.rs:64:30 + --> $DIR/parse-error.rs:62:30 | LL | asm!("", clobber_abi(do)); | ^^ not a string literal error: expected string literal - --> $DIR/parse-error.rs:66:30 + --> $DIR/parse-error.rs:64:30 | LL | asm!("", clobber_abi(<)); | ^ not a string literal error: expected string literal - --> $DIR/parse-error.rs:68:30 + --> $DIR/parse-error.rs:66:30 | LL | asm!("", clobber_abi(.)); | ^ not a string literal error: duplicate argument named `a` - --> $DIR/parse-error.rs:76:36 + --> $DIR/parse-error.rs:74:36 | LL | asm!("{a}", a = const foo, a = const bar); | ------------- ^^^^^^^^^^^^^ duplicate argument @@ -169,7 +169,7 @@ LL | asm!("{a}", a = const foo, a = const bar); | previously here error: argument never used - --> $DIR/parse-error.rs:76:36 + --> $DIR/parse-error.rs:74:36 | LL | asm!("{a}", a = const foo, a = const bar); | ^^^^^^^^^^^^^ argument never used @@ -177,19 +177,19 @@ LL | asm!("{a}", a = const foo, a = const bar); = help: if this argument is intentionally unused, consider using it in an asm comment: `"/* {1} */"` error: expected one of `clobber_abi`, `const`, `in`, `inlateout`, `inout`, `label`, `lateout`, `options`, `out`, or `sym`, found `""` - --> $DIR/parse-error.rs:82:29 + --> $DIR/parse-error.rs:80:29 | LL | asm!("", options(), ""); | ^^ expected one of 10 possible tokens error: expected one of `clobber_abi`, `const`, `in`, `inlateout`, `inout`, `label`, `lateout`, `options`, `out`, or `sym`, found `"{}"` - --> $DIR/parse-error.rs:84:33 + --> $DIR/parse-error.rs:82:33 | LL | asm!("{}", in(reg) foo, "{}", out(reg) foo); | ^^^^ expected one of 10 possible tokens error: asm template must be a string literal - --> $DIR/parse-error.rs:86:14 + --> $DIR/parse-error.rs:84:14 | LL | asm!(format!("{{{}}}", 0), in(reg) foo); | ^^^^^^^^^^^^^^^^^^^^ 
@@ -197,7 +197,7 @@ LL | asm!(format!("{{{}}}", 0), in(reg) foo); = note: this error originates in the macro `format` (in Nightly builds, run with -Z macro-backtrace for more info) error: asm template must be a string literal - --> $DIR/parse-error.rs:88:21 + --> $DIR/parse-error.rs:86:21 | LL | asm!("{1}", format!("{{{}}}", 0), in(reg) foo, out(reg) bar); | ^^^^^^^^^^^^^^^^^^^^ @@ -205,139 +205,139 @@ LL | asm!("{1}", format!("{{{}}}", 0), in(reg) foo, out(reg) bar); = note: this error originates in the macro `format` (in Nightly builds, run with -Z macro-backtrace for more info) error: _ cannot be used for input operands - --> $DIR/parse-error.rs:90:28 + --> $DIR/parse-error.rs:88:28 | LL | asm!("{}", in(reg) _); | ^ error: _ cannot be used for input operands - --> $DIR/parse-error.rs:92:31 + --> $DIR/parse-error.rs:90:31 | LL | asm!("{}", inout(reg) _); | ^ error: _ cannot be used for input operands - --> $DIR/parse-error.rs:94:35 + --> $DIR/parse-error.rs:92:35 | LL | asm!("{}", inlateout(reg) _); | ^ error: requires at least a template string argument - --> $DIR/parse-error.rs:101:1 + --> $DIR/parse-error.rs:99:1 | LL | global_asm!(); | ^^^^^^^^^^^^^ error: asm template must be a string literal - --> $DIR/parse-error.rs:103:13 + --> $DIR/parse-error.rs:101:13 | LL | global_asm!(FOO); | ^^^ error: expected token: `,` - --> $DIR/parse-error.rs:105:18 + --> $DIR/parse-error.rs:103:18 | LL | global_asm!("{}" FOO); | ^^^ expected `,` error: expected operand, options, or additional template string - --> $DIR/parse-error.rs:107:19 + --> $DIR/parse-error.rs:105:19 | LL | global_asm!("{}", FOO); | ^^^ expected operand, options, or additional template string error: expected expression, found end of macro arguments - --> $DIR/parse-error.rs:109:24 + --> $DIR/parse-error.rs:107:24 | LL | global_asm!("{}", const); | ^ expected expression error: expected one of `,`, `.`, `?`, or an operator, found `FOO` - --> $DIR/parse-error.rs:111:30 + --> $DIR/parse-error.rs:109:30 | LL | global_asm!("{}", const(reg) FOO); | ^^^ expected one of `,`, `.`, `?`, or an operator error: expected one of `)`, `att_syntax`, or `raw`, found `FOO` - --> $DIR/parse-error.rs:113:25 + --> $DIR/parse-error.rs:111:25 | LL | global_asm!("", options(FOO)); | ^^^ expected one of `)`, `att_syntax`, or `raw` error: expected one of `)`, `att_syntax`, or `raw`, found `FOO` - --> $DIR/parse-error.rs:115:25 + --> $DIR/parse-error.rs:113:25 | LL | global_asm!("", options(FOO,)); | ^^^ expected one of `)`, `att_syntax`, or `raw` error: the `nomem` option cannot be used with `global_asm!` - --> $DIR/parse-error.rs:117:25 + --> $DIR/parse-error.rs:115:25 | LL | global_asm!("", options(nomem FOO)); | ^^^^^ the `nomem` option is not meaningful for global-scoped inline assembly error: expected one of `)` or `,`, found `FOO` - --> $DIR/parse-error.rs:117:31 + --> $DIR/parse-error.rs:115:31 | LL | global_asm!("", options(nomem FOO)); | ^^^ expected one of `)` or `,` error: the `nomem` option cannot be used with `global_asm!` - --> $DIR/parse-error.rs:120:25 + --> $DIR/parse-error.rs:118:25 | LL | global_asm!("", options(nomem, FOO)); | ^^^^^ the `nomem` option is not meaningful for global-scoped inline assembly error: expected one of `)`, `att_syntax`, or `raw`, found `FOO` - --> $DIR/parse-error.rs:120:32 + --> $DIR/parse-error.rs:118:32 | LL | global_asm!("", options(nomem, FOO)); | ^^^ expected one of `)`, `att_syntax`, or `raw` error: expected string literal - --> $DIR/parse-error.rs:124:29 + --> $DIR/parse-error.rs:122:29 | LL | 
global_asm!("", clobber_abi(FOO)); | ^^^ not a string literal error: expected one of `)` or `,`, found `FOO` - --> $DIR/parse-error.rs:126:33 + --> $DIR/parse-error.rs:124:33 | LL | global_asm!("", clobber_abi("C" FOO)); | ^^^ expected one of `)` or `,` error: expected string literal - --> $DIR/parse-error.rs:128:34 + --> $DIR/parse-error.rs:126:34 | LL | global_asm!("", clobber_abi("C", FOO)); | ^^^ not a string literal error: `clobber_abi` cannot be used with `global_asm!` - --> $DIR/parse-error.rs:130:19 + --> $DIR/parse-error.rs:128:19 | LL | global_asm!("{}", clobber_abi("C"), const FOO); | ^^^^^^^^^^^^^^^^ error: `clobber_abi` cannot be used with `global_asm!` - --> $DIR/parse-error.rs:132:28 + --> $DIR/parse-error.rs:130:28 | LL | global_asm!("", options(), clobber_abi("C")); | ^^^^^^^^^^^^^^^^ error: `clobber_abi` cannot be used with `global_asm!` - --> $DIR/parse-error.rs:134:30 + --> $DIR/parse-error.rs:132:30 | LL | global_asm!("{}", options(), clobber_abi("C"), const FOO); | ^^^^^^^^^^^^^^^^ error: `clobber_abi` cannot be used with `global_asm!` - --> $DIR/parse-error.rs:136:17 + --> $DIR/parse-error.rs:134:17 | LL | global_asm!("", clobber_abi("C"), clobber_abi("C")); | ^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^ error: duplicate argument named `a` - --> $DIR/parse-error.rs:138:35 + --> $DIR/parse-error.rs:136:35 | LL | global_asm!("{a}", a = const FOO, a = const BAR); | ------------- ^^^^^^^^^^^^^ duplicate argument @@ -345,7 +345,7 @@ LL | global_asm!("{a}", a = const FOO, a = const BAR); | previously here error: argument never used - --> $DIR/parse-error.rs:138:35 + --> $DIR/parse-error.rs:136:35 | LL | global_asm!("{a}", a = const FOO, a = const BAR); | ^^^^^^^^^^^^^ argument never used @@ -353,19 +353,19 @@ LL | global_asm!("{a}", a = const FOO, a = const BAR); = help: if this argument is intentionally unused, consider using it in an asm comment: `"/* {1} */"` error: expected one of `clobber_abi`, `const`, `options`, or `sym`, found `""` - --> $DIR/parse-error.rs:141:28 + --> $DIR/parse-error.rs:139:28 | LL | global_asm!("", options(), ""); | ^^ expected one of `clobber_abi`, `const`, `options`, or `sym` error: expected one of `clobber_abi`, `const`, `options`, or `sym`, found `"{}"` - --> $DIR/parse-error.rs:143:30 + --> $DIR/parse-error.rs:141:30 | LL | global_asm!("{}", const FOO, "{}", const FOO); | ^^^^ expected one of `clobber_abi`, `const`, `options`, or `sym` error: asm template must be a string literal - --> $DIR/parse-error.rs:145:13 + --> $DIR/parse-error.rs:143:13 | LL | global_asm!(format!("{{{}}}", 0), const FOO); | ^^^^^^^^^^^^^^^^^^^^ @@ -373,7 +373,7 @@ LL | global_asm!(format!("{{{}}}", 0), const FOO); = note: this error originates in the macro `format` (in Nightly builds, run with -Z macro-backtrace for more info) error: asm template must be a string literal - --> $DIR/parse-error.rs:147:20 + --> $DIR/parse-error.rs:145:20 | LL | global_asm!("{1}", format!("{{{}}}", 0), const FOO, const BAR); | ^^^^^^^^^^^^^^^^^^^^ @@ -381,43 +381,43 @@ LL | global_asm!("{1}", format!("{{{}}}", 0), const FOO, const BAR); = note: this error originates in the macro `format` (in Nightly builds, run with -Z macro-backtrace for more info) error: the `in` operand cannot be used with `global_asm!` - --> $DIR/parse-error.rs:150:19 + --> $DIR/parse-error.rs:148:19 | LL | global_asm!("{}", in(reg)); | ^^ the `in` operand is not meaningful for global-scoped inline assembly, remove it error: the `out` operand cannot be used with `global_asm!` - --> $DIR/parse-error.rs:152:19 + --> 
$DIR/parse-error.rs:150:19 | LL | global_asm!("{}", out(reg)); | ^^^ the `out` operand is not meaningful for global-scoped inline assembly, remove it error: the `lateout` operand cannot be used with `global_asm!` - --> $DIR/parse-error.rs:154:19 + --> $DIR/parse-error.rs:152:19 | LL | global_asm!("{}", lateout(reg)); | ^^^^^^^ the `lateout` operand is not meaningful for global-scoped inline assembly, remove it error: the `inout` operand cannot be used with `global_asm!` - --> $DIR/parse-error.rs:156:19 + --> $DIR/parse-error.rs:154:19 | LL | global_asm!("{}", inout(reg)); | ^^^^^ the `inout` operand is not meaningful for global-scoped inline assembly, remove it error: the `inlateout` operand cannot be used with `global_asm!` - --> $DIR/parse-error.rs:158:19 + --> $DIR/parse-error.rs:156:19 | LL | global_asm!("{}", inlateout(reg)); | ^^^^^^^^^ the `inlateout` operand is not meaningful for global-scoped inline assembly, remove it error: the `label` operand cannot be used with `global_asm!` - --> $DIR/parse-error.rs:160:19 + --> $DIR/parse-error.rs:158:19 | LL | global_asm!("{}", label(reg)); | ^^^^^ the `label` operand is not meaningful for global-scoped inline assembly, remove it error[E0435]: attempt to use a non-constant value in a constant - --> $DIR/parse-error.rs:39:37 + --> $DIR/parse-error.rs:37:37 | LL | asm!("{}", options(), const foo); | ^^^ non-constant value @@ -428,7 +428,7 @@ LL | const foo: /* Type */ = 0; | ~~~~~ ++++++++++++ error[E0435]: attempt to use a non-constant value in a constant - --> $DIR/parse-error.rs:71:44 + --> $DIR/parse-error.rs:69:44 | LL | asm!("{}", clobber_abi("C"), const foo); | ^^^ non-constant value @@ -439,7 +439,7 @@ LL | const foo: /* Type */ = 0; | ~~~~~ ++++++++++++ error[E0435]: attempt to use a non-constant value in a constant - --> $DIR/parse-error.rs:74:55 + --> $DIR/parse-error.rs:72:55 | LL | asm!("{}", options(), clobber_abi("C"), const foo); | ^^^ non-constant value @@ -450,7 +450,7 @@ LL | const foo: /* Type */ = 0; | ~~~~~ ++++++++++++ error[E0435]: attempt to use a non-constant value in a constant - --> $DIR/parse-error.rs:76:31 + --> $DIR/parse-error.rs:74:31 | LL | asm!("{a}", a = const foo, a = const bar); | ^^^ non-constant value @@ -461,7 +461,7 @@ LL | const foo: /* Type */ = 0; | ~~~~~ ++++++++++++ error[E0435]: attempt to use a non-constant value in a constant - --> $DIR/parse-error.rs:76:46 + --> $DIR/parse-error.rs:74:46 | LL | asm!("{a}", a = const foo, a = const bar); | ^^^ non-constant value diff --git a/tests/ui/asm/type-check-1.rs b/tests/ui/asm/type-check-1.rs index 22669dce280bb..4dc30fb58385c 100644 --- a/tests/ui/asm/type-check-1.rs +++ b/tests/ui/asm/type-check-1.rs @@ -2,8 +2,6 @@ //@ ignore-nvptx64 //@ ignore-spirv -#![feature(asm_const)] - use std::arch::{asm, global_asm}; fn main() { diff --git a/tests/ui/asm/type-check-1.stderr b/tests/ui/asm/type-check-1.stderr index d47e6ae1d2a97..aa9eed2fce65c 100644 --- a/tests/ui/asm/type-check-1.stderr +++ b/tests/ui/asm/type-check-1.stderr @@ -1,17 +1,17 @@ error: invalid asm output - --> $DIR/type-check-1.rs:14:29 + --> $DIR/type-check-1.rs:12:29 | LL | asm!("{}", out(reg) 1 + 2); | ^^^^^ cannot assign to this expression error: invalid asm output - --> $DIR/type-check-1.rs:16:31 + --> $DIR/type-check-1.rs:14:31 | LL | asm!("{}", inout(reg) 1 + 2); | ^^^^^ cannot assign to this expression error[E0277]: the size for values of type `[u64]` cannot be known at compilation time - --> $DIR/type-check-1.rs:22:28 + --> $DIR/type-check-1.rs:20:28 | LL | asm!("{}", in(reg) v[..]); 
| ^^^^^ doesn't have a size known at compile-time @@ -20,7 +20,7 @@ LL | asm!("{}", in(reg) v[..]); = note: all inline asm arguments must have a statically known size error[E0277]: the size for values of type `[u64]` cannot be known at compilation time - --> $DIR/type-check-1.rs:25:29 + --> $DIR/type-check-1.rs:23:29 | LL | asm!("{}", out(reg) v[..]); | ^^^^^ doesn't have a size known at compile-time @@ -29,7 +29,7 @@ LL | asm!("{}", out(reg) v[..]); = note: all inline asm arguments must have a statically known size error[E0277]: the size for values of type `[u64]` cannot be known at compilation time - --> $DIR/type-check-1.rs:28:31 + --> $DIR/type-check-1.rs:26:31 | LL | asm!("{}", inout(reg) v[..]); | ^^^^^ doesn't have a size known at compile-time @@ -38,7 +38,7 @@ LL | asm!("{}", inout(reg) v[..]); = note: all inline asm arguments must have a statically known size error: cannot use value of type `[u64]` for inline assembly - --> $DIR/type-check-1.rs:22:28 + --> $DIR/type-check-1.rs:20:28 | LL | asm!("{}", in(reg) v[..]); | ^^^^^ @@ -46,7 +46,7 @@ LL | asm!("{}", in(reg) v[..]); = note: only integers, floats, SIMD vectors, pointers and function pointers can be used as arguments for inline assembly error: cannot use value of type `[u64]` for inline assembly - --> $DIR/type-check-1.rs:25:29 + --> $DIR/type-check-1.rs:23:29 | LL | asm!("{}", out(reg) v[..]); | ^^^^^ @@ -54,7 +54,7 @@ LL | asm!("{}", out(reg) v[..]); = note: only integers, floats, SIMD vectors, pointers and function pointers can be used as arguments for inline assembly error: cannot use value of type `[u64]` for inline assembly - --> $DIR/type-check-1.rs:28:31 + --> $DIR/type-check-1.rs:26:31 | LL | asm!("{}", inout(reg) v[..]); | ^^^^^ diff --git a/tests/ui/asm/x86_64/bad-reg.rs b/tests/ui/asm/x86_64/bad-reg.rs index d41c46d57bb13..2a189a91c5a47 100644 --- a/tests/ui/asm/x86_64/bad-reg.rs +++ b/tests/ui/asm/x86_64/bad-reg.rs @@ -1,8 +1,6 @@ //@ only-x86_64 //@ compile-flags: -C target-feature=+avx2 -#![feature(asm_const)] - use std::arch::asm; fn main() { diff --git a/tests/ui/asm/x86_64/bad-reg.stderr b/tests/ui/asm/x86_64/bad-reg.stderr index 8017008e97d75..3df1f7b220869 100644 --- a/tests/ui/asm/x86_64/bad-reg.stderr +++ b/tests/ui/asm/x86_64/bad-reg.stderr @@ -1,17 +1,17 @@ error: invalid register class `foo`: unknown register class - --> $DIR/bad-reg.rs:14:20 + --> $DIR/bad-reg.rs:12:20 | LL | asm!("{}", in(foo) foo); | ^^^^^^^^^^^ error: invalid register `foo`: unknown register - --> $DIR/bad-reg.rs:16:18 + --> $DIR/bad-reg.rs:14:18 | LL | asm!("", in("foo") foo); | ^^^^^^^^^^^^^ error: invalid asm template modifier for this register class - --> $DIR/bad-reg.rs:18:15 + --> $DIR/bad-reg.rs:16:15 | LL | asm!("{:z}", in(reg) foo); | ^^^^ ----------- argument @@ -21,7 +21,7 @@ LL | asm!("{:z}", in(reg) foo); = note: the `reg` register class supports the following template modifiers: `l`, `x`, `e`, `r` error: invalid asm template modifier for this register class - --> $DIR/bad-reg.rs:20:15 + --> $DIR/bad-reg.rs:18:15 | LL | asm!("{:r}", in(xmm_reg) foo); | ^^^^ --------------- argument @@ -31,7 +31,7 @@ LL | asm!("{:r}", in(xmm_reg) foo); = note: the `xmm_reg` register class supports the following template modifiers: `x`, `y`, `z` error: asm template modifiers are not allowed for `const` arguments - --> $DIR/bad-reg.rs:22:15 + --> $DIR/bad-reg.rs:20:15 | LL | asm!("{:a}", const 0); | ^^^^ ------- argument @@ -39,7 +39,7 @@ LL | asm!("{:a}", const 0); | template modifier error: asm template modifiers are not allowed for 
`sym` arguments - --> $DIR/bad-reg.rs:24:15 + --> $DIR/bad-reg.rs:22:15 | LL | asm!("{:a}", sym main); | ^^^^ -------- argument @@ -47,67 +47,67 @@ LL | asm!("{:a}", sym main); | template modifier error: invalid register `ebp`: the frame pointer cannot be used as an operand for inline asm - --> $DIR/bad-reg.rs:26:18 + --> $DIR/bad-reg.rs:24:18 | LL | asm!("", in("ebp") foo); | ^^^^^^^^^^^^^ error: invalid register `rsp`: the stack pointer cannot be used as an operand for inline asm - --> $DIR/bad-reg.rs:28:18 + --> $DIR/bad-reg.rs:26:18 | LL | asm!("", in("rsp") foo); | ^^^^^^^^^^^^^ error: invalid register `ip`: the instruction pointer cannot be used as an operand for inline asm - --> $DIR/bad-reg.rs:30:18 + --> $DIR/bad-reg.rs:28:18 | LL | asm!("", in("ip") foo); | ^^^^^^^^^^^^ error: register class `x87_reg` can only be used as a clobber, not as an input or output - --> $DIR/bad-reg.rs:33:18 + --> $DIR/bad-reg.rs:31:18 | LL | asm!("", in("st(2)") foo); | ^^^^^^^^^^^^^^^ error: register class `mmx_reg` can only be used as a clobber, not as an input or output - --> $DIR/bad-reg.rs:36:18 + --> $DIR/bad-reg.rs:34:18 | LL | asm!("", in("mm0") foo); | ^^^^^^^^^^^^^ error: register class `kreg0` can only be used as a clobber, not as an input or output - --> $DIR/bad-reg.rs:39:18 + --> $DIR/bad-reg.rs:37:18 | LL | asm!("", in("k0") foo); | ^^^^^^^^^^^^ error: register class `x87_reg` can only be used as a clobber, not as an input or output - --> $DIR/bad-reg.rs:44:20 + --> $DIR/bad-reg.rs:42:20 | LL | asm!("{}", in(x87_reg) foo); | ^^^^^^^^^^^^^^^ error: register class `mmx_reg` can only be used as a clobber, not as an input or output - --> $DIR/bad-reg.rs:47:20 + --> $DIR/bad-reg.rs:45:20 | LL | asm!("{}", in(mmx_reg) foo); | ^^^^^^^^^^^^^^^ error: register class `x87_reg` can only be used as a clobber, not as an input or output - --> $DIR/bad-reg.rs:50:20 + --> $DIR/bad-reg.rs:48:20 | LL | asm!("{}", out(x87_reg) _); | ^^^^^^^^^^^^^^ error: register class `mmx_reg` can only be used as a clobber, not as an input or output - --> $DIR/bad-reg.rs:52:20 + --> $DIR/bad-reg.rs:50:20 | LL | asm!("{}", out(mmx_reg) _); | ^^^^^^^^^^^^^^ error: register `al` conflicts with register `eax` - --> $DIR/bad-reg.rs:58:33 + --> $DIR/bad-reg.rs:56:33 | LL | asm!("", in("eax") foo, in("al") bar); | ------------- ^^^^^^^^^^^^ register `al` @@ -115,7 +115,7 @@ LL | asm!("", in("eax") foo, in("al") bar); | register `eax` error: register `rax` conflicts with register `rax` - --> $DIR/bad-reg.rs:61:33 + --> $DIR/bad-reg.rs:59:33 | LL | asm!("", in("rax") foo, out("rax") bar); | ------------- ^^^^^^^^^^^^^^ register `rax` @@ -123,13 +123,13 @@ LL | asm!("", in("rax") foo, out("rax") bar); | register `rax` | help: use `lateout` instead of `out` to avoid conflict - --> $DIR/bad-reg.rs:61:18 + --> $DIR/bad-reg.rs:59:18 | LL | asm!("", in("rax") foo, out("rax") bar); | ^^^^^^^^^^^^^ error: register `ymm0` conflicts with register `xmm0` - --> $DIR/bad-reg.rs:66:34 + --> $DIR/bad-reg.rs:64:34 | LL | asm!("", in("xmm0") foo, in("ymm0") bar); | -------------- ^^^^^^^^^^^^^^ register `ymm0` @@ -137,7 +137,7 @@ LL | asm!("", in("xmm0") foo, in("ymm0") bar); | register `xmm0` error: register `ymm0` conflicts with register `xmm0` - --> $DIR/bad-reg.rs:68:34 + --> $DIR/bad-reg.rs:66:34 | LL | asm!("", in("xmm0") foo, out("ymm0") bar); | -------------- ^^^^^^^^^^^^^^^ register `ymm0` @@ -145,13 +145,13 @@ LL | asm!("", in("xmm0") foo, out("ymm0") bar); | register `xmm0` | help: use `lateout` instead of `out` to avoid conflict - --> 
$DIR/bad-reg.rs:68:18 + --> $DIR/bad-reg.rs:66:18 | LL | asm!("", in("xmm0") foo, out("ymm0") bar); | ^^^^^^^^^^^^^^ error: type `i32` cannot be used with this register class - --> $DIR/bad-reg.rs:33:30 + --> $DIR/bad-reg.rs:31:30 | LL | asm!("", in("st(2)") foo); | ^^^ @@ -159,7 +159,7 @@ LL | asm!("", in("st(2)") foo); = note: register class `x87_reg` supports these types: error: type `i32` cannot be used with this register class - --> $DIR/bad-reg.rs:36:28 + --> $DIR/bad-reg.rs:34:28 | LL | asm!("", in("mm0") foo); | ^^^ @@ -167,7 +167,7 @@ LL | asm!("", in("mm0") foo); = note: register class `mmx_reg` supports these types: error: type `i32` cannot be used with this register class - --> $DIR/bad-reg.rs:39:27 + --> $DIR/bad-reg.rs:37:27 | LL | asm!("", in("k0") foo); | ^^^ @@ -175,7 +175,7 @@ LL | asm!("", in("k0") foo); = note: register class `kreg0` supports these types: error: type `i32` cannot be used with this register class - --> $DIR/bad-reg.rs:44:32 + --> $DIR/bad-reg.rs:42:32 | LL | asm!("{}", in(x87_reg) foo); | ^^^ @@ -183,7 +183,7 @@ LL | asm!("{}", in(x87_reg) foo); = note: register class `x87_reg` supports these types: error: type `i32` cannot be used with this register class - --> $DIR/bad-reg.rs:47:32 + --> $DIR/bad-reg.rs:45:32 | LL | asm!("{}", in(mmx_reg) foo); | ^^^ @@ -191,7 +191,7 @@ LL | asm!("{}", in(mmx_reg) foo); = note: register class `mmx_reg` supports these types: error: type `i32` cannot be used with this register class - --> $DIR/bad-reg.rs:58:42 + --> $DIR/bad-reg.rs:56:42 | LL | asm!("", in("eax") foo, in("al") bar); | ^^^ @@ -199,7 +199,7 @@ LL | asm!("", in("eax") foo, in("al") bar); = note: register class `reg_byte` supports these types: i8 error: type `i32` cannot be used with this register class - --> $DIR/bad-reg.rs:63:27 + --> $DIR/bad-reg.rs:61:27 | LL | asm!("", in("al") foo, lateout("al") bar); | ^^^ @@ -207,7 +207,7 @@ LL | asm!("", in("al") foo, lateout("al") bar); = note: register class `reg_byte` supports these types: i8 error: type `i32` cannot be used with this register class - --> $DIR/bad-reg.rs:63:46 + --> $DIR/bad-reg.rs:61:46 | LL | asm!("", in("al") foo, lateout("al") bar); | ^^^ diff --git a/tests/ui/asm/x86_64/const.rs b/tests/ui/asm/x86_64/const.rs index 817a338a5b99f..eaaaf92e823b4 100644 --- a/tests/ui/asm/x86_64/const.rs +++ b/tests/ui/asm/x86_64/const.rs @@ -2,8 +2,6 @@ //@ run-pass //@ needs-asm-support -#![feature(asm_const)] - use std::arch::{asm, global_asm}; fn const_generic() -> usize { diff --git a/tests/ui/asm/x86_64/type-check-3.rs b/tests/ui/asm/x86_64/type-check-3.rs index bd242af3dbc07..bfb795d262455 100644 --- a/tests/ui/asm/x86_64/type-check-3.rs +++ b/tests/ui/asm/x86_64/type-check-3.rs @@ -1,8 +1,6 @@ //@ only-x86_64 //@ compile-flags: -C target-feature=+avx512f -#![feature(asm_const)] - use std::arch::{asm, global_asm}; use std::arch::x86_64::{_mm256_setzero_ps, _mm_setzero_ps}; diff --git a/tests/ui/asm/x86_64/type-check-3.stderr b/tests/ui/asm/x86_64/type-check-3.stderr index 202b97ca5c0e8..5a7b349413e45 100644 --- a/tests/ui/asm/x86_64/type-check-3.stderr +++ b/tests/ui/asm/x86_64/type-check-3.stderr @@ -1,5 +1,5 @@ error: type `i128` cannot be used with this register class - --> $DIR/type-check-3.rs:14:28 + --> $DIR/type-check-3.rs:12:28 | LL | asm!("{}", in(reg) 0i128); | ^^^^^ @@ -7,7 +7,7 @@ LL | asm!("{}", in(reg) 0i128); = note: register class `reg` supports these types: i16, i32, i64, f16, f32, f64 error: type `__m128` cannot be used with this register class - --> $DIR/type-check-3.rs:16:28 + --> 
$DIR/type-check-3.rs:14:28 | LL | asm!("{}", in(reg) _mm_setzero_ps()); | ^^^^^^^^^^^^^^^^ @@ -15,7 +15,7 @@ LL | asm!("{}", in(reg) _mm_setzero_ps()); = note: register class `reg` supports these types: i16, i32, i64, f16, f32, f64 error: type `__m256` cannot be used with this register class - --> $DIR/type-check-3.rs:18:28 + --> $DIR/type-check-3.rs:16:28 | LL | asm!("{}", in(reg) _mm256_setzero_ps()); | ^^^^^^^^^^^^^^^^^^^ @@ -23,7 +23,7 @@ LL | asm!("{}", in(reg) _mm256_setzero_ps()); = note: register class `reg` supports these types: i16, i32, i64, f16, f32, f64 error: type `u8` cannot be used with this register class - --> $DIR/type-check-3.rs:20:32 + --> $DIR/type-check-3.rs:18:32 | LL | asm!("{}", in(xmm_reg) 0u8); | ^^^ @@ -31,7 +31,7 @@ LL | asm!("{}", in(xmm_reg) 0u8); = note: register class `xmm_reg` supports these types: i32, i64, f16, f32, f64, f128, i8x16, i16x8, i32x4, i64x2, f16x8, f32x4, f64x2 error: `avx512bw` target feature is not enabled - --> $DIR/type-check-3.rs:29:29 + --> $DIR/type-check-3.rs:27:29 | LL | asm!("{}", in(kreg) 0u64); | ^^^^ @@ -39,7 +39,7 @@ LL | asm!("{}", in(kreg) 0u64); = note: this is required to use type `u64` with register class `kreg` warning: formatting may not be suitable for sub-register argument - --> $DIR/type-check-3.rs:34:15 + --> $DIR/type-check-3.rs:32:15 | LL | asm!("{0} {0}", in(reg) 0i16); | ^^^ ^^^ ---- for this argument @@ -49,7 +49,7 @@ LL | asm!("{0} {0}", in(reg) 0i16); = note: `#[warn(asm_sub_register)]` on by default warning: formatting may not be suitable for sub-register argument - --> $DIR/type-check-3.rs:36:15 + --> $DIR/type-check-3.rs:34:15 | LL | asm!("{0} {0:x}", in(reg) 0i16); | ^^^ ---- for this argument @@ -58,7 +58,7 @@ LL | asm!("{0} {0:x}", in(reg) 0i16); = help: or use `{0:r}` to keep the default formatting of `rax` (for 64-bit values) warning: formatting may not be suitable for sub-register argument - --> $DIR/type-check-3.rs:38:15 + --> $DIR/type-check-3.rs:36:15 | LL | asm!("{}", in(reg) 0i32); | ^^ ---- for this argument @@ -67,7 +67,7 @@ LL | asm!("{}", in(reg) 0i32); = help: or use `{0:r}` to keep the default formatting of `rax` (for 64-bit values) warning: formatting may not be suitable for sub-register argument - --> $DIR/type-check-3.rs:41:15 + --> $DIR/type-check-3.rs:39:15 | LL | asm!("{}", in(ymm_reg) 0i64); | ^^ ---- for this argument @@ -76,7 +76,7 @@ LL | asm!("{}", in(ymm_reg) 0i64); = help: or use `{0:y}` to keep the default formatting of `ymm0` (for 256-bit values) error: type `i8` cannot be used with this register class - --> $DIR/type-check-3.rs:52:28 + --> $DIR/type-check-3.rs:50:28 | LL | asm!("{}", in(reg) 0i8); | ^^^ @@ -85,7 +85,7 @@ LL | asm!("{}", in(reg) 0i8); = help: consider using the `reg_byte` register class instead error: incompatible types for asm inout argument - --> $DIR/type-check-3.rs:64:33 + --> $DIR/type-check-3.rs:62:33 | LL | asm!("{:r}", inout(reg) 0u32 => val_f32); | ^^^^ ^^^^^^^ type `f32` @@ -95,7 +95,7 @@ LL | asm!("{:r}", inout(reg) 0u32 => val_f32); = note: asm inout arguments must have the same type, unless they are both pointers or integers of the same size error: incompatible types for asm inout argument - --> $DIR/type-check-3.rs:66:33 + --> $DIR/type-check-3.rs:64:33 | LL | asm!("{:r}", inout(reg) 0u32 => val_ptr); | ^^^^ ^^^^^^^ type `*mut u8` @@ -105,7 +105,7 @@ LL | asm!("{:r}", inout(reg) 0u32 => val_ptr); = note: asm inout arguments must have the same type, unless they are both pointers or integers of the same size error: incompatible types for asm 
inout argument - --> $DIR/type-check-3.rs:68:33 + --> $DIR/type-check-3.rs:66:33 | LL | asm!("{:r}", inout(reg) main => val_u32); | ^^^^ ^^^^^^^ type `u32` diff --git a/tests/ui/asm/x86_64/type-check-4.rs b/tests/ui/asm/x86_64/type-check-4.rs index f7bf60d04dfdf..9503cd6d8ab2b 100644 --- a/tests/ui/asm/x86_64/type-check-4.rs +++ b/tests/ui/asm/x86_64/type-check-4.rs @@ -1,8 +1,6 @@ //@ only-x86_64 //@ compile-flags: -C target-feature=+avx512f -#![feature(asm_const)] - use std::arch::{asm, global_asm}; use std::arch::x86_64::{_mm256_setzero_ps, _mm_setzero_ps}; diff --git a/tests/ui/asm/x86_64/type-check-4.stderr b/tests/ui/asm/x86_64/type-check-4.stderr index cbdc051b3436b..f1bbc9e7d33de 100644 --- a/tests/ui/asm/x86_64/type-check-4.stderr +++ b/tests/ui/asm/x86_64/type-check-4.stderr @@ -1,5 +1,5 @@ error[E0658]: referencing statics in constants is unstable - --> $DIR/type-check-4.rs:21:25 + --> $DIR/type-check-4.rs:19:25 | LL | global_asm!("{}", const S); | ^ @@ -11,7 +11,7 @@ LL | global_asm!("{}", const S); = help: to fix this, the value can be extracted to a `const` and then used. error[E0658]: referencing statics in constants is unstable - --> $DIR/type-check-4.rs:24:35 + --> $DIR/type-check-4.rs:22:35 | LL | global_asm!("{}", const const_foo(S)); | ^ @@ -23,7 +23,7 @@ LL | global_asm!("{}", const const_foo(S)); = help: to fix this, the value can be extracted to a `const` and then used. error[E0658]: referencing statics in constants is unstable - --> $DIR/type-check-4.rs:27:35 + --> $DIR/type-check-4.rs:25:35 | LL | global_asm!("{}", const const_bar(S)); | ^ diff --git a/tests/ui/asm/x86_64/x86_64_parse_error.rs b/tests/ui/asm/x86_64/x86_64_parse_error.rs index 850033d4ce03b..3df0febf6b070 100644 --- a/tests/ui/asm/x86_64/x86_64_parse_error.rs +++ b/tests/ui/asm/x86_64/x86_64_parse_error.rs @@ -1,7 +1,5 @@ //@ only-x86_64 -#![feature(asm_const)] - use std::arch::asm; fn main() { diff --git a/tests/ui/asm/x86_64/x86_64_parse_error.stderr b/tests/ui/asm/x86_64/x86_64_parse_error.stderr index 9751f7b09d0eb..b64f6c1127eb6 100644 --- a/tests/ui/asm/x86_64/x86_64_parse_error.stderr +++ b/tests/ui/asm/x86_64/x86_64_parse_error.stderr @@ -1,11 +1,11 @@ error: explicit register arguments cannot have names - --> $DIR/x86_64_parse_error.rs:11:18 + --> $DIR/x86_64_parse_error.rs:9:18 | LL | asm!("", a = in("eax") foo); | ^^^^^^^^^^^^^^^^^ error: positional arguments cannot follow named arguments or explicit register arguments - --> $DIR/x86_64_parse_error.rs:17:36 + --> $DIR/x86_64_parse_error.rs:15:36 | LL | asm!("{1}", in("eax") foo, const bar); | ------------- ^^^^^^^^^ positional argument @@ -13,7 +13,7 @@ LL | asm!("{1}", in("eax") foo, const bar); | explicit register argument error[E0435]: attempt to use a non-constant value in a constant - --> $DIR/x86_64_parse_error.rs:13:46 + --> $DIR/x86_64_parse_error.rs:11:46 | LL | asm!("{a}", in("eax") foo, a = const bar); | ^^^ non-constant value @@ -24,7 +24,7 @@ LL | const bar: /* Type */ = 0; | ~~~~~ ++++++++++++ error[E0435]: attempt to use a non-constant value in a constant - --> $DIR/x86_64_parse_error.rs:15:46 + --> $DIR/x86_64_parse_error.rs:13:46 | LL | asm!("{a}", in("eax") foo, a = const bar); | ^^^ non-constant value @@ -35,7 +35,7 @@ LL | const bar: /* Type */ = 0; | ~~~~~ ++++++++++++ error[E0435]: attempt to use a non-constant value in a constant - --> $DIR/x86_64_parse_error.rs:17:42 + --> $DIR/x86_64_parse_error.rs:15:42 | LL | asm!("{1}", in("eax") foo, const bar); | ^^^ non-constant value diff --git 
a/tests/ui/async-await/async-closures/move-out-of-ref.rs b/tests/ui/async-await/async-closures/move-out-of-ref.rs new file mode 100644 index 0000000000000..a05447232f62b --- /dev/null +++ b/tests/ui/async-await/async-closures/move-out-of-ref.rs @@ -0,0 +1,16 @@ +//@ compile-flags: -Zvalidate-mir +//@ edition: 2021 + +#![feature(async_closure)] + +// NOT copy. +struct Ty; + +fn hello(x: &Ty) { + let c = async || { + *x; + //~^ ERROR cannot move out of `*x` which is behind a shared reference + }; +} + +fn main() {} diff --git a/tests/ui/async-await/async-closures/move-out-of-ref.stderr b/tests/ui/async-await/async-closures/move-out-of-ref.stderr new file mode 100644 index 0000000000000..294905a481d65 --- /dev/null +++ b/tests/ui/async-await/async-closures/move-out-of-ref.stderr @@ -0,0 +1,18 @@ +error[E0507]: cannot move out of `*x` which is behind a shared reference + --> $DIR/move-out-of-ref.rs:11:9 + | +LL | *x; + | ^^ move occurs because `*x` has type `Ty`, which does not implement the `Copy` trait + | +note: if `Ty` implemented `Clone`, you could clone the value + --> $DIR/move-out-of-ref.rs:7:1 + | +LL | struct Ty; + | ^^^^^^^^^ consider implementing `Clone` for this type +... +LL | *x; + | -- you could clone this value + +error: aborting due to 1 previous error + +For more information about this error, try `rustc --explain E0507`. diff --git a/tests/ui/async-await/async-closures/sig-from-bare-fn.rs b/tests/ui/async-await/async-closures/sig-from-bare-fn.rs new file mode 100644 index 0000000000000..a679471a3b3d0 --- /dev/null +++ b/tests/ui/async-await/async-closures/sig-from-bare-fn.rs @@ -0,0 +1,49 @@ +//@ check-pass +//@ edition: 2021 + +// Make sure that we infer the args of an async closure even if it's passed to +// a function that requires the async closure implement `Fn*` but does *not* have +// a `Future` bound on the return type. + +#![feature(async_closure)] + +use std::future::Future; + +trait TryStream { + type Ok; + type Err; +} + +trait TryFuture { + type Ok; + type Err; +} + +impl TryFuture for F where F: Future> { + type Ok = T; + type Err = E; +} + +trait TryStreamExt: TryStream { + fn try_for_each(&self, f: F) + where + F: FnMut(Self::Ok) -> Fut, + Fut: TryFuture; +} + +impl TryStreamExt for S where S: TryStream { + fn try_for_each(&self, f: F) + where + F: FnMut(Self::Ok) -> Fut, + Fut: TryFuture, + { } +} + +fn test(stream: impl TryStream) { + stream.try_for_each(async |s| { + s.trim(); // Make sure we know the type of `s` at this point. + Ok(()) + }); +} + +fn main() {} diff --git a/tests/ui/attributes/assoc-expr.rs b/tests/ui/attributes/assoc-expr.rs new file mode 100644 index 0000000000000..f39557d2ef017 --- /dev/null +++ b/tests/ui/attributes/assoc-expr.rs @@ -0,0 +1,42 @@ +//@ check-pass +// This test triggered an assertion failure in token collection due to +// mishandling of attributes on associative expressions. 
+ +#![feature(cfg_eval)] +#![feature(rustc_attrs)] +#![feature(stmt_expr_attributes)] +#![allow(internal_features)] + +fn main() {} + +#[cfg_eval] +struct Foo1( + [ bool; { + let _x = 30; + #[cfg_attr(unix, rustc_dummy(aa))] 1 + } ] +); + +#[cfg_eval] +struct Foo12( + [ bool; { + let _x = 30; + #[cfg_attr(unix, rustc_dummy(bb))] 1 + 2 + } ] +); + +#[cfg_eval] +struct Foox( + [ bool; { + let _x = 30; + #[cfg_attr(unix, rustc_dummy(cc))] _x + } ] +); + +#[cfg_eval] +struct Foox2( + [ bool; { + let _x = 30; + #[cfg_attr(unix, rustc_dummy(dd))] _x + 2 + } ] +); diff --git a/tests/ui/attributes/linkage.rs b/tests/ui/attributes/linkage.rs new file mode 100644 index 0000000000000..0d5ce699fa808 --- /dev/null +++ b/tests/ui/attributes/linkage.rs @@ -0,0 +1,42 @@ +#![feature(linkage)] +#![feature(stmt_expr_attributes)] +#![deny(unused_attributes)] +#![allow(dead_code)] + +#[linkage = "weak"] //~ ERROR attribute should be applied to a function or static +type InvalidTy = (); + +#[linkage = "weak"] //~ ERROR attribute should be applied to a function or static +mod invalid_module {} + +#[linkage = "weak"] //~ ERROR attribute should be applied to a function or static +struct F; + +#[linkage = "weak"] //~ ERROR attribute should be applied to a function or static +impl F { + #[linkage = "weak"] + fn valid(&self) {} +} + +#[linkage = "weak"] +fn f() { + #[linkage = "weak"] + { + 1 + }; + //~^^^^ ERROR attribute should be applied to a function or static +} + +extern "C" { + #[linkage = "weak"] + static A: *const (); + + #[linkage = "weak"] + fn bar(); +} + +fn main() { + let _ = #[linkage = "weak"] + (|| 1); + //~^^ ERROR attribute should be applied to a function or static +} diff --git a/tests/ui/attributes/linkage.stderr b/tests/ui/attributes/linkage.stderr new file mode 100644 index 0000000000000..d5595529f400a --- /dev/null +++ b/tests/ui/attributes/linkage.stderr @@ -0,0 +1,55 @@ +error: attribute should be applied to a function or static + --> $DIR/linkage.rs:6:1 + | +LL | #[linkage = "weak"] + | ^^^^^^^^^^^^^^^^^^^ +LL | type InvalidTy = (); + | -------------------- not a function definition or static + +error: attribute should be applied to a function or static + --> $DIR/linkage.rs:9:1 + | +LL | #[linkage = "weak"] + | ^^^^^^^^^^^^^^^^^^^ +LL | mod invalid_module {} + | --------------------- not a function definition or static + +error: attribute should be applied to a function or static + --> $DIR/linkage.rs:12:1 + | +LL | #[linkage = "weak"] + | ^^^^^^^^^^^^^^^^^^^ +LL | struct F; + | --------- not a function definition or static + +error: attribute should be applied to a function or static + --> $DIR/linkage.rs:15:1 + | +LL | #[linkage = "weak"] + | ^^^^^^^^^^^^^^^^^^^ +LL | / impl F { +LL | | #[linkage = "weak"] +LL | | fn valid(&self) {} +LL | | } + | |_- not a function definition or static + +error: attribute should be applied to a function or static + --> $DIR/linkage.rs:23:5 + | +LL | #[linkage = "weak"] + | ^^^^^^^^^^^^^^^^^^^ +LL | / { +LL | | 1 +LL | | }; + | |_____- not a function definition or static + +error: attribute should be applied to a function or static + --> $DIR/linkage.rs:39:13 + | +LL | let _ = #[linkage = "weak"] + | ^^^^^^^^^^^^^^^^^^^ +LL | (|| 1); + | ------ not a function definition or static + +error: aborting due to 6 previous errors + diff --git a/tests/ui/attributes/unsafe/cfg-unsafe-attributes.rs b/tests/ui/attributes/unsafe/cfg-unsafe-attributes.rs index ce365d1a8b1c8..6a9853b2f6fc6 100644 --- a/tests/ui/attributes/unsafe/cfg-unsafe-attributes.rs +++ 
b/tests/ui/attributes/unsafe/cfg-unsafe-attributes.rs @@ -1,5 +1,4 @@ //@ build-pass -#![feature(unsafe_attributes)] #[cfg_attr(all(), unsafe(no_mangle))] fn a() {} diff --git a/tests/ui/attributes/unsafe/derive-unsafe-attributes.rs b/tests/ui/attributes/unsafe/derive-unsafe-attributes.rs index b8edb4aab907b..95fc19f506b24 100644 --- a/tests/ui/attributes/unsafe/derive-unsafe-attributes.rs +++ b/tests/ui/attributes/unsafe/derive-unsafe-attributes.rs @@ -1,5 +1,3 @@ -#![feature(unsafe_attributes)] - #[derive(unsafe(Debug))] //~^ ERROR: expected identifier, found keyword `unsafe` //~| ERROR: traits in `#[derive(...)]` don't accept arguments diff --git a/tests/ui/attributes/unsafe/derive-unsafe-attributes.stderr b/tests/ui/attributes/unsafe/derive-unsafe-attributes.stderr index c40a5512fd5cd..4002c930b63e5 100644 --- a/tests/ui/attributes/unsafe/derive-unsafe-attributes.stderr +++ b/tests/ui/attributes/unsafe/derive-unsafe-attributes.stderr @@ -1,5 +1,5 @@ error: expected identifier, found keyword `unsafe` - --> $DIR/derive-unsafe-attributes.rs:3:10 + --> $DIR/derive-unsafe-attributes.rs:1:10 | LL | #[derive(unsafe(Debug))] | ^^^^^^ expected identifier, found keyword @@ -10,13 +10,13 @@ LL | #[derive(r#unsafe(Debug))] | ++ error: traits in `#[derive(...)]` don't accept arguments - --> $DIR/derive-unsafe-attributes.rs:3:16 + --> $DIR/derive-unsafe-attributes.rs:1:16 | LL | #[derive(unsafe(Debug))] | ^^^^^^^ help: remove the arguments error: `derive` is not an unsafe attribute - --> $DIR/derive-unsafe-attributes.rs:12:3 + --> $DIR/derive-unsafe-attributes.rs:10:3 | LL | #[unsafe(derive(Debug))] | ^^^^^^ this is not an unsafe attribute @@ -24,7 +24,7 @@ LL | #[unsafe(derive(Debug))] = note: extraneous unsafe is not allowed in attributes error: expected identifier, found keyword `unsafe` - --> $DIR/derive-unsafe-attributes.rs:3:10 + --> $DIR/derive-unsafe-attributes.rs:1:10 | LL | #[derive(unsafe(Debug))] | ^^^^^^ expected identifier, found keyword @@ -36,7 +36,7 @@ LL | #[derive(r#unsafe(Debug))] | ++ error: expected identifier, found keyword `unsafe` - --> $DIR/derive-unsafe-attributes.rs:3:10 + --> $DIR/derive-unsafe-attributes.rs:1:10 | LL | #[derive(unsafe(Debug))] | ^^^^^^ expected identifier, found keyword @@ -48,13 +48,13 @@ LL | #[derive(r#unsafe(Debug))] | ++ error: cannot find derive macro `r#unsafe` in this scope - --> $DIR/derive-unsafe-attributes.rs:3:10 + --> $DIR/derive-unsafe-attributes.rs:1:10 | LL | #[derive(unsafe(Debug))] | ^^^^^^ error: cannot find derive macro `r#unsafe` in this scope - --> $DIR/derive-unsafe-attributes.rs:3:10 + --> $DIR/derive-unsafe-attributes.rs:1:10 | LL | #[derive(unsafe(Debug))] | ^^^^^^ diff --git a/tests/ui/attributes/unsafe/double-unsafe-attributes.rs b/tests/ui/attributes/unsafe/double-unsafe-attributes.rs index a6c0ea578f25a..894d1327da799 100644 --- a/tests/ui/attributes/unsafe/double-unsafe-attributes.rs +++ b/tests/ui/attributes/unsafe/double-unsafe-attributes.rs @@ -1,5 +1,3 @@ -#![feature(unsafe_attributes)] - #[unsafe(unsafe(no_mangle))] //~^ ERROR expected identifier, found keyword `unsafe` //~| ERROR cannot find attribute `r#unsafe` in this scope diff --git a/tests/ui/attributes/unsafe/double-unsafe-attributes.stderr b/tests/ui/attributes/unsafe/double-unsafe-attributes.stderr index 950b2636993c1..0825cf794083d 100644 --- a/tests/ui/attributes/unsafe/double-unsafe-attributes.stderr +++ b/tests/ui/attributes/unsafe/double-unsafe-attributes.stderr @@ -1,5 +1,5 @@ error: expected identifier, found keyword `unsafe` - --> 
$DIR/double-unsafe-attributes.rs:3:10 + --> $DIR/double-unsafe-attributes.rs:1:10 | LL | #[unsafe(unsafe(no_mangle))] | ^^^^^^ expected identifier, found keyword @@ -10,7 +10,7 @@ LL | #[unsafe(r#unsafe(no_mangle))] | ++ error: `r#unsafe` is not an unsafe attribute - --> $DIR/double-unsafe-attributes.rs:3:3 + --> $DIR/double-unsafe-attributes.rs:1:3 | LL | #[unsafe(unsafe(no_mangle))] | ^^^^^^ this is not an unsafe attribute @@ -18,7 +18,7 @@ LL | #[unsafe(unsafe(no_mangle))] = note: extraneous unsafe is not allowed in attributes error: cannot find attribute `r#unsafe` in this scope - --> $DIR/double-unsafe-attributes.rs:3:10 + --> $DIR/double-unsafe-attributes.rs:1:10 | LL | #[unsafe(unsafe(no_mangle))] | ^^^^^^ diff --git a/tests/ui/attributes/unsafe/extraneous-unsafe-attributes.rs b/tests/ui/attributes/unsafe/extraneous-unsafe-attributes.rs index 0181add843bcd..b561550c19843 100644 --- a/tests/ui/attributes/unsafe/extraneous-unsafe-attributes.rs +++ b/tests/ui/attributes/unsafe/extraneous-unsafe-attributes.rs @@ -1,6 +1,5 @@ //@ edition: 2024 //@ compile-flags: -Zunstable-options -#![feature(unsafe_attributes)] #[unsafe(cfg(any()))] //~ ERROR: is not an unsafe attribute fn a() {} diff --git a/tests/ui/attributes/unsafe/extraneous-unsafe-attributes.stderr b/tests/ui/attributes/unsafe/extraneous-unsafe-attributes.stderr index f39074b613d40..9fb7f062b912b 100644 --- a/tests/ui/attributes/unsafe/extraneous-unsafe-attributes.stderr +++ b/tests/ui/attributes/unsafe/extraneous-unsafe-attributes.stderr @@ -1,5 +1,5 @@ error: `cfg` is not an unsafe attribute - --> $DIR/extraneous-unsafe-attributes.rs:5:3 + --> $DIR/extraneous-unsafe-attributes.rs:4:3 | LL | #[unsafe(cfg(any()))] | ^^^^^^ this is not an unsafe attribute @@ -7,7 +7,7 @@ LL | #[unsafe(cfg(any()))] = note: extraneous unsafe is not allowed in attributes error: `cfg_attr` is not an unsafe attribute - --> $DIR/extraneous-unsafe-attributes.rs:8:3 + --> $DIR/extraneous-unsafe-attributes.rs:7:3 | LL | #[unsafe(cfg_attr(any(), allow(dead_code)))] | ^^^^^^ this is not an unsafe attribute @@ -15,7 +15,7 @@ LL | #[unsafe(cfg_attr(any(), allow(dead_code)))] = note: extraneous unsafe is not allowed in attributes error: `test` is not an unsafe attribute - --> $DIR/extraneous-unsafe-attributes.rs:11:3 + --> $DIR/extraneous-unsafe-attributes.rs:10:3 | LL | #[unsafe(test)] | ^^^^^^ this is not an unsafe attribute @@ -23,7 +23,7 @@ LL | #[unsafe(test)] = note: extraneous unsafe is not allowed in attributes error: `ignore` is not an unsafe attribute - --> $DIR/extraneous-unsafe-attributes.rs:14:3 + --> $DIR/extraneous-unsafe-attributes.rs:13:3 | LL | #[unsafe(ignore = "test")] | ^^^^^^ this is not an unsafe attribute @@ -31,7 +31,7 @@ LL | #[unsafe(ignore = "test")] = note: extraneous unsafe is not allowed in attributes error: `should_panic` is not an unsafe attribute - --> $DIR/extraneous-unsafe-attributes.rs:17:3 + --> $DIR/extraneous-unsafe-attributes.rs:16:3 | LL | #[unsafe(should_panic(expected = "test"))] | ^^^^^^ this is not an unsafe attribute @@ -39,7 +39,7 @@ LL | #[unsafe(should_panic(expected = "test"))] = note: extraneous unsafe is not allowed in attributes error: `macro_use` is not an unsafe attribute - --> $DIR/extraneous-unsafe-attributes.rs:20:3 + --> $DIR/extraneous-unsafe-attributes.rs:19:3 | LL | #[unsafe(macro_use)] | ^^^^^^ this is not an unsafe attribute @@ -47,7 +47,7 @@ LL | #[unsafe(macro_use)] = note: extraneous unsafe is not allowed in attributes error: `macro_export` is not an unsafe attribute - --> 
$DIR/extraneous-unsafe-attributes.rs:22:7 + --> $DIR/extraneous-unsafe-attributes.rs:21:7 | LL | #[unsafe(macro_export)] | ^^^^^^ this is not an unsafe attribute @@ -55,7 +55,7 @@ LL | #[unsafe(macro_export)] = note: extraneous unsafe is not allowed in attributes error: `used` is not an unsafe attribute - --> $DIR/extraneous-unsafe-attributes.rs:28:3 + --> $DIR/extraneous-unsafe-attributes.rs:27:3 | LL | #[unsafe(used)] | ^^^^^^ this is not an unsafe attribute diff --git a/tests/ui/attributes/unsafe/proc-unsafe-attributes.rs b/tests/ui/attributes/unsafe/proc-unsafe-attributes.rs index f29a5b3252b0a..eaf8706369a5a 100644 --- a/tests/ui/attributes/unsafe/proc-unsafe-attributes.rs +++ b/tests/ui/attributes/unsafe/proc-unsafe-attributes.rs @@ -1,5 +1,3 @@ -#![feature(unsafe_attributes)] - #[unsafe(proc_macro)] //~^ ERROR: is not an unsafe attribute //~| ERROR attribute is only usable with crates of the `proc-macro` crate type diff --git a/tests/ui/attributes/unsafe/proc-unsafe-attributes.stderr b/tests/ui/attributes/unsafe/proc-unsafe-attributes.stderr index 79d34d458bd6c..9c5751c82e4c4 100644 --- a/tests/ui/attributes/unsafe/proc-unsafe-attributes.stderr +++ b/tests/ui/attributes/unsafe/proc-unsafe-attributes.stderr @@ -1,11 +1,11 @@ error[E0452]: malformed lint attribute input - --> $DIR/proc-unsafe-attributes.rs:28:16 + --> $DIR/proc-unsafe-attributes.rs:26:16 | LL | #[unsafe(allow(unsafe(dead_code)))] | ^^^^^^^^^^^^^^^^^ bad attribute argument error[E0452]: malformed lint attribute input - --> $DIR/proc-unsafe-attributes.rs:28:16 + --> $DIR/proc-unsafe-attributes.rs:26:16 | LL | #[unsafe(allow(unsafe(dead_code)))] | ^^^^^^^^^^^^^^^^^ bad attribute argument @@ -13,7 +13,7 @@ LL | #[unsafe(allow(unsafe(dead_code)))] = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` error: `proc_macro` is not an unsafe attribute - --> $DIR/proc-unsafe-attributes.rs:3:3 + --> $DIR/proc-unsafe-attributes.rs:1:3 | LL | #[unsafe(proc_macro)] | ^^^^^^ this is not an unsafe attribute @@ -21,7 +21,7 @@ LL | #[unsafe(proc_macro)] = note: extraneous unsafe is not allowed in attributes error: `proc_macro_derive` is not an unsafe attribute - --> $DIR/proc-unsafe-attributes.rs:9:3 + --> $DIR/proc-unsafe-attributes.rs:7:3 | LL | #[unsafe(proc_macro_derive(Foo))] | ^^^^^^ this is not an unsafe attribute @@ -29,7 +29,7 @@ LL | #[unsafe(proc_macro_derive(Foo))] = note: extraneous unsafe is not allowed in attributes error: expected identifier, found keyword `unsafe` - --> $DIR/proc-unsafe-attributes.rs:14:21 + --> $DIR/proc-unsafe-attributes.rs:12:21 | LL | #[proc_macro_derive(unsafe(Foo))] | ^^^^^^ expected identifier, found keyword @@ -40,7 +40,7 @@ LL | #[proc_macro_derive(r#unsafe(Foo))] | ++ error: `proc_macro_attribute` is not an unsafe attribute - --> $DIR/proc-unsafe-attributes.rs:19:3 + --> $DIR/proc-unsafe-attributes.rs:17:3 | LL | #[unsafe(proc_macro_attribute)] | ^^^^^^ this is not an unsafe attribute @@ -48,7 +48,7 @@ LL | #[unsafe(proc_macro_attribute)] = note: extraneous unsafe is not allowed in attributes error: `allow` is not an unsafe attribute - --> $DIR/proc-unsafe-attributes.rs:24:3 + --> $DIR/proc-unsafe-attributes.rs:22:3 | LL | #[unsafe(allow(dead_code))] | ^^^^^^ this is not an unsafe attribute @@ -56,7 +56,7 @@ LL | #[unsafe(allow(dead_code))] = note: extraneous unsafe is not allowed in attributes error: `allow` is not an unsafe attribute - --> $DIR/proc-unsafe-attributes.rs:28:3 + --> $DIR/proc-unsafe-attributes.rs:26:3 | LL | #[unsafe(allow(unsafe(dead_code)))] | ^^^^^^ 
this is not an unsafe attribute @@ -64,7 +64,7 @@ LL | #[unsafe(allow(unsafe(dead_code)))] = note: extraneous unsafe is not allowed in attributes error: expected identifier, found keyword `unsafe` - --> $DIR/proc-unsafe-attributes.rs:28:16 + --> $DIR/proc-unsafe-attributes.rs:26:16 | LL | #[unsafe(allow(unsafe(dead_code)))] | ^^^^^^ expected identifier, found keyword @@ -75,31 +75,31 @@ LL | #[unsafe(allow(r#unsafe(dead_code)))] | ++ error: the `#[proc_macro]` attribute is only usable with crates of the `proc-macro` crate type - --> $DIR/proc-unsafe-attributes.rs:3:1 + --> $DIR/proc-unsafe-attributes.rs:1:1 | LL | #[unsafe(proc_macro)] | ^^^^^^^^^^^^^^^^^^^^^ error: the `#[proc_macro_derive]` attribute is only usable with crates of the `proc-macro` crate type - --> $DIR/proc-unsafe-attributes.rs:9:1 + --> $DIR/proc-unsafe-attributes.rs:7:1 | LL | #[unsafe(proc_macro_derive(Foo))] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: the `#[proc_macro_derive]` attribute is only usable with crates of the `proc-macro` crate type - --> $DIR/proc-unsafe-attributes.rs:14:1 + --> $DIR/proc-unsafe-attributes.rs:12:1 | LL | #[proc_macro_derive(unsafe(Foo))] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: the `#[proc_macro_attribute]` attribute is only usable with crates of the `proc-macro` crate type - --> $DIR/proc-unsafe-attributes.rs:19:1 + --> $DIR/proc-unsafe-attributes.rs:17:1 | LL | #[unsafe(proc_macro_attribute)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error[E0452]: malformed lint attribute input - --> $DIR/proc-unsafe-attributes.rs:28:16 + --> $DIR/proc-unsafe-attributes.rs:26:16 | LL | #[unsafe(allow(unsafe(dead_code)))] | ^^^^^^^^^^^^^^^^^ bad attribute argument @@ -107,7 +107,7 @@ LL | #[unsafe(allow(unsafe(dead_code)))] = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` error[E0452]: malformed lint attribute input - --> $DIR/proc-unsafe-attributes.rs:28:16 + --> $DIR/proc-unsafe-attributes.rs:26:16 | LL | #[unsafe(allow(unsafe(dead_code)))] | ^^^^^^^^^^^^^^^^^ bad attribute argument diff --git a/tests/ui/attributes/unsafe/unsafe-attributes.rs b/tests/ui/attributes/unsafe/unsafe-attributes.rs index 33a412add500c..5c57767b3b964 100644 --- a/tests/ui/attributes/unsafe/unsafe-attributes.rs +++ b/tests/ui/attributes/unsafe/unsafe-attributes.rs @@ -1,5 +1,4 @@ //@ build-pass -#![feature(unsafe_attributes)] #[unsafe(no_mangle)] fn a() {} diff --git a/tests/ui/attributes/unsafe/unsafe-safe-attribute.rs b/tests/ui/attributes/unsafe/unsafe-safe-attribute.rs index 67db36afd2e69..5af03a2b8d137 100644 --- a/tests/ui/attributes/unsafe/unsafe-safe-attribute.rs +++ b/tests/ui/attributes/unsafe/unsafe-safe-attribute.rs @@ -1,5 +1,3 @@ -#![feature(unsafe_attributes)] - #[unsafe(repr(C))] //~ ERROR: is not an unsafe attribute struct Foo {} diff --git a/tests/ui/attributes/unsafe/unsafe-safe-attribute.stderr b/tests/ui/attributes/unsafe/unsafe-safe-attribute.stderr index 584b0ea797d0a..55172c91aaeef 100644 --- a/tests/ui/attributes/unsafe/unsafe-safe-attribute.stderr +++ b/tests/ui/attributes/unsafe/unsafe-safe-attribute.stderr @@ -1,5 +1,5 @@ error: `repr` is not an unsafe attribute - --> $DIR/unsafe-safe-attribute.rs:3:3 + --> $DIR/unsafe-safe-attribute.rs:1:3 | LL | #[unsafe(repr(C))] | ^^^^^^ this is not an unsafe attribute diff --git a/tests/ui/attributes/unsafe/unsafe-safe-attribute_diagnostic.rs b/tests/ui/attributes/unsafe/unsafe-safe-attribute_diagnostic.rs index ff2eb61b40538..0f241cc439f34 100644 --- a/tests/ui/attributes/unsafe/unsafe-safe-attribute_diagnostic.rs +++ 
b/tests/ui/attributes/unsafe/unsafe-safe-attribute_diagnostic.rs @@ -1,5 +1,3 @@ -#![feature(unsafe_attributes)] - #[unsafe(diagnostic::on_unimplemented( //~ ERROR: is not an unsafe attribute message = "testing", ))] diff --git a/tests/ui/attributes/unsafe/unsafe-safe-attribute_diagnostic.stderr b/tests/ui/attributes/unsafe/unsafe-safe-attribute_diagnostic.stderr index 26b5e4e37b931..3bc291db5acf8 100644 --- a/tests/ui/attributes/unsafe/unsafe-safe-attribute_diagnostic.stderr +++ b/tests/ui/attributes/unsafe/unsafe-safe-attribute_diagnostic.stderr @@ -1,5 +1,5 @@ error: `diagnostic::on_unimplemented` is not an unsafe attribute - --> $DIR/unsafe-safe-attribute_diagnostic.rs:3:3 + --> $DIR/unsafe-safe-attribute_diagnostic.rs:1:3 | LL | #[unsafe(diagnostic::on_unimplemented( | ^^^^^^ this is not an unsafe attribute diff --git a/tests/ui/cast/ptr-to-trait-obj-different-args.stderr b/tests/ui/cast/ptr-to-trait-obj-different-args.stderr index b04289ae74748..8e60ca42f0a52 100644 --- a/tests/ui/cast/ptr-to-trait-obj-different-args.stderr +++ b/tests/ui/cast/ptr-to-trait-obj-different-args.stderr @@ -14,6 +14,7 @@ LL | let y: *const dyn Trait = x as _; | = note: expected trait object `dyn Trait` found trait object `dyn Trait` + = help: `dyn Trait` implements `Trait` so you could box the found value and coerce it to the trait object `Box`, you will have to change the expected type as well error[E0308]: mismatched types --> $DIR/ptr-to-trait-obj-different-args.rs:27:34 @@ -25,6 +26,7 @@ LL | let _: *const dyn Trait = x as _; | = note: expected trait object `dyn Trait` found trait object `dyn Trait` + = help: `dyn Trait` implements `Trait` so you could box the found value and coerce it to the trait object `Box`, you will have to change the expected type as well error[E0308]: mismatched types --> $DIR/ptr-to-trait-obj-different-args.rs:28:34 @@ -37,6 +39,7 @@ LL | let _: *const dyn Trait = t as _; | = note: expected trait object `dyn Trait` found trait object `dyn Trait` + = help: `dyn Trait` implements `Trait` so you could box the found value and coerce it to the trait object `Box`, you will have to change the expected type as well error[E0308]: mismatched types --> $DIR/ptr-to-trait-obj-different-args.rs:36:5 diff --git a/tests/ui/cast/ptr-to-trait-obj-different-regions-id-trait.stderr b/tests/ui/cast/ptr-to-trait-obj-different-regions-id-trait.current.stderr similarity index 91% rename from tests/ui/cast/ptr-to-trait-obj-different-regions-id-trait.stderr rename to tests/ui/cast/ptr-to-trait-obj-different-regions-id-trait.current.stderr index d1d598e603f18..5a5b4bfcacf43 100644 --- a/tests/ui/cast/ptr-to-trait-obj-different-regions-id-trait.stderr +++ b/tests/ui/cast/ptr-to-trait-obj-different-regions-id-trait.current.stderr @@ -1,5 +1,5 @@ error: lifetime may not live long enough - --> $DIR/ptr-to-trait-obj-different-regions-id-trait.rs:21:17 + --> $DIR/ptr-to-trait-obj-different-regions-id-trait.rs:24:17 | LL | fn m<'a>() { | -- lifetime `'a` defined here diff --git a/tests/ui/cast/ptr-to-trait-obj-different-regions-id-trait.next.stderr b/tests/ui/cast/ptr-to-trait-obj-different-regions-id-trait.next.stderr new file mode 100644 index 0000000000000..5a5b4bfcacf43 --- /dev/null +++ b/tests/ui/cast/ptr-to-trait-obj-different-regions-id-trait.next.stderr @@ -0,0 +1,15 @@ +error: lifetime may not live long enough + --> $DIR/ptr-to-trait-obj-different-regions-id-trait.rs:24:17 + | +LL | fn m<'a>() { + | -- lifetime `'a` defined here +LL | let unsend: *const dyn Cat<'a> = &(); +LL | let _send = unsend as 
*const S>; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type annotation requires that `'a` must outlive `'static` + | + = note: requirement occurs because of the type `S>`, which makes the generic argument `dyn Cat<'_>` invariant + = note: the struct `S` is invariant over the parameter `T` + = help: see for more information about variance + +error: aborting due to 1 previous error + diff --git a/tests/ui/cast/ptr-to-trait-obj-different-regions-id-trait.rs b/tests/ui/cast/ptr-to-trait-obj-different-regions-id-trait.rs index cdd55e243927c..f968dca4fd310 100644 --- a/tests/ui/cast/ptr-to-trait-obj-different-regions-id-trait.rs +++ b/tests/ui/cast/ptr-to-trait-obj-different-regions-id-trait.rs @@ -1,3 +1,6 @@ +//@ revisions: current next +//@ ignore-compare-mode-next-solver (explicit revisions) +//@[next] compile-flags: -Znext-solver //@ check-fail // // Make sure we can't trick the compiler by using a projection. diff --git a/tests/ui/coercion/coerce-expect-unsized-ascribed.stderr b/tests/ui/coercion/coerce-expect-unsized-ascribed.stderr index 646044ae41abc..0c220a13876b8 100644 --- a/tests/ui/coercion/coerce-expect-unsized-ascribed.stderr +++ b/tests/ui/coercion/coerce-expect-unsized-ascribed.stderr @@ -42,6 +42,7 @@ LL | let _ = type_ascribe!(Box::new( if true { false } else { true }), Box` found struct `Box` + = help: `bool` implements `Debug` so you could box the found value and coerce it to the trait object `Box`, you will have to change the expected type as well error[E0308]: mismatched types --> $DIR/coerce-expect-unsized-ascribed.rs:16:27 @@ -51,6 +52,7 @@ LL | let _ = type_ascribe!(Box::new( match true { true => 'a', false => 'b' | = note: expected struct `Box` found struct `Box` + = help: `char` implements `Debug` so you could box the found value and coerce it to the trait object `Box`, you will have to change the expected type as well error[E0308]: mismatched types --> $DIR/coerce-expect-unsized-ascribed.rs:18:27 @@ -96,6 +98,7 @@ LL | let _ = type_ascribe!(&if true { false } else { true }, &dyn Debug); | = note: expected reference `&dyn Debug` found reference `&bool` + = help: `bool` implements `Debug` so you could box the found value and coerce it to the trait object `Box`, you will have to change the expected type as well error[E0308]: mismatched types --> $DIR/coerce-expect-unsized-ascribed.rs:24:27 @@ -105,6 +108,7 @@ LL | let _ = type_ascribe!(&match true { true => 'a', false => 'b' }, &dyn D | = note: expected reference `&dyn Debug` found reference `&char` + = help: `char` implements `Debug` so you could box the found value and coerce it to the trait object `Box`, you will have to change the expected type as well error[E0308]: mismatched types --> $DIR/coerce-expect-unsized-ascribed.rs:26:27 diff --git a/tests/crashes/116308.rs b/tests/ui/const-generics/adt_const_params/116308.rs similarity index 81% rename from tests/crashes/116308.rs rename to tests/ui/const-generics/adt_const_params/116308.rs index cb96c80d79bdc..9ea7022e29c8a 100644 --- a/tests/crashes/116308.rs +++ b/tests/ui/const-generics/adt_const_params/116308.rs @@ -1,6 +1,8 @@ -//@ known-bug: #116308 +//@ check-pass #![feature(adt_const_params)] +// Regression test for #116308 + pub trait Identity { type Identity; } diff --git a/tests/ui/consts/const-eval/raw-bytes.32bit.stderr b/tests/ui/consts/const-eval/raw-bytes.32bit.stderr index 25f17f9c38a97..27c85cc8ce41a 100644 --- a/tests/ui/consts/const-eval/raw-bytes.32bit.stderr +++ b/tests/ui/consts/const-eval/raw-bytes.32bit.stderr @@ -436,17 +436,27 @@ LL | const 
TRAIT_OBJ_CONTENT_INVALID: &dyn Trait = unsafe { mem::transmute::<_, ╾ALLOC_ID╼ ╾ALLOC_ID╼ │ ╾──╼╾──╼ } -error[E0080]: evaluation of constant value failed - --> $DIR/raw-bytes.rs:196:62 +error[E0080]: it is undefined behavior to use this value + --> $DIR/raw-bytes.rs:196:1 | LL | const RAW_TRAIT_OBJ_VTABLE_NULL: *const dyn Trait = unsafe { mem::transmute((&92u8, 0usize)) }; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ out-of-bounds pointer use: expected a pointer to some allocation, but got a null pointer + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered null pointer, but expected a vtable pointer + | + = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior. + = note: the raw bytes of the constant (size: 8, align: 4) { + ╾ALLOC_ID╼ 00 00 00 00 │ ╾──╼.... + } -error[E0080]: evaluation of constant value failed - --> $DIR/raw-bytes.rs:199:65 +error[E0080]: it is undefined behavior to use this value + --> $DIR/raw-bytes.rs:199:1 | LL | const RAW_TRAIT_OBJ_VTABLE_INVALID: *const dyn Trait = unsafe { mem::transmute((&92u8, &3u64)) }; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ using ALLOC32 as vtable pointer but it does not point to a vtable + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered ALLOC27, but expected a vtable pointer + | + = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior. + = note: the raw bytes of the constant (size: 8, align: 4) { + ╾ALLOC_ID╼ ╾ALLOC_ID╼ │ ╾──╼╾──╼ + } error[E0080]: it is undefined behavior to use this value --> $DIR/raw-bytes.rs:204:1 diff --git a/tests/ui/consts/const-eval/raw-bytes.64bit.stderr b/tests/ui/consts/const-eval/raw-bytes.64bit.stderr index 0fb9694895d8e..2b0ce99a88173 100644 --- a/tests/ui/consts/const-eval/raw-bytes.64bit.stderr +++ b/tests/ui/consts/const-eval/raw-bytes.64bit.stderr @@ -436,17 +436,27 @@ LL | const TRAIT_OBJ_CONTENT_INVALID: &dyn Trait = unsafe { mem::transmute::<_, ╾ALLOC_ID╼ ╾ALLOC_ID╼ │ ╾──────╼╾──────╼ } -error[E0080]: evaluation of constant value failed - --> $DIR/raw-bytes.rs:196:62 +error[E0080]: it is undefined behavior to use this value + --> $DIR/raw-bytes.rs:196:1 | LL | const RAW_TRAIT_OBJ_VTABLE_NULL: *const dyn Trait = unsafe { mem::transmute((&92u8, 0usize)) }; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ out-of-bounds pointer use: expected a pointer to some allocation, but got a null pointer + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered null pointer, but expected a vtable pointer + | + = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior. + = note: the raw bytes of the constant (size: 16, align: 8) { + ╾ALLOC_ID╼ 00 00 00 00 00 00 00 00 │ ╾──────╼........ 
+ } -error[E0080]: evaluation of constant value failed - --> $DIR/raw-bytes.rs:199:65 +error[E0080]: it is undefined behavior to use this value + --> $DIR/raw-bytes.rs:199:1 | LL | const RAW_TRAIT_OBJ_VTABLE_INVALID: *const dyn Trait = unsafe { mem::transmute((&92u8, &3u64)) }; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ using ALLOC32 as vtable pointer but it does not point to a vtable + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered ALLOC27, but expected a vtable pointer + | + = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior. + = note: the raw bytes of the constant (size: 16, align: 8) { + ╾ALLOC_ID╼ ╾ALLOC_ID╼ │ ╾──────╼╾──────╼ + } error[E0080]: it is undefined behavior to use this value --> $DIR/raw-bytes.rs:204:1 diff --git a/tests/ui/consts/const-eval/raw-bytes.rs b/tests/ui/consts/const-eval/raw-bytes.rs index de1a81b00243b..0df732df30e1d 100644 --- a/tests/ui/consts/const-eval/raw-bytes.rs +++ b/tests/ui/consts/const-eval/raw-bytes.rs @@ -194,10 +194,10 @@ const TRAIT_OBJ_CONTENT_INVALID: &dyn Trait = unsafe { mem::transmute::<_, &bool //~| expected a boolean const RAW_TRAIT_OBJ_VTABLE_NULL: *const dyn Trait = unsafe { mem::transmute((&92u8, 0usize)) }; -//~^ ERROR evaluation of constant value failed +//~^ ERROR it is undefined behavior to use this value //~| null pointer const RAW_TRAIT_OBJ_VTABLE_INVALID: *const dyn Trait = unsafe { mem::transmute((&92u8, &3u64)) }; -//~^ ERROR evaluation of constant value failed +//~^ ERROR it is undefined behavior to use this value //~| vtable // Uninhabited types diff --git a/tests/ui/consts/const-eval/ub-incorrect-vtable.32bit.stderr b/tests/ui/consts/const-eval/ub-incorrect-vtable.32bit.stderr index 439ccb24e6167..5c47cbfdf3b1e 100644 --- a/tests/ui/consts/const-eval/ub-incorrect-vtable.32bit.stderr +++ b/tests/ui/consts/const-eval/ub-incorrect-vtable.32bit.stderr @@ -1,46 +1,56 @@ -error[E0080]: evaluation of constant value failed - --> $DIR/ub-incorrect-vtable.rs:19:14 +error[E0080]: it is undefined behavior to use this value + --> $DIR/ub-incorrect-vtable.rs:18:1 + | +LL | const INVALID_VTABLE_ALIGNMENT: &dyn Trait = + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered ALLOC1, but expected a vtable pointer | -LL | unsafe { std::mem::transmute((&92u8, &[0usize, 1usize, 1000usize])) }; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ using ALLOC8 as vtable pointer but it does not point to a vtable + = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior. 
+ = note: the raw bytes of the constant (size: 8, align: 4) { + ╾ALLOC0╼ ╾ALLOC1╼ │ ╾──╼╾──╼ + } -error[E0080]: evaluation of constant value failed - --> $DIR/ub-incorrect-vtable.rs:24:14 +error[E0080]: it is undefined behavior to use this value + --> $DIR/ub-incorrect-vtable.rs:23:1 + | +LL | const INVALID_VTABLE_SIZE: &dyn Trait = + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered ALLOC3, but expected a vtable pointer | -LL | unsafe { std::mem::transmute((&92u8, &[1usize, usize::MAX, 1usize])) }; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ using ALLOC9 as vtable pointer but it does not point to a vtable + = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior. + = note: the raw bytes of the constant (size: 8, align: 4) { + ╾ALLOC2╼ ╾ALLOC3╼ │ ╾──╼╾──╼ + } error[E0080]: it is undefined behavior to use this value --> $DIR/ub-incorrect-vtable.rs:33:1 | LL | const INVALID_VTABLE_ALIGNMENT_UB: W<&dyn Trait> = - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .0: encountered ALLOC1, but expected a vtable pointer + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .0: encountered ALLOC5, but expected a vtable pointer | = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior. = note: the raw bytes of the constant (size: 8, align: 4) { - ╾ALLOC0╼ ╾ALLOC1╼ │ ╾──╼╾──╼ + ╾ALLOC4╼ ╾ALLOC5╼ │ ╾──╼╾──╼ } error[E0080]: it is undefined behavior to use this value --> $DIR/ub-incorrect-vtable.rs:38:1 | LL | const INVALID_VTABLE_SIZE_UB: W<&dyn Trait> = - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .0: encountered ALLOC3, but expected a vtable pointer + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .0: encountered ALLOC7, but expected a vtable pointer | = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior. = note: the raw bytes of the constant (size: 8, align: 4) { - ╾ALLOC2╼ ╾ALLOC3╼ │ ╾──╼╾──╼ + ╾ALLOC6╼ ╾ALLOC7╼ │ ╾──╼╾──╼ } error[E0080]: it is undefined behavior to use this value --> $DIR/ub-incorrect-vtable.rs:44:1 | LL | const INVALID_VTABLE_UB: W<&dyn Trait> = - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .0: encountered ALLOC5, but expected a vtable pointer + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .0: encountered ALLOC9, but expected a vtable pointer | = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior. = note: the raw bytes of the constant (size: 8, align: 4) { - ╾ALLOC4╼ ╾ALLOC5╼ │ ╾──╼╾──╼ + ╾ALLOC8╼ ╾ALLOC9╼ │ ╾──╼╾──╼ } error[E0080]: it is undefined behavior to use this value @@ -51,7 +61,7 @@ LL | const G: Wide = unsafe { Transmute { t: FOO }.u }; | = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. 
Please open an issue on the rustc repository if you believe it should not be considered undefined behavior. = note: the raw bytes of the constant (size: 8, align: 4) { - ╾ALLOC6╼ ╾ALLOC7╼ │ ╾──╼╾──╼ + ╾ALLOC10╼ ╾ALLOC11╼ │ ╾──╼╾──╼ } error: aborting due to 6 previous errors diff --git a/tests/ui/consts/const-eval/ub-incorrect-vtable.64bit.stderr b/tests/ui/consts/const-eval/ub-incorrect-vtable.64bit.stderr index 89bf959703a04..f400073aca215 100644 --- a/tests/ui/consts/const-eval/ub-incorrect-vtable.64bit.stderr +++ b/tests/ui/consts/const-eval/ub-incorrect-vtable.64bit.stderr @@ -1,46 +1,56 @@ -error[E0080]: evaluation of constant value failed - --> $DIR/ub-incorrect-vtable.rs:19:14 +error[E0080]: it is undefined behavior to use this value + --> $DIR/ub-incorrect-vtable.rs:18:1 + | +LL | const INVALID_VTABLE_ALIGNMENT: &dyn Trait = + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered ALLOC1, but expected a vtable pointer | -LL | unsafe { std::mem::transmute((&92u8, &[0usize, 1usize, 1000usize])) }; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ using ALLOC8 as vtable pointer but it does not point to a vtable + = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior. + = note: the raw bytes of the constant (size: 16, align: 8) { + ╾ALLOC0╼ ╾ALLOC1╼ │ ╾──────╼╾──────╼ + } -error[E0080]: evaluation of constant value failed - --> $DIR/ub-incorrect-vtable.rs:24:14 +error[E0080]: it is undefined behavior to use this value + --> $DIR/ub-incorrect-vtable.rs:23:1 + | +LL | const INVALID_VTABLE_SIZE: &dyn Trait = + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered ALLOC3, but expected a vtable pointer | -LL | unsafe { std::mem::transmute((&92u8, &[1usize, usize::MAX, 1usize])) }; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ using ALLOC9 as vtable pointer but it does not point to a vtable + = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior. + = note: the raw bytes of the constant (size: 16, align: 8) { + ╾ALLOC2╼ ╾ALLOC3╼ │ ╾──────╼╾──────╼ + } error[E0080]: it is undefined behavior to use this value --> $DIR/ub-incorrect-vtable.rs:33:1 | LL | const INVALID_VTABLE_ALIGNMENT_UB: W<&dyn Trait> = - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .0: encountered ALLOC1, but expected a vtable pointer + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .0: encountered ALLOC5, but expected a vtable pointer | = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior. 
= note: the raw bytes of the constant (size: 16, align: 8) { - ╾ALLOC0╼ ╾ALLOC1╼ │ ╾──────╼╾──────╼ + ╾ALLOC4╼ ╾ALLOC5╼ │ ╾──────╼╾──────╼ } error[E0080]: it is undefined behavior to use this value --> $DIR/ub-incorrect-vtable.rs:38:1 | LL | const INVALID_VTABLE_SIZE_UB: W<&dyn Trait> = - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .0: encountered ALLOC3, but expected a vtable pointer + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .0: encountered ALLOC7, but expected a vtable pointer | = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior. = note: the raw bytes of the constant (size: 16, align: 8) { - ╾ALLOC2╼ ╾ALLOC3╼ │ ╾──────╼╾──────╼ + ╾ALLOC6╼ ╾ALLOC7╼ │ ╾──────╼╾──────╼ } error[E0080]: it is undefined behavior to use this value --> $DIR/ub-incorrect-vtable.rs:44:1 | LL | const INVALID_VTABLE_UB: W<&dyn Trait> = - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .0: encountered ALLOC5, but expected a vtable pointer + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .0: encountered ALLOC9, but expected a vtable pointer | = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior. = note: the raw bytes of the constant (size: 16, align: 8) { - ╾ALLOC4╼ ╾ALLOC5╼ │ ╾──────╼╾──────╼ + ╾ALLOC8╼ ╾ALLOC9╼ │ ╾──────╼╾──────╼ } error[E0080]: it is undefined behavior to use this value @@ -51,7 +61,7 @@ LL | const G: Wide = unsafe { Transmute { t: FOO }.u }; | = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior. 
= note: the raw bytes of the constant (size: 16, align: 8) { - ╾ALLOC6╼ ╾ALLOC7╼ │ ╾──────╼╾──────╼ + ╾ALLOC10╼ ╾ALLOC11╼ │ ╾──────╼╾──────╼ } error: aborting due to 6 previous errors diff --git a/tests/ui/consts/const-eval/ub-incorrect-vtable.rs b/tests/ui/consts/const-eval/ub-incorrect-vtable.rs index 4325495a3801b..8058f7693a71c 100644 --- a/tests/ui/consts/const-eval/ub-incorrect-vtable.rs +++ b/tests/ui/consts/const-eval/ub-incorrect-vtable.rs @@ -17,12 +17,12 @@ trait Trait {} const INVALID_VTABLE_ALIGNMENT: &dyn Trait = unsafe { std::mem::transmute((&92u8, &[0usize, 1usize, 1000usize])) }; -//~^ ERROR evaluation of constant value failed +//~^^ ERROR it is undefined behavior to use this value //~| vtable const INVALID_VTABLE_SIZE: &dyn Trait = unsafe { std::mem::transmute((&92u8, &[1usize, usize::MAX, 1usize])) }; -//~^ ERROR evaluation of constant value failed +//~^^ ERROR it is undefined behavior to use this value //~| vtable #[repr(transparent)] diff --git a/tests/ui/consts/const-eval/ub-wide-ptr.rs b/tests/ui/consts/const-eval/ub-wide-ptr.rs index 3956146f6aef2..991d4424dcf99 100644 --- a/tests/ui/consts/const-eval/ub-wide-ptr.rs +++ b/tests/ui/consts/const-eval/ub-wide-ptr.rs @@ -123,13 +123,13 @@ const TRAIT_OBJ_INT_VTABLE: W<&dyn Trait> = unsafe { mem::transmute(W((&92u8, 4u //~^ ERROR it is undefined behavior to use this value //~| vtable const TRAIT_OBJ_UNALIGNED_VTABLE: &dyn Trait = unsafe { mem::transmute((&92u8, &[0u8; 128])) }; -//~^ ERROR evaluation of constant value failed +//~^ ERROR it is undefined behavior to use this value //~| vtable const TRAIT_OBJ_BAD_DROP_FN_NULL: &dyn Trait = unsafe { mem::transmute((&92u8, &[0usize; 8])) }; -//~^ ERROR evaluation of constant value failed +//~^ ERROR it is undefined behavior to use this value //~| vtable const TRAIT_OBJ_BAD_DROP_FN_INT: &dyn Trait = unsafe { mem::transmute((&92u8, &[1usize; 8])) }; -//~^ ERROR evaluation of constant value failed +//~^ ERROR it is undefined behavior to use this value //~| vtable const TRAIT_OBJ_BAD_DROP_FN_NOT_FN_PTR: W<&dyn Trait> = unsafe { mem::transmute(W((&92u8, &[&42u8; 8]))) }; //~^ ERROR it is undefined behavior to use this value @@ -142,10 +142,10 @@ const TRAIT_OBJ_CONTENT_INVALID: &dyn Trait = unsafe { mem::transmute::<_, &bool // # raw trait object const RAW_TRAIT_OBJ_VTABLE_NULL: *const dyn Trait = unsafe { mem::transmute((&92u8, 0usize)) }; -//~^ ERROR evaluation of constant value failed +//~^ ERROR it is undefined behavior to use this value //~| null pointer const RAW_TRAIT_OBJ_VTABLE_INVALID: *const dyn Trait = unsafe { mem::transmute((&92u8, &3u64)) }; -//~^ ERROR evaluation of constant value failed +//~^ ERROR it is undefined behavior to use this value //~| vtable const RAW_TRAIT_OBJ_CONTENT_INVALID: *const dyn Trait = unsafe { mem::transmute::<_, &bool>(&3u8) } as *const dyn Trait; // ok because raw // Officially blessed way to get the vtable @@ -154,12 +154,12 @@ const DYN_METADATA: ptr::DynMetadata = ptr::metadata::(ptr:: static mut RAW_TRAIT_OBJ_VTABLE_NULL_THROUGH_REF: *const dyn Trait = unsafe { mem::transmute::<_, &dyn Trait>((&92u8, 0usize)) - //~^ ERROR could not evaluate static initializer + //~^^ ERROR it is undefined behavior to use this value //~| null pointer }; static mut RAW_TRAIT_OBJ_VTABLE_INVALID_THROUGH_REF: *const dyn Trait = unsafe { mem::transmute::<_, &dyn Trait>((&92u8, &3u64)) - //~^ ERROR could not evaluate static initializer + //~^^ ERROR it is undefined behavior to use this value //~| vtable }; diff --git 
a/tests/ui/consts/const-eval/ub-wide-ptr.stderr b/tests/ui/consts/const-eval/ub-wide-ptr.stderr index c29cc836fffdb..92f0029a5b3eb 100644 --- a/tests/ui/consts/const-eval/ub-wide-ptr.stderr +++ b/tests/ui/consts/const-eval/ub-wide-ptr.stderr @@ -218,29 +218,44 @@ LL | const TRAIT_OBJ_INT_VTABLE: W<&dyn Trait> = unsafe { mem::transmute(W((&92u HEX_DUMP } -error[E0080]: evaluation of constant value failed - --> $DIR/ub-wide-ptr.rs:125:57 +error[E0080]: it is undefined behavior to use this value + --> $DIR/ub-wide-ptr.rs:125:1 | LL | const TRAIT_OBJ_UNALIGNED_VTABLE: &dyn Trait = unsafe { mem::transmute((&92u8, &[0u8; 128])) }; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ using ALLOC20 as vtable pointer but it does not point to a vtable + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered ALLOC17, but expected a vtable pointer + | + = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior. + = note: the raw bytes of the constant (size: $SIZE, align: $ALIGN) { + HEX_DUMP + } -error[E0080]: evaluation of constant value failed - --> $DIR/ub-wide-ptr.rs:128:57 +error[E0080]: it is undefined behavior to use this value + --> $DIR/ub-wide-ptr.rs:128:1 | LL | const TRAIT_OBJ_BAD_DROP_FN_NULL: &dyn Trait = unsafe { mem::transmute((&92u8, &[0usize; 8])) }; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ using ALLOC21 as vtable pointer but it does not point to a vtable + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered ALLOC19, but expected a vtable pointer + | + = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior. + = note: the raw bytes of the constant (size: $SIZE, align: $ALIGN) { + HEX_DUMP + } -error[E0080]: evaluation of constant value failed - --> $DIR/ub-wide-ptr.rs:131:56 +error[E0080]: it is undefined behavior to use this value + --> $DIR/ub-wide-ptr.rs:131:1 | LL | const TRAIT_OBJ_BAD_DROP_FN_INT: &dyn Trait = unsafe { mem::transmute((&92u8, &[1usize; 8])) }; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ using ALLOC22 as vtable pointer but it does not point to a vtable + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered ALLOC21, but expected a vtable pointer + | + = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior. + = note: the raw bytes of the constant (size: $SIZE, align: $ALIGN) { + HEX_DUMP + } error[E0080]: it is undefined behavior to use this value --> $DIR/ub-wide-ptr.rs:134:1 | LL | const TRAIT_OBJ_BAD_DROP_FN_NOT_FN_PTR: W<&dyn Trait> = unsafe { mem::transmute(W((&92u8, &[&42u8; 8]))) }; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .0: encountered ALLOC17, but expected a vtable pointer + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value at .0: encountered ALLOC23, but expected a vtable pointer | = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior. 
= note: the raw bytes of the constant (size: $SIZE, align: $ALIGN) { @@ -258,29 +273,49 @@ LL | const TRAIT_OBJ_CONTENT_INVALID: &dyn Trait = unsafe { mem::transmute::<_, HEX_DUMP } -error[E0080]: evaluation of constant value failed - --> $DIR/ub-wide-ptr.rs:144:62 +error[E0080]: it is undefined behavior to use this value + --> $DIR/ub-wide-ptr.rs:144:1 | LL | const RAW_TRAIT_OBJ_VTABLE_NULL: *const dyn Trait = unsafe { mem::transmute((&92u8, 0usize)) }; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ out-of-bounds pointer use: expected a pointer to some allocation, but got a null pointer + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered null pointer, but expected a vtable pointer + | + = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior. + = note: the raw bytes of the constant (size: $SIZE, align: $ALIGN) { + HEX_DUMP + } -error[E0080]: evaluation of constant value failed - --> $DIR/ub-wide-ptr.rs:147:65 +error[E0080]: it is undefined behavior to use this value + --> $DIR/ub-wide-ptr.rs:147:1 | LL | const RAW_TRAIT_OBJ_VTABLE_INVALID: *const dyn Trait = unsafe { mem::transmute((&92u8, &3u64)) }; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ using ALLOC23 as vtable pointer but it does not point to a vtable + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered ALLOC28, but expected a vtable pointer + | + = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior. + = note: the raw bytes of the constant (size: $SIZE, align: $ALIGN) { + HEX_DUMP + } -error[E0080]: could not evaluate static initializer - --> $DIR/ub-wide-ptr.rs:156:5 +error[E0080]: it is undefined behavior to use this value + --> $DIR/ub-wide-ptr.rs:155:1 | -LL | mem::transmute::<_, &dyn Trait>((&92u8, 0usize)) - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ out-of-bounds pointer use: expected a pointer to some allocation, but got a null pointer +LL | static mut RAW_TRAIT_OBJ_VTABLE_NULL_THROUGH_REF: *const dyn Trait = unsafe { + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered null pointer, but expected a vtable pointer + | + = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior. + = note: the raw bytes of the constant (size: $SIZE, align: $ALIGN) { + HEX_DUMP + } -error[E0080]: could not evaluate static initializer - --> $DIR/ub-wide-ptr.rs:161:5 +error[E0080]: it is undefined behavior to use this value + --> $DIR/ub-wide-ptr.rs:160:1 | -LL | mem::transmute::<_, &dyn Trait>((&92u8, &3u64)) - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ using ALLOC24 as vtable pointer but it does not point to a vtable +LL | static mut RAW_TRAIT_OBJ_VTABLE_INVALID_THROUGH_REF: *const dyn Trait = unsafe { + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ constructing invalid value: encountered ALLOC31, but expected a vtable pointer + | + = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. 
Please open an issue on the rustc repository if you believe it should not be considered undefined behavior. + = note: the raw bytes of the constant (size: $SIZE, align: $ALIGN) { + HEX_DUMP + } error: aborting due to 29 previous errors diff --git a/tests/ui/consts/const-float-bits-conv.rs b/tests/ui/consts/const-float-bits-conv.rs index ba8db4c23dc9b..45e8ea570ed73 100644 --- a/tests/ui/consts/const-float-bits-conv.rs +++ b/tests/ui/consts/const-float-bits-conv.rs @@ -23,6 +23,11 @@ macro_rules! const_assert { }; } +fn has_broken_floats() -> bool { + // i586 targets are broken due to . + std::env::var("TARGET").is_ok_and(|v| v.contains("i586")) +} + fn f32() { const_assert!((1f32).to_bits(), 0x3f800000); const_assert!(u32::from_be_bytes(1f32.to_be_bytes()), 0x3f800000); @@ -38,6 +43,19 @@ fn f32() { const_assert!(f32::from_bits(0x44a72000), 1337.0); const_assert!(f32::from_ne_bytes(0x44a72000u32.to_ne_bytes()), 1337.0); const_assert!(f32::from_bits(0xc1640000), -14.25); + + // Check that NaNs roundtrip their bits regardless of signalingness + // 0xA is 0b1010; 0x5 is 0b0101 -- so these two together clobbers all the mantissa bits + // NOTE: These names assume `f{BITS}::NAN` is a quiet NAN and IEEE754-2008's NaN rules apply! + const QUIET_NAN: u32 = f32::NAN.to_bits() ^ 0x002A_AAAA; + const SIGNALING_NAN: u32 = f32::NAN.to_bits() ^ 0x0055_5555; + + const_assert!(f32::from_bits(QUIET_NAN).is_nan()); + const_assert!(f32::from_bits(SIGNALING_NAN).is_nan()); + const_assert!(f32::from_bits(QUIET_NAN).to_bits(), QUIET_NAN); + if !has_broken_floats() { + const_assert!(f32::from_bits(SIGNALING_NAN).to_bits(), SIGNALING_NAN); + } } fn f64() { @@ -55,6 +73,19 @@ fn f64() { const_assert!(f64::from_bits(0x4094e40000000000), 1337.0); const_assert!(f64::from_ne_bytes(0x4094e40000000000u64.to_ne_bytes()), 1337.0); const_assert!(f64::from_bits(0xc02c800000000000), -14.25); + + // Check that NaNs roundtrip their bits regardless of signalingness + // 0xA is 0b1010; 0x5 is 0b0101 -- so these two together clobbers all the mantissa bits + // NOTE: These names assume `f{BITS}::NAN` is a quiet NAN and IEEE754-2008's NaN rules apply! + const QUIET_NAN: u64 = f64::NAN.to_bits() ^ 0x0005_5555_5555_5555; + const SIGNALING_NAN: u64 = f64::NAN.to_bits() ^ 0x000A_AAAA_AAAA_AAAA; + + const_assert!(f64::from_bits(QUIET_NAN).is_nan()); + const_assert!(f64::from_bits(SIGNALING_NAN).is_nan()); + const_assert!(f64::from_bits(QUIET_NAN).to_bits(), QUIET_NAN); + if !has_broken_floats() { + const_assert!(f64::from_bits(SIGNALING_NAN).to_bits(), SIGNALING_NAN); + } } fn main() { diff --git a/tests/ui/consts/const-float-bits-reject-conv.rs b/tests/ui/consts/const-float-bits-reject-conv.rs deleted file mode 100644 index febb272869a37..0000000000000 --- a/tests/ui/consts/const-float-bits-reject-conv.rs +++ /dev/null @@ -1,68 +0,0 @@ -//@ compile-flags: -Zmir-opt-level=0 -//@ error-pattern: cannot use f32::to_bits on a NaN -#![feature(const_float_bits_conv)] -#![feature(const_float_classify)] - -// Don't promote -const fn nop(x: T) -> T { x } - -macro_rules! const_assert { - ($a:expr) => { - { - const _: () = assert!($a); - assert!(nop($a)); - } - }; - ($a:expr, $b:expr) => { - { - const _: () = assert!($a == $b); - assert_eq!(nop($a), nop($b)); - } - }; -} - -fn f32() { - // Check that NaNs roundtrip their bits regardless of signalingness - // 0xA is 0b1010; 0x5 is 0b0101 -- so these two together clobbers all the mantissa bits - // ...actually, let's just check that these break. 
:D - const MASKED_NAN1: u32 = f32::NAN.to_bits() ^ 0x002A_AAAA; - //~^ inside - const MASKED_NAN2: u32 = f32::NAN.to_bits() ^ 0x0055_5555; - //~^ inside - - // The rest of the code is dead because the constants already fail to evaluate. - - const_assert!(f32::from_bits(MASKED_NAN1).is_nan()); - const_assert!(f32::from_bits(MASKED_NAN1).is_nan()); - - // LLVM does not guarantee that loads and stores of NaNs preserve their exact bit pattern. - // In practice, this seems to only cause a problem on x86, since the most widely used calling - // convention mandates that floating point values are returned on the x87 FPU stack. See #73328. - // However, during CTFE we still preserve bit patterns (though that is not a guarantee). - const_assert!(f32::from_bits(MASKED_NAN1).to_bits(), MASKED_NAN1); - const_assert!(f32::from_bits(MASKED_NAN2).to_bits(), MASKED_NAN2); -} - -fn f64() { - // Check that NaNs roundtrip their bits regardless of signalingness - // 0xA is 0b1010; 0x5 is 0b0101 -- so these two together clobbers all the mantissa bits - // ...actually, let's just check that these break. :D - const MASKED_NAN1: u64 = f64::NAN.to_bits() ^ 0x000A_AAAA_AAAA_AAAA; - //~^ inside - const MASKED_NAN2: u64 = f64::NAN.to_bits() ^ 0x0005_5555_5555_5555; - //~^ inside - - // The rest of the code is dead because the constants already fail to evaluate. - - const_assert!(f64::from_bits(MASKED_NAN1).is_nan()); - const_assert!(f64::from_bits(MASKED_NAN1).is_nan()); - - // See comment above. - const_assert!(f64::from_bits(MASKED_NAN1).to_bits(), MASKED_NAN1); - const_assert!(f64::from_bits(MASKED_NAN2).to_bits(), MASKED_NAN2); -} - -fn main() { - f32(); - f64(); -} diff --git a/tests/ui/consts/const-float-bits-reject-conv.stderr b/tests/ui/consts/const-float-bits-reject-conv.stderr deleted file mode 100644 index 1511dab12b0e3..0000000000000 --- a/tests/ui/consts/const-float-bits-reject-conv.stderr +++ /dev/null @@ -1,115 +0,0 @@ -error[E0080]: evaluation of constant value failed - --> $SRC_DIR/core/src/num/f32.rs:LL:COL - | - = note: the evaluated program panicked at 'const-eval error: cannot use f32::to_bits on a NaN', $SRC_DIR/core/src/num/f32.rs:LL:COL - | -note: inside `core::f32::::to_bits::ct_f32_to_u32` - --> $SRC_DIR/core/src/num/f32.rs:LL:COL -note: inside `core::f32::::to_bits` - --> $SRC_DIR/core/src/num/f32.rs:LL:COL -note: inside `f32::MASKED_NAN1` - --> $DIR/const-float-bits-reject-conv.rs:28:30 - | -LL | const MASKED_NAN1: u32 = f32::NAN.to_bits() ^ 0x002A_AAAA; - | ^^^^^^^^^^^^^^^^^^ - = note: this error originates in the macro `$crate::panic::panic_2021` which comes from the expansion of the macro `panic` (in Nightly builds, run with -Z macro-backtrace for more info) - -error[E0080]: evaluation of constant value failed - --> $SRC_DIR/core/src/num/f32.rs:LL:COL - | - = note: the evaluated program panicked at 'const-eval error: cannot use f32::to_bits on a NaN', $SRC_DIR/core/src/num/f32.rs:LL:COL - | -note: inside `core::f32::::to_bits::ct_f32_to_u32` - --> $SRC_DIR/core/src/num/f32.rs:LL:COL -note: inside `core::f32::::to_bits` - --> $SRC_DIR/core/src/num/f32.rs:LL:COL -note: inside `f32::MASKED_NAN2` - --> $DIR/const-float-bits-reject-conv.rs:30:30 - | -LL | const MASKED_NAN2: u32 = f32::NAN.to_bits() ^ 0x0055_5555; - | ^^^^^^^^^^^^^^^^^^ - = note: this error originates in the macro `$crate::panic::panic_2021` which comes from the expansion of the macro `panic` (in Nightly builds, run with -Z macro-backtrace for more info) - -note: erroneous constant encountered - --> 
$DIR/const-float-bits-reject-conv.rs:35:34 - | -LL | const_assert!(f32::from_bits(MASKED_NAN1).is_nan()); - | ^^^^^^^^^^^ - -note: erroneous constant encountered - --> $DIR/const-float-bits-reject-conv.rs:36:34 - | -LL | const_assert!(f32::from_bits(MASKED_NAN1).is_nan()); - | ^^^^^^^^^^^ - -note: erroneous constant encountered - --> $DIR/const-float-bits-reject-conv.rs:42:34 - | -LL | const_assert!(f32::from_bits(MASKED_NAN1).to_bits(), MASKED_NAN1); - | ^^^^^^^^^^^ - -note: erroneous constant encountered - --> $DIR/const-float-bits-reject-conv.rs:43:34 - | -LL | const_assert!(f32::from_bits(MASKED_NAN2).to_bits(), MASKED_NAN2); - | ^^^^^^^^^^^ - -error[E0080]: evaluation of constant value failed - --> $SRC_DIR/core/src/num/f64.rs:LL:COL - | - = note: the evaluated program panicked at 'const-eval error: cannot use f64::to_bits on a NaN', $SRC_DIR/core/src/num/f64.rs:LL:COL - | -note: inside `core::f64::::to_bits::ct_f64_to_u64` - --> $SRC_DIR/core/src/num/f64.rs:LL:COL -note: inside `core::f64::::to_bits` - --> $SRC_DIR/core/src/num/f64.rs:LL:COL -note: inside `f64::MASKED_NAN1` - --> $DIR/const-float-bits-reject-conv.rs:50:30 - | -LL | const MASKED_NAN1: u64 = f64::NAN.to_bits() ^ 0x000A_AAAA_AAAA_AAAA; - | ^^^^^^^^^^^^^^^^^^ - = note: this error originates in the macro `$crate::panic::panic_2021` which comes from the expansion of the macro `panic` (in Nightly builds, run with -Z macro-backtrace for more info) - -error[E0080]: evaluation of constant value failed - --> $SRC_DIR/core/src/num/f64.rs:LL:COL - | - = note: the evaluated program panicked at 'const-eval error: cannot use f64::to_bits on a NaN', $SRC_DIR/core/src/num/f64.rs:LL:COL - | -note: inside `core::f64::::to_bits::ct_f64_to_u64` - --> $SRC_DIR/core/src/num/f64.rs:LL:COL -note: inside `core::f64::::to_bits` - --> $SRC_DIR/core/src/num/f64.rs:LL:COL -note: inside `f64::MASKED_NAN2` - --> $DIR/const-float-bits-reject-conv.rs:52:30 - | -LL | const MASKED_NAN2: u64 = f64::NAN.to_bits() ^ 0x0005_5555_5555_5555; - | ^^^^^^^^^^^^^^^^^^ - = note: this error originates in the macro `$crate::panic::panic_2021` which comes from the expansion of the macro `panic` (in Nightly builds, run with -Z macro-backtrace for more info) - -note: erroneous constant encountered - --> $DIR/const-float-bits-reject-conv.rs:57:34 - | -LL | const_assert!(f64::from_bits(MASKED_NAN1).is_nan()); - | ^^^^^^^^^^^ - -note: erroneous constant encountered - --> $DIR/const-float-bits-reject-conv.rs:58:34 - | -LL | const_assert!(f64::from_bits(MASKED_NAN1).is_nan()); - | ^^^^^^^^^^^ - -note: erroneous constant encountered - --> $DIR/const-float-bits-reject-conv.rs:61:34 - | -LL | const_assert!(f64::from_bits(MASKED_NAN1).to_bits(), MASKED_NAN1); - | ^^^^^^^^^^^ - -note: erroneous constant encountered - --> $DIR/const-float-bits-reject-conv.rs:62:34 - | -LL | const_assert!(f64::from_bits(MASKED_NAN2).to_bits(), MASKED_NAN2); - | ^^^^^^^^^^^ - -error: aborting due to 4 previous errors - -For more information about this error, try `rustc --explain E0080`. 
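The additions to const-float-bits-conv.rs above replace the deleted const-float-bits-reject-conv.rs: instead of requiring `to_bits` to reject NaNs during const evaluation, the test now asserts that NaN bit patterns round-trip, and only skips the signaling-NaN runtime check on i586 targets via `has_broken_floats`. A minimal standalone sketch of the property being exercised (illustrative only, not part of the diff; on toolchains contemporary with this change the const calls still need `#![feature(const_float_bits_conv)]` and `#![feature(const_float_classify)]`, while later compilers accept them in const without a gate):

```rust
fn main() {
    // XOR-ing into the quiet NaN's mantissa produces other NaN payloads;
    // 0xA is 0b1010 and 0x5 is 0b0101, so the two masks together touch
    // every mantissa bit (mirroring the constants used by the test).
    const QUIET_NAN: u32 = f32::NAN.to_bits() ^ 0x002A_AAAA;
    const SIGNALING_NAN: u32 = f32::NAN.to_bits() ^ 0x0055_5555;

    // Both bit patterns are still NaNs...
    const _: () = assert!(f32::from_bits(QUIET_NAN).is_nan());
    const _: () = assert!(f32::from_bits(SIGNALING_NAN).is_nan());

    // ...and the exact bits survive a from_bits/to_bits round trip during
    // const evaluation. At runtime, x87 targets such as i586 may quiet a
    // signaling NaN, which is why the test guards that case.
    const _: () = assert!(f32::from_bits(QUIET_NAN).to_bits() == QUIET_NAN);
    const _: () = assert!(f32::from_bits(SIGNALING_NAN).to_bits() == SIGNALING_NAN);

    println!("quiet: {QUIET_NAN:#010x}, signaling: {SIGNALING_NAN:#010x}");
}
```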
diff --git a/tests/ui/coroutine/gen_block.none.stderr b/tests/ui/coroutine/gen_block.none.stderr index 64fa2be003de5..15123a49e4855 100644 --- a/tests/ui/coroutine/gen_block.none.stderr +++ b/tests/ui/coroutine/gen_block.none.stderr @@ -73,7 +73,6 @@ LL | let _ = || yield true; = note: see issue #43122 for more information = help: add `#![feature(coroutines)]` to the crate attributes to enable = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` error: `yield` can only be used in `#[coroutine]` closures, or `gen` blocks --> $DIR/gen_block.rs:16:16 @@ -95,7 +94,6 @@ LL | let _ = #[coroutine] || yield true; = note: see issue #43122 for more information = help: add `#![feature(coroutines)]` to the crate attributes to enable = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` error: aborting due to 11 previous errors diff --git a/tests/ui/coroutine/issue-58888.rs b/tests/ui/coroutine/issue-58888.rs index 6266f97ce8c46..e4fada0cd432e 100644 --- a/tests/ui/coroutine/issue-58888.rs +++ b/tests/ui/coroutine/issue-58888.rs @@ -13,7 +13,8 @@ impl Database { } fn check_connection(&self) -> impl Coroutine + '_ { - #[coroutine] move || { + #[coroutine] + move || { let iter = self.get_connection(); for i in iter { yield i @@ -23,5 +24,5 @@ impl Database { } fn main() { - Database.check_connection(); + let _ = Database.check_connection(); } diff --git a/tests/ui/delegation/correct_body_owner_parent_found_in_diagnostics.rs b/tests/ui/delegation/correct_body_owner_parent_found_in_diagnostics.rs new file mode 100644 index 0000000000000..0a7ec5ab5c1be --- /dev/null +++ b/tests/ui/delegation/correct_body_owner_parent_found_in_diagnostics.rs @@ -0,0 +1,34 @@ +#![feature(fn_delegation)] +#![allow(incomplete_features)] + +use std::marker::PhantomData; + +pub struct InvariantRef<'a, T: ?Sized>(&'a T, PhantomData<&'a mut &'a T>); + +impl<'a> InvariantRef<'a, ()> { + pub const NEW: Self = InvariantRef::new(&()); + //~^ ERROR: no function or associated item named `new` found +} + +trait Trait { + fn foo(&self) -> u8 { 0 } + fn bar(&self) -> u8 { 1 } + fn meh(&self) -> u8 { 2 } +} + +struct Z(u8); + +impl Trait for Z { + reuse ::{foo, bar, meh} { &const { InvariantRef::<'a>::NEW } } + //~^ ERROR: use of undeclared lifetime name `'a` + //~| ERROR: use of undeclared lifetime name `'a` + //~| ERROR: use of undeclared lifetime name `'a` + //~| ERROR: the trait bound `u8: Trait` is not satisfied + //~| ERROR: the trait bound `u8: Trait` is not satisfied + //~| ERROR: the trait bound `u8: Trait` is not satisfied + //~| ERROR: mismatched types + //~| ERROR: mismatched types + //~| ERROR: mismatched types +} + +fn main() { } diff --git a/tests/ui/delegation/correct_body_owner_parent_found_in_diagnostics.stderr b/tests/ui/delegation/correct_body_owner_parent_found_in_diagnostics.stderr new file mode 100644 index 0000000000000..2ce3b388073c4 --- /dev/null +++ b/tests/ui/delegation/correct_body_owner_parent_found_in_diagnostics.stderr @@ -0,0 +1,113 @@ +error[E0261]: use of undeclared lifetime name `'a` + --> $DIR/correct_body_owner_parent_found_in_diagnostics.rs:22:68 + | +LL | reuse ::{foo, bar, meh} { &const { InvariantRef::<'a>::NEW } } + | ^^ undeclared lifetime + | +help: consider introducing lifetime `'a` here + | +LL | reuse ::{foo'a, , bar, meh} { &const { InvariantRef::<'a>::NEW } } + | +++ 
+help: consider introducing lifetime `'a` here + | +LL | impl<'a> Trait for Z { + | ++++ + +error[E0261]: use of undeclared lifetime name `'a` + --> $DIR/correct_body_owner_parent_found_in_diagnostics.rs:22:68 + | +LL | reuse ::{foo, bar, meh} { &const { InvariantRef::<'a>::NEW } } + | ^^ undeclared lifetime + | +help: consider introducing lifetime `'a` here + | +LL | reuse ::{foo, bar'a, , meh} { &const { InvariantRef::<'a>::NEW } } + | +++ +help: consider introducing lifetime `'a` here + | +LL | impl<'a> Trait for Z { + | ++++ + +error[E0261]: use of undeclared lifetime name `'a` + --> $DIR/correct_body_owner_parent_found_in_diagnostics.rs:22:68 + | +LL | reuse ::{foo, bar, meh} { &const { InvariantRef::<'a>::NEW } } + | ^^ undeclared lifetime + | +help: consider introducing lifetime `'a` here + | +LL | reuse ::{foo, bar, meh'a, } { &const { InvariantRef::<'a>::NEW } } + | +++ +help: consider introducing lifetime `'a` here + | +LL | impl<'a> Trait for Z { + | ++++ + +error[E0599]: no function or associated item named `new` found for struct `InvariantRef` in the current scope + --> $DIR/correct_body_owner_parent_found_in_diagnostics.rs:9:41 + | +LL | pub struct InvariantRef<'a, T: ?Sized>(&'a T, PhantomData<&'a mut &'a T>); + | -------------------------------------- function or associated item `new` not found for this struct +... +LL | pub const NEW: Self = InvariantRef::new(&()); + | ^^^ function or associated item not found in `InvariantRef<'_, _>` + +error[E0308]: mismatched types + --> $DIR/correct_body_owner_parent_found_in_diagnostics.rs:22:53 + | +LL | reuse ::{foo, bar, meh} { &const { InvariantRef::<'a>::NEW } } + | ^^^^^^^^^^^^^^^^^^^^^^^ expected `u8`, found `InvariantRef<'_, ()>` + | + = note: expected type `u8` + found struct `InvariantRef<'_, ()>` + +error[E0277]: the trait bound `u8: Trait` is not satisfied + --> $DIR/correct_body_owner_parent_found_in_diagnostics.rs:22:12 + | +LL | reuse ::{foo, bar, meh} { &const { InvariantRef::<'a>::NEW } } + | ^^ the trait `Trait` is not implemented for `u8` + | + = help: the trait `Trait` is implemented for `Z` + +error[E0308]: mismatched types + --> $DIR/correct_body_owner_parent_found_in_diagnostics.rs:22:53 + | +LL | reuse ::{foo, bar, meh} { &const { InvariantRef::<'a>::NEW } } + | ^^^^^^^^^^^^^^^^^^^^^^^ expected `u8`, found `InvariantRef<'_, ()>` + | + = note: expected type `u8` + found struct `InvariantRef<'_, ()>` + = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` + +error[E0277]: the trait bound `u8: Trait` is not satisfied + --> $DIR/correct_body_owner_parent_found_in_diagnostics.rs:22:12 + | +LL | reuse ::{foo, bar, meh} { &const { InvariantRef::<'a>::NEW } } + | ^^ the trait `Trait` is not implemented for `u8` + | + = help: the trait `Trait` is implemented for `Z` + = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` + +error[E0308]: mismatched types + --> $DIR/correct_body_owner_parent_found_in_diagnostics.rs:22:53 + | +LL | reuse ::{foo, bar, meh} { &const { InvariantRef::<'a>::NEW } } + | ^^^^^^^^^^^^^^^^^^^^^^^ expected `u8`, found `InvariantRef<'_, ()>` + | + = note: expected type `u8` + found struct `InvariantRef<'_, ()>` + = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` + +error[E0277]: the trait bound `u8: Trait` is not satisfied + --> $DIR/correct_body_owner_parent_found_in_diagnostics.rs:22:12 + | +LL | reuse ::{foo, bar, meh} { &const { InvariantRef::<'a>::NEW } } + | ^^ the trait `Trait` is not implemented for `u8` + | + = help: the 
trait `Trait` is implemented for `Z` + = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` + +error: aborting due to 10 previous errors + +Some errors have detailed explanations: E0261, E0277, E0308, E0599. +For more information about an error, try `rustc --explain E0261`. diff --git a/tests/ui/deriving/auxiliary/another-proc-macro.rs b/tests/ui/deriving/auxiliary/another-proc-macro.rs new file mode 100644 index 0000000000000..a05175c9de926 --- /dev/null +++ b/tests/ui/deriving/auxiliary/another-proc-macro.rs @@ -0,0 +1,45 @@ +//@ force-host +//@ no-prefer-dynamic + +#![crate_type = "proc-macro"] +#![feature(proc_macro_quote)] + +extern crate proc_macro; + +use proc_macro::{quote, TokenStream}; + +#[proc_macro_derive(AnotherMacro, attributes(pointee))] +pub fn derive(_input: TokenStream) -> TokenStream { + quote! { + const _: () = { + const ANOTHER_MACRO_DERIVED: () = (); + }; + } + .into() +} + +#[proc_macro_attribute] +pub fn pointee( + _attr: proc_macro::TokenStream, + _item: proc_macro::TokenStream, +) -> proc_macro::TokenStream { + quote! { + const _: () = { + const POINTEE_MACRO_ATTR_DERIVED: () = (); + }; + } + .into() +} + +#[proc_macro_attribute] +pub fn default( + _attr: proc_macro::TokenStream, + _item: proc_macro::TokenStream, +) -> proc_macro::TokenStream { + quote! { + const _: () = { + const DEFAULT_MACRO_ATTR_DERIVED: () = (); + }; + } + .into() +} diff --git a/tests/ui/deriving/built-in-proc-macro-scope.rs b/tests/ui/deriving/built-in-proc-macro-scope.rs new file mode 100644 index 0000000000000..41c95f63b135e --- /dev/null +++ b/tests/ui/deriving/built-in-proc-macro-scope.rs @@ -0,0 +1,25 @@ +//@ check-pass +//@ aux-build: another-proc-macro.rs +//@ compile-flags: -Zunpretty=expanded + +#![feature(derive_smart_pointer)] + +#[macro_use] +extern crate another_proc_macro; + +use another_proc_macro::{pointee, AnotherMacro}; + +#[derive(core::marker::SmartPointer)] +#[repr(transparent)] +pub struct Ptr<'a, #[pointee] T: ?Sized> { + data: &'a mut T, +} + +#[pointee] +fn f() {} + +#[derive(AnotherMacro)] +#[pointee] +struct MyStruct; + +fn main() {} diff --git a/tests/ui/deriving/built-in-proc-macro-scope.stdout b/tests/ui/deriving/built-in-proc-macro-scope.stdout new file mode 100644 index 0000000000000..c649b7a9a574c --- /dev/null +++ b/tests/ui/deriving/built-in-proc-macro-scope.stdout @@ -0,0 +1,43 @@ +#![feature(prelude_import)] +#![no_std] +//@ check-pass +//@ aux-build: another-proc-macro.rs +//@ compile-flags: -Zunpretty=expanded + +#![feature(derive_smart_pointer)] +#[prelude_import] +use ::std::prelude::rust_2015::*; +#[macro_use] +extern crate std; + +#[macro_use] +extern crate another_proc_macro; + +use another_proc_macro::{pointee, AnotherMacro}; + +#[repr(transparent)] +pub struct Ptr<'a, #[pointee] T: ?Sized> { + data: &'a mut T, +} +#[automatically_derived] +impl<'a, T: ?Sized + ::core::marker::Unsize<__S>, __S: ?Sized> + ::core::ops::DispatchFromDyn> for Ptr<'a, T> { +} +#[automatically_derived] +impl<'a, T: ?Sized + ::core::marker::Unsize<__S>, __S: ?Sized> + ::core::ops::CoerceUnsized> for Ptr<'a, T> { +} + + + +const _: () = + { + const POINTEE_MACRO_ATTR_DERIVED: () = (); + }; +#[pointee] +struct MyStruct; +const _: () = + { + const ANOTHER_MACRO_DERIVED: () = (); + }; +fn main() {} diff --git a/tests/ui/deriving/proc-macro-attribute-mixing.rs b/tests/ui/deriving/proc-macro-attribute-mixing.rs new file mode 100644 index 0000000000000..489665ebeb520 --- /dev/null +++ b/tests/ui/deriving/proc-macro-attribute-mixing.rs @@ -0,0 +1,20 @@ 
+// This test certifies that we can mix attribute macros from Rust and external proc-macros. +// For instance, `#[derive(Default)]` uses `#[default]` and `#[derive(SmartPointer)]` uses +// `#[pointee]`. +// The scoping rule should allow the use of these two attributes when external proc-macros +// are in scope. + +//@ check-pass +//@ aux-build: another-proc-macro.rs +//@ compile-flags: -Zunpretty=expanded + +#![feature(derive_smart_pointer)] + +#[macro_use] +extern crate another_proc_macro; + +#[pointee] +fn f() {} + +#[default] +fn g() {} diff --git a/tests/ui/deriving/proc-macro-attribute-mixing.stdout b/tests/ui/deriving/proc-macro-attribute-mixing.stdout new file mode 100644 index 0000000000000..f314f6efbe2b9 --- /dev/null +++ b/tests/ui/deriving/proc-macro-attribute-mixing.stdout @@ -0,0 +1,30 @@ +#![feature(prelude_import)] +#![no_std] +// This test certifies that we can mix attribute macros from Rust and external proc-macros. +// For instance, `#[derive(Default)]` uses `#[default]` and `#[derive(SmartPointer)]` uses +// `#[pointee]`. +// The scoping rule should allow the use of these two attributes when external proc-macros +// are in scope. + +//@ check-pass +//@ aux-build: another-proc-macro.rs +//@ compile-flags: -Zunpretty=expanded + +#![feature(derive_smart_pointer)] +#[prelude_import] +use ::std::prelude::rust_2015::*; +#[macro_use] +extern crate std; + +#[macro_use] +extern crate another_proc_macro; + + +const _: () = + { + const POINTEE_MACRO_ATTR_DERIVED: () = (); + }; +const _: () = + { + const DEFAULT_MACRO_ATTR_DERIVED: () = (); + }; diff --git a/tests/ui/diagnostic_namespace/do_not_recommend/do_not_apply_attribute_without_feature_flag.rs b/tests/ui/diagnostic_namespace/do_not_recommend/do_not_apply_attribute_without_feature_flag.rs new file mode 100644 index 0000000000000..5548fa2f52e17 --- /dev/null +++ b/tests/ui/diagnostic_namespace/do_not_recommend/do_not_apply_attribute_without_feature_flag.rs @@ -0,0 +1,21 @@ +#![allow(unknown_or_malformed_diagnostic_attributes)] + +trait Foo {} + +#[diagnostic::do_not_recommend] +impl Foo for (A,) {} + +#[diagnostic::do_not_recommend] +impl Foo for (A, B) {} + +#[diagnostic::do_not_recommend] +impl Foo for (A, B, C) {} + +impl Foo for i32 {} + +fn check(a: impl Foo) {} + +fn main() { + check(()); + //~^ ERROR the trait bound `(): Foo` is not satisfied +} diff --git a/tests/ui/diagnostic_namespace/do_not_recommend/do_not_apply_attribute_without_feature_flag.stderr b/tests/ui/diagnostic_namespace/do_not_recommend/do_not_apply_attribute_without_feature_flag.stderr new file mode 100644 index 0000000000000..e56af28f3fb5d --- /dev/null +++ b/tests/ui/diagnostic_namespace/do_not_recommend/do_not_apply_attribute_without_feature_flag.stderr @@ -0,0 +1,21 @@ +error[E0277]: the trait bound `(): Foo` is not satisfied + --> $DIR/do_not_apply_attribute_without_feature_flag.rs:19:11 + | +LL | check(()); + | ----- ^^ the trait `Foo` is not implemented for `()` + | | + | required by a bound introduced by this call + | + = help: the following other types implement trait `Foo`: + (A, B) + (A, B, C) + (A,) +note: required by a bound in `check` + --> $DIR/do_not_apply_attribute_without_feature_flag.rs:16:18 + | +LL | fn check(a: impl Foo) {} + | ^^^ required by this bound in `check` + +error: aborting due to 1 previous error + +For more information about this error, try `rustc --explain E0277`.
diff --git a/tests/ui/extern/extern-main-issue-86110.stderr b/tests/ui/extern/extern-main-issue-86110.stderr index 8a3262fbcc7be..d69f4e617680a 100644 --- a/tests/ui/extern/extern-main-issue-86110.stderr +++ b/tests/ui/extern/extern-main-issue-86110.stderr @@ -2,7 +2,7 @@ error: the `main` function cannot be declared in an `extern` block --> $DIR/extern-main-issue-86110.rs:4:5 | LL | fn main(); - | ^^^^^^^^^ + | ^^^^^^^^^^ error: aborting due to 1 previous error diff --git a/tests/ui/feature-gates/feature-gate-asm_const.rs b/tests/ui/feature-gates/feature-gate-asm_const.rs deleted file mode 100644 index 42d5ba69222d0..0000000000000 --- a/tests/ui/feature-gates/feature-gate-asm_const.rs +++ /dev/null @@ -1,16 +0,0 @@ -//@ only-x86_64 - -use std::arch::asm; - -unsafe fn foo() { - asm!("mov eax, {}", const N + 1); - //~^ ERROR const operands for inline assembly are unstable -} - -fn main() { - unsafe { - foo::<0>(); - asm!("mov eax, {}", const 123); - //~^ ERROR const operands for inline assembly are unstable - } -} diff --git a/tests/ui/feature-gates/feature-gate-asm_const.stderr b/tests/ui/feature-gates/feature-gate-asm_const.stderr deleted file mode 100644 index 4f83fee675933..0000000000000 --- a/tests/ui/feature-gates/feature-gate-asm_const.stderr +++ /dev/null @@ -1,23 +0,0 @@ -error[E0658]: const operands for inline assembly are unstable - --> $DIR/feature-gate-asm_const.rs:6:25 - | -LL | asm!("mov eax, {}", const N + 1); - | ^^^^^^^^^^^ - | - = note: see issue #93332 for more information - = help: add `#![feature(asm_const)]` to the crate attributes to enable - = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date - -error[E0658]: const operands for inline assembly are unstable - --> $DIR/feature-gate-asm_const.rs:13:29 - | -LL | asm!("mov eax, {}", const 123); - | ^^^^^^^^^ - | - = note: see issue #93332 for more information - = help: add `#![feature(asm_const)]` to the crate attributes to enable - = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date - -error: aborting due to 2 previous errors - -For more information about this error, try `rustc --explain E0658`. 
diff --git a/tests/ui/feature-gates/feature-gate-coroutines.none.stderr b/tests/ui/feature-gates/feature-gate-coroutines.none.stderr index 65e7737ef84d5..032d7adf77ab8 100644 --- a/tests/ui/feature-gates/feature-gate-coroutines.none.stderr +++ b/tests/ui/feature-gates/feature-gate-coroutines.none.stderr @@ -47,7 +47,6 @@ LL | yield true; = note: see issue #43122 for more information = help: add `#![feature(coroutines)]` to the crate attributes to enable = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` error: `yield` can only be used in `#[coroutine]` closures, or `gen` blocks --> $DIR/feature-gate-coroutines.rs:5:5 @@ -69,7 +68,6 @@ LL | let _ = || yield true; = note: see issue #43122 for more information = help: add `#![feature(coroutines)]` to the crate attributes to enable = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` error: `yield` can only be used in `#[coroutine]` closures, or `gen` blocks --> $DIR/feature-gate-coroutines.rs:10:16 diff --git a/tests/ui/feature-gates/feature-gate-derive-smart-pointer.rs b/tests/ui/feature-gates/feature-gate-derive-smart-pointer.rs index 3257a9ca624ba..7b4764ee768ab 100644 --- a/tests/ui/feature-gates/feature-gate-derive-smart-pointer.rs +++ b/tests/ui/feature-gates/feature-gate-derive-smart-pointer.rs @@ -3,7 +3,6 @@ use std::marker::SmartPointer; //~ ERROR use of unstable library feature 'derive #[derive(SmartPointer)] //~ ERROR use of unstable library feature 'derive_smart_pointer' #[repr(transparent)] struct MyPointer<'a, #[pointee] T: ?Sized> { - //~^ ERROR the `#[pointee]` attribute is an experimental feature ptr: &'a T, } diff --git a/tests/ui/feature-gates/feature-gate-derive-smart-pointer.stderr b/tests/ui/feature-gates/feature-gate-derive-smart-pointer.stderr index 19501939dc5be..ea4d1271b7c87 100644 --- a/tests/ui/feature-gates/feature-gate-derive-smart-pointer.stderr +++ b/tests/ui/feature-gates/feature-gate-derive-smart-pointer.stderr @@ -8,16 +8,6 @@ LL | #[derive(SmartPointer)] = help: add `#![feature(derive_smart_pointer)]` to the crate attributes to enable = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date -error[E0658]: the `#[pointee]` attribute is an experimental feature - --> $DIR/feature-gate-derive-smart-pointer.rs:5:22 - | -LL | struct MyPointer<'a, #[pointee] T: ?Sized> { - | ^^^^^^^^^^ - | - = note: see issue #123430 for more information - = help: add `#![feature(derive_smart_pointer)]` to the crate attributes to enable - = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date - error[E0658]: use of unstable library feature 'derive_smart_pointer' --> $DIR/feature-gate-derive-smart-pointer.rs:1:5 | @@ -28,6 +18,6 @@ LL | use std::marker::SmartPointer; = help: add `#![feature(derive_smart_pointer)]` to the crate attributes to enable = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date -error: aborting due to 3 previous errors +error: aborting due to 2 previous errors For more information about this error, try `rustc --explain E0658`. 
diff --git a/tests/ui/feature-gates/feature-gate-unsafe-attributes.rs b/tests/ui/feature-gates/feature-gate-unsafe-attributes.rs deleted file mode 100644 index 9eba415dda0e5..0000000000000 --- a/tests/ui/feature-gates/feature-gate-unsafe-attributes.rs +++ /dev/null @@ -1,8 +0,0 @@ -#[unsafe(no_mangle)] //~ ERROR [E0658] -extern "C" fn foo() { - -} - -fn main() { - foo(); -} diff --git a/tests/ui/feature-gates/feature-gate-unsafe-attributes.stderr b/tests/ui/feature-gates/feature-gate-unsafe-attributes.stderr deleted file mode 100644 index dfcea756b02b6..0000000000000 --- a/tests/ui/feature-gates/feature-gate-unsafe-attributes.stderr +++ /dev/null @@ -1,13 +0,0 @@ -error[E0658]: `#[unsafe()]` markers for attributes are experimental - --> $DIR/feature-gate-unsafe-attributes.rs:1:3 - | -LL | #[unsafe(no_mangle)] - | ^^^^^^ - | - = note: see issue #123757 for more information - = help: add `#![feature(unsafe_attributes)]` to the crate attributes to enable - = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date - -error: aborting due to 1 previous error - -For more information about this error, try `rustc --explain E0658`. diff --git a/tests/ui/foreign/foreign-safe-fn-arg-mismatch.rs b/tests/ui/foreign/foreign-safe-fn-arg-mismatch.rs new file mode 100644 index 0000000000000..bb1052b15e9d1 --- /dev/null +++ b/tests/ui/foreign/foreign-safe-fn-arg-mismatch.rs @@ -0,0 +1,13 @@ +// Make sure we don't ICE when a foreign fn doesn't implement `Fn` due to arg mismatch. + +unsafe extern "Rust" { + pub safe fn foo(); + pub safe fn bar(x: u32); +} + +fn test(_: impl Fn(i32)) {} + +fn main() { + test(foo); //~ ERROR function is expected to take 1 argument, but it takes 0 arguments + test(bar); //~ ERROR type mismatch in function arguments +} diff --git a/tests/ui/foreign/foreign-safe-fn-arg-mismatch.stderr b/tests/ui/foreign/foreign-safe-fn-arg-mismatch.stderr new file mode 100644 index 0000000000000..73ccecff5ab8c --- /dev/null +++ b/tests/ui/foreign/foreign-safe-fn-arg-mismatch.stderr @@ -0,0 +1,44 @@ +error[E0593]: function is expected to take 1 argument, but it takes 0 arguments + --> $DIR/foreign-safe-fn-arg-mismatch.rs:11:10 + | +LL | pub safe fn foo(); + | ------------------ takes 0 arguments +... +LL | test(foo); + | ---- ^^^ expected function that takes 1 argument + | | + | required by a bound introduced by this call + | +note: required by a bound in `test` + --> $DIR/foreign-safe-fn-arg-mismatch.rs:8:17 + | +LL | fn test(_: impl Fn(i32)) {} + | ^^^^^^^ required by this bound in `test` + +error[E0631]: type mismatch in function arguments + --> $DIR/foreign-safe-fn-arg-mismatch.rs:12:10 + | +LL | pub safe fn bar(x: u32); + | ------------------------ found signature defined here +... +LL | test(bar); + | ---- ^^^ expected due to this + | | + | required by a bound introduced by this call + | + = note: expected function signature `fn(i32) -> _` + found function signature `fn(u32) -> _` +note: required by a bound in `test` + --> $DIR/foreign-safe-fn-arg-mismatch.rs:8:17 + | +LL | fn test(_: impl Fn(i32)) {} + | ^^^^^^^ required by this bound in `test` +help: consider wrapping the function in a closure + | +LL | test(|arg0: i32| bar(/* u32 */)); + | +++++++++++ +++++++++++ + +error: aborting due to 2 previous errors + +Some errors have detailed explanations: E0593, E0631. +For more information about an error, try `rustc --explain E0593`. 
diff --git a/tests/ui/higher-ranked/subtyping-fn-ptr-coercion.rs b/tests/ui/higher-ranked/subtyping-fn-ptr-coercion.rs new file mode 100644 index 0000000000000..0cecf6808f229 --- /dev/null +++ b/tests/ui/higher-ranked/subtyping-fn-ptr-coercion.rs @@ -0,0 +1,10 @@ +//@ check-pass + +// Check that we use subtyping when reifying a closure into a function pointer. + +fn foo(x: &str) {} + +fn main() { + let c = |_: &str| {}; + let x = c as fn(&'static str); +} diff --git a/tests/ui/impl-trait/recursive-ice-101862.stderr b/tests/ui/impl-trait/recursive-ice-101862.stderr index f4148720c3331..970373422e8a5 100644 --- a/tests/ui/impl-trait/recursive-ice-101862.stderr +++ b/tests/ui/impl-trait/recursive-ice-101862.stderr @@ -11,13 +11,13 @@ LL | vec![].append(&mut ice(x.as_ref())); = note: `#[warn(unconditional_recursion)]` on by default error[E0792]: expected generic type parameter, found `&str` - --> $DIR/recursive-ice-101862.rs:6:5 + --> $DIR/recursive-ice-101862.rs:6:19 | LL | pub fn ice(x: impl AsRef) -> impl IntoIterator { | --------------- this generic parameter must be used with a generic type parameter LL | LL | vec![].append(&mut ice(x.as_ref())); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | ^^^^^^^^^^^^^^^^^^^^ error: aborting due to 1 previous error; 1 warning emitted diff --git a/tests/ui/intrinsics/safe-intrinsic-mismatch.effects.stderr b/tests/ui/intrinsics/safe-intrinsic-mismatch.effects.stderr index d9a4960feec4d..55983a445a4ca 100644 --- a/tests/ui/intrinsics/safe-intrinsic-mismatch.effects.stderr +++ b/tests/ui/intrinsics/safe-intrinsic-mismatch.effects.stderr @@ -7,13 +7,13 @@ error: intrinsic safety mismatch between list of intrinsics within the compiler --> $DIR/safe-intrinsic-mismatch.rs:11:5 | LL | fn size_of() -> usize; - | ^^^^^^^^^^^^^^^^^^^^^^^^ + | ^^^^^^^^^^^^^^^^^^^^^^^^^ error: intrinsic safety mismatch between list of intrinsics within the compiler and core library intrinsics for intrinsic `size_of` --> $DIR/safe-intrinsic-mismatch.rs:11:5 | LL | fn size_of() -> usize; - | ^^^^^^^^^^^^^^^^^^^^^^^^ + | ^^^^^^^^^^^^^^^^^^^^^^^^^ | = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` diff --git a/tests/ui/intrinsics/safe-intrinsic-mismatch.stock.stderr b/tests/ui/intrinsics/safe-intrinsic-mismatch.stock.stderr index 6864c0f36ded0..c59e357b27526 100644 --- a/tests/ui/intrinsics/safe-intrinsic-mismatch.stock.stderr +++ b/tests/ui/intrinsics/safe-intrinsic-mismatch.stock.stderr @@ -2,13 +2,13 @@ error: intrinsic safety mismatch between list of intrinsics within the compiler --> $DIR/safe-intrinsic-mismatch.rs:11:5 | LL | fn size_of() -> usize; - | ^^^^^^^^^^^^^^^^^^^^^^^^ + | ^^^^^^^^^^^^^^^^^^^^^^^^^ error: intrinsic safety mismatch between list of intrinsics within the compiler and core library intrinsics for intrinsic `size_of` --> $DIR/safe-intrinsic-mismatch.rs:11:5 | LL | fn size_of() -> usize; - | ^^^^^^^^^^^^^^^^^^^^^^^^ + | ^^^^^^^^^^^^^^^^^^^^^^^^^ | = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` diff --git a/tests/ui/issues/issue-16725.stderr b/tests/ui/issues/issue-16725.stderr index a4a406b3d4ba9..dcb7d58b0f961 100644 --- a/tests/ui/issues/issue-16725.stderr +++ b/tests/ui/issues/issue-16725.stderr @@ -8,7 +8,7 @@ note: the function `bar` is defined here --> $DIR/auxiliary/issue-16725.rs:2:5 | LL | fn bar(); - | ^^^^^^^^ + | ^^^^^^^^^ error: aborting due to 1 previous error diff --git a/tests/ui/lint/clashing-extern-fn.stderr b/tests/ui/lint/clashing-extern-fn.stderr index 43c8cdead9f41..f75ff6d05a15c 100644 
--- a/tests/ui/lint/clashing-extern-fn.stderr +++ b/tests/ui/lint/clashing-extern-fn.stderr @@ -21,10 +21,10 @@ warning: `clash` redeclared with a different signature --> $DIR/clashing-extern-fn.rs:14:13 | LL | fn clash(x: u8); - | --------------- `clash` previously declared here + | ---------------- `clash` previously declared here ... LL | fn clash(x: u64); - | ^^^^^^^^^^^^^^^^ this signature doesn't match the previous declaration + | ^^^^^^^^^^^^^^^^^ this signature doesn't match the previous declaration | = note: expected `unsafe extern "C" fn(u8)` found `unsafe extern "C" fn(u64)` @@ -41,7 +41,7 @@ LL | #[link_name = "extern_link_name"] | --------------------------------- `extern_link_name` previously declared here ... LL | fn extern_link_name(x: u32); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ this signature doesn't match the previous declaration + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this signature doesn't match the previous declaration | = note: expected `unsafe extern "C" fn(i16)` found `unsafe extern "C" fn(u32)` @@ -50,7 +50,7 @@ warning: `some_other_extern_link_name` redeclares `some_other_new_name` with a d --> $DIR/clashing-extern-fn.rs:55:9 | LL | fn some_other_new_name(x: i16); - | ------------------------------ `some_other_new_name` previously declared here + | ------------------------------- `some_other_new_name` previously declared here ... LL | #[link_name = "some_other_new_name"] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this signature doesn't match the previous declaration @@ -74,10 +74,10 @@ warning: `different_mod` redeclared with a different signature --> $DIR/clashing-extern-fn.rs:72:9 | LL | fn different_mod(x: u8); - | ----------------------- `different_mod` previously declared here + | ------------------------ `different_mod` previously declared here ... LL | fn different_mod(x: u64); - | ^^^^^^^^^^^^^^^^^^^^^^^^ this signature doesn't match the previous declaration + | ^^^^^^^^^^^^^^^^^^^^^^^^^ this signature doesn't match the previous declaration | = note: expected `unsafe extern "C" fn(u8)` found `unsafe extern "C" fn(u64)` @@ -86,10 +86,10 @@ warning: `variadic_decl` redeclared with a different signature --> $DIR/clashing-extern-fn.rs:82:9 | LL | fn variadic_decl(x: u8, ...); - | ---------------------------- `variadic_decl` previously declared here + | ----------------------------- `variadic_decl` previously declared here ... LL | fn variadic_decl(x: u8); - | ^^^^^^^^^^^^^^^^^^^^^^^ this signature doesn't match the previous declaration + | ^^^^^^^^^^^^^^^^^^^^^^^^ this signature doesn't match the previous declaration | = note: expected `unsafe extern "C" fn(u8, ...)` found `unsafe extern "C" fn(u8)` @@ -98,10 +98,10 @@ warning: `weigh_banana` redeclared with a different signature --> $DIR/clashing-extern-fn.rs:142:13 | LL | fn weigh_banana(count: *const Banana) -> u64; - | -------------------------------------------- `weigh_banana` previously declared here + | --------------------------------------------- `weigh_banana` previously declared here ... 
LL | fn weigh_banana(count: *const Banana) -> u64; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this signature doesn't match the previous declaration + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this signature doesn't match the previous declaration | = note: expected `unsafe extern "C" fn(*const one::Banana) -> u64` found `unsafe extern "C" fn(*const three::Banana) -> u64` @@ -110,10 +110,10 @@ warning: `draw_point` redeclared with a different signature --> $DIR/clashing-extern-fn.rs:171:13 | LL | fn draw_point(p: Point); - | ----------------------- `draw_point` previously declared here + | ------------------------ `draw_point` previously declared here ... LL | fn draw_point(p: Point); - | ^^^^^^^^^^^^^^^^^^^^^^^ this signature doesn't match the previous declaration + | ^^^^^^^^^^^^^^^^^^^^^^^^ this signature doesn't match the previous declaration | = note: expected `unsafe extern "C" fn(sameish_members::a::Point)` found `unsafe extern "C" fn(sameish_members::b::Point)` @@ -122,10 +122,10 @@ warning: `origin` redeclared with a different signature --> $DIR/clashing-extern-fn.rs:197:13 | LL | fn origin() -> Point3; - | --------------------- `origin` previously declared here + | ---------------------- `origin` previously declared here ... LL | fn origin() -> Point3; - | ^^^^^^^^^^^^^^^^^^^^^ this signature doesn't match the previous declaration + | ^^^^^^^^^^^^^^^^^^^^^^ this signature doesn't match the previous declaration | = note: expected `unsafe extern "C" fn() -> same_sized_members_clash::a::Point3` found `unsafe extern "C" fn() -> same_sized_members_clash::b::Point3` @@ -134,10 +134,10 @@ warning: `transparent_incorrect` redeclared with a different signature --> $DIR/clashing-extern-fn.rs:220:13 | LL | fn transparent_incorrect() -> T; - | ------------------------------- `transparent_incorrect` previously declared here + | -------------------------------- `transparent_incorrect` previously declared here ... LL | fn transparent_incorrect() -> isize; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this signature doesn't match the previous declaration + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this signature doesn't match the previous declaration | = note: expected `unsafe extern "C" fn() -> T` found `unsafe extern "C" fn() -> isize` @@ -146,10 +146,10 @@ warning: `missing_return_type` redeclared with a different signature --> $DIR/clashing-extern-fn.rs:259:13 | LL | fn missing_return_type() -> usize; - | --------------------------------- `missing_return_type` previously declared here + | ---------------------------------- `missing_return_type` previously declared here ... LL | fn missing_return_type(); - | ^^^^^^^^^^^^^^^^^^^^^^^^ this signature doesn't match the previous declaration + | ^^^^^^^^^^^^^^^^^^^^^^^^^ this signature doesn't match the previous declaration | = note: expected `unsafe extern "C" fn() -> usize` found `unsafe extern "C" fn()` @@ -158,10 +158,10 @@ warning: `non_zero_usize` redeclared with a different signature --> $DIR/clashing-extern-fn.rs:277:13 | LL | fn non_zero_usize() -> core::num::NonZero; - | ------------------------------------------------ `non_zero_usize` previously declared here + | ------------------------------------------------- `non_zero_usize` previously declared here ... 
LL | fn non_zero_usize() -> usize; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this signature doesn't match the previous declaration + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this signature doesn't match the previous declaration | = note: expected `unsafe extern "C" fn() -> NonZero` found `unsafe extern "C" fn() -> usize` @@ -170,10 +170,10 @@ warning: `non_null_ptr` redeclared with a different signature --> $DIR/clashing-extern-fn.rs:279:13 | LL | fn non_null_ptr() -> core::ptr::NonNull; - | ---------------------------------------------- `non_null_ptr` previously declared here + | ----------------------------------------------- `non_null_ptr` previously declared here ... LL | fn non_null_ptr() -> *const usize; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this signature doesn't match the previous declaration + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this signature doesn't match the previous declaration | = note: expected `unsafe extern "C" fn() -> NonNull` found `unsafe extern "C" fn() -> *const usize` @@ -182,10 +182,10 @@ warning: `option_non_zero_usize_incorrect` redeclared with a different signature --> $DIR/clashing-extern-fn.rs:373:13 | LL | fn option_non_zero_usize_incorrect() -> usize; - | --------------------------------------------- `option_non_zero_usize_incorrect` previously declared here + | ---------------------------------------------- `option_non_zero_usize_incorrect` previously declared here ... LL | fn option_non_zero_usize_incorrect() -> isize; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this signature doesn't match the previous declaration + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this signature doesn't match the previous declaration | = note: expected `unsafe extern "C" fn() -> usize` found `unsafe extern "C" fn() -> isize` @@ -194,10 +194,10 @@ warning: `option_non_null_ptr_incorrect` redeclared with a different signature --> $DIR/clashing-extern-fn.rs:375:13 | LL | fn option_non_null_ptr_incorrect() -> *const usize; - | -------------------------------------------------- `option_non_null_ptr_incorrect` previously declared here + | --------------------------------------------------- `option_non_null_ptr_incorrect` previously declared here ... LL | fn option_non_null_ptr_incorrect() -> *const isize; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this signature doesn't match the previous declaration + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this signature doesn't match the previous declaration | = note: expected `unsafe extern "C" fn() -> *const usize` found `unsafe extern "C" fn() -> *const isize` @@ -206,10 +206,10 @@ warning: `hidden_niche_transparent_no_niche` redeclared with a different signatu --> $DIR/clashing-extern-fn.rs:429:13 | LL | fn hidden_niche_transparent_no_niche() -> usize; - | ----------------------------------------------- `hidden_niche_transparent_no_niche` previously declared here + | ------------------------------------------------ `hidden_niche_transparent_no_niche` previously declared here ... 
LL | fn hidden_niche_transparent_no_niche() -> Option; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this signature doesn't match the previous declaration + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this signature doesn't match the previous declaration | = note: expected `unsafe extern "C" fn() -> usize` found `unsafe extern "C" fn() -> Option` @@ -218,10 +218,10 @@ warning: `hidden_niche_unsafe_cell` redeclared with a different signature --> $DIR/clashing-extern-fn.rs:433:13 | LL | fn hidden_niche_unsafe_cell() -> usize; - | -------------------------------------- `hidden_niche_unsafe_cell` previously declared here + | --------------------------------------- `hidden_niche_unsafe_cell` previously declared here ... LL | fn hidden_niche_unsafe_cell() -> Option>>; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this signature doesn't match the previous declaration + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this signature doesn't match the previous declaration | = note: expected `unsafe extern "C" fn() -> usize` found `unsafe extern "C" fn() -> Option>>` diff --git a/tests/ui/lint/issue-1866.stderr b/tests/ui/lint/issue-1866.stderr index 36d323825a4de..d19a134966831 100644 --- a/tests/ui/lint/issue-1866.stderr +++ b/tests/ui/lint/issue-1866.stderr @@ -2,10 +2,10 @@ warning: `rust_task_is_unwinding` redeclared with a different signature --> $DIR/issue-1866.rs:23:13 | LL | pub fn rust_task_is_unwinding(rt: *const rust_task) -> bool; - | ----------------------------------------------------------- `rust_task_is_unwinding` previously declared here + | ------------------------------------------------------------ `rust_task_is_unwinding` previously declared here ... LL | pub fn rust_task_is_unwinding(rt: *const rust_task) -> bool; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this signature doesn't match the previous declaration + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ this signature doesn't match the previous declaration | = note: expected `unsafe extern "C" fn(*const usize) -> bool` found `unsafe extern "C" fn(*const bool) -> bool` diff --git a/tests/ui/lint/lint-attr-everywhere-late.stderr b/tests/ui/lint/lint-attr-everywhere-late.stderr index ddc31905afbcf..1937b618236b0 100644 --- a/tests/ui/lint/lint-attr-everywhere-late.stderr +++ b/tests/ui/lint/lint-attr-everywhere-late.stderr @@ -406,10 +406,10 @@ error: `clashing1` redeclared with a different signature --> $DIR/lint-attr-everywhere-late.rs:123:5 | LL | fn clashing1(); - | -------------- `clashing1` previously declared here + | --------------- `clashing1` previously declared here ... LL | fn clashing1(_: i32); - | ^^^^^^^^^^^^^^^^^^^^ this signature doesn't match the previous declaration + | ^^^^^^^^^^^^^^^^^^^^^ this signature doesn't match the previous declaration | = note: expected `unsafe extern "C" fn()` found `unsafe extern "C" fn(i32)` @@ -423,10 +423,10 @@ error: `clashing2` redeclared with a different signature --> $DIR/lint-attr-everywhere-late.rs:128:5 | LL | fn clashing2(); - | -------------- `clashing2` previously declared here + | --------------- `clashing2` previously declared here ... 
LL | fn clashing2(_: i32); - | ^^^^^^^^^^^^^^^^^^^^ this signature doesn't match the previous declaration + | ^^^^^^^^^^^^^^^^^^^^^ this signature doesn't match the previous declaration | = note: expected `unsafe extern "C" fn()` found `unsafe extern "C" fn(i32)` diff --git a/tests/ui/lint/lint-missing-doc.stderr b/tests/ui/lint/lint-missing-doc.stderr index 4e9ee4f276962..5165ccc3fd062 100644 --- a/tests/ui/lint/lint-missing-doc.stderr +++ b/tests/ui/lint/lint-missing-doc.stderr @@ -116,7 +116,7 @@ error: missing documentation for a function --> $DIR/lint-missing-doc.rs:196:5 | LL | pub fn extern_fn_undocumented(f: f32) -> f32; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: missing documentation for a static --> $DIR/lint-missing-doc.rs:201:5 diff --git a/tests/ui/lint/unreachable_pub.stderr b/tests/ui/lint/unreachable_pub.stderr index 705a537a3f1c4..65f45fbd81689 100644 --- a/tests/ui/lint/unreachable_pub.stderr +++ b/tests/ui/lint/unreachable_pub.stderr @@ -130,7 +130,7 @@ warning: unreachable `pub` item --> $DIR/unreachable_pub.rs:48:9 | LL | pub fn catalyze() -> bool; - | ---^^^^^^^^^^^^^^^^^^^^^^ + | ---^^^^^^^^^^^^^^^^^^^^^^^ | | | help: consider restricting its visibility: `pub(crate)` | diff --git a/tests/ui/macros/macro-match-nonterminal.stderr b/tests/ui/macros/macro-match-nonterminal.stderr index 831579c4fefd5..f221f92c3cda6 100644 --- a/tests/ui/macros/macro-match-nonterminal.stderr +++ b/tests/ui/macros/macro-match-nonterminal.stderr @@ -1,14 +1,14 @@ error: missing fragment specifier - --> $DIR/macro-match-nonterminal.rs:2:8 + --> $DIR/macro-match-nonterminal.rs:2:6 | LL | ($a, $b) => { - | ^ + | ^^ error: missing fragment specifier - --> $DIR/macro-match-nonterminal.rs:2:8 + --> $DIR/macro-match-nonterminal.rs:2:6 | LL | ($a, $b) => { - | ^ + | ^^ | = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release! = note: for more information, see issue #40107 @@ -27,10 +27,10 @@ error: aborting due to 3 previous errors Future incompatibility report: Future breakage diagnostic: error: missing fragment specifier - --> $DIR/macro-match-nonterminal.rs:2:8 + --> $DIR/macro-match-nonterminal.rs:2:6 | LL | ($a, $b) => { - | ^ + | ^^ | = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release! = note: for more information, see issue #40107 diff --git a/tests/ui/macros/stringify.rs b/tests/ui/macros/stringify.rs index f06c1f99069ae..37409dd066d5d 100644 --- a/tests/ui/macros/stringify.rs +++ b/tests/ui/macros/stringify.rs @@ -33,8 +33,6 @@ macro_rules! stmt { ($stmt:stmt) => { stringify!($stmt) }; } macro_rules! ty { ($ty:ty) => { stringify!($ty) }; } macro_rules! vis { ($vis:vis) => { stringify!($vis) }; } -// Use this when AST pretty-printing and TokenStream pretty-printing give -// the same result (which is preferable.) macro_rules! c1 { ($frag:ident, [$($tt:tt)*], $s:literal) => { // Prior to #125174: @@ -66,6 +64,8 @@ fn test_block() { } ], "{ let _; true }" ); + + // Attributes are not allowed on vanilla blocks. } #[test] @@ -332,6 +332,20 @@ fn test_expr() { // ExprKind::FormatArgs: untestable because this test works pre-expansion. // ExprKind::Err: untestable. + + // Ones involving attributes. 
+ c1!(expr, [ #[aa] 1 ], "#[aa] 1"); + c1!(expr, [ #[aa] #[bb] x ], "#[aa] #[bb] x"); + c1!(expr, [ #[aa] 1 + 2 ], "#[aa] 1 + 2"); + c1!(expr, [ #[aa] x + 2 ], "#[aa] x + 2"); + c1!(expr, [ #[aa] 1 / #[bb] 2 ], "#[aa] 1 / #[bb] 2"); + c1!(expr, [ #[aa] x / #[bb] 2 ], "#[aa] x / #[bb] 2"); + c1!(expr, [ 1 << #[bb] 2 ], "1 << #[bb] 2"); + c1!(expr, [ x << #[bb] 2 ], "x << #[bb] 2"); + c1!(expr, [ #[aa] (1 + 2) ], "#[aa] (1 + 2)"); + c1!(expr, [ #[aa] #[bb] (x + 2) ], "#[aa] #[bb] (x + 2)"); + c1!(expr, [ #[aa] x[0].p ], "#[aa] x[0].p"); + c1!(expr, [ #[aa] { #![bb] 0 } ], "#[aa] { #![bb] 0 }"); } #[test] @@ -484,6 +498,11 @@ fn test_item() { "macro_rules! stringify { () => {}; }" ); c1!(item, [ pub macro stringify() {} ], "pub macro stringify() {}"); + + // Ones involving attributes. + c1!(item, [ #[aa] mod m; ], "#[aa] mod m;"); + c1!(item, [ mod m { #![bb] } ], "mod m { #![bb] }"); + c1!(item, [ #[aa] mod m { #![bb] } ], "#[aa] mod m { #![bb] }"); } #[test] @@ -492,6 +511,8 @@ fn test_meta() { c1!(meta, [ k = "v" ], "k = \"v\""); c1!(meta, [ list(k1, k2 = "v") ], "list(k1, k2 = \"v\")"); c1!(meta, [ serde::k ], "serde::k"); + + // Attributes are not allowed on metas. } #[test] @@ -580,6 +601,8 @@ fn test_pat() { c1!(pat, [ mac!(...) ], "mac!(...)"); c1!(pat, [ mac![...] ], "mac![...]"); c1!(pat, [ mac! { ... } ], "mac! { ... }"); + + // Attributes are not allowed on patterns. } #[test] @@ -593,6 +616,8 @@ fn test_path() { c1!(path, [ Self::<'static> ], "Self::<'static>"); c1!(path, [ Self() ], "Self()"); c1!(path, [ Self() -> () ], "Self() -> ()"); + + // Attributes are not allowed on paths. } #[test] @@ -622,6 +647,20 @@ fn test_stmt() { c1!(stmt, [ mac!(...) ], "mac!(...)"); c1!(stmt, [ mac![...] ], "mac![...]"); c1!(stmt, [ mac! { ... } ], "mac! { ... }"); + + // Ones involving attributes. + c1!(stmt, [ #[aa] 1 ], "#[aa] 1"); + c1!(stmt, [ #[aa] #[bb] x ], "#[aa] #[bb] x"); + c1!(stmt, [ #[aa] 1 as u32 ], "#[aa] 1 as u32"); + c1!(stmt, [ #[aa] x as u32 ], "#[aa] x as u32"); + c1!(stmt, [ #[aa] 1 .. #[bb] 2 ], "#[aa] 1 .. #[bb] 2"); + c1!(stmt, [ #[aa] x .. #[bb] 2 ], "#[aa] x .. #[bb] 2"); + c1!(stmt, [ 1 || #[bb] 2 ], "1 || #[bb] 2"); + c1!(stmt, [ x || #[bb] 2 ], "x || #[bb] 2"); + c1!(stmt, [ #[aa] (1 + 2) ], "#[aa] (1 + 2)"); + c1!(stmt, [ #[aa] #[bb] (x + 2) ], "#[aa] #[bb] (x + 2)"); + c1!(stmt, [ #[aa] x[0].p ], "#[aa] x[0].p"); + c1!(stmt, [ #[aa] { #![bb] 0 } ], "#[aa] { #![bb] 0 }"); } #[test] @@ -708,6 +747,8 @@ fn test_ty() { // TyKind::CVarArgs // FIXME: todo + + // Attributes are not allowed on types. } #[test] @@ -732,6 +773,8 @@ fn test_vis() { macro_rules! inherited_vis { ($vis:vis struct) => { vis!($vis) }; } assert_eq!(inherited_vis!(struct), ""); assert_eq!(stringify!(), ""); + + // Attributes are not allowed on visibilities. } macro_rules! 
p { diff --git a/tests/ui/pattern/usefulness/empty-types.exhaustive_patterns.stderr b/tests/ui/pattern/usefulness/empty-types.exhaustive_patterns.stderr index 17cb6fbd94b88..f6f341d6f2f1e 100644 --- a/tests/ui/pattern/usefulness/empty-types.exhaustive_patterns.stderr +++ b/tests/ui/pattern/usefulness/empty-types.exhaustive_patterns.stderr @@ -254,7 +254,7 @@ LL | _ => {} = note: this pattern matches no values because `!` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:279:9 + --> $DIR/empty-types.rs:281:9 | LL | _ => {} | ^ @@ -262,7 +262,7 @@ LL | _ => {} = note: this pattern matches no values because `!` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:282:9 + --> $DIR/empty-types.rs:284:9 | LL | (_, _) => {} | ^^^^^^ @@ -270,7 +270,7 @@ LL | (_, _) => {} = note: this pattern matches no values because `(!, !)` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:285:9 + --> $DIR/empty-types.rs:287:9 | LL | Ok(_) => {} | ^^^^^ @@ -278,7 +278,7 @@ LL | Ok(_) => {} = note: this pattern matches no values because `Result` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:286:9 + --> $DIR/empty-types.rs:288:9 | LL | Err(_) => {} | ^^^^^^ @@ -286,7 +286,7 @@ LL | Err(_) => {} = note: this pattern matches no values because `Result` is uninhabited error[E0004]: non-exhaustive patterns: type `&[!]` is non-empty - --> $DIR/empty-types.rs:318:11 + --> $DIR/empty-types.rs:327:11 | LL | match slice_never {} | ^^^^^^^^^^^ @@ -300,7 +300,7 @@ LL + } | error[E0004]: non-exhaustive patterns: `&[]` not covered - --> $DIR/empty-types.rs:329:11 + --> $DIR/empty-types.rs:338:11 | LL | match slice_never { | ^^^^^^^^^^^ pattern `&[]` not covered @@ -313,7 +313,7 @@ LL + &[] => todo!() | error[E0004]: non-exhaustive patterns: `&[]` not covered - --> $DIR/empty-types.rs:343:11 + --> $DIR/empty-types.rs:352:11 | LL | match slice_never { | ^^^^^^^^^^^ pattern `&[]` not covered @@ -327,7 +327,7 @@ LL + &[] => todo!() | error[E0004]: non-exhaustive patterns: type `[!]` is non-empty - --> $DIR/empty-types.rs:350:11 + --> $DIR/empty-types.rs:359:11 | LL | match *slice_never {} | ^^^^^^^^^^^^ @@ -341,7 +341,7 @@ LL + } | error: unreachable pattern - --> $DIR/empty-types.rs:359:9 + --> $DIR/empty-types.rs:368:9 | LL | _ => {} | ^ @@ -349,7 +349,7 @@ LL | _ => {} = note: this pattern matches no values because `[!; 3]` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:362:9 + --> $DIR/empty-types.rs:371:9 | LL | [_, _, _] => {} | ^^^^^^^^^ @@ -357,7 +357,7 @@ LL | [_, _, _] => {} = note: this pattern matches no values because `[!; 3]` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:365:9 + --> $DIR/empty-types.rs:374:9 | LL | [_, ..] => {} | ^^^^^^^ @@ -365,7 +365,7 @@ LL | [_, ..] 
=> {} = note: this pattern matches no values because `[!; 3]` is uninhabited error[E0004]: non-exhaustive patterns: type `[!; 0]` is non-empty - --> $DIR/empty-types.rs:379:11 + --> $DIR/empty-types.rs:388:11 | LL | match array_0_never {} | ^^^^^^^^^^^^^ @@ -379,7 +379,7 @@ LL + } | error: unreachable pattern - --> $DIR/empty-types.rs:386:9 + --> $DIR/empty-types.rs:395:9 | LL | [] => {} | -- matches all the values already @@ -387,7 +387,7 @@ LL | _ => {} | ^ unreachable pattern error[E0004]: non-exhaustive patterns: `[]` not covered - --> $DIR/empty-types.rs:388:11 + --> $DIR/empty-types.rs:397:11 | LL | match array_0_never { | ^^^^^^^^^^^^^ pattern `[]` not covered @@ -401,7 +401,7 @@ LL + [] => todo!() | error: unreachable pattern - --> $DIR/empty-types.rs:407:9 + --> $DIR/empty-types.rs:416:9 | LL | Some(_) => {} | ^^^^^^^ @@ -409,7 +409,7 @@ LL | Some(_) => {} = note: this pattern matches no values because `!` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:412:9 + --> $DIR/empty-types.rs:421:9 | LL | Some(_a) => {} | ^^^^^^^^ @@ -417,7 +417,7 @@ LL | Some(_a) => {} = note: this pattern matches no values because `!` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:417:9 + --> $DIR/empty-types.rs:426:9 | LL | None => {} | ---- matches all the values already @@ -426,7 +426,7 @@ LL | _ => {} | ^ unreachable pattern error: unreachable pattern - --> $DIR/empty-types.rs:422:9 + --> $DIR/empty-types.rs:431:9 | LL | None => {} | ---- matches all the values already @@ -435,7 +435,7 @@ LL | _a => {} | ^^ unreachable pattern error: unreachable pattern - --> $DIR/empty-types.rs:594:9 + --> $DIR/empty-types.rs:603:9 | LL | _ => {} | ^ @@ -443,7 +443,7 @@ LL | _ => {} = note: this pattern matches no values because `!` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:597:9 + --> $DIR/empty-types.rs:606:9 | LL | _x => {} | ^^ @@ -451,7 +451,7 @@ LL | _x => {} = note: this pattern matches no values because `!` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:600:9 + --> $DIR/empty-types.rs:609:9 | LL | _ if false => {} | ^ @@ -459,7 +459,7 @@ LL | _ if false => {} = note: this pattern matches no values because `!` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:603:9 + --> $DIR/empty-types.rs:612:9 | LL | _x if false => {} | ^^ diff --git a/tests/ui/pattern/usefulness/empty-types.never_pats.stderr b/tests/ui/pattern/usefulness/empty-types.never_pats.stderr index 1ecb15f2cae3a..55a138c2d1cb2 100644 --- a/tests/ui/pattern/usefulness/empty-types.never_pats.stderr +++ b/tests/ui/pattern/usefulness/empty-types.never_pats.stderr @@ -296,7 +296,7 @@ LL | _ => {} = note: this pattern matches no values because `!` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:279:9 + --> $DIR/empty-types.rs:281:9 | LL | _ => {} | ^ @@ -304,7 +304,7 @@ LL | _ => {} = note: this pattern matches no values because `!` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:282:9 + --> $DIR/empty-types.rs:284:9 | LL | (_, _) => {} | ^^^^^^ @@ -312,7 +312,7 @@ LL | (_, _) => {} = note: this pattern matches no values because `(!, !)` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:285:9 + --> $DIR/empty-types.rs:287:9 | LL | Ok(_) => {} | ^^^^^ @@ -320,15 +320,29 @@ LL | Ok(_) => {} = note: this pattern matches no values because `Result` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:286:9 + --> $DIR/empty-types.rs:288:9 | LL | Err(_) => {} | ^^^^^^ | = note: this 
pattern matches no values because `Result` is uninhabited +error[E0005]: refutable pattern in local binding + --> $DIR/empty-types.rs:297:13 + | +LL | let Ok(_) = *ptr_result_never_err; + | ^^^^^ pattern `Err(!)` not covered + | + = note: `let` bindings require an "irrefutable pattern", like a `struct` or an `enum` with only one variant + = note: for more information, visit https://doc.rust-lang.org/book/ch18-02-refutability.html + = note: the matched value is of type `Result` +help: you might want to use `if let` to ignore the variant that isn't matched + | +LL | if let Ok(_) = *ptr_result_never_err { todo!() }; + | ++ +++++++++++ + error[E0004]: non-exhaustive patterns: type `(u32, !)` is non-empty - --> $DIR/empty-types.rs:307:11 + --> $DIR/empty-types.rs:316:11 | LL | match *x {} | ^^ @@ -342,7 +356,7 @@ LL ~ } | error[E0004]: non-exhaustive patterns: type `(!, !)` is non-empty - --> $DIR/empty-types.rs:309:11 + --> $DIR/empty-types.rs:318:11 | LL | match *x {} | ^^ @@ -356,7 +370,7 @@ LL ~ } | error[E0004]: non-exhaustive patterns: `Ok(!)` and `Err(!)` not covered - --> $DIR/empty-types.rs:311:11 + --> $DIR/empty-types.rs:320:11 | LL | match *x {} | ^^ patterns `Ok(!)` and `Err(!)` not covered @@ -378,7 +392,7 @@ LL ~ } | error[E0004]: non-exhaustive patterns: type `[!; 3]` is non-empty - --> $DIR/empty-types.rs:313:11 + --> $DIR/empty-types.rs:322:11 | LL | match *x {} | ^^ @@ -392,7 +406,7 @@ LL ~ } | error[E0004]: non-exhaustive patterns: type `&[!]` is non-empty - --> $DIR/empty-types.rs:318:11 + --> $DIR/empty-types.rs:327:11 | LL | match slice_never {} | ^^^^^^^^^^^ @@ -406,7 +420,7 @@ LL + } | error[E0004]: non-exhaustive patterns: `&[!, ..]` not covered - --> $DIR/empty-types.rs:320:11 + --> $DIR/empty-types.rs:329:11 | LL | match slice_never { | ^^^^^^^^^^^ pattern `&[!, ..]` not covered @@ -420,7 +434,7 @@ LL + &[!, ..] | error[E0004]: non-exhaustive patterns: `&[]`, `&[!]` and `&[!, !]` not covered - --> $DIR/empty-types.rs:329:11 + --> $DIR/empty-types.rs:338:11 | LL | match slice_never { | ^^^^^^^^^^^ patterns `&[]`, `&[!]` and `&[!, !]` not covered @@ -433,7 +447,7 @@ LL + &[] | &[!] | &[!, !] => todo!() | error[E0004]: non-exhaustive patterns: `&[]` and `&[!, ..]` not covered - --> $DIR/empty-types.rs:343:11 + --> $DIR/empty-types.rs:352:11 | LL | match slice_never { | ^^^^^^^^^^^ patterns `&[]` and `&[!, ..]` not covered @@ -447,7 +461,7 @@ LL + &[] | &[!, ..] => todo!() | error[E0004]: non-exhaustive patterns: type `[!]` is non-empty - --> $DIR/empty-types.rs:350:11 + --> $DIR/empty-types.rs:359:11 | LL | match *slice_never {} | ^^^^^^^^^^^^ @@ -461,7 +475,7 @@ LL + } | error: unreachable pattern - --> $DIR/empty-types.rs:359:9 + --> $DIR/empty-types.rs:368:9 | LL | _ => {} | ^ @@ -469,7 +483,7 @@ LL | _ => {} = note: this pattern matches no values because `[!; 3]` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:362:9 + --> $DIR/empty-types.rs:371:9 | LL | [_, _, _] => {} | ^^^^^^^^^ @@ -477,7 +491,7 @@ LL | [_, _, _] => {} = note: this pattern matches no values because `[!; 3]` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:365:9 + --> $DIR/empty-types.rs:374:9 | LL | [_, ..] => {} | ^^^^^^^ @@ -485,7 +499,7 @@ LL | [_, ..] 
=> {} = note: this pattern matches no values because `[!; 3]` is uninhabited error[E0004]: non-exhaustive patterns: type `[!; 0]` is non-empty - --> $DIR/empty-types.rs:379:11 + --> $DIR/empty-types.rs:388:11 | LL | match array_0_never {} | ^^^^^^^^^^^^^ @@ -499,7 +513,7 @@ LL + } | error: unreachable pattern - --> $DIR/empty-types.rs:386:9 + --> $DIR/empty-types.rs:395:9 | LL | [] => {} | -- matches all the values already @@ -507,7 +521,7 @@ LL | _ => {} | ^ unreachable pattern error[E0004]: non-exhaustive patterns: `[]` not covered - --> $DIR/empty-types.rs:388:11 + --> $DIR/empty-types.rs:397:11 | LL | match array_0_never { | ^^^^^^^^^^^^^ pattern `[]` not covered @@ -521,7 +535,7 @@ LL + [] => todo!() | error: unreachable pattern - --> $DIR/empty-types.rs:407:9 + --> $DIR/empty-types.rs:416:9 | LL | Some(_) => {} | ^^^^^^^ @@ -529,7 +543,7 @@ LL | Some(_) => {} = note: this pattern matches no values because `!` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:412:9 + --> $DIR/empty-types.rs:421:9 | LL | Some(_a) => {} | ^^^^^^^^ @@ -537,7 +551,7 @@ LL | Some(_a) => {} = note: this pattern matches no values because `!` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:417:9 + --> $DIR/empty-types.rs:426:9 | LL | None => {} | ---- matches all the values already @@ -546,7 +560,7 @@ LL | _ => {} | ^ unreachable pattern error: unreachable pattern - --> $DIR/empty-types.rs:422:9 + --> $DIR/empty-types.rs:431:9 | LL | None => {} | ---- matches all the values already @@ -555,7 +569,7 @@ LL | _a => {} | ^^ unreachable pattern error[E0004]: non-exhaustive patterns: `&Some(!)` not covered - --> $DIR/empty-types.rs:442:11 + --> $DIR/empty-types.rs:451:11 | LL | match ref_opt_never { | ^^^^^^^^^^^^^ pattern `&Some(!)` not covered @@ -574,7 +588,7 @@ LL + &Some(!) | error[E0004]: non-exhaustive patterns: `Some(!)` not covered - --> $DIR/empty-types.rs:483:11 + --> $DIR/empty-types.rs:492:11 | LL | match *ref_opt_never { | ^^^^^^^^^^^^^^ pattern `Some(!)` not covered @@ -593,7 +607,7 @@ LL + Some(!) | error[E0004]: non-exhaustive patterns: `Err(!)` not covered - --> $DIR/empty-types.rs:531:11 + --> $DIR/empty-types.rs:540:11 | LL | match *ref_res_never { | ^^^^^^^^^^^^^^ pattern `Err(!)` not covered @@ -612,7 +626,7 @@ LL + Err(!) | error[E0004]: non-exhaustive patterns: `Err(!)` not covered - --> $DIR/empty-types.rs:542:11 + --> $DIR/empty-types.rs:551:11 | LL | match *ref_res_never { | ^^^^^^^^^^^^^^ pattern `Err(!)` not covered @@ -631,7 +645,7 @@ LL + Err(!) 
| error[E0004]: non-exhaustive patterns: type `(u32, !)` is non-empty - --> $DIR/empty-types.rs:561:11 + --> $DIR/empty-types.rs:570:11 | LL | match *ref_tuple_half_never {} | ^^^^^^^^^^^^^^^^^^^^^ @@ -645,7 +659,7 @@ LL + } | error: unreachable pattern - --> $DIR/empty-types.rs:594:9 + --> $DIR/empty-types.rs:603:9 | LL | _ => {} | ^ @@ -653,7 +667,7 @@ LL | _ => {} = note: this pattern matches no values because `!` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:597:9 + --> $DIR/empty-types.rs:606:9 | LL | _x => {} | ^^ @@ -661,7 +675,7 @@ LL | _x => {} = note: this pattern matches no values because `!` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:600:9 + --> $DIR/empty-types.rs:609:9 | LL | _ if false => {} | ^ @@ -669,7 +683,7 @@ LL | _ if false => {} = note: this pattern matches no values because `!` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:603:9 + --> $DIR/empty-types.rs:612:9 | LL | _x if false => {} | ^^ @@ -677,7 +691,7 @@ LL | _x if false => {} = note: this pattern matches no values because `!` is uninhabited error[E0004]: non-exhaustive patterns: `&!` not covered - --> $DIR/empty-types.rs:628:11 + --> $DIR/empty-types.rs:637:11 | LL | match ref_never { | ^^^^^^^^^ pattern `&!` not covered @@ -693,7 +707,7 @@ LL + &! | error[E0004]: non-exhaustive patterns: `Ok(!)` not covered - --> $DIR/empty-types.rs:644:11 + --> $DIR/empty-types.rs:653:11 | LL | match *ref_result_never { | ^^^^^^^^^^^^^^^^^ pattern `Ok(!)` not covered @@ -712,7 +726,7 @@ LL + Ok(!) | error[E0004]: non-exhaustive patterns: `Some(!)` not covered - --> $DIR/empty-types.rs:664:11 + --> $DIR/empty-types.rs:673:11 | LL | match *x { | ^^ pattern `Some(!)` not covered @@ -730,7 +744,7 @@ LL ~ None => {}, LL + Some(!) | -error: aborting due to 64 previous errors; 1 warning emitted +error: aborting due to 65 previous errors; 1 warning emitted Some errors have detailed explanations: E0004, E0005. For more information about an error, try `rustc --explain E0004`. 
diff --git a/tests/ui/pattern/usefulness/empty-types.normal.stderr b/tests/ui/pattern/usefulness/empty-types.normal.stderr index c3421cd592edb..83b3989ffdefb 100644 --- a/tests/ui/pattern/usefulness/empty-types.normal.stderr +++ b/tests/ui/pattern/usefulness/empty-types.normal.stderr @@ -287,7 +287,7 @@ LL | _ => {} = note: this pattern matches no values because `!` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:279:9 + --> $DIR/empty-types.rs:281:9 | LL | _ => {} | ^ @@ -295,7 +295,7 @@ LL | _ => {} = note: this pattern matches no values because `!` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:282:9 + --> $DIR/empty-types.rs:284:9 | LL | (_, _) => {} | ^^^^^^ @@ -303,7 +303,7 @@ LL | (_, _) => {} = note: this pattern matches no values because `(!, !)` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:285:9 + --> $DIR/empty-types.rs:287:9 | LL | Ok(_) => {} | ^^^^^ @@ -311,15 +311,29 @@ LL | Ok(_) => {} = note: this pattern matches no values because `Result` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:286:9 + --> $DIR/empty-types.rs:288:9 | LL | Err(_) => {} | ^^^^^^ | = note: this pattern matches no values because `Result` is uninhabited +error[E0005]: refutable pattern in local binding + --> $DIR/empty-types.rs:297:13 + | +LL | let Ok(_) = *ptr_result_never_err; + | ^^^^^ pattern `Err(_)` not covered + | + = note: `let` bindings require an "irrefutable pattern", like a `struct` or an `enum` with only one variant + = note: for more information, visit https://doc.rust-lang.org/book/ch18-02-refutability.html + = note: the matched value is of type `Result` +help: you might want to use `if let` to ignore the variant that isn't matched + | +LL | if let Ok(_) = *ptr_result_never_err { todo!() }; + | ++ +++++++++++ + error[E0004]: non-exhaustive patterns: type `(u32, !)` is non-empty - --> $DIR/empty-types.rs:307:11 + --> $DIR/empty-types.rs:316:11 | LL | match *x {} | ^^ @@ -333,7 +347,7 @@ LL ~ } | error[E0004]: non-exhaustive patterns: type `(!, !)` is non-empty - --> $DIR/empty-types.rs:309:11 + --> $DIR/empty-types.rs:318:11 | LL | match *x {} | ^^ @@ -347,7 +361,7 @@ LL ~ } | error[E0004]: non-exhaustive patterns: `Ok(_)` and `Err(_)` not covered - --> $DIR/empty-types.rs:311:11 + --> $DIR/empty-types.rs:320:11 | LL | match *x {} | ^^ patterns `Ok(_)` and `Err(_)` not covered @@ -369,7 +383,7 @@ LL ~ } | error[E0004]: non-exhaustive patterns: type `[!; 3]` is non-empty - --> $DIR/empty-types.rs:313:11 + --> $DIR/empty-types.rs:322:11 | LL | match *x {} | ^^ @@ -383,7 +397,7 @@ LL ~ } | error[E0004]: non-exhaustive patterns: type `&[!]` is non-empty - --> $DIR/empty-types.rs:318:11 + --> $DIR/empty-types.rs:327:11 | LL | match slice_never {} | ^^^^^^^^^^^ @@ -397,7 +411,7 @@ LL + } | error[E0004]: non-exhaustive patterns: `&[_, ..]` not covered - --> $DIR/empty-types.rs:320:11 + --> $DIR/empty-types.rs:329:11 | LL | match slice_never { | ^^^^^^^^^^^ pattern `&[_, ..]` not covered @@ -411,7 +425,7 @@ LL + &[_, ..] 
=> todo!() | error[E0004]: non-exhaustive patterns: `&[]`, `&[_]` and `&[_, _]` not covered - --> $DIR/empty-types.rs:329:11 + --> $DIR/empty-types.rs:338:11 | LL | match slice_never { | ^^^^^^^^^^^ patterns `&[]`, `&[_]` and `&[_, _]` not covered @@ -424,7 +438,7 @@ LL + &[] | &[_] | &[_, _] => todo!() | error[E0004]: non-exhaustive patterns: `&[]` and `&[_, ..]` not covered - --> $DIR/empty-types.rs:343:11 + --> $DIR/empty-types.rs:352:11 | LL | match slice_never { | ^^^^^^^^^^^ patterns `&[]` and `&[_, ..]` not covered @@ -438,7 +452,7 @@ LL + &[] | &[_, ..] => todo!() | error[E0004]: non-exhaustive patterns: type `[!]` is non-empty - --> $DIR/empty-types.rs:350:11 + --> $DIR/empty-types.rs:359:11 | LL | match *slice_never {} | ^^^^^^^^^^^^ @@ -452,7 +466,7 @@ LL + } | error: unreachable pattern - --> $DIR/empty-types.rs:359:9 + --> $DIR/empty-types.rs:368:9 | LL | _ => {} | ^ @@ -460,7 +474,7 @@ LL | _ => {} = note: this pattern matches no values because `[!; 3]` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:362:9 + --> $DIR/empty-types.rs:371:9 | LL | [_, _, _] => {} | ^^^^^^^^^ @@ -468,7 +482,7 @@ LL | [_, _, _] => {} = note: this pattern matches no values because `[!; 3]` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:365:9 + --> $DIR/empty-types.rs:374:9 | LL | [_, ..] => {} | ^^^^^^^ @@ -476,7 +490,7 @@ LL | [_, ..] => {} = note: this pattern matches no values because `[!; 3]` is uninhabited error[E0004]: non-exhaustive patterns: type `[!; 0]` is non-empty - --> $DIR/empty-types.rs:379:11 + --> $DIR/empty-types.rs:388:11 | LL | match array_0_never {} | ^^^^^^^^^^^^^ @@ -490,7 +504,7 @@ LL + } | error: unreachable pattern - --> $DIR/empty-types.rs:386:9 + --> $DIR/empty-types.rs:395:9 | LL | [] => {} | -- matches all the values already @@ -498,7 +512,7 @@ LL | _ => {} | ^ unreachable pattern error[E0004]: non-exhaustive patterns: `[]` not covered - --> $DIR/empty-types.rs:388:11 + --> $DIR/empty-types.rs:397:11 | LL | match array_0_never { | ^^^^^^^^^^^^^ pattern `[]` not covered @@ -512,7 +526,7 @@ LL + [] => todo!() | error: unreachable pattern - --> $DIR/empty-types.rs:407:9 + --> $DIR/empty-types.rs:416:9 | LL | Some(_) => {} | ^^^^^^^ @@ -520,7 +534,7 @@ LL | Some(_) => {} = note: this pattern matches no values because `!` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:412:9 + --> $DIR/empty-types.rs:421:9 | LL | Some(_a) => {} | ^^^^^^^^ @@ -528,7 +542,7 @@ LL | Some(_a) => {} = note: this pattern matches no values because `!` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:417:9 + --> $DIR/empty-types.rs:426:9 | LL | None => {} | ---- matches all the values already @@ -537,7 +551,7 @@ LL | _ => {} | ^ unreachable pattern error: unreachable pattern - --> $DIR/empty-types.rs:422:9 + --> $DIR/empty-types.rs:431:9 | LL | None => {} | ---- matches all the values already @@ -546,7 +560,7 @@ LL | _a => {} | ^^ unreachable pattern error[E0004]: non-exhaustive patterns: `&Some(_)` not covered - --> $DIR/empty-types.rs:442:11 + --> $DIR/empty-types.rs:451:11 | LL | match ref_opt_never { | ^^^^^^^^^^^^^ pattern `&Some(_)` not covered @@ -565,7 +579,7 @@ LL + &Some(_) => todo!() | error[E0004]: non-exhaustive patterns: `Some(_)` not covered - --> $DIR/empty-types.rs:483:11 + --> $DIR/empty-types.rs:492:11 | LL | match *ref_opt_never { | ^^^^^^^^^^^^^^ pattern `Some(_)` not covered @@ -584,7 +598,7 @@ LL + Some(_) => todo!() | error[E0004]: non-exhaustive patterns: `Err(_)` not covered - --> 
$DIR/empty-types.rs:531:11 + --> $DIR/empty-types.rs:540:11 | LL | match *ref_res_never { | ^^^^^^^^^^^^^^ pattern `Err(_)` not covered @@ -603,7 +617,7 @@ LL + Err(_) => todo!() | error[E0004]: non-exhaustive patterns: `Err(_)` not covered - --> $DIR/empty-types.rs:542:11 + --> $DIR/empty-types.rs:551:11 | LL | match *ref_res_never { | ^^^^^^^^^^^^^^ pattern `Err(_)` not covered @@ -622,7 +636,7 @@ LL + Err(_) => todo!() | error[E0004]: non-exhaustive patterns: type `(u32, !)` is non-empty - --> $DIR/empty-types.rs:561:11 + --> $DIR/empty-types.rs:570:11 | LL | match *ref_tuple_half_never {} | ^^^^^^^^^^^^^^^^^^^^^ @@ -636,7 +650,7 @@ LL + } | error: unreachable pattern - --> $DIR/empty-types.rs:594:9 + --> $DIR/empty-types.rs:603:9 | LL | _ => {} | ^ @@ -644,7 +658,7 @@ LL | _ => {} = note: this pattern matches no values because `!` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:597:9 + --> $DIR/empty-types.rs:606:9 | LL | _x => {} | ^^ @@ -652,7 +666,7 @@ LL | _x => {} = note: this pattern matches no values because `!` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:600:9 + --> $DIR/empty-types.rs:609:9 | LL | _ if false => {} | ^ @@ -660,7 +674,7 @@ LL | _ if false => {} = note: this pattern matches no values because `!` is uninhabited error: unreachable pattern - --> $DIR/empty-types.rs:603:9 + --> $DIR/empty-types.rs:612:9 | LL | _x if false => {} | ^^ @@ -668,7 +682,7 @@ LL | _x if false => {} = note: this pattern matches no values because `!` is uninhabited error[E0004]: non-exhaustive patterns: `&_` not covered - --> $DIR/empty-types.rs:628:11 + --> $DIR/empty-types.rs:637:11 | LL | match ref_never { | ^^^^^^^^^ pattern `&_` not covered @@ -684,7 +698,7 @@ LL + &_ => todo!() | error[E0004]: non-exhaustive patterns: `Ok(_)` not covered - --> $DIR/empty-types.rs:644:11 + --> $DIR/empty-types.rs:653:11 | LL | match *ref_result_never { | ^^^^^^^^^^^^^^^^^ pattern `Ok(_)` not covered @@ -703,7 +717,7 @@ LL + Ok(_) => todo!() | error[E0004]: non-exhaustive patterns: `Some(_)` not covered - --> $DIR/empty-types.rs:664:11 + --> $DIR/empty-types.rs:673:11 | LL | match *x { | ^^ pattern `Some(_)` not covered @@ -721,7 +735,7 @@ LL ~ None => {}, LL + Some(_) => todo!() | -error: aborting due to 64 previous errors +error: aborting due to 65 previous errors Some errors have detailed explanations: E0004, E0005. For more information about an error, try `rustc --explain E0004`. diff --git a/tests/ui/pattern/usefulness/empty-types.rs b/tests/ui/pattern/usefulness/empty-types.rs index 639c48cea12c5..d561a0e9c128b 100644 --- a/tests/ui/pattern/usefulness/empty-types.rs +++ b/tests/ui/pattern/usefulness/empty-types.rs @@ -17,7 +17,7 @@ #[derive(Copy, Clone)] enum Void {} -/// A bunch of never situations that can't be normally constructed. +/// A bunch of never situations that can't be normally constructed so we take them as argument. #[derive(Copy, Clone)] struct NeverBundle { never: !, @@ -272,6 +272,8 @@ fn nested_validity_tracking(bundle: NeverBundle) { let ref_never: &! = &never; let tuple_never: (!, !) = bundle.tuple_never; let result_never: Result = bundle.result_never; + let result_never_err: Result = Ok(0); + let ptr_result_never_err: *const Result = &result_never_err as *const _; let union_never = Uninit::::new(); // These should be considered known_valid and warn unreachable. @@ -287,6 +289,13 @@ fn nested_validity_tracking(bundle: NeverBundle) { } // These should be considered !known_valid and not warn unreachable. 
+ unsafe { + match *ptr_result_never_err { + Ok(_) => {} + Err(_) => {} + } + let Ok(_) = *ptr_result_never_err; //[normal,never_pats]~ ERROR refutable pattern + } match ref_never { &_ => {} } diff --git a/tests/ui/privacy/private-in-public-warn.stderr b/tests/ui/privacy/private-in-public-warn.stderr index 3f7b8c281e706..3743879ffa616 100644 --- a/tests/ui/privacy/private-in-public-warn.stderr +++ b/tests/ui/privacy/private-in-public-warn.stderr @@ -100,7 +100,7 @@ error: type `types::Priv` is more private than the item `types::ef1` --> $DIR/private-in-public-warn.rs:28:9 | LL | pub fn ef1(arg: Priv); - | ^^^^^^^^^^^^^^^^^^^^^ function `types::ef1` is reachable at visibility `pub(crate)` + | ^^^^^^^^^^^^^^^^^^^^^^ function `types::ef1` is reachable at visibility `pub(crate)` | note: but type `types::Priv` is only usable at visibility `pub(self)` --> $DIR/private-in-public-warn.rs:9:5 @@ -112,7 +112,7 @@ error: type `types::Priv` is more private than the item `types::ef2` --> $DIR/private-in-public-warn.rs:29:9 | LL | pub fn ef2() -> Priv; - | ^^^^^^^^^^^^^^^^^^^^ function `types::ef2` is reachable at visibility `pub(crate)` + | ^^^^^^^^^^^^^^^^^^^^^ function `types::ef2` is reachable at visibility `pub(crate)` | note: but type `types::Priv` is only usable at visibility `pub(self)` --> $DIR/private-in-public-warn.rs:9:5 diff --git a/tests/ui/proc-macro/auxiliary/parse-invis-delim-issue-128895.rs b/tests/ui/proc-macro/auxiliary/parse-invis-delim-issue-128895.rs new file mode 100644 index 0000000000000..07e135ee8ebef --- /dev/null +++ b/tests/ui/proc-macro/auxiliary/parse-invis-delim-issue-128895.rs @@ -0,0 +1,44 @@ +//@ force-host +//@ no-prefer-dynamic + +#![crate_type = "proc-macro"] + +extern crate proc_macro; + +use proc_macro::*; + +// This proc macro ignores its input and returns this token stream +// +// impl <«A1»: Comparable> Comparable for («A1»,) {} +// +// where `«`/`»` are invisible delimiters. This was being misparsed in bug +// #128895. 
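// A minimal sketch, assuming the invisible delimiters around `A1` behave as
// transparent grouping: the token stream described in the comment above
// corresponds to this ordinary item, which is what issue #128895 caused the
// parser to reject.
trait Comparable {}
impl<A1: Comparable> Comparable for (A1,) {}
fn main() {}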
+#[proc_macro] +pub fn main(_input: TokenStream) -> TokenStream { + let a1 = TokenTree::Group( + Group::new( + Delimiter::None, + std::iter::once(TokenTree::Ident(Ident::new("A1", Span::call_site()))).collect(), + ) + ); + vec![ + TokenTree::Ident(Ident::new("impl", Span::call_site())), + TokenTree::Punct(Punct::new('<', Spacing::Alone)), + a1.clone(), + TokenTree::Punct(Punct::new(':', Spacing::Alone)), + TokenTree::Ident(Ident::new("Comparable", Span::call_site())), + TokenTree::Punct(Punct::new('>', Spacing::Alone)), + TokenTree::Ident(Ident::new("Comparable", Span::call_site())), + TokenTree::Ident(Ident::new("for", Span::call_site())), + TokenTree::Group( + Group::new( + Delimiter::Parenthesis, + vec![ + a1.clone(), + TokenTree::Punct(Punct::new(',', Spacing::Alone)), + ].into_iter().collect::(), + ) + ), + TokenTree::Group(Group::new(Delimiter::Brace, TokenStream::new())), + ].into_iter().collect::() +} diff --git a/tests/ui/proc-macro/parse-invis-delim-issue-128895.rs b/tests/ui/proc-macro/parse-invis-delim-issue-128895.rs new file mode 100644 index 0000000000000..3d5af5fee217d --- /dev/null +++ b/tests/ui/proc-macro/parse-invis-delim-issue-128895.rs @@ -0,0 +1,14 @@ +//@ aux-build:parse-invis-delim-issue-128895.rs +//@ check-pass + +#![no_std] // Don't load unnecessary hygiene information from std +extern crate std; + +#[macro_use] +extern crate parse_invis_delim_issue_128895; + +trait Comparable {} + +parse_invis_delim_issue_128895::main!(); + +fn main() {} diff --git a/tests/ui/rfcs/rfc-2627-raw-dylib/multiple-declarations.stderr b/tests/ui/rfcs/rfc-2627-raw-dylib/multiple-declarations.stderr index 7866af594449f..b766b5c8dd831 100644 --- a/tests/ui/rfcs/rfc-2627-raw-dylib/multiple-declarations.stderr +++ b/tests/ui/rfcs/rfc-2627-raw-dylib/multiple-declarations.stderr @@ -2,7 +2,7 @@ error: multiple declarations of external function `f` from library `foo.dll` hav --> $DIR/multiple-declarations.rs:13:9 | LL | fn f(x: i32); - | ^^^^^^^^^^^^ + | ^^^^^^^^^^^^^ error: aborting due to 1 previous error diff --git a/tests/ui/rfcs/rfc-2627-raw-dylib/unsupported-abi.stderr b/tests/ui/rfcs/rfc-2627-raw-dylib/unsupported-abi.stderr index d7c7344b596db..ef022404e7f67 100644 --- a/tests/ui/rfcs/rfc-2627-raw-dylib/unsupported-abi.stderr +++ b/tests/ui/rfcs/rfc-2627-raw-dylib/unsupported-abi.stderr @@ -2,7 +2,7 @@ error: ABI not supported by `#[link(kind = "raw-dylib")]` on this architecture --> $DIR/unsupported-abi.rs:6:5 | LL | fn f(x: i32); - | ^^^^^^^^^^^^ + | ^^^^^^^^^^^^^ error: aborting due to 1 previous error diff --git a/tests/ui/rust-2024/unsafe-attributes/in_2024_compatibility.rs b/tests/ui/rust-2024/unsafe-attributes/in_2024_compatibility.rs index c6f9115cde750..f3b8645abaf89 100644 --- a/tests/ui/rust-2024/unsafe-attributes/in_2024_compatibility.rs +++ b/tests/ui/rust-2024/unsafe-attributes/in_2024_compatibility.rs @@ -1,5 +1,4 @@ #![deny(rust_2024_compatibility)] -#![feature(unsafe_attributes)] #[no_mangle] //~^ ERROR: unsafe attribute used without unsafe diff --git a/tests/ui/rust-2024/unsafe-attributes/in_2024_compatibility.stderr b/tests/ui/rust-2024/unsafe-attributes/in_2024_compatibility.stderr index f0689d9883c9e..4629a154ac3ce 100644 --- a/tests/ui/rust-2024/unsafe-attributes/in_2024_compatibility.stderr +++ b/tests/ui/rust-2024/unsafe-attributes/in_2024_compatibility.stderr @@ -1,5 +1,5 @@ error: unsafe attribute used without unsafe - --> $DIR/in_2024_compatibility.rs:4:3 + --> $DIR/in_2024_compatibility.rs:3:3 | LL | #[no_mangle] | ^^^^^^^^^ usage of unsafe attribute diff 
--git a/tests/ui/rust-2024/unsafe-attributes/unsafe-attribute-marked.rs b/tests/ui/rust-2024/unsafe-attributes/unsafe-attribute-marked.rs index 279ced2525a23..7c919fed976f5 100644 --- a/tests/ui/rust-2024/unsafe-attributes/unsafe-attribute-marked.rs +++ b/tests/ui/rust-2024/unsafe-attributes/unsafe-attribute-marked.rs @@ -4,7 +4,6 @@ //@[edition2024] compile-flags: -Zunstable-options //@ check-pass -#![feature(unsafe_attributes)] #[unsafe(no_mangle)] extern "C" fn foo() {} diff --git a/tests/ui/rust-2024/unsafe-attributes/unsafe-attributes-fix.fixed b/tests/ui/rust-2024/unsafe-attributes/unsafe-attributes-fix.fixed index 6ebdff0334c8b..586881d180767 100644 --- a/tests/ui/rust-2024/unsafe-attributes/unsafe-attributes-fix.fixed +++ b/tests/ui/rust-2024/unsafe-attributes/unsafe-attributes-fix.fixed @@ -1,5 +1,4 @@ //@ run-rustfix -#![feature(unsafe_attributes)] #![deny(unsafe_attr_outside_unsafe)] macro_rules! tt { diff --git a/tests/ui/rust-2024/unsafe-attributes/unsafe-attributes-fix.rs b/tests/ui/rust-2024/unsafe-attributes/unsafe-attributes-fix.rs index c78ff45ea4cdf..03e122c7d57e7 100644 --- a/tests/ui/rust-2024/unsafe-attributes/unsafe-attributes-fix.rs +++ b/tests/ui/rust-2024/unsafe-attributes/unsafe-attributes-fix.rs @@ -1,5 +1,4 @@ //@ run-rustfix -#![feature(unsafe_attributes)] #![deny(unsafe_attr_outside_unsafe)] macro_rules! tt { diff --git a/tests/ui/rust-2024/unsafe-attributes/unsafe-attributes-fix.stderr b/tests/ui/rust-2024/unsafe-attributes/unsafe-attributes-fix.stderr index c95984f58ecf7..64debc58905c3 100644 --- a/tests/ui/rust-2024/unsafe-attributes/unsafe-attributes-fix.stderr +++ b/tests/ui/rust-2024/unsafe-attributes/unsafe-attributes-fix.stderr @@ -1,5 +1,5 @@ error: unsafe attribute used without unsafe - --> $DIR/unsafe-attributes-fix.rs:44:6 + --> $DIR/unsafe-attributes-fix.rs:43:6 | LL | tt!([no_mangle]); | ^^^^^^^^^ usage of unsafe attribute @@ -7,7 +7,7 @@ LL | tt!([no_mangle]); = warning: this is accepted in the current edition (Rust 2015) but is a hard error in Rust 2024! 
= note: for more information, see issue #123757 note: the lint level is defined here - --> $DIR/unsafe-attributes-fix.rs:3:9 + --> $DIR/unsafe-attributes-fix.rs:2:9 | LL | #![deny(unsafe_attr_outside_unsafe)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -17,7 +17,7 @@ LL | tt!([unsafe(no_mangle)]); | +++++++ + error: unsafe attribute used without unsafe - --> $DIR/unsafe-attributes-fix.rs:14:11 + --> $DIR/unsafe-attributes-fix.rs:13:11 | LL | #[$e] | ^^ usage of unsafe attribute @@ -34,7 +34,7 @@ LL | #[unsafe($e)] | +++++++ + error: unsafe attribute used without unsafe - --> $DIR/unsafe-attributes-fix.rs:48:7 + --> $DIR/unsafe-attributes-fix.rs:47:7 | LL | meta!(no_mangle); | ^^^^^^^^^ usage of unsafe attribute @@ -47,7 +47,7 @@ LL | meta!(unsafe(no_mangle)); | +++++++ + error: unsafe attribute used without unsafe - --> $DIR/unsafe-attributes-fix.rs:51:8 + --> $DIR/unsafe-attributes-fix.rs:50:8 | LL | meta2!(export_name = "baw"); | ^^^^^^^^^^^ usage of unsafe attribute @@ -60,7 +60,7 @@ LL | meta2!(unsafe(export_name = "baw")); | +++++++ + error: unsafe attribute used without unsafe - --> $DIR/unsafe-attributes-fix.rs:23:11 + --> $DIR/unsafe-attributes-fix.rs:22:11 | LL | #[$e = $l] | ^^ usage of unsafe attribute @@ -77,7 +77,7 @@ LL | #[unsafe($e = $l)] | +++++++ + error: unsafe attribute used without unsafe - --> $DIR/unsafe-attributes-fix.rs:56:3 + --> $DIR/unsafe-attributes-fix.rs:55:3 | LL | #[no_mangle] | ^^^^^^^^^ usage of unsafe attribute diff --git a/tests/ui/rust-2024/unsafe-attributes/unsafe-attributes.edition2024.stderr b/tests/ui/rust-2024/unsafe-attributes/unsafe-attributes.edition2024.stderr index 35475d6671626..fb697e14ef1c7 100644 --- a/tests/ui/rust-2024/unsafe-attributes/unsafe-attributes.edition2024.stderr +++ b/tests/ui/rust-2024/unsafe-attributes/unsafe-attributes.edition2024.stderr @@ -1,5 +1,5 @@ error: unsafe attribute used without unsafe - --> $DIR/unsafe-attributes.rs:9:3 + --> $DIR/unsafe-attributes.rs:8:3 | LL | #[no_mangle] | ^^^^^^^^^ usage of unsafe attribute diff --git a/tests/ui/rust-2024/unsafe-attributes/unsafe-attributes.rs b/tests/ui/rust-2024/unsafe-attributes/unsafe-attributes.rs index 3a6af9dfb2b0f..f6f2994bb6def 100644 --- a/tests/ui/rust-2024/unsafe-attributes/unsafe-attributes.rs +++ b/tests/ui/rust-2024/unsafe-attributes/unsafe-attributes.rs @@ -4,7 +4,6 @@ //@[edition2024] edition:2024 //@[edition2024] compile-flags: -Zunstable-options -#![feature(unsafe_attributes)] #[no_mangle] //[edition2024]~ ERROR: unsafe attribute used without unsafe extern "C" fn foo() {} diff --git a/tests/ui/rust-2024/unsafe-before_exec.e2024.stderr b/tests/ui/rust-2024/unsafe-before_exec.e2024.stderr new file mode 100644 index 0000000000000..2798ccdefd0c6 --- /dev/null +++ b/tests/ui/rust-2024/unsafe-before_exec.e2024.stderr @@ -0,0 +1,11 @@ +error[E0133]: call to unsafe function `before_exec` is unsafe and requires unsafe block + --> $DIR/unsafe-before_exec.rs:14:5 + | +LL | cmd.before_exec(|| Ok(())); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ call to unsafe function + | + = note: consult the function's documentation for information on how to avoid undefined behavior + +error: aborting due to 1 previous error + +For more information about this error, try `rustc --explain E0133`. 
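// The new stderr above expects `before_exec` to require an `unsafe` block under
// edition 2024; a minimal sketch of code that satisfies that requirement (an
// assumption mirroring the test that follows, but using the equivalent,
// always-unsafe `pre_exec` instead of the deprecated `before_exec`):
use std::os::unix::process::CommandExt;
use std::process::Command;

fn main() {
    let mut cmd = Command::new("sleep");
    // SAFETY: the closure only returns Ok(()) and does no work after fork.
    unsafe {
        cmd.pre_exec(|| Ok(()));
    }
    drop(cmd); // never actually spawned, matching the test
}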
diff --git a/tests/ui/rust-2024/unsafe-before_exec.rs b/tests/ui/rust-2024/unsafe-before_exec.rs new file mode 100644 index 0000000000000..540394da80ef8 --- /dev/null +++ b/tests/ui/rust-2024/unsafe-before_exec.rs @@ -0,0 +1,17 @@ +//@ revisions: e2021 e2024 +//@ only-unix +//@[e2021] edition: 2021 +//@[e2021] check-pass +//@[e2024] edition: 2024 +//@[e2024] compile-flags: -Zunstable-options + +use std::process::Command; +use std::os::unix::process::CommandExt; + +#[allow(deprecated)] +fn main() { + let mut cmd = Command::new("sleep"); + cmd.before_exec(|| Ok(())); + //[e2024]~^ ERROR call to unsafe function `before_exec` is unsafe + drop(cmd); // we don't actually run the command. +} diff --git a/tests/ui/rust-2024/unsafe-env-suggestion.stderr b/tests/ui/rust-2024/unsafe-env-suggestion.stderr index 3aa10a3bed682..1506741f6bc9b 100644 --- a/tests/ui/rust-2024/unsafe-env-suggestion.stderr +++ b/tests/ui/rust-2024/unsafe-env-suggestion.stderr @@ -11,7 +11,7 @@ note: the lint level is defined here | LL | #![deny(deprecated_safe_2024)] | ^^^^^^^^^^^^^^^^^^^^ -help: you can wrap the call in an `unsafe` block if you can guarantee the code is only ever called from single-threaded code +help: you can wrap the call in an `unsafe` block if you can guarantee that the environment access only happens in single-threaded code | LL + // TODO: Audit that the environment access only happens in single-threaded code. LL ~ unsafe { env::set_var("FOO", "BAR") }; @@ -25,7 +25,7 @@ LL | env::remove_var("FOO"); | = warning: this is accepted in the current edition (Rust 2015) but is a hard error in Rust 2024! = note: for more information, see issue #27970 -help: you can wrap the call in an `unsafe` block if you can guarantee the code is only ever called from single-threaded code +help: you can wrap the call in an `unsafe` block if you can guarantee that the environment access only happens in single-threaded code | LL + // TODO: Audit that the environment access only happens in single-threaded code. 
LL ~ unsafe { env::remove_var("FOO") }; diff --git a/tests/ui/sanitizer/cfi-assoc-ty-lifetime-issue-123053.rs b/tests/ui/sanitizer/cfi/assoc-ty-lifetime-issue-123053.rs similarity index 100% rename from tests/ui/sanitizer/cfi-assoc-ty-lifetime-issue-123053.rs rename to tests/ui/sanitizer/cfi/assoc-ty-lifetime-issue-123053.rs diff --git a/tests/ui/sanitizer/cfi-async-closures.rs b/tests/ui/sanitizer/cfi/async-closures.rs similarity index 100% rename from tests/ui/sanitizer/cfi-async-closures.rs rename to tests/ui/sanitizer/cfi/async-closures.rs diff --git a/tests/ui/sanitizer/cfi-can-reveal-opaques.rs b/tests/ui/sanitizer/cfi/can-reveal-opaques.rs similarity index 100% rename from tests/ui/sanitizer/cfi-can-reveal-opaques.rs rename to tests/ui/sanitizer/cfi/can-reveal-opaques.rs diff --git a/tests/ui/sanitizer/cfi-canonical-jump-tables-requires-cfi.rs b/tests/ui/sanitizer/cfi/canonical-jump-tables-requires-cfi.rs similarity index 100% rename from tests/ui/sanitizer/cfi-canonical-jump-tables-requires-cfi.rs rename to tests/ui/sanitizer/cfi/canonical-jump-tables-requires-cfi.rs diff --git a/tests/ui/sanitizer/cfi-canonical-jump-tables-requires-cfi.stderr b/tests/ui/sanitizer/cfi/canonical-jump-tables-requires-cfi.stderr similarity index 100% rename from tests/ui/sanitizer/cfi-canonical-jump-tables-requires-cfi.stderr rename to tests/ui/sanitizer/cfi/canonical-jump-tables-requires-cfi.stderr diff --git a/tests/ui/sanitizer/cfi-closures.rs b/tests/ui/sanitizer/cfi/closures.rs similarity index 100% rename from tests/ui/sanitizer/cfi-closures.rs rename to tests/ui/sanitizer/cfi/closures.rs diff --git a/tests/ui/sanitizer/cfi-complex-receiver.rs b/tests/ui/sanitizer/cfi/complex-receiver.rs similarity index 100% rename from tests/ui/sanitizer/cfi-complex-receiver.rs rename to tests/ui/sanitizer/cfi/complex-receiver.rs diff --git a/tests/ui/sanitizer/cfi-coroutine.rs b/tests/ui/sanitizer/cfi/coroutine.rs similarity index 100% rename from tests/ui/sanitizer/cfi-coroutine.rs rename to tests/ui/sanitizer/cfi/coroutine.rs diff --git a/tests/ui/sanitizer/cfi-drop-in-place.rs b/tests/ui/sanitizer/cfi/drop-in-place.rs similarity index 100% rename from tests/ui/sanitizer/cfi-drop-in-place.rs rename to tests/ui/sanitizer/cfi/drop-in-place.rs diff --git a/tests/ui/sanitizer/cfi-drop-no-principal.rs b/tests/ui/sanitizer/cfi/drop-no-principal.rs similarity index 100% rename from tests/ui/sanitizer/cfi-drop-no-principal.rs rename to tests/ui/sanitizer/cfi/drop-no-principal.rs diff --git a/tests/ui/sanitizer/cfi-fn-ptr.rs b/tests/ui/sanitizer/cfi/fn-ptr.rs similarity index 100% rename from tests/ui/sanitizer/cfi-fn-ptr.rs rename to tests/ui/sanitizer/cfi/fn-ptr.rs diff --git a/tests/ui/sanitizer/cfi-generalize-pointers-attr-cfg.rs b/tests/ui/sanitizer/cfi/generalize-pointers-attr-cfg.rs similarity index 100% rename from tests/ui/sanitizer/cfi-generalize-pointers-attr-cfg.rs rename to tests/ui/sanitizer/cfi/generalize-pointers-attr-cfg.rs diff --git a/tests/ui/sanitizer/cfi-generalize-pointers-requires-cfi.rs b/tests/ui/sanitizer/cfi/generalize-pointers-requires-cfi.rs similarity index 100% rename from tests/ui/sanitizer/cfi-generalize-pointers-requires-cfi.rs rename to tests/ui/sanitizer/cfi/generalize-pointers-requires-cfi.rs diff --git a/tests/ui/sanitizer/cfi-generalize-pointers-requires-cfi.stderr b/tests/ui/sanitizer/cfi/generalize-pointers-requires-cfi.stderr similarity index 100% rename from tests/ui/sanitizer/cfi-generalize-pointers-requires-cfi.stderr rename to 
tests/ui/sanitizer/cfi/generalize-pointers-requires-cfi.stderr diff --git a/tests/ui/sanitizer/cfi-invalid-attr-cfi-encoding.rs b/tests/ui/sanitizer/cfi/invalid-attr-encoding.rs similarity index 100% rename from tests/ui/sanitizer/cfi-invalid-attr-cfi-encoding.rs rename to tests/ui/sanitizer/cfi/invalid-attr-encoding.rs diff --git a/tests/ui/sanitizer/cfi-invalid-attr-cfi-encoding.stderr b/tests/ui/sanitizer/cfi/invalid-attr-encoding.stderr similarity index 79% rename from tests/ui/sanitizer/cfi-invalid-attr-cfi-encoding.stderr rename to tests/ui/sanitizer/cfi/invalid-attr-encoding.stderr index 93ec134241e31..1aa6bef17b1e2 100644 --- a/tests/ui/sanitizer/cfi-invalid-attr-cfi-encoding.stderr +++ b/tests/ui/sanitizer/cfi/invalid-attr-encoding.stderr @@ -1,5 +1,5 @@ error: malformed `cfi_encoding` attribute input - --> $DIR/cfi-invalid-attr-cfi-encoding.rs:10:1 + --> $DIR/invalid-attr-encoding.rs:10:1 | LL | #[cfi_encoding] | ^^^^^^^^^^^^^^^ help: must be of the form: `#[cfi_encoding = "encoding"]` diff --git a/tests/ui/sanitizer/cfi-is-incompatible-with-kcfi.aarch64.stderr b/tests/ui/sanitizer/cfi/is-incompatible-with-kcfi.aarch64.stderr similarity index 100% rename from tests/ui/sanitizer/cfi-is-incompatible-with-kcfi.aarch64.stderr rename to tests/ui/sanitizer/cfi/is-incompatible-with-kcfi.aarch64.stderr diff --git a/tests/ui/sanitizer/cfi-is-incompatible-with-kcfi.rs b/tests/ui/sanitizer/cfi/is-incompatible-with-kcfi.rs similarity index 100% rename from tests/ui/sanitizer/cfi-is-incompatible-with-kcfi.rs rename to tests/ui/sanitizer/cfi/is-incompatible-with-kcfi.rs diff --git a/tests/ui/sanitizer/cfi-is-incompatible-with-kcfi.x86_64.stderr b/tests/ui/sanitizer/cfi/is-incompatible-with-kcfi.x86_64.stderr similarity index 100% rename from tests/ui/sanitizer/cfi-is-incompatible-with-kcfi.x86_64.stderr rename to tests/ui/sanitizer/cfi/is-incompatible-with-kcfi.x86_64.stderr diff --git a/tests/ui/sanitizer/cfi-normalize-integers-attr-cfg.rs b/tests/ui/sanitizer/cfi/normalize-integers-attr-cfg.rs similarity index 100% rename from tests/ui/sanitizer/cfi-normalize-integers-attr-cfg.rs rename to tests/ui/sanitizer/cfi/normalize-integers-attr-cfg.rs diff --git a/tests/ui/sanitizer/cfi-normalize-integers-requires-cfi.rs b/tests/ui/sanitizer/cfi/normalize-integers-requires-cfi.rs similarity index 100% rename from tests/ui/sanitizer/cfi-normalize-integers-requires-cfi.rs rename to tests/ui/sanitizer/cfi/normalize-integers-requires-cfi.rs diff --git a/tests/ui/sanitizer/cfi-normalize-integers-requires-cfi.stderr b/tests/ui/sanitizer/cfi/normalize-integers-requires-cfi.stderr similarity index 100% rename from tests/ui/sanitizer/cfi-normalize-integers-requires-cfi.stderr rename to tests/ui/sanitizer/cfi/normalize-integers-requires-cfi.stderr diff --git a/tests/ui/sanitizer/cfi-requires-lto.rs b/tests/ui/sanitizer/cfi/requires-lto.rs similarity index 100% rename from tests/ui/sanitizer/cfi-requires-lto.rs rename to tests/ui/sanitizer/cfi/requires-lto.rs diff --git a/tests/ui/sanitizer/cfi-requires-lto.stderr b/tests/ui/sanitizer/cfi/requires-lto.stderr similarity index 100% rename from tests/ui/sanitizer/cfi-requires-lto.stderr rename to tests/ui/sanitizer/cfi/requires-lto.stderr diff --git a/tests/ui/sanitizer/cfi-self-ref.rs b/tests/ui/sanitizer/cfi/self-ref.rs similarity index 100% rename from tests/ui/sanitizer/cfi-self-ref.rs rename to tests/ui/sanitizer/cfi/self-ref.rs diff --git a/tests/ui/sanitizer/cfi-sized-associated-ty.rs b/tests/ui/sanitizer/cfi/sized-associated-ty.rs similarity index 100% 
rename from tests/ui/sanitizer/cfi-sized-associated-ty.rs rename to tests/ui/sanitizer/cfi/sized-associated-ty.rs diff --git a/tests/ui/sanitizer/cfi-supertraits.rs b/tests/ui/sanitizer/cfi/supertraits.rs similarity index 100% rename from tests/ui/sanitizer/cfi-supertraits.rs rename to tests/ui/sanitizer/cfi/supertraits.rs diff --git a/tests/ui/sanitizer/cfi-virtual-auto.rs b/tests/ui/sanitizer/cfi/virtual-auto.rs similarity index 100% rename from tests/ui/sanitizer/cfi-virtual-auto.rs rename to tests/ui/sanitizer/cfi/virtual-auto.rs diff --git a/tests/ui/sanitizer/cfi-with-rustc-lto-requires-single-codegen-unit.rs b/tests/ui/sanitizer/cfi/with-rustc-lto-requires-single-codegen-unit.rs similarity index 100% rename from tests/ui/sanitizer/cfi-with-rustc-lto-requires-single-codegen-unit.rs rename to tests/ui/sanitizer/cfi/with-rustc-lto-requires-single-codegen-unit.rs diff --git a/tests/ui/sanitizer/cfi-with-rustc-lto-requires-single-codegen-unit.stderr b/tests/ui/sanitizer/cfi/with-rustc-lto-requires-single-codegen-unit.stderr similarity index 100% rename from tests/ui/sanitizer/cfi-with-rustc-lto-requires-single-codegen-unit.stderr rename to tests/ui/sanitizer/cfi/with-rustc-lto-requires-single-codegen-unit.stderr diff --git a/tests/ui/traits/next-solver/alias-bound-unsound.rs b/tests/ui/traits/next-solver/alias-bound-unsound.rs index a5bd3e7afa846..272e5db3b7ad2 100644 --- a/tests/ui/traits/next-solver/alias-bound-unsound.rs +++ b/tests/ui/traits/next-solver/alias-bound-unsound.rs @@ -27,5 +27,6 @@ fn main() { //~| ERROR overflow evaluating the requirement `&<() as Foo>::Item well-formed` //~| ERROR overflow evaluating the requirement `<() as Foo>::Item == _` //~| ERROR overflow evaluating the requirement `<() as Foo>::Item == _` + //~| ERROR overflow evaluating the requirement `<() as Foo>::Item == _` println!("{x}"); } diff --git a/tests/ui/traits/next-solver/alias-bound-unsound.stderr b/tests/ui/traits/next-solver/alias-bound-unsound.stderr index a5c2f215134a9..e5cf5b6bc3d30 100644 --- a/tests/ui/traits/next-solver/alias-bound-unsound.stderr +++ b/tests/ui/traits/next-solver/alias-bound-unsound.stderr @@ -44,6 +44,12 @@ LL | drop(<() as Foo>::copy_me(&x)); | = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` -error: aborting due to 6 previous errors +error[E0275]: overflow evaluating the requirement `<() as Foo>::Item == _` + --> $DIR/alias-bound-unsound.rs:24:31 + | +LL | drop(<() as Foo>::copy_me(&x)); + | ^^ + +error: aborting due to 7 previous errors For more information about this error, try `rustc --explain E0275`. diff --git a/tests/ui/try-trait/bad-interconversion.stderr b/tests/ui/try-trait/bad-interconversion.stderr index 642a93d64e2a5..9aab2cf6ab848 100644 --- a/tests/ui/try-trait/bad-interconversion.stderr +++ b/tests/ui/try-trait/bad-interconversion.stderr @@ -45,7 +45,7 @@ LL | Some(Err("hello")?) | ^ use `.ok()?` if you want to discard the `Result` error information | = help: the trait `FromResidual>` is not implemented for `Option` - = help: the trait `FromResidual` is implemented for `Option` + = help: the trait `FromResidual>` is implemented for `Option` error[E0277]: the `?` operator can only be used on `Option`s in a function that returns `Option` --> $DIR/bad-interconversion.rs:27:33 @@ -56,7 +56,7 @@ LL | Some(ControlFlow::Break(123)?) 
| ^ this `?` produces `ControlFlow<{integer}, Infallible>`, which is incompatible with `Option` | = help: the trait `FromResidual>` is not implemented for `Option` - = help: the trait `FromResidual` is implemented for `Option` + = help: the trait `FromResidual>` is implemented for `Option` error[E0277]: the `?` operator can only be used on `ControlFlow`s in a function that returns `ControlFlow` --> $DIR/bad-interconversion.rs:32:39 diff --git a/tests/ui/try-trait/option-to-result.stderr b/tests/ui/try-trait/option-to-result.stderr index 8055b2a0b0452..1a5a925f92fce 100644 --- a/tests/ui/try-trait/option-to-result.stderr +++ b/tests/ui/try-trait/option-to-result.stderr @@ -20,7 +20,7 @@ LL | a?; | ^ use `.ok()?` if you want to discard the `Result` error information | = help: the trait `FromResidual>` is not implemented for `Option` - = help: the trait `FromResidual` is implemented for `Option` + = help: the trait `FromResidual>` is implemented for `Option` error: aborting due to 2 previous errors diff --git a/tests/ui/typeck/const-in-fn-call-generics.rs b/tests/ui/typeck/const-in-fn-call-generics.rs new file mode 100644 index 0000000000000..675dbcc305475 --- /dev/null +++ b/tests/ui/typeck/const-in-fn-call-generics.rs @@ -0,0 +1,16 @@ +fn generic() {} + +trait Collate { + type Pass; + fn collate(self) -> Self::Pass; +} + +impl Collate for i32 { + type Pass = (); + fn collate(self) -> Self::Pass { + generic::<{ true }>() + //~^ ERROR: mismatched types + } +} + +fn main() {} diff --git a/tests/ui/typeck/const-in-fn-call-generics.stderr b/tests/ui/typeck/const-in-fn-call-generics.stderr new file mode 100644 index 0000000000000..12dd454188cac --- /dev/null +++ b/tests/ui/typeck/const-in-fn-call-generics.stderr @@ -0,0 +1,9 @@ +error[E0308]: mismatched types + --> $DIR/const-in-fn-call-generics.rs:11:21 + | +LL | generic::<{ true }>() + | ^^^^ expected `u32`, found `bool` + +error: aborting due to 1 previous error + +For more information about this error, try `rustc --explain E0308`. diff --git a/triagebot.toml b/triagebot.toml index 33108f743cb94..a98d5f6a7c215 100644 --- a/triagebot.toml +++ b/triagebot.toml @@ -913,7 +913,7 @@ cc = ["@kobzol"] [assign] warn_non_default_branch = true contributing_url = "https://rustc-dev-guide.rust-lang.org/getting-started.html" -users_on_vacation = ["jyn514", "jhpratt", "oli-obk", "michaelwoerister"] +users_on_vacation = ["jyn514", "jhpratt", "oli-obk"] [assign.adhoc_groups] compiler-team = [