diff --git a/compiler/rustc_ast/src/tokenstream.rs b/compiler/rustc_ast/src/tokenstream.rs index 84d8829c398e6..37de90d64c774 100644 --- a/compiler/rustc_ast/src/tokenstream.rs +++ b/compiler/rustc_ast/src/tokenstream.rs @@ -25,7 +25,7 @@ use rustc_serialize::{Decodable, Decoder, Encodable, Encoder}; use rustc_span::{Span, DUMMY_SP}; use smallvec::{smallvec, SmallVec}; -use std::{fmt, iter, mem}; +use std::{fmt, iter}; /// When the main Rust parser encounters a syntax-extension invocation, it /// parses the arguments to the invocation as a token tree. This is a very @@ -399,45 +399,6 @@ impl TokenStream { self.0.len() } - pub fn from_streams(mut streams: SmallVec<[TokenStream; 2]>) -> TokenStream { - match streams.len() { - 0 => TokenStream::default(), - 1 => streams.pop().unwrap(), - _ => { - // We are going to extend the first stream in `streams` with - // the elements from the subsequent streams. This requires - // using `make_mut()` on the first stream, and in practice this - // doesn't cause cloning 99.9% of the time. - // - // One very common use case is when `streams` has two elements, - // where the first stream has any number of elements within - // (often 1, but sometimes many more) and the second stream has - // a single element within. - - // Determine how much the first stream will be extended. - // Needed to avoid quadratic blow up from on-the-fly - // reallocations (#57735). - let num_appends = streams.iter().skip(1).map(|ts| ts.len()).sum(); - - // Get the first stream. If it's `None`, create an empty - // stream. - let mut iter = streams.drain(..); - let mut first_stream_lrc = iter.next().unwrap().0; - - // Append the elements to the first stream, after reserving - // space for them. - let first_vec_mut = Lrc::make_mut(&mut first_stream_lrc); - first_vec_mut.reserve(num_appends); - for stream in iter { - first_vec_mut.extend(stream.0.iter().cloned()); - } - - // Create the final `TokenStream`. - TokenStream(first_stream_lrc) - } - } - } - pub fn trees(&self) -> CursorRef<'_> { CursorRef::new(self) } @@ -562,50 +523,65 @@ impl TokenStreamBuilder { } pub fn push>(&mut self, stream: T) { - let mut stream = stream.into(); - - // If `self` is not empty and the last tree within the last stream is a - // token tree marked with `Joint`... - if let Some(TokenStream(ref mut last_stream_lrc)) = self.0.last_mut() - && let Some((TokenTree::Token(last_token), Spacing::Joint)) = last_stream_lrc.last() - // ...and `stream` is not empty and the first tree within it is - // a token tree... - && let TokenStream(ref mut stream_lrc) = stream - && let Some((TokenTree::Token(token), spacing)) = stream_lrc.first() - // ...and the two tokens can be glued together... - && let Some(glued_tok) = last_token.glue(&token) - { - // ...then do so, by overwriting the last token - // tree in `self` and removing the first token tree - // from `stream`. This requires using `make_mut()` - // on the last stream in `self` and on `stream`, - // and in practice this doesn't cause cloning 99.9% - // of the time. - - // Overwrite the last token tree with the merged - // token. - let last_vec_mut = Lrc::make_mut(last_stream_lrc); - *last_vec_mut.last_mut().unwrap() = (TokenTree::Token(glued_tok), *spacing); - - // Remove the first token tree from `stream`. (This - // is almost always the only tree in `stream`.) 
- let stream_vec_mut = Lrc::make_mut(stream_lrc); - stream_vec_mut.remove(0); - - // Don't push `stream` if it's empty -- that could - // block subsequent token gluing, by getting - // between two token trees that should be glued - // together. - if !stream.is_empty() { - self.0.push(stream); - } - return; - } - self.0.push(stream); + self.0.push(stream.into()); } pub fn build(self) -> TokenStream { - TokenStream::from_streams(self.0) + let mut streams = self.0; + match streams.len() { + 0 => TokenStream::default(), + 1 => streams.pop().unwrap(), + _ => { + // We will extend the first stream in `streams` with the + // elements from the subsequent streams. This requires using + // `make_mut()` on the first stream, and in practice this + // doesn't cause cloning 99.9% of the time. + // + // One very common use case is when `streams` has two elements, + // where the first stream has any number of elements within + // (often 1, but sometimes many more) and the second stream has + // a single element within. + + // Determine how much the first stream will be extended. + // Needed to avoid quadratic blow up from on-the-fly + // reallocations (#57735). + let num_appends = streams.iter().skip(1).map(|ts| ts.len()).sum(); + + // Get the first stream, which will become the result stream. + // If it's `None`, create an empty stream. + let mut iter = streams.drain(..); + let mut res_stream_lrc = iter.next().unwrap().0; + + // Append the subsequent elements to the result stream, after + // reserving space for them. + let res_vec_mut = Lrc::make_mut(&mut res_stream_lrc); + res_vec_mut.reserve(num_appends); + for stream in iter { + let stream_iter = stream.0.iter().cloned(); + + // If (a) `res_mut_vec` is not empty and the last tree + // within it is a token tree marked with `Joint`, and (b) + // `stream` is not empty and the first tree within it is a + // token tree, and (c) the two tokens can be glued + // together... + if let Some((TokenTree::Token(last_tok), Spacing::Joint)) = res_vec_mut.last() + && let Some((TokenTree::Token(tok), spacing)) = stream.0.first() + && let Some(glued_tok) = last_tok.glue(&tok) + { + // ...then overwrite the last token tree in + // `res_vec_mut` with the glued token, and skip the + // first token tree from `stream`. + *res_vec_mut.last_mut().unwrap() = (TokenTree::Token(glued_tok), *spacing); + res_vec_mut.extend(stream_iter.skip(1)); + } else { + // Append all of `stream`. + res_vec_mut.extend(stream_iter); + } + } + + TokenStream(res_stream_lrc) + } + } } } @@ -679,20 +655,6 @@ impl Cursor { }) } - pub fn index(&self) -> usize { - self.index - } - - pub fn append(&mut self, new_stream: TokenStream) { - if new_stream.is_empty() { - return; - } - let index = self.index; - let stream = mem::take(&mut self.stream); - *self = TokenStream::from_streams(smallvec![stream, new_stream]).into_trees(); - self.index = index; - } - pub fn look_ahead(&self, n: usize) -> Option<&TokenTree> { self.stream.0[self.index..].get(n).map(|(tree, _)| tree) } diff --git a/compiler/rustc_borrowck/src/type_check/mod.rs b/compiler/rustc_borrowck/src/type_check/mod.rs index ae6b8e0ae30f7..355254fe9429b 100644 --- a/compiler/rustc_borrowck/src/type_check/mod.rs +++ b/compiler/rustc_borrowck/src/type_check/mod.rs @@ -357,12 +357,20 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> { .add_element(live_region_vid, location); }); + // HACK(compiler-errors): Constants that are gathered into Body.required_consts + // have their locations erased... 
+ let locations = if location != Location::START { + location.to_locations() + } else { + Locations::All(constant.span) + }; + if let Some(annotation_index) = constant.user_ty { if let Err(terr) = self.cx.relate_type_and_user_type( constant.literal.ty(), ty::Variance::Invariant, &UserTypeProjection { base: annotation_index, projs: vec![] }, - location.to_locations(), + locations, ConstraintCategory::Boring, ) { let annotation = &self.cx.user_type_annotations[annotation_index]; @@ -390,12 +398,9 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> { promoted: &Body<'tcx>, ty, san_ty| { - if let Err(terr) = verifier.cx.eq_types( - ty, - san_ty, - location.to_locations(), - ConstraintCategory::Boring, - ) { + if let Err(terr) = + verifier.cx.eq_types(ty, san_ty, locations, ConstraintCategory::Boring) + { span_mirbug!( verifier, promoted, @@ -416,7 +421,7 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> { } } else { if let Err(terr) = self.cx.fully_perform_op( - location.to_locations(), + locations, ConstraintCategory::Boring, self.cx.param_env.and(type_op::ascribe_user_type::AscribeUserType::new( constant.literal.ty(), @@ -435,7 +440,6 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> { } } else if let Some(static_def_id) = constant.check_static_ptr(tcx) { let unnormalized_ty = tcx.type_of(static_def_id); - let locations = location.to_locations(); let normalized_ty = self.cx.normalize(unnormalized_ty, locations); let literal_ty = constant.literal.ty().builtin_deref(true).unwrap().ty; @@ -454,7 +458,7 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> { self.cx.normalize_and_prove_instantiated_predicates( def_id, instantiated_predicates, - location.to_locations(), + locations, ); } } diff --git a/compiler/rustc_expand/src/proc_macro_server.rs b/compiler/rustc_expand/src/proc_macro_server.rs index 093896c339d29..1e4193a5a16cc 100644 --- a/compiler/rustc_expand/src/proc_macro_server.rs +++ b/compiler/rustc_expand/src/proc_macro_server.rs @@ -329,6 +329,7 @@ impl Ident { sess.symbol_gallery.insert(sym, span); Ident { sym, is_raw, span } } + fn dollar_crate(span: Span) -> Ident { // `$crate` is accepted as an ident only if it comes from the compiler. Ident { sym: kw::DollarCrate, is_raw: false, span } @@ -403,6 +404,7 @@ impl server::TokenStream for Rustc<'_, '_> { fn is_empty(&mut self, stream: &Self::TokenStream) -> bool { stream.is_empty() } + fn from_str(&mut self, src: &str) -> Self::TokenStream { parse_stream_from_source_str( FileName::proc_macro_source_code(src), @@ -411,9 +413,11 @@ impl server::TokenStream for Rustc<'_, '_> { Some(self.call_site), ) } + fn to_string(&mut self, stream: &Self::TokenStream) -> String { pprust::tts_to_string(stream) } + fn expand_expr(&mut self, stream: &Self::TokenStream) -> Result { // Parse the expression from our tokenstream. 
let expr: PResult<'_, _> = try { @@ -464,12 +468,14 @@ impl server::TokenStream for Rustc<'_, '_> { _ => Err(()), } } + fn from_token_tree( &mut self, tree: TokenTree, ) -> Self::TokenStream { tree.to_internal() } + fn concat_trees( &mut self, base: Option, @@ -484,6 +490,7 @@ impl server::TokenStream for Rustc<'_, '_> { } builder.build() } + fn concat_streams( &mut self, base: Option, @@ -498,6 +505,7 @@ impl server::TokenStream for Rustc<'_, '_> { } builder.build() } + fn into_trees( &mut self, stream: Self::TokenStream, @@ -522,10 +530,10 @@ impl server::TokenStream for Rustc<'_, '_> { // FIXME: It needs to be removed, but there are some // compatibility issues (see #73345). if group.flatten { - cursor.append(group.stream); - continue; + tts.append(&mut self.into_trees(group.stream)); + } else { + tts.push(TokenTree::Group(group)); } - tts.push(TokenTree::Group(group)); } Some(tt) => tts.push(tt), None => return tts, @@ -543,21 +551,27 @@ impl server::Group for Rustc<'_, '_> { flatten: false, } } + fn delimiter(&mut self, group: &Self::Group) -> Delimiter { group.delimiter } + fn stream(&mut self, group: &Self::Group) -> Self::TokenStream { group.stream.clone() } + fn span(&mut self, group: &Self::Group) -> Self::Span { group.span.entire() } + fn span_open(&mut self, group: &Self::Group) -> Self::Span { group.span.open } + fn span_close(&mut self, group: &Self::Group) -> Self::Span { group.span.close } + fn set_span(&mut self, group: &mut Self::Group, span: Self::Span) { group.span = DelimSpan::from_single(span); } @@ -567,15 +581,19 @@ impl server::Punct for Rustc<'_, '_> { fn new(&mut self, ch: char, spacing: Spacing) -> Self::Punct { Punct::new(ch, spacing == Spacing::Joint, server::Span::call_site(self)) } + fn as_char(&mut self, punct: Self::Punct) -> char { punct.ch } + fn spacing(&mut self, punct: Self::Punct) -> Spacing { if punct.joint { Spacing::Joint } else { Spacing::Alone } } + fn span(&mut self, punct: Self::Punct) -> Self::Span { punct.span } + fn with_span(&mut self, punct: Self::Punct, span: Self::Span) -> Self::Punct { Punct { span, ..punct } } @@ -585,9 +603,11 @@ impl server::Ident for Rustc<'_, '_> { fn new(&mut self, string: &str, span: Self::Span, is_raw: bool) -> Self::Ident { Ident::new(self.sess(), Symbol::intern(string), is_raw, span) } + fn span(&mut self, ident: Self::Ident) -> Self::Span { ident.span } + fn with_span(&mut self, ident: Self::Ident, span: Self::Span) -> Self::Ident { Ident { span, ..ident } } @@ -639,45 +659,57 @@ impl server::Literal for Rustc<'_, '_> { Ok(Literal { lit, span: self.call_site }) } + fn to_string(&mut self, literal: &Self::Literal) -> String { literal.lit.to_string() } + fn debug_kind(&mut self, literal: &Self::Literal) -> String { format!("{:?}", literal.lit.kind) } + fn symbol(&mut self, literal: &Self::Literal) -> String { literal.lit.symbol.to_string() } + fn suffix(&mut self, literal: &Self::Literal) -> Option { literal.lit.suffix.as_ref().map(Symbol::to_string) } + fn integer(&mut self, n: &str) -> Self::Literal { self.lit(token::Integer, Symbol::intern(n), None) } + fn typed_integer(&mut self, n: &str, kind: &str) -> Self::Literal { self.lit(token::Integer, Symbol::intern(n), Some(Symbol::intern(kind))) } + fn float(&mut self, n: &str) -> Self::Literal { self.lit(token::Float, Symbol::intern(n), None) } + fn f32(&mut self, n: &str) -> Self::Literal { self.lit(token::Float, Symbol::intern(n), Some(sym::f32)) } + fn f64(&mut self, n: &str) -> Self::Literal { self.lit(token::Float, Symbol::intern(n), Some(sym::f64)) } + fn 
string(&mut self, string: &str) -> Self::Literal { let quoted = format!("{:?}", string); assert!(quoted.starts_with('"') && quoted.ends_with('"')); let symbol = "ed[1..quoted.len() - 1]; self.lit(token::Str, Symbol::intern(symbol), None) } + fn character(&mut self, ch: char) -> Self::Literal { let quoted = format!("{:?}", ch); assert!(quoted.starts_with('\'') && quoted.ends_with('\'')); let symbol = "ed[1..quoted.len() - 1]; self.lit(token::Char, Symbol::intern(symbol), None) } + fn byte_string(&mut self, bytes: &[u8]) -> Self::Literal { let string = bytes .iter() @@ -687,12 +719,15 @@ impl server::Literal for Rustc<'_, '_> { .collect::(); self.lit(token::ByteStr, Symbol::intern(&string), None) } + fn span(&mut self, literal: &Self::Literal) -> Self::Span { literal.span } + fn set_span(&mut self, literal: &mut Self::Literal, span: Self::Span) { literal.span = span; } + fn subspan( &mut self, literal: &Self::Literal, @@ -735,6 +770,7 @@ impl server::SourceFile for Rustc<'_, '_> { fn eq(&mut self, file1: &Self::SourceFile, file2: &Self::SourceFile) -> bool { Lrc::ptr_eq(file1, file2) } + fn path(&mut self, file: &Self::SourceFile) -> String { match file.name { FileName::Real(ref name) => name @@ -746,6 +782,7 @@ impl server::SourceFile for Rustc<'_, '_> { _ => file.name.prefer_local().to_string(), } } + fn is_real(&mut self, file: &Self::SourceFile) -> bool { file.is_real_file() } @@ -755,6 +792,7 @@ impl server::MultiSpan for Rustc<'_, '_> { fn new(&mut self) -> Self::MultiSpan { vec![] } + fn push(&mut self, spans: &mut Self::MultiSpan, span: Self::Span) { spans.push(span) } @@ -766,6 +804,7 @@ impl server::Diagnostic for Rustc<'_, '_> { diag.set_span(MultiSpan::from_spans(spans)); diag } + fn sub( &mut self, diag: &mut Self::Diagnostic, @@ -775,6 +814,7 @@ impl server::Diagnostic for Rustc<'_, '_> { ) { diag.sub(level.to_internal(), msg, MultiSpan::from_spans(spans), None); } + fn emit(&mut self, mut diag: Self::Diagnostic) { self.sess().span_diagnostic.emit_diagnostic(&mut diag); } @@ -788,38 +828,49 @@ impl server::Span for Rustc<'_, '_> { format!("{:?} bytes({}..{})", span.ctxt(), span.lo().0, span.hi().0) } } + fn def_site(&mut self) -> Self::Span { self.def_site } + fn call_site(&mut self) -> Self::Span { self.call_site } + fn mixed_site(&mut self) -> Self::Span { self.mixed_site } + fn source_file(&mut self, span: Self::Span) -> Self::SourceFile { self.sess().source_map().lookup_char_pos(span.lo()).file } + fn parent(&mut self, span: Self::Span) -> Option { span.parent_callsite() } + fn source(&mut self, span: Self::Span) -> Self::Span { span.source_callsite() } + fn start(&mut self, span: Self::Span) -> LineColumn { let loc = self.sess().source_map().lookup_char_pos(span.lo()); LineColumn { line: loc.line, column: loc.col.to_usize() } } + fn end(&mut self, span: Self::Span) -> LineColumn { let loc = self.sess().source_map().lookup_char_pos(span.hi()); LineColumn { line: loc.line, column: loc.col.to_usize() } } + fn before(&mut self, span: Self::Span) -> Self::Span { span.shrink_to_lo() } + fn after(&mut self, span: Self::Span) -> Self::Span { span.shrink_to_hi() } + fn join(&mut self, first: Self::Span, second: Self::Span) -> Option { let self_loc = self.sess().source_map().lookup_char_pos(first.lo()); let other_loc = self.sess().source_map().lookup_char_pos(second.lo()); @@ -830,9 +881,11 @@ impl server::Span for Rustc<'_, '_> { Some(first.to(second)) } + fn resolved_at(&mut self, span: Self::Span, at: Self::Span) -> Self::Span { span.with_ctxt(at.ctxt()) } + fn source_text(&mut 
self, span: Self::Span) -> Option { self.sess().source_map().span_to_snippet(span).ok() } @@ -863,6 +916,7 @@ impl server::Span for Rustc<'_, '_> { fn save_span(&mut self, span: Self::Span) -> usize { self.sess().save_proc_macro_span(span) } + fn recover_proc_macro_span(&mut self, id: usize) -> Self::Span { let (resolver, krate, def_site) = (&*self.ecx.resolver, self.krate, self.def_site); *self.rebased_spans.entry(id).or_insert_with(|| { diff --git a/compiler/rustc_expand/src/tokenstream/tests.rs b/compiler/rustc_expand/src/tokenstream/tests.rs index 270532f8edeec..e4a4db204d922 100644 --- a/compiler/rustc_expand/src/tokenstream/tests.rs +++ b/compiler/rustc_expand/src/tokenstream/tests.rs @@ -4,7 +4,6 @@ use rustc_ast::token; use rustc_ast::tokenstream::{Spacing, TokenStream, TokenStreamBuilder, TokenTree}; use rustc_span::create_default_session_globals_then; use rustc_span::{BytePos, Span, Symbol}; -use smallvec::smallvec; fn string_to_ts(string: &str) -> TokenStream { string_to_stream(string.to_owned()) @@ -24,7 +23,10 @@ fn test_concat() { let test_res = string_to_ts("foo::bar::baz"); let test_fst = string_to_ts("foo::bar"); let test_snd = string_to_ts("::baz"); - let eq_res = TokenStream::from_streams(smallvec![test_fst, test_snd]); + let mut builder = TokenStreamBuilder::new(); + builder.push(test_fst); + builder.push(test_snd); + let eq_res = builder.build(); assert_eq!(test_res.trees().count(), 5); assert_eq!(eq_res.trees().count(), 5); assert_eq!(test_res.eq_unspanned(&eq_res), true); diff --git a/compiler/rustc_infer/src/infer/nll_relate/mod.rs b/compiler/rustc_infer/src/infer/nll_relate/mod.rs index ebe156d081d32..846e7f7b92155 100644 --- a/compiler/rustc_infer/src/infer/nll_relate/mod.rs +++ b/compiler/rustc_infer/src/infer/nll_relate/mod.rs @@ -662,7 +662,11 @@ where match b.kind() { ty::ConstKind::Infer(InferConst::Var(_)) if D::forbid_inference_vars() => { // Forbid inference variables in the RHS. - bug!("unexpected inference var {:?}", b) + self.infcx.tcx.sess.delay_span_bug( + self.delegate.span(), + format!("unexpected inference var {:?}", b,), + ); + Ok(a) } // FIXME(invariance): see the related FIXME above. 
_ => self.infcx.super_combine_consts(self, a, b), diff --git a/compiler/rustc_middle/src/ty/fold.rs b/compiler/rustc_middle/src/ty/fold.rs index 99aa182f3a6a6..31b8fa1ce956f 100644 --- a/compiler/rustc_middle/src/ty/fold.rs +++ b/compiler/rustc_middle/src/ty/fold.rs @@ -371,6 +371,13 @@ where Ok(self.fold_const(c)) } + fn try_fold_unevaluated( + &mut self, + c: ty::Unevaluated<'tcx>, + ) -> Result<ty::Unevaluated<'tcx>, Self::Error> { + Ok(self.fold_unevaluated(c)) + } + fn try_fold_predicate( &mut self, p: ty::Predicate<'tcx>, diff --git a/compiler/rustc_trait_selection/src/traits/error_reporting/mod.rs b/compiler/rustc_trait_selection/src/traits/error_reporting/mod.rs index 2f999f5ffad85..af0803fbd5414 100644 --- a/compiler/rustc_trait_selection/src/traits/error_reporting/mod.rs +++ b/compiler/rustc_trait_selection/src/traits/error_reporting/mod.rs @@ -259,7 +259,7 @@ impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> { let cycle = self.resolve_vars_if_possible(cycle.to_owned()); assert!(!cycle.is_empty()); - debug!("report_overflow_error_cycle: cycle={:?}", cycle); + debug!(?cycle, "report_overflow_error_cycle"); // The 'deepest' obligation is most likely to have a useful // cause 'backtrace' @@ -1513,6 +1513,7 @@ impl<'a, 'tcx> InferCtxtPrivExt<'a, 'tcx> for InferCtxt<'a, 'tcx> { } } + #[instrument(level = "debug", skip_all)] fn report_projection_error( &self, obligation: &PredicateObligation<'tcx>, @@ -1551,15 +1552,9 @@ impl<'a, 'tcx> InferCtxtPrivExt<'a, 'tcx> for InferCtxt<'a, 'tcx> { &mut obligations, ); - debug!( - "report_projection_error obligation.cause={:?} obligation.param_env={:?}", - obligation.cause, obligation.param_env - ); + debug!(?obligation.cause, ?obligation.param_env); - debug!( - "report_projection_error normalized_ty={:?} data.ty={:?}", - normalized_ty, data.term, - ); + debug!(?normalized_ty, data.ty = ?data.term); let is_normalized_ty_expected = !matches!( obligation.cause.code().peel_derives(), @@ -2346,6 +2341,7 @@ impl<'a, 'tcx> InferCtxtPrivExt<'a, 'tcx> for InferCtxt<'a, 'tcx> { } } + #[instrument(level = "debug", skip_all)] fn suggest_unsized_bound_if_applicable( &self, err: &mut Diagnostic, @@ -2360,10 +2356,7 @@ impl<'a, 'tcx> InferCtxtPrivExt<'a, 'tcx> for InferCtxt<'a, 'tcx> { ) else { return; }; - debug!( - "suggest_unsized_bound_if_applicable: pred={:?} item_def_id={:?} span={:?}", - pred, item_def_id, span - ); + debug!(?pred, ?item_def_id, ?span); let (Some(node), true) = ( self.tcx.hir().get_if_local(item_def_id), @@ -2374,6 +2367,7 @@ impl<'a, 'tcx> InferCtxtPrivExt<'a, 'tcx> for InferCtxt<'a, 'tcx> { self.maybe_suggest_unsized_generics(err, span, node); } + #[instrument(level = "debug", skip_all)] fn maybe_suggest_unsized_generics<'hir>( &self, err: &mut Diagnostic, @@ -2384,8 +2378,8 @@ impl<'a, 'tcx> InferCtxtPrivExt<'a, 'tcx> for InferCtxt<'a, 'tcx> { return; }; let sized_trait = self.tcx.lang_items().sized_trait(); - debug!("maybe_suggest_unsized_generics: generics.params={:?}", generics.params); - debug!("maybe_suggest_unsized_generics: generics.predicates={:?}", generics.predicates); + debug!(?generics.params); + debug!(?generics.predicates); let Some(param) = generics.params.iter().find(|param| param.span == span) else { return; }; @@ -2399,7 +2393,7 @@ impl<'a, 'tcx> InferCtxtPrivExt<'a, 'tcx> for InferCtxt<'a, 'tcx> { if explicitly_sized { return; } - debug!("maybe_suggest_unsized_generics: param={:?}", param); + debug!(?param); match node { hir::Node::Item( item @ hir::Item { @@ -2517,7 +2511,7 @@ impl<'v> Visitor<'v> for FindTypeParam { if
path.segments.len() == 1 && path.segments[0].ident.name == self.param => { if !self.nested { - debug!("FindTypeParam::visit_ty: ty={:?}", ty); + debug!(?ty, "FindTypeParam::visit_ty"); self.invalid_spans.push(ty.span); } } diff --git a/compiler/rustc_trait_selection/src/traits/error_reporting/suggestions.rs b/compiler/rustc_trait_selection/src/traits/error_reporting/suggestions.rs index 09b73b982a0c1..d8003efba872d 100644 --- a/compiler/rustc_trait_selection/src/traits/error_reporting/suggestions.rs +++ b/compiler/rustc_trait_selection/src/traits/error_reporting/suggestions.rs @@ -1628,16 +1628,12 @@ impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> { /// ``` /// /// Returns `true` if an async-await specific note was added to the diagnostic. + #[instrument(level = "debug", skip_all, fields(?obligation.predicate, ?obligation.cause.span))] fn maybe_note_obligation_cause_for_async_await( &self, err: &mut Diagnostic, obligation: &PredicateObligation<'tcx>, ) -> bool { - debug!( - "maybe_note_obligation_cause_for_async_await: obligation.predicate={:?} \ - obligation.cause.span={:?}", - obligation.predicate, obligation.cause.span - ); let hir = self.tcx.hir(); // Attempt to detect an async-await error by looking at the obligation causes, looking @@ -1677,7 +1673,7 @@ impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> { let mut seen_upvar_tys_infer_tuple = false; while let Some(code) = next_code { - debug!("maybe_note_obligation_cause_for_async_await: code={:?}", code); + debug!(?code); match code { ObligationCauseCode::FunctionArgumentObligation { parent_code, .. } => { next_code = Some(parent_code); @@ -1685,10 +1681,9 @@ impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> { ObligationCauseCode::ImplDerivedObligation(cause) => { let ty = cause.derived.parent_trait_pred.skip_binder().self_ty(); debug!( - "maybe_note_obligation_cause_for_async_await: ImplDerived \ - parent_trait_ref={:?} self_ty.kind={:?}", - cause.derived.parent_trait_pred, - ty.kind() + parent_trait_ref = ?cause.derived.parent_trait_pred, + self_ty.kind = ?ty.kind(), + "ImplDerived", ); match *ty.kind() { @@ -1717,10 +1712,8 @@ impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> { | ObligationCauseCode::BuiltinDerivedObligation(derived_obligation) => { let ty = derived_obligation.parent_trait_pred.skip_binder().self_ty(); debug!( - "maybe_note_obligation_cause_for_async_await: \ - parent_trait_ref={:?} self_ty.kind={:?}", - derived_obligation.parent_trait_pred, - ty.kind() + parent_trait_ref = ?derived_obligation.parent_trait_pred, + self_ty.kind = ?ty.kind(), ); match *ty.kind() { @@ -1750,7 +1743,7 @@ impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> { } // Only continue if a generator was found. 
- debug!(?generator, ?trait_ref, ?target_ty, "maybe_note_obligation_cause_for_async_await"); + debug!(?generator, ?trait_ref, ?target_ty); let (Some(generator_did), Some(trait_ref), Some(target_ty)) = (generator, trait_ref, target_ty) else { return false; }; @@ -1760,12 +1753,10 @@ impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> { let in_progress_typeck_results = self.in_progress_typeck_results.map(|t| t.borrow()); let generator_did_root = self.tcx.typeck_root_def_id(generator_did); debug!( - "maybe_note_obligation_cause_for_async_await: generator_did={:?} \ - generator_did_root={:?} in_progress_typeck_results.hir_owner={:?} span={:?}", - generator_did, - generator_did_root, - in_progress_typeck_results.as_ref().map(|t| t.hir_owner), - span + ?generator_did, + ?generator_did_root, + in_progress_typeck_results.hir_owner = ?in_progress_typeck_results.as_ref().map(|t| t.hir_owner), + ?span, ); let generator_body = generator_did @@ -1788,7 +1779,7 @@ impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> { if let Some(body) = generator_body { visitor.visit_body(body); } - debug!("maybe_note_obligation_cause_for_async_await: awaits = {:?}", visitor.awaits); + debug!(awaits = ?visitor.awaits); // Look for a type inside the generator interior that matches the target type to get // a span. @@ -1809,11 +1800,7 @@ impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> { let ty_erased = self.tcx.erase_late_bound_regions(ty); let ty_erased = self.tcx.erase_regions(ty_erased); let eq = ty_erased == target_ty_erased; - debug!( - "maybe_note_obligation_cause_for_async_await: ty_erased={:?} \ - target_ty_erased={:?} eq={:?}", - ty_erased, target_ty_erased, eq - ); + debug!(?ty_erased, ?target_ty_erased, ?eq); eq }; @@ -1888,6 +1875,7 @@ impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> { /// Unconditionally adds the diagnostic note described in /// `maybe_note_obligation_cause_for_async_await`'s documentation comment. + #[instrument(level = "debug", skip_all)] fn note_obligation_cause_for_async_await( &self, err: &mut Diagnostic, @@ -2037,8 +2025,9 @@ impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> { } else { // Look at the last interior type to get a span for the `.await`. debug!( - "note_obligation_cause_for_async_await generator_interior_types: {:#?}", - typeck_results.as_ref().map(|t| &t.generator_interior_types) + generator_interior_types = ?format_args!( + "{:#?}", typeck_results.as_ref().map(|t| &t.generator_interior_types) + ), ); explain_yield(interior_span, yield_span, scope_span); } @@ -2073,7 +2062,7 @@ impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> { // bar(Foo(std::ptr::null())).await; // ^^^^^^^^^^^^^^^^^^^^^ raw-ptr `*T` created inside this struct ctor. // ``` - debug!("parent_def_kind: {:?}", self.tcx.def_kind(parent_did)); + debug!(parent_def_kind = ?self.tcx.def_kind(parent_did)); let is_raw_borrow_inside_fn_like_call = match self.tcx.def_kind(parent_did) { DefKind::Fn | DefKind::Ctor(..) => target_ty.is_unsafe_ptr(), @@ -2131,7 +2120,7 @@ impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> { // Add a note for the item obligation that remains - normally a note pointing to the // bound that introduced the obligation (e.g. `T: Send`). 
- debug!("note_obligation_cause_for_async_await: next_code={:?}", next_code); + debug!(?next_code); self.note_obligation_cause_code( err, &obligation.predicate, @@ -2688,6 +2677,9 @@ impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> { )); } + #[instrument( + level = "debug", skip(self, err), fields(trait_pred.self_ty = ?trait_pred.self_ty()) + )] fn suggest_await_before_try( &self, err: &mut Diagnostic, @@ -2695,13 +2687,6 @@ impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> { trait_pred: ty::PolyTraitPredicate<'tcx>, span: Span, ) { - debug!( - "suggest_await_before_try: obligation={:?}, span={:?}, trait_pred={:?}, trait_pred_self_ty={:?}", - obligation, - span, - trait_pred, - trait_pred.self_ty() - ); let body_hir_id = obligation.cause.body_id; let item_id = self.tcx.hir().get_parent_node(body_hir_id); @@ -2739,14 +2724,13 @@ impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> { ); debug!( - "suggest_await_before_try: normalized_projection_type {:?}", - self.resolve_vars_if_possible(projection_ty) + normalized_projection_type = ?self.resolve_vars_if_possible(projection_ty) ); let try_obligation = self.mk_trait_obligation_with_new_self_ty( obligation.param_env, trait_pred.map_bound(|trait_pred| (trait_pred, projection_ty.skip_binder())), ); - debug!("suggest_await_before_try: try_trait_obligation {:?}", try_obligation); + debug!(try_trait_obligation = ?try_obligation); if self.predicate_may_hold(&try_obligation) && let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(span) && snippet.ends_with('?') diff --git a/library/core/src/intrinsics.rs b/library/core/src/intrinsics.rs index ba837ea9a7898..2f9afbec91e98 100644 --- a/library/core/src/intrinsics.rs +++ b/library/core/src/intrinsics.rs @@ -1144,7 +1144,7 @@ extern "rust-intrinsic" { /// } /// ``` #[stable(feature = "rust1", since = "1.0.0")] - #[rustc_const_stable(feature = "const_transmute", since = "1.46.0")] + #[rustc_const_stable(feature = "const_transmute", since = "1.56.0")] #[rustc_diagnostic_item = "transmute"] pub fn transmute(e: T) -> U; diff --git a/library/std/src/io/mod.rs b/library/std/src/io/mod.rs index cad1fab7b8f84..f4f2e3f243457 100644 --- a/library/std/src/io/mod.rs +++ b/library/std/src/io/mod.rs @@ -1240,8 +1240,8 @@ impl<'a> IoSlice<'a> { /// use std::io::IoSlice; /// use std::ops::Deref; /// - /// let mut data = [1; 8]; - /// let mut buf = IoSlice::new(&mut data); + /// let data = [1; 8]; + /// let mut buf = IoSlice::new(&data); /// /// // Mark 3 bytes as read. 
/// buf.advance(3); @@ -1435,10 +1435,10 @@ pub trait Write { /// use std::fs::File; /// /// fn main() -> std::io::Result<()> { - /// let mut data1 = [1; 8]; - /// let mut data2 = [15; 8]; - /// let io_slice1 = IoSlice::new(&mut data1); - /// let io_slice2 = IoSlice::new(&mut data2); + /// let data1 = [1; 8]; + /// let data2 = [15; 8]; + /// let io_slice1 = IoSlice::new(&data1); + /// let io_slice2 = IoSlice::new(&data2); /// /// let mut buffer = File::create("foo.txt")?; /// diff --git a/library/std/src/os/unix/net/ancillary.rs b/library/std/src/os/unix/net/ancillary.rs index ecee123a9b628..7cc901a79445b 100644 --- a/library/std/src/os/unix/net/ancillary.rs +++ b/library/std/src/os/unix/net/ancillary.rs @@ -583,8 +583,8 @@ impl<'a> SocketAncillary<'a> { /// let mut ancillary = SocketAncillary::new(&mut ancillary_buffer[..]); /// ancillary.add_fds(&[sock.as_raw_fd()][..]); /// - /// let mut buf = [1; 8]; - /// let mut bufs = &mut [IoSlice::new(&mut buf[..])][..]; + /// let buf = [1; 8]; + /// let mut bufs = &mut [IoSlice::new(&buf[..])][..]; /// sock.send_vectored_with_ancillary(bufs, &mut ancillary)?; /// Ok(()) /// } diff --git a/library/std/src/sys_common/mutex.rs b/library/std/src/sys_common/mutex.rs index 81eefa1133fa5..48479f5bdb3f7 100644 --- a/library/std/src/sys_common/mutex.rs +++ b/library/std/src/sys_common/mutex.rs @@ -46,13 +46,12 @@ impl Drop for StaticMutexGuard { /// An OS-based mutual exclusion lock. /// -/// This mutex does *not* have a const constructor, cleans up its resources in -/// its `Drop` implementation, may safely be moved (when not borrowed), and -/// does not cause UB when used reentrantly. +/// This mutex cleans up its resources in its `Drop` implementation, may safely +/// be moved (when not borrowed), and does not cause UB when used reentrantly. /// /// This mutex does not implement poisoning. /// -/// This is either a wrapper around `Box<imp::Mutex>` or `imp::Mutex`, +/// This is either a wrapper around `LazyBox<imp::Mutex>` or `imp::Mutex`, /// depending on the platform. It is boxed on platforms where `imp::Mutex` may /// not be moved. pub struct MovableMutex(imp::MovableMutex); diff --git a/library/std/src/sys_common/rwlock.rs b/library/std/src/sys_common/rwlock.rs index 265cebfdc3e0a..ba56f3a8f1b52 100644 --- a/library/std/src/sys_common/rwlock.rs +++ b/library/std/src/sys_common/rwlock.rs @@ -62,12 +62,12 @@ impl Drop for StaticRwLockWriteGuard { /// An OS-based reader-writer lock. /// -/// This rwlock does *not* have a const constructor, cleans up its resources in -/// its `Drop` implementation and may safely be moved (when not borrowed). +/// This rwlock cleans up its resources in its `Drop` implementation and may +/// safely be moved (when not borrowed). /// /// This rwlock does not implement poisoning. /// -/// This is either a wrapper around `Box<imp::RwLock>` or `imp::RwLock`, +/// This is either a wrapper around `LazyBox<imp::RwLock>` or `imp::RwLock`, /// depending on the platform. It is boxed on platforms where `imp::RwLock` may /// not be moved. pub struct MovableRwLock(imp::MovableRwLock); diff --git a/library/std/src/thread/mod.rs b/library/std/src/thread/mod.rs index f7af66ae5b51f..0a6a7cfe976cc 100644 --- a/library/std/src/thread/mod.rs +++ b/library/std/src/thread/mod.rs @@ -1487,13 +1487,14 @@ impl<T> JoinHandle<T> { /// Checks if the associated thread has finished running its main function. /// + /// `is_finished` supports implementing a non-blocking join operation, by checking + /// `is_finished`, and calling `join` if it returns `true`. This function does not block.
To + /// block while waiting on the thread to finish, use [`join`][Self::join]. + /// /// This might return `true` for a brief moment after the thread's main /// function has returned, but before the thread itself has stopped running. /// However, once this returns `true`, [`join`][Self::join] can be expected /// to return quickly, without blocking for any significant amount of time. - /// - /// This function does not block. To block while waiting on the thread to finish, - /// use [`join`][Self::join]. #[stable(feature = "thread_is_running", since = "1.61.0")] pub fn is_finished(&self) -> bool { Arc::strong_count(&self.0.packet) == 1 } diff --git a/library/std/src/thread/scoped.rs b/library/std/src/thread/scoped.rs index 4fd076e4a2d4b..a387a09dc8b15 100644 --- a/library/std/src/thread/scoped.rs +++ b/library/std/src/thread/scoped.rs @@ -308,13 +308,14 @@ impl<'scope, T> ScopedJoinHandle<'scope, T> { /// Checks if the associated thread has finished running its main function. /// + /// `is_finished` supports implementing a non-blocking join operation, by checking + /// `is_finished`, and calling `join` if it returns `true`. This function does not block. To + /// block while waiting on the thread to finish, use [`join`][Self::join]. + /// /// This might return `true` for a brief moment after the thread's main /// function has returned, but before the thread itself has stopped running. /// However, once this returns `true`, [`join`][Self::join] can be expected /// to return quickly, without blocking for any significant amount of time. - /// - /// This function does not block. To block while waiting on the thread to finish, - /// use [`join`][Self::join]. #[stable(feature = "scoped_threads", since = "1.63.0")] pub fn is_finished(&self) -> bool { Arc::strong_count(&self.0.packet) == 1 } diff --git a/src/librustdoc/html/layout.rs b/src/librustdoc/html/layout.rs index de54347a0f74e..7d6d4b71e2eaf 100644 --- a/src/librustdoc/html/layout.rs +++ b/src/librustdoc/html/layout.rs @@ -31,8 +31,6 @@ pub(crate) struct Page<'a> { pub(crate) description: &'a str, pub(crate) keywords: &'a str, pub(crate) resource_suffix: &'a str, - pub(crate) extra_scripts: &'a [&'a str], - pub(crate) static_extra_scripts: &'a [&'a str], } impl<'a> Page<'a> { diff --git a/src/librustdoc/html/render/context.rs b/src/librustdoc/html/render/context.rs index 68e2f0cf9c06c..bfdc44c7e4534 100644 --- a/src/librustdoc/html/render/context.rs +++ b/src/librustdoc/html/render/context.rs @@ -211,8 +211,6 @@ impl<'tcx> Context<'tcx> { description: &desc, keywords: &keywords, resource_suffix: &clone_shared.resource_suffix, - extra_scripts: &[], - static_extra_scripts: &[], }; let mut page_buffer = Buffer::html(); print_item(self, it, &mut page_buffer, &page); @@ -568,8 +566,6 @@ impl<'tcx> FormatRenderer<'tcx> for Context<'tcx> { description: "List of all items in this crate", keywords: BASIC_KEYWORDS, resource_suffix: &shared.resource_suffix, - extra_scripts: &[], - static_extra_scripts: &[], }; let sidebar = if shared.cache.crate_version.is_some() { format!("

<h2 class=\"location\">Crate {}</h2>
", crate_name) @@ -693,7 +689,7 @@ impl<'tcx> FormatRenderer<'tcx> for Context<'tcx> { else { unreachable!() }; let items = self.build_sidebar_items(module); let js_dst = self.dst.join(&format!("sidebar-items{}.js", self.shared.resource_suffix)); - let v = format!("initSidebarItems({});", serde_json::to_string(&items).unwrap()); + let v = format!("window.SIDEBAR_ITEMS = {};", serde_json::to_string(&items).unwrap()); self.shared.fs.write(js_dst, v)?; } Ok(()) diff --git a/src/librustdoc/html/render/mod.rs b/src/librustdoc/html/render/mod.rs index cb887d16906a1..3f426ee93e77e 100644 --- a/src/librustdoc/html/render/mod.rs +++ b/src/librustdoc/html/render/mod.rs @@ -56,7 +56,7 @@ use rustc_middle::middle::stability; use rustc_middle::ty; use rustc_middle::ty::TyCtxt; use rustc_span::{ - symbol::{kw, sym, Symbol}, + symbol::{sym, Symbol}, BytePos, FileName, RealFileName, }; use serde::ser::SerializeSeq; @@ -1738,8 +1738,6 @@ pub(crate) fn render_impl_summary( } fn print_sidebar(cx: &Context<'_>, it: &clean::Item, buffer: &mut Buffer) { - let parentlen = cx.current.len() - if it.is_mod() { 1 } else { 0 }; - if it.is_struct() || it.is_trait() || it.is_primitive() @@ -1800,21 +1798,6 @@ fn print_sidebar(cx: &Context<'_>, it: &clean::Item, buffer: &mut Buffer) { write!(buffer, "

<h2 class=\"location\">In {}</h2>
", path); } - // Sidebar refers to the enclosing module, not this module. - let relpath = if it.is_mod() && parentlen != 0 { "./" } else { "" }; - write!( - buffer, - "
<div id=\"sidebar-vars\" data-name=\"{name}\" data-ty=\"{ty}\" data-relpath=\"{path}\">\ - </div>
", - name = it.name.unwrap_or(kw::Empty), - ty = it.type_(), - path = relpath - ); - write!( - buffer, - "", - relpath, cx.shared.resource_suffix - ); // Closes sidebar-elems div. buffer.write_str(""); } diff --git a/src/librustdoc/html/render/write_shared.rs b/src/librustdoc/html/render/write_shared.rs index 9bddee199c7bf..27ad91d09e064 100644 --- a/src/librustdoc/html/render/write_shared.rs +++ b/src/librustdoc/html/render/write_shared.rs @@ -475,8 +475,6 @@ if (typeof exports !== 'undefined') {exports.searchIndex = searchIndex}; description: "List of crates", keywords: BASIC_KEYWORDS, resource_suffix: &shared.resource_suffix, - extra_scripts: &[], - static_extra_scripts: &[], }; let content = format!( diff --git a/src/librustdoc/html/sources.rs b/src/librustdoc/html/sources.rs index 524c90e1f4d64..1971d08e5bef5 100644 --- a/src/librustdoc/html/sources.rs +++ b/src/librustdoc/html/sources.rs @@ -203,8 +203,6 @@ impl SourceCollector<'_, '_> { description: &desc, keywords: BASIC_KEYWORDS, resource_suffix: &shared.resource_suffix, - extra_scripts: &[&format!("source-files{}", shared.resource_suffix)], - static_extra_scripts: &[&format!("source-script{}", shared.resource_suffix)], }; let v = layout::render( &shared.layout, diff --git a/src/librustdoc/html/static/js/main.js b/src/librustdoc/html/static/js/main.js index 414bca850e3dc..b320db9104612 100644 --- a/src/librustdoc/html/static/js/main.js +++ b/src/librustdoc/html/static/js/main.js @@ -66,26 +66,18 @@ function showMain() { (function() { window.rootPath = getVar("root-path"); window.currentCrate = getVar("current-crate"); - window.searchJS = resourcePath("search", ".js"); - window.searchIndexJS = resourcePath("search-index", ".js"); - window.settingsJS = resourcePath("settings", ".js"); - const sidebarVars = document.getElementById("sidebar-vars"); - if (sidebarVars) { - window.sidebarCurrent = { - name: sidebarVars.attributes["data-name"].value, - ty: sidebarVars.attributes["data-ty"].value, - relpath: sidebarVars.attributes["data-relpath"].value, - }; - // FIXME: It would be nicer to generate this text content directly in HTML, - // but with the current code it's hard to get the right information in the right place. - const mobileLocationTitle = document.querySelector(".mobile-topbar h2.location"); - const locationTitle = document.querySelector(".sidebar h2.location"); - if (mobileLocationTitle && locationTitle) { - mobileLocationTitle.innerHTML = locationTitle.innerHTML; - } - } }()); +function setMobileTopbar() { + // FIXME: It would be nicer to generate this text content directly in HTML, + // but with the current code it's hard to get the right information in the right place. + const mobileLocationTitle = document.querySelector(".mobile-topbar h2.location"); + const locationTitle = document.querySelector(".sidebar h2.location"); + if (mobileLocationTitle && locationTitle) { + mobileLocationTitle.innerHTML = locationTitle.innerHTML; + } +} + // Gets the human-readable string for the virtual-key code of the // given KeyboardEvent, ev. // @@ -227,7 +219,7 @@ function loadCss(cssFileName) { // Sending request for the CSS and the JS files at the same time so it will // hopefully be loaded when the JS will generate the settings content. 
loadCss("settings"); - loadScript(window.settingsJS); + loadScript(resourcePath("settings", ".js")); }; window.searchState = { @@ -304,8 +296,8 @@ function loadCss(cssFileName) { function loadSearch() { if (!searchLoaded) { searchLoaded = true; - loadScript(window.searchJS); - loadScript(window.searchIndexJS); + loadScript(resourcePath("search", ".js")); + loadScript(resourcePath("search-index", ".js")); } } @@ -485,40 +477,11 @@ function loadCss(cssFileName) { document.addEventListener("keypress", handleShortcut); document.addEventListener("keydown", handleShortcut); - // delayed sidebar rendering. - window.initSidebarItems = items => { - const sidebar = document.getElementsByClassName("sidebar-elems")[0]; - let others; - const current = window.sidebarCurrent; - - function addSidebarCrates(crates) { - if (!hasClass(document.body, "crate")) { - // We only want to list crates on the crate page. - return; - } - // Draw a convenient sidebar of known crates if we have a listing - const div = document.createElement("div"); - div.className = "block crate"; - div.innerHTML = "

<h3>Crates</h3>
"; - const ul = document.createElement("ul"); - div.appendChild(ul); - - for (const crate of crates) { - let klass = "crate"; - if (window.rootPath !== "./" && crate === window.currentCrate) { - klass += " current"; - } - const link = document.createElement("a"); - link.href = window.rootPath + crate + "/index.html"; - link.className = klass; - link.textContent = crate; - - const li = document.createElement("li"); - li.appendChild(link); - ul.appendChild(li); - } - others.appendChild(div); + function addSidebarItems() { + if (!window.SIDEBAR_ITEMS) { + return; } + const sidebar = document.getElementsByClassName("sidebar-elems")[0]; /** * Append to the sidebar a "block" of links - a heading along with a list (`
    `) of items. @@ -529,7 +492,7 @@ function loadCss(cssFileName) { * "Modules", or "Macros". */ function block(shortty, id, longty) { - const filtered = items[shortty]; + const filtered = window.SIDEBAR_ITEMS[shortty]; if (!filtered) { return; } @@ -546,17 +509,18 @@ function loadCss(cssFileName) { const desc = item[1]; // can be null let klass = shortty; - if (name === current.name && shortty === current.ty) { - klass += " current"; - } let path; if (shortty === "mod") { path = name + "/index.html"; } else { path = shortty + "." + name + ".html"; } + const current_page = document.location.href.split("/").pop(); + if (path === current_page) { + klass += " current"; + } const link = document.createElement("a"); - link.href = current.relpath + path; + link.href = path; link.title = desc; link.className = klass; link.textContent = name; @@ -565,14 +529,10 @@ function loadCss(cssFileName) { ul.appendChild(li); } div.appendChild(ul); - others.appendChild(div); + sidebar.appendChild(div); } if (sidebar) { - others = document.createElement("div"); - others.className = "others"; - sidebar.appendChild(others); - const isModule = hasClass(document.body, "mod"); if (!isModule) { block("primitive", "primitives", "Primitive Types"); @@ -590,12 +550,8 @@ function loadCss(cssFileName) { block("keyword", "keywords", "Keywords"); block("traitalias", "trait-aliases", "Trait Aliases"); } - - // `crates{version}.js` should always be loaded before this script, so we can use - // it safely. - addSidebarCrates(window.ALL_CRATES); } - }; + } window.register_implementors = imp => { const implementors = document.getElementById("implementors-list"); @@ -680,6 +636,39 @@ function loadCss(cssFileName) { window.register_implementors(window.pending_implementors); } + function addSidebarCrates() { + if (!window.ALL_CRATES) { + return; + } + const sidebarElems = document.getElementsByClassName("sidebar-elems")[0]; + if (!sidebarElems) { + return; + } + // Draw a convenient sidebar of known crates if we have a listing + const div = document.createElement("div"); + div.className = "block crate"; + div.innerHTML = "

<h3>Crates</h3>
    "; + const ul = document.createElement("ul"); + div.appendChild(ul); + + for (const crate of window.ALL_CRATES) { + let klass = "crate"; + if (window.rootPath !== "./" && crate === window.currentCrate) { + klass += " current"; + } + const link = document.createElement("a"); + link.href = window.rootPath + crate + "/index.html"; + link.className = klass; + link.textContent = crate; + + const li = document.createElement("li"); + li.appendChild(link); + ul.appendChild(li); + } + sidebarElems.appendChild(div); + } + + function labelForToggleButton(sectionIsCollapsed) { if (sectionIsCollapsed) { // button will expand the section @@ -924,6 +913,9 @@ function loadCss(cssFileName) { buildHelperPopup = () => {}; }; + setMobileTopbar(); + addSidebarItems(); + addSidebarCrates(); onHashChange(null); window.addEventListener("hashchange", onHashChange); searchState.setup(); diff --git a/src/librustdoc/html/static/js/search.js b/src/librustdoc/html/static/js/search.js index c0b274c0a3fd3..cb1609d498340 100644 --- a/src/librustdoc/html/static/js/search.js +++ b/src/librustdoc/html/static/js/search.js @@ -1719,10 +1719,11 @@ function initSearch(rawSearchIndex) { } let crates = ""; - if (window.ALL_CRATES.length > 1) { + const crates_list = Object.keys(rawSearchIndex); + if (crates_list.length > 1) { crates = " in "; diff --git a/src/librustdoc/html/static/js/source-script.js b/src/librustdoc/html/static/js/source-script.js index 14d8a942977d6..10f93a1c058da 100644 --- a/src/librustdoc/html/static/js/source-script.js +++ b/src/librustdoc/html/static/js/source-script.js @@ -9,33 +9,19 @@ (function() { -function getCurrentFilePath() { - const parts = window.location.pathname.split("/"); - const rootPathParts = window.rootPath.split("/"); +const rootPath = document.getElementById("rustdoc-vars").attributes["data-root-path"].value; - for (const rootPathPart of rootPathParts) { - if (rootPathPart === "..") { - parts.pop(); - } - } - let file = window.location.pathname.substring(parts.join("/").length); - if (file.startsWith("/")) { - file = file.substring(1); - } - return file.substring(0, file.length - 5); -} - -function createDirEntry(elem, parent, fullPath, currentFile, hasFoundFile) { +function createDirEntry(elem, parent, fullPath, hasFoundFile) { const name = document.createElement("div"); name.className = "name"; fullPath += elem["name"] + "/"; - name.onclick = () => { - if (hasClass(name, "expand")) { - removeClass(name, "expand"); + name.onclick = ev => { + if (hasClass(ev.target, "expand")) { + removeClass(ev.target, "expand"); } else { - addClass(name, "expand"); + addClass(ev.target, "expand"); } }; name.innerText = elem["name"]; @@ -46,7 +32,7 @@ function createDirEntry(elem, parent, fullPath, currentFile, hasFoundFile) { folders.className = "folders"; if (elem.dirs) { for (const dir of elem.dirs) { - if (createDirEntry(dir, folders, fullPath, currentFile, hasFoundFile)) { + if (createDirEntry(dir, folders, fullPath, hasFoundFile)) { addClass(name, "expand"); hasFoundFile = true; } @@ -60,8 +46,9 @@ function createDirEntry(elem, parent, fullPath, currentFile, hasFoundFile) { for (const file_text of elem.files) { const file = document.createElement("a"); file.innerText = file_text; - file.href = window.rootPath + "src/" + fullPath + file_text + ".html"; - if (!hasFoundFile && currentFile === fullPath + file_text) { + file.href = rootPath + "src/" + fullPath + file_text + ".html"; + const w = window.location.href.split("#")[0]; + if (!hasFoundFile && w === file.href) { file.className = 
"selected"; addClass(name, "expand"); hasFoundFile = true; @@ -72,7 +59,7 @@ function createDirEntry(elem, parent, fullPath, currentFile, hasFoundFile) { children.appendChild(files); parent.appendChild(name); parent.appendChild(children); - return hasFoundFile && currentFile.startsWith(fullPath); + return hasFoundFile; } function toggleSidebar() { @@ -109,9 +96,6 @@ function createSidebarToggle() { // This function is called from "source-files.js", generated in `html/render/mod.rs`. // eslint-disable-next-line no-unused-vars function createSourceSidebar() { - if (!window.rootPath.endsWith("/")) { - window.rootPath += "/"; - } const container = document.querySelector("nav.sidebar"); const sidebarToggle = createSidebarToggle(); @@ -125,7 +109,6 @@ function createSourceSidebar() { container.classList.add("expanded"); } - const currentFile = getCurrentFilePath(); let hasFoundFile = false; const title = document.createElement("div"); @@ -135,7 +118,7 @@ function createSourceSidebar() { Object.keys(sourcesIndex).forEach(key => { sourcesIndex[key].name = key; hasFoundFile = createDirEntry(sourcesIndex[key], sidebar, "", - currentFile, hasFoundFile); + hasFoundFile); }); container.appendChild(sidebar); diff --git a/src/librustdoc/html/templates/page.html b/src/librustdoc/html/templates/page.html index cd672aadd7e93..c4999e2c74fce 100644 --- a/src/librustdoc/html/templates/page.html +++ b/src/librustdoc/html/templates/page.html @@ -34,17 +34,18 @@ {%- endfor -%} > {#- -#} {#- -#} - {#- -#} + {%- if page.css_class.contains("crate") -%} + {#- -#} + {%- else if page.css_class == "source" -%} + {#- -#} + {#- -#} + {%- else -%} + {#- -#} + {%- endif -%} {#- -#} - {%- for script in page.static_extra_scripts -%} - {#- -#} - {% endfor %} {%- if layout.scrape_examples_extension -%} {#- -#} {%- endif -%} - {%- for script in page.extra_scripts -%} - {#- -#} - {% endfor %}