diff --git a/compiler/rustc_lexer/src/lib.rs b/compiler/rustc_lexer/src/lib.rs index c71e6ffe34d9b..51515976e4ee9 100644 --- a/compiler/rustc_lexer/src/lib.rs +++ b/compiler/rustc_lexer/src/lib.rs @@ -57,29 +57,42 @@ pub enum TokenKind { // Multi-char tokens: /// "// comment" LineComment { doc_style: Option }, + /// `/* block comment */` /// - /// Block comments can be recursive, so the sequence like `/* /* */` + /// Block comments can be recursive, so a sequence like `/* /* */` /// will not be considered terminated and will result in a parsing error. BlockComment { doc_style: Option, terminated: bool }, - /// Any whitespace characters sequence. + + /// Any whitespace character sequence. Whitespace, + /// "ident" or "continue" - /// At this step keywords are also considered identifiers. + /// + /// At this step, keywords are also considered identifiers. Ident, + /// Like the above, but containing invalid unicode codepoints. InvalidIdent, + /// "r#ident" RawIdent, - /// An unknown prefix like `foo#`, `foo'`, `foo"`. Note that only the + + /// An unknown prefix, like `foo#`, `foo'`, `foo"`. + /// + /// Note that only the /// prefix (`foo`) is included in the token, not the separator (which is /// lexed as its own distinct token). In Rust 2021 and later, reserved /// prefixes are reported as errors; in earlier editions, they result in a /// (allowed by default) lint, and are treated as regular identifier /// tokens. UnknownPrefix, - /// "12_u8", "1.0e-40", "b"123"". See `LiteralKind` for more details. + + /// Examples: `"12_u8"`, `"1.0e-40"`, `b"123`. + /// + /// See [LiteralKind] for more details. Literal { kind: LiteralKind, suffix_start: u32 }, + /// "'a" Lifetime { starts_with_number: bool }, diff --git a/compiler/rustc_llvm/llvm-wrapper/PassWrapper.cpp b/compiler/rustc_llvm/llvm-wrapper/PassWrapper.cpp index 879a3b660b4be..18d37d95a8357 100644 --- a/compiler/rustc_llvm/llvm-wrapper/PassWrapper.cpp +++ b/compiler/rustc_llvm/llvm-wrapper/PassWrapper.cpp @@ -69,7 +69,9 @@ extern "C" void LLVMInitializePasses() { initializeAnalysis(Registry); initializeTransformUtils(Registry); initializeInstCombine(Registry); +#if LLVM_VERSION_LT(16, 0) initializeInstrumentation(Registry); +#endif initializeTarget(Registry); } diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs index 9c229665c7e91..006d813e5f9fa 100644 --- a/library/alloc/src/rc.rs +++ b/library/alloc/src/rc.rs @@ -1110,8 +1110,8 @@ impl Rc { #[inline] #[stable(feature = "ptr_eq", since = "1.17.0")] - /// Returns `true` if the two `Rc`s point to the same allocation - /// (in a vein similar to [`ptr::eq`]). + /// Returns `true` if the two `Rc`s point to the same allocation in a vein similar to + /// [`ptr::eq`]. See [that function][`ptr::eq`] for caveats when comparing `dyn Trait` pointers. /// /// # Examples /// @@ -2419,9 +2419,9 @@ impl Weak { } } - /// Returns `true` if the two `Weak`s point to the same allocation (similar to - /// [`ptr::eq`]), or if both don't point to any allocation - /// (because they were created with `Weak::new()`). + /// Returns `true` if the two `Weak`s point to the same allocation similar to [`ptr::eq`], or if + /// both don't point to any allocation (because they were created with `Weak::new()`). See [that + /// function][`ptr::eq`] for caveats when comparing `dyn Trait` pointers. 
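The `Rc::ptr_eq` and `Weak::ptr_eq` doc changes above, like the matching `Arc` and `ptr::eq` changes below, all defer to `ptr::eq`'s caveats about comparing `dyn Trait` pointers. A small self-contained sketch of the behaviour being described — `Draw` and `Shape` are made-up names, and the cast to a thin pointer is only one way to compare data addresses while ignoring vtable metadata:

```rust
use std::rc::Rc;

trait Draw {}
struct Shape;
impl Draw for Shape {}

fn main() {
    // `Rc::ptr_eq` compares allocation addresses, not the pointed-to values.
    let a = Rc::new(5);
    let b = Rc::clone(&a);
    let c = Rc::new(5);
    assert!(Rc::ptr_eq(&a, &b)); // same allocation
    assert!(!Rc::ptr_eq(&a, &c)); // equal values, different allocations

    // For trait objects, the vtable half of the wide pointer may or may not be
    // deduplicated across codegen units, so `ptr::eq` on `*const dyn Trait` is
    // unreliable. Casting to a thin pointer compares only the data address.
    let shape = Shape;
    let x: &dyn Draw = &shape;
    let y: &dyn Draw = &shape;
    assert!(std::ptr::eq(
        x as *const dyn Draw as *const (),
        y as *const dyn Draw as *const (),
    ));
}
```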
/// /// # Notes /// diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs index e8d9de4fb3ce5..81cd770748854 100644 --- a/library/alloc/src/sync.rs +++ b/library/alloc/src/sync.rs @@ -1117,8 +1117,8 @@ impl Arc { drop(Weak { ptr: self.ptr }); } - /// Returns `true` if the two `Arc`s point to the same allocation - /// (in a vein similar to [`ptr::eq`]). + /// Returns `true` if the two `Arc`s point to the same allocation in a vein similar to + /// [`ptr::eq`]. See [that function][`ptr::eq`] for caveats when comparing `dyn Trait` pointers. /// /// # Examples /// @@ -2069,9 +2069,9 @@ impl Weak { } } - /// Returns `true` if the two `Weak`s point to the same allocation (similar to - /// [`ptr::eq`]), or if both don't point to any allocation - /// (because they were created with `Weak::new()`). + /// Returns `true` if the two `Weak`s point to the same allocation similar to [`ptr::eq`], or if + /// both don't point to any allocation (because they were created with `Weak::new()`). See [that + /// function][`ptr::eq`] for caveats when comparing `dyn Trait` pointers. /// /// # Notes /// diff --git a/library/core/src/ptr/const_ptr.rs b/library/core/src/ptr/const_ptr.rs index 67e59460d74b0..ed16c5f051f7b 100644 --- a/library/core/src/ptr/const_ptr.rs +++ b/library/core/src/ptr/const_ptr.rs @@ -802,7 +802,7 @@ impl *const T { /// Returns whether two pointers are guaranteed to be inequal. /// - /// At runtime this function behaves like `Some(self == other)`. + /// At runtime this function behaves like `Some(self != other)`. /// However, in some contexts (e.g., compile-time evaluation), /// it is not always possible to determine inequality of two pointers, so this function may /// spuriously return `None` for pointers that later actually turn out to have its inequality known. diff --git a/library/core/src/ptr/mod.rs b/library/core/src/ptr/mod.rs index cfffe351a87d9..3a70981d2e138 100644 --- a/library/core/src/ptr/mod.rs +++ b/library/core/src/ptr/mod.rs @@ -1733,6 +1733,12 @@ pub(crate) unsafe fn align_offset(p: *const T, a: usize) -> usize { /// by their address rather than comparing the values they point to /// (which is what the `PartialEq for &T` implementation does). /// +/// When comparing wide pointers, both the address and the metadata are tested for equality. +/// However, note that comparing trait object pointers (`*const dyn Trait`) is unreliable: pointers +/// to values of the same underlying type can compare inequal (because vtables are duplicated in +/// multiple codegen units), and pointers to values of *different* underlying type can compare equal +/// (since identical vtables can be deduplicated within a codegen unit). +/// /// # Examples /// /// ``` @@ -1759,41 +1765,6 @@ pub(crate) unsafe fn align_offset(p: *const T, a: usize) -> usize { /// assert!(!std::ptr::eq(&a[..2], &a[..3])); /// assert!(!std::ptr::eq(&a[0..2], &a[1..3])); /// ``` -/// -/// Traits are also compared by their implementation: -/// -/// ``` -/// #[repr(transparent)] -/// struct Wrapper { member: i32 } -/// -/// trait Trait {} -/// impl Trait for Wrapper {} -/// impl Trait for i32 {} -/// -/// let wrapper = Wrapper { member: 10 }; -/// -/// // Pointers have equal addresses. -/// assert!(std::ptr::eq( -/// &wrapper as *const Wrapper as *const u8, -/// &wrapper.member as *const i32 as *const u8 -/// )); -/// -/// // Objects have equal addresses, but `Trait` has different implementations.
-/// assert!(!std::ptr::eq( -/// &wrapper as &dyn Trait, -/// &wrapper.member as &dyn Trait, -/// )); -/// assert!(!std::ptr::eq( -/// &wrapper as &dyn Trait as *const dyn Trait, -/// &wrapper.member as &dyn Trait as *const dyn Trait, -/// )); -/// -/// // Converting the reference to a `*const u8` compares by address. -/// assert!(std::ptr::eq( -/// &wrapper as &dyn Trait as *const dyn Trait as *const u8, -/// &wrapper.member as &dyn Trait as *const dyn Trait as *const u8, -/// )); -/// ``` #[stable(feature = "ptr_eq", since = "1.17.0")] #[inline] pub fn eq(a: *const T, b: *const T) -> bool { diff --git a/library/core/src/ptr/mut_ptr.rs b/library/core/src/ptr/mut_ptr.rs index 0bb2566fd4c98..6764002bcd434 100644 --- a/library/core/src/ptr/mut_ptr.rs +++ b/library/core/src/ptr/mut_ptr.rs @@ -730,7 +730,7 @@ impl *mut T { /// Returns whether two pointers are guaranteed to be inequal. /// - /// At runtime this function behaves like `Some(self == other)`. + /// At runtime this function behaves like `Some(self != other)`. /// However, in some contexts (e.g., compile-time evaluation), /// it is not always possible to determine inequality of two pointers, so this function may /// spuriously return `None` for pointers that later actually turn out to have its inequality known. diff --git a/library/std/src/sync/mpsc/stream.rs b/library/std/src/sync/mpsc/stream.rs index 4c3812c79f619..4592e9141600e 100644 --- a/library/std/src/sync/mpsc/stream.rs +++ b/library/std/src/sync/mpsc/stream.rs @@ -114,7 +114,7 @@ impl Packet { match self.queue.producer_addition().cnt.fetch_add(1, Ordering::SeqCst) { // As described in the mod's doc comment, -1 == wakeup -1 => UpWoke(self.take_to_wake()), - // As as described before, SPSC queues must be >= -2 + // As described before, SPSC queues must be >= -2 -2 => UpSuccess, // Be sure to preserve the disconnected state, and the return value diff --git a/library/std/src/sys/unix/thread.rs b/library/std/src/sys/unix/thread.rs index 69cd2b500a1f0..c1d30dd9d521b 100644 --- a/library/std/src/sys/unix/thread.rs +++ b/library/std/src/sys/unix/thread.rs @@ -137,7 +137,9 @@ impl Thread { unsafe { // Available since glibc 2.12, musl 1.1.16, and uClibc 1.0.20. let name = truncate_cstr(name, TASK_COMM_LEN); - libc::pthread_setname_np(libc::pthread_self(), name.as_ptr()); + let res = libc::pthread_setname_np(libc::pthread_self(), name.as_ptr()); + // We have no good way of propagating errors here, but in debug-builds let's check that this actually worked. + debug_assert_eq!(res, 0); } } @@ -152,7 +154,9 @@ impl Thread { pub fn set_name(name: &CStr) { unsafe { let name = truncate_cstr(name, libc::MAXTHREADNAMESIZE); - libc::pthread_setname_np(name.as_ptr()); + let res = libc::pthread_setname_np(name.as_ptr()); + // We have no good way of propagating errors here, but in debug-builds let's check that this actually worked. 
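The `debug_assert_eq!(res, 0)` introduced by the comment above continues immediately after this aside. For reference, the same check-only-in-debug-builds pattern as a standalone sketch, assuming a `libc` dependency and the Linux two-argument signature of `pthread_setname_np` (the real std code also truncates the name to `TASK_COMM_LEN` first):

```rust
use std::ffi::CStr;

fn set_current_thread_name(name: &CStr) {
    unsafe {
        // Returns 0 on success, an errno value on failure.
        let res = libc::pthread_setname_np(libc::pthread_self(), name.as_ptr());
        // There is no good way to report the failure to a caller here, so only
        // assert on the return code in debug builds.
        debug_assert_eq!(res, 0);
    }
}

fn main() {
    let name = CStr::from_bytes_with_nul(b"worker\0").unwrap();
    set_current_thread_name(name);
}
```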
+ debug_assert_eq!(res, 0); } } @@ -160,11 +164,12 @@ impl Thread { pub fn set_name(name: &CStr) { unsafe { let cname = CStr::from_bytes_with_nul_unchecked(b"%s\0".as_slice()); - libc::pthread_setname_np( + let res = libc::pthread_setname_np( libc::pthread_self(), cname.as_ptr(), name.as_ptr() as *mut libc::c_void, ); + debug_assert_eq!(res, 0); } } @@ -177,9 +182,8 @@ impl Thread { } if let Some(f) = pthread_setname_np.get() { - unsafe { - f(libc::pthread_self(), name.as_ptr()); - } + let res = unsafe { f(libc::pthread_self(), name.as_ptr()) }; + debug_assert_eq!(res, 0); } } diff --git a/src/librustdoc/html/render/mod.rs b/src/librustdoc/html/render/mod.rs index eeec6f8fee778..96c57c8c85d40 100644 --- a/src/librustdoc/html/render/mod.rs +++ b/src/librustdoc/html/render/mod.rs @@ -1276,6 +1276,15 @@ fn notable_traits_decl(decl: &clean::FnDecl, cx: &Context<'_>) -> String { if let Some((did, ty)) = decl.output.as_return().and_then(|t| Some((t.def_id(cx.cache())?, t))) { + // Box has pass-through impls for Read, Write, Iterator, and Future when the + // boxed type implements one of those. We don't want to treat every Box return + // as being notably an Iterator (etc), though, so we exempt it. Pin has the same + // issue, with a pass-through impl for Future. + if Some(did) == cx.tcx().lang_items().owned_box() + || Some(did) == cx.tcx().lang_items().pin_type() + { + return "".to_string(); + } if let Some(impls) = cx.cache().impls.get(&did) { for i in impls { let impl_ = i.inner_impl(); diff --git a/src/librustdoc/html/static/css/rustdoc.css b/src/librustdoc/html/static/css/rustdoc.css index 8424b2c4e2932..b60c77195639f 100644 --- a/src/librustdoc/html/static/css/rustdoc.css +++ b/src/librustdoc/html/static/css/rustdoc.css @@ -549,47 +549,38 @@ ul.block, .block li { margin-bottom: 0px; } -pre.example-line-numbers { - overflow: initial; - border: 1px solid; - padding: 13px 8px; - text-align: right; - border-top-left-radius: 5px; - border-bottom-left-radius: 5px; -} - -.src-line-numbers { - text-align: right; -} -.rustdoc:not(.source) .example-wrap > pre:not(.example-line-numbers) { - width: 100%; - overflow-x: auto; -} - -.rustdoc:not(.source) .example-wrap > pre.src-line-numbers { - width: auto; - overflow-x: visible; -} - .rustdoc .example-wrap > pre { margin: 0; + flex-grow: 1; + overflow-x: auto; } -.search-loading { - text-align: center; -} - -.content > .example-wrap pre.src-line-numbers { - position: relative; +.rustdoc .example-wrap > pre.example-line-numbers, +.rustdoc .example-wrap > pre.src-line-numbers { + flex-grow: 0; + overflow: initial; + text-align: right; -webkit-user-select: none; -moz-user-select: none; -ms-user-select: none; user-select: none; } + +.example-line-numbers { + border: 1px solid; + padding: 13px 8px; + border-top-left-radius: 5px; + border-bottom-left-radius: 5px; +} + .src-line-numbers span { cursor: pointer; } +.search-loading { + text-align: center; +} + .docblock-short { overflow-wrap: break-word; overflow-wrap: anywhere; @@ -2033,10 +2024,6 @@ in storage.js padding-bottom: 0; } -.scraped-example:not(.expanded) .code-wrapper pre.src-line-numbers { - overflow-x: hidden; -} - .scraped-example .code-wrapper .prev { position: absolute; top: 0.25em; diff --git a/src/test/rustdoc/doc-notable_trait_box_is_not_an_iterator.rs b/src/test/rustdoc/doc-notable_trait_box_is_not_an_iterator.rs new file mode 100644 index 0000000000000..3fb00c7db8411 --- /dev/null +++ b/src/test/rustdoc/doc-notable_trait_box_is_not_an_iterator.rs @@ -0,0 +1,38 @@ 
+#![feature(doc_notable_trait)] +#![feature(lang_items)] +#![feature(no_core)] +#![no_core] +#[lang = "owned_box"] +pub struct Box; + +impl Box { + pub fn new(x: T) -> Box { + Box + } +} + +#[doc(notable_trait)] +pub trait FakeIterator {} + +impl FakeIterator for Box {} + +#[lang = "pin"] +pub struct Pin; + +impl Pin { + pub fn new(x: T) -> Pin { + Pin + } +} + +impl FakeIterator for Pin {} + +// @!has doc_notable_trait_box_is_not_an_iterator/fn.foo.html '//*' 'Notable' +pub fn foo(x: T) -> Box { + Box::new(x) +} + +// @!has doc_notable_trait_box_is_not_an_iterator/fn.bar.html '//*' 'Notable' +pub fn bar(x: T) -> Pin { + Pin::new(x) +} diff --git a/src/tools/cargo b/src/tools/cargo index 071eeaf210708..9210810d1fd7b 160000 --- a/src/tools/cargo +++ b/src/tools/cargo @@ -1 +1 @@ -Subproject commit 071eeaf210708219a5a1b2c4728ca2f97df7f2ae +Subproject commit 9210810d1fd7b51ae0439a0a363cc50e36963455 diff --git a/src/tools/rust-analyzer/.github/workflows/publish.yml b/src/tools/rust-analyzer/.github/workflows/publish.yml index a4497f49e3c2f..73e62ab32c6c5 100644 --- a/src/tools/rust-analyzer/.github/workflows/publish.yml +++ b/src/tools/rust-analyzer/.github/workflows/publish.yml @@ -2,8 +2,8 @@ name: publish on: workflow_dispatch: # We can add version input when 1.0 is released and scheduled releases are removed -# schedule: -# - cron: "0 0 * * *" # midnight UTC + # schedule: + # - cron: "0 0 * * *" # midnight UTC push: branches: @@ -50,5 +50,7 @@ jobs: cargo workspaces rename --from test-utils test_utils cargo workspaces rename --from text-edit text_edit cargo workspaces rename ra_ap_%n + # Remove library crates from the workspaces so we don't auto-publish them as well + sed -i 's/ "lib\/\*",//' ./Cargo.toml find crates/rust-analyzer -type f -name '*.rs' -exec sed -i 's/rust_analyzer/ra_ap_rust_analyzer/g' {} + cargo workspaces publish --yes --force '*' --exact --no-git-commit --allow-dirty --skip-published custom 0.0.$PATCH diff --git a/src/tools/rust-analyzer/crates/flycheck/src/lib.rs b/src/tools/rust-analyzer/crates/flycheck/src/lib.rs index e8c63d410aa75..73c3a48b4c5a3 100644 --- a/src/tools/rust-analyzer/crates/flycheck/src/lib.rs +++ b/src/tools/rust-analyzer/crates/flycheck/src/lib.rs @@ -21,6 +21,20 @@ pub use cargo_metadata::diagnostic::{ DiagnosticSpanMacroExpansion, }; +#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)] +pub enum InvocationStrategy { + Once, + #[default] + PerWorkspace, +} + +#[derive(Clone, Debug, Default, PartialEq, Eq)] +pub enum InvocationLocation { + Root(AbsPathBuf), + #[default] + Workspace, +} + #[derive(Clone, Debug, PartialEq, Eq)] pub enum FlycheckConfig { CargoCommand { @@ -37,6 +51,8 @@ pub enum FlycheckConfig { command: String, args: Vec, extra_env: FxHashMap, + invocation_strategy: InvocationStrategy, + invocation_location: InvocationLocation, }, } @@ -136,11 +152,15 @@ enum Restart { No, } +/// A [`FlycheckActor`] is a single check instance of a workspace. struct FlycheckActor { + /// The workspace id of this flycheck instance. id: usize, sender: Box, config: FlycheckConfig, - workspace_root: AbsPathBuf, + /// Either the workspace root of the workspace we are flychecking, + /// or the project root of the project. + root: AbsPathBuf, /// CargoHandle exists to wrap around the communication needed to be able to /// run `cargo check` without blocking. 
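The `InvocationStrategy` and `InvocationLocation` fields added to `FlycheckConfig::CustomCommand` above feed the working-directory selection in `check_command` further down. A simplified stand-in of that selection logic — the enums here are local mock-ups rather than the rust-analyzer types, and `my-custom-check` is a hypothetical command name:

```rust
use std::path::{Path, PathBuf};
use std::process::Command;

// Local stand-ins for the flycheck configuration enums.
enum InvocationStrategy { Once, PerWorkspace }
enum InvocationLocation { Root(PathBuf), Workspace }

fn custom_check_command(
    workspace_root: &Path,
    strategy: &InvocationStrategy,
    location: &InvocationLocation,
) -> Command {
    let mut cmd = Command::new("my-custom-check"); // hypothetical checker
    match location {
        // Always run from a fixed, user-configured root directory.
        InvocationLocation::Root(root) => { cmd.current_dir(root); }
        // Run from the workspace root of this flycheck instance; with `Once` a
        // single invocation covers everything, with `PerWorkspace` one command
        // is spawned per workspace (as in the patch, both currently end up
        // using the same directory here).
        InvocationLocation::Workspace => match strategy {
            InvocationStrategy::Once | InvocationStrategy::PerWorkspace => {
                cmd.current_dir(workspace_root);
            }
        },
    }
    cmd
}

fn main() {
    let cmd = custom_check_command(
        Path::new("/tmp/project"),
        &InvocationStrategy::PerWorkspace,
        &InvocationLocation::Workspace,
    );
    println!("{cmd:?}");
}
```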
Currently the Rust standard library /// doesn't provide a way to read sub-process output without blocking, so we @@ -162,11 +182,13 @@ impl FlycheckActor { workspace_root: AbsPathBuf, ) -> FlycheckActor { tracing::info!(%id, ?workspace_root, "Spawning flycheck"); - FlycheckActor { id, sender, config, workspace_root, cargo_handle: None } + FlycheckActor { id, sender, config, root: workspace_root, cargo_handle: None } } - fn progress(&self, progress: Progress) { + + fn report_progress(&self, progress: Progress) { self.send(Message::Progress { id: self.id, progress }); } + fn next_event(&self, inbox: &Receiver) -> Option { let check_chan = self.cargo_handle.as_ref().map(|cargo| &cargo.receiver); if let Ok(msg) = inbox.try_recv() { @@ -178,6 +200,7 @@ impl FlycheckActor { recv(check_chan.unwrap_or(&never())) -> msg => Some(Event::CheckEvent(msg.ok())), } } + fn run(mut self, inbox: Receiver) { 'event: while let Some(event) = self.next_event(&inbox) { match event { @@ -203,10 +226,10 @@ impl FlycheckActor { "did restart flycheck" ); self.cargo_handle = Some(cargo_handle); - self.progress(Progress::DidStart); + self.report_progress(Progress::DidStart); } Err(error) => { - self.progress(Progress::DidFailToRestart(format!( + self.report_progress(Progress::DidFailToRestart(format!( "Failed to run the following command: {:?} error={}", self.check_command(), error @@ -226,17 +249,17 @@ impl FlycheckActor { self.check_command() ); } - self.progress(Progress::DidFinish(res)); + self.report_progress(Progress::DidFinish(res)); } Event::CheckEvent(Some(message)) => match message { CargoMessage::CompilerArtifact(msg) => { - self.progress(Progress::DidCheckCrate(msg.target.name)); + self.report_progress(Progress::DidCheckCrate(msg.target.name)); } CargoMessage::Diagnostic(msg) => { self.send(Message::AddDiagnostic { id: self.id, - workspace_root: self.workspace_root.clone(), + workspace_root: self.root.clone(), diagnostic: msg, }); } @@ -254,12 +277,12 @@ impl FlycheckActor { "did cancel flycheck" ); cargo_handle.cancel(); - self.progress(Progress::DidCancel); + self.report_progress(Progress::DidCancel); } } fn check_command(&self) -> Command { - let mut cmd = match &self.config { + let (mut cmd, args) = match &self.config { FlycheckConfig::CargoCommand { command, target_triple, @@ -272,9 +295,7 @@ impl FlycheckActor { } => { let mut cmd = Command::new(toolchain::cargo()); cmd.arg(command); - cmd.current_dir(&self.workspace_root); - cmd.args(&["--workspace", "--message-format=json", "--manifest-path"]) - .arg(self.workspace_root.join("Cargo.toml").as_os_str()); + cmd.args(&["--workspace", "--message-format=json"]); if let Some(target) = target_triple { cmd.args(&["--target", target.as_str()]); @@ -293,18 +314,41 @@ impl FlycheckActor { cmd.arg(features.join(" ")); } } - cmd.args(extra_args); cmd.envs(extra_env); - cmd + (cmd, extra_args) } - FlycheckConfig::CustomCommand { command, args, extra_env } => { + FlycheckConfig::CustomCommand { + command, + args, + extra_env, + invocation_strategy, + invocation_location, + } => { let mut cmd = Command::new(command); - cmd.args(args); cmd.envs(extra_env); - cmd + + match invocation_location { + InvocationLocation::Workspace => { + match invocation_strategy { + InvocationStrategy::Once => { + cmd.current_dir(&self.root); + } + InvocationStrategy::PerWorkspace => { + // FIXME: cmd.current_dir(&affected_workspace); + cmd.current_dir(&self.root); + } + } + } + InvocationLocation::Root(root) => { + cmd.current_dir(root); + } + } + + (cmd, args) } }; - 
cmd.current_dir(&self.workspace_root); + + cmd.args(args); cmd } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs index 6819e9114a024..fafcde25ae708 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs @@ -12,11 +12,11 @@ fn test_copy_expand_simple() { #[derive(Copy)] struct Foo; "#, - expect![[r##" + expect![[r#" #[derive(Copy)] struct Foo; -impl < > core::marker::Copy for Foo< > {}"##]], +impl < > core::marker::Copy for Foo< > {}"#]], ); } @@ -33,7 +33,7 @@ macro Copy {} #[derive(Copy)] struct Foo; "#, - expect![[r##" + expect![[r#" #[rustc_builtin_macro] macro derive {} #[rustc_builtin_macro] @@ -41,7 +41,7 @@ macro Copy {} #[derive(Copy)] struct Foo; -impl < > crate ::marker::Copy for Foo< > {}"##]], +impl < > crate ::marker::Copy for Foo< > {}"#]], ); } @@ -53,11 +53,11 @@ fn test_copy_expand_with_type_params() { #[derive(Copy)] struct Foo; "#, - expect![[r##" + expect![[r#" #[derive(Copy)] struct Foo; -impl core::marker::Copy for Foo {}"##]], +impl core::marker::Copy for Foo {}"#]], ); } @@ -70,11 +70,11 @@ fn test_copy_expand_with_lifetimes() { #[derive(Copy)] struct Foo; "#, - expect![[r##" + expect![[r#" #[derive(Copy)] struct Foo; -impl core::marker::Copy for Foo {}"##]], +impl core::marker::Copy for Foo {}"#]], ); } @@ -86,10 +86,26 @@ fn test_clone_expand() { #[derive(Clone)] struct Foo; "#, - expect![[r##" + expect![[r#" #[derive(Clone)] struct Foo; -impl core::clone::Clone for Foo {}"##]], +impl core::clone::Clone for Foo {}"#]], + ); +} + +#[test] +fn test_clone_expand_with_const_generics() { + check( + r#" +//- minicore: derive, clone +#[derive(Clone)] +struct Foo(u32); +"#, + expect![[r#" +#[derive(Clone)] +struct Foo(u32); + +impl core::clone::Clone for Foo {}"#]], ); } diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs index 79989bc2e38b6..8966047c9b259 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs @@ -60,7 +60,8 @@ pub fn find_builtin_derive(ident: &name::Name) -> Option struct BasicAdtInfo { name: tt::Ident, - type_or_const_params: usize, + /// `Some(ty)` if it's a const param of type `ty`, `None` if it's a type param. 
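(The `param_types` field that the doc comment above describes continues right after this aside.) In plain Rust terms, the reason the derive expansion now has to record the const parameter's type is visible in what a hand-written `Clone` impl for a const-generic struct looks like. This is an illustration only, not rust-analyzer's generated output, which is a minimal empty impl used purely for type inference:

```rust
struct Foo<const N: usize, T>([T; N]);

// The const param is re-declared with its type, the type params get the trait
// bound, and both are passed through to the self type in declaration order.
impl<const N: usize, T: Clone> Clone for Foo<N, T> {
    fn clone(&self) -> Self {
        Foo(self.0.clone())
    }
}

fn main() {
    let a = Foo::<3, u8>([1, 2, 3]);
    let _b = a.clone();
}
```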
+ param_types: Vec>, } fn parse_adt(tt: &tt::Subtree) -> Result { @@ -92,50 +93,22 @@ fn parse_adt(tt: &tt::Subtree) -> Result { let name_token_id = token_map.token_by_range(name.syntax().text_range()).unwrap_or_else(TokenId::unspecified); let name_token = tt::Ident { id: name_token_id, text: name.text().into() }; - let type_or_const_params = - params.map_or(0, |type_param_list| type_param_list.type_or_const_params().count()); - Ok(BasicAdtInfo { name: name_token, type_or_const_params }) -} - -fn make_type_args(n: usize, bound: Vec) -> Vec { - let mut result = Vec::::with_capacity(n * 2); - result.push( - tt::Leaf::Punct(tt::Punct { - char: '<', - spacing: tt::Spacing::Alone, - id: tt::TokenId::unspecified(), - }) - .into(), - ); - for i in 0..n { - if i > 0 { - result.push( - tt::Leaf::Punct(tt::Punct { - char: ',', - spacing: tt::Spacing::Alone, - id: tt::TokenId::unspecified(), - }) - .into(), - ); - } - result.push( - tt::Leaf::Ident(tt::Ident { - id: tt::TokenId::unspecified(), - text: format!("T{}", i).into(), - }) - .into(), - ); - result.extend(bound.iter().cloned()); - } - result.push( - tt::Leaf::Punct(tt::Punct { - char: '>', - spacing: tt::Spacing::Alone, - id: tt::TokenId::unspecified(), + let param_types = params + .into_iter() + .flat_map(|param_list| param_list.type_or_const_params()) + .map(|param| { + if let ast::TypeOrConstParam::Const(param) = param { + let ty = param + .ty() + .map(|ty| mbe::syntax_node_to_token_tree(ty.syntax()).0) + .unwrap_or_default(); + Some(ty) + } else { + None + } }) - .into(), - ); - result + .collect(); + Ok(BasicAdtInfo { name: name_token, param_types }) } fn expand_simple_derive(tt: &tt::Subtree, trait_path: tt::Subtree) -> ExpandResult { @@ -143,14 +116,27 @@ fn expand_simple_derive(tt: &tt::Subtree, trait_path: tt::Subtree) -> ExpandResu Ok(info) => info, Err(e) => return ExpandResult::only_err(e), }; + let (params, args): (Vec<_>, Vec<_>) = info + .param_types + .into_iter() + .enumerate() + .map(|(idx, param_ty)| { + let ident = tt::Leaf::Ident(tt::Ident { + id: tt::TokenId::unspecified(), + text: format!("T{idx}").into(), + }); + let ident_ = ident.clone(); + if let Some(ty) = param_ty { + (quote! { const #ident : #ty , }, quote! { #ident_ , }) + } else { + let bound = trait_path.clone(); + (quote! { #ident : #bound , }, quote! { #ident_ , }) + } + }) + .unzip(); let name = info.name; - let trait_path_clone = trait_path.token_trees.clone(); - let bound = (quote! { : ##trait_path_clone }).token_trees; - let type_params = make_type_args(info.type_or_const_params, bound); - let type_args = make_type_args(info.type_or_const_params, Vec::new()); - let trait_path = trait_path.token_trees; let expanded = quote! { - impl ##type_params ##trait_path for #name ##type_args {} + impl < ##params > #trait_path for #name < ##args > {} }; ExpandResult::ok(expanded) } diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs b/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs index 68413df420c7d..d7586d129b768 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs @@ -259,7 +259,6 @@ macro_rules! 
__known_path { (core::future::Future) => {}; (core::future::IntoFuture) => {}; (core::ops::Try) => {}; - (core::ops::FromResidual) => {}; ($path:path) => { compile_error!("Please register your known path in the path module") }; diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/name.rs b/src/tools/rust-analyzer/crates/hir-expand/src/name.rs index 8a735b965ab84..2679a1c360267 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/name.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/name.rs @@ -279,8 +279,6 @@ pub mod known { RangeToInclusive, RangeTo, Range, - Residual, - FromResidual, Neg, Not, None, diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs index 6a5c4966f7ba5..0efff651cc174 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs @@ -190,9 +190,7 @@ pub(crate) type InferResult = Result, TypeError>; pub enum InferenceDiagnostic { NoSuchField { expr: ExprId }, BreakOutsideOfLoop { expr: ExprId, is_break: bool }, - IncorrectTryTarget { expr: ExprId }, MismatchedArgCount { call_expr: ExprId, expected: usize, found: usize }, - DoesNotImplement { expr: ExprId, trait_: TraitId, ty: Ty }, } /// A mismatch between an expected and an inferred type. @@ -907,6 +905,17 @@ impl<'a> InferenceContext<'a> { self.db.trait_data(trait_).associated_type_by_name(&name![Item]) } + fn resolve_ops_try_ok(&self) -> Option { + // FIXME resolve via lang_item once try v2 is stable + let path = path![core::ops::Try]; + let trait_ = self.resolver.resolve_known_trait(self.db.upcast(), &path)?; + let trait_data = self.db.trait_data(trait_); + trait_data + // FIXME remove once try v2 is stable + .associated_type_by_name(&name![Ok]) + .or_else(|| trait_data.associated_type_by_name(&name![Output])) + } + fn resolve_ops_neg_output(&self) -> Option { let trait_ = self.resolve_lang_item(name![neg])?.as_trait()?; self.db.trait_data(trait_).associated_type_by_name(&name![Output]) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs index 59ab50d0717ba..f56108b26c45b 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs @@ -19,24 +19,24 @@ use hir_def::{ resolver::resolver_for_expr, ConstParamId, FieldId, ItemContainerId, Lookup, }; -use hir_expand::{name, name::Name}; +use hir_expand::name::Name; use stdx::always; use syntax::ast::RangeOp; use crate::{ autoderef::{self, Autoderef}, consteval, - infer::{coerce::CoerceMany, find_continuable, path, BreakableKind}, + infer::{coerce::CoerceMany, find_continuable, BreakableKind}, lower::{ const_or_path_to_chalk, generic_arg_to_chalk, lower_to_chalk_mutability, ParamLoweringMode, }, mapping::{from_chalk, ToChalk}, method_resolution::{self, lang_names_for_bin_op, VisibleFromModule}, primitive::{self, UintTy}, - static_lifetime, to_assoc_type_id, to_chalk_trait_id, + static_lifetime, to_chalk_trait_id, utils::{generics, Generics}, - AdtId, AliasEq, AliasTy, Binders, CallableDefId, FnPointer, FnSig, FnSubst, Interner, - ProjectionTy, Rawness, Scalar, Substitution, TraitRef, Ty, TyBuilder, TyExt, TyKind, + AdtId, Binders, CallableDefId, FnPointer, FnSig, FnSubst, Interner, Rawness, Scalar, + Substitution, TraitRef, Ty, TyBuilder, TyExt, TyKind, }; use super::{ @@ -564,29 +564,9 @@ impl<'a> InferenceContext<'a> { let inner_ty = self.infer_expr_inner(*expr, &Expectation::none()); 
self.resolve_associated_type(inner_ty, self.resolve_future_future_output()) } - &Expr::Try { expr } => { - let inner_ty = self.infer_expr_inner(expr, &Expectation::none()); - match self.resolve_try_impl_for(inner_ty.clone()) { - Some((_, Some((output, residual)))) => { - if let Some((_trait, false)) = - self.implements_from_residual(self.return_ty.clone(), residual) - { - self.push_diagnostic(InferenceDiagnostic::IncorrectTryTarget { - expr: tgt_expr, - }); - } - output - } - Some((trait_, None)) => { - self.push_diagnostic(InferenceDiagnostic::DoesNotImplement { - expr, - trait_, - ty: inner_ty, - }); - self.err_ty() - } - None => self.err_ty(), - } + Expr::Try { expr } => { + let inner_ty = self.infer_expr_inner(*expr, &Expectation::none()); + self.resolve_associated_type(inner_ty, self.resolve_ops_try_ok()) } Expr::Cast { expr, type_ref } => { // FIXME: propagate the "castable to" expectation (and find a test case that shows this is necessary) @@ -1550,67 +1530,4 @@ impl<'a> InferenceContext<'a> { let ctx = self.breakables.pop().expect("breakable stack broken"); (ctx.may_break.then(|| ctx.coerce.complete()), res) } - - /// Check whether `ty` implements `FromResidual` - fn implements_from_residual(&mut self, ty: Ty, r: Ty) -> Option<(hir_def::TraitId, bool)> { - let from_residual_trait = self - .resolver - .resolve_known_trait(self.db.upcast(), &(super::path![core::ops::FromResidual]))?; - let r = GenericArgData::Ty(r).intern(Interner); - let b = TyBuilder::trait_ref(self.db, from_residual_trait); - if b.remaining() != 2 { - return Some((from_residual_trait, false)); - } - let trait_ref = b.push(ty).push(r).build(); - Some((from_residual_trait, self.table.try_obligation(trait_ref.cast(Interner)).is_some())) - } - - fn resolve_try_impl_for(&mut self, ty: Ty) -> Option<(hir_def::TraitId, Option<(Ty, Ty)>)> { - let path = path![core::ops::Try]; - let trait_ = self.resolver.resolve_known_trait(self.db.upcast(), &path)?; - - let trait_ref = TyBuilder::trait_ref(self.db, trait_).push(ty).build(); - let substitution = trait_ref.substitution.clone(); - self.push_obligation(trait_ref.clone().cast(Interner)); - - let trait_data = self.db.trait_data(trait_); - let output = trait_data.associated_type_by_name(&name![Output]); - let residual = trait_data.associated_type_by_name(&name![Residual]); - - let output_ty = match output { - Some(output) => { - let output_ty = self.table.new_type_var(); - let alias_eq = AliasEq { - alias: AliasTy::Projection(ProjectionTy { - associated_ty_id: to_assoc_type_id(output), - substitution: substitution.clone(), - }), - ty: output_ty.clone(), - }; - self.push_obligation(alias_eq.cast(Interner)); - output_ty - } - None => self.err_ty(), - }; - let residual_ty = match residual { - Some(residual) => { - let residual_ty = self.table.new_type_var(); - let alias_eq = AliasEq { - alias: AliasTy::Projection(ProjectionTy { - associated_ty_id: to_assoc_type_id(residual), - substitution, - }), - ty: residual_ty.clone(), - }; - self.push_obligation(alias_eq.cast(Interner)); - residual_ty - } - None => self.err_ty(), - }; - // FIXME: We are doing the work twice here I think? 
- Some(( - trait_, - self.table.try_obligation(trait_ref.cast(Interner)).map(|_| (output_ty, residual_ty)), - )) - } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs index a79efeb6daa84..3a1a3f4fdeb3b 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs @@ -1111,24 +1111,6 @@ pub fn resolve_indexing_op( } None } -/// Returns the receiver type for the try branch trait call. -pub fn resolve_branch_op( - db: &dyn HirDatabase, - env: Arc, - ty: Canonical, - try_trait: TraitId, -) -> Option { - let mut table = InferenceTable::new(db, env.clone()); - let ty = table.instantiate_canonical(ty); - let (deref_chain, adj) = autoderef_method_receiver(&mut table, ty); - for (ty, adj) in deref_chain.into_iter().zip(adj) { - let goal = generic_implements_goal(db, env.clone(), try_trait, &ty); - if db.trait_solve(env.krate, goal.cast(Interner)).is_some() { - return Some(adj); - } - } - None -} macro_rules! check_that { ($cond:expr) => { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs index b91172e33422d..555b6972fb71e 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs @@ -162,16 +162,98 @@ fn test() { ); } +#[test] +fn infer_try() { + check_types( + r#" +//- /main.rs crate:main deps:core +fn test() { + let r: Result = Result::Ok(1); + let v = r?; + v; +} //^ i32 + +//- /core.rs crate:core +pub mod ops { + pub trait Try { + type Ok; + type Error; + } +} + +pub mod result { + pub enum Result { + Ok(O), + Err(E) + } + + impl crate::ops::Try for Result { + type Ok = O; + type Error = E; + } +} + +pub mod prelude { + pub mod rust_2018 { + pub use crate::{result::*, ops::*}; + } +} +"#, + ); +} + #[test] fn infer_try_trait_v2() { check_types( r#" -//- minicore: try -fn test() -> core::ops::ControlFlow { - let r: core::ops::ControlFlow = core::ops::ControlFlow::Continue(1.0); +//- /main.rs crate:main deps:core +fn test() { + let r: Result = Result::Ok(1); let v = r?; - //^ f32 - r + v; +} //^ i32 + +//- /core.rs crate:core +mod ops { + mod try_trait { + pub trait Try: FromResidual { + type Output; + type Residual; + } + pub trait FromResidual::Residual> {} + } + + pub use self::try_trait::FromResidual; + pub use self::try_trait::Try; +} + +mod convert { + pub trait From {} + impl From for T {} +} + +pub mod result { + use crate::convert::From; + use crate::ops::{Try, FromResidual}; + + pub enum Infallible {} + pub enum Result { + Ok(O), + Err(E) + } + + impl Try for Result { + type Output = O; + type Error = Result; + } + + impl> FromResidual> for Result {} +} + +pub mod prelude { + pub mod rust_2018 { + pub use crate::result::*; + } } "#, ); diff --git a/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs b/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs index 6c8b3088adc0f..c5dc60f1ec5f9 100644 --- a/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs +++ b/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs @@ -6,7 +6,7 @@ use base_db::CrateId; use cfg::{CfgExpr, CfgOptions}; use either::Either; -use hir_def::{path::ModPath, TraitId}; +use hir_def::path::ModPath; use hir_expand::{name::Name, HirFileId, InFile}; use syntax::{ast, AstPtr, SyntaxNodePtr, TextRange}; @@ -33,7 +33,6 @@ diagnostics![ BreakOutsideOfLoop, InactiveCode, IncorrectCase, - 
IncorrectTryExpr, InvalidDeriveTarget, MacroError, MalformedDerive, @@ -41,7 +40,6 @@ diagnostics![ MissingFields, MissingMatchArms, MissingUnsafe, - NotImplemented, NoSuchField, ReplaceFilterMapNextWithFindMap, TypeMismatch, @@ -155,16 +153,6 @@ pub struct MismatchedArgCount { pub expected: usize, pub found: usize, } -#[derive(Debug)] -pub struct IncorrectTryExpr { - pub expr: InFile>, -} -#[derive(Debug)] -pub struct NotImplemented { - pub expr: InFile>, - pub trait_: TraitId, - pub ty: Type, -} #[derive(Debug)] pub struct MissingMatchArms { diff --git a/src/tools/rust-analyzer/crates/hir/src/lib.rs b/src/tools/rust-analyzer/crates/hir/src/lib.rs index e6c5c6b583398..f5324208c9a4e 100644 --- a/src/tools/rust-analyzer/crates/hir/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir/src/lib.rs @@ -81,12 +81,11 @@ use crate::db::{DefDatabase, HirDatabase}; pub use crate::{ attrs::{HasAttrs, Namespace}, diagnostics::{ - AnyDiagnostic, BreakOutsideOfLoop, InactiveCode, IncorrectCase, IncorrectTryExpr, - InvalidDeriveTarget, MacroError, MalformedDerive, MismatchedArgCount, MissingFields, - MissingMatchArms, MissingUnsafe, NoSuchField, NotImplemented, - ReplaceFilterMapNextWithFindMap, TypeMismatch, UnimplementedBuiltinMacro, - UnresolvedExternCrate, UnresolvedImport, UnresolvedMacroCall, UnresolvedModule, - UnresolvedProcMacro, + AnyDiagnostic, BreakOutsideOfLoop, InactiveCode, IncorrectCase, InvalidDeriveTarget, + MacroError, MalformedDerive, MismatchedArgCount, MissingFields, MissingMatchArms, + MissingUnsafe, NoSuchField, ReplaceFilterMapNextWithFindMap, TypeMismatch, + UnimplementedBuiltinMacro, UnresolvedExternCrate, UnresolvedImport, UnresolvedMacroCall, + UnresolvedModule, UnresolvedProcMacro, }, has_source::HasSource, semantics::{PathResolution, Semantics, SemanticsScope, TypeInfo, VisibleTraits}, @@ -1283,45 +1282,30 @@ impl DefWithBody { let infer = db.infer(self.into()); let source_map = Lazy::new(|| db.body_with_source_map(self.into()).1); for d in &infer.diagnostics { - match *d { + match d { hir_ty::InferenceDiagnostic::NoSuchField { expr } => { - let field = source_map.field_syntax(expr); + let field = source_map.field_syntax(*expr); acc.push(NoSuchField { field }.into()) } - hir_ty::InferenceDiagnostic::BreakOutsideOfLoop { expr, is_break } => { + &hir_ty::InferenceDiagnostic::BreakOutsideOfLoop { expr, is_break } => { let expr = source_map .expr_syntax(expr) .expect("break outside of loop in synthetic syntax"); acc.push(BreakOutsideOfLoop { expr, is_break }.into()) } hir_ty::InferenceDiagnostic::MismatchedArgCount { call_expr, expected, found } => { - match source_map.expr_syntax(call_expr) { + match source_map.expr_syntax(*call_expr) { Ok(source_ptr) => acc.push( MismatchedArgCount { call_expr: source_ptr, - expected: expected, - found: found, + expected: *expected, + found: *found, } .into(), ), Err(SyntheticSyntax) => (), } } - hir_ty::InferenceDiagnostic::IncorrectTryTarget { expr } => { - let expr = source_map.expr_syntax(expr).expect("try in synthetic syntax"); - acc.push(IncorrectTryExpr { expr }.into()) - } - hir_ty::InferenceDiagnostic::DoesNotImplement { expr, trait_, ref ty } => { - let expr = source_map.expr_syntax(expr).expect("try in synthetic syntax"); - acc.push( - NotImplemented { - expr, - trait_, - ty: Type::new(db, DefWithBodyId::from(self), ty.clone()), - } - .into(), - ) - } } } for (expr, mismatch) in infer.expr_type_mismatches() { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_constant.rs 
b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_constant.rs index eaa6de73eb35d..ccdfcb0d9e4fd 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_constant.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_constant.rs @@ -77,7 +77,7 @@ pub(crate) fn generate_constant(acc: &mut Assists, ctx: &AssistContext<'_>) -> O target_data_for_generate_constant(ctx, current_module, constant_module).unwrap_or_else( || { let indent = IndentLevel::from_node(statement.syntax()); - (statement.syntax().text_range().start(), indent, None, format!("\n{}", indent)) + (statement.syntax().text_range().start(), indent, None, format!("\n{indent}")) }, ); @@ -90,7 +90,7 @@ pub(crate) fn generate_constant(acc: &mut Assists, ctx: &AssistContext<'_>) -> O if let Some(file_id) = file_id { builder.edit_file(file_id); } - builder.insert(offset, format!("{}{}", text, post_string)); + builder.insert(offset, format!("{text}{post_string}")); }, ) } @@ -103,13 +103,13 @@ fn get_text_for_generate_constant( ) -> Option { let constant_token = not_exist_name_ref.pop()?; let vis = if not_exist_name_ref.len() == 0 && !outer_exists { "" } else { "\npub " }; - let mut text = format!("{}const {}: {} = $0;", vis, constant_token, type_name); + let mut text = format!("{vis}const {constant_token}: {type_name} = $0;"); while let Some(name_ref) = not_exist_name_ref.pop() { let vis = if not_exist_name_ref.len() == 0 && !outer_exists { "" } else { "\npub " }; text = text.replace("\n", "\n "); - text = format!("{}mod {} {{{}\n}}", vis, name_ref.to_string(), text); + text = format!("{vis}mod {name_ref} {{{text}\n}}"); } - Some(text.replace("\n", &format!("\n{}", indent))) + Some(text.replace("\n", &format!("\n{indent}"))) } fn target_data_for_generate_constant( @@ -134,7 +134,7 @@ fn target_data_for_generate_constant( .find(|it| it.kind() == SyntaxKind::WHITESPACE && it.to_string().contains("\n")) .is_some(); let post_string = - if siblings_has_newline { format!("{}", indent) } else { format!("\n{}", indent) }; + if siblings_has_newline { format!("{indent}") } else { format!("\n{indent}") }; Some((offset, indent + 1, Some(file_id), post_string)) } _ => Some((TextSize::from(0), 0.into(), Some(file_id), "\n".into())), diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_enum_variant.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_enum_variant.rs index 5e9995a986644..a6e3d49e0d1ae 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_enum_variant.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_enum_variant.rs @@ -55,12 +55,11 @@ pub(crate) fn generate_default_from_enum_variant( let buf = format!( r#" -impl Default for {0} {{ +impl Default for {enum_name} {{ fn default() -> Self {{ - Self::{1} + Self::{variant_name} }} }}"#, - enum_name, variant_name ); edit.insert(start_offset, buf); }, diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_new.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_new.rs index cbd33de19eda0..49d9fd707ffcc 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_new.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_new.rs @@ -1,8 +1,7 @@ use ide_db::famous_defs::FamousDefs; -use itertools::Itertools; use stdx::format_to; use syntax::{ - ast::{self, HasGenericParams, 
HasName, HasTypeBounds, Impl}, + ast::{self, make, HasGenericParams, HasName, Impl}, AstNode, }; @@ -77,45 +76,47 @@ pub(crate) fn generate_default_from_new(acc: &mut Assists, ctx: &AssistContext<' ) } +// FIXME: based on from utils::generate_impl_text_inner fn generate_trait_impl_text_from_impl(impl_: &ast::Impl, trait_text: &str, code: &str) -> String { - let generic_params = impl_.generic_param_list(); - let mut buf = String::with_capacity(code.len()); - buf.push_str("\n\n"); - buf.push_str("impl"); - - if let Some(generic_params) = &generic_params { - let lifetimes = generic_params.lifetime_params().map(|lt| format!("{}", lt.syntax())); - let toc_params = generic_params.type_or_const_params().map(|toc_param| match toc_param { - ast::TypeOrConstParam::Type(type_param) => { - let mut buf = String::new(); - if let Some(it) = type_param.name() { - format_to!(buf, "{}", it.syntax()); - } - if let Some(it) = type_param.colon_token() { - format_to!(buf, "{} ", it); + let impl_ty = impl_.self_ty().unwrap(); + let generic_params = impl_.generic_param_list().map(|generic_params| { + let lifetime_params = + generic_params.lifetime_params().map(ast::GenericParam::LifetimeParam); + let ty_or_const_params = generic_params.type_or_const_params().filter_map(|param| { + // remove defaults since they can't be specified in impls + match param { + ast::TypeOrConstParam::Type(param) => { + let param = param.clone_for_update(); + param.remove_default(); + Some(ast::GenericParam::TypeParam(param)) } - if let Some(it) = type_param.type_bound_list() { - format_to!(buf, "{}", it.syntax()); + ast::TypeOrConstParam::Const(param) => { + let param = param.clone_for_update(); + param.remove_default(); + Some(ast::GenericParam::ConstParam(param)) } - buf } - ast::TypeOrConstParam::Const(const_param) => const_param.syntax().to_string(), }); - let generics = lifetimes.chain(toc_params).format(", "); - format_to!(buf, "<{}>", generics); - } - buf.push(' '); - buf.push_str(trait_text); - buf.push_str(" for "); - buf.push_str(&impl_.self_ty().unwrap().syntax().text().to_string()); + make::generic_param_list(itertools::chain(lifetime_params, ty_or_const_params)) + }); + + let mut buf = String::with_capacity(code.len()); + buf.push_str("\n\n"); + + // `impl{generic_params} {trait_text} for {impl_.self_ty()}` + buf.push_str("impl"); + if let Some(generic_params) = &generic_params { + format_to!(buf, "{generic_params}") + } + format_to!(buf, " {trait_text} for {impl_ty}"); match impl_.where_clause() { Some(where_clause) => { - format_to!(buf, "\n{}\n{{\n{}\n}}", where_clause, code); + format_to!(buf, "\n{where_clause}\n{{\n{code}\n}}"); } None => { - format_to!(buf, " {{\n{}\n}}", code); + format_to!(buf, " {{\n{code}\n}}"); } } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs index 85b193663a05b..ceae80755037c 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs @@ -51,14 +51,14 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<' Some(field) => { let field_name = field.name()?; let field_ty = field.ty()?; - (format!("{}", field_name), field_ty, field.syntax().text_range()) + (field_name.to_string(), field_ty, field.syntax().text_range()) } None => { let field = ctx.find_node_at_offset::()?; let field_list = 
ctx.find_node_at_offset::()?; let field_list_index = field_list.fields().position(|it| it == field)?; let field_ty = field.ty()?; - (format!("{}", field_list_index), field_ty, field.syntax().text_range()) + (field_list_index.to_string(), field_ty, field.syntax().text_range()) } }; @@ -77,7 +77,7 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<' for method in methods { let adt = ast::Adt::Struct(strukt.clone()); let name = method.name(ctx.db()).to_string(); - let impl_def = find_struct_impl(ctx, &adt, &name).flatten(); + let impl_def = find_struct_impl(ctx, &adt, &[name]).flatten(); acc.add_group( &GroupLabel("Generate delegate methods…".to_owned()), AssistId("generate_delegate_methods", AssistKind::Generate), @@ -151,7 +151,7 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<' Some(cap) => { let offset = strukt.syntax().text_range().end(); let snippet = render_snippet(cap, impl_def.syntax(), cursor); - let snippet = format!("\n\n{}", snippet); + let snippet = format!("\n\n{snippet}"); builder.insert_snippet(cap, offset, snippet); } None => { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_deref.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_deref.rs index 8f4405a8c869c..55b7afb3d3b09 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_deref.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_deref.rs @@ -66,7 +66,7 @@ fn generate_record_deref(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<( let target = field.syntax().text_range(); acc.add( AssistId("generate_deref", AssistKind::Generate), - format!("Generate `{:?}` impl using `{}`", deref_type_to_generate, field_name), + format!("Generate `{deref_type_to_generate:?}` impl using `{field_name}`"), target, |edit| { generate_edit( @@ -106,7 +106,7 @@ fn generate_tuple_deref(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<() let target = field.syntax().text_range(); acc.add( AssistId("generate_deref", AssistKind::Generate), - format!("Generate `{:?}` impl using `{}`", deref_type_to_generate, field.syntax()), + format!("Generate `{deref_type_to_generate:?}` impl using `{field}`"), target, |edit| { generate_edit( @@ -132,18 +132,16 @@ fn generate_edit( let start_offset = strukt.syntax().text_range().end(); let impl_code = match deref_type { DerefType::Deref => format!( - r#" type Target = {0}; + r#" type Target = {field_type_syntax}; fn deref(&self) -> &Self::Target {{ - &self.{1} + &self.{field_name} }}"#, - field_type_syntax, field_name ), DerefType::DerefMut => format!( r#" fn deref_mut(&mut self) -> &mut Self::Target {{ - &mut self.{} + &mut self.{field_name} }}"#, - field_name ), }; let strukt_adt = ast::Adt::Struct(strukt); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_documentation_template.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_documentation_template.rs index c91141f8eb50f..b8415c72a2a32 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_documentation_template.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_documentation_template.rs @@ -139,40 +139,44 @@ fn make_example_for_fn(ast_func: &ast::Fn, ctx: &AssistContext<'_>) -> Option) -> Option) -> Option None, }; @@ -228,7 +235,9 @@ fn introduction_builder(ast_func: &ast::Fn, ctx: &AssistContext<'_>) -> Option String { // instance `TuplePat`) could be managed later. 
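Many of the assist edits in this patch, here and in the neighbouring handler files, replace positional `format!` arguments with inline captured identifiers (stabilized in Rust 1.58) without changing the produced text; the `match` arm that the comment above refers to continues right after this aside. A tiny standalone equivalence check with made-up values:

```rust
fn main() {
    let indent = "    ";
    let name = "Foo";

    // Positional arguments, as the assists were written before this patch.
    let old = format!("{}mod {} {{}}", indent, name);
    // Inline captured identifiers, as the updated assists now use.
    let new = format!("{indent}mod {name} {{}}");

    assert_eq!(old, new);
}
```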
Some(ast::Pat::IdentPat(ident_pat)) => match ident_pat.name() { Some(name) => match is_a_ref_mut_param(¶m) { - true => format!("&mut {}", name), + true => format!("&mut {name}"), false => name.to_string(), }, None => "_".to_string(), @@ -424,14 +433,15 @@ fn function_call( let name = ast_func.name()?; let arguments = arguments_from_params(param_list); let function_call = if param_list.self_param().is_some() { - format!("{}.{}({})", self_name?, name, arguments) + let self_ = self_name?; + format!("{self_}.{name}({arguments})") } else if let Some(implementation) = self_partial_type(ast_func) { - format!("{}::{}({})", implementation, name, arguments) + format!("{implementation}::{name}({arguments})") } else { - format!("{}({})", name, arguments) + format!("{name}({arguments})") }; match is_unsafe { - true => Some(format!("unsafe {{ {} }}", function_call)), + true => Some(format!("unsafe {{ {function_call} }}")), false => Some(function_call), } } @@ -469,8 +479,8 @@ fn build_path(ast_func: &ast::Fn, ctx: &AssistContext<'_>) -> Option { .unwrap_or_else(|| "*".into()); let module_def: ModuleDef = ctx.sema.to_def(ast_func)?.module(ctx.db()).into(); match module_def.canonical_path(ctx.db()) { - Some(path) => Some(format!("{}::{}::{}", crate_name, path, leaf)), - None => Some(format!("{}::{}", crate_name, leaf)), + Some(path) => Some(format!("{crate_name}::{path}::{leaf}")), + None => Some(format!("{crate_name}::{leaf}")), } } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_is_method.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_is_method.rs index 52d27d8a7d633..63e91b835f1f8 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_is_method.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_is_method.rs @@ -52,7 +52,7 @@ pub(crate) fn generate_enum_is_method(acc: &mut Assists, ctx: &AssistContext<'_> let fn_name = format!("is_{}", &to_lower_snake_case(&variant_name.text())); // Return early if we've found an existing new fn - let impl_def = find_struct_impl(ctx, &parent_enum, &fn_name)?; + let impl_def = find_struct_impl(ctx, &parent_enum, &[fn_name.clone()])?; let target = variant.syntax().text_range(); acc.add_group( @@ -61,21 +61,15 @@ pub(crate) fn generate_enum_is_method(acc: &mut Assists, ctx: &AssistContext<'_> "Generate an `is_` method for this enum variant", target, |builder| { - let vis = parent_enum.visibility().map_or(String::new(), |v| format!("{} ", v)); + let vis = parent_enum.visibility().map_or(String::new(), |v| format!("{v} ")); let method = format!( - " /// Returns `true` if the {} is [`{variant}`]. + " /// Returns `true` if the {enum_lowercase_name} is [`{variant_name}`]. 
/// - /// [`{variant}`]: {}::{variant} + /// [`{variant_name}`]: {enum_name}::{variant_name} #[must_use] - {}fn {}(&self) -> bool {{ - matches!(self, Self::{variant}{}) + {vis}fn {fn_name}(&self) -> bool {{ + matches!(self, Self::{variant_name}{pattern_suffix}) }}", - enum_lowercase_name, - enum_name, - vis, - fn_name, - pattern_suffix, - variant = variant_name ); add_method_to_adt(builder, &parent_enum, impl_def, &method); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_projection_method.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_projection_method.rs index b19aa0f652aa7..bdd3cf4f06c25 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_projection_method.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_enum_projection_method.rs @@ -116,6 +116,14 @@ fn generate_enum_projection_method( assist_description: &str, props: ProjectionProps, ) -> Option<()> { + let ProjectionProps { + fn_name_prefix, + self_param, + return_prefix, + return_suffix, + happy_case, + sad_case, + } = props; let variant = ctx.find_node_at_offset::()?; let variant_name = variant.name()?; let parent_enum = ast::Adt::Enum(variant.parent_enum()); @@ -125,7 +133,7 @@ fn generate_enum_projection_method( let (field,) = record.fields().collect_tuple()?; let name = field.name()?.to_string(); let ty = field.ty()?; - let pattern_suffix = format!(" {{ {} }}", name); + let pattern_suffix = format!(" {{ {name} }}"); (pattern_suffix, ty, name) } ast::StructKind::Tuple(tuple) => { @@ -136,11 +144,10 @@ fn generate_enum_projection_method( ast::StructKind::Unit => return None, }; - let fn_name = - format!("{}_{}", props.fn_name_prefix, &to_lower_snake_case(&variant_name.text())); + let fn_name = format!("{}_{}", fn_name_prefix, &to_lower_snake_case(&variant_name.text())); // Return early if we've found an existing new fn - let impl_def = find_struct_impl(ctx, &parent_enum, &fn_name)?; + let impl_def = find_struct_impl(ctx, &parent_enum, &[fn_name.clone()])?; let target = variant.syntax().text_range(); acc.add_group( @@ -149,27 +156,15 @@ fn generate_enum_projection_method( assist_description, target, |builder| { - let vis = parent_enum.visibility().map_or(String::new(), |v| format!("{} ", v)); + let vis = parent_enum.visibility().map_or(String::new(), |v| format!("{v} ")); let method = format!( - " {0}fn {1}({2}) -> {3}{4}{5} {{ - if let Self::{6}{7} = self {{ - {8}({9}) + " {vis}fn {fn_name}({self_param}) -> {return_prefix}{field_type}{return_suffix} {{ + if let Self::{variant_name}{pattern_suffix} = self {{ + {happy_case}({bound_name}) }} else {{ - {10} + {sad_case} }} - }}", - vis, - fn_name, - props.self_param, - props.return_prefix, - field_type.syntax(), - props.return_suffix, - variant_name, - pattern_suffix, - props.happy_case, - bound_name, - props.sad_case, - ); + }}"); add_method_to_adt(builder, &parent_enum, impl_def, &method); }, diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_from_impl_for_enum.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_from_impl_for_enum.rs index 507ea012babfc..7c81d2c6a6cc9 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_from_impl_for_enum.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_from_impl_for_enum.rs @@ -56,23 +56,18 @@ pub(crate) fn generate_from_impl_for_enum( target, |edit| { let start_offset = variant.parent_enum().syntax().text_range().end(); - let from_trait = 
format!("From<{}>", field_type.syntax()); + let from_trait = format!("From<{field_type}>"); let impl_code = if let Some(name) = field_name { format!( - r#" fn from({0}: {1}) -> Self {{ - Self::{2} {{ {0} }} - }}"#, - name.text(), - field_type.syntax(), - variant_name, + r#" fn from({name}: {field_type}) -> Self {{ + Self::{variant_name} {{ {name} }} + }}"# ) } else { format!( - r#" fn from(v: {}) -> Self {{ - Self::{}(v) - }}"#, - field_type.syntax(), - variant_name, + r#" fn from(v: {field_type}) -> Self {{ + Self::{variant_name}(v) + }}"# ) }; let from_impl = generate_trait_impl_text(&enum_, &from_trait, &impl_code); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs index 8b67982f91582..c229127e48ffc 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs @@ -179,7 +179,7 @@ fn add_func_to_accumulator( let function_template = function_builder.render(adt_name.is_some()); let mut func = function_template.to_string(ctx.config.snippet_cap); if let Some(name) = adt_name { - func = format!("\n{}impl {} {{\n{}\n{}}}", indent, name, func, indent); + func = format!("\n{indent}impl {name} {{\n{func}\n{indent}}}"); } builder.edit_file(file); match ctx.config.snippet_cap { @@ -198,7 +198,7 @@ fn get_adt_source( let file = ctx.sema.parse(range.file_id); let adt_source = ctx.sema.find_node_at_offset_with_macros(file.syntax(), range.range.start())?; - find_struct_impl(ctx, &adt_source, fn_name).map(|impl_| (impl_, range.file_id)) + find_struct_impl(ctx, &adt_source, &[fn_name.to_string()]).map(|impl_| (impl_, range.file_id)) } struct FunctionTemplate { @@ -212,23 +212,26 @@ struct FunctionTemplate { impl FunctionTemplate { fn to_string(&self, cap: Option) -> String { + let Self { leading_ws, fn_def, ret_type, should_focus_return_type, trailing_ws, tail_expr } = + self; + let f = match cap { Some(cap) => { - let cursor = if self.should_focus_return_type { + let cursor = if *should_focus_return_type { // Focus the return type if there is one - match self.ret_type { - Some(ref ret_type) => ret_type.syntax(), - None => self.tail_expr.syntax(), + match ret_type { + Some(ret_type) => ret_type.syntax(), + None => tail_expr.syntax(), } } else { - self.tail_expr.syntax() + tail_expr.syntax() }; - render_snippet(cap, self.fn_def.syntax(), Cursor::Replace(cursor)) + render_snippet(cap, fn_def.syntax(), Cursor::Replace(cursor)) } - None => self.fn_def.to_string(), + None => fn_def.to_string(), }; - format!("{}{}{}", self.leading_ws, f, self.trailing_ws) + format!("{leading_ws}{f}{trailing_ws}") } } @@ -330,9 +333,9 @@ impl FunctionBuilder { let mut indent = IndentLevel::from_node(&it); if is_method { indent = indent + 1; - leading_ws = format!("{}", indent); + leading_ws = format!("{indent}"); } else { - leading_ws = format!("\n\n{}", indent); + leading_ws = format!("\n\n{indent}"); } fn_def = fn_def.indent(indent); @@ -340,9 +343,10 @@ impl FunctionBuilder { } GeneratedFunctionTarget::InEmptyItemList(it) => { let indent = IndentLevel::from_node(&it); - leading_ws = format!("\n{}", indent + 1); - fn_def = fn_def.indent(indent + 1); - trailing_ws = format!("\n{}", indent); + let leading_indent = indent + 1; + leading_ws = format!("\n{leading_indent}"); + fn_def = fn_def.indent(leading_indent); + trailing_ws = format!("\n{indent}"); } }; diff --git 
a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter.rs index 76fcef0cad951..5e71914283495 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter.rs @@ -1,6 +1,9 @@ use ide_db::famous_defs::FamousDefs; use stdx::{format_to, to_lower_snake_case}; -use syntax::ast::{self, AstNode, HasName, HasVisibility}; +use syntax::{ + ast::{self, AstNode, HasName, HasVisibility}, + TextRange, +}; use crate::{ utils::{convert_reference_type, find_impl_block_end, find_struct_impl, generate_impl_text}, @@ -72,92 +75,259 @@ pub(crate) fn generate_getter_mut(acc: &mut Assists, ctx: &AssistContext<'_>) -> generate_getter_impl(acc, ctx, true) } +#[derive(Clone, Debug)] +struct RecordFieldInfo { + field_name: syntax::ast::Name, + field_ty: syntax::ast::Type, + fn_name: String, + target: TextRange, +} + +struct GetterInfo { + impl_def: Option, + strukt: ast::Struct, + mutable: bool, +} + pub(crate) fn generate_getter_impl( acc: &mut Assists, ctx: &AssistContext<'_>, mutable: bool, ) -> Option<()> { - let strukt = ctx.find_node_at_offset::()?; - let field = ctx.find_node_at_offset::()?; + // This if condition denotes two modes this assist can work in: + // - First is acting upon selection of record fields + // - Next is acting upon a single record field + // + // This is the only part where implementation diverges a bit, + // subsequent code is generic for both of these modes - let field_name = field.name()?; - let field_ty = field.ty()?; + let (strukt, info_of_record_fields, fn_names) = if !ctx.has_empty_selection() { + // Selection Mode + let node = ctx.covering_element(); - // Return early if we've found an existing fn - let mut fn_name = to_lower_snake_case(&field_name.to_string()); - if mutable { - format_to!(fn_name, "_mut"); + let node = match node { + syntax::NodeOrToken::Node(n) => n, + syntax::NodeOrToken::Token(t) => t.parent()?, + }; + + let parent_struct = node.ancestors().find_map(ast::Struct::cast)?; + + let (info_of_record_fields, field_names) = + extract_and_parse_record_fields(&parent_struct, ctx.selection_trimmed(), mutable)?; + + (parent_struct, info_of_record_fields, field_names) + } else { + // Single Record Field mode + let strukt = ctx.find_node_at_offset::()?; + let field = ctx.find_node_at_offset::()?; + + let record_field_info = parse_record_field(field, mutable)?; + + let fn_name = record_field_info.fn_name.clone(); + + (strukt, vec![record_field_info], vec![fn_name]) + }; + + // No record fields to do work on :( + if info_of_record_fields.len() == 0 { + return None; } - let impl_def = find_struct_impl(ctx, &ast::Adt::Struct(strukt.clone()), fn_name.as_str())?; + + let impl_def = find_struct_impl(ctx, &ast::Adt::Struct(strukt.clone()), &fn_names)?; let (id, label) = if mutable { ("generate_getter_mut", "Generate a mut getter method") } else { ("generate_getter", "Generate a getter method") }; - let target = field.syntax().text_range(); + + // Computing collective text range of all record fields in selected region + let target: TextRange = info_of_record_fields + .iter() + .map(|record_field_info| record_field_info.target) + .reduce(|acc, target| acc.cover(target))?; + + let getter_info = GetterInfo { impl_def, strukt, mutable }; + acc.add_group( &GroupLabel("Generate getter/setter".to_owned()), AssistId(id, AssistKind::Generate), label, target, |builder| { + let record_fields_count 
= info_of_record_fields.len(); + let mut buf = String::with_capacity(512); - if impl_def.is_some() { - buf.push('\n'); + // Check if an impl exists + if let Some(impl_def) = &getter_info.impl_def { + // Check if impl is empty + if let Some(assoc_item_list) = impl_def.assoc_item_list() { + if assoc_item_list.assoc_items().next().is_some() { + // If not empty then only insert a new line + buf.push('\n'); + } + } } - let vis = strukt.visibility().map_or(String::new(), |v| format!("{} ", v)); - let (ty, body) = if mutable { - (format!("&mut {}", field_ty), format!("&mut self.{}", field_name)) - } else { - (|| { - let krate = ctx.sema.scope(field_ty.syntax())?.krate(); - let famous_defs = &FamousDefs(&ctx.sema, krate); - ctx.sema - .resolve_type(&field_ty) - .and_then(|ty| convert_reference_type(ty, ctx.db(), famous_defs)) - .map(|conversion| { - cov_mark::hit!(convert_reference_type); - ( - conversion.convert_type(ctx.db()), - conversion.getter(field_name.to_string()), - ) - }) - })() - .unwrap_or_else(|| (format!("&{}", field_ty), format!("&self.{}", field_name))) - }; - - format_to!( - buf, - " {}fn {}(&{}self) -> {} {{ - {} - }}", - vis, - fn_name, - mutable.then(|| "mut ").unwrap_or_default(), - ty, - body, - ); - - let start_offset = impl_def - .and_then(|impl_def| find_impl_block_end(impl_def, &mut buf)) + for (i, record_field_info) in info_of_record_fields.iter().enumerate() { + // this buf inserts a newline at the end of a getter + // automatically, if one wants to add one more newline + // for separating it from other assoc items, that needs + // to be handled separately + let mut getter_buf = + generate_getter_from_info(ctx, &getter_info, &record_field_info); + + // Insert `$0` only for the last getter we generate + if i == record_fields_count - 1 { + getter_buf = getter_buf.replacen("fn ", "fn $0", 1); + } + + // For the first element we do not merge with '\n', as + // that can be inserted by the impl_def check defined + // above, for other cases which are: + // + // - impl exists but is empty, here we would ideally + // not want to keep a newline between impl { + // and the fn () { line + // + // - next if the impl itself does not exist, in this + // case we ourselves generate a new impl and that + // again ends up with the same reasoning as above + // for not keeping a newline + if i == 0 { + buf = buf + &getter_buf; + } else { + buf = buf + "\n" + &getter_buf; + } + + // We don't insert a new line at the end of + // last getter as it will end up in the end + // of an impl where we would not like to keep + // getter and end of impl ( i.e.
`}` ) with an + // extra line for no reason + if i < record_fields_count - 1 { + buf = buf + "\n"; + } + } + + let start_offset = getter_info + .impl_def + .as_ref() + .and_then(|impl_def| find_impl_block_end(impl_def.to_owned(), &mut buf)) .unwrap_or_else(|| { - buf = generate_impl_text(&ast::Adt::Struct(strukt.clone()), &buf); - strukt.syntax().text_range().end() + buf = generate_impl_text(&ast::Adt::Struct(getter_info.strukt.clone()), &buf); + getter_info.strukt.syntax().text_range().end() }); match ctx.config.snippet_cap { - Some(cap) => { - builder.insert_snippet(cap, start_offset, buf.replacen("fn ", "fn $0", 1)) - } + Some(cap) => builder.insert_snippet(cap, start_offset, buf), None => builder.insert(start_offset, buf), } }, ) } +fn generate_getter_from_info( + ctx: &AssistContext<'_>, + info: &GetterInfo, + record_field_info: &RecordFieldInfo, +) -> String { + let mut buf = String::with_capacity(512); + + let vis = info.strukt.visibility().map_or(String::new(), |v| format!("{} ", v)); + let (ty, body) = if info.mutable { + ( + format!("&mut {}", record_field_info.field_ty), + format!("&mut self.{}", record_field_info.field_name), + ) + } else { + (|| { + let krate = ctx.sema.scope(record_field_info.field_ty.syntax())?.krate(); + let famous_defs = &FamousDefs(&ctx.sema, krate); + ctx.sema + .resolve_type(&record_field_info.field_ty) + .and_then(|ty| convert_reference_type(ty, ctx.db(), famous_defs)) + .map(|conversion| { + cov_mark::hit!(convert_reference_type); + ( + conversion.convert_type(ctx.db()), + conversion.getter(record_field_info.field_name.to_string()), + ) + }) + })() + .unwrap_or_else(|| { + ( + format!("&{}", record_field_info.field_ty), + format!("&self.{}", record_field_info.field_name), + ) + }) + }; + + format_to!( + buf, + " {}fn {}(&{}self) -> {} {{ + {} + }}", + vis, + record_field_info.fn_name, + info.mutable.then(|| "mut ").unwrap_or_default(), + ty, + body, + ); + + buf +} + +fn extract_and_parse_record_fields( + node: &ast::Struct, + selection_range: TextRange, + mutable: bool, +) -> Option<(Vec, Vec)> { + let mut field_names: Vec = vec![]; + let field_list = node.field_list()?; + + match field_list { + ast::FieldList::RecordFieldList(ele) => { + let info_of_record_fields_in_selection = ele + .fields() + .filter_map(|record_field| { + if selection_range.contains_range(record_field.syntax().text_range()) { + let record_field_info = parse_record_field(record_field, mutable)?; + field_names.push(record_field_info.fn_name.clone()); + return Some(record_field_info); + } + + None + }) + .collect::>(); + + if info_of_record_fields_in_selection.len() == 0 { + return None; + } + + Some((info_of_record_fields_in_selection, field_names)) + } + ast::FieldList::TupleFieldList(_) => { + return None; + } + } +} + +fn parse_record_field(record_field: ast::RecordField, mutable: bool) -> Option { + let field_name = record_field.name()?; + let field_ty = record_field.ty()?; + + let mut fn_name = to_lower_snake_case(&field_name.to_string()); + if mutable { + format_to!(fn_name, "_mut"); + } + + let target = record_field.syntax().text_range(); + + Some(RecordFieldInfo { field_name, field_ty, fn_name, target }) +} + #[cfg(test)] mod tests { use crate::tests::{check_assist, check_assist_not_applicable}; @@ -489,4 +659,53 @@ impl Context { "#, ); } + + #[test] + fn test_generate_multiple_getters_from_selection() { + check_assist( + generate_getter, + r#" +struct Context { + $0data: Data, + count: usize,$0 +} + "#, + r#" +struct Context { + data: Data, + count: usize, +} + +impl 
Context { + fn data(&self) -> &Data { + &self.data + } + + fn $0count(&self) -> &usize { + &self.count + } +} + "#, + ); + } + + #[test] + fn test_generate_multiple_getters_from_selection_one_already_exists() { + // As impl for one of the fields already exist, skip it + check_assist_not_applicable( + generate_getter, + r#" +struct Context { + $0data: Data, + count: usize,$0 +} + +impl Context { + fn data(&self) -> &Data { + &self.data + } +} + "#, + ); + } } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_impl.rs index 307cea3d0a4f8..9af26c04eb458 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_impl.rs @@ -28,7 +28,7 @@ pub(crate) fn generate_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> Optio acc.add( AssistId("generate_impl", AssistKind::Generate), - format!("Generate impl for `{}`", name), + format!("Generate impl for `{name}`"), target, |edit| { let start_offset = nominal.syntax().text_range().end(); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs index 9cda74d9e0d31..17fadea0eaf19 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs @@ -39,7 +39,8 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option }; // Return early if we've found an existing new fn - let impl_def = find_struct_impl(ctx, &ast::Adt::Struct(strukt.clone()), "new")?; + let impl_def = + find_struct_impl(ctx, &ast::Adt::Struct(strukt.clone()), &[String::from("new")])?; let current_module = ctx.sema.scope(strukt.syntax())?.module(); @@ -51,11 +52,13 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option buf.push('\n'); } - let vis = strukt.visibility().map_or(String::new(), |v| format!("{} ", v)); + let vis = strukt.visibility().map_or(String::new(), |v| format!("{v} ")); let trivial_constructors = field_list .fields() .map(|f| { + let name = f.name()?; + let ty = ctx.sema.resolve_type(&f.ty()?)?; let item_in_ns = hir::ItemInNs::from(hir::ModuleDef::from(ty.as_adt()?)); @@ -72,7 +75,7 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option &ty, )?; - Some(format!("{}: {}", f.name()?.syntax(), expr)) + Some(format!("{name}: {expr}")) }) .collect::>(); @@ -81,7 +84,10 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option .enumerate() .filter_map(|(i, f)| { if trivial_constructors[i].is_none() { - Some(format!("{}: {}", f.name()?.syntax(), f.ty()?.syntax())) + let name = f.name()?; + let ty = f.ty()?; + + Some(format!("{name}: {ty}")) } else { None } @@ -101,7 +107,7 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option }) .format(", "); - format_to!(buf, " {}fn new({}) -> Self {{ Self {{ {} }} }}", vis, params, fields); + format_to!(buf, " {vis}fn new({params}) -> Self {{ Self {{ {fields} }} }}"); let start_offset = impl_def .and_then(|impl_def| find_impl_block_start(impl_def, &mut buf)) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_setter.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_setter.rs index 2a7ad6ce36811..62f72df1c9d61 100644 --- 
a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_setter.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_setter.rs @@ -36,11 +36,8 @@ pub(crate) fn generate_setter(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opt // Return early if we've found an existing fn let fn_name = to_lower_snake_case(&field_name.to_string()); - let impl_def = find_struct_impl( - ctx, - &ast::Adt::Struct(strukt.clone()), - format!("set_{}", fn_name).as_str(), - )?; + let impl_def = + find_struct_impl(ctx, &ast::Adt::Struct(strukt.clone()), &[format!("set_{fn_name}")])?; let target = field.syntax().text_range(); acc.add_group( @@ -55,18 +52,12 @@ pub(crate) fn generate_setter(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opt buf.push('\n'); } - let vis = strukt.visibility().map_or(String::new(), |v| format!("{} ", v)); + let vis = strukt.visibility().map_or(String::new(), |v| format!("{v} ")); format_to!( buf, - " {}fn set_{}(&mut self, {}: {}) {{ - self.{} = {}; - }}", - vis, - fn_name, - fn_name, - field_ty, - fn_name, - fn_name, + " {vis}fn set_{fn_name}(&mut self, {fn_name}: {field_ty}) {{ + self.{fn_name} = {fn_name}; + }}" ); let start_offset = impl_def diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs b/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs index 38396cd7d7baf..db32e7182c44d 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs @@ -331,10 +331,14 @@ fn calc_depth(pat: &ast::Pat, depth: usize) -> usize { // FIXME: change the new fn checking to a more semantic approach when that's more // viable (e.g. we process proc macros, etc) // FIXME: this partially overlaps with `find_impl_block_*` + +/// `find_struct_impl` looks for an impl block of a struct, but it also takes a list of +/// function names and checks whether any of them already exists inside that impl; if +/// even one match is found, it returns `None`. pub(crate) fn find_struct_impl( ctx: &AssistContext<'_>, adt: &ast::Adt, - name: &str, + names: &[String], ) -> Option<Option<ast::Impl>> { let db = ctx.db(); let module = adt.syntax().parent()?; @@ -362,7 +366,7 @@ pub(crate) fn find_struct_impl( }); if let Some(ref impl_blk) = block { - if has_fn(impl_blk, name) { + if has_any_fn(impl_blk, names) { return None; } } @@ -370,12 +374,12 @@ pub(crate) fn find_struct_impl( Some(block) } -fn has_fn(imp: &ast::Impl, rhs_name: &str) -> bool { +fn has_any_fn(imp: &ast::Impl, names: &[String]) -> bool { if let Some(il) = imp.assoc_item_list() { for item in il.assoc_items() { if let ast::AssocItem::Fn(f) = item { if let Some(name) = f.name() { - if name.text().eq_ignore_ascii_case(rhs_name) { + if names.iter().any(|n| n.eq_ignore_ascii_case(&name.text())) { return true; } } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_try_expr.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_try_expr.rs deleted file mode 100644 index 085d8d32598a1..0000000000000 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_try_expr.rs +++ /dev/null @@ -1,37 +0,0 @@ -use hir::InFile; - -use crate::{Diagnostic, DiagnosticsContext}; - -// Diagnostic: incorrect-try-target - -// - -// This diagnostic is triggered if a question mark operator was used in a context where it is not applicable.
-pub(crate) fn incorrect_try_expr( - ctx: &DiagnosticsContext<'_>, - d: &hir::IncorrectTryExpr, -) -> Diagnostic { - Diagnostic::new( - "incorrect-try-target", - format!("the return type of the containing function does not implement `FromResidual`"), - ctx.sema - .diagnostics_display_range(InFile::new(d.expr.file_id, d.expr.value.clone().into())) - .range, - ) -} - -#[cfg(test)] -mod tests { - use crate::tests::check_diagnostics; - - #[test] - fn try_ops_diag() { - check_diagnostics( - r#" -//- minicore: try -fn test() { - core::ops::ControlFlow::::Continue(1.0)?; - // ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: the return type of the containing function does not implement `FromResidual` -} -"#, - ); - } -} diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/not_implemented.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/not_implemented.rs deleted file mode 100644 index 3bf6a4232298c..0000000000000 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/not_implemented.rs +++ /dev/null @@ -1,35 +0,0 @@ -use hir::{db::DefDatabase, HirDisplay}; - -use crate::{Diagnostic, DiagnosticsContext}; - -// Diagnostic: not-implemented -// -// This diagnostic is triggered if a type doesn't implement a necessary trait. -pub(crate) fn not_implemented(ctx: &DiagnosticsContext<'_>, d: &hir::NotImplemented) -> Diagnostic { - Diagnostic::new( - "not-implemented", - format!( - "the trait `{}` is not implemented for `{}`", - ctx.sema.db.trait_data(d.trait_).name, - d.ty.display(ctx.sema.db) - ), - ctx.sema.diagnostics_display_range(d.expr.clone().map(|it| it.into())).range, - ) -} - -#[cfg(test)] -mod tests { - use crate::tests::check_diagnostics; - - #[test] - fn missing_try_impl() { - check_diagnostics( - r#" -//- minicore: try -fn main() { - ()?; -} //^^ error: the trait `Try` is not implemented for `()` -"#, - ) - } -} diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs index 4577072149a72..ae299f0584148 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs @@ -29,7 +29,6 @@ mod handlers { pub(crate) mod break_outside_of_loop; pub(crate) mod inactive_code; pub(crate) mod incorrect_case; - pub(crate) mod incorrect_try_expr; pub(crate) mod invalid_derive_target; pub(crate) mod macro_error; pub(crate) mod malformed_derive; @@ -37,7 +36,6 @@ mod handlers { pub(crate) mod missing_fields; pub(crate) mod missing_match_arms; pub(crate) mod missing_unsafe; - pub(crate) mod not_implemented; pub(crate) mod no_such_field; pub(crate) mod replace_filter_map_next_with_find_map; pub(crate) mod type_mismatch; @@ -227,14 +225,12 @@ pub fn diagnostics( let d = match diag { AnyDiagnostic::BreakOutsideOfLoop(d) => handlers::break_outside_of_loop::break_outside_of_loop(&ctx, &d), AnyDiagnostic::IncorrectCase(d) => handlers::incorrect_case::incorrect_case(&ctx, &d), - AnyDiagnostic::IncorrectTryExpr(d) => handlers::incorrect_try_expr::incorrect_try_expr(&ctx, &d), AnyDiagnostic::MacroError(d) => handlers::macro_error::macro_error(&ctx, &d), AnyDiagnostic::MalformedDerive(d) => handlers::malformed_derive::malformed_derive(&ctx, &d), AnyDiagnostic::MismatchedArgCount(d) => handlers::mismatched_arg_count::mismatched_arg_count(&ctx, &d), AnyDiagnostic::MissingFields(d) => handlers::missing_fields::missing_fields(&ctx, &d), AnyDiagnostic::MissingMatchArms(d) => handlers::missing_match_arms::missing_match_arms(&ctx, 
&d), AnyDiagnostic::MissingUnsafe(d) => handlers::missing_unsafe::missing_unsafe(&ctx, &d), - AnyDiagnostic::NotImplemented(d) => handlers::not_implemented::not_implemented(&ctx, &d), AnyDiagnostic::NoSuchField(d) => handlers::no_such_field::no_such_field(&ctx, &d), AnyDiagnostic::ReplaceFilterMapNextWithFindMap(d) => handlers::replace_filter_map_next_with_find_map::replace_filter_map_next_with_find_map(&ctx, &d), AnyDiagnostic::TypeMismatch(d) => handlers::type_mismatch::type_mismatch(&ctx, &d), diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs index 5cab017a58dbd..eb997e6fef830 100644 --- a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs +++ b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs @@ -4913,22 +4913,6 @@ fn foo() -> NotResult<(), Short> { ``` "#]], ); - check_hover_range( - r#" -//- minicore: try -use core::ops::ControlFlow; -fn foo() -> ControlFlow<()> { - $0ControlFlow::Break(())?$0; - ControlFlow::Continue(()) -} -"#, - expect![[r#" - ```text - Try Target Type: ControlFlow<(), {unknown}> - Propagated as: ControlFlow<(), ()> - ``` - "#]], - ); } #[test] @@ -4944,9 +4928,9 @@ fn foo() -> Option<()> { } "#, expect![[r#" - ```rust - i32 - ```"#]], + ```rust + <Option<()> as Try>::Output + ```"#]], ); } diff --git a/src/tools/rust-analyzer/crates/ide/src/lib.rs b/src/tools/rust-analyzer/crates/ide/src/lib.rs index 77fe0dbf55658..416817ca0b42c 100644 --- a/src/tools/rust-analyzer/crates/ide/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide/src/lib.rs @@ -482,8 +482,18 @@ impl Analysis { } /// Returns crates this file belongs too. - pub fn crate_for(&self, file_id: FileId) -> Cancellable<Vec<CrateId>> { - self.with_db(|db| parent_module::crate_for(db, file_id)) + pub fn crates_for(&self, file_id: FileId) -> Cancellable<Vec<CrateId>> { + self.with_db(|db| parent_module::crates_for(db, file_id)) + } + + /// Returns the transitive reverse dependencies of the given crate. + pub fn transitive_rev_deps(&self, crate_id: CrateId) -> Cancellable<Vec<CrateId>> { + self.with_db(|db| db.crate_graph().transitive_rev_deps(crate_id).collect()) + } + + /// Returns crates this file *might* belong to. + pub fn relevant_crates_for(&self, file_id: FileId) -> Cancellable<Vec<CrateId>> { + self.with_db(|db| db.relevant_crates(file_id).iter().copied().collect()) } /// Returns the edition of the given crate.
diff --git a/src/tools/rust-analyzer/crates/ide/src/parent_module.rs b/src/tools/rust-analyzer/crates/ide/src/parent_module.rs index 8f3cc86873f5e..506f9452cf196 100644 --- a/src/tools/rust-analyzer/crates/ide/src/parent_module.rs +++ b/src/tools/rust-analyzer/crates/ide/src/parent_module.rs @@ -1,6 +1,6 @@ -use hir::Semantics; +use hir::{db::DefDatabase, Semantics}; use ide_db::{ - base_db::{CrateId, FileId, FilePosition}, + base_db::{CrateId, FileId, FileLoader, FilePosition}, RootDatabase, }; use itertools::Itertools; @@ -55,9 +55,13 @@ pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec Vec { - let sema = Semantics::new(db); - sema.to_module_defs(file_id).map(|module| module.krate().into()).unique().collect() +pub(crate) fn crates_for(db: &RootDatabase, file_id: FileId) -> Vec { + db.relevant_crates(file_id) + .iter() + .copied() + .filter(|&crate_id| db.crate_def_map(crate_id).modules_for_file(file_id).next().is_some()) + .sorted() + .collect() } #[cfg(test)] @@ -147,7 +151,7 @@ $0 mod foo; "#, ); - assert_eq!(analysis.crate_for(file_id).unwrap().len(), 1); + assert_eq!(analysis.crates_for(file_id).unwrap().len(), 1); } #[test] @@ -162,6 +166,6 @@ mod baz; mod baz; "#, ); - assert_eq!(analysis.crate_for(file_id).unwrap().len(), 2); + assert_eq!(analysis.crates_for(file_id).unwrap().len(), 2); } } diff --git a/src/tools/rust-analyzer/crates/ide/src/static_index.rs b/src/tools/rust-analyzer/crates/ide/src/static_index.rs index 9e5eb909508f6..27ad1a948d13b 100644 --- a/src/tools/rust-analyzer/crates/ide/src/static_index.rs +++ b/src/tools/rust-analyzer/crates/ide/src/static_index.rs @@ -210,9 +210,7 @@ fn get_definition(sema: &Semantics<'_, RootDatabase>, token: SyntaxToken) -> Opt let def = IdentClass::classify_token(sema, &token).map(IdentClass::definitions_no_ops); if let Some(&[x]) = def.as_deref() { return Some(x); - } else { - continue; - }; + } } None } diff --git a/src/tools/rust-analyzer/crates/ide/src/status.rs b/src/tools/rust-analyzer/crates/ide/src/status.rs index f4d0387440d47..20810c25b3e81 100644 --- a/src/tools/rust-analyzer/crates/ide/src/status.rs +++ b/src/tools/rust-analyzer/crates/ide/src/status.rs @@ -45,7 +45,7 @@ pub(crate) fn status(db: &RootDatabase, file_id: Option) -> String { if let Some(file_id) = file_id { format_to!(buf, "\nFile info:\n"); - let crates = crate::parent_module::crate_for(db, file_id); + let crates = crate::parent_module::crates_for(db, file_id); if crates.is_empty() { format_to!(buf, "Does not belong to any crate"); } diff --git a/src/tools/rust-analyzer/crates/project-model/Cargo.toml b/src/tools/rust-analyzer/crates/project-model/Cargo.toml index 6fd7c3166f821..cf9868740cb03 100644 --- a/src/tools/rust-analyzer/crates/project-model/Cargo.toml +++ b/src/tools/rust-analyzer/crates/project-model/Cargo.toml @@ -17,7 +17,6 @@ semver = "1.0.14" serde = { version = "1.0.137", features = ["derive"] } serde_json = "1.0.86" anyhow = "1.0.62" -expect-test = "1.4.0" la-arena = { version = "0.3.0", path = "../../lib/la-arena" } cfg = { path = "../cfg", version = "0.0.0" } @@ -26,3 +25,6 @@ toolchain = { path = "../toolchain", version = "0.0.0" } paths = { path = "../paths", version = "0.0.0" } stdx = { path = "../stdx", version = "0.0.0" } profile = { path = "../profile", version = "0.0.0" } + +[dev-dependencies] +expect-test = "1.4.0" diff --git a/src/tools/rust-analyzer/crates/project-model/src/build_scripts.rs b/src/tools/rust-analyzer/crates/project-model/src/build_scripts.rs index d9f09c0349566..a26a7c57acfce 100644 --- 
a/src/tools/rust-analyzer/crates/project-model/src/build_scripts.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/build_scripts.rs @@ -6,7 +6,12 @@ //! This module implements this second part. We use "build script" terminology //! here, but it covers procedural macros as well. -use std::{cell::RefCell, io, path::PathBuf, process::Command}; +use std::{ + cell::RefCell, + io, mem, + path::{self, PathBuf}, + process::Command, +}; use cargo_metadata::{camino::Utf8Path, Message}; use la_arena::ArenaMap; @@ -15,11 +20,14 @@ use rustc_hash::FxHashMap; use semver::Version; use serde::Deserialize; -use crate::{cfg_flag::CfgFlag, CargoConfig, CargoFeatures, CargoWorkspace, Package}; +use crate::{ + cfg_flag::CfgFlag, CargoConfig, CargoFeatures, CargoWorkspace, InvocationLocation, + InvocationStrategy, Package, +}; #[derive(Debug, Default, Clone, PartialEq, Eq)] pub struct WorkspaceBuildScripts { - outputs: ArenaMap>, + outputs: ArenaMap, error: Option, } @@ -38,47 +46,71 @@ pub(crate) struct BuildScriptOutput { pub(crate) proc_macro_dylib_path: Option, } -impl WorkspaceBuildScripts { - fn build_command(config: &CargoConfig) -> Command { - if let Some([program, args @ ..]) = config.run_build_script_command.as_deref() { - let mut cmd = Command::new(program); - cmd.args(args); - cmd.envs(&config.extra_env); - return cmd; - } +impl BuildScriptOutput { + fn is_unchanged(&self) -> bool { + self.cfgs.is_empty() + && self.envs.is_empty() + && self.out_dir.is_none() + && self.proc_macro_dylib_path.is_none() + } +} - let mut cmd = Command::new(toolchain::cargo()); - cmd.envs(&config.extra_env); - cmd.args(&["check", "--quiet", "--workspace", "--message-format=json"]); +impl WorkspaceBuildScripts { + fn build_command(config: &CargoConfig) -> io::Result { + let mut cmd = match config.run_build_script_command.as_deref() { + Some([program, args @ ..]) => { + let mut cmd = Command::new(program); + cmd.args(args); + cmd + } + _ => { + let mut cmd = Command::new(toolchain::cargo()); - // --all-targets includes tests, benches and examples in addition to the - // default lib and bins. This is an independent concept from the --targets - // flag below. - cmd.arg("--all-targets"); + cmd.args(&["check", "--quiet", "--workspace", "--message-format=json"]); - if let Some(target) = &config.target { - cmd.args(&["--target", target]); - } + // --all-targets includes tests, benches and examples in addition to the + // default lib and bins. This is an independent concept from the --targets + // flag below. + cmd.arg("--all-targets"); - match &config.features { - CargoFeatures::All => { - cmd.arg("--all-features"); - } - CargoFeatures::Selected { features, no_default_features } => { - if *no_default_features { - cmd.arg("--no-default-features"); + if let Some(target) = &config.target { + cmd.args(&["--target", target]); } - if !features.is_empty() { - cmd.arg("--features"); - cmd.arg(features.join(" ")); + + match &config.features { + CargoFeatures::All => { + cmd.arg("--all-features"); + } + CargoFeatures::Selected { features, no_default_features } => { + if *no_default_features { + cmd.arg("--no-default-features"); + } + if !features.is_empty() { + cmd.arg("--features"); + cmd.arg(features.join(" ")); + } + } } + + cmd } + }; + + cmd.envs(&config.extra_env); + if config.wrap_rustc_in_build_scripts { + // Setup RUSTC_WRAPPER to point to `rust-analyzer` binary itself. We use + // that to compile only proc macros and build scripts during the initial + // `cargo check`. 
+ let myself = std::env::current_exe()?; + cmd.env("RUSTC_WRAPPER", myself); + cmd.env("RA_RUSTC_WRAPPER", "1"); } - cmd + Ok(cmd) } - pub(crate) fn run( + /// Runs the build scripts for the given workspace + pub(crate) fn run_for_workspace( config: &CargoConfig, workspace: &CargoWorkspace, progress: &dyn Fn(String), @@ -86,15 +118,23 @@ impl WorkspaceBuildScripts { ) -> io::Result { const RUST_1_62: Version = Version::new(1, 62, 0); - match Self::run_(Self::build_command(config), config, workspace, progress) { + let current_dir = match &config.invocation_location { + InvocationLocation::Root(root) if config.run_build_script_command.is_some() => { + root.as_path() + } + _ => &workspace.workspace_root(), + } + .as_ref(); + + match Self::run_per_ws(Self::build_command(config)?, workspace, current_dir, progress) { Ok(WorkspaceBuildScripts { error: Some(error), .. }) if toolchain.as_ref().map_or(false, |it| *it >= RUST_1_62) => { // building build scripts failed, attempt to build with --keep-going so // that we potentially get more build data - let mut cmd = Self::build_command(config); + let mut cmd = Self::build_command(config)?; cmd.args(&["-Z", "unstable-options", "--keep-going"]).env("RUSTC_BOOTSTRAP", "1"); - let mut res = Self::run_(cmd, config, workspace, progress)?; + let mut res = Self::run_per_ws(cmd, workspace, current_dir, progress)?; res.error = Some(error); Ok(res) } @@ -102,23 +142,90 @@ impl WorkspaceBuildScripts { } } - fn run_( - mut cmd: Command, + /// Runs the build scripts by invoking the configured command *once*. + /// This populates the outputs for all passed in workspaces. + pub(crate) fn run_once( config: &CargoConfig, - workspace: &CargoWorkspace, + workspaces: &[&CargoWorkspace], progress: &dyn Fn(String), - ) -> io::Result { - if config.wrap_rustc_in_build_scripts { - // Setup RUSTC_WRAPPER to point to `rust-analyzer` binary itself. We use - // that to compile only proc macros and build scripts during the initial - // `cargo check`. - let myself = std::env::current_exe()?; - cmd.env("RUSTC_WRAPPER", myself); - cmd.env("RA_RUSTC_WRAPPER", "1"); + ) -> io::Result> { + assert_eq!(config.invocation_strategy, InvocationStrategy::Once); + + let current_dir = match &config.invocation_location { + InvocationLocation::Root(root) => root, + InvocationLocation::Workspace => { + return Err(io::Error::new( + io::ErrorKind::Other, + "Cannot run build scripts from workspace with invocation strategy `once`", + )) + } + }; + let cmd = Self::build_command(config)?; + // NB: Cargo.toml could have been modified between `cargo metadata` and + // `cargo check`. We shouldn't assume that package ids we see here are + // exactly those from `config`. 
+ let mut by_id = FxHashMap::default(); + // some workspaces might depend on the same crates, so we need to duplicate the outputs + // to those collisions + let mut collisions = Vec::new(); + let mut res: Vec<_> = workspaces + .iter() + .enumerate() + .map(|(idx, workspace)| { + let mut res = WorkspaceBuildScripts::default(); + for package in workspace.packages() { + res.outputs.insert(package, BuildScriptOutput::default()); + if by_id.contains_key(&workspace[package].id) { + collisions.push((&workspace[package].id, idx, package)); + } else { + by_id.insert(workspace[package].id.clone(), (package, idx)); + } + } + res + }) + .collect(); + + let errors = Self::run_command( + cmd, + current_dir.as_path().as_ref(), + |package, cb| { + if let Some(&(package, workspace)) = by_id.get(package) { + cb(&workspaces[workspace][package].name, &mut res[workspace].outputs[package]); + } + }, + progress, + )?; + res.iter_mut().for_each(|it| it.error = errors.clone()); + collisions.into_iter().for_each(|(id, workspace, package)| { + if let Some(&(p, w)) = by_id.get(id) { + res[workspace].outputs[package] = res[w].outputs[p].clone(); + } + }); + + if tracing::enabled!(tracing::Level::INFO) { + for (idx, workspace) in workspaces.iter().enumerate() { + for package in workspace.packages() { + let package_build_data = &mut res[idx].outputs[package]; + if !package_build_data.is_unchanged() { + tracing::info!( + "{}: {:?}", + workspace[package].manifest.parent().display(), + package_build_data, + ); + } + } + } } - cmd.current_dir(workspace.workspace_root()); + Ok(res) + } + fn run_per_ws( + cmd: Command, + workspace: &CargoWorkspace, + current_dir: &path::Path, + progress: &dyn Fn(String), + ) -> io::Result { let mut res = WorkspaceBuildScripts::default(); let outputs = &mut res.outputs; // NB: Cargo.toml could have been modified between `cargo metadata` and @@ -126,10 +233,46 @@ impl WorkspaceBuildScripts { // exactly those from `config`. 
let mut by_id: FxHashMap = FxHashMap::default(); for package in workspace.packages() { - outputs.insert(package, None); + outputs.insert(package, BuildScriptOutput::default()); by_id.insert(workspace[package].id.clone(), package); } + res.error = Self::run_command( + cmd, + current_dir, + |package, cb| { + if let Some(&package) = by_id.get(package) { + cb(&workspace[package].name, &mut outputs[package]); + } + }, + progress, + )?; + + if tracing::enabled!(tracing::Level::INFO) { + for package in workspace.packages() { + let package_build_data = &mut outputs[package]; + if !package_build_data.is_unchanged() { + tracing::info!( + "{}: {:?}", + workspace[package].manifest.parent().display(), + package_build_data, + ); + } + } + } + + Ok(res) + } + + fn run_command( + mut cmd: Command, + current_dir: &path::Path, + // ideally this would be something like: + // with_output_for: impl FnMut(&str, dyn FnOnce(&mut BuildScriptOutput)), + // but owned trait objects aren't a thing + mut with_output_for: impl FnMut(&str, &mut dyn FnMut(&str, &mut BuildScriptOutput)), + progress: &dyn Fn(String), + ) -> io::Result> { let errors = RefCell::new(String::new()); let push_err = |err: &str| { let mut e = errors.borrow_mut(); @@ -137,7 +280,8 @@ impl WorkspaceBuildScripts { e.push('\n'); }; - tracing::info!("Running build scripts: {:?}", cmd); + tracing::info!("Running build scripts in {}: {:?}", current_dir.display(), cmd); + cmd.current_dir(current_dir); let output = stdx::process::spawn_with_streaming_output( cmd, &mut |line| { @@ -149,61 +293,58 @@ impl WorkspaceBuildScripts { .unwrap_or_else(|_| Message::TextLine(line.to_string())); match message { - Message::BuildScriptExecuted(message) => { - let package = match by_id.get(&message.package_id.repr) { - Some(&it) => it, - None => return, - }; - progress(format!("running build-script: {}", workspace[package].name)); - - let cfgs = { - let mut acc = Vec::new(); - for cfg in message.cfgs { - match cfg.parse::() { - Ok(it) => acc.push(it), - Err(err) => { - push_err(&format!( - "invalid cfg from cargo-metadata: {}", - err - )); - return; - } - }; + Message::BuildScriptExecuted(mut message) => { + with_output_for(&message.package_id.repr, &mut |name, data| { + progress(format!("running build-script: {}", name)); + let cfgs = { + let mut acc = Vec::new(); + for cfg in &message.cfgs { + match cfg.parse::() { + Ok(it) => acc.push(it), + Err(err) => { + push_err(&format!( + "invalid cfg from cargo-metadata: {}", + err + )); + return; + } + }; + } + acc + }; + if !message.env.is_empty() { + data.envs = mem::take(&mut message.env); } - acc - }; - // cargo_metadata crate returns default (empty) path for - // older cargos, which is not absolute, so work around that. - let out_dir = message.out_dir.into_os_string(); - if !out_dir.is_empty() { - let data = outputs[package].get_or_insert_with(Default::default); - data.out_dir = Some(AbsPathBuf::assert(PathBuf::from(out_dir))); - data.cfgs = cfgs; - } - if !message.env.is_empty() { - outputs[package].get_or_insert_with(Default::default).envs = - message.env; - } + // cargo_metadata crate returns default (empty) path for + // older cargos, which is not absolute, so work around that. + let out_dir = mem::take(&mut message.out_dir).into_os_string(); + if !out_dir.is_empty() { + let out_dir = AbsPathBuf::assert(PathBuf::from(out_dir)); + // inject_cargo_env(package, package_build_data); + // NOTE: cargo and rustc seem to hide non-UTF-8 strings from env! 
and option_env!() + if let Some(out_dir) = + out_dir.as_os_str().to_str().map(|s| s.to_owned()) + { + data.envs.push(("OUT_DIR".to_string(), out_dir)); + } + data.out_dir = Some(out_dir); + data.cfgs = cfgs; + } + }); } Message::CompilerArtifact(message) => { - let package = match by_id.get(&message.package_id.repr) { - Some(it) => *it, - None => return, - }; - - progress(format!("building proc-macros: {}", message.target.name)); - - if message.target.kind.iter().any(|k| k == "proc-macro") { - // Skip rmeta file - if let Some(filename) = - message.filenames.iter().find(|name| is_dylib(name)) - { - let filename = AbsPathBuf::assert(PathBuf::from(&filename)); - outputs[package] - .get_or_insert_with(Default::default) - .proc_macro_dylib_path = Some(filename); + with_output_for(&message.package_id.repr, &mut |name, data| { + progress(format!("building proc-macros: {}", name)); + if message.target.kind.iter().any(|k| k == "proc-macro") { + // Skip rmeta file + if let Some(filename) = + message.filenames.iter().find(|name| is_dylib(name)) + { + let filename = AbsPathBuf::assert(PathBuf::from(&filename)); + data.proc_macro_dylib_path = Some(filename); + } } - } + }); } Message::CompilerMessage(message) => { progress(message.target.name); @@ -222,32 +363,13 @@ impl WorkspaceBuildScripts { }, )?; - for package in workspace.packages() { - if let Some(package_build_data) = &mut outputs[package] { - tracing::info!( - "{}: {:?}", - workspace[package].manifest.parent().display(), - package_build_data, - ); - // inject_cargo_env(package, package_build_data); - if let Some(out_dir) = &package_build_data.out_dir { - // NOTE: cargo and rustc seem to hide non-UTF-8 strings from env! and option_env!() - if let Some(out_dir) = out_dir.as_os_str().to_str().map(|s| s.to_owned()) { - package_build_data.envs.push(("OUT_DIR".to_string(), out_dir)); - } - } - } - } - - let mut errors = errors.into_inner(); - if !output.status.success() { - if errors.is_empty() { - errors = "cargo check failed".to_string(); - } - res.error = Some(errors); - } - - Ok(res) + let errors = if !output.status.success() { + let errors = errors.into_inner(); + Some(if errors.is_empty() { "cargo check failed".to_string() } else { errors }) + } else { + None + }; + Ok(errors) } pub fn error(&self) -> Option<&str> { @@ -255,11 +377,11 @@ impl WorkspaceBuildScripts { } pub(crate) fn get_output(&self, idx: Package) -> Option<&BuildScriptOutput> { - self.outputs.get(idx)?.as_ref() + self.outputs.get(idx) } } -// FIXME: File a better way to know if it is a dylib. +// FIXME: Find a better way to know if it is a dylib. fn is_dylib(path: &Utf8Path) -> bool { match path.extension().map(|e| e.to_string().to_lowercase()) { None => false, diff --git a/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs index 8e690f1125a01..b4c2ba436772f 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs @@ -14,8 +14,8 @@ use rustc_hash::FxHashMap; use serde::Deserialize; use serde_json::from_value; -use crate::CfgOverrides; -use crate::{utf8_stdout, ManifestPath}; +use crate::{utf8_stdout, InvocationLocation, ManifestPath}; +use crate::{CfgOverrides, InvocationStrategy}; /// [`CargoWorkspace`] represents the logical structure of, well, a Cargo /// workspace. It pretty closely mirrors `cargo metadata` output. 
@@ -106,6 +106,8 @@ pub struct CargoConfig { pub run_build_script_command: Option>, /// Extra env vars to set when invoking the cargo command pub extra_env: FxHashMap, + pub invocation_strategy: InvocationStrategy, + pub invocation_location: InvocationLocation, } impl CargoConfig { @@ -283,8 +285,6 @@ impl CargoWorkspace { } CargoFeatures::Selected { features, no_default_features } => { if *no_default_features { - // FIXME: `NoDefaultFeatures` is mutual exclusive with `SomeFeatures` - // https://github.com/oli-obk/cargo_metadata/issues/79 meta.features(CargoOpt::NoDefaultFeatures); } if !features.is_empty() { @@ -329,18 +329,21 @@ impl CargoWorkspace { let ws_members = &meta.workspace_members; meta.packages.sort_by(|a, b| a.id.cmp(&b.id)); - for meta_pkg in &meta.packages { + for meta_pkg in meta.packages { let cargo_metadata::Package { - id, - edition, name, - manifest_path, version, - metadata, + id, + source, + targets: meta_targets, + features, + manifest_path, repository, + edition, + metadata, .. } = meta_pkg; - let meta = from_value::(metadata.clone()).unwrap_or_default(); + let meta = from_value::(metadata).unwrap_or_default(); let edition = match edition { cargo_metadata::Edition::E2015 => Edition::Edition2015, cargo_metadata::Edition::E2018 => Edition::Edition2018, @@ -352,35 +355,36 @@ impl CargoWorkspace { }; // We treat packages without source as "local" packages. That includes all members of // the current workspace, as well as any path dependency outside the workspace. - let is_local = meta_pkg.source.is_none(); - let is_member = ws_members.contains(id); + let is_local = source.is_none(); + let is_member = ws_members.contains(&id); let pkg = packages.alloc(PackageData { id: id.repr.clone(), - name: name.clone(), - version: version.clone(), - manifest: AbsPathBuf::assert(PathBuf::from(&manifest_path)).try_into().unwrap(), + name, + version, + manifest: AbsPathBuf::assert(manifest_path.into()).try_into().unwrap(), targets: Vec::new(), is_local, is_member, edition, - repository: repository.clone(), + repository, dependencies: Vec::new(), - features: meta_pkg.features.clone().into_iter().collect(), + features: features.into_iter().collect(), active_features: Vec::new(), metadata: meta.rust_analyzer.unwrap_or_default(), }); let pkg_data = &mut packages[pkg]; pkg_by_id.insert(id, pkg); - for meta_tgt in &meta_pkg.targets { - let is_proc_macro = meta_tgt.kind.as_slice() == ["proc-macro"]; + for meta_tgt in meta_targets { + let cargo_metadata::Target { name, kind, required_features, src_path, .. 
} = + meta_tgt; let tgt = targets.alloc(TargetData { package: pkg, - name: meta_tgt.name.clone(), - root: AbsPathBuf::assert(PathBuf::from(&meta_tgt.src_path)), - kind: TargetKind::new(meta_tgt.kind.as_slice()), - is_proc_macro, - required_features: meta_tgt.required_features.clone(), + name, + root: AbsPathBuf::assert(src_path.into()), + kind: TargetKind::new(&kind), + is_proc_macro: &*kind == ["proc-macro"], + required_features, }); pkg_data.targets.push(tgt); } diff --git a/src/tools/rust-analyzer/crates/project-model/src/lib.rs b/src/tools/rust-analyzer/crates/project-model/src/lib.rs index ce78ce85697af..575581fa543a3 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/lib.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/lib.rs @@ -67,7 +67,7 @@ impl ProjectManifest { if path.file_name().unwrap_or_default() == "Cargo.toml" { return Ok(ProjectManifest::CargoToml(path)); } - bail!("project root must point to Cargo.toml or rust-project.json: {}", path.display()) + bail!("project root must point to Cargo.toml or rust-project.json: {}", path.display()); } pub fn discover_single(path: &AbsPath) -> Result { @@ -78,7 +78,7 @@ impl ProjectManifest { }; if !candidates.is_empty() { - bail!("more than one project") + bail!("more than one project"); } Ok(res) } @@ -157,3 +157,17 @@ fn utf8_stdout(mut cmd: Command) -> Result { let stdout = String::from_utf8(output.stdout)?; Ok(stdout.trim().to_string()) } + +#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)] +pub enum InvocationStrategy { + Once, + #[default] + PerWorkspace, +} + +#[derive(Clone, Debug, Default, PartialEq, Eq)] +pub enum InvocationLocation { + Root(AbsPathBuf), + #[default] + Workspace, +} diff --git a/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs b/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs index bc37e3d132a6c..fa8d76f3f4529 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs @@ -64,14 +64,15 @@ impl Sysroot { self.by_name("proc_macro") } - pub fn crates<'a>(&'a self) -> impl Iterator + ExactSizeIterator + 'a { + pub fn crates(&self) -> impl Iterator + ExactSizeIterator + '_ { self.crates.iter().map(|(id, _data)| id) } } impl Sysroot { + /// Attempts to discover the toolchain's sysroot from the given `dir`. 
pub fn discover(dir: &AbsPath, extra_env: &FxHashMap) -> Result { - tracing::debug!("Discovering sysroot for {}", dir.display()); + tracing::debug!("discovering sysroot for {}", dir.display()); let sysroot_dir = discover_sysroot_dir(dir, extra_env)?; let sysroot_src_dir = discover_sysroot_src_dir_or_add_component(&sysroot_dir, dir, extra_env)?; @@ -83,11 +84,10 @@ impl Sysroot { cargo_toml: &ManifestPath, extra_env: &FxHashMap, ) -> Option { - tracing::debug!("Discovering rustc source for {}", cargo_toml.display()); + tracing::debug!("discovering rustc source for {}", cargo_toml.display()); let current_dir = cargo_toml.parent(); - discover_sysroot_dir(current_dir, extra_env) - .ok() - .and_then(|sysroot_dir| get_rustc_src(&sysroot_dir)) + let sysroot_dir = discover_sysroot_dir(current_dir, extra_env).ok()?; + get_rustc_src(&sysroot_dir) } pub fn with_sysroot_dir(sysroot_dir: AbsPathBuf) -> Result { @@ -189,6 +189,7 @@ fn discover_sysroot_src_dir(sysroot_path: &AbsPathBuf) -> Option { get_rust_src(sysroot_path) } + fn discover_sysroot_src_dir_or_add_component( sysroot_path: &AbsPathBuf, current_dir: &AbsPath, @@ -199,6 +200,7 @@ fn discover_sysroot_src_dir_or_add_component( let mut rustup = Command::new(toolchain::rustup()); rustup.envs(extra_env); rustup.current_dir(current_dir).args(&["component", "add", "rust-src"]); + tracing::info!("adding rust-src component by {:?}", rustup); utf8_stdout(rustup).ok()?; get_rust_src(sysroot_path) }) @@ -217,7 +219,7 @@ try installing the Rust source the same way you installed rustc", fn get_rustc_src(sysroot_path: &AbsPath) -> Option { let rustc_src = sysroot_path.join("lib/rustlib/rustc-src/rust/compiler/rustc/Cargo.toml"); let rustc_src = ManifestPath::try_from(rustc_src).ok()?; - tracing::debug!("Checking for rustc source code: {}", rustc_src.display()); + tracing::debug!("checking for rustc source code: {}", rustc_src.display()); if fs::metadata(&rustc_src).is_ok() { Some(rustc_src) } else { @@ -227,7 +229,7 @@ fn get_rustc_src(sysroot_path: &AbsPath) -> Option { fn get_rust_src(sysroot_path: &AbsPath) -> Option { let rust_src = sysroot_path.join("lib/rustlib/src/rust/library"); - tracing::debug!("Checking sysroot: {}", rust_src.display()); + tracing::debug!("checking sysroot library: {}", rust_src.display()); if fs::metadata(&rust_src).is_ok() { Some(rust_src) } else { diff --git a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs index 72ddf809288aa..2780c62ed118a 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs @@ -2,7 +2,7 @@ //! metadata` or `rust-project.json`) into representation stored in the salsa //! database -- `CrateGraph`. -use std::{collections::VecDeque, fmt, fs, process::Command}; +use std::{collections::VecDeque, fmt, fs, process::Command, sync::Arc}; use anyhow::{format_err, Context, Result}; use base_db::{ @@ -21,8 +21,8 @@ use crate::{ cfg_flag::CfgFlag, rustc_cfg, sysroot::SysrootCrate, - utf8_stdout, CargoConfig, CargoWorkspace, ManifestPath, Package, ProjectJson, ProjectManifest, - Sysroot, TargetKind, WorkspaceBuildScripts, + utf8_stdout, CargoConfig, CargoWorkspace, InvocationStrategy, ManifestPath, Package, + ProjectJson, ProjectManifest, Sysroot, TargetKind, WorkspaceBuildScripts, }; /// A set of cfg-overrides per crate. 
@@ -209,6 +209,9 @@ impl ProjectWorkspace { ), None => None, }; + if let Some(sysroot) = &sysroot { + tracing::info!(src_root = %sysroot.src_root().display(), root = %sysroot.root().display(), "Using sysroot"); + } let rustc_dir = match &config.rustc_source { Some(RustcSource::Path(path)) => ManifestPath::try_from(path.clone()).ok(), @@ -217,6 +220,9 @@ impl ProjectWorkspace { } None => None, }; + if let Some(rustc_dir) = &rustc_dir { + tracing::info!(rustc_dir = %rustc_dir.display(), "Using rustc source"); + } let rustc = match rustc_dir { Some(rustc_dir) => Some({ @@ -277,6 +283,9 @@ impl ProjectWorkspace { } (None, None) => None, }; + if let Some(sysroot) = &sysroot { + tracing::info!(src_root = %sysroot.src_root().display(), root = %sysroot.root().display(), "Using sysroot"); + } let rustc_cfg = rustc_cfg::get(None, target, extra_env); Ok(ProjectWorkspace::Json { project: project_json, sysroot, rustc_cfg }) @@ -294,6 +303,7 @@ impl ProjectWorkspace { Ok(ProjectWorkspace::DetachedFiles { files: detached_files, sysroot, rustc_cfg }) } + /// Runs the build scripts for this [`ProjectWorkspace`]. pub fn run_build_scripts( &self, config: &CargoConfig, @@ -301,9 +311,13 @@ impl ProjectWorkspace { ) -> Result { match self { ProjectWorkspace::Cargo { cargo, toolchain, .. } => { - WorkspaceBuildScripts::run(config, cargo, progress, toolchain).with_context(|| { - format!("Failed to run build scripts for {}", &cargo.workspace_root().display()) - }) + WorkspaceBuildScripts::run_for_workspace(config, cargo, progress, toolchain) + .with_context(|| { + format!( + "Failed to run build scripts for {}", + &cargo.workspace_root().display() + ) + }) } ProjectWorkspace::Json { .. } | ProjectWorkspace::DetachedFiles { .. } => { Ok(WorkspaceBuildScripts::default()) @@ -311,6 +325,49 @@ impl ProjectWorkspace { } } + /// Runs the build scripts for the given [`ProjectWorkspace`]s. Depending on the invocation + /// strategy this may run a single build process for all project workspaces. + pub fn run_all_build_scripts( + workspaces: &[ProjectWorkspace], + config: &CargoConfig, + progress: &dyn Fn(String), + ) -> Vec> { + if matches!(config.invocation_strategy, InvocationStrategy::PerWorkspace) + || config.run_build_script_command.is_none() + { + return workspaces.iter().map(|it| it.run_build_scripts(config, progress)).collect(); + } + + let cargo_ws: Vec<_> = workspaces + .iter() + .filter_map(|it| match it { + ProjectWorkspace::Cargo { cargo, .. } => Some(cargo), + _ => None, + }) + .collect(); + let ref mut outputs = match WorkspaceBuildScripts::run_once(config, &cargo_ws, progress) { + Ok(it) => Ok(it.into_iter()), + // io::Error is not Clone? + Err(e) => Err(Arc::new(e)), + }; + + workspaces + .iter() + .map(|it| match it { + ProjectWorkspace::Cargo { cargo, .. } => match outputs { + Ok(outputs) => Ok(outputs.next().unwrap()), + Err(e) => Err(e.clone()).with_context(|| { + format!( + "Failed to run build scripts for {}", + &cargo.workspace_root().display() + ) + }), + }, + _ => Ok(WorkspaceBuildScripts::default()), + }) + .collect() + } + pub fn set_build_scripts(&mut self, bs: WorkspaceBuildScripts) { match self { ProjectWorkspace::Cargo { build_scripts, .. 
} => *build_scripts = bs, diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cargo_target_spec.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cargo_target_spec.rs index e1675a030c0f6..6ede194babc20 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cargo_target_spec.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cargo_target_spec.rs @@ -118,7 +118,7 @@ impl CargoTargetSpec { global_state_snapshot: &GlobalStateSnapshot, file_id: FileId, ) -> Result> { - let crate_id = match &*global_state_snapshot.analysis.crate_for(file_id)? { + let crate_id = match &*global_state_snapshot.analysis.crates_for(file_id)? { &[crate_id, ..] => crate_id, _ => return Ok(None), }; diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs index 2c29b3ee3a6f7..8b77ccde0ee4a 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs @@ -8,8 +8,8 @@ use std::{ use crate::line_index::{LineEndings, LineIndex, OffsetEncoding}; use hir::Name; use ide::{ - LineCol, MonikerDescriptorKind, MonikerResult, StaticIndex, StaticIndexedFile, TextRange, - TokenId, + LineCol, MonikerDescriptorKind, StaticIndex, StaticIndexedFile, TextRange, TokenId, + TokenStaticData, }; use ide_db::LineIndexDatabase; use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace}; @@ -75,7 +75,7 @@ impl flags::Scip { let mut symbols_emitted: HashSet = HashSet::default(); let mut tokens_to_symbol: HashMap = HashMap::new(); - for file in si.files { + for StaticIndexedFile { file_id, tokens, .. } in si.files { let mut local_count = 0; let mut new_local_symbol = || { let new_symbol = scip::types::Symbol::new_local(local_count); @@ -84,7 +84,6 @@ impl flags::Scip { new_symbol }; - let StaticIndexedFile { file_id, tokens, .. } = file; let relative_path = match get_relative_filepath(&vfs, &rootpath, file_id) { Some(relative_path) => relative_path, None => continue, @@ -107,28 +106,20 @@ impl flags::Scip { let mut occurrence = scip_types::Occurrence::default(); occurrence.range = text_range_to_scip_range(&line_index, range); - occurrence.symbol = match tokens_to_symbol.get(&id) { - Some(symbol) => symbol.clone(), - None => { - let symbol = match &token.moniker { - Some(moniker) => moniker_to_symbol(&moniker), - None => new_local_symbol(), - }; - - let symbol = scip::symbol::format_symbol(symbol); - tokens_to_symbol.insert(id, symbol.clone()); - symbol - } - }; + occurrence.symbol = tokens_to_symbol + .entry(id) + .or_insert_with(|| { + let symbol = token_to_symbol(&token).unwrap_or_else(&mut new_local_symbol); + scip::symbol::format_symbol(symbol) + }) + .clone(); if let Some(def) = token.definition { if def.range == range { occurrence.symbol_roles |= scip_types::SymbolRole::Definition as i32; } - if !symbols_emitted.contains(&id) { - symbols_emitted.insert(id); - + if symbols_emitted.insert(id) { let mut symbol_info = scip_types::SymbolInformation::default(); symbol_info.symbol = occurrence.symbol.clone(); if let Some(hover) = &token.hover { @@ -207,9 +198,11 @@ fn new_descriptor(name: Name, suffix: scip_types::descriptor::Suffix) -> scip_ty /// /// Only returns a Symbol when it's a non-local symbol. 
/// So if the visibility isn't outside of a document, then it will return None -fn moniker_to_symbol(moniker: &MonikerResult) -> scip_types::Symbol { +fn token_to_symbol(token: &TokenStaticData) -> Option { use scip_types::descriptor::Suffix::*; + let moniker = token.moniker.as_ref()?; + let package_name = moniker.package_information.name.clone(); let version = moniker.package_information.version.clone(); let descriptors = moniker @@ -233,7 +226,7 @@ fn moniker_to_symbol(moniker: &MonikerResult) -> scip_types::Symbol { }) .collect(); - scip_types::Symbol { + Some(scip_types::Symbol { scheme: "rust-analyzer".into(), package: Some(scip_types::Package { manager: "cargo".to_string(), @@ -244,19 +237,15 @@ fn moniker_to_symbol(moniker: &MonikerResult) -> scip_types::Symbol { .into(), descriptors, ..Default::default() - } + }) } #[cfg(test)] mod test { use super::*; - use hir::Semantics; - use ide::{AnalysisHost, FilePosition}; - use ide_db::defs::IdentClass; - use ide_db::{base_db::fixture::ChangeFixture, helpers::pick_best_token}; + use ide::{AnalysisHost, FilePosition, StaticIndex, TextSize}; + use ide_db::base_db::fixture::ChangeFixture; use scip::symbol::format_symbol; - use syntax::SyntaxKind::*; - use syntax::{AstNode, T}; fn position(ra_fixture: &str) -> (AnalysisHost, FilePosition) { let mut host = AnalysisHost::default(); @@ -273,53 +262,33 @@ mod test { fn check_symbol(ra_fixture: &str, expected: &str) { let (host, position) = position(ra_fixture); + let analysis = host.analysis(); + let si = StaticIndex::compute(&analysis); + let FilePosition { file_id, offset } = position; - let db = host.raw_database(); - let sema = &Semantics::new(db); - let file = sema.parse(file_id).syntax().clone(); - let original_token = pick_best_token(file.token_at_offset(offset), |kind| match kind { - IDENT - | INT_NUMBER - | LIFETIME_IDENT - | T![self] - | T![super] - | T![crate] - | T![Self] - | COMMENT => 2, - kind if kind.is_trivia() => 0, - _ => 1, - }) - .expect("OK OK"); - - let navs = sema - .descend_into_macros(original_token.clone()) - .into_iter() - .filter_map(|token| { - IdentClass::classify_token(sema, &token).map(IdentClass::definitions).map(|it| { - it.into_iter().flat_map(|def| { - let module = def.module(db).unwrap(); - let current_crate = module.krate(); - - match MonikerResult::from_def(sema.db, def, current_crate) { - Some(moniker_result) => Some(moniker_to_symbol(&moniker_result)), - None => None, - } - }) - }) - }) - .flatten() - .collect::>(); + let mut found_symbol = None; + for file in &si.files { + if file.file_id != file_id { + continue; + } + for &(range, id) in &file.tokens { + if range.contains(offset - TextSize::from(1)) { + let token = si.tokens.get(id).unwrap(); + found_symbol = token_to_symbol(token); + break; + } + } + } if expected == "" { - assert_eq!(0, navs.len(), "must have no symbols {:?}", navs); + assert!(found_symbol.is_none(), "must have no symbols {:?}", found_symbol); return; } - assert_eq!(1, navs.len(), "must have one symbol {:?}", navs); - - let res = navs.get(0).unwrap(); - let formatted = format_symbol(res.clone()); + assert!(found_symbol.is_some(), "must have one symbol {:?}", found_symbol); + let res = found_symbol.unwrap(); + let formatted = format_symbol(res); assert_eq!(formatted, expected); } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs index 577a8640a4c00..85322f12a834c 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs +++ 
b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs @@ -69,6 +69,19 @@ config_data! { cargo_autoreload: bool = "true", /// Run build scripts (`build.rs`) for more precise code analysis. cargo_buildScripts_enable: bool = "true", + /// Specifies the working directory for running build scripts. + /// - "workspace": run build scripts for a workspace in the workspace's root directory. + /// This is incompatible with `#rust-analyzer.cargo.buildScripts.invocationStrategy#` set to `once`. + /// - "root": run build scripts in the project's root directory. + /// This config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#` + /// is set. + cargo_buildScripts_invocationLocation: InvocationLocation = "\"workspace\"", + /// Specifies the invocation strategy to use when running the build scripts command. + /// If `per_workspace` is set, the command will be executed for each workspace. + /// If `once` is set, the command will be executed once. + /// This config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#` + /// is set. + cargo_buildScripts_invocationStrategy: InvocationStrategy = "\"per_workspace\"", /// Override the command rust-analyzer uses to run build scripts and /// build procedural macros. The command is required to output json /// and should therefore include `--message-format=json` or a similar @@ -122,6 +135,20 @@ config_data! { /// /// Set to `"all"` to pass `--all-features` to Cargo. checkOnSave_features: Option = "null", + /// Specifies the working directory for running checks. + /// - "workspace": run checks for workspaces in the corresponding workspaces' root directories. + // FIXME: Ideally we would support this in some way + /// This falls back to "root" if `#rust-analyzer.checkOnSave.invocationStrategy#` is set to `once`. + /// - "root": run checks in the project's root directory. + /// This config only has an effect when `#rust-analyzer.checkOnSave.overrideCommand#` + /// is set. + checkOnSave_invocationLocation: InvocationLocation = "\"workspace\"", + /// Specifies the invocation strategy to use when running the checkOnSave command. + /// If `per_workspace` is set, the command will be executed for each workspace. + /// If `once` is set, the command will be executed once. + /// This config only has an effect when `#rust-analyzer.checkOnSave.overrideCommand#` + /// is set. + checkOnSave_invocationStrategy: InvocationStrategy = "\"per_workspace\"", /// Whether to pass `--no-default-features` to Cargo. Defaults to /// `#rust-analyzer.cargo.noDefaultFeatures#`.
checkOnSave_noDefaultFeatures: Option = "null", @@ -1056,6 +1083,16 @@ impl Config { rustc_source, unset_test_crates: UnsetTestCrates::Only(self.data.cargo_unsetTest.clone()), wrap_rustc_in_build_scripts: self.data.cargo_buildScripts_useRustcWrapper, + invocation_strategy: match self.data.cargo_buildScripts_invocationStrategy { + InvocationStrategy::Once => project_model::InvocationStrategy::Once, + InvocationStrategy::PerWorkspace => project_model::InvocationStrategy::PerWorkspace, + }, + invocation_location: match self.data.cargo_buildScripts_invocationLocation { + InvocationLocation::Root => { + project_model::InvocationLocation::Root(self.root_path.clone()) + } + InvocationLocation::Workspace => project_model::InvocationLocation::Workspace, + }, run_build_script_command: self.data.cargo_buildScripts_overrideCommand.clone(), extra_env: self.data.cargo_extraEnv.clone(), } @@ -1087,6 +1124,18 @@ impl Config { command, args, extra_env: self.check_on_save_extra_env(), + invocation_strategy: match self.data.checkOnSave_invocationStrategy { + InvocationStrategy::Once => flycheck::InvocationStrategy::Once, + InvocationStrategy::PerWorkspace => { + flycheck::InvocationStrategy::PerWorkspace + } + }, + invocation_location: match self.data.checkOnSave_invocationLocation { + InvocationLocation::Root => { + flycheck::InvocationLocation::Root(self.root_path.clone()) + } + InvocationLocation::Workspace => flycheck::InvocationLocation::Workspace, + }, } } Some(_) | None => FlycheckConfig::CargoCommand { @@ -1587,6 +1636,20 @@ enum CargoFeaturesDef { Selected(Vec), } +#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "snake_case")] +enum InvocationStrategy { + Once, + PerWorkspace, +} + +#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "snake_case")] +enum InvocationLocation { + Root, + Workspace, +} + #[derive(Deserialize, Debug, Clone)] #[serde(untagged)] enum LifetimeElisionDef { @@ -2001,6 +2064,22 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json "Render annotations above the whole item, including documentation comments and attributes." ], }, + "InvocationStrategy" => set! { + "type": "string", + "enum": ["per_workspace", "once"], + "enumDescriptions": [ + "The command will be executed for each workspace.", + "The command will be executed once." + ], + }, + "InvocationLocation" => set! { + "type": "string", + "enum": ["workspace", "root"], + "enumDescriptions": [ + "The command will be executed in the corresponding workspace root.", + "The command will be executed in the project root." 
+ ], + }, _ => panic!("missing entry for {}: {}", ty, default), } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/dispatch.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/dispatch.rs index f16559148e651..57899b5991466 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/dispatch.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/dispatch.rs @@ -52,7 +52,7 @@ impl<'a> RequestDispatcher<'a> { let _pctx = stdx::panic_context::enter(panic_context); f(self.global_state, params) }; - if let Ok(response) = result_to_response::(req.id.clone(), result) { + if let Ok(response) = result_to_response::(req.id, result) { self.global_state.respond(response); } @@ -80,7 +80,7 @@ impl<'a> RequestDispatcher<'a> { f(global_state_snapshot, params) }); - if let Ok(response) = thread_result_to_response::(req.id.clone(), result) { + if let Ok(response) = thread_result_to_response::(req.id, result) { self.global_state.respond(response); } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs index 000ff88e458f6..3fb06c31f7ca7 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs @@ -64,7 +64,7 @@ pub(crate) struct GlobalState { pub(crate) source_root_config: SourceRootConfig, pub(crate) proc_macro_clients: Vec>, - pub(crate) flycheck: Vec, + pub(crate) flycheck: Arc<[FlycheckHandle]>, pub(crate) flycheck_sender: Sender, pub(crate) flycheck_receiver: Receiver, @@ -117,6 +117,7 @@ pub(crate) struct GlobalStateSnapshot { vfs: Arc)>>, pub(crate) workspaces: Arc>, pub(crate) proc_macros_loaded: bool, + pub(crate) flycheck: Arc<[FlycheckHandle]>, } impl std::panic::UnwindSafe for GlobalStateSnapshot {} @@ -155,7 +156,7 @@ impl GlobalState { source_root_config: SourceRootConfig::default(), proc_macro_clients: vec![], - flycheck: Vec::new(), + flycheck: Arc::new([]), flycheck_sender, flycheck_receiver, @@ -295,6 +296,7 @@ impl GlobalState { mem_docs: self.mem_docs.clone(), semantic_tokens_cache: Arc::clone(&self.semantic_tokens_cache), proc_macros_loaded: !self.fetch_build_data_queue.last_op_result().0.is_empty(), + flycheck: self.flycheck.clone(), } } @@ -398,6 +400,10 @@ impl GlobalStateSnapshot { url_from_abs_path(path) } + pub(crate) fn file_id_to_file_path(&self, file_id: FileId) -> vfs::VfsPath { + self.vfs.read().0.file_path(file_id) + } + pub(crate) fn cargo_target_for_crate_root( &self, crate_id: CrateId, diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers.rs index 8c3ea77d06115..34795a8eb40ab 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers.rs @@ -658,7 +658,7 @@ pub(crate) fn handle_parent_module( // check if invoked at the crate root let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?; - let crate_id = match snap.analysis.crate_for(file_id)?.first() { + let crate_id = match snap.analysis.crates_for(file_id)?.first() { Some(&crate_id) => crate_id, None => return Ok(None), }; @@ -1782,7 +1782,15 @@ fn run_rustfmt( ) -> Result>> { let file_id = from_proto::file_id(snap, &text_document.uri)?; let file = snap.analysis.file_text(file_id)?; - let crate_ids = snap.analysis.crate_for(file_id)?; + + // find the edition of the package the file belongs to + // (if it belongs to multiple we'll just pick the first one and 
pray) + let edition = snap + .analysis + .relevant_crates_for(file_id)? + .into_iter() + .find_map(|crate_id| snap.cargo_target_for_crate_root(crate_id)) + .map(|(ws, target)| ws[ws[target].package].edition); let line_index = snap.file_line_index(file_id)?; @@ -1808,9 +1816,7 @@ fn run_rustfmt( ); } } - if let Some(&crate_id) = crate_ids.first() { - // Assume all crates are in the same edition - let edition = snap.analysis.crate_edition(crate_id)?; + if let Some(edition) = edition { cmd.arg("--edition"); cmd.arg(edition.to_string()); } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp_utils.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp_utils.rs index 5a37cbe2e334b..b3cea64d4175e 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp_utils.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp_utils.rs @@ -87,6 +87,7 @@ impl GlobalState { state: Progress, message: Option, fraction: Option, + cancel_token: Option, ) { if !self.config.work_done_progress() { return; @@ -95,7 +96,10 @@ impl GlobalState { assert!((0.0..=1.0).contains(&f)); (f * 100.0) as u32 }); - let token = lsp_types::ProgressToken::String(format!("rustAnalyzer/{}", title)); + let cancellable = Some(cancel_token.is_some()); + let token = lsp_types::ProgressToken::String( + cancel_token.unwrap_or_else(|| format!("rustAnalyzer/{}", title)), + ); let work_done_progress = match state { Progress::Begin => { self.send_request::( @@ -105,14 +109,14 @@ impl GlobalState { lsp_types::WorkDoneProgress::Begin(lsp_types::WorkDoneProgressBegin { title: title.into(), - cancellable: None, + cancellable, message, percentage, }) } Progress::Report => { lsp_types::WorkDoneProgress::Report(lsp_types::WorkDoneProgressReport { - cancellable: None, + cancellable, message, percentage, }) diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs index 15922dac651cc..2c928a580405c 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs @@ -10,7 +10,7 @@ use std::{ use always_assert::always; use crossbeam_channel::{select, Receiver}; use flycheck::FlycheckHandle; -use ide_db::base_db::{SourceDatabase, SourceDatabaseExt, VfsPath}; +use ide_db::base_db::{SourceDatabaseExt, VfsPath}; use itertools::Itertools; use lsp_server::{Connection, Notification, Request}; use lsp_types::notification::Notification as _; @@ -191,7 +191,7 @@ impl GlobalState { // NOTE: don't count blocking select! 
call as a loop-turn time let _p = profile::span("GlobalState::handle_event"); - tracing::debug!("handle_event({:?})", event); + tracing::debug!("{:?} handle_event({:?})", loop_start, event); let task_queue_len = self.task_pool.handle.len(); if task_queue_len > 0 { tracing::info!("task queue len: {}", task_queue_len); @@ -257,7 +257,7 @@ impl GlobalState { } }; - self.report_progress("Indexing", state, message, Some(fraction)); + self.report_progress("Indexing", state, message, Some(fraction), None); } } Event::Vfs(message) => { @@ -465,7 +465,7 @@ impl GlobalState { } }; - self.report_progress("Fetching", state, msg, None); + self.report_progress("Fetching", state, msg, None, None); } Task::FetchBuildData(progress) => { let (state, msg) = match progress { @@ -481,7 +481,7 @@ impl GlobalState { }; if let Some(state) = state { - self.report_progress("Loading", state, msg, None); + self.report_progress("Loading", state, msg, None, None); } } } @@ -518,6 +518,7 @@ impl GlobalState { state, Some(format!("{}/{}", n_done, n_total)), Some(Progress::fraction(n_done, n_total)), + None, ) } } @@ -542,7 +543,10 @@ impl GlobalState { diag.fix, ), Err(err) => { - tracing::error!("File with cargo diagnostic not found in VFS: {}", err); + tracing::error!( + "flycheck {id}: File with cargo diagnostic not found in VFS: {}", + err + ); } }; } @@ -584,7 +588,13 @@ impl GlobalState { } else { format!("cargo check (#{})", id + 1) }; - self.report_progress(&title, state, message, None); + self.report_progress( + &title, + state, + message, + None, + Some(format!("rust-analyzer/checkOnSave/{}", id)), + ); } } } @@ -698,7 +708,16 @@ impl GlobalState { this.cancel(id); Ok(()) })? - .on::(|_this, _params| { + .on::(|this, params| { + if let lsp_types::NumberOrString::String(s) = ¶ms.token { + if let Some(id) = s.strip_prefix("rust-analyzer/checkOnSave/") { + if let Ok(id) = u32::from_str_radix(id, 10) { + if let Some(flycheck) = this.flycheck.get(id as usize) { + flycheck.cancel(); + } + } + } + } // Just ignore this. It is OK to continue sending progress // notifications for this token, as the client can't know when // we accepted notification. @@ -711,7 +730,7 @@ impl GlobalState { .insert(path.clone(), DocumentData::new(params.text_document.version)) .is_err(); if already_exists { - tracing::error!("duplicate DidOpenTextDocument: {}", path) + tracing::error!("duplicate DidOpenTextDocument: {}", path); } this.vfs .write() @@ -758,69 +777,7 @@ impl GlobalState { Ok(()) })? .on::(|this, params| { - let mut updated = false; if let Ok(vfs_path) = from_proto::vfs_path(¶ms.text_document.uri) { - let (vfs, _) = &*this.vfs.read(); - - // Trigger flychecks for all workspaces that depend on the saved file - if let Some(file_id) = vfs.file_id(&vfs_path) { - let analysis = this.analysis_host.analysis(); - // Crates containing or depending on the saved file - let crate_ids: Vec<_> = analysis - .crate_for(file_id)? 
- .into_iter() - .flat_map(|id| { - this.analysis_host - .raw_database() - .crate_graph() - .transitive_rev_deps(id) - }) - .sorted() - .unique() - .collect(); - - let crate_root_paths: Vec<_> = crate_ids - .iter() - .filter_map(|&crate_id| { - analysis - .crate_root(crate_id) - .map(|file_id| { - vfs.file_path(file_id).as_path().map(ToOwned::to_owned) - }) - .transpose() - }) - .collect::>()?; - let crate_root_paths: Vec<_> = - crate_root_paths.iter().map(Deref::deref).collect(); - - // Find all workspaces that have at least one target containing the saved file - let workspace_ids = - this.workspaces.iter().enumerate().filter(|(_, ws)| match ws { - project_model::ProjectWorkspace::Cargo { cargo, .. } => { - cargo.packages().any(|pkg| { - cargo[pkg].targets.iter().any(|&it| { - crate_root_paths.contains(&cargo[it].root.as_path()) - }) - }) - } - project_model::ProjectWorkspace::Json { project, .. } => project - .crates() - .any(|(c, _)| crate_ids.iter().any(|&crate_id| crate_id == c)), - project_model::ProjectWorkspace::DetachedFiles { .. } => false, - }); - - // Find and trigger corresponding flychecks - for flycheck in &this.flycheck { - for (id, _) in workspace_ids.clone() { - if id == flycheck.id() { - updated = true; - flycheck.restart(); - continue; - } - } - } - } - // Re-fetch workspaces if a workspace related file has changed if let Some(abs_path) = vfs_path.as_path() { if reload::should_refresh_for_change(&abs_path, ChangeKind::Modify) { @@ -828,13 +785,90 @@ impl GlobalState { .request_op(format!("DidSaveTextDocument {}", abs_path.display())); } } + + let file_id = this.vfs.read().0.file_id(&vfs_path); + if let Some(file_id) = file_id { + let world = this.snapshot(); + let mut updated = false; + let task = move || -> std::result::Result<(), ide::Cancelled> { + // Trigger flychecks for all workspaces that depend on the saved file + // Crates containing or depending on the saved file + let crate_ids: Vec<_> = world + .analysis + .crates_for(file_id)? + .into_iter() + .flat_map(|id| world.analysis.transitive_rev_deps(id)) + .flatten() + .sorted() + .unique() + .collect(); + + let crate_root_paths: Vec<_> = crate_ids + .iter() + .filter_map(|&crate_id| { + world + .analysis + .crate_root(crate_id) + .map(|file_id| { + world + .file_id_to_file_path(file_id) + .as_path() + .map(ToOwned::to_owned) + }) + .transpose() + }) + .collect::>()?; + let crate_root_paths: Vec<_> = + crate_root_paths.iter().map(Deref::deref).collect(); + + // Find all workspaces that have at least one target containing the saved file + let workspace_ids = + world.workspaces.iter().enumerate().filter(|(_, ws)| match ws { + project_model::ProjectWorkspace::Cargo { cargo, .. } => { + cargo.packages().any(|pkg| { + cargo[pkg].targets.iter().any(|&it| { + crate_root_paths.contains(&cargo[it].root.as_path()) + }) + }) + } + project_model::ProjectWorkspace::Json { project, .. } => { + project.crates().any(|(c, _)| { + crate_ids.iter().any(|&crate_id| crate_id == c) + }) + } + project_model::ProjectWorkspace::DetachedFiles { .. } => false, + }); + + // Find and trigger corresponding flychecks + for flycheck in world.flycheck.iter() { + for (id, _) in workspace_ids.clone() { + if id == flycheck.id() { + updated = true; + flycheck.restart(); + continue; + } + } + } + // No specific flycheck was triggered, so let's trigger all of them. 
+ if !updated { + for flycheck in world.flycheck.iter() { + flycheck.restart(); + } + } + Ok(()) + }; + this.task_pool.handle.spawn_with_sender(move |_| { + if let Err(e) = std::panic::catch_unwind(task) { + tracing::error!("DidSaveTextDocument flycheck task panicked: {e:?}") + } + }); + return Ok(()); + } } // No specific flycheck was triggered, so let's trigger all of them. - if !updated { - for flycheck in &this.flycheck { - flycheck.restart(); - } + for flycheck in this.flycheck.iter() { + flycheck.restart(); } Ok(()) })? diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs index f873489394462..e1f651786dee4 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs @@ -175,10 +175,8 @@ impl GlobalState { sender.send(Task::FetchBuildData(BuildDataProgress::Report(msg))).unwrap() } }; - let mut res = Vec::new(); - for ws in workspaces.iter() { - res.push(ws.run_build_scripts(&config, &progress)); - } + let res = ProjectWorkspace::run_all_build_scripts(&workspaces, &config, &progress); + sender.send(Task::FetchBuildData(BuildDataProgress::End((workspaces, res)))).unwrap(); }); } @@ -468,39 +466,54 @@ impl GlobalState { let config = match self.config.flycheck() { Some(it) => it, None => { - self.flycheck = Vec::new(); + self.flycheck = Arc::new([]); self.diagnostics.clear_check_all(); return; } }; let sender = self.flycheck_sender.clone(); - self.flycheck = self - .workspaces - .iter() - .enumerate() - .filter_map(|(id, w)| match w { - ProjectWorkspace::Cargo { cargo, .. } => Some((id, cargo.workspace_root())), - ProjectWorkspace::Json { project, .. } => { - // Enable flychecks for json projects if a custom flycheck command was supplied - // in the workspace configuration. - match config { - FlycheckConfig::CustomCommand { .. } => Some((id, project.path())), - _ => None, - } - } - ProjectWorkspace::DetachedFiles { .. } => None, - }) - .map(|(id, root)| { - let sender = sender.clone(); - FlycheckHandle::spawn( - id, - Box::new(move |msg| sender.send(msg).unwrap()), - config.clone(), - root.to_path_buf(), - ) - }) - .collect(); + let invocation_strategy = match config { + FlycheckConfig::CargoCommand { .. } => flycheck::InvocationStrategy::PerWorkspace, + FlycheckConfig::CustomCommand { invocation_strategy, .. } => invocation_strategy, + }; + + self.flycheck = match invocation_strategy { + flycheck::InvocationStrategy::Once => vec![FlycheckHandle::spawn( + 0, + Box::new(move |msg| sender.send(msg).unwrap()), + config.clone(), + self.config.root_path().clone(), + )], + flycheck::InvocationStrategy::PerWorkspace => { + self.workspaces + .iter() + .enumerate() + .filter_map(|(id, w)| match w { + ProjectWorkspace::Cargo { cargo, .. } => Some((id, cargo.workspace_root())), + ProjectWorkspace::Json { project, .. } => { + // Enable flychecks for json projects if a custom flycheck command was supplied + // in the workspace configuration. + match config { + FlycheckConfig::CustomCommand { .. } => Some((id, project.path())), + _ => None, + } + } + ProjectWorkspace::DetachedFiles { .. 
} => None, + }) + .map(|(id, root)| { + let sender = sender.clone(); + FlycheckHandle::spawn( + id, + Box::new(move |msg| sender.send(msg).unwrap()), + config.clone(), + root.to_path_buf(), + ) + }) + .collect() + } + } + .into(); } } diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs index 229e7419b736f..660c057e99c56 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs @@ -645,7 +645,7 @@ impl ast::RecordPatFieldList { } fn get_or_insert_comma_after(syntax: &SyntaxNode) -> SyntaxToken { - let comma = match syntax + match syntax .siblings_with_tokens(Direction::Next) .filter_map(|it| it.into_token()) .find(|it| it.kind() == T![,]) @@ -656,8 +656,7 @@ fn get_or_insert_comma_after(syntax: &SyntaxNode) -> SyntaxToken { ted::insert(Position::after(syntax), &comma); comma } - }; - comma + } } impl ast::StmtList { diff --git a/src/tools/rust-analyzer/crates/syntax/src/lib.rs b/src/tools/rust-analyzer/crates/syntax/src/lib.rs index 4f5e273a520a1..84c66b27e69fa 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/lib.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/lib.rs @@ -92,7 +92,7 @@ impl Parse { SyntaxNode::new_root(self.green.clone()) } pub fn errors(&self) -> &[SyntaxError] { - &*self.errors + &self.errors } } diff --git a/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs b/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs index 59b1c147d7f17..69d2e62b25673 100644 --- a/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs +++ b/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs @@ -27,7 +27,6 @@ //! generator: pin //! hash: //! index: sized -//! infallible: //! iterator: option //! iterators: iterator, fn //! option: @@ -37,7 +36,7 @@ //! result: //! sized: //! slice: -//! try: infallible +//! try: //! 
unsize: sized pub mod marker { @@ -151,9 +150,6 @@ pub mod convert { fn as_ref(&self) -> &T; } // endregion:as_ref - // region:infallible - pub enum Infallible {} - // endregion:infallible } pub mod ops { @@ -330,7 +326,7 @@ pub mod ops { Continue(C), Break(B), } - pub trait FromResidual::Residual> { + pub trait FromResidual { #[lang = "from_residual"] fn from_residual(residual: R) -> Self; } @@ -346,13 +342,13 @@ pub mod ops { impl Try for ControlFlow { type Output = C; - type Residual = ControlFlow; + type Residual = ControlFlow; fn from_output(output: Self::Output) -> Self {} fn branch(self) -> ControlFlow {} } impl FromResidual for ControlFlow { - fn from_residual(residual: ControlFlow) -> Self {} + fn from_residual(residual: ControlFlow) -> Self {} } } pub use self::try_::{ControlFlow, FromResidual, Try}; @@ -473,33 +469,6 @@ pub mod option { } } } - // region:try - impl crate::ops::Try for Option { - type Output = T; - type Residual = Option; - - #[inline] - fn from_output(output: Self::Output) -> Self { - Some(output) - } - - #[inline] - fn branch(self) -> crate::ops::ControlFlow { - match self { - Some(v) => crate::ops::ControlFlow::Continue(v), - None => crate::ops::ControlFlow::Break(None), - } - } - } - impl crate::ops::FromResidual for Option { - #[inline] - fn from_residual(residual: Option) -> Self { - match residual { - None => None, - } - } - } - // endregion:try } // endregion:option diff --git a/src/tools/rust-analyzer/docs/user/generated_config.adoc b/src/tools/rust-analyzer/docs/user/generated_config.adoc index acf0aaea859a2..502833de72c16 100644 --- a/src/tools/rust-analyzer/docs/user/generated_config.adoc +++ b/src/tools/rust-analyzer/docs/user/generated_config.adoc @@ -24,6 +24,25 @@ Automatically refresh project info via `cargo metadata` on -- Run build scripts (`build.rs`) for more precise code analysis. -- +[[rust-analyzer.cargo.buildScripts.invocationLocation]]rust-analyzer.cargo.buildScripts.invocationLocation (default: `"workspace"`):: ++ +-- +Specifies the working directory for running build scripts. +- "workspace": run build scripts for a workspace in the workspace's root directory. + This is incompatible with `#rust-analyzer.cargo.buildScripts.invocationStrategy#` set to `once`. +- "root": run build scripts in the project's root directory. +This config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#` +is set. +-- +[[rust-analyzer.cargo.buildScripts.invocationStrategy]]rust-analyzer.cargo.buildScripts.invocationStrategy (default: `"per_workspace"`):: ++ +-- +Specifies the invocation strategy to use when running the build scripts command. +If `per_workspace` is set, the command will be executed for each workspace. +If `once` is set, the command will be executed once. +This config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#` +is set. +-- [[rust-analyzer.cargo.buildScripts.overrideCommand]]rust-analyzer.cargo.buildScripts.overrideCommand (default: `null`):: + -- @@ -118,6 +137,25 @@ List of features to activate. Defaults to Set to `"all"` to pass `--all-features` to Cargo. -- +[[rust-analyzer.checkOnSave.invocationLocation]]rust-analyzer.checkOnSave.invocationLocation (default: `"workspace"`):: ++ +-- +Specifies the working directory for running checks. +- "workspace": run checks for workspaces in the corresponding workspaces' root directories. + This falls back to "root" if `#rust-analyzer.cargo.checkOnSave.invocationStrategy#` is set to `once`. +- "root": run checks in the project's root directory. 
+This config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#` +is set. +-- +[[rust-analyzer.checkOnSave.invocationStrategy]]rust-analyzer.checkOnSave.invocationStrategy (default: `"per_workspace"`):: ++ +-- +Specifies the invocation strategy to use when running the checkOnSave command. +If `per_workspace` is set, the command will be executed for each workspace. +If `once` is set, the command will be executed once. +This config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#` +is set. +-- [[rust-analyzer.checkOnSave.noDefaultFeatures]]rust-analyzer.checkOnSave.noDefaultFeatures (default: `null`):: + -- diff --git a/src/tools/rust-analyzer/editors/code/package-lock.json b/src/tools/rust-analyzer/editors/code/package-lock.json index 3ff4b6897a16b..a72865d4fe44e 100644 --- a/src/tools/rust-analyzer/editors/code/package-lock.json +++ b/src/tools/rust-analyzer/editors/code/package-lock.json @@ -11,7 +11,7 @@ "dependencies": { "d3": "^7.6.1", "d3-graphviz": "^4.1.1", - "vscode-languageclient": "^8.0.0-next.14" + "vscode-languageclient": "^8.0.2" }, "devDependencies": { "@types/node": "~16.11.7", @@ -3791,39 +3791,39 @@ } }, "node_modules/vscode-jsonrpc": { - "version": "8.0.0-next.7", - "resolved": "https://registry.npmjs.org/vscode-jsonrpc/-/vscode-jsonrpc-8.0.0-next.7.tgz", - "integrity": "sha512-JX/F31LEsims0dAlOTKFE4E+AJMiJvdRSRViifFJSqSN7EzeYyWlfuDchF7g91oRNPZOIWfibTkDf3/UMsQGzQ==", + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/vscode-jsonrpc/-/vscode-jsonrpc-8.0.2.tgz", + "integrity": "sha512-RY7HwI/ydoC1Wwg4gJ3y6LpU9FJRZAUnTYMXthqhFXXu77ErDd/xkREpGuk4MyYkk4a+XDWAMqe0S3KkelYQEQ==", "engines": { "node": ">=14.0.0" } }, "node_modules/vscode-languageclient": { - "version": "8.0.0-next.14", - "resolved": "https://registry.npmjs.org/vscode-languageclient/-/vscode-languageclient-8.0.0-next.14.tgz", - "integrity": "sha512-NqjkOuDTMu8uo+PhoMsV72VO9Gd3wBi/ZpOrkRUOrWKQo7yUdiIw183g8wjH8BImgbK9ZP51HM7TI0ZhCnI1Mw==", + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/vscode-languageclient/-/vscode-languageclient-8.0.2.tgz", + "integrity": "sha512-lHlthJtphG9gibGb/y72CKqQUxwPsMXijJVpHEC2bvbFqxmkj9LwQ3aGU9dwjBLqsX1S4KjShYppLvg1UJDF/Q==", "dependencies": { "minimatch": "^3.0.4", "semver": "^7.3.5", - "vscode-languageserver-protocol": "3.17.0-next.16" + "vscode-languageserver-protocol": "3.17.2" }, "engines": { - "vscode": "^1.66.0" + "vscode": "^1.67.0" } }, "node_modules/vscode-languageserver-protocol": { - "version": "3.17.0-next.16", - "resolved": "https://registry.npmjs.org/vscode-languageserver-protocol/-/vscode-languageserver-protocol-3.17.0-next.16.tgz", - "integrity": "sha512-tx4DnXw9u3N7vw+bx6n2NKp6FoxoNwiP/biH83AS30I2AnTGyLd7afSeH6Oewn2E8jvB7K15bs12sMppkKOVeQ==", + "version": "3.17.2", + "resolved": "https://registry.npmjs.org/vscode-languageserver-protocol/-/vscode-languageserver-protocol-3.17.2.tgz", + "integrity": "sha512-8kYisQ3z/SQ2kyjlNeQxbkkTNmVFoQCqkmGrzLH6A9ecPlgTbp3wDTnUNqaUxYr4vlAcloxx8zwy7G5WdguYNg==", "dependencies": { - "vscode-jsonrpc": "8.0.0-next.7", - "vscode-languageserver-types": "3.17.0-next.9" + "vscode-jsonrpc": "8.0.2", + "vscode-languageserver-types": "3.17.2" } }, "node_modules/vscode-languageserver-types": { - "version": "3.17.0-next.9", - "resolved": "https://registry.npmjs.org/vscode-languageserver-types/-/vscode-languageserver-types-3.17.0-next.9.tgz", - "integrity": "sha512-9/PeDNPYduaoXRUzYpqmu4ZV9L01HGo0wH9FUt+sSHR7IXwA7xoXBfNUlv8gB9H0D2WwEmMomSy1NmhjKQyn3A==" + "version": 
"3.17.2", + "resolved": "https://registry.npmjs.org/vscode-languageserver-types/-/vscode-languageserver-types-3.17.2.tgz", + "integrity": "sha512-zHhCWatviizPIq9B7Vh9uvrH6x3sK8itC84HkamnBWoDFJtzBf7SWlpLCZUit72b3os45h6RWQNC9xHRDF8dRA==" }, "node_modules/which": { "version": "2.0.2", @@ -6634,33 +6634,33 @@ } }, "vscode-jsonrpc": { - "version": "8.0.0-next.7", - "resolved": "https://registry.npmjs.org/vscode-jsonrpc/-/vscode-jsonrpc-8.0.0-next.7.tgz", - "integrity": "sha512-JX/F31LEsims0dAlOTKFE4E+AJMiJvdRSRViifFJSqSN7EzeYyWlfuDchF7g91oRNPZOIWfibTkDf3/UMsQGzQ==" + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/vscode-jsonrpc/-/vscode-jsonrpc-8.0.2.tgz", + "integrity": "sha512-RY7HwI/ydoC1Wwg4gJ3y6LpU9FJRZAUnTYMXthqhFXXu77ErDd/xkREpGuk4MyYkk4a+XDWAMqe0S3KkelYQEQ==" }, "vscode-languageclient": { - "version": "8.0.0-next.14", - "resolved": "https://registry.npmjs.org/vscode-languageclient/-/vscode-languageclient-8.0.0-next.14.tgz", - "integrity": "sha512-NqjkOuDTMu8uo+PhoMsV72VO9Gd3wBi/ZpOrkRUOrWKQo7yUdiIw183g8wjH8BImgbK9ZP51HM7TI0ZhCnI1Mw==", + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/vscode-languageclient/-/vscode-languageclient-8.0.2.tgz", + "integrity": "sha512-lHlthJtphG9gibGb/y72CKqQUxwPsMXijJVpHEC2bvbFqxmkj9LwQ3aGU9dwjBLqsX1S4KjShYppLvg1UJDF/Q==", "requires": { "minimatch": "^3.0.4", "semver": "^7.3.5", - "vscode-languageserver-protocol": "3.17.0-next.16" + "vscode-languageserver-protocol": "3.17.2" } }, "vscode-languageserver-protocol": { - "version": "3.17.0-next.16", - "resolved": "https://registry.npmjs.org/vscode-languageserver-protocol/-/vscode-languageserver-protocol-3.17.0-next.16.tgz", - "integrity": "sha512-tx4DnXw9u3N7vw+bx6n2NKp6FoxoNwiP/biH83AS30I2AnTGyLd7afSeH6Oewn2E8jvB7K15bs12sMppkKOVeQ==", + "version": "3.17.2", + "resolved": "https://registry.npmjs.org/vscode-languageserver-protocol/-/vscode-languageserver-protocol-3.17.2.tgz", + "integrity": "sha512-8kYisQ3z/SQ2kyjlNeQxbkkTNmVFoQCqkmGrzLH6A9ecPlgTbp3wDTnUNqaUxYr4vlAcloxx8zwy7G5WdguYNg==", "requires": { - "vscode-jsonrpc": "8.0.0-next.7", - "vscode-languageserver-types": "3.17.0-next.9" + "vscode-jsonrpc": "8.0.2", + "vscode-languageserver-types": "3.17.2" } }, "vscode-languageserver-types": { - "version": "3.17.0-next.9", - "resolved": "https://registry.npmjs.org/vscode-languageserver-types/-/vscode-languageserver-types-3.17.0-next.9.tgz", - "integrity": "sha512-9/PeDNPYduaoXRUzYpqmu4ZV9L01HGo0wH9FUt+sSHR7IXwA7xoXBfNUlv8gB9H0D2WwEmMomSy1NmhjKQyn3A==" + "version": "3.17.2", + "resolved": "https://registry.npmjs.org/vscode-languageserver-types/-/vscode-languageserver-types-3.17.2.tgz", + "integrity": "sha512-zHhCWatviizPIq9B7Vh9uvrH6x3sK8itC84HkamnBWoDFJtzBf7SWlpLCZUit72b3os45h6RWQNC9xHRDF8dRA==" }, "which": { "version": "2.0.2", diff --git a/src/tools/rust-analyzer/editors/code/package.json b/src/tools/rust-analyzer/editors/code/package.json index f1dd3aa79ff04..6771cad28a792 100644 --- a/src/tools/rust-analyzer/editors/code/package.json +++ b/src/tools/rust-analyzer/editors/code/package.json @@ -37,7 +37,7 @@ "dependencies": { "d3": "^7.6.1", "d3-graphviz": "^4.1.1", - "vscode-languageclient": "^8.0.0-next.14" + "vscode-languageclient": "^8.0.2" }, "devDependencies": { "@types/node": "~16.11.7", @@ -60,6 +60,7 @@ "onCommand:rust-analyzer.analyzerStatus", "onCommand:rust-analyzer.memoryUsage", "onCommand:rust-analyzer.reloadWorkspace", + "onCommand:rust-analyzer.startServer", "workspaceContains:*/Cargo.toml", "workspaceContains:*/rust-project.json" ], @@ -191,6 +192,16 @@ "title": 
"Restart server", "category": "rust-analyzer" }, + { + "command": "rust-analyzer.startServer", + "title": "Start server", + "category": "rust-analyzer" + }, + { + "command": "rust-analyzer.stopServer", + "title": "Stop server", + "category": "rust-analyzer" + }, { "command": "rust-analyzer.onEnter", "title": "Enhanced enter key", @@ -421,6 +432,32 @@ "default": true, "type": "boolean" }, + "rust-analyzer.cargo.buildScripts.invocationLocation": { + "markdownDescription": "Specifies the working directory for running build scripts.\n- \"workspace\": run build scripts for a workspace in the workspace's root directory.\n This is incompatible with `#rust-analyzer.cargo.buildScripts.invocationStrategy#` set to `once`.\n- \"root\": run build scripts in the project's root directory.\nThis config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#`\nis set.", + "default": "workspace", + "type": "string", + "enum": [ + "workspace", + "root" + ], + "enumDescriptions": [ + "The command will be executed in the corresponding workspace root.", + "The command will be executed in the project root." + ] + }, + "rust-analyzer.cargo.buildScripts.invocationStrategy": { + "markdownDescription": "Specifies the invocation strategy to use when running the build scripts command.\nIf `per_workspace` is set, the command will be executed for each workspace.\nIf `once` is set, the command will be executed once.\nThis config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#`\nis set.", + "default": "per_workspace", + "type": "string", + "enum": [ + "per_workspace", + "once" + ], + "enumDescriptions": [ + "The command will be executed for each workspace.", + "The command will be executed once." + ] + }, "rust-analyzer.cargo.buildScripts.overrideCommand": { "markdownDescription": "Override the command rust-analyzer uses to run build scripts and\nbuild procedural macros. The command is required to output json\nand should therefore include `--message-format=json` or a similar\noption.\n\nBy default, a cargo invocation will be constructed for the configured\ntargets and features, with the following base command line:\n\n```bash\ncargo check --quiet --workspace --message-format=json --all-targets\n```\n.", "default": null, @@ -546,6 +583,32 @@ } ] }, + "rust-analyzer.checkOnSave.invocationLocation": { + "markdownDescription": "Specifies the working directory for running checks.\n- \"workspace\": run checks for workspaces in the corresponding workspaces' root directories.\n This falls back to \"root\" if `#rust-analyzer.cargo.checkOnSave.invocationStrategy#` is set to `once`.\n- \"root\": run checks in the project's root directory.\nThis config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#`\nis set.", + "default": "workspace", + "type": "string", + "enum": [ + "workspace", + "root" + ], + "enumDescriptions": [ + "The command will be executed in the corresponding workspace root.", + "The command will be executed in the project root." 
+ ] + }, + "rust-analyzer.checkOnSave.invocationStrategy": { + "markdownDescription": "Specifies the invocation strategy to use when running the checkOnSave command.\nIf `per_workspace` is set, the command will be executed for each workspace.\nIf `once` is set, the command will be executed once.\nThis config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#`\nis set.", + "default": "per_workspace", + "type": "string", + "enum": [ + "per_workspace", + "once" + ], + "enumDescriptions": [ + "The command will be executed for each workspace.", + "The command will be executed once." + ] + }, "rust-analyzer.checkOnSave.noDefaultFeatures": { "markdownDescription": "Whether to pass `--no-default-features` to Cargo. Defaults to\n`#rust-analyzer.cargo.noDefaultFeatures#`.", "default": null, diff --git a/src/tools/rust-analyzer/editors/code/src/ast_inspector.ts b/src/tools/rust-analyzer/editors/code/src/ast_inspector.ts index e57fb20e2cf8b..176040120f410 100644 --- a/src/tools/rust-analyzer/editors/code/src/ast_inspector.ts +++ b/src/tools/rust-analyzer/editors/code/src/ast_inspector.ts @@ -35,8 +35,10 @@ export class AstInspector implements vscode.HoverProvider, vscode.DefinitionProv }); constructor(ctx: Ctx) { - ctx.pushCleanup(vscode.languages.registerHoverProvider({ scheme: "rust-analyzer" }, this)); - ctx.pushCleanup(vscode.languages.registerDefinitionProvider({ language: "rust" }, this)); + ctx.pushExtCleanup( + vscode.languages.registerHoverProvider({ scheme: "rust-analyzer" }, this) + ); + ctx.pushExtCleanup(vscode.languages.registerDefinitionProvider({ language: "rust" }, this)); vscode.workspace.onDidCloseTextDocument( this.onDidCloseTextDocument, this, @@ -52,8 +54,6 @@ export class AstInspector implements vscode.HoverProvider, vscode.DefinitionProv this, ctx.subscriptions ); - - ctx.pushCleanup(this); } dispose() { this.setRustEditor(undefined); diff --git a/src/tools/rust-analyzer/editors/code/src/bootstrap.ts b/src/tools/rust-analyzer/editors/code/src/bootstrap.ts new file mode 100644 index 0000000000000..374c3b8144c38 --- /dev/null +++ b/src/tools/rust-analyzer/editors/code/src/bootstrap.ts @@ -0,0 +1,148 @@ +import * as vscode from "vscode"; +import * as os from "os"; +import { Config } from "./config"; +import { log, isValidExecutable } from "./util"; +import { PersistentState } from "./persistent_state"; +import { exec } from "child_process"; + +export async function bootstrap( + context: vscode.ExtensionContext, + config: Config, + state: PersistentState +): Promise { + const path = await getServer(context, config, state); + if (!path) { + throw new Error( + "Rust Analyzer Language Server is not available. " + + "Please, ensure its [proper installation](https://rust-analyzer.github.io/manual.html#installation)." + ); + } + + log.info("Using server binary at", path); + + if (!isValidExecutable(path)) { + if (config.serverPath) { + throw new Error(`Failed to execute ${path} --version. \`config.server.path\` or \`config.serverPath\` has been set explicitly.\ + Consider removing this config or making a valid server binary available at that path.`); + } else { + throw new Error(`Failed to execute ${path} --version`); + } + } + + return path; +} + +async function patchelf(dest: vscode.Uri): Promise { + await vscode.window.withProgress( + { + location: vscode.ProgressLocation.Notification, + title: "Patching rust-analyzer for NixOS", + }, + async (progress, _) => { + const expression = ` + {srcStr, pkgs ? 
import <nixpkgs> {}}: + pkgs.stdenv.mkDerivation { + name = "rust-analyzer"; + src = /. + srcStr; + phases = [ "installPhase" "fixupPhase" ]; + installPhase = "cp $src $out"; + fixupPhase = '' + chmod 755 $out + patchelf --set-interpreter "$(cat $NIX_CC/nix-support/dynamic-linker)" $out + ''; + } + `; + const origFile = vscode.Uri.file(dest.fsPath + "-orig"); + await vscode.workspace.fs.rename(dest, origFile, { overwrite: true }); + try { + progress.report({ message: "Patching executable", increment: 20 }); + await new Promise((resolve, reject) => { + const handle = exec( + `nix-build -E - --argstr srcStr '${origFile.fsPath}' -o '${dest.fsPath}'`, + (err, stdout, stderr) => { + if (err != null) { + reject(Error(stderr)); + } else { + resolve(stdout); + } + } + ); + handle.stdin?.write(expression); + handle.stdin?.end(); + }); + } finally { + await vscode.workspace.fs.delete(origFile); + } + } + ); +} + +async function getServer( + context: vscode.ExtensionContext, + config: Config, + state: PersistentState +): Promise { + const explicitPath = serverPath(config); + if (explicitPath) { + if (explicitPath.startsWith("~/")) { + return os.homedir() + explicitPath.slice("~".length); + } + return explicitPath; + } + if (config.package.releaseTag === null) return "rust-analyzer"; + + const ext = process.platform === "win32" ? ".exe" : ""; + const bundled = vscode.Uri.joinPath(context.extensionUri, "server", `rust-analyzer${ext}`); + const bundledExists = await vscode.workspace.fs.stat(bundled).then( + () => true, + () => false + ); + if (bundledExists) { + let server = bundled; + if (await isNixOs()) { + await vscode.workspace.fs.createDirectory(config.globalStorageUri).then(); + const dest = vscode.Uri.joinPath(config.globalStorageUri, `rust-analyzer${ext}`); + let exists = await vscode.workspace.fs.stat(dest).then( + () => true, + () => false + ); + if (exists && config.package.version !== state.serverVersion) { + await vscode.workspace.fs.delete(dest); + exists = false; + } + if (!exists) { + await vscode.workspace.fs.copy(bundled, dest); + await patchelf(dest); + } + server = dest; + } + await state.updateServerVersion(config.package.version); + return server.fsPath; + } + + await state.updateServerVersion(undefined); + await vscode.window.showErrorMessage( + "Unfortunately we don't ship binaries for your platform yet. " + + "You need to manually clone the rust-analyzer repository and " + + "run `cargo xtask install --server` to build the language server from sources. " + + "If you feel that your platform should be supported, please create an issue " + + "about that [here](https://github.com/rust-lang/rust-analyzer/issues) and we " + + "will consider it." + ); + return undefined; +} +function serverPath(config: Config): string | null { + return process.env.__RA_LSP_SERVER_DEBUG ??
config.serverPath; +} + +async function isNixOs(): Promise { + try { + const contents = ( + await vscode.workspace.fs.readFile(vscode.Uri.file("/etc/os-release")) + ).toString(); + const idString = contents.split("\n").find((a) => a.startsWith("ID=")) || "ID=linux"; + return idString.indexOf("nixos") !== -1; + } catch { + return false; + } +} diff --git a/src/tools/rust-analyzer/editors/code/src/client.ts b/src/tools/rust-analyzer/editors/code/src/client.ts index 05d4d08f70b62..fb667619c86be 100644 --- a/src/tools/rust-analyzer/editors/code/src/client.ts +++ b/src/tools/rust-analyzer/editors/code/src/client.ts @@ -4,9 +4,7 @@ import * as ra from "../src/lsp_ext"; import * as Is from "vscode-languageclient/lib/common/utils/is"; import { assert } from "./util"; import { WorkspaceEdit } from "vscode"; -import { Workspace } from "./ctx"; -import { substituteVariablesInEnv } from "./config"; -import { outputChannel, traceOutputChannel } from "./main"; +import { substituteVSCodeVariables } from "./config"; import { randomUUID } from "crypto"; export interface Env { @@ -65,40 +63,42 @@ function renderHoverActions(actions: ra.CommandLinkGroup[]): vscode.MarkdownStri } export async function createClient( - serverPath: string, - workspace: Workspace, - extraEnv: Env + traceOutputChannel: vscode.OutputChannel, + outputChannel: vscode.OutputChannel, + initializationOptions: vscode.WorkspaceConfiguration, + serverOptions: lc.ServerOptions ): Promise { - // '.' Is the fallback if no folder is open - // TODO?: Workspace folders support Uri's (eg: file://test.txt). - // It might be a good idea to test if the uri points to a file. - - const newEnv = substituteVariablesInEnv(Object.assign({}, process.env, extraEnv)); - const run: lc.Executable = { - command: serverPath, - options: { env: newEnv }, - }; - const serverOptions: lc.ServerOptions = { - run, - debug: run, - }; - - let initializationOptions = vscode.workspace.getConfiguration("rust-analyzer"); - - if (workspace.kind === "Detached Files") { - initializationOptions = { - detachedFiles: workspace.files.map((file) => file.uri.fsPath), - ...initializationOptions, - }; - } - const clientOptions: lc.LanguageClientOptions = { documentSelector: [{ scheme: "file", language: "rust" }], initializationOptions, diagnosticCollectionName: "rustc", - traceOutputChannel: traceOutputChannel(), - outputChannel: outputChannel(), + traceOutputChannel, + outputChannel, middleware: { + workspace: { + // HACK: This is a workaround, when the client has been disposed, VSCode + // continues to emit events to the client and the default one for this event + // attempt to restart the client for no reason + async didChangeWatchedFile(event, next) { + if (client.isRunning()) { + await next(event); + } + }, + async configuration( + params: lc.ConfigurationParams, + token: vscode.CancellationToken, + next: lc.ConfigurationRequest.HandlerSignature + ) { + const resp = await next(params, token); + if (resp && Array.isArray(resp)) { + return resp.map((val) => { + return substituteVSCodeVariables(val); + }); + } else { + return resp; + } + }, + }, async provideHover( document: vscode.TextDocument, position: vscode.Position, @@ -255,6 +255,9 @@ export async function createClient( } class ExperimentalFeatures implements lc.StaticFeature { + getState(): lc.FeatureState { + return { kind: "static" }; + } fillClientCapabilities(capabilities: lc.ClientCapabilities): void { const caps: any = capabilities.experimental ?? 
{}; caps.snippetTextEdit = true; diff --git a/src/tools/rust-analyzer/editors/code/src/commands.ts b/src/tools/rust-analyzer/editors/code/src/commands.ts index b9ad525e361f0..12ceb4f2df8e1 100644 --- a/src/tools/rust-analyzer/editors/code/src/commands.ts +++ b/src/tools/rust-analyzer/editors/code/src/commands.ts @@ -21,16 +21,16 @@ export function analyzerStatus(ctx: Ctx): Cmd { readonly uri = vscode.Uri.parse("rust-analyzer-status://status"); readonly eventEmitter = new vscode.EventEmitter(); - provideTextDocumentContent(_uri: vscode.Uri): vscode.ProviderResult { + async provideTextDocumentContent(_uri: vscode.Uri): Promise { if (!vscode.window.activeTextEditor) return ""; + const client = await ctx.getClient(); const params: ra.AnalyzerStatusParams = {}; const doc = ctx.activeRustEditor?.document; if (doc != null) { - params.textDocument = - ctx.client.code2ProtocolConverter.asTextDocumentIdentifier(doc); + params.textDocument = client.code2ProtocolConverter.asTextDocumentIdentifier(doc); } - return ctx.client.sendRequest(ra.analyzerStatus, params); + return await client.sendRequest(ra.analyzerStatus, params); } get onDidChange(): vscode.Event { @@ -38,7 +38,7 @@ export function analyzerStatus(ctx: Ctx): Cmd { } })(); - ctx.pushCleanup( + ctx.pushExtCleanup( vscode.workspace.registerTextDocumentContentProvider("rust-analyzer-status", tdcp) ); @@ -60,9 +60,14 @@ export function memoryUsage(ctx: Ctx): Cmd { provideTextDocumentContent(_uri: vscode.Uri): vscode.ProviderResult { if (!vscode.window.activeTextEditor) return ""; - return ctx.client.sendRequest(ra.memoryUsage).then((mem: any) => { - return "Per-query memory usage:\n" + mem + "\n(note: database has been cleared)"; - }); + return ctx + .getClient() + .then((it) => it.sendRequest(ra.memoryUsage)) + .then((mem: any) => { + return ( + "Per-query memory usage:\n" + mem + "\n(note: database has been cleared)" + ); + }); } get onDidChange(): vscode.Event { @@ -70,7 +75,7 @@ export function memoryUsage(ctx: Ctx): Cmd { } })(); - ctx.pushCleanup( + ctx.pushExtCleanup( vscode.workspace.registerTextDocumentContentProvider("rust-analyzer-memory", tdcp) ); @@ -83,23 +88,19 @@ export function memoryUsage(ctx: Ctx): Cmd { export function shuffleCrateGraph(ctx: Ctx): Cmd { return async () => { - const client = ctx.client; - if (!client) return; - - await client.sendRequest(ra.shuffleCrateGraph); + return ctx.getClient().then((it) => it.sendRequest(ra.shuffleCrateGraph)); }; } export function matchingBrace(ctx: Ctx): Cmd { return async () => { const editor = ctx.activeRustEditor; - const client = ctx.client; - if (!editor || !client) return; + if (!editor) return; + + const client = await ctx.getClient(); const response = await client.sendRequest(ra.matchingBrace, { - textDocument: ctx.client.code2ProtocolConverter.asTextDocumentIdentifier( - editor.document - ), + textDocument: client.code2ProtocolConverter.asTextDocumentIdentifier(editor.document), positions: editor.selections.map((s) => client.code2ProtocolConverter.asPosition(s.active) ), @@ -116,14 +117,13 @@ export function matchingBrace(ctx: Ctx): Cmd { export function joinLines(ctx: Ctx): Cmd { return async () => { const editor = ctx.activeRustEditor; - const client = ctx.client; - if (!editor || !client) return; + if (!editor) return; + + const client = await ctx.getClient(); const items: lc.TextEdit[] = await client.sendRequest(ra.joinLines, { ranges: editor.selections.map((it) => client.code2ProtocolConverter.asRange(it)), - textDocument: 
ctx.client.code2ProtocolConverter.asTextDocumentIdentifier( - editor.document - ), + textDocument: client.code2ProtocolConverter.asTextDocumentIdentifier(editor.document), }); const textEdits = await client.protocol2CodeConverter.asTextEdits(items); await editor.edit((builder) => { @@ -145,14 +145,12 @@ export function moveItemDown(ctx: Ctx): Cmd { export function moveItem(ctx: Ctx, direction: ra.Direction): Cmd { return async () => { const editor = ctx.activeRustEditor; - const client = ctx.client; - if (!editor || !client) return; + if (!editor) return; + const client = await ctx.getClient(); const lcEdits = await client.sendRequest(ra.moveItem, { range: client.code2ProtocolConverter.asRange(editor.selection), - textDocument: ctx.client.code2ProtocolConverter.asTextDocumentIdentifier( - editor.document - ), + textDocument: client.code2ProtocolConverter.asTextDocumentIdentifier(editor.document), direction, }); @@ -166,13 +164,13 @@ export function moveItem(ctx: Ctx, direction: ra.Direction): Cmd { export function onEnter(ctx: Ctx): Cmd { async function handleKeypress() { const editor = ctx.activeRustEditor; - const client = ctx.client; - if (!editor || !client) return false; + if (!editor) return false; + const client = await ctx.getClient(); const lcEdits = await client .sendRequest(ra.onEnter, { - textDocument: ctx.client.code2ProtocolConverter.asTextDocumentIdentifier( + textDocument: client.code2ProtocolConverter.asTextDocumentIdentifier( editor.document ), position: client.code2ProtocolConverter.asPosition(editor.selection.active), @@ -198,14 +196,13 @@ export function onEnter(ctx: Ctx): Cmd { export function parentModule(ctx: Ctx): Cmd { return async () => { const editor = vscode.window.activeTextEditor; - const client = ctx.client; - if (!editor || !client) return; + if (!editor) return; if (!(isRustDocument(editor.document) || isCargoTomlDocument(editor.document))) return; + const client = await ctx.getClient(); + const locations = await client.sendRequest(ra.parentModule, { - textDocument: ctx.client.code2ProtocolConverter.asTextDocumentIdentifier( - editor.document - ), + textDocument: client.code2ProtocolConverter.asTextDocumentIdentifier(editor.document), position: client.code2ProtocolConverter.asPosition(editor.selection.active), }); if (!locations) return; @@ -236,13 +233,11 @@ export function parentModule(ctx: Ctx): Cmd { export function openCargoToml(ctx: Ctx): Cmd { return async () => { const editor = ctx.activeRustEditor; - const client = ctx.client; - if (!editor || !client) return; + if (!editor) return; + const client = await ctx.getClient(); const response = await client.sendRequest(ra.openCargoToml, { - textDocument: ctx.client.code2ProtocolConverter.asTextDocumentIdentifier( - editor.document - ), + textDocument: client.code2ProtocolConverter.asTextDocumentIdentifier(editor.document), }); if (!response) return; @@ -259,12 +254,13 @@ export function openCargoToml(ctx: Ctx): Cmd { export function ssr(ctx: Ctx): Cmd { return async () => { const editor = vscode.window.activeTextEditor; - const client = ctx.client; - if (!editor || !client) return; + if (!editor) return; + + const client = await ctx.getClient(); const position = editor.selection.active; const selections = editor.selections; - const textDocument = ctx.client.code2ProtocolConverter.asTextDocumentIdentifier( + const textDocument = client.code2ProtocolConverter.asTextDocumentIdentifier( editor.document ); @@ -314,6 +310,10 @@ export function ssr(ctx: Ctx): Cmd { export function serverVersion(ctx: Ctx): Cmd 
{ return async () => { + if (!ctx.serverPath) { + void vscode.window.showWarningMessage(`rust-analyzer server is not running`); + return; + } const { stdout } = spawnSync(ctx.serverPath, ["--version"], { encoding: "utf8" }); const versionString = stdout.slice(`rust-analyzer `.length).trim(); @@ -354,21 +354,22 @@ export function syntaxTree(ctx: Ctx): Cmd { } } - provideTextDocumentContent( + async provideTextDocumentContent( uri: vscode.Uri, ct: vscode.CancellationToken - ): vscode.ProviderResult { + ): Promise { const rustEditor = ctx.activeRustEditor; if (!rustEditor) return ""; + const client = await ctx.getClient(); // When the range based query is enabled we take the range of the selection const range = uri.query === "range=true" && !rustEditor.selection.isEmpty - ? ctx.client.code2ProtocolConverter.asRange(rustEditor.selection) + ? client.code2ProtocolConverter.asRange(rustEditor.selection) : null; const params = { textDocument: { uri: rustEditor.document.uri.toString() }, range }; - return ctx.client.sendRequest(ra.syntaxTree, params, ct); + return client.sendRequest(ra.syntaxTree, params, ct); } get onDidChange(): vscode.Event { @@ -376,12 +377,11 @@ export function syntaxTree(ctx: Ctx): Cmd { } })(); - void new AstInspector(ctx); - - ctx.pushCleanup( + ctx.pushExtCleanup(new AstInspector(ctx)); + ctx.pushExtCleanup( vscode.workspace.registerTextDocumentContentProvider("rust-analyzer-syntax-tree", tdcp) ); - ctx.pushCleanup( + ctx.pushExtCleanup( vscode.languages.setLanguageConfiguration("ra_syntax_tree", { brackets: [["[", ")"]], }) @@ -437,14 +437,14 @@ export function viewHir(ctx: Ctx): Cmd { } } - provideTextDocumentContent( + async provideTextDocumentContent( _uri: vscode.Uri, ct: vscode.CancellationToken - ): vscode.ProviderResult { + ): Promise { const rustEditor = ctx.activeRustEditor; - const client = ctx.client; - if (!rustEditor || !client) return ""; + if (!rustEditor) return ""; + const client = await ctx.getClient(); const params = { textDocument: client.code2ProtocolConverter.asTextDocumentIdentifier( rustEditor.document @@ -459,7 +459,7 @@ export function viewHir(ctx: Ctx): Cmd { } })(); - ctx.pushCleanup( + ctx.pushExtCleanup( vscode.workspace.registerTextDocumentContentProvider("rust-analyzer-hir", tdcp) ); @@ -503,13 +503,13 @@ export function viewFileText(ctx: Ctx): Cmd { } } - provideTextDocumentContent( + async provideTextDocumentContent( _uri: vscode.Uri, ct: vscode.CancellationToken - ): vscode.ProviderResult { + ): Promise { const rustEditor = ctx.activeRustEditor; - const client = ctx.client; - if (!rustEditor || !client) return ""; + if (!rustEditor) return ""; + const client = await ctx.getClient(); const params = client.code2ProtocolConverter.asTextDocumentIdentifier( rustEditor.document @@ -522,7 +522,7 @@ export function viewFileText(ctx: Ctx): Cmd { } })(); - ctx.pushCleanup( + ctx.pushExtCleanup( vscode.workspace.registerTextDocumentContentProvider("rust-analyzer-file-text", tdcp) ); @@ -566,13 +566,13 @@ export function viewItemTree(ctx: Ctx): Cmd { } } - provideTextDocumentContent( + async provideTextDocumentContent( _uri: vscode.Uri, ct: vscode.CancellationToken - ): vscode.ProviderResult { + ): Promise { const rustEditor = ctx.activeRustEditor; - const client = ctx.client; - if (!rustEditor || !client) return ""; + if (!rustEditor) return ""; + const client = await ctx.getClient(); const params = { textDocument: client.code2ProtocolConverter.asTextDocumentIdentifier( @@ -587,7 +587,7 @@ export function viewItemTree(ctx: Ctx): Cmd { } })(); - 
ctx.pushCleanup( + ctx.pushExtCleanup( vscode.workspace.registerTextDocumentContentProvider("rust-analyzer-item-tree", tdcp) ); @@ -618,8 +618,8 @@ function crateGraph(ctx: Ctx, full: boolean): Cmd { const params = { full: full, }; - - const dot = await ctx.client.sendRequest(ra.viewCrateGraph, params); + const client = await ctx.getClient(); + const dot = await client.sendRequest(ra.viewCrateGraph, params); const uri = panel.webview.asWebviewUri(nodeModulesPath); const html = ` @@ -690,13 +690,13 @@ export function expandMacro(ctx: Ctx): Cmd { eventEmitter = new vscode.EventEmitter(); async provideTextDocumentContent(_uri: vscode.Uri): Promise { const editor = vscode.window.activeTextEditor; - const client = ctx.client; - if (!editor || !client) return ""; + if (!editor) return ""; + const client = await ctx.getClient(); const position = editor.selection.active; const expanded = await client.sendRequest(ra.expandMacro, { - textDocument: ctx.client.code2ProtocolConverter.asTextDocumentIdentifier( + textDocument: client.code2ProtocolConverter.asTextDocumentIdentifier( editor.document ), position, @@ -712,7 +712,7 @@ export function expandMacro(ctx: Ctx): Cmd { } })(); - ctx.pushCleanup( + ctx.pushExtCleanup( vscode.workspace.registerTextDocumentContentProvider("rust-analyzer-expand-macro", tdcp) ); @@ -724,11 +724,11 @@ export function expandMacro(ctx: Ctx): Cmd { } export function reloadWorkspace(ctx: Ctx): Cmd { - return async () => ctx.client.sendRequest(ra.reloadWorkspace); + return async () => (await ctx.getClient()).sendRequest(ra.reloadWorkspace); } async function showReferencesImpl( - client: LanguageClient, + client: LanguageClient | undefined, uri: string, position: lc.Position, locations: lc.Location[] @@ -745,7 +745,7 @@ async function showReferencesImpl( export function showReferences(ctx: Ctx): Cmd { return async (uri: string, position: lc.Position, locations: lc.Location[]) => { - await showReferencesImpl(ctx.client, uri, position, locations); + await showReferencesImpl(await ctx.getClient(), uri, position, locations); }; } @@ -762,25 +762,23 @@ export function applyActionGroup(_ctx: Ctx): Cmd { export function gotoLocation(ctx: Ctx): Cmd { return async (locationLink: lc.LocationLink) => { - const client = ctx.client; - if (client) { - const uri = client.protocol2CodeConverter.asUri(locationLink.targetUri); - let range = client.protocol2CodeConverter.asRange(locationLink.targetSelectionRange); - // collapse the range to a cursor position - range = range.with({ end: range.start }); + const client = await ctx.getClient(); + const uri = client.protocol2CodeConverter.asUri(locationLink.targetUri); + let range = client.protocol2CodeConverter.asRange(locationLink.targetSelectionRange); + // collapse the range to a cursor position + range = range.with({ end: range.start }); - await vscode.window.showTextDocument(uri, { selection: range }); - } + await vscode.window.showTextDocument(uri, { selection: range }); }; } export function openDocs(ctx: Ctx): Cmd { return async () => { - const client = ctx.client; const editor = vscode.window.activeTextEditor; - if (!editor || !client) { + if (!editor) { return; } + const client = await ctx.getClient(); const position = editor.selection.active; const textDocument = { uri: editor.document.uri.toString() }; @@ -795,20 +793,21 @@ export function openDocs(ctx: Ctx): Cmd { export function cancelFlycheck(ctx: Ctx): Cmd { return async () => { - await ctx.client.sendRequest(ra.cancelFlycheck); + const client = await ctx.getClient(); + await 
client.sendRequest(ra.cancelFlycheck); }; } export function resolveCodeAction(ctx: Ctx): Cmd { - const client = ctx.client; return async (params: lc.CodeAction) => { + const client = await ctx.getClient(); params.command = undefined; - const item = await client.sendRequest(lc.CodeActionResolveRequest.type, params); - if (!item.edit) { + const item = await client?.sendRequest(lc.CodeActionResolveRequest.type, params); + if (!item?.edit) { return; } const itemEdit = item.edit; - const edit = await client.protocol2CodeConverter.asWorkspaceEdit(itemEdit); + const edit = await client?.protocol2CodeConverter.asWorkspaceEdit(itemEdit); // filter out all text edits and recreate the WorkspaceEdit without them so we can apply // snippet edits on our own const lcFileSystemEdit = { @@ -847,11 +846,10 @@ export function run(ctx: Ctx): Cmd { } export function peekTests(ctx: Ctx): Cmd { - const client = ctx.client; - return async () => { const editor = ctx.activeRustEditor; - if (!editor || !client) return; + if (!editor) return; + const client = await ctx.getClient(); await vscode.window.withProgress( { @@ -937,10 +935,10 @@ export function newDebugConfig(ctx: Ctx): Cmd { }; } -export function linkToCommand(ctx: Ctx): Cmd { +export function linkToCommand(_: Ctx): Cmd { return async (commandId: string) => { const link = LINKED_COMMANDS.get(commandId); - if (ctx.client && link) { + if (link) { const { command, arguments: args = [] } = link; await vscode.commands.executeCommand(command, ...args); } diff --git a/src/tools/rust-analyzer/editors/code/src/config.ts b/src/tools/rust-analyzer/editors/code/src/config.ts index 15846a5e8645e..632a7d86faa36 100644 --- a/src/tools/rust-analyzer/editors/code/src/config.ts +++ b/src/tools/rust-analyzer/editors/code/src/config.ts @@ -1,4 +1,5 @@ -import path = require("path"); +import * as path from "path"; +import * as os from "os"; import * as vscode from "vscode"; import { Env } from "./client"; import { log } from "./util"; @@ -10,23 +11,17 @@ export type RunnableEnvCfg = export class Config { readonly extensionId = "rust-lang.rust-analyzer"; + configureLang: vscode.Disposable | undefined; readonly rootSection = "rust-analyzer"; - private readonly requiresWorkspaceReloadOpts = [ - "serverPath", - "server", - // FIXME: This shouldn't be here, changing this setting should reload - // `continueCommentsOnNewline` behavior without restart - "typing", - ].map((opt) => `${this.rootSection}.${opt}`); private readonly requiresReloadOpts = [ "cargo", "procMacro", + "serverPath", + "server", "files", "lens", // works as lens.* - ] - .map((opt) => `${this.rootSection}.${opt}`) - .concat(this.requiresWorkspaceReloadOpts); + ].map((opt) => `${this.rootSection}.${opt}`); readonly package: { version: string; @@ -44,6 +39,11 @@ export class Config { ctx.subscriptions ); this.refreshLogging(); + this.configureLanguage(); + } + + dispose() { + this.configureLang?.dispose(); } private refreshLogging() { @@ -57,33 +57,86 @@ export class Config { private async onDidChangeConfiguration(event: vscode.ConfigurationChangeEvent) { this.refreshLogging(); + this.configureLanguage(); + const requiresReloadOpt = this.requiresReloadOpts.find((opt) => event.affectsConfiguration(opt) ); if (!requiresReloadOpt) return; - const requiresWorkspaceReloadOpt = this.requiresWorkspaceReloadOpts.find((opt) => - event.affectsConfiguration(opt) - ); - - if (!requiresWorkspaceReloadOpt && this.restartServerOnConfigChange) { + if (this.restartServerOnConfigChange) { await 
vscode.commands.executeCommand("rust-analyzer.reload"); return; } - const message = requiresWorkspaceReloadOpt - ? `Changing "${requiresWorkspaceReloadOpt}" requires a window reload` - : `Changing "${requiresReloadOpt}" requires a reload`; - const userResponse = await vscode.window.showInformationMessage(message, "Reload now"); - - if (userResponse === "Reload now") { - const command = requiresWorkspaceReloadOpt - ? "workbench.action.reloadWindow" - : "rust-analyzer.reload"; - if (userResponse === "Reload now") { - await vscode.commands.executeCommand(command); - } + const message = `Changing "${requiresReloadOpt}" requires a server restart`; + const userResponse = await vscode.window.showInformationMessage(message, "Restart now"); + + if (userResponse) { + const command = "rust-analyzer.reload"; + await vscode.commands.executeCommand(command); + } + } + + /** + * Sets up additional language configuration that's impossible to do via a + * separate language-configuration.json file. See [1] for more information. + * + * [1]: https://github.com/Microsoft/vscode/issues/11514#issuecomment-244707076 + */ + private configureLanguage() { + if (this.typingContinueCommentsOnNewline && !this.configureLang) { + const indentAction = vscode.IndentAction.None; + + this.configureLang = vscode.languages.setLanguageConfiguration("rust", { + onEnterRules: [ + { + // Doc single-line comment + // e.g. ///| + beforeText: /^\s*\/{3}.*$/, + action: { indentAction, appendText: "/// " }, + }, + { + // Parent doc single-line comment + // e.g. //!| + beforeText: /^\s*\/{2}\!.*$/, + action: { indentAction, appendText: "//! " }, + }, + { + // Begins an auto-closed multi-line comment (standard or parent doc) + // e.g. /** | */ or /*! | */ + beforeText: /^\s*\/\*(\*|\!)(?!\/)([^\*]|\*(?!\/))*$/, + afterText: /^\s*\*\/$/, + action: { + indentAction: vscode.IndentAction.IndentOutdent, + appendText: " * ", + }, + }, + { + // Begins a multi-line comment (standard or parent doc) + // e.g. /** ...| or /*! ...| + beforeText: /^\s*\/\*(\*|\!)(?!\/)([^\*]|\*(?!\/))*$/, + action: { indentAction, appendText: " * " }, + }, + { + // Continues a multi-line comment + // e.g. * ...| + beforeText: /^(\ \ )*\ \*(\ ([^\*]|\*(?!\/))*)?$/, + action: { indentAction, appendText: "* " }, + }, + { + // Dedents after closing a multi-line comment + // e.g. 
*/| + beforeText: /^(\ \ )*\ \*\/\s*$/, + action: { indentAction, removeText: 1 }, + }, + ], + }); + } + if (!this.typingContinueCommentsOnNewline && this.configureLang) { + this.configureLang.dispose(); + this.configureLang = undefined; } } @@ -187,6 +240,37 @@ export class Config { } } +const VarRegex = new RegExp(/\$\{(.+?)\}/g); + +export function substituteVSCodeVariableInString(val: string): string { + return val.replace(VarRegex, (substring: string, varName) => { + if (typeof varName === "string") { + return computeVscodeVar(varName) || substring; + } else { + return substring; + } + }); +} + +export function substituteVSCodeVariables(resp: any): any { + if (typeof resp === "string") { + return substituteVSCodeVariableInString(resp); + } else if (resp && Array.isArray(resp)) { + return resp.map((val) => { + return substituteVSCodeVariables(val); + }); + } else if (resp && typeof resp === "object") { + const res: { [key: string]: any } = {}; + for (const key in resp) { + const val = resp[key]; + res[key] = substituteVSCodeVariables(val); + } + return res; + } else if (typeof resp === "function") { + return null; + } + return resp; +} export function substituteVariablesInEnv(env: Env): Env { const missingDeps = new Set(); // vscode uses `env:ENV_NAME` for env vars resolution, and it's easier @@ -233,7 +317,7 @@ export function substituteVariablesInEnv(env: Env): Env { } } else { envWithDeps[dep] = { - value: computeVscodeVar(dep), + value: computeVscodeVar(dep) || "${" + dep + "}", deps: [], }; } @@ -264,37 +348,34 @@ export function substituteVariablesInEnv(env: Env): Env { return resolvedEnv; } -function computeVscodeVar(varName: string): string { +function computeVscodeVar(varName: string): string | null { + const workspaceFolder = () => { + const folders = vscode.workspace.workspaceFolders ?? []; + if (folders.length === 1) { + // TODO: support for remote workspaces? + return folders[0].uri.fsPath; + } else if (folders.length > 1) { + // could use currently opened document to detect the correct + // workspace. However, that would be determined by the document + // user has opened on Editor startup. Could lead to + // unpredictable workspace selection in practice. + // It's better to pick the first one + return folders[0].uri.fsPath; + } else { + // no workspace opened + return ""; + } + }; // https://code.visualstudio.com/docs/editor/variables-reference const supportedVariables: { [k: string]: () => string } = { - workspaceFolder: () => { - const folders = vscode.workspace.workspaceFolders ?? []; - if (folders.length === 1) { - // TODO: support for remote workspaces? - return folders[0].uri.fsPath; - } else if (folders.length > 1) { - // could use currently opened document to detect the correct - // workspace. However, that would be determined by the document - // user has opened on Editor startup. Could lead to - // unpredictable workspace selection in practice. 
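The substitution helpers introduced above (`substituteVSCodeVariableInString`, `substituteVSCodeVariables`, and the reworked `computeVscodeVar`) walk an arbitrary configuration value and expand `${...}` placeholders, leaving unknown variables untouched now that `computeVscodeVar` returns `null` for them. A rough usage sketch, assuming only the exported names shown in this hunk (the object shape is made up for illustration):

```typescript
import { substituteVSCodeVariables } from "./config";

const raw = {
    extraEnv: {
        // Known variables resolve through computeVscodeVar.
        CARGO_TARGET_DIR: "${workspaceFolder}/target/ra",
        RUSTUP_HOME: "${userHome}/.rustup",
    },
    // An unrecognized variable is kept verbatim, because the replacement
    // callback falls back to the original "${notAThing}" substring.
    extraArgs: ["--features", "${notAThing}"],
};

const resolved = substituteVSCodeVariables(raw);
console.log(resolved);
```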
- // It's better to pick the first one - return folders[0].uri.fsPath; - } else { - // no workspace opened - return ""; - } - }, + workspaceFolder, workspaceFolderBasename: () => { - const workspaceFolder = computeVscodeVar("workspaceFolder"); - if (workspaceFolder) { - return path.basename(workspaceFolder); - } else { - return ""; - } + return path.basename(workspaceFolder()); }, cwd: () => process.cwd(), + userHome: () => os.homedir(), // see // https://github.com/microsoft/vscode/blob/08ac1bb67ca2459496b272d8f4a908757f24f56f/src/vs/workbench/api/common/extHostVariableResolverService.ts#L81 @@ -308,7 +389,7 @@ function computeVscodeVar(varName: string): string { if (varName in supportedVariables) { return supportedVariables[varName](); } else { - // can't resolve, keep the expression as is - return "${" + varName + "}"; + // return "${" + varName + "}"; + return null; } } diff --git a/src/tools/rust-analyzer/editors/code/src/ctx.ts b/src/tools/rust-analyzer/editors/code/src/ctx.ts index 26510011d4397..044a9470aa947 100644 --- a/src/tools/rust-analyzer/editors/code/src/ctx.ts +++ b/src/tools/rust-analyzer/editors/code/src/ctx.ts @@ -2,10 +2,12 @@ import * as vscode from "vscode"; import * as lc from "vscode-languageclient/node"; import * as ra from "./lsp_ext"; -import { Config } from "./config"; +import { Config, substituteVariablesInEnv, substituteVSCodeVariables } from "./config"; import { createClient } from "./client"; -import { isRustEditor, RustEditor } from "./util"; +import { isRustEditor, log, RustEditor } from "./util"; import { ServerStatusParams } from "./lsp_ext"; +import { PersistentState } from "./persistent_state"; +import { bootstrap } from "./bootstrap"; export type Workspace = | { @@ -16,66 +18,192 @@ export type Workspace = files: vscode.TextDocument[]; }; +export type CommandFactory = { + enabled: (ctx: Ctx) => Cmd; + disabled?: (ctx: Ctx) => Cmd; +}; + export class Ctx { - private constructor( - readonly config: Config, - private readonly extCtx: vscode.ExtensionContext, - readonly client: lc.LanguageClient, - readonly serverPath: string, - readonly statusBar: vscode.StatusBarItem - ) {} - - static async create( - config: Config, - extCtx: vscode.ExtensionContext, - serverPath: string, - workspace: Workspace - ): Promise { - const client = await createClient(serverPath, workspace, config.serverExtraEnv); - - const statusBar = vscode.window.createStatusBarItem(vscode.StatusBarAlignment.Left); - extCtx.subscriptions.push(statusBar); - statusBar.text = "rust-analyzer"; - statusBar.tooltip = "ready"; - statusBar.command = "rust-analyzer.analyzerStatus"; - statusBar.show(); - - const res = new Ctx(config, extCtx, client, serverPath, statusBar); - - res.pushCleanup(client.start()); - await client.onReady(); - client.onNotification(ra.serverStatus, (params) => res.setServerStatus(params)); - return res; + readonly statusBar: vscode.StatusBarItem; + readonly config: Config; + + private client: lc.LanguageClient | undefined; + private _serverPath: string | undefined; + private traceOutputChannel: vscode.OutputChannel | undefined; + private outputChannel: vscode.OutputChannel | undefined; + private clientSubscriptions: Disposable[]; + private state: PersistentState; + private commandFactories: Record; + private commandDisposables: Disposable[]; + + workspace: Workspace; + + constructor( + readonly extCtx: vscode.ExtensionContext, + workspace: Workspace, + commandFactories: Record + ) { + extCtx.subscriptions.push(this); + this.statusBar = 
vscode.window.createStatusBarItem(vscode.StatusBarAlignment.Left); + this.statusBar.text = "rust-analyzer"; + this.statusBar.tooltip = "ready"; + this.statusBar.command = "rust-analyzer.analyzerStatus"; + this.statusBar.show(); + this.workspace = workspace; + this.clientSubscriptions = []; + this.commandDisposables = []; + this.commandFactories = commandFactories; + + this.state = new PersistentState(extCtx.globalState); + this.config = new Config(extCtx); + + this.updateCommands(); } - get activeRustEditor(): RustEditor | undefined { - const editor = vscode.window.activeTextEditor; - return editor && isRustEditor(editor) ? editor : undefined; + dispose() { + this.config.dispose(); + this.statusBar.dispose(); + void this.disposeClient(); + this.commandDisposables.forEach((disposable) => disposable.dispose()); } - get visibleRustEditors(): RustEditor[] { - return vscode.window.visibleTextEditors.filter(isRustEditor); + clientFetcher() { + const self = this; + return { + get client(): lc.LanguageClient | undefined { + return self.client; + }, + }; } - registerCommand(name: string, factory: (ctx: Ctx) => Cmd) { - const fullName = `rust-analyzer.${name}`; - const cmd = factory(this); - const d = vscode.commands.registerCommand(fullName, cmd); - this.pushCleanup(d); + async getClient() { + if (!this.traceOutputChannel) { + this.traceOutputChannel = vscode.window.createOutputChannel( + "Rust Analyzer Language Server Trace" + ); + this.pushExtCleanup(this.traceOutputChannel); + } + if (!this.outputChannel) { + this.outputChannel = vscode.window.createOutputChannel("Rust Analyzer Language Server"); + this.pushExtCleanup(this.outputChannel); + } + + if (!this.client) { + this._serverPath = await bootstrap(this.extCtx, this.config, this.state).catch( + (err) => { + let message = "bootstrap error. "; + + message += + 'See the logs in "OUTPUT > Rust Analyzer Client" (should open automatically). 
'; + message += + 'To enable verbose logs use { "rust-analyzer.trace.extension": true }'; + + log.error("Bootstrap error", err); + throw new Error(message); + } + ); + const newEnv = substituteVariablesInEnv( + Object.assign({}, process.env, this.config.serverExtraEnv) + ); + const run: lc.Executable = { + command: this._serverPath, + options: { env: newEnv }, + }; + const serverOptions = { + run, + debug: run, + }; + + let rawInitializationOptions = vscode.workspace.getConfiguration("rust-analyzer"); + + if (this.workspace.kind === "Detached Files") { + rawInitializationOptions = { + detachedFiles: this.workspace.files.map((file) => file.uri.fsPath), + ...rawInitializationOptions, + }; + } + + const initializationOptions = substituteVSCodeVariables(rawInitializationOptions); + + this.client = await createClient( + this.traceOutputChannel, + this.outputChannel, + initializationOptions, + serverOptions + ); + this.pushClientCleanup( + this.client.onNotification(ra.serverStatus, (params) => + this.setServerStatus(params) + ) + ); + } + return this.client; } - get extensionPath(): string { - return this.extCtx.extensionPath; + async activate() { + log.info("Activating language client"); + const client = await this.getClient(); + await client.start(); + this.updateCommands(); + return client; + } + + async deactivate() { + log.info("Deactivating language client"); + await this.client?.stop(); + this.updateCommands(); + } + + async stop() { + log.info("Stopping language client"); + await this.disposeClient(); + this.updateCommands(); + } + + private async disposeClient() { + this.clientSubscriptions?.forEach((disposable) => disposable.dispose()); + this.clientSubscriptions = []; + await this.client?.dispose(); + this._serverPath = undefined; + this.client = undefined; + } + + get activeRustEditor(): RustEditor | undefined { + const editor = vscode.window.activeTextEditor; + return editor && isRustEditor(editor) ? editor : undefined; } - get globalState(): vscode.Memento { - return this.extCtx.globalState; + get extensionPath(): string { + return this.extCtx.extensionPath; } get subscriptions(): Disposable[] { return this.extCtx.subscriptions; } + get serverPath(): string | undefined { + return this._serverPath; + } + + private updateCommands() { + this.commandDisposables.forEach((disposable) => disposable.dispose()); + this.commandDisposables = []; + const fetchFactory = (factory: CommandFactory, fullName: string) => { + return this.client && this.client.isRunning() + ? 
factory.enabled + : factory.disabled || + ((_) => () => + vscode.window.showErrorMessage( + `command ${fullName} failed: rust-analyzer server is not running` + )); + }; + for (const [name, factory] of Object.entries(this.commandFactories)) { + const fullName = `rust-analyzer.${name}`; + const callback = fetchFactory(factory, fullName)(this); + this.commandDisposables.push(vscode.commands.registerCommand(fullName, callback)); + } + } + setServerStatus(status: ServerStatusParams) { let icon = ""; const statusBar = this.statusBar; @@ -111,9 +239,13 @@ export class Ctx { statusBar.text = `${icon}rust-analyzer`; } - pushCleanup(d: Disposable) { + pushExtCleanup(d: Disposable) { this.extCtx.subscriptions.push(d); } + + private pushClientCleanup(d: Disposable) { + this.clientSubscriptions.push(d); + } } export interface Disposable { diff --git a/src/tools/rust-analyzer/editors/code/src/main.ts b/src/tools/rust-analyzer/editors/code/src/main.ts index 41bde4195e07d..8c3a676ffb056 100644 --- a/src/tools/rust-analyzer/editors/code/src/main.ts +++ b/src/tools/rust-analyzer/editors/code/src/main.ts @@ -1,53 +1,37 @@ import * as vscode from "vscode"; import * as lc from "vscode-languageclient/node"; -import * as os from "os"; import * as commands from "./commands"; -import { Ctx } from "./ctx"; -import { Config } from "./config"; -import { log, isValidExecutable, isRustDocument } from "./util"; -import { PersistentState } from "./persistent_state"; +import { CommandFactory, Ctx, Workspace } from "./ctx"; +import { isRustDocument } from "./util"; import { activateTaskProvider } from "./tasks"; import { setContextValue } from "./util"; -import { exec } from "child_process"; - -let ctx: Ctx | undefined; const RUST_PROJECT_CONTEXT_NAME = "inRustProject"; -let TRACE_OUTPUT_CHANNEL: vscode.OutputChannel | null = null; -export function traceOutputChannel() { - if (!TRACE_OUTPUT_CHANNEL) { - TRACE_OUTPUT_CHANNEL = vscode.window.createOutputChannel( - "Rust Analyzer Language Server Trace" - ); - } - return TRACE_OUTPUT_CHANNEL; -} -let OUTPUT_CHANNEL: vscode.OutputChannel | null = null; -export function outputChannel() { - if (!OUTPUT_CHANNEL) { - OUTPUT_CHANNEL = vscode.window.createOutputChannel("Rust Analyzer Language Server"); - } - return OUTPUT_CHANNEL; +export interface RustAnalyzerExtensionApi { + // FIXME: this should be non-optional + readonly client?: lc.LanguageClient; } -export interface RustAnalyzerExtensionApi { - client?: lc.LanguageClient; +export async function deactivate() { + await setContextValue(RUST_PROJECT_CONTEXT_NAME, undefined); } export async function activate( context: vscode.ExtensionContext ): Promise { - // VS Code doesn't show a notification when an extension fails to activate - // so we do it ourselves. - return await tryActivate(context).catch((err) => { - void vscode.window.showErrorMessage(`Cannot activate rust-analyzer: ${err.message}`); - throw err; - }); -} + if (vscode.extensions.getExtension("rust-lang.rust")) { + vscode.window + .showWarningMessage( + `You have both the rust-analyzer (rust-lang.rust-analyzer) and Rust (rust-lang.rust) ` + + "plugins enabled. These are known to conflict and cause various functions of " + + "both plugins to not work correctly. You should disable one of them.", + "Got it" + ) + .then(() => {}, console.error); + } -async function tryActivate(context: vscode.ExtensionContext): Promise { // We only support local folders, not eg. Live Share (`vlsl:` scheme), so don't activate if // only those are in use. 
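The `updateCommands`/`fetchFactory` code above re-registers every command whenever the client starts or stops, choosing the `enabled` or `disabled` variant of its `CommandFactory`. A standalone sketch of that dispatch, with simplified `Ctx`/`Cmd` types standing in for the ones defined in ctx.ts:

```typescript
import * as vscode from "vscode";

type Cmd = (...args: unknown[]) => unknown;

// Same shape as the CommandFactory type introduced in ctx.ts above.
interface CommandFactory {
    enabled: (ctx: unknown) => Cmd;
    disabled?: (ctx: unknown) => Cmd;
}

// Mirrors fetchFactory: commands without a `disabled` variant fall back to an
// error message while the server is not running.
function pickCallback(
    factory: CommandFactory,
    fullName: string,
    clientRunning: boolean,
    ctx: unknown
): Cmd {
    if (clientRunning) return factory.enabled(ctx);
    if (factory.disabled) return factory.disabled(ctx);
    return () =>
        vscode.window.showErrorMessage(
            `command ${fullName} failed: rust-analyzer server is not running`
        );
}

// Registration loop as in updateCommands: old registrations are disposed by
// the caller before this runs, so commands can be swapped at any time.
export function registerAll(
    factories: Record<string, CommandFactory>,
    clientRunning: boolean,
    ctx: unknown,
    disposables: vscode.Disposable[]
): void {
    for (const [name, factory] of Object.entries(factories)) {
        const fullName = `rust-analyzer.${name}`;
        const callback = pickCallback(factory, fullName, clientRunning, ctx);
        disposables.push(vscode.commands.registerCommand(fullName, callback));
    }
}
```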
// (r-a still somewhat works with Live Share, because commands are tunneled to the host) @@ -65,351 +49,118 @@ async function tryActivate(context: vscode.ExtensionContext): Promise { - let message = "bootstrap error. "; - - message += 'See the logs in "OUTPUT > Rust Analyzer Client" (should open automatically). '; - message += 'To enable verbose logs use { "rust-analyzer.trace.extension": true }'; + const workspace: Workspace = + folders.length === 0 + ? { + kind: "Detached Files", + files: rustDocuments, + } + : { kind: "Workspace Folder" }; - log.error("Bootstrap error", err); - throw new Error(message); + const ctx = new Ctx(context, workspace, createCommands()); + // VS Code doesn't show a notification when an extension fails to activate + // so we do it ourselves. + const api = await activateServer(ctx).catch((err) => { + void vscode.window.showErrorMessage( + `Cannot activate rust-analyzer extension: ${err.message}` + ); + throw err; }); + await setContextValue(RUST_PROJECT_CONTEXT_NAME, true); + return api; +} - if (folders.length === 0) { - ctx = await Ctx.create(config, context, serverPath, { - kind: "Detached Files", - files: rustDocuments, - }); - } else { - // Note: we try to start the server before we activate type hints so that it - // registers its `onDidChangeDocument` handler before us. - // - // This a horribly, horribly wrong way to deal with this problem. - ctx = await Ctx.create(config, context, serverPath, { kind: "Workspace Folder" }); - ctx.pushCleanup(activateTaskProvider(ctx.config)); - } - await initCommonContext(context, ctx); - - warnAboutExtensionConflicts(); - - if (config.typingContinueCommentsOnNewline) { - ctx.pushCleanup(configureLanguage()); +async function activateServer(ctx: Ctx): Promise { + if (ctx.workspace.kind === "Workspace Folder") { + ctx.pushExtCleanup(activateTaskProvider(ctx.config)); } vscode.workspace.onDidChangeConfiguration( - (_) => - ctx?.client - ?.sendNotification("workspace/didChangeConfiguration", { settings: "" }) - .catch(log.error), + async (_) => { + await ctx + .clientFetcher() + .client?.sendNotification("workspace/didChangeConfiguration", { settings: "" }); + }, null, ctx.subscriptions ); - return { - client: ctx.client, - }; -} - -async function initCommonContext(context: vscode.ExtensionContext, ctx: Ctx) { - // Register a "dumb" onEnter command for the case where server fails to - // start. - // - // FIXME: refactor command registration code such that commands are - // **always** registered, even if the server does not start. Use API like - // this perhaps? - // - // ```TypeScript - // registerCommand( - // factory: (Ctx) => ((Ctx) => any), - // fallback: () => any = () => vscode.window.showErrorMessage( - // "rust-analyzer is not available" - // ), - // ) - const defaultOnEnter = vscode.commands.registerCommand("rust-analyzer.onEnter", () => - vscode.commands.executeCommand("default:type", { text: "\n" }) - ); - context.subscriptions.push(defaultOnEnter); - - await setContextValue(RUST_PROJECT_CONTEXT_NAME, true); - - // Commands which invokes manually via command palette, shortcut, etc. 
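The new `activate` builds a `Workspace` value before constructing `Ctx`: a "Workspace Folder" workspace when folders are open, otherwise "Detached Files" seeded with the Rust documents that are already open. A hedged sketch of that detection, assuming `isRustDocument` is essentially a language-id check (the real helper lives in util.ts and is not part of this hunk) and that folders are filtered to the local `file` scheme as the comment above suggests:

```typescript
import * as vscode from "vscode";

type Workspace =
    | { kind: "Workspace Folder" }
    | { kind: "Detached Files"; files: vscode.TextDocument[] };

// Assumption: roughly what util.ts's isRustDocument does.
const isRustDocument = (doc: vscode.TextDocument) => doc.languageId === "rust";

export function detectWorkspace(): Workspace {
    // Only local folders count; remote-only schemes (e.g. Live Share) are ignored.
    const folders = (vscode.workspace.workspaceFolders ?? []).filter(
        (folder) => folder.uri.scheme === "file"
    );
    if (folders.length > 0) {
        return { kind: "Workspace Folder" };
    }
    const rustDocuments = vscode.workspace.textDocuments.filter(isRustDocument);
    return { kind: "Detached Files", files: rustDocuments };
}
```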
- - // Reloading is inspired by @DanTup maneuver: https://github.com/microsoft/vscode/issues/45774#issuecomment-373423895 - ctx.registerCommand("reload", (_) => async () => { - void vscode.window.showInformationMessage("Reloading rust-analyzer..."); - await doDeactivate(); - while (context.subscriptions.length > 0) { - try { - context.subscriptions.pop()!.dispose(); - } catch (err) { - log.error("Dispose error:", err); - } - } - await activate(context).catch(log.error); - }); - - ctx.registerCommand("analyzerStatus", commands.analyzerStatus); - ctx.registerCommand("memoryUsage", commands.memoryUsage); - ctx.registerCommand("shuffleCrateGraph", commands.shuffleCrateGraph); - ctx.registerCommand("reloadWorkspace", commands.reloadWorkspace); - ctx.registerCommand("matchingBrace", commands.matchingBrace); - ctx.registerCommand("joinLines", commands.joinLines); - ctx.registerCommand("parentModule", commands.parentModule); - ctx.registerCommand("syntaxTree", commands.syntaxTree); - ctx.registerCommand("viewHir", commands.viewHir); - ctx.registerCommand("viewFileText", commands.viewFileText); - ctx.registerCommand("viewItemTree", commands.viewItemTree); - ctx.registerCommand("viewCrateGraph", commands.viewCrateGraph); - ctx.registerCommand("viewFullCrateGraph", commands.viewFullCrateGraph); - ctx.registerCommand("expandMacro", commands.expandMacro); - ctx.registerCommand("run", commands.run); - ctx.registerCommand("copyRunCommandLine", commands.copyRunCommandLine); - ctx.registerCommand("debug", commands.debug); - ctx.registerCommand("newDebugConfig", commands.newDebugConfig); - ctx.registerCommand("openDocs", commands.openDocs); - ctx.registerCommand("openCargoToml", commands.openCargoToml); - ctx.registerCommand("peekTests", commands.peekTests); - ctx.registerCommand("moveItemUp", commands.moveItemUp); - ctx.registerCommand("moveItemDown", commands.moveItemDown); - ctx.registerCommand("cancelFlycheck", commands.cancelFlycheck); - - defaultOnEnter.dispose(); - ctx.registerCommand("onEnter", commands.onEnter); - - ctx.registerCommand("ssr", commands.ssr); - ctx.registerCommand("serverVersion", commands.serverVersion); - - // Internal commands which are invoked by the server. - ctx.registerCommand("runSingle", commands.runSingle); - ctx.registerCommand("debugSingle", commands.debugSingle); - ctx.registerCommand("showReferences", commands.showReferences); - ctx.registerCommand("applySnippetWorkspaceEdit", commands.applySnippetWorkspaceEditCommand); - ctx.registerCommand("resolveCodeAction", commands.resolveCodeAction); - ctx.registerCommand("applyActionGroup", commands.applyActionGroup); - ctx.registerCommand("gotoLocation", commands.gotoLocation); - - ctx.registerCommand("linkToCommand", commands.linkToCommand); -} - -export async function deactivate() { - TRACE_OUTPUT_CHANNEL?.dispose(); - TRACE_OUTPUT_CHANNEL = null; - OUTPUT_CHANNEL?.dispose(); - OUTPUT_CHANNEL = null; - await doDeactivate(); -} - -async function doDeactivate() { - await setContextValue(RUST_PROJECT_CONTEXT_NAME, undefined); - await ctx?.client.stop(); - ctx = undefined; -} - -async function bootstrap( - context: vscode.ExtensionContext, - config: Config, - state: PersistentState -): Promise { - const path = await getServer(context, config, state); - if (!path) { - throw new Error( - "Rust Analyzer Language Server is not available. " + - "Please, ensure its [proper installation](https://rust-analyzer.github.io/manual.html#installation)." 
- ); - } - - log.info("Using server binary at", path); - - if (!isValidExecutable(path)) { - if (config.serverPath) { - throw new Error(`Failed to execute ${path} --version. \`config.server.path\` or \`config.serverPath\` has been set explicitly.\ - Consider removing this config or making a valid server binary available at that path.`); - } else { - throw new Error(`Failed to execute ${path} --version`); - } - } - - return path; + await ctx.activate(); + return ctx.clientFetcher(); } -async function patchelf(dest: vscode.Uri): Promise { - await vscode.window.withProgress( - { - location: vscode.ProgressLocation.Notification, - title: "Patching rust-analyzer for NixOS", +function createCommands(): Record { + return { + onEnter: { + enabled: commands.onEnter, + disabled: (_) => () => vscode.commands.executeCommand("default:type", { text: "\n" }), }, - async (progress, _) => { - const expression = ` - {srcStr, pkgs ? import {}}: - pkgs.stdenv.mkDerivation { - name = "rust-analyzer"; - src = /. + srcStr; - phases = [ "installPhase" "fixupPhase" ]; - installPhase = "cp $src $out"; - fixupPhase = '' - chmod 755 $out - patchelf --set-interpreter "$(cat $NIX_CC/nix-support/dynamic-linker)" $out - ''; - } - `; - const origFile = vscode.Uri.file(dest.fsPath + "-orig"); - await vscode.workspace.fs.rename(dest, origFile, { overwrite: true }); - try { - progress.report({ message: "Patching executable", increment: 20 }); - await new Promise((resolve, reject) => { - const handle = exec( - `nix-build -E - --argstr srcStr '${origFile.fsPath}' -o '${dest.fsPath}'`, - (err, stdout, stderr) => { - if (err != null) { - reject(Error(stderr)); - } else { - resolve(stdout); - } - } - ); - handle.stdin?.write(expression); - handle.stdin?.end(); - }); - } finally { - await vscode.workspace.fs.delete(origFile); - } - } - ); -} - -async function getServer( - context: vscode.ExtensionContext, - config: Config, - state: PersistentState -): Promise { - const explicitPath = serverPath(config); - if (explicitPath) { - if (explicitPath.startsWith("~/")) { - return os.homedir() + explicitPath.slice("~".length); - } - return explicitPath; - } - if (config.package.releaseTag === null) return "rust-analyzer"; - - const ext = process.platform === "win32" ? ".exe" : ""; - const bundled = vscode.Uri.joinPath(context.extensionUri, "server", `rust-analyzer${ext}`); - const bundledExists = await vscode.workspace.fs.stat(bundled).then( - () => true, - () => false - ); - if (bundledExists) { - let server = bundled; - if (await isNixOs()) { - await vscode.workspace.fs.createDirectory(config.globalStorageUri).then(); - const dest = vscode.Uri.joinPath(config.globalStorageUri, `rust-analyzer${ext}`); - let exists = await vscode.workspace.fs.stat(dest).then( - () => true, - () => false - ); - if (exists && config.package.version !== state.serverVersion) { - await vscode.workspace.fs.delete(dest); - exists = false; - } - if (!exists) { - await vscode.workspace.fs.copy(bundled, dest); - await patchelf(dest); - } - server = dest; - } - await state.updateServerVersion(config.package.version); - return server.fsPath; - } - - await state.updateServerVersion(undefined); - await vscode.window.showErrorMessage( - "Unfortunately we don't ship binaries for your platform yet. " + - "You need to manually clone the rust-analyzer repository and " + - "run `cargo xtask install --server` to build the language server from sources. 
" + - "If you feel that your platform should be supported, please create an issue " + - "about that [here](https://github.com/rust-lang/rust-analyzer/issues) and we " + - "will consider it." - ); - return undefined; -} - -function serverPath(config: Config): string | null { - return process.env.__RA_LSP_SERVER_DEBUG ?? config.serverPath; -} - -async function isNixOs(): Promise { - try { - const contents = ( - await vscode.workspace.fs.readFile(vscode.Uri.file("/etc/os-release")) - ).toString(); - const idString = contents.split("\n").find((a) => a.startsWith("ID=")) || "ID=linux"; - return idString.indexOf("nixos") !== -1; - } catch { - return false; - } -} - -function warnAboutExtensionConflicts() { - if (vscode.extensions.getExtension("rust-lang.rust")) { - vscode.window - .showWarningMessage( - `You have both the rust-analyzer (rust-lang.rust-analyzer) and Rust (rust-lang.rust) ` + - "plugins enabled. These are known to conflict and cause various functions of " + - "both plugins to not work correctly. You should disable one of them.", - "Got it" - ) - .then(() => {}, console.error); - } -} - -/** - * Sets up additional language configuration that's impossible to do via a - * separate language-configuration.json file. See [1] for more information. - * - * [1]: https://github.com/Microsoft/vscode/issues/11514#issuecomment-244707076 - */ -function configureLanguage(): vscode.Disposable { - const indentAction = vscode.IndentAction.None; - return vscode.languages.setLanguageConfiguration("rust", { - onEnterRules: [ - { - // Doc single-line comment - // e.g. ///| - beforeText: /^\s*\/{3}.*$/, - action: { indentAction, appendText: "/// " }, - }, - { - // Parent doc single-line comment - // e.g. //!| - beforeText: /^\s*\/{2}\!.*$/, - action: { indentAction, appendText: "//! " }, + reload: { + enabled: (ctx) => async () => { + void vscode.window.showInformationMessage("Reloading rust-analyzer..."); + // FIXME: We should re-use the client, that is ctx.deactivate() if none of the configs have changed + await ctx.stop(); + await ctx.activate(); }, - { - // Begins an auto-closed multi-line comment (standard or parent doc) - // e.g. /** | */ or /*! | */ - beforeText: /^\s*\/\*(\*|\!)(?!\/)([^\*]|\*(?!\/))*$/, - afterText: /^\s*\*\/$/, - action: { indentAction: vscode.IndentAction.IndentOutdent, appendText: " * " }, + disabled: (ctx) => async () => { + void vscode.window.showInformationMessage("Reloading rust-analyzer..."); + await ctx.activate(); }, - { - // Begins a multi-line comment (standard or parent doc) - // e.g. /** ...| or /*! ...| - beforeText: /^\s*\/\*(\*|\!)(?!\/)([^\*]|\*(?!\/))*$/, - action: { indentAction, appendText: " * " }, + }, + startServer: { + enabled: (ctx) => async () => { + await ctx.activate(); }, - { - // Continues a multi-line comment - // e.g. * ...| - beforeText: /^(\ \ )*\ \*(\ ([^\*]|\*(?!\/))*)?$/, - action: { indentAction, appendText: "* " }, + disabled: (ctx) => async () => { + await ctx.activate(); }, - { - // Dedents after closing a multi-line comment - // e.g. 
*/| - beforeText: /^(\ \ )*\ \*\/\s*$/, - action: { indentAction, removeText: 1 }, + }, + stopServer: { + enabled: (ctx) => async () => { + // FIXME: We should re-use the client, that is ctx.deactivate() if none of the configs have changed + await ctx.stop(); + ctx.setServerStatus({ + health: "ok", + quiescent: true, + message: "server is not running", + }); }, - ], - }); + }, + + analyzerStatus: { enabled: commands.analyzerStatus }, + memoryUsage: { enabled: commands.memoryUsage }, + shuffleCrateGraph: { enabled: commands.shuffleCrateGraph }, + reloadWorkspace: { enabled: commands.reloadWorkspace }, + matchingBrace: { enabled: commands.matchingBrace }, + joinLines: { enabled: commands.joinLines }, + parentModule: { enabled: commands.parentModule }, + syntaxTree: { enabled: commands.syntaxTree }, + viewHir: { enabled: commands.viewHir }, + viewFileText: { enabled: commands.viewFileText }, + viewItemTree: { enabled: commands.viewItemTree }, + viewCrateGraph: { enabled: commands.viewCrateGraph }, + viewFullCrateGraph: { enabled: commands.viewFullCrateGraph }, + expandMacro: { enabled: commands.expandMacro }, + run: { enabled: commands.run }, + copyRunCommandLine: { enabled: commands.copyRunCommandLine }, + debug: { enabled: commands.debug }, + newDebugConfig: { enabled: commands.newDebugConfig }, + openDocs: { enabled: commands.openDocs }, + openCargoToml: { enabled: commands.openCargoToml }, + peekTests: { enabled: commands.peekTests }, + moveItemUp: { enabled: commands.moveItemUp }, + moveItemDown: { enabled: commands.moveItemDown }, + cancelFlycheck: { enabled: commands.cancelFlycheck }, + ssr: { enabled: commands.ssr }, + serverVersion: { enabled: commands.serverVersion }, + // Internal commands which are invoked by the server. + applyActionGroup: { enabled: commands.applyActionGroup }, + applySnippetWorkspaceEdit: { enabled: commands.applySnippetWorkspaceEditCommand }, + debugSingle: { enabled: commands.debugSingle }, + gotoLocation: { enabled: commands.gotoLocation }, + linkToCommand: { enabled: commands.linkToCommand }, + resolveCodeAction: { enabled: commands.resolveCodeAction }, + runSingle: { enabled: commands.runSingle }, + showReferences: { enabled: commands.showReferences }, + }; } diff --git a/src/tools/rust-analyzer/editors/code/src/run.ts b/src/tools/rust-analyzer/editors/code/src/run.ts index 22e5eda6827d1..dadaa41b1d162 100644 --- a/src/tools/rust-analyzer/editors/code/src/run.ts +++ b/src/tools/rust-analyzer/editors/code/src/run.ts @@ -18,9 +18,9 @@ export async function selectRunnable( showButtons: boolean = true ): Promise { const editor = ctx.activeRustEditor; - const client = ctx.client; - if (!editor || !client) return; + if (!editor) return; + const client = await ctx.getClient(); const textDocument: lc.TextDocumentIdentifier = { uri: editor.document.uri.toString(), };
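With `createCommands` returning a `Record<string, CommandFactory>`, adding a command now means adding one entry rather than another `registerCommand` call. A sketch of a hypothetical extra entry (the name and behavior are invented for illustration; only the `CommandFactory` shape and `Ctx.getClient` come from this diff):

```typescript
import * as vscode from "vscode";
import { CommandFactory, Ctx } from "./ctx";

// Hypothetical command entry, not part of this diff. The `enabled` variant runs
// while the client is up; `disabled` is the fallback registered otherwise.
export const serverStatusHint: CommandFactory = {
    enabled: (ctx: Ctx) => async () => {
        const client = await ctx.getClient();
        void client; // a real command would send a request here
        void vscode.window.showInformationMessage("rust-analyzer server is running");
    },
    disabled: (_ctx: Ctx) => async () => {
        void vscode.window.showInformationMessage("rust-analyzer server is not running");
    },
};
```

An entry such as `serverStatusHint: { enabled: ..., disabled: ... }` would then simply be added to the object returned by `createCommands`.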