diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 4a1c70ebd39f..4ee7add1de3b 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -30,10 +30,10 @@ jobs: - os: windows-latest target: aarch64-pc-windows-msvc code-target: win32-arm64 - - os: ubuntu-20.04 + - os: ubuntu-18.04 target: x86_64-unknown-linux-gnu code-target: linux-x64 - - os: ubuntu-20.04 + - os: ubuntu-18.04 target: aarch64-unknown-linux-gnu code-target: linux-arm64 - os: macos-11 diff --git a/Cargo.lock b/Cargo.lock index ef0bd5e3bce2..64654c9961d2 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1832,9 +1832,9 @@ checksum = "0685c84d5d54d1c26f7d3eb96cd41550adb97baed141a761cf335d3d33bcd0ae" [[package]] name = "ungrammar" -version = "1.14.9" +version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "66be59c2fd880e3d76d1a6cf6d34114008f1d8af2748d4ad9d39ea712f14fda9" +checksum = "ed01567101450f7d600508e7680df6005ae4fe97119d79b0364cc5910ff39732" [[package]] name = "unicase" diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 032da5f50af6..e02ba2e7c864 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -74,7 +74,7 @@ use rustc_hash::FxHashSet; use stdx::{format_to, impl_from}; use syntax::{ ast::{self, HasAttrs as _, HasDocComments, HasName}, - AstNode, AstPtr, SmolStr, SyntaxKind, SyntaxNodePtr, + AstNode, AstPtr, SmolStr, SyntaxNodePtr, T, }; use tt::{Ident, Leaf, Literal, TokenTree}; @@ -628,43 +628,38 @@ fn emit_def_diagnostic(db: &dyn HirDatabase, acc: &mut Vec, diag: DefDiagnosticKind::UnresolvedProcMacro { ast } => { let mut precise_location = None; - let (node, name) = match ast { + let (node, macro_name) = match ast { MacroCallKind::FnLike { ast_id, .. } => { let node = ast_id.to_node(db.upcast()); (ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&node))), None) } - MacroCallKind::Derive { ast_id, derive_name, .. 
} => { + MacroCallKind::Derive { ast_id, derive_attr_index, derive_index } => { let node = ast_id.to_node(db.upcast()); // Compute the precise location of the macro name's token in the derive // list. - // FIXME: This does not handle paths to the macro, but neither does the - // rest of r-a. - let derive_attrs = - node.attrs().filter_map(|attr| match attr.as_simple_call() { - Some((name, args)) if name == "derive" => Some(args), - _ => None, - }); - 'outer: for attr in derive_attrs { - let tokens = - attr.syntax().children_with_tokens().filter_map(|elem| match elem { - syntax::NodeOrToken::Node(_) => None, + let token = (|| { + let derive_attr = node.attrs().nth(*derive_attr_index as usize)?; + derive_attr + .syntax() + .children_with_tokens() + .filter_map(|elem| match elem { syntax::NodeOrToken::Token(tok) => Some(tok), - }); - for token in tokens { - if token.kind() == SyntaxKind::IDENT && token.text() == &**derive_name { - precise_location = Some(token.text_range()); - break 'outer; - } - } - } - + _ => None, + }) + .group_by(|t| t.kind() == T![,]) + .into_iter() + .filter(|&(comma, _)| !comma) + .nth(*derive_index as usize) + .and_then(|(_, mut g)| g.find(|t| t.kind() == T![ident])) + })(); + precise_location = token.as_ref().map(|tok| tok.text_range()); ( ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&node))), - Some(derive_name.clone()), + token.as_ref().map(ToString::to_string), ) } - MacroCallKind::Attr { ast_id, invoc_attr_index, attr_name, .. } => { + MacroCallKind::Attr { ast_id, invoc_attr_index, .. 
} => { let node = ast_id.to_node(db.upcast()); let attr = node .doc_comments_and_attrs() @@ -673,14 +668,15 @@ fn emit_def_diagnostic(db: &dyn HirDatabase, acc: &mut Vec, diag: .unwrap_or_else(|| panic!("cannot find attribute #{}", invoc_attr_index)); ( ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&attr))), - Some(attr_name.clone()), + attr.path() + .and_then(|path| path.segment()) + .and_then(|seg| seg.name_ref()) + .as_ref() + .map(ToString::to_string), ) } }; - acc.push( - UnresolvedProcMacro { node, precise_location, macro_name: name.map(Into::into) } - .into(), - ); + acc.push(UnresolvedProcMacro { node, precise_location, macro_name }.into()); } DefDiagnosticKind::UnresolvedMacroCall { ast, path } => { @@ -1380,6 +1376,27 @@ impl Function { db.function_data(self.id).has_body() } + pub fn as_proc_macro(self, db: &dyn HirDatabase) -> Option { + let function_data = db.function_data(self.id); + let attrs = &function_data.attrs; + if !(attrs.is_proc_macro() + || attrs.is_proc_macro_attribute() + || attrs.is_proc_macro_derive()) + { + return None; + } + let loc = self.id.lookup(db.upcast()); + let krate = loc.krate(db); + let def_map = db.crate_def_map(krate.into()); + let ast_id = + InFile::new(loc.id.file_id(), loc.id.item_tree(db.upcast())[loc.id.value].ast_id); + + let mut exported_proc_macros = def_map.exported_proc_macros(); + exported_proc_macros + .find(|&(id, _)| matches!(id.kind, MacroDefKind::ProcMacro(_, _, id) if id == ast_id)) + .map(|(id, _)| MacroDef { id }) + } + /// A textual representation of the HIR of this function for debugging purposes. 
pub fn debug_hir(self, db: &dyn HirDatabase) -> String { let body = db.body(self.id.into()); @@ -1779,6 +1796,10 @@ impl MacroDef { } } + pub fn is_builtin_derive(&self) -> bool { + matches!(self.id.kind, MacroDefKind::BuiltInAttr(exp, _) if exp.is_derive()) + } + pub fn is_attr(&self) -> bool { matches!(self.kind(), MacroKind::Attr) } @@ -2026,10 +2047,13 @@ impl Local { pub fn is_param(self, db: &dyn HirDatabase) -> bool { let src = self.source(db); match src.value { - Either::Left(bind_pat) => { - bind_pat.syntax().ancestors().any(|it| ast::Param::can_cast(it.kind())) - } - Either::Right(_self_param) => true, + Either::Left(pat) => pat + .syntax() + .ancestors() + .map(|it| it.kind()) + .take_while(|&kind| ast::Pat::can_cast(kind) || ast::Param::can_cast(kind)) + .any(ast::Param::can_cast), + Either::Right(_) => true, } } @@ -2040,17 +2064,19 @@ impl Local { } } - // FIXME: why is this an option? It shouldn't be? - pub fn name(self, db: &dyn HirDatabase) -> Option { + pub fn name(self, db: &dyn HirDatabase) -> Name { let body = db.body(self.parent); match &body[self.pat_id] { - Pat::Bind { name, .. } => Some(name.clone()), - _ => None, + Pat::Bind { name, .. 
} => name.clone(), + _ => { + stdx::never!("hir::Local is missing a name!"); + Name::missing() + } } } pub fn is_self(self, db: &dyn HirDatabase) -> bool { - self.name(db) == Some(name![self]) + self.name(db) == name![self] } pub fn is_mut(self, db: &dyn HirDatabase) -> bool { @@ -2413,30 +2439,13 @@ impl Impl { pub fn is_builtin_derive(self, db: &dyn HirDatabase) -> Option> { let src = self.source(db)?; - let item = src.file_id.is_builtin_derive(db.upcast())?; - let hygenic = hir_expand::hygiene::Hygiene::new(db.upcast(), item.file_id); - - // FIXME: handle `cfg_attr` - let attr = item - .value - .attrs() - .filter_map(|it| { - let path = ModPath::from_src(db.upcast(), it.path()?, &hygenic)?; - if path.as_ident()?.to_smol_str() == "derive" { - Some(it) - } else { - None - } - }) - .last()?; - - Some(item.with_value(attr)) + src.file_id.is_builtin_derive(db.upcast()) } } #[derive(Clone, PartialEq, Eq, Debug)] pub struct Type { - krate: CrateId, + krate: CrateId, // FIXME this is probably redundant with the TraitEnvironment env: Arc, ty: Ty, } @@ -2503,6 +2512,10 @@ impl Type { matches!(self.ty.kind(Interner), TyKind::Ref(..)) } + pub fn is_slice(&self) -> bool { + matches!(self.ty.kind(Interner), TyKind::Slice(..)) + } + pub fn is_usize(&self) -> bool { matches!(self.ty.kind(Interner), TyKind::Scalar(Scalar::Uint(UintTy::Usize))) } @@ -2525,12 +2538,9 @@ impl Type { /// Checks that particular type `ty` implements `std::future::Future`. /// This function is used in `.await` syntax completion. pub fn impls_future(&self, db: &dyn HirDatabase) -> bool { - // No special case for the type of async block, since Chalk can figure it out. 
- - let krate = self.krate; - - let std_future_trait = - db.lang_item(krate, SmolStr::new_inline("future_trait")).and_then(|it| it.as_trait()); + let std_future_trait = db + .lang_item(self.krate, SmolStr::new_inline("future_trait")) + .and_then(|it| it.as_trait()); let std_future_trait = match std_future_trait { Some(it) => it, None => return false, @@ -2538,13 +2548,7 @@ impl Type { let canonical_ty = Canonical { value: self.ty.clone(), binders: CanonicalVarKinds::empty(Interner) }; - method_resolution::implements_trait( - &canonical_ty, - db, - self.env.clone(), - krate, - std_future_trait, - ) + method_resolution::implements_trait(&canonical_ty, db, self.env.clone(), std_future_trait) } /// Checks that particular type `ty` implements `std::ops::FnOnce`. @@ -2552,9 +2556,7 @@ impl Type { /// This function can be used to check if a particular type is callable, since FnOnce is a /// supertrait of Fn and FnMut, so all callable types implements at least FnOnce. pub fn impls_fnonce(&self, db: &dyn HirDatabase) -> bool { - let krate = self.krate; - - let fnonce_trait = match FnTrait::FnOnce.get_id(db, krate) { + let fnonce_trait = match FnTrait::FnOnce.get_id(db, self.krate) { Some(it) => it, None => return false, }; @@ -2565,7 +2567,6 @@ impl Type { &canonical_ty, db, self.env.clone(), - krate, fnonce_trait, ) } @@ -2736,9 +2737,8 @@ impl Type { pub fn autoderef_<'a>(&'a self, db: &'a dyn HirDatabase) -> impl Iterator + 'a { // There should be no inference vars in types passed here let canonical = hir_ty::replace_errors_with_variables(&self.ty); - let environment = self.env.env.clone(); - let ty = InEnvironment { goal: canonical, environment }; - autoderef(db, Some(self.krate), ty).map(|canonical| canonical.value) + let environment = self.env.clone(); + autoderef(db, environment, canonical).map(|canonical| canonical.value) } // This would be nicer if it just returned an iterator, but that runs into @@ -2793,24 +2793,26 @@ impl Type { pub fn iterate_method_candidates( 
&self, db: &dyn HirDatabase, - krate: Crate, + scope: &SemanticsScope, + // FIXME this can be retrieved from `scope`, except autoimport uses this + // to specify a different set, so the method needs to be split traits_in_scope: &FxHashSet, with_local_impls: Option, name: Option<&Name>, - mut callback: impl FnMut(Type, Function) -> Option, + mut callback: impl FnMut(Function) -> Option, ) -> Option { let _p = profile::span("iterate_method_candidates"); let mut slot = None; self.iterate_method_candidates_dyn( db, - krate, + scope, traits_in_scope, with_local_impls, name, - &mut |ty, assoc_item_id| { + &mut |assoc_item_id| { if let AssocItemId::FunctionId(func) = assoc_item_id { - if let Some(res) = callback(self.derived(ty.clone()), func.into()) { + if let Some(res) = callback(func.into()) { slot = Some(res); return ControlFlow::Break(()); } @@ -2824,50 +2826,55 @@ impl Type { fn iterate_method_candidates_dyn( &self, db: &dyn HirDatabase, - krate: Crate, + scope: &SemanticsScope, traits_in_scope: &FxHashSet, with_local_impls: Option, name: Option<&Name>, - callback: &mut dyn FnMut(&Ty, AssocItemId) -> ControlFlow<()>, + callback: &mut dyn FnMut(AssocItemId) -> ControlFlow<()>, ) { // There should be no inference vars in types passed here let canonical = hir_ty::replace_errors_with_variables(&self.ty); - let env = self.env.clone(); - let krate = krate.id; + let krate = match scope.krate() { + Some(k) => k, + None => return, + }; + let environment = scope.resolver().generic_def().map_or_else( + || Arc::new(TraitEnvironment::empty(krate.id)), + |d| db.trait_environment(d), + ); method_resolution::iterate_method_candidates_dyn( &canonical, db, - env, - krate, + environment, traits_in_scope, with_local_impls.and_then(|b| b.id.containing_block()).into(), name, method_resolution::LookupMode::MethodCall, - &mut |ty, id| callback(&ty.value, id), + &mut |_adj, id| callback(id), ); } pub fn iterate_path_candidates( &self, db: &dyn HirDatabase, - krate: Crate, + scope: 
&SemanticsScope, traits_in_scope: &FxHashSet, with_local_impls: Option, name: Option<&Name>, - mut callback: impl FnMut(Type, AssocItem) -> Option, + mut callback: impl FnMut(AssocItem) -> Option, ) -> Option { let _p = profile::span("iterate_path_candidates"); let mut slot = None; self.iterate_path_candidates_dyn( db, - krate, + scope, traits_in_scope, with_local_impls, name, - &mut |ty, assoc_item_id| { - if let Some(res) = callback(self.derived(ty.clone()), assoc_item_id.into()) { + &mut |assoc_item_id| { + if let Some(res) = callback(assoc_item_id.into()) { slot = Some(res); return ControlFlow::Break(()); } @@ -2880,27 +2887,31 @@ impl Type { fn iterate_path_candidates_dyn( &self, db: &dyn HirDatabase, - krate: Crate, + scope: &SemanticsScope, traits_in_scope: &FxHashSet, with_local_impls: Option, name: Option<&Name>, - callback: &mut dyn FnMut(&Ty, AssocItemId) -> ControlFlow<()>, + callback: &mut dyn FnMut(AssocItemId) -> ControlFlow<()>, ) { let canonical = hir_ty::replace_errors_with_variables(&self.ty); - let env = self.env.clone(); - let krate = krate.id; + let krate = match scope.krate() { + Some(k) => k, + None => return, + }; + let environment = scope.resolver().generic_def().map_or_else( + || Arc::new(TraitEnvironment::empty(krate.id)), + |d| db.trait_environment(d), + ); - method_resolution::iterate_method_candidates_dyn( + method_resolution::iterate_path_candidates( &canonical, db, - env, - krate, + environment, traits_in_scope, with_local_impls.and_then(|b| b.id.containing_block()).into(), name, - method_resolution::LookupMode::Path, - &mut |ty, id| callback(&ty.value, id), + &mut |id| callback(id), ); } diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs index 243ba63b8a0a..2e0dbf82b777 100644 --- a/crates/hir/src/semantics.rs +++ b/crates/hir/src/semantics.rs @@ -5,7 +5,6 @@ mod source_to_def; use std::{cell::RefCell, fmt, iter}; use base_db::{FileId, FileRange}; -use either::Either; use hir_def::{ body, resolver::{self, 
HasResolver, Resolver, TypeNs}, @@ -19,17 +18,16 @@ use smallvec::{smallvec, SmallVec}; use syntax::{ algo::skip_trivia_token, ast::{self, HasAttrs as _, HasGenericParams, HasLoopBody}, - match_ast, AstNode, AstToken, Direction, SyntaxElement, SyntaxNode, SyntaxNodePtr, SyntaxToken, - TextSize, T, + match_ast, AstNode, Direction, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextSize, }; use crate::{ db::HirDatabase, semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx}, source_analyzer::{resolve_hir_path, SourceAnalyzer}, - Access, AssocItem, BuiltinAttr, Callable, ConstParam, Crate, Field, Function, HasAttrs as _, - HasSource, HirFileId, Impl, InFile, Label, LifetimeParam, Local, MacroDef, Module, ModuleDef, - Name, Path, ScopeDef, ToolModule, Trait, Type, TypeAlias, TypeParam, VariantDef, + Access, AssocItem, BuiltinAttr, Callable, ConstParam, Crate, Field, Function, HasSource, + HirFileId, Impl, InFile, Label, LifetimeParam, Local, MacroDef, Module, ModuleDef, Name, Path, + ScopeDef, ToolModule, Trait, Type, TypeAlias, TypeParam, VariantDef, }; #[derive(Debug, Clone, PartialEq, Eq)] @@ -162,6 +160,10 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> { self.imp.is_attr_macro_call(item) } + pub fn is_derive_annotated(&self, item: &ast::Adt) -> bool { + self.imp.is_derive_annotated(item) + } + pub fn speculative_expand( &self, actual_macro_call: &ast::MacroCall, @@ -350,14 +352,6 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> { self.imp.resolve_bind_pat_to_const(pat) } - pub fn resolve_derive_ident( - &self, - derive: &ast::Attr, - ident: &ast::Ident, - ) -> Option { - self.imp.resolve_derive_ident(derive, ident) - } - pub fn record_literal_missing_fields(&self, literal: &ast::RecordExpr) -> Vec<(Field, Type)> { self.imp.record_literal_missing_fields(literal) } @@ -475,11 +469,17 @@ impl<'db> SemanticsImpl<'db> { let adt = InFile::new(file_id, &adt); let src = InFile::new(file_id, attr.clone()); self.with_ctx(|ctx| { - let (_, res) = 
ctx.attr_to_derive_macro_call(adt, src)?; + let (.., res) = ctx.attr_to_derive_macro_call(adt, src)?; Some(res.to_vec()) }) } + fn is_derive_annotated(&self, adt: &ast::Adt) -> bool { + let file_id = self.find_file(adt.syntax()).file_id; + let adt = InFile::new(file_id, adt); + self.with_ctx(|ctx| ctx.has_derives(adt)) + } + fn is_attr_macro_call(&self, item: &ast::Item) -> bool { let file_id = self.find_file(item.syntax()).file_id; let src = InFile::new(file_id, item.clone()); @@ -668,7 +668,36 @@ impl<'db> SemanticsImpl<'db> { // FIXME replace map.while_some with take_while once stable token.value.ancestors().map(ast::TokenTree::cast).while_some().last() { - let macro_call = tt.syntax().parent().and_then(ast::MacroCall::cast)?; + let parent = tt.syntax().parent()?; + // check for derive attribute here + let macro_call = match_ast! { + match parent { + ast::MacroCall(mcall) => mcall, + // attribute we failed expansion for earlier, this might be a derive invocation + // so try downmapping the token into the pseudo derive expansion + // see [hir_expand::builtin_attr_macro] for how the pseudo derive expansion works + ast::Meta(meta) => { + let attr = meta.parent_attr()?; + let adt = attr.syntax().parent().and_then(ast::Adt::cast)?; + let call_id = self.with_ctx(|ctx| { + let (_, call_id, _) = ctx.attr_to_derive_macro_call( + token.with_value(&adt), + token.with_value(attr), + )?; + Some(call_id) + })?; + let file_id = call_id.as_file(); + return process_expansion_for_token( + &mut stack, + file_id, + Some(adt.into()), + token.as_ref(), + ); + }, + _ => return None, + } + }; + if tt.left_delimiter_token().map_or(false, |it| it == token.value) { return None; } @@ -898,72 +927,6 @@ impl<'db> SemanticsImpl<'db> { self.analyze(pat.syntax()).resolve_bind_pat_to_const(self.db, pat) } - fn resolve_derive_ident( - &self, - derive: &ast::Attr, - ident: &ast::Ident, - ) -> Option { - debug_assert!(ident.syntax().parent().and_then(ast::TokenTree::cast).is_some()); - 
debug_assert!(ident.syntax().ancestors().any(|anc| anc == *derive.syntax())); - // derive macros are always at depth 2, tokentree -> meta -> attribute - let syntax = ident.syntax(); - - let tt = derive.token_tree()?; - let file = self.find_file(derive.syntax()); - let adt = derive.syntax().parent().and_then(ast::Adt::cast)?; - let adt_def = ToDef::to_def(self, file.with_value(adt.clone()))?; - let res = self.with_ctx(|ctx| { - let (attr_id, derives) = ctx.attr_to_derive_macro_call( - file.with_value(&adt), - file.with_value(derive.clone()), - )?; - let attrs = adt_def.attrs(self.db); - let mut derive_paths = attrs.get(attr_id)?.parse_path_comma_token_tree()?; - - let derive_idx = tt - .syntax() - .children_with_tokens() - .filter_map(SyntaxElement::into_token) - .take_while(|tok| tok != syntax) - .filter(|t| t.kind() == T![,]) - .count(); - let path_segment_idx = syntax - .siblings_with_tokens(Direction::Prev) - .filter_map(SyntaxElement::into_token) - .take_while(|tok| matches!(tok.kind(), T![:] | T![ident])) - .filter(|tok| tok.kind() == T![ident]) - .count(); - - let mut mod_path = derive_paths.nth(derive_idx)?; - - if path_segment_idx < mod_path.len() { - // the path for the given ident is a qualifier, resolve to module if possible - while path_segment_idx < mod_path.len() { - mod_path.pop_segment(); - } - Some(Either::Left(mod_path)) - } else { - // otherwise fetch the derive - Some(Either::Right(derives[derive_idx])) - } - })?; - - match res { - Either::Left(path) => { - let len = path.len(); - resolve_hir_path( - self.db, - &self.scope(derive.syntax()).resolver, - &Path::from_known_path(path, vec![None; len]), - ) - .filter(|res| matches!(res, PathResolution::Def(ModuleDef::Module(_)))) - } - Either::Right(derive) => derive - .map(|call| MacroDef { id: self.db.lookup_intern_macro_call(call).def }) - .map(PathResolution::Macro), - } - } - fn record_literal_missing_fields(&self, literal: &ast::RecordExpr) -> Vec<(Field, Type)> { self.analyze(literal.syntax()) 
.record_literal_missing_fields(self.db, literal) @@ -1267,6 +1230,10 @@ impl<'a> SemanticsScope<'a> { Some(Crate { id: self.resolver.krate()? }) } + pub(crate) fn resolver(&self) -> &Resolver { + &self.resolver + } + /// Note: `FxHashSet` should be treated as an opaque type, passed into `Type pub fn visible_traits(&self) -> FxHashSet { let resolver = &self.resolver; diff --git a/crates/hir/src/semantics/source_to_def.rs b/crates/hir/src/semantics/source_to_def.rs index 506186123924..dddb8e33dcc0 100644 --- a/crates/hir/src/semantics/source_to_def.rs +++ b/crates/hir/src/semantics/source_to_def.rs @@ -249,9 +249,14 @@ impl SourceToDefCtx<'_, '_> { &mut self, item: InFile<&ast::Adt>, src: InFile, - ) -> Option<(AttrId, &[Option])> { + ) -> Option<(AttrId, MacroCallId, &[Option])> { let map = self.dyn_map(item)?; - map[keys::DERIVE_MACRO_CALL].get(&src.value).map(|(id, ids)| (*id, &**ids)) + map[keys::DERIVE_MACRO_CALL] + .get(&src.value) + .map(|&(attr_id, call_id, ref ids)| (attr_id, call_id, &**ids)) + } + pub(super) fn has_derives(&mut self, adt: InFile<&ast::Adt>) -> bool { + self.dyn_map(adt).as_ref().map_or(false, |map| !map[keys::DERIVE_MACRO_CALL].is_empty()) } fn to_def( diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs index 64997720393f..537a503f5cff 100644 --- a/crates/hir/src/source_analyzer.rs +++ b/crates/hir/src/source_analyzer.rs @@ -371,10 +371,10 @@ impl SourceAnalyzer { return builtin.map(PathResolution::BuiltinAttr); } return match resolve_hir_path_as_macro(db, &self.resolver, &hir_path) { - res @ Some(m) if m.is_attr() => res.map(PathResolution::Macro), + Some(m) => Some(PathResolution::Macro(m)), // this labels any path that starts with a tool module as the tool itself, this is technically wrong // but there is no benefit in differentiating these two cases for the time being - _ => path.first_segment().and_then(|it| it.name_ref()).and_then(|name_ref| { + None => path.first_segment().and_then(|it| 
it.name_ref()).and_then(|name_ref| { match self.resolver.krate() { Some(krate) => ToolModule::by_name(db, krate.into(), &name_ref.text()), None => ToolModule::builtin(&name_ref.text()), diff --git a/crates/hir_def/src/attr.rs b/crates/hir_def/src/attr.rs index 0a8fb2e47a8f..3ff2d7b915ed 100644 --- a/crates/hir_def/src/attr.rs +++ b/crates/hir_def/src/attr.rs @@ -236,7 +236,9 @@ impl Attrs { pub fn by_key(&self, key: &'static str) -> AttrQuery<'_> { AttrQuery { attrs: self, key } } +} +impl Attrs { pub fn cfg(&self) -> Option { let mut cfgs = self.by_key("cfg").tt_values().map(CfgExpr::parse).collect::>(); match cfgs.len() { @@ -298,6 +300,18 @@ impl Attrs { matches!(&*tt.token_trees, [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.text == "hidden") }) } + + pub fn is_proc_macro(&self) -> bool { + self.by_key("proc_macro").exists() + } + + pub fn is_proc_macro_attribute(&self) -> bool { + self.by_key("proc_macro_attribute").exists() + } + + pub fn is_proc_macro_derive(&self) -> bool { + self.by_key("proc_macro_derive").exists() + } } impl AttrsWithOwner { diff --git a/crates/hir_def/src/body.rs b/crates/hir_def/src/body.rs index 8488b4f0d03f..a2f64eda062f 100644 --- a/crates/hir_def/src/body.rs +++ b/crates/hir_def/src/body.rs @@ -97,9 +97,9 @@ impl Expander { ) -> Result>, UnresolvedMacro> { if self.recursion_limit(db).check(self.recursion_limit + 1).is_err() { cov_mark::hit!(your_stack_belongs_to_me); - return Ok(ExpandResult::str_err( + return Ok(ExpandResult::only_err(ExpandError::Other( "reached recursion limit during macro expansion".into(), - )); + ))); } let macro_call = InFile::new(self.current_file_id, ¯o_call); @@ -151,7 +151,7 @@ impl Expander { } return ExpandResult::only_err(err.unwrap_or_else(|| { - mbe::ExpandError::Other("failed to parse macro invocation".into()) + ExpandError::Other("failed to parse macro invocation".into()) })); } }; diff --git a/crates/hir_def/src/body/lower.rs b/crates/hir_def/src/body/lower.rs index 
7cbeef1488a0..06ad7ce4cd08 100644 --- a/crates/hir_def/src/body/lower.rs +++ b/crates/hir_def/src/body/lower.rs @@ -28,7 +28,7 @@ use crate::{ db::DefDatabase, expr::{ dummy_expr_id, Array, BindingAnnotation, Expr, ExprId, Label, LabelId, Literal, MatchArm, - MatchGuard, Pat, PatId, RecordFieldPat, RecordLitField, Statement, + Pat, PatId, RecordFieldPat, RecordLitField, Statement, }, intern::Interned, item_scope::BuiltinShadowMode, @@ -155,9 +155,6 @@ impl ExprCollector<'_> { fn alloc_expr_desugared(&mut self, expr: Expr) -> ExprId { self.make_expr(expr, Err(SyntheticSyntax)) } - fn unit(&mut self) -> ExprId { - self.alloc_expr_desugared(Expr::Tuple { exprs: Box::default() }) - } fn missing_expr(&mut self) -> ExprId { self.alloc_expr_desugared(Expr::Missing) } @@ -215,33 +212,15 @@ impl ExprCollector<'_> { } }); - let condition = match e.condition() { - None => self.missing_expr(), - Some(condition) => match condition.pat() { - None => self.collect_expr_opt(condition.expr()), - // if let -- desugar to match - Some(pat) => { - let pat = self.collect_pat(pat); - let match_expr = self.collect_expr_opt(condition.expr()); - let placeholder_pat = self.missing_pat(); - let arms = vec![ - MatchArm { pat, expr: then_branch, guard: None }, - MatchArm { - pat: placeholder_pat, - expr: else_branch.unwrap_or_else(|| self.unit()), - guard: None, - }, - ] - .into(); - return Some( - self.alloc_expr(Expr::Match { expr: match_expr, arms }, syntax_ptr), - ); - } - }, - }; + let condition = self.collect_expr_opt(e.condition()); self.alloc_expr(Expr::If { condition, then_branch, else_branch }, syntax_ptr) } + ast::Expr::LetExpr(e) => { + let pat = self.collect_pat_opt(e.pat()); + let expr = self.collect_expr_opt(e.expr()); + self.alloc_expr(Expr::Let { pat, expr }, syntax_ptr) + } ast::Expr::BlockExpr(e) => match e.modifier() { Some(ast::BlockModifier::Try(_)) => { let body = self.collect_block(e); @@ -282,31 +261,7 @@ impl ExprCollector<'_> { let label = e.label().map(|label| 
self.collect_label(label)); let body = self.collect_block_opt(e.loop_body()); - let condition = match e.condition() { - None => self.missing_expr(), - Some(condition) => match condition.pat() { - None => self.collect_expr_opt(condition.expr()), - // if let -- desugar to match - Some(pat) => { - cov_mark::hit!(infer_resolve_while_let); - let pat = self.collect_pat(pat); - let match_expr = self.collect_expr_opt(condition.expr()); - let placeholder_pat = self.missing_pat(); - let break_ = - self.alloc_expr_desugared(Expr::Break { expr: None, label: None }); - let arms = vec![ - MatchArm { pat, expr: body, guard: None }, - MatchArm { pat: placeholder_pat, expr: break_, guard: None }, - ] - .into(); - let match_expr = - self.alloc_expr_desugared(Expr::Match { expr: match_expr, arms }); - return Some( - self.alloc_expr(Expr::Loop { body: match_expr, label }, syntax_ptr), - ); - } - }, - }; + let condition = self.collect_expr_opt(e.condition()); self.alloc_expr(Expr::While { condition, body, label }, syntax_ptr) } @@ -352,15 +307,9 @@ impl ExprCollector<'_> { self.check_cfg(&arm).map(|()| MatchArm { pat: self.collect_pat_opt(arm.pat()), expr: self.collect_expr_opt(arm.expr()), - guard: arm.guard().map(|guard| match guard.pat() { - Some(pat) => MatchGuard::IfLet { - pat: self.collect_pat(pat), - expr: self.collect_expr_opt(guard.expr()), - }, - None => { - MatchGuard::If { expr: self.collect_expr_opt(guard.expr()) } - } - }), + guard: arm + .guard() + .map(|guard| self.collect_expr_opt(guard.condition())), }) }) .collect() diff --git a/crates/hir_def/src/body/scope.rs b/crates/hir_def/src/body/scope.rs index 2658eece8e85..fc36f1ae526c 100644 --- a/crates/hir_def/src/body/scope.rs +++ b/crates/hir_def/src/body/scope.rs @@ -8,7 +8,7 @@ use rustc_hash::FxHashMap; use crate::{ body::Body, db::DefDatabase, - expr::{Expr, ExprId, LabelId, MatchGuard, Pat, PatId, Statement}, + expr::{Expr, ExprId, LabelId, Pat, PatId, Statement}, BlockId, DefWithBodyId, }; @@ -53,9 +53,9 @@ impl 
ExprScopes { fn new(body: &Body) -> ExprScopes { let mut scopes = ExprScopes { scopes: Arena::default(), scope_by_expr: FxHashMap::default() }; - let root = scopes.root_scope(); + let mut root = scopes.root_scope(); scopes.add_params_bindings(body, root, &body.params); - compute_expr_scopes(body.body_expr, body, &mut scopes, root); + compute_expr_scopes(body.body_expr, body, &mut scopes, &mut root); scopes } @@ -151,32 +151,32 @@ fn compute_block_scopes( match stmt { Statement::Let { pat, initializer, else_branch, .. } => { if let Some(expr) = initializer { - compute_expr_scopes(*expr, body, scopes, scope); + compute_expr_scopes(*expr, body, scopes, &mut scope); } if let Some(expr) = else_branch { - compute_expr_scopes(*expr, body, scopes, scope); + compute_expr_scopes(*expr, body, scopes, &mut scope); } scope = scopes.new_scope(scope); scopes.add_bindings(body, scope, *pat); } Statement::Expr { expr, .. } => { - compute_expr_scopes(*expr, body, scopes, scope); + compute_expr_scopes(*expr, body, scopes, &mut scope); } } } if let Some(expr) = tail { - compute_expr_scopes(expr, body, scopes, scope); + compute_expr_scopes(expr, body, scopes, &mut scope); } } -fn compute_expr_scopes(expr: ExprId, body: &Body, scopes: &mut ExprScopes, scope: ScopeId) { +fn compute_expr_scopes(expr: ExprId, body: &Body, scopes: &mut ExprScopes, scope: &mut ScopeId) { let make_label = |label: &Option| label.map(|label| (label, body.labels[label].name.clone())); - scopes.set_scope(expr, scope); + scopes.set_scope(expr, *scope); match &body[expr] { Expr::Block { statements, tail, id, label } => { - let scope = scopes.new_block_scope(scope, *id, make_label(label)); + let scope = scopes.new_block_scope(*scope, *id, make_label(label)); // Overwrite the old scope for the block expr, so that every block scope can be found // via the block itself (important for blocks that only contain items, no expressions). 
scopes.set_scope(expr, scope); @@ -184,46 +184,49 @@ fn compute_expr_scopes(expr: ExprId, body: &Body, scopes: &mut ExprScopes, scope } Expr::For { iterable, pat, body: body_expr, label } => { compute_expr_scopes(*iterable, body, scopes, scope); - let scope = scopes.new_labeled_scope(scope, make_label(label)); + let mut scope = scopes.new_labeled_scope(*scope, make_label(label)); scopes.add_bindings(body, scope, *pat); - compute_expr_scopes(*body_expr, body, scopes, scope); + compute_expr_scopes(*body_expr, body, scopes, &mut scope); } Expr::While { condition, body: body_expr, label } => { - let scope = scopes.new_labeled_scope(scope, make_label(label)); - compute_expr_scopes(*condition, body, scopes, scope); - compute_expr_scopes(*body_expr, body, scopes, scope); + let mut scope = scopes.new_labeled_scope(*scope, make_label(label)); + compute_expr_scopes(*condition, body, scopes, &mut scope); + compute_expr_scopes(*body_expr, body, scopes, &mut scope); } Expr::Loop { body: body_expr, label } => { - let scope = scopes.new_labeled_scope(scope, make_label(label)); - compute_expr_scopes(*body_expr, body, scopes, scope); + let mut scope = scopes.new_labeled_scope(*scope, make_label(label)); + compute_expr_scopes(*body_expr, body, scopes, &mut scope); } Expr::Lambda { args, body: body_expr, .. 
} => { - let scope = scopes.new_scope(scope); + let mut scope = scopes.new_scope(*scope); scopes.add_params_bindings(body, scope, args); - compute_expr_scopes(*body_expr, body, scopes, scope); + compute_expr_scopes(*body_expr, body, scopes, &mut scope); } Expr::Match { expr, arms } => { compute_expr_scopes(*expr, body, scopes, scope); for arm in arms.iter() { - let mut scope = scopes.new_scope(scope); + let mut scope = scopes.new_scope(*scope); scopes.add_bindings(body, scope, arm.pat); - match arm.guard { - Some(MatchGuard::If { expr: guard }) => { - scopes.set_scope(guard, scope); - compute_expr_scopes(guard, body, scopes, scope); - } - Some(MatchGuard::IfLet { pat, expr: guard }) => { - scopes.set_scope(guard, scope); - compute_expr_scopes(guard, body, scopes, scope); - scope = scopes.new_scope(scope); - scopes.add_bindings(body, scope, pat); - } - _ => {} - }; - scopes.set_scope(arm.expr, scope); - compute_expr_scopes(arm.expr, body, scopes, scope); + if let Some(guard) = arm.guard { + scope = scopes.new_scope(scope); + compute_expr_scopes(guard, body, scopes, &mut scope); + } + compute_expr_scopes(arm.expr, body, scopes, &mut scope); + } + } + &Expr::If { condition, then_branch, else_branch } => { + let mut then_branch_scope = scopes.new_scope(*scope); + compute_expr_scopes(condition, body, scopes, &mut then_branch_scope); + compute_expr_scopes(then_branch, body, scopes, &mut then_branch_scope); + if let Some(else_branch) = else_branch { + compute_expr_scopes(else_branch, body, scopes, scope); } } + &Expr::Let { pat, expr } => { + compute_expr_scopes(expr, body, scopes, scope); + *scope = scopes.new_scope(*scope); + scopes.add_bindings(body, *scope, pat); + } e => e.walk_child_exprs(|e| compute_expr_scopes(e, body, scopes, scope)), }; } @@ -500,8 +503,7 @@ fn foo() { } #[test] - fn while_let_desugaring() { - cov_mark::check!(infer_resolve_while_let); + fn while_let_adds_binding() { do_check_local_name( r#" fn test() { @@ -513,5 +515,53 @@ fn test() { "#, 75, 
); + do_check_local_name( + r#" +fn test() { + let foo: Option = None; + while (((let Option::Some(_) = foo))) && let Option::Some(spam) = foo { + spam$0 + } +} +"#, + 107, + ); + } + + #[test] + fn match_guard_if_let() { + do_check_local_name( + r#" +fn test() { + let foo: Option = None; + match foo { + _ if let Option::Some(spam) = foo => spam$0, + } +} +"#, + 93, + ); + } + + #[test] + fn let_chains_can_reference_previous_lets() { + do_check_local_name( + r#" +fn test() { + let foo: Option = None; + if let Some(spam) = foo && spa$0m > 1 && let Some(spam) = foo && spam > 1 {} +} +"#, + 61, + ); + do_check_local_name( + r#" +fn test() { + let foo: Option = None; + if let Some(spam) = foo && spam > 1 && let Some(spam) = foo && sp$0am > 1 {} +} +"#, + 100, + ); } } diff --git a/crates/hir_def/src/child_by_source.rs b/crates/hir_def/src/child_by_source.rs index 5c32a31e443d..1e1573d4ae09 100644 --- a/crates/hir_def/src/child_by_source.rs +++ b/crates/hir_def/src/child_by_source.rs @@ -116,11 +116,11 @@ impl ChildBySource for ItemScope { self.derive_macro_invocs().filter(|(id, _)| id.file_id == file_id).for_each( |(ast_id, calls)| { let adt = ast_id.to_node(db.upcast()); - calls.for_each(|(attr_id, calls)| { + calls.for_each(|(attr_id, call_id, calls)| { if let Some(Either::Left(attr)) = adt.doc_comments_and_attrs().nth(attr_id.ast_index as usize) { - res[keys::DERIVE_MACRO_CALL].insert(attr, (attr_id, calls.into())); + res[keys::DERIVE_MACRO_CALL].insert(attr, (attr_id, call_id, calls.into())); } }); }, diff --git a/crates/hir_def/src/dyn_map.rs b/crates/hir_def/src/dyn_map.rs index 6f269d7b01fe..166aa04da044 100644 --- a/crates/hir_def/src/dyn_map.rs +++ b/crates/hir_def/src/dyn_map.rs @@ -54,6 +54,7 @@ pub trait Policy { fn insert(map: &mut DynMap, key: Self::K, value: Self::V); fn get<'a>(map: &'a DynMap, key: &Self::K) -> Option<&'a Self::V>; + fn is_empty(map: &DynMap) -> bool; } impl Policy for (K, V) { @@ -65,6 +66,9 @@ impl Policy for (K, V) { fn get<'a>(map: 
&'a DynMap, key: &K) -> Option<&'a V> { map.map.get::>()?.get(key) } + fn is_empty(map: &DynMap) -> bool { + map.map.get::>().map_or(true, |it| it.is_empty()) + } } pub struct DynMap { @@ -90,6 +94,10 @@ impl KeyMap> { pub fn get(&self, key: &P::K) -> Option<&P::V> { P::get(&self.map, key) } + + pub fn is_empty(&self) -> bool { + P::is_empty(&self.map) + } } impl Index> for DynMap { diff --git a/crates/hir_def/src/expr.rs b/crates/hir_def/src/expr.rs index 6534f970ee6b..4dca8238880d 100644 --- a/crates/hir_def/src/expr.rs +++ b/crates/hir_def/src/expr.rs @@ -59,6 +59,10 @@ pub enum Expr { then_branch: ExprId, else_branch: Option, }, + Let { + pat: PatId, + expr: ExprId, + }, Block { id: BlockId, statements: Box<[Statement]>, @@ -189,17 +193,10 @@ pub enum Array { #[derive(Debug, Clone, Eq, PartialEq)] pub struct MatchArm { pub pat: PatId, - pub guard: Option, + pub guard: Option, pub expr: ExprId, } -#[derive(Debug, Clone, Eq, PartialEq)] -pub enum MatchGuard { - If { expr: ExprId }, - - IfLet { pat: PatId, expr: ExprId }, -} - #[derive(Debug, Clone, Eq, PartialEq)] pub struct RecordLitField { pub name: Name, @@ -232,6 +229,9 @@ impl Expr { f(else_branch); } } + Expr::Let { expr, .. } => { + f(*expr); + } Expr::Block { statements, tail, .. } => { for stmt in statements.iter() { match stmt { diff --git a/crates/hir_def/src/item_scope.rs b/crates/hir_def/src/item_scope.rs index 258d1e0f6c51..fffec96bab90 100644 --- a/crates/hir_def/src/item_scope.rs +++ b/crates/hir_def/src/item_scope.rs @@ -66,8 +66,10 @@ pub struct ItemScope { attr_macros: FxHashMap, MacroCallId>, /// The derive macro invocations in this scope, keyed by the owner item over the actual derive attributes /// paired with the derive macro invocations for the specific attribute. 
- derive_macros: - FxHashMap, SmallVec<[(AttrId, SmallVec<[Option; 1]>); 1]>>, + derive_macros: FxHashMap< + AstId, + SmallVec<[(AttrId, MacroCallId, SmallVec<[Option; 1]>); 1]>, + >, } pub(crate) static BUILTIN_SCOPE: Lazy> = Lazy::new(|| { @@ -210,7 +212,7 @@ impl ItemScope { idx: usize, ) { if let Some(derives) = self.derive_macros.get_mut(&adt) { - if let Some((_, invocs)) = derives.iter_mut().find(|&&mut (id, _)| id == attr_id) { + if let Some((.., invocs)) = derives.iter_mut().find(|&&mut (id, ..)| id == attr_id) { invocs[idx] = Some(call); } } @@ -223,19 +225,23 @@ impl ItemScope { &mut self, adt: AstId, attr_id: AttrId, + call_id: MacroCallId, len: usize, ) { - self.derive_macros.entry(adt).or_default().push((attr_id, smallvec![None; len])); + self.derive_macros.entry(adt).or_default().push((attr_id, call_id, smallvec![None; len])); } pub(crate) fn derive_macro_invocs( &self, ) -> impl Iterator< - Item = (AstId, impl Iterator])>), + Item = ( + AstId, + impl Iterator])>, + ), > + '_ { - self.derive_macros - .iter() - .map(|(k, v)| (*k, v.iter().map(|(attr_id, invocs)| (*attr_id, &**invocs)))) + self.derive_macros.iter().map(|(k, v)| { + (*k, v.iter().map(|&(attr_id, call_id, ref invocs)| (attr_id, call_id, &**invocs))) + }) } pub(crate) fn unnamed_trait_vis(&self, tr: TraitId) -> Option { diff --git a/crates/hir_def/src/item_tree/lower.rs b/crates/hir_def/src/item_tree/lower.rs index 4547bfc55e6c..f1b378a3be5f 100644 --- a/crates/hir_def/src/item_tree/lower.rs +++ b/crates/hir_def/src/item_tree/lower.rs @@ -702,6 +702,7 @@ enum GenericsOwner<'a> { fn is_intrinsic_fn_unsafe(name: &Name) -> bool { // Should be kept in sync with https://github.com/rust-lang/rust/blob/532d2b14c05f9bc20b2d27cbb5f4550d28343a36/compiler/rustc_typeck/src/check/intrinsic.rs#L72-L106 ![ + known::abort, known::add_with_overflow, known::bitreverse, known::black_box, diff --git a/crates/hir_def/src/item_tree/pretty.rs b/crates/hir_def/src/item_tree/pretty.rs index 
eaaff5a21f7b..0df6e97dd463 100644 --- a/crates/hir_def/src/item_tree/pretty.rs +++ b/crates/hir_def/src/item_tree/pretty.rs @@ -493,14 +493,14 @@ impl<'a> Printer<'a> { w!(self, "]"); } TypeRef::Fn(args_and_ret, varargs) => { - let (ret, args) = + let ((_, return_type), args) = args_and_ret.split_last().expect("TypeRef::Fn is missing return type"); w!(self, "fn("); - for (i, arg) in args.iter().enumerate() { + for (i, (_, typeref)) in args.iter().enumerate() { if i != 0 { w!(self, ", "); } - self.print_type_ref(arg); + self.print_type_ref(&typeref); } if *varargs { if !args.is_empty() { @@ -509,7 +509,7 @@ impl<'a> Printer<'a> { w!(self, "..."); } w!(self, ") -> "); - self.print_type_ref(ret); + self.print_type_ref(&return_type); } TypeRef::Macro(_ast_id) => { w!(self, ""); diff --git a/crates/hir_def/src/keys.rs b/crates/hir_def/src/keys.rs index 93c92c1b9c94..8cd2d7717214 100644 --- a/crates/hir_def/src/keys.rs +++ b/crates/hir_def/src/keys.rs @@ -34,7 +34,8 @@ pub const CONST_PARAM: Key = Key::new(); pub const MACRO: Key = Key::new(); pub const ATTR_MACRO_CALL: Key = Key::new(); -pub const DERIVE_MACRO_CALL: Key]>)> = Key::new(); +pub const DERIVE_MACRO_CALL: Key]>)> = + Key::new(); /// XXX: AST Nodes and SyntaxNodes have identity equality semantics: nodes are /// equal if they point to exactly the same object. 
@@ -60,4 +61,7 @@ impl Policy for AstPtrPolicy { let key = AstPtr::new(key); map.map.get::, ID>>()?.get(&key) } + fn is_empty(map: &DynMap) -> bool { + map.map.get::, ID>>().map_or(true, |it| it.is_empty()) + } } diff --git a/crates/hir_def/src/lib.rs b/crates/hir_def/src/lib.rs index db1e65d080e0..bb65d1dec87d 100644 --- a/crates/hir_def/src/lib.rs +++ b/crates/hir_def/src/lib.rs @@ -63,8 +63,8 @@ use hir_expand::{ ast_id_map::FileAstId, eager::{expand_eager_macro, ErrorEmitted, ErrorSink}, hygiene::Hygiene, - AstId, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind, MacroDefId, MacroDefKind, - UnresolvedMacro, + AstId, ExpandError, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind, MacroDefId, + MacroDefKind, UnresolvedMacro, }; use item_tree::ExternBlock; use la_arena::Idx; @@ -662,7 +662,7 @@ pub trait AsMacroCall { db: &dyn db::DefDatabase, krate: CrateId, resolver: impl Fn(path::ModPath) -> Option, - error_sink: &mut dyn FnMut(mbe::ExpandError), + error_sink: &mut dyn FnMut(ExpandError), ) -> Result, UnresolvedMacro>; } @@ -672,7 +672,7 @@ impl AsMacroCall for InFile<&ast::MacroCall> { db: &dyn db::DefDatabase, krate: CrateId, resolver: impl Fn(path::ModPath) -> Option, - mut error_sink: &mut dyn FnMut(mbe::ExpandError), + mut error_sink: &mut dyn FnMut(ExpandError), ) -> Result, UnresolvedMacro> { let expands_to = hir_expand::ExpandTo::from_call_site(self.value); let ast_id = AstId::new(self.file_id, db.ast_id_map(self.file_id).ast_id(self.value)); @@ -681,7 +681,7 @@ impl AsMacroCall for InFile<&ast::MacroCall> { self.value.path().and_then(|path| path::ModPath::from_src(db.upcast(), path, &h)); let path = match error_sink - .option(path, || mbe::ExpandError::Other("malformed macro invocation".into())) + .option(path, || ExpandError::Other("malformed macro invocation".into())) { Ok(path) => path, Err(error) => { @@ -690,9 +690,9 @@ impl AsMacroCall for InFile<&ast::MacroCall> { }; macro_call_as_call_id( + db, &AstIdWithPath::new(ast_id.file_id, 
ast_id.value, path), expands_to, - db, krate, resolver, error_sink, @@ -714,12 +714,12 @@ impl AstIdWithPath { } fn macro_call_as_call_id( + db: &dyn db::DefDatabase, call: &AstIdWithPath, expand_to: ExpandTo, - db: &dyn db::DefDatabase, krate: CrateId, resolver: impl Fn(path::ModPath) -> Option, - error_sink: &mut dyn FnMut(mbe::ExpandError), + error_sink: &mut dyn FnMut(ExpandError), ) -> Result, UnresolvedMacro> { let def: MacroDefId = resolver(call.path.clone()).ok_or_else(|| UnresolvedMacro { path: call.path.clone() })?; @@ -739,25 +739,21 @@ fn macro_call_as_call_id( } fn derive_macro_as_call_id( + db: &dyn db::DefDatabase, item_attr: &AstIdWithPath, derive_attr: AttrId, - db: &dyn db::DefDatabase, + derive_pos: u32, krate: CrateId, resolver: impl Fn(path::ModPath) -> Option, ) -> Result { let def: MacroDefId = resolver(item_attr.path.clone()) .ok_or_else(|| UnresolvedMacro { path: item_attr.path.clone() })?; - let last_segment = item_attr - .path - .segments() - .last() - .ok_or_else(|| UnresolvedMacro { path: item_attr.path.clone() })?; let res = def.as_lazy_macro( db.upcast(), krate, MacroCallKind::Derive { ast_id: item_attr.ast_id, - derive_name: last_segment.to_string().into_boxed_str(), + derive_index: derive_pos, derive_attr_index: derive_attr.ast_index, }, ); @@ -765,14 +761,13 @@ fn derive_macro_as_call_id( } fn attr_macro_as_call_id( + db: &dyn db::DefDatabase, item_attr: &AstIdWithPath, macro_attr: &Attr, - db: &dyn db::DefDatabase, krate: CrateId, def: MacroDefId, + is_derive: bool, ) -> MacroCallId { - let attr_path = &item_attr.path; - let last_segment = attr_path.segments().last().expect("empty attribute path"); let mut arg = match macro_attr.input.as_deref() { Some(attr::AttrInput::TokenTree(tt, map)) => (tt.clone(), map.clone()), _ => Default::default(), @@ -786,9 +781,9 @@ fn attr_macro_as_call_id( krate, MacroCallKind::Attr { ast_id: item_attr.ast_id, - attr_name: last_segment.to_string().into_boxed_str(), - attr_args: arg, + attr_args: 
Arc::new(arg), invoc_attr_index: macro_attr.id.ast_index, + is_derive, }, ); res diff --git a/crates/hir_def/src/macro_expansion_tests/builtin_fn_macro.rs b/crates/hir_def/src/macro_expansion_tests/builtin_fn_macro.rs index 6e91301ecdc8..919dd6c07f03 100644 --- a/crates/hir_def/src/macro_expansion_tests/builtin_fn_macro.rs +++ b/crates/hir_def/src/macro_expansion_tests/builtin_fn_macro.rs @@ -313,6 +313,24 @@ fn main() { "foor0bar\nfalse"; } ); } +#[test] +fn test_concat_bytes_expand() { + check( + r##" +#[rustc_builtin_macro] +macro_rules! concat_bytes {} + +fn main() { concat_bytes!(b'A', b"BC", [68, b'E', 70]); } +"##, + expect![[r##" +#[rustc_builtin_macro] +macro_rules! concat_bytes {} + +fn main() { [b'A', 66, 67, 68, b'E', 70]; } +"##]], + ); +} + #[test] fn test_concat_with_captured_expr() { check( diff --git a/crates/hir_def/src/macro_expansion_tests/mbe/tt_conversion.rs b/crates/hir_def/src/macro_expansion_tests/mbe/tt_conversion.rs index 5f4b7d6d0bca..84cc3f3872f2 100644 --- a/crates/hir_def/src/macro_expansion_tests/mbe/tt_conversion.rs +++ b/crates/hir_def/src/macro_expansion_tests/mbe/tt_conversion.rs @@ -108,18 +108,18 @@ fn expansion_does_not_parse_as_expression() { check( r#" macro_rules! stmts { - () => { let _ = 0; } + () => { fn foo() {} } } fn f() { let _ = stmts!/*+errors*/(); } "#, expect![[r#" macro_rules! 
stmts { - () => { let _ = 0; } + () => { fn foo() {} } } fn f() { let _ = /* parse error: expected expression */ -let _ = 0;; } +fn foo() {}; } "#]], ) } diff --git a/crates/hir_def/src/nameres/attr_resolution.rs b/crates/hir_def/src/nameres/attr_resolution.rs index 4a7211b5c983..4c436250db3c 100644 --- a/crates/hir_def/src/nameres/attr_resolution.rs +++ b/crates/hir_def/src/nameres/attr_resolution.rs @@ -54,7 +54,7 @@ impl DefMap { None => return Err(UnresolvedMacro { path: ast_id.path.clone() }), }; - Ok(ResolvedAttr::Macro(attr_macro_as_call_id(&ast_id, attr, db, self.krate, def))) + Ok(ResolvedAttr::Macro(attr_macro_as_call_id(db, &ast_id, attr, self.krate, def, false))) } pub(crate) fn is_builtin_or_registered_attr(&self, path: &ModPath) -> bool { diff --git a/crates/hir_def/src/nameres/collector.rs b/crates/hir_def/src/nameres/collector.rs index 7f7213f4c3e6..9176c90ae9c0 100644 --- a/crates/hir_def/src/nameres/collector.rs +++ b/crates/hir_def/src/nameres/collector.rs @@ -1036,6 +1036,9 @@ impl DefCollector<'_> { fn resolve_macros(&mut self) -> ReachedFixedPoint { let mut macros = std::mem::take(&mut self.unresolved_macros); let mut resolved = Vec::new(); + let mut push_resolved = |directive: &MacroDirective, call_id| { + resolved.push((directive.module_id, directive.depth, directive.container, call_id)); + }; let mut res = ReachedFixedPoint::Yes; macros.retain(|directive| { let resolver = |path| { @@ -1052,29 +1055,25 @@ impl DefCollector<'_> { match &directive.kind { MacroDirectiveKind::FnLike { ast_id, expand_to } => { let call_id = macro_call_as_call_id( + self.db, ast_id, *expand_to, - self.db, self.def_map.krate, &resolver, &mut |_err| (), ); if let Ok(Ok(call_id)) = call_id { - resolved.push(( - directive.module_id, - call_id, - directive.depth, - directive.container, - )); + push_resolved(directive, call_id); res = ReachedFixedPoint::No; return false; } } MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos } => { let call_id = 
derive_macro_as_call_id( + self.db, ast_id, *derive_attr, - self.db, + *derive_pos as u32, self.def_map.krate, &resolver, ); @@ -1086,12 +1085,7 @@ impl DefCollector<'_> { *derive_pos, ); - resolved.push(( - directive.module_id, - call_id, - directive.depth, - directive.container, - )); + push_resolved(directive, call_id); res = ReachedFixedPoint::No; return false; } @@ -1176,9 +1170,20 @@ impl DefCollector<'_> { len = idx; } + // We treat the #[derive] macro as an attribute call, but we do not resolve it for nameres collection. + // This is just a trick to be able to resolve the input to derives as proper paths. + // Check the comment in [`builtin_attr_macro`]. + let call_id = attr_macro_as_call_id( + self.db, + file_ast_id, + attr, + self.def_map.krate, + def, + true, + ); self.def_map.modules[directive.module_id] .scope - .init_derive_attribute(ast_id, attr.id, len + 1); + .init_derive_attribute(ast_id, attr.id, call_id, len + 1); } None => { let diag = DefDiagnostic::malformed_derive( @@ -1198,8 +1203,14 @@ impl DefCollector<'_> { } // Not resolved to a derive helper or the derive attribute, so try to treat as a normal attribute. - let call_id = - attr_macro_as_call_id(file_ast_id, attr, self.db, self.def_map.krate, def); + let call_id = attr_macro_as_call_id( + self.db, + file_ast_id, + attr, + self.def_map.krate, + def, + false, + ); let loc: MacroCallLoc = self.db.lookup_intern_macro_call(call_id); // Skip #[test]/#[bench] expansion, which would merely result in more memory usage @@ -1229,12 +1240,7 @@ impl DefCollector<'_> { .scope .add_attr_macro_invoc(ast_id, call_id); - resolved.push(( - directive.module_id, - call_id, - directive.depth, - directive.container, - )); + push_resolved(directive, call_id); res = ReachedFixedPoint::No; return false; } @@ -1245,7 +1251,7 @@ impl DefCollector<'_> { // Attribute resolution can add unresolved macro invocations, so concatenate the lists. 
self.unresolved_macros.extend(macros); - for (module_id, macro_call_id, depth, container) in resolved { + for (module_id, depth, container, macro_call_id) in resolved { self.collect_macro_expansion(module_id, macro_call_id, depth, container); } @@ -1321,9 +1327,9 @@ impl DefCollector<'_> { match &directive.kind { MacroDirectiveKind::FnLike { ast_id, expand_to } => { let macro_call_as_call_id = macro_call_as_call_id( + self.db, ast_id, *expand_to, - self.db, self.def_map.krate, |path| { let resolved_res = self.def_map.resolve_path_fp_with_macro( @@ -1411,13 +1417,6 @@ impl ModCollector<'_, '_> { } fn collect(&mut self, items: &[ModItem], container: ItemContainerId) { - struct DefData<'a> { - id: ModuleDefId, - name: &'a Name, - visibility: &'a RawVisibility, - has_constructor: bool, - } - let krate = self.def_collector.def_map.krate; // Note: don't assert that inserted value is fresh: it's simply not true @@ -1467,22 +1466,37 @@ impl ModCollector<'_, '_> { continue; } + let db = self.def_collector.db; let module = self.def_collector.def_map.module_id(self.module_id); + let def_map = &mut self.def_collector.def_map; + let update_def = + |def_collector: &mut DefCollector, id, name: &Name, vis, has_constructor| { + def_collector.def_map.modules[self.module_id].scope.declare(id); + def_collector.update( + self.module_id, + &[(Some(name.clone()), PerNs::from_def(id, vis, has_constructor))], + vis, + ImportType::Named, + ) + }; + let resolve_vis = |def_map: &DefMap, visibility| { + def_map + .resolve_visibility(db, self.module_id, visibility) + .unwrap_or(Visibility::Public) + }; - let mut def = None; match item { ModItem::Mod(m) => self.collect_module(&self.item_tree[m], &attrs), ModItem::Import(import_id) => { - let module_id = self.module_id; let imports = Import::from_use( - self.def_collector.db, + db, krate, self.item_tree, ItemTreeId::new(self.tree_id, import_id), ); self.def_collector.unresolved_imports.extend(imports.into_iter().map( |import| ImportDirective { - 
module_id, + module_id: self.module_id, import, status: PartialResolvedImport::Unresolved, }, @@ -1492,7 +1506,7 @@ impl ModCollector<'_, '_> { self.def_collector.unresolved_imports.push(ImportDirective { module_id: self.module_id, import: Import::from_extern_crate( - self.def_collector.db, + db, krate, self.item_tree, ItemTreeId::new(self.tree_id, import_id), @@ -1507,7 +1521,7 @@ impl ModCollector<'_, '_> { container: module, id: ItemTreeId::new(self.tree_id, block), } - .intern(self.def_collector.db), + .intern(db), ), ), ModItem::MacroCall(mac) => self.collect_macro_call(&self.item_tree[mac], container), @@ -1517,73 +1531,84 @@ impl ModCollector<'_, '_> { let module = self.def_collector.def_map.module_id(self.module_id); let impl_id = ImplLoc { container: module, id: ItemTreeId::new(self.tree_id, imp) } - .intern(self.def_collector.db); + .intern(db); self.def_collector.def_map.modules[self.module_id].scope.define_impl(impl_id) } ModItem::Function(id) => { - let func = &self.item_tree[id]; - - let ast_id = InFile::new(self.file_id(), func.ast_id); - self.collect_proc_macro_def(&func.name, ast_id, &attrs); + let it = &self.item_tree[id]; - def = Some(DefData { - id: FunctionLoc { container, id: ItemTreeId::new(self.tree_id, id) } - .intern(self.def_collector.db) + let is_proc_macro = attrs.parse_proc_macro_decl(&it.name); + let vis = match is_proc_macro { + Some(proc_macro) => { + // FIXME: this should only be done in the root module of `proc-macro` crates, not everywhere + let ast_id = InFile::new(self.tree_id.file_id(), it.ast_id); + let module_id = def_map.module_id(def_map.root()); + self.def_collector.export_proc_macro(proc_macro, ast_id); + Visibility::Module(module_id) + } + None => resolve_vis(def_map, &self.item_tree[it.visibility]), + }; + update_def( + self.def_collector, + FunctionLoc { container, id: ItemTreeId::new(self.tree_id, id) } + .intern(db) .into(), - name: &func.name, - visibility: &self.item_tree[func.visibility], - has_constructor: 
false, - }); + &it.name, + vis, + false, + ); } ModItem::Struct(id) => { let it = &self.item_tree[id]; - def = Some(DefData { - id: StructLoc { container: module, id: ItemTreeId::new(self.tree_id, id) } - .intern(self.def_collector.db) + let vis = resolve_vis(def_map, &self.item_tree[it.visibility]); + update_def( + self.def_collector, + StructLoc { container: module, id: ItemTreeId::new(self.tree_id, id) } + .intern(db) .into(), - name: &it.name, - visibility: &self.item_tree[it.visibility], - has_constructor: !matches!(it.fields, Fields::Record(_)), - }); + &it.name, + vis, + !matches!(it.fields, Fields::Record(_)), + ); } ModItem::Union(id) => { let it = &self.item_tree[id]; - def = Some(DefData { - id: UnionLoc { container: module, id: ItemTreeId::new(self.tree_id, id) } - .intern(self.def_collector.db) + let vis = resolve_vis(def_map, &self.item_tree[it.visibility]); + update_def( + self.def_collector, + UnionLoc { container: module, id: ItemTreeId::new(self.tree_id, id) } + .intern(db) .into(), - name: &it.name, - visibility: &self.item_tree[it.visibility], - has_constructor: false, - }); + &it.name, + vis, + false, + ); } ModItem::Enum(id) => { let it = &self.item_tree[id]; - def = Some(DefData { - id: EnumLoc { container: module, id: ItemTreeId::new(self.tree_id, id) } - .intern(self.def_collector.db) + let vis = resolve_vis(def_map, &self.item_tree[it.visibility]); + update_def( + self.def_collector, + EnumLoc { container: module, id: ItemTreeId::new(self.tree_id, id) } + .intern(db) .into(), - name: &it.name, - visibility: &self.item_tree[it.visibility], - has_constructor: false, - }); + &it.name, + vis, + false, + ); } ModItem::Const(id) => { let it = &self.item_tree[id]; - let const_id = ConstLoc { container, id: ItemTreeId::new(self.tree_id, id) } - .intern(self.def_collector.db); + let const_id = + ConstLoc { container, id: ItemTreeId::new(self.tree_id, id) }.intern(db); match &it.name { Some(name) => { - def = Some(DefData { - id: const_id.into(), - 
name, - visibility: &self.item_tree[it.visibility], - has_constructor: false, - }); + let vis = resolve_vis(def_map, &self.item_tree[it.visibility]); + update_def(self.def_collector, const_id.into(), name, vis, false); } None => { // const _: T = ...; @@ -1596,55 +1621,46 @@ impl ModCollector<'_, '_> { ModItem::Static(id) => { let it = &self.item_tree[id]; - def = Some(DefData { - id: StaticLoc { container, id: ItemTreeId::new(self.tree_id, id) } - .intern(self.def_collector.db) + let vis = resolve_vis(def_map, &self.item_tree[it.visibility]); + update_def( + self.def_collector, + StaticLoc { container, id: ItemTreeId::new(self.tree_id, id) } + .intern(db) .into(), - name: &it.name, - visibility: &self.item_tree[it.visibility], - has_constructor: false, - }); + &it.name, + vis, + false, + ); } ModItem::Trait(id) => { let it = &self.item_tree[id]; - def = Some(DefData { - id: TraitLoc { container: module, id: ItemTreeId::new(self.tree_id, id) } - .intern(self.def_collector.db) + let vis = resolve_vis(def_map, &self.item_tree[it.visibility]); + update_def( + self.def_collector, + TraitLoc { container: module, id: ItemTreeId::new(self.tree_id, id) } + .intern(db) .into(), - name: &it.name, - visibility: &self.item_tree[it.visibility], - has_constructor: false, - }); + &it.name, + vis, + false, + ); } ModItem::TypeAlias(id) => { let it = &self.item_tree[id]; - def = Some(DefData { - id: TypeAliasLoc { container, id: ItemTreeId::new(self.tree_id, id) } - .intern(self.def_collector.db) + let vis = resolve_vis(def_map, &self.item_tree[it.visibility]); + update_def( + self.def_collector, + TypeAliasLoc { container, id: ItemTreeId::new(self.tree_id, id) } + .intern(db) .into(), - name: &it.name, - visibility: &self.item_tree[it.visibility], - has_constructor: false, - }); + &it.name, + vis, + false, + ); } } - - if let Some(DefData { id, name, visibility, has_constructor }) = def { - self.def_collector.def_map.modules[self.module_id].scope.declare(id); - let vis = self - 
.def_collector - .def_map - .resolve_visibility(self.def_collector.db, self.module_id, visibility) - .unwrap_or(Visibility::Public); - self.def_collector.update( - self.module_id, - &[(Some(name.clone()), PerNs::from_def(id, vis, has_constructor))], - vis, - ImportType::Named, - ) - } } } @@ -1685,8 +1701,9 @@ impl ModCollector<'_, '_> { { Ok((file_id, is_mod_rs, mod_dir)) => { let item_tree = db.file_item_tree(file_id.into()); + let krate = self.def_collector.def_map.krate; let is_enabled = item_tree - .top_level_attrs(db, self.def_collector.def_map.krate) + .top_level_attrs(db, krate) .cfg() .map_or(true, |cfg| self.is_cfg_enabled(&cfg)); if is_enabled { @@ -1697,7 +1714,7 @@ impl ModCollector<'_, '_> { &self.item_tree[module.visibility], ); ModCollector { - def_collector: &mut *self.def_collector, + def_collector: self.def_collector, macro_depth: self.macro_depth, module_id, tree_id: TreeId::new(file_id.into(), None), @@ -1707,7 +1724,7 @@ impl ModCollector<'_, '_> { .collect_in_top_module(item_tree.top_level_items()); let is_macro_use = is_macro_use || item_tree - .top_level_attrs(db, self.def_collector.def_map.krate) + .top_level_attrs(db, krate) .by_key("macro_use") .exists(); if is_macro_use { @@ -1732,12 +1749,11 @@ impl ModCollector<'_, '_> { definition: Option<(FileId, bool)>, visibility: &crate::visibility::RawVisibility, ) -> LocalModuleId { - let vis = self - .def_collector - .def_map + let def_map = &mut self.def_collector.def_map; + let vis = def_map .resolve_visibility(self.def_collector.db, self.module_id, visibility) .unwrap_or(Visibility::Public); - let modules = &mut self.def_collector.def_map.modules; + let modules = &mut def_map.modules; let origin = match definition { None => ModuleOrigin::Inline { definition: declaration }, Some((definition, is_mod_rs)) => { @@ -1752,10 +1768,10 @@ impl ModCollector<'_, '_> { } modules[self.module_id].children.insert(name.clone(), res); - let module = self.def_collector.def_map.module_id(res); + let module = 
def_map.module_id(res); let def = ModuleDefId::from(module); - self.def_collector.def_map.modules[self.module_id].scope.declare(def); + def_map.modules[self.module_id].scope.declare(def); self.def_collector.update( self.module_id, &[(Some(name), PerNs::from_def(def, vis, false))], @@ -1829,14 +1845,6 @@ impl ModCollector<'_, '_> { Ok(()) } - /// If `attrs` registers a procedural macro, collects its definition. - fn collect_proc_macro_def(&mut self, func_name: &Name, ast_id: AstId, attrs: &Attrs) { - // FIXME: this should only be done in the root module of `proc-macro` crates, not everywhere - if let Some(proc_macro) = attrs.parse_proc_macro_decl(func_name) { - self.def_collector.export_proc_macro(proc_macro, ast_id); - } - } - fn collect_macro_rules(&mut self, id: FileItemTreeId) { let krate = self.def_collector.def_map.krate; let mac = &self.item_tree[id]; @@ -1970,9 +1978,9 @@ impl ModCollector<'_, '_> { // Case 1: try to resolve in legacy scope and expand macro_rules let mut error = None; match macro_call_as_call_id( + self.def_collector.db, &ast_id, mac.expand_to, - self.def_collector.db, self.def_collector.def_map.krate, |path| { path.as_ident().and_then(|name| { diff --git a/crates/hir_def/src/nameres/proc_macro.rs b/crates/hir_def/src/nameres/proc_macro.rs index 8c3454b31b7d..920df7cec205 100644 --- a/crates/hir_def/src/nameres/proc_macro.rs +++ b/crates/hir_def/src/nameres/proc_macro.rs @@ -31,12 +31,12 @@ impl ProcMacroKind { impl Attrs { #[rustfmt::skip] pub(super) fn parse_proc_macro_decl(&self, func_name: &Name) -> Option { - if self.by_key("proc_macro").exists() { + if self.is_proc_macro() { Some(ProcMacroDef { name: func_name.clone(), kind: ProcMacroKind::FnLike }) - } else if self.by_key("proc_macro_attribute").exists() { + } else if self.is_proc_macro_attribute() { Some(ProcMacroDef { name: func_name.clone(), kind: ProcMacroKind::Attr }) } else if self.by_key("proc_macro_derive").exists() { - let derive = 
self.by_key("proc_macro_derive").tt_values().next().unwrap(); + let derive = self.by_key("proc_macro_derive").tt_values().next()?; match &*derive.token_trees { // `#[proc_macro_derive(Trait)]` diff --git a/crates/hir_def/src/type_ref.rs b/crates/hir_def/src/type_ref.rs index cfc69feccc6c..ee8ef6caa306 100644 --- a/crates/hir_def/src/type_ref.rs +++ b/crates/hir_def/src/type_ref.rs @@ -1,9 +1,12 @@ //! HIR for references to types. Paths in these are not yet resolved. They can //! be directly created from an ast::TypeRef, without further queries. -use hir_expand::{name::Name, AstId, InFile}; +use hir_expand::{ + name::{AsName, Name}, + AstId, InFile, +}; use std::convert::TryInto; -use syntax::ast; +use syntax::ast::{self, HasName}; use crate::{body::LowerCtx, intern::Interned, path::Path}; @@ -89,7 +92,7 @@ pub enum TypeRef { Array(Box, ConstScalar), Slice(Box), /// A fn pointer. Last element of the vector is the return type. - Fn(Vec, bool /*varargs*/), + Fn(Vec<(Option, TypeRef)>, bool /*varargs*/), // For ImplTrait(Vec>), DynTrait(Vec>), @@ -188,11 +191,22 @@ impl TypeRef { is_varargs = param.dotdotdot_token().is_some(); } - pl.params().map(|p| p.ty()).map(|it| TypeRef::from_ast_opt(ctx, it)).collect() + pl.params() + .map(|it| { + let type_ref = TypeRef::from_ast_opt(ctx, it.ty()); + let name = match it.pat() { + Some(ast::Pat::IdentPat(it)) => Some( + it.name().map(|nr| nr.as_name()).unwrap_or_else(Name::missing), + ), + _ => None, + }; + (name, type_ref) + }) + .collect() } else { Vec::new() }; - params.push(ret_ty); + params.push((None, ret_ty)); TypeRef::Fn(params, is_varargs) } // for types are close enough for our purposes to the inner type for now... 
@@ -230,9 +244,10 @@ impl TypeRef { fn go(type_ref: &TypeRef, f: &mut impl FnMut(&TypeRef)) { f(type_ref); match type_ref { - TypeRef::Fn(types, _) | TypeRef::Tuple(types) => { - types.iter().for_each(|t| go(t, f)) + TypeRef::Fn(params, _) => { + params.iter().for_each(|(_, param_type)| go(¶m_type, f)) } + TypeRef::Tuple(types) => types.iter().for_each(|t| go(t, f)), TypeRef::RawPtr(type_ref, _) | TypeRef::Reference(type_ref, ..) | TypeRef::Array(type_ref, _) diff --git a/crates/hir_def/src/visibility.rs b/crates/hir_def/src/visibility.rs index 80009010c521..f76034a3e221 100644 --- a/crates/hir_def/src/visibility.rs +++ b/crates/hir_def/src/visibility.rs @@ -1,6 +1,6 @@ //! Defines hir-level representation of visibility (e.g. `pub` and `pub(crate)`). -use std::sync::Arc; +use std::{iter, sync::Arc}; use hir_expand::{hygiene::Hygiene, InFile}; use la_arena::ArenaMap; @@ -25,7 +25,7 @@ pub enum RawVisibility { } impl RawVisibility { - pub(crate) fn private() -> RawVisibility { + pub(crate) const fn private() -> RawVisibility { RawVisibility::Module(ModPath::from_kind(PathKind::Super(0))) } @@ -113,10 +113,7 @@ impl Visibility { } pub(crate) fn is_visible_from_other_crate(self) -> bool { - match self { - Visibility::Module(_) => false, - Visibility::Public => true, - } + matches!(self, Visibility::Public) } pub(crate) fn is_visible_from_def_map( @@ -145,10 +142,7 @@ impl Visibility { arc = to_module.def_map(db); &arc }; - let is_block_root = match to_module.block { - Some(_) => to_module_def_map[to_module.local_id].parent.is_none(), - None => false, - }; + let is_block_root = matches!(to_module.block, Some(_) if to_module_def_map[to_module.local_id].parent.is_none()); if is_block_root { to_module = to_module_def_map.containing_module(to_module.local_id).unwrap(); } @@ -161,9 +155,7 @@ impl Visibility { return true; } match def_map[from_module].parent { - Some(parent) => { - from_module = parent; - } + Some(parent) => from_module = parent, None => { match 
def_map.parent() { Some(module) => { @@ -171,10 +163,8 @@ impl Visibility { def_map = &*parent_arc; from_module = module.local_id; } - None => { - // Reached the root module, nothing left to check. - return false; - } + // Reached the root module, nothing left to check. + None => return false, } } } @@ -194,12 +184,12 @@ impl Visibility { return None; } - let mut a_ancestors = std::iter::successors(Some(mod_a.local_id), |m| { - let parent_id = def_map[*m].parent?; + let mut a_ancestors = iter::successors(Some(mod_a.local_id), |&m| { + let parent_id = def_map[m].parent?; Some(parent_id) }); - let mut b_ancestors = std::iter::successors(Some(mod_b.local_id), |m| { - let parent_id = def_map[*m].parent?; + let mut b_ancestors = iter::successors(Some(mod_b.local_id), |&m| { + let parent_id = def_map[m].parent?; Some(parent_id) }); diff --git a/crates/hir_expand/src/builtin_attr_macro.rs b/crates/hir_expand/src/builtin_attr_macro.rs index 8da8c2ee4f2e..6301da1c832d 100644 --- a/crates/hir_expand/src/builtin_attr_macro.rs +++ b/crates/hir_expand/src/builtin_attr_macro.rs @@ -1,9 +1,12 @@ //! Builtin attributes. -use mbe::ExpandResult; +use itertools::Itertools; use syntax::ast; -use crate::{db::AstDatabase, name, AstId, CrateId, MacroCallId, MacroDefId, MacroDefKind}; +use crate::{ + db::AstDatabase, name, AstId, CrateId, ExpandResult, MacroCallId, MacroCallKind, MacroDefId, + MacroDefKind, +}; macro_rules! register_builtin { ( $(($name:ident, $variant:ident) => $expand:ident),* ) => { @@ -52,7 +55,7 @@ register_builtin! 
{ (bench, Bench) => dummy_attr_expand, (cfg_accessible, CfgAccessible) => dummy_attr_expand, (cfg_eval, CfgEval) => dummy_attr_expand, - (derive, Derive) => dummy_attr_expand, + (derive, Derive) => derive_attr_expand, (global_allocator, GlobalAllocator) => dummy_attr_expand, (test, Test) => dummy_attr_expand, (test_case, TestCase) => dummy_attr_expand @@ -78,3 +81,68 @@ fn dummy_attr_expand( ) -> ExpandResult { ExpandResult::ok(tt.clone()) } + +/// We generate a very specific expansion here, as we do not actually expand the `#[derive]` attribute +/// itself in name res, but we do want to expand it to something for the IDE layer, so that the input +/// derive attributes can be downmapped, and resolved as proper paths. +/// This is basically a hack, that simplifies the hacks we need in a lot of ide layer places to +/// somewhat inconsistently resolve derive attributes. +/// +/// As such, we expand `#[derive(Foo, bar::Bar)]` into +/// ``` +/// #[Foo] +/// #[bar::Bar] +/// (); +/// ``` +/// which allows fallback path resolution in hir::Semantics to properly identify our derives. +/// Since we do not expand the attribute in nameres though, we keep the original item. +/// +/// The ideal expansion here would be for the `#[derive]` to re-emit the annotated item and somehow +/// use the input paths in its output as well. +/// But that would bring two problems with it, for one every derive would duplicate the item token tree +/// wasting a lot of memory, and it would also require some way to use a path in a way that makes it +/// always resolve as a derive without nameres recollecting them. +/// So this hacky approach is a lot more friendly for us, though it does require a bit of support in +/// [`hir::Semantics`] to make this work. +fn derive_attr_expand( + db: &dyn AstDatabase, + id: MacroCallId, + tt: &tt::Subtree, +) -> ExpandResult { + let loc = db.lookup_intern_macro_call(id); + let derives = match &loc.kind { + MacroCallKind::Attr { attr_args, .. 
} => &attr_args.0, + _ => return ExpandResult::ok(tt.clone()), + }; + + let mk_leaf = |char| { + tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { + char, + spacing: tt::Spacing::Alone, + id: tt::TokenId::unspecified(), + })) + }; + + let mut token_trees = Vec::new(); + for (comma, group) in &derives + .token_trees + .iter() + .filter_map(|tt| match tt { + tt::TokenTree::Leaf(l) => Some(l), + tt::TokenTree::Subtree(_) => None, + }) + .group_by(|l| matches!(l, tt::Leaf::Punct(tt::Punct { char: ',', .. }))) + { + if comma { + continue; + } + token_trees.push(mk_leaf('#')); + token_trees.push(mk_leaf('[')); + token_trees.extend(group.cloned().map(tt::TokenTree::Leaf)); + token_trees.push(mk_leaf(']')); + } + token_trees.push(mk_leaf('(')); + token_trees.push(mk_leaf(')')); + token_trees.push(mk_leaf(';')); + ExpandResult::ok(tt::Subtree { delimiter: tt.delimiter, token_trees }) +} diff --git a/crates/hir_expand/src/builtin_derive_macro.rs b/crates/hir_expand/src/builtin_derive_macro.rs index bd75c51cbc6f..dd7d249efa35 100644 --- a/crates/hir_expand/src/builtin_derive_macro.rs +++ b/crates/hir_expand/src/builtin_derive_macro.rs @@ -2,13 +2,16 @@ use tracing::debug; -use mbe::ExpandResult; use syntax::{ ast::{self, AstNode, HasGenericParams, HasModuleItem, HasName}, match_ast, }; +use tt::TokenId; -use crate::{db::AstDatabase, name, quote, AstId, CrateId, MacroCallId, MacroDefId, MacroDefKind}; +use crate::{ + db::AstDatabase, name, quote, AstId, CrateId, ExpandError, ExpandResult, MacroCallId, + MacroDefId, MacroDefKind, +}; macro_rules! register_builtin { ( $($trait:ident => $expand:ident),* ) => { @@ -71,15 +74,15 @@ struct BasicAdtInfo { type_params: usize, } -fn parse_adt(tt: &tt::Subtree) -> Result { - let (parsed, token_map) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MacroItems); // FragmentKind::Items doesn't parse attrs? 
+fn parse_adt(tt: &tt::Subtree) -> Result { + let (parsed, token_map) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MacroItems); let macro_items = ast::MacroItems::cast(parsed.syntax_node()).ok_or_else(|| { debug!("derive node didn't parse"); - mbe::ExpandError::UnexpectedToken + ExpandError::Other("invalid item definition".into()) })?; let item = macro_items.items().next().ok_or_else(|| { debug!("no module item parsed"); - mbe::ExpandError::NoMatchingRule + ExpandError::Other("no item found".into()) })?; let node = item.syntax(); let (name, params) = match_ast! { @@ -89,18 +92,17 @@ fn parse_adt(tt: &tt::Subtree) -> Result { ast::Union(it) => (it.name(), it.generic_param_list()), _ => { debug!("unexpected node is {:?}", node); - return Err(mbe::ExpandError::ConversionError) + return Err(ExpandError::Other("expected struct, enum or union".into())) }, } }; let name = name.ok_or_else(|| { debug!("parsed item has no name"); - mbe::ExpandError::NoMatchingRule - })?; - let name_token_id = token_map.token_by_range(name.syntax().text_range()).ok_or_else(|| { - debug!("name token not found"); - mbe::ExpandError::ConversionError + ExpandError::Other("missing name".into()) })?; + let name_token_id = token_map + .token_by_range(name.syntax().text_range()) + .unwrap_or_else(|| TokenId::unspecified()); let name_token = tt::Ident { id: name_token_id, text: name.text().into() }; let type_params = params.map_or(0, |type_param_list| type_param_list.type_params().count()); Ok(BasicAdtInfo { name: name_token, type_params }) diff --git a/crates/hir_expand/src/builtin_fn_macro.rs b/crates/hir_expand/src/builtin_fn_macro.rs index 1b49fc0ab4e2..da76b6b08043 100644 --- a/crates/hir_expand/src/builtin_fn_macro.rs +++ b/crates/hir_expand/src/builtin_fn_macro.rs @@ -1,15 +1,16 @@ //! 
Builtin macro -use crate::{ - db::AstDatabase, name, quote, AstId, CrateId, MacroCallId, MacroCallLoc, MacroDefId, - MacroDefKind, -}; use base_db::{AnchoredPath, Edition, FileId}; use cfg::CfgExpr; use either::Either; -use mbe::{parse_exprs_with_sep, parse_to_token_tree, ExpandResult}; +use mbe::{parse_exprs_with_sep, parse_to_token_tree}; use syntax::ast::{self, AstToken}; +use crate::{ + db::AstDatabase, name, quote, AstId, CrateId, ExpandError, ExpandResult, MacroCallId, + MacroCallLoc, MacroDefId, MacroDefKind, +}; + macro_rules! register_builtin { ( LAZY: $(($name:ident, $kind: ident) => $expand:ident),* , EAGER: $(($e_name:ident, $e_kind: ident) => $e_expand:ident),* ) => { #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] @@ -113,6 +114,7 @@ register_builtin! { (cfg, Cfg) => cfg_expand, (core_panic, CorePanic) => panic_expand, (std_panic, StdPanic) => panic_expand, + (unreachable, Unreachable) => unreachable_expand, (log_syntax, LogSyntax) => log_syntax_expand, (trace_macros, TraceMacros) => trace_macros_expand, @@ -120,6 +122,7 @@ register_builtin! { (compile_error, CompileError) => compile_error_expand, (concat, Concat) => concat_expand, (concat_idents, ConcatIdents) => concat_idents_expand, + (concat_bytes, ConcatBytes) => concat_bytes_expand, (include, Include) => include_expand, (include_bytes, IncludeBytes) => include_bytes_expand, (include_str, IncludeStr) => include_str_expand, @@ -257,7 +260,7 @@ fn format_args_expand( let mut args = parse_exprs_with_sep(tt, ','); if args.is_empty() { - return ExpandResult::only_err(mbe::ExpandError::NoMatchingRule); + return ExpandResult::only_err(mbe::ExpandError::NoMatchingRule.into()); } for arg in &mut args { // Remove `key =`. 
@@ -341,7 +344,7 @@ fn panic_expand( let loc: MacroCallLoc = db.lookup_intern_macro_call(id); // Expand to a macro call `$crate::panic::panic_{edition}` let krate = tt::Ident { text: "$crate".into(), id: tt::TokenId::unspecified() }; - let mut call = if db.crate_graph()[loc.krate].edition == Edition::Edition2021 { + let mut call = if db.crate_graph()[loc.krate].edition >= Edition::Edition2021 { quote!(#krate::panic::panic_2021!) } else { quote!(#krate::panic::panic_2015!) @@ -352,12 +355,37 @@ fn panic_expand( ExpandResult::ok(call) } +fn unreachable_expand( + db: &dyn AstDatabase, + id: MacroCallId, + tt: &tt::Subtree, +) -> ExpandResult { + let loc: MacroCallLoc = db.lookup_intern_macro_call(id); + // Expand to a macro call `$crate::panic::unreachable_{edition}` + let krate = tt::Ident { text: "$crate".into(), id: tt::TokenId::unspecified() }; + let mut call = if db.crate_graph()[loc.krate].edition >= Edition::Edition2021 { + quote!(#krate::panic::unreachable_2021!) + } else { + quote!(#krate::panic::unreachable_2015!) 
+ }; + + // Pass the original arguments + call.token_trees.push(tt::TokenTree::Subtree(tt.clone())); + ExpandResult::ok(call) +} + fn unquote_str(lit: &tt::Literal) -> Option { let lit = ast::make::tokens::literal(&lit.to_string()); let token = ast::String::cast(lit)?; token.value().map(|it| it.into_owned()) } +fn unquote_byte_string(lit: &tt::Literal) -> Option> { + let lit = ast::make::tokens::literal(&lit.to_string()); + let token = ast::ByteString::cast(lit)?; + token.value().map(|it| it.into_owned()) +} + fn compile_error_expand( _db: &dyn AstDatabase, _id: MacroCallId, @@ -368,12 +396,12 @@ fn compile_error_expand( let text = it.text.as_str(); if text.starts_with('"') && text.ends_with('"') { // FIXME: does not handle raw strings - mbe::ExpandError::Other(text[1..text.len() - 1].into()) + ExpandError::Other(text[1..text.len() - 1].into()) } else { - mbe::ExpandError::BindingError("`compile_error!` argument must be a string".into()) + ExpandError::Other("`compile_error!` argument must be a string".into()) } } - _ => mbe::ExpandError::BindingError("`compile_error!` argument must be a string".into()), + _ => ExpandError::Other("`compile_error!` argument must be a string".into()), }; ExpandResult { value: ExpandedEager::new(quote! 
{}), err: Some(err) } @@ -414,13 +442,81 @@ fn concat_expand( } tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if i % 2 == 1 && punct.char == ',' => (), _ => { - err.get_or_insert(mbe::ExpandError::UnexpectedToken); + err.get_or_insert(mbe::ExpandError::UnexpectedToken.into()); } } } ExpandResult { value: ExpandedEager::new(quote!(#text)), err } } +fn concat_bytes_expand( + _db: &dyn AstDatabase, + _arg_id: MacroCallId, + tt: &tt::Subtree, +) -> ExpandResult { + let mut bytes = Vec::new(); + let mut err = None; + for (i, t) in tt.token_trees.iter().enumerate() { + match t { + tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => { + let token = ast::make::tokens::literal(&lit.to_string()); + match token.kind() { + syntax::SyntaxKind::BYTE => bytes.push(token.text().to_string()), + syntax::SyntaxKind::BYTE_STRING => { + let components = unquote_byte_string(lit).unwrap_or_else(|| Vec::new()); + components.into_iter().for_each(|x| bytes.push(x.to_string())); + } + _ => { + err.get_or_insert(mbe::ExpandError::UnexpectedToken.into()); + break; + } + } + } + tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if i % 2 == 1 && punct.char == ',' => (), + tt::TokenTree::Subtree(tree) + if tree.delimiter_kind() == Some(tt::DelimiterKind::Bracket) => + { + if let Err(e) = concat_bytes_expand_subtree(tree, &mut bytes) { + err.get_or_insert(e); + break; + } + } + _ => { + err.get_or_insert(mbe::ExpandError::UnexpectedToken.into()); + break; + } + } + } + let ident = tt::Ident { text: bytes.join(", ").into(), id: tt::TokenId::unspecified() }; + ExpandResult { value: ExpandedEager::new(quote!([#ident])), err } +} + +fn concat_bytes_expand_subtree( + tree: &tt::Subtree, + bytes: &mut Vec, +) -> Result<(), ExpandError> { + for (ti, tt) in tree.token_trees.iter().enumerate() { + match tt { + tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => { + let lit = ast::make::tokens::literal(&lit.to_string()); + match lit.kind() { + syntax::SyntaxKind::BYTE | syntax::SyntaxKind::INT_NUMBER => { + 
bytes.push(lit.text().to_string()) + } + _ => { + return Err(mbe::ExpandError::UnexpectedToken.into()); + } + } + } + tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if ti % 2 == 1 && punct.char == ',' => (), + _ => { + return Err(mbe::ExpandError::UnexpectedToken.into()); + } + } + } + Ok(()) +} + fn concat_idents_expand( _db: &dyn AstDatabase, _arg_id: MacroCallId, @@ -435,7 +531,7 @@ fn concat_idents_expand( } tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if i % 2 == 1 && punct.char == ',' => (), _ => { - err.get_or_insert(mbe::ExpandError::UnexpectedToken); + err.get_or_insert(mbe::ExpandError::UnexpectedToken.into()); } } } @@ -448,28 +544,28 @@ fn relative_file( call_id: MacroCallId, path_str: &str, allow_recursion: bool, -) -> Result { +) -> Result { let call_site = call_id.as_file().original_file(db); let path = AnchoredPath { anchor: call_site, path: path_str }; - let res = db.resolve_path(path).ok_or_else(|| { - mbe::ExpandError::Other(format!("failed to load file `{path_str}`").into()) - })?; + let res = db + .resolve_path(path) + .ok_or_else(|| ExpandError::Other(format!("failed to load file `{path_str}`").into()))?; // Prevent include itself if res == call_site && !allow_recursion { - Err(mbe::ExpandError::Other(format!("recursive inclusion of `{path_str}`").into())) + Err(ExpandError::Other(format!("recursive inclusion of `{path_str}`").into())) } else { Ok(res) } } -fn parse_string(tt: &tt::Subtree) -> Result { +fn parse_string(tt: &tt::Subtree) -> Result { tt.token_trees .get(0) .and_then(|tt| match tt { tt::TokenTree::Leaf(tt::Leaf::Literal(it)) => unquote_str(it), _ => None, }) - .ok_or(mbe::ExpandError::ConversionError) + .ok_or(mbe::ExpandError::ConversionError.into()) } fn include_expand( @@ -561,7 +657,7 @@ fn env_expand( // The only variable rust-analyzer ever sets is `OUT_DIR`, so only diagnose that to avoid // unnecessary diagnostics for eg. `CARGO_PKG_NAME`. 
if key == "OUT_DIR" { - err = Some(mbe::ExpandError::Other( + err = Some(ExpandError::Other( r#"`OUT_DIR` not set, enable "run build scripts" to fix"#.into(), )); } diff --git a/crates/hir_expand/src/db.rs b/crates/hir_expand/src/db.rs index 75766a54a74f..d6d33b4cd724 100644 --- a/crates/hir_expand/src/db.rs +++ b/crates/hir_expand/src/db.rs @@ -5,7 +5,7 @@ use std::sync::Arc; use base_db::{salsa, SourceDatabase}; use either::Either; use limit::Limit; -use mbe::{syntax_node_to_token_tree, ExpandError, ExpandResult}; +use mbe::syntax_node_to_token_tree; use rustc_hash::FxHashSet; use syntax::{ algo::diff, @@ -15,8 +15,9 @@ use syntax::{ use crate::{ ast_id_map::AstIdMap, fixup, hygiene::HygieneFrame, BuiltinAttrExpander, BuiltinDeriveExpander, - BuiltinFnLikeExpander, ExpandTo, HirFileId, HirFileIdRepr, MacroCallId, MacroCallKind, - MacroCallLoc, MacroDefId, MacroDefKind, MacroFile, ProcMacroExpander, + BuiltinFnLikeExpander, ExpandError, ExpandResult, ExpandTo, HirFileId, HirFileIdRepr, + MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind, MacroFile, + ProcMacroExpander, }; /// Total limit on the number of tokens produced by any macro invocation. @@ -47,10 +48,10 @@ impl TokenExpander { db: &dyn AstDatabase, id: MacroCallId, tt: &tt::Subtree, - ) -> mbe::ExpandResult { + ) -> ExpandResult { match self { - TokenExpander::DeclarativeMacro { mac, .. } => mac.expand(tt), - TokenExpander::Builtin(it) => it.expand(db, id, tt), + TokenExpander::DeclarativeMacro { mac, .. } => mac.expand(tt).map_err(Into::into), + TokenExpander::Builtin(it) => it.expand(db, id, tt).map_err(Into::into), TokenExpander::BuiltinAttr(it) => it.expand(db, id, tt), TokenExpander::BuiltinDerive(it) => it.expand(db, id, tt), TokenExpander::ProcMacro(_) => { @@ -336,10 +337,12 @@ fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet return None, MacroCallKind::Attr { invoc_attr_index, .. 
} => { cov_mark::hit!(attribute_macro_attr_censoring); ast::Item::cast(node.clone())? @@ -431,7 +434,11 @@ fn macro_expand(db: &dyn AstDatabase, id: MacroCallId) -> ExpandResult it, - None => return ExpandResult::str_err("Failed to lower macro args to token tree".into()), + None => { + return ExpandResult::only_err(ExpandError::Other( + "Failed to lower macro args to token tree".into(), + )) + } }; let expander = match db.macro_def(loc.def) { @@ -439,16 +446,23 @@ fn macro_expand(db: &dyn AstDatabase, id: MacroCallId) -> ExpandResult return ExpandResult::str_err(format!("invalid macro definition: {}", err)), + Err(err) => { + return ExpandResult::only_err(ExpandError::Other( + format!("invalid macro definition: {}", err).into(), + )) + } }; let ExpandResult { value: mut tt, err } = expander.expand(db, id, ¯o_arg.0); // Set a hard limit for the expanded tt let count = tt.count(); if TOKEN_LIMIT.check(count).is_err() { - return ExpandResult::str_err(format!( - "macro invocation exceeds token limit: produced {} tokens, limit is {}", - count, - TOKEN_LIMIT.inner(), + return ExpandResult::only_err(ExpandError::Other( + format!( + "macro invocation exceeds token limit: produced {} tokens, limit is {}", + count, + TOKEN_LIMIT.inner(), + ) + .into(), )); } @@ -465,7 +479,9 @@ fn expand_proc_macro(db: &dyn AstDatabase, id: MacroCallId) -> ExpandResult it, - None => return ExpandResult::str_err("No arguments for proc-macro".to_string()), + None => { + return ExpandResult::only_err(ExpandError::Other("No arguments for proc-macro".into())) + } }; let expander = match loc.def.kind { diff --git a/crates/hir_expand/src/eager.rs b/crates/hir_expand/src/eager.rs index 66f7d8e3bc14..1de0d5a77d62 100644 --- a/crates/hir_expand/src/eager.rs +++ b/crates/hir_expand/src/eager.rs @@ -21,7 +21,6 @@ use std::sync::Arc; use base_db::CrateId; -use mbe::ExpandResult; use syntax::{ted, SyntaxNode}; use crate::{ @@ -29,8 +28,8 @@ use crate::{ db::AstDatabase, hygiene::Hygiene, 
mod_path::ModPath, - EagerCallInfo, ExpandTo, InFile, MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId, - MacroDefKind, UnresolvedMacro, + EagerCallInfo, ExpandError, ExpandResult, ExpandTo, InFile, MacroCallId, MacroCallKind, + MacroCallLoc, MacroDefId, MacroDefKind, UnresolvedMacro, }; #[derive(Debug)] @@ -39,12 +38,12 @@ pub struct ErrorEmitted { } pub trait ErrorSink { - fn emit(&mut self, err: mbe::ExpandError); + fn emit(&mut self, err: ExpandError); fn option( &mut self, opt: Option, - error: impl FnOnce() -> mbe::ExpandError, + error: impl FnOnce() -> ExpandError, ) -> Result { match opt { Some(it) => Ok(it), @@ -58,12 +57,12 @@ pub trait ErrorSink { fn option_with( &mut self, opt: impl FnOnce() -> Option, - error: impl FnOnce() -> mbe::ExpandError, + error: impl FnOnce() -> ExpandError, ) -> Result { self.option(opt(), error) } - fn result(&mut self, res: Result) -> Result { + fn result(&mut self, res: Result) -> Result { match res { Ok(it) => Ok(it), Err(e) => { @@ -90,8 +89,8 @@ pub trait ErrorSink { } } -impl ErrorSink for &'_ mut dyn FnMut(mbe::ExpandError) { - fn emit(&mut self, err: mbe::ExpandError) { +impl ErrorSink for &'_ mut dyn FnMut(ExpandError) { + fn emit(&mut self, err: ExpandError) { self(err); } } @@ -102,7 +101,7 @@ pub fn expand_eager_macro( macro_call: InFile, def: MacroDefId, resolver: &dyn Fn(ModPath) -> Option, - diagnostic_sink: &mut dyn FnMut(mbe::ExpandError), + diagnostic_sink: &mut dyn FnMut(ExpandError), ) -> Result, UnresolvedMacro> { let hygiene = Hygiene::new(db, macro_call.file_id); let parsed_args = macro_call @@ -147,7 +146,7 @@ pub fn expand_eager_macro( if let MacroDefKind::BuiltInEager(eager, _) = def.kind { let res = eager.expand(db, arg_id, &subtree); if let Some(err) = res.err { - diagnostic_sink(err); + diagnostic_sink(err.into()); } let loc = MacroCallLoc { @@ -199,7 +198,7 @@ fn eager_macro_recur( curr: InFile, krate: CrateId, macro_resolver: &dyn Fn(ModPath) -> Option, - mut diagnostic_sink: &mut dyn 
FnMut(mbe::ExpandError), + mut diagnostic_sink: &mut dyn FnMut(ExpandError), ) -> Result, UnresolvedMacro> { let original = curr.value.clone_for_update(); @@ -211,7 +210,7 @@ fn eager_macro_recur( let def = match child.path().and_then(|path| ModPath::from_src(db, path, &hygiene)) { Some(path) => macro_resolver(path.clone()).ok_or_else(|| UnresolvedMacro { path })?, None => { - diagnostic_sink(mbe::ExpandError::Other("malformed macro invocation".into())); + diagnostic_sink(ExpandError::Other("malformed macro invocation".into())); continue; } }; diff --git a/crates/hir_expand/src/fixup.rs b/crates/hir_expand/src/fixup.rs index 2eb3da79dc61..63ab5e9b4a0d 100644 --- a/crates/hir_expand/src/fixup.rs +++ b/crates/hir_expand/src/fixup.rs @@ -97,6 +97,18 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups { ]); } }, + ast::LetStmt(it) => { + if it.semicolon_token().is_none() { + append.insert(node.clone(), vec![ + SyntheticToken { + kind: SyntaxKind::SEMICOLON, + text: ";".into(), + range: end_range, + id: EMPTY_ID, + }, + ]); + } + }, _ => (), } } @@ -229,6 +241,34 @@ fn foo () {a . __ra_fixup ; bar () ;} ) } + #[test] + fn incomplete_let() { + check( + r#" +fn foo() { + let x = a +} +"#, + expect![[r#" +fn foo () {let x = a ;} +"#]], + ) + } + + #[test] + fn incomplete_field_expr_in_let() { + check( + r#" +fn foo() { + let x = a. +} +"#, + expect![[r#" +fn foo () {let x = a . 
__ra_fixup ;} +"#]], + ) + } + #[test] fn field_expr_before_call() { // another case that easily happens while typing diff --git a/crates/hir_expand/src/lib.rs b/crates/hir_expand/src/lib.rs index 279fdc61dca9..ba0f10151246 100644 --- a/crates/hir_expand/src/lib.rs +++ b/crates/hir_expand/src/lib.rs @@ -17,9 +17,9 @@ pub mod eager; pub mod mod_path; mod fixup; -pub use mbe::{ExpandError, ExpandResult, Origin}; +pub use mbe::{Origin, ValueResult}; -use std::{hash::Hash, iter, sync::Arc}; +use std::{fmt, hash::Hash, iter, sync::Arc}; use base_db::{impl_intern_key, salsa, CrateId, FileId, FileRange, ProcMacroKind}; use either::Either; @@ -39,6 +39,31 @@ use crate::{ proc_macro::ProcMacroExpander, }; +pub type ExpandResult = ValueResult; + +#[derive(Debug, PartialEq, Eq, Clone)] +pub enum ExpandError { + UnresolvedProcMacro, + Mbe(mbe::ExpandError), + Other(Box), +} + +impl From for ExpandError { + fn from(mbe: mbe::ExpandError) -> Self { + Self::Mbe(mbe) + } +} + +impl fmt::Display for ExpandError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + ExpandError::UnresolvedProcMacro => f.write_str("unresolved proc-macro"), + ExpandError::Mbe(it) => it.fmt(f), + ExpandError::Other(it) => f.write_str(it), + } + } +} + /// Input to the analyzer is a set of files, where each file is identified by /// `FileId` and contains source code. However, another source of source code in /// Rust are macros: each macro can be thought of as producing a "temporary @@ -125,22 +150,24 @@ pub enum MacroCallKind { }, Derive { ast_id: AstId, - derive_name: Box, /// Syntactical index of the invoking `#[derive]` attribute. /// /// Outer attributes are counted first, then inner attributes. This does not support /// out-of-line modules, which may have attributes spread across 2 files! 
derive_attr_index: u32, + /// Index of the derive macro in the derive attribute + derive_index: u32, }, Attr { ast_id: AstId, - attr_name: Box, - attr_args: (tt::Subtree, mbe::TokenMap), + attr_args: Arc<(tt::Subtree, mbe::TokenMap)>, /// Syntactical index of the invoking `#[attribute]`. /// /// Outer attributes are counted first, then inner attributes. This does not support /// out-of-line modules, which may have attributes spread across 2 files! invoc_attr_index: u32, + /// Whether this attribute is the `#[derive]` attribute. + is_derive: bool, }, } @@ -193,9 +220,18 @@ impl HirFileId { let arg_tt = loc.kind.arg(db)?; + let macro_def = db.macro_def(loc.def).ok()?; + let (parse, exp_map) = db.parse_macro_expansion(macro_file).value?; + let macro_arg = db.macro_arg(macro_file.macro_call_id)?; + let def = loc.def.ast_id().left().and_then(|id| { let def_tt = match id.to_node(db) { ast::Macro::MacroRules(mac) => mac.token_tree()?, + ast::Macro::MacroDef(_) + if matches!(*macro_def, TokenExpander::BuiltinAttr(_)) => + { + return None + } ast::Macro::MacroDef(mac) => mac.body()?, }; Some(InFile::new(id.file_id, def_tt)) @@ -213,10 +249,6 @@ impl HirFileId { _ => None, }); - let macro_def = db.macro_def(loc.def).ok()?; - let (parse, exp_map) = db.parse_macro_expansion(macro_file).value?; - let macro_arg = db.macro_arg(macro_file.macro_call_id)?; - Some(ExpansionInfo { expanded: InFile::new(self, parse.syntax_node()), arg: InFile::new(loc.kind.file_id(), arg_tt), @@ -231,16 +263,16 @@ impl HirFileId { } /// Indicate it is macro file generated for builtin derive - pub fn is_builtin_derive(&self, db: &dyn db::AstDatabase) -> Option> { + pub fn is_builtin_derive(&self, db: &dyn db::AstDatabase) -> Option> { match self.0 { HirFileIdRepr::FileId(_) => None, HirFileIdRepr::MacroFile(macro_file) => { let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); - let item = match loc.def.kind { + let attr = match loc.def.kind { MacroDefKind::BuiltInDerive(..) 
=> loc.kind.to_node(db), _ => return None, }; - Some(item.with_value(ast::Item::cast(item.value.clone())?)) + Some(attr.with_value(ast::Attr::cast(attr.value.clone())?)) } } } @@ -266,7 +298,7 @@ impl HirFileId { } } - /// Return whether this file is an include macro + /// Return whether this file is an attr macro pub fn is_attr_macro(&self, db: &dyn db::AstDatabase) -> bool { match self.0 { HirFileIdRepr::MacroFile(macro_file) => { @@ -277,6 +309,18 @@ impl HirFileId { } } + /// Return whether this file is the pseudo expansion of the derive attribute. + /// See [`crate::builtin_attr_macro::derive_attr_expand`]. + pub fn is_derive_attr_pseudo_expansion(&self, db: &dyn db::AstDatabase) -> bool { + match self.0 { + HirFileIdRepr::MacroFile(macro_file) => { + let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); + matches!(loc.kind, MacroCallKind::Attr { is_derive: true, .. }) + } + _ => false, + } + } + pub fn is_macro(self) -> bool { matches!(self.0, HirFileIdRepr::MacroFile(_)) } @@ -341,8 +385,29 @@ impl MacroCallKind { MacroCallKind::FnLike { ast_id, .. } => { ast_id.with_value(ast_id.to_node(db).syntax().clone()) } - MacroCallKind::Derive { ast_id, .. } => { - ast_id.with_value(ast_id.to_node(db).syntax().clone()) + MacroCallKind::Derive { ast_id, derive_attr_index, .. } => { + // FIXME: handle `cfg_attr` + ast_id.with_value(ast_id.to_node(db)).map(|it| { + it.doc_comments_and_attrs() + .nth(*derive_attr_index as usize) + .and_then(|it| match it { + Either::Left(attr) => Some(attr.syntax().clone()), + Either::Right(_) => None, + }) + .unwrap_or_else(|| it.syntax().clone()) + }) + } + MacroCallKind::Attr { ast_id, is_derive: true, invoc_attr_index, .. 
} => { + // FIXME: handle `cfg_attr` + ast_id.with_value(ast_id.to_node(db)).map(|it| { + it.doc_comments_and_attrs() + .nth(*invoc_attr_index as usize) + .and_then(|it| match it { + Either::Left(attr) => Some(attr.syntax().clone()), + Either::Right(_) => None, + }) + .unwrap_or_else(|| it.syntax().clone()) + }) } MacroCallKind::Attr { ast_id, .. } => { ast_id.with_value(ast_id.to_node(db).syntax().clone()) @@ -406,6 +471,7 @@ impl MacroCallKind { match self { MacroCallKind::FnLike { expand_to, .. } => *expand_to, MacroCallKind::Derive { .. } => ExpandTo::Items, + MacroCallKind::Attr { is_derive: true, .. } => ExpandTo::Statements, MacroCallKind::Attr { .. } => ExpandTo::Items, // is this always correct? } } @@ -472,7 +538,7 @@ impl ExpansionInfo { let token_range = token.value.text_range(); match &loc.kind { - MacroCallKind::Attr { attr_args: (_, map), invoc_attr_index, .. } => { + MacroCallKind::Attr { attr_args, invoc_attr_index, is_derive, .. } => { let attr = item .doc_comments_and_attrs() .nth(*invoc_attr_index as usize) @@ -486,8 +552,13 @@ impl ExpansionInfo { let relative_range = token.value.text_range().checked_sub(attr_input_start)?; // shift by the item's tree's max id - let token_id = - self.macro_arg_shift.shift(map.token_by_range(relative_range)?); + let token_id = attr_args.1.token_by_range(relative_range)?; + let token_id = if *is_derive { + // we do not shift for `#[derive]`, as we only need to downmap the derive attribute tokens + token_id + } else { + self.macro_arg_shift.shift(token_id) + }; Some(token_id) } _ => None, @@ -535,13 +606,16 @@ impl ExpansionInfo { // Attributes are a bit special for us, they have two inputs, the input tokentree and the annotated item. let (token_map, tt) = match &loc.kind { - MacroCallKind::Attr { attr_args: (_, arg_token_map), .. } => { + MacroCallKind::Attr { attr_args, is_derive: true, .. } => { + (&attr_args.1, self.attr_input_or_mac_def.clone()?.syntax().cloned()) + } + MacroCallKind::Attr { attr_args, .. 
} => { // try unshifting the the token id, if unshifting fails, the token resides in the non-item attribute input // note that the `TokenExpander::map_id_up` earlier only unshifts for declarative macros, so we don't double unshift with this match self.macro_arg_shift.unshift(token_id) { Some(unshifted) => { token_id = unshifted; - (arg_token_map, self.attr_input_or_mac_def.clone()?.syntax().cloned()) + (&attr_args.1, self.attr_input_or_mac_def.clone()?.syntax().cloned()) } None => (&self.macro_arg.1, self.arg.clone()), } @@ -690,6 +764,13 @@ impl<'a> InFile<&'a SyntaxNode> { } } +impl InFile { + pub fn upmap(self, db: &dyn db::AstDatabase) -> Option> { + let expansion = self.file_id.expansion_info(db)?; + expansion.map_token_up(db, self.as_ref()).map(|(it, _)| it) + } +} + fn ascend_node_border_tokens( db: &dyn db::AstDatabase, InFile { file_id, value: node }: InFile<&SyntaxNode>, @@ -810,10 +891,10 @@ impl ExpandTo { MACRO_TYPE => ExpandTo::Type, ARG_LIST | TRY_EXPR | TUPLE_EXPR | PAREN_EXPR | ARRAY_EXPR | FOR_EXPR | PATH_EXPR - | CLOSURE_EXPR | CONDITION | BREAK_EXPR | RETURN_EXPR | MATCH_EXPR | MATCH_ARM - | MATCH_GUARD | RECORD_EXPR_FIELD | CALL_EXPR | INDEX_EXPR | METHOD_CALL_EXPR - | FIELD_EXPR | AWAIT_EXPR | CAST_EXPR | REF_EXPR | PREFIX_EXPR | RANGE_EXPR - | BIN_EXPR => ExpandTo::Expr, + | CLOSURE_EXPR | BREAK_EXPR | RETURN_EXPR | MATCH_EXPR | MATCH_ARM | MATCH_GUARD + | RECORD_EXPR_FIELD | CALL_EXPR | INDEX_EXPR | METHOD_CALL_EXPR | FIELD_EXPR + | AWAIT_EXPR | CAST_EXPR | REF_EXPR | PREFIX_EXPR | RANGE_EXPR | BIN_EXPR + | LET_EXPR => ExpandTo::Expr, LET_STMT => { // FIXME: Handle LHS Pattern ExpandTo::Expr diff --git a/crates/hir_expand/src/name.rs b/crates/hir_expand/src/name.rs index c36bd09e2b3b..f62c3ca0a004 100644 --- a/crates/hir_expand/src/name.rs +++ b/crates/hir_expand/src/name.rs @@ -233,6 +233,7 @@ pub mod known { column, compile_error, concat_idents, + concat_bytes, concat, const_format_args, core_panic, @@ -252,6 +253,7 @@ pub mod known { 
std_panic, stringify, trace_macros, + unreachable, // Builtin derives Copy, Clone, diff --git a/crates/hir_expand/src/proc_macro.rs b/crates/hir_expand/src/proc_macro.rs index 27c45f002b44..df6c38761c39 100644 --- a/crates/hir_expand/src/proc_macro.rs +++ b/crates/hir_expand/src/proc_macro.rs @@ -1,9 +1,8 @@ //! Proc Macro Expander stub use base_db::{CrateId, ProcMacroExpansionError, ProcMacroId, ProcMacroKind}; -use mbe::ExpandResult; -use crate::db::AstDatabase; +use crate::{db::AstDatabase, ExpandError, ExpandResult}; #[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)] pub struct ProcMacroExpander { @@ -37,7 +36,11 @@ impl ProcMacroExpander { let krate_graph = db.crate_graph(); let proc_macro = match krate_graph[self.krate].proc_macro.get(id.0 as usize) { Some(proc_macro) => proc_macro, - None => return ExpandResult::str_err("No proc-macro found.".to_string()), + None => { + return ExpandResult::only_err(ExpandError::Other( + "No proc-macro found.".into(), + )) + } }; // Proc macros have access to the environment variables of the invoking crate. @@ -51,17 +54,17 @@ impl ProcMacroExpander { { ExpandResult { value: tt.clone(), - err: Some(mbe::ExpandError::Other(text.into())), + err: Some(ExpandError::Other(text.into())), } } ProcMacroExpansionError::System(text) | ProcMacroExpansionError::Panic(text) => { - ExpandResult::only_err(mbe::ExpandError::Other(text.into())) + ExpandResult::only_err(ExpandError::Other(text.into())) } }, } } - None => ExpandResult::only_err(mbe::ExpandError::UnresolvedProcMacro), + None => ExpandResult::only_err(ExpandError::UnresolvedProcMacro), } } } diff --git a/crates/hir_ty/src/autoderef.rs b/crates/hir_ty/src/autoderef.rs index 6266554ecf67..dffb36b5de31 100644 --- a/crates/hir_ty/src/autoderef.rs +++ b/crates/hir_ty/src/autoderef.rs @@ -3,20 +3,16 @@ //! reference to a type with the field `bar`. This is an approximation of the //! logic in rustc (which lives in librustc_typeck/check/autoderef.rs). 
-use std::iter::successors; +use std::sync::Arc; -use base_db::CrateId; -use chalk_ir::{cast::Cast, fold::Fold, interner::HasInterner, VariableKind}; -use hir_def::lang_item::LangItemTarget; +use chalk_ir::cast::Cast; use hir_expand::name::name; use limit::Limit; use syntax::SmolStr; -use tracing::{info, warn}; use crate::{ - db::HirDatabase, static_lifetime, AliasEq, AliasTy, BoundVar, Canonical, CanonicalVarKinds, - ConstrainedSubst, DebruijnIndex, Environment, Guidance, InEnvironment, Interner, - ProjectionTyExt, Solution, Substitution, Ty, TyBuilder, TyKind, + db::HirDatabase, infer::unify::InferenceTable, Canonical, Goal, Interner, ProjectionTyExt, + TraitEnvironment, Ty, TyBuilder, TyKind, }; static AUTODEREF_RECURSION_LIMIT: Limit = Limit::new(10); @@ -26,40 +22,34 @@ pub(crate) enum AutoderefKind { Overloaded, } -pub(crate) struct Autoderef<'db> { - db: &'db dyn HirDatabase, - ty: Canonical, +pub(crate) struct Autoderef<'a, 'db> { + pub(crate) table: &'a mut InferenceTable<'db>, + ty: Ty, at_start: bool, - krate: Option, - environment: Environment, steps: Vec<(AutoderefKind, Ty)>, } -impl<'db> Autoderef<'db> { - pub(crate) fn new( - db: &'db dyn HirDatabase, - krate: Option, - ty: InEnvironment>, - ) -> Self { - let InEnvironment { goal: ty, environment } = ty; - Autoderef { db, ty, at_start: true, environment, krate, steps: Vec::new() } +impl<'a, 'db> Autoderef<'a, 'db> { + pub(crate) fn new(table: &'a mut InferenceTable<'db>, ty: Ty) -> Self { + let ty = table.resolve_ty_shallow(&ty); + Autoderef { table, ty, at_start: true, steps: Vec::new() } } pub(crate) fn step_count(&self) -> usize { self.steps.len() } - pub(crate) fn steps(&self) -> &[(AutoderefKind, chalk_ir::Ty)] { + pub(crate) fn steps(&self) -> &[(AutoderefKind, Ty)] { &self.steps } pub(crate) fn final_ty(&self) -> Ty { - self.ty.value.clone() + self.ty.clone() } } -impl Iterator for Autoderef<'_> { - type Item = (Canonical, usize); +impl Iterator for Autoderef<'_, '_> { + type Item = (Ty, 
usize); fn next(&mut self) -> Option { if self.at_start { @@ -71,54 +61,42 @@ impl Iterator for Autoderef<'_> { return None; } - let (kind, new_ty) = if let Some(derefed) = builtin_deref(&self.ty.value) { - ( - AutoderefKind::Builtin, - Canonical { value: derefed.clone(), binders: self.ty.binders.clone() }, - ) - } else { - ( - AutoderefKind::Overloaded, - deref_by_trait( - self.db, - self.krate?, - InEnvironment { goal: &self.ty, environment: self.environment.clone() }, - )?, - ) - }; + let (kind, new_ty) = autoderef_step(self.table, self.ty.clone())?; - self.steps.push((kind, self.ty.value.clone())); + self.steps.push((kind, self.ty.clone())); self.ty = new_ty; Some((self.ty.clone(), self.step_count())) } } +pub(crate) fn autoderef_step(table: &mut InferenceTable, ty: Ty) -> Option<(AutoderefKind, Ty)> { + if let Some(derefed) = builtin_deref(&ty) { + Some((AutoderefKind::Builtin, table.resolve_ty_shallow(derefed))) + } else { + Some((AutoderefKind::Overloaded, deref_by_trait(table, ty)?)) + } +} + // FIXME: replace uses of this with Autoderef above pub fn autoderef<'a>( db: &'a dyn HirDatabase, - krate: Option, - ty: InEnvironment>, + env: Arc, + ty: Canonical, ) -> impl Iterator> + 'a { - let InEnvironment { goal: ty, environment } = ty; - successors(Some(ty), move |ty| { - deref(db, krate?, InEnvironment { goal: ty, environment: environment.clone() }) - }) - .take(AUTODEREF_RECURSION_LIMIT.inner()) + let mut table = InferenceTable::new(db, env); + let ty = table.instantiate_canonical(ty); + let mut autoderef = Autoderef::new(&mut table, ty); + let mut v = Vec::new(); + while let Some((ty, _steps)) = autoderef.next() { + v.push(autoderef.table.canonicalize(ty).value); + } + v.into_iter() } -pub(crate) fn deref( - db: &dyn HirDatabase, - krate: CrateId, - ty: InEnvironment<&Canonical>, -) -> Option> { +pub(crate) fn deref(table: &mut InferenceTable, ty: Ty) -> Option { let _p = profile::span("deref"); - match builtin_deref(&ty.goal.value) { - Some(derefed) => { 
- Some(Canonical { value: derefed.clone(), binders: ty.goal.binders.clone() }) - } - None => deref_by_trait(db, krate, ty), - } + autoderef_step(table, ty).map(|(_, ty)| ty) } fn builtin_deref(ty: &Ty) -> Option<&Ty> { @@ -129,16 +107,12 @@ fn builtin_deref(ty: &Ty) -> Option<&Ty> { } } -fn deref_by_trait( - db: &dyn HirDatabase, - krate: CrateId, - ty: InEnvironment<&Canonical>, -) -> Option> { +fn deref_by_trait(table: &mut InferenceTable, ty: Ty) -> Option { + let db = table.db; let _p = profile::span("deref_by_trait"); - let deref_trait = match db.lang_item(krate, SmolStr::new_inline("deref"))? { - LangItemTarget::TraitId(it) => it, - _ => return None, - }; + let deref_trait = db + .lang_item(table.trait_env.krate, SmolStr::new_inline("deref")) + .and_then(|l| l.as_trait())?; let target = db.trait_data(deref_trait).associated_type_by_name(&name![Target])?; let projection = { @@ -148,114 +122,16 @@ fn deref_by_trait( // namely Deref's Self type return None; } - b.push(ty.goal.value.clone()).build() + b.push(ty).build() }; - // FIXME make the Canonical / bound var handling nicer - // Check that the type implements Deref at all let trait_ref = projection.trait_ref(db); - let implements_goal = Canonical { - binders: ty.goal.binders.clone(), - value: InEnvironment { - goal: trait_ref.cast(Interner), - environment: ty.environment.clone(), - }, - }; - if db.trait_solve(krate, implements_goal).is_none() { - return None; - } + let implements_goal: Goal = trait_ref.cast(Interner); + table.try_obligation(implements_goal.clone())?; - // Now do the assoc type projection - let alias_eq = AliasEq { - alias: AliasTy::Projection(projection), - ty: TyKind::BoundVar(BoundVar::new( - DebruijnIndex::INNERMOST, - ty.goal.binders.len(Interner), - )) - .intern(Interner), - }; - - let in_env = InEnvironment { goal: alias_eq.cast(Interner), environment: ty.environment }; - - let canonical = Canonical { - value: in_env, - binders: CanonicalVarKinds::from_iter( - Interner, - 
ty.goal.binders.iter(Interner).cloned().chain(Some(chalk_ir::WithKind::new( - VariableKind::Ty(chalk_ir::TyVariableKind::General), - chalk_ir::UniverseIndex::ROOT, - ))), - ), - }; - - let solution = db.trait_solve(krate, canonical)?; - - match &solution { - Solution::Unique(Canonical { value: ConstrainedSubst { subst, .. }, binders }) - | Solution::Ambig(Guidance::Definite(Canonical { value: subst, binders })) => { - // FIXME: vars may contain solutions for any inference variables - // that happened to be inside ty. To correctly handle these, we - // would have to pass the solution up to the inference context, but - // that requires a larger refactoring (especially if the deref - // happens during method resolution). So for the moment, we just - // check that we're not in the situation where we would actually - // need to handle the values of the additional variables, i.e. - // they're just being 'passed through'. In the 'standard' case where - // we have `impl Deref for Foo { Target = T }`, that should be - // the case. - - // FIXME: if the trait solver decides to truncate the type, these - // assumptions will be broken. 
We would need to properly introduce - // new variables in that case - - for i in 1..binders.len(Interner) { - if subst.at(Interner, i - 1).assert_ty_ref(Interner).kind(Interner) - != &TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, i - 1)) - { - warn!("complex solution for derefing {:?}: {:?}, ignoring", ty.goal, solution); - return None; - } - } - // FIXME: we remove lifetime variables here since they can confuse - // the method resolution code later - Some(fixup_lifetime_variables(Canonical { - value: subst.at(Interner, subst.len(Interner) - 1).assert_ty_ref(Interner).clone(), - binders: binders.clone(), - })) - } - Solution::Ambig(_) => { - info!("Ambiguous solution for derefing {:?}: {:?}", ty.goal, solution); - None - } - } -} + table.register_obligation(implements_goal); -fn fixup_lifetime_variables + HasInterner>( - c: Canonical, -) -> Canonical { - // Removes lifetime variables from the Canonical, replacing them by static lifetimes. - let mut i = 0; - let subst = Substitution::from_iter( - Interner, - c.binders.iter(Interner).map(|vk| match vk.kind { - VariableKind::Ty(_) => { - let index = i; - i += 1; - BoundVar::new(DebruijnIndex::INNERMOST, index).to_ty(Interner).cast(Interner) - } - VariableKind::Lifetime => static_lifetime().cast(Interner), - VariableKind::Const(_) => unimplemented!(), - }), - ); - let binders = CanonicalVarKinds::from_iter( - Interner, - c.binders.iter(Interner).filter(|vk| match vk.kind { - VariableKind::Ty(_) => true, - VariableKind::Lifetime => false, - VariableKind::Const(_) => true, - }), - ); - let value = subst.apply(c.value, Interner); - Canonical { binders, value } + let result = table.normalize_projection_ty(projection); + Some(table.resolve_ty_shallow(&result)) } diff --git a/crates/hir_ty/src/display.rs b/crates/hir_ty/src/display.rs index f02f4ac02478..2020834fbc49 100644 --- a/crates/hir_ty/src/display.rs +++ b/crates/hir_ty/src/display.rs @@ -1094,20 +1094,32 @@ impl HirDisplay for TypeRef { inner.hir_fmt(f)?; 
write!(f, "]")?; } - TypeRef::Fn(tys, is_varargs) => { + TypeRef::Fn(parameters, is_varargs) => { // FIXME: Function pointer qualifiers. write!(f, "fn(")?; - f.write_joined(&tys[..tys.len() - 1], ", ")?; - if *is_varargs { - write!(f, "{}...", if tys.len() == 1 { "" } else { ", " })?; - } - write!(f, ")")?; - let ret_ty = tys.last().unwrap(); - match ret_ty { - TypeRef::Tuple(tup) if tup.is_empty() => {} - _ => { - write!(f, " -> ")?; - ret_ty.hir_fmt(f)?; + if let Some(((_, return_type), function_parameters)) = parameters.split_last() { + for index in 0..function_parameters.len() { + let (param_name, param_type) = &function_parameters[index]; + if let Some(name) = param_name { + write!(f, "{}: ", name)?; + } + + param_type.hir_fmt(f)?; + + if index != function_parameters.len() - 1 { + write!(f, ", ")?; + } + } + if *is_varargs { + write!(f, "{}...", if parameters.len() == 1 { "" } else { ", " })?; + } + write!(f, ")")?; + match &return_type { + TypeRef::Tuple(tup) if tup.is_empty() => {} + _ => { + write!(f, " -> ")?; + return_type.hir_fmt(f)?; + } } } } @@ -1177,7 +1189,18 @@ impl HirDisplay for Path { write!(f, "super")?; } } - (_, PathKind::DollarCrate(_)) => write!(f, "{{extern_crate}}")?, + (_, PathKind::DollarCrate(id)) => { + // Resolve `$crate` to the crate's display name. + // FIXME: should use the dependency name instead if available, but that depends on + // the crate invoking `HirDisplay` + let crate_graph = f.db.crate_graph(); + let name = crate_graph[*id] + .display_name + .as_ref() + .map(|name| name.canonical_name()) + .unwrap_or("$crate"); + write!(f, "{name}")? 
+ } } for (seg_idx, segment) in self.segments().iter().enumerate() { diff --git a/crates/hir_ty/src/infer.rs b/crates/hir_ty/src/infer.rs index 173380654e0c..c84604d69bf8 100644 --- a/crates/hir_ty/src/infer.rs +++ b/crates/hir_ty/src/infer.rs @@ -46,7 +46,7 @@ use crate::{ pub use unify::could_unify; pub(crate) use unify::unify; -mod unify; +pub(crate) mod unify; mod path; mod expr; mod pat; @@ -228,7 +228,7 @@ pub enum Adjust { /// The target type is `U` in both cases, with the region and mutability /// being those shared by both the receiver and the returned reference. #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] -pub struct OverloadedDeref(Mutability); +pub struct OverloadedDeref(pub Mutability); #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] pub enum AutoBorrow { @@ -455,10 +455,6 @@ impl<'a> InferenceContext<'a> { self.result.method_resolutions.insert(expr, (func, subst)); } - fn write_field_resolution(&mut self, expr: ExprId, field: FieldId) { - self.result.field_resolutions.insert(expr, field); - } - fn write_variant_resolution(&mut self, id: ExprOrPatId, variant: VariantId) { self.result.variant_resolutions.insert(id, variant); } diff --git a/crates/hir_ty/src/infer/coerce.rs b/crates/hir_ty/src/infer/coerce.rs index bddb79c50159..528e3ba88270 100644 --- a/crates/hir_ty/src/infer/coerce.rs +++ b/crates/hir_ty/src/infer/coerce.rs @@ -259,27 +259,19 @@ impl<'a> InferenceContext<'a> { // details of coercion errors though, so I think it's useful to leave // the structure like it is. 
- let canonicalized = self.canonicalize(from_ty.clone()); - let mut autoderef = Autoderef::new( - self.db, - self.resolver.krate(), - InEnvironment { - goal: canonicalized.value.clone(), - environment: self.trait_env.env.clone(), - }, - ); + let snapshot = self.table.snapshot(); + + let mut autoderef = Autoderef::new(&mut self.table, from_ty.clone()); let mut first_error = None; let mut found = None; - for (referent_ty, autoderefs) in autoderef.by_ref() { + while let Some((referent_ty, autoderefs)) = autoderef.next() { if autoderefs == 0 { // Don't let this pass, otherwise it would cause // &T to autoref to &&T. continue; } - let referent_ty = canonicalized.decanonicalize_ty(&mut self.table, referent_ty); - // At this point, we have deref'd `a` to `referent_ty`. So // imagine we are coercing from `&'a mut Vec` to `&'b mut [T]`. // In the autoderef loop for `&'a mut Vec`, we would get @@ -304,7 +296,7 @@ impl<'a> InferenceContext<'a> { // from `&mut T` to `&U`. let lt = static_lifetime(); // FIXME: handle lifetimes correctly, see rustc let derefd_from_ty = TyKind::Ref(to_mt, lt, referent_ty).intern(Interner); - match self.table.try_unify(&derefd_from_ty, to_ty) { + match autoderef.table.try_unify(&derefd_from_ty, to_ty) { Ok(result) => { found = Some(result.map(|()| derefd_from_ty)); break; @@ -325,6 +317,7 @@ impl<'a> InferenceContext<'a> { let InferOk { value: ty, goals } = match found { Some(d) => d, None => { + self.table.rollback_to(snapshot); let err = first_error.expect("coerce_borrowed_pointer had no error"); return Err(err); } @@ -345,29 +338,13 @@ impl<'a> InferenceContext<'a> { return success(vec![], ty, goals); } - let mut adjustments = self.auto_deref_adjust_steps(&autoderef); + let mut adjustments = auto_deref_adjust_steps(&autoderef); adjustments .push(Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(to_mt)), target: ty.clone() }); success(adjustments, ty, goals) } - pub(super) fn auto_deref_adjust_steps(&self, autoderef: &Autoderef<'_>) -> Vec { - 
let steps = autoderef.steps(); - let targets = - steps.iter().skip(1).map(|(_, ty)| ty.clone()).chain(iter::once(autoderef.final_ty())); - steps - .iter() - .map(|(kind, _source)| match kind { - // We do not know what kind of deref we require at this point yet - AutoderefKind::Overloaded => Some(OverloadedDeref(Mutability::Not)), - AutoderefKind::Builtin => None, - }) - .zip(targets) - .map(|(autoderef, target)| Adjustment { kind: Adjust::Deref(autoderef), target }) - .collect() - } - /// Attempts to coerce from the type of a Rust function item into a function pointer. fn coerce_from_fn_item(&mut self, from_ty: Ty, to_ty: &Ty) -> CoerceResult { match to_ty.kind(Interner) { @@ -578,8 +555,10 @@ impl<'a> InferenceContext<'a> { ); } Solution::Ambig(Guidance::Definite(subst)) => { + // FIXME need to record an obligation here canonicalized.apply_solution(&mut self.table, subst) } + // FIXME actually we maybe should also accept unknown guidance here _ => return Err(TypeError), }; let unsize = @@ -620,3 +599,19 @@ fn coerce_mutabilities(from: Mutability, to: Mutability) -> Result<(), TypeError (Mutability::Not, Mutability::Mut) => Err(TypeError), } } + +pub(super) fn auto_deref_adjust_steps(autoderef: &Autoderef<'_, '_>) -> Vec { + let steps = autoderef.steps(); + let targets = + steps.iter().skip(1).map(|(_, ty)| ty.clone()).chain(iter::once(autoderef.final_ty())); + steps + .iter() + .map(|(kind, _source)| match kind { + // We do not know what kind of deref we require at this point yet + AutoderefKind::Overloaded => Some(OverloadedDeref(Mutability::Not)), + AutoderefKind::Builtin => None, + }) + .zip(targets) + .map(|(autoderef, target)| Adjustment { kind: Adjust::Deref(autoderef), target }) + .collect() +} diff --git a/crates/hir_ty/src/infer/expr.rs b/crates/hir_ty/src/infer/expr.rs index 4f1bdee705dd..ad54b72b4b55 100644 --- a/crates/hir_ty/src/infer/expr.rs +++ b/crates/hir_ty/src/infer/expr.rs @@ -1,6 +1,7 @@ //! Type inference for expressions. 
use std::{ + collections::hash_map::Entry, iter::{repeat, repeat_with}, mem, sync::Arc, @@ -8,10 +9,7 @@ use std::{ use chalk_ir::{cast::Cast, fold::Shift, Mutability, TyVariableKind}; use hir_def::{ - expr::{ - ArithOp, Array, BinaryOp, CmpOp, Expr, ExprId, Literal, MatchGuard, Ordering, Statement, - UnaryOp, - }, + expr::{ArithOp, Array, BinaryOp, CmpOp, Expr, ExprId, Literal, Ordering, Statement, UnaryOp}, path::{GenericArg, GenericArgs}, resolver::resolver_for_expr, FieldId, FunctionId, ItemContainerId, Lookup, @@ -29,15 +27,14 @@ use crate::{ method_resolution, primitive::{self, UintTy}, static_lifetime, to_chalk_trait_id, - traits::FnTrait, utils::{generics, Generics}, - AdtId, Binders, CallableDefId, FnPointer, FnSig, FnSubst, InEnvironment, Interner, - ProjectionTyExt, Rawness, Scalar, Substitution, TraitRef, Ty, TyBuilder, TyExt, TyKind, + AdtId, Binders, CallableDefId, FnPointer, FnSig, FnSubst, Interner, Rawness, Scalar, + Substitution, TraitRef, Ty, TyBuilder, TyExt, TyKind, }; use super::{ - find_breakable, BindingMode, BreakableContext, Diverges, Expectation, InferenceContext, - InferenceDiagnostic, TypeMismatch, + coerce::auto_deref_adjust_steps, find_breakable, BindingMode, BreakableContext, Diverges, + Expectation, InferenceContext, InferenceDiagnostic, TypeMismatch, }; impl<'a> InferenceContext<'a> { @@ -80,51 +77,6 @@ impl<'a> InferenceContext<'a> { } } - fn callable_sig_from_fn_trait(&mut self, ty: &Ty, num_args: usize) -> Option<(Vec, Ty)> { - let krate = self.resolver.krate()?; - let fn_once_trait = FnTrait::FnOnce.get_id(self.db, krate)?; - let output_assoc_type = - self.db.trait_data(fn_once_trait).associated_type_by_name(&name![Output])?; - - let mut arg_tys = vec![]; - let arg_ty = TyBuilder::tuple(num_args) - .fill(repeat_with(|| { - let arg = self.table.new_type_var(); - arg_tys.push(arg.clone()); - arg - })) - .build(); - - let projection = { - let b = TyBuilder::assoc_type_projection(self.db, output_assoc_type); - if b.remaining() != 2 
{ - return None; - } - b.push(ty.clone()).push(arg_ty).build() - }; - - let trait_env = self.trait_env.env.clone(); - let obligation = InEnvironment { - goal: projection.trait_ref(self.db).cast(Interner), - environment: trait_env, - }; - let canonical = self.canonicalize(obligation.clone()); - if self.db.trait_solve(krate, canonical.value.cast(Interner)).is_some() { - self.push_obligation(obligation.goal); - let return_ty = self.table.normalize_projection_ty(projection); - Some((arg_tys, return_ty)) - } else { - None - } - } - - pub(crate) fn callable_sig(&mut self, ty: &Ty, num_args: usize) -> Option<(Vec, Ty)> { - match ty.callable_sig(self.db) { - Some(sig) => Some((sig.params().to_vec(), sig.ret().clone())), - None => self.callable_sig_from_fn_trait(ty, num_args), - } - } - fn infer_expr_inner(&mut self, tgt_expr: ExprId, expected: &Expectation) -> Ty { self.db.unwind_if_cancelled(); @@ -132,7 +84,6 @@ impl<'a> InferenceContext<'a> { let ty = match &body[tgt_expr] { Expr::Missing => self.err_ty(), &Expr::If { condition, then_branch, else_branch } => { - // if let is desugared to match, so this is always simple if self.infer_expr( condition, &Expectation::has_type(TyKind::Scalar(Scalar::Bool).intern(Interner)), @@ -158,6 +109,11 @@ impl<'a> InferenceContext<'a> { coerce.complete() } + &Expr::Let { pat, expr } => { + let input_ty = self.infer_expr(expr, &Expectation::none()); + self.infer_pat(pat, &input_ty, BindingMode::default()); + TyKind::Scalar(Scalar::Bool).intern(Interner) + } Expr::Block { statements, tail, label, id: _ } => { let old_resolver = mem::replace( &mut self.resolver, @@ -227,7 +183,6 @@ impl<'a> InferenceContext<'a> { coerce: CoerceMany::new(self.err_ty()), label: label.map(|label| self.body[label].name.clone()), }); - // while let is desugared to a match loop, so this is always simple while self.infer_expr( *condition, &Expectation::has_type(TyKind::Scalar(Scalar::Bool).intern(Interner)), @@ -317,22 +272,19 @@ impl<'a> InferenceContext<'a> { 
} Expr::Call { callee, args } => { let callee_ty = self.infer_expr(*callee, &Expectation::none()); - let canonicalized = self.canonicalize(callee_ty.clone()); - let mut derefs = Autoderef::new( - self.db, - self.resolver.krate(), - InEnvironment { - goal: canonicalized.value.clone(), - environment: self.table.trait_env.env.clone(), - }, - ); - let res = derefs.by_ref().find_map(|(callee_deref_ty, _)| { - let ty = &canonicalized.decanonicalize_ty(&mut self.table, callee_deref_ty); - self.callable_sig(ty, args.len()) - }); + let mut derefs = Autoderef::new(&mut self.table, callee_ty.clone()); + let mut res = None; + // manual loop to be able to access `derefs.table` + while let Some((callee_deref_ty, _)) = derefs.next() { + res = derefs.table.callable_sig(&callee_deref_ty, args.len()); + if res.is_some() { + break; + } + } let (param_tys, ret_ty): (Vec, Ty) = match res { Some(res) => { - self.write_expr_adj(*callee, self.auto_deref_adjust_steps(&derefs)); + let adjustments = auto_deref_adjust_steps(&derefs); + self.write_expr_adj(*callee, adjustments); res } None => (Vec::new(), self.err_ty()), @@ -378,20 +330,11 @@ impl<'a> InferenceContext<'a> { for arm in arms.iter() { self.diverges = Diverges::Maybe; let _pat_ty = self.infer_pat(arm.pat, &input_ty, BindingMode::default()); - match arm.guard { - Some(MatchGuard::If { expr: guard_expr }) => { - self.infer_expr( - guard_expr, - &Expectation::has_type( - TyKind::Scalar(Scalar::Bool).intern(Interner), - ), - ); - } - Some(MatchGuard::IfLet { expr, pat }) => { - let input_ty = self.infer_expr(expr, &Expectation::none()); - let _pat_ty = self.infer_pat(pat, &input_ty, BindingMode::default()); - } - _ => {} + if let Some(guard_expr) = arm.guard { + self.infer_expr( + guard_expr, + &Expectation::has_type(TyKind::Scalar(Scalar::Bool).intern(Interner)), + ); } let arm_ty = self.infer_expr_inner(arm.expr, &expected); @@ -496,78 +439,67 @@ impl<'a> InferenceContext<'a> { } Expr::Field { expr, name } => { let receiver_ty = 
self.infer_expr_inner(*expr, &Expectation::none()); - let canonicalized = self.canonicalize(receiver_ty); - - let mut autoderef = Autoderef::new( - self.db, - self.resolver.krate(), - InEnvironment { - goal: canonicalized.value.clone(), - environment: self.trait_env.env.clone(), - }, - ); + + let mut autoderef = Autoderef::new(&mut self.table, receiver_ty); let ty = autoderef.by_ref().find_map(|(derefed_ty, _)| { - let module = self.resolver.module(); - let db = self.db; - let is_visible = |field_id: &FieldId| { - module - .map(|mod_id| { - db.field_visibilities(field_id.parent)[field_id.local_id] - .is_visible_from(db.upcast(), mod_id) - }) - .unwrap_or(true) - }; - match canonicalized - .decanonicalize_ty(&mut self.table, derefed_ty) - .kind(Interner) - { - TyKind::Tuple(_, substs) => name.as_tuple_index().and_then(|idx| { - substs - .as_slice(Interner) - .get(idx) - .map(|a| a.assert_ty_ref(Interner)) - .cloned() - }), + let (field_id, parameters) = match derefed_ty.kind(Interner) { + TyKind::Tuple(_, substs) => { + return name.as_tuple_index().and_then(|idx| { + substs + .as_slice(Interner) + .get(idx) + .map(|a| a.assert_ty_ref(Interner)) + .cloned() + }); + } TyKind::Adt(AdtId(hir_def::AdtId::StructId(s)), parameters) => { let local_id = self.db.struct_data(*s).variant_data.field(name)?; let field = FieldId { parent: (*s).into(), local_id }; - if is_visible(&field) { - self.write_field_resolution(tgt_expr, field); - Some( - self.db.field_types((*s).into())[field.local_id] - .clone() - .substitute(Interner, ¶meters), - ) - } else { - None - } + (field, parameters.clone()) } TyKind::Adt(AdtId(hir_def::AdtId::UnionId(u)), parameters) => { let local_id = self.db.union_data(*u).variant_data.field(name)?; let field = FieldId { parent: (*u).into(), local_id }; - if is_visible(&field) { - self.write_field_resolution(tgt_expr, field); - Some( - self.db.field_types((*u).into())[field.local_id] - .clone() - .substitute(Interner, ¶meters), - ) - } else { - None - } + 
(field, parameters.clone()) + } + _ => return None, + }; + let module = self.resolver.module(); + let is_visible = module + .map(|mod_id| { + self.db.field_visibilities(field_id.parent)[field_id.local_id] + .is_visible_from(self.db.upcast(), mod_id) + }) + .unwrap_or(true); + if !is_visible { + // Write down the first field resolution even if it is not visible + // This aids IDE features for private fields like goto def and in + // case of autoderef finding an applicable field, this will be + // overwritten in a following cycle + if let Entry::Vacant(entry) = self.result.field_resolutions.entry(tgt_expr) + { + entry.insert(field_id); } - _ => None, + return None; } + // can't have `write_field_resolution` here because `self.table` is borrowed :( + self.result.field_resolutions.insert(tgt_expr, field_id); + let ty = self.db.field_types(field_id.parent)[field_id.local_id] + .clone() + .substitute(Interner, ¶meters); + Some(ty) }); let ty = match ty { Some(ty) => { - self.write_expr_adj(*expr, self.auto_deref_adjust_steps(&autoderef)); + let adjustments = auto_deref_adjust_steps(&autoderef); + self.write_expr_adj(*expr, adjustments); + let ty = self.insert_type_vars(ty); + let ty = self.normalize_associated_types_in(ty); ty } - None => self.err_ty(), + _ => self.err_ty(), }; - let ty = self.insert_type_vars(ty); - self.normalize_associated_types_in(ty) + ty } Expr::Await { expr } => { let inner_ty = self.infer_expr_inner(*expr, &Expectation::none()); @@ -625,25 +557,9 @@ impl<'a> InferenceContext<'a> { let inner_ty = self.infer_expr_inner(*expr, &Expectation::none()); let inner_ty = self.resolve_ty_shallow(&inner_ty); match op { - UnaryOp::Deref => match self.resolver.krate() { - Some(krate) => { - let canonicalized = self.canonicalize(inner_ty); - match autoderef::deref( - self.db, - krate, - InEnvironment { - goal: &canonicalized.value, - environment: self.trait_env.env.clone(), - }, - ) { - Some(derefed_ty) => { - canonicalized.decanonicalize_ty(&mut self.table, 
derefed_ty) - } - None => self.err_ty(), - } - } - None => self.err_ty(), - }, + UnaryOp::Deref => { + autoderef::deref(&mut self.table, inner_ty).unwrap_or_else(|| self.err_ty()) + } UnaryOp::Neg => { match inner_ty.kind(Interner) { // Fast path for builtins @@ -729,20 +645,19 @@ impl<'a> InferenceContext<'a> { let base_ty = self.infer_expr_inner(*base, &Expectation::none()); let index_ty = self.infer_expr(*index, &Expectation::none()); - if let (Some(index_trait), Some(krate)) = - (self.resolve_ops_index(), self.resolver.krate()) - { - let canonicalized = self.canonicalize(base_ty); - let self_ty = method_resolution::resolve_indexing_op( + if let Some(index_trait) = self.resolve_ops_index() { + let canonicalized = self.canonicalize(base_ty.clone()); + let receiver_adjustments = method_resolution::resolve_indexing_op( self.db, - &canonicalized.value, self.trait_env.clone(), - krate, + canonicalized.value, index_trait, ); - let self_ty = self_ty.map_or(self.err_ty(), |t| { - canonicalized.decanonicalize_ty(&mut self.table, t) - }); + let (self_ty, adj) = receiver_adjustments + .map_or((self.err_ty(), Vec::new()), |adj| { + adj.apply(&mut self.table, base_ty) + }); + self.write_expr_adj(*base, adj); self.resolve_associated_type_with_params( self_ty, self.resolve_ops_index_output(), @@ -989,22 +904,20 @@ impl<'a> InferenceContext<'a> { let traits_in_scope = self.resolver.traits_in_scope(self.db.upcast()); - let resolved = self.resolver.krate().and_then(|krate| { - method_resolution::lookup_method( - &canonicalized_receiver.value, - self.db, - self.trait_env.clone(), - krate, - &traits_in_scope, - self.resolver.module().into(), - method_name, - ) - }); + let resolved = method_resolution::lookup_method( + &canonicalized_receiver.value, + self.db, + self.trait_env.clone(), + &traits_in_scope, + self.resolver.module().into(), + method_name, + ); let (receiver_ty, method_ty, substs) = match resolved { - Some((ty, func)) => { - let ty = 
canonicalized_receiver.decanonicalize_ty(&mut self.table, ty); + Some((adjust, func)) => { + let (ty, adjustments) = adjust.apply(&mut self.table, receiver_ty); let generics = generics(self.db.upcast(), func.into()); - let substs = self.substs_for_method_call(generics, generic_args, &ty); + let substs = self.substs_for_method_call(generics, generic_args); + self.write_expr_adj(receiver, adjustments); self.write_method_resolution(tgt_expr, func, substs.clone()); (ty, self.db.value_ty(func.into()), substs) } @@ -1117,20 +1030,15 @@ impl<'a> InferenceContext<'a> { &mut self, def_generics: Generics, generic_args: Option<&GenericArgs>, - receiver_ty: &Ty, ) -> Substitution { let (parent_params, self_params, type_params, impl_trait_params) = def_generics.provenance_split(); assert_eq!(self_params, 0); // method shouldn't have another Self param let total_len = parent_params + type_params + impl_trait_params; let mut substs = Vec::with_capacity(total_len); - // Parent arguments are unknown, except for the receiver type - for (_id, param) in def_generics.iter_parent() { - if param.provenance == hir_def::generics::TypeParamProvenance::TraitSelf { - substs.push(receiver_ty.clone()); - } else { - substs.push(self.table.new_type_var()); - } + // Parent arguments are unknown + for _ in def_generics.iter_parent() { + substs.push(self.table.new_type_var()); } // handle provided type arguments if let Some(generic_args) = generic_args { diff --git a/crates/hir_ty/src/infer/path.rs b/crates/hir_ty/src/infer/path.rs index b63ef2ffdc8a..0d6c8f12d24f 100644 --- a/crates/hir_ty/src/infer/path.rs +++ b/crates/hir_ty/src/infer/path.rs @@ -218,14 +218,12 @@ impl<'a> InferenceContext<'a> { } let canonical_ty = self.canonicalize(ty.clone()); - let krate = self.resolver.krate()?; let traits_in_scope = self.resolver.traits_in_scope(self.db.upcast()); method_resolution::iterate_method_candidates( &canonical_ty.value, self.db, self.table.trait_env.clone(), - krate, &traits_in_scope, 
self.resolver.module().into(), Some(name), diff --git a/crates/hir_ty/src/infer/unify.rs b/crates/hir_ty/src/infer/unify.rs index bb7cdb677e37..21b48b9d8025 100644 --- a/crates/hir_ty/src/infer/unify.rs +++ b/crates/hir_ty/src/infer/unify.rs @@ -1,6 +1,6 @@ //! Unification and canonicalization logic. -use std::{fmt, mem, sync::Arc}; +use std::{fmt, iter, mem, sync::Arc}; use chalk_ir::{ cast::Cast, fold::Fold, interner::HasInterner, zip::Zip, FloatTy, IntTy, NoSolution, @@ -8,12 +8,14 @@ use chalk_ir::{ }; use chalk_solve::infer::ParameterEnaVariableExt; use ena::unify::UnifyKey; +use hir_expand::name; use super::{InferOk, InferResult, InferenceContext, TypeError}; use crate::{ - db::HirDatabase, fold_tys, static_lifetime, AliasEq, AliasTy, BoundVar, Canonical, Const, - DebruijnIndex, GenericArg, Goal, Guidance, InEnvironment, InferenceVar, Interner, Lifetime, - ProjectionTy, Scalar, Solution, Substitution, TraitEnvironment, Ty, TyKind, VariableKind, + db::HirDatabase, fold_tys, static_lifetime, traits::FnTrait, AliasEq, AliasTy, BoundVar, + Canonical, Const, DebruijnIndex, GenericArg, Goal, Guidance, InEnvironment, InferenceVar, + Interner, Lifetime, ProjectionTy, ProjectionTyExt, Scalar, Solution, Substitution, + TraitEnvironment, Ty, TyBuilder, TyExt, TyKind, VariableKind, }; impl<'a> InferenceContext<'a> { @@ -24,32 +26,20 @@ impl<'a> InferenceContext<'a> { where T::Result: HasInterner, { - // try to resolve obligations before canonicalizing, since this might - // result in new knowledge about variables - self.resolve_obligations_as_possible(); self.table.canonicalize(t) } } #[derive(Debug, Clone)] -pub(super) struct Canonicalized +pub(crate) struct Canonicalized where T: HasInterner, { - pub(super) value: Canonical, + pub(crate) value: Canonical, free_vars: Vec, } impl> Canonicalized { - /// this method is wrong and shouldn't exist - pub(super) fn decanonicalize_ty(&self, table: &mut InferenceTable, ty: Canonical) -> Ty { - let mut vars = 
self.free_vars.clone(); - while ty.binders.len(Interner) > vars.len() { - vars.push(table.new_type_var().cast(Interner)); - } - chalk_ir::Substitute::apply(&vars, ty.value, Interner) - } - pub(super) fn apply_solution( &self, ctx: &mut InferenceTable, @@ -203,13 +193,16 @@ impl<'a> InferenceTable<'a> { .intern(Interner) } - pub(super) fn canonicalize + HasInterner>( + pub(crate) fn canonicalize + HasInterner>( &mut self, t: T, ) -> Canonicalized where T::Result: HasInterner, { + // try to resolve obligations before canonicalizing, since this might + // result in new knowledge about variables + self.resolve_obligations_as_possible(); let result = self.var_unification_table.canonicalize(Interner, t); let free_vars = result .free_vars @@ -225,7 +218,7 @@ impl<'a> InferenceTable<'a> { /// type annotation (e.g. from a let type annotation, field type or function /// call). `make_ty` handles this already, but e.g. for field types we need /// to do it as well. - pub(super) fn normalize_associated_types_in(&mut self, ty: Ty) -> Ty { + pub(crate) fn normalize_associated_types_in(&mut self, ty: Ty) -> Ty { fold_tys( ty, |ty, _| match ty.kind(Interner) { @@ -238,7 +231,7 @@ impl<'a> InferenceTable<'a> { ) } - pub(super) fn normalize_projection_ty(&mut self, proj_ty: ProjectionTy) -> Ty { + pub(crate) fn normalize_projection_ty(&mut self, proj_ty: ProjectionTy) -> Ty { let var = self.new_type_var(); let alias_eq = AliasEq { alias: AliasTy::Projection(proj_ty), ty: var.clone() }; let obligation = alias_eq.cast(Interner); @@ -299,6 +292,13 @@ impl<'a> InferenceTable<'a> { self.resolve_with_fallback_inner(&mut Vec::new(), t, &fallback) } + pub(crate) fn instantiate_canonical(&mut self, canonical: Canonical) -> T::Result + where + T: HasInterner + Fold + std::fmt::Debug, + { + self.var_unification_table.instantiate_canonical(Interner, canonical) + } + fn resolve_with_fallback_inner( &mut self, var_stack: &mut Vec, @@ -351,6 +351,7 @@ impl<'a> InferenceTable<'a> { /// If `ty` is a 
type variable with known type, returns that type; /// otherwise, return ty. pub(crate) fn resolve_ty_shallow(&mut self, ty: &Ty) -> Ty { + self.resolve_obligations_as_possible(); self.var_unification_table.normalize_ty_shallow(Interner, ty).unwrap_or_else(|| ty.clone()) } @@ -363,6 +364,16 @@ impl<'a> InferenceTable<'a> { self.var_unification_table.rollback_to(snapshot.var_table_snapshot); } + /// Checks an obligation without registering it. Useful mostly to check + /// whether a trait *might* be implemented before deciding to 'lock in' the + /// choice (during e.g. method resolution or deref). + pub(crate) fn try_obligation(&mut self, goal: Goal) -> Option { + let in_env = InEnvironment::new(&self.trait_env.env, goal); + let canonicalized = self.canonicalize(in_env); + let solution = self.db.trait_solve(self.trait_env.krate, canonicalized.value); + solution + } + pub(crate) fn register_obligation(&mut self, goal: Goal) { let in_env = InEnvironment::new(&self.trait_env.env, goal); self.register_obligation_in_env(in_env) @@ -522,6 +533,51 @@ impl<'a> InferenceTable<'a> { } } } + + pub(crate) fn callable_sig(&mut self, ty: &Ty, num_args: usize) -> Option<(Vec, Ty)> { + match ty.callable_sig(self.db) { + Some(sig) => Some((sig.params().to_vec(), sig.ret().clone())), + None => self.callable_sig_from_fn_trait(ty, num_args), + } + } + + fn callable_sig_from_fn_trait(&mut self, ty: &Ty, num_args: usize) -> Option<(Vec, Ty)> { + let krate = self.trait_env.krate; + let fn_once_trait = FnTrait::FnOnce.get_id(self.db, krate)?; + let output_assoc_type = + self.db.trait_data(fn_once_trait).associated_type_by_name(&name![Output])?; + + let mut arg_tys = vec![]; + let arg_ty = TyBuilder::tuple(num_args) + .fill(iter::repeat_with(|| { + let arg = self.new_type_var(); + arg_tys.push(arg.clone()); + arg + })) + .build(); + + let projection = { + let b = TyBuilder::assoc_type_projection(self.db, output_assoc_type); + if b.remaining() != 2 { + return None; + } + 
b.push(ty.clone()).push(arg_ty).build() + }; + + let trait_env = self.trait_env.env.clone(); + let obligation = InEnvironment { + goal: projection.trait_ref(self.db).cast(Interner), + environment: trait_env, + }; + let canonical = self.canonicalize(obligation.clone()); + if self.db.trait_solve(krate, canonical.value.cast(Interner)).is_some() { + self.register_obligation(obligation.goal); + let return_ty = self.normalize_projection_ty(projection); + Some((arg_tys, return_ty)) + } else { + None + } + } } impl<'a> fmt::Debug for InferenceTable<'a> { diff --git a/crates/hir_ty/src/lower.rs b/crates/hir_ty/src/lower.rs index 55b1a67ea7ba..a140dd4057c6 100644 --- a/crates/hir_ty/src/lower.rs +++ b/crates/hir_ty/src/lower.rs @@ -201,7 +201,7 @@ impl<'a> TyLoweringContext<'a> { TypeRef::Placeholder => TyKind::Error.intern(Interner), TypeRef::Fn(params, is_varargs) => { let substs = self.with_shifted_in(DebruijnIndex::ONE, |ctx| { - Substitution::from_iter(Interner, params.iter().map(|tr| ctx.lower_ty(tr))) + Substitution::from_iter(Interner, params.iter().map(|(_, tr)| ctx.lower_ty(tr))) }); TyKind::Function(FnPointer { num_binders: 0, // FIXME lower `for<'a> fn()` correctly diff --git a/crates/hir_ty/src/method_resolution.rs b/crates/hir_ty/src/method_resolution.rs index c91b6f2e82d1..44ece57a8e0e 100644 --- a/crates/hir_ty/src/method_resolution.rs +++ b/crates/hir_ty/src/method_resolution.rs @@ -17,10 +17,11 @@ use rustc_hash::{FxHashMap, FxHashSet}; use stdx::never; use crate::{ - autoderef, + autoderef::{self, AutoderefKind}, consteval::{self, ConstExt}, db::HirDatabase, from_foreign_def_id, + infer::{unify::InferenceTable, Adjust, Adjustment, AutoBorrow, OverloadedDeref, PointerCast}, primitive::{self, FloatTy, IntTy, UintTy}, static_lifetime, utils::all_super_traits, @@ -429,28 +430,25 @@ pub fn def_crates( Some(res) } -/// Look up the method with the given name, returning the actual autoderefed -/// receiver type (but without autoref applied yet). 
+/// Look up the method with the given name. pub(crate) fn lookup_method( ty: &Canonical, db: &dyn HirDatabase, env: Arc, - krate: CrateId, traits_in_scope: &FxHashSet, visible_from_module: VisibleFromModule, name: &Name, -) -> Option<(Canonical, FunctionId)> { +) -> Option<(ReceiverAdjustments, FunctionId)> { iterate_method_candidates( ty, db, env, - krate, traits_in_scope, visible_from_module, Some(name), LookupMode::MethodCall, - |ty, f| match f { - AssocItemId::FunctionId(f) => Some((ty.clone(), f)), + |adjustments, f| match f { + AssocItemId::FunctionId(f) => Some((adjustments, f)), _ => None, }, ) @@ -496,33 +494,89 @@ impl From> for VisibleFromModule { } } +#[derive(Debug, Clone, Default)] +pub struct ReceiverAdjustments { + autoref: Option, + autoderefs: usize, + unsize_array: bool, +} + +impl ReceiverAdjustments { + pub(crate) fn apply(&self, table: &mut InferenceTable, ty: Ty) -> (Ty, Vec) { + let mut ty = ty; + let mut adjust = Vec::new(); + for _ in 0..self.autoderefs { + match autoderef::autoderef_step(table, ty.clone()) { + None => { + never!("autoderef not possible for {:?}", ty); + ty = TyKind::Error.intern(Interner); + break; + } + Some((kind, new_ty)) => { + ty = new_ty.clone(); + adjust.push(Adjustment { + kind: Adjust::Deref(match kind { + // FIXME should we know the mutability here? 
+ AutoderefKind::Overloaded => Some(OverloadedDeref(Mutability::Not)), + AutoderefKind::Builtin => None, + }), + target: new_ty, + }); + } + } + } + if self.unsize_array { + ty = match ty.kind(Interner) { + TyKind::Array(inner, _) => TyKind::Slice(inner.clone()).intern(Interner), + _ => { + never!("unsize_array with non-array {:?}", ty); + ty + } + }; + // FIXME this is kind of wrong since the unsize needs to happen to a pointer/reference + adjust.push(Adjustment { + kind: Adjust::Pointer(PointerCast::Unsize), + target: ty.clone(), + }); + } + if let Some(m) = self.autoref { + ty = TyKind::Ref(m, static_lifetime(), ty).intern(Interner); + adjust + .push(Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(m)), target: ty.clone() }); + } + (ty, adjust) + } + + fn with_autoref(&self, m: Mutability) -> ReceiverAdjustments { + Self { autoref: Some(m), ..*self } + } +} + // This would be nicer if it just returned an iterator, but that runs into // lifetime problems, because we need to borrow temp `CrateImplDefs`. // FIXME add a context type here? 
-pub fn iterate_method_candidates( +pub(crate) fn iterate_method_candidates( ty: &Canonical, db: &dyn HirDatabase, env: Arc, - krate: CrateId, traits_in_scope: &FxHashSet, visible_from_module: VisibleFromModule, name: Option<&Name>, mode: LookupMode, - mut callback: impl FnMut(&Canonical, AssocItemId) -> Option, + mut callback: impl FnMut(ReceiverAdjustments, AssocItemId) -> Option, ) -> Option { let mut slot = None; iterate_method_candidates_dyn( ty, db, env, - krate, traits_in_scope, visible_from_module, name, mode, - &mut |ty, item| { + &mut |adj, item| { assert!(slot.is_none()); - if let Some(it) = callback(ty, item) { + if let Some(it) = callback(adj, item) { slot = Some(it); return ControlFlow::Break(()); } @@ -532,28 +586,45 @@ pub fn iterate_method_candidates( slot } +pub fn iterate_path_candidates( + ty: &Canonical, + db: &dyn HirDatabase, + env: Arc, + traits_in_scope: &FxHashSet, + visible_from_module: VisibleFromModule, + name: Option<&Name>, + callback: &mut dyn FnMut(AssocItemId) -> ControlFlow<()>, +) -> ControlFlow<()> { + iterate_method_candidates_dyn( + ty, + db, + env, + traits_in_scope, + visible_from_module, + name, + LookupMode::Path, + // the adjustments are not relevant for path lookup + &mut |_, id| callback(id), + ) +} + pub fn iterate_method_candidates_dyn( ty: &Canonical, db: &dyn HirDatabase, env: Arc, - krate: CrateId, traits_in_scope: &FxHashSet, visible_from_module: VisibleFromModule, name: Option<&Name>, mode: LookupMode, - callback: &mut dyn FnMut(&Canonical, AssocItemId) -> ControlFlow<()>, + callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>, ) -> ControlFlow<()> { match mode { LookupMode::MethodCall => { - // For method calls, rust first does any number of autoderef, and then one - // autoref (i.e. when the method takes &self or &mut self). We just ignore - // the autoref currently -- when we find a method matching the given name, - // we assume it fits. 
- - // Also note that when we've got a receiver like &S, even if the method we - // find in the end takes &self, we still do the autoderef step (just as - // rustc does an autoderef and then autoref again). - let ty = InEnvironment { goal: ty.clone(), environment: env.env.clone() }; + // For method calls, rust first does any number of autoderef, and + // then one autoref (i.e. when the method takes &self or &mut self). + // Note that when we've got a receiver like &S, even if the method + // we find in the end takes &self, we still do the autoderef step + // (just as rustc does an autoderef and then autoref again). // We have to be careful about the order we're looking at candidates // in here. Consider the case where we're resolving `x.clone()` @@ -568,29 +639,31 @@ pub fn iterate_method_candidates_dyn( // the methods by autoderef order of *receiver types*, not *self // types*. - let deref_chain = autoderef_method_receiver(db, krate, ty); - let mut deref_chains = stdx::slice_tails(&deref_chain); + let mut table = InferenceTable::new(db, env.clone()); + let ty = table.instantiate_canonical(ty.clone()); + let (deref_chain, adj) = autoderef_method_receiver(&mut table, ty); + let deref_chains = stdx::slice_tails(&deref_chain); - deref_chains.try_for_each(|deref_chain| { + let result = deref_chains.zip(adj).try_for_each(|(deref_chain, adj)| { iterate_method_candidates_with_autoref( deref_chain, + adj, db, env.clone(), - krate, traits_in_scope, visible_from_module, name, callback, ) - }) + }); + result } LookupMode::Path => { // No autoderef for path lookups iterate_method_candidates_for_self_ty( ty, db, - env, - krate, + env.clone(), traits_in_scope, visible_from_module, name, @@ -602,27 +675,27 @@ pub fn iterate_method_candidates_dyn( fn iterate_method_candidates_with_autoref( deref_chain: &[Canonical], + first_adjustment: ReceiverAdjustments, db: &dyn HirDatabase, env: Arc, - krate: CrateId, traits_in_scope: &FxHashSet, visible_from_module: VisibleFromModule, name: 
Option<&Name>, - mut callback: &mut dyn FnMut(&Canonical, AssocItemId) -> ControlFlow<()>, + mut callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>, ) -> ControlFlow<()> { let (receiver_ty, rest) = match deref_chain.split_first() { - Some((rec, rest)) => (rec.clone(), rest), + Some((rec, rest)) => (rec, rest), None => { never!("received empty deref-chain"); return ControlFlow::Break(()); } }; iterate_method_candidates_by_receiver( - &receiver_ty, + receiver_ty, + first_adjustment.clone(), &rest, db, env.clone(), - krate, traits_in_scope, visible_from_module, name, @@ -630,17 +703,17 @@ fn iterate_method_candidates_with_autoref( )?; let refed = Canonical { - binders: receiver_ty.binders.clone(), value: TyKind::Ref(Mutability::Not, static_lifetime(), receiver_ty.value.clone()) .intern(Interner), + binders: receiver_ty.binders.clone(), }; iterate_method_candidates_by_receiver( &refed, + first_adjustment.with_autoref(Mutability::Not), deref_chain, db, env.clone(), - krate, traits_in_scope, visible_from_module, name, @@ -648,16 +721,17 @@ fn iterate_method_candidates_with_autoref( )?; let ref_muted = Canonical { - binders: receiver_ty.binders, - value: TyKind::Ref(Mutability::Mut, static_lifetime(), receiver_ty.value).intern(Interner), + value: TyKind::Ref(Mutability::Mut, static_lifetime(), receiver_ty.value.clone()) + .intern(Interner), + binders: receiver_ty.binders.clone(), }; iterate_method_candidates_by_receiver( &ref_muted, + first_adjustment.with_autoref(Mutability::Mut), deref_chain, db, - env, - krate, + env.clone(), traits_in_scope, visible_from_module, name, @@ -667,14 +741,14 @@ fn iterate_method_candidates_with_autoref( fn iterate_method_candidates_by_receiver( receiver_ty: &Canonical, + receiver_adjustments: ReceiverAdjustments, rest_of_deref_chain: &[Canonical], db: &dyn HirDatabase, env: Arc, - krate: CrateId, traits_in_scope: &FxHashSet, visible_from_module: VisibleFromModule, name: Option<&Name>, - mut callback: &mut dyn 
FnMut(&Canonical, AssocItemId) -> ControlFlow<()>, + mut callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>, ) -> ControlFlow<()> { // We're looking for methods with *receiver* type receiver_ty. These could // be found in any of the derefs of receiver_ty, so we have to go through @@ -686,7 +760,7 @@ fn iterate_method_candidates_by_receiver( env.clone(), name, Some(receiver_ty), - krate, + Some(receiver_adjustments.clone()), visible_from_module, &mut callback, )? @@ -697,10 +771,10 @@ fn iterate_method_candidates_by_receiver( self_ty, db, env.clone(), - krate, traits_in_scope, name, Some(receiver_ty), + Some(receiver_adjustments.clone()), &mut callback, )? } @@ -712,11 +786,10 @@ fn iterate_method_candidates_for_self_ty( self_ty: &Canonical, db: &dyn HirDatabase, env: Arc, - krate: CrateId, traits_in_scope: &FxHashSet, visible_from_module: VisibleFromModule, name: Option<&Name>, - mut callback: &mut dyn FnMut(&Canonical, AssocItemId) -> ControlFlow<()>, + mut callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>, ) -> ControlFlow<()> { iterate_inherent_methods( self_ty, @@ -724,24 +797,24 @@ fn iterate_method_candidates_for_self_ty( env.clone(), name, None, - krate, + None, visible_from_module, &mut callback, )?; - iterate_trait_method_candidates(self_ty, db, env, krate, traits_in_scope, name, None, callback) + iterate_trait_method_candidates(self_ty, db, env, traits_in_scope, name, None, None, callback) } fn iterate_trait_method_candidates( self_ty: &Canonical, db: &dyn HirDatabase, env: Arc, - krate: CrateId, traits_in_scope: &FxHashSet, name: Option<&Name>, receiver_ty: Option<&Canonical>, - callback: &mut dyn FnMut(&Canonical, AssocItemId) -> ControlFlow<()>, + receiver_adjustments: Option, + callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>, ) -> ControlFlow<()> { - let receiver_is_array = matches!(self_ty.value.kind(Interner), chalk_ir::TyKind::Array(..)); + let self_is_array = 
matches!(self_ty.value.kind(Interner), chalk_ir::TyKind::Array(..)); // if ty is `dyn Trait`, the trait doesn't need to be in scope let inherent_trait = self_ty.value.dyn_trait().into_iter().flat_map(|t| all_super_traits(db.upcast(), t)); @@ -763,10 +836,10 @@ fn iterate_trait_method_candidates( // 2021. // This is to make `[a].into_iter()` not break code with the new `IntoIterator` impl for // arrays. - if data.skip_array_during_method_dispatch && receiver_is_array { + if data.skip_array_during_method_dispatch && self_is_array { // FIXME: this should really be using the edition of the method name's span, in case it // comes from a macro - if db.crate_graph()[krate].edition < Edition::Edition2021 { + if db.crate_graph()[env.krate].edition < Edition::Edition2021 { continue; } } @@ -782,14 +855,13 @@ fn iterate_trait_method_candidates( continue; } if !known_implemented { - let goal = generic_implements_goal(db, env.clone(), t, self_ty.clone()); - if db.trait_solve(krate, goal.cast(Interner)).is_none() { + let goal = generic_implements_goal(db, env.clone(), t, self_ty); + if db.trait_solve(env.krate, goal.cast(Interner)).is_none() { continue 'traits; } } known_implemented = true; - // FIXME: we shouldn't be ignoring the binders here - callback(self_ty, item)? 
+ callback(receiver_adjustments.clone().unwrap_or_default(), item)?; } } ControlFlow::Continue(()) @@ -824,11 +896,11 @@ fn iterate_inherent_methods( env: Arc, name: Option<&Name>, receiver_ty: Option<&Canonical>, - krate: CrateId, + receiver_adjustments: Option, visible_from_module: VisibleFromModule, - callback: &mut dyn FnMut(&Canonical, AssocItemId) -> ControlFlow<()>, + callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>, ) -> ControlFlow<()> { - let def_crates = match def_crates(db, &self_ty.value, krate) { + let def_crates = match def_crates(db, &self_ty.value, env.krate) { Some(k) => k, None => return ControlFlow::Continue(()), }; @@ -848,6 +920,7 @@ fn iterate_inherent_methods( env.clone(), name, receiver_ty, + receiver_adjustments.clone(), module, callback, )?; @@ -856,7 +929,17 @@ fn iterate_inherent_methods( for krate in def_crates { let impls = db.inherent_impls_in_crate(krate); - impls_for_self_ty(&impls, self_ty, db, env.clone(), name, receiver_ty, module, callback)?; + impls_for_self_ty( + &impls, + self_ty, + db, + env.clone(), + name, + receiver_ty, + receiver_adjustments.clone(), + module, + callback, + )?; } return ControlFlow::Continue(()); @@ -867,8 +950,9 @@ fn iterate_inherent_methods( env: Arc, name: Option<&Name>, receiver_ty: Option<&Canonical>, + receiver_adjustments: Option, visible_from_module: Option, - callback: &mut dyn FnMut(&Canonical, AssocItemId) -> ControlFlow<()>, + callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>, ) -> ControlFlow<()> { let impls_for_self_ty = filter_inherent_impls_for_self_ty(impls, &self_ty.value); for &impl_def in impls_for_self_ty { @@ -889,33 +973,32 @@ fn iterate_inherent_methods( // already happens in `is_valid_candidate` above; if not, we // check it here if receiver_ty.is_none() - && inherent_impl_substs(db, env.clone(), impl_def, self_ty).is_none() + && inherent_impl_substs(db, env.clone(), impl_def, &self_ty).is_none() { 
cov_mark::hit!(impl_self_type_match_without_receiver); continue; } - let receiver_ty = receiver_ty.unwrap_or(self_ty); - callback(receiver_ty, item)?; + callback(receiver_adjustments.clone().unwrap_or_default(), item)?; } } ControlFlow::Continue(()) } } -/// Returns the self type for the index trait call. +/// Returns the receiver type for the index trait call. pub fn resolve_indexing_op( db: &dyn HirDatabase, - ty: &Canonical, env: Arc, - krate: CrateId, + ty: Canonical, index_trait: TraitId, -) -> Option> { - let ty = InEnvironment { goal: ty.clone(), environment: env.env.clone() }; - let deref_chain = autoderef_method_receiver(db, krate, ty); - for ty in deref_chain { - let goal = generic_implements_goal(db, env.clone(), index_trait, ty.clone()); - if db.trait_solve(krate, goal.cast(Interner)).is_some() { - return Some(ty); +) -> Option { + let mut table = InferenceTable::new(db, env.clone()); + let ty = table.instantiate_canonical(ty); + let (deref_chain, adj) = autoderef_method_receiver(&mut table, ty); + for (ty, adj) in deref_chain.into_iter().zip(adj) { + let goal = generic_implements_goal(db, env.clone(), index_trait, &ty); + if db.trait_solve(env.krate, goal.cast(Interner)).is_some() { + return Some(adj); } } None @@ -1067,11 +1150,10 @@ pub fn implements_trait( ty: &Canonical, db: &dyn HirDatabase, env: Arc, - krate: CrateId, trait_: TraitId, ) -> bool { - let goal = generic_implements_goal(db, env, trait_, ty.clone()); - let solution = db.trait_solve(krate, goal.cast(Interner)); + let goal = generic_implements_goal(db, env.clone(), trait_, &ty); + let solution = db.trait_solve(env.krate, goal.cast(Interner)); solution.is_some() } @@ -1080,11 +1162,10 @@ pub fn implements_trait_unique( ty: &Canonical, db: &dyn HirDatabase, env: Arc, - krate: CrateId, trait_: TraitId, ) -> bool { - let goal = generic_implements_goal(db, env, trait_, ty.clone()); - let solution = db.trait_solve(krate, goal.cast(Interner)); + let goal = generic_implements_goal(db, 
env.clone(), trait_, &ty); + let solution = db.trait_solve(env.krate, goal.cast(Interner)); matches!(solution, Some(crate::Solution::Unique(_))) } @@ -1095,11 +1176,11 @@ fn generic_implements_goal( db: &dyn HirDatabase, env: Arc, trait_: TraitId, - self_ty: Canonical, + self_ty: &Canonical, ) -> Canonical> { let mut kinds = self_ty.binders.interned().to_vec(); let trait_ref = TyBuilder::trait_ref(db, trait_) - .push(self_ty.value) + .push(self_ty.value.clone()) .fill_with_bound_vars(DebruijnIndex::INNERMOST, kinds.len()) .build(); kinds.extend( @@ -1117,17 +1198,27 @@ fn generic_implements_goal( } fn autoderef_method_receiver( - db: &dyn HirDatabase, - krate: CrateId, - ty: InEnvironment>, -) -> Vec> { - let mut deref_chain: Vec<_> = autoderef::autoderef(db, Some(krate), ty).collect(); + table: &mut InferenceTable, + ty: Ty, +) -> (Vec>, Vec) { + let (mut deref_chain, mut adjustments): (Vec<_>, Vec<_>) = (Vec::new(), Vec::new()); + let mut autoderef = autoderef::Autoderef::new(table, ty); + while let Some((ty, derefs)) = autoderef.next() { + deref_chain.push(autoderef.table.canonicalize(ty).value); + adjustments.push(ReceiverAdjustments { + autoref: None, + autoderefs: derefs, + unsize_array: false, + }); + } // As a last step, we can do array unsizing (that's the only unsizing that rustc does for method receivers!) 
- if let Some(TyKind::Array(parameters, _)) = deref_chain.last().map(|ty| ty.value.kind(Interner)) - { - let kinds = deref_chain.last().unwrap().binders.clone(); + if let (Some((TyKind::Array(parameters, _), binders)), Some(adj)) = ( + deref_chain.last().map(|ty| (ty.value.kind(Interner), ty.binders.clone())), + adjustments.last().cloned(), + ) { let unsized_ty = TyKind::Slice(parameters.clone()).intern(Interner); - deref_chain.push(Canonical { value: unsized_ty, binders: kinds }) + deref_chain.push(Canonical { value: unsized_ty, binders }); + adjustments.push(ReceiverAdjustments { unsize_array: true, ..adj }); } - deref_chain + (deref_chain, adjustments) } diff --git a/crates/hir_ty/src/tests.rs b/crates/hir_ty/src/tests.rs index 29250dca00cc..7385da56622d 100644 --- a/crates/hir_ty/src/tests.rs +++ b/crates/hir_ty/src/tests.rs @@ -100,6 +100,7 @@ fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_sour .trim_start_matches("adjustments: ") .split(',') .map(|it| it.trim().to_string()) + .filter(|it| !it.is_empty()) .collect(), ); } else { diff --git a/crates/hir_ty/src/tests/coercion.rs b/crates/hir_ty/src/tests/coercion.rs index dd3b86f05033..c0dddb608ea3 100644 --- a/crates/hir_ty/src/tests/coercion.rs +++ b/crates/hir_ty/src/tests/coercion.rs @@ -242,6 +242,45 @@ fn test() { ); } +#[test] +fn coerce_autoderef_implication_1() { + check_no_mismatches( + r" +//- minicore: deref +struct Foo; +impl core::ops::Deref for Foo { type Target = (); } + +fn takes_ref_foo(x: &Foo) {} +fn test() { + let foo = Foo; + //^^^ type: Foo<{unknown}> + takes_ref_foo(&foo); + + let foo = Foo; + //^^^ type: Foo + let _: &() = &foo; +}", + ); +} + +#[test] +fn coerce_autoderef_implication_2() { + check( + r" +//- minicore: deref +struct Foo; +impl core::ops::Deref for Foo { type Target = (); } + +fn takes_ref_foo(x: &Foo) {} +fn test() { + let foo = Foo; + //^^^ type: Foo<{unknown}> + let _: &u32 = &Foo; + //^^^^ expected &u32, got &Foo<{unknown}> +}", + ); +} + 
#[test] fn closure_return_coerce() { check_no_mismatches( diff --git a/crates/hir_ty/src/tests/macros.rs b/crates/hir_ty/src/tests/macros.rs index a61175f27335..344e7293c593 100644 --- a/crates/hir_ty/src/tests/macros.rs +++ b/crates/hir_ty/src/tests/macros.rs @@ -190,7 +190,6 @@ fn expr_macro_def_expanded_in_various_places() { !0..6 '1isize': isize !0..6 '1isize': isize !0..6 '1isize': isize - !0..6 '1isize': isize 39..442 '{ ...!(); }': () 73..94 'spam!(...am!())': {unknown} 100..119 'for _ ...!() {}': () @@ -198,6 +197,7 @@ fn expr_macro_def_expanded_in_various_places() { 117..119 '{}': () 124..134 '|| spam!()': || -> isize 140..156 'while ...!() {}': () + 146..153 'spam!()': bool 154..156 '{}': () 161..174 'break spam!()': ! 180..194 'return spam!()': ! @@ -271,7 +271,6 @@ fn expr_macro_rules_expanded_in_various_places() { !0..6 '1isize': isize !0..6 '1isize': isize !0..6 '1isize': isize - !0..6 '1isize': isize 53..456 '{ ...!(); }': () 87..108 'spam!(...am!())': {unknown} 114..133 'for _ ...!() {}': () @@ -279,6 +278,7 @@ fn expr_macro_rules_expanded_in_various_places() { 131..133 '{}': () 138..148 '|| spam!()': || -> isize 154..170 'while ...!() {}': () + 160..167 'spam!()': bool 168..170 '{}': () 175..188 'break spam!()': ! 194..208 'return spam!()': ! 
diff --git a/crates/hir_ty/src/tests/method_resolution.rs b/crates/hir_ty/src/tests/method_resolution.rs index 9c0c00da3b1d..c118ae24cfe3 100644 --- a/crates/hir_ty/src/tests/method_resolution.rs +++ b/crates/hir_ty/src/tests/method_resolution.rs @@ -1460,3 +1460,121 @@ fn main() { "#, ); } + +#[test] +fn deref_fun_1() { + check_types( + r#" +//- minicore: deref + +struct A(T, U); +struct B(T); +struct C(T); + +impl core::ops::Deref for A, u32> { + type Target = B; + fn deref(&self) -> &B { &self.0 } +} +impl core::ops::Deref for B { + type Target = C; + fn deref(&self) -> &C { loop {} } +} + +impl C { + fn thing(&self) -> T { self.0 } +} + +fn make() -> T { loop {} } + +fn test() { + let a1 = A(make(), make()); + let _: usize = (*a1).0; + a1; + //^^ A, u32> + + let a2 = A(make(), make()); + a2.thing(); + //^^^^^^^^^^ isize + a2; + //^^ A, u32> +} +"#, + ); +} + +#[test] +fn deref_fun_2() { + check_types( + r#" +//- minicore: deref + +struct A(T, U); +struct B(T); +struct C(T); + +impl core::ops::Deref for A, u32> { + type Target = B; + fn deref(&self) -> &B { &self.0 } +} +impl core::ops::Deref for B { + type Target = C; + fn deref(&self) -> &C { loop {} } +} + +impl core::ops::Deref for A, i32> { + type Target = C; + fn deref(&self) -> &C { &self.0 } +} + +impl C { + fn thing(&self) -> T { self.0 } +} + +fn make() -> T { loop {} } + +fn test() { + let a1 = A(make(), 1u32); + a1.thing(); + a1; + //^^ A, u32> + + let a2 = A(make(), 1i32); + let _: &str = a2.thing(); + a2; + //^^ A, i32> +} +"#, + ); +} + +#[test] +fn receiver_adjustment_autoref() { + check( + r#" +struct Foo; +impl Foo { + fn foo(&self) {} +} +fn test() { + Foo.foo(); + //^^^ adjustments: Borrow(Ref(Not)) + (&Foo).foo(); + // ^^^^ adjustments: , +} +"#, + ); +} + +#[test] +fn receiver_adjustment_unsize_array() { + // FIXME not quite correct + check( + r#" +//- minicore: slice +fn test() { + let a = [1, 2, 3]; + a.len(); +} //^ adjustments: Pointer(Unsize), Borrow(Ref(Not)) +"#, + ); +} diff --git 
a/crates/hir_ty/src/tests/patterns.rs b/crates/hir_ty/src/tests/patterns.rs index 5b08d65c4671..acdd8f50efb8 100644 --- a/crates/hir_ty/src/tests/patterns.rs +++ b/crates/hir_ty/src/tests/patterns.rs @@ -55,6 +55,7 @@ fn infer_pattern() { 139..140 'g': {unknown} 143..144 'e': {unknown} 157..204 'if let... }': () + 160..175 'let [val] = opt': bool 164..169 '[val]': [{unknown}] 165..168 'val': {unknown} 172..175 'opt': [{unknown}] @@ -62,6 +63,7 @@ fn infer_pattern() { 190..191 'h': {unknown} 194..197 'val': {unknown} 210..236 'if let...rue {}': () + 213..233 'let x ... &true': bool 217..225 'x @ true': &bool 221..225 'true': bool 221..225 'true': bool @@ -111,36 +113,42 @@ fn infer_literal_pattern() { 37..38 'x': &i32 46..208 '{ ...) {} }': () 52..75 'if let...y() {}': () + 55..72 'let "f... any()': bool 59..64 '"foo"': &str 59..64 '"foo"': &str 67..70 'any': fn any<&str>() -> &str 67..72 'any()': &str 73..75 '{}': () 80..99 'if let...y() {}': () + 83..96 'let 1 = any()': bool 87..88 '1': i32 87..88 '1': i32 91..94 'any': fn any() -> i32 91..96 'any()': i32 97..99 '{}': () 104..126 'if let...y() {}': () + 107..123 'let 1u... any()': bool 111..115 '1u32': u32 111..115 '1u32': u32 118..121 'any': fn any() -> u32 118..123 'any()': u32 124..126 '{}': () 131..153 'if let...y() {}': () + 134..150 'let 1f... any()': bool 138..142 '1f32': f32 138..142 '1f32': f32 145..148 'any': fn any() -> f32 145..150 'any()': f32 151..153 '{}': () 158..179 'if let...y() {}': () + 161..176 'let 1.0 = any()': bool 165..168 '1.0': f64 165..168 '1.0': f64 171..174 'any': fn any() -> f64 171..176 'any()': f64 177..179 '{}': () 184..206 'if let...y() {}': () + 187..203 'let tr... 
any()': bool 191..195 'true': bool 191..195 'true': bool 198..201 'any': fn any() -> bool @@ -163,10 +171,12 @@ fn infer_range_pattern() { 8..9 'x': &i32 17..75 '{ ...2 {} }': () 23..45 'if let...u32 {}': () + 26..42 'let 1....= 2u32': bool 30..35 '1..76': u32 38..42 '2u32': u32 43..45 '{}': () 50..73 'if let...u32 {}': () + 53..70 'let 1....= 2u32': bool 57..63 '1..=76': u32 66..70 '2u32': u32 71..73 '{}': () diff --git a/crates/hir_ty/src/tests/regression.rs b/crates/hir_ty/src/tests/regression.rs index 4a0a5d6fa1f4..b17e517ccb0a 100644 --- a/crates/hir_ty/src/tests/regression.rs +++ b/crates/hir_ty/src/tests/regression.rs @@ -1245,3 +1245,56 @@ fn test() { "#]], ); } + +#[test] +fn while_loop_block_expr_iterable() { + check_infer( + r#" +fn test() { + while { true } { + let y = 0; + } +} + "#, + expect![[r#" + 10..59 '{ ... } }': () + 16..57 'while ... }': () + 22..30 '{ true }': bool + 24..28 'true': bool + 31..57 '{ ... }': () + 45..46 'y': i32 + 49..50 '0': i32 + "#]], + ); +} + +#[test] +fn bug_11242() { + // FIXME: wrong, should be u32 + check_types( + r#" +fn foo() +where + A: IntoIterator, + B: IntoIterator, +{ + let _x: ::Item; + // ^^ {unknown} +} + +pub trait Iterator { + type Item; +} + +pub trait IntoIterator { + type Item; + type IntoIter: Iterator; +} + +impl IntoIterator for I { + type Item = I::Item; + type IntoIter = I; +} +"#, + ); +} diff --git a/crates/hir_ty/src/tests/simple.rs b/crates/hir_ty/src/tests/simple.rs index f4d082ea8c6e..c11a70fa663b 100644 --- a/crates/hir_ty/src/tests/simple.rs +++ b/crates/hir_ty/src/tests/simple.rs @@ -2248,6 +2248,7 @@ fn generic_default_in_struct_literal() { 176..193 'Thing ...1i32 }': Thing 187..191 '1i32': i32 199..240 'if let... }': () + 202..221 'let Th... 
} = z': bool 206..217 'Thing { t }': Thing 214..215 't': i32 220..221 'z': Thing diff --git a/crates/hir_ty/src/tests/traits.rs b/crates/hir_ty/src/tests/traits.rs index c2669646e224..04d8b91e3598 100644 --- a/crates/hir_ty/src/tests/traits.rs +++ b/crates/hir_ty/src/tests/traits.rs @@ -540,6 +540,52 @@ fn test() { ); } +#[test] +fn infer_ops_index_field() { + check_types( + r#" +//- minicore: index +struct Bar; +struct Foo { + field: u32; +} + +impl core::ops::Index for Bar { + type Output = Foo; +} + +fn test() { + let a = Bar; + let b = a[1u32].field; + b; +} //^ u32 +"#, + ); +} + +#[test] +fn infer_ops_index_field_autoderef() { + check_types( + r#" +//- minicore: index +struct Bar; +struct Foo { + field: u32; +} + +impl core::ops::Index for Bar { + type Output = Foo; +} + +fn test() { + let a = Bar; + let b = (&a[1u32]).field; + b; +} //^ u32 +"#, + ); +} + #[test] fn infer_ops_index_int() { check_types( diff --git a/crates/hir_ty/src/traits.rs b/crates/hir_ty/src/traits.rs index bd280ba774d2..b139edbee945 100644 --- a/crates/hir_ty/src/traits.rs +++ b/crates/hir_ty/src/traits.rs @@ -40,8 +40,7 @@ fn create_chalk_solver() -> chalk_recursive::RecursiveSolver { #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct TraitEnvironment { pub krate: CrateId, - // When we're using Chalk's Ty we can make this a BTreeMap since it's Ord, - // but for now it's too annoying... 
+ // FIXME make this a BTreeMap pub(crate) traits_from_clauses: Vec<(Ty, TraitId)>, pub env: chalk_ir::Environment, } diff --git a/crates/ide/src/annotations.rs b/crates/ide/src/annotations.rs index a7c12c163619..986db75c610d 100644 --- a/crates/ide/src/annotations.rs +++ b/crates/ide/src/annotations.rs @@ -65,10 +65,10 @@ pub(crate) fn annotations( visit_file_defs(&Semantics::new(db), file_id, &mut |def| { let range = match def { Definition::Const(konst) if config.annotate_references => { - konst.source(db).and_then(|node| name_range(&node, file_id)) + konst.source(db).and_then(|node| name_range(db, node, file_id)) } Definition::Trait(trait_) if config.annotate_references || config.annotate_impls => { - trait_.source(db).and_then(|node| name_range(&node, file_id)) + trait_.source(db).and_then(|node| name_range(db, node, file_id)) } Definition::Adt(adt) => match adt { hir::Adt::Enum(enum_) => { @@ -77,7 +77,7 @@ pub(crate) fn annotations( .variants(db) .into_iter() .map(|variant| { - variant.source(db).and_then(|node| name_range(&node, file_id)) + variant.source(db).and_then(|node| name_range(db, node, file_id)) }) .filter_map(std::convert::identity) .for_each(|range| { @@ -91,14 +91,14 @@ pub(crate) fn annotations( }) } if config.annotate_references || config.annotate_impls { - enum_.source(db).and_then(|node| name_range(&node, file_id)) + enum_.source(db).and_then(|node| name_range(db, node, file_id)) } else { None } } _ => { if config.annotate_references || config.annotate_impls { - adt.source(db).and_then(|node| name_range(&node, file_id)) + adt.source(db).and_then(|node| name_range(db, node, file_id)) } else { None } @@ -131,13 +131,17 @@ pub(crate) fn annotations( }); } - fn name_range(node: &InFile, file_id: FileId) -> Option { - if node.file_id == file_id.into() { - node.value.name().map(|it| it.syntax().text_range()) - } else { - // Node is outside the file we are adding annotations to (e.g. macros). 
- None + fn name_range( + db: &RootDatabase, + node: InFile, + source_file_id: FileId, + ) -> Option { + if let Some(InFile { file_id, value }) = node.original_ast_node(db) { + if file_id == source_file_id.into() { + return value.name().map(|it| it.syntax().text_range()); + } } + None } }); diff --git a/crates/ide/src/expand_macro.rs b/crates/ide/src/expand_macro.rs index 32dbd9070b9e..7bb6b24a23de 100644 --- a/crates/ide/src/expand_macro.rs +++ b/crates/ide/src/expand_macro.rs @@ -3,8 +3,7 @@ use ide_db::{ helpers::{insert_whitespace_into_node::insert_ws_into, pick_best_token}, RootDatabase, }; -use itertools::Itertools; -use syntax::{ast, ted, AstNode, SyntaxKind, SyntaxNode}; +use syntax::{ast, ted, AstNode, NodeOrToken, SyntaxKind, SyntaxNode, T}; use crate::FilePosition; @@ -41,20 +40,28 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option< // struct Bar; // ``` - let derive = sema.descend_into_macros(tok.clone()).iter().find_map(|descended| { - let attr = descended.ancestors().find_map(ast::Attr::cast)?; - let (path, tt) = attr.as_simple_call()?; - if path == "derive" { - let mut tt = tt.syntax().children_with_tokens().skip(1).join(""); - tt.pop(); - let expansions = sema.expand_derive_macro(&attr)?; - Some(ExpandedMacro { - name: tt, - expansion: expansions.into_iter().map(insert_ws_into).join(""), - }) - } else { - None + let derive = sema.descend_into_macros(tok.clone()).into_iter().find_map(|descended| { + let hir_file = sema.hir_file_for(&descended.parent()?); + if !hir_file.is_derive_attr_pseudo_expansion(db) { + return None; } + + let name = descended.ancestors().filter_map(ast::Path::cast).last()?.to_string(); + // up map out of the #[derive] expansion + let token = hir::InFile::new(hir_file, descended).upmap(db)?.value; + let attr = token.ancestors().find_map(ast::Attr::cast)?; + let expansions = sema.expand_derive_macro(&attr)?; + let idx = attr + .token_tree()? 
+ .token_trees_and_tokens() + .filter_map(NodeOrToken::into_token) + .take_while(|it| it != &token) + .filter(|it| it.kind() == T![,]) + .count(); + Some(ExpandedMacro { + name, + expansion: expansions.get(idx).cloned().map(insert_ws_into)?.to_string(), + }) }); if derive.is_some() { @@ -372,9 +379,20 @@ struct Foo {} struct Foo {} "#, expect![[r#" - Copy, Clone + Copy impl < >core::marker::Copy for Foo< >{} + "#]], + ); + check( + r#" +//- minicore: copy, clone, derive + +#[derive(Copy, Cl$0one)] +struct Foo {} +"#, + expect![[r#" + Clone impl < >core::clone::Clone for Foo< >{} "#]], diff --git a/crates/ide/src/highlight_related.rs b/crates/ide/src/highlight_related.rs index b6d9e4021d9b..f886ff7837b4 100644 --- a/crates/ide/src/highlight_related.rs +++ b/crates/ide/src/highlight_related.rs @@ -2,7 +2,9 @@ use hir::Semantics; use ide_db::{ base_db::{FileId, FilePosition}, defs::{Definition, IdentClass}, - helpers::{for_each_break_expr, for_each_tail_expr, node_ext::walk_expr, pick_best_token}, + helpers::{ + for_each_break_and_continue_expr, for_each_tail_expr, node_ext::walk_expr, pick_best_token, + }, search::{FileReference, ReferenceCategory, SearchScope}, RootDatabase, }; @@ -10,7 +12,7 @@ use rustc_hash::FxHashSet; use syntax::{ ast::{self, HasLoopBody}, match_ast, AstNode, - SyntaxKind::{IDENT, INT_NUMBER}, + SyntaxKind::{self, IDENT, INT_NUMBER}, SyntaxNode, SyntaxToken, TextRange, T, }; @@ -66,7 +68,9 @@ pub(crate) fn highlight_related( T![for] if config.break_points && token.parent().and_then(ast::ForExpr::cast).is_some() => { highlight_break_points(token) } - T![break] | T![loop] | T![while] if config.break_points => highlight_break_points(token), + T![break] | T![loop] | T![while] | T![continue] if config.break_points => { + highlight_break_points(token) + } _ if config.references => highlight_references(sema, &syntax, token, file_id), _ => None, } @@ -187,6 +191,7 @@ fn highlight_exit_points( fn highlight_break_points(token: SyntaxToken) -> Option> { 
fn hl( + cursor_token_kind: SyntaxKind, token: Option, label: Option, body: Option, @@ -197,11 +202,23 @@ fn highlight_break_points(token: SyntaxToken) -> Option> { label.as_ref().map(|it| it.syntax().text_range()), ); highlights.extend(range.map(|range| HighlightedRange { category: None, range })); - for_each_break_expr(label, body, &mut |break_| { - let range = cover_range( - break_.break_token().map(|it| it.text_range()), - break_.lifetime().map(|it| it.syntax().text_range()), - ); + for_each_break_and_continue_expr(label, body, &mut |expr| { + let range: Option = match (cursor_token_kind, expr) { + (T![for] | T![while] | T![loop] | T![break], ast::Expr::BreakExpr(break_)) => { + cover_range( + break_.break_token().map(|it| it.text_range()), + break_.lifetime().map(|it| it.syntax().text_range()), + ) + } + ( + T![for] | T![while] | T![loop] | T![continue], + ast::Expr::ContinueExpr(continue_), + ) => cover_range( + continue_.continue_token().map(|it| it.text_range()), + continue_.lifetime().map(|it| it.syntax().text_range()), + ), + _ => None, + }; highlights.extend(range.map(|range| HighlightedRange { category: None, range })); }); Some(highlights) @@ -210,6 +227,7 @@ fn highlight_break_points(token: SyntaxToken) -> Option> { let lbl = match_ast! 
{ match parent { ast::BreakExpr(b) => b.lifetime(), + ast::ContinueExpr(c) => c.lifetime(), ast::LoopExpr(l) => l.label().and_then(|it| it.lifetime()), ast::ForExpr(f) => f.label().and_then(|it| it.lifetime()), ast::WhileExpr(w) => w.label().and_then(|it| it.lifetime()), @@ -224,19 +242,29 @@ fn highlight_break_points(token: SyntaxToken) -> Option> { } None => true, }; + let token_kind = token.kind(); for anc in token.ancestors().flat_map(ast::Expr::cast) { return match anc { - ast::Expr::LoopExpr(l) if label_matches(l.label()) => { - hl(l.loop_token(), l.label(), l.loop_body().and_then(|it| it.stmt_list())) - } - ast::Expr::ForExpr(f) if label_matches(f.label()) => { - hl(f.for_token(), f.label(), f.loop_body().and_then(|it| it.stmt_list())) - } - ast::Expr::WhileExpr(w) if label_matches(w.label()) => { - hl(w.while_token(), w.label(), w.loop_body().and_then(|it| it.stmt_list())) - } + ast::Expr::LoopExpr(l) if label_matches(l.label()) => hl( + token_kind, + l.loop_token(), + l.label(), + l.loop_body().and_then(|it| it.stmt_list()), + ), + ast::Expr::ForExpr(f) if label_matches(f.label()) => hl( + token_kind, + f.for_token(), + f.label(), + f.loop_body().and_then(|it| it.stmt_list()), + ), + ast::Expr::WhileExpr(w) if label_matches(w.label()) => hl( + token_kind, + w.while_token(), + w.label(), + w.loop_body().and_then(|it| it.stmt_list()), + ), ast::Expr::BlockExpr(e) if e.label().is_some() && label_matches(e.label()) => { - hl(None, e.label(), e.stmt_list()) + hl(token_kind, None, e.label(), e.stmt_list()) } _ => continue, }; @@ -804,6 +832,115 @@ fn foo() { ); } + #[test] + fn test_hl_break_for_but_not_continue() { + check( + r#" +fn foo() { + 'outer: for _ in () { + // ^^^^^^^^^^^ + break; + // ^^^^^ + continue; + 'inner: for _ in () { + break; + continue; + 'innermost: for _ in () { + continue 'outer; + break 'outer; + // ^^^^^^^^^^^^ + continue 'inner; + break 'inner; + } + break$0 'outer; + // ^^^^^^^^^^^^ + continue 'outer; + break; + continue; + } + 
break; + // ^^^^^ + continue; + } +} +"#, + ); + } + + #[test] + fn test_hl_continue_for_but_not_break() { + check( + r#" +fn foo() { + 'outer: for _ in () { + // ^^^^^^^^^^^ + break; + continue; + // ^^^^^^^^ + 'inner: for _ in () { + break; + continue; + 'innermost: for _ in () { + continue 'outer; + // ^^^^^^^^^^^^^^^ + break 'outer; + continue 'inner; + break 'inner; + } + break 'outer; + continue$0 'outer; + // ^^^^^^^^^^^^^^^ + break; + continue; + } + break; + continue; + // ^^^^^^^^ + } +} +"#, + ); + } + + #[test] + fn test_hl_break_and_continue() { + check( + r#" +fn foo() { + 'outer: fo$0r _ in () { + // ^^^^^^^^^^^ + break; + // ^^^^^ + continue; + // ^^^^^^^^ + 'inner: for _ in () { + break; + continue; + 'innermost: for _ in () { + continue 'outer; + // ^^^^^^^^^^^^^^^ + break 'outer; + // ^^^^^^^^^^^^ + continue 'inner; + break 'inner; + } + break 'outer; + // ^^^^^^^^^^^^ + continue 'outer; + // ^^^^^^^^^^^^^^^ + break; + continue; + } + break; + // ^^^^^ + continue; + // ^^^^^^^^ + } +} +"#, + ); + } + #[test] fn test_hl_break_while() { check( diff --git a/crates/ide/src/hover/render.rs b/crates/ide/src/hover/render.rs index f94348ec581d..40a647c90b23 100644 --- a/crates/ide/src/hover/render.rs +++ b/crates/ide/src/hover/render.rs @@ -18,7 +18,7 @@ use syntax::{ algo, ast, display::{fn_as_proc_macro_label, macro_label}, match_ast, AstNode, Direction, - SyntaxKind::{CONDITION, LET_STMT}, + SyntaxKind::{LET_EXPR, LET_STMT}, SyntaxToken, T, }; @@ -480,11 +480,11 @@ fn local(db: &RootDatabase, it: hir::Local) -> Option { let is_mut = if it.is_mut(db) { "mut " } else { "" }; let desc = match it.source(db).value { Either::Left(ident) => { - let name = it.name(db).unwrap(); + let name = it.name(db); let let_kw = if ident .syntax() .parent() - .map_or(false, |p| p.kind() == LET_STMT || p.kind() == CONDITION) + .map_or(false, |p| p.kind() == LET_STMT || p.kind() == LET_EXPR) { "let " } else { diff --git a/crates/ide/src/hover/tests.rs 
b/crates/ide/src/hover/tests.rs index ed76c84ab47e..df0ca941c99d 100644 --- a/crates/ide/src/hover/tests.rs +++ b/crates/ide/src/hover/tests.rs @@ -15,6 +15,7 @@ fn check_hover_no_result(ra_fixture: &str) { assert!(hover.is_none(), "hover not expected but found: {:?}", hover.unwrap()); } +#[track_caller] fn check(ra_fixture: &str, expect: Expect) { let (analysis, position) = fixture::position(ra_fixture); let hover = analysis @@ -1310,6 +1311,60 @@ fn test_hover_function_show_qualifiers() { ); } +#[test] +fn test_hover_function_show_types() { + check( + r#"fn foo$0(a: i32, b:i32) -> i32 { 0 }"#, + expect![[r#" + *foo* + + ```rust + test + ``` + + ```rust + fn foo(a: i32, b: i32) -> i32 + ``` + "#]], + ); +} + +#[test] +fn test_hover_function_pointer_show_identifiers() { + check( + r#"type foo$0 = fn(a: i32, b: i32) -> i32;"#, + expect![[r#" + *foo* + + ```rust + test + ``` + + ```rust + type foo = fn(a: i32, b: i32) -> i32 + ``` + "#]], + ); +} + +#[test] +fn test_hover_function_pointer_no_identifier() { + check( + r#"type foo$0 = fn(i32, _: i32) -> i32;"#, + expect![[r#" + *foo* + + ```rust + test + ``` + + ```rust + type foo = fn(i32, i32) -> i32 + ``` + "#]], + ); +} + #[test] fn test_hover_trait_show_qualifiers() { check_actions( @@ -4528,3 +4583,33 @@ pub struct Foo; "##]], ); } + +#[test] +fn hover_dollar_crate() { + // $crate should be resolved to the right crate name. + + check( + r#" +//- /main.rs crate:main deps:dep +dep::m!(KONST$0); +//- /dep.rs crate:dep +#[macro_export] +macro_rules! 
m { + ( $name:ident ) => { const $name: $crate::Type = $crate::Type; }; +} + +pub struct Type; +"#, + expect![[r#" + *KONST* + + ```rust + main + ``` + + ```rust + const KONST: dep::Type = $crate::Type + ``` + "#]], + ); +} diff --git a/crates/ide/src/inlay_hints.rs b/crates/ide/src/inlay_hints.rs index cc304cb10a60..2ca756cbe044 100644 --- a/crates/ide/src/inlay_hints.rs +++ b/crates/ide/src/inlay_hints.rs @@ -243,7 +243,7 @@ fn is_named_constructor( let expr = match_ast! { match let_node { ast::LetStmt(it) => it.initializer(), - ast::Condition(it) => it.expr(), + ast::LetExpr(it) => it.expr(), _ => None, } }?; @@ -372,15 +372,10 @@ fn should_not_display_type_hint( match node { ast::LetStmt(it) => return it.ty().is_some(), ast::Param(it) => return it.ty().is_some(), - ast::MatchArm(_it) => return pat_is_enum_variant(db, bind_pat, pat_ty), - ast::IfExpr(it) => { - return it.condition().and_then(|condition| condition.pat()).is_some() - && pat_is_enum_variant(db, bind_pat, pat_ty); - }, - ast::WhileExpr(it) => { - return it.condition().and_then(|condition| condition.pat()).is_some() - && pat_is_enum_variant(db, bind_pat, pat_ty); - }, + ast::MatchArm(_) => return pat_is_enum_variant(db, bind_pat, pat_ty), + ast::LetExpr(_) => return pat_is_enum_variant(db, bind_pat, pat_ty), + ast::IfExpr(_) => return false, + ast::WhileExpr(_) => return false, ast::ForExpr(it) => { // We *should* display hint only if user provided "in {expr}" and we know the type of expr (and it's not unit). // Type of expr should be iterable. 
diff --git a/crates/ide/src/navigation_target.rs b/crates/ide/src/navigation_target.rs index dd4c17df29f7..834668960f36 100644 --- a/crates/ide/src/navigation_target.rs +++ b/crates/ide/src/navigation_target.rs @@ -390,10 +390,7 @@ impl ToNav for hir::Local { let FileRange { file_id, range: full_range } = InFile::new(file_id, node).original_file_range(db); - let name = match self.name(db) { - Some(it) => it.to_smol_str(), - None => "".into(), - }; + let name = self.name(db).to_smol_str(); let kind = if self.is_self(db) { SymbolKind::SelfParam } else if self.is_param(db) { diff --git a/crates/ide/src/references.rs b/crates/ide/src/references.rs index 5e6f0ef6a57d..c3ac84e58c6f 100644 --- a/crates/ide/src/references.rs +++ b/crates/ide/src/references.rs @@ -57,8 +57,6 @@ pub(crate) fn find_all_refs( let syntax = sema.parse(position.file_id).syntax().clone(); let make_searcher = |literal_search: bool| { move |def: Definition| { - let mut usages = - def.usages(sema).set_scope(search_scope.clone()).include_self_refs().all(); let declaration = match def { Definition::Module(module) => { Some(NavigationTarget::from_module_to_decl(sema.db, module)) @@ -72,6 +70,8 @@ pub(crate) fn find_all_refs( nav, } }); + let mut usages = + def.usages(sema).set_scope(search_scope.clone()).include_self_refs().all(); if literal_search { retain_adt_literal_usages(&mut usages, def, sema); } @@ -1535,4 +1535,83 @@ trait Trait { "#]], ) } + + // FIXME: import is classified as function + #[test] + fn attr() { + check( + r#" +//- proc_macros: identity +use proc_macros::identity; + +#[proc_macros::$0identity] +fn func() {} +"#, + expect![[r#" + identity Attribute FileId(1) 1..107 32..40 + + FileId(0) 43..51 + "#]], + ); + check( + r#" +#[proc_macro_attribute] +fn func$0() {} +"#, + expect![[r#" + func Attribute FileId(0) 0..36 27..31 + + (no references) + "#]], + ); + } + + // FIXME: import is classified as function + #[test] + fn proc_macro() { + check( + r#" +//- proc_macros: mirror +use 
proc_macros::mirror; + +mirror$0! {} +"#, + expect![[r#" + mirror Macro FileId(1) 1..77 22..28 + + FileId(0) 26..32 + "#]], + ) + } + + #[test] + fn derive() { + check( + r#" +//- proc_macros: derive_identity +//- minicore: derive +use proc_macros::DeriveIdentity; + +#[derive(proc_macros::DeriveIdentity$0)] +struct Foo; +"#, + expect![[r#" + derive_identity Derive FileId(2) 1..107 45..60 + + FileId(0) 17..31 + FileId(0) 56..70 + "#]], + ); + check( + r#" +#[proc_macro_derive(Derive, attributes(x))] +pub fn deri$0ve(_stream: TokenStream) -> TokenStream {} +"#, + expect![[r#" + derive Derive FileId(0) 0..97 51..57 + + (no references) + "#]], + ); + } } diff --git a/crates/ide/src/runnables.rs b/crates/ide/src/runnables.rs index 67062654d6e0..1507e7cdd079 100644 --- a/crates/ide/src/runnables.rs +++ b/crates/ide/src/runnables.rs @@ -426,8 +426,7 @@ fn module_def_doctest(db: &RootDatabase, def: Definition) -> Option { ty_args.format_with(", ", |ty, cb| cb(&ty.display(db))) ); } - format_to!(path, "::{}", def_name); - return Some(path); + return Some(format!(r#""{}::{}""#, path, def_name)); } } } @@ -966,7 +965,7 @@ impl Data { }, kind: DocTest { test_id: Path( - "Data::foo", + "\"Data::foo\"", ), }, cfg: None, @@ -1360,7 +1359,7 @@ impl Foo { }, kind: DocTest { test_id: Path( - "foo::Foo::foo", + "\"foo::Foo::foo\"", ), }, cfg: None, @@ -2066,7 +2065,7 @@ impl Foo { }, kind: DocTest { test_id: Path( - "Foo::t", + "\"Foo::t\"", ), }, cfg: None, diff --git a/crates/ide/src/syntax_highlighting.rs b/crates/ide/src/syntax_highlighting.rs index f09f291e96ab..1b2fc9c635d3 100644 --- a/crates/ide/src/syntax_highlighting.rs +++ b/crates/ide/src/syntax_highlighting.rs @@ -183,7 +183,8 @@ pub(crate) fn highlight( traverse( &mut hl, &sema, - InFile::new(file_id.into(), &root), + file_id, + &root, sema.scope(&root).krate(), range_to_highlight, syntactic_name_ref_highlighting, @@ -194,11 +195,13 @@ pub(crate) fn highlight( fn traverse( hl: &mut Highlights, sema: &Semantics, - root: 
InFile<&SyntaxNode>, + file_id: FileId, + root: &SyntaxNode, krate: Option, range_to_highlight: TextRange, syntactic_name_ref_highlighting: bool, ) { + let is_unlinked = sema.to_module_def(file_id).is_none(); let mut bindings_shadow_count: FxHashMap = FxHashMap::default(); let mut current_macro_call: Option = None; @@ -209,7 +212,7 @@ fn traverse( // Walk all nodes, keeping track of whether we are inside a macro or not. // If in macro, expand it first and highlight the expanded code. - for event in root.value.preorder_with_tokens() { + for event in root.preorder_with_tokens() { let range = match &event { WalkEvent::Enter(it) | WalkEvent::Leave(it) => it.text_range(), }; @@ -237,6 +240,20 @@ fn traverse( continue; } Some(item) if sema.is_attr_macro_call(&item) => current_attr_call = Some(item), + Some(item) if current_attr_call.is_none() => { + let adt = match item { + ast::Item::Enum(it) => Some(ast::Adt::Enum(it)), + ast::Item::Struct(it) => Some(ast::Adt::Struct(it)), + ast::Item::Union(it) => Some(ast::Adt::Union(it)), + _ => None, + }; + match adt { + Some(adt) if sema.is_derive_annotated(&adt) => { + current_attr_call = Some(adt.into()); + } + _ => (), + } + } None if ast::Attr::can_cast(node.kind()) => inside_attribute = true, _ => (), }, @@ -269,7 +286,7 @@ fn traverse( WalkEvent::Enter(it) => it, WalkEvent::Leave(NodeOrToken::Token(_)) => continue, WalkEvent::Leave(NodeOrToken::Node(node)) => { - inject::doc_comment(hl, sema, root.with_value(&node)); + inject::doc_comment(hl, sema, InFile::new(file_id.into(), &node)); continue; } }; @@ -361,9 +378,14 @@ fn traverse( syntactic_name_ref_highlighting, node, ), - NodeOrToken::Token(token) => highlight::token(sema, krate, token).zip(Some(None)), + NodeOrToken::Token(token) => highlight::token(sema, token).zip(Some(None)), }; if let Some((mut highlight, binding_hash)) = element { + if is_unlinked && highlight.tag == HlTag::UnresolvedReference { + // do not emit unresolved references if the file is unlinked + // 
let the editor do its highlighting for these tokens instead + continue; + } if inside_attribute { highlight |= HlMod::Attribute } diff --git a/crates/ide/src/syntax_highlighting/highlight.rs b/crates/ide/src/syntax_highlighting/highlight.rs index c869db3b8b7d..8ad27b1fdcd0 100644 --- a/crates/ide/src/syntax_highlighting/highlight.rs +++ b/crates/ide/src/syntax_highlighting/highlight.rs @@ -18,11 +18,7 @@ use crate::{ Highlight, HlMod, HlTag, }; -pub(super) fn token( - sema: &Semantics, - krate: Option, - token: SyntaxToken, -) -> Option { +pub(super) fn token(sema: &Semantics, token: SyntaxToken) -> Option { if let Some(comment) = ast::Comment::cast(token.clone()) { let h = HlTag::Comment; return Some(match comment.kind().doc { @@ -39,17 +35,10 @@ pub(super) fn token( INT_NUMBER | FLOAT_NUMBER => HlTag::NumericLiteral.into(), BYTE => HlTag::ByteLiteral.into(), CHAR => HlTag::CharLiteral.into(), - IDENT => { - let tt = ast::TokenTree::cast(token.parent()?)?; - let ident = ast::Ident::cast(token)?; + IDENT if token.parent().and_then(ast::TokenTree::cast).is_some() => { // from this point on we are inside a token tree, this only happens for identifiers // that were not mapped down into macro invocations - (|| { - let attr = tt.parent_meta()?.parent_attr()?; - let res = sema.resolve_derive_ident(&attr, &ident)?; - Some(highlight_def(sema, krate, Definition::from(res))) - })() - .unwrap_or_else(|| HlTag::None.into()) + HlTag::None.into() } p if p.is_punct() => punctuation(sema, token, p), k if k.is_keyword() => keyword(sema, token, k)?, @@ -246,10 +235,9 @@ fn highlight_name_ref( let mut h = match name_class { NameRefClass::Definition(def) => { if let Definition::Local(local) = &def { - if let Some(name) = local.name(db) { - let shadow_count = bindings_shadow_count.entry(name.clone()).or_default(); - *binding_hash = Some(calc_binding_hash(&name, *shadow_count)) - } + let name = local.name(db); + let shadow_count = bindings_shadow_count.entry(name.clone()).or_default(); 
+ *binding_hash = Some(calc_binding_hash(&name, *shadow_count)) }; let mut h = highlight_def(sema, krate, def); @@ -299,11 +287,10 @@ fn highlight_name( ) -> Highlight { let name_kind = NameClass::classify(sema, &name); if let Some(NameClass::Definition(Definition::Local(local))) = &name_kind { - if let Some(name) = local.name(sema.db) { - let shadow_count = bindings_shadow_count.entry(name.clone()).or_default(); - *shadow_count += 1; - *binding_hash = Some(calc_binding_hash(&name, *shadow_count)) - } + let name = local.name(sema.db); + let shadow_count = bindings_shadow_count.entry(name.clone()).or_default(); + *shadow_count += 1; + *binding_hash = Some(calc_binding_hash(&name, *shadow_count)) }; match name_kind { Some(NameClass::Definition(def)) => { diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_assoc_functions.html b/crates/ide/src/syntax_highlighting/test_data/highlight_assoc_functions.html index 645e6c589dde..b035e786d352 100644 --- a/crates/ide/src/syntax_highlighting/test_data/highlight_assoc_functions.html +++ b/crates/ide/src/syntax_highlighting/test_data/highlight_assoc_functions.html @@ -58,5 +58,4 @@ impl t for foo { pub fn is_static() {} pub fn is_not_static(&self) {} -} - \ No newline at end of file +} \ No newline at end of file diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_attributes.html b/crates/ide/src/syntax_highlighting/test_data/highlight_attributes.html new file mode 100644 index 000000000000..9fe2b50cde79 --- /dev/null +++ b/crates/ide/src/syntax_highlighting/test_data/highlight_attributes.html @@ -0,0 +1,57 @@ + + +
#[allow(dead_code)]
+#[rustfmt::skip]
+#[proc_macros::identity]
+#[derive(Copy)]
+/// This is a doc comment
+// This is a normal comment
+/// This is a doc comment
+#[derive(Copy)]
+// This is another normal comment
+/// This is another doc comment
+// This is another normal comment
+#[derive(Copy)]
+// The reason for these being here is to test AttrIds
+struct Foo;
\ No newline at end of file diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_default_library.html b/crates/ide/src/syntax_highlighting/test_data/highlight_default_library.html index 2f96a1be85ca..3e20b2f3512d 100644 --- a/crates/ide/src/syntax_highlighting/test_data/highlight_default_library.html +++ b/crates/ide/src/syntax_highlighting/test_data/highlight_default_library.html @@ -46,5 +46,4 @@ fn main() { let foo = Some(92); let nums = iter::repeat(foo.unwrap()); -} - \ No newline at end of file +} \ No newline at end of file diff --git a/crates/ide/src/syntax_highlighting/test_data/highlighting.html b/crates/ide/src/syntax_highlighting/test_data/highlight_general.html similarity index 75% rename from crates/ide/src/syntax_highlighting/test_data/highlighting.html rename to crates/ide/src/syntax_highlighting/test_data/highlight_general.html index 793f554c6379..22bdfffa3ef4 100644 --- a/crates/ide/src/syntax_highlighting/test_data/highlighting.html +++ b/crates/ide/src/syntax_highlighting/test_data/highlight_general.html @@ -44,9 +44,6 @@
use inner::{self as inner_mod};
 mod inner {}
 
-#[allow()]
-#[rustfmt::skip]
-#[proc_macros::identity]
 pub mod ops {
     #[lang = "fn_once"]
     pub trait FnOnce<Args> {}
@@ -58,11 +55,8 @@
     pub trait Fn<Args>: FnMut<Args> {}
 }
 
-proc_macros::mirror! {
-    {
-        ,i32 :x pub
-        ,i32 :y pub
-    } Foo struct
+struct Foo {
+    x: u32,
 }
 
 trait Bar where Self: {
@@ -71,7 +65,7 @@
 
 impl Bar for Foo where Self: {
     fn bar(&self) -> i32 {
-        self.x
+        self.x
     }
 }
 
@@ -81,26 +75,17 @@
     }
 
     fn qux(&mut self) {
-        self.x = 0;
+        self.x = 0;
     }
 
     fn quop(&self) -> i32 {
-        self.x
+        self.x
     }
 }
 
 use self::FooCopy::{self as BarCopy};
 
 #[derive(Copy)]
-/// This is a doc comment
-// This is a normal comment
-/// This is a doc comment
-#[derive(Copy)]
-// This is another normal comment
-/// This is another doc comment
-// This is another normal comment
-#[derive(Copy)]
-// The reason for these being here is to test AttrIds
 struct FooCopy {
     x: u32,
 }
@@ -146,68 +131,19 @@
     let bar = foobar();
 }
 
-macro_rules! def_fn {
-    ($($tt:tt)*) => {$($tt)*}
-}
-
-def_fn! {
-    fn bar() -> u32 {
-        100
-    }
-}
-
-macro_rules! dont_color_me_braces {
-    () => {0}
-}
-
-macro_rules! noop {
-    ($expr:expr) => {
-        $expr
-    }
-}
-
-macro_rules! keyword_frag {
-    ($type:ty) => ($type)
-}
-
-macro with_args($i:ident) {
-    $i
-}
-
-macro without_args {
-    ($i:ident) => {
-        $i
-    }
-}
-
 // comment
 fn main() {
-    println!("Hello, {}!", 92);
-    dont_color_me_braces!();
-
-    let mut vec = Vec::new();
-    if true {
-        let x = 92;
-        vec.push(Foo { x, y: 1 });
-    }
-
-    for e in vec {
-        // Do nothing
-    }
-
-    noop!(noop!(1));
-
     let mut x = 42;
     x += 1;
     let y = &mut x;
     let z = &y;
 
-    let Foo { x: z, y } = Foo { x: z, y };
+    let Foo { x: z, y } = Foo { x: z, y };
 
     y;
 
-    let mut foo = Foo { x, y: x };
-    let foo2 = Foo { x, y: x };
+    let mut foo = Foo { x, y: x };
+    let foo2 = Foo { x, y: x };
     foo.quop();
     foo.qux();
     foo.baz(foo2);
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_injection.html b/crates/ide/src/syntax_highlighting/test_data/highlight_injection.html
index 1713306dae2a..023e791f8bb8 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_injection.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_injection.html
@@ -45,10 +45,17 @@
 
 fn main() {
     fixture(r#"
-        trait Foo {
-            fn foo() {
-                println!("2 + 2 = {}", 4);
-            }
-        }"#
+trait Foo {
+    fn foo() {
+        println!("2 + 2 = {}", 4);
+    }
+}"#
+    );
+    fixture(r"
+fn foo() {
+    foo($0{
+        92
+    }$0)
+}"
     );
 }
\ No newline at end of file diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html b/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html new file mode 100644 index 000000000000..c3f71d443f67 --- /dev/null +++ b/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html @@ -0,0 +1,88 @@ + + +
proc_macros::mirror! {
+    {
+        ,i32 :x pub
+        ,i32 :y pub
+    } Foo struct
+}
+macro_rules! def_fn {
+    ($($tt:tt)*) => {$($tt)*}
+}
+
+def_fn! {
+    fn bar() -> u32 {
+        100
+    }
+}
+
+macro_rules! dont_color_me_braces {
+    () => {0}
+}
+
+macro_rules! noop {
+    ($expr:expr) => {
+        $expr
+    }
+}
+
+macro_rules! keyword_frag {
+    ($type:ty) => ($type)
+}
+
+macro with_args($i:ident) {
+    $i
+}
+
+macro without_args {
+    ($i:ident) => {
+        $i
+    }
+}
+
+fn main() {
+    println!("Hello, {}!", 92);
+    dont_color_me_braces!();
+    noop!(noop!(1));
+}
\ No newline at end of file diff --git a/crates/ide/src/syntax_highlighting/test_data/rainbow_highlighting.html b/crates/ide/src/syntax_highlighting/test_data/highlight_rainbow.html similarity index 100% rename from crates/ide/src/syntax_highlighting/test_data/rainbow_highlighting.html rename to crates/ide/src/syntax_highlighting/test_data/highlight_rainbow.html diff --git a/crates/ide/src/syntax_highlighting/test_data/injection.html b/crates/ide/src/syntax_highlighting/test_data/injection.html deleted file mode 100644 index 2e9ad144f19a..000000000000 --- a/crates/ide/src/syntax_highlighting/test_data/injection.html +++ /dev/null @@ -1,53 +0,0 @@ - - -
fn f(ra_fixture: &str) {}
-fn main() {
-    f(r"
-fn foo() {
-    foo($0{
-        92
-    }$0)
-}");
-}
-    
\ No newline at end of file diff --git a/crates/ide/src/syntax_highlighting/tests.rs b/crates/ide/src/syntax_highlighting/tests.rs index 4beab9909c41..ec50fde3562c 100644 --- a/crates/ide/src/syntax_highlighting/tests.rs +++ b/crates/ide/src/syntax_highlighting/tests.rs @@ -6,19 +6,98 @@ use test_utils::{bench, bench_fixture, skip_slow_tests, AssertLinear}; use crate::{fixture, FileRange, HlTag, TextRange}; +#[test] +fn attributes() { + check_highlighting( + r#" +//- proc_macros: identity +//- minicore: derive, copy +#[allow(dead_code)] +#[rustfmt::skip] +#[proc_macros::identity] +#[derive(Copy)] +/// This is a doc comment +// This is a normal comment +/// This is a doc comment +#[derive(Copy)] +// This is another normal comment +/// This is another doc comment +// This is another normal comment +#[derive(Copy)] +// The reason for these being here is to test AttrIds +struct Foo; +"#, + expect_file!["./test_data/highlight_attributes.html"], + false, + ); +} +#[test] +fn macros() { + check_highlighting( + r#" +//- proc_macros: mirror +proc_macros::mirror! { + { + ,i32 :x pub + ,i32 :y pub + } Foo struct +} +macro_rules! def_fn { + ($($tt:tt)*) => {$($tt)*} +} + +def_fn! { + fn bar() -> u32 { + 100 + } +} + +macro_rules! dont_color_me_braces { + () => {0} +} + +macro_rules! noop { + ($expr:expr) => { + $expr + } +} + +macro_rules! keyword_frag { + ($type:ty) => ($type) +} + +macro with_args($i:ident) { + $i +} + +macro without_args { + ($i:ident) => { + $i + } +} + +fn main() { + println!("Hello, {}!", 92); + dont_color_me_braces!(); + noop!(noop!(1)); +} +"#, + expect_file!["./test_data/highlight_macros.html"], + false, + ); +} + +/// If what you want to test feels like a specific entity consider making a new test instead, +/// this test fixture here in fact should shrink instead of grow ideally. 
#[test] fn test_highlighting() { check_highlighting( r#" -//- proc_macros: identity, mirror //- minicore: derive, copy //- /main.rs crate:main deps:foo use inner::{self as inner_mod}; mod inner {} -#[allow()] -#[rustfmt::skip] -#[proc_macros::identity] pub mod ops { #[lang = "fn_once"] pub trait FnOnce {} @@ -30,11 +109,8 @@ pub mod ops { pub trait Fn: FnMut {} } -proc_macros::mirror! { - { - ,i32 :x pub - ,i32 :y pub - } Foo struct +struct Foo { + x: u32, } trait Bar where Self: { @@ -64,15 +140,6 @@ impl Foo { use self::FooCopy::{self as BarCopy}; #[derive(Copy)] -/// This is a doc comment -// This is a normal comment -/// This is a doc comment -#[derive(Copy)] -// This is another normal comment -/// This is another doc comment -// This is another normal comment -#[derive(Copy)] -// The reason for these being here is to test AttrIds struct FooCopy { x: u32, } @@ -118,57 +185,8 @@ fn foo() { let bar = foobar(); } -macro_rules! def_fn { - ($($tt:tt)*) => {$($tt)*} -} - -def_fn! { - fn bar() -> u32 { - 100 - } -} - -macro_rules! dont_color_me_braces { - () => {0} -} - -macro_rules! noop { - ($expr:expr) => { - $expr - } -} - -macro_rules! keyword_frag { - ($type:ty) => ($type) -} - -macro with_args($i:ident) { - $i -} - -macro without_args { - ($i:ident) => { - $i - } -} - // comment fn main() { - println!("Hello, {}!", 92); - dont_color_me_braces!(); - - let mut vec = Vec::new(); - if true { - let x = 92; - vec.push(Foo { x, y: 1 }); - } - - for e in vec { - // Do nothing - } - - noop!(noop!(1)); - let mut x = 42; x += 1; let y = &mut x; @@ -288,161 +306,12 @@ macro_rules! 
die { panic!(); }; } -"# - .trim(), - expect_file!["./test_data/highlighting.html"], - false, - ); -} - -#[test] -fn test_rainbow_highlighting() { - check_highlighting( - r#" -fn main() { - let hello = "hello"; - let x = hello.to_string(); - let y = hello.to_string(); - - let x = "other color please!"; - let y = x.to_string(); -} - -fn bar() { - let mut hello = "hello"; -} -"# - .trim(), - expect_file!["./test_data/rainbow_highlighting.html"], - true, - ); -} - -#[test] -fn benchmark_syntax_highlighting_long_struct() { - if skip_slow_tests() { - return; - } - - let fixture = bench_fixture::big_struct(); - let (analysis, file_id) = fixture::file(&fixture); - - let hash = { - let _pt = bench("syntax highlighting long struct"); - analysis - .highlight(file_id) - .unwrap() - .iter() - .filter(|it| it.highlight.tag == HlTag::Symbol(SymbolKind::Struct)) - .count() - }; - assert_eq!(hash, 2001); -} - -#[test] -fn syntax_highlighting_not_quadratic() { - if skip_slow_tests() { - return; - } - - let mut al = AssertLinear::default(); - while al.next_round() { - for i in 6..=10 { - let n = 1 << i; - - let fixture = bench_fixture::big_struct_n(n); - let (analysis, file_id) = fixture::file(&fixture); - - let time = Instant::now(); - - let hash = analysis - .highlight(file_id) - .unwrap() - .iter() - .filter(|it| it.highlight.tag == HlTag::Symbol(SymbolKind::Struct)) - .count(); - assert!(hash > n as usize); - - let elapsed = time.elapsed(); - al.sample(n as f64, elapsed.as_millis() as f64); - } - } -} - -#[test] -fn benchmark_syntax_highlighting_parser() { - if skip_slow_tests() { - return; - } - - let fixture = bench_fixture::glorious_old_parser(); - let (analysis, file_id) = fixture::file(&fixture); - - let hash = { - let _pt = bench("syntax highlighting parser"); - analysis - .highlight(file_id) - .unwrap() - .iter() - .filter(|it| it.highlight.tag == HlTag::Symbol(SymbolKind::Function)) - .count() - }; - assert_eq!(hash, 1616); -} - -#[test] -fn test_ranges() { - let 
(analysis, file_id) = fixture::file( - r#" -#[derive(Clone, Debug)] -struct Foo { - pub x: i32, - pub y: i32, -} "#, - ); - - // The "x" - let highlights = &analysis - .highlight_range(FileRange { file_id, range: TextRange::at(45.into(), 1.into()) }) - .unwrap(); - - assert_eq!(&highlights[0].highlight.to_string(), "field.declaration.public"); -} - -#[test] -fn test_flattening() { - check_highlighting( - r##" -fn fixture(ra_fixture: &str) {} - -fn main() { - fixture(r#" - trait Foo { - fn foo() { - println!("2 + 2 = {}", 4); - } - }"# - ); -}"## - .trim(), - expect_file!["./test_data/highlight_injection.html"], + expect_file!["./test_data/highlight_general.html"], false, ); } -#[test] -fn ranges_sorted() { - let (analysis, file_id) = fixture::file( - r#" -#[foo(bar = "bar")] -macro_rules! test {} -}"# - .trim(), - ); - let _ = analysis.highlight(file_id).unwrap(); -} - #[test] fn test_string_highlighting() { // The format string detection is based on macro-expansion, @@ -555,8 +424,7 @@ fn main() { toho!("{}fmt", 0); asm!("mov eax, {0}"); format_args!(concat!("{}"), "{}"); -}"# - .trim(), +}"#, expect_file!["./test_data/highlight_strings.html"], false, ); @@ -630,8 +498,7 @@ fn main() { packed.a.calls_autoref(); } } -"# - .trim(), +"#, expect_file!["./test_data/highlight_unsafe.html"], false, ); @@ -781,8 +648,7 @@ pub fn block_comments2() {} //! ``` //! fn test() {} //! 
``` -"# - .trim(), +"#, expect_file!["./test_data/highlight_doctest.html"], false, ); @@ -792,14 +658,14 @@ pub fn block_comments2() {} fn test_extern_crate() { check_highlighting( r#" - //- /main.rs crate:main deps:std,alloc - extern crate std; - extern crate alloc as abc; - //- /std/lib.rs crate:std - pub struct S; - //- /alloc/lib.rs crate:alloc - pub struct A - "#, +//- /main.rs crate:main deps:std,alloc +extern crate std; +extern crate alloc as abc; +//- /std/lib.rs crate:std +pub struct S; +//- /alloc/lib.rs crate:alloc +pub struct A +"#, expect_file!["./test_data/highlight_extern_crate.html"], false, ); @@ -809,41 +675,41 @@ fn test_extern_crate() { fn test_crate_root() { check_highlighting( r#" - //- minicore: iterators - //- /main.rs crate:main deps:foo - extern crate foo; - use core::iter; +//- minicore: iterators +//- /main.rs crate:main deps:foo +extern crate foo; +use core::iter; - pub const NINETY_TWO: u8 = 92; +pub const NINETY_TWO: u8 = 92; - use foo as foooo; +use foo as foooo; - pub(crate) fn main() { - let baz = iter::repeat(92); - } +pub(crate) fn main() { + let baz = iter::repeat(92); +} - mod bar { - pub(in super) const FORTY_TWO: u8 = 42; +mod bar { + pub(in super) const FORTY_TWO: u8 = 42; - mod baz { - use super::super::NINETY_TWO; - use crate::foooo::Point; + mod baz { + use super::super::NINETY_TWO; + use crate::foooo::Point; - pub(in super::super) const TWENTY_NINE: u8 = 29; - } - } - //- /foo.rs crate:foo - struct Point { - x: u8, - y: u8, - } + pub(in super::super) const TWENTY_NINE: u8 = 29; + } +} +//- /foo.rs crate:foo +struct Point { + x: u8, + y: u8, +} - mod inner { - pub(super) fn swap(p: crate::Point) -> crate::Point { - crate::Point { x: p.y, y: p.x } - } - } - "#, +mod inner { + pub(super) fn swap(p: crate::Point) -> crate::Point { + crate::Point { x: p.y, y: p.x } + } +} +"#, expect_file!["./test_data/highlight_crate_root.html"], false, ); @@ -853,14 +719,14 @@ fn test_crate_root() { fn test_default_library() { 
check_highlighting( r#" - //- minicore: option, iterators - use core::iter; +//- minicore: option, iterators +use core::iter; - fn main() { - let foo = Some(92); - let nums = iter::repeat(foo.unwrap()); - } - "#, +fn main() { + let foo = Some(92); + let nums = iter::repeat(foo.unwrap()); +} +"#, expect_file!["./test_data/highlight_default_library.html"], false, ); @@ -888,7 +754,7 @@ impl t for foo { pub fn is_static() {} pub fn is_not_static(&self) {} } - "#, +"#, expect_file!["./test_data/highlight_assoc_functions.html"], false, ) @@ -898,26 +764,161 @@ impl t for foo { fn test_injection() { check_highlighting( r##" -fn f(ra_fixture: &str) {} +fn fixture(ra_fixture: &str) {} + fn main() { - f(r" + fixture(r#" +trait Foo { + fn foo() { + println!("2 + 2 = {}", 4); + } +}"# + ); + fixture(r" fn foo() { foo(\$0{ 92 }\$0) -}"); +}" + ); } - "##, - expect_file!["./test_data/injection.html"], +"##, + expect_file!["./test_data/highlight_injection.html"], false, ); } +#[test] +fn test_rainbow_highlighting() { + check_highlighting( + r#" +fn main() { + let hello = "hello"; + let x = hello.to_string(); + let y = hello.to_string(); + + let x = "other color please!"; + let y = x.to_string(); +} + +fn bar() { + let mut hello = "hello"; +} +"#, + expect_file!["./test_data/highlight_rainbow.html"], + true, + ); +} + +#[test] +fn test_ranges() { + let (analysis, file_id) = fixture::file( + r#" +#[derive(Clone, Debug)] +struct Foo { + pub x: i32, + pub y: i32, +} +"#, + ); + + // The "x" + let highlights = &analysis + .highlight_range(FileRange { file_id, range: TextRange::at(45.into(), 1.into()) }) + .unwrap(); + + assert_eq!(&highlights[0].highlight.to_string(), "field.declaration.public"); +} + +#[test] +fn ranges_sorted() { + let (analysis, file_id) = fixture::file( + r#" +#[foo(bar = "bar")] +macro_rules! 
test {} +}"# + .trim(), + ); + let _ = analysis.highlight(file_id).unwrap(); +} + /// Highlights the code given by the `ra_fixture` argument, renders the /// result as HTML, and compares it with the HTML file given as `snapshot`. /// Note that the `snapshot` file is overwritten by the rendered HTML. fn check_highlighting(ra_fixture: &str, expect: ExpectFile, rainbow: bool) { - let (analysis, file_id) = fixture::file(ra_fixture); + let (analysis, file_id) = fixture::file(ra_fixture.trim()); let actual_html = &analysis.highlight_as_html(file_id, rainbow).unwrap(); expect.assert_eq(actual_html) } + +#[test] +fn benchmark_syntax_highlighting_long_struct() { + if skip_slow_tests() { + return; + } + + let fixture = bench_fixture::big_struct(); + let (analysis, file_id) = fixture::file(&fixture); + + let hash = { + let _pt = bench("syntax highlighting long struct"); + analysis + .highlight(file_id) + .unwrap() + .iter() + .filter(|it| it.highlight.tag == HlTag::Symbol(SymbolKind::Struct)) + .count() + }; + assert_eq!(hash, 2001); +} + +#[test] +fn syntax_highlighting_not_quadratic() { + if skip_slow_tests() { + return; + } + + let mut al = AssertLinear::default(); + while al.next_round() { + for i in 6..=10 { + let n = 1 << i; + + let fixture = bench_fixture::big_struct_n(n); + let (analysis, file_id) = fixture::file(&fixture); + + let time = Instant::now(); + + let hash = analysis + .highlight(file_id) + .unwrap() + .iter() + .filter(|it| it.highlight.tag == HlTag::Symbol(SymbolKind::Struct)) + .count(); + assert!(hash > n as usize); + + let elapsed = time.elapsed(); + al.sample(n as f64, elapsed.as_millis() as f64); + } + } +} + +#[test] +fn benchmark_syntax_highlighting_parser() { + if skip_slow_tests() { + return; + } + + let fixture = bench_fixture::glorious_old_parser(); + let (analysis, file_id) = fixture::file(&fixture); + + let hash = { + let _pt = bench("syntax highlighting parser"); + analysis + .highlight(file_id) + .unwrap() + .iter() + .filter(|it| 
it.highlight.tag == HlTag::Symbol(SymbolKind::Function)) + .count() + }; + assert_eq!(hash, 1616); +} diff --git a/crates/ide/src/typing/on_enter.rs b/crates/ide/src/typing/on_enter.rs index 88d9777f1dc6..48c1713270b6 100644 --- a/crates/ide/src/typing/on_enter.rs +++ b/crates/ide/src/typing/on_enter.rs @@ -24,6 +24,10 @@ use text_edit::TextEdit; // // This action needs to be assigned to shortcut explicitly. // +// Note that, depending on the other installed extensions, this feature can visibly slow down typing. +// Similarly, if rust-analyzer crashes or stops responding, `Enter` might not work. +// In that case, you can still press `Shift-Enter` to insert a newline. +// // VS Code:: // // Add the following to `keybindings.json`: diff --git a/crates/ide_assists/src/handlers/add_missing_match_arms.rs b/crates/ide_assists/src/handlers/add_missing_match_arms.rs index eeed0386ad6a..ec8fbb497451 100644 --- a/crates/ide_assists/src/handlers/add_missing_match_arms.rs +++ b/crates/ide_assists/src/handlers/add_missing_match_arms.rs @@ -1,11 +1,11 @@ use std::iter::{self, Peekable}; use either::Either; -use hir::{Adt, HasSource, ModuleDef, Semantics}; +use hir::{Adt, Crate, HasAttrs, HasSource, ModuleDef, Semantics}; use ide_db::helpers::{mod_path_to_ast, FamousDefs}; use ide_db::RootDatabase; use itertools::Itertools; -use syntax::ast::{self, make, AstNode, HasName, MatchArm, MatchArmList, MatchExpr, Pat}; +use syntax::ast::{self, make, AstNode, HasName, MatchArmList, MatchExpr, Pat}; use crate::{ utils::{self, render_snippet, Cursor}, @@ -52,36 +52,45 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext) -> let expr = match_expr.expr()?; - let mut arms: Vec = match_arm_list.arms().collect(); - if let [arm] = arms.as_slice() { - if let Some(Pat::WildcardPat(..)) = arm.pat() { - arms.clear(); - } - } + let mut has_catch_all_arm = false; - let top_lvl_pats: Vec<_> = arms - .iter() - .filter_map(ast::MatchArm::pat) - .flat_map(|pat| match pat { - // 
Special case OrPat as separate top-level pats - Pat::OrPat(or_pat) => Either::Left(or_pat.pats()), - _ => Either::Right(iter::once(pat)), + let top_lvl_pats: Vec<_> = match_arm_list + .arms() + .filter_map(|arm| Some((arm.pat()?, arm.guard().is_some()))) + .flat_map(|(pat, has_guard)| { + match pat { + // Special case OrPat as separate top-level pats + Pat::OrPat(or_pat) => Either::Left(or_pat.pats()), + _ => Either::Right(iter::once(pat)), + } + .map(move |pat| (pat, has_guard)) + }) + .map(|(pat, has_guard)| { + has_catch_all_arm |= !has_guard && matches!(pat, Pat::WildcardPat(_)); + pat }) // Exclude top level wildcards so that they are expanded by this assist, retains status quo in #8129. .filter(|pat| !matches!(pat, Pat::WildcardPat(_))) .collect(); let module = ctx.sema.scope(expr.syntax()).module()?; + let (mut missing_pats, is_non_exhaustive): ( + Peekable>>, + bool, + ) = if let Some(enum_def) = resolve_enum_def(&ctx.sema, &expr) { + let is_non_exhaustive = enum_def.is_non_exhaustive(ctx.db()); - let mut missing_pats: Peekable>> = if let Some(enum_def) = - resolve_enum_def(&ctx.sema, &expr) - { let variants = enum_def.variants(ctx.db()); let missing_pats = variants .into_iter() - .filter_map(|variant| build_pat(ctx.db(), module, variant)) - .filter(|variant_pat| is_variant_missing(&top_lvl_pats, variant_pat)); + .filter_map(|variant| { + Some(( + build_pat(ctx.db(), module, variant)?, + variant.should_be_hidden(ctx.db(), module.krate()), + )) + }) + .filter(|(variant_pat, _)| is_variant_missing(&top_lvl_pats, variant_pat)); let option_enum = FamousDefs(&ctx.sema, Some(module.krate())).core_option_Option().map(lift_enum); @@ -92,8 +101,11 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext) -> } else { Box::new(missing_pats) }; - missing_pats.peekable() + (missing_pats.peekable(), is_non_exhaustive) } else if let Some(enum_defs) = resolve_tuple_of_enum_def(&ctx.sema, &expr) { + let is_non_exhaustive = + 
enum_defs.iter().any(|enum_def| enum_def.is_non_exhaustive(ctx.db())); + let mut n_arms = 1; let variants_of_enums: Vec> = enum_defs .into_iter() @@ -117,17 +129,23 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext) -> .multi_cartesian_product() .inspect(|_| cov_mark::hit!(add_missing_match_arms_lazy_computation)) .map(|variants| { + let is_hidden = variants + .iter() + .any(|variant| variant.should_be_hidden(ctx.db(), module.krate())); let patterns = variants.into_iter().filter_map(|variant| build_pat(ctx.db(), module, variant)); - ast::Pat::from(make::tuple_pat(patterns)) + + (ast::Pat::from(make::tuple_pat(patterns)), is_hidden) }) - .filter(|variant_pat| is_variant_missing(&top_lvl_pats, variant_pat)); - (Box::new(missing_pats) as Box>).peekable() + .filter(|(variant_pat, _)| is_variant_missing(&top_lvl_pats, variant_pat)); + ((Box::new(missing_pats) as Box>).peekable(), is_non_exhaustive) } else { return None; }; - if missing_pats.peek().is_none() { + let mut needs_catch_all_arm = is_non_exhaustive && !has_catch_all_arm; + + if !needs_catch_all_arm && missing_pats.peek().is_none() { return None; } @@ -138,8 +156,10 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext) -> |builder| { let new_match_arm_list = match_arm_list.clone_for_update(); let missing_arms = missing_pats - .map(|pat| make::match_arm(iter::once(pat), None, make::ext::expr_todo())) - .map(|it| it.clone_for_update()); + .map(|(pat, hidden)| { + (make::match_arm(iter::once(pat), None, make::ext::expr_todo()), hidden) + }) + .map(|(it, hidden)| (it.clone_for_update(), hidden)); let catch_all_arm = new_match_arm_list .arms() @@ -159,7 +179,22 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext) -> } } let mut first_new_arm = None; - for arm in missing_arms { + for (arm, hidden) in missing_arms { + if hidden { + needs_catch_all_arm = !has_catch_all_arm; + } else { + first_new_arm.get_or_insert_with(|| arm.clone()); + 
new_match_arm_list.add_arm(arm); + } + } + if needs_catch_all_arm && !has_catch_all_arm { + cov_mark::hit!(added_wildcard_pattern); + let arm = make::match_arm( + iter::once(make::wildcard_pat().into()), + None, + make::ext::expr_todo(), + ) + .clone_for_update(); first_new_arm.get_or_insert_with(|| arm.clone()); new_match_arm_list.add_arm(arm); } @@ -250,11 +285,29 @@ enum ExtendedVariant { Variant(hir::Variant), } +impl ExtendedVariant { + fn should_be_hidden(self, db: &RootDatabase, krate: Crate) -> bool { + match self { + ExtendedVariant::Variant(var) => { + var.attrs(db).has_doc_hidden() && var.module(db).krate() != krate + } + _ => false, + } + } +} + fn lift_enum(e: hir::Enum) -> ExtendedEnum { ExtendedEnum::Enum(e) } impl ExtendedEnum { + fn is_non_exhaustive(self, db: &RootDatabase) -> bool { + match self { + ExtendedEnum::Enum(e) => e.attrs(db).by_key("non_exhaustive").exists(), + _ => false, + } + } + fn variants(self, db: &RootDatabase) -> Vec { match self { ExtendedEnum::Enum(e) => { @@ -1280,6 +1333,352 @@ fn foo(t: bool) { $0true => todo!(), false => todo!(), } +}"#, + ); + } + + #[test] + fn does_not_fill_hidden_variants() { + cov_mark::check!(added_wildcard_pattern); + check_assist( + add_missing_match_arms, + r#" +//- /main.rs crate:main deps:e +fn foo(t: ::e::E) { + match $0t { + } +} +//- /e.rs crate:e +pub enum E { A, #[doc(hidden)] B, } +"#, + r#" +fn foo(t: ::e::E) { + match t { + $0e::E::A => todo!(), + _ => todo!(), + } +} +"#, + ); + } + + #[test] + fn does_not_fill_hidden_variants_tuple() { + cov_mark::check!(added_wildcard_pattern); + check_assist( + add_missing_match_arms, + r#" +//- /main.rs crate:main deps:e +fn foo(t: (bool, ::e::E)) { + match $0t { + } +} +//- /e.rs crate:e +pub enum E { A, #[doc(hidden)] B, } +"#, + r#" +fn foo(t: (bool, ::e::E)) { + match t { + $0(true, e::E::A) => todo!(), + (false, e::E::A) => todo!(), + _ => todo!(), + } +} +"#, + ); + } + + #[test] + fn fills_wildcard_with_only_hidden_variants() { + 
cov_mark::check!(added_wildcard_pattern); + check_assist( + add_missing_match_arms, + r#" +//- /main.rs crate:main deps:e +fn foo(t: ::e::E) { + match $0t { + } +} +//- /e.rs crate:e +pub enum E { #[doc(hidden)] A, } +"#, + r#" +fn foo(t: ::e::E) { + match t { + ${0:_} => todo!(), + } +} +"#, + ); + } + + #[test] + fn does_not_fill_wildcard_when_hidden_variants_are_explicit() { + check_assist_not_applicable( + add_missing_match_arms, + r#" +//- /main.rs crate:main deps:e +fn foo(t: ::e::E) { + match $0t { + e::E::A => todo!(), + } +} +//- /e.rs crate:e +pub enum E { #[doc(hidden)] A, } +"#, + ); + } + + // FIXME: I don't think the assist should be applicable in this case + #[test] + fn does_not_fill_wildcard_with_wildcard() { + check_assist( + add_missing_match_arms, + r#" +//- /main.rs crate:main deps:e +fn foo(t: ::e::E) { + match $0t { + _ => todo!(), + } +} +//- /e.rs crate:e +pub enum E { #[doc(hidden)] A, } +"#, + r#" +fn foo(t: ::e::E) { + match t { + _ => todo!(), + } +} +"#, + ); + } + + #[test] + fn fills_wildcard_on_non_exhaustive_with_explicit_matches() { + cov_mark::check!(added_wildcard_pattern); + check_assist( + add_missing_match_arms, + r#" +//- /main.rs crate:main deps:e +fn foo(t: ::e::E) { + match $0t { + e::E::A => todo!(), + } +} +//- /e.rs crate:e +#[non_exhaustive] +pub enum E { A, } +"#, + r#" +fn foo(t: ::e::E) { + match t { + e::E::A => todo!(), + ${0:_} => todo!(), + } +} +"#, + ); + } + + #[test] + fn fills_wildcard_on_non_exhaustive_without_matches() { + cov_mark::check!(added_wildcard_pattern); + check_assist( + add_missing_match_arms, + r#" +//- /main.rs crate:main deps:e +fn foo(t: ::e::E) { + match $0t { + } +} +//- /e.rs crate:e +#[non_exhaustive] +pub enum E { A, } +"#, + r#" +fn foo(t: ::e::E) { + match t { + $0e::E::A => todo!(), + _ => todo!(), + } +} +"#, + ); + } + + #[test] + fn fills_wildcard_on_non_exhaustive_with_doc_hidden() { + cov_mark::check!(added_wildcard_pattern); + check_assist( + add_missing_match_arms, + r#" 
+//- /main.rs crate:main deps:e +fn foo(t: ::e::E) { + match $0t { + } +} +//- /e.rs crate:e +#[non_exhaustive] +pub enum E { A, #[doc(hidden)] B }"#, + r#" +fn foo(t: ::e::E) { + match t { + $0e::E::A => todo!(), + _ => todo!(), + } +} +"#, + ); + } + + #[test] + fn fills_wildcard_on_non_exhaustive_with_doc_hidden_with_explicit_arms() { + cov_mark::check!(added_wildcard_pattern); + check_assist( + add_missing_match_arms, + r#" +//- /main.rs crate:main deps:e +fn foo(t: ::e::E) { + match $0t { + e::E::A => todo!(), + } +} +//- /e.rs crate:e +#[non_exhaustive] +pub enum E { A, #[doc(hidden)] B }"#, + r#" +fn foo(t: ::e::E) { + match t { + e::E::A => todo!(), + ${0:_} => todo!(), + } +} +"#, + ); + } + + #[test] + fn fill_wildcard_with_partial_wildcard() { + cov_mark::check!(added_wildcard_pattern); + check_assist( + add_missing_match_arms, + r#" +//- /main.rs crate:main deps:e +fn foo(t: ::e::E, b: bool) { + match $0t { + _ if b => todo!(), + } +} +//- /e.rs crate:e +pub enum E { #[doc(hidden)] A, }"#, + r#" +fn foo(t: ::e::E, b: bool) { + match t { + _ if b => todo!(), + ${0:_} => todo!(), + } +} +"#, + ); + } + + #[test] + fn does_not_fill_wildcard_with_partial_wildcard_and_wildcard() { + check_assist( + add_missing_match_arms, + r#" +//- /main.rs crate:main deps:e +fn foo(t: ::e::E, b: bool) { + match $0t { + _ if b => todo!(), + _ => todo!(), + } +} +//- /e.rs crate:e +pub enum E { #[doc(hidden)] A, }"#, + r#" +fn foo(t: ::e::E, b: bool) { + match t { + _ if b => todo!(), + _ => todo!(), + } +} +"#, + ); + } + + #[test] + fn non_exhaustive_doc_hidden_tuple_fills_wildcard() { + cov_mark::check!(added_wildcard_pattern); + check_assist( + add_missing_match_arms, + r#" +//- /main.rs crate:main deps:e +fn foo(t: ::e::E) { + match $0t { + } +} +//- /e.rs crate:e +#[non_exhaustive] +pub enum E { A, #[doc(hidden)] B, }"#, + r#" +fn foo(t: ::e::E) { + match t { + $0e::E::A => todo!(), + _ => todo!(), + } +} +"#, + ); + } + + #[test] + fn 
ignores_doc_hidden_for_crate_local_enums() { + check_assist( + add_missing_match_arms, + r#" +enum E { A, #[doc(hidden)] B, } + +fn foo(t: E) { + match $0t { + } +}"#, + r#" +enum E { A, #[doc(hidden)] B, } + +fn foo(t: E) { + match t { + $0E::A => todo!(), + E::B => todo!(), + } +}"#, + ); + } + + #[test] + fn ignores_doc_hidden_for_crate_local_enums_but_not_non_exhaustive() { + cov_mark::check!(added_wildcard_pattern); + check_assist( + add_missing_match_arms, + r#" +#[non_exhaustive] +enum E { A, #[doc(hidden)] B, } + +fn foo(t: E) { + match $0t { + } +}"#, + r#" +#[non_exhaustive] +enum E { A, #[doc(hidden)] B, } + +fn foo(t: E) { + match t { + $0E::A => todo!(), + E::B => todo!(), + _ => todo!(), + } }"#, ); } diff --git a/crates/ide_assists/src/handlers/auto_import.rs b/crates/ide_assists/src/handlers/auto_import.rs index cac736ff850a..9c0233b028f6 100644 --- a/crates/ide_assists/src/handlers/auto_import.rs +++ b/crates/ide_assists/src/handlers/auto_import.rs @@ -3,7 +3,7 @@ use ide_db::helpers::{ insert_use::{insert_use, ImportScope}, mod_path_to_ast, }; -use syntax::{ast, AstNode, AstToken, NodeOrToken, SyntaxElement}; +use syntax::{ast, AstNode, NodeOrToken, SyntaxElement}; use crate::{AssistContext, AssistId, AssistKind, Assists, GroupLabel}; @@ -139,9 +139,7 @@ pub(super) fn find_importable_node(ctx: &AssistContext) -> Option<(ImportAssets, { ImportAssets::for_ident_pat(&ctx.sema, &pat).zip(Some(pat.syntax().clone().into())) } else { - // FIXME: Descend? 
- let ident = ctx.find_token_at_offset()?; - ImportAssets::for_derive_ident(&ctx.sema, &ident).zip(Some(ident.syntax().clone().into())) + None } } diff --git a/crates/ide_assists/src/handlers/convert_bool_then.rs b/crates/ide_assists/src/handlers/convert_bool_then.rs index b8c55eb852f4..274718e6ea90 100644 --- a/crates/ide_assists/src/handlers/convert_bool_then.rs +++ b/crates/ide_assists/src/handlers/convert_bool_then.rs @@ -2,7 +2,7 @@ use hir::{known, AsAssocItem, Semantics}; use ide_db::{ helpers::{ for_each_tail_expr, - node_ext::{block_as_lone_tail, preorder_expr}, + node_ext::{block_as_lone_tail, is_pattern_cond, preorder_expr}, FamousDefs, }, RootDatabase, @@ -45,8 +45,7 @@ pub(crate) fn convert_if_to_bool_then(acc: &mut Assists, ctx: &AssistContext) -> return None; } - let cond = expr.condition().filter(|cond| !cond.is_pattern_cond())?; - let cond = cond.expr()?; + let cond = expr.condition().filter(|cond| !is_pattern_cond(cond.clone()))?; let then = expr.then_branch()?; let else_ = match expr.else_branch()? 
{ ast::ElseBranch::Block(b) => b, @@ -209,7 +208,7 @@ pub(crate) fn convert_bool_then_to_if(acc: &mut Assists, ctx: &AssistContext) -> _ => receiver, }; let if_expr = make::expr_if( - make::condition(cond, None), + cond, closure_body.reset_indent(), Some(ast::ElseBranch::Block(make::block_expr(None, Some(none_path)))), ) diff --git a/crates/ide_assists/src/handlers/convert_iter_for_each_to_for.rs b/crates/ide_assists/src/handlers/convert_iter_for_each_to_for.rs index 0d2daa8dc30c..4a4ad984db14 100644 --- a/crates/ide_assists/src/handlers/convert_iter_for_each_to_for.rs +++ b/crates/ide_assists/src/handlers/convert_iter_for_each_to_for.rs @@ -154,11 +154,11 @@ fn is_ref_and_impls_iter_method( let has_wanted_method = ty .iterate_method_candidates( sema.db, - krate, + &scope, &traits_in_scope, None, Some(&wanted_method), - |_, func| { + |func| { if func.ret_type(sema.db).impls_trait(sema.db, iter_trait, &[]) { return Some(()); } diff --git a/crates/ide_assists/src/handlers/convert_to_guarded_return.rs b/crates/ide_assists/src/handlers/convert_to_guarded_return.rs index 884905a91881..193d1cdfb243 100644 --- a/crates/ide_assists/src/handlers/convert_to_guarded_return.rs +++ b/crates/ide_assists/src/handlers/convert_to_guarded_return.rs @@ -1,5 +1,6 @@ use std::iter::once; +use ide_db::helpers::node_ext::{is_pattern_cond, single_let}; use syntax::{ ast::{ self, @@ -48,25 +49,28 @@ pub(crate) fn convert_to_guarded_return(acc: &mut Assists, ctx: &AssistContext) let cond = if_expr.condition()?; // Check if there is an IfLet that we can handle. - let if_let_pat = match cond.pat() { - None => None, // No IfLet, supported. 
- Some(ast::Pat::TupleStructPat(pat)) if pat.fields().count() == 1 => { - let path = pat.path()?; - if path.qualifier().is_some() { - return None; - } + let (if_let_pat, cond_expr) = if is_pattern_cond(cond.clone()) { + let let_ = single_let(cond)?; + match let_.pat() { + Some(ast::Pat::TupleStructPat(pat)) if pat.fields().count() == 1 => { + let path = pat.path()?; + if path.qualifier().is_some() { + return None; + } - let bound_ident = pat.fields().next().unwrap(); - if !ast::IdentPat::can_cast(bound_ident.syntax().kind()) { - return None; - } + let bound_ident = pat.fields().next().unwrap(); + if !ast::IdentPat::can_cast(bound_ident.syntax().kind()) { + return None; + } - Some((path, bound_ident)) + (Some((path, bound_ident)), let_.expr()?) + } + _ => return None, // Unsupported IfLet. } - Some(_) => return None, // Unsupported IfLet. + } else { + (None, cond) }; - let cond_expr = cond.expr()?; let then_block = if_expr.then_branch()?; let then_block = then_block.stmt_list()?; @@ -119,8 +123,7 @@ pub(crate) fn convert_to_guarded_return(acc: &mut Assists, ctx: &AssistContext) let then_branch = make::block_expr(once(make::expr_stmt(early_expression).into()), None); let cond = invert_boolean_expression(cond_expr); - make::expr_if(make::condition(cond, None), then_branch, None) - .indent(if_indent_level) + make::expr_if(cond, then_branch, None).indent(if_indent_level) }; new_expr.syntax().clone_for_update() } diff --git a/crates/ide_assists/src/handlers/convert_while_to_loop.rs b/crates/ide_assists/src/handlers/convert_while_to_loop.rs index 2bc64e77a38f..0fa2dcfbde1f 100644 --- a/crates/ide_assists/src/handlers/convert_while_to_loop.rs +++ b/crates/ide_assists/src/handlers/convert_while_to_loop.rs @@ -1,5 +1,6 @@ use std::iter::once; +use ide_db::helpers::node_ext::is_pattern_cond; use syntax::{ ast::{ self, @@ -42,7 +43,6 @@ pub(crate) fn convert_while_to_loop(acc: &mut Assists, ctx: &AssistContext) -> O let while_expr = 
while_kw.parent().and_then(ast::WhileExpr::cast)?; let while_body = while_expr.loop_body()?; let while_cond = while_expr.condition()?; - let while_cond_expr = while_cond.expr()?; let target = while_expr.syntax().text_range(); acc.add( @@ -55,19 +55,15 @@ pub(crate) fn convert_while_to_loop(acc: &mut Assists, ctx: &AssistContext) -> O let break_block = make::block_expr(once(make::expr_stmt(make::expr_break(None)).into()), None) .indent(while_indent_level); - let block_expr = match while_cond.pat() { - Some(_) => { - let if_expr = make::expr_if(while_cond, while_body, Some(break_block.into())); - let stmts = once(make::expr_stmt(if_expr).into()); - make::block_expr(stmts, None) - } - None => { - let if_cond = make::condition(invert_boolean_expression(while_cond_expr), None); - let if_expr = make::expr_if(if_cond, break_block, None); - let stmts = - once(make::expr_stmt(if_expr).into()).chain(while_body.statements()); - make::block_expr(stmts, while_body.tail_expr()) - } + let block_expr = if is_pattern_cond(while_cond.clone()) { + let if_expr = make::expr_if(while_cond, while_body, Some(break_block.into())); + let stmts = once(make::expr_stmt(if_expr).into()); + make::block_expr(stmts, None) + } else { + let if_cond = invert_boolean_expression(while_cond); + let if_expr = make::expr_if(if_cond, break_block, None); + let stmts = once(make::expr_stmt(if_expr).into()).chain(while_body.statements()); + make::block_expr(stmts, while_body.tail_expr()) }; let replacement = make::expr_loop(block_expr.indent(while_indent_level)); diff --git a/crates/ide_assists/src/handlers/destructure_tuple_binding.rs b/crates/ide_assists/src/handlers/destructure_tuple_binding.rs index 5f361f01dc1d..ac17e3f057a5 100644 --- a/crates/ide_assists/src/handlers/destructure_tuple_binding.rs +++ b/crates/ide_assists/src/handlers/destructure_tuple_binding.rs @@ -371,7 +371,7 @@ fn handle_ref_field_usage(ctx: &AssistContext, field_expr: &FieldExpr) -> RefDat fn is_auto_ref(ctx: &AssistContext, 
call_expr: &MethodCallExpr) -> bool { fn impl_(ctx: &AssistContext, call_expr: &MethodCallExpr) -> Option { let rec = call_expr.receiver()?; - let rec_ty = ctx.sema.type_of_expr(&rec)?.adjusted(); + let rec_ty = ctx.sema.type_of_expr(&rec)?.original(); // input must be actual value if rec_ty.is_reference() { return Some(false); diff --git a/crates/ide_assists/src/handlers/extract_function.rs b/crates/ide_assists/src/handlers/extract_function.rs index 877c5b0ceff6..e80dce0c4555 100644 --- a/crates/ide_assists/src/handlers/extract_function.rs +++ b/crates/ide_assists/src/handlers/extract_function.rs @@ -393,7 +393,7 @@ impl Param { } fn to_param(&self, ctx: &AssistContext, module: hir::Module) -> ast::Param { - let var = self.var.name(ctx.db()).unwrap().to_string(); + let var = self.var.name(ctx.db()).to_string(); let var_name = make::name(&var); let pat = match self.kind() { ParamKind::MutValue => make::ident_pat(false, true, var_name), @@ -1144,12 +1144,12 @@ fn make_call(ctx: &AssistContext, fun: &Function, indent: IndentLevel) -> String match fun.outliving_locals.as_slice() { [] => {} [var] => { - format_to!(buf, "let {}{} = ", mut_modifier(var), var.local.name(ctx.db()).unwrap()) + format_to!(buf, "let {}{} = ", mut_modifier(var), var.local.name(ctx.db())) } vars => { buf.push_str("let ("); let bindings = vars.iter().format_with(", ", |local, f| { - f(&format_args!("{}{}", mut_modifier(local), local.local.name(ctx.db()).unwrap())) + f(&format_args!("{}{}", mut_modifier(local), local.local.name(ctx.db()))) }); format_to!(buf, "{}", bindings); buf.push_str(") = "); @@ -1219,28 +1219,26 @@ impl FlowHandler { let stmt = make::expr_stmt(action); let block = make::block_expr(iter::once(stmt.into()), None); let controlflow_break_path = make::path_from_text("ControlFlow::Break"); - let condition = make::condition( + let condition = make::expr_let( + make::tuple_struct_pat( + controlflow_break_path, + iter::once(make::wildcard_pat().into()), + ) + .into(), call_expr, - 
Some( - make::tuple_struct_pat( - controlflow_break_path, - iter::once(make::wildcard_pat().into()), - ) - .into(), - ), ); - make::expr_if(condition, block, None) + make::expr_if(condition.into(), block, None) } FlowHandler::IfOption { action } => { let path = make::ext::ident_path("Some"); let value_pat = make::ext::simple_ident_pat(make::name("value")); let pattern = make::tuple_struct_pat(path, iter::once(value_pat.into())); - let cond = make::condition(call_expr, Some(pattern.into())); + let cond = make::expr_let(pattern.into(), call_expr); let value = make::expr_path(make::ext::ident_path("value")); let action_expr = action.make_result_handler(Some(value)); let action_stmt = make::expr_stmt(action_expr); let then = make::block_expr(iter::once(action_stmt.into()), None); - make::expr_if(cond, then, None) + make::expr_if(cond.into(), then, None) } FlowHandler::MatchOption { none } => { let some_name = "value"; @@ -1290,7 +1288,7 @@ impl FlowHandler { } fn path_expr_from_local(ctx: &AssistContext, var: Local) -> ast::Expr { - let name = var.name(ctx.db()).unwrap().to_string(); + let name = var.name(ctx.db()).to_string(); make::expr_path(make::ext::ident_path(&name)) } diff --git a/crates/ide_assists/src/handlers/extract_module.rs b/crates/ide_assists/src/handlers/extract_module.rs index 6cc311fd7585..64875adfae23 100644 --- a/crates/ide_assists/src/handlers/extract_module.rs +++ b/crates/ide_assists/src/handlers/extract_module.rs @@ -779,7 +779,8 @@ fn get_replacements_for_visibilty_change( ast::Item::Enum(it) => replacements.push((it.visibility(), it.syntax().clone())), ast::Item::ExternCrate(it) => replacements.push((it.visibility(), it.syntax().clone())), ast::Item::Fn(it) => replacements.push((it.visibility(), it.syntax().clone())), - ast::Item::Impl(it) => impls.push(it), + //Associated item's visibility should not be changed + ast::Item::Impl(it) if it.for_token().is_none() => impls.push(it), ast::Item::MacroRules(it) => replacements.push((it.visibility(), 
it.syntax().clone())), ast::Item::MacroDef(it) => replacements.push((it.visibility(), it.syntax().clone())), ast::Item::Module(it) => replacements.push((it.visibility(), it.syntax().clone())), @@ -825,11 +826,7 @@ fn add_change_vis( vis: Option, node_or_token_opt: Option, ) -> Option<()> { - if let Some(vis) = vis { - if vis.syntax().text() == "pub" { - ted::replace(vis.syntax(), make::visibility_pub_crate().syntax().clone_for_update()); - } - } else { + if let None = vis { if let Some(node_or_token) = node_or_token_opt { let pub_crate_vis = make::visibility_pub_crate().clone_for_update(); if let Some(node) = node_or_token.as_node() { @@ -962,8 +959,8 @@ mod modname { pub(crate) inner: SomeType, } - pub(crate) struct PrivateStruct1 { - pub(crate) inner: i32, + pub struct PrivateStruct1 { + pub inner: i32, } impl PrivateStruct { @@ -1033,7 +1030,7 @@ mod modname { pub(crate) struct A {} impl A { - pub(crate) fn new_a() -> i32 { + pub fn new_a() -> i32 { 2 } } @@ -1148,7 +1145,7 @@ mod modname { pub struct PrivateStruct; $0struct Strukt { - field: PrivateStruct, + field: PrivateStruct, }$0 struct Strukt1 { @@ -1164,7 +1161,7 @@ mod modname { use super::PrivateStruct; pub(crate) struct Strukt { - pub(crate) field: PrivateStruct, + pub(crate) field: PrivateStruct, } } @@ -1203,7 +1200,7 @@ mod modname { use super::A; impl A { - pub(crate) fn new_a() -> i32 { + pub fn new_a() -> i32 { 2 } } @@ -1251,7 +1248,7 @@ mod modname { use super::super::foo::A; impl A { - pub(crate) fn new_a() -> i32 { + pub fn new_a() -> i32 { 2 } } @@ -1378,4 +1375,39 @@ mod modname { ", ) } + + #[test] + fn test_do_not_apply_visibility_modifier_to_trait_impl_items() { + check_assist( + extract_module, + r" + trait ATrait { + fn function(); + } + + struct A {} + +$0impl ATrait for A { + fn function() {} +}$0 + ", + r" + trait ATrait { + fn function(); + } + + struct A {} + +mod modname { + use super::A; + + use super::ATrait; + + impl ATrait for A { + fn function() {} + } +} + ", + ) + } } diff 
--git a/crates/ide_assists/src/handlers/extract_struct_from_enum_variant.rs b/crates/ide_assists/src/handlers/extract_struct_from_enum_variant.rs index 82e0970cc4bf..1cdd4187af4e 100644 --- a/crates/ide_assists/src/handlers/extract_struct_from_enum_variant.rs +++ b/crates/ide_assists/src/handlers/extract_struct_from_enum_variant.rs @@ -11,15 +11,14 @@ use ide_db::{ search::FileReference, RootDatabase, }; -use itertools::Itertools; +use itertools::{Itertools, Position}; use rustc_hash::FxHashSet; use syntax::{ ast::{ self, edit::IndentLevel, edit_in_place::Indent, make, AstNode, HasAttrs, HasGenericParams, - HasName, HasTypeBounds, HasVisibility, + HasName, HasVisibility, }, - match_ast, - ted::{self, Position}, + match_ast, ted, SyntaxElement, SyntaxKind::*, SyntaxNode, T, }; @@ -106,7 +105,12 @@ pub(crate) fn extract_struct_from_enum_variant( } let indent = enum_ast.indent_level(); - let def = create_struct_def(variant_name.clone(), &variant, &field_list, &enum_ast); + let generic_params = enum_ast + .generic_param_list() + .and_then(|known_generics| extract_generic_params(&known_generics, &field_list)); + let generics = generic_params.as_ref().map(|generics| generics.clone_for_update()); + let def = + create_struct_def(variant_name.clone(), &variant, &field_list, generics, &enum_ast); def.reindent_to(indent); let start_offset = &variant.parent_enum().syntax().clone(); @@ -118,7 +122,7 @@ pub(crate) fn extract_struct_from_enum_variant( ], ); - update_variant(&variant, enum_ast.generic_param_list()); + update_variant(&variant, generic_params.map(|g| g.clone_for_update())); }, ) } @@ -159,10 +163,77 @@ fn existing_definition(db: &RootDatabase, variant_name: &ast::Name, variant: &Va .any(|(name, _)| name.to_string() == variant_name.to_string()) } +fn extract_generic_params( + known_generics: &ast::GenericParamList, + field_list: &Either, +) -> Option { + let mut generics = known_generics.generic_params().map(|param| (param, false)).collect_vec(); + + let tagged_one = 
match field_list { + Either::Left(field_list) => field_list + .fields() + .filter_map(|f| f.ty()) + .fold(false, |tagged, ty| tag_generics_in_variant(&ty, &mut generics) || tagged), + Either::Right(field_list) => field_list + .fields() + .filter_map(|f| f.ty()) + .fold(false, |tagged, ty| tag_generics_in_variant(&ty, &mut generics) || tagged), + }; + + let generics = generics.into_iter().filter_map(|(param, tag)| tag.then(|| param)); + tagged_one.then(|| make::generic_param_list(generics)) +} + +fn tag_generics_in_variant(ty: &ast::Type, generics: &mut [(ast::GenericParam, bool)]) -> bool { + let mut tagged_one = false; + + for token in ty.syntax().descendants_with_tokens().filter_map(SyntaxElement::into_token) { + for (param, tag) in generics.iter_mut().filter(|(_, tag)| !tag) { + match param { + ast::GenericParam::LifetimeParam(lt) + if matches!(token.kind(), T![lifetime_ident]) => + { + if let Some(lt) = lt.lifetime() { + if lt.text().as_str() == token.text() { + *tag = true; + tagged_one = true; + break; + } + } + } + param if matches!(token.kind(), T![ident]) => { + if match param { + ast::GenericParam::ConstParam(konst) => konst + .name() + .map(|name| name.text().as_str() == token.text()) + .unwrap_or_default(), + ast::GenericParam::TypeParam(ty) => ty + .name() + .map(|name| name.text().as_str() == token.text()) + .unwrap_or_default(), + ast::GenericParam::LifetimeParam(lt) => lt + .lifetime() + .map(|lt| lt.text().as_str() == token.text()) + .unwrap_or_default(), + } { + *tag = true; + tagged_one = true; + break; + } + } + _ => (), + } + } + } + + tagged_one +} + fn create_struct_def( variant_name: ast::Name, variant: &ast::Variant, field_list: &Either, + generics: Option, enum_: &ast::Enum, ) -> ast::Struct { let enum_vis = enum_.visibility(); @@ -204,9 +275,7 @@ fn create_struct_def( field_list.reindent_to(IndentLevel::single()); - // FIXME: This uses all the generic params of the enum, but the variant might not use all of them. 
- let strukt = make::struct_(enum_vis, variant_name, enum_.generic_param_list(), field_list) - .clone_for_update(); + let strukt = make::struct_(enum_vis, variant_name, generics, field_list).clone_for_update(); // FIXME: Consider making this an actual function somewhere (like in `AttrsOwnerEdit`) after some deliberation let attrs_and_docs = |node: &SyntaxNode| { @@ -233,36 +302,53 @@ fn create_struct_def( _ => tok, }) .collect(); - ted::insert_all(Position::first_child_of(strukt.syntax()), variant_attrs); + ted::insert_all(ted::Position::first_child_of(strukt.syntax()), variant_attrs); // copy attributes from enum ted::insert_all( - Position::first_child_of(strukt.syntax()), + ted::Position::first_child_of(strukt.syntax()), enum_.attrs().map(|it| it.syntax().clone_for_update().into()).collect(), ); strukt } -fn update_variant(variant: &ast::Variant, generic: Option) -> Option<()> { +fn update_variant(variant: &ast::Variant, generics: Option) -> Option<()> { let name = variant.name()?; - let ty = match generic { - // FIXME: This uses all the generic params of the enum, but the variant might not use all of them. 
- Some(gpl) => { - let gpl = gpl.clone_for_update(); - gpl.generic_params().for_each(|gp| { - let tbl = match gp { - ast::GenericParam::LifetimeParam(it) => it.type_bound_list(), - ast::GenericParam::TypeParam(it) => it.type_bound_list(), - ast::GenericParam::ConstParam(_) => return, - }; - if let Some(tbl) = tbl { - tbl.remove(); + let ty = generics + .filter(|generics| generics.generic_params().count() > 0) + .map(|generics| { + let mut generic_str = String::with_capacity(8); + + for (p, more) in generics.generic_params().with_position().map(|p| match p { + Position::First(p) | Position::Middle(p) => (p, true), + Position::Last(p) | Position::Only(p) => (p, false), + }) { + match p { + ast::GenericParam::ConstParam(konst) => { + if let Some(name) = konst.name() { + generic_str.push_str(name.text().as_str()); + } + } + ast::GenericParam::LifetimeParam(lt) => { + if let Some(lt) = lt.lifetime() { + generic_str.push_str(lt.text().as_str()); + } + } + ast::GenericParam::TypeParam(ty) => { + if let Some(name) = ty.name() { + generic_str.push_str(name.text().as_str()); + } + } } - }); - make::ty(&format!("{}<{}>", name.text(), gpl.generic_params().join(", "))) - } - None => make::ty(&name.text()), - }; + if more { + generic_str.push_str(", "); + } + } + + make::ty(&format!("{}<{}>", &name.text(), &generic_str)) + }) + .unwrap_or_else(|| make::ty(&name.text())); + let tuple_field = make::tuple_field(None, ty); let replacement = make::variant( name, @@ -902,4 +988,92 @@ enum A { $0One(u8, u32) } fn test_extract_not_applicable_no_field_named() { check_assist_not_applicable(extract_struct_from_enum_variant, r"enum A { $0None {} }"); } + + #[test] + fn test_extract_struct_only_copies_needed_generics() { + check_assist( + extract_struct_from_enum_variant, + r#" +enum X<'a, 'b, 'x> { + $0A { a: &'a &'x mut () }, + B { b: &'b () }, + C { c: () }, +} +"#, + r#" +struct A<'a, 'x>{ a: &'a &'x mut () } + +enum X<'a, 'b, 'x> { + A(A<'a, 'x>), + B { b: &'b () }, + C { c: () }, +} 
+"#, + ); + } + + #[test] + fn test_extract_struct_with_liftime_type_const() { + check_assist( + extract_struct_from_enum_variant, + r#" +enum X<'b, T, V, const C: usize> { + $0A { a: T, b: X<'b>, c: [u8; C] }, + D { d: V }, +} +"#, + r#" +struct A<'b, T, const C: usize>{ a: T, b: X<'b>, c: [u8; C] } + +enum X<'b, T, V, const C: usize> { + A(A<'b, T, C>), + D { d: V }, +} +"#, + ); + } + + #[test] + fn test_extract_struct_without_generics() { + check_assist( + extract_struct_from_enum_variant, + r#" +enum X<'a, 'b> { + A { a: &'a () }, + B { b: &'b () }, + $0C { c: () }, +} +"#, + r#" +struct C{ c: () } + +enum X<'a, 'b> { + A { a: &'a () }, + B { b: &'b () }, + C(C), +} +"#, + ); + } + + #[test] + fn test_extract_struct_keeps_trait_bounds() { + check_assist( + extract_struct_from_enum_variant, + r#" +enum En { + $0A { a: T }, + B { b: V }, +} +"#, + r#" +struct A{ a: T } + +enum En { + A(A), + B { b: V }, +} +"#, + ); + } } diff --git a/crates/ide_assists/src/handlers/generate_is_empty_from_len.rs b/crates/ide_assists/src/handlers/generate_is_empty_from_len.rs index cb3fbed21995..db0cfbe2ec0f 100644 --- a/crates/ide_assists/src/handlers/generate_is_empty_from_len.rs +++ b/crates/ide_assists/src/handlers/generate_is_empty_from_len.rs @@ -90,10 +90,9 @@ fn get_impl_method( let impl_def: hir::Impl = ctx.sema.to_def(impl_)?; let scope = ctx.sema.scope(impl_.syntax()); - let krate = impl_def.module(db).krate(); let ty = impl_def.self_ty(db); let traits_in_scope = scope.visible_traits(); - ty.iterate_method_candidates(db, krate, &traits_in_scope, None, Some(fn_name), |_, func| { + ty.iterate_method_candidates(db, &scope, &traits_in_scope, None, Some(fn_name), |func| { Some(func) }) } diff --git a/crates/ide_assists/src/handlers/invert_if.rs b/crates/ide_assists/src/handlers/invert_if.rs index 20f6b0c54c9e..46f11f4af324 100644 --- a/crates/ide_assists/src/handlers/invert_if.rs +++ b/crates/ide_assists/src/handlers/invert_if.rs @@ -1,3 +1,4 @@ +use 
ide_db::helpers::node_ext::is_pattern_cond; use syntax::{ ast::{self, AstNode}, T, @@ -34,12 +35,12 @@ pub(crate) fn invert_if(acc: &mut Assists, ctx: &AssistContext) -> Option<()> { return None; } + let cond = expr.condition()?; // This assist should not apply for if-let. - if expr.condition()?.is_pattern_cond() { + if is_pattern_cond(cond.clone()) { return None; } - let cond = expr.condition()?.expr()?; let then_node = expr.then_branch()?.syntax().clone(); let else_block = match expr.else_branch()? { ast::ElseBranch::Block(it) => it, diff --git a/crates/ide_assists/src/handlers/move_guard.rs b/crates/ide_assists/src/handlers/move_guard.rs index 366f308f6eff..5c05cb921d9d 100644 --- a/crates/ide_assists/src/handlers/move_guard.rs +++ b/crates/ide_assists/src/handlers/move_guard.rs @@ -1,8 +1,5 @@ use syntax::{ - ast::{ - edit::AstNodeEdit, make, AstNode, BlockExpr, Condition, ElseBranch, Expr, IfExpr, MatchArm, - Pat, - }, + ast::{edit::AstNodeEdit, make, AstNode, BlockExpr, ElseBranch, Expr, IfExpr, MatchArm, Pat}, SyntaxKind::WHITESPACE, }; @@ -44,18 +41,11 @@ pub(crate) fn move_guard_to_arm_body(acc: &mut Assists, ctx: &AssistContext) -> } let space_before_guard = guard.syntax().prev_sibling_or_token(); - // FIXME: support `if let` guards too - if guard.let_token().is_some() { - return None; - } - let guard_condition = guard.expr()?; + let guard_condition = guard.condition()?; let arm_expr = match_arm.expr()?; - let if_expr = make::expr_if( - make::condition(guard_condition, None), - make::block_expr(None, Some(arm_expr.clone())), - None, - ) - .indent(arm_expr.indent_level()); + let if_expr = + make::expr_if(guard_condition, make::block_expr(None, Some(arm_expr.clone())), None) + .indent(arm_expr.indent_level()); let target = guard.syntax().text_range(); acc.add( @@ -193,17 +183,13 @@ pub(crate) fn move_arm_cond_to_match_guard(acc: &mut Assists, ctx: &AssistContex ) } -// Parses an if-else-if chain to get the conditons and the then branches until we encounter 
an else +// Parses an if-else-if chain to get the conditions and the then branches until we encounter an else // branch or the end. -fn parse_if_chain(if_expr: IfExpr) -> Option<(Vec<(Condition, BlockExpr)>, Option)> { +fn parse_if_chain(if_expr: IfExpr) -> Option<(Vec<(Expr, BlockExpr)>, Option)> { let mut conds_blocks = Vec::new(); let mut curr_if = if_expr; let tail = loop { let cond = curr_if.condition()?; - // Not support moving if let to arm guard - if cond.is_pattern_cond() { - return None; - } conds_blocks.push((cond, curr_if.then_branch()?)); match curr_if.else_branch() { Some(ElseBranch::IfExpr(e)) => { @@ -280,6 +266,31 @@ fn main() { ); } + #[test] + fn move_let_guard_to_arm_body_works() { + check_assist( + move_guard_to_arm_body, + r#" +fn main() { + match 92 { + x $0if (let 1 = x) => false, + _ => true + } +} +"#, + r#" +fn main() { + match 92 { + x => if (let 1 = x) { + false + }, + _ => true + } +} +"#, + ); + } + #[test] fn move_guard_to_arm_body_works_complex_match() { check_assist( @@ -440,13 +451,21 @@ fn main() { } #[test] - fn move_arm_cond_to_match_guard_if_let_not_works() { - check_assist_not_applicable( + fn move_arm_cond_to_match_guard_if_let_works() { + check_assist( move_arm_cond_to_match_guard, r#" fn main() { match 92 { - x => if let 62 = x { $0false }, + x => if let 62 = x && true { $0false }, + _ => true + } +} +"#, + r#" +fn main() { + match 92 { + x if let 62 = x && true => false, _ => true } } @@ -898,7 +917,7 @@ fn main() { #[test] fn move_arm_cond_to_match_guard_elseif_iflet() { - check_assist_not_applicable( + check_assist( move_arm_cond_to_match_guard, r#" fn main() { @@ -915,9 +934,21 @@ fn main() { 4 }, } -} -"#, - ) +}"#, + r#" +fn main() { + match 92 { + 3 => 0, + x if x > 10 => 1, + x if x > 5 => 2, + x if let 4 = 4 => { + 42; + 3 + } + x => 4, + } +}"#, + ); } #[test] diff --git a/crates/ide_assists/src/handlers/replace_derive_with_manual_impl.rs b/crates/ide_assists/src/handlers/replace_derive_with_manual_impl.rs index 
b3723710a863..27f2960c7ed4 100644 --- a/crates/ide_assists/src/handlers/replace_derive_with_manual_impl.rs +++ b/crates/ide_assists/src/handlers/replace_derive_with_manual_impl.rs @@ -1,13 +1,13 @@ -use hir::ModuleDef; -use ide_db::helpers::insert_whitespace_into_node::insert_ws_into; -use ide_db::helpers::{ - get_path_at_cursor_in_tt, import_assets::NameToImport, mod_path_to_ast, - parse_tt_as_comma_sep_paths, +use hir::{InFile, ModuleDef}; +use ide_db::{ + helpers::{ + import_assets::NameToImport, insert_whitespace_into_node::insert_ws_into, mod_path_to_ast, + }, + items_locator, }; -use ide_db::items_locator; use itertools::Itertools; use syntax::{ - ast::{self, AstNode, AstToken, HasName}, + ast::{self, AstNode, HasName}, SyntaxKind::WHITESPACE, }; @@ -25,6 +25,7 @@ use crate::{ // Converts a `derive` impl into a manual one. // // ``` +// # //- minicore: derive // # trait Debug { fn fmt(&self, f: &mut Formatter) -> Result<()>; } // #[derive(Deb$0ug, Display)] // struct S; @@ -45,20 +46,30 @@ pub(crate) fn replace_derive_with_manual_impl( acc: &mut Assists, ctx: &AssistContext, ) -> Option<()> { - let attr = ctx.find_node_at_offset::()?; - let (name, args) = attr.as_simple_call()?; - if name != "derive" { + let attr = ctx.find_node_at_offset_with_descend::()?; + let path = attr.path()?; + let hir_file = ctx.sema.hir_file_for(attr.syntax()); + if !hir_file.is_derive_attr_pseudo_expansion(ctx.db()) { return None; } - if !args.syntax().text_range().contains(ctx.offset()) { - cov_mark::hit!(outside_of_attr_args); + let InFile { file_id, value } = hir_file.call_node(ctx.db())?; + if file_id.is_macro() { + // FIXME: make this work in macro files return None; } + // collect the derive paths from the #[derive] expansion + let current_derives = ctx + .sema + .parse_or_expand(hir_file)? 
+ .descendants() + .filter_map(ast::Attr::cast) + .filter_map(|attr| attr.path()) + .collect::>(); - let ident = args.syntax().token_at_offset(ctx.offset()).find_map(ast::Ident::cast)?; - let trait_path = get_path_at_cursor_in_tt(&ident)?; - let adt = attr.syntax().parent().and_then(ast::Adt::cast)?; + let adt = value.parent().and_then(ast::Adt::cast)?; + let attr = ast::Attr::cast(value)?; + let args = attr.token_tree()?; let current_module = ctx.sema.scope(adt.syntax()).module()?; let current_crate = current_module.krate(); @@ -66,7 +77,7 @@ pub(crate) fn replace_derive_with_manual_impl( let found_traits = items_locator::items_with_name( &ctx.sema, current_crate, - NameToImport::exact_case_sensitive(trait_path.segments().last()?.to_string()), + NameToImport::exact_case_sensitive(path.segments().last()?.to_string()), items_locator::AssocItemSearch::Exclude, Some(items_locator::DEFAULT_QUERY_SEARCH_LIMIT.inner()), ) @@ -83,8 +94,6 @@ pub(crate) fn replace_derive_with_manual_impl( }); let mut no_traits_found = true; - let current_derives = parse_tt_as_comma_sep_paths(args.clone())?; - let current_derives = current_derives.as_slice(); for (replace_trait_path, trait_) in found_traits.inspect(|_| no_traits_found = false) { add_assist( acc, @@ -92,14 +101,14 @@ pub(crate) fn replace_derive_with_manual_impl( &attr, ¤t_derives, &args, - &trait_path, + &path, &replace_trait_path, Some(trait_), &adt, )?; } if no_traits_found { - add_assist(acc, ctx, &attr, ¤t_derives, &args, &trait_path, &trait_path, None, &adt)?; + add_assist(acc, ctx, &attr, ¤t_derives, &args, &path, &path, None, &adt)?; } Some(()) } @@ -128,7 +137,7 @@ fn add_assist( let impl_def_with_items = impl_def_from_trait(&ctx.sema, adt, &annotated_name, trait_, replace_trait_path); update_attribute(builder, old_derives, old_tree, old_trait_path, attr); - let trait_path = format!("{}", replace_trait_path); + let trait_path = replace_trait_path.to_string(); match (ctx.config.snippet_cap, impl_def_with_items) { 
(None, _) => { builder.insert(insert_pos, generate_trait_impl_text(adt, &trait_path, "")) @@ -258,7 +267,7 @@ mod tests { check_assist( replace_derive_with_manual_impl, r#" -//- minicore: fmt +//- minicore: fmt, derive #[derive(Debu$0g)] struct Foo { bar: String, @@ -282,7 +291,7 @@ impl core::fmt::Debug for Foo { check_assist( replace_derive_with_manual_impl, r#" -//- minicore: fmt +//- minicore: fmt, derive #[derive(Debu$0g)] struct Foo(String, usize); "#, @@ -301,7 +310,7 @@ impl core::fmt::Debug for Foo { check_assist( replace_derive_with_manual_impl, r#" -//- minicore: fmt +//- minicore: fmt, derive #[derive(Debu$0g)] struct Foo; "#, @@ -321,7 +330,7 @@ impl core::fmt::Debug for Foo { check_assist( replace_derive_with_manual_impl, r#" -//- minicore: fmt +//- minicore: fmt, derive #[derive(Debu$0g)] enum Foo { Bar, @@ -351,7 +360,7 @@ impl core::fmt::Debug for Foo { check_assist( replace_derive_with_manual_impl, r#" -//- minicore: fmt +//- minicore: fmt, derive #[derive(Debu$0g)] enum Foo { Bar(usize, usize), @@ -380,7 +389,7 @@ impl core::fmt::Debug for Foo { check_assist( replace_derive_with_manual_impl, r#" -//- minicore: fmt +//- minicore: fmt, derive #[derive(Debu$0g)] enum Foo { Bar { @@ -415,7 +424,7 @@ impl core::fmt::Debug for Foo { check_assist( replace_derive_with_manual_impl, r#" -//- minicore: default +//- minicore: default, derive #[derive(Defau$0lt)] struct Foo { foo: usize, @@ -439,7 +448,7 @@ impl Default for Foo { check_assist( replace_derive_with_manual_impl, r#" -//- minicore: default +//- minicore: default, derive #[derive(Defau$0lt)] struct Foo(usize); "#, @@ -459,7 +468,7 @@ impl Default for Foo { check_assist( replace_derive_with_manual_impl, r#" -//- minicore: default +//- minicore: default, derive #[derive(Defau$0lt)] struct Foo; "#, @@ -480,7 +489,7 @@ impl Default for Foo { check_assist( replace_derive_with_manual_impl, r#" -//- minicore: hash +//- minicore: hash, derive #[derive(Has$0h)] struct Foo { bin: usize, @@ -508,7 +517,7 @@ 
impl core::hash::Hash for Foo { check_assist( replace_derive_with_manual_impl, r#" -//- minicore: hash +//- minicore: hash, derive #[derive(Has$0h)] struct Foo(usize, usize); "#, @@ -530,7 +539,7 @@ impl core::hash::Hash for Foo { check_assist( replace_derive_with_manual_impl, r#" -//- minicore: hash +//- minicore: hash, derive #[derive(Has$0h)] enum Foo { Bar, @@ -557,7 +566,7 @@ impl core::hash::Hash for Foo { check_assist( replace_derive_with_manual_impl, r#" -//- minicore: clone +//- minicore: clone, derive #[derive(Clo$0ne)] struct Foo { bin: usize, @@ -584,7 +593,7 @@ impl Clone for Foo { check_assist( replace_derive_with_manual_impl, r#" -//- minicore: clone +//- minicore: clone, derive #[derive(Clo$0ne)] struct Foo(usize, usize); "#, @@ -605,7 +614,7 @@ impl Clone for Foo { check_assist( replace_derive_with_manual_impl, r#" -//- minicore: clone +//- minicore: clone, derive #[derive(Clo$0ne)] struct Foo; "#, @@ -626,7 +635,7 @@ impl Clone for Foo { check_assist( replace_derive_with_manual_impl, r#" -//- minicore: clone +//- minicore: clone, derive #[derive(Clo$0ne)] enum Foo { Bar, @@ -656,7 +665,7 @@ impl Clone for Foo { check_assist( replace_derive_with_manual_impl, r#" -//- minicore: clone +//- minicore: clone, derive #[derive(Clo$0ne)] enum Foo { Bar(String), @@ -686,7 +695,7 @@ impl Clone for Foo { check_assist( replace_derive_with_manual_impl, r#" -//- minicore: clone +//- minicore: clone, derive #[derive(Clo$0ne)] enum Foo { Bar { @@ -720,7 +729,7 @@ impl Clone for Foo { check_assist( replace_derive_with_manual_impl, r#" -//- minicore: ord +//- minicore: ord, derive #[derive(Partial$0Ord)] struct Foo { bin: usize, @@ -745,7 +754,7 @@ impl PartialOrd for Foo { check_assist( replace_derive_with_manual_impl, r#" -//- minicore: ord +//- minicore: ord, derive #[derive(Partial$0Ord)] struct Foo { bin: usize, @@ -782,7 +791,7 @@ impl PartialOrd for Foo { check_assist( replace_derive_with_manual_impl, r#" -//- minicore: ord +//- minicore: ord, derive 
#[derive(Partial$0Ord)] struct Foo(usize, usize, usize); "#, @@ -811,7 +820,7 @@ impl PartialOrd for Foo { check_assist( replace_derive_with_manual_impl, r#" -//- minicore: eq +//- minicore: eq, derive #[derive(Partial$0Eq)] struct Foo { bin: usize, @@ -838,7 +847,7 @@ impl PartialEq for Foo { check_assist( replace_derive_with_manual_impl, r#" -//- minicore: eq +//- minicore: eq, derive #[derive(Partial$0Eq)] struct Foo(usize, usize); "#, @@ -859,7 +868,7 @@ impl PartialEq for Foo { check_assist( replace_derive_with_manual_impl, r#" -//- minicore: eq +//- minicore: eq, derive #[derive(Partial$0Eq)] struct Foo; "#, @@ -880,7 +889,7 @@ impl PartialEq for Foo { check_assist( replace_derive_with_manual_impl, r#" -//- minicore: eq +//- minicore: eq, derive #[derive(Partial$0Eq)] enum Foo { Bar, @@ -907,7 +916,7 @@ impl PartialEq for Foo { check_assist( replace_derive_with_manual_impl, r#" -//- minicore: eq +//- minicore: eq, derive #[derive(Partial$0Eq)] enum Foo { Bar(String), @@ -937,7 +946,7 @@ impl PartialEq for Foo { check_assist( replace_derive_with_manual_impl, r#" -//- minicore: eq +//- minicore: eq, derive #[derive(Partial$0Eq)] enum Foo { Bar { @@ -981,6 +990,7 @@ impl PartialEq for Foo { check_assist( replace_derive_with_manual_impl, r#" +//- minicore: derive mod foo { pub trait Bar { type Qux; @@ -1026,10 +1036,11 @@ impl foo::Bar for Foo { ) } #[test] - fn add_custom_impl_for_unique_input() { + fn add_custom_impl_for_unique_input_unknown() { check_assist( replace_derive_with_manual_impl, r#" +//- minicore: derive #[derive(Debu$0g)] struct Foo { bar: String, @@ -1052,6 +1063,7 @@ impl Debug for Foo { check_assist( replace_derive_with_manual_impl, r#" +//- minicore: derive #[derive(Debug$0)] pub struct Foo { bar: String, @@ -1074,6 +1086,7 @@ impl Debug for Foo { check_assist( replace_derive_with_manual_impl, r#" +//- minicore: derive #[derive(Display, Debug$0, Serialize)] struct Foo {} "#, @@ -1093,7 +1106,7 @@ impl Debug for Foo { check_assist( 
replace_derive_with_manual_impl, r#" -//- minicore: default +//- minicore: default, derive #[derive(Defau$0lt)] struct Foo { foo: T, @@ -1120,7 +1133,7 @@ impl Default for Foo { check_assist( replace_derive_with_manual_impl, r#" -//- minicore: clone +//- minicore: clone, derive #[derive(Clo$0ne)] struct Foo(T, usize); "#, @@ -1141,6 +1154,7 @@ impl Clone for Foo { check_assist_not_applicable( replace_derive_with_manual_impl, r#" +//- minicore: derive #[derive($0)] struct Foo {} "#, @@ -1152,6 +1166,7 @@ struct Foo {} check_assist_not_applicable( replace_derive_with_manual_impl, r#" +//- minicore: derive, fmt #[derive$0(Debug)] struct Foo {} "#, @@ -1160,6 +1175,7 @@ struct Foo {} check_assist_not_applicable( replace_derive_with_manual_impl, r#" +//- minicore: derive, fmt #[derive(Debug)$0] struct Foo {} "#, @@ -1171,6 +1187,7 @@ struct Foo {} check_assist_not_applicable( replace_derive_with_manual_impl, r#" +//- minicore: derive #[allow(non_camel_$0case_types)] struct Foo {} "#, @@ -1179,10 +1196,10 @@ struct Foo {} #[test] fn works_at_start_of_file() { - cov_mark::check!(outside_of_attr_args); check_assist_not_applicable( replace_derive_with_manual_impl, r#" +//- minicore: derive, fmt $0#[derive(Debug)] struct S; "#, @@ -1194,7 +1211,7 @@ struct S; check_assist( replace_derive_with_manual_impl, r#" -//- minicore: clone +//- minicore: clone, derive #[derive(std::fmt::Debug, Clo$0ne)] pub struct Foo; "#, @@ -1216,7 +1233,7 @@ impl Clone for Foo { check_assist( replace_derive_with_manual_impl, r#" -//- minicore: fmt +//- minicore: fmt, derive #[derive(core::fmt::Deb$0ug, Clone)] pub struct Foo; "#, diff --git a/crates/ide_assists/src/handlers/replace_if_let_with_match.rs b/crates/ide_assists/src/handlers/replace_if_let_with_match.rs index 779093479278..b594c64c412d 100644 --- a/crates/ide_assists/src/handlers/replace_if_let_with_match.rs +++ b/crates/ide_assists/src/handlers/replace_if_let_with_match.rs @@ -1,7 +1,12 @@ use std::iter::{self, successors}; use 
either::Either; -use ide_db::{defs::NameClass, ty_filter::TryEnum, RootDatabase}; +use ide_db::{ + defs::NameClass, + helpers::node_ext::{is_pattern_cond, single_let}, + ty_filter::TryEnum, + RootDatabase, +}; use syntax::{ ast::{ self, @@ -60,15 +65,22 @@ pub(crate) fn replace_if_let_with_match(acc: &mut Assists, ctx: &AssistContext) None } }); - let scrutinee_to_be_expr = if_expr.condition()?.expr()?; + let scrutinee_to_be_expr = if_expr.condition()?; + let scrutinee_to_be_expr = match single_let(scrutinee_to_be_expr.clone()) { + Some(cond) => cond.expr()?, + None => scrutinee_to_be_expr, + }; let mut pat_seen = false; let mut cond_bodies = Vec::new(); for if_expr in if_exprs { let cond = if_expr.condition()?; - let expr = cond.expr()?; - let cond = match cond.pat() { - Some(pat) => { + let cond = match single_let(cond.clone()) { + Some(let_) => { + let pat = let_.pat()?; + let expr = let_.expr()?; + // FIXME: If one `let` is wrapped in parentheses and the second is not, + // we'll exit here. if scrutinee_to_be_expr.syntax().text() != expr.syntax().text() { // Only if all condition expressions are equal we can merge them into a match return None; @@ -76,7 +88,9 @@ pub(crate) fn replace_if_let_with_match(acc: &mut Assists, ctx: &AssistContext) pat_seen = true; Either::Left(pat) } - None => Either::Right(expr), + // Multiple `let`, unsupported. 
+ None if is_pattern_cond(cond.clone()) => return None, + None => Either::Right(cond), }; let body = if_expr.then_branch()?; cond_bodies.push((cond, body)); @@ -217,11 +231,11 @@ pub(crate) fn replace_match_with_if_let(acc: &mut Assists, ctx: &AssistContext) } } - let condition = make::condition(scrutinee, Some(if_let_pat)); + let condition = make::expr_let(if_let_pat, scrutinee); let then_block = make_block_expr(then_expr.reset_indent()); let else_expr = if is_empty_expr(&else_expr) { None } else { Some(else_expr) }; let if_let_expr = make::expr_if( - condition, + condition.into(), then_block, else_expr.map(make_block_expr).map(ast::ElseBranch::Block), ) @@ -372,6 +386,18 @@ impl VariantData { ) } + #[test] + fn test_if_let_with_match_let_chain() { + check_assist_not_applicable( + replace_if_let_with_match, + r#" +fn main() { + if $0let true = true && let Some(1) = None {} +} +"#, + ) + } + #[test] fn test_if_let_with_match_basic() { check_assist( diff --git a/crates/ide_assists/src/handlers/replace_let_with_if_let.rs b/crates/ide_assists/src/handlers/replace_let_with_if_let.rs index 1062cc395378..a5fa8a110d31 100644 --- a/crates/ide_assists/src/handlers/replace_let_with_if_let.rs +++ b/crates/ide_assists/src/handlers/replace_let_with_if_let.rs @@ -62,7 +62,7 @@ pub(crate) fn replace_let_with_if_let(acc: &mut Assists, ctx: &AssistContext) -> let block = make::ext::empty_block_expr().indent(IndentLevel::from_node(let_stmt.syntax())); - let if_ = make::expr_if(make::condition(init, Some(pat)), block, None); + let if_ = make::expr_if(make::expr_let(pat, init).into(), block, None); let stmt = make::expr_stmt(if_); edit.replace_ast(ast::Stmt::from(let_stmt), ast::Stmt::from(stmt)); diff --git a/crates/ide_assists/src/handlers/replace_qualified_name_with_use.rs b/crates/ide_assists/src/handlers/replace_qualified_name_with_use.rs index 3121e2298178..71c674a8dd74 100644 --- a/crates/ide_assists/src/handlers/replace_qualified_name_with_use.rs +++ 
b/crates/ide_assists/src/handlers/replace_qualified_name_with_use.rs @@ -84,7 +84,8 @@ pub(crate) fn replace_qualified_name_with_use( ImportScope::Module(it) => ImportScope::Module(builder.make_mut(it)), ImportScope::Block(it) => ImportScope::Block(builder.make_mut(it)), }; - shorten_paths(scope.as_syntax_node(), &path.clone_for_update()); + shorten_paths(scope.as_syntax_node(), &path); + let path = drop_generic_args(&path); // stick the found import in front of the to be replaced path let path = match path_to_qualifier.and_then(|it| mod_path_to_ast(&it).qualifier()) { Some(qualifier) => make::path_concat(qualifier, path), @@ -95,7 +96,17 @@ pub(crate) fn replace_qualified_name_with_use( ) } -/// Adds replacements to `re` that shorten `path` in all descendants of `node`. +fn drop_generic_args(path: &ast::Path) -> ast::Path { + let path = path.clone_for_update(); + if let Some(segment) = path.segment() { + if let Some(generic_args) = segment.generic_arg_list() { + ted::remove(generic_args.syntax()); + } + } + path +} + +/// Mutates `node` to shorten `path` in all descendants of `node`. fn shorten_paths(node: &SyntaxNode, path: &ast::Path) { for child in node.children() { match_ast! 
{ @@ -389,6 +400,37 @@ mod std { fn main() { drop(0); } +", + ); + } + + #[test] + fn replace_should_drop_generic_args_in_use() { + check_assist( + replace_qualified_name_with_use, + r" +mod std { + pub mod mem { + pub fn drop(_: T) {} + } +} + +fn main() { + std::mem::drop::$0(0); +} +", + r" +use std::mem::drop; + +mod std { + pub mod mem { + pub fn drop(_: T) {} + } +} + +fn main() { + drop::(0); +} ", ); } diff --git a/crates/ide_assists/src/tests/generated.rs b/crates/ide_assists/src/tests/generated.rs index 0ad4b3bc345c..485b807d0556 100644 --- a/crates/ide_assists/src/tests/generated.rs +++ b/crates/ide_assists/src/tests/generated.rs @@ -1766,6 +1766,7 @@ fn doctest_replace_derive_with_manual_impl() { check_doc_test( "replace_derive_with_manual_impl", r#####" +//- minicore: derive trait Debug { fn fmt(&self, f: &mut Formatter) -> Result<()>; } #[derive(Deb$0ug, Display)] struct S; diff --git a/crates/ide_completion/src/completions/dot.rs b/crates/ide_completion/src/completions/dot.rs index 3bb3f883cdb9..981e25b55c7b 100644 --- a/crates/ide_completion/src/completions/dot.rs +++ b/crates/ide_completion/src/completions/dot.rs @@ -74,30 +74,28 @@ fn complete_methods( receiver: &hir::Type, mut f: impl FnMut(hir::Function), ) { - if let Some(krate) = ctx.krate { - let mut seen_methods = FxHashSet::default(); - let mut traits_in_scope = ctx.scope.visible_traits(); - - // Remove drop from the environment as calling `Drop::drop` is not allowed - if let Some(drop_trait) = ctx.famous_defs().core_ops_Drop() { - cov_mark::hit!(dot_remove_drop_trait); - traits_in_scope.remove(&drop_trait.into()); - } - - receiver.iterate_method_candidates( - ctx.db, - krate, - &traits_in_scope, - ctx.module, - None, - |_ty, func| { - if func.self_param(ctx.db).is_some() && seen_methods.insert(func.name(ctx.db)) { - f(func); - } - None::<()> - }, - ); - } + let mut seen_methods = FxHashSet::default(); + let mut traits_in_scope = ctx.scope.visible_traits(); + + // Remove drop from the 
environment as calling `Drop::drop` is not allowed + if let Some(drop_trait) = ctx.famous_defs().core_ops_Drop() { + cov_mark::hit!(dot_remove_drop_trait); + traits_in_scope.remove(&drop_trait.into()); + } + + receiver.iterate_method_candidates( + ctx.db, + &ctx.scope, + &traits_in_scope, + ctx.module, + None, + |func| { + if func.self_param(ctx.db).is_some() && seen_methods.insert(func.name(ctx.db)) { + f(func); + } + None::<()> + }, + ); } #[cfg(test)] diff --git a/crates/ide_completion/src/completions/pattern.rs b/crates/ide_completion/src/completions/pattern.rs index f1b4fa720591..c8a9cf21da12 100644 --- a/crates/ide_completion/src/completions/pattern.rs +++ b/crates/ide_completion/src/completions/pattern.rs @@ -134,39 +134,37 @@ fn pattern_path_completion( .for_each(|variant| acc.add_enum_variant(ctx, variant, None)); } res @ (hir::PathResolution::TypeParam(_) | hir::PathResolution::SelfType(_)) => { - if let Some(krate) = ctx.krate { - let ty = match res { - hir::PathResolution::TypeParam(param) => param.ty(ctx.db), - hir::PathResolution::SelfType(impl_def) => impl_def.self_ty(ctx.db), - _ => return, - }; + let ty = match res { + hir::PathResolution::TypeParam(param) => param.ty(ctx.db), + hir::PathResolution::SelfType(impl_def) => impl_def.self_ty(ctx.db), + _ => return, + }; - if let Some(hir::Adt::Enum(e)) = ty.as_adt() { - e.variants(ctx.db) - .into_iter() - .for_each(|variant| acc.add_enum_variant(ctx, variant, None)); - } + if let Some(hir::Adt::Enum(e)) = ty.as_adt() { + e.variants(ctx.db) + .into_iter() + .for_each(|variant| acc.add_enum_variant(ctx, variant, None)); + } - let traits_in_scope = ctx.scope.visible_traits(); - let mut seen = FxHashSet::default(); - ty.iterate_path_candidates( - ctx.db, - krate, - &traits_in_scope, - ctx.module, - None, - |_ty, item| { - // Note associated consts cannot be referenced in patterns - if let AssocItem::TypeAlias(ta) = item { - // We might iterate candidates of a trait multiple times here, so deduplicate them. 
- if seen.insert(item) { - acc.add_type_alias(ctx, ta); - } + let traits_in_scope = ctx.scope.visible_traits(); + let mut seen = FxHashSet::default(); + ty.iterate_path_candidates( + ctx.db, + &ctx.scope, + &traits_in_scope, + ctx.module, + None, + |item| { + // Note associated consts cannot be referenced in patterns + if let AssocItem::TypeAlias(ta) = item { + // We might iterate candidates of a trait multiple times here, so deduplicate them. + if seen.insert(item) { + acc.add_type_alias(ctx, ta); } - None::<()> - }, - ); - } + } + None::<()> + }, + ); } _ => {} } diff --git a/crates/ide_completion/src/completions/qualified_path.rs b/crates/ide_completion/src/completions/qualified_path.rs index cf78f7c1adf4..d63aacbadbc9 100644 --- a/crates/ide_completion/src/completions/qualified_path.rs +++ b/crates/ide_completion/src/completions/qualified_path.rs @@ -138,11 +138,11 @@ pub(crate) fn complete_qualified_path(acc: &mut Completions, ctx: &CompletionCon let traits_in_scope = ctx.scope.visible_traits(); ty.iterate_path_candidates( ctx.db, - krate, + &ctx.scope, &traits_in_scope, ctx.module, None, - |_ty, item| { + |item| { add_assoc_item(acc, ctx, item); None::<()> }, @@ -164,35 +164,33 @@ pub(crate) fn complete_qualified_path(acc: &mut Completions, ctx: &CompletionCon } } hir::PathResolution::TypeParam(_) | hir::PathResolution::SelfType(_) => { - if let Some(krate) = ctx.krate { - let ty = match resolution { - hir::PathResolution::TypeParam(param) => param.ty(ctx.db), - hir::PathResolution::SelfType(impl_def) => impl_def.self_ty(ctx.db), - _ => return, - }; - - if let Some(hir::Adt::Enum(e)) = ty.as_adt() { - add_enum_variants(acc, ctx, e); - } + let ty = match resolution { + hir::PathResolution::TypeParam(param) => param.ty(ctx.db), + hir::PathResolution::SelfType(impl_def) => impl_def.self_ty(ctx.db), + _ => return, + }; - let traits_in_scope = ctx.scope.visible_traits(); - let mut seen = FxHashSet::default(); - ty.iterate_path_candidates( - ctx.db, - krate, - 
&traits_in_scope, - ctx.module, - None, - |_ty, item| { - // We might iterate candidates of a trait multiple times here, so deduplicate - // them. - if seen.insert(item) { - add_assoc_item(acc, ctx, item); - } - None::<()> - }, - ); + if let Some(hir::Adt::Enum(e)) = ty.as_adt() { + add_enum_variants(acc, ctx, e); } + + let traits_in_scope = ctx.scope.visible_traits(); + let mut seen = FxHashSet::default(); + ty.iterate_path_candidates( + ctx.db, + &ctx.scope, + &traits_in_scope, + ctx.module, + None, + |item| { + // We might iterate candidates of a trait multiple times here, so deduplicate + // them. + if seen.insert(item) { + add_assoc_item(acc, ctx, item); + } + None::<()> + }, + ); } _ => {} } diff --git a/crates/ide_completion/src/completions/record.rs b/crates/ide_completion/src/completions/record.rs index 13b47356197c..78d06231060d 100644 --- a/crates/ide_completion/src/completions/record.rs +++ b/crates/ide_completion/src/completions/record.rs @@ -63,11 +63,12 @@ pub(crate) fn complete_record_literal( } if let hir::Adt::Struct(strukt) = ctx.expected_type.as_ref()?.as_adt()? 
{ - let module = if let Some(module) = ctx.module { module } else { strukt.module(ctx.db) }; + if ctx.path_qual().is_none() { + let module = if let Some(module) = ctx.module { module } else { strukt.module(ctx.db) }; + let path = module.find_use_path(ctx.db, hir::ModuleDef::from(strukt)); - let path = module.find_use_path(ctx.db, hir::ModuleDef::from(strukt)); - - acc.add_struct_literal(ctx, strukt, path, None); + acc.add_struct_literal(ctx, strukt, path, None); + } } Some(()) diff --git a/crates/ide_completion/src/completions/trait_impl.rs b/crates/ide_completion/src/completions/trait_impl.rs index 4b1de8058de0..0d59f77a55c9 100644 --- a/crates/ide_completion/src/completions/trait_impl.rs +++ b/crates/ide_completion/src/completions/trait_impl.rs @@ -58,15 +58,15 @@ pub(crate) fn complete_trait_impl(acc: &mut Completions, ctx: &CompletionContext ( hir::AssocItem::Function(fn_item), ImplCompletionKind::All | ImplCompletionKind::Fn, - ) => add_function_impl(&trigger, acc, ctx, fn_item, hir_impl), + ) => add_function_impl(acc, ctx, &trigger, fn_item, hir_impl), ( hir::AssocItem::TypeAlias(type_item), ImplCompletionKind::All | ImplCompletionKind::TypeAlias, - ) => add_type_alias_impl(&trigger, acc, ctx, type_item), + ) => add_type_alias_impl(acc, ctx, &trigger, type_item), ( hir::AssocItem::Const(const_item), ImplCompletionKind::All | ImplCompletionKind::Const, - ) => add_const_impl(&trigger, acc, ctx, const_item, hir_impl), + ) => add_const_impl(acc, ctx, &trigger, const_item, hir_impl), _ => {} } }); @@ -126,9 +126,9 @@ fn completion_match(mut token: SyntaxToken) -> Option<(ImplCompletionKind, Synta } fn add_function_impl( - fn_def_node: &SyntaxNode, acc: &mut Completions, ctx: &CompletionContext, + fn_def_node: &SyntaxNode, func: hir::Function, impl_def: hir::Impl, ) { @@ -199,9 +199,9 @@ fn get_transformed_assoc_item( } fn add_type_alias_impl( - type_def_node: &SyntaxNode, acc: &mut Completions, ctx: &CompletionContext, + type_def_node: &SyntaxNode, type_alias: 
hir::TypeAlias, ) { let alias_name = type_alias.name(ctx.db).to_smol_str(); @@ -217,9 +217,9 @@ fn add_type_alias_impl( } fn add_const_impl( - const_def_node: &SyntaxNode, acc: &mut Completions, ctx: &CompletionContext, + const_def_node: &SyntaxNode, const_: hir::Const, impl_def: hir::Impl, ) { diff --git a/crates/ide_completion/src/config.rs b/crates/ide_completion/src/config.rs index 5e5c7efdfb92..c4e91e72830b 100644 --- a/crates/ide_completion/src/config.rs +++ b/crates/ide_completion/src/config.rs @@ -13,6 +13,7 @@ pub struct CompletionConfig { pub enable_postfix_completions: bool, pub enable_imports_on_the_fly: bool, pub enable_self_on_the_fly: bool, + pub enable_private_editable: bool, pub add_call_parenthesis: bool, pub add_call_argument_snippets: bool, pub snippet_cap: Option, diff --git a/crates/ide_completion/src/context.rs b/crates/ide_completion/src/context.rs index d711215491c1..c4e145ffcb55 100644 --- a/crates/ide_completion/src/context.rs +++ b/crates/ide_completion/src/context.rs @@ -360,6 +360,9 @@ impl<'a> CompletionContext<'a> { None => return Visible::No, }; if !vis.is_visible_from(self.db, module.into()) { + if !self.config.enable_private_editable { + return Visible::No; + } // If the definition location is editable, also show private items let root_file = defining_crate.root_file(self.db); let source_root_id = self.db.file_source_root(root_file); @@ -575,6 +578,14 @@ impl<'a> CompletionContext<'a> { (ty, name) }, + ast::LetExpr(it) => { + cov_mark::hit!(expected_type_if_let_without_leading_char); + let ty = it.pat() + .and_then(|pat| self.sema.type_of_pat(&pat)) + .or_else(|| it.expr().and_then(|it| self.sema.type_of_expr(&it))) + .map(TypeInfo::original); + (ty, None) + }, ast::ArgList(_) => { cov_mark::hit!(expected_type_fn_param); ActiveParameter::at_token( @@ -641,9 +652,7 @@ impl<'a> CompletionContext<'a> { (ty, None) }, ast::IfExpr(it) => { - cov_mark::hit!(expected_type_if_let_without_leading_char); let ty = it.condition() - 
.and_then(|cond| cond.expr()) .and_then(|e| self.sema.type_of_expr(&e)) .map(TypeInfo::original); (ty, None) @@ -939,7 +948,7 @@ fn pattern_context_for(original_file: &SyntaxNode, pat: ast::Pat) -> PatternCont return (PatternRefutability::Irrefutable, has_type_ascription) }, ast::MatchArm(_) => PatternRefutability::Refutable, - ast::Condition(_) => PatternRefutability::Refutable, + ast::LetExpr(_) => PatternRefutability::Refutable, ast::ForExpr(_) => PatternRefutability::Irrefutable, _ => PatternRefutability::Irrefutable, } diff --git a/crates/ide_completion/src/tests.rs b/crates/ide_completion/src/tests.rs index bb10929c1ee2..655f24aeb1b0 100644 --- a/crates/ide_completion/src/tests.rs +++ b/crates/ide_completion/src/tests.rs @@ -64,6 +64,7 @@ pub(crate) const TEST_CONFIG: CompletionConfig = CompletionConfig { enable_postfix_completions: true, enable_imports_on_the_fly: true, enable_self_on_the_fly: true, + enable_private_editable: true, add_call_parenthesis: true, add_call_argument_snippets: true, snippet_cap: SnippetCap::new(true), diff --git a/crates/ide_completion/src/tests/attribute.rs b/crates/ide_completion/src/tests/attribute.rs index 2e643453afc6..ae7ba7e055ce 100644 --- a/crates/ide_completion/src/tests/attribute.rs +++ b/crates/ide_completion/src/tests/attribute.rs @@ -735,6 +735,26 @@ mod derive { ) } + #[test] + fn derive_no_attrs() { + check_derive( + r#" +//- proc_macros: identity +//- minicore: derive +#[derive($0)] struct Test; +"#, + expect![[r#""#]], + ); + check_derive( + r#" +//- proc_macros: identity +//- minicore: derive +#[derive(i$0)] struct Test; +"#, + expect![[r#""#]], + ); + } + #[test] fn derive_flyimport() { check_derive( diff --git a/crates/ide_db/src/defs.rs b/crates/ide_db/src/defs.rs index 172acdbc3c40..e41c97ea9532 100644 --- a/crates/ide_db/src/defs.rs +++ b/crates/ide_db/src/defs.rs @@ -14,7 +14,7 @@ use hir::{ use stdx::impl_from; use syntax::{ ast::{self, AstNode}, - match_ast, AstToken, SyntaxKind, SyntaxNode, SyntaxToken, 
+ match_ast, SyntaxKind, SyntaxNode, SyntaxToken, }; use crate::RootDatabase; @@ -106,7 +106,7 @@ impl Definition { Definition::TypeAlias(it) => it.name(db), Definition::BuiltinType(it) => it.name(), Definition::SelfType(_) => return None, - Definition::Local(it) => it.name(db)?, + Definition::Local(it) => it.name(db), Definition::GenericParam(it) => it.name(db), Definition::Label(it) => it.name(db), Definition::BuiltinAttr(_) => return None, // FIXME @@ -142,16 +142,6 @@ impl IdentClass { token: &SyntaxToken, ) -> Option { let parent = token.parent()?; - // resolve derives if possible - if let Some(ident) = ast::Ident::cast(token.clone()) { - let attr = ast::TokenTree::cast(parent.clone()) - .and_then(|tt| tt.parent_meta()) - .and_then(|meta| meta.parent_attr()); - if let Some(attr) = attr { - return NameRefClass::classify_derive(sema, &attr, &ident) - .map(IdentClass::NameRefClass); - } - } Self::classify_node(sema, &parent) } @@ -216,8 +206,23 @@ impl NameClass { let parent = name.syntax().parent()?; - let def = if let Some(item) = ast::Item::cast(parent.clone()) { - match item { + let definition = match_ast! { + match parent { + ast::Item(it) => classify_item(sema, it)?, + ast::IdentPat(it) => return classify_ident_pat(sema, it), + ast::Rename(it) => classify_rename(sema, it)?, + ast::SelfParam(it) => Definition::Local(sema.to_def(&it)?), + ast::RecordField(it) => Definition::Field(sema.to_def(&it)?), + ast::Variant(it) => Definition::Variant(sema.to_def(&it)?), + ast::TypeParam(it) => Definition::GenericParam(sema.to_def(&it)?.into()), + ast::ConstParam(it) => Definition::GenericParam(sema.to_def(&it)?.into()), + _ => return None, + } + }; + return Some(NameClass::Definition(definition)); + + fn classify_item(sema: &Semantics, item: ast::Item) -> Option { + let definition = match item { ast::Item::MacroRules(it) => { Definition::Macro(sema.to_def(&ast::Macro::MacroRules(it))?) 
} @@ -225,7 +230,12 @@ impl NameClass { Definition::Macro(sema.to_def(&ast::Macro::MacroDef(it))?) } ast::Item::Const(it) => Definition::Const(sema.to_def(&it)?), - ast::Item::Fn(it) => Definition::Function(sema.to_def(&it)?), + ast::Item::Fn(it) => { + let def = sema.to_def(&it)?; + def.as_proc_macro(sema.db) + .map(Definition::Macro) + .unwrap_or(Definition::Function(def)) + } ast::Item::Module(it) => Definition::Module(sema.to_def(&it)?), ast::Item::Static(it) => Definition::Static(sema.to_def(&it)?), ast::Item::Trait(it) => Definition::Trait(sema.to_def(&it)?), @@ -234,14 +244,20 @@ impl NameClass { ast::Item::Struct(it) => Definition::Adt(hir::Adt::Struct(sema.to_def(&it)?)), ast::Item::Union(it) => Definition::Adt(hir::Adt::Union(sema.to_def(&it)?)), _ => return None, - } - } else if let Some(it) = ast::IdentPat::cast(parent.clone()) { - if let Some(def) = sema.resolve_bind_pat_to_const(&it) { + }; + Some(definition) + } + + fn classify_ident_pat( + sema: &Semantics, + ident_pat: ast::IdentPat, + ) -> Option { + if let Some(def) = sema.resolve_bind_pat_to_const(&ident_pat) { return Some(NameClass::ConstReference(Definition::from(def))); } - let local = sema.to_def(&it)?; - let pat_parent = it.syntax().parent(); + let local = sema.to_def(&ident_pat)?; + let pat_parent = ident_pat.syntax().parent(); if let Some(record_pat_field) = pat_parent.and_then(ast::RecordPatField::cast) { if record_pat_field.name_ref().is_none() { if let Some(field) = sema.resolve_record_pat_field(&record_pat_field) { @@ -252,57 +268,23 @@ impl NameClass { } } } + Some(NameClass::Definition(Definition::Local(local))) + } - Definition::Local(local) - } else if let Some(it) = ast::Rename::cast(parent.clone()) { - if let Some(use_tree) = it.syntax().parent().and_then(ast::UseTree::cast) { + fn classify_rename( + sema: &Semantics, + rename: ast::Rename, + ) -> Option { + if let Some(use_tree) = rename.syntax().parent().and_then(ast::UseTree::cast) { let path = use_tree.path()?; - let 
path_segment = path.segment()?; - let name_ref = path_segment.name_ref()?; - let name_ref = if name_ref.self_token().is_some() { - use_tree - .syntax() - .parent() - .as_ref() - // Skip over UseTreeList - .and_then(|it| { - let use_tree = it.parent().and_then(ast::UseTree::cast)?; - let path = use_tree.path()?; - let path_segment = path.segment()?; - path_segment.name_ref() - }) - .unwrap_or(name_ref) - } else { - name_ref - }; - let name_ref_class = NameRefClass::classify(sema, &name_ref)?; - - match name_ref_class { - NameRefClass::Definition(def) => def, - NameRefClass::FieldShorthand { local_ref: _, field_ref } => { - Definition::Field(field_ref) - } - } + sema.resolve_path(&path).map(Definition::from) } else { - let extern_crate = it.syntax().parent().and_then(ast::ExternCrate::cast)?; + let extern_crate = rename.syntax().parent().and_then(ast::ExternCrate::cast)?; let krate = sema.resolve_extern_crate(&extern_crate)?; let root_module = krate.root_module(sema.db); - Definition::Module(root_module) - } - } else { - match_ast! { - match parent { - ast::SelfParam(it) => Definition::Local(sema.to_def(&it)?), - ast::RecordField(it) => Definition::Field(sema.to_def(&it)?), - ast::Variant(it) => Definition::Variant(sema.to_def(&it)?), - ast::TypeParam(it) => Definition::GenericParam(sema.to_def(&it)?.into()), - ast::ConstParam(it) => Definition::GenericParam(sema.to_def(&it)?.into()), - _ => return None, - } + Some(Definition::Module(root_module)) } - }; - - Some(NameClass::Definition(def)) + } } pub fn classify_lifetime( @@ -312,19 +294,14 @@ impl NameClass { let _p = profile::span("classify_lifetime").detail(|| lifetime.to_string()); let parent = lifetime.syntax().parent()?; - match_ast! 
{ - match parent { - ast::LifetimeParam(it) => { - let def = sema.to_def(&it)?; - Some(NameClass::Definition(Definition::GenericParam(def.into()))) - }, - ast::Label(it) => { - let def = sema.to_def(&it)?; - Some(NameClass::Definition(Definition::Label(def))) - }, - _ => None, - } + if let Some(it) = ast::LifetimeParam::cast(parent.clone()) { + sema.to_def(&it).map(Into::into).map(Definition::GenericParam) + } else if let Some(it) = ast::Label::cast(parent.clone()) { + sema.to_def(&it).map(Definition::Label) + } else { + None } + .map(NameClass::Definition) } } @@ -456,14 +433,6 @@ impl NameRefClass { _ => None, } } - - pub fn classify_derive( - sema: &Semantics, - attr: &ast::Attr, - ident: &ast::Ident, - ) -> Option { - sema.resolve_derive_ident(&attr, &ident).map(Definition::from).map(NameRefClass::Definition) - } } impl_from!( diff --git a/crates/ide_db/src/helpers.rs b/crates/ide_db/src/helpers.rs index 2d3d64093385..fcad17298483 100644 --- a/crates/ide_db/src/helpers.rs +++ b/crates/ide_db/src/helpers.rs @@ -9,14 +9,14 @@ pub mod node_ext; pub mod rust_doc; pub mod format_string; -use std::{collections::VecDeque, iter}; +use std::collections::VecDeque; use base_db::FileId; -use hir::{ItemInNs, MacroDef, ModuleDef, Name, PathResolution, Semantics}; +use hir::{ItemInNs, MacroDef, ModuleDef, Name, Semantics}; use itertools::Itertools; use syntax::{ ast::{self, make, HasLoopBody}, - AstNode, AstToken, Direction, SyntaxElement, SyntaxKind, SyntaxToken, TokenAtOffset, WalkEvent, + AstNode, AstToken, Preorder, RustLanguage, SyntaxKind, SyntaxToken, TokenAtOffset, WalkEvent, T, }; @@ -32,49 +32,6 @@ pub fn item_name(db: &RootDatabase, item: ItemInNs) -> Option { } } -/// Parses and returns the derive path at the cursor position in the given attribute, if it is a derive. -/// This special case is required because the derive macro is a compiler builtin that discards the input derives. 
-/// -/// The returned path is synthesized from TokenTree tokens and as such cannot be used with the [`Semantics`]. -pub fn get_path_in_derive_attr( - sema: &hir::Semantics, - attr: &ast::Attr, - cursor: &ast::Ident, -) -> Option { - let path = attr.path()?; - let tt = attr.token_tree()?; - if !tt.syntax().text_range().contains_range(cursor.syntax().text_range()) { - return None; - } - let scope = sema.scope(attr.syntax()); - let resolved_attr = sema.resolve_path(&path)?; - let derive = FamousDefs(sema, scope.krate()).core_macros_builtin_derive()?; - if PathResolution::Macro(derive) != resolved_attr { - return None; - } - get_path_at_cursor_in_tt(cursor) -} - -/// Parses the path the identifier is part of inside a token tree. -pub fn get_path_at_cursor_in_tt(cursor: &ast::Ident) -> Option { - let cursor = cursor.syntax(); - let first = cursor - .siblings_with_tokens(Direction::Prev) - .filter_map(SyntaxElement::into_token) - .take_while(|tok| tok.kind() != T!['('] && tok.kind() != T![,]) - .last()?; - let path_tokens = first - .siblings_with_tokens(Direction::Next) - .filter_map(SyntaxElement::into_token) - .take_while(|tok| tok != cursor); - - syntax::hacks::parse_expr_from_str(&path_tokens.chain(iter::once(cursor.clone())).join("")) - .and_then(|expr| match expr { - ast::Expr::PathExpr(it) => it.path(), - _ => None, - }) -} - /// Picks the token with the highest rank returned by the passed in function. pub fn pick_best_token( tokens: TokenAtOffset, @@ -82,6 +39,9 @@ pub fn pick_best_token( ) -> Option { tokens.max_by_key(move |t| f(t.kind())) } +pub fn pick_token(mut tokens: TokenAtOffset) -> Option { + tokens.find_map(T::cast) +} /// Converts the mod path struct into its ast representation. 
pub fn mod_path_to_ast(path: &hir::ModPath) -> ast::Path { @@ -226,50 +186,107 @@ pub fn for_each_tail_expr(expr: &ast::Expr, cb: &mut dyn FnMut(&ast::Expr)) { | ast::Expr::TryExpr(_) | ast::Expr::TupleExpr(_) | ast::Expr::WhileExpr(_) + | ast::Expr::LetExpr(_) | ast::Expr::YieldExpr(_) => cb(expr), } } -/// Calls `cb` on each break expr inside of `body` that is applicable for the given label. -pub fn for_each_break_expr( +pub fn for_each_break_and_continue_expr( + label: Option, + body: Option, + cb: &mut dyn FnMut(ast::Expr), +) { + let label = label.and_then(|lbl| lbl.lifetime()); + if let Some(b) = body { + let tree_depth_iterator = TreeWithDepthIterator::new(b); + for (expr, depth) in tree_depth_iterator { + match expr { + ast::Expr::BreakExpr(b) + if (depth == 0 && b.lifetime().is_none()) + || eq_label_lt(&label, &b.lifetime()) => + { + cb(ast::Expr::BreakExpr(b)); + } + ast::Expr::ContinueExpr(c) + if (depth == 0 && c.lifetime().is_none()) + || eq_label_lt(&label, &c.lifetime()) => + { + cb(ast::Expr::ContinueExpr(c)); + } + _ => (), + } + } + } +} + +fn for_each_break_expr( label: Option, body: Option, cb: &mut dyn FnMut(ast::BreakExpr), ) { let label = label.and_then(|lbl| lbl.lifetime()); - let mut depth = 0; if let Some(b) = body { - let preorder = &mut b.syntax().preorder(); - let ev_as_expr = |ev| match ev { - WalkEvent::Enter(it) => Some(WalkEvent::Enter(ast::Expr::cast(it)?)), - WalkEvent::Leave(it) => Some(WalkEvent::Leave(ast::Expr::cast(it)?)), - }; - let eq_label = |lt: Option| { - lt.zip(label.as_ref()).map_or(false, |(lt, lbl)| lt.text() == lbl.text()) - }; - while let Some(node) = preorder.find_map(ev_as_expr) { - match node { - WalkEvent::Enter(expr) => match expr { - ast::Expr::LoopExpr(_) | ast::Expr::WhileExpr(_) | ast::Expr::ForExpr(_) => { - depth += 1 - } - ast::Expr::BlockExpr(e) if e.label().is_some() => depth += 1, - ast::Expr::BreakExpr(b) - if (depth == 0 && b.lifetime().is_none()) || eq_label(b.lifetime()) => - { - cb(b); - } - _ 
=> (), - }, - WalkEvent::Leave(expr) => match expr { - ast::Expr::LoopExpr(_) | ast::Expr::WhileExpr(_) | ast::Expr::ForExpr(_) => { - depth -= 1 - } - ast::Expr::BlockExpr(e) if e.label().is_some() => depth -= 1, - _ => (), - }, + let tree_depth_iterator = TreeWithDepthIterator::new(b); + for (expr, depth) in tree_depth_iterator { + match expr { + ast::Expr::BreakExpr(b) + if (depth == 0 && b.lifetime().is_none()) + || eq_label_lt(&label, &b.lifetime()) => + { + cb(b); + } + _ => (), + } + } + } +} + +fn eq_label_lt(lt1: &Option, lt2: &Option) -> bool { + lt1.as_ref().zip(lt2.as_ref()).map_or(false, |(lt, lbl)| lt.text() == lbl.text()) +} + +struct TreeWithDepthIterator { + preorder: Preorder, + depth: u32, +} + +impl TreeWithDepthIterator { + fn new(body: ast::StmtList) -> Self { + let preorder = body.syntax().preorder(); + Self { preorder, depth: 0 } + } +} + +impl<'a> Iterator for TreeWithDepthIterator { + type Item = (ast::Expr, u32); + + fn next(&mut self) -> Option { + while let Some(event) = self.preorder.find_map(|ev| match ev { + WalkEvent::Enter(it) => ast::Expr::cast(it).map(WalkEvent::Enter), + WalkEvent::Leave(it) => ast::Expr::cast(it).map(WalkEvent::Leave), + }) { + match event { + WalkEvent::Enter( + ast::Expr::LoopExpr(_) | ast::Expr::WhileExpr(_) | ast::Expr::ForExpr(_), + ) => { + self.depth += 1; + } + WalkEvent::Leave( + ast::Expr::LoopExpr(_) | ast::Expr::WhileExpr(_) | ast::Expr::ForExpr(_), + ) => { + self.depth -= 1; + } + WalkEvent::Enter(ast::Expr::BlockExpr(e)) if e.label().is_some() => { + self.depth += 1; + } + WalkEvent::Leave(ast::Expr::BlockExpr(e)) if e.label().is_some() => { + self.depth -= 1; + } + WalkEvent::Enter(expr) => return Some((expr, self.depth)), + _ => (), } } + None } } diff --git a/crates/ide_db/src/helpers/import_assets.rs b/crates/ide_db/src/helpers/import_assets.rs index e2731fecd997..a2aa3f45010a 100644 --- a/crates/ide_db/src/helpers/import_assets.rs +++ b/crates/ide_db/src/helpers/import_assets.rs @@ -1,18 
+1,17 @@ //! Look up accessible paths for items. use hir::{ AsAssocItem, AssocItem, AssocItemContainer, Crate, ItemInNs, MacroDef, ModPath, Module, - ModuleDef, PathResolution, PrefixKind, ScopeDef, Semantics, Type, + ModuleDef, PathResolution, PrefixKind, ScopeDef, Semantics, SemanticsScope, Type, }; use itertools::Itertools; use rustc_hash::FxHashSet; use syntax::{ ast::{self, HasName}, utils::path_to_string_stripping_turbo_fish, - AstNode, AstToken, SyntaxNode, + AstNode, SyntaxNode, }; use crate::{ - helpers::get_path_in_derive_attr, items_locator::{self, AssocItemSearch, DEFAULT_QUERY_SEARCH_LIMIT}, RootDatabase, }; @@ -139,23 +138,6 @@ impl ImportAssets { }) } - pub fn for_derive_ident(sema: &Semantics, ident: &ast::Ident) -> Option { - let attr = ident.syntax().ancestors().find_map(ast::Attr::cast)?; - let path = get_path_in_derive_attr(sema, &attr, ident)?; - - if let Some(_) = path.qualifier() { - return None; - } - - let name = NameToImport::exact_case_sensitive(path.segment()?.name_ref()?.to_string()); - let candidate_node = attr.syntax().clone(); - Some(Self { - import_candidate: ImportCandidate::Path(PathImportCandidate { qualifier: None, name }), - module_with_candidate: sema.scope(&candidate_node).module()?, - candidate_node, - }) - } - pub fn for_fuzzy_path( module_with_candidate: Module, qualifier: Option, @@ -257,7 +239,6 @@ impl ImportAssets { let _p = profile::span("import_assets::search_for"); let scope_definitions = self.scope_definitions(sema); - let current_crate = self.module_with_candidate.krate(); let mod_path = |item| { get_mod_path( sema.db, @@ -267,15 +248,18 @@ impl ImportAssets { ) }; + let krate = self.module_with_candidate.krate(); + let scope = sema.scope(&self.candidate_node); + match &self.import_candidate { ImportCandidate::Path(path_candidate) => { - path_applicable_imports(sema, current_crate, path_candidate, mod_path) + path_applicable_imports(sema, krate, path_candidate, mod_path) } 
ImportCandidate::TraitAssocItem(trait_candidate) => { - trait_applicable_items(sema, current_crate, trait_candidate, true, mod_path) + trait_applicable_items(sema, krate, &scope, trait_candidate, true, mod_path) } ImportCandidate::TraitMethod(trait_candidate) => { - trait_applicable_items(sema, current_crate, trait_candidate, false, mod_path) + trait_applicable_items(sema, krate, &scope, trait_candidate, false, mod_path) } } .into_iter() @@ -464,6 +448,7 @@ fn module_with_segment_name( fn trait_applicable_items( sema: &Semantics, current_crate: Crate, + scope: &SemanticsScope, trait_candidate: &TraitImportCandidate, trait_assoc_item: bool, mod_path: impl Fn(ItemInNs) -> Option, @@ -501,11 +486,11 @@ fn trait_applicable_items( if trait_assoc_item { trait_candidate.receiver_ty.iterate_path_candidates( db, - current_crate, + scope, &trait_candidates, None, None, - |_, assoc| { + |assoc| { if required_assoc_items.contains(&assoc) { if let AssocItem::Function(f) = assoc { if f.self_param(db).is_some() { @@ -528,11 +513,11 @@ fn trait_applicable_items( } else { trait_candidate.receiver_ty.iterate_method_candidates( db, - current_crate, + scope, &trait_candidates, None, None, - |_, function| { + |function| { let assoc = function.as_assoc_item(db)?; if required_assoc_items.contains(&assoc) { let located_trait = assoc.containing_trait(db)?; diff --git a/crates/ide_db/src/helpers/node_ext.rs b/crates/ide_db/src/helpers/node_ext.rs index 82178ed74962..5df3ed1366f1 100644 --- a/crates/ide_db/src/helpers/node_ext.rs +++ b/crates/ide_db/src/helpers/node_ext.rs @@ -216,3 +216,29 @@ pub fn vis_eq(this: &ast::Visibility, other: &ast::Visibility) -> bool { _ => false, } } + +/// Returns the `let` only if there is exactly one (that is, `let pat = expr` +/// or `((let pat = expr))`, but not `let pat = expr && expr` or `non_let_expr`). 
+pub fn single_let(expr: ast::Expr) -> Option { + match expr { + ast::Expr::ParenExpr(expr) => expr.expr().and_then(single_let), + ast::Expr::LetExpr(expr) => Some(expr), + _ => None, + } +} + +pub fn is_pattern_cond(expr: ast::Expr) -> bool { + match expr { + ast::Expr::BinExpr(expr) + if expr.op_kind() == Some(ast::BinaryOp::LogicOp(ast::LogicOp::And)) => + { + expr.lhs() + .map(is_pattern_cond) + .or_else(|| expr.rhs().map(is_pattern_cond)) + .unwrap_or(false) + } + ast::Expr::ParenExpr(expr) => expr.expr().map_or(false, is_pattern_cond), + ast::Expr::LetExpr(_) => true, + _ => false, + } +} diff --git a/crates/ide_ssr/src/resolving.rs b/crates/ide_ssr/src/resolving.rs index 844b19779a87..e15cb874efbe 100644 --- a/crates/ide_ssr/src/resolving.rs +++ b/crates/ide_ssr/src/resolving.rs @@ -222,11 +222,11 @@ impl<'db> ResolutionScope<'db> { let module = self.scope.module()?; adt.ty(self.scope.db).iterate_path_candidates( self.scope.db, - module.krate(), + &self.scope, &self.scope.visible_traits(), Some(module), None, - |_ty, assoc_item| { + |assoc_item| { let item_name = assoc_item.name(self.scope.db)?; if item_name.to_smol_str().as_str() == name.text() { Some(hir::PathResolution::AssocItem(assoc_item)) diff --git a/crates/mbe/src/expander/matcher.rs b/crates/mbe/src/expander/matcher.rs index b4c6d3bf6187..944d3ef87d81 100644 --- a/crates/mbe/src/expander/matcher.rs +++ b/crates/mbe/src/expander/matcher.rs @@ -599,7 +599,7 @@ fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree) -> Match { src = it; res.unmatched_tts += src.len(); } - res.add_err(ExpandError::binding_error("leftover tokens")); + res.add_err(ExpandError::LeftoverTokens); if let Some(error_reover_item) = error_recover_item { res.bindings = bindings_builder.build(&error_reover_item); @@ -658,7 +658,7 @@ fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree) -> Match { fn match_leaf(lhs: &tt::Leaf, src: &mut TtIter) -> Result<(), ExpandError> { let rhs = src .expect_leaf() - .map_err(|()| 
ExpandError::BindingError(format!("expected leaf: `{lhs}`").into()))?; + .map_err(|()| ExpandError::binding_error(format!("expected leaf: `{lhs}`")))?; match (lhs, rhs) { ( tt::Leaf::Punct(tt::Punct { char: lhs, .. }), diff --git a/crates/mbe/src/expander/transcriber.rs b/crates/mbe/src/expander/transcriber.rs index e24a75a9b038..b1b3f63fd3a5 100644 --- a/crates/mbe/src/expander/transcriber.rs +++ b/crates/mbe/src/expander/transcriber.rs @@ -17,7 +17,7 @@ impl Bindings { fn get(&self, name: &str, nesting: &mut [NestingState]) -> Result<&Fragment, ExpandError> { macro_rules! binding_err { - ($($arg:tt)*) => { ExpandError::BindingError(format!($($arg)*).into()) }; + ($($arg:tt)*) => { ExpandError::binding_error(format!($($arg)*)) }; } let mut b: &Binding = @@ -178,7 +178,7 @@ fn expand_repeat( ); return ExpandResult { value: Fragment::Tokens(Subtree::default().into()), - err: Some(ExpandError::Other("Expand exceed limit".into())), + err: Some(ExpandError::LimitExceeded), }; } diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs index 07b7f4d1a5e8..6402ceadaaae 100644 --- a/crates/mbe/src/lib.rs +++ b/crates/mbe/src/lib.rs @@ -67,18 +67,17 @@ impl fmt::Display for ParseError { #[derive(Debug, PartialEq, Eq, Clone)] pub enum ExpandError { + BindingError(Box>), + LeftoverTokens, + ConversionError, + LimitExceeded, NoMatchingRule, UnexpectedToken, - BindingError(Box), - ConversionError, - // FIXME: no way mbe should know about proc macros. 
- UnresolvedProcMacro, - Other(Box), } impl ExpandError { - fn binding_error(e: &str) -> ExpandError { - ExpandError::BindingError(e.into()) + fn binding_error(e: impl Into>) -> ExpandError { + ExpandError::BindingError(Box::new(e.into())) } } @@ -89,8 +88,8 @@ impl fmt::Display for ExpandError { ExpandError::UnexpectedToken => f.write_str("unexpected token in input"), ExpandError::BindingError(e) => f.write_str(e), ExpandError::ConversionError => f.write_str("could not convert tokens"), - ExpandError::UnresolvedProcMacro => f.write_str("unresolved proc macro"), - ExpandError::Other(e) => f.write_str(e), + ExpandError::LimitExceeded => f.write_str("Expand exceed limit"), + ExpandError::LeftoverTokens => f.write_str("leftover tokens"), } } } @@ -311,42 +310,41 @@ fn validate(pattern: &MetaTemplate) -> Result<(), ParseError> { Ok(()) } +pub type ExpandResult = ValueResult; + #[derive(Debug, Clone, Eq, PartialEq)] -pub struct ExpandResult { +pub struct ValueResult { pub value: T, - pub err: Option, + pub err: Option, } -impl ExpandResult { +impl ValueResult { pub fn ok(value: T) -> Self { Self { value, err: None } } - pub fn only_err(err: ExpandError) -> Self + pub fn only_err(err: E) -> Self where T: Default, { Self { value: Default::default(), err: Some(err) } } - pub fn str_err(err: String) -> Self - where - T: Default, - { - Self::only_err(ExpandError::Other(err.into())) + pub fn map(self, f: impl FnOnce(T) -> U) -> ValueResult { + ValueResult { value: f(self.value), err: self.err } } - pub fn map(self, f: impl FnOnce(T) -> U) -> ExpandResult { - ExpandResult { value: f(self.value), err: self.err } + pub fn map_err(self, f: impl FnOnce(E) -> E2) -> ValueResult { + ValueResult { value: self.value, err: self.err.map(f) } } - pub fn result(self) -> Result { + pub fn result(self) -> Result { self.err.map_or(Ok(self.value), Err) } } -impl From> for ExpandResult { - fn from(result: Result) -> Self { +impl From> for ValueResult { + fn from(result: Result) -> Self { 
result.map_or_else(Self::only_err, Self::ok) } } diff --git a/crates/mbe/src/tt_iter.rs b/crates/mbe/src/tt_iter.rs index 3a006a5a1030..fc5590b71845 100644 --- a/crates/mbe/src/tt_iter.rs +++ b/crates/mbe/src/tt_iter.rs @@ -106,7 +106,7 @@ impl<'a> TtIter<'a> { } let err = if error || !cursor.is_root() { - Some(ExpandError::BindingError(format!("expected {entry_point:?}").into())) + Some(ExpandError::binding_error(format!("expected {entry_point:?}"))) } else { None }; diff --git a/crates/parser/src/grammar/expressions.rs b/crates/parser/src/grammar/expressions.rs index 9dbba89c568e..a40db15049de 100644 --- a/crates/parser/src/grammar/expressions.rs +++ b/crates/parser/src/grammar/expressions.rs @@ -29,6 +29,15 @@ fn expr_no_struct(p: &mut Parser) { expr_bp(p, None, r, 1); } +/// Parses the expression in `let pattern = expression`. +/// It needs to be parsed with lower precedence than `&&`, so that +/// `if let true = true && false` is parsed as `if (let true = true) && (true)` +/// and not `if let true = (true && true)`. +fn expr_let(p: &mut Parser) { + let r = Restrictions { forbid_structs: true, prefer_stmt: false }; + expr_bp(p, None, r, 5); +} + pub(super) fn stmt(p: &mut Parser, semicolon: Semicolon) { if p.eat(T![;]) { return; @@ -185,6 +194,7 @@ fn current_op(p: &Parser) -> (u8, SyntaxKind) { T![%] if p.at(T![%=]) => (1, T![%=]), T![%] => (11, T![%]), T![&] if p.at(T![&=]) => (1, T![&=]), + // If you update this, remember to update `expr_let()` too. 
T![&] if p.at(T![&&]) => (4, T![&&]), T![&] => (8, T![&]), T![/] if p.at(T![/=]) => (1, T![/=]), diff --git a/crates/parser/src/grammar/expressions/atom.rs b/crates/parser/src/grammar/expressions/atom.rs index 4b7a1b31fbdf..e2c1b1fec579 100644 --- a/crates/parser/src/grammar/expressions/atom.rs +++ b/crates/parser/src/grammar/expressions/atom.rs @@ -79,6 +79,7 @@ pub(super) fn atom_expr(p: &mut Parser, r: Restrictions) -> Option<(CompletedMar closure_expr(p) } T![if] => if_expr(p), + T![let] => let_expr(p), T![loop] => loop_expr(p, None), T![box] => box_expr(p, None), @@ -286,7 +287,7 @@ fn if_expr(p: &mut Parser) -> CompletedMarker { assert!(p.at(T![if])); let m = p.start(); p.bump(T![if]); - condition(p); + expr_no_struct(p); block_expr(p); if p.at(T![else]) { p.bump(T![else]); @@ -335,7 +336,7 @@ fn while_expr(p: &mut Parser, m: Option) -> CompletedMarker { assert!(p.at(T![while])); let m = m.unwrap_or_else(|| p.start()); p.bump(T![while]); - condition(p); + expr_no_struct(p); block_expr(p); m.complete(p, WHILE_EXPR) } @@ -355,22 +356,18 @@ fn for_expr(p: &mut Parser, m: Option) -> CompletedMarker { m.complete(p, FOR_EXPR) } -// test cond -// fn foo() { if let Some(_) = None {} } -// fn bar() { -// if let Some(_) | Some(_) = None {} -// if let | Some(_) = None {} -// while let Some(_) | Some(_) = None {} -// while let | Some(_) = None {} +// test let_expr +// fn foo() { +// if let Some(_) = None && true {} +// while 1 == 5 && (let None = None) {} // } -fn condition(p: &mut Parser) { +fn let_expr(p: &mut Parser) -> CompletedMarker { let m = p.start(); - if p.eat(T![let]) { - patterns::pattern_top(p); - p.expect(T![=]); - } - expr_no_struct(p); - m.complete(p, CONDITION); + p.bump(T![let]); + patterns::pattern_top(p); + p.expect(T![=]); + expr_let(p); + m.complete(p, LET_EXPR) } // test match_expr @@ -482,10 +479,6 @@ fn match_guard(p: &mut Parser) -> CompletedMarker { assert!(p.at(T![if])); let m = p.start(); p.bump(T![if]); - if p.eat(T![let]) { - 
patterns::pattern_top(p); - p.expect(T![=]); - } expr(p); m.complete(p, MATCH_GUARD) } diff --git a/crates/parser/src/syntax_kind/generated.rs b/crates/parser/src/syntax_kind/generated.rs index 601a5792afde..d04b5dbf0087 100644 --- a/crates/parser/src/syntax_kind/generated.rs +++ b/crates/parser/src/syntax_kind/generated.rs @@ -178,7 +178,6 @@ pub enum SyntaxKind { CLOSURE_EXPR, IF_EXPR, WHILE_EXPR, - CONDITION, LOOP_EXPR, FOR_EXPR, CONTINUE_EXPR, @@ -188,6 +187,7 @@ pub enum SyntaxKind { STMT_LIST, RETURN_EXPR, YIELD_EXPR, + LET_EXPR, MATCH_EXPR, MATCH_ARM_LIST, MATCH_ARM, diff --git a/crates/parser/src/tests/top_entries.rs b/crates/parser/src/tests/top_entries.rs index 24e41b46f8e4..eb640dc7fc74 100644 --- a/crates/parser/src/tests/top_entries.rs +++ b/crates/parser/src/tests/top_entries.rs @@ -289,17 +289,19 @@ fn expr() { TopEntryPoint::Expr, "let _ = 0;", expect![[r#" - ERROR - LET_KW "let" - WHITESPACE " " - UNDERSCORE "_" - WHITESPACE " " - EQ "=" - WHITESPACE " " - INT_NUMBER "0" - SEMICOLON ";" - error 0: expected expression - "#]], + ERROR + LET_EXPR + LET_KW "let" + WHITESPACE " " + WILDCARD_PAT + UNDERSCORE "_" + WHITESPACE " " + EQ "=" + WHITESPACE " " + LITERAL + INT_NUMBER "0" + SEMICOLON ";" + "#]], ); } diff --git a/crates/parser/test_data/parser/err/0008_item_block_recovery.txt b/crates/parser/test_data/parser/err/0008_item_block_recovery.txt index 6dd70e7cd9b6..60b2fe98755e 100644 --- a/crates/parser/test_data/parser/err/0008_item_block_recovery.txt +++ b/crates/parser/test_data/parser/err/0008_item_block_recovery.txt @@ -29,9 +29,8 @@ SOURCE_FILE IF_EXPR IF_KW "if" WHITESPACE " " - CONDITION - LITERAL - TRUE_KW "true" + LITERAL + TRUE_KW "true" WHITESPACE " " BLOCK_EXPR STMT_LIST diff --git a/crates/parser/test_data/parser/err/0019_let_recover.rs b/crates/parser/test_data/parser/err/0019_let_recover.rs index 48bf3d68bd38..5108d5a49be3 100644 --- a/crates/parser/test_data/parser/err/0019_let_recover.rs +++ 
b/crates/parser/test_data/parser/err/0019_let_recover.rs @@ -1,5 +1,5 @@ fn foo() { - let foo = + let foo = 11 let bar = 1; let let baz = 92; diff --git a/crates/parser/test_data/parser/err/0019_let_recover.txt b/crates/parser/test_data/parser/err/0019_let_recover.txt index 25722b135582..7d62e0cc14f5 100644 --- a/crates/parser/test_data/parser/err/0019_let_recover.txt +++ b/crates/parser/test_data/parser/err/0019_let_recover.txt @@ -20,6 +20,9 @@ SOURCE_FILE IDENT "foo" WHITESPACE " " EQ "=" + WHITESPACE " " + LITERAL + INT_NUMBER "11" WHITESPACE "\n " LET_STMT LET_KW "let" @@ -57,9 +60,8 @@ SOURCE_FILE IF_EXPR IF_KW "if" WHITESPACE " " - CONDITION - LITERAL - TRUE_KW "true" + LITERAL + TRUE_KW "true" WHITESPACE " " BLOCK_EXPR STMT_LIST @@ -73,9 +75,8 @@ SOURCE_FILE WHILE_EXPR WHILE_KW "while" WHITESPACE " " - CONDITION - LITERAL - TRUE_KW "true" + LITERAL + TRUE_KW "true" WHITESPACE " " BLOCK_EXPR STMT_LIST @@ -95,13 +96,12 @@ SOURCE_FILE WHITESPACE "\n" R_CURLY "}" WHITESPACE "\n" -error 24: expected expression -error 24: expected SEMICOLON -error 49: expected pattern -error 49: expected SEMICOLON -error 75: expected pattern -error 75: expected SEMICOLON -error 98: expected pattern -error 98: expected SEMICOLON -error 124: expected pattern -error 124: expected SEMICOLON +error 27: expected SEMICOLON +error 52: expected pattern +error 52: expected SEMICOLON +error 78: expected pattern +error 78: expected SEMICOLON +error 101: expected pattern +error 101: expected SEMICOLON +error 127: expected pattern +error 127: expected SEMICOLON diff --git a/crates/parser/test_data/parser/err/0024_many_type_parens.txt b/crates/parser/test_data/parser/err/0024_many_type_parens.txt index 446e1a823380..82e6a11249b2 100644 --- a/crates/parser/test_data/parser/err/0024_many_type_parens.txt +++ b/crates/parser/test_data/parser/err/0024_many_type_parens.txt @@ -180,116 +180,118 @@ SOURCE_FILE ERROR PLUS "+" WHITESPACE " " - EXPR_STMT - TUPLE_EXPR - L_PAREN "(" - FOR_EXPR - FOR_KW 
"for" - PATH_PAT - PATH - PATH_SEGMENT - L_ANGLE "<" - ERROR - LIFETIME_IDENT "'a" - R_ANGLE ">" - WHITESPACE " " + TUPLE_EXPR + L_PAREN "(" + FOR_EXPR + FOR_KW "for" + PATH_PAT + PATH + PATH_SEGMENT + L_ANGLE "<" + ERROR + LIFETIME_IDENT "'a" + R_ANGLE ">" + WHITESPACE " " + BIN_EXPR BIN_EXPR BIN_EXPR BIN_EXPR - BIN_EXPR - PATH_EXPR - PATH - PATH_SEGMENT - NAME_REF - IDENT "Trait" - L_ANGLE "<" - ERROR - LIFETIME_IDENT "'a" - R_ANGLE ">" - ERROR - R_PAREN ")" - WHITESPACE " " - PLUS "+" - WHITESPACE " " - PAREN_EXPR - L_PAREN "(" PATH_EXPR PATH PATH_SEGMENT NAME_REF - IDENT "Copy" + IDENT "Trait" + L_ANGLE "<" + ERROR + LIFETIME_IDENT "'a" + R_ANGLE ">" + ERROR R_PAREN ")" - R_ANGLE ">" - ERROR - SEMICOLON ";" - WHITESPACE "\n " - LET_STMT - LET_KW "let" - WHITESPACE " " - WILDCARD_PAT - UNDERSCORE "_" - COLON ":" + WHITESPACE " " + PLUS "+" + WHITESPACE " " + PAREN_EXPR + L_PAREN "(" + PATH_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "Copy" + R_PAREN ")" + R_ANGLE ">" + ERROR + SEMICOLON ";" + WHITESPACE "\n " + LET_EXPR + LET_KW "let" + WHITESPACE " " + WILDCARD_PAT + UNDERSCORE "_" + ERROR + COLON ":" WHITESPACE " " - DYN_TRAIT_TYPE - TYPE_BOUND_LIST - TYPE_BOUND - PATH_TYPE - PATH - PATH_SEGMENT - NAME_REF - IDENT "Box" - GENERIC_ARG_LIST - L_ANGLE "<" - TYPE_ARG - PAREN_TYPE - L_PAREN "(" - FOR_TYPE - FOR_KW "for" - GENERIC_PARAM_LIST - L_ANGLE "<" - LIFETIME_PARAM - LIFETIME - LIFETIME_IDENT "'a" - R_ANGLE ">" - WHITESPACE " " - PATH_TYPE - PATH - PATH_SEGMENT - NAME_REF - IDENT "Trait" - GENERIC_ARG_LIST - L_ANGLE "<" - LIFETIME_ARG - LIFETIME - LIFETIME_IDENT "'a" - R_ANGLE ">" - R_PAREN ")" - WHITESPACE " " - PLUS "+" - WHITESPACE " " - TYPE_BOUND - L_PAREN "(" - PATH_TYPE - PATH - PATH_SEGMENT - NAME_REF - IDENT "Copy" - R_PAREN ")" - WHITESPACE " " - PLUS "+" - WHITESPACE " " - TYPE_BOUND + BIN_EXPR + BIN_EXPR + PATH_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "Box" + L_ANGLE "<" + TUPLE_EXPR L_PAREN "(" - QUESTION "?" 
- PATH_TYPE + FOR_EXPR + FOR_KW "for" + PATH_PAT + PATH + PATH_SEGMENT + L_ANGLE "<" + ERROR + LIFETIME_IDENT "'a" + R_ANGLE ">" + WHITESPACE " " + BIN_EXPR + BIN_EXPR + BIN_EXPR + BIN_EXPR + PATH_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "Trait" + L_ANGLE "<" + ERROR + LIFETIME_IDENT "'a" + R_ANGLE ">" + ERROR + R_PAREN ")" + WHITESPACE " " + PLUS "+" + WHITESPACE " " + PAREN_EXPR + L_PAREN "(" + PATH_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "Copy" + R_PAREN ")" + WHITESPACE " " + PLUS "+" + WHITESPACE " " + PAREN_EXPR + L_PAREN "(" + ERROR + QUESTION "?" + PATH_EXPR PATH PATH_SEGMENT NAME_REF IDENT "Sized" R_PAREN ")" - ERROR - R_ANGLE ">" - SEMICOLON ";" + R_ANGLE ">" + ERROR + SEMICOLON ";" WHITESPACE "\n" R_CURLY "}" WHITESPACE "\n" @@ -312,10 +314,18 @@ error 168: expected expression error 179: expected expression error 180: expected a block error 180: expected COMMA -error 180: expected expression -error 180: expected R_PAREN -error 180: expected SEMICOLON -error 215: expected COMMA -error 215: expected R_ANGLE -error 235: expected SEMICOLON -error 235: expected expression +error 190: expected EQ +error 190: expected expression +error 191: expected COMMA +error 201: expected type +error 204: expected IN_KW +error 211: expected expression +error 214: expected expression +error 228: expected expression +error 229: expected R_PAREN +error 229: expected a block +error 229: expected COMMA +error 236: expected expression +error 237: expected COMMA +error 237: expected expression +error 237: expected R_PAREN diff --git a/crates/parser/test_data/parser/inline/ok/0030_cond.rast b/crates/parser/test_data/parser/inline/ok/0030_cond.rast deleted file mode 100644 index 3aa330f55d15..000000000000 --- a/crates/parser/test_data/parser/inline/ok/0030_cond.rast +++ /dev/null @@ -1,209 +0,0 @@ -SOURCE_FILE@0..197 - FN@0..37 - FN_KW@0..2 "fn" - WHITESPACE@2..3 " " - NAME@3..6 - IDENT@3..6 "foo" - PARAM_LIST@6..8 - L_PAREN@6..7 "(" - R_PAREN@7..8 ")" - WHITESPACE@8..9 " 
" - BLOCK_EXPR@9..37 - STMT_LIST@9..37 - L_CURLY@9..10 "{" - WHITESPACE@10..11 " " - IF_EXPR@11..35 - IF_KW@11..13 "if" - WHITESPACE@13..14 " " - CONDITION@14..32 - LET_KW@14..17 "let" - WHITESPACE@17..18 " " - TUPLE_STRUCT_PAT@18..25 - PATH@18..22 - PATH_SEGMENT@18..22 - NAME_REF@18..22 - IDENT@18..22 "Some" - L_PAREN@22..23 "(" - WILDCARD_PAT@23..24 - UNDERSCORE@23..24 "_" - R_PAREN@24..25 ")" - WHITESPACE@25..26 " " - EQ@26..27 "=" - WHITESPACE@27..28 " " - PATH_EXPR@28..32 - PATH@28..32 - PATH_SEGMENT@28..32 - NAME_REF@28..32 - IDENT@28..32 "None" - WHITESPACE@32..33 " " - BLOCK_EXPR@33..35 - STMT_LIST@33..35 - L_CURLY@33..34 "{" - R_CURLY@34..35 "}" - WHITESPACE@35..36 " " - R_CURLY@36..37 "}" - WHITESPACE@37..38 "\n" - FN@38..196 - FN_KW@38..40 "fn" - WHITESPACE@40..41 " " - NAME@41..44 - IDENT@41..44 "bar" - PARAM_LIST@44..46 - L_PAREN@44..45 "(" - R_PAREN@45..46 ")" - WHITESPACE@46..47 " " - BLOCK_EXPR@47..196 - STMT_LIST@47..196 - L_CURLY@47..48 "{" - WHITESPACE@48..53 "\n " - EXPR_STMT@53..87 - IF_EXPR@53..87 - IF_KW@53..55 "if" - WHITESPACE@55..56 " " - CONDITION@56..84 - LET_KW@56..59 "let" - WHITESPACE@59..60 " " - OR_PAT@60..77 - TUPLE_STRUCT_PAT@60..67 - PATH@60..64 - PATH_SEGMENT@60..64 - NAME_REF@60..64 - IDENT@60..64 "Some" - L_PAREN@64..65 "(" - WILDCARD_PAT@65..66 - UNDERSCORE@65..66 "_" - R_PAREN@66..67 ")" - WHITESPACE@67..68 " " - PIPE@68..69 "|" - WHITESPACE@69..70 " " - TUPLE_STRUCT_PAT@70..77 - PATH@70..74 - PATH_SEGMENT@70..74 - NAME_REF@70..74 - IDENT@70..74 "Some" - L_PAREN@74..75 "(" - WILDCARD_PAT@75..76 - UNDERSCORE@75..76 "_" - R_PAREN@76..77 ")" - WHITESPACE@77..78 " " - EQ@78..79 "=" - WHITESPACE@79..80 " " - PATH_EXPR@80..84 - PATH@80..84 - PATH_SEGMENT@80..84 - NAME_REF@80..84 - IDENT@80..84 "None" - WHITESPACE@84..85 " " - BLOCK_EXPR@85..87 - STMT_LIST@85..87 - L_CURLY@85..86 "{" - R_CURLY@86..87 "}" - WHITESPACE@87..92 "\n " - EXPR_STMT@92..118 - IF_EXPR@92..118 - IF_KW@92..94 "if" - WHITESPACE@94..95 " " - CONDITION@95..115 - 
LET_KW@95..98 "let" - WHITESPACE@98..99 " " - PIPE@99..100 "|" - WHITESPACE@100..101 " " - TUPLE_STRUCT_PAT@101..108 - PATH@101..105 - PATH_SEGMENT@101..105 - NAME_REF@101..105 - IDENT@101..105 "Some" - L_PAREN@105..106 "(" - WILDCARD_PAT@106..107 - UNDERSCORE@106..107 "_" - R_PAREN@107..108 ")" - WHITESPACE@108..109 " " - EQ@109..110 "=" - WHITESPACE@110..111 " " - PATH_EXPR@111..115 - PATH@111..115 - PATH_SEGMENT@111..115 - NAME_REF@111..115 - IDENT@111..115 "None" - WHITESPACE@115..116 " " - BLOCK_EXPR@116..118 - STMT_LIST@116..118 - L_CURLY@116..117 "{" - R_CURLY@117..118 "}" - WHITESPACE@118..123 "\n " - EXPR_STMT@123..160 - WHILE_EXPR@123..160 - WHILE_KW@123..128 "while" - WHITESPACE@128..129 " " - CONDITION@129..157 - LET_KW@129..132 "let" - WHITESPACE@132..133 " " - OR_PAT@133..150 - TUPLE_STRUCT_PAT@133..140 - PATH@133..137 - PATH_SEGMENT@133..137 - NAME_REF@133..137 - IDENT@133..137 "Some" - L_PAREN@137..138 "(" - WILDCARD_PAT@138..139 - UNDERSCORE@138..139 "_" - R_PAREN@139..140 ")" - WHITESPACE@140..141 " " - PIPE@141..142 "|" - WHITESPACE@142..143 " " - TUPLE_STRUCT_PAT@143..150 - PATH@143..147 - PATH_SEGMENT@143..147 - NAME_REF@143..147 - IDENT@143..147 "Some" - L_PAREN@147..148 "(" - WILDCARD_PAT@148..149 - UNDERSCORE@148..149 "_" - R_PAREN@149..150 ")" - WHITESPACE@150..151 " " - EQ@151..152 "=" - WHITESPACE@152..153 " " - PATH_EXPR@153..157 - PATH@153..157 - PATH_SEGMENT@153..157 - NAME_REF@153..157 - IDENT@153..157 "None" - WHITESPACE@157..158 " " - BLOCK_EXPR@158..160 - STMT_LIST@158..160 - L_CURLY@158..159 "{" - R_CURLY@159..160 "}" - WHITESPACE@160..165 "\n " - WHILE_EXPR@165..194 - WHILE_KW@165..170 "while" - WHITESPACE@170..171 " " - CONDITION@171..191 - LET_KW@171..174 "let" - WHITESPACE@174..175 " " - PIPE@175..176 "|" - WHITESPACE@176..177 " " - TUPLE_STRUCT_PAT@177..184 - PATH@177..181 - PATH_SEGMENT@177..181 - NAME_REF@177..181 - IDENT@177..181 "Some" - L_PAREN@181..182 "(" - WILDCARD_PAT@182..183 - UNDERSCORE@182..183 "_" - 
R_PAREN@183..184 ")" - WHITESPACE@184..185 " " - EQ@185..186 "=" - WHITESPACE@186..187 " " - PATH_EXPR@187..191 - PATH@187..191 - PATH_SEGMENT@187..191 - NAME_REF@187..191 - IDENT@187..191 "None" - WHITESPACE@191..192 " " - BLOCK_EXPR@192..194 - STMT_LIST@192..194 - L_CURLY@192..193 "{" - R_CURLY@193..194 "}" - WHITESPACE@194..195 "\n" - R_CURLY@195..196 "}" - WHITESPACE@196..197 "\n" diff --git a/crates/parser/test_data/parser/inline/ok/0030_cond.rs b/crates/parser/test_data/parser/inline/ok/0030_cond.rs deleted file mode 100644 index 2552a2621f9e..000000000000 --- a/crates/parser/test_data/parser/inline/ok/0030_cond.rs +++ /dev/null @@ -1,7 +0,0 @@ -fn foo() { if let Some(_) = None {} } -fn bar() { - if let Some(_) | Some(_) = None {} - if let | Some(_) = None {} - while let Some(_) | Some(_) = None {} - while let | Some(_) = None {} -} diff --git a/crates/parser/test_data/parser/inline/ok/0030_cond.txt b/crates/parser/test_data/parser/inline/ok/0030_cond.txt deleted file mode 100644 index 5b8993701398..000000000000 --- a/crates/parser/test_data/parser/inline/ok/0030_cond.txt +++ /dev/null @@ -1,209 +0,0 @@ -SOURCE_FILE - FN - FN_KW "fn" - WHITESPACE " " - NAME - IDENT "foo" - PARAM_LIST - L_PAREN "(" - R_PAREN ")" - WHITESPACE " " - BLOCK_EXPR - STMT_LIST - L_CURLY "{" - WHITESPACE " " - IF_EXPR - IF_KW "if" - WHITESPACE " " - CONDITION - LET_KW "let" - WHITESPACE " " - TUPLE_STRUCT_PAT - PATH - PATH_SEGMENT - NAME_REF - IDENT "Some" - L_PAREN "(" - WILDCARD_PAT - UNDERSCORE "_" - R_PAREN ")" - WHITESPACE " " - EQ "=" - WHITESPACE " " - PATH_EXPR - PATH - PATH_SEGMENT - NAME_REF - IDENT "None" - WHITESPACE " " - BLOCK_EXPR - STMT_LIST - L_CURLY "{" - R_CURLY "}" - WHITESPACE " " - R_CURLY "}" - WHITESPACE "\n" - FN - FN_KW "fn" - WHITESPACE " " - NAME - IDENT "bar" - PARAM_LIST - L_PAREN "(" - R_PAREN ")" - WHITESPACE " " - BLOCK_EXPR - STMT_LIST - L_CURLY "{" - WHITESPACE "\n " - EXPR_STMT - IF_EXPR - IF_KW "if" - WHITESPACE " " - CONDITION - LET_KW "let" - 
WHITESPACE " " - OR_PAT - TUPLE_STRUCT_PAT - PATH - PATH_SEGMENT - NAME_REF - IDENT "Some" - L_PAREN "(" - WILDCARD_PAT - UNDERSCORE "_" - R_PAREN ")" - WHITESPACE " " - PIPE "|" - WHITESPACE " " - TUPLE_STRUCT_PAT - PATH - PATH_SEGMENT - NAME_REF - IDENT "Some" - L_PAREN "(" - WILDCARD_PAT - UNDERSCORE "_" - R_PAREN ")" - WHITESPACE " " - EQ "=" - WHITESPACE " " - PATH_EXPR - PATH - PATH_SEGMENT - NAME_REF - IDENT "None" - WHITESPACE " " - BLOCK_EXPR - STMT_LIST - L_CURLY "{" - R_CURLY "}" - WHITESPACE "\n " - EXPR_STMT - IF_EXPR - IF_KW "if" - WHITESPACE " " - CONDITION - LET_KW "let" - WHITESPACE " " - PIPE "|" - WHITESPACE " " - TUPLE_STRUCT_PAT - PATH - PATH_SEGMENT - NAME_REF - IDENT "Some" - L_PAREN "(" - WILDCARD_PAT - UNDERSCORE "_" - R_PAREN ")" - WHITESPACE " " - EQ "=" - WHITESPACE " " - PATH_EXPR - PATH - PATH_SEGMENT - NAME_REF - IDENT "None" - WHITESPACE " " - BLOCK_EXPR - STMT_LIST - L_CURLY "{" - R_CURLY "}" - WHITESPACE "\n " - EXPR_STMT - WHILE_EXPR - WHILE_KW "while" - WHITESPACE " " - CONDITION - LET_KW "let" - WHITESPACE " " - OR_PAT - TUPLE_STRUCT_PAT - PATH - PATH_SEGMENT - NAME_REF - IDENT "Some" - L_PAREN "(" - WILDCARD_PAT - UNDERSCORE "_" - R_PAREN ")" - WHITESPACE " " - PIPE "|" - WHITESPACE " " - TUPLE_STRUCT_PAT - PATH - PATH_SEGMENT - NAME_REF - IDENT "Some" - L_PAREN "(" - WILDCARD_PAT - UNDERSCORE "_" - R_PAREN ")" - WHITESPACE " " - EQ "=" - WHITESPACE " " - PATH_EXPR - PATH - PATH_SEGMENT - NAME_REF - IDENT "None" - WHITESPACE " " - BLOCK_EXPR - STMT_LIST - L_CURLY "{" - R_CURLY "}" - WHITESPACE "\n " - WHILE_EXPR - WHILE_KW "while" - WHITESPACE " " - CONDITION - LET_KW "let" - WHITESPACE " " - PIPE "|" - WHITESPACE " " - TUPLE_STRUCT_PAT - PATH - PATH_SEGMENT - NAME_REF - IDENT "Some" - L_PAREN "(" - WILDCARD_PAT - UNDERSCORE "_" - R_PAREN ")" - WHITESPACE " " - EQ "=" - WHITESPACE " " - PATH_EXPR - PATH - PATH_SEGMENT - NAME_REF - IDENT "None" - WHITESPACE " " - BLOCK_EXPR - STMT_LIST - L_CURLY "{" - R_CURLY "}" - WHITESPACE 
"\n" - R_CURLY "}" - WHITESPACE "\n" diff --git a/crates/parser/test_data/parser/inline/ok/0030_let_expr.rast b/crates/parser/test_data/parser/inline/ok/0030_let_expr.rast new file mode 100644 index 000000000000..dcffcb1ce2f6 --- /dev/null +++ b/crates/parser/test_data/parser/inline/ok/0030_let_expr.rast @@ -0,0 +1,90 @@ +SOURCE_FILE + FN + FN_KW "fn" + WHITESPACE " " + NAME + IDENT "foo" + PARAM_LIST + L_PAREN "(" + R_PAREN ")" + WHITESPACE " " + BLOCK_EXPR + STMT_LIST + L_CURLY "{" + WHITESPACE "\n " + EXPR_STMT + IF_EXPR + IF_KW "if" + WHITESPACE " " + BIN_EXPR + LET_EXPR + LET_KW "let" + WHITESPACE " " + TUPLE_STRUCT_PAT + PATH + PATH_SEGMENT + NAME_REF + IDENT "Some" + L_PAREN "(" + WILDCARD_PAT + UNDERSCORE "_" + R_PAREN ")" + WHITESPACE " " + EQ "=" + WHITESPACE " " + PATH_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "None" + WHITESPACE " " + AMP2 "&&" + WHITESPACE " " + LITERAL + TRUE_KW "true" + WHITESPACE " " + BLOCK_EXPR + STMT_LIST + L_CURLY "{" + R_CURLY "}" + WHITESPACE "\n " + WHILE_EXPR + WHILE_KW "while" + WHITESPACE " " + BIN_EXPR + BIN_EXPR + LITERAL + INT_NUMBER "1" + WHITESPACE " " + EQ2 "==" + WHITESPACE " " + LITERAL + INT_NUMBER "5" + WHITESPACE " " + AMP2 "&&" + WHITESPACE " " + PAREN_EXPR + L_PAREN "(" + LET_EXPR + LET_KW "let" + WHITESPACE " " + IDENT_PAT + NAME + IDENT "None" + WHITESPACE " " + EQ "=" + WHITESPACE " " + PATH_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "None" + R_PAREN ")" + WHITESPACE " " + BLOCK_EXPR + STMT_LIST + L_CURLY "{" + R_CURLY "}" + WHITESPACE "\n" + R_CURLY "}" + WHITESPACE "\n" diff --git a/crates/parser/test_data/parser/inline/ok/0030_let_expr.rs b/crates/parser/test_data/parser/inline/ok/0030_let_expr.rs new file mode 100644 index 000000000000..0131d5e33829 --- /dev/null +++ b/crates/parser/test_data/parser/inline/ok/0030_let_expr.rs @@ -0,0 +1,4 @@ +fn foo() { + if let Some(_) = None && true {} + while 1 == 5 && (let None = None) {} +} diff --git 
a/crates/parser/test_data/parser/inline/ok/0030_let_expr.txt b/crates/parser/test_data/parser/inline/ok/0030_let_expr.txt new file mode 100644 index 000000000000..dcffcb1ce2f6 --- /dev/null +++ b/crates/parser/test_data/parser/inline/ok/0030_let_expr.txt @@ -0,0 +1,90 @@ +SOURCE_FILE + FN + FN_KW "fn" + WHITESPACE " " + NAME + IDENT "foo" + PARAM_LIST + L_PAREN "(" + R_PAREN ")" + WHITESPACE " " + BLOCK_EXPR + STMT_LIST + L_CURLY "{" + WHITESPACE "\n " + EXPR_STMT + IF_EXPR + IF_KW "if" + WHITESPACE " " + BIN_EXPR + LET_EXPR + LET_KW "let" + WHITESPACE " " + TUPLE_STRUCT_PAT + PATH + PATH_SEGMENT + NAME_REF + IDENT "Some" + L_PAREN "(" + WILDCARD_PAT + UNDERSCORE "_" + R_PAREN ")" + WHITESPACE " " + EQ "=" + WHITESPACE " " + PATH_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "None" + WHITESPACE " " + AMP2 "&&" + WHITESPACE " " + LITERAL + TRUE_KW "true" + WHITESPACE " " + BLOCK_EXPR + STMT_LIST + L_CURLY "{" + R_CURLY "}" + WHITESPACE "\n " + WHILE_EXPR + WHILE_KW "while" + WHITESPACE " " + BIN_EXPR + BIN_EXPR + LITERAL + INT_NUMBER "1" + WHITESPACE " " + EQ2 "==" + WHITESPACE " " + LITERAL + INT_NUMBER "5" + WHITESPACE " " + AMP2 "&&" + WHITESPACE " " + PAREN_EXPR + L_PAREN "(" + LET_EXPR + LET_KW "let" + WHITESPACE " " + IDENT_PAT + NAME + IDENT "None" + WHITESPACE " " + EQ "=" + WHITESPACE " " + PATH_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "None" + R_PAREN ")" + WHITESPACE " " + BLOCK_EXPR + STMT_LIST + L_CURLY "{" + R_CURLY "}" + WHITESPACE "\n" + R_CURLY "}" + WHITESPACE "\n" diff --git a/crates/parser/test_data/parser/inline/ok/0031_while_expr.txt b/crates/parser/test_data/parser/inline/ok/0031_while_expr.txt index fc1ca4934c5c..16c522414af4 100644 --- a/crates/parser/test_data/parser/inline/ok/0031_while_expr.txt +++ b/crates/parser/test_data/parser/inline/ok/0031_while_expr.txt @@ -16,9 +16,8 @@ SOURCE_FILE WHILE_EXPR WHILE_KW "while" WHITESPACE " " - CONDITION - LITERAL - TRUE_KW "true" + LITERAL + TRUE_KW "true" WHITESPACE " " BLOCK_EXPR STMT_LIST 
@@ -30,7 +29,7 @@ SOURCE_FILE WHILE_EXPR WHILE_KW "while" WHITESPACE " " - CONDITION + LET_EXPR LET_KW "let" WHITESPACE " " TUPLE_STRUCT_PAT @@ -69,15 +68,14 @@ SOURCE_FILE WHILE_EXPR WHILE_KW "while" WHITESPACE " " - CONDITION - BLOCK_EXPR - STMT_LIST - L_CURLY "{" - WHITESPACE " " - LITERAL - TRUE_KW "true" - WHITESPACE " " - R_CURLY "}" + BLOCK_EXPR + STMT_LIST + L_CURLY "{" + WHITESPACE " " + LITERAL + TRUE_KW "true" + WHITESPACE " " + R_CURLY "}" WHITESPACE " " BLOCK_EXPR STMT_LIST diff --git a/crates/parser/test_data/parser/inline/ok/0064_if_expr.txt b/crates/parser/test_data/parser/inline/ok/0064_if_expr.txt index c1f8381271fe..e2e964e44d12 100644 --- a/crates/parser/test_data/parser/inline/ok/0064_if_expr.txt +++ b/crates/parser/test_data/parser/inline/ok/0064_if_expr.txt @@ -16,9 +16,8 @@ SOURCE_FILE IF_EXPR IF_KW "if" WHITESPACE " " - CONDITION - LITERAL - TRUE_KW "true" + LITERAL + TRUE_KW "true" WHITESPACE " " BLOCK_EXPR STMT_LIST @@ -30,9 +29,8 @@ SOURCE_FILE IF_EXPR IF_KW "if" WHITESPACE " " - CONDITION - LITERAL - TRUE_KW "true" + LITERAL + TRUE_KW "true" WHITESPACE " " BLOCK_EXPR STMT_LIST @@ -51,9 +49,8 @@ SOURCE_FILE IF_EXPR IF_KW "if" WHITESPACE " " - CONDITION - LITERAL - TRUE_KW "true" + LITERAL + TRUE_KW "true" WHITESPACE " " BLOCK_EXPR STMT_LIST @@ -65,9 +62,8 @@ SOURCE_FILE IF_EXPR IF_KW "if" WHITESPACE " " - CONDITION - LITERAL - FALSE_KW "false" + LITERAL + FALSE_KW "false" WHITESPACE " " BLOCK_EXPR STMT_LIST @@ -86,12 +82,11 @@ SOURCE_FILE IF_EXPR IF_KW "if" WHITESPACE " " - CONDITION - PATH_EXPR - PATH - PATH_SEGMENT - NAME_REF - IDENT "S" + PATH_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "S" WHITESPACE " " BLOCK_EXPR STMT_LIST @@ -103,15 +98,14 @@ SOURCE_FILE IF_EXPR IF_KW "if" WHITESPACE " " - CONDITION - BLOCK_EXPR - STMT_LIST - L_CURLY "{" - WHITESPACE " " - LITERAL - TRUE_KW "true" - WHITESPACE " " - R_CURLY "}" + BLOCK_EXPR + STMT_LIST + L_CURLY "{" + WHITESPACE " " + LITERAL + TRUE_KW "true" + WHITESPACE " " + R_CURLY "}" 
WHITESPACE " " BLOCK_EXPR STMT_LIST diff --git a/crates/parser/test_data/parser/inline/ok/0088_break_ambiguity.txt b/crates/parser/test_data/parser/inline/ok/0088_break_ambiguity.txt index 50ce9933b999..cbf5e84e8cdc 100644 --- a/crates/parser/test_data/parser/inline/ok/0088_break_ambiguity.txt +++ b/crates/parser/test_data/parser/inline/ok/0088_break_ambiguity.txt @@ -15,9 +15,8 @@ SOURCE_FILE IF_EXPR IF_KW "if" WHITESPACE " " - CONDITION - BREAK_EXPR - BREAK_KW "break" + BREAK_EXPR + BREAK_KW "break" WHITESPACE " " BLOCK_EXPR STMT_LIST @@ -28,9 +27,8 @@ SOURCE_FILE WHILE_EXPR WHILE_KW "while" WHITESPACE " " - CONDITION - BREAK_EXPR - BREAK_KW "break" + BREAK_EXPR + BREAK_KW "break" WHITESPACE " " BLOCK_EXPR STMT_LIST diff --git a/crates/parser/test_data/parser/inline/ok/0096_no_semi_after_block.txt b/crates/parser/test_data/parser/inline/ok/0096_no_semi_after_block.txt index 2a853713934b..e4e215593ee9 100644 --- a/crates/parser/test_data/parser/inline/ok/0096_no_semi_after_block.txt +++ b/crates/parser/test_data/parser/inline/ok/0096_no_semi_after_block.txt @@ -16,9 +16,8 @@ SOURCE_FILE IF_EXPR IF_KW "if" WHITESPACE " " - CONDITION - LITERAL - TRUE_KW "true" + LITERAL + TRUE_KW "true" WHITESPACE " " BLOCK_EXPR STMT_LIST @@ -50,9 +49,8 @@ SOURCE_FILE WHILE_EXPR WHILE_KW "while" WHITESPACE " " - CONDITION - LITERAL - TRUE_KW "true" + LITERAL + TRUE_KW "true" WHITESPACE " " BLOCK_EXPR STMT_LIST diff --git a/crates/parser/test_data/parser/inline/ok/0109_label.txt b/crates/parser/test_data/parser/inline/ok/0109_label.txt index bd57fa9d4a12..48d0bde845a5 100644 --- a/crates/parser/test_data/parser/inline/ok/0109_label.txt +++ b/crates/parser/test_data/parser/inline/ok/0109_label.txt @@ -35,9 +35,8 @@ SOURCE_FILE WHITESPACE " " WHILE_KW "while" WHITESPACE " " - CONDITION - LITERAL - TRUE_KW "true" + LITERAL + TRUE_KW "true" WHITESPACE " " BLOCK_EXPR STMT_LIST diff --git a/crates/parser/test_data/parser/inline/ok/0118_match_guard.txt 
b/crates/parser/test_data/parser/inline/ok/0118_match_guard.txt index a28b6ea5d1fd..96318b52195e 100644 --- a/crates/parser/test_data/parser/inline/ok/0118_match_guard.txt +++ b/crates/parser/test_data/parser/inline/ok/0118_match_guard.txt @@ -49,19 +49,20 @@ SOURCE_FILE MATCH_GUARD IF_KW "if" WHITESPACE " " - LET_KW "let" - WHITESPACE " " - IDENT_PAT - NAME - IDENT "foo" - WHITESPACE " " - EQ "=" - WHITESPACE " " - PATH_EXPR - PATH - PATH_SEGMENT - NAME_REF - IDENT "bar" + LET_EXPR + LET_KW "let" + WHITESPACE " " + IDENT_PAT + NAME + IDENT "foo" + WHITESPACE " " + EQ "=" + WHITESPACE " " + PATH_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "bar" WHITESPACE " " FAT_ARROW "=>" WHITESPACE " " diff --git a/crates/parser/test_data/parser/ok/0033_label_break.txt b/crates/parser/test_data/parser/ok/0033_label_break.txt index 9807bf0d9a6d..df1acd6b83b9 100644 --- a/crates/parser/test_data/parser/ok/0033_label_break.txt +++ b/crates/parser/test_data/parser/ok/0033_label_break.txt @@ -51,16 +51,15 @@ SOURCE_FILE IF_EXPR IF_KW "if" WHITESPACE " " - CONDITION - CALL_EXPR - PATH_EXPR - PATH - PATH_SEGMENT - NAME_REF - IDENT "condition_not_met" - ARG_LIST - L_PAREN "(" - R_PAREN ")" + CALL_EXPR + PATH_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "condition_not_met" + ARG_LIST + L_PAREN "(" + R_PAREN ")" WHITESPACE " " BLOCK_EXPR STMT_LIST @@ -92,16 +91,15 @@ SOURCE_FILE IF_EXPR IF_KW "if" WHITESPACE " " - CONDITION - CALL_EXPR - PATH_EXPR - PATH - PATH_SEGMENT - NAME_REF - IDENT "condition_not_met" - ARG_LIST - L_PAREN "(" - R_PAREN ")" + CALL_EXPR + PATH_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "condition_not_met" + ARG_LIST + L_PAREN "(" + R_PAREN ")" WHITESPACE " " BLOCK_EXPR STMT_LIST @@ -153,16 +151,15 @@ SOURCE_FILE IF_EXPR IF_KW "if" WHITESPACE " " - CONDITION - CALL_EXPR - PATH_EXPR - PATH - PATH_SEGMENT - NAME_REF - IDENT "foo" - ARG_LIST - L_PAREN "(" - R_PAREN ")" + CALL_EXPR + PATH_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "foo" + ARG_LIST + L_PAREN "(" + 
R_PAREN ")" WHITESPACE " " BLOCK_EXPR STMT_LIST @@ -187,16 +184,15 @@ SOURCE_FILE IF_EXPR IF_KW "if" WHITESPACE " " - CONDITION - CALL_EXPR - PATH_EXPR - PATH - PATH_SEGMENT - NAME_REF - IDENT "bar" - ARG_LIST - L_PAREN "(" - R_PAREN ")" + CALL_EXPR + PATH_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "bar" + ARG_LIST + L_PAREN "(" + R_PAREN ")" WHITESPACE " " BLOCK_EXPR STMT_LIST diff --git a/crates/parser/test_data/parser/ok/0035_weird_exprs.txt b/crates/parser/test_data/parser/ok/0035_weird_exprs.txt index 5f62748c479c..4ec703e517c2 100644 --- a/crates/parser/test_data/parser/ok/0035_weird_exprs.txt +++ b/crates/parser/test_data/parser/ok/0035_weird_exprs.txt @@ -280,21 +280,20 @@ SOURCE_FILE WHILE_EXPR WHILE_KW "while" WHITESPACE " " - CONDITION - PREFIX_EXPR - BANG "!" - METHOD_CALL_EXPR - PATH_EXPR - PATH - PATH_SEGMENT - NAME_REF - IDENT "x" - DOT "." - NAME_REF - IDENT "get" - ARG_LIST - L_PAREN "(" - R_PAREN ")" + PREFIX_EXPR + BANG "!" + METHOD_CALL_EXPR + PATH_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "x" + DOT "." 
+ NAME_REF + IDENT "get" + ARG_LIST + L_PAREN "(" + R_PAREN ")" WHITESPACE " " BLOCK_EXPR STMT_LIST @@ -443,12 +442,11 @@ SOURCE_FILE WHILE_EXPR WHILE_KW "while" WHITESPACE " " - CONDITION - PAREN_EXPR - L_PAREN "(" - RETURN_EXPR - RETURN_KW "return" - R_PAREN ")" + PAREN_EXPR + L_PAREN "(" + RETURN_EXPR + RETURN_KW "return" + R_PAREN ")" WHITESPACE " " BLOCK_EXPR STMT_LIST @@ -457,12 +455,11 @@ SOURCE_FILE IF_EXPR IF_KW "if" WHITESPACE " " - CONDITION - PAREN_EXPR - L_PAREN "(" - RETURN_EXPR - RETURN_KW "return" - R_PAREN ")" + PAREN_EXPR + L_PAREN "(" + RETURN_EXPR + RETURN_KW "return" + R_PAREN ")" WHITESPACE " " BLOCK_EXPR STMT_LIST @@ -495,12 +492,11 @@ SOURCE_FILE IF_EXPR IF_KW "if" WHITESPACE " " - CONDITION - PAREN_EXPR - L_PAREN "(" - RETURN_EXPR - RETURN_KW "return" - R_PAREN ")" + PAREN_EXPR + L_PAREN "(" + RETURN_EXPR + RETURN_KW "return" + R_PAREN ")" WHITESPACE " " BLOCK_EXPR STMT_LIST @@ -549,12 +545,11 @@ SOURCE_FILE IF_EXPR IF_KW "if" WHITESPACE " " - CONDITION - PAREN_EXPR - L_PAREN "(" - RETURN_EXPR - RETURN_KW "return" - R_PAREN ")" + PAREN_EXPR + L_PAREN "(" + RETURN_EXPR + RETURN_KW "return" + R_PAREN ")" WHITESPACE " " BLOCK_EXPR STMT_LIST @@ -572,12 +567,11 @@ SOURCE_FILE IF_EXPR IF_KW "if" WHITESPACE " " - CONDITION - PAREN_EXPR - L_PAREN "(" - RETURN_EXPR - RETURN_KW "return" - R_PAREN ")" + PAREN_EXPR + L_PAREN "(" + RETURN_EXPR + RETURN_KW "return" + R_PAREN ")" WHITESPACE " " BLOCK_EXPR STMT_LIST @@ -1037,9 +1031,8 @@ SOURCE_FILE IF_EXPR IF_KW "if" WHITESPACE " " - CONDITION - BREAK_EXPR - BREAK_KW "break" + BREAK_EXPR + BREAK_KW "break" WHITESPACE " " BLOCK_EXPR STMT_LIST @@ -1089,18 +1082,17 @@ SOURCE_FILE IF_EXPR IF_KW "if" WHITESPACE " " - CONDITION - BIN_EXPR - PATH_EXPR - PATH - PATH_SEGMENT - NAME_REF - IDENT "i" - WHITESPACE " " - EQ2 "==" - WHITESPACE " " - LITERAL - INT_NUMBER "1" + BIN_EXPR + PATH_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "i" + WHITESPACE " " + EQ2 "==" + WHITESPACE " " + LITERAL + INT_NUMBER "1" 
WHITESPACE " " BLOCK_EXPR STMT_LIST @@ -1344,18 +1336,17 @@ SOURCE_FILE IF_EXPR IF_KW "if" WHITESPACE " " - CONDITION - BIN_EXPR - PATH_EXPR - PATH - PATH_SEGMENT - NAME_REF - IDENT "u8" - WHITESPACE " " - NEQ "!=" - WHITESPACE " " - LITERAL - INT_NUMBER "0u8" + BIN_EXPR + PATH_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "u8" + WHITESPACE " " + NEQ "!=" + WHITESPACE " " + LITERAL + INT_NUMBER "0u8" WHITESPACE " " BLOCK_EXPR STMT_LIST diff --git a/crates/parser/test_data/parser/ok/0047_minus_in_inner_pattern.txt b/crates/parser/test_data/parser/ok/0047_minus_in_inner_pattern.txt index ac23e7d1d96a..aecc71d4829b 100644 --- a/crates/parser/test_data/parser/ok/0047_minus_in_inner_pattern.txt +++ b/crates/parser/test_data/parser/ok/0047_minus_in_inner_pattern.txt @@ -219,7 +219,7 @@ SOURCE_FILE IF_EXPR IF_KW "if" WHITESPACE " " - CONDITION + LET_EXPR LET_KW "let" WHITESPACE " " TUPLE_STRUCT_PAT diff --git a/crates/parser/test_data/parser/ok/0056_neq_in_type.txt b/crates/parser/test_data/parser/ok/0056_neq_in_type.txt index 2d78eaffc524..55ce31275fba 100644 --- a/crates/parser/test_data/parser/ok/0056_neq_in_type.txt +++ b/crates/parser/test_data/parser/ok/0056_neq_in_type.txt @@ -15,47 +15,46 @@ SOURCE_FILE IF_EXPR IF_KW "if" WHITESPACE " " - CONDITION - BIN_EXPR - CAST_EXPR - METHOD_CALL_EXPR - LITERAL - FLOAT_NUMBER "1.0f32" - DOT "." - NAME_REF - IDENT "floor" - ARG_LIST - L_PAREN "(" - R_PAREN ")" - WHITESPACE " " - AS_KW "as" - WHITESPACE " " - PATH_TYPE - PATH - PATH_SEGMENT - NAME_REF - IDENT "i64" + BIN_EXPR + CAST_EXPR + METHOD_CALL_EXPR + LITERAL + FLOAT_NUMBER "1.0f32" + DOT "." + NAME_REF + IDENT "floor" + ARG_LIST + L_PAREN "(" + R_PAREN ")" WHITESPACE " " - NEQ "!=" + AS_KW "as" WHITESPACE " " - CAST_EXPR - METHOD_CALL_EXPR - LITERAL - FLOAT_NUMBER "1.0f32" - DOT "." 
- NAME_REF - IDENT "floor" - ARG_LIST - L_PAREN "(" - R_PAREN ")" - WHITESPACE " " - AS_KW "as" - WHITESPACE " " - PATH_TYPE - PATH - PATH_SEGMENT - NAME_REF - IDENT "i64" + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "i64" + WHITESPACE " " + NEQ "!=" + WHITESPACE " " + CAST_EXPR + METHOD_CALL_EXPR + LITERAL + FLOAT_NUMBER "1.0f32" + DOT "." + NAME_REF + IDENT "floor" + ARG_LIST + L_PAREN "(" + R_PAREN ")" + WHITESPACE " " + AS_KW "as" + WHITESPACE " " + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "i64" WHITESPACE " " BLOCK_EXPR STMT_LIST diff --git a/crates/parser/test_data/parser/ok/0059_loops_in_parens.txt b/crates/parser/test_data/parser/ok/0059_loops_in_parens.txt index 1eeb6c957f77..79bc7f971d18 100644 --- a/crates/parser/test_data/parser/ok/0059_loops_in_parens.txt +++ b/crates/parser/test_data/parser/ok/0059_loops_in_parens.txt @@ -86,9 +86,8 @@ SOURCE_FILE WHILE_EXPR WHILE_KW "while" WHITESPACE " " - CONDITION - LITERAL - TRUE_KW "true" + LITERAL + TRUE_KW "true" WHITESPACE " " BLOCK_EXPR STMT_LIST diff --git a/crates/project_model/src/cargo_workspace.rs b/crates/project_model/src/cargo_workspace.rs index c2cf3c4ce3ae..48051e4b5e8f 100644 --- a/crates/project_model/src/cargo_workspace.rs +++ b/crates/project_model/src/cargo_workspace.rs @@ -210,6 +210,8 @@ pub struct TargetData { pub kind: TargetKind, /// Is this target a proc-macro pub is_proc_macro: bool, + /// Required features of the target without which it won't build + pub required_features: Vec, } #[derive(Debug, Clone, Copy, PartialEq, Eq)] @@ -348,6 +350,7 @@ impl CargoWorkspace { root: AbsPathBuf::assert(PathBuf::from(&meta_tgt.src_path)), kind: TargetKind::new(meta_tgt.kind.as_slice()), is_proc_macro, + required_features: meta_tgt.required_features.clone(), }); pkg_data.targets.push(tgt); } diff --git a/crates/rust-analyzer/src/caps.rs b/crates/rust-analyzer/src/caps.rs index a1acd543c5f5..457399a61898 100644 --- a/crates/rust-analyzer/src/caps.rs +++ 
b/crates/rust-analyzer/src/caps.rs @@ -117,7 +117,7 @@ pub fn server_capabilities(config: &Config) -> ServerCapabilities { "hoverRange": true, "joinLines": true, "matchingBrace": true, - "moveItems": true, + "moveItem": true, "onEnter": true, "openCargoToml": true, "parentModule": true, diff --git a/crates/rust-analyzer/src/cargo_target_spec.rs b/crates/rust-analyzer/src/cargo_target_spec.rs index a8894e0f022c..ec5dd16d001c 100644 --- a/crates/rust-analyzer/src/cargo_target_spec.rs +++ b/crates/rust-analyzer/src/cargo_target_spec.rs @@ -1,5 +1,7 @@ //! See `CargoTargetSpec` +use std::mem; + use cfg::{CfgAtom, CfgExpr}; use ide::{FileId, RunnableKind, TestId}; use project_model::{self, ManifestPath, TargetKind}; @@ -18,17 +20,22 @@ pub(crate) struct CargoTargetSpec { pub(crate) package: String, pub(crate) target: String, pub(crate) target_kind: TargetKind, + pub(crate) required_features: Vec, } impl CargoTargetSpec { pub(crate) fn runnable_args( snap: &GlobalStateSnapshot, - spec: Option, + mut spec: Option, kind: &RunnableKind, cfg: &Option, ) -> Result<(Vec, Vec)> { let mut args = Vec::new(); let mut extra_args = Vec::new(); + + let target_required_features = + spec.as_mut().map(|spec| mem::take(&mut spec.required_features)).unwrap_or(Vec::new()); + match kind { RunnableKind::Test { test_id, attr } => { args.push("test".to_string()); @@ -87,14 +94,20 @@ impl CargoTargetSpec { let cargo_config = snap.config.cargo(); if cargo_config.all_features { args.push("--all-features".to_string()); + + for feature in target_required_features { + args.push("--features".to_string()); + args.push(feature); + } } else { let mut features = Vec::new(); if let Some(cfg) = cfg.as_ref() { required_features(cfg, &mut features); } - for feature in cargo_config.features { - features.push(feature.clone()); - } + + features.extend(cargo_config.features); + features.extend(target_required_features); + features.dedup(); for feature in features { args.push("--features".to_string()); @@ -126,6 
+139,7 @@ impl CargoTargetSpec { package: cargo_ws.package_flag(package_data), target: target_data.name.clone(), target_kind: target_data.kind, + required_features: target_data.required_features.clone(), }; Ok(Some(res)) diff --git a/crates/rust-analyzer/src/cli/analysis_stats.rs b/crates/rust-analyzer/src/cli/analysis_stats.rs index 55a542c3c16d..f52e1e751278 100644 --- a/crates/rust-analyzer/src/cli/analysis_stats.rs +++ b/crates/rust-analyzer/src/cli/analysis_stats.rs @@ -10,7 +10,11 @@ use hir::{ db::{AstDatabase, DefDatabase, HirDatabase}, AssocItem, Crate, Function, HasSource, HirDisplay, ModuleDef, }; -use hir_def::{body::BodySourceMap, expr::ExprId, FunctionId}; +use hir_def::{ + body::{BodySourceMap, SyntheticSyntax}, + expr::ExprId, + FunctionId, +}; use hir_ty::{TyExt, TypeWalk}; use ide::{Analysis, AnalysisHost, LineCol, RootDatabase}; use ide_db::base_db::{ @@ -28,7 +32,7 @@ use syntax::{AstNode, SyntaxNode}; use vfs::{AbsPathBuf, Vfs, VfsPath}; use crate::cli::{ - flags, + flags::{self, OutputFormat}, load_cargo::{load_workspace, LoadCargoConfig}, print_memory_usage, progress_report::ProgressReport, @@ -191,7 +195,7 @@ impl flags::AnalysisStats { ) { let mut bar = match verbosity { Verbosity::Quiet | Verbosity::Spammy => ProgressReport::hidden(), - _ if self.parallel => ProgressReport::hidden(), + _ if self.parallel || self.output.is_some() => ProgressReport::hidden(), _ => ProgressReport::new(funcs.len() as u64), }; @@ -252,7 +256,7 @@ impl flags::AnalysisStats { for (expr_id, _) in body.exprs.iter() { let ty = &inference_result[expr_id]; num_exprs += 1; - if ty.is_unknown() { + let unknown_or_partial = if ty.is_unknown() { num_exprs_unknown += 1; if verbosity.is_spammy() { if let Some((path, start, end)) = @@ -270,6 +274,7 @@ impl flags::AnalysisStats { bar.println(format!("{}: Unknown type", name,)); } } + true } else { let mut is_partially_unknown = false; ty.walk(&mut |ty| { @@ -280,7 +285,8 @@ impl flags::AnalysisStats { if is_partially_unknown 
{ num_exprs_partially_unknown += 1; } - } + is_partially_unknown + }; if self.only.is_some() && verbosity.is_spammy() { // in super-verbose mode for just one function, we print every single expression if let Some((_, start, end)) = @@ -298,6 +304,13 @@ impl flags::AnalysisStats { bar.println(format!("unknown location: {}", ty.display(db))); } } + if unknown_or_partial && self.output == Some(OutputFormat::Csv) { + println!( + r#"{},type,"{}""#, + location_csv(db, &analysis, vfs, &sm, expr_id), + ty.display(db) + ); + } if let Some(mismatch) = inference_result.type_mismatch_for_expr(expr_id) { num_type_mismatches += 1; if verbosity.is_verbose() { @@ -323,6 +336,14 @@ impl flags::AnalysisStats { )); } } + if self.output == Some(OutputFormat::Csv) { + println!( + r#"{},mismatch,"{}","{}""#, + location_csv(db, &analysis, vfs, &sm, expr_id), + mismatch.expected.display(db), + mismatch.actual.display(db) + ); + } } } if verbosity.is_spammy() { @@ -358,6 +379,28 @@ impl flags::AnalysisStats { } } +fn location_csv( + db: &RootDatabase, + analysis: &Analysis, + vfs: &Vfs, + sm: &BodySourceMap, + expr_id: ExprId, +) -> String { + let src = match sm.expr_syntax(expr_id) { + Ok(s) => s, + Err(SyntheticSyntax) => return "synthetic,,".to_string(), + }; + let root = db.parse_or_expand(src.file_id).unwrap(); + let node = src.map(|e| e.to_node(&root).syntax().clone()); + let original_range = node.as_ref().original_file_range(db); + let path = vfs.file_path(original_range.file_id); + let line_index = analysis.file_line_index(original_range.file_id).unwrap(); + let text_range = original_range.range; + let (start, end) = + (line_index.line_col(text_range.start()), line_index.line_col(text_range.end())); + format!("{},{}:{},{}:{}", path, start.line + 1, start.col, end.line + 1, end.col) +} + fn expr_syntax_range( db: &RootDatabase, analysis: &Analysis, diff --git a/crates/rust-analyzer/src/cli/flags.rs b/crates/rust-analyzer/src/cli/flags.rs index b759d912c968..19907ebddb6a 100644 --- 
a/crates/rust-analyzer/src/cli/flags.rs +++ b/crates/rust-analyzer/src/cli/flags.rs @@ -1,6 +1,6 @@ //! Grammar for the command-line arguments. #![allow(unreachable_pub)] -use std::path::PathBuf; +use std::{path::PathBuf, str::FromStr}; use ide_ssr::{SsrPattern, SsrRule}; @@ -54,6 +54,8 @@ xflags::xflags! { /// Directory with Cargo.toml. required path: PathBuf { + optional --output format: OutputFormat + /// Randomize order in which crates, modules, and items are processed. optional --randomize /// Run type inference in parallel. @@ -160,6 +162,7 @@ pub struct Highlight { pub struct AnalysisStats { pub path: PathBuf, + pub output: Option, pub randomize: bool, pub parallel: bool, pub memory_usage: bool, @@ -215,6 +218,11 @@ impl RustAnalyzer { } // generated end +#[derive(Debug, PartialEq, Eq)] +pub enum OutputFormat { + Csv, +} + impl RustAnalyzer { pub fn verbosity(&self) -> Verbosity { if self.quiet { @@ -227,3 +235,14 @@ impl RustAnalyzer { } } } + +impl FromStr for OutputFormat { + type Err = String; + + fn from_str(s: &str) -> Result { + match s { + "csv" => Ok(Self::Csv), + _ => Err(format!("unknown output format `{}`", s)), + } + } +} diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index 76b72707974c..af779ee000d8 100644 --- a/crates/rust-analyzer/src/config.rs +++ b/crates/rust-analyzer/src/config.rs @@ -161,13 +161,15 @@ config_data! { } }"#, /// Whether to show postfix snippets like `dbg`, `if`, `not`, etc. - completion_postfix_enable: bool = "true", + completion_postfix_enable: bool = "true", /// Toggles the additional completions that automatically add imports when completed. /// Note that your client must specify the `additionalTextEdits` LSP client capability to truly have this feature enabled. completion_autoimport_enable: bool = "true", /// Toggles the additional completions that automatically show method calls and field accesses /// with `self` prefixed to them when inside a method. 
- completion_autoself_enable: bool = "true", + completion_autoself_enable: bool = "true", + /// Enables completions of private items and fields that are defined in the current workspace even if they are not visible at the current position. + completion_privateEditable_enable: bool = "false", /// Whether to show native rust-analyzer diagnostics. diagnostics_enable: bool = "true", @@ -875,6 +877,7 @@ impl Config { enable_imports_on_the_fly: self.data.completion_autoimport_enable && completion_item_edit_resolve(&self.caps), enable_self_on_the_fly: self.data.completion_autoself_enable, + enable_private_editable: self.data.completion_privateEditable_enable, add_call_parenthesis: self.data.completion_addCallParenthesis, add_call_argument_snippets: self.data.completion_addCallArgumentSnippets, insert_use: self.insert_use_config(), diff --git a/crates/rust-analyzer/src/handlers.rs b/crates/rust-analyzer/src/handlers.rs index 10f9d4233bd4..6c6448347d1b 100644 --- a/crates/rust-analyzer/src/handlers.rs +++ b/crates/rust-analyzer/src/handlers.rs @@ -275,9 +275,7 @@ pub(crate) fn handle_on_type_formatting( let char_typed = params.ch.chars().next().unwrap_or('\0'); let text = snap.analysis.file_text(position.file_id)?; - if !text[usize::from(position.offset)..].starts_with(char_typed) { - // Add `always!` here once VS Code bug is fixed: - // https://github.com/rust-analyzer/rust-analyzer/issues/10002 + if stdx::never!(!text[usize::from(position.offset)..].starts_with(char_typed)) { return Ok(None); } diff --git a/crates/rust-analyzer/src/integrated_benchmarks.rs b/crates/rust-analyzer/src/integrated_benchmarks.rs index 20e7cfb10c6e..c6e08d920292 100644 --- a/crates/rust-analyzer/src/integrated_benchmarks.rs +++ b/crates/rust-analyzer/src/integrated_benchmarks.rs @@ -134,6 +134,7 @@ fn integrated_completion_benchmark() { enable_postfix_completions: true, enable_imports_on_the_fly: true, enable_self_on_the_fly: true, + enable_private_editable: true, add_call_parenthesis: true, 
add_call_argument_snippets: true, snippet_cap: SnippetCap::new(true), @@ -171,6 +172,7 @@ fn integrated_completion_benchmark() { enable_postfix_completions: true, enable_imports_on_the_fly: true, enable_self_on_the_fly: true, + enable_private_editable: true, add_call_parenthesis: true, add_call_argument_snippets: true, snippet_cap: SnippetCap::new(true), diff --git a/crates/syntax/Cargo.toml b/crates/syntax/Cargo.toml index 43c1f2fa80bb..f59cd4f257c5 100644 --- a/crates/syntax/Cargo.toml +++ b/crates/syntax/Cargo.toml @@ -30,7 +30,7 @@ rayon = "1" expect-test = "1.2.0-pre.1" proc-macro2 = "1.0.8" quote = "1.0.2" -ungrammar = "=1.14.9" +ungrammar = "=1.15.0" test_utils = { path = "../test_utils" } sourcegen = { path = "../sourcegen" } diff --git a/crates/syntax/src/ast/expr_ext.rs b/crates/syntax/src/ast/expr_ext.rs index 28b4f5bc02e2..ae3a3c93931d 100644 --- a/crates/syntax/src/ast/expr_ext.rs +++ b/crates/syntax/src/ast/expr_ext.rs @@ -49,25 +49,81 @@ impl From for ElseBranch { impl ast::IfExpr { pub fn then_branch(&self) -> Option { - self.blocks().next() + self.children_after_condition().next() } pub fn else_branch(&self) -> Option { - let res = match self.blocks().nth(1) { + let res = match self.children_after_condition().nth(1) { Some(block) => ElseBranch::Block(block), None => { - let elif: ast::IfExpr = support::child(self.syntax())?; + let elif = self.children_after_condition().next()?; ElseBranch::IfExpr(elif) } }; Some(res) } - pub fn blocks(&self) -> AstChildren { - support::children(self.syntax()) + fn children_after_condition(&self) -> impl Iterator { + self.syntax().children().skip(1).filter_map(N::cast) } } +#[test] +fn if_block_condition() { + let parse = ast::SourceFile::parse( + r#" + fn test() { + if { true } { "if" } + else if { false } { "first elif" } + else if true { "second elif" } + else if (true) { "third elif" } + else { "else" } + } + "#, + ); + let if_ = parse.tree().syntax().descendants().find_map(ast::IfExpr::cast).unwrap(); + 
assert_eq!(if_.then_branch().unwrap().syntax().text(), r#"{ "if" }"#); + let elif = match if_.else_branch().unwrap() { + ElseBranch::IfExpr(elif) => elif, + ElseBranch::Block(_) => panic!("should be `else if`"), + }; + assert_eq!(elif.then_branch().unwrap().syntax().text(), r#"{ "first elif" }"#); + let elif = match elif.else_branch().unwrap() { + ElseBranch::IfExpr(elif) => elif, + ElseBranch::Block(_) => panic!("should be `else if`"), + }; + assert_eq!(elif.then_branch().unwrap().syntax().text(), r#"{ "second elif" }"#); + let elif = match elif.else_branch().unwrap() { + ElseBranch::IfExpr(elif) => elif, + ElseBranch::Block(_) => panic!("should be `else if`"), + }; + assert_eq!(elif.then_branch().unwrap().syntax().text(), r#"{ "third elif" }"#); + let else_ = match elif.else_branch().unwrap() { + ElseBranch::Block(else_) => else_, + ElseBranch::IfExpr(_) => panic!("should be `else`"), + }; + assert_eq!(else_.syntax().text(), r#"{ "else" }"#); +} + +#[test] +fn if_condition_with_if_inside() { + let parse = ast::SourceFile::parse( + r#" + fn test() { + if if true { true } else { false } { "if" } + else { "else" } + } + "#, + ); + let if_ = parse.tree().syntax().descendants().find_map(ast::IfExpr::cast).unwrap(); + assert_eq!(if_.then_branch().unwrap().syntax().text(), r#"{ "if" }"#); + let else_ = match if_.else_branch().unwrap() { + ElseBranch::Block(else_) => else_, + ElseBranch::IfExpr(_) => panic!("should be `else`"), + }; + assert_eq!(else_.syntax().text(), r#"{ "else" }"#); +} + impl ast::PrefixExpr { pub fn op_kind(&self) -> Option { let res = match self.op_token()?.kind() { diff --git a/crates/syntax/src/ast/generated/nodes.rs b/crates/syntax/src/ast/generated/nodes.rs index 09c5af210f58..11247fbf1c98 100644 --- a/crates/syntax/src/ast/generated/nodes.rs +++ b/crates/syntax/src/ast/generated/nodes.rs @@ -884,7 +884,7 @@ pub struct IfExpr { impl ast::HasAttrs for IfExpr {} impl IfExpr { pub fn if_token(&self) -> Option { support::token(&self.syntax, T![if]) 
} - pub fn condition(&self) -> Option { support::child(&self.syntax) } + pub fn condition(&self) -> Option { support::child(&self.syntax) } pub fn else_token(&self) -> Option { support::token(&self.syntax, T![else]) } } @@ -1035,10 +1035,9 @@ pub struct WhileExpr { pub(crate) syntax: SyntaxNode, } impl ast::HasAttrs for WhileExpr {} -impl ast::HasLoopBody for WhileExpr {} impl WhileExpr { pub fn while_token(&self) -> Option { support::token(&self.syntax, T![while]) } - pub fn condition(&self) -> Option { support::child(&self.syntax) } + pub fn condition(&self) -> Option { support::child(&self.syntax) } } #[derive(Debug, Clone, PartialEq, Eq, Hash)] @@ -1051,6 +1050,18 @@ impl YieldExpr { pub fn expr(&self) -> Option { support::child(&self.syntax) } } +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct LetExpr { + pub(crate) syntax: SyntaxNode, +} +impl ast::HasAttrs for LetExpr {} +impl LetExpr { + pub fn let_token(&self) -> Option { support::token(&self.syntax, T![let]) } + pub fn pat(&self) -> Option { support::child(&self.syntax) } + pub fn eq_token(&self) -> Option { support::token(&self.syntax, T![=]) } + pub fn expr(&self) -> Option { support::child(&self.syntax) } +} + #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct StmtList { pub(crate) syntax: SyntaxNode, @@ -1106,17 +1117,6 @@ impl ArgList { pub fn r_paren_token(&self) -> Option { support::token(&self.syntax, T![')']) } } -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct Condition { - pub(crate) syntax: SyntaxNode, -} -impl Condition { - pub fn let_token(&self) -> Option { support::token(&self.syntax, T![let]) } - pub fn pat(&self) -> Option { support::child(&self.syntax) } - pub fn eq_token(&self) -> Option { support::token(&self.syntax, T![=]) } - pub fn expr(&self) -> Option { support::child(&self.syntax) } -} - #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct MatchArmList { pub(crate) syntax: SyntaxNode, @@ -1147,10 +1147,7 @@ pub struct MatchGuard { } impl MatchGuard { 
pub fn if_token(&self) -> Option { support::token(&self.syntax, T![if]) } - pub fn let_token(&self) -> Option { support::token(&self.syntax, T![let]) } - pub fn pat(&self) -> Option { support::child(&self.syntax) } - pub fn eq_token(&self) -> Option { support::token(&self.syntax, T![=]) } - pub fn expr(&self) -> Option { support::child(&self.syntax) } + pub fn condition(&self) -> Option { support::child(&self.syntax) } } #[derive(Debug, Clone, PartialEq, Eq, Hash)] @@ -1524,6 +1521,7 @@ pub enum Expr { TupleExpr(TupleExpr), WhileExpr(WhileExpr), YieldExpr(YieldExpr), + LetExpr(LetExpr), } #[derive(Debug, Clone, PartialEq, Eq, Hash)] @@ -2664,6 +2662,17 @@ impl AstNode for YieldExpr { } fn syntax(&self) -> &SyntaxNode { &self.syntax } } +impl AstNode for LetExpr { + fn can_cast(kind: SyntaxKind) -> bool { kind == LET_EXPR } + fn cast(syntax: SyntaxNode) -> Option { + if Self::can_cast(syntax.kind()) { + Some(Self { syntax }) + } else { + None + } + } + fn syntax(&self) -> &SyntaxNode { &self.syntax } +} impl AstNode for StmtList { fn can_cast(kind: SyntaxKind) -> bool { kind == STMT_LIST } fn cast(syntax: SyntaxNode) -> Option { @@ -2719,17 +2728,6 @@ impl AstNode for ArgList { } fn syntax(&self) -> &SyntaxNode { &self.syntax } } -impl AstNode for Condition { - fn can_cast(kind: SyntaxKind) -> bool { kind == CONDITION } - fn cast(syntax: SyntaxNode) -> Option { - if Self::can_cast(syntax.kind()) { - Some(Self { syntax }) - } else { - None - } - } - fn syntax(&self) -> &SyntaxNode { &self.syntax } -} impl AstNode for MatchArmList { fn can_cast(kind: SyntaxKind) -> bool { kind == MATCH_ARM_LIST } fn cast(syntax: SyntaxNode) -> Option { @@ -3336,6 +3334,9 @@ impl From for Expr { impl From for Expr { fn from(node: YieldExpr) -> Expr { Expr::YieldExpr(node) } } +impl From for Expr { + fn from(node: LetExpr) -> Expr { Expr::LetExpr(node) } +} impl AstNode for Expr { fn can_cast(kind: SyntaxKind) -> bool { match kind { @@ -3344,7 +3345,7 @@ impl AstNode for Expr { | 
INDEX_EXPR | LITERAL | LOOP_EXPR | MACRO_CALL | MACRO_STMTS | MATCH_EXPR | METHOD_CALL_EXPR | PAREN_EXPR | PATH_EXPR | PREFIX_EXPR | RANGE_EXPR | RECORD_EXPR | REF_EXPR | RETURN_EXPR | TRY_EXPR | TUPLE_EXPR | WHILE_EXPR - | YIELD_EXPR => true, + | YIELD_EXPR | LET_EXPR => true, _ => false, } } @@ -3381,6 +3382,7 @@ impl AstNode for Expr { TUPLE_EXPR => Expr::TupleExpr(TupleExpr { syntax }), WHILE_EXPR => Expr::WhileExpr(WhileExpr { syntax }), YIELD_EXPR => Expr::YieldExpr(YieldExpr { syntax }), + LET_EXPR => Expr::LetExpr(LetExpr { syntax }), _ => return None, }; Some(res) @@ -3418,6 +3420,7 @@ impl AstNode for Expr { Expr::TupleExpr(it) => &it.syntax, Expr::WhileExpr(it) => &it.syntax, Expr::YieldExpr(it) => &it.syntax, + Expr::LetExpr(it) => &it.syntax, } } } @@ -3883,6 +3886,7 @@ impl AstNode for AnyHasAttrs { | TUPLE_EXPR | WHILE_EXPR | YIELD_EXPR + | LET_EXPR | STMT_LIST | RECORD_EXPR_FIELD_LIST | RECORD_EXPR_FIELD @@ -4537,6 +4541,11 @@ impl std::fmt::Display for YieldExpr { std::fmt::Display::fmt(self.syntax(), f) } } +impl std::fmt::Display for LetExpr { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + std::fmt::Display::fmt(self.syntax(), f) + } +} impl std::fmt::Display for StmtList { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { std::fmt::Display::fmt(self.syntax(), f) @@ -4562,11 +4571,6 @@ impl std::fmt::Display for ArgList { std::fmt::Display::fmt(self.syntax(), f) } } -impl std::fmt::Display for Condition { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - std::fmt::Display::fmt(self.syntax(), f) - } -} impl std::fmt::Display for MatchArmList { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { std::fmt::Display::fmt(self.syntax(), f) diff --git a/crates/syntax/src/ast/make.rs b/crates/syntax/src/ast/make.rs index 8a1bcebbf678..1a754ef46097 100644 --- a/crates/syntax/src/ast/make.rs +++ b/crates/syntax/src/ast/make.rs @@ -397,7 +397,7 @@ pub fn expr_match(expr: 
ast::Expr, match_arm_list: ast::MatchArmList) -> ast::Ex expr_from_text(&format!("match {} {}", expr, match_arm_list)) } pub fn expr_if( - condition: ast::Condition, + condition: ast::Expr, then_branch: ast::BlockExpr, else_branch: Option, ) -> ast::Expr { @@ -456,14 +456,8 @@ pub fn expr_assignment(lhs: ast::Expr, rhs: ast::Expr) -> ast::Expr { fn expr_from_text(text: &str) -> ast::Expr { ast_from_text(&format!("const C: () = {};", text)) } - -pub fn condition(expr: ast::Expr, pattern: Option) -> ast::Condition { - match pattern { - None => ast_from_text(&format!("const _: () = while {} {{}};", expr)), - Some(pattern) => { - ast_from_text(&format!("const _: () = while let {} = {} {{}};", pattern, expr)) - } - } +pub fn expr_let(pattern: ast::Pat, expr: ast::Expr) -> ast::LetExpr { + ast_from_text(&format!("const _: () = while let {} = {} {{}};", pattern, expr)) } pub fn arg_list(args: impl IntoIterator) -> ast::ArgList { diff --git a/crates/syntax/src/ast/node_ext.rs b/crates/syntax/src/ast/node_ext.rs index 067e13ee14dd..97455abd8094 100644 --- a/crates/syntax/src/ast/node_ext.rs +++ b/crates/syntax/src/ast/node_ext.rs @@ -528,12 +528,6 @@ impl ast::Item { } } -impl ast::Condition { - pub fn is_pattern_cond(&self) -> bool { - self.let_token().is_some() - } -} - #[derive(Debug, Clone, PartialEq, Eq)] pub enum FieldKind { Name(ast::NameRef), @@ -711,6 +705,15 @@ impl ast::RangePat { } impl ast::TokenTree { + pub fn token_trees_and_tokens( + &self, + ) -> impl Iterator> { + self.syntax().children_with_tokens().filter_map(|not| match not { + NodeOrToken::Node(node) => ast::TokenTree::cast(node).map(NodeOrToken::Node), + NodeOrToken::Token(t) => Some(NodeOrToken::Token(t)), + }) + } + pub fn left_delimiter_token(&self) -> Option { self.syntax() .first_child_or_token()? 
@@ -766,6 +769,15 @@ impl ast::HasLoopBody for ast::ForExpr { } } +impl ast::HasLoopBody for ast::WhileExpr { + fn loop_body(&self) -> Option { + let mut exprs = support::children(self.syntax()); + let first = exprs.next(); + let second = exprs.next(); + second.or(first) + } +} + impl ast::HasAttrs for ast::AnyHasDocComments {} impl From for ast::Item { diff --git a/crates/syntax/src/lib.rs b/crates/syntax/src/lib.rs index 0f7855a053e1..e30f6cd79c3e 100644 --- a/crates/syntax/src/lib.rs +++ b/crates/syntax/src/lib.rs @@ -52,14 +52,15 @@ pub use crate::{ ptr::{AstPtr, SyntaxNodePtr}, syntax_error::SyntaxError, syntax_node::{ - PreorderWithTokens, SyntaxElement, SyntaxElementChildren, SyntaxNode, SyntaxNodeChildren, - SyntaxToken, SyntaxTreeBuilder, + PreorderWithTokens, RustLanguage, SyntaxElement, SyntaxElementChildren, SyntaxNode, + SyntaxNodeChildren, SyntaxToken, SyntaxTreeBuilder, }, token_text::TokenText, }; pub use parser::{SyntaxKind, T}; pub use rowan::{ - Direction, GreenNode, NodeOrToken, SyntaxText, TextRange, TextSize, TokenAtOffset, WalkEvent, + api::Preorder, Direction, GreenNode, NodeOrToken, SyntaxText, TextRange, TextSize, + TokenAtOffset, WalkEvent, }; pub use smol_str::SmolStr; diff --git a/crates/syntax/src/tests/ast_src.rs b/crates/syntax/src/tests/ast_src.rs index c0f1d5ef5652..aeff851ce4d2 100644 --- a/crates/syntax/src/tests/ast_src.rs +++ b/crates/syntax/src/tests/ast_src.rs @@ -133,7 +133,6 @@ pub(crate) const KINDS_SRC: KindsSrc = KindsSrc { "CLOSURE_EXPR", "IF_EXPR", "WHILE_EXPR", - "CONDITION", "LOOP_EXPR", "FOR_EXPR", "CONTINUE_EXPR", @@ -143,6 +142,7 @@ pub(crate) const KINDS_SRC: KindsSrc = KindsSrc { "STMT_LIST", "RETURN_EXPR", "YIELD_EXPR", + "LET_EXPR", "MATCH_EXPR", "MATCH_ARM_LIST", "MATCH_ARM", diff --git a/crates/syntax/src/tests/sourcegen_ast.rs b/crates/syntax/src/tests/sourcegen_ast.rs index 4a608da9ff4b..5ed56a81c497 100644 --- a/crates/syntax/src/tests/sourcegen_ast.rs +++ b/crates/syntax/src/tests/sourcegen_ast.rs @@ 
-85,8 +85,9 @@ fn generate_nodes(kinds: KindsSrc<'_>, grammar: &AstSrc) -> String { .traits .iter() .filter(|trait_name| { - // For loops have two expressions so this might collide, therefor manual impl it - node.name != "ForExpr" || trait_name.as_str() != "HasLoopBody" + // Loops have two expressions so this might collide, therefore, manually implement it + node.name != "ForExpr" && node.name != "WhileExpr" + || trait_name.as_str() != "HasLoopBody" }) .map(|trait_name| { let trait_name = format_ident!("{}", trait_name); diff --git a/crates/syntax/src/validation.rs b/crates/syntax/src/validation.rs index 8dc47e0bd3f4..3ea5844c9515 100644 --- a/crates/syntax/src/validation.rs +++ b/crates/syntax/src/validation.rs @@ -38,6 +38,7 @@ pub(crate) fn validate(root: &SyntaxNode) -> Vec { ast::PtrType(it) => validate_trait_object_ptr_ty(it, &mut errors), ast::FnPtrType(it) => validate_trait_object_fn_ptr_ret_ty(it, &mut errors), ast::MacroRules(it) => validate_macro_rules(it, &mut errors), + ast::LetExpr(it) => validate_let_expr(it, &mut errors), _ => (), } } @@ -343,3 +344,33 @@ fn validate_const(const_: ast::Const, errors: &mut Vec) { errors.push(SyntaxError::new("const globals cannot be mutable", mut_token.text_range())); } } + +fn validate_let_expr(let_: ast::LetExpr, errors: &mut Vec) { + let mut token = let_.syntax().clone(); + loop { + token = match token.parent() { + Some(it) => it, + None => break, + }; + + if ast::ParenExpr::can_cast(token.kind()) { + continue; + } else if let Some(it) = ast::BinExpr::cast(token.clone()) { + if it.op_kind() == Some(ast::BinaryOp::LogicOp(ast::LogicOp::And)) { + continue; + } + } else if ast::IfExpr::can_cast(token.kind()) + || ast::WhileExpr::can_cast(token.kind()) + || ast::MatchGuard::can_cast(token.kind()) + { + // It must be part of the condition since the expressions are inside a block.
+ return; + } + + break; + } + errors.push(SyntaxError::new( + "`let` expressions are not supported here", + let_.syntax().text_range(), + )); +} diff --git a/crates/syntax/test_data/parser/validation/0031_block_inner_attrs.rast b/crates/syntax/test_data/parser/validation/0031_block_inner_attrs.rast index d4963979c860..50057a02d809 100644 --- a/crates/syntax/test_data/parser/validation/0031_block_inner_attrs.rast +++ b/crates/syntax/test_data/parser/validation/0031_block_inner_attrs.rast @@ -49,9 +49,8 @@ SOURCE_FILE@0..350 IF_EXPR@134..257 IF_KW@134..136 "if" WHITESPACE@136..137 " " - CONDITION@137..141 - LITERAL@137..141 - TRUE_KW@137..141 "true" + LITERAL@137..141 + TRUE_KW@137..141 "true" WHITESPACE@141..142 " " BLOCK_EXPR@142..257 STMT_LIST@142..257 @@ -94,9 +93,8 @@ SOURCE_FILE@0..350 WHILE_EXPR@262..347 WHILE_KW@262..267 "while" WHITESPACE@267..268 " " - CONDITION@268..272 - LITERAL@268..272 - TRUE_KW@268..272 "true" + LITERAL@268..272 + TRUE_KW@268..272 "true" WHITESPACE@272..273 " " BLOCK_EXPR@273..347 STMT_LIST@273..347 diff --git a/crates/syntax/test_data/parser/validation/invalid_let_expr.rast b/crates/syntax/test_data/parser/validation/invalid_let_expr.rast new file mode 100644 index 000000000000..5b37b5978320 --- /dev/null +++ b/crates/syntax/test_data/parser/validation/invalid_let_expr.rast @@ -0,0 +1,216 @@ +SOURCE_FILE@0..282 + FN@0..281 + FN_KW@0..2 "fn" + WHITESPACE@2..3 " " + NAME@3..6 + IDENT@3..6 "foo" + PARAM_LIST@6..8 + L_PAREN@6..7 "(" + R_PAREN@7..8 ")" + WHITESPACE@8..9 " " + BLOCK_EXPR@9..281 + STMT_LIST@9..281 + L_CURLY@9..10 "{" + WHITESPACE@10..15 "\n " + CONST@15..42 + CONST_KW@15..20 "const" + WHITESPACE@20..21 " " + UNDERSCORE@21..22 "_" + COLON@22..23 ":" + WHITESPACE@23..24 " " + TUPLE_TYPE@24..26 + L_PAREN@24..25 "(" + R_PAREN@25..26 ")" + WHITESPACE@26..27 " " + EQ@27..28 "=" + WHITESPACE@28..29 " " + LET_EXPR@29..41 + LET_KW@29..32 "let" + WHITESPACE@32..33 " " + WILDCARD_PAT@33..34 + UNDERSCORE@33..34 "_" + WHITESPACE@34..35 
" " + EQ@35..36 "=" + WHITESPACE@36..37 " " + PATH_EXPR@37..41 + PATH@37..41 + PATH_SEGMENT@37..41 + NAME_REF@37..41 + IDENT@37..41 "None" + SEMICOLON@41..42 ";" + WHITESPACE@42..48 "\n\n " + LET_STMT@48..83 + LET_KW@48..51 "let" + WHITESPACE@51..52 " " + WILDCARD_PAT@52..53 + UNDERSCORE@52..53 "_" + WHITESPACE@53..54 " " + EQ@54..55 "=" + WHITESPACE@55..56 " " + IF_EXPR@56..82 + IF_KW@56..58 "if" + WHITESPACE@58..59 " " + LITERAL@59..63 + TRUE_KW@59..63 "true" + WHITESPACE@63..64 " " + BLOCK_EXPR@64..82 + STMT_LIST@64..82 + L_CURLY@64..65 "{" + WHITESPACE@65..66 " " + PAREN_EXPR@66..80 + L_PAREN@66..67 "(" + LET_EXPR@67..79 + LET_KW@67..70 "let" + WHITESPACE@70..71 " " + WILDCARD_PAT@71..72 + UNDERSCORE@71..72 "_" + WHITESPACE@72..73 " " + EQ@73..74 "=" + WHITESPACE@74..75 " " + PATH_EXPR@75..79 + PATH@75..79 + PATH_SEGMENT@75..79 + NAME_REF@75..79 + IDENT@75..79 "None" + R_PAREN@79..80 ")" + WHITESPACE@80..81 " " + R_CURLY@81..82 "}" + SEMICOLON@82..83 ";" + WHITESPACE@83..89 "\n\n " + IF_EXPR@89..279 + IF_KW@89..91 "if" + WHITESPACE@91..92 " " + BIN_EXPR@92..114 + LITERAL@92..96 + TRUE_KW@92..96 "true" + WHITESPACE@96..97 " " + AMP2@97..99 "&&" + WHITESPACE@99..100 " " + PAREN_EXPR@100..114 + L_PAREN@100..101 "(" + LET_EXPR@101..113 + LET_KW@101..104 "let" + WHITESPACE@104..105 " " + WILDCARD_PAT@105..106 + UNDERSCORE@105..106 "_" + WHITESPACE@106..107 " " + EQ@107..108 "=" + WHITESPACE@108..109 " " + PATH_EXPR@109..113 + PATH@109..113 + PATH_SEGMENT@109..113 + NAME_REF@109..113 + IDENT@109..113 "None" + R_PAREN@113..114 ")" + WHITESPACE@114..115 " " + BLOCK_EXPR@115..279 + STMT_LIST@115..279 + L_CURLY@115..116 "{" + WHITESPACE@116..125 "\n " + EXPR_STMT@125..140 + PAREN_EXPR@125..139 + L_PAREN@125..126 "(" + LET_EXPR@126..138 + LET_KW@126..129 "let" + WHITESPACE@129..130 " " + WILDCARD_PAT@130..131 + UNDERSCORE@130..131 "_" + WHITESPACE@131..132 " " + EQ@132..133 "=" + WHITESPACE@133..134 " " + PATH_EXPR@134..138 + PATH@134..138 + PATH_SEGMENT@134..138 + 
NAME_REF@134..138 + IDENT@134..138 "None" + R_PAREN@138..139 ")" + SEMICOLON@139..140 ";" + WHITESPACE@140..149 "\n " + WHILE_EXPR@149..273 + WHILE_KW@149..154 "while" + WHITESPACE@154..155 " " + LET_EXPR@155..167 + LET_KW@155..158 "let" + WHITESPACE@158..159 " " + WILDCARD_PAT@159..160 + UNDERSCORE@159..160 "_" + WHITESPACE@160..161 " " + EQ@161..162 "=" + WHITESPACE@162..163 " " + PATH_EXPR@163..167 + PATH@163..167 + PATH_SEGMENT@163..167 + NAME_REF@163..167 + IDENT@163..167 "None" + WHITESPACE@167..168 " " + BLOCK_EXPR@168..273 + STMT_LIST@168..273 + L_CURLY@168..169 "{" + WHITESPACE@169..182 "\n " + MATCH_EXPR@182..263 + MATCH_KW@182..187 "match" + WHITESPACE@187..188 " " + PATH_EXPR@188..192 + PATH@188..192 + PATH_SEGMENT@188..192 + NAME_REF@188..192 + IDENT@188..192 "None" + WHITESPACE@192..193 " " + MATCH_ARM_LIST@193..263 + L_CURLY@193..194 "{" + WHITESPACE@194..211 "\n " + MATCH_ARM@211..249 + WILDCARD_PAT@211..212 + UNDERSCORE@211..212 "_" + WHITESPACE@212..213 " " + MATCH_GUARD@213..228 + IF_KW@213..215 "if" + WHITESPACE@215..216 " " + LET_EXPR@216..228 + LET_KW@216..219 "let" + WHITESPACE@219..220 " " + WILDCARD_PAT@220..221 + UNDERSCORE@220..221 "_" + WHITESPACE@221..222 " " + EQ@222..223 "=" + WHITESPACE@223..224 " " + PATH_EXPR@224..228 + PATH@224..228 + PATH_SEGMENT@224..228 + NAME_REF@224..228 + IDENT@224..228 "None" + WHITESPACE@228..229 " " + FAT_ARROW@229..231 "=>" + WHITESPACE@231..232 " " + BLOCK_EXPR@232..249 + STMT_LIST@232..249 + L_CURLY@232..233 "{" + WHITESPACE@233..234 " " + LET_STMT@234..247 + LET_KW@234..237 "let" + WHITESPACE@237..238 " " + WILDCARD_PAT@238..239 + UNDERSCORE@238..239 "_" + WHITESPACE@239..240 " " + EQ@240..241 "=" + WHITESPACE@241..242 " " + PATH_EXPR@242..246 + PATH@242..246 + PATH_SEGMENT@242..246 + NAME_REF@242..246 + IDENT@242..246 "None" + SEMICOLON@246..247 ";" + WHITESPACE@247..248 " " + R_CURLY@248..249 "}" + WHITESPACE@249..262 "\n " + R_CURLY@262..263 "}" + WHITESPACE@263..272 "\n " + R_CURLY@272..273 "}" + 
WHITESPACE@273..278 "\n " + R_CURLY@278..279 "}" + WHITESPACE@279..280 "\n" + R_CURLY@280..281 "}" + WHITESPACE@281..282 "\n" +error 29..41: `let` expressions are not supported here +error 67..79: `let` expressions are not supported here +error 126..138: `let` expressions are not supported here diff --git a/crates/syntax/test_data/parser/validation/invalid_let_expr.rs b/crates/syntax/test_data/parser/validation/invalid_let_expr.rs new file mode 100644 index 000000000000..1515ae5334d2 --- /dev/null +++ b/crates/syntax/test_data/parser/validation/invalid_let_expr.rs @@ -0,0 +1,14 @@ +fn foo() { + const _: () = let _ = None; + + let _ = if true { (let _ = None) }; + + if true && (let _ = None) { + (let _ = None); + while let _ = None { + match None { + _ if let _ = None => { let _ = None; } + } + } + } +} diff --git a/crates/text_edit/src/lib.rs b/crates/text_edit/src/lib.rs index ab50052db218..0169c52b2c7b 100644 --- a/crates/text_edit/src/lib.rs +++ b/crates/text_edit/src/lib.rs @@ -90,28 +90,22 @@ impl TextEdit { _ => (), } - let mut total_len = TextSize::of(&*text); + let text_size = TextSize::of(&*text); + let mut total_len = text_size.clone(); for indel in &self.indels { total_len += TextSize::of(&indel.insert); - total_len -= indel.delete.end() - indel.delete.start(); + total_len -= indel.delete.len(); } - let mut buf = String::with_capacity(total_len.into()); - let mut prev = 0; - for indel in &self.indels { - let start: usize = indel.delete.start().into(); - let end: usize = indel.delete.end().into(); - if start > prev { - buf.push_str(&text[prev..start]); - } - buf.push_str(&indel.insert); - prev = end; + + if let Some(additional) = total_len.checked_sub(text_size.into()) { + text.reserve(additional.into()); + } + + for indel in self.indels.iter().rev() { + indel.apply(text); } - buf.push_str(&text[prev..text.len()]); - assert_eq!(TextSize::of(&buf), total_len); - // FIXME: figure out a way to mutate the text in-place or reuse the - // memory in some other 
way - *text = buf; + assert_eq!(TextSize::of(&*text), total_len); } pub fn union(&mut self, other: TextEdit) -> Result<(), TextEdit> { @@ -203,3 +197,45 @@ fn check_disjoint_and_sort(indels: &mut [impl std::borrow::Borrow]) -> bo l.delete.end() <= r.delete.start() || l == r }) } + +#[cfg(test)] +mod tests { + use super::{TextEdit, TextEditBuilder, TextRange}; + + fn range(start: u32, end: u32) -> TextRange { + TextRange::new(start.into(), end.into()) + } + + #[test] + fn test_apply() { + let mut text = "_11h1_2222_xx3333_4444_6666".to_string(); + let mut builder = TextEditBuilder::default(); + builder.replace(range(3, 4), "1".to_string()); + builder.delete(range(11, 13)); + builder.insert(22.into(), "_5555".to_string()); + + let text_edit = builder.finish(); + text_edit.apply(&mut text); + + assert_eq!(text, "_1111_2222_3333_4444_5555_6666") + } + + #[test] + fn test_union() { + let mut edit1 = TextEdit::delete(range(7, 11)); + let mut builder = TextEditBuilder::default(); + builder.delete(range(1, 5)); + builder.delete(range(13, 17)); + + let edit2 = builder.finish(); + assert!(edit1.union(edit2).is_ok()); + assert_eq!(edit1.indels.len(), 3); + } + + #[test] + fn test_union_panics() { + let mut edit1 = TextEdit::delete(range(7, 11)); + let edit2 = TextEdit::delete(range(9, 13)); + assert!(edit1.union(edit2).is_err()); + } +} diff --git a/docs/dev/architecture.md b/docs/dev/architecture.md index 2f2c4351c737..99ddc188d814 100644 --- a/docs/dev/architecture.md +++ b/docs/dev/architecture.md @@ -111,7 +111,7 @@ env UPDATE_EXPECT=1 cargo qt After adding a new inline test you need to run `cargo test -p xtask` and also update the test data as described above. 
-Note [`api_walkthrough`](https://github.com/rust-analyzer/rust-analyzer/blob/2fb6af89eb794f775de60b82afe56b6f986c2a40/crates/ra_syntax/src/lib.rs#L190-L348) +Note [`api_walkthrough`](https://github.com/rust-analyzer/rust-analyzer/blob/2fb6af89eb794f775de60b82afe56b6f986c2a40/crates/ra_syntax/src/lib.rs#L190-L348) in particular: it shows off various methods of working with syntax tree. See [#93](https://github.com/rust-analyzer/rust-analyzer/pull/93) for an example PR which fixes a bug in the grammar. diff --git a/docs/user/generated_config.adoc b/docs/user/generated_config.adoc index b10b0d355222..7e6c8225b184 100644 --- a/docs/user/generated_config.adoc +++ b/docs/user/generated_config.adoc @@ -213,6 +213,11 @@ Note that your client must specify the `additionalTextEdits` LSP client capabili Toggles the additional completions that automatically show method calls and field accesses with `self` prefixed to them when inside a method. -- +[[rust-analyzer.completion.privateEditable.enable]]rust-analyzer.completion.privateEditable.enable (default: `false`):: ++ +-- +Enables completions of private items and fields that are defined in the current workspace even if they are not visible at the current position. +-- [[rust-analyzer.diagnostics.enable]]rust-analyzer.diagnostics.enable (default: `true`):: + -- diff --git a/docs/user/manual.adoc b/docs/user/manual.adoc index c631ae46554a..3578c0d78a6c 100644 --- a/docs/user/manual.adoc +++ b/docs/user/manual.adoc @@ -40,6 +40,20 @@ To add the sources manually, run the following command: $ rustup component add rust-src ``` +=== Toolchain + +Only the latest stable standard library source is officially supported for use with rust-analyzer. +If you are using an older toolchain or have an override set, rust-analyzer may fail to understand the Rust source. +You will either need to update your toolchain or use an older version of rust-analyzer that is compatible with your toolchain. 
+ +If you are using an override in your project, you can still force rust-analyzer to use the stable toolchain via the environment variable `RUSTUP_TOOLCHAIN`. +For example, with VS Code or coc-rust-analyzer: + +[source,json] +---- +{ "rust-analyzer.server.extraEnv": { "RUSTUP_TOOLCHAIN": "stable" } } +---- + === VS Code This is the best supported editor at the moment. @@ -86,8 +100,7 @@ It will ask your permission to download the matching language server version bin ===== Nightly We ship nightly releases for VS Code. -You can opt in to these by switching to the pre-release version in the Code extension page or settings. -To help us out with testing the newest code and follow the bleeding edge of our `master`, please use the following config: +To help us out by testing the newest code, you can enable pre-release versions in the Code extension page. ==== Manual installation @@ -240,7 +253,6 @@ The are several LSP client implementations for vim or neovim: * same configurations as VSCode extension, `rust-analyzer.server.path`, `rust-analyzer.cargo.features` etc. * same commands too, `rust-analyzer.analyzerStatus`, `rust-analyzer.ssr` etc. * inlay hints for variables and method chaining, _Neovim Only_ - * semantic highlighting is not implemented yet Note: for code actions, use `coc-codeaction-cursor` and `coc-codeaction-selected`; `coc-codeaction` and `coc-codeaction-line` are unlikely to be useful. @@ -362,22 +374,19 @@ if executable('rust-analyzer') endif ---- -=== Sublime Text 3 - -Prerequisites: You have installed the <>. +=== Sublime Text -You also need the `LSP` package. -To install it: +==== Sublime Text 4: +* Follow the instructions in link:https://github.com/sublimelsp/LSP-rust-analyzer[LSP-rust-analyzer]. -1. If you've never installed a Sublime Text package, install Package Control: - * Open the command palette (Win/Linux: `ctrl+shift+p`, Mac: `cmd+shift+p`) - * Type `Install Package Control`, press enter -2. 
In the command palette, run `Package control: Install package`, and in the list that pops up, type `LSP` and press enter. +NOTE: Install link:https://packagecontrol.io/packages/LSP-file-watcher-chokidar[LSP-file-watcher-chokidar] to enable file watching (`workspace/didChangeWatchedFiles`). -Finally, with your Rust project open, in the command palette, run `LSP: Enable Language Server In Project` or `LSP: Enable Language Server Globally`, then select `rust-analyzer` in the list that pops up to enable the rust-analyzer LSP. -The latter means that rust-analyzer is enabled by default in Rust projects. +==== Sublime Text 3: +* Install the <>. +* Install the link:https://packagecontrol.io/packages/LSP[LSP package]. +* From the command palette, run `LSP: Enable Language Server Globally` and select `rust-analyzer`. -If it worked, you should see "rust-analyzer, Line X, Column Y" on the left side of the bottom bar, and after waiting a bit, functionality like tooltips on hovering over variables should become available. +If it worked, you should see "rust-analyzer, Line X, Column Y" on the left side of the status bar, and after waiting a bit, functionality like tooltips on hovering over variables should become available. If you get an error saying `No such file or directory: 'rust-analyzer'`, see the <> section on installing the language server binary. @@ -427,6 +436,40 @@ Then click on apply, and restart the LSP server for your rust project. https://gitlab.com/cppit/jucipp[juCi++] has built-in support for the language server protocol, and since version 1.7.0 offers installation of both Rust and rust-analyzer when opening a Rust file. +=== Kakoune + +https://kakoune.org/[Kakoune] supports LSP with the help of https://github.com/kak-lsp/kak-lsp[`kak-lsp`]. +Follow the https://github.com/kak-lsp/kak-lsp#installation[instructions] to install `kak-lsp`.
+To configure `kak-lsp`, refer to the https://github.com/kak-lsp/kak-lsp#configuring-kak-lsp[configuration section] which is basically about copying the https://github.com/kak-lsp/kak-lsp/blob/master/kak-lsp.toml[configuration file] in the right place (latest versions should use `rust-analyzer` by default). + +Finally, you need to configure Kakoune to talk to `kak-lsp` (see https://github.com/kak-lsp/kak-lsp#usage[Usage section]). +A basic configuration will only get you LSP but you can also activate inlay diagnostics and auto-formatting on save. +The following might help you get all of this. + +[source,txt] +---- +eval %sh{kak-lsp --kakoune -s $kak_session} # Not needed if you load it with plug.kak. +hook global WinSetOption filetype=rust %{ + # Enable LSP + lsp-enable-window + + # Auto-formatting on save + hook window BufWritePre .* lsp-formatting-sync + + # Configure inlay hints (only on save) + hook window -group rust-inlay-hints BufWritePost .* rust-analyzer-inlay-hints + hook -once -always window WinSetOption filetype=.* %{ + remove-hooks window rust-inlay-hints + } +} +---- + +=== Helix + +https://docs.helix-editor.com/[Helix] supports LSP by default. +However, it won't install `rust-analyzer` automatically. +You can follow instructions for installing <>. + == Troubleshooting Start with looking at the rust-analyzer version. diff --git a/editors/code/package.json b/editors/code/package.json index b114bf4cfa0a..1252752a9ae0 100644 --- a/editors/code/package.json +++ b/editors/code/package.json @@ -310,7 +310,7 @@ ], "scope": "machine-overridable", "default": null, - "markdownDescription": "Path to rust-analyzer executable (points to bundled binary by default). If this is set, then `#rust-analyzer.updates.channel#` setting is not used" + "markdownDescription": "Path to rust-analyzer executable (points to bundled binary by default)." 
}, "rust-analyzer.server.extraEnv": { "type": [ @@ -643,6 +643,11 @@ "default": true, "type": "boolean" }, + "rust-analyzer.completion.privateEditable.enable": { + "markdownDescription": "Enables completions of private items and fields that are defined in the current workspace even if they are not visible at the current position.", + "default": false, + "type": "boolean" + }, "rust-analyzer.diagnostics.enable": { "markdownDescription": "Whether to show native rust-analyzer diagnostics.", "default": true, diff --git a/xtask/src/flags.rs b/xtask/src/flags.rs index 69b3cb9c1751..993c64cceaf0 100644 --- a/xtask/src/flags.rs +++ b/xtask/src/flags.rs @@ -113,9 +113,15 @@ pub struct Bb { impl Xtask { pub const HELP: &'static str = Self::HELP_; + #[allow(dead_code)] pub fn from_env() -> xflags::Result { Self::from_env_() } + + #[allow(dead_code)] + pub fn from_vec(args: Vec) -> xflags::Result { + Self::from_vec_(args) + } } // generated end diff --git a/xtask/src/metrics.rs b/xtask/src/metrics.rs index 7b190d425f47..e4d3e981aff4 100644 --- a/xtask/src/metrics.rs +++ b/xtask/src/metrics.rs @@ -32,6 +32,11 @@ impl flags::Metrics { let _env = pushenv("RA_METRICS", "1"); + { + // https://github.com/rust-analyzer/rust-analyzer/issues/9997 + let _d = pushd("target/rustc-perf/collector/benchmarks/webrender")?; + cmd!("cargo update -p url --precise 1.6.1").run()?; + } metrics.measure_build()?; metrics.measure_analysis_stats_self()?; metrics.measure_analysis_stats("ripgrep")?;