Merge #11287
11287: fix: rust-analyzer spamming error messages when the workspace is not loaded r=lnicola a=Milo123459
Fixes #10120
Co-authored-by: Milo <50248166+Milo123459@users.noreply.github.com>
diff --git a/crates/hir/src/has_source.rs b/crates/hir/src/has_source.rs
index a3f4a65..8669b00 100644
--- a/crates/hir/src/has_source.rs
+++ b/crates/hir/src/has_source.rs
@@ -16,6 +16,9 @@
pub trait HasSource {
type Ast;
+ /// Fetches the definition's source node.
+ /// Using [`crate::Semantics::source`] is preferred when working with [`crate::Semantics`],
+ /// as that caches the parsed file in the semantics' cache.
fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>>;
}
diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs
index b0ff797..4b4eb38 100644
--- a/crates/hir/src/lib.rs
+++ b/crates/hir/src/lib.rs
@@ -28,6 +28,7 @@
pub mod diagnostics;
pub mod db;
+pub mod symbols;
mod display;
@@ -40,14 +41,18 @@
adt::{ReprKind, VariantData},
body::{BodyDiagnostic, SyntheticSyntax},
expr::{BindingAnnotation, LabelId, Pat, PatId},
+ item_tree::ItemTreeNode,
lang_item::LangItemTarget,
- nameres,
+ nameres::{self, diagnostics::DefDiagnostic},
per_ns::PerNs,
resolver::{HasResolver, Resolver},
- AttrDefId, ConstId, ConstParamId, EnumId, FunctionId, GenericDefId, HasModule, LifetimeParamId,
- LocalEnumVariantId, LocalFieldId, StaticId, StructId, TypeAliasId, TypeParamId, UnionId,
+ src::HasSource as _,
+ AdtId, AssocItemId, AssocItemLoc, AttrDefId, ConstId, ConstParamId, DefWithBodyId, EnumId,
+ FunctionId, GenericDefId, HasModule, ImplId, ItemContainerId, LifetimeParamId,
+ LocalEnumVariantId, LocalFieldId, Lookup, ModuleId, StaticId, StructId, TraitId, TypeAliasId,
+ TypeParamId, UnionId,
};
-use hir_expand::{name::name, MacroCallKind, MacroDefKind};
+use hir_expand::{name::name, MacroCallKind, MacroDefId, MacroDefKind};
use hir_ty::{
autoderef,
consteval::{eval_const, ComputedExpr, ConstEvalCtx, ConstEvalError, ConstExt},
@@ -106,28 +111,14 @@
builtin_attr::AttributeTemplate,
find_path::PrefixKind,
import_map,
- item_scope::ItemScope,
- item_tree::ItemTreeNode,
- nameres::{DefMap, ModuleData, ModuleOrigin, ModuleSource},
+ nameres::ModuleSource,
path::{ModPath, PathKind},
- src::HasSource as DefHasSource, // xx: I don't like this shadowing of HasSource... :(
type_ref::{Mutability, TypeRef},
visibility::Visibility,
- AdtId,
- AssocItemId,
- AssocItemLoc,
- DefWithBodyId,
- ImplId,
- ItemContainerId,
- ItemLoc,
- Lookup,
- ModuleDefId,
- ModuleId,
- TraitId,
},
hir_expand::{
name::{known, Name},
- ExpandResult, HirFileId, InFile, MacroDefId, MacroFile, Origin,
+ ExpandResult, HirFileId, InFile, MacroFile, Origin,
},
hir_ty::display::HirDisplay,
};
@@ -191,6 +182,11 @@
Module { id: def_map.module_id(def_map.root()) }
}
+ pub fn modules(self, db: &dyn HirDatabase) -> Vec<Module> {
+ let def_map = db.crate_def_map(self.id);
+ def_map.modules().map(|(id, _)| def_map.module_id(id).into()).collect()
+ }
+
pub fn root_file(self, db: &dyn HirDatabase) -> FileId {
db.crate_graph()[self.id].root_file_id
}
@@ -523,191 +519,7 @@
// FIXME: This is accidentally quadratic.
continue;
}
- match &diag.kind {
- DefDiagnosticKind::UnresolvedModule { ast: declaration, candidate } => {
- let decl = declaration.to_node(db.upcast());
- acc.push(
- UnresolvedModule {
- decl: InFile::new(declaration.file_id, AstPtr::new(&decl)),
- candidate: candidate.clone(),
- }
- .into(),
- )
- }
- DefDiagnosticKind::UnresolvedExternCrate { ast } => {
- let item = ast.to_node(db.upcast());
- acc.push(
- UnresolvedExternCrate {
- decl: InFile::new(ast.file_id, AstPtr::new(&item)),
- }
- .into(),
- );
- }
-
- DefDiagnosticKind::UnresolvedImport { id, index } => {
- let file_id = id.file_id();
- let item_tree = id.item_tree(db.upcast());
- let import = &item_tree[id.value];
-
- let use_tree = import.use_tree_to_ast(db.upcast(), file_id, *index);
- acc.push(
- UnresolvedImport { decl: InFile::new(file_id, AstPtr::new(&use_tree)) }
- .into(),
- );
- }
-
- DefDiagnosticKind::UnconfiguredCode { ast, cfg, opts } => {
- let item = ast.to_node(db.upcast());
- acc.push(
- InactiveCode {
- node: ast.with_value(AstPtr::new(&item).into()),
- cfg: cfg.clone(),
- opts: opts.clone(),
- }
- .into(),
- );
- }
-
- DefDiagnosticKind::UnresolvedProcMacro { ast } => {
- let mut precise_location = None;
- let (node, name) = match ast {
- MacroCallKind::FnLike { ast_id, .. } => {
- let node = ast_id.to_node(db.upcast());
- (ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&node))), None)
- }
- MacroCallKind::Derive { ast_id, derive_name, .. } => {
- let node = ast_id.to_node(db.upcast());
-
- // Compute the precise location of the macro name's token in the derive
- // list.
- // FIXME: This does not handle paths to the macro, but neither does the
- // rest of r-a.
- let derive_attrs =
- node.attrs().filter_map(|attr| match attr.as_simple_call() {
- Some((name, args)) if name == "derive" => Some(args),
- _ => None,
- });
- 'outer: for attr in derive_attrs {
- let tokens =
- attr.syntax().children_with_tokens().filter_map(|elem| {
- match elem {
- syntax::NodeOrToken::Node(_) => None,
- syntax::NodeOrToken::Token(tok) => Some(tok),
- }
- });
- for token in tokens {
- if token.kind() == SyntaxKind::IDENT
- && token.text() == &**derive_name
- {
- precise_location = Some(token.text_range());
- break 'outer;
- }
- }
- }
-
- (
- ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&node))),
- Some(derive_name.clone()),
- )
- }
- MacroCallKind::Attr { ast_id, invoc_attr_index, attr_name, .. } => {
- let node = ast_id.to_node(db.upcast());
- let attr = node
- .doc_comments_and_attrs()
- .nth((*invoc_attr_index) as usize)
- .and_then(Either::right)
- .unwrap_or_else(|| {
- panic!("cannot find attribute #{}", invoc_attr_index)
- });
- (
- ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&attr))),
- Some(attr_name.clone()),
- )
- }
- };
- acc.push(
- UnresolvedProcMacro {
- node,
- precise_location,
- macro_name: name.map(Into::into),
- }
- .into(),
- );
- }
-
- DefDiagnosticKind::UnresolvedMacroCall { ast, path } => {
- let node = ast.to_node(db.upcast());
- acc.push(
- UnresolvedMacroCall {
- macro_call: InFile::new(ast.file_id, AstPtr::new(&node)),
- path: path.clone(),
- }
- .into(),
- );
- }
-
- DefDiagnosticKind::MacroError { ast, message } => {
- let node = match ast {
- MacroCallKind::FnLike { ast_id, .. } => {
- let node = ast_id.to_node(db.upcast());
- ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&node)))
- }
- MacroCallKind::Derive { ast_id, .. } => {
- // FIXME: point to the attribute instead, this creates very large diagnostics
- let node = ast_id.to_node(db.upcast());
- ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&node)))
- }
- MacroCallKind::Attr { ast_id, .. } => {
- // FIXME: point to the attribute instead, this creates very large diagnostics
- let node = ast_id.to_node(db.upcast());
- ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&node)))
- }
- };
- acc.push(MacroError { node, message: message.clone() }.into());
- }
-
- DefDiagnosticKind::UnimplementedBuiltinMacro { ast } => {
- let node = ast.to_node(db.upcast());
- // Must have a name, otherwise we wouldn't emit it.
- let name = node.name().expect("unimplemented builtin macro with no name");
- acc.push(
- UnimplementedBuiltinMacro {
- node: ast.with_value(SyntaxNodePtr::from(AstPtr::new(&name))),
- }
- .into(),
- );
- }
- DefDiagnosticKind::InvalidDeriveTarget { ast, id } => {
- let node = ast.to_node(db.upcast());
- let derive = node.attrs().nth(*id as usize);
- match derive {
- Some(derive) => {
- acc.push(
- InvalidDeriveTarget {
- node: ast.with_value(SyntaxNodePtr::from(AstPtr::new(&derive))),
- }
- .into(),
- );
- }
- None => stdx::never!("derive diagnostic on item without derive attribute"),
- }
- }
- DefDiagnosticKind::MalformedDerive { ast, id } => {
- let node = ast.to_node(db.upcast());
- let derive = node.attrs().nth(*id as usize);
- match derive {
- Some(derive) => {
- acc.push(
- MalformedDerive {
- node: ast.with_value(SyntaxNodePtr::from(AstPtr::new(&derive))),
- }
- .into(),
- );
- }
- None => stdx::never!("derive diagnostic on item without derive attribute"),
- }
- }
- }
+ emit_def_diagnostic(db, acc, diag);
}
for decl in self.declarations(db) {
match decl {
@@ -767,6 +579,180 @@
}
}
+fn emit_def_diagnostic(db: &dyn HirDatabase, acc: &mut Vec<AnyDiagnostic>, diag: &DefDiagnostic) {
+ match &diag.kind {
+ DefDiagnosticKind::UnresolvedModule { ast: declaration, candidate } => {
+ let decl = declaration.to_node(db.upcast());
+ acc.push(
+ UnresolvedModule {
+ decl: InFile::new(declaration.file_id, AstPtr::new(&decl)),
+ candidate: candidate.clone(),
+ }
+ .into(),
+ )
+ }
+ DefDiagnosticKind::UnresolvedExternCrate { ast } => {
+ let item = ast.to_node(db.upcast());
+ acc.push(
+ UnresolvedExternCrate { decl: InFile::new(ast.file_id, AstPtr::new(&item)) }.into(),
+ );
+ }
+
+ DefDiagnosticKind::UnresolvedImport { id, index } => {
+ let file_id = id.file_id();
+ let item_tree = id.item_tree(db.upcast());
+ let import = &item_tree[id.value];
+
+ let use_tree = import.use_tree_to_ast(db.upcast(), file_id, *index);
+ acc.push(
+ UnresolvedImport { decl: InFile::new(file_id, AstPtr::new(&use_tree)) }.into(),
+ );
+ }
+
+ DefDiagnosticKind::UnconfiguredCode { ast, cfg, opts } => {
+ let item = ast.to_node(db.upcast());
+ acc.push(
+ InactiveCode {
+ node: ast.with_value(AstPtr::new(&item).into()),
+ cfg: cfg.clone(),
+ opts: opts.clone(),
+ }
+ .into(),
+ );
+ }
+
+ DefDiagnosticKind::UnresolvedProcMacro { ast } => {
+ let mut precise_location = None;
+ let (node, name) = match ast {
+ MacroCallKind::FnLike { ast_id, .. } => {
+ let node = ast_id.to_node(db.upcast());
+ (ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&node))), None)
+ }
+ MacroCallKind::Derive { ast_id, derive_name, .. } => {
+ let node = ast_id.to_node(db.upcast());
+
+ // Compute the precise location of the macro name's token in the derive
+ // list.
+ // FIXME: This does not handle paths to the macro, but neither does the
+ // rest of r-a.
+ let derive_attrs =
+ node.attrs().filter_map(|attr| match attr.as_simple_call() {
+ Some((name, args)) if name == "derive" => Some(args),
+ _ => None,
+ });
+ 'outer: for attr in derive_attrs {
+ let tokens =
+ attr.syntax().children_with_tokens().filter_map(|elem| match elem {
+ syntax::NodeOrToken::Node(_) => None,
+ syntax::NodeOrToken::Token(tok) => Some(tok),
+ });
+ for token in tokens {
+ if token.kind() == SyntaxKind::IDENT && token.text() == &**derive_name {
+ precise_location = Some(token.text_range());
+ break 'outer;
+ }
+ }
+ }
+
+ (
+ ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&node))),
+ Some(derive_name.clone()),
+ )
+ }
+ MacroCallKind::Attr { ast_id, invoc_attr_index, attr_name, .. } => {
+ let node = ast_id.to_node(db.upcast());
+ let attr = node
+ .doc_comments_and_attrs()
+ .nth((*invoc_attr_index) as usize)
+ .and_then(Either::right)
+ .unwrap_or_else(|| panic!("cannot find attribute #{}", invoc_attr_index));
+ (
+ ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&attr))),
+ Some(attr_name.clone()),
+ )
+ }
+ };
+ acc.push(
+ UnresolvedProcMacro { node, precise_location, macro_name: name.map(Into::into) }
+ .into(),
+ );
+ }
+
+ DefDiagnosticKind::UnresolvedMacroCall { ast, path } => {
+ let node = ast.to_node(db.upcast());
+ acc.push(
+ UnresolvedMacroCall {
+ macro_call: InFile::new(ast.file_id, AstPtr::new(&node)),
+ path: path.clone(),
+ }
+ .into(),
+ );
+ }
+
+ DefDiagnosticKind::MacroError { ast, message } => {
+ let node = match ast {
+ MacroCallKind::FnLike { ast_id, .. } => {
+ let node = ast_id.to_node(db.upcast());
+ ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&node)))
+ }
+ MacroCallKind::Derive { ast_id, .. } => {
+ // FIXME: point to the attribute instead, this creates very large diagnostics
+ let node = ast_id.to_node(db.upcast());
+ ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&node)))
+ }
+ MacroCallKind::Attr { ast_id, .. } => {
+ // FIXME: point to the attribute instead, this creates very large diagnostics
+ let node = ast_id.to_node(db.upcast());
+ ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&node)))
+ }
+ };
+ acc.push(MacroError { node, message: message.clone() }.into());
+ }
+
+ DefDiagnosticKind::UnimplementedBuiltinMacro { ast } => {
+ let node = ast.to_node(db.upcast());
+ // Must have a name, otherwise we wouldn't emit it.
+ let name = node.name().expect("unimplemented builtin macro with no name");
+ acc.push(
+ UnimplementedBuiltinMacro {
+ node: ast.with_value(SyntaxNodePtr::from(AstPtr::new(&name))),
+ }
+ .into(),
+ );
+ }
+ DefDiagnosticKind::InvalidDeriveTarget { ast, id } => {
+ let node = ast.to_node(db.upcast());
+ let derive = node.attrs().nth(*id as usize);
+ match derive {
+ Some(derive) => {
+ acc.push(
+ InvalidDeriveTarget {
+ node: ast.with_value(SyntaxNodePtr::from(AstPtr::new(&derive))),
+ }
+ .into(),
+ );
+ }
+ None => stdx::never!("derive diagnostic on item without derive attribute"),
+ }
+ }
+ DefDiagnosticKind::MalformedDerive { ast, id } => {
+ let node = ast.to_node(db.upcast());
+ let derive = node.attrs().nth(*id as usize);
+ match derive {
+ Some(derive) => {
+ acc.push(
+ MalformedDerive {
+ node: ast.with_value(SyntaxNodePtr::from(AstPtr::new(&derive))),
+ }
+ .into(),
+ );
+ }
+ None => stdx::never!("derive diagnostic on item without derive attribute"),
+ }
+ }
+ }
+}
+
impl HasVisibility for Module {
fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
let def_map = self.id.def_map(db.upcast());
@@ -1107,7 +1093,14 @@
pub fn diagnostics(self, db: &dyn HirDatabase, acc: &mut Vec<AnyDiagnostic>) {
let krate = self.module(db).id.krate();
- let source_map = db.body_with_source_map(self.into()).1;
+ let (body, source_map) = db.body_with_source_map(self.into());
+
+ for (_, def_map) in body.blocks(db.upcast()) {
+ for diag in def_map.diagnostics() {
+ emit_def_diagnostic(db, acc, diag);
+ }
+ }
+
for diag in source_map.diagnostics() {
match diag {
BodyDiagnostic::InactiveCode { node, cfg, opts } => acc.push(
@@ -1610,6 +1603,12 @@
}
impl Trait {
+ pub fn lang(db: &dyn HirDatabase, krate: Crate, name: &Name) -> Option<Trait> {
+ db.lang_item(krate.into(), name.to_smol_str())
+ .and_then(LangItemTarget::as_trait)
+ .map(Into::into)
+ }
+
pub fn module(self, db: &dyn HirDatabase) -> Module {
Module { id: self.id.lookup(db.upcast()).container }
}
diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index 444fb4e..9596e81 100644
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -2,7 +2,7 @@
mod source_to_def;
-use std::{cell::RefCell, fmt};
+use std::{cell::RefCell, fmt, iter};
use base_db::{FileId, FileRange};
use either::Either;
@@ -443,8 +443,7 @@
fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
let sa = self.analyze_no_infer(macro_call.syntax());
let file_id = sa.expand(self.db, InFile::new(sa.file_id, macro_call))?;
- let node = self.db.parse_or_expand(file_id)?;
- self.cache(node.clone(), file_id);
+ let node = self.parse_or_expand(file_id)?;
Some(node)
}
@@ -452,8 +451,7 @@
let src = self.find_file(item.syntax()).with_value(item.clone());
let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(src))?;
let file_id = macro_call_id.as_file();
- let node = self.db.parse_or_expand(file_id)?;
- self.cache(node.clone(), file_id);
+ let node = self.parse_or_expand(file_id)?;
Some(node)
}
@@ -750,10 +748,9 @@
}
fn diagnostics_display_range(&self, src: InFile<SyntaxNodePtr>) -> FileRange {
- let root = self.db.parse_or_expand(src.file_id).unwrap();
- let node = src.value.to_node(&root);
- self.cache(root, src.file_id);
- src.with_value(&node).original_file_range(self.db.upcast())
+ let root = self.parse_or_expand(src.file_id).unwrap();
+ let node = src.map(|it| it.to_node(&root));
+ node.as_ref().original_file_range(self.db.upcast())
}
fn token_ancestors_with_macros(
@@ -768,7 +765,17 @@
node: SyntaxNode,
) -> impl Iterator<Item = SyntaxNode> + Clone + '_ {
let node = self.find_file(&node);
- node.ancestors_with_macros(self.db.upcast()).map(|it| it.value)
+ let db = self.db.upcast();
+ iter::successors(Some(node.cloned()), move |&InFile { file_id, ref value }| {
+ match value.parent() {
+ Some(parent) => Some(InFile::new(file_id, parent)),
+ None => {
+ self.cache(value.clone(), file_id);
+ file_id.call_node(db)
+ }
+ }
+ })
+ .map(|it| it.value)
}
fn ancestors_at_offset_with_macros(
diff --git a/crates/hir/src/semantics/source_to_def.rs b/crates/hir/src/semantics/source_to_def.rs
index f9c6564..5061861 100644
--- a/crates/hir/src/semantics/source_to_def.rs
+++ b/crates/hir/src/semantics/source_to_def.rs
@@ -242,7 +242,7 @@
pub(super) fn item_to_macro_call(&mut self, src: InFile<ast::Item>) -> Option<MacroCallId> {
let map = self.dyn_map(src.as_ref())?;
- map[keys::ATTR_MACRO_CALL].get(&src).copied()
+ map[keys::ATTR_MACRO_CALL].get(&src.value).copied()
}
pub(super) fn attr_to_derive_macro_call(
@@ -251,7 +251,7 @@
src: InFile<ast::Attr>,
) -> Option<(AttrId, &[Option<MacroCallId>])> {
let map = self.dyn_map(item)?;
- map[keys::DERIVE_MACRO_CALL].get(&src).map(|(id, ids)| (*id, &**ids))
+ map[keys::DERIVE_MACRO_CALL].get(&src.value).map(|(id, ids)| (*id, &**ids))
}
fn to_def<Ast: AstNode + 'static, ID: Copy + 'static>(
@@ -259,7 +259,7 @@
src: InFile<Ast>,
key: Key<Ast, ID>,
) -> Option<ID> {
- self.dyn_map(src.as_ref())?[key].get(&src).copied()
+ self.dyn_map(src.as_ref())?[key].get(&src.value).copied()
}
fn dyn_map<Ast: AstNode + 'static>(&mut self, src: InFile<&Ast>) -> Option<&DynMap> {
@@ -277,7 +277,7 @@
pub(super) fn type_param_to_def(&mut self, src: InFile<ast::TypeParam>) -> Option<TypeParamId> {
let container: ChildContainer = self.find_generic_param_container(src.syntax())?.into();
let dyn_map = self.cache_for(container, src.file_id);
- dyn_map[keys::TYPE_PARAM].get(&src).copied()
+ dyn_map[keys::TYPE_PARAM].get(&src.value).copied()
}
pub(super) fn lifetime_param_to_def(
@@ -286,7 +286,7 @@
) -> Option<LifetimeParamId> {
let container: ChildContainer = self.find_generic_param_container(src.syntax())?.into();
let dyn_map = self.cache_for(container, src.file_id);
- dyn_map[keys::LIFETIME_PARAM].get(&src).copied()
+ dyn_map[keys::LIFETIME_PARAM].get(&src.value).copied()
}
pub(super) fn const_param_to_def(
@@ -295,7 +295,7 @@
) -> Option<ConstParamId> {
let container: ChildContainer = self.find_generic_param_container(src.syntax())?.into();
let dyn_map = self.cache_for(container, src.file_id);
- dyn_map[keys::CONST_PARAM].get(&src).copied()
+ dyn_map[keys::CONST_PARAM].get(&src.value).copied()
}
pub(super) fn generic_param_to_def(
@@ -316,10 +316,9 @@
}
pub(super) fn macro_to_def(&mut self, src: InFile<ast::Macro>) -> Option<MacroDefId> {
- let makro =
- self.dyn_map(src.as_ref()).and_then(|it| it[keys::MACRO_CALL].get(&src).copied());
- if let res @ Some(_) = makro {
- return res;
+ let makro = self.dyn_map(src.as_ref()).and_then(|it| it[keys::MACRO].get(&src.value));
+ if let Some(&makro) = makro {
+ return Some(makro);
}
// Not all macros are recorded in the dyn map, only the ones behaving like items, so fall back
diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs
index ef3dfa1..869f4a1 100644
--- a/crates/hir/src/source_analyzer.rs
+++ b/crates/hir/src/source_analyzer.rs
@@ -5,7 +5,10 @@
//!
//! So, this modules should not be used during hir construction, it exists
//! purely for "IDE needs".
-use std::{iter::once, sync::Arc};
+use std::{
+ iter::{self, once},
+ sync::Arc,
+};
use hir_def::{
body::{
@@ -25,7 +28,7 @@
};
use syntax::{
ast::{self, AstNode},
- SyntaxNode, TextRange, TextSize,
+ SyntaxKind, SyntaxNode, TextRange, TextSize,
};
use crate::{
@@ -488,14 +491,20 @@
.scope_by_expr()
.iter()
.filter_map(|(id, scope)| {
- let source = source_map.expr_syntax(*id).ok()?;
- // FIXME: correctly handle macro expansion
- if source.file_id != offset.file_id {
- return None;
+ let InFile { file_id, value } = source_map.expr_syntax(*id).ok()?;
+ if offset.file_id == file_id {
+ let root = db.parse_or_expand(file_id)?;
+ let node = value.to_node(&root);
+ return Some((node.syntax().text_range(), scope));
}
- let root = source.file_syntax(db.upcast());
- let node = source.value.to_node(&root);
- Some((node.syntax().text_range(), scope))
+
+ // FIXME handle attribute expansion
+ let source = iter::successors(file_id.call_node(db.upcast()), |it| {
+ it.file_id.call_node(db.upcast())
+ })
+ .find(|it| it.file_id == offset.file_id)
+ .filter(|it| it.value.kind() == SyntaxKind::MACRO_CALL)?;
+ Some((source.value.text_range(), scope))
})
// find containing scope
.min_by_key(|(expr_range, _scope)| {
diff --git a/crates/hir/src/symbols.rs b/crates/hir/src/symbols.rs
new file mode 100644
index 0000000..f1669d5
--- /dev/null
+++ b/crates/hir/src/symbols.rs
@@ -0,0 +1,359 @@
+//! File symbol extraction.
+
+use base_db::FileRange;
+use either::Either;
+use hir_def::{
+ item_tree::ItemTreeNode, src::HasSource, AdtId, AssocItemId, AssocItemLoc, DefWithBodyId,
+ ImplId, ItemContainerId, ItemLoc, Lookup, ModuleDefId, ModuleId, TraitId,
+};
+use hir_expand::{HirFileId, InFile};
+use hir_ty::db::HirDatabase;
+use syntax::{ast::HasName, AstNode, SmolStr, SyntaxNode, SyntaxNodePtr};
+
+use crate::{HasSource as _, MacroDef, Module, Semantics};
+
+/// The actual data that is stored in the index. It should be as compact as
+/// possible.
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct FileSymbol {
+ pub name: SmolStr,
+ pub loc: DeclarationLocation,
+ pub kind: FileSymbolKind,
+ pub container_name: Option<SmolStr>,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct DeclarationLocation {
+ /// The file id for both the `ptr` and `name_ptr`.
+ pub hir_file_id: HirFileId,
+ /// This points to the whole syntax node of the declaration.
+ pub ptr: SyntaxNodePtr,
+ /// This points to the [`syntax::ast::Name`] identifier of the declaration.
+ pub name_ptr: SyntaxNodePtr,
+}
+
+impl DeclarationLocation {
+ pub fn syntax<DB: HirDatabase>(&self, sema: &Semantics<DB>) -> Option<SyntaxNode> {
+ let root = sema.parse_or_expand(self.hir_file_id)?;
+ Some(self.ptr.to_node(&root))
+ }
+
+ pub fn original_range(&self, db: &dyn HirDatabase) -> Option<FileRange> {
+ find_original_file_range(db, self.hir_file_id, &self.ptr)
+ }
+
+ pub fn original_name_range(&self, db: &dyn HirDatabase) -> Option<FileRange> {
+ find_original_file_range(db, self.hir_file_id, &self.name_ptr)
+ }
+}
+
+fn find_original_file_range(
+ db: &dyn HirDatabase,
+ file_id: HirFileId,
+ ptr: &SyntaxNodePtr,
+) -> Option<FileRange> {
+ let root = db.parse_or_expand(file_id)?;
+ let node = ptr.to_node(&root);
+ let node = InFile::new(file_id, &node);
+
+ Some(node.original_file_range(db.upcast()))
+}
+
+#[derive(PartialEq, Eq, Hash, Clone, Copy, Debug)]
+pub enum FileSymbolKind {
+ Const,
+ Enum,
+ Function,
+ Macro,
+ Module,
+ Static,
+ Struct,
+ Trait,
+ TypeAlias,
+ Union,
+}
+
+impl FileSymbolKind {
+ pub fn is_type(self: FileSymbolKind) -> bool {
+ matches!(
+ self,
+ FileSymbolKind::Struct
+ | FileSymbolKind::Enum
+ | FileSymbolKind::Trait
+ | FileSymbolKind::TypeAlias
+ | FileSymbolKind::Union
+ )
+ }
+}
+
+/// Represents an outstanding module that the symbol collector must collect symbols from.
+struct SymbolCollectorWork {
+ module_id: ModuleId,
+ parent: Option<DefWithBodyId>,
+}
+
+pub struct SymbolCollector<'a> {
+ db: &'a dyn HirDatabase,
+ symbols: Vec<FileSymbol>,
+ work: Vec<SymbolCollectorWork>,
+ current_container_name: Option<SmolStr>,
+}
+
+/// Given a [`ModuleId`] and a [`HirDatabase`], use the DefMap for the module's crate to collect
+/// all symbols that should be indexed for the given module.
+impl<'a> SymbolCollector<'a> {
+ pub fn collect(db: &dyn HirDatabase, module: Module) -> Vec<FileSymbol> {
+ let mut symbol_collector = SymbolCollector {
+ db,
+ symbols: Default::default(),
+ current_container_name: None,
+ // The initial work is the root module we're collecting, additional work will
+ // be populated as we traverse the module's definitions.
+ work: vec![SymbolCollectorWork { module_id: module.into(), parent: None }],
+ };
+
+ while let Some(work) = symbol_collector.work.pop() {
+ symbol_collector.do_work(work);
+ }
+
+ symbol_collector.symbols
+ }
+
+ fn do_work(&mut self, work: SymbolCollectorWork) {
+ self.db.unwind_if_cancelled();
+
+ let parent_name = work.parent.and_then(|id| self.def_with_body_id_name(id));
+ self.with_container_name(parent_name, |s| s.collect_from_module(work.module_id));
+ }
+
+ fn collect_from_module(&mut self, module_id: ModuleId) {
+ let def_map = module_id.def_map(self.db.upcast());
+ let scope = &def_map[module_id.local_id].scope;
+
+ for module_def_id in scope.declarations() {
+ match module_def_id {
+ ModuleDefId::ModuleId(id) => self.push_module(id),
+ ModuleDefId::FunctionId(id) => {
+ self.push_decl_assoc(id, FileSymbolKind::Function);
+ self.collect_from_body(id);
+ }
+ ModuleDefId::AdtId(AdtId::StructId(id)) => {
+ self.push_decl(id, FileSymbolKind::Struct)
+ }
+ ModuleDefId::AdtId(AdtId::EnumId(id)) => self.push_decl(id, FileSymbolKind::Enum),
+ ModuleDefId::AdtId(AdtId::UnionId(id)) => self.push_decl(id, FileSymbolKind::Union),
+ ModuleDefId::ConstId(id) => {
+ self.push_decl_assoc(id, FileSymbolKind::Const);
+ self.collect_from_body(id);
+ }
+ ModuleDefId::StaticId(id) => {
+ self.push_decl_assoc(id, FileSymbolKind::Static);
+ self.collect_from_body(id);
+ }
+ ModuleDefId::TraitId(id) => {
+ self.push_decl(id, FileSymbolKind::Trait);
+ self.collect_from_trait(id);
+ }
+ ModuleDefId::TypeAliasId(id) => {
+ self.push_decl_assoc(id, FileSymbolKind::TypeAlias);
+ }
+ // Don't index these.
+ ModuleDefId::BuiltinType(_) => {}
+ ModuleDefId::EnumVariantId(_) => {}
+ }
+ }
+
+ for impl_id in scope.impls() {
+ self.collect_from_impl(impl_id);
+ }
+
+ for const_id in scope.unnamed_consts() {
+ self.collect_from_body(const_id);
+ }
+
+ for macro_def_id in scope.macro_declarations() {
+ self.push_decl_macro(macro_def_id.into());
+ }
+ }
+
+ fn collect_from_body(&mut self, body_id: impl Into<DefWithBodyId>) {
+ let body_id = body_id.into();
+ let body = self.db.body(body_id);
+
+ // Descend into the blocks and enqueue collection of all modules within.
+ for (_, def_map) in body.blocks(self.db.upcast()) {
+ for (id, _) in def_map.modules() {
+ self.work.push(SymbolCollectorWork {
+ module_id: def_map.module_id(id),
+ parent: Some(body_id),
+ });
+ }
+ }
+ }
+
+ fn collect_from_impl(&mut self, impl_id: ImplId) {
+ let impl_data = self.db.impl_data(impl_id);
+ for &assoc_item_id in &impl_data.items {
+ self.push_assoc_item(assoc_item_id)
+ }
+ }
+
+ fn collect_from_trait(&mut self, trait_id: TraitId) {
+ let trait_data = self.db.trait_data(trait_id);
+ self.with_container_name(trait_data.name.as_text(), |s| {
+ for &(_, assoc_item_id) in &trait_data.items {
+ s.push_assoc_item(assoc_item_id);
+ }
+ });
+ }
+
+ fn with_container_name(&mut self, container_name: Option<SmolStr>, f: impl FnOnce(&mut Self)) {
+ if let Some(container_name) = container_name {
+ let prev = self.current_container_name.replace(container_name);
+ f(self);
+ self.current_container_name = prev;
+ } else {
+ f(self);
+ }
+ }
+
+ fn current_container_name(&self) -> Option<SmolStr> {
+ self.current_container_name.clone()
+ }
+
+ fn def_with_body_id_name(&self, body_id: DefWithBodyId) -> Option<SmolStr> {
+ match body_id {
+ DefWithBodyId::FunctionId(id) => Some(
+ id.lookup(self.db.upcast()).source(self.db.upcast()).value.name()?.text().into(),
+ ),
+ DefWithBodyId::StaticId(id) => Some(
+ id.lookup(self.db.upcast()).source(self.db.upcast()).value.name()?.text().into(),
+ ),
+ DefWithBodyId::ConstId(id) => Some(
+ id.lookup(self.db.upcast()).source(self.db.upcast()).value.name()?.text().into(),
+ ),
+ }
+ }
+
+ fn push_assoc_item(&mut self, assoc_item_id: AssocItemId) {
+ match assoc_item_id {
+ AssocItemId::FunctionId(id) => self.push_decl_assoc(id, FileSymbolKind::Function),
+ AssocItemId::ConstId(id) => self.push_decl_assoc(id, FileSymbolKind::Const),
+ AssocItemId::TypeAliasId(id) => self.push_decl_assoc(id, FileSymbolKind::TypeAlias),
+ }
+ }
+
+ fn push_decl_assoc<L, T>(&mut self, id: L, kind: FileSymbolKind)
+ where
+ L: Lookup<Data = AssocItemLoc<T>>,
+ T: ItemTreeNode,
+ <T as ItemTreeNode>::Source: HasName,
+ {
+ fn container_name(db: &dyn HirDatabase, container: ItemContainerId) -> Option<SmolStr> {
+ match container {
+ ItemContainerId::ModuleId(module_id) => {
+ let module = Module::from(module_id);
+ module.name(db).and_then(|name| name.as_text())
+ }
+ ItemContainerId::TraitId(trait_id) => {
+ let trait_data = db.trait_data(trait_id);
+ trait_data.name.as_text()
+ }
+ ItemContainerId::ImplId(_) | ItemContainerId::ExternBlockId(_) => None,
+ }
+ }
+
+ self.push_file_symbol(|s| {
+ let loc = id.lookup(s.db.upcast());
+ let source = loc.source(s.db.upcast());
+ let name_node = source.value.name()?;
+ let container_name =
+ container_name(s.db, loc.container).or_else(|| s.current_container_name());
+
+ Some(FileSymbol {
+ name: name_node.text().into(),
+ kind,
+ container_name,
+ loc: DeclarationLocation {
+ hir_file_id: source.file_id,
+ ptr: SyntaxNodePtr::new(source.value.syntax()),
+ name_ptr: SyntaxNodePtr::new(name_node.syntax()),
+ },
+ })
+ })
+ }
+
+ fn push_decl<L, T>(&mut self, id: L, kind: FileSymbolKind)
+ where
+ L: Lookup<Data = ItemLoc<T>>,
+ T: ItemTreeNode,
+ <T as ItemTreeNode>::Source: HasName,
+ {
+ self.push_file_symbol(|s| {
+ let loc = id.lookup(s.db.upcast());
+ let source = loc.source(s.db.upcast());
+ let name_node = source.value.name()?;
+
+ Some(FileSymbol {
+ name: name_node.text().into(),
+ kind,
+ container_name: s.current_container_name(),
+ loc: DeclarationLocation {
+ hir_file_id: source.file_id,
+ ptr: SyntaxNodePtr::new(source.value.syntax()),
+ name_ptr: SyntaxNodePtr::new(name_node.syntax()),
+ },
+ })
+ })
+ }
+
+ fn push_module(&mut self, module_id: ModuleId) {
+ self.push_file_symbol(|s| {
+ let def_map = module_id.def_map(s.db.upcast());
+ let module_data = &def_map[module_id.local_id];
+ let declaration = module_data.origin.declaration()?;
+ let module = declaration.to_node(s.db.upcast());
+ let name_node = module.name()?;
+
+ Some(FileSymbol {
+ name: name_node.text().into(),
+ kind: FileSymbolKind::Module,
+ container_name: s.current_container_name(),
+ loc: DeclarationLocation {
+ hir_file_id: declaration.file_id,
+ ptr: SyntaxNodePtr::new(module.syntax()),
+ name_ptr: SyntaxNodePtr::new(name_node.syntax()),
+ },
+ })
+ })
+ }
+
+ fn push_decl_macro(&mut self, macro_def: MacroDef) {
+ self.push_file_symbol(|s| {
+ let name = macro_def.name(s.db)?.as_text()?;
+ let source = macro_def.source(s.db)?;
+
+ let (ptr, name_ptr) = match source.value {
+ Either::Left(m) => {
+ (SyntaxNodePtr::new(m.syntax()), SyntaxNodePtr::new(m.name()?.syntax()))
+ }
+ Either::Right(f) => {
+ (SyntaxNodePtr::new(f.syntax()), SyntaxNodePtr::new(f.name()?.syntax()))
+ }
+ };
+
+ Some(FileSymbol {
+ name,
+ kind: FileSymbolKind::Macro,
+ container_name: s.current_container_name(),
+ loc: DeclarationLocation { hir_file_id: source.file_id, name_ptr, ptr },
+ })
+ })
+ }
+
+ fn push_file_symbol(&mut self, f: impl FnOnce(&Self) -> Option<FileSymbol>) {
+ if let Some(file_symbol) = f(self) {
+ self.symbols.push(file_symbol);
+ }
+ }
+}
diff --git a/crates/hir_def/src/attr.rs b/crates/hir_def/src/attr.rs
index 30642f6..b4ddfba 100644
--- a/crates/hir_def/src/attr.rs
+++ b/crates/hir_def/src/attr.rs
@@ -255,6 +255,10 @@
}
}
+ pub fn lang(&self) -> Option<&SmolStr> {
+ self.by_key("lang").string_value()
+ }
+
pub fn docs(&self) -> Option<Documentation> {
let docs = self.by_key("doc").attrs().flat_map(|attr| match attr.input.as_deref()? {
AttrInput::Literal(s) => Some(s),
@@ -390,7 +394,9 @@
if let InFile { file_id, value: ModuleSource::SourceFile(file) } =
mod_data.definition_source(db)
{
- map.merge(AttrSourceMap::new(InFile::new(file_id, &file)));
+ map.append_module_inline_attrs(AttrSourceMap::new(InFile::new(
+ file_id, &file,
+ )));
}
return map;
}
@@ -552,6 +558,11 @@
pub struct AttrSourceMap {
source: Vec<Either<ast::Attr, ast::Comment>>,
file_id: HirFileId,
+ /// If this map is for a module, this will be the [`HirFileId`] of the module's definition site,
+ /// while `file_id` will be the one of the module declaration site.
+ /// The usize is the index into `source` from which point on the entries reside in the def site
+ /// file.
+ mod_def_site_file_id: Option<(HirFileId, usize)>,
}
impl AttrSourceMap {
@@ -559,11 +570,19 @@
Self {
source: collect_attrs(owner.value).map(|(_, it)| it).collect(),
file_id: owner.file_id,
+ mod_def_site_file_id: None,
}
}
- fn merge(&mut self, other: Self) {
+ /// Append a second source map to this one, this is required for modules, whose outline and inline
+ /// attributes can reside in different files
+ fn append_module_inline_attrs(&mut self, other: Self) {
+ assert!(self.mod_def_site_file_id.is_none() && other.mod_def_site_file_id.is_none());
+ let len = self.source.len();
self.source.extend(other.source);
+ if other.file_id != self.file_id {
+ self.mod_def_site_file_id = Some((other.file_id, len));
+ }
}
/// Maps the lowered `Attr` back to its original syntax node.
@@ -577,9 +596,15 @@
}
fn source_of_id(&self, id: AttrId) -> InFile<&Either<ast::Attr, ast::Comment>> {
+ let ast_idx = id.ast_index as usize;
+ let file_id = match self.mod_def_site_file_id {
+ Some((file_id, def_site_cut)) if def_site_cut <= ast_idx => file_id,
+ _ => self.file_id,
+ };
+
self.source
- .get(id.ast_index as usize)
- .map(|it| InFile::new(self.file_id, it))
+ .get(ast_idx)
+ .map(|it| InFile::new(file_id, it))
.unwrap_or_else(|| panic!("cannot find attr at index {:?}", id))
}
}
@@ -754,20 +779,20 @@
}
#[derive(Debug, Clone, Copy)]
-pub struct AttrQuery<'a> {
- attrs: &'a Attrs,
+pub struct AttrQuery<'attr> {
+ attrs: &'attr Attrs,
key: &'static str,
}
-impl<'a> AttrQuery<'a> {
- pub fn tt_values(self) -> impl Iterator<Item = &'a Subtree> {
+impl<'attr> AttrQuery<'attr> {
+ pub fn tt_values(self) -> impl Iterator<Item = &'attr Subtree> {
self.attrs().filter_map(|attr| match attr.input.as_deref()? {
AttrInput::TokenTree(it, _) => Some(it),
_ => None,
})
}
- pub fn string_value(self) -> Option<&'a SmolStr> {
+ pub fn string_value(self) -> Option<&'attr SmolStr> {
self.attrs().find_map(|attr| match attr.input.as_deref()? {
AttrInput::Literal(it) => Some(it),
_ => None,
@@ -778,7 +803,7 @@
self.attrs().next().is_some()
}
- pub fn attrs(self) -> impl Iterator<Item = &'a Attr> + Clone {
+ pub fn attrs(self) -> impl Iterator<Item = &'attr Attr> + Clone {
let key = self.key;
self.attrs
.iter()
diff --git a/crates/hir_def/src/body/lower.rs b/crates/hir_def/src/body/lower.rs
index e322a95..7cbeef1 100644
--- a/crates/hir_def/src/body/lower.rs
+++ b/crates/hir_def/src/body/lower.rs
@@ -552,7 +552,7 @@
ast::Expr::MacroCall(e) => {
let macro_ptr = AstPtr::new(&e);
let mut ids = vec![];
- self.collect_macro_call(e, macro_ptr, |this, expansion| {
+ self.collect_macro_call(e, macro_ptr, true, |this, expansion| {
ids.push(match expansion {
Some(it) => this.collect_expr(it),
None => this.alloc_expr(Expr::Missing, syntax_ptr.clone()),
@@ -576,6 +576,7 @@
&mut self,
e: ast::MacroCall,
syntax_ptr: AstPtr<ast::MacroCall>,
+ record_diagnostics: bool,
mut collector: F,
) {
// File containing the macro call. Expansion errors will be attached here.
@@ -587,28 +588,32 @@
let res = match res {
Ok(res) => res,
Err(UnresolvedMacro { path }) => {
- self.source_map.diagnostics.push(BodyDiagnostic::UnresolvedMacroCall {
- node: InFile::new(outer_file, syntax_ptr),
- path,
- });
+ if record_diagnostics {
+ self.source_map.diagnostics.push(BodyDiagnostic::UnresolvedMacroCall {
+ node: InFile::new(outer_file, syntax_ptr),
+ path,
+ });
+ }
collector(self, None);
return;
}
};
- match &res.err {
- Some(ExpandError::UnresolvedProcMacro) => {
- self.source_map.diagnostics.push(BodyDiagnostic::UnresolvedProcMacro {
- node: InFile::new(outer_file, syntax_ptr),
- });
+ if record_diagnostics {
+ match &res.err {
+ Some(ExpandError::UnresolvedProcMacro) => {
+ self.source_map.diagnostics.push(BodyDiagnostic::UnresolvedProcMacro {
+ node: InFile::new(outer_file, syntax_ptr),
+ });
+ }
+ Some(err) => {
+ self.source_map.diagnostics.push(BodyDiagnostic::MacroError {
+ node: InFile::new(outer_file, syntax_ptr),
+ message: err.to_string(),
+ });
+ }
+ None => {}
}
- Some(err) => {
- self.source_map.diagnostics.push(BodyDiagnostic::MacroError {
- node: InFile::new(outer_file, syntax_ptr),
- message: err.to_string(),
- });
- }
- None => {}
}
match res.value {
@@ -663,29 +668,33 @@
let macro_ptr = AstPtr::new(&m);
let syntax_ptr = AstPtr::new(&stmt.expr().unwrap());
- self.collect_macro_call(m, macro_ptr, |this, expansion| match expansion {
- Some(expansion) => {
- let statements: ast::MacroStmts = expansion;
+ self.collect_macro_call(
+ m,
+ macro_ptr,
+ false,
+ |this, expansion| match expansion {
+ Some(expansion) => {
+ let statements: ast::MacroStmts = expansion;
- statements.statements().for_each(|stmt| this.collect_stmt(stmt));
- if let Some(expr) = statements.expr() {
- let expr = this.collect_expr(expr);
+ statements.statements().for_each(|stmt| this.collect_stmt(stmt));
+ if let Some(expr) = statements.expr() {
+ let expr = this.collect_expr(expr);
+ this.statements_in_scope
+ .push(Statement::Expr { expr, has_semi });
+ }
+ }
+ None => {
+ let expr = this.alloc_expr(Expr::Missing, syntax_ptr.clone());
this.statements_in_scope.push(Statement::Expr { expr, has_semi });
}
- }
- None => {
- let expr = this.alloc_expr(Expr::Missing, syntax_ptr.clone());
- this.statements_in_scope.push(Statement::Expr { expr, has_semi });
- }
- });
+ },
+ );
} else {
let expr = self.collect_expr_opt(stmt.expr());
self.statements_in_scope.push(Statement::Expr { expr, has_semi });
}
}
- ast::Stmt::Item(item) => {
- self.check_cfg(&item);
- }
+ ast::Stmt::Item(_item) => {}
}
}
@@ -878,7 +887,7 @@
Some(call) => {
let macro_ptr = AstPtr::new(&call);
let mut pat = None;
- self.collect_macro_call(call, macro_ptr, |this, expanded_pat| {
+ self.collect_macro_call(call, macro_ptr, true, |this, expanded_pat| {
pat = Some(this.collect_pat_opt(expanded_pat));
});
diff --git a/crates/hir_def/src/child_by_source.rs b/crates/hir_def/src/child_by_source.rs
index 1ef41d9..6b4abd8 100644
--- a/crates/hir_def/src/child_by_source.rs
+++ b/crates/hir_def/src/child_by_source.rs
@@ -30,49 +30,50 @@
impl ChildBySource for TraitId {
fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) {
let data = db.trait_data(*self);
- // FIXME attribute macros
- for &(_, item) in data.items.iter() {
- child_by_source_assoc_items(db, res, file_id, item);
- }
+
+ data.attribute_calls().filter(|(ast_id, _)| ast_id.file_id == file_id).for_each(
+ |(ast_id, call_id)| {
+ res[keys::ATTR_MACRO_CALL].insert(ast_id.to_node(db.upcast()), call_id);
+ },
+ );
+ data.items.iter().for_each(|&(_, item)| {
+ add_assoc_item(db, res, file_id, item);
+ });
}
}
impl ChildBySource for ImplId {
fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) {
let data = db.impl_data(*self);
- // FIXME attribute macros
- for &item in data.items.iter() {
- child_by_source_assoc_items(db, res, file_id, item);
- }
+ data.attribute_calls().filter(|(ast_id, _)| ast_id.file_id == file_id).for_each(
+ |(ast_id, call_id)| {
+ res[keys::ATTR_MACRO_CALL].insert(ast_id.to_node(db.upcast()), call_id);
+ },
+ );
+ data.items.iter().for_each(|&item| {
+ add_assoc_item(db, res, file_id, item);
+ });
}
}
-fn child_by_source_assoc_items(
- db: &dyn DefDatabase,
- res: &mut DynMap,
- file_id: HirFileId,
- item: AssocItemId,
-) {
+fn add_assoc_item(db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId, item: AssocItemId) {
match item {
AssocItemId::FunctionId(func) => {
let loc = func.lookup(db);
if loc.id.file_id() == file_id {
- let src = loc.source(db);
- res[keys::FUNCTION].insert(src, func)
+ res[keys::FUNCTION].insert(loc.source(db).value, func)
}
}
AssocItemId::ConstId(konst) => {
let loc = konst.lookup(db);
if loc.id.file_id() == file_id {
- let src = loc.source(db);
- res[keys::CONST].insert(src, konst)
+ res[keys::CONST].insert(loc.source(db).value, konst)
}
}
AssocItemId::TypeAliasId(ty) => {
let loc = ty.lookup(db);
if loc.id.file_id() == file_id {
- let src = loc.source(db);
- res[keys::TYPE_ALIAS].insert(src, ty)
+ res[keys::TYPE_ALIAS].insert(loc.source(db).value, ty)
}
}
}
@@ -88,120 +89,75 @@
impl ChildBySource for ItemScope {
fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) {
- self.declarations().for_each(|item| add_module_def(db, file_id, res, item));
+ self.declarations().for_each(|item| add_module_def(db, res, file_id, item));
+ self.impls().for_each(|imp| add_impl(db, res, file_id, imp));
+ self.unnamed_consts().for_each(|konst| {
+ let loc = konst.lookup(db);
+ if loc.id.file_id() == file_id {
+ res[keys::CONST].insert(loc.source(db).value, konst);
+ }
+ });
self.macros().for_each(|(_, makro)| {
let ast_id = makro.ast_id();
if ast_id.either(|it| it.file_id, |it| it.file_id) == file_id {
let src = match ast_id {
- Either::Left(ast_id) => ast_id.with_value(ast_id.to_node(db.upcast())),
+ Either::Left(ast_id) => ast_id.to_node(db.upcast()),
// FIXME: Do we need to add proc-macros into a PROCMACRO dynmap here?
Either::Right(_fn) => return,
};
- res[keys::MACRO_CALL].insert(src, makro);
+ res[keys::MACRO].insert(src, makro);
}
});
- self.unnamed_consts().for_each(|konst| {
- let loc = konst.lookup(db);
- if loc.id.file_id() == file_id {
- let src = loc.source(db);
- res[keys::CONST].insert(src, konst);
- }
- });
- self.impls().for_each(|imp| add_impl(db, file_id, res, imp));
- self.attr_macro_invocs().for_each(|(ast_id, call_id)| {
- if ast_id.file_id == file_id {
- let item = ast_id.with_value(ast_id.to_node(db.upcast()));
- res[keys::ATTR_MACRO_CALL].insert(item, call_id);
- }
- });
- self.derive_macro_invocs().for_each(|(ast_id, calls)| {
- if ast_id.file_id != file_id {
- return;
- }
- let adt = ast_id.to_node(db.upcast());
- for (attr_id, calls) in calls {
- if let Some(Either::Right(attr)) =
- adt.doc_comments_and_attrs().nth(attr_id.ast_index as usize)
- {
- res[keys::DERIVE_MACRO_CALL]
- .insert(ast_id.with_value(attr), (attr_id, calls.into()));
- }
- }
- });
+ self.attr_macro_invocs().filter(|(id, _)| id.file_id == file_id).for_each(
+ |(ast_id, call_id)| {
+ res[keys::ATTR_MACRO_CALL].insert(ast_id.to_node(db.upcast()), call_id);
+ },
+ );
+ self.derive_macro_invocs().filter(|(id, _)| id.file_id == file_id).for_each(
+ |(ast_id, calls)| {
+ let adt = ast_id.to_node(db.upcast());
+ calls.for_each(|(attr_id, calls)| {
+ if let Some(Either::Right(attr)) =
+ adt.doc_comments_and_attrs().nth(attr_id.ast_index as usize)
+ {
+ res[keys::DERIVE_MACRO_CALL].insert(attr, (attr_id, calls.into()));
+ }
+ });
+ },
+ );
fn add_module_def(
db: &dyn DefDatabase,
- file_id: HirFileId,
map: &mut DynMap,
+ file_id: HirFileId,
item: ModuleDefId,
) {
+ macro_rules! insert {
+ ($map:ident[$key:path].$insert:ident($id:ident)) => {{
+ let loc = $id.lookup(db);
+ if loc.id.file_id() == file_id {
+ $map[$key].$insert(loc.source(db).value, $id)
+ }
+ }};
+ }
match item {
- ModuleDefId::FunctionId(func) => {
- let loc = func.lookup(db);
- if loc.id.file_id() == file_id {
- let src = loc.source(db);
- map[keys::FUNCTION].insert(src, func)
- }
- }
- ModuleDefId::ConstId(konst) => {
- let loc = konst.lookup(db);
- if loc.id.file_id() == file_id {
- let src = loc.source(db);
- map[keys::CONST].insert(src, konst)
- }
- }
- ModuleDefId::StaticId(statik) => {
- let loc = statik.lookup(db);
- if loc.id.file_id() == file_id {
- let src = loc.source(db);
- map[keys::STATIC].insert(src, statik)
- }
- }
- ModuleDefId::TypeAliasId(ty) => {
- let loc = ty.lookup(db);
- if loc.id.file_id() == file_id {
- let src = loc.source(db);
- map[keys::TYPE_ALIAS].insert(src, ty)
- }
- }
- ModuleDefId::TraitId(trait_) => {
- let loc = trait_.lookup(db);
- if loc.id.file_id() == file_id {
- let src = loc.source(db);
- map[keys::TRAIT].insert(src, trait_)
- }
- }
+ ModuleDefId::FunctionId(id) => insert!(map[keys::FUNCTION].insert(id)),
+ ModuleDefId::ConstId(id) => insert!(map[keys::CONST].insert(id)),
+ ModuleDefId::StaticId(id) => insert!(map[keys::STATIC].insert(id)),
+ ModuleDefId::TypeAliasId(id) => insert!(map[keys::TYPE_ALIAS].insert(id)),
+ ModuleDefId::TraitId(id) => insert!(map[keys::TRAIT].insert(id)),
ModuleDefId::AdtId(adt) => match adt {
- AdtId::StructId(strukt) => {
- let loc = strukt.lookup(db);
- if loc.id.file_id() == file_id {
- let src = loc.source(db);
- map[keys::STRUCT].insert(src, strukt)
- }
- }
- AdtId::UnionId(union_) => {
- let loc = union_.lookup(db);
- if loc.id.file_id() == file_id {
- let src = loc.source(db);
- map[keys::UNION].insert(src, union_)
- }
- }
- AdtId::EnumId(enum_) => {
- let loc = enum_.lookup(db);
- if loc.id.file_id() == file_id {
- let src = loc.source(db);
- map[keys::ENUM].insert(src, enum_)
- }
- }
+ AdtId::StructId(id) => insert!(map[keys::STRUCT].insert(id)),
+ AdtId::UnionId(id) => insert!(map[keys::UNION].insert(id)),
+ AdtId::EnumId(id) => insert!(map[keys::ENUM].insert(id)),
},
_ => (),
}
}
- fn add_impl(db: &dyn DefDatabase, file_id: HirFileId, map: &mut DynMap, imp: ImplId) {
+ fn add_impl(db: &dyn DefDatabase, map: &mut DynMap, file_id: HirFileId, imp: ImplId) {
let loc = imp.lookup(db);
if loc.id.file_id() == file_id {
- let src = loc.source(db);
- map[keys::IMPL].insert(src, imp)
+ map[keys::IMPL].insert(loc.source(db).value, imp)
}
}
}
@@ -215,12 +171,8 @@
for (local_id, source) in arena_map.value.iter() {
let id = FieldId { parent, local_id };
match source.clone() {
- Either::Left(source) => {
- res[keys::TUPLE_FIELD].insert(arena_map.with_value(source), id)
- }
- Either::Right(source) => {
- res[keys::RECORD_FIELD].insert(arena_map.with_value(source), id)
- }
+ Either::Left(source) => res[keys::TUPLE_FIELD].insert(source, id),
+ Either::Right(source) => res[keys::RECORD_FIELD].insert(source, id),
}
}
}
@@ -232,7 +184,7 @@
let arena_map = arena_map.as_ref();
for (local_id, source) in arena_map.value.iter() {
let id = EnumVariantId { parent: *self, local_id };
- res[keys::VARIANT].insert(arena_map.with_value(source.clone()), id)
+ res[keys::VARIANT].insert(source.clone(), id)
}
}
}
diff --git a/crates/hir_def/src/data.rs b/crates/hir_def/src/data.rs
index 753084f..3a39a65 100644
--- a/crates/hir_def/src/data.rs
+++ b/crates/hir_def/src/data.rs
@@ -1,8 +1,8 @@
//! Contains basic data about various HIR declarations.
-use std::sync::Arc;
+use std::{mem, sync::Arc};
-use hir_expand::{name::Name, AstId, ExpandResult, InFile};
+use hir_expand::{name::Name, AstId, ExpandResult, HirFileId, InFile, MacroCallId};
use syntax::ast;
use crate::{
@@ -10,8 +10,8 @@
body::{Expander, Mark},
db::DefDatabase,
intern::Interned,
- item_tree::{self, AssocItem, FnFlags, ItemTreeId, ModItem, Param},
- nameres::attr_resolution::ResolvedAttr,
+ item_tree::{self, AssocItem, FnFlags, ItemTreeId, ModItem, Param, TreeId},
+ nameres::{attr_resolution::ResolvedAttr, DefMap},
type_ref::{TraitRef, TypeBound, TypeRef},
visibility::RawVisibility,
AssocItemId, AstIdWithPath, ConstId, ConstLoc, FunctionId, FunctionLoc, HasModule, ImplId,
@@ -184,6 +184,8 @@
/// method calls to this trait's methods when the receiver is an array and the crate edition is
/// 2015 or 2018.
pub skip_array_during_method_dispatch: bool,
+ // box it as the vec is usually empty anyways
+ pub attribute_calls: Option<Box<Vec<(AstId<ast::Item>, MacroCallId)>>>,
}
impl TraitData {
@@ -199,27 +201,24 @@
let is_auto = tr_def.is_auto;
let is_unsafe = tr_def.is_unsafe;
let module_id = tr_loc.container;
- let container = ItemContainerId::TraitId(tr);
let visibility = item_tree[tr_def.visibility].clone();
- let mut expander = Expander::new(db, tr_loc.id.file_id(), module_id);
let skip_array_during_method_dispatch = item_tree
.attrs(db, tr_loc.container.krate(), ModItem::from(tr_loc.id.value).into())
.by_key("rustc_skip_array_during_method_dispatch")
.exists();
- let items = collect_items(
+ let mut collector = AssocItemCollector::new(
db,
module_id,
- &mut expander,
- tr_def.items.iter().copied(),
- tr_loc.id.tree_id(),
- container,
- 100,
+ tr_loc.id.file_id(),
+ ItemContainerId::TraitId(tr),
);
+ collector.collect(tr_loc.id.tree_id(), &tr_def.items);
Arc::new(TraitData {
name,
- items,
+ attribute_calls: collector.take_attr_calls(),
+ items: collector.items,
is_auto,
is_unsafe,
visibility,
@@ -247,6 +246,10 @@
_ => None,
})
}
+
+ pub fn attribute_calls(&self) -> impl Iterator<Item = (AstId<ast::Item>, MacroCallId)> + '_ {
+ self.attribute_calls.iter().flat_map(|it| it.iter()).copied()
+ }
}
#[derive(Debug, Clone, PartialEq, Eq)]
@@ -255,6 +258,8 @@
pub self_ty: Interned<TypeRef>,
pub items: Vec<AssocItemId>,
pub is_negative: bool,
+ // box it as the vec is usually empty anyways
+ pub attribute_calls: Option<Box<Vec<(AstId<ast::Item>, MacroCallId)>>>,
}
impl ImplData {
@@ -268,21 +273,23 @@
let self_ty = impl_def.self_ty.clone();
let is_negative = impl_def.is_negative;
let module_id = impl_loc.container;
- let container = ItemContainerId::ImplId(id);
- let mut expander = Expander::new(db, impl_loc.id.file_id(), module_id);
- let items = collect_items(
+ let mut collector = AssocItemCollector::new(
db,
module_id,
- &mut expander,
- impl_def.items.iter().copied(),
- impl_loc.id.tree_id(),
- container,
- 100,
+ impl_loc.id.file_id(),
+ ItemContainerId::ImplId(id),
);
- let items = items.into_iter().map(|(_, item)| item).collect();
+ collector.collect(impl_loc.id.tree_id(), &impl_def.items);
- Arc::new(ImplData { target_trait, self_ty, items, is_negative })
+ let attribute_calls = collector.take_attr_calls();
+ let items = collector.items.into_iter().map(|(_, item)| item).collect();
+
+ Arc::new(ImplData { target_trait, self_ty, items, is_negative, attribute_calls })
+ }
+
+ pub fn attribute_calls(&self) -> impl Iterator<Item = (AstId<ast::Item>, MacroCallId)> + '_ {
+ self.attribute_calls.iter().flat_map(|it| it.iter()).copied()
}
}
@@ -333,102 +340,128 @@
}
}
-fn collect_items(
- db: &dyn DefDatabase,
- module: ModuleId,
- expander: &mut Expander,
- assoc_items: impl Iterator<Item = AssocItem>,
- tree_id: item_tree::TreeId,
+struct AssocItemCollector<'a> {
+ db: &'a dyn DefDatabase,
+ module_id: ModuleId,
+ def_map: Arc<DefMap>,
container: ItemContainerId,
- limit: usize,
-) -> Vec<(Name, AssocItemId)> {
- if limit == 0 {
- return Vec::new();
+ expander: Expander,
+
+ items: Vec<(Name, AssocItemId)>,
+ attr_calls: Vec<(AstId<ast::Item>, MacroCallId)>,
+}
+
+impl<'a> AssocItemCollector<'a> {
+ fn new(
+ db: &'a dyn DefDatabase,
+ module_id: ModuleId,
+ file_id: HirFileId,
+ container: ItemContainerId,
+ ) -> Self {
+ Self {
+ db,
+ module_id,
+ def_map: module_id.def_map(db),
+ container,
+ expander: Expander::new(db, file_id, module_id),
+
+ items: Vec::new(),
+ attr_calls: Vec::new(),
+ }
}
- let item_tree = tree_id.item_tree(db);
- let crate_graph = db.crate_graph();
- let cfg_options = &crate_graph[module.krate].cfg_options;
- let def_map = module.def_map(db);
-
- let mut items = Vec::new();
- 'items: for item in assoc_items {
- let attrs = item_tree.attrs(db, module.krate, ModItem::from(item).into());
- if !attrs.is_cfg_enabled(cfg_options) {
- continue;
+ fn take_attr_calls(&mut self) -> Option<Box<Vec<(AstId<ast::Item>, MacroCallId)>>> {
+ let attribute_calls = mem::take(&mut self.attr_calls);
+ if attribute_calls.is_empty() {
+ None
+ } else {
+ Some(Box::new(attribute_calls))
}
+ }
- for attr in &*attrs {
- let ast_id = AstIdWithPath {
- path: (*attr.path).clone(),
- ast_id: AstId::new(expander.current_file_id(), item.ast_id(&item_tree).upcast()),
- };
- if let Ok(ResolvedAttr::Macro(call_id)) =
- def_map.resolve_attr_macro(db, module.local_id, ast_id, attr)
- {
- let res = expander.enter_expand_id(db, call_id);
- items.extend(collect_macro_items(db, module, expander, container, limit, res));
- continue 'items;
- }
- }
+ fn collect(&mut self, tree_id: TreeId, assoc_items: &[AssocItem]) {
+ let item_tree = tree_id.item_tree(self.db);
- match item {
- AssocItem::Function(id) => {
- let item = &item_tree[id];
- let def = FunctionLoc { container, id: ItemTreeId::new(tree_id, id) }.intern(db);
- items.push((item.name.clone(), def.into()));
+ 'items: for &item in assoc_items {
+ let attrs = item_tree.attrs(self.db, self.module_id.krate, ModItem::from(item).into());
+ if !attrs.is_cfg_enabled(self.expander.cfg_options()) {
+ continue;
}
- AssocItem::Const(id) => {
- let item = &item_tree[id];
- let name = match item.name.clone() {
- Some(name) => name,
- None => continue,
- };
- let def = ConstLoc { container, id: ItemTreeId::new(tree_id, id) }.intern(db);
- items.push((name, def.into()));
- }
- AssocItem::TypeAlias(id) => {
- let item = &item_tree[id];
- let def = TypeAliasLoc { container, id: ItemTreeId::new(tree_id, id) }.intern(db);
- items.push((item.name.clone(), def.into()));
- }
- AssocItem::MacroCall(call) => {
- let call = &item_tree[call];
- let ast_id_map = db.ast_id_map(tree_id.file_id());
- let root = db.parse_or_expand(tree_id.file_id()).unwrap();
- let call = ast_id_map.get(call.ast_id).to_node(&root);
- let _cx = stdx::panic_context::enter(format!("collect_items MacroCall: {}", call));
- let res = expander.enter_expand(db, call);
- if let Ok(res) = res {
- items.extend(collect_macro_items(db, module, expander, container, limit, res));
+ for attr in &*attrs {
+ let ast_id =
+ AstId::new(self.expander.current_file_id(), item.ast_id(&item_tree).upcast());
+ let ast_id_with_path = AstIdWithPath { path: (*attr.path).clone(), ast_id };
+
+ if let Ok(ResolvedAttr::Macro(call_id)) = self.def_map.resolve_attr_macro(
+ self.db,
+ self.module_id.local_id,
+ ast_id_with_path,
+ attr,
+ ) {
+ self.attr_calls.push((ast_id, call_id));
+ let res = self.expander.enter_expand_id(self.db, call_id);
+ self.collect_macro_items(res);
+ continue 'items;
+ }
+ }
+
+ match item {
+ AssocItem::Function(id) => {
+ let item = &item_tree[id];
+ let def =
+ FunctionLoc { container: self.container, id: ItemTreeId::new(tree_id, id) }
+ .intern(self.db);
+ self.items.push((item.name.clone(), def.into()));
+ }
+ AssocItem::Const(id) => {
+ let item = &item_tree[id];
+ let name = match item.name.clone() {
+ Some(name) => name,
+ None => continue,
+ };
+ let def =
+ ConstLoc { container: self.container, id: ItemTreeId::new(tree_id, id) }
+ .intern(self.db);
+ self.items.push((name, def.into()));
+ }
+ AssocItem::TypeAlias(id) => {
+ let item = &item_tree[id];
+ let def = TypeAliasLoc {
+ container: self.container,
+ id: ItemTreeId::new(tree_id, id),
+ }
+ .intern(self.db);
+ self.items.push((item.name.clone(), def.into()));
+ }
+ AssocItem::MacroCall(call) => {
+ let call = &item_tree[call];
+ let ast_id_map = self.db.ast_id_map(self.expander.current_file_id());
+ let root = self.db.parse_or_expand(self.expander.current_file_id()).unwrap();
+ let call = ast_id_map.get(call.ast_id).to_node(&root);
+ let _cx =
+ stdx::panic_context::enter(format!("collect_items MacroCall: {}", call));
+ let res = self.expander.enter_expand(self.db, call);
+
+ if let Ok(res) = res {
+ self.collect_macro_items(res);
+ }
}
}
}
}
- items
-}
+ fn collect_macro_items(&mut self, res: ExpandResult<Option<(Mark, ast::MacroItems)>>) {
+ if let Some((mark, mac)) = res.value {
+ let src: InFile<ast::MacroItems> = self.expander.to_source(mac);
+ let tree_id = item_tree::TreeId::new(src.file_id, None);
+ let item_tree = tree_id.item_tree(self.db);
+ let iter: Vec<_> =
+ item_tree.top_level_items().iter().filter_map(ModItem::as_assoc_item).collect();
-fn collect_macro_items(
- db: &dyn DefDatabase,
- module: ModuleId,
- expander: &mut Expander,
- container: ItemContainerId,
- limit: usize,
- res: ExpandResult<Option<(Mark, ast::MacroItems)>>,
-) -> Vec<(Name, AssocItemId)> {
- if let Some((mark, mac)) = res.value {
- let src: InFile<ast::MacroItems> = expander.to_source(mac);
- let tree_id = item_tree::TreeId::new(src.file_id, None);
- let item_tree = tree_id.item_tree(db);
- let iter = item_tree.top_level_items().iter().filter_map(ModItem::as_assoc_item);
- let items = collect_items(db, module, expander, iter, tree_id, container, limit - 1);
+ self.collect(tree_id, &iter);
- expander.exit(db, mark);
-
- return items;
+ self.expander.exit(self.db, mark);
+ }
}
-
- Vec::new()
}
diff --git a/crates/hir_def/src/generics.rs b/crates/hir_def/src/generics.rs
index c4bd5b3..7febf1b 100644
--- a/crates/hir_def/src/generics.rs
+++ b/crates/hir_def/src/generics.rs
@@ -454,14 +454,17 @@
}
impl ChildBySource for GenericDefId {
- fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, _: HirFileId) {
+ fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) {
+ let (gfile_id, generic_params_list) = file_id_and_params_of(*self, db);
+ if gfile_id != file_id {
+ return;
+ }
+
let generic_params = db.generic_params(*self);
let mut types_idx_iter = generic_params.types.iter().map(|(idx, _)| idx);
let lts_idx_iter = generic_params.lifetimes.iter().map(|(idx, _)| idx);
let consts_idx_iter = generic_params.consts.iter().map(|(idx, _)| idx);
- let (file_id, generic_params_list) = file_id_and_params_of(*self, db);
-
// For traits the first type index is `Self`, skip it.
if let GenericDefId::TraitId(_) = *self {
types_idx_iter.next().unwrap(); // advance_by(1);
@@ -470,15 +473,15 @@
if let Some(generic_params_list) = generic_params_list {
for (local_id, ast_param) in types_idx_iter.zip(generic_params_list.type_params()) {
let id = TypeParamId { parent: *self, local_id };
- res[keys::TYPE_PARAM].insert(InFile::new(file_id, ast_param), id);
+ res[keys::TYPE_PARAM].insert(ast_param, id);
}
for (local_id, ast_param) in lts_idx_iter.zip(generic_params_list.lifetime_params()) {
let id = LifetimeParamId { parent: *self, local_id };
- res[keys::LIFETIME_PARAM].insert(InFile::new(file_id, ast_param), id);
+ res[keys::LIFETIME_PARAM].insert(ast_param, id);
}
for (local_id, ast_param) in consts_idx_iter.zip(generic_params_list.const_params()) {
let id = ConstParamId { parent: *self, local_id };
- res[keys::CONST_PARAM].insert(InFile::new(file_id, ast_param), id);
+ res[keys::CONST_PARAM].insert(ast_param, id);
}
}
}
diff --git a/crates/hir_def/src/item_tree.rs b/crates/hir_def/src/item_tree.rs
index c51201f..0af5d65 100644
--- a/crates/hir_def/src/item_tree.rs
+++ b/crates/hir_def/src/item_tree.rs
@@ -790,14 +790,26 @@
}
Some((prefix, ImportKind::Plain))
}
- (Some(prefix), PathKind::Super(0)) => {
- // `some::path::self` == `some::path`
- if path.segments().is_empty() {
- Some((prefix, ImportKind::TypeOnly))
- } else {
- None
+ (Some(mut prefix), PathKind::Super(n))
+ if *n > 0 && prefix.segments().is_empty() =>
+ {
+ // `super::super` + `super::rest`
+ match &mut prefix.kind {
+ PathKind::Super(m) => {
+ cov_mark::hit!(concat_super_mod_paths);
+ *m += *n;
+ for segment in path.segments() {
+ prefix.push_segment(segment.clone());
+ }
+ Some((prefix, ImportKind::Plain))
+ }
+ _ => None,
}
}
+ (Some(prefix), PathKind::Super(0)) if path.segments().is_empty() => {
+ // `some::path::self` == `some::path`
+ Some((prefix, ImportKind::TypeOnly))
+ }
(Some(_), _) => None,
}
}
diff --git a/crates/hir_def/src/keys.rs b/crates/hir_def/src/keys.rs
index 3a9cf6e..93c92c1 100644
--- a/crates/hir_def/src/keys.rs
+++ b/crates/hir_def/src/keys.rs
@@ -2,7 +2,7 @@
use std::marker::PhantomData;
-use hir_expand::{InFile, MacroCallId, MacroDefId};
+use hir_expand::{MacroCallId, MacroDefId};
use rustc_hash::FxHashMap;
use syntax::{ast, AstNode, AstPtr};
@@ -13,7 +13,7 @@
StaticId, StructId, TraitId, TypeAliasId, TypeParamId, UnionId,
};
-pub type Key<K, V> = crate::dyn_map::Key<InFile<K>, V, AstPtrPolicy<K, V>>;
+pub type Key<K, V> = crate::dyn_map::Key<K, V, AstPtrPolicy<K, V>>;
pub const FUNCTION: Key<ast::Fn, FunctionId> = Key::new();
pub const CONST: Key<ast::Const, ConstId> = Key::new();
@@ -32,7 +32,7 @@
pub const LIFETIME_PARAM: Key<ast::LifetimeParam, LifetimeParamId> = Key::new();
pub const CONST_PARAM: Key<ast::ConstParam, ConstParamId> = Key::new();
-pub const MACRO_CALL: Key<ast::Macro, MacroDefId> = Key::new();
+pub const MACRO: Key<ast::Macro, MacroDefId> = Key::new();
pub const ATTR_MACRO_CALL: Key<ast::Item, MacroCallId> = Key::new();
pub const DERIVE_MACRO_CALL: Key<ast::Attr, (AttrId, Box<[Option<MacroCallId>]>)> = Key::new();
@@ -47,17 +47,17 @@
}
impl<AST: AstNode + 'static, ID: 'static> Policy for AstPtrPolicy<AST, ID> {
- type K = InFile<AST>;
+ type K = AST;
type V = ID;
- fn insert(map: &mut DynMap, key: InFile<AST>, value: ID) {
- let key = key.as_ref().map(AstPtr::new);
+ fn insert(map: &mut DynMap, key: AST, value: ID) {
+ let key = AstPtr::new(&key);
map.map
- .entry::<FxHashMap<InFile<AstPtr<AST>>, ID>>()
+ .entry::<FxHashMap<AstPtr<AST>, ID>>()
.or_insert_with(Default::default)
.insert(key, value);
}
- fn get<'a>(map: &'a DynMap, key: &InFile<AST>) -> Option<&'a ID> {
- let key = key.as_ref().map(AstPtr::new);
- map.map.get::<FxHashMap<InFile<AstPtr<AST>>, ID>>()?.get(&key)
+ fn get<'a>(map: &'a DynMap, key: &AST) -> Option<&'a ID> {
+ let key = AstPtr::new(key);
+ map.map.get::<FxHashMap<AstPtr<AST>, ID>>()?.get(&key)
}
}
diff --git a/crates/hir_def/src/lang_item.rs b/crates/hir_def/src/lang_item.rs
index 6797a77..8778501 100644
--- a/crates/hir_def/src/lang_item.rs
+++ b/crates/hir_def/src/lang_item.rs
@@ -144,8 +144,8 @@
let _p = profile::span("lang_item_query");
let lang_items = db.crate_lang_items(start_crate);
let start_crate_target = lang_items.items.get(&item);
- if let Some(target) = start_crate_target {
- return Some(*target);
+ if let Some(&target) = start_crate_target {
+ return Some(target);
}
db.crate_graph()[start_crate]
.dependencies
diff --git a/crates/hir_def/src/nameres/tests.rs b/crates/hir_def/src/nameres/tests.rs
index 37336ce..4794932 100644
--- a/crates/hir_def/src/nameres/tests.rs
+++ b/crates/hir_def/src/nameres/tests.rs
@@ -890,3 +890,38 @@
"#]],
);
}
+
+#[test]
+fn braced_supers_in_use_tree() {
+ cov_mark::check!(concat_super_mod_paths);
+ check(
+ r#"
+mod some_module {
+ pub fn unknown_func() {}
+}
+
+mod other_module {
+ mod some_submodule {
+ use { super::{ super::unknown_func, }, };
+ }
+}
+
+use some_module::unknown_func;
+ "#,
+ expect![[r#"
+ crate
+ other_module: t
+ some_module: t
+ unknown_func: v
+
+ crate::some_module
+ unknown_func: v
+
+ crate::other_module
+ some_submodule: t
+
+ crate::other_module::some_submodule
+ unknown_func: v
+ "#]],
+ )
+}
diff --git a/crates/hir_expand/src/lib.rs b/crates/hir_expand/src/lib.rs
index 5f190b8..b455fd5 100644
--- a/crates/hir_expand/src/lib.rs
+++ b/crates/hir_expand/src/lib.rs
@@ -350,6 +350,53 @@
}
}
+ /// Returns the original file range that best describes the location of this macro call.
+ ///
+ /// Here we try to roughly match what rustc does to improve diagnostics: fn-like macros
+ /// get the whole `ast::MacroCall`, attribute macros get the attribute's range, and derives
+ /// get only the specific derive that is being referred to.
+ pub fn original_call_range(self, db: &dyn db::AstDatabase) -> FileRange {
+ let mut kind = self;
+ loop {
+ match kind.file_id().0 {
+ HirFileIdRepr::MacroFile(file) => {
+ kind = db.lookup_intern_macro_call(file.macro_call_id).kind;
+ }
+ _ => break,
+ }
+ }
+
+ // `call_id` is now the outermost macro call, so its location is in a real file.
+ let file_id = match kind.file_id().0 {
+ HirFileIdRepr::FileId(it) => it,
+ HirFileIdRepr::MacroFile(_) => unreachable!("encountered unexpected macro file"),
+ };
+ let range = match kind {
+ MacroCallKind::FnLike { ast_id, .. } => ast_id.to_node(db).syntax().text_range(),
+ MacroCallKind::Derive { ast_id, derive_attr_index, .. } => {
+ // FIXME: should be the range of the macro name, not the whole derive
+ ast_id
+ .to_node(db)
+ .doc_comments_and_attrs()
+ .nth(derive_attr_index as usize)
+ .expect("missing derive")
+ .expect_right("derive is a doc comment?")
+ .syntax()
+ .text_range()
+ }
+ MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => ast_id
+ .to_node(db)
+ .doc_comments_and_attrs()
+ .nth(invoc_attr_index as usize)
+ .expect("missing attribute")
+ .expect_right("attribute macro is a doc comment?")
+ .syntax()
+ .text_range(),
+ };
+
+ FileRange { range, file_id }
+ }
+
fn arg(&self, db: &dyn db::AstDatabase) -> Option<SyntaxNode> {
match self {
MacroCallKind::FnLike { ast_id, .. } => {
@@ -589,10 +636,7 @@
) -> impl Iterator<Item = InFile<SyntaxNode>> + Clone + '_ {
iter::successors(Some(self.cloned()), move |node| match node.value.parent() {
Some(parent) => Some(node.with_value(parent)),
- None => {
- let parent_node = node.file_id.call_node(db)?;
- Some(parent_node)
- }
+ None => node.file_id.call_node(db),
})
}
@@ -623,15 +667,13 @@
}
// Fall back to whole macro call.
- let mut node = self.cloned();
- while let Some(call_node) = node.file_id.call_node(db) {
- node = call_node;
+ match self.file_id.0 {
+ HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
+ HirFileIdRepr::MacroFile(mac_file) => {
+ let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
+ loc.kind.original_call_range(db)
+ }
}
-
- let orig_file = node.file_id.original_file(db);
- assert_eq!(node.file_id, orig_file.into());
-
- FileRange { file_id: orig_file, range: node.value.text_range() }
}
/// Attempts to map the syntax node back up its macro calls.
diff --git a/crates/hir_expand/src/name.rs b/crates/hir_expand/src/name.rs
index 3636ab6..4dcda0f 100644
--- a/crates/hir_expand/src/name.rs
+++ b/crates/hir_expand/src/name.rs
@@ -309,26 +309,6 @@
wrapping_mul,
wrapping_sub,
// known methods of lang items
- add,
- mul,
- sub,
- div,
- rem,
- shl,
- shr,
- bitxor,
- bitor,
- bitand,
- add_assign,
- mul_assign,
- sub_assign,
- div_assign,
- rem_assign,
- shl_assign,
- shr_assign,
- bitxor_assign,
- bitor_assign,
- bitand_assign,
eq,
ne,
ge,
@@ -336,12 +316,38 @@
le,
lt,
// lang items
- not,
- neg,
+ add_assign,
+ add,
+ bitand_assign,
+ bitand,
+ bitor_assign,
+ bitor,
+ bitxor_assign,
+ bitxor,
+ deref_mut,
+ deref,
+ div_assign,
+ div,
+ fn_mut,
+ fn_once,
future_trait,
- owned_box,
index,
- partial_ord
+ index_mut,
+ mul_assign,
+ mul,
+ neg,
+ not,
+ owned_box,
+ partial_ord,
+ r#fn,
+ rem_assign,
+ rem,
+ shl_assign,
+ shl,
+ shr_assign,
+ shr,
+ sub_assign,
+ sub,
);
// self/Self cannot be used as an identifier
diff --git a/crates/hir_ty/src/display.rs b/crates/hir_ty/src/display.rs
index 95d1550..f02f4ac 100644
--- a/crates/hir_ty/src/display.rs
+++ b/crates/hir_ty/src/display.rs
@@ -151,6 +151,12 @@
write!(self, "{}", sep)?;
}
first = false;
+
+ // Abbreviate multiple omitted types with a single ellipsis.
+ if self.should_truncate() {
+ return write!(self, "{}", TYPE_HINT_TRUNCATION);
+ }
+
e.hir_fmt(self)?;
}
Ok(())
diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs
index b8482d0..db7b80f 100644
--- a/crates/ide/src/lib.rs
+++ b/crates/ide/src/lib.rs
@@ -66,8 +66,7 @@
salsa::{self, ParallelDatabase},
Env, FileLoader, FileSet, SourceDatabase, VfsPath,
},
- symbol_index::{self, FileSymbol},
- LineIndexDatabase,
+ symbol_index, LineIndexDatabase,
};
use syntax::SourceFile;
diff --git a/crates/ide/src/navigation_target.rs b/crates/ide/src/navigation_target.rs
index 52e0995..1c3ff06 100644
--- a/crates/ide/src/navigation_target.rs
+++ b/crates/ide/src/navigation_target.rs
@@ -4,8 +4,8 @@
use either::Either;
use hir::{
- AssocItem, Documentation, FieldSource, HasAttrs, HasSource, HirDisplay, InFile, ModuleSource,
- Semantics,
+ symbols::FileSymbol, AssocItem, Documentation, FieldSource, HasAttrs, HasSource, HirDisplay,
+ InFile, ModuleSource, Semantics,
};
use ide_db::{
base_db::{FileId, FileRange},
@@ -17,8 +17,6 @@
match_ast, AstNode, SmolStr, TextRange,
};
-use crate::FileSymbol;
-
/// `NavigationTarget` represents an element in the editor's UI which you can
/// click on to navigate to a particular piece of code.
///
diff --git a/crates/ide/src/references.rs b/crates/ide/src/references.rs
index 949c6dc..5e6f0ef 100644
--- a/crates/ide/src/references.rs
+++ b/crates/ide/src/references.rs
@@ -1514,4 +1514,25 @@
"#]],
)
}
+
+ #[test]
+ fn attr_assoc_item() {
+ check(
+ r#"
+//- proc_macros: identity
+
+trait Trait {
+ #[proc_macros::identity]
+ fn func() {
+ Self::func$0();
+ }
+}
+"#,
+ expect![[r#"
+ func Function FileId(0) 48..87 51..55
+
+ FileId(0) 74..78
+ "#]],
+ )
+ }
}
diff --git a/crates/ide/src/syntax_highlighting.rs b/crates/ide/src/syntax_highlighting.rs
index f20d629..f09f291 100644
--- a/crates/ide/src/syntax_highlighting.rs
+++ b/crates/ide/src/syntax_highlighting.rs
@@ -16,7 +16,7 @@
use ide_db::RootDatabase;
use rustc_hash::FxHashMap;
use syntax::{
- ast::{self, HasFormatSpecifier},
+ ast::{self, IsString},
AstNode, AstToken, NodeOrToken,
SyntaxKind::*,
SyntaxNode, TextRange, WalkEvent, T,
@@ -336,17 +336,19 @@
}
highlight_format_string(hl, &string, &expanded_string, range);
// Highlight escape sequences
- if let Some(char_ranges) = string.char_ranges() {
- for (piece_range, _) in char_ranges.iter().filter(|(_, char)| char.is_ok()) {
- if string.text()[piece_range.start().into()..].starts_with('\\') {
- hl.add(HlRange {
- range: piece_range + range.start(),
- highlight: HlTag::EscapeSequence.into(),
- binding_hash: None,
- });
- }
+ string.escaped_char_ranges(&mut |piece_range, char| {
+ if char.is_err() {
+ return;
}
- }
+
+ if string.text()[piece_range.start().into()..].starts_with('\\') {
+ hl.add(HlRange {
+ range: piece_range + range.start(),
+ highlight: HlTag::EscapeSequence.into(),
+ binding_hash: None,
+ });
+ }
+ });
}
}
diff --git a/crates/ide/src/syntax_highlighting/format.rs b/crates/ide/src/syntax_highlighting/format.rs
index f83262f..c74b9f5 100644
--- a/crates/ide/src/syntax_highlighting/format.rs
+++ b/crates/ide/src/syntax_highlighting/format.rs
@@ -1,9 +1,9 @@
//! Syntax highlighting for format macro strings.
-use ide_db::SymbolKind;
-use syntax::{
- ast::{self, FormatSpecifier, HasFormatSpecifier},
- AstNode, AstToken, TextRange,
+use ide_db::{
+ helpers::format_string::{is_format_string, lex_format_specifiers, FormatSpecifier},
+ SymbolKind,
};
+use syntax::{ast, TextRange};
use crate::{syntax_highlighting::highlights::Highlights, HlRange, HlTag};
@@ -13,11 +13,11 @@
expanded_string: &ast::String,
range: TextRange,
) {
- if is_format_string(expanded_string).is_none() {
+ if !is_format_string(expanded_string) {
return;
}
- string.lex_format_specifier(|piece_range, kind| {
+ lex_format_specifiers(string, &mut |piece_range, kind| {
if let Some(highlight) = highlight_format_specifier(kind) {
stack.add(HlRange {
range: piece_range + range.start(),
@@ -28,32 +28,6 @@
});
}
-fn is_format_string(string: &ast::String) -> Option<()> {
- // Check if `string` is a format string argument of a macro invocation.
- // `string` is a string literal, mapped down into the innermost macro expansion.
- // Since `format_args!` etc. remove the format string when expanding, but place all arguments
- // in the expanded output, we know that the string token is (part of) the format string if it
- // appears in `format_args!` (otherwise it would have been mapped down further).
- //
- // This setup lets us correctly highlight the components of `concat!("{}", "bla")` format
- // strings. It still fails for `concat!("{", "}")`, but that is rare.
-
- let macro_call = string.syntax().ancestors().find_map(ast::MacroCall::cast)?;
- let name = macro_call.path()?.segment()?.name_ref()?;
-
- if !matches!(
- name.text().as_str(),
- "format_args" | "format_args_nl" | "const_format_args" | "panic_2015" | "panic_2021"
- ) {
- return None;
- }
-
- // NB: we match against `panic_2015`/`panic_2021` here because they have a special-cased arm for
- // `"{}"`, which otherwise wouldn't get highlighted.
-
- Some(())
-}
-
fn highlight_format_specifier(kind: FormatSpecifier) -> Option<HlTag> {
Some(match kind {
FormatSpecifier::Open
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html b/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html
index 6e87147..a04a211 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html
@@ -46,6 +46,8 @@
<span class="comment documentation">//! </span><span class="keyword injected">fn</span><span class="none injected"> </span><span class="function declaration injected">test</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="none injected"> </span><span class="brace injected">{</span><span class="brace injected">}</span>
<span class="comment documentation">//! ```</span>
+<span class="keyword">mod</span> <span class="module declaration">outline_module</span><span class="semicolon">;</span>
+
<span class="comment documentation">/// ```</span>
<span class="comment documentation">/// </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="punctuation injected">_</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="string_literal injected">"early doctests should not go boom"</span><span class="semicolon injected">;</span>
<span class="comment documentation">/// ```</span>
@@ -170,4 +172,6 @@
```
</span><span class="function documentation injected intra_doc_link">[`block_comments`]</span><span class="comment documentation"> tests these without indentation
*/</span>
-<span class="keyword">pub</span> <span class="keyword">fn</span> <span class="function declaration public">block_comments2</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span></code></pre>
\ No newline at end of file
+<span class="keyword">pub</span> <span class="keyword">fn</span> <span class="function declaration public">block_comments2</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
+
+</code></pre>
\ No newline at end of file
diff --git a/crates/ide/src/syntax_highlighting/tests.rs b/crates/ide/src/syntax_highlighting/tests.rs
index c9c2331..4beab99 100644
--- a/crates/ide/src/syntax_highlighting/tests.rs
+++ b/crates/ide/src/syntax_highlighting/tests.rs
@@ -641,11 +641,14 @@
fn test_highlight_doc_comment() {
check_highlighting(
r#"
+//- /main.rs
//! This is a module to test doc injection.
//! ```
//! fn test() {}
//! ```
+mod outline_module;
+
/// ```
/// let _ = "early doctests should not go boom";
/// ```
@@ -771,6 +774,13 @@
[`block_comments`] tests these without indentation
*/
pub fn block_comments2() {}
+
+//- /outline_module.rs
+//! This is an outline module whose purpose is to test that its inline attribute injection does not
+//! spill into its parent.
+//! ```
+//! fn test() {}
+//! ```
"#
.trim(),
expect_file!["./test_data/highlight_doctest.html"],
diff --git a/crates/ide/src/typing.rs b/crates/ide/src/typing.rs
index 37ae923..95a774d 100644
--- a/crates/ide/src/typing.rs
+++ b/crates/ide/src/typing.rs
@@ -22,9 +22,7 @@
use syntax::{
algo::find_node_at_offset,
ast::{self, edit::IndentLevel, AstToken},
- AstNode, Parse, SourceFile,
- SyntaxKind::{self, FIELD_EXPR, METHOD_CALL_EXPR},
- TextRange, TextSize,
+ AstNode, Parse, SourceFile, SyntaxKind, TextRange, TextSize,
};
use text_edit::{Indel, TextEdit};
@@ -195,22 +193,46 @@
let whitespace =
file.syntax().token_at_offset(offset).left_biased().and_then(ast::Whitespace::cast)?;
+ // if prior is fn call over multiple lines dont indent
+ // or if previous is method call over multiples lines keep that indent
let current_indent = {
let text = whitespace.text();
- let newline = text.rfind('\n')?;
- &text[newline + 1..]
+ let (_prefix, suffix) = text.rsplit_once('\n')?;
+ suffix
};
let current_indent_len = TextSize::of(current_indent);
let parent = whitespace.syntax().parent()?;
// Make sure dot is a part of call chain
- if !matches!(parent.kind(), FIELD_EXPR | METHOD_CALL_EXPR) {
+ let receiver = if let Some(field_expr) = ast::FieldExpr::cast(parent.clone()) {
+ field_expr.expr()?
+ } else if let Some(method_call_expr) = ast::MethodCallExpr::cast(parent.clone()) {
+ method_call_expr.receiver()?
+ } else {
return None;
+ };
+
+ let receiver_is_multiline = receiver.syntax().text().find_char('\n').is_some();
+ let target_indent = match (receiver, receiver_is_multiline) {
+ // if receiver is multiline field or method call, just take the previous `.` indentation
+ (ast::Expr::MethodCallExpr(expr), true) => {
+ expr.dot_token().as_ref().map(IndentLevel::from_token)
+ }
+ (ast::Expr::FieldExpr(expr), true) => {
+ expr.dot_token().as_ref().map(IndentLevel::from_token)
+ }
+ // if receiver is multiline expression, just keeps its indentation
+ (_, true) => Some(IndentLevel::from_node(&parent)),
+ _ => None,
+ };
+ let target_indent = match target_indent {
+ Some(x) => x,
+ // in all other cases, take previous indentation and indent once
+ None => IndentLevel::from_node(&parent) + 1,
}
- let prev_indent = IndentLevel::from_node(&parent);
- let target_indent = format!(" {}", prev_indent);
- let target_indent_len = TextSize::of(&target_indent);
- if current_indent_len == target_indent_len {
+ .to_string();
+
+ if current_indent_len == TextSize::of(&target_indent) {
return None;
}
@@ -661,4 +683,35 @@
"#,
);
}
+
+ #[test]
+ fn regression_629() {
+ type_char_noop(
+ '.',
+ r#"
+fn foo() {
+ CompletionItem::new(
+ CompletionKind::Reference,
+ ctx.source_range(),
+ field.name().to_string(),
+ )
+ .foo()
+ $0
+}
+"#,
+ );
+ type_char_noop(
+ '.',
+ r#"
+fn foo() {
+ CompletionItem::new(
+ CompletionKind::Reference,
+ ctx.source_range(),
+ field.name().to_string(),
+ )
+ $0
+}
+"#,
+ );
+ }
}
diff --git a/crates/ide_assists/src/handlers/add_missing_impl_members.rs b/crates/ide_assists/src/handlers/add_missing_impl_members.rs
index a10eca1..3105b28 100644
--- a/crates/ide_assists/src/handlers/add_missing_impl_members.rs
+++ b/crates/ide_assists/src/handlers/add_missing_impl_members.rs
@@ -942,4 +942,366 @@
"#,
)
}
+
+ #[test]
+ fn test_assoc_type_when_trait_with_same_name_in_scope() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+pub trait Foo {}
+
+pub trait Types {
+ type Foo;
+}
+
+pub trait Behavior<T: Types> {
+ fn reproduce(&self, foo: T::Foo);
+}
+
+pub struct Impl;
+
+impl<T: Types> Behavior<T> for Impl { $0 }"#,
+ r#"
+pub trait Foo {}
+
+pub trait Types {
+ type Foo;
+}
+
+pub trait Behavior<T: Types> {
+ fn reproduce(&self, foo: T::Foo);
+}
+
+pub struct Impl;
+
+impl<T: Types> Behavior<T> for Impl {
+ fn reproduce(&self, foo: <T as Types>::Foo) {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_assoc_type_on_concrete_type() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+pub trait Types {
+ type Foo;
+}
+
+impl Types for u32 {
+ type Foo = bool;
+}
+
+pub trait Behavior<T: Types> {
+ fn reproduce(&self, foo: T::Foo);
+}
+
+pub struct Impl;
+
+impl Behavior<u32> for Impl { $0 }"#,
+ r#"
+pub trait Types {
+ type Foo;
+}
+
+impl Types for u32 {
+ type Foo = bool;
+}
+
+pub trait Behavior<T: Types> {
+ fn reproduce(&self, foo: T::Foo);
+}
+
+pub struct Impl;
+
+impl Behavior<u32> for Impl {
+ fn reproduce(&self, foo: <u32 as Types>::Foo) {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_assoc_type_on_concrete_type_qualified() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+pub trait Types {
+ type Foo;
+}
+
+impl Types for std::string::String {
+ type Foo = bool;
+}
+
+pub trait Behavior<T: Types> {
+ fn reproduce(&self, foo: T::Foo);
+}
+
+pub struct Impl;
+
+impl Behavior<std::string::String> for Impl { $0 }"#,
+ r#"
+pub trait Types {
+ type Foo;
+}
+
+impl Types for std::string::String {
+ type Foo = bool;
+}
+
+pub trait Behavior<T: Types> {
+ fn reproduce(&self, foo: T::Foo);
+}
+
+pub struct Impl;
+
+impl Behavior<std::string::String> for Impl {
+ fn reproduce(&self, foo: <std::string::String as Types>::Foo) {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_assoc_type_on_concrete_type_multi_option_ambiguous() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+pub trait Types {
+ type Foo;
+}
+
+pub trait Types2 {
+ type Foo;
+}
+
+impl Types for u32 {
+ type Foo = bool;
+}
+
+impl Types2 for u32 {
+ type Foo = String;
+}
+
+pub trait Behavior<T: Types + Types2> {
+ fn reproduce(&self, foo: <T as Types2>::Foo);
+}
+
+pub struct Impl;
+
+impl Behavior<u32> for Impl { $0 }"#,
+ r#"
+pub trait Types {
+ type Foo;
+}
+
+pub trait Types2 {
+ type Foo;
+}
+
+impl Types for u32 {
+ type Foo = bool;
+}
+
+impl Types2 for u32 {
+ type Foo = String;
+}
+
+pub trait Behavior<T: Types + Types2> {
+ fn reproduce(&self, foo: <T as Types2>::Foo);
+}
+
+pub struct Impl;
+
+impl Behavior<u32> for Impl {
+ fn reproduce(&self, foo: <u32 as Types2>::Foo) {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_assoc_type_on_concrete_type_multi_option() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+pub trait Types {
+ type Foo;
+}
+
+pub trait Types2 {
+ type Bar;
+}
+
+impl Types for u32 {
+ type Foo = bool;
+}
+
+impl Types2 for u32 {
+ type Bar = String;
+}
+
+pub trait Behavior<T: Types + Types2> {
+ fn reproduce(&self, foo: T::Bar);
+}
+
+pub struct Impl;
+
+impl Behavior<u32> for Impl { $0 }"#,
+ r#"
+pub trait Types {
+ type Foo;
+}
+
+pub trait Types2 {
+ type Bar;
+}
+
+impl Types for u32 {
+ type Foo = bool;
+}
+
+impl Types2 for u32 {
+ type Bar = String;
+}
+
+pub trait Behavior<T: Types + Types2> {
+ fn reproduce(&self, foo: T::Bar);
+}
+
+pub struct Impl;
+
+impl Behavior<u32> for Impl {
+ fn reproduce(&self, foo: <u32 as Types2>::Bar) {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_assoc_type_on_concrete_type_multi_option_foreign() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+mod bar {
+ pub trait Types2 {
+ type Bar;
+ }
+}
+
+pub trait Types {
+ type Foo;
+}
+
+impl Types for u32 {
+ type Foo = bool;
+}
+
+impl bar::Types2 for u32 {
+ type Bar = String;
+}
+
+pub trait Behavior<T: Types + bar::Types2> {
+ fn reproduce(&self, foo: T::Bar);
+}
+
+pub struct Impl;
+
+impl Behavior<u32> for Impl { $0 }"#,
+ r#"
+mod bar {
+ pub trait Types2 {
+ type Bar;
+ }
+}
+
+pub trait Types {
+ type Foo;
+}
+
+impl Types for u32 {
+ type Foo = bool;
+}
+
+impl bar::Types2 for u32 {
+ type Bar = String;
+}
+
+pub trait Behavior<T: Types + bar::Types2> {
+ fn reproduce(&self, foo: T::Bar);
+}
+
+pub struct Impl;
+
+impl Behavior<u32> for Impl {
+ fn reproduce(&self, foo: <u32 as bar::Types2>::Bar) {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_transform_path_in_path_expr() {
+ check_assist(
+ add_missing_default_members,
+ r#"
+pub trait Const {
+ const FOO: u32;
+}
+
+pub trait Trait<T: Const> {
+ fn foo() -> bool {
+ match T::FOO {
+ 0 => true,
+ _ => false,
+ }
+ }
+}
+
+impl Const for u32 {
+ const FOO: u32 = 1;
+}
+
+struct Impl;
+
+impl Trait<u32> for Impl { $0 }"#,
+ r#"
+pub trait Const {
+ const FOO: u32;
+}
+
+pub trait Trait<T: Const> {
+ fn foo() -> bool {
+ match T::FOO {
+ 0 => true,
+ _ => false,
+ }
+ }
+}
+
+impl Const for u32 {
+ const FOO: u32 = 1;
+}
+
+struct Impl;
+
+impl Trait<u32> for Impl {
+ $0fn foo() -> bool {
+ match <u32 as Const>::FOO {
+ 0 => true,
+ _ => false,
+ }
+ }
+}"#,
+ );
+ }
}
diff --git a/crates/ide_assists/src/handlers/merge_match_arms.rs b/crates/ide_assists/src/handlers/merge_match_arms.rs
index 0ebc5d2..622ead8 100644
--- a/crates/ide_assists/src/handlers/merge_match_arms.rs
+++ b/crates/ide_assists/src/handlers/merge_match_arms.rs
@@ -1,8 +1,8 @@
-use std::iter::successors;
-
+use hir::TypeInfo;
+use std::{collections::HashMap, iter::successors};
use syntax::{
algo::neighbor,
- ast::{self, AstNode},
+ ast::{self, AstNode, HasName},
Direction,
};
@@ -40,13 +40,19 @@
}
let current_expr = current_arm.expr()?;
let current_text_range = current_arm.syntax().text_range();
+ let current_arm_types = get_arm_types(&ctx, ¤t_arm);
// We check if the following match arms match this one. We could, but don't,
// compare to the previous match arm as well.
let arms_to_merge = successors(Some(current_arm), |it| neighbor(it, Direction::Next))
.take_while(|arm| match arm.expr() {
Some(expr) if arm.guard().is_none() => {
- expr.syntax().text() == current_expr.syntax().text()
+ let same_text = expr.syntax().text() == current_expr.syntax().text();
+ if !same_text {
+ return false;
+ }
+
+ are_same_types(¤t_arm_types, arm, ctx)
}
_ => false,
})
@@ -88,6 +94,79 @@
matches!(a.pat(), Some(ast::Pat::WildcardPat(..)))
}
+fn are_same_types(
+ current_arm_types: &HashMap<String, Option<TypeInfo>>,
+ arm: &ast::MatchArm,
+ ctx: &AssistContext,
+) -> bool {
+ let arm_types = get_arm_types(&ctx, &arm);
+ for (other_arm_type_name, other_arm_type) in arm_types {
+ match (current_arm_types.get(&other_arm_type_name), other_arm_type) {
+ (Some(Some(current_arm_type)), Some(other_arm_type))
+ if other_arm_type.original == current_arm_type.original =>
+ {
+ ()
+ }
+ _ => return false,
+ }
+ }
+
+ true
+}
+
+fn get_arm_types(
+ context: &AssistContext,
+ arm: &ast::MatchArm,
+) -> HashMap<String, Option<TypeInfo>> {
+ let mut mapping: HashMap<String, Option<TypeInfo>> = HashMap::new();
+
+ fn recurse(
+ map: &mut HashMap<String, Option<TypeInfo>>,
+ ctx: &AssistContext,
+ pat: &Option<ast::Pat>,
+ ) {
+ if let Some(local_pat) = pat {
+ match pat {
+ Some(ast::Pat::TupleStructPat(tuple)) => {
+ for field in tuple.fields() {
+ recurse(map, ctx, &Some(field));
+ }
+ }
+ Some(ast::Pat::TuplePat(tuple)) => {
+ for field in tuple.fields() {
+ recurse(map, ctx, &Some(field));
+ }
+ }
+ Some(ast::Pat::RecordPat(record)) => {
+ if let Some(field_list) = record.record_pat_field_list() {
+ for field in field_list.fields() {
+ recurse(map, ctx, &field.pat());
+ }
+ }
+ }
+ Some(ast::Pat::ParenPat(parentheses)) => {
+ recurse(map, ctx, &parentheses.pat());
+ }
+ Some(ast::Pat::SlicePat(slice)) => {
+ for slice_pat in slice.pats() {
+ recurse(map, ctx, &Some(slice_pat));
+ }
+ }
+ Some(ast::Pat::IdentPat(ident_pat)) => {
+ if let Some(name) = ident_pat.name() {
+ let pat_type = ctx.sema.type_of_pat(local_pat);
+ map.insert(name.text().to_string(), pat_type);
+ }
+ }
+ _ => (),
+ }
+ }
+ }
+
+ recurse(&mut mapping, &context, &arm.pat());
+ mapping
+}
+
#[cfg(test)]
mod tests {
use crate::tests::{check_assist, check_assist_not_applicable};
@@ -244,4 +323,503 @@
"#,
);
}
+
+ #[test]
+ fn merge_match_arms_different_type() {
+ check_assist_not_applicable(
+ merge_match_arms,
+ r#"
+//- minicore: result
+fn func() {
+ match Result::<f64, f32>::Ok(0f64) {
+ Ok(x) => $0x.classify(),
+ Err(x) => x.classify()
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn merge_match_arms_different_type_multiple_fields() {
+ check_assist_not_applicable(
+ merge_match_arms,
+ r#"
+//- minicore: result
+fn func() {
+ match Result::<(f64, f64), (f32, f32)>::Ok((0f64, 0f64)) {
+ Ok(x) => $0x.1.classify(),
+ Err(x) => x.1.classify()
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn merge_match_arms_same_type_multiple_fields() {
+ check_assist(
+ merge_match_arms,
+ r#"
+//- minicore: result
+fn func() {
+ match Result::<(f64, f64), (f64, f64)>::Ok((0f64, 0f64)) {
+ Ok(x) => $0x.1.classify(),
+ Err(x) => x.1.classify()
+ };
+}
+"#,
+ r#"
+fn func() {
+ match Result::<(f64, f64), (f64, f64)>::Ok((0f64, 0f64)) {
+ Ok(x) | Err(x) => x.1.classify(),
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn merge_match_arms_same_type_subsequent_arm_with_different_type_in_other() {
+ check_assist(
+ merge_match_arms,
+ r#"
+enum MyEnum {
+ OptionA(f32),
+ OptionB(f32),
+ OptionC(f64)
+}
+
+fn func(e: MyEnum) {
+ match e {
+ MyEnum::OptionA(x) => $0x.classify(),
+ MyEnum::OptionB(x) => x.classify(),
+ MyEnum::OptionC(x) => x.classify(),
+ };
+}
+"#,
+ r#"
+enum MyEnum {
+ OptionA(f32),
+ OptionB(f32),
+ OptionC(f64)
+}
+
+fn func(e: MyEnum) {
+ match e {
+ MyEnum::OptionA(x) | MyEnum::OptionB(x) => x.classify(),
+ MyEnum::OptionC(x) => x.classify(),
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn merge_match_arms_same_type_skip_arm_with_different_type_in_between() {
+ check_assist_not_applicable(
+ merge_match_arms,
+ r#"
+enum MyEnum {
+ OptionA(f32),
+ OptionB(f64),
+ OptionC(f32)
+}
+
+fn func(e: MyEnum) {
+ match e {
+ MyEnum::OptionA(x) => $0x.classify(),
+ MyEnum::OptionB(x) => x.classify(),
+ MyEnum::OptionC(x) => x.classify(),
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn merge_match_arms_same_type_different_number_of_fields() {
+ check_assist_not_applicable(
+ merge_match_arms,
+ r#"
+//- minicore: result
+fn func() {
+ match Result::<(f64, f64, f64), (f64, f64)>::Ok((0f64, 0f64, 0f64)) {
+ Ok(x) => $0x.1.classify(),
+ Err(x) => x.1.classify()
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn merge_match_same_destructuring_different_types() {
+ check_assist_not_applicable(
+ merge_match_arms,
+ r#"
+struct Point {
+ x: i32,
+ y: i32,
+}
+
+fn func() {
+ let p = Point { x: 0, y: 7 };
+
+ match p {
+ Point { x, y: 0 } => $0"",
+ Point { x: 0, y } => "",
+ Point { x, y } => "",
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn merge_match_arms_range() {
+ check_assist(
+ merge_match_arms,
+ r#"
+fn func() {
+ let x = 'c';
+
+ match x {
+ 'a'..='j' => $0"",
+ 'c'..='z' => "",
+ _ => "other",
+ };
+}
+"#,
+ r#"
+fn func() {
+ let x = 'c';
+
+ match x {
+ 'a'..='j' | 'c'..='z' => "",
+ _ => "other",
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn merge_match_arms_enum_without_field() {
+ check_assist_not_applicable(
+ merge_match_arms,
+ r#"
+enum MyEnum {
+ NoField,
+ AField(u8)
+}
+
+fn func(x: MyEnum) {
+ match x {
+ MyEnum::NoField => $0"",
+ MyEnum::AField(x) => ""
+ };
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn merge_match_arms_enum_destructuring_different_types() {
+ check_assist_not_applicable(
+ merge_match_arms,
+ r#"
+enum MyEnum {
+ Move { x: i32, y: i32 },
+ Write(String),
+}
+
+fn func(x: MyEnum) {
+ match x {
+ MyEnum::Move { x, y } => $0"",
+ MyEnum::Write(text) => "",
+ };
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn merge_match_arms_enum_destructuring_same_types() {
+ check_assist(
+ merge_match_arms,
+ r#"
+enum MyEnum {
+ Move { x: i32, y: i32 },
+ Crawl { x: i32, y: i32 }
+}
+
+fn func(x: MyEnum) {
+ match x {
+ MyEnum::Move { x, y } => $0"",
+ MyEnum::Crawl { x, y } => "",
+ };
+}
+ "#,
+ r#"
+enum MyEnum {
+ Move { x: i32, y: i32 },
+ Crawl { x: i32, y: i32 }
+}
+
+fn func(x: MyEnum) {
+ match x {
+ MyEnum::Move { x, y } | MyEnum::Crawl { x, y } => "",
+ };
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn merge_match_arms_enum_destructuring_same_types_different_name() {
+ check_assist_not_applicable(
+ merge_match_arms,
+ r#"
+enum MyEnum {
+ Move { x: i32, y: i32 },
+ Crawl { a: i32, b: i32 }
+}
+
+fn func(x: MyEnum) {
+ match x {
+ MyEnum::Move { x, y } => $0"",
+ MyEnum::Crawl { a, b } => "",
+ };
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn merge_match_arms_enum_nested_pattern_different_names() {
+ check_assist_not_applicable(
+ merge_match_arms,
+ r#"
+enum Color {
+ Rgb(i32, i32, i32),
+ Hsv(i32, i32, i32),
+}
+
+enum Message {
+ Quit,
+ Move { x: i32, y: i32 },
+ Write(String),
+ ChangeColor(Color),
+}
+
+fn main(msg: Message) {
+ match msg {
+ Message::ChangeColor(Color::Rgb(r, g, b)) => $0"",
+ Message::ChangeColor(Color::Hsv(h, s, v)) => "",
+ _ => "other"
+ };
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn merge_match_arms_enum_nested_pattern_same_names() {
+ check_assist(
+ merge_match_arms,
+ r#"
+enum Color {
+ Rgb(i32, i32, i32),
+ Hsv(i32, i32, i32),
+}
+
+enum Message {
+ Quit,
+ Move { x: i32, y: i32 },
+ Write(String),
+ ChangeColor(Color),
+}
+
+fn main(msg: Message) {
+ match msg {
+ Message::ChangeColor(Color::Rgb(a, b, c)) => $0"",
+ Message::ChangeColor(Color::Hsv(a, b, c)) => "",
+ _ => "other"
+ };
+}
+ "#,
+ r#"
+enum Color {
+ Rgb(i32, i32, i32),
+ Hsv(i32, i32, i32),
+}
+
+enum Message {
+ Quit,
+ Move { x: i32, y: i32 },
+ Write(String),
+ ChangeColor(Color),
+}
+
+fn main(msg: Message) {
+ match msg {
+ Message::ChangeColor(Color::Rgb(a, b, c)) | Message::ChangeColor(Color::Hsv(a, b, c)) => "",
+ _ => "other"
+ };
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn merge_match_arms_enum_destructuring_with_ignore() {
+ check_assist(
+ merge_match_arms,
+ r#"
+enum MyEnum {
+ Move { x: i32, a: i32 },
+ Crawl { x: i32, b: i32 }
+}
+
+fn func(x: MyEnum) {
+ match x {
+ MyEnum::Move { x, .. } => $0"",
+ MyEnum::Crawl { x, .. } => "",
+ };
+}
+ "#,
+ r#"
+enum MyEnum {
+ Move { x: i32, a: i32 },
+ Crawl { x: i32, b: i32 }
+}
+
+fn func(x: MyEnum) {
+ match x {
+ MyEnum::Move { x, .. } | MyEnum::Crawl { x, .. } => "",
+ };
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn merge_match_arms_nested_with_conflicting_identifier() {
+ check_assist_not_applicable(
+ merge_match_arms,
+ r#"
+enum Color {
+ Rgb(i32, i32, i32),
+ Hsv(i32, i32, i32),
+}
+
+enum Message {
+ Move { x: i32, y: i32 },
+ ChangeColor(u8, Color),
+}
+
+fn main(msg: Message) {
+ match msg {
+ Message::ChangeColor(x, Color::Rgb(y, b, c)) => $0"",
+ Message::ChangeColor(y, Color::Hsv(x, b, c)) => "",
+ _ => "other"
+ };
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn merge_match_arms_tuple() {
+ check_assist_not_applicable(
+ merge_match_arms,
+ r#"
+fn func() {
+ match (0, "boo") {
+ (x, y) => $0"",
+ (y, x) => "",
+ };
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn merge_match_arms_parentheses() {
+ check_assist_not_applicable(
+ merge_match_arms,
+ r#"
+fn func(x: i32) {
+ let variable = 2;
+ match x {
+ 1 => $0"",
+ ((((variable)))) => "",
+ _ => "other"
+ };
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn merge_match_arms_refpat() {
+ check_assist_not_applicable(
+ merge_match_arms,
+ r#"
+fn func() {
+ let name = Some(String::from(""));
+ let n = String::from("");
+ match name {
+ Some(ref n) => $0"",
+ Some(n) => "",
+ _ => "other",
+ };
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn merge_match_arms_slice() {
+ check_assist_not_applicable(
+ merge_match_arms,
+ r#"
+fn func(binary: &[u8]) {
+ let space = b' ';
+ match binary {
+ [0x7f, b'E', b'L', b'F', ..] => $0"",
+ [space] => "",
+ _ => "other",
+ };
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn merge_match_arms_slice_identical() {
+ check_assist(
+ merge_match_arms,
+ r#"
+fn func(binary: &[u8]) {
+ let space = b' ';
+ match binary {
+ [space, 5u8] => $0"",
+ [space] => "",
+ _ => "other",
+ };
+}
+ "#,
+ r#"
+fn func(binary: &[u8]) {
+ let space = b' ';
+ match binary {
+ [space, 5u8] | [space] => "",
+ _ => "other",
+ };
+}
+ "#,
+ )
+ }
}
diff --git a/crates/ide_assists/src/handlers/replace_qualified_name_with_use.rs b/crates/ide_assists/src/handlers/replace_qualified_name_with_use.rs
index b0b9ad4..3121e22 100644
--- a/crates/ide_assists/src/handlers/replace_qualified_name_with_use.rs
+++ b/crates/ide_assists/src/handlers/replace_qualified_name_with_use.rs
@@ -216,7 +216,7 @@
",
);
}
- #[test]
+
#[test]
fn dont_import_trivial_paths() {
cov_mark::check!(dont_import_trivial_paths);
diff --git a/crates/ide_completion/src/completions.rs b/crates/ide_completion/src/completions.rs
index 19fdc6c..e399213 100644
--- a/crates/ide_completion/src/completions.rs
+++ b/crates/ide_completion/src/completions.rs
@@ -14,6 +14,7 @@
pub(crate) mod snippet;
pub(crate) mod trait_impl;
pub(crate) mod unqualified_path;
+pub(crate) mod format_string;
use std::iter;
diff --git a/crates/ide_completion/src/completions/attribute.rs b/crates/ide_completion/src/completions/attribute.rs
index f67d7d5..2603ad2 100644
--- a/crates/ide_completion/src/completions/attribute.rs
+++ b/crates/ide_completion/src/completions/attribute.rs
@@ -56,6 +56,7 @@
_ => (),
},
(_, Some(_)) => (),
+ (_, None) if attribute.expr().is_some() => (),
(_, None) => complete_new_attribute(acc, ctx, attribute),
}
Some(())
diff --git a/crates/ide_completion/src/completions/fn_param.rs b/crates/ide_completion/src/completions/fn_param.rs
index bb4ce0a..a55bab6 100644
--- a/crates/ide_completion/src/completions/fn_param.rs
+++ b/crates/ide_completion/src/completions/fn_param.rs
@@ -83,9 +83,15 @@
let whole_param = param.syntax().text().to_string();
file_params.remove(&whole_param);
- if let Some(pattern) = param.pat() {
- let binding = pattern.syntax().text().to_string();
- file_params.retain(|_, v| v != &binding);
+ match param.pat() {
+ // remove suggestions for patterns that already exist
+ // if the type is missing we are checking the current param to be completed
+ // in which case this would find itself removing the suggestions due to itself
+ Some(pattern) if param.ty().is_some() => {
+ let binding = pattern.syntax().text().to_string();
+ file_params.retain(|_, v| v != &binding);
+ }
+ _ => (),
}
})
}
diff --git a/crates/ide_completion/src/completions/format_string.rs b/crates/ide_completion/src/completions/format_string.rs
new file mode 100644
index 0000000..08f5a59
--- /dev/null
+++ b/crates/ide_completion/src/completions/format_string.rs
@@ -0,0 +1,107 @@
+//! Completes identifiers in format string literals.
+
+use ide_db::helpers::format_string::is_format_string;
+use itertools::Itertools;
+use syntax::{ast, AstToken, TextRange, TextSize};
+
+use crate::{context::CompletionContext, CompletionItem, CompletionItemKind, Completions};
+
+/// Complete identifiers in format strings.
+pub(crate) fn format_string(acc: &mut Completions, ctx: &CompletionContext) {
+ let string = match ast::String::cast(ctx.token.clone()) {
+ Some(it) if is_format_string(&it) => it,
+ _ => return,
+ };
+ let cursor = ctx.position.offset;
+ let lit_start = ctx.token.text_range().start();
+ let cursor_in_lit = cursor - lit_start;
+
+ let prefix = &string.text()[..cursor_in_lit.into()];
+ let braces = prefix.char_indices().rev().skip_while(|&(_, c)| c.is_alphanumeric()).next_tuple();
+ let brace_offset = match braces {
+ // escaped brace
+ Some(((_, '{'), (_, '{'))) => return,
+ Some(((idx, '{'), _)) => lit_start + TextSize::from(idx as u32 + 1),
+ _ => return,
+ };
+
+ let source_range = TextRange::new(brace_offset, cursor);
+ ctx.locals.iter().for_each(|(name, _)| {
+ CompletionItem::new(CompletionItemKind::Binding, source_range, name.to_smol_str())
+ .add_to(acc);
+ })
+}
+
+#[cfg(test)]
+mod tests {
+ use expect_test::{expect, Expect};
+
+ use crate::tests::{check_edit, completion_list_no_kw};
+
+ fn check(ra_fixture: &str, expect: Expect) {
+ let actual = completion_list_no_kw(ra_fixture);
+ expect.assert_eq(&actual);
+ }
+
+ #[test]
+ fn no_completion_without_brace() {
+ check(
+ r#"
+macro_rules! format_args {
+($lit:literal $(tt:tt)*) => { 0 },
+}
+fn main() {
+let foobar = 1;
+format_args!("f$0");
+}
+"#,
+ expect![[]],
+ );
+ }
+
+ #[test]
+ fn completes_locals() {
+ check_edit(
+ "foobar",
+ r#"
+macro_rules! format_args {
+ ($lit:literal $(tt:tt)*) => { 0 },
+}
+fn main() {
+ let foobar = 1;
+ format_args!("{f$0");
+}
+"#,
+ r#"
+macro_rules! format_args {
+ ($lit:literal $(tt:tt)*) => { 0 },
+}
+fn main() {
+ let foobar = 1;
+ format_args!("{foobar");
+}
+"#,
+ );
+ check_edit(
+ "foobar",
+ r#"
+macro_rules! format_args {
+ ($lit:literal $(tt:tt)*) => { 0 },
+}
+fn main() {
+ let foobar = 1;
+ format_args!("{$0");
+}
+"#,
+ r#"
+macro_rules! format_args {
+ ($lit:literal $(tt:tt)*) => { 0 },
+}
+fn main() {
+ let foobar = 1;
+ format_args!("{foobar");
+}
+"#,
+ );
+ }
+}
diff --git a/crates/ide_completion/src/completions/postfix.rs b/crates/ide_completion/src/completions/postfix.rs
index a212a98..e8e0c7e 100644
--- a/crates/ide_completion/src/completions/postfix.rs
+++ b/crates/ide_completion/src/completions/postfix.rs
@@ -179,7 +179,7 @@
}
postfix_snippet("box", "Box::new(expr)", &format!("Box::new({})", receiver_text)).add_to(acc);
- postfix_snippet("dbg", "dbg!(expr)", &format!("dbg!({})", receiver_text)).add_to(acc);
+ postfix_snippet("dbg", "dbg!(expr)", &format!("dbg!({})", receiver_text)).add_to(acc);
postfix_snippet("dbgr", "dbg!(&expr)", &format!("dbg!(&{})", receiver_text)).add_to(acc);
postfix_snippet("call", "function(expr)", &format!("${{1}}({})", receiver_text)).add_to(acc);
@@ -225,6 +225,10 @@
) -> Option<impl Fn(&str, &str, &str) -> Builder + 'ctx> {
let receiver_syntax = receiver.syntax();
let receiver_range = ctx.sema.original_range_opt(receiver_syntax)?.range;
+ if ctx.source_range().end() < receiver_range.start() {
+ // This shouldn't happen, yet it does. I assume this might be due to an incorrect token mapping.
+ return None;
+ }
let delete_range = TextRange::new(receiver_range.start(), ctx.source_range().end());
// Wrapping impl Fn in an option ruins lifetime inference for the parameters in a way that
diff --git a/crates/ide_completion/src/completions/trait_impl.rs b/crates/ide_completion/src/completions/trait_impl.rs
index 46da7cf..b214c5c 100644
--- a/crates/ide_completion/src/completions/trait_impl.rs
+++ b/crates/ide_completion/src/completions/trait_impl.rs
@@ -31,7 +31,7 @@
//! }
//! ```
-use hir::{self, HasAttrs, HasSource};
+use hir::{self, HasAttrs};
use ide_db::{path_transform::PathTransform, traits::get_missing_assoc_items, SymbolKind};
use syntax::{
ast::{self, edit_in_place::AttrsOwnerEdit},
@@ -151,7 +151,7 @@
let range = replacement_range(ctx, fn_def_node);
- if let Some(source) = func.source(ctx.db) {
+ if let Some(source) = ctx.sema.source(func) {
let assoc_item = ast::AssocItem::Fn(source.value);
if let Some(transformed_item) = get_transformed_assoc_item(ctx, assoc_item, impl_def) {
let transformed_fn = match transformed_item {
@@ -189,7 +189,7 @@
target_scope,
source_scope,
trait_,
- impl_def.source(ctx.db)?.value,
+ ctx.sema.source(impl_def)?.value,
);
transform.apply(assoc_item.syntax());
@@ -227,7 +227,7 @@
let const_name = const_.name(ctx.db).map(|n| n.to_smol_str());
if let Some(const_name) = const_name {
- if let Some(source) = const_.source(ctx.db) {
+ if let Some(source) = ctx.sema.source(const_) {
let assoc_item = ast::AssocItem::Const(source.value);
if let Some(transformed_item) = get_transformed_assoc_item(ctx, assoc_item, impl_def) {
let transformed_const = match transformed_item {
diff --git a/crates/ide_completion/src/context.rs b/crates/ide_completion/src/context.rs
index 26f6c17..9eec4fd 100644
--- a/crates/ide_completion/src/context.rs
+++ b/crates/ide_completion/src/context.rs
@@ -3,7 +3,7 @@
use std::iter;
use base_db::SourceDatabaseExt;
-use hir::{Local, Name, ScopeDef, Semantics, SemanticsScope, Type, TypeInfo};
+use hir::{HasAttrs, Local, Name, ScopeDef, Semantics, SemanticsScope, Type, TypeInfo};
use ide_db::{
active_parameter::ActiveParameter,
base_db::{FilePosition, SourceDatabase},
@@ -85,6 +85,7 @@
Function,
Closure,
}
+
/// `CompletionContext` is created early during completion to figure out, where
/// exactly is the cursor, syntax-wise.
#[derive(Debug)]
@@ -120,6 +121,7 @@
pub(super) lifetime_ctx: Option<LifetimeContext>,
pub(super) pattern_ctx: Option<PatternContext>,
pub(super) path_context: Option<PathCompletionContext>,
+
pub(super) locals: Vec<(Name, Local)>,
no_completion_required: bool,
@@ -308,6 +310,14 @@
self.token.kind() == BANG && self.token.parent().map_or(false, |it| it.kind() == MACRO_CALL)
}
+ /// Whether the given trait is an operator trait or not.
+ pub(crate) fn is_ops_trait(&self, trait_: hir::Trait) -> bool {
+ match trait_.attrs(self.db).lang() {
+ Some(lang) => OP_TRAIT_LANG_NAMES.contains(&lang.as_str()),
+ None => false,
+ }
+ }
+
/// A version of [`SemanticsScope::process_all_names`] that filters out `#[doc(hidden)]` items.
pub(crate) fn process_all_names(&self, f: &mut dyn FnMut(Name, ScopeDef)) {
let _p = profile::span("CompletionContext::process_all_names");
@@ -388,6 +398,7 @@
locals.push((name, local));
}
});
+
let mut ctx = CompletionContext {
sema,
scope,
@@ -889,6 +900,7 @@
});
PatternContext { refutability, is_param, has_type_ascription }
}
+
fn find_node_with_range<N: AstNode>(syntax: &SyntaxNode, range: TextRange) -> Option<N> {
syntax.covering_element(range).ancestors().find_map(N::cast)
}
@@ -915,6 +927,38 @@
token.kind() == T![&]
}
+const OP_TRAIT_LANG_NAMES: &[&str] = &[
+ "add_assign",
+ "add",
+ "bitand_assign",
+ "bitand",
+ "bitor_assign",
+ "bitor",
+ "bitxor_assign",
+ "bitxor",
+ "deref_mut",
+ "deref",
+ "div_assign",
+ "div",
+ "eq",
+ "fn_mut",
+ "fn_once",
+ "fn",
+ "index_mut",
+ "index",
+ "mul_assign",
+ "mul",
+ "neg",
+ "not",
+ "partial_ord",
+ "rem_assign",
+ "rem",
+ "shl_assign",
+ "shl",
+ "shr_assign",
+ "shr",
+ "sub_assign", "sub",
+];
#[cfg(test)]
mod tests {
use expect_test::{expect, Expect};
diff --git a/crates/ide_completion/src/item.rs b/crates/ide_completion/src/item.rs
index 4a6e034..b2a0b04 100644
--- a/crates/ide_completion/src/item.rs
+++ b/crates/ide_completion/src/item.rs
@@ -139,6 +139,8 @@
/// }
/// ```
pub is_local: bool,
+ /// Set for method completions of the `core::ops` and `core::cmp` family.
+ pub is_op_method: bool,
/// This is set in cases like these:
///
/// ```
@@ -175,6 +177,7 @@
}
impl CompletionRelevance {
+ const BASE_LINE: u32 = 1;
/// Provides a relevance score. Higher values are more relevant.
///
/// The absolute value of the relevance score is not meaningful, for
@@ -185,7 +188,7 @@
/// See is_relevant if you need to make some judgement about score
/// in an absolute sense.
pub fn score(&self) -> u32 {
- let mut score = 0;
+ let mut score = Self::BASE_LINE;
if self.exact_name_match {
score += 1;
@@ -198,6 +201,9 @@
if self.is_local {
score += 1;
}
+ if self.is_op_method {
+ score -= 1;
+ }
if self.exact_postfix_snippet_match {
score += 100;
}
@@ -208,7 +214,7 @@
/// some threshold such that we think it is especially likely
/// to be relevant.
pub fn is_relevant(&self) -> bool {
- self.score() > 0
+ self.score() > Self::BASE_LINE
}
}
@@ -558,6 +564,7 @@
// This test asserts that the relevance score for these items is ascending, and
// that any items in the same vec have the same score.
let expected_relevance_order = vec![
+ vec![CompletionRelevance { is_op_method: true, ..CompletionRelevance::default() }],
vec![CompletionRelevance::default()],
vec![
CompletionRelevance { exact_name_match: true, ..CompletionRelevance::default() },
@@ -588,10 +595,8 @@
..CompletionRelevance::default()
}],
vec![CompletionRelevance {
- exact_name_match: false,
- type_match: None,
- is_local: false,
exact_postfix_snippet_match: true,
+ ..CompletionRelevance::default()
}],
];
diff --git a/crates/ide_completion/src/lib.rs b/crates/ide_completion/src/lib.rs
index 6a087ed..a2217af 100644
--- a/crates/ide_completion/src/lib.rs
+++ b/crates/ide_completion/src/lib.rs
@@ -168,6 +168,7 @@
completions::flyimport::import_on_the_fly(&mut acc, &ctx);
completions::lifetime::complete_lifetime(&mut acc, &ctx);
completions::lifetime::complete_label(&mut acc, &ctx);
+ completions::format_string::format_string(&mut acc, &ctx);
Some(acc)
}
diff --git a/crates/ide_completion/src/render.rs b/crates/ide_completion/src/render.rs
index cd71ad1..15dacc8 100644
--- a/crates/ide_completion/src/render.rs
+++ b/crates/ide_completion/src/render.rs
@@ -400,6 +400,7 @@
(relevance.exact_name_match, "name"),
(relevance.is_local, "local"),
(relevance.exact_postfix_snippet_match, "snippet"),
+ (relevance.is_op_method, "op_method"),
]
.into_iter()
.filter_map(|(cond, desc)| if cond { Some(desc) } else { None })
@@ -580,6 +581,7 @@
Exact,
),
is_local: false,
+ is_op_method: false,
exact_postfix_snippet_match: false,
},
trigger_call_info: true,
@@ -600,6 +602,7 @@
Exact,
),
is_local: false,
+ is_op_method: false,
exact_postfix_snippet_match: false,
},
},
@@ -685,6 +688,7 @@
CouldUnify,
),
is_local: false,
+ is_op_method: false,
exact_postfix_snippet_match: false,
},
},
@@ -1350,6 +1354,23 @@
}
#[test]
+ fn op_method_relevances() {
+ check_relevance(
+ r#"
+#[lang = "sub"]
+trait Sub {
+ fn sub(self, other: Self) -> Self { self }
+}
+impl Sub for u32 {}
+fn foo(a: u32) { a.$0 }
+"#,
+ expect![[r#"
+ me sub(…) (as Sub) [op_method]
+ "#]],
+ )
+ }
+
+ #[test]
fn struct_field_method_ref() {
check_kinds(
r#"
diff --git a/crates/ide_completion/src/render/function.rs b/crates/ide_completion/src/render/function.rs
index bd46e1f..fc2cb93 100644
--- a/crates/ide_completion/src/render/function.rs
+++ b/crates/ide_completion/src/render/function.rs
@@ -70,6 +70,13 @@
item.set_relevance(CompletionRelevance {
type_match: compute_type_match(completion, &ret_type),
exact_name_match: compute_exact_name_match(completion, &call),
+ is_op_method: match func_type {
+ FuncType::Method(_) => func
+ .as_assoc_item(ctx.db())
+ .and_then(|trait_| trait_.containing_trait_or_trait_impl(ctx.db()))
+ .map_or(false, |trait_| completion.is_ops_trait(trait_)),
+ _ => false,
+ },
..CompletionRelevance::default()
});
diff --git a/crates/ide_completion/src/render/macro_.rs b/crates/ide_completion/src/render/macro_.rs
index 5386cbc..8d6a634 100644
--- a/crates/ide_completion/src/render/macro_.rs
+++ b/crates/ide_completion/src/render/macro_.rs
@@ -1,8 +1,8 @@
//! Renderer for macro invocations.
use either::Either;
-use hir::{db::HirDatabase, Documentation, HasSource};
-use ide_db::SymbolKind;
+use hir::{Documentation, HasSource, InFile, Semantics};
+use ide_db::{RootDatabase, SymbolKind};
use syntax::{
display::{fn_as_proc_macro_label, macro_label},
SmolStr,
@@ -30,8 +30,6 @@
macro_: hir::MacroDef,
import_to_add: Option<ImportEdit>,
) -> CompletionItem {
- let db = completion.db;
-
let source_range = if completion.is_immediately_after_macro_bang() {
cov_mark::hit!(completes_macro_call_if_cursor_at_bang_token);
completion.token.parent().map_or_else(|| ctx.source_range(), |it| it.text_range())
@@ -54,7 +52,7 @@
label(&ctx, needs_bang, bra, ket, &name),
);
item.set_deprecated(ctx.is_deprecated(macro_))
- .set_detail(detail(db, macro_))
+ .set_detail(detail(&completion.sema, macro_))
.set_documentation(docs);
if let Some(import_to_add) = import_to_add {
@@ -104,9 +102,11 @@
SmolStr::from_iter([name, "!"])
}
-fn detail(db: &dyn HirDatabase, macro_: hir::MacroDef) -> Option<String> {
+fn detail(sema: &Semantics<RootDatabase>, macro_: hir::MacroDef) -> Option<String> {
// FIXME: This is parsing the file!
- let detail = match macro_.source(db)?.value {
+ let InFile { file_id, value } = macro_.source(sema.db)?;
+ let _ = sema.parse_or_expand(file_id);
+ let detail = match value {
Either::Left(node) => macro_label(&node),
Either::Right(node) => fn_as_proc_macro_label(&node),
};
diff --git a/crates/ide_completion/src/tests/attribute.rs b/crates/ide_completion/src/tests/attribute.rs
index c2bd26e..2d2a1b8 100644
--- a/crates/ide_completion/src/tests/attribute.rs
+++ b/crates/ide_completion/src/tests/attribute.rs
@@ -41,6 +41,19 @@
}
#[test]
+fn proc_macros_on_comment() {
+ check(
+ r#"
+//- proc_macros: identity
+/// $0
+#[proc_macros::identity]
+struct Foo;
+"#,
+ expect![[r#""#]],
+ )
+}
+
+#[test]
fn proc_macros_qualified() {
check(
r#"
diff --git a/crates/ide_completion/src/tests/pattern.rs b/crates/ide_completion/src/tests/pattern.rs
index c9a31ee..7f437e1 100644
--- a/crates/ide_completion/src/tests/pattern.rs
+++ b/crates/ide_completion/src/tests/pattern.rs
@@ -368,3 +368,17 @@
"#]],
)
}
+
+#[test]
+fn completes_fully_equal() {
+ check_empty(
+ r#"
+fn foo(bar: u32) {}
+fn bar(bar$0) {}
+"#,
+ expect![[r#"
+ bn bar: u32
+ kw mut
+ "#]],
+ )
+}
diff --git a/crates/ide_db/src/helpers.rs b/crates/ide_db/src/helpers.rs
index 344f8db..2d3d640 100644
--- a/crates/ide_db/src/helpers.rs
+++ b/crates/ide_db/src/helpers.rs
@@ -7,6 +7,7 @@
pub mod insert_whitespace_into_node;
pub mod node_ext;
pub mod rust_doc;
+pub mod format_string;
use std::{collections::VecDeque, iter};
diff --git a/crates/ide_db/src/helpers/format_string.rs b/crates/ide_db/src/helpers/format_string.rs
new file mode 100644
index 0000000..29f61a9
--- /dev/null
+++ b/crates/ide_db/src/helpers/format_string.rs
@@ -0,0 +1,292 @@
+//! Tools to work with format string literals for the `format_args!` family of macros.
+use syntax::{
+ ast::{self, IsString},
+ AstNode, AstToken, TextRange,
+};
+
+pub fn is_format_string(string: &ast::String) -> bool {
+ // Check if `string` is a format string argument of a macro invocation.
+ // `string` is a string literal, mapped down into the innermost macro expansion.
+ // Since `format_args!` etc. remove the format string when expanding, but place all arguments
+ // in the expanded output, we know that the string token is (part of) the format string if it
+ // appears in `format_args!` (otherwise it would have been mapped down further).
+ //
+ // This setup lets us correctly highlight the components of `concat!("{}", "bla")` format
+ // strings. It still fails for `concat!("{", "}")`, but that is rare.
+ // The check below is wrapped in a closure so `?` can be used for the lookups.
+ (|| {
+ let macro_call = string.syntax().ancestors().find_map(ast::MacroCall::cast)?;
+ let name = macro_call.path()?.segment()?.name_ref()?;
+
+ if !matches!(
+ name.text().as_str(),
+ "format_args" | "format_args_nl" | "const_format_args" | "panic_2015" | "panic_2021"
+ ) {
+ return None;
+ }
+
+ // NB: we match against `panic_2015`/`panic_2021` here because they have a special-cased arm for
+ // `"{}"`, which otherwise wouldn't get highlighted.
+
+ Some(())
+ })()
+ .is_some()
+}
+
+#[derive(Debug)]
+pub enum FormatSpecifier {
+ Open,
+ Close,
+ Integer,
+ Identifier,
+ Colon,
+ Fill,
+ Align,
+ Sign,
+ NumberSign,
+ Zero,
+ DollarSign,
+ Dot,
+ Asterisk,
+ QuestionMark,
+}
+
+pub fn lex_format_specifiers(
+ string: &ast::String,
+ mut callback: &mut dyn FnMut(TextRange, FormatSpecifier),
+) {
+ let mut char_ranges = Vec::new();
+ string.escaped_char_ranges(&mut |range, res| char_ranges.push((range, res)));
+ let mut chars = char_ranges
+ .iter()
+ .filter_map(|(range, res)| Some((*range, *res.as_ref().ok()?)))
+ .peekable();
+
+ while let Some((range, first_char)) = chars.next() {
+ if let '{' = first_char {
+ // Format specifier, see syntax at https://doc.rust-lang.org/std/fmt/index.html#syntax
+ if let Some((_, '{')) = chars.peek() {
+ // Escaped format specifier, `{{`
+ chars.next();
+ continue;
+ }
+
+ callback(range, FormatSpecifier::Open);
+
+ // check for integer/identifier
+ let (_, int_char) = chars.peek().copied().unwrap_or_default();
+ match int_char {
+ // integer
+ '0'..='9' => read_integer(&mut chars, &mut callback),
+ // identifier
+ c if c == '_' || c.is_alphabetic() => read_identifier(&mut chars, &mut callback),
+ _ => {}
+ }
+
+ if let Some((_, ':')) = chars.peek() {
+ skip_char_and_emit(&mut chars, FormatSpecifier::Colon, &mut callback);
+
+ // check for fill/align
+ let mut cloned = chars.clone().take(2);
+ let (_, first) = cloned.next().unwrap_or_default();
+ let (_, second) = cloned.next().unwrap_or_default();
+ match second {
+ '<' | '^' | '>' => {
+ // alignment specifier, first char specifies fillment
+ skip_char_and_emit(&mut chars, FormatSpecifier::Fill, &mut callback);
+ skip_char_and_emit(&mut chars, FormatSpecifier::Align, &mut callback);
+ }
+ _ => {
+ if let '<' | '^' | '>' = first {
+ skip_char_and_emit(&mut chars, FormatSpecifier::Align, &mut callback);
+ }
+ }
+ }
+
+ // check for sign
+ match chars.peek().copied().unwrap_or_default().1 {
+ '+' | '-' => {
+ skip_char_and_emit(&mut chars, FormatSpecifier::Sign, &mut callback);
+ }
+ _ => {}
+ }
+
+ // check for `#`
+ if let Some((_, '#')) = chars.peek() {
+ skip_char_and_emit(&mut chars, FormatSpecifier::NumberSign, &mut callback);
+ }
+
+ // check for `0`
+ let mut cloned = chars.clone().take(2);
+ let first = cloned.next().map(|next| next.1);
+ let second = cloned.next().map(|next| next.1);
+
+ if first == Some('0') && second != Some('$') {
+ skip_char_and_emit(&mut chars, FormatSpecifier::Zero, &mut callback);
+ }
+
+ // width
+ match chars.peek().copied().unwrap_or_default().1 {
+ '0'..='9' => {
+ read_integer(&mut chars, &mut callback);
+ if let Some((_, '$')) = chars.peek() {
+ skip_char_and_emit(
+ &mut chars,
+ FormatSpecifier::DollarSign,
+ &mut callback,
+ );
+ }
+ }
+ c if c == '_' || c.is_alphabetic() => {
+ read_identifier(&mut chars, &mut callback);
+
+ if chars.peek().map(|&(_, c)| c) == Some('?') {
+ skip_char_and_emit(
+ &mut chars,
+ FormatSpecifier::QuestionMark,
+ &mut callback,
+ );
+ }
+
+ // can be either width (indicated by dollar sign, or type in which case
+ // the next sign has to be `}`)
+ let next = chars.peek().map(|&(_, c)| c);
+
+ match next {
+ Some('$') => skip_char_and_emit(
+ &mut chars,
+ FormatSpecifier::DollarSign,
+ &mut callback,
+ ),
+ Some('}') => {
+ skip_char_and_emit(
+ &mut chars,
+ FormatSpecifier::Close,
+ &mut callback,
+ );
+ continue;
+ }
+ _ => continue,
+ };
+ }
+ _ => {}
+ }
+
+ // precision
+ if let Some((_, '.')) = chars.peek() {
+ skip_char_and_emit(&mut chars, FormatSpecifier::Dot, &mut callback);
+
+ match chars.peek().copied().unwrap_or_default().1 {
+ '*' => {
+ skip_char_and_emit(
+ &mut chars,
+ FormatSpecifier::Asterisk,
+ &mut callback,
+ );
+ }
+ '0'..='9' => {
+ read_integer(&mut chars, &mut callback);
+ if let Some((_, '$')) = chars.peek() {
+ skip_char_and_emit(
+ &mut chars,
+ FormatSpecifier::DollarSign,
+ &mut callback,
+ );
+ }
+ }
+ c if c == '_' || c.is_alphabetic() => {
+ read_identifier(&mut chars, &mut callback);
+ if chars.peek().map(|&(_, c)| c) != Some('$') {
+ continue;
+ }
+ skip_char_and_emit(
+ &mut chars,
+ FormatSpecifier::DollarSign,
+ &mut callback,
+ );
+ }
+ _ => {
+ continue;
+ }
+ }
+ }
+
+ // type
+ match chars.peek().copied().unwrap_or_default().1 {
+ '?' => {
+ skip_char_and_emit(
+ &mut chars,
+ FormatSpecifier::QuestionMark,
+ &mut callback,
+ );
+ }
+ c if c == '_' || c.is_alphabetic() => {
+ read_identifier(&mut chars, &mut callback);
+
+ if chars.peek().map(|&(_, c)| c) == Some('?') {
+ skip_char_and_emit(
+ &mut chars,
+ FormatSpecifier::QuestionMark,
+ &mut callback,
+ );
+ }
+ }
+ _ => {}
+ }
+ }
+
+ if let Some((_, '}')) = chars.peek() {
+ skip_char_and_emit(&mut chars, FormatSpecifier::Close, &mut callback);
+ }
+ continue;
+ }
+ }
+
+ fn skip_char_and_emit<I, F>(
+ chars: &mut std::iter::Peekable<I>,
+ emit: FormatSpecifier,
+ callback: &mut F,
+ ) where
+ I: Iterator<Item = (TextRange, char)>,
+ F: FnMut(TextRange, FormatSpecifier),
+ {
+ let (range, _) = chars.next().unwrap();
+ callback(range, emit);
+ }
+
+ fn read_integer<I, F>(chars: &mut std::iter::Peekable<I>, callback: &mut F)
+ where
+ I: Iterator<Item = (TextRange, char)>,
+ F: FnMut(TextRange, FormatSpecifier),
+ {
+ let (mut range, c) = chars.next().unwrap();
+ assert!(c.is_ascii_digit());
+ while let Some(&(r, next_char)) = chars.peek() {
+ if next_char.is_ascii_digit() {
+ chars.next();
+ range = range.cover(r);
+ } else {
+ break;
+ }
+ }
+ callback(range, FormatSpecifier::Integer);
+ }
+
+ fn read_identifier<I, F>(chars: &mut std::iter::Peekable<I>, callback: &mut F)
+ where
+ I: Iterator<Item = (TextRange, char)>,
+ F: FnMut(TextRange, FormatSpecifier),
+ {
+ let (mut range, c) = chars.next().unwrap();
+ assert!(c.is_alphabetic() || c == '_');
+ while let Some(&(r, next_char)) = chars.peek() {
+ if next_char == '_' || next_char.is_ascii_digit() || next_char.is_alphabetic() {
+ chars.next();
+ range = range.cover(r);
+ } else {
+ break;
+ }
+ }
+ callback(range, FormatSpecifier::Identifier);
+ }
+}
diff --git a/crates/ide_db/src/items_locator.rs b/crates/ide_db/src/items_locator.rs
index ca8266b..db38a48 100644
--- a/crates/ide_db/src/items_locator.rs
+++ b/crates/ide_db/src/items_locator.rs
@@ -5,6 +5,7 @@
use either::Either;
use hir::{
import_map::{self, ImportKind},
+ symbols::FileSymbol,
AsAssocItem, Crate, ItemInNs, Semantics,
};
use limit::Limit;
@@ -13,8 +14,7 @@
use crate::{
defs::{Definition, NameClass},
helpers::import_assets::NameToImport,
- symbol_index::{self, FileSymbol},
- RootDatabase,
+ symbol_index, RootDatabase,
};
/// A value to use, when uncertain which limit to pick.
diff --git a/crates/ide_db/src/lib.rs b/crates/ide_db/src/lib.rs
index bcbb09e..602eaf0 100644
--- a/crates/ide_db/src/lib.rs
+++ b/crates/ide_db/src/lib.rs
@@ -26,7 +26,10 @@
salsa::{self, Durability},
AnchoredPath, CrateId, FileId, FileLoader, FileLoaderDelegate, SourceDatabase, Upcast,
};
-use hir::db::{AstDatabase, DefDatabase, HirDatabase};
+use hir::{
+ db::{AstDatabase, DefDatabase, HirDatabase},
+ symbols::FileSymbolKind,
+};
use rustc_hash::FxHashSet;
use crate::{line_index::LineIndex, symbol_index::SymbolsDatabase};
@@ -183,6 +186,23 @@
}
}
+impl From<FileSymbolKind> for SymbolKind {
+ fn from(it: FileSymbolKind) -> Self {
+ match it {
+ FileSymbolKind::Const => SymbolKind::Const,
+ FileSymbolKind::Enum => SymbolKind::Enum,
+ FileSymbolKind::Function => SymbolKind::Function,
+ FileSymbolKind::Macro => SymbolKind::Macro,
+ FileSymbolKind::Module => SymbolKind::Module,
+ FileSymbolKind::Static => SymbolKind::Static,
+ FileSymbolKind::Struct => SymbolKind::Struct,
+ FileSymbolKind::Trait => SymbolKind::Trait,
+ FileSymbolKind::TypeAlias => SymbolKind::TypeAlias,
+ FileSymbolKind::Union => SymbolKind::Union,
+ }
+ }
+}
+
#[cfg(test)]
mod tests {
mod sourcegen_lints;
diff --git a/crates/ide_db/src/path_transform.rs b/crates/ide_db/src/path_transform.rs
index 1b4f793..524af7f 100644
--- a/crates/ide_db/src/path_transform.rs
+++ b/crates/ide_db/src/path_transform.rs
@@ -118,14 +118,20 @@
impl<'a> Ctx<'a> {
fn apply(&self, item: &SyntaxNode) {
- for event in item.preorder() {
- let node = match event {
- syntax::WalkEvent::Enter(_) => continue,
- syntax::WalkEvent::Leave(it) => it,
- };
- if let Some(path) = ast::Path::cast(node.clone()) {
- self.transform_path(path);
- }
+ // `transform_path` may update a node's parent and that would break the
+ // tree traversal. Thus all paths in the tree are collected into a vec
+ // so that such operation is safe.
+ let paths = item
+ .preorder()
+ .filter_map(|event| match event {
+ syntax::WalkEvent::Enter(_) => None,
+ syntax::WalkEvent::Leave(node) => Some(node),
+ })
+ .filter_map(ast::Path::cast)
+ .collect::<Vec<_>>();
+
+ for path in paths {
+ self.transform_path(path);
}
}
fn transform_path(&self, path: ast::Path) -> Option<()> {
@@ -145,10 +151,60 @@
match resolution {
hir::PathResolution::TypeParam(tp) => {
if let Some(subst) = self.substs.get(&tp) {
- ted::replace(path.syntax(), subst.clone_subtree().clone_for_update().syntax())
+ let parent = path.syntax().parent()?;
+ if let Some(parent) = ast::Path::cast(parent.clone()) {
+ // Path inside path means that there is an associated
+ // type/constant on the type parameter. It is necessary
+ // to fully qualify the type with `as Trait`. Even
+ // though it might be unnecessary if `subst` is generic
+ // type, always fully qualifying the path is safer
+ // because of potential clash of associated types from
+ // multiple traits
+
+ let trait_ref = find_trait_for_assoc_item(
+ self.source_scope,
+ tp,
+ parent.segment()?.name_ref()?,
+ )
+ .and_then(|trait_ref| {
+ let found_path = self.target_module.find_use_path(
+ self.source_scope.db.upcast(),
+ hir::ModuleDef::Trait(trait_ref),
+ )?;
+ match ast::make::ty_path(mod_path_to_ast(&found_path)) {
+ ast::Type::PathType(path_ty) => Some(path_ty),
+ _ => None,
+ }
+ });
+
+ let segment = ast::make::path_segment_ty(subst.clone(), trait_ref);
+ let qualified =
+ ast::make::path_from_segments(std::iter::once(segment), false);
+ ted::replace(path.syntax(), qualified.clone_for_update().syntax());
+ } else if let Some(path_ty) = ast::PathType::cast(parent) {
+ ted::replace(
+ path_ty.syntax(),
+ subst.clone_subtree().clone_for_update().syntax(),
+ );
+ } else {
+ ted::replace(
+ path.syntax(),
+ subst.clone_subtree().clone_for_update().syntax(),
+ );
+ }
}
}
hir::PathResolution::Def(def) => {
+ if let hir::ModuleDef::Trait(_) = def {
+ if matches!(path.segment()?.kind()?, ast::PathSegmentKind::Type { .. }) {
+ // `speculative_resolve` resolves segments like `<T as
+ // Trait>` into `Trait`, but just the trait name should
+ // not be used as the replacement of the original
+ // segment.
+ return None;
+ }
+ }
+
let found_path =
self.target_module.find_use_path(self.source_scope.db.upcast(), def)?;
let res = mod_path_to_ast(&found_path).clone_for_update();
@@ -195,3 +251,34 @@
Some(result)
}
+
+fn find_trait_for_assoc_item(
+ scope: &SemanticsScope,
+ type_param: hir::TypeParam,
+ assoc_item: ast::NameRef,
+) -> Option<hir::Trait> {
+ let db = scope.db;
+ let trait_bounds = type_param.trait_bounds(db);
+
+ let assoc_item_name = assoc_item.text();
+
+ for trait_ in trait_bounds {
+ let names = trait_.items(db).into_iter().filter_map(|item| match item {
+ hir::AssocItem::TypeAlias(ta) => Some(ta.name(db)),
+ hir::AssocItem::Const(cst) => cst.name(db),
+ _ => None,
+ });
+
+ for name in names {
+ if assoc_item_name.as_str() == name.as_text()?.as_str() {
+ // It is fine to return the first match because in case of
+ // multiple possibilities, the exact trait must be disambiguated
+ // in the definition of trait being implemented, so this search
+ // should not be needed.
+ return Some(trait_);
+ }
+ }
+ }
+
+ None
+}
diff --git a/crates/ide_db/src/symbol_index.rs b/crates/ide_db/src/symbol_index.rs
index 62f4a81..9697ba2 100644
--- a/crates/ide_db/src/symbol_index.rs
+++ b/crates/ide_db/src/symbol_index.rs
@@ -30,21 +30,18 @@
use base_db::{
salsa::{self, ParallelDatabase},
- CrateId, FileRange, SourceDatabaseExt, SourceRootId, Upcast,
+ SourceDatabaseExt, SourceRootId, Upcast,
};
-use either::Either;
use fst::{self, Streamer};
use hir::{
- db::{DefDatabase, HirDatabase},
- AdtId, AssocItemId, AssocItemLoc, DefHasSource, DefWithBodyId, HasSource, HirFileId, ImplId,
- InFile, ItemContainerId, ItemLoc, ItemTreeNode, Lookup, MacroDef, Module, ModuleDefId,
- ModuleId, Semantics, TraitId,
+ db::HirDatabase,
+ symbols::{FileSymbol, SymbolCollector},
+ Crate, Module,
};
use rayon::prelude::*;
use rustc_hash::FxHashSet;
-use syntax::{ast::HasName, AstNode, SmolStr, SyntaxNode, SyntaxNodePtr};
-use crate::{RootDatabase, SymbolKind};
+use crate::RootDatabase;
#[derive(Debug)]
pub struct Query {
@@ -96,7 +93,7 @@
pub trait SymbolsDatabase: HirDatabase + SourceDatabaseExt + Upcast<dyn HirDatabase> {
/// The symbol index for a given module. These modules should only be in source roots that
/// are inside local_roots.
- fn module_symbols(&self, module_id: ModuleId) -> Arc<SymbolIndex>;
+ fn module_symbols(&self, module: Module) -> Arc<SymbolIndex>;
/// The symbol index for a given source root within library_roots.
fn library_symbols(&self, source_root_id: SourceRootId) -> Arc<SymbolIndex>;
@@ -119,20 +116,20 @@
let symbols = db
.source_root_crates(source_root_id)
.iter()
- .flat_map(|&krate| module_ids_for_crate(db.upcast(), krate))
+ .flat_map(|&krate| Crate::from(krate).modules(db.upcast()))
// we specifically avoid calling SymbolsDatabase::module_symbols here, even they do the same thing,
// as the index for a library is not going to really ever change, and we do not want to store each
// module's index in salsa.
- .map(|module_id| SymbolCollector::collect(db, module_id))
+ .map(|module| SymbolCollector::collect(db.upcast(), module))
.flatten()
.collect();
Arc::new(SymbolIndex::new(symbols))
}
-fn module_symbols(db: &dyn SymbolsDatabase, module_id: ModuleId) -> Arc<SymbolIndex> {
+fn module_symbols(db: &dyn SymbolsDatabase, module: Module) -> Arc<SymbolIndex> {
let _p = profile::span("module_symbols");
- let symbols = SymbolCollector::collect(db, module_id);
+ let symbols = SymbolCollector::collect(db.upcast(), module);
Arc::new(SymbolIndex::new(symbols))
}
@@ -191,41 +188,36 @@
.map_with(Snap::new(db), |snap, &root| snap.library_symbols(root))
.collect()
} else {
- let mut module_ids = Vec::new();
+ let mut modules = Vec::new();
for &root in db.local_roots().iter() {
let crates = db.source_root_crates(root);
for &krate in crates.iter() {
- module_ids.extend(module_ids_for_crate(db, krate));
+ modules.extend(Crate::from(krate).modules(db));
}
}
- module_ids
+ modules
.par_iter()
- .map_with(Snap::new(db), |snap, &module_id| snap.module_symbols(module_id))
+ .map_with(Snap::new(db), |snap, &module| snap.module_symbols(module))
.collect()
};
query.search(&indices)
}
-pub fn crate_symbols(db: &RootDatabase, krate: CrateId, query: Query) -> Vec<FileSymbol> {
+pub fn crate_symbols(db: &RootDatabase, krate: Crate, query: Query) -> Vec<FileSymbol> {
let _p = profile::span("crate_symbols").detail(|| format!("{:?}", query));
- let module_ids = module_ids_for_crate(db, krate);
- let indices: Vec<_> = module_ids
+ let modules = krate.modules(db);
+ let indices: Vec<_> = modules
.par_iter()
- .map_with(Snap::new(db), |snap, &module_id| snap.module_symbols(module_id))
+ .map_with(Snap::new(db), |snap, &module| snap.module_symbols(module))
.collect();
query.search(&indices)
}
-fn module_ids_for_crate(db: &dyn DefDatabase, krate: CrateId) -> Vec<ModuleId> {
- let def_map = db.crate_def_map(krate);
- def_map.modules().map(|(id, _)| def_map.module_id(id)).collect()
-}
-
pub fn index_resolve(db: &RootDatabase, name: &str) -> Vec<FileSymbol> {
let mut query = Query::new(name.to_string());
query.exact();
@@ -356,374 +348,12 @@
}
}
-/// The actual data that is stored in the index. It should be as compact as
-/// possible.
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub struct FileSymbol {
- pub name: SmolStr,
- pub loc: DeclarationLocation,
- pub kind: FileSymbolKind,
- pub container_name: Option<SmolStr>,
-}
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub struct DeclarationLocation {
- /// The file id for both the `ptr` and `name_ptr`.
- pub hir_file_id: HirFileId,
- /// This points to the whole syntax node of the declaration.
- pub ptr: SyntaxNodePtr,
- /// This points to the [`syntax::ast::Name`] identifier of the declaration.
- pub name_ptr: SyntaxNodePtr,
-}
-
-impl DeclarationLocation {
- pub fn syntax(&self, semantics: &Semantics<'_, RootDatabase>) -> Option<SyntaxNode> {
- let root = semantics.parse_or_expand(self.hir_file_id)?;
- Some(self.ptr.to_node(&root))
- }
-
- pub fn original_range(&self, db: &dyn HirDatabase) -> Option<FileRange> {
- find_original_file_range(db, self.hir_file_id, &self.ptr)
- }
-
- pub fn original_name_range(&self, db: &dyn HirDatabase) -> Option<FileRange> {
- find_original_file_range(db, self.hir_file_id, &self.name_ptr)
- }
-}
-
-fn find_original_file_range(
- db: &dyn HirDatabase,
- file_id: HirFileId,
- ptr: &SyntaxNodePtr,
-) -> Option<FileRange> {
- let root = db.parse_or_expand(file_id)?;
- let node = ptr.to_node(&root);
- let node = InFile::new(file_id, &node);
-
- Some(node.original_file_range(db.upcast()))
-}
-
-#[derive(PartialEq, Eq, Hash, Clone, Copy, Debug)]
-pub enum FileSymbolKind {
- Const,
- Enum,
- Function,
- Macro,
- Module,
- Static,
- Struct,
- Trait,
- TypeAlias,
- Union,
-}
-
-impl FileSymbolKind {
- fn is_type(self: FileSymbolKind) -> bool {
- matches!(
- self,
- FileSymbolKind::Struct
- | FileSymbolKind::Enum
- | FileSymbolKind::Trait
- | FileSymbolKind::TypeAlias
- | FileSymbolKind::Union
- )
- }
-}
-
-impl From<FileSymbolKind> for SymbolKind {
- fn from(it: FileSymbolKind) -> Self {
- match it {
- FileSymbolKind::Const => SymbolKind::Const,
- FileSymbolKind::Enum => SymbolKind::Enum,
- FileSymbolKind::Function => SymbolKind::Function,
- FileSymbolKind::Macro => SymbolKind::Macro,
- FileSymbolKind::Module => SymbolKind::Module,
- FileSymbolKind::Static => SymbolKind::Static,
- FileSymbolKind::Struct => SymbolKind::Struct,
- FileSymbolKind::Trait => SymbolKind::Trait,
- FileSymbolKind::TypeAlias => SymbolKind::TypeAlias,
- FileSymbolKind::Union => SymbolKind::Union,
- }
- }
-}
-
-/// Represents an outstanding module that the symbol collector must collect symbols from.
-struct SymbolCollectorWork {
- module_id: ModuleId,
- parent: Option<DefWithBodyId>,
-}
-
-struct SymbolCollector<'a> {
- db: &'a dyn SymbolsDatabase,
- symbols: Vec<FileSymbol>,
- work: Vec<SymbolCollectorWork>,
- current_container_name: Option<SmolStr>,
-}
-
-/// Given a [`ModuleId`] and a [`SymbolsDatabase`], use the DefMap for the module's crate to collect all symbols that should be
-/// indexed for the given module.
-impl<'a> SymbolCollector<'a> {
- fn collect(db: &dyn SymbolsDatabase, module_id: ModuleId) -> Vec<FileSymbol> {
- let mut symbol_collector = SymbolCollector {
- db,
- symbols: Default::default(),
- current_container_name: None,
- // The initial work is the root module we're collecting, additional work will
- // be populated as we traverse the module's definitions.
- work: vec![SymbolCollectorWork { module_id, parent: None }],
- };
-
- while let Some(work) = symbol_collector.work.pop() {
- symbol_collector.do_work(work);
- }
-
- symbol_collector.symbols
- }
-
- fn do_work(&mut self, work: SymbolCollectorWork) {
- self.db.unwind_if_cancelled();
-
- let parent_name = work.parent.and_then(|id| self.def_with_body_id_name(id));
- self.with_container_name(parent_name, |s| s.collect_from_module(work.module_id));
- }
-
- fn collect_from_module(&mut self, module_id: ModuleId) {
- let def_map = module_id.def_map(self.db.upcast());
- let scope = &def_map[module_id.local_id].scope;
-
- for module_def_id in scope.declarations() {
- match module_def_id {
- ModuleDefId::ModuleId(id) => self.push_module(id),
- ModuleDefId::FunctionId(id) => {
- self.push_decl_assoc(id, FileSymbolKind::Function);
- self.collect_from_body(id);
- }
- ModuleDefId::AdtId(AdtId::StructId(id)) => {
- self.push_decl(id, FileSymbolKind::Struct)
- }
- ModuleDefId::AdtId(AdtId::EnumId(id)) => self.push_decl(id, FileSymbolKind::Enum),
- ModuleDefId::AdtId(AdtId::UnionId(id)) => self.push_decl(id, FileSymbolKind::Union),
- ModuleDefId::ConstId(id) => {
- self.push_decl_assoc(id, FileSymbolKind::Const);
- self.collect_from_body(id);
- }
- ModuleDefId::StaticId(id) => {
- self.push_decl_assoc(id, FileSymbolKind::Static);
- self.collect_from_body(id);
- }
- ModuleDefId::TraitId(id) => {
- self.push_decl(id, FileSymbolKind::Trait);
- self.collect_from_trait(id);
- }
- ModuleDefId::TypeAliasId(id) => {
- self.push_decl_assoc(id, FileSymbolKind::TypeAlias);
- }
- // Don't index these.
- ModuleDefId::BuiltinType(_) => {}
- ModuleDefId::EnumVariantId(_) => {}
- }
- }
-
- for impl_id in scope.impls() {
- self.collect_from_impl(impl_id);
- }
-
- for const_id in scope.unnamed_consts() {
- self.collect_from_body(const_id);
- }
-
- for macro_def_id in scope.macro_declarations() {
- self.push_decl_macro(macro_def_id.into());
- }
- }
-
- fn collect_from_body(&mut self, body_id: impl Into<DefWithBodyId>) {
- let body_id = body_id.into();
- let body = self.db.body(body_id);
-
- // Descend into the blocks and enqueue collection of all modules within.
- for (_, def_map) in body.blocks(self.db.upcast()) {
- for (id, _) in def_map.modules() {
- self.work.push(SymbolCollectorWork {
- module_id: def_map.module_id(id),
- parent: Some(body_id),
- });
- }
- }
- }
-
- fn collect_from_impl(&mut self, impl_id: ImplId) {
- let impl_data = self.db.impl_data(impl_id);
- for &assoc_item_id in &impl_data.items {
- self.push_assoc_item(assoc_item_id)
- }
- }
-
- fn collect_from_trait(&mut self, trait_id: TraitId) {
- let trait_data = self.db.trait_data(trait_id);
- self.with_container_name(trait_data.name.as_text(), |s| {
- for &(_, assoc_item_id) in &trait_data.items {
- s.push_assoc_item(assoc_item_id);
- }
- });
- }
-
- fn with_container_name(&mut self, container_name: Option<SmolStr>, f: impl FnOnce(&mut Self)) {
- if let Some(container_name) = container_name {
- let prev = self.current_container_name.replace(container_name);
- f(self);
- self.current_container_name = prev;
- } else {
- f(self);
- }
- }
-
- fn current_container_name(&self) -> Option<SmolStr> {
- self.current_container_name.clone()
- }
-
- fn def_with_body_id_name(&self, body_id: DefWithBodyId) -> Option<SmolStr> {
- match body_id {
- DefWithBodyId::FunctionId(id) => Some(
- id.lookup(self.db.upcast()).source(self.db.upcast()).value.name()?.text().into(),
- ),
- DefWithBodyId::StaticId(id) => Some(
- id.lookup(self.db.upcast()).source(self.db.upcast()).value.name()?.text().into(),
- ),
- DefWithBodyId::ConstId(id) => Some(
- id.lookup(self.db.upcast()).source(self.db.upcast()).value.name()?.text().into(),
- ),
- }
- }
-
- fn push_assoc_item(&mut self, assoc_item_id: AssocItemId) {
- match assoc_item_id {
- AssocItemId::FunctionId(id) => self.push_decl_assoc(id, FileSymbolKind::Function),
- AssocItemId::ConstId(id) => self.push_decl_assoc(id, FileSymbolKind::Const),
- AssocItemId::TypeAliasId(id) => self.push_decl_assoc(id, FileSymbolKind::TypeAlias),
- }
- }
-
- fn push_decl_assoc<L, T>(&mut self, id: L, kind: FileSymbolKind)
- where
- L: Lookup<Data = AssocItemLoc<T>>,
- T: ItemTreeNode,
- <T as ItemTreeNode>::Source: HasName,
- {
- fn container_name(db: &dyn HirDatabase, container: ItemContainerId) -> Option<SmolStr> {
- match container {
- ItemContainerId::ModuleId(module_id) => {
- let module = Module::from(module_id);
- module.name(db).and_then(|name| name.as_text())
- }
- ItemContainerId::TraitId(trait_id) => {
- let trait_data = db.trait_data(trait_id);
- trait_data.name.as_text()
- }
- ItemContainerId::ImplId(_) | ItemContainerId::ExternBlockId(_) => None,
- }
- }
-
- self.push_file_symbol(|s| {
- let loc = id.lookup(s.db.upcast());
- let source = loc.source(s.db.upcast());
- let name_node = source.value.name()?;
- let container_name =
- container_name(s.db.upcast(), loc.container).or_else(|| s.current_container_name());
-
- Some(FileSymbol {
- name: name_node.text().into(),
- kind,
- container_name,
- loc: DeclarationLocation {
- hir_file_id: source.file_id,
- ptr: SyntaxNodePtr::new(source.value.syntax()),
- name_ptr: SyntaxNodePtr::new(name_node.syntax()),
- },
- })
- })
- }
-
- fn push_decl<L, T>(&mut self, id: L, kind: FileSymbolKind)
- where
- L: Lookup<Data = ItemLoc<T>>,
- T: ItemTreeNode,
- <T as ItemTreeNode>::Source: HasName,
- {
- self.push_file_symbol(|s| {
- let loc = id.lookup(s.db.upcast());
- let source = loc.source(s.db.upcast());
- let name_node = source.value.name()?;
-
- Some(FileSymbol {
- name: name_node.text().into(),
- kind,
- container_name: s.current_container_name(),
- loc: DeclarationLocation {
- hir_file_id: source.file_id,
- ptr: SyntaxNodePtr::new(source.value.syntax()),
- name_ptr: SyntaxNodePtr::new(name_node.syntax()),
- },
- })
- })
- }
-
- fn push_module(&mut self, module_id: ModuleId) {
- self.push_file_symbol(|s| {
- let def_map = module_id.def_map(s.db.upcast());
- let module_data = &def_map[module_id.local_id];
- let declaration = module_data.origin.declaration()?;
- let module = declaration.to_node(s.db.upcast());
- let name_node = module.name()?;
-
- Some(FileSymbol {
- name: name_node.text().into(),
- kind: FileSymbolKind::Module,
- container_name: s.current_container_name(),
- loc: DeclarationLocation {
- hir_file_id: declaration.file_id,
- ptr: SyntaxNodePtr::new(module.syntax()),
- name_ptr: SyntaxNodePtr::new(name_node.syntax()),
- },
- })
- })
- }
-
- fn push_decl_macro(&mut self, macro_def: MacroDef) {
- self.push_file_symbol(|s| {
- let name = macro_def.name(s.db.upcast())?.as_text()?;
- let source = macro_def.source(s.db.upcast())?;
-
- let (ptr, name_ptr) = match source.value {
- Either::Left(m) => {
- (SyntaxNodePtr::new(m.syntax()), SyntaxNodePtr::new(m.name()?.syntax()))
- }
- Either::Right(f) => {
- (SyntaxNodePtr::new(f.syntax()), SyntaxNodePtr::new(f.name()?.syntax()))
- }
- };
-
- Some(FileSymbol {
- name,
- kind: FileSymbolKind::Macro,
- container_name: s.current_container_name(),
- loc: DeclarationLocation { hir_file_id: source.file_id, name_ptr, ptr },
- })
- })
- }
-
- fn push_file_symbol(&mut self, f: impl FnOnce(&Self) -> Option<FileSymbol>) {
- if let Some(file_symbol) = f(self) {
- self.symbols.push(file_symbol);
- }
- }
-}
-
#[cfg(test)]
mod tests {
use base_db::fixture::WithFixture;
use expect_test::expect_file;
+ use hir::symbols::SymbolCollector;
use super::*;
@@ -792,11 +422,10 @@
"#,
);
- let symbols: Vec<_> = module_ids_for_crate(db.upcast(), db.test_crate())
+ let symbols: Vec<_> = Crate::from(db.test_crate())
+ .modules(&db)
.into_iter()
- .map(|module_id| {
- (module_id, SymbolCollector::collect(&db as &dyn SymbolsDatabase, module_id))
- })
+ .map(|module_id| (module_id, SymbolCollector::collect(&db, module_id)))
.collect();
expect_file!["./test_data/test_symbol_index_collection.txt"].assert_debug_eq(&symbols);
diff --git a/crates/ide_db/src/test_data/test_symbol_index_collection.txt b/crates/ide_db/src/test_data/test_symbol_index_collection.txt
index 176d7e1..cc51d85 100644
--- a/crates/ide_db/src/test_data/test_symbol_index_collection.txt
+++ b/crates/ide_db/src/test_data/test_symbol_index_collection.txt
@@ -1,11 +1,13 @@
[
(
- ModuleId {
- krate: CrateId(
- 0,
- ),
- block: None,
- local_id: Idx::<ModuleData>(0),
+ Module {
+ id: ModuleId {
+ krate: CrateId(
+ 0,
+ ),
+ block: None,
+ local_id: Idx::<ModuleData>(0),
+ },
},
[
FileSymbol {
@@ -459,12 +461,14 @@
],
),
(
- ModuleId {
- krate: CrateId(
- 0,
- ),
- block: None,
- local_id: Idx::<ModuleData>(1),
+ Module {
+ id: ModuleId {
+ krate: CrateId(
+ 0,
+ ),
+ block: None,
+ local_id: Idx::<ModuleData>(1),
+ },
},
[
FileSymbol {
@@ -492,12 +496,14 @@
],
),
(
- ModuleId {
- krate: CrateId(
- 0,
- ),
- block: None,
- local_id: Idx::<ModuleData>(2),
+ Module {
+ id: ModuleId {
+ krate: CrateId(
+ 0,
+ ),
+ block: None,
+ local_id: Idx::<ModuleData>(2),
+ },
},
[
FileSymbol {
diff --git a/crates/proc_macro_srv/src/abis/abi_1_56/mod.rs b/crates/proc_macro_srv/src/abis/abi_1_56/mod.rs
index 96b71d3..ee1c2d4 100644
--- a/crates/proc_macro_srv/src/abis/abi_1_56/mod.rs
+++ b/crates/proc_macro_srv/src/abis/abi_1_56/mod.rs
@@ -51,7 +51,7 @@
&proc_macro::bridge::server::SameThread,
rustc_server::Rustc::default(),
parsed_body,
- false,
+ true,
);
return res.map(|it| it.into_subtree()).map_err(PanicMessage::from);
}
@@ -62,7 +62,7 @@
&proc_macro::bridge::server::SameThread,
rustc_server::Rustc::default(),
parsed_body,
- false,
+ true,
);
return res.map(|it| it.into_subtree()).map_err(PanicMessage::from);
}
@@ -74,7 +74,7 @@
rustc_server::Rustc::default(),
parsed_attributes,
parsed_body,
- false,
+ true,
);
return res.map(|it| it.into_subtree()).map_err(PanicMessage::from);
}
diff --git a/crates/proc_macro_srv/src/abis/abi_1_58/mod.rs b/crates/proc_macro_srv/src/abis/abi_1_58/mod.rs
index 4421382..dd3fd8b 100644
--- a/crates/proc_macro_srv/src/abis/abi_1_58/mod.rs
+++ b/crates/proc_macro_srv/src/abis/abi_1_58/mod.rs
@@ -51,7 +51,7 @@
&proc_macro::bridge::server::SameThread,
rustc_server::Rustc::default(),
parsed_body,
- false,
+ true,
);
return res.map(|it| it.into_subtree()).map_err(PanicMessage::from);
}
@@ -62,7 +62,7 @@
&proc_macro::bridge::server::SameThread,
rustc_server::Rustc::default(),
parsed_body,
- false,
+ true,
);
return res.map(|it| it.into_subtree()).map_err(PanicMessage::from);
}
@@ -74,7 +74,7 @@
rustc_server::Rustc::default(),
parsed_attributes,
parsed_body,
- false,
+ true,
);
return res.map(|it| it.into_subtree()).map_err(PanicMessage::from);
}
diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs
index 88d86ef..1df19ff 100644
--- a/crates/rust-analyzer/src/config.rs
+++ b/crates/rust-analyzer/src/config.rs
@@ -1364,7 +1364,23 @@
.map(|(field, _ty, doc, default)| {
let name = format!("rust-analyzer.{}", field.replace("_", "."));
let doc = doc_comment_to_string(*doc);
- format!("[[{}]]{} (default: `{}`)::\n+\n--\n{}--\n", name, name, default, doc)
+ if default.contains('\n') {
+ format!(
+ r#"[[{}]]{}::
++
+--
+Default:
+----
+{}
+----
+{}
+--
+"#,
+ name, name, default, doc
+ )
+ } else {
+ format!("[[{}]]{} (default: `{}`)::\n+\n--\n{}--\n", name, name, default, doc)
+ }
})
.collect::<String>()
}
diff --git a/crates/syntax/src/ast.rs b/crates/syntax/src/ast.rs
index 4211206..5c1aed3 100644
--- a/crates/syntax/src/ast.rs
+++ b/crates/syntax/src/ast.rs
@@ -25,10 +25,7 @@
SlicePatComponents, StructKind, TypeBoundKind, VisibilityKind,
},
operators::{ArithOp, BinaryOp, CmpOp, LogicOp, Ordering, RangeOp, UnaryOp},
- token_ext::{
- CommentKind, CommentPlacement, CommentShape, FormatSpecifier, HasFormatSpecifier, IsString,
- QuoteOffsets, Radix,
- },
+ token_ext::{CommentKind, CommentPlacement, CommentShape, IsString, QuoteOffsets, Radix},
traits::{
DocCommentIter, HasArgList, HasAttrs, HasDocComments, HasGenericParams, HasLoopBody,
HasModuleItem, HasName, HasTypeBounds, HasVisibility,
diff --git a/crates/syntax/src/ast/generated/nodes.rs b/crates/syntax/src/ast/generated/nodes.rs
index 8e55afb..09c5af2 100644
--- a/crates/syntax/src/ast/generated/nodes.rs
+++ b/crates/syntax/src/ast/generated/nodes.rs
@@ -162,6 +162,7 @@
pub(crate) syntax: SyntaxNode,
}
impl ast::HasAttrs for MacroCall {}
+impl ast::HasDocComments for MacroCall {}
impl MacroCall {
pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
pub fn excl_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![!]) }
@@ -259,6 +260,7 @@
pub(crate) syntax: SyntaxNode,
}
impl ast::HasAttrs for ExternBlock {}
+impl ast::HasDocComments for ExternBlock {}
impl ExternBlock {
pub fn abi(&self) -> Option<Abi> { support::child(&self.syntax) }
pub fn extern_item_list(&self) -> Option<ExternItemList> { support::child(&self.syntax) }
@@ -270,6 +272,7 @@
}
impl ast::HasAttrs for ExternCrate {}
impl ast::HasVisibility for ExternCrate {}
+impl ast::HasDocComments for ExternCrate {}
impl ExternCrate {
pub fn extern_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![extern]) }
pub fn crate_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![crate]) }
@@ -1543,6 +1546,7 @@
Use(Use),
}
impl ast::HasAttrs for Item {}
+impl ast::HasDocComments for Item {}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum Stmt {
@@ -1597,6 +1601,7 @@
TypeAlias(TypeAlias),
}
impl ast::HasAttrs for AssocItem {}
+impl ast::HasDocComments for AssocItem {}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum ExternItem {
@@ -1606,6 +1611,7 @@
TypeAlias(TypeAlias),
}
impl ast::HasAttrs for ExternItem {}
+impl ast::HasDocComments for ExternItem {}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum GenericParam {
@@ -3902,10 +3908,9 @@
impl AstNode for AnyHasDocComments {
fn can_cast(kind: SyntaxKind) -> bool {
match kind {
- SOURCE_FILE | CONST | ENUM | FN | IMPL | MACRO_RULES | MACRO_DEF | MODULE | STATIC
- | STRUCT | TRAIT | TYPE_ALIAS | UNION | USE | RECORD_FIELD | TUPLE_FIELD | VARIANT => {
- true
- }
+ MACRO_CALL | SOURCE_FILE | CONST | ENUM | EXTERN_BLOCK | EXTERN_CRATE | FN | IMPL
+ | MACRO_RULES | MACRO_DEF | MODULE | STATIC | STRUCT | TRAIT | TYPE_ALIAS | UNION
+ | USE | RECORD_FIELD | TUPLE_FIELD | VARIANT => true,
_ => false,
}
}
diff --git a/crates/syntax/src/ast/make.rs b/crates/syntax/src/ast/make.rs
index 3575020..f80c5e3 100644
--- a/crates/syntax/src/ast/make.rs
+++ b/crates/syntax/src/ast/make.rs
@@ -188,6 +188,14 @@
ast_from_text(&format!("use {};", name_ref))
}
+pub fn path_segment_ty(type_ref: ast::Type, trait_ref: Option<ast::PathType>) -> ast::PathSegment {
+ let text = match trait_ref {
+ Some(trait_ref) => format!("fn f(x: <{} as {}>) {{}}", type_ref, trait_ref),
+ None => format!("fn f(x: <{}>) {{}}", type_ref),
+ };
+ ast_from_text(&text)
+}
+
pub fn path_segment_self() -> ast::PathSegment {
ast_from_text("use self;")
}
@@ -218,9 +226,9 @@
) -> ast::Path {
let segments = segments.into_iter().map(|it| it.syntax().clone()).join("::");
ast_from_text(&if is_abs {
- format!("use ::{};", segments)
+ format!("fn f(x: ::{}) {{}}", segments)
} else {
- format!("use {};", segments)
+ format!("fn f(x: {}) {{}}", segments)
})
}
diff --git a/crates/syntax/src/ast/node_ext.rs b/crates/syntax/src/ast/node_ext.rs
index 470e9a0..705aa5e 100644
--- a/crates/syntax/src/ast/node_ext.rs
+++ b/crates/syntax/src/ast/node_ext.rs
@@ -772,7 +772,6 @@
}
impl ast::HasAttrs for ast::AnyHasDocComments {}
-impl ast::HasDocComments for ast::Item {}
impl From<ast::Adt> for ast::Item {
fn from(it: ast::Adt) -> Self {
diff --git a/crates/syntax/src/ast/token_ext.rs b/crates/syntax/src/ast/token_ext.rs
index cc7e515..16ac35b 100644
--- a/crates/syntax/src/ast/token_ext.rs
+++ b/crates/syntax/src/ast/token_ext.rs
@@ -164,6 +164,25 @@
fn close_quote_text_range(&self) -> Option<TextRange> {
self.quote_offsets().map(|it| it.quotes.1)
}
+ fn escaped_char_ranges(
+ &self,
+ cb: &mut dyn FnMut(TextRange, Result<char, rustc_lexer::unescape::EscapeError>),
+ ) {
+ let text_range_no_quotes = match self.text_range_between_quotes() {
+ Some(it) => it,
+ None => return,
+ };
+
+ let start = self.syntax().text_range().start();
+ let text = &self.text()[text_range_no_quotes - start];
+ let offset = text_range_no_quotes.start() - start;
+
+ unescape_literal(text, Mode::Str, &mut |range, unescaped_char| {
+ let text_range =
+ TextRange::new(range.start.try_into().unwrap(), range.end.try_into().unwrap());
+ cb(text_range + offset, unescaped_char);
+ });
+ }
}
impl IsString for ast::String {}
@@ -257,366 +276,6 @@
}
}
-#[derive(Debug)]
-pub enum FormatSpecifier {
- Open,
- Close,
- Integer,
- Identifier,
- Colon,
- Fill,
- Align,
- Sign,
- NumberSign,
- Zero,
- DollarSign,
- Dot,
- Asterisk,
- QuestionMark,
-}
-
-pub trait HasFormatSpecifier: AstToken {
- fn char_ranges(
- &self,
- ) -> Option<Vec<(TextRange, Result<char, rustc_lexer::unescape::EscapeError>)>>;
-
- fn lex_format_specifier<F>(&self, mut callback: F)
- where
- F: FnMut(TextRange, FormatSpecifier),
- {
- let char_ranges = match self.char_ranges() {
- Some(char_ranges) => char_ranges,
- None => return,
- };
- let mut chars = char_ranges.iter().peekable();
-
- while let Some((range, first_char)) = chars.next() {
- match first_char {
- Ok('{') => {
- // Format specifier, see syntax at https://doc.rust-lang.org/std/fmt/index.html#syntax
- if let Some((_, Ok('{'))) = chars.peek() {
- // Escaped format specifier, `{{`
- chars.next();
- continue;
- }
-
- callback(*range, FormatSpecifier::Open);
-
- // check for integer/identifier
- match chars
- .peek()
- .and_then(|next| next.1.as_ref().ok())
- .copied()
- .unwrap_or_default()
- {
- '0'..='9' => {
- // integer
- read_integer(&mut chars, &mut callback);
- }
- c if c == '_' || c.is_alphabetic() => {
- // identifier
- read_identifier(&mut chars, &mut callback);
- }
- _ => {}
- }
-
- if let Some((_, Ok(':'))) = chars.peek() {
- skip_char_and_emit(&mut chars, FormatSpecifier::Colon, &mut callback);
-
- // check for fill/align
- let mut cloned = chars.clone().take(2);
- let first = cloned
- .next()
- .and_then(|next| next.1.as_ref().ok())
- .copied()
- .unwrap_or_default();
- let second = cloned
- .next()
- .and_then(|next| next.1.as_ref().ok())
- .copied()
- .unwrap_or_default();
- match second {
- '<' | '^' | '>' => {
- // alignment specifier, first char specifies fillment
- skip_char_and_emit(
- &mut chars,
- FormatSpecifier::Fill,
- &mut callback,
- );
- skip_char_and_emit(
- &mut chars,
- FormatSpecifier::Align,
- &mut callback,
- );
- }
- _ => match first {
- '<' | '^' | '>' => {
- skip_char_and_emit(
- &mut chars,
- FormatSpecifier::Align,
- &mut callback,
- );
- }
- _ => {}
- },
- }
-
- // check for sign
- match chars
- .peek()
- .and_then(|next| next.1.as_ref().ok())
- .copied()
- .unwrap_or_default()
- {
- '+' | '-' => {
- skip_char_and_emit(
- &mut chars,
- FormatSpecifier::Sign,
- &mut callback,
- );
- }
- _ => {}
- }
-
- // check for `#`
- if let Some((_, Ok('#'))) = chars.peek() {
- skip_char_and_emit(
- &mut chars,
- FormatSpecifier::NumberSign,
- &mut callback,
- );
- }
-
- // check for `0`
- let mut cloned = chars.clone().take(2);
- let first = cloned.next().and_then(|next| next.1.as_ref().ok()).copied();
- let second = cloned.next().and_then(|next| next.1.as_ref().ok()).copied();
-
- if first == Some('0') && second != Some('$') {
- skip_char_and_emit(&mut chars, FormatSpecifier::Zero, &mut callback);
- }
-
- // width
- match chars
- .peek()
- .and_then(|next| next.1.as_ref().ok())
- .copied()
- .unwrap_or_default()
- {
- '0'..='9' => {
- read_integer(&mut chars, &mut callback);
- if let Some((_, Ok('$'))) = chars.peek() {
- skip_char_and_emit(
- &mut chars,
- FormatSpecifier::DollarSign,
- &mut callback,
- );
- }
- }
- c if c == '_' || c.is_alphabetic() => {
- read_identifier(&mut chars, &mut callback);
-
- if chars.peek().and_then(|next| next.1.as_ref().ok()).copied()
- == Some('?')
- {
- skip_char_and_emit(
- &mut chars,
- FormatSpecifier::QuestionMark,
- &mut callback,
- );
- }
-
- // can be either width (indicated by dollar sign, or type in which case
- // the next sign has to be `}`)
- let next =
- chars.peek().and_then(|next| next.1.as_ref().ok()).copied();
-
- match next {
- Some('$') => skip_char_and_emit(
- &mut chars,
- FormatSpecifier::DollarSign,
- &mut callback,
- ),
- Some('}') => {
- skip_char_and_emit(
- &mut chars,
- FormatSpecifier::Close,
- &mut callback,
- );
- continue;
- }
- _ => continue,
- };
- }
- _ => {}
- }
-
- // precision
- if let Some((_, Ok('.'))) = chars.peek() {
- skip_char_and_emit(&mut chars, FormatSpecifier::Dot, &mut callback);
-
- match chars
- .peek()
- .and_then(|next| next.1.as_ref().ok())
- .copied()
- .unwrap_or_default()
- {
- '*' => {
- skip_char_and_emit(
- &mut chars,
- FormatSpecifier::Asterisk,
- &mut callback,
- );
- }
- '0'..='9' => {
- read_integer(&mut chars, &mut callback);
- if let Some((_, Ok('$'))) = chars.peek() {
- skip_char_and_emit(
- &mut chars,
- FormatSpecifier::DollarSign,
- &mut callback,
- );
- }
- }
- c if c == '_' || c.is_alphabetic() => {
- read_identifier(&mut chars, &mut callback);
- if chars.peek().and_then(|next| next.1.as_ref().ok()).copied()
- != Some('$')
- {
- continue;
- }
- skip_char_and_emit(
- &mut chars,
- FormatSpecifier::DollarSign,
- &mut callback,
- );
- }
- _ => {
- continue;
- }
- }
- }
-
- // type
- match chars
- .peek()
- .and_then(|next| next.1.as_ref().ok())
- .copied()
- .unwrap_or_default()
- {
- '?' => {
- skip_char_and_emit(
- &mut chars,
- FormatSpecifier::QuestionMark,
- &mut callback,
- );
- }
- c if c == '_' || c.is_alphabetic() => {
- read_identifier(&mut chars, &mut callback);
-
- if chars.peek().and_then(|next| next.1.as_ref().ok()).copied()
- == Some('?')
- {
- skip_char_and_emit(
- &mut chars,
- FormatSpecifier::QuestionMark,
- &mut callback,
- );
- }
- }
- _ => {}
- }
- }
-
- match chars.peek() {
- Some((_, Ok('}'))) => {
- skip_char_and_emit(&mut chars, FormatSpecifier::Close, &mut callback);
- }
- Some((_, _)) | None => continue,
- }
- }
- _ => {
- while let Some((_, Ok(next_char))) = chars.peek() {
- if next_char == &'{' {
- break;
- }
- chars.next();
- }
- }
- };
- }
-
- fn skip_char_and_emit<'a, I, F>(
- chars: &mut std::iter::Peekable<I>,
- emit: FormatSpecifier,
- callback: &mut F,
- ) where
- I: Iterator<Item = &'a (TextRange, Result<char, rustc_lexer::unescape::EscapeError>)>,
- F: FnMut(TextRange, FormatSpecifier),
- {
- let (range, _) = chars.next().unwrap();
- callback(*range, emit);
- }
-
- fn read_integer<'a, I, F>(chars: &mut std::iter::Peekable<I>, callback: &mut F)
- where
- I: Iterator<Item = &'a (TextRange, Result<char, rustc_lexer::unescape::EscapeError>)>,
- F: FnMut(TextRange, FormatSpecifier),
- {
- let (mut range, c) = chars.next().unwrap();
- assert!(c.as_ref().unwrap().is_ascii_digit());
- while let Some((r, Ok(next_char))) = chars.peek() {
- if next_char.is_ascii_digit() {
- chars.next();
- range = range.cover(*r);
- } else {
- break;
- }
- }
- callback(range, FormatSpecifier::Integer);
- }
-
- fn read_identifier<'a, I, F>(chars: &mut std::iter::Peekable<I>, callback: &mut F)
- where
- I: Iterator<Item = &'a (TextRange, Result<char, rustc_lexer::unescape::EscapeError>)>,
- F: FnMut(TextRange, FormatSpecifier),
- {
- let (mut range, c) = chars.next().unwrap();
- assert!(c.as_ref().unwrap().is_alphabetic() || *c.as_ref().unwrap() == '_');
- while let Some((r, Ok(next_char))) = chars.peek() {
- if *next_char == '_' || next_char.is_ascii_digit() || next_char.is_alphabetic() {
- chars.next();
- range = range.cover(*r);
- } else {
- break;
- }
- }
- callback(range, FormatSpecifier::Identifier);
- }
- }
-}
-
-impl HasFormatSpecifier for ast::String {
- fn char_ranges(
- &self,
- ) -> Option<Vec<(TextRange, Result<char, rustc_lexer::unescape::EscapeError>)>> {
- let text = self.text();
- let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
- let offset = self.text_range_between_quotes()?.start() - self.syntax().text_range().start();
-
- let mut res = Vec::with_capacity(text.len());
- unescape_literal(text, Mode::Str, &mut |range, unescaped_char| {
- res.push((
- TextRange::new(range.start.try_into().unwrap(), range.end.try_into().unwrap())
- + offset,
- unescaped_char,
- ));
- });
-
- Some(res)
- }
-}
-
impl ast::IntNumber {
pub fn radix(&self) -> Radix {
match self.text().get(..2).unwrap_or_default() {
diff --git a/crates/syntax/src/tests/sourcegen_ast.rs b/crates/syntax/src/tests/sourcegen_ast.rs
index ac92840..4a608da 100644
--- a/crates/syntax/src/tests/sourcegen_ast.rs
+++ b/crates/syntax/src/tests/sourcegen_ast.rs
@@ -793,9 +793,11 @@
"Const",
"TypeAlias",
"Impl",
+ "ExternBlock",
+ "ExternCrate",
+ "MacroCall",
"MacroRules",
"MacroDef",
- "Macro",
"Use",
];
diff --git a/docs/user/generated_config.adoc b/docs/user/generated_config.adoc
index 88dcacf..f7a533c 100644
--- a/docs/user/generated_config.adoc
+++ b/docs/user/generated_config.adoc
@@ -146,7 +146,12 @@
--
Whether to add parenthesis when completing functions.
--
-[[rust-analyzer.completion.snippets]]rust-analyzer.completion.snippets (default: `{
+[[rust-analyzer.completion.snippets]]rust-analyzer.completion.snippets::
++
+--
+Default:
+----
+{
"Arc::new": {
"postfix": "arc",
"body": "Arc::new(${receiver})",
@@ -186,10 +191,10 @@
"description": "Wrap the expression in an `Option::Some`",
"scope": "expr"
}
- }`)::
-+
---
+ }
+----
Custom completion snippets.
+
--
[[rust-analyzer.completion.postfix.enable]]rust-analyzer.completion.postfix.enable (default: `true`)::
+
diff --git a/docs/user/manual.adoc b/docs/user/manual.adoc
index 13165e8..a549604 100644
--- a/docs/user/manual.adoc
+++ b/docs/user/manual.adoc
@@ -411,7 +411,9 @@
=== Kate Text Editor
Support for the language server protocol is built into Kate through the LSP plugin, which is included by default.
-It is preconfigured to use Rls for rust sources, but allows you to use rust-analyzer through a simple settings change.
+It is preconfigured to use rust-analyzer for Rust sources since Kate 21.12.
+
+Earlier versions allow you to use rust-analyzer through a simple settings change.
In the LSP Client settings of Kate, copy the content of the third tab "default parameters" to the second tab "server configuration".
Then in the configuration replace:
[source,json]