Auto merge of #16211 - tetsuharuohzeki:update-lint, r=Veykril
Use Cargo's [workspace.lints.*] to configure clippy
This change starts using [`[workspace.lints.*]`](https://doc.rust-lang.org/cargo/reference/workspaces.html#the-lints-table), which has been stable since [Rust 1.74](https://blog.rust-lang.org/2023/11/16/Rust-1.74.0.html).
This makes the lint configuration more readable and further simplifies `cargo lint`.
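
For reference, here is a minimal sketch of how such a lints table is typically wired up; the concrete lint names below are illustrative only, not necessarily the ones this PR configures:

```toml
# Workspace root Cargo.toml: declare lint levels once for all member crates.
[workspace.lints.rust]
rust_2018_idioms = "warn"

[workspace.lints.clippy]
dbg_macro = "warn"
todo = "warn"

# Each member crate's Cargo.toml then opts in to the shared table:
[lints]
workspace = true
```

Cargo applies these levels to every opted-in crate during builds and `cargo clippy` runs, so the lint policy lives in one place instead of being repeated per crate or passed as command-line flags.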
diff --git a/Cargo.lock b/Cargo.lock
index 7310ecc..c7d110e 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -945,24 +945,24 @@
[[package]]
name = "lsp-server"
-version = "0.7.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b52dccdf3302eefab8c8a1273047f0a3c3dca4b527c8458d00c09484c8371928"
+version = "0.7.6"
dependencies = [
"crossbeam-channel",
+ "ctrlc",
"log",
+ "lsp-types",
"serde",
"serde_json",
]
[[package]]
name = "lsp-server"
-version = "0.7.5"
+version = "0.7.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "248f65b78f6db5d8e1b1604b4098a28b43d21a8eb1deeca22b1c421b276c7095"
dependencies = [
"crossbeam-channel",
- "ctrlc",
"log",
- "lsp-types",
"serde",
"serde_json",
]
@@ -1526,7 +1526,7 @@
"ide-ssr",
"itertools",
"load-cargo",
- "lsp-server 0.7.4",
+ "lsp-server 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)",
"lsp-types",
"mbe",
"mimalloc",
diff --git a/Cargo.toml b/Cargo.toml
index d40db83e..7054020 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -88,7 +88,7 @@
# In-tree crates that are published separately and follow semver. See lib/README.md
line-index = { version = "0.1.1" }
la-arena = { version = "0.3.1" }
-lsp-server = { version = "0.7.4" }
+lsp-server = { version = "0.7.6" }
# non-local crates
anyhow = "1.0.75"
diff --git a/crates/hir-expand/src/hygiene.rs b/crates/hir-expand/src/hygiene.rs
index 1c84103..5792154 100644
--- a/crates/hir-expand/src/hygiene.rs
+++ b/crates/hir-expand/src/hygiene.rs
@@ -149,15 +149,16 @@
transparency: Transparency,
) -> SyntaxContextId {
let syntax_context_data = db.lookup_intern_syntax_context(ctxt);
- let mut opaque = syntax_context_data.opaque;
- let mut opaque_and_semitransparent = syntax_context_data.opaque_and_semitransparent;
+ let mut opaque = handle_self_ref(ctxt, syntax_context_data.opaque);
+ let mut opaque_and_semitransparent =
+ handle_self_ref(ctxt, syntax_context_data.opaque_and_semitransparent);
if transparency >= Transparency::Opaque {
let parent = opaque;
+ // Unlike rustc, with salsa we can't prefetch the to be allocated ID to create cycles with
+ // salsa when interning, so we use a sentinel value that effectively means the current
+ // syntax context.
let new_opaque = SyntaxContextId::SELF_REF;
- // But we can't just grab the to be allocated ID either as that would not deduplicate
- // things!
- // So we need a new salsa store type here ...
opaque = db.intern_syntax_context(SyntaxContextData {
outer_expn: call_id,
outer_transparency: transparency,
@@ -169,6 +170,9 @@
if transparency >= Transparency::SemiTransparent {
let parent = opaque_and_semitransparent;
+ // Unlike rustc, with salsa we can't prefetch the to be allocated ID to create cycles with
+ // salsa when interning, so we use a sentinel value that effectively means the current
+ // syntax context.
let new_opaque_and_semitransparent = SyntaxContextId::SELF_REF;
opaque_and_semitransparent = db.intern_syntax_context(SyntaxContextData {
outer_expn: call_id,
diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs
index e295dd8..428cedb 100644
--- a/crates/hir-ty/src/infer.rs
+++ b/crates/hir-ty/src/infer.rs
@@ -217,6 +217,7 @@
name: Name,
/// Contains the type the field resolves to
field_with_same_name: Option<Ty>,
+ assoc_func_with_same_name: Option<AssocItemId>,
},
// FIXME: This should be emitted in body lowering
BreakOutsideOfLoop {
diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs
index a5e77a1..84954ca 100644
--- a/crates/hir-ty/src/infer/expr.rs
+++ b/crates/hir-ty/src/infer/expr.rs
@@ -1575,11 +1575,30 @@
}
None => None,
};
+
+ let assoc_func_with_same_name = method_resolution::iterate_method_candidates(
+ &canonicalized_receiver.value,
+ self.db,
+ self.table.trait_env.clone(),
+ self.get_traits_in_scope().as_ref().left_or_else(|&it| it),
+ VisibleFromModule::Filter(self.resolver.module()),
+ Some(method_name),
+ method_resolution::LookupMode::Path,
+ |_ty, item, visible| {
+ if visible {
+ Some(item)
+ } else {
+ None
+ }
+ },
+ );
+
self.result.diagnostics.push(InferenceDiagnostic::UnresolvedMethodCall {
expr: tgt_expr,
receiver: receiver_ty.clone(),
name: method_name.clone(),
field_with_same_name: field_with_same_name_exists,
+ assoc_func_with_same_name,
});
(
receiver_ty,
diff --git a/crates/hir/src/diagnostics.rs b/crates/hir/src/diagnostics.rs
index 1cb36f9..ba591e1 100644
--- a/crates/hir/src/diagnostics.rs
+++ b/crates/hir/src/diagnostics.rs
@@ -8,7 +8,7 @@
use base_db::CrateId;
use cfg::{CfgExpr, CfgOptions};
use either::Either;
-use hir_def::path::ModPath;
+use hir_def::{path::ModPath, AssocItemId};
use hir_expand::{name::Name, HirFileId, InFile};
use syntax::{ast, AstPtr, SyntaxError, SyntaxNodePtr, TextRange};
@@ -215,6 +215,7 @@
pub receiver: Type,
pub name: Name,
pub field_with_same_name: Option<Type>,
+ pub assoc_func_with_same_name: Option<AssocItemId>,
}
#[derive(Debug)]
diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs
index 8576260..087404c 100644
--- a/crates/hir/src/lib.rs
+++ b/crates/hir/src/lib.rs
@@ -1680,6 +1680,7 @@
receiver,
name,
field_with_same_name,
+ assoc_func_with_same_name,
} => {
let expr = expr_syntax(*expr);
@@ -1691,6 +1692,7 @@
field_with_same_name: field_with_same_name
.clone()
.map(|ty| Type::new(db, DefWithBodyId::from(self), ty)),
+ assoc_func_with_same_name: assoc_func_with_same_name.clone(),
}
.into(),
)
@@ -4657,6 +4659,9 @@
pub fn return_type(&self) -> Type {
self.ty.derived(self.sig.ret().clone())
}
+ pub fn sig(&self) -> &CallableSig {
+ &self.sig
+ }
}
fn closure_source(db: &dyn HirDatabase, closure: ClosureId) -> Option<ast::ClosureExpr> {
diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index f8dd81a..fdc604a 100644
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -39,8 +39,8 @@
source_analyzer::{resolve_hir_path, SourceAnalyzer},
Access, Adjust, Adjustment, AutoBorrow, BindingMode, BuiltinAttr, Callable, ConstParam, Crate,
DeriveHelper, Field, Function, HasSource, HirFileId, Impl, InFile, Label, LifetimeParam, Local,
- Macro, Module, ModuleDef, Name, OverloadedDeref, Path, ScopeDef, ToolModule, Trait, Type,
- TypeAlias, TypeParam, VariantDef,
+ Macro, Module, ModuleDef, Name, OverloadedDeref, Path, ScopeDef, Struct, ToolModule, Trait,
+ Type, TypeAlias, TypeParam, VariantDef,
};
pub enum DescendPreference {
@@ -228,6 +228,14 @@
pub fn to_module_defs(&self, file: FileId) -> impl Iterator<Item = Module> {
self.imp.to_module_def(file)
}
+
+ pub fn to_struct_def(&self, s: &ast::Struct) -> Option<Struct> {
+ self.imp.to_def(s).map(Struct::from)
+ }
+
+ pub fn to_impl_def(&self, i: &ast::Impl) -> Option<Impl> {
+ self.imp.to_def(i).map(Impl::from)
+ }
}
impl<'db> SemanticsImpl<'db> {
@@ -670,7 +678,7 @@
_ => 0,
};
// FIXME: here, the attribute's text range is used to strip away all
- // entries from the start of the attribute "list" up the the invoking
+ // entries from the start of the attribute "list" up the invoking
// attribute. But in
// ```
// mod foo {
diff --git a/crates/hir/src/symbols.rs b/crates/hir/src/symbols.rs
index ad214e1..4da0dfb 100644
--- a/crates/hir/src/symbols.rs
+++ b/crates/hir/src/symbols.rs
@@ -9,7 +9,7 @@
TraitId,
};
use hir_expand::{HirFileId, InFile};
-use hir_ty::db::HirDatabase;
+use hir_ty::{db::HirDatabase, display::HirDisplay};
use syntax::{ast::HasName, AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr};
use crate::{Module, ModuleDef, Semantics};
@@ -231,9 +231,12 @@
fn collect_from_impl(&mut self, impl_id: ImplId) {
let impl_data = self.db.impl_data(impl_id);
- for &assoc_item_id in &impl_data.items {
- self.push_assoc_item(assoc_item_id)
- }
+ let impl_name = Some(SmolStr::new(impl_data.self_ty.display(self.db).to_string()));
+ self.with_container_name(impl_name, |s| {
+ for &assoc_item_id in &impl_data.items {
+ s.push_assoc_item(assoc_item_id)
+ }
+ })
}
fn collect_from_trait(&mut self, trait_id: TraitId) {
diff --git a/crates/ide-assists/src/handlers/bool_to_enum.rs b/crates/ide-assists/src/handlers/bool_to_enum.rs
index 0f2d105..b7b00e7 100644
--- a/crates/ide-assists/src/handlers/bool_to_enum.rs
+++ b/crates/ide-assists/src/handlers/bool_to_enum.rs
@@ -16,11 +16,14 @@
edit_in_place::{AttrsOwnerEdit, Indent},
make, HasName,
},
- ted, AstNode, NodeOrToken, SyntaxKind, SyntaxNode, T,
+ AstNode, NodeOrToken, SyntaxKind, SyntaxNode, T,
};
use text_edit::TextRange;
-use crate::assist_context::{AssistContext, Assists};
+use crate::{
+ assist_context::{AssistContext, Assists},
+ utils,
+};
// Assist: bool_to_enum
//
@@ -73,7 +76,7 @@
let usages = definition.usages(&ctx.sema).all();
add_enum_def(edit, ctx, &usages, target_node, &target_module);
- replace_usages(edit, ctx, &usages, definition, &target_module);
+ replace_usages(edit, ctx, usages, definition, &target_module);
},
)
}
@@ -169,8 +172,8 @@
/// Converts an expression of type `bool` to one of the new enum type.
fn bool_expr_to_enum_expr(expr: ast::Expr) -> ast::Expr {
- let true_expr = make::expr_path(make::path_from_text("Bool::True")).clone_for_update();
- let false_expr = make::expr_path(make::path_from_text("Bool::False")).clone_for_update();
+ let true_expr = make::expr_path(make::path_from_text("Bool::True"));
+ let false_expr = make::expr_path(make::path_from_text("Bool::False"));
if let ast::Expr::Literal(literal) = &expr {
match literal.kind() {
@@ -184,7 +187,6 @@
make::tail_only_block_expr(true_expr),
Some(ast::ElseBranch::Block(make::tail_only_block_expr(false_expr))),
)
- .clone_for_update()
}
}
@@ -192,21 +194,19 @@
fn replace_usages(
edit: &mut SourceChangeBuilder,
ctx: &AssistContext<'_>,
- usages: &UsageSearchResult,
+ usages: UsageSearchResult,
target_definition: Definition,
target_module: &hir::Module,
) {
- for (file_id, references) in usages.iter() {
- edit.edit_file(*file_id);
+ for (file_id, references) in usages {
+ edit.edit_file(file_id);
- let refs_with_imports =
- augment_references_with_imports(edit, ctx, references, target_module);
+ let refs_with_imports = augment_references_with_imports(ctx, references, target_module);
refs_with_imports.into_iter().rev().for_each(
- |FileReferenceWithImport { range, old_name, new_name, import_data }| {
+ |FileReferenceWithImport { range, name, import_data }| {
// replace the usages in patterns and expressions
- if let Some(ident_pat) = old_name.syntax().ancestors().find_map(ast::IdentPat::cast)
- {
+ if let Some(ident_pat) = name.syntax().ancestors().find_map(ast::IdentPat::cast) {
cov_mark::hit!(replaces_record_pat_shorthand);
let definition = ctx.sema.to_def(&ident_pat).map(Definition::Local);
@@ -214,36 +214,35 @@
replace_usages(
edit,
ctx,
- &def.usages(&ctx.sema).all(),
+ def.usages(&ctx.sema).all(),
target_definition,
target_module,
)
}
- } else if let Some(initializer) = find_assignment_usage(&new_name) {
+ } else if let Some(initializer) = find_assignment_usage(&name) {
cov_mark::hit!(replaces_assignment);
replace_bool_expr(edit, initializer);
- } else if let Some((prefix_expr, inner_expr)) = find_negated_usage(&new_name) {
+ } else if let Some((prefix_expr, inner_expr)) = find_negated_usage(&name) {
cov_mark::hit!(replaces_negation);
edit.replace(
prefix_expr.syntax().text_range(),
format!("{} == Bool::False", inner_expr),
);
- } else if let Some((record_field, initializer)) = old_name
+ } else if let Some((record_field, initializer)) = name
.as_name_ref()
.and_then(ast::RecordExprField::for_field_name)
.and_then(|record_field| ctx.sema.resolve_record_field(&record_field))
.and_then(|(got_field, _, _)| {
- find_record_expr_usage(&new_name, got_field, target_definition)
+ find_record_expr_usage(&name, got_field, target_definition)
})
{
cov_mark::hit!(replaces_record_expr);
- let record_field = edit.make_mut(record_field);
let enum_expr = bool_expr_to_enum_expr(initializer);
- record_field.replace_expr(enum_expr);
- } else if let Some(pat) = find_record_pat_field_usage(&old_name) {
+ utils::replace_record_field_expr(ctx, edit, record_field, enum_expr);
+ } else if let Some(pat) = find_record_pat_field_usage(&name) {
match pat {
ast::Pat::IdentPat(ident_pat) => {
cov_mark::hit!(replaces_record_pat);
@@ -253,7 +252,7 @@
replace_usages(
edit,
ctx,
- &def.usages(&ctx.sema).all(),
+ def.usages(&ctx.sema).all(),
target_definition,
target_module,
)
@@ -270,40 +269,44 @@
}
_ => (),
}
- } else if let Some((ty_annotation, initializer)) = find_assoc_const_usage(&new_name)
- {
+ } else if let Some((ty_annotation, initializer)) = find_assoc_const_usage(&name) {
edit.replace(ty_annotation.syntax().text_range(), "Bool");
replace_bool_expr(edit, initializer);
- } else if let Some(receiver) = find_method_call_expr_usage(&new_name) {
+ } else if let Some(receiver) = find_method_call_expr_usage(&name) {
edit.replace(
receiver.syntax().text_range(),
format!("({} == Bool::True)", receiver),
);
- } else if new_name.syntax().ancestors().find_map(ast::UseTree::cast).is_none() {
+ } else if name.syntax().ancestors().find_map(ast::UseTree::cast).is_none() {
// for any other usage in an expression, replace it with a check that it is the true variant
- if let Some((record_field, expr)) = new_name
- .as_name_ref()
- .and_then(ast::RecordExprField::for_field_name)
- .and_then(|record_field| {
- record_field.expr().map(|expr| (record_field, expr))
- })
+ if let Some((record_field, expr)) =
+ name.as_name_ref().and_then(ast::RecordExprField::for_field_name).and_then(
+ |record_field| record_field.expr().map(|expr| (record_field, expr)),
+ )
{
- record_field.replace_expr(
+ utils::replace_record_field_expr(
+ ctx,
+ edit,
+ record_field,
make::expr_bin_op(
expr,
ast::BinaryOp::CmpOp(ast::CmpOp::Eq { negated: false }),
make::expr_path(make::path_from_text("Bool::True")),
- )
- .clone_for_update(),
+ ),
);
} else {
- edit.replace(range, format!("{} == Bool::True", new_name.text()));
+ edit.replace(range, format!("{} == Bool::True", name.text()));
}
}
// add imports across modules where needed
if let Some((import_scope, path)) = import_data {
- insert_use(&import_scope, path, &ctx.config.insert_use);
+ let scope = match import_scope.clone() {
+ ImportScope::File(it) => ImportScope::File(edit.make_mut(it)),
+ ImportScope::Module(it) => ImportScope::Module(edit.make_mut(it)),
+ ImportScope::Block(it) => ImportScope::Block(edit.make_mut(it)),
+ };
+ insert_use(&scope, path, &ctx.config.insert_use);
}
},
)
@@ -312,37 +315,31 @@
struct FileReferenceWithImport {
range: TextRange,
- old_name: ast::NameLike,
- new_name: ast::NameLike,
+ name: ast::NameLike,
import_data: Option<(ImportScope, ast::Path)>,
}
fn augment_references_with_imports(
- edit: &mut SourceChangeBuilder,
ctx: &AssistContext<'_>,
- references: &[FileReference],
+ references: Vec<FileReference>,
target_module: &hir::Module,
) -> Vec<FileReferenceWithImport> {
let mut visited_modules = FxHashSet::default();
references
- .iter()
+ .into_iter()
.filter_map(|FileReference { range, name, .. }| {
let name = name.clone().into_name_like()?;
- ctx.sema.scope(name.syntax()).map(|scope| (*range, name, scope.module()))
+ ctx.sema.scope(name.syntax()).map(|scope| (range, name, scope.module()))
})
.map(|(range, name, ref_module)| {
- let old_name = name.clone();
- let new_name = edit.make_mut(name.clone());
-
// if the referenced module is not the same as the target one and has not been seen before, add an import
let import_data = if ref_module.nearest_non_block_module(ctx.db()) != *target_module
&& !visited_modules.contains(&ref_module)
{
visited_modules.insert(ref_module);
- let import_scope =
- ImportScope::find_insert_use_container(new_name.syntax(), &ctx.sema);
+ let import_scope = ImportScope::find_insert_use_container(name.syntax(), &ctx.sema);
let path = ref_module
.find_use_path_prefixed(
ctx.sema.db,
@@ -360,7 +357,7 @@
None
};
- FileReferenceWithImport { range, old_name, new_name, import_data }
+ FileReferenceWithImport { range, name, import_data }
})
.collect()
}
@@ -405,13 +402,12 @@
let record_field = ast::RecordExprField::for_field_name(name_ref)?;
let initializer = record_field.expr()?;
- if let Definition::Field(expected_field) = target_definition {
- if got_field != expected_field {
- return None;
+ match target_definition {
+ Definition::Field(expected_field) if got_field == expected_field => {
+ Some((record_field, initializer))
}
+ _ => None,
}
-
- Some((record_field, initializer))
}
fn find_record_pat_field_usage(name: &ast::NameLike) -> Option<ast::Pat> {
@@ -466,12 +462,9 @@
let indent = IndentLevel::from_node(&insert_before);
enum_def.reindent_to(indent);
- ted::insert_all(
- ted::Position::before(&edit.make_syntax_mut(insert_before)),
- vec![
- enum_def.syntax().clone().into(),
- make::tokens::whitespace(&format!("\n\n{indent}")).into(),
- ],
+ edit.insert(
+ insert_before.text_range().start(),
+ format!("{}\n\n{indent}", enum_def.syntax().text()),
);
}
@@ -801,6 +794,78 @@
}
#[test]
+ fn local_var_init_struct_usage() {
+ check_assist(
+ bool_to_enum,
+ r#"
+struct Foo {
+ foo: bool,
+}
+
+fn main() {
+ let $0foo = true;
+ let s = Foo { foo };
+}
+"#,
+ r#"
+struct Foo {
+ foo: bool,
+}
+
+#[derive(PartialEq, Eq)]
+enum Bool { True, False }
+
+fn main() {
+ let foo = Bool::True;
+ let s = Foo { foo: foo == Bool::True };
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn local_var_init_struct_usage_in_macro() {
+ check_assist(
+ bool_to_enum,
+ r#"
+struct Struct {
+ boolean: bool,
+}
+
+macro_rules! identity {
+ ($body:expr) => {
+ $body
+ }
+}
+
+fn new() -> Struct {
+ let $0boolean = true;
+ identity![Struct { boolean }]
+}
+"#,
+ r#"
+struct Struct {
+ boolean: bool,
+}
+
+macro_rules! identity {
+ ($body:expr) => {
+ $body
+ }
+}
+
+#[derive(PartialEq, Eq)]
+enum Bool { True, False }
+
+fn new() -> Struct {
+ let boolean = Bool::True;
+ identity![Struct { boolean: boolean == Bool::True }]
+}
+"#,
+ )
+ }
+
+ #[test]
fn field_struct_basic() {
cov_mark::check!(replaces_record_expr);
check_assist(
@@ -1322,6 +1387,46 @@
}
#[test]
+ fn field_in_macro() {
+ check_assist(
+ bool_to_enum,
+ r#"
+struct Struct {
+ $0boolean: bool,
+}
+
+fn boolean(x: Struct) {
+ let Struct { boolean } = x;
+}
+
+macro_rules! identity { ($body:expr) => { $body } }
+
+fn new() -> Struct {
+ identity!(Struct { boolean: true })
+}
+"#,
+ r#"
+#[derive(PartialEq, Eq)]
+enum Bool { True, False }
+
+struct Struct {
+ boolean: Bool,
+}
+
+fn boolean(x: Struct) {
+ let Struct { boolean } = x;
+}
+
+macro_rules! identity { ($body:expr) => { $body } }
+
+fn new() -> Struct {
+ identity!(Struct { boolean: Bool::True })
+}
+"#,
+ )
+ }
+
+ #[test]
fn field_non_bool() {
cov_mark::check!(not_applicable_non_bool_field);
check_assist_not_applicable(
diff --git a/crates/ide-assists/src/handlers/extract_variable.rs b/crates/ide-assists/src/handlers/extract_variable.rs
index e7c884d..874b81d 100644
--- a/crates/ide-assists/src/handlers/extract_variable.rs
+++ b/crates/ide-assists/src/handlers/extract_variable.rs
@@ -1,12 +1,8 @@
use hir::TypeInfo;
-use stdx::format_to;
use syntax::{
- ast::{self, AstNode},
- NodeOrToken,
- SyntaxKind::{
- BLOCK_EXPR, BREAK_EXPR, CLOSURE_EXPR, COMMENT, LOOP_EXPR, MATCH_ARM, MATCH_GUARD,
- PATH_EXPR, RETURN_EXPR,
- },
+ ast::{self, edit::IndentLevel, edit_in_place::Indent, make, AstNode, HasName},
+ ted, NodeOrToken,
+ SyntaxKind::{BLOCK_EXPR, BREAK_EXPR, COMMENT, LOOP_EXPR, MATCH_GUARD, PATH_EXPR, RETURN_EXPR},
SyntaxNode,
};
@@ -66,98 +62,140 @@
.as_ref()
.map_or(false, |it| matches!(it, ast::Expr::FieldExpr(_) | ast::Expr::MethodCallExpr(_)));
- let reference_modifier = match ty.filter(|_| needs_adjust) {
- Some(receiver_type) if receiver_type.is_mutable_reference() => "&mut ",
- Some(receiver_type) if receiver_type.is_reference() => "&",
- _ => "",
- };
-
- let var_modifier = match parent {
- Some(ast::Expr::RefExpr(expr)) if expr.mut_token().is_some() => "mut ",
- _ => "",
- };
-
let anchor = Anchor::from(&to_extract)?;
- let indent = anchor.syntax().prev_sibling_or_token()?.as_token()?.clone();
let target = to_extract.syntax().text_range();
acc.add(
AssistId("extract_variable", AssistKind::RefactorExtract),
"Extract into variable",
target,
move |edit| {
- let field_shorthand =
- match to_extract.syntax().parent().and_then(ast::RecordExprField::cast) {
- Some(field) => field.name_ref(),
- None => None,
- };
+ let field_shorthand = to_extract
+ .syntax()
+ .parent()
+ .and_then(ast::RecordExprField::cast)
+ .filter(|field| field.name_ref().is_some());
- let mut buf = String::new();
+ let (var_name, expr_replace) = match field_shorthand {
+ Some(field) => (field.to_string(), field.syntax().clone()),
+ None => (
+ suggest_name::for_variable(&to_extract, &ctx.sema),
+ to_extract.syntax().clone(),
+ ),
+ };
- let var_name = match &field_shorthand {
- Some(it) => it.to_string(),
- None => suggest_name::for_variable(&to_extract, &ctx.sema),
+ let ident_pat = match parent {
+ Some(ast::Expr::RefExpr(expr)) if expr.mut_token().is_some() => {
+ make::ident_pat(false, true, make::name(&var_name))
+ }
+ _ => make::ident_pat(false, false, make::name(&var_name)),
};
- let expr_range = match &field_shorthand {
- Some(it) => it.syntax().text_range().cover(to_extract.syntax().text_range()),
- None => to_extract.syntax().text_range(),
+
+ let to_extract = match ty.as_ref().filter(|_| needs_adjust) {
+ Some(receiver_type) if receiver_type.is_mutable_reference() => {
+ make::expr_ref(to_extract, true)
+ }
+ Some(receiver_type) if receiver_type.is_reference() => {
+ make::expr_ref(to_extract, false)
+ }
+ _ => to_extract,
};
+ let expr_replace = edit.make_syntax_mut(expr_replace);
+ let let_stmt =
+ make::let_stmt(ident_pat.into(), None, Some(to_extract)).clone_for_update();
+ let name_expr = make::expr_path(make::ext::ident_path(&var_name)).clone_for_update();
+
match anchor {
- Anchor::Before(_) | Anchor::Replace(_) => {
- format_to!(buf, "let {var_modifier}{var_name} = {reference_modifier}")
- }
- Anchor::WrapInBlock(_) => {
- format_to!(buf, "{{ let {var_name} = {reference_modifier}")
- }
- };
- format_to!(buf, "{to_extract}");
+ Anchor::Before(place) => {
+ let prev_ws = place.prev_sibling_or_token().and_then(|it| it.into_token());
+ let indent_to = IndentLevel::from_node(&place);
+ let insert_place = edit.make_syntax_mut(place);
- if let Anchor::Replace(stmt) = anchor {
- cov_mark::hit!(test_extract_var_expr_stmt);
- if stmt.semicolon_token().is_none() {
- buf.push(';');
- }
- match ctx.config.snippet_cap {
- Some(cap) => {
- let snip = buf.replace(
- &format!("let {var_modifier}{var_name}"),
- &format!("let {var_modifier}$0{var_name}"),
- );
- edit.replace_snippet(cap, expr_range, snip)
- }
- None => edit.replace(expr_range, buf),
- }
- return;
- }
+ // Adjust ws to insert depending on if this is all inline or on separate lines
+ let trailing_ws = if prev_ws.is_some_and(|it| it.text().starts_with("\n")) {
+ format!("\n{indent_to}")
+ } else {
+ format!(" ")
+ };
- buf.push(';');
-
- // We want to maintain the indent level,
- // but we do not want to duplicate possible
- // extra newlines in the indent block
- let text = indent.text();
- if text.starts_with('\n') {
- buf.push('\n');
- buf.push_str(text.trim_start_matches('\n'));
- } else {
- buf.push_str(text);
- }
-
- edit.replace(expr_range, var_name.clone());
- let offset = anchor.syntax().text_range().start();
- match ctx.config.snippet_cap {
- Some(cap) => {
- let snip = buf.replace(
- &format!("let {var_modifier}{var_name}"),
- &format!("let {var_modifier}$0{var_name}"),
+ ted::insert_all_raw(
+ ted::Position::before(insert_place),
+ vec![
+ let_stmt.syntax().clone().into(),
+ make::tokens::whitespace(&trailing_ws).into(),
+ ],
);
- edit.insert_snippet(cap, offset, snip)
- }
- None => edit.insert(offset, buf),
- }
- if let Anchor::WrapInBlock(_) = anchor {
- edit.insert(anchor.syntax().text_range().end(), " }");
+ ted::replace(expr_replace, name_expr.syntax());
+
+ if let Some(cap) = ctx.config.snippet_cap {
+ if let Some(ast::Pat::IdentPat(ident_pat)) = let_stmt.pat() {
+ if let Some(name) = ident_pat.name() {
+ edit.add_tabstop_before(cap, name);
+ }
+ }
+ }
+ }
+ Anchor::Replace(stmt) => {
+ cov_mark::hit!(test_extract_var_expr_stmt);
+
+ let stmt_replace = edit.make_mut(stmt);
+ ted::replace(stmt_replace.syntax(), let_stmt.syntax());
+
+ if let Some(cap) = ctx.config.snippet_cap {
+ if let Some(ast::Pat::IdentPat(ident_pat)) = let_stmt.pat() {
+ if let Some(name) = ident_pat.name() {
+ edit.add_tabstop_before(cap, name);
+ }
+ }
+ }
+ }
+ Anchor::WrapInBlock(to_wrap) => {
+ let indent_to = to_wrap.indent_level();
+
+ let block = if to_wrap.syntax() == &expr_replace {
+ // Since `expr_replace` is the same that needs to be wrapped in a block,
+ // we can just directly replace it with a block
+ let block =
+ make::block_expr([let_stmt.into()], Some(name_expr)).clone_for_update();
+ ted::replace(expr_replace, block.syntax());
+
+ block
+ } else {
+ // `expr_replace` is a descendant of `to_wrap`, so both steps need to be
+ // handled separately, otherwise we wrap the wrong expression
+ let to_wrap = edit.make_mut(to_wrap);
+
+ // Replace the target expr first so that we don't need to find where
+ // `expr_replace` is in the wrapped `to_wrap`
+ ted::replace(expr_replace, name_expr.syntax());
+
+ // Wrap `to_wrap` in a block
+ let block = make::block_expr([let_stmt.into()], Some(to_wrap.clone()))
+ .clone_for_update();
+ ted::replace(to_wrap.syntax(), block.syntax());
+
+ block
+ };
+
+ if let Some(cap) = ctx.config.snippet_cap {
+ // Adding a tabstop to `name` requires finding the let stmt again, since
+ // the existing `let_stmt` is not actually added to the tree
+ let pat = block.statements().find_map(|stmt| {
+ let ast::Stmt::LetStmt(let_stmt) = stmt else { return None };
+ let_stmt.pat()
+ });
+
+ if let Some(ast::Pat::IdentPat(ident_pat)) = pat {
+ if let Some(name) = ident_pat.name() {
+ edit.add_tabstop_before(cap, name);
+ }
+ }
+ }
+
+ // fixup indentation of block
+ block.indent(indent_to);
+ }
}
},
)
@@ -181,7 +219,7 @@
enum Anchor {
Before(SyntaxNode),
Replace(ast::ExprStmt),
- WrapInBlock(SyntaxNode),
+ WrapInBlock(ast::Expr),
}
impl Anchor {
@@ -204,16 +242,16 @@
}
if let Some(parent) = node.parent() {
- if parent.kind() == CLOSURE_EXPR {
+ if let Some(parent) = ast::ClosureExpr::cast(parent.clone()) {
cov_mark::hit!(test_extract_var_in_closure_no_block);
- return Some(Anchor::WrapInBlock(node));
+ return parent.body().map(Anchor::WrapInBlock);
}
- if parent.kind() == MATCH_ARM {
+ if let Some(parent) = ast::MatchArm::cast(parent) {
if node.kind() == MATCH_GUARD {
cov_mark::hit!(test_extract_var_in_match_guard);
} else {
cov_mark::hit!(test_extract_var_in_match_arm_no_block);
- return Some(Anchor::WrapInBlock(node));
+ return parent.expr().map(Anchor::WrapInBlock);
}
}
}
@@ -229,13 +267,6 @@
None
})
}
-
- fn syntax(&self) -> &SyntaxNode {
- match self {
- Anchor::Before(it) | Anchor::WrapInBlock(it) => it,
- Anchor::Replace(stmt) => stmt.syntax(),
- }
- }
}
#[cfg(test)]
@@ -502,7 +533,10 @@
fn main() {
let x = true;
let tuple = match x {
- true => { let $0var_name = 2 + 2; (var_name, true) }
+ true => {
+ let $0var_name = 2 + 2;
+ (var_name, true)
+ }
_ => (0, false)
};
}
@@ -579,7 +613,10 @@
"#,
r#"
fn main() {
- let lambda = |x: u32| { let $0var_name = x * 2; var_name };
+ let lambda = |x: u32| {
+ let $0var_name = x * 2;
+ var_name
+ };
}
"#,
);
diff --git a/crates/ide-assists/src/handlers/generate_delegate_trait.rs b/crates/ide-assists/src/handlers/generate_delegate_trait.rs
index f4fa6a7..0d34502a 100644
--- a/crates/ide-assists/src/handlers/generate_delegate_trait.rs
+++ b/crates/ide-assists/src/handlers/generate_delegate_trait.rs
@@ -2,22 +2,25 @@
use crate::{
assist_context::{AssistContext, Assists},
- utils::convert_param_list_to_arg_list,
+ utils::{convert_param_list_to_arg_list, suggest_name},
};
use either::Either;
use hir::{db::HirDatabase, HasVisibility};
use ide_db::{
assists::{AssistId, GroupLabel},
path_transform::PathTransform,
+ FxHashMap, FxHashSet,
};
+use itertools::Itertools;
use syntax::{
ast::{
self,
edit::{self, AstNodeEdit},
- make, AssocItem, HasGenericParams, HasName, HasVisibility as astHasVisibility, Path,
+ make, AssocItem, GenericArgList, GenericParamList, HasGenericParams, HasName,
+ HasTypeBounds, HasVisibility as astHasVisibility, Path,
},
ted::{self, Position},
- AstNode, NodeOrToken, SyntaxKind,
+ AstNode, NodeOrToken, SmolStr, SyntaxKind,
};
// Assist: generate_delegate_trait
@@ -77,7 +80,7 @@
// }
//
// fn method_(&mut self) -> bool {
-// <A as SomeTrait>::method_( &mut self.a )
+// <A as SomeTrait>::method_(&mut self.a)
// }
// }
// ```
@@ -98,6 +101,7 @@
}
/// A utility object that represents a struct's field.
+#[derive(Debug)]
struct Field {
name: String,
ty: ast::Type,
@@ -111,44 +115,33 @@
f: Either<ast::RecordField, (ast::TupleField, ast::TupleFieldList)>,
) -> Option<Field> {
let db = ctx.sema.db;
- let name: String;
- let range: syntax::TextRange;
- let ty: ast::Type;
let module = ctx.sema.to_module_def(ctx.file_id())?;
- match f {
+ let (name, range, ty) = match f {
Either::Left(f) => {
- name = f.name()?.to_string();
- ty = f.ty()?;
- range = f.syntax().text_range();
+ let name = f.name()?.to_string();
+ (name, f.syntax().text_range(), f.ty()?)
}
Either::Right((f, l)) => {
- name = l.fields().position(|it| it == f)?.to_string();
- ty = f.ty()?;
- range = f.syntax().text_range();
+ let name = l.fields().position(|it| it == f)?.to_string();
+ (name, f.syntax().text_range(), f.ty()?)
}
};
let hir_ty = ctx.sema.resolve_type(&ty)?;
let type_impls = hir::Impl::all_for_type(db, hir_ty.clone());
let mut impls = Vec::with_capacity(type_impls.len());
- let type_param = hir_ty.as_type_param(db);
- if let Some(tp) = type_param {
+ if let Some(tp) = hir_ty.as_type_param(db) {
for tb in tp.trait_bounds(db) {
- impls.push(Delegee::Bound(BoundCase(tb)));
+ impls.push(Delegee::Bound(tb));
}
};
for imp in type_impls {
- match imp.trait_(db) {
- Some(tr) => {
- if tr.is_visible_from(db, module) {
- impls.push(Delegee::Impls(ImplCase(tr, imp)))
- }
- }
- None => (),
+ if let Some(tr) = imp.trait_(db).filter(|tr| tr.is_visible_from(db, module)) {
+ impls.push(Delegee::Impls(tr, imp))
}
}
@@ -161,19 +154,17 @@
/// actually implements the trait and the second way is when the field
/// has a bound type parameter. We handle these cases in different ways
/// hence the enum.
+#[derive(Debug)]
enum Delegee {
- Bound(BoundCase),
- Impls(ImplCase),
+ Bound(hir::Trait),
+ Impls(hir::Trait, hir::Impl),
}
-struct BoundCase(hir::Trait);
-struct ImplCase(hir::Trait, hir::Impl);
-
impl Delegee {
fn signature(&self, db: &dyn HirDatabase) -> String {
let mut s = String::new();
- let (Delegee::Bound(BoundCase(it)) | Delegee::Impls(ImplCase(it, _))) = self;
+ let (Delegee::Bound(it) | Delegee::Impls(it, _)) = self;
for m in it.module(db).path_to_root(db).iter().rev() {
if let Some(name) = m.name(db) {
@@ -200,25 +191,33 @@
pub(crate) fn delegate(&self, field: Field, acc: &mut Assists, ctx: &AssistContext<'_>) {
let db = ctx.db();
+
for delegee in &field.impls {
+ let trait_ = match delegee {
+ Delegee::Bound(b) => b,
+ Delegee::Impls(i, _) => i,
+ };
+
+ // Skip traits that have a `Self` type, as they cannot be delegated
+ //
+ // See [`test_self_ty`]
+ if has_self_type(*trait_, ctx).is_some() {
+ continue;
+ }
+
// FIXME : We can omit already implemented impl_traits
// But we don't know what the &[hir::Type] argument should look like.
-
- // let trait_ = match delegee {
- // Delegee::Bound(b) => b.0,
- // Delegee::Impls(i) => i.1,
- // };
-
// if self.hir_ty.impls_trait(db, trait_, &[]) {
// continue;
// }
let signature = delegee.signature(db);
+
let Some(delegate) = generate_impl(ctx, self, &field.ty, &field.name, delegee) else {
continue;
};
acc.add_group(
- &GroupLabel("Delegate trait impl for field...".to_owned()),
+ &GroupLabel(format!("Generate delegate impls for field `{}`", field.name)),
AssistId("generate_delegate_trait", ide_db::assists::AssistKind::Generate),
format!("Generate delegate impl `{}` for `{}`", signature, field.name),
field.range,
@@ -241,46 +240,40 @@
delegee: &Delegee,
) -> Option<ast::Impl> {
let delegate: ast::Impl;
- let source: ast::Impl;
- let genpar: Option<ast::GenericParamList>;
let db = ctx.db();
- let base_path = make::path_from_text(&field_ty.to_string().as_str());
- let s_path = make::ext::ident_path(&strukt.name.to_string());
+ let ast_strukt = &strukt.strukt;
+ let strukt_ty = make::ty_path(make::ext::ident_path(&strukt.name.to_string()));
match delegee {
Delegee::Bound(delegee) => {
- let in_file = ctx.sema.source(delegee.0.to_owned())?;
- let source: ast::Trait = in_file.value;
+ let bound_def = ctx.sema.source(delegee.to_owned())?.value;
+ let bound_params = bound_def.generic_param_list();
+ let strukt_params = ast_strukt.generic_param_list();
delegate = make::impl_trait(
- delegee.0.is_unsafe(db),
- None,
- None,
- strukt.strukt.generic_param_list(),
- None,
- delegee.0.is_auto(db),
- make::ty(&delegee.0.name(db).to_smol_str()),
- make::ty_path(s_path),
- source.where_clause(),
- strukt.strukt.where_clause(),
+ delegee.is_unsafe(db),
+ bound_params.clone(),
+ bound_params.map(|params| params.to_generic_args()),
+ strukt_params.clone(),
+ strukt_params.map(|params| params.to_generic_args()),
+ delegee.is_auto(db),
+ make::ty(&delegee.name(db).to_smol_str()),
+ strukt_ty,
+ bound_def.where_clause(),
+ ast_strukt.where_clause(),
None,
)
.clone_for_update();
- genpar = source.generic_param_list();
- let delegate_assoc_items = delegate.get_or_create_assoc_item_list();
- let gen_args: String =
- genpar.map_or_else(String::new, |params| params.to_generic_args().to_string());
-
// Goto link : https://doc.rust-lang.org/reference/paths.html#qualified-paths
let qualified_path_type = make::path_from_text(&format!(
- "<{} as {}{}>",
- base_path.to_string(),
- delegee.0.name(db).to_smol_str(),
- gen_args.to_string()
+ "<{} as {}>",
+ field_ty.to_string(),
+ delegate.trait_()?.to_string()
));
- match source.assoc_item_list() {
+ let delegate_assoc_items = delegate.get_or_create_assoc_item_list();
+ match bound_def.assoc_item_list() {
Some(ai) => {
ai.assoc_items()
.filter(|item| matches!(item, AssocItem::MacroCall(_)).not())
@@ -295,66 +288,394 @@
None => {}
};
- let target = ctx.sema.scope(strukt.strukt.syntax())?;
- let source = ctx.sema.scope(source.syntax())?;
-
- let transform =
- PathTransform::trait_impl(&target, &source, delegee.0, delegate.clone());
+ let target_scope = ctx.sema.scope(strukt.strukt.syntax())?;
+ let source_scope = ctx.sema.scope(bound_def.syntax())?;
+ let transform = PathTransform::generic_transformation(&target_scope, &source_scope);
transform.apply(&delegate.syntax());
}
- Delegee::Impls(delegee) => {
- let in_file = ctx.sema.source(delegee.1.to_owned())?;
- source = in_file.value;
+ Delegee::Impls(trait_, old_impl) => {
+ let old_impl = ctx.sema.source(old_impl.to_owned())?.value;
+
+ // `old_trait_args` contains names of generic args for trait in `old_impl`
+ let old_trait_args = old_impl
+ .trait_()?
+ .generic_arg_list()
+ .map(|l| l.generic_args().map(|arg| arg.to_string()))
+ .map_or_else(|| FxHashSet::default(), |it| it.collect());
+
+ let old_impl_params = old_impl.generic_param_list();
+
+ // Resolve conflicts with generic parameters in strukt.
+ // These generic parameters will also be used in `field_ty` and `where_clauses`,
+ // so we should substitute arguments in them as well.
+ let (renamed_strukt_params, field_ty, ty_where_clause) = if let Some(strukt_params) =
+ resolve_conflicts_for_strukt(ast_strukt, old_impl_params.as_ref())
+ {
+ let strukt_args = strukt_params.to_generic_args();
+ let field_ty =
+ subst_name_in_strukt(ctx, ast_strukt, field_ty, strukt_args.clone())?;
+ let wc = ast_strukt
+ .where_clause()
+ .and_then(|wc| Some(subst_name_in_strukt(ctx, ast_strukt, &wc, strukt_args)?));
+ (Some(strukt_params), field_ty, wc)
+ } else {
+ (None, field_ty.clone_for_update(), None)
+ };
+
+ // Some generics used in `field_ty` may be instantiated, so they are no longer
+ // `generics`. We should remove them from the generic params and use the remaining params.
+ let trait_gen_params =
+ remove_instantiated_params(&old_impl.self_ty()?, old_impl_params, &old_trait_args);
+
+ // Generate generic args applied to the current impl; this step also removes unused params
+ let args_for_impl =
+ get_args_for_impl(&old_impl, &field_ty, &trait_gen_params, &old_trait_args);
+
+ let mut trait_gen_args = old_impl.trait_()?.generic_arg_list();
+ if let Some(arg_list) = &mut trait_gen_args {
+ *arg_list = arg_list.clone_for_update();
+ transform_impl(ctx, ast_strukt, &old_impl, &args_for_impl, &arg_list.syntax())?;
+ }
+
+ let mut type_gen_args =
+ renamed_strukt_params.clone().map(|params| params.to_generic_args());
+ if let Some(type_args) = &mut type_gen_args {
+ *type_args = type_args.clone_for_update();
+ transform_impl(ctx, ast_strukt, &old_impl, &args_for_impl, &type_args.syntax())?;
+ }
+
+ let path_type = make::ty(&trait_.name(db).to_smol_str()).clone_for_update();
+ transform_impl(ctx, ast_strukt, &old_impl, &args_for_impl, &path_type.syntax())?;
+
delegate = make::impl_trait(
- delegee.0.is_unsafe(db),
- source.generic_param_list(),
- None,
- None,
- None,
- delegee.0.is_auto(db),
- make::ty(&delegee.0.name(db).to_smol_str()),
- make::ty_path(s_path),
- source.where_clause(),
- strukt.strukt.where_clause(),
+ trait_.is_unsafe(db),
+ trait_gen_params,
+ trait_gen_args,
+ renamed_strukt_params,
+ type_gen_args,
+ trait_.is_auto(db),
+ path_type,
+ strukt_ty,
+ old_impl.where_clause().map(|wc| wc.clone_for_update()),
+ ty_where_clause,
None,
)
.clone_for_update();
- genpar = source.generic_param_list();
- let delegate_assoc_items = delegate.get_or_create_assoc_item_list();
- let gen_args: String =
- genpar.map_or_else(String::new, |params| params.to_generic_args().to_string());
// Goto link : https://doc.rust-lang.org/reference/paths.html#qualified-paths
let qualified_path_type = make::path_from_text(&format!(
- "<{} as {}{}>",
- base_path.to_string().as_str(),
- delegee.0.name(db).to_smol_str(),
- gen_args.to_string().as_str()
+ "<{} as {}>",
+ field_ty.to_string(),
+ delegate.trait_()?.to_string()
));
- source
+ let delegate_assoc_items = delegate.get_or_create_assoc_item_list();
+ for item in old_impl
.get_or_create_assoc_item_list()
.assoc_items()
.filter(|item| matches!(item, AssocItem::MacroCall(_)).not())
- .for_each(|item| {
- let assoc = process_assoc_item(item, qualified_path_type.clone(), &field_name);
- if let Some(assoc) = assoc {
- delegate_assoc_items.add_item(assoc);
- }
- });
+ {
+ let assoc = process_assoc_item(
+ transform_assoc_item(ctx, ast_strukt, &old_impl, &args_for_impl, item)?,
+ qualified_path_type.clone(),
+ &field_name,
+ )?;
- let target = ctx.sema.scope(strukt.strukt.syntax())?;
- let source = ctx.sema.scope(source.syntax())?;
+ delegate_assoc_items.add_item(assoc);
+ }
- let transform =
- PathTransform::trait_impl(&target, &source, delegee.0, delegate.clone());
- transform.apply(&delegate.syntax());
+ // Remove unused where clauses
+ if let Some(wc) = delegate.where_clause() {
+ remove_useless_where_clauses(&delegate, wc)?;
+ }
}
}
Some(delegate)
}
+fn transform_assoc_item(
+ ctx: &AssistContext<'_>,
+ strukt: &ast::Struct,
+ old_impl: &ast::Impl,
+ args: &Option<GenericArgList>,
+ item: AssocItem,
+) -> Option<AssocItem> {
+ let source_scope = ctx.sema.scope(&item.syntax()).unwrap();
+ let target_scope = ctx.sema.scope(&strukt.syntax())?;
+ let hir_old_impl = ctx.sema.to_impl_def(old_impl)?;
+ let item = item.clone_for_update();
+ let transform = args.as_ref().map_or_else(
+ || PathTransform::generic_transformation(&target_scope, &source_scope),
+ |args| {
+ PathTransform::impl_transformation(
+ &target_scope,
+ &source_scope,
+ hir_old_impl,
+ args.clone(),
+ )
+ },
+ );
+ transform.apply(&item.syntax());
+ Some(item)
+}
+
+fn transform_impl(
+ ctx: &AssistContext<'_>,
+ strukt: &ast::Struct,
+ old_impl: &ast::Impl,
+ args: &Option<GenericArgList>,
+ syntax: &syntax::SyntaxNode,
+) -> Option<()> {
+ let source_scope = ctx.sema.scope(&old_impl.self_ty()?.syntax())?;
+ let target_scope = ctx.sema.scope(&strukt.syntax())?;
+ let hir_old_impl = ctx.sema.to_impl_def(old_impl)?;
+
+ let transform = args.as_ref().map_or_else(
+ || PathTransform::generic_transformation(&target_scope, &source_scope),
+ |args| {
+ PathTransform::impl_transformation(
+ &target_scope,
+ &source_scope,
+ hir_old_impl,
+ args.clone(),
+ )
+ },
+ );
+
+ transform.apply(&syntax);
+ Some(())
+}
+
+fn remove_instantiated_params(
+ self_ty: &ast::Type,
+ old_impl_params: Option<GenericParamList>,
+ old_trait_args: &FxHashSet<String>,
+) -> Option<GenericParamList> {
+ match self_ty {
+ ast::Type::PathType(path_type) => {
+ old_impl_params.and_then(|gpl| {
+ // Remove generic parameters in field_ty (which is instantiated).
+ let new_gpl = gpl.clone_for_update();
+
+ path_type
+ .path()?
+ .segments()
+ .filter_map(|seg| seg.generic_arg_list())
+ .flat_map(|it| it.generic_args())
+ // However, if the param is also used in the trait arguments, it shouldn't be removed.
+ .filter(|arg| !old_trait_args.contains(&arg.to_string()))
+ .for_each(|arg| {
+ new_gpl.remove_generic_arg(&arg);
+ });
+ (new_gpl.generic_params().count() > 0).then_some(new_gpl)
+ })
+ }
+ _ => old_impl_params,
+ }
+}
+
+fn remove_useless_where_clauses(delegate: &ast::Impl, wc: ast::WhereClause) -> Option<()> {
+ let trait_args =
+ delegate.trait_()?.generic_arg_list().map(|trait_args| trait_args.generic_args());
+ let strukt_args =
+ delegate.self_ty()?.generic_arg_list().map(|strukt_args| strukt_args.generic_args());
+ let used_generic_names = match (trait_args, strukt_args) {
+ (None, None) => None,
+ (None, Some(y)) => Some(y.map(|arg| arg.to_string()).collect::<FxHashSet<_>>()),
+ (Some(x), None) => Some(x.map(|arg| arg.to_string()).collect::<FxHashSet<_>>()),
+ (Some(x), Some(y)) => Some(x.chain(y).map(|arg| arg.to_string()).collect::<FxHashSet<_>>()),
+ };
+
+ // Keep predicates that still mention the used generics after substitution, and remove the rest
+ if let Some(used_generic_names) = used_generic_names {
+ wc.predicates()
+ .filter(|pred| {
+ pred.syntax()
+ .descendants_with_tokens()
+ .filter_map(|e| e.into_token())
+ .find(|e| {
+ e.kind() == SyntaxKind::IDENT && used_generic_names.contains(&e.to_string())
+ })
+ .is_none()
+ })
+ .for_each(|pred| {
+ wc.remove_predicate(pred);
+ });
+ } else {
+ wc.predicates().for_each(|pred| wc.remove_predicate(pred));
+ }
+
+ if wc.predicates().count() == 0 {
+ // Remove useless whitespaces
+ wc.syntax()
+ .siblings_with_tokens(syntax::Direction::Prev)
+ .skip(1)
+ .take_while(|node_or_tok| node_or_tok.kind() == SyntaxKind::WHITESPACE)
+ .for_each(|ws| ted::remove(ws));
+ wc.syntax()
+ .siblings_with_tokens(syntax::Direction::Next)
+ .skip(1)
+ .take_while(|node_or_tok| node_or_tok.kind() == SyntaxKind::WHITESPACE)
+ .for_each(|ws| ted::remove(ws));
+ ted::insert(
+ ted::Position::after(wc.syntax()),
+ NodeOrToken::Token(make::token(SyntaxKind::WHITESPACE)),
+ );
+ // Remove where clause
+ ted::remove(wc.syntax());
+ }
+
+ Some(())
+}
+
+fn get_args_for_impl(
+ old_impl: &ast::Impl,
+ field_ty: &ast::Type,
+ trait_params: &Option<GenericParamList>,
+ old_trait_args: &FxHashSet<String>,
+) -> Option<ast::GenericArgList> {
+ // Generate generic args that should be applied to the current impl
+ //
+ // For example, if we have `impl<A, B, C> Trait for B<A>`, and `b: B<T>` in `S<T>`,
+ // then the generic `A` should be renamed to `T`, while the last two generic args
+ // stay unchanged as `<B, C>`. So we apply `<T, B, C>` as the generic arguments
+ // to the impl.
+ let old_impl_params = old_impl.generic_param_list();
+ let self_ty = old_impl.self_ty();
+
+ if let (Some(old_impl_gpl), Some(self_ty)) = (old_impl_params, self_ty) {
+ // Make pair of the arguments of `field_ty` and `old_strukt_args` to
+ // get the list for substitution
+ let mut arg_substs = FxHashMap::default();
+
+ match field_ty {
+ field_ty @ ast::Type::PathType(_) => {
+ let field_args = field_ty.generic_arg_list();
+ if let (Some(field_args), Some(old_impl_args)) =
+ (field_args, self_ty.generic_arg_list())
+ {
+ field_args.generic_args().zip(old_impl_args.generic_args()).for_each(
+ |(field_arg, impl_arg)| {
+ arg_substs.entry(impl_arg.to_string()).or_insert(field_arg);
+ },
+ )
+ }
+ }
+ _ => {}
+ }
+
+ let args = old_impl_gpl
+ .to_generic_args()
+ .generic_args()
+ .map(|old_arg| {
+ arg_substs.get(&old_arg.to_string()).map_or_else(
+ || old_arg.clone(),
+ |replace_with| {
+ // The old_arg will be replaced, so it becomes redundant
+ let old_arg_name = old_arg.to_string();
+ if old_trait_args.contains(&old_arg_name) {
+ // However, we should check type bounds and where clauses on old_arg,
+ // if it has type bound, we should keep the type bound.
+ // match trait_params.and_then(|params| params.remove_generic_arg(&old_arg)) {
+ // Some(ast::GenericParam::TypeParam(ty)) => {
+ // ty.type_bound_list().and_then(|bounds| )
+ // }
+ // _ => {}
+ // }
+ if let Some(params) = trait_params {
+ params.remove_generic_arg(&old_arg);
+ }
+ }
+ replace_with.clone()
+ },
+ )
+ })
+ .collect_vec();
+ args.is_empty().not().then(|| make::generic_arg_list(args.into_iter()))
+ } else {
+ None
+ }
+}
+
+fn subst_name_in_strukt<N>(
+ ctx: &AssistContext<'_>,
+ strukt: &ast::Struct,
+ item: &N,
+ args: GenericArgList,
+) -> Option<N>
+where
+ N: ast::AstNode,
+{
+ let hir_strukt = ctx.sema.to_struct_def(strukt)?;
+ let hir_adt = hir::Adt::from(hir_strukt);
+
+ let item = item.clone_for_update();
+ let item_scope = ctx.sema.scope(item.syntax())?;
+ let transform = PathTransform::adt_transformation(&item_scope, &item_scope, hir_adt, args);
+ transform.apply(&item.syntax());
+ Some(item)
+}
+
+fn has_self_type(trait_: hir::Trait, ctx: &AssistContext<'_>) -> Option<()> {
+ let trait_source = ctx.sema.source(trait_)?.value;
+ trait_source
+ .syntax()
+ .descendants_with_tokens()
+ .filter_map(|e| e.into_token())
+ .find(|e| e.kind() == SyntaxKind::SELF_TYPE_KW)
+ .map(|_| ())
+}
+
+fn resolve_conflicts_for_strukt(
+ strukt: &ast::Struct,
+ old_impl_params: Option<&ast::GenericParamList>,
+) -> Option<ast::GenericParamList> {
+ match (strukt.generic_param_list(), old_impl_params) {
+ (Some(old_strukt_params), Some(old_impl_params)) => {
+ let params = make::generic_param_list(std::iter::empty()).clone_for_update();
+
+ for old_strukt_param in old_strukt_params.generic_params() {
+ // Get old name from `strukt`
+ let mut name = SmolStr::from(match &old_strukt_param {
+ ast::GenericParam::ConstParam(c) => c.name()?.to_string(),
+ ast::GenericParam::LifetimeParam(l) => {
+ l.lifetime()?.lifetime_ident_token()?.to_string()
+ }
+ ast::GenericParam::TypeParam(t) => t.name()?.to_string(),
+ });
+
+ // The new name must not conflict with the generics in the trait or with already renamed names.
+ name = suggest_name::for_unique_generic_name(&name, old_impl_params);
+ name = suggest_name::for_unique_generic_name(&name, ¶ms);
+ match old_strukt_param {
+ ast::GenericParam::ConstParam(c) => {
+ if let Some(const_ty) = c.ty() {
+ let const_param = make::const_param(make::name(&name), const_ty);
+ params.add_generic_param(ast::GenericParam::ConstParam(
+ const_param.clone_for_update(),
+ ));
+ }
+ }
+ p @ ast::GenericParam::LifetimeParam(_) => {
+ params.add_generic_param(p.clone_for_update());
+ }
+ ast::GenericParam::TypeParam(t) => {
+ let type_bounds = t.type_bound_list();
+ let type_param = make::type_param(make::name(&name), type_bounds);
+ params.add_generic_param(ast::GenericParam::TypeParam(
+ type_param.clone_for_update(),
+ ));
+ }
+ }
+ }
+ Some(params)
+ }
+ (Some(old_strukt_gpl), None) => Some(old_strukt_gpl),
+ _ => None,
+ }
+}
+
fn process_assoc_item(
item: syntax::ast::AssocItem,
qual_path_ty: ast::Path,
@@ -381,10 +702,14 @@
// <Base as Trait<GenArgs>>::ConstName;
// FIXME : We can't rely on `make::path_qualified` for now but it would be nice to replace the following with it.
// make::path_qualified(qual_path_ty, path_expr_segment.as_single_segment().unwrap());
- let qualpath = qualpath(qual_path_ty, path_expr_segment);
- let inner =
- make::item_const(item.visibility(), item.name()?, item.ty()?, make::expr_path(qualpath))
- .clone_for_update();
+ let qualified_path = qualified_path(qual_path_ty, path_expr_segment);
+ let inner = make::item_const(
+ item.visibility(),
+ item.name()?,
+ item.ty()?,
+ make::expr_path(qualified_path),
+ )
+ .clone_for_update();
Some(AssocItem::Const(inner))
}
@@ -395,7 +720,7 @@
base_name: &str,
) -> Option<AssocItem> {
let path_expr_segment = make::path_from_text(item.name()?.to_string().as_str());
- let qualpath = qualpath(qual_path_ty, path_expr_segment);
+ let qualified_path = qualified_path(qual_path_ty, path_expr_segment);
let call = match item.param_list() {
// Methods and funcs should be handled separately.
@@ -413,31 +738,33 @@
let param_count = l.params().count();
let args = convert_param_list_to_arg_list(l).clone_for_update();
-
+ let pos_after_l_paren = Position::after(args.l_paren_token()?);
if param_count > 0 {
// Add SelfParam and a TOKEN::COMMA
- ted::insert_all(
- Position::after(args.l_paren_token()?),
+ ted::insert_all_raw(
+ pos_after_l_paren,
vec![
NodeOrToken::Node(tail_expr_self.syntax().clone_for_update()),
- NodeOrToken::Token(make::token(SyntaxKind::WHITESPACE)),
NodeOrToken::Token(make::token(SyntaxKind::COMMA)),
+ NodeOrToken::Token(make::token(SyntaxKind::WHITESPACE)),
],
);
} else {
// Add SelfParam only
- ted::insert(
- Position::after(args.l_paren_token()?),
+ ted::insert_raw(
+ pos_after_l_paren,
NodeOrToken::Node(tail_expr_self.syntax().clone_for_update()),
);
}
- make::expr_call(make::expr_path(qualpath), args)
+ make::expr_call(make::expr_path(qualified_path), args)
}
- None => make::expr_call(make::expr_path(qualpath), convert_param_list_to_arg_list(l)),
+ None => {
+ make::expr_call(make::expr_path(qualified_path), convert_param_list_to_arg_list(l))
+ }
},
None => make::expr_call(
- make::expr_path(qualpath),
+ make::expr_path(qualified_path),
convert_param_list_to_arg_list(make::param_list(None, Vec::new())),
),
}
@@ -463,8 +790,8 @@
fn ty_assoc_item(item: syntax::ast::TypeAlias, qual_path_ty: Path) -> Option<AssocItem> {
let path_expr_segment = make::path_from_text(item.name()?.to_string().as_str());
- let qualpath = qualpath(qual_path_ty, path_expr_segment);
- let ty = make::ty_path(qualpath);
+ let qualified_path = qualified_path(qual_path_ty, path_expr_segment);
+ let ty = make::ty_path(qualified_path);
let ident = item.name()?.to_string();
let alias = make::ty_alias(
@@ -479,7 +806,7 @@
Some(AssocItem::TypeAlias(alias))
}
-fn qualpath(qual_path_ty: ast::Path, path_expr_seg: ast::Path) -> ast::Path {
+fn qualified_path(qual_path_ty: ast::Path, path_expr_seg: ast::Path) -> ast::Path {
make::path_from_text(&format!("{}::{}", qual_path_ty.to_string(), path_expr_seg.to_string()))
}
@@ -511,6 +838,29 @@
}
#[test]
+ fn test_self_ty() {
+ // trait with `Self` type cannot be delegated
+ //
+ // See the function `fn f() -> Self`.
+ // It should be `fn f() -> Base` in `Base`, and `fn f() -> S` in `S`
+ check_assist_not_applicable(
+ generate_delegate_trait,
+ r#"
+struct Base(());
+struct S(B$0ase);
+trait Trait {
+ fn f() -> Self;
+}
+impl Trait for Base {
+ fn f() -> Base {
+ Base(())
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
fn test_struct_struct_basic() {
check_assist(
generate_delegate_trait,
@@ -628,7 +978,7 @@
}
unsafe fn a_method(&self) {
- <Base as Trait>::a_method( &self.base )
+ <Base as Trait>::a_method(&self.base)
}
}
@@ -673,6 +1023,245 @@
}
#[test]
+ fn test_fields_with_generics() {
+ check_assist(
+ generate_delegate_trait,
+ r#"
+struct B<T> {
+ a: T
+}
+
+trait Trait<T> {
+ fn f(&self, a: T) -> T;
+}
+
+impl<T1, T2> Trait<T1> for B<T2> {
+ fn f(&self, a: T1) -> T1 { a }
+}
+
+struct A {}
+struct S {
+ b :$0 B<A>,
+}
+"#,
+ r#"
+struct B<T> {
+ a: T
+}
+
+trait Trait<T> {
+ fn f(&self, a: T) -> T;
+}
+
+impl<T1, T2> Trait<T1> for B<T2> {
+ fn f(&self, a: T1) -> T1 { a }
+}
+
+struct A {}
+struct S {
+ b : B<A>,
+}
+
+impl<T1> Trait<T1> for S {
+ fn f(&self, a: T1) -> T1 {
+ <B<A> as Trait<T1>>::f(&self.b, a)
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_generics_with_conflict_names() {
+ check_assist(
+ generate_delegate_trait,
+ r#"
+struct B<T> {
+ a: T
+}
+
+trait Trait<T> {
+ fn f(&self, a: T) -> T;
+}
+
+impl<T, T0> Trait<T> for B<T0> {
+ fn f(&self, a: T) -> T { a }
+}
+
+struct S<T> {
+ b : $0B<T>,
+}
+"#,
+ r#"
+struct B<T> {
+ a: T
+}
+
+trait Trait<T> {
+ fn f(&self, a: T) -> T;
+}
+
+impl<T, T0> Trait<T> for B<T0> {
+ fn f(&self, a: T) -> T { a }
+}
+
+struct S<T> {
+ b : B<T>,
+}
+
+impl<T, T1> Trait<T> for S<T1> {
+ fn f(&self, a: T) -> T {
+ <B<T1> as Trait<T>>::f(&self.b, a)
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_lifetime_with_conflict_names() {
+ check_assist(
+ generate_delegate_trait,
+ r#"
+struct B<'a, T> {
+ a: &'a T
+}
+
+trait Trait<T> {
+ fn f(&self, a: T) -> T;
+}
+
+impl<'a, T, T0> Trait<T> for B<'a, T0> {
+ fn f(&self, a: T) -> T { a }
+}
+
+struct S<'a, T> {
+ b : $0B<'a, T>,
+}
+"#,
+ r#"
+struct B<'a, T> {
+ a: &'a T
+}
+
+trait Trait<T> {
+ fn f(&self, a: T) -> T;
+}
+
+impl<'a, T, T0> Trait<T> for B<'a, T0> {
+ fn f(&self, a: T) -> T { a }
+}
+
+struct S<'a, T> {
+ b : B<'a, T>,
+}
+
+impl<'a, T, T1> Trait<T> for S<'a, T1> {
+ fn f(&self, a: T) -> T {
+ <B<'a, T1> as Trait<T>>::f(&self.b, a)
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_multiple_generics() {
+ check_assist(
+ generate_delegate_trait,
+ r#"
+struct B<T1, T2> {
+ a: T1,
+ b: T2
+}
+
+trait Trait<T> {
+ fn f(&self, a: T) -> T;
+}
+
+impl<T, T0> Trait<T> for B<T, T0> {
+ fn f(&self, a: T) -> T { a }
+}
+
+struct S<T> {
+ b :$0 B<i32, T>,
+}
+"#,
+ r#"
+struct B<T1, T2> {
+ a: T1,
+ b: T2
+}
+
+trait Trait<T> {
+ fn f(&self, a: T) -> T;
+}
+
+impl<T, T0> Trait<T> for B<T, T0> {
+ fn f(&self, a: T) -> T { a }
+}
+
+struct S<T> {
+ b : B<i32, T>,
+}
+
+impl<T1> Trait<i32> for S<T1> {
+ fn f(&self, a: i32) -> i32 {
+ <B<i32, T1> as Trait<i32>>::f(&self.b, a)
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_generics_multiplex() {
+ check_assist(
+ generate_delegate_trait,
+ r#"
+struct B<T> {
+ a: T
+}
+
+trait Trait<T> {
+ fn f(&self, a: T) -> T;
+}
+
+impl<T> Trait<T> for B<T> {
+ fn f(&self, a: T) -> T { a }
+}
+
+struct S<T> {
+ b : $0B<T>,
+}
+"#,
+ r#"
+struct B<T> {
+ a: T
+}
+
+trait Trait<T> {
+ fn f(&self, a: T) -> T;
+}
+
+impl<T> Trait<T> for B<T> {
+ fn f(&self, a: T) -> T { a }
+}
+
+struct S<T> {
+ b : B<T>,
+}
+
+impl<T0> Trait<T0> for S<T0> {
+ fn f(&self, a: T0) -> T0 {
+ <B<T0> as Trait<T0>>::f(&self.b, a)
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
fn test_complex_without_where() {
check_assist(
generate_delegate_trait,
@@ -719,7 +1308,7 @@
}
fn assoc_method(&self, p: ()) {
- <Base as Trait<'a, T, C>>::assoc_method( &self.field , p)
+ <Base as Trait<'a, T, C>>::assoc_method(&self.field, p)
}
}
@@ -789,7 +1378,7 @@
}
fn assoc_method(&self, p: ()) {
- <Base as Trait<'b, C, D>>::assoc_method( &self.field , p)
+ <Base as Trait<'b, C, D>>::assoc_method(&self.field, p)
}
}
@@ -875,7 +1464,7 @@
}
fn assoc_method(&self, p: ()) {
- <Base as Trait<'b, A, B>>::assoc_method( &self.field , p)
+ <Base as Trait<'b, A, B>>::assoc_method(&self.field, p)
}
}
@@ -924,6 +1513,132 @@
}
#[test]
+ fn test_type_bound_with_generics_1() {
+ check_assist(
+ generate_delegate_trait,
+ r#"
+trait AnotherTrait {}
+struct B<T, T1>
+where
+ T1: AnotherTrait
+{
+ a: T,
+ b: T1
+}
+
+trait Trait<T> {
+ fn f(&self, a: T) -> T;
+}
+
+impl<T, T0, T1: AnotherTrait> Trait<T> for B<T0, T1> {
+ fn f(&self, a: T) -> T { a }
+}
+
+struct S<T, T1>
+where
+ T1: AnotherTrait
+{
+ b : $0B<T, T1>,
+}"#,
+ r#"
+trait AnotherTrait {}
+struct B<T, T1>
+where
+ T1: AnotherTrait
+{
+ a: T,
+ b: T1
+}
+
+trait Trait<T> {
+ fn f(&self, a: T) -> T;
+}
+
+impl<T, T0, T1: AnotherTrait> Trait<T> for B<T0, T1> {
+ fn f(&self, a: T) -> T { a }
+}
+
+struct S<T, T1>
+where
+ T1: AnotherTrait
+{
+ b : B<T, T1>,
+}
+
+impl<T, T2, T10> Trait<T> for S<T2, T10>
+where
+ T10: AnotherTrait
+{
+ fn f(&self, a: T) -> T {
+ <B<T2, T10> as Trait<T>>::f(&self.b, a)
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_type_bound_with_generics_2() {
+ check_assist(
+ generate_delegate_trait,
+ r#"
+trait AnotherTrait {}
+struct B<T1>
+where
+ T1: AnotherTrait
+{
+ b: T1
+}
+
+trait Trait<T1> {
+ fn f(&self, a: T1) -> T1;
+}
+
+impl<T, T1: AnotherTrait> Trait<T> for B<T1> {
+ fn f(&self, a: T) -> T { a }
+}
+
+struct S<T>
+where
+ T: AnotherTrait
+{
+ b : $0B<T>,
+}"#,
+ r#"
+trait AnotherTrait {}
+struct B<T1>
+where
+ T1: AnotherTrait
+{
+ b: T1
+}
+
+trait Trait<T1> {
+ fn f(&self, a: T1) -> T1;
+}
+
+impl<T, T1: AnotherTrait> Trait<T> for B<T1> {
+ fn f(&self, a: T) -> T { a }
+}
+
+struct S<T>
+where
+ T: AnotherTrait
+{
+ b : B<T>,
+}
+
+impl<T, T0> Trait<T> for S<T0>
+where
+ T0: AnotherTrait
+{
+ fn f(&self, a: T) -> T {
+ <B<T0> as Trait<T>>::f(&self.b, a)
+ }
+}"#,
+ );
+ }
+
+ #[test]
fn test_docstring_example() {
check_assist(
generate_delegate_trait,
@@ -975,7 +1690,7 @@
}
fn method_(&mut self) -> bool {
- <A as SomeTrait>::method_( &mut self.a )
+ <A as SomeTrait>::method_(&mut self.a)
}
}
"#,
@@ -1043,7 +1758,7 @@
}
fn method_(&mut self) -> bool {
- <some_module::A as some_module::SomeTrait>::method_( &mut self.a )
+ <some_module::A as some_module::SomeTrait>::method_(&mut self.a)
}
}"#,
)
diff --git a/crates/ide-assists/src/handlers/generate_function.rs b/crates/ide-assists/src/handlers/generate_function.rs
index a113c81..5bb200e 100644
--- a/crates/ide-assists/src/handlers/generate_function.rs
+++ b/crates/ide-assists/src/handlers/generate_function.rs
@@ -8,20 +8,21 @@
famous_defs::FamousDefs,
helpers::is_editable_crate,
path_transform::PathTransform,
+ source_change::SourceChangeBuilder,
FxHashMap, FxHashSet, RootDatabase, SnippetCap,
};
+use itertools::Itertools;
use stdx::to_lower_snake_case;
use syntax::{
ast::{
- self,
- edit::{AstNodeEdit, IndentLevel},
- make, AstNode, CallExpr, HasArgList, HasGenericParams, HasModuleItem, HasTypeBounds,
+ self, edit::IndentLevel, edit_in_place::Indent, make, AstNode, CallExpr, HasArgList,
+ HasGenericParams, HasModuleItem, HasTypeBounds,
},
- SyntaxKind, SyntaxNode, TextRange, TextSize,
+ ted, SyntaxKind, SyntaxNode, TextRange, T,
};
use crate::{
- utils::{convert_reference_type, find_struct_impl, render_snippet, Cursor},
+ utils::{convert_reference_type, find_struct_impl},
AssistContext, AssistId, AssistKind, Assists,
};
@@ -65,7 +66,7 @@
}
let fn_name = &*name_ref.text();
- let TargetInfo { target_module, adt_name, target, file, insert_offset } =
+ let TargetInfo { target_module, adt_name, target, file } =
fn_target_info(ctx, path, &call, fn_name)?;
if let Some(m) = target_module {
@@ -77,16 +78,7 @@
let function_builder = FunctionBuilder::from_call(ctx, &call, fn_name, target_module, target)?;
let text_range = call.syntax().text_range();
let label = format!("Generate {} function", function_builder.fn_name);
- add_func_to_accumulator(
- acc,
- ctx,
- text_range,
- function_builder,
- insert_offset,
- file,
- adt_name,
- label,
- )
+ add_func_to_accumulator(acc, ctx, text_range, function_builder, file, adt_name, label)
}
struct TargetInfo {
@@ -94,7 +86,6 @@
adt_name: Option<hir::Name>,
target: GeneratedFunctionTarget,
file: FileId,
- insert_offset: TextSize,
}
impl TargetInfo {
@@ -103,9 +94,8 @@
adt_name: Option<hir::Name>,
target: GeneratedFunctionTarget,
file: FileId,
- insert_offset: TextSize,
) -> Self {
- Self { target_module, adt_name, target, file, insert_offset }
+ Self { target_module, adt_name, target, file }
}
}
@@ -156,7 +146,7 @@
}
let (impl_, file) = get_adt_source(ctx, &adt, fn_name.text().as_str())?;
- let (target, insert_offset) = get_method_target(ctx, &impl_, &adt)?;
+ let target = get_method_target(ctx, &impl_, &adt)?;
let function_builder = FunctionBuilder::from_method_call(
ctx,
@@ -169,16 +159,7 @@
let text_range = call.syntax().text_range();
let adt_name = if impl_.is_none() { Some(adt.name(ctx.sema.db)) } else { None };
let label = format!("Generate {} method", function_builder.fn_name);
- add_func_to_accumulator(
- acc,
- ctx,
- text_range,
- function_builder,
- insert_offset,
- file,
- adt_name,
- label,
- )
+ add_func_to_accumulator(acc, ctx, text_range, function_builder, file, adt_name, label)
}
fn add_func_to_accumulator(
@@ -186,23 +167,28 @@
ctx: &AssistContext<'_>,
text_range: TextRange,
function_builder: FunctionBuilder,
- insert_offset: TextSize,
file: FileId,
adt_name: Option<hir::Name>,
label: String,
) -> Option<()> {
- acc.add(AssistId("generate_function", AssistKind::Generate), label, text_range, |builder| {
- let indent = IndentLevel::from_node(function_builder.target.syntax());
- let function_template = function_builder.render(adt_name.is_some());
- let mut func = function_template.to_string(ctx.config.snippet_cap);
+ acc.add(AssistId("generate_function", AssistKind::Generate), label, text_range, |edit| {
+ edit.edit_file(file);
+
+ let target = function_builder.target.clone();
+ let function_template = function_builder.render();
+ let func = function_template.to_ast(ctx.config.snippet_cap, edit);
+
if let Some(name) = adt_name {
+ let name = make::ty_path(make::ext::ident_path(&format!("{}", name.display(ctx.db()))));
+
// FIXME: adt may have generic params.
- func = format!("\n{indent}impl {} {{\n{func}\n{indent}}}", name.display(ctx.db()));
- }
- builder.edit_file(file);
- match ctx.config.snippet_cap {
- Some(cap) => builder.insert_snippet(cap, insert_offset, func),
- None => builder.insert(insert_offset, func),
+ let impl_ = make::impl_(None, None, name, None, None).clone_for_update();
+
+ func.indent(IndentLevel(1));
+ impl_.get_or_create_assoc_item_list().add_item(func.into());
+ target.insert_impl_at(edit, impl_);
+ } else {
+ target.insert_fn_at(edit, func);
}
})
}
@@ -220,36 +206,33 @@
}
struct FunctionTemplate {
- leading_ws: String,
fn_def: ast::Fn,
ret_type: Option<ast::RetType>,
should_focus_return_type: bool,
- trailing_ws: String,
tail_expr: ast::Expr,
}
impl FunctionTemplate {
- fn to_string(&self, cap: Option<SnippetCap>) -> String {
- let Self { leading_ws, fn_def, ret_type, should_focus_return_type, trailing_ws, tail_expr } =
- self;
+ fn to_ast(&self, cap: Option<SnippetCap>, edit: &mut SourceChangeBuilder) -> ast::Fn {
+ let Self { fn_def, ret_type, should_focus_return_type, tail_expr } = self;
- let f = match cap {
- Some(cap) => {
- let cursor = if *should_focus_return_type {
- // Focus the return type if there is one
- match ret_type {
- Some(ret_type) => ret_type.syntax(),
- None => tail_expr.syntax(),
+ if let Some(cap) = cap {
+ if *should_focus_return_type {
+ // Focus the return type if there is one
+ match ret_type {
+ Some(ret_type) => {
+ edit.add_placeholder_snippet(cap, ret_type.clone());
}
- } else {
- tail_expr.syntax()
- };
- render_snippet(cap, fn_def.syntax(), Cursor::Replace(cursor))
+ None => {
+ edit.add_placeholder_snippet(cap, tail_expr.clone());
+ }
+ }
+ } else {
+ edit.add_placeholder_snippet(cap, tail_expr.clone());
}
- None => fn_def.to_string(),
- };
+ }
- format!("{leading_ws}{f}{trailing_ws}")
+ fn_def.clone()
}
}
@@ -356,7 +339,7 @@
})
}
- fn render(self, is_method: bool) -> FunctionTemplate {
+ fn render(self) -> FunctionTemplate {
let placeholder_expr = make::ext::expr_todo();
let fn_body = make::block_expr(vec![], Some(placeholder_expr));
let visibility = match self.visibility {
@@ -364,7 +347,7 @@
Visibility::Crate => Some(make::visibility_pub_crate()),
Visibility::Pub => Some(make::visibility_pub()),
};
- let mut fn_def = make::fn_(
+ let fn_def = make::fn_(
visibility,
self.fn_name,
self.generic_param_list,
@@ -375,34 +358,10 @@
self.is_async,
false, // FIXME : const and unsafe are not handled yet.
false,
- );
- let leading_ws;
- let trailing_ws;
-
- match self.target {
- GeneratedFunctionTarget::BehindItem(it) => {
- let mut indent = IndentLevel::from_node(&it);
- if is_method {
- indent = indent + 1;
- leading_ws = format!("{indent}");
- } else {
- leading_ws = format!("\n\n{indent}");
- }
-
- fn_def = fn_def.indent(indent);
- trailing_ws = String::new();
- }
- GeneratedFunctionTarget::InEmptyItemList(it) => {
- let indent = IndentLevel::from_node(&it);
- let leading_indent = indent + 1;
- leading_ws = format!("\n{leading_indent}");
- fn_def = fn_def.indent(leading_indent);
- trailing_ws = format!("\n{indent}");
- }
- };
+ )
+ .clone_for_update();
FunctionTemplate {
- leading_ws,
ret_type: fn_def.ret_type(),
// PANIC: we guarantee we always create a function body with a tail expr
tail_expr: fn_def
@@ -412,7 +371,6 @@
.expect("function body should have a tail expression"),
should_focus_return_type: self.should_focus_return_type,
fn_def,
- trailing_ws,
}
}
}
@@ -456,40 +414,37 @@
target_module: Option<Module>,
call: CallExpr,
) -> Option<TargetInfo> {
- let (target, file, insert_offset) = get_fn_target(ctx, target_module, call)?;
- Some(TargetInfo::new(target_module, None, target, file, insert_offset))
+ let (target, file) = get_fn_target(ctx, target_module, call)?;
+ Some(TargetInfo::new(target_module, None, target, file))
}
fn get_fn_target(
ctx: &AssistContext<'_>,
target_module: Option<Module>,
call: CallExpr,
-) -> Option<(GeneratedFunctionTarget, FileId, TextSize)> {
+) -> Option<(GeneratedFunctionTarget, FileId)> {
let mut file = ctx.file_id();
let target = match target_module {
Some(target_module) => {
- let module_source = target_module.definition_source(ctx.db());
- let (in_file, target) = next_space_for_fn_in_module(ctx.sema.db, &module_source)?;
+ let (in_file, target) = next_space_for_fn_in_module(ctx.db(), target_module);
file = in_file;
target
}
None => next_space_for_fn_after_call_site(ast::CallableExpr::Call(call))?,
};
- Some((target.clone(), file, get_insert_offset(&target)))
+ Some((target.clone(), file))
}
fn get_method_target(
ctx: &AssistContext<'_>,
impl_: &Option<ast::Impl>,
adt: &Adt,
-) -> Option<(GeneratedFunctionTarget, TextSize)> {
+) -> Option<GeneratedFunctionTarget> {
let target = match impl_ {
- Some(impl_) => next_space_for_fn_in_impl(impl_)?,
- None => {
- GeneratedFunctionTarget::BehindItem(adt.source(ctx.sema.db)?.syntax().value.clone())
- }
+ Some(impl_) => GeneratedFunctionTarget::InImpl(impl_.clone()),
+ None => GeneratedFunctionTarget::AfterItem(adt.source(ctx.sema.db)?.syntax().value.clone()),
};
- Some((target.clone(), get_insert_offset(&target)))
+ Some(target)
}
fn assoc_fn_target_info(
@@ -505,36 +460,120 @@
return None;
}
let (impl_, file) = get_adt_source(ctx, &adt, fn_name)?;
- let (target, insert_offset) = get_method_target(ctx, &impl_, &adt)?;
+ let target = get_method_target(ctx, &impl_, &adt)?;
let adt_name = if impl_.is_none() { Some(adt.name(ctx.sema.db)) } else { None };
- Some(TargetInfo::new(target_module, adt_name, target, file, insert_offset))
-}
-
-fn get_insert_offset(target: &GeneratedFunctionTarget) -> TextSize {
- match target {
- GeneratedFunctionTarget::BehindItem(it) => it.text_range().end(),
- GeneratedFunctionTarget::InEmptyItemList(it) => it.text_range().start() + TextSize::of('{'),
- }
+ Some(TargetInfo::new(target_module, adt_name, target, file))
}
#[derive(Clone)]
enum GeneratedFunctionTarget {
- BehindItem(SyntaxNode),
+ AfterItem(SyntaxNode),
InEmptyItemList(SyntaxNode),
+ InImpl(ast::Impl),
}
impl GeneratedFunctionTarget {
fn syntax(&self) -> &SyntaxNode {
match self {
- GeneratedFunctionTarget::BehindItem(it) => it,
+ GeneratedFunctionTarget::AfterItem(it) => it,
GeneratedFunctionTarget::InEmptyItemList(it) => it,
+ GeneratedFunctionTarget::InImpl(it) => it.syntax(),
}
}
fn parent(&self) -> SyntaxNode {
match self {
- GeneratedFunctionTarget::BehindItem(it) => it.parent().expect("item without parent"),
+ GeneratedFunctionTarget::AfterItem(it) => it.parent().expect("item without parent"),
GeneratedFunctionTarget::InEmptyItemList(it) => it.clone(),
+ GeneratedFunctionTarget::InImpl(it) => it.syntax().clone(),
+ }
+ }
+
+ fn insert_impl_at(&self, edit: &mut SourceChangeBuilder, impl_: ast::Impl) {
+ match self {
+ GeneratedFunctionTarget::AfterItem(item) => {
+ let item = edit.make_syntax_mut(item.clone());
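+ // Anchor the new impl right after the item; if the item has no parent, fall back to inserting it as the item's first child.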
+ let position = if item.parent().is_some() {
+ ted::Position::after(&item)
+ } else {
+ ted::Position::first_child_of(&item)
+ };
+
+ let indent = IndentLevel::from_node(&item);
+ let leading_ws = make::tokens::whitespace(&format!("\n{indent}"));
+ impl_.indent(indent);
+
+ ted::insert_all(position, vec![leading_ws.into(), impl_.syntax().clone().into()]);
+ }
+ GeneratedFunctionTarget::InEmptyItemList(item_list) => {
+ let item_list = edit.make_syntax_mut(item_list.clone());
+ let insert_after =
+ item_list.children_with_tokens().find_or_first(|child| child.kind() == T!['{']);
+ let position = match insert_after {
+ Some(child) => ted::Position::after(child),
+ None => ted::Position::first_child_of(&item_list),
+ };
+
+ let indent = IndentLevel::from_node(&item_list);
+ let leading_indent = indent + 1;
+ let leading_ws = make::tokens::whitespace(&format!("\n{leading_indent}"));
+ impl_.indent(indent);
+
+ ted::insert_all(position, vec![leading_ws.into(), impl_.syntax().clone().into()]);
+ }
+ GeneratedFunctionTarget::InImpl(_) => {
+ unreachable!("can't insert an impl inside an impl")
+ }
+ }
+ }
+
+ fn insert_fn_at(&self, edit: &mut SourceChangeBuilder, func: ast::Fn) {
+ match self {
+ GeneratedFunctionTarget::AfterItem(item) => {
+ let item = edit.make_syntax_mut(item.clone());
+ let position = if item.parent().is_some() {
+ ted::Position::after(&item)
+ } else {
+ ted::Position::first_child_of(&item)
+ };
+
+ let indent = IndentLevel::from_node(&item);
+ let leading_ws = make::tokens::whitespace(&format!("\n\n{indent}"));
+ func.indent(indent);
+
+ ted::insert_all_raw(
+ position,
+ vec![leading_ws.into(), func.syntax().clone().into()],
+ );
+ }
+ GeneratedFunctionTarget::InEmptyItemList(item_list) => {
+ let item_list = edit.make_syntax_mut(item_list.clone());
+ let insert_after =
+ item_list.children_with_tokens().find_or_first(|child| child.kind() == T!['{']);
+ let position = match insert_after {
+ Some(child) => ted::Position::after(child),
+ None => ted::Position::first_child_of(&item_list),
+ };
+
+ let indent = IndentLevel::from_node(&item_list);
+ let leading_indent = indent + 1;
+ let leading_ws = make::tokens::whitespace(&format!("\n{leading_indent}"));
+ let trailing_ws = make::tokens::whitespace(&format!("\n{indent}"));
+ func.indent(leading_indent);
+
+ ted::insert_all(
+ position,
+ vec![leading_ws.into(), func.syntax().clone().into(), trailing_ws.into()],
+ );
+ }
+ GeneratedFunctionTarget::InImpl(impl_) => {
+ let impl_ = edit.make_mut(impl_.clone());
+
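+ // Indent the generated function one level deeper than the impl and append it to the impl's (possibly newly created) associated item list.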
+ let leading_indent = impl_.indent_level() + 1;
+ func.indent(leading_indent);
+
+ impl_.get_or_create_assoc_item_list().add_item(func.into());
+ }
}
}
}
@@ -1026,43 +1065,40 @@
}
last_ancestor = Some(next_ancestor);
}
- last_ancestor.map(GeneratedFunctionTarget::BehindItem)
+ last_ancestor.map(GeneratedFunctionTarget::AfterItem)
}
fn next_space_for_fn_in_module(
- db: &dyn hir::db::ExpandDatabase,
- module_source: &hir::InFile<hir::ModuleSource>,
-) -> Option<(FileId, GeneratedFunctionTarget)> {
- let file = module_source.file_id.original_file(db);
+ db: &dyn hir::db::HirDatabase,
+ target_module: hir::Module,
+) -> (FileId, GeneratedFunctionTarget) {
+ let module_source = target_module.definition_source(db);
+ let file = module_source.file_id.original_file(db.upcast());
let assist_item = match &module_source.value {
hir::ModuleSource::SourceFile(it) => match it.items().last() {
- Some(last_item) => GeneratedFunctionTarget::BehindItem(last_item.syntax().clone()),
- None => GeneratedFunctionTarget::BehindItem(it.syntax().clone()),
+ Some(last_item) => GeneratedFunctionTarget::AfterItem(last_item.syntax().clone()),
+ None => GeneratedFunctionTarget::AfterItem(it.syntax().clone()),
},
hir::ModuleSource::Module(it) => match it.item_list().and_then(|it| it.items().last()) {
- Some(last_item) => GeneratedFunctionTarget::BehindItem(last_item.syntax().clone()),
- None => GeneratedFunctionTarget::InEmptyItemList(it.item_list()?.syntax().clone()),
+ Some(last_item) => GeneratedFunctionTarget::AfterItem(last_item.syntax().clone()),
+ None => {
+ let item_list =
+ it.item_list().expect("module definition source should have an item list");
+ GeneratedFunctionTarget::InEmptyItemList(item_list.syntax().clone())
+ }
},
hir::ModuleSource::BlockExpr(it) => {
if let Some(last_item) =
it.statements().take_while(|stmt| matches!(stmt, ast::Stmt::Item(_))).last()
{
- GeneratedFunctionTarget::BehindItem(last_item.syntax().clone())
+ GeneratedFunctionTarget::AfterItem(last_item.syntax().clone())
} else {
GeneratedFunctionTarget::InEmptyItemList(it.syntax().clone())
}
}
};
- Some((file, assist_item))
-}
-fn next_space_for_fn_in_impl(impl_: &ast::Impl) -> Option<GeneratedFunctionTarget> {
- let assoc_item_list = impl_.assoc_item_list()?;
- if let Some(last_item) = assoc_item_list.assoc_items().last() {
- Some(GeneratedFunctionTarget::BehindItem(last_item.syntax().clone()))
- } else {
- Some(GeneratedFunctionTarget::InEmptyItemList(assoc_item_list.syntax().clone()))
- }
+ (file, assist_item)
}
#[derive(Clone, Copy)]
diff --git a/crates/ide-assists/src/handlers/inline_call.rs b/crates/ide-assists/src/handlers/inline_call.rs
index 5b9cc5f..2eb7089 100644
--- a/crates/ide-assists/src/handlers/inline_call.rs
+++ b/crates/ide-assists/src/handlers/inline_call.rs
@@ -315,17 +315,6 @@
} else {
fn_body.clone_for_update()
};
- if let Some(imp) = body.syntax().ancestors().find_map(ast::Impl::cast) {
- if !node.syntax().ancestors().any(|anc| &anc == imp.syntax()) {
- if let Some(t) = imp.self_ty() {
- body.syntax()
- .descendants_with_tokens()
- .filter_map(NodeOrToken::into_token)
- .filter(|tok| tok.kind() == SyntaxKind::SELF_TYPE_KW)
- .for_each(|tok| ted::replace(tok, t.syntax()));
- }
- }
- }
let usages_for_locals = |local| {
Definition::Local(local)
.usages(sema)
@@ -381,6 +370,27 @@
}
}
+ // This code must run after the last use of `usages_for_locals`, because
+ // `ted::replace` changes offsets in the syntax tree, which would make the
+ // collected `FileReference`s incorrect.
+ if let Some(imp) =
+ sema.ancestors_with_macros(fn_body.syntax().clone()).find_map(ast::Impl::cast)
+ {
+ if !node.syntax().ancestors().any(|anc| &anc == imp.syntax()) {
+ if let Some(t) = imp.self_ty() {
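+ // Replace `Self` tokens one at a time: each `ted::replace` call needs its own freshly cloned, mutable copy of the impl's self type.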
+ while let Some(self_tok) = body
+ .syntax()
+ .descendants_with_tokens()
+ .filter_map(NodeOrToken::into_token)
+ .find(|tok| tok.kind() == SyntaxKind::SELF_TYPE_KW)
+ {
+ let replace_with = t.clone_subtree().syntax().clone_for_update();
+ ted::replace(self_tok, replace_with);
+ }
+ }
+ }
+ }
+
let mut func_let_vars: BTreeSet<String> = BTreeSet::new();
// grab all of the local variable declarations in the function
@@ -1510,4 +1520,106 @@
"#,
);
}
+
+ #[test]
+ fn inline_call_with_multiple_self_types_eq() {
+ check_assist(
+ inline_call,
+ r#"
+#[derive(PartialEq, Eq)]
+enum Enum {
+ A,
+ B,
+}
+
+impl Enum {
+ fn a_or_b_eq(&self) -> bool {
+ self == &Self::A || self == &Self::B
+ }
+}
+
+fn a() -> bool {
+ Enum::A.$0a_or_b_eq()
+}
+"#,
+ r#"
+#[derive(PartialEq, Eq)]
+enum Enum {
+ A,
+ B,
+}
+
+impl Enum {
+ fn a_or_b_eq(&self) -> bool {
+ self == &Self::A || self == &Self::B
+ }
+}
+
+fn a() -> bool {
+ {
+ let ref this = Enum::A;
+ this == &Enum::A || this == &Enum::B
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn inline_call_with_self_type_in_macros() {
+ check_assist(
+ inline_call,
+ r#"
+trait Trait<T1> {
+ fn f(a: T1) -> Self;
+}
+
+macro_rules! impl_from {
+ ($t: ty) => {
+ impl Trait<$t> for $t {
+ fn f(a: $t) -> Self {
+ a as Self
+ }
+ }
+ };
+}
+
+struct A {}
+
+impl_from!(A);
+
+fn main() {
+ let a: A = A{};
+ let b = <A as Trait<A>>::$0f(a);
+}
+"#,
+ r#"
+trait Trait<T1> {
+ fn f(a: T1) -> Self;
+}
+
+macro_rules! impl_from {
+ ($t: ty) => {
+ impl Trait<$t> for $t {
+ fn f(a: $t) -> Self {
+ a as Self
+ }
+ }
+ };
+}
+
+struct A {}
+
+impl_from!(A);
+
+fn main() {
+ let a: A = A{};
+ let b = {
+ let a = a;
+ a as A
+ };
+}
+"#,
+ )
+ }
}
diff --git a/crates/ide-assists/src/handlers/introduce_named_generic.rs b/crates/ide-assists/src/handlers/introduce_named_generic.rs
index b0d35c0..b1daa78 100644
--- a/crates/ide-assists/src/handlers/introduce_named_generic.rs
+++ b/crates/ide-assists/src/handlers/introduce_named_generic.rs
@@ -18,7 +18,7 @@
// ```
pub(crate) fn introduce_named_generic(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let impl_trait_type = ctx.find_node_at_offset::<ast::ImplTraitType>()?;
- let param = impl_trait_type.syntax().parent().and_then(ast::Param::cast)?;
+ let param = impl_trait_type.syntax().ancestors().find_map(|node| ast::Param::cast(node))?;
let fn_ = param.syntax().ancestors().find_map(ast::Fn::cast)?;
let type_bound_list = impl_trait_type.type_bound_list()?;
@@ -31,15 +31,16 @@
|edit| {
let impl_trait_type = edit.make_mut(impl_trait_type);
let fn_ = edit.make_mut(fn_);
-
- let type_param_name = suggest_name::for_generic_parameter(&impl_trait_type);
+ let fn_generic_param_list = fn_.get_or_create_generic_param_list();
+ let type_param_name =
+ suggest_name::for_impl_trait_as_generic(&impl_trait_type, &fn_generic_param_list);
let type_param = make::type_param(make::name(&type_param_name), Some(type_bound_list))
.clone_for_update();
let new_ty = make::ty(&type_param_name).clone_for_update();
ted::replace(impl_trait_type.syntax(), new_ty.syntax());
- fn_.get_or_create_generic_param_list().add_generic_param(type_param.into());
+ fn_generic_param_list.add_generic_param(type_param.into());
if let Some(cap) = ctx.config.snippet_cap {
if let Some(generic_param) =
@@ -111,12 +112,19 @@
#[test]
fn replace_impl_trait_with_exist_generic_letter() {
- // FIXME: This is wrong, we should pick a different name if the one we
- // want is already bound.
check_assist(
introduce_named_generic,
r#"fn foo<B>(bar: $0impl Bar) {}"#,
- r#"fn foo<B, $0B: Bar>(bar: B) {}"#,
+ r#"fn foo<B, $0B0: Bar>(bar: B0) {}"#,
+ );
+ }
+
+ #[test]
+ fn replace_impl_trait_with_more_exist_generic_letter() {
+ check_assist(
+ introduce_named_generic,
+ r#"fn foo<B, B0, B1, B3>(bar: $0impl Bar) {}"#,
+ r#"fn foo<B, B0, B1, B3, $0B2: Bar>(bar: B2) {}"#,
);
}
@@ -149,4 +157,22 @@
r#"fn foo<$0F: Foo + Bar>(bar: F) {}"#,
);
}
+
+ #[test]
+ fn replace_impl_with_mut() {
+ check_assist(
+ introduce_named_generic,
+ r#"fn f(iter: &mut $0impl Iterator<Item = i32>) {}"#,
+ r#"fn f<$0I: Iterator<Item = i32>>(iter: &mut I) {}"#,
+ );
+ }
+
+ #[test]
+ fn replace_impl_inside() {
+ check_assist(
+ introduce_named_generic,
+ r#"fn f(x: &mut Vec<$0impl Iterator<Item = i32>>) {}"#,
+ r#"fn f<$0I: Iterator<Item = i32>>(x: &mut Vec<I>) {}"#,
+ );
+ }
}
diff --git a/crates/ide-assists/src/handlers/promote_local_to_const.rs b/crates/ide-assists/src/handlers/promote_local_to_const.rs
index 6ed9bd8..67fea77 100644
--- a/crates/ide-assists/src/handlers/promote_local_to_const.rs
+++ b/crates/ide-assists/src/handlers/promote_local_to_const.rs
@@ -11,7 +11,10 @@
ted, AstNode, WalkEvent,
};
-use crate::assist_context::{AssistContext, Assists};
+use crate::{
+ assist_context::{AssistContext, Assists},
+ utils,
+};
// Assist: promote_local_to_const
//
@@ -79,15 +82,13 @@
let name_ref = make::name_ref(&name);
for usage in usages {
- let Some(usage) = usage.name.as_name_ref().cloned() else { continue };
- if let Some(record_field) = ast::RecordExprField::for_name_ref(&usage) {
- let record_field = edit.make_mut(record_field);
- let name_expr =
- make::expr_path(make::path_from_text(&name)).clone_for_update();
- record_field.replace_expr(name_expr);
+ let Some(usage_name) = usage.name.as_name_ref().cloned() else { continue };
+ if let Some(record_field) = ast::RecordExprField::for_name_ref(&usage_name) {
+ let name_expr = make::expr_path(make::path_from_text(&name));
+ utils::replace_record_field_expr(ctx, edit, record_field, name_expr);
} else {
- let usage = edit.make_mut(usage);
- ted::replace(usage.syntax(), name_ref.clone_for_update().syntax());
+ let usage_range = usage.range;
+ edit.replace(usage_range, name_ref.syntax().text());
}
}
}
@@ -213,6 +214,76 @@
}
#[test]
+ fn usage_in_macro() {
+ check_assist(
+ promote_local_to_const,
+ r"
+macro_rules! identity {
+ ($body:expr) => {
+ $body
+ }
+}
+
+fn baz() -> usize {
+ let $0foo = 2;
+ identity![foo]
+}
+",
+ r"
+macro_rules! identity {
+ ($body:expr) => {
+ $body
+ }
+}
+
+fn baz() -> usize {
+ const $0FOO: usize = 2;
+ identity![FOO]
+}
+",
+ )
+ }
+
+ #[test]
+ fn usage_shorthand_in_macro() {
+ check_assist(
+ promote_local_to_const,
+ r"
+struct Foo {
+ foo: usize,
+}
+
+macro_rules! identity {
+ ($body:expr) => {
+ $body
+ };
+}
+
+fn baz() -> Foo {
+ let $0foo = 2;
+ identity![Foo { foo }]
+}
+",
+ r"
+struct Foo {
+ foo: usize,
+}
+
+macro_rules! identity {
+ ($body:expr) => {
+ $body
+ };
+}
+
+fn baz() -> Foo {
+ const $0FOO: usize = 2;
+ identity![Foo { foo: FOO }]
+}
+",
+ )
+ }
+
+ #[test]
fn not_applicable_non_const_meth_call() {
cov_mark::check!(promote_local_non_const);
check_assist_not_applicable(
diff --git a/crates/ide-assists/src/handlers/replace_is_method_with_if_let_method.rs b/crates/ide-assists/src/handlers/replace_is_method_with_if_let_method.rs
index b1daaea..0975901 100644
--- a/crates/ide-assists/src/handlers/replace_is_method_with_if_let_method.rs
+++ b/crates/ide-assists/src/handlers/replace_is_method_with_if_let_method.rs
@@ -1,4 +1,7 @@
-use syntax::ast::{self, AstNode};
+use syntax::{
+ ast::{self, make, AstNode},
+ ted,
+};
use crate::{utils::suggest_name, AssistContext, AssistId, AssistKind, Assists};
@@ -42,19 +45,34 @@
suggest_name::for_variable(&receiver, &ctx.sema)
};
- let target = call_expr.syntax().text_range();
-
let (assist_id, message, text) = if name_ref.text() == "is_some" {
("replace_is_some_with_if_let_some", "Replace `is_some` with `if let Some`", "Some")
} else {
("replace_is_ok_with_if_let_ok", "Replace `is_ok` with `if let Ok`", "Ok")
};
- acc.add(AssistId(assist_id, AssistKind::RefactorRewrite), message, target, |edit| {
- let var_name = format!("${{0:{}}}", var_name);
- let replacement = format!("let {}({}) = {}", text, var_name, receiver);
- edit.replace(target, replacement);
- })
+ acc.add(
+ AssistId(assist_id, AssistKind::RefactorRewrite),
+ message,
+ call_expr.syntax().text_range(),
+ |edit| {
+ let call_expr = edit.make_mut(call_expr);
+
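+ // Build the `let Some(..)` / `let Ok(..)` expression structurally and splice it into the syntax tree, so a placeholder snippet can be attached to the bound pattern below.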
+ let var_pat = make::ident_pat(false, false, make::name(&var_name));
+ let pat = make::tuple_struct_pat(make::ext::ident_path(text), [var_pat.into()]);
+ let let_expr = make::expr_let(pat.into(), receiver).clone_for_update();
+
+ if let Some(cap) = ctx.config.snippet_cap {
+ if let Some(ast::Pat::TupleStructPat(pat)) = let_expr.pat() {
+ if let Some(first_var) = pat.fields().next() {
+ edit.add_placeholder_snippet(cap, first_var);
+ }
+ }
+ }
+
+ ted::replace(call_expr.syntax(), let_expr.syntax());
+ },
+ )
}
_ => return None,
}
diff --git a/crates/ide-assists/src/tests.rs b/crates/ide-assists/src/tests.rs
index 977c838..95b9eb5 100644
--- a/crates/ide-assists/src/tests.rs
+++ b/crates/ide-assists/src/tests.rs
@@ -505,16 +505,33 @@
TextEdit {
indels: [
Indel {
- insert: "let $0var_name = 5;\n ",
- delete: 45..45,
+ insert: "let",
+ delete: 45..47,
},
Indel {
insert: "var_name",
- delete: 59..60,
+ delete: 48..60,
+ },
+ Indel {
+ insert: "=",
+ delete: 61..81,
+ },
+ Indel {
+ insert: "5;\n if let 2..6 = var_name {\n true\n } else {\n false\n }",
+ delete: 82..108,
},
],
},
- None,
+ Some(
+ SnippetEdit(
+ [
+ (
+ 0,
+ 49..49,
+ ),
+ ],
+ ),
+ ),
),
},
file_system_edits: [],
@@ -567,16 +584,33 @@
TextEdit {
indels: [
Indel {
- insert: "let $0var_name = 5;\n ",
- delete: 45..45,
+ insert: "let",
+ delete: 45..47,
},
Indel {
insert: "var_name",
- delete: 59..60,
+ delete: 48..60,
+ },
+ Indel {
+ insert: "=",
+ delete: 61..81,
+ },
+ Indel {
+ insert: "5;\n if let 2..6 = var_name {\n true\n } else {\n false\n }",
+ delete: 82..108,
},
],
},
- None,
+ Some(
+ SnippetEdit(
+ [
+ (
+ 0,
+ 49..49,
+ ),
+ ],
+ ),
+ ),
),
},
file_system_edits: [],
diff --git a/crates/ide-assists/src/tests/generated.rs b/crates/ide-assists/src/tests/generated.rs
index da5822b..0c23317 100644
--- a/crates/ide-assists/src/tests/generated.rs
+++ b/crates/ide-assists/src/tests/generated.rs
@@ -1153,7 +1153,7 @@
}
fn method_(&mut self) -> bool {
- <A as SomeTrait>::method_( &mut self.a )
+ <A as SomeTrait>::method_(&mut self.a)
}
}
"#####,
diff --git a/crates/ide-assists/src/utils.rs b/crates/ide-assists/src/utils.rs
index f51e99a..927a8e3 100644
--- a/crates/ide-assists/src/utils.rs
+++ b/crates/ide-assists/src/utils.rs
@@ -813,3 +813,21 @@
assert_eq!(3, required_hashes("#ab\"##c"));
assert_eq!(5, required_hashes("#ab\"##\"####c"));
}
+
+/// Replaces a record expression field's initializer, handling field shorthand (including inside macros).
+pub(crate) fn replace_record_field_expr(
+ ctx: &AssistContext<'_>,
+ edit: &mut SourceChangeBuilder,
+ record_field: ast::RecordExprField,
+ initializer: ast::Expr,
+) {
+ if let Some(ast::Expr::PathExpr(path_expr)) = record_field.expr() {
+ // replace field shorthand
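+ // e.g. `Foo { foo }` becomes `Foo { foo: FOO }` when `initializer` is `FOO`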
+ let file_range = ctx.sema.original_range(path_expr.syntax());
+ edit.insert(file_range.range.end(), format!(": {}", initializer.syntax().text()))
+ } else if let Some(expr) = record_field.expr() {
+ // just replace expr
+ let file_range = ctx.sema.original_range(expr.syntax());
+ edit.replace(file_range.range, initializer.syntax().text());
+ }
+}
diff --git a/crates/ide-assists/src/utils/suggest_name.rs b/crates/ide-assists/src/utils/suggest_name.rs
index 2f1b47b..b4c6cbf 100644
--- a/crates/ide-assists/src/utils/suggest_name.rs
+++ b/crates/ide-assists/src/utils/suggest_name.rs
@@ -1,5 +1,7 @@
//! This module contains functions to suggest names for expressions, functions and other items
+use std::collections::HashSet;
+
use hir::Semantics;
use ide_db::RootDatabase;
use itertools::Itertools;
@@ -58,12 +60,59 @@
"into_future",
];
-pub(crate) fn for_generic_parameter(ty: &ast::ImplTraitType) -> SmolStr {
+/// Suggest a unique name for a generic parameter.
+///
+/// `existing_params` is used to check whether the suggested name conflicts with
+/// existing generic parameters. If it does, the conflict is resolved by appending
+/// a number suffix, e.g. `T`, `T0`, `T1`, ...
+pub(crate) fn for_unique_generic_name(
+ name: &str,
+ existing_params: &ast::GenericParamList,
+) -> SmolStr {
+ let param_names = existing_params
+ .generic_params()
+ .map(|param| match param {
+ ast::GenericParam::TypeParam(t) => t.name().unwrap().to_string(),
+ p => p.to_string(),
+ })
+ .collect::<HashSet<_>>();
+ let mut name = name.to_string();
+ let base_len = name.len();
+ let mut count = 0;
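+ // e.g. if `T` is taken, try `T0`, then `T1`, ... until an unused name is found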
+ while param_names.contains(&name) {
+ name.truncate(base_len);
+ name.push_str(&count.to_string());
+ count += 1;
+ }
+
+ name.into()
+}
+
+/// Suggest a name for an `impl Trait` type to be used as a generic parameter.
+///
+/// `existing_params` is used to check whether the suggested name conflicts with
+/// existing generic parameters.
+///
+/// # Current implementation
+///
+/// The current implementation takes the first character of the name of the first
+/// type bound. If that name conflicts with existing generic parameters, the
+/// conflict is resolved via `for_unique_generic_name`.
+pub(crate) fn for_impl_trait_as_generic(
+ ty: &ast::ImplTraitType,
+ existing_params: &ast::GenericParamList,
+) -> SmolStr {
let c = ty
.type_bound_list()
.and_then(|bounds| bounds.syntax().text().char_at(0.into()))
.unwrap_or('T');
- c.encode_utf8(&mut [0; 4]).into()
+
+ for_unique_generic_name(c.encode_utf8(&mut [0; 4]), existing_params)
}
/// Suggest name of variable for given expression
diff --git a/crates/ide-completion/src/completions/dot.rs b/crates/ide-completion/src/completions/dot.rs
index 613a35d..53a1c84 100644
--- a/crates/ide-completion/src/completions/dot.rs
+++ b/crates/ide-completion/src/completions/dot.rs
@@ -27,6 +27,8 @@
}
let is_field_access = matches!(dot_access.kind, DotAccessKind::Field { .. });
+ let is_method_access_with_parens =
+ matches!(dot_access.kind, DotAccessKind::Method { has_parens: true });
complete_fields(
acc,
@@ -35,6 +37,7 @@
|acc, field, ty| acc.add_field(ctx, dot_access, None, field, &ty),
|acc, field, ty| acc.add_tuple_field(ctx, None, field, &ty),
is_field_access,
+ is_method_access_with_parens,
);
complete_methods(ctx, receiver_ty, |func| acc.add_method(ctx, dot_access, func, None, None));
@@ -83,6 +86,7 @@
},
|acc, field, ty| acc.add_tuple_field(ctx, Some(hir::known::SELF_PARAM), field, &ty),
true,
+ false,
);
complete_methods(ctx, &ty, |func| {
acc.add_method(
@@ -106,12 +110,14 @@
mut named_field: impl FnMut(&mut Completions, hir::Field, hir::Type),
mut tuple_index: impl FnMut(&mut Completions, usize, hir::Type),
is_field_access: bool,
+ is_method_access_with_parens: bool,
) {
let mut seen_names = FxHashSet::default();
for receiver in receiver.autoderef(ctx.db) {
for (field, ty) in receiver.fields(ctx.db) {
if seen_names.insert(field.name(ctx.db))
- && (is_field_access || ty.is_fn() || ty.is_closure())
+ && (is_field_access
+ || (is_method_access_with_parens && (ty.is_fn() || ty.is_closure())))
{
named_field(acc, field, ty);
}
@@ -120,7 +126,8 @@
// Tuples are always the last type in a deref chain, so just check if the name is
// already seen without inserting into the hashset.
if !seen_names.contains(&hir::Name::new_tuple_field(i))
- && (is_field_access || ty.is_fn() || ty.is_closure())
+ && (is_field_access
+ || (is_method_access_with_parens && (ty.is_fn() || ty.is_closure())))
{
// Tuple fields are always public (tuple struct fields are handled above).
tuple_index(acc, i, ty);
@@ -1236,4 +1243,24 @@
"#,
)
}
+
+ #[test]
+ fn test_fn_field_dot_access_method_has_parens_false() {
+ check(
+ r#"
+struct Foo { baz: fn() }
+impl Foo {
+ fn bar<T>(self, t: T): T { t }
+}
+
+fn baz() {
+ let foo = Foo{ baz: || {} };
+ foo.ba$0::<>;
+}
+"#,
+ expect![[r#"
+ me bar(…) fn(self, T)
+ "#]],
+ );
+ }
}
diff --git a/crates/ide-completion/src/context/analysis.rs b/crates/ide-completion/src/context/analysis.rs
index 2d62c45..7da6648 100644
--- a/crates/ide-completion/src/context/analysis.rs
+++ b/crates/ide-completion/src/context/analysis.rs
@@ -361,7 +361,12 @@
let ty = it.pat()
.and_then(|pat| sema.type_of_pat(&pat))
.or_else(|| it.initializer().and_then(|it| sema.type_of_expr(&it)))
- .map(TypeInfo::original);
+ .map(TypeInfo::original)
+ .filter(|ty| {
+ // don't infer the let type if the expr is a function,
+ // preventing the call parentheses from vanishing
+ it.ty().is_some() || !ty.is_fn()
+ });
let name = match it.pat() {
Some(ast::Pat::IdentPat(ident)) => ident.name().map(NameOrNameRef::Name),
Some(_) | None => None,
@@ -415,20 +420,16 @@
})().unwrap_or((None, None))
},
ast::RecordExprField(it) => {
+ let field_ty = sema.resolve_record_field(&it).map(|(_, _, ty)| ty);
+ let field_name = it.field_name().map(NameOrNameRef::NameRef);
if let Some(expr) = it.expr() {
cov_mark::hit!(expected_type_struct_field_with_leading_char);
- (
- sema.type_of_expr(&expr).map(TypeInfo::original),
- it.field_name().map(NameOrNameRef::NameRef),
- )
+ let ty = field_ty
+ .or_else(|| sema.type_of_expr(&expr).map(TypeInfo::original));
+ (ty, field_name)
} else {
cov_mark::hit!(expected_type_struct_field_followed_by_comma);
- let ty = sema.resolve_record_field(&it)
- .map(|(_, _, ty)| ty);
- (
- ty,
- it.field_name().map(NameOrNameRef::NameRef),
- )
+ (field_ty, field_name)
}
},
// match foo { $0 }
diff --git a/crates/ide-completion/src/render.rs b/crates/ide-completion/src/render.rs
index 2ea3f74..581d557 100644
--- a/crates/ide-completion/src/render.rs
+++ b/crates/ide-completion/src/render.rs
@@ -837,11 +837,11 @@
}
"#,
expect![[r#"
- fn main []
- fn test []
+ fn main() []
+ fn test(…) []
md dep []
fn function (use dep::test_mod_a::function) [requires_import]
- fn function (use dep::test_mod_b::function) [requires_import]
+ fn function(…) (use dep::test_mod_b::function) [requires_import]
"#]],
);
}
diff --git a/crates/ide-completion/src/render/function.rs b/crates/ide-completion/src/render/function.rs
index d23ed71..b306bed 100644
--- a/crates/ide-completion/src/render/function.rs
+++ b/crates/ide-completion/src/render/function.rs
@@ -305,12 +305,15 @@
return None;
}
- // Don't add parentheses if the expected type is some function reference.
- if let Some(ty) = &ctx.expected_type {
- // FIXME: check signature matches?
- if ty.is_fn() {
- cov_mark::hit!(no_call_parens_if_fn_ptr_needed);
- return None;
+ // Don't add parentheses if the expected type is a function reference with the same signature.
+ if let Some(expected) = ctx.expected_type.as_ref().filter(|e| e.is_fn()) {
+ if let Some(expected) = expected.as_callable(ctx.db) {
+ if let Some(completed) = func.ty(ctx.db).as_callable(ctx.db) {
+ if expected.sig() == completed.sig() {
+ cov_mark::hit!(no_call_parens_if_fn_ptr_needed);
+ return None;
+ }
+ }
}
}
diff --git a/crates/ide-db/src/documentation.rs b/crates/ide-db/src/documentation.rs
index 26f3cd2..cc8e843 100644
--- a/crates/ide-db/src/documentation.rs
+++ b/crates/ide-db/src/documentation.rs
@@ -138,15 +138,13 @@
for doc in docs {
// str::lines doesn't yield anything for the empty string
if !doc.is_empty() {
- buf.extend(Itertools::intersperse(
- doc.lines().map(|line| {
- line.char_indices()
- .nth(indent)
- .map_or(line, |(offset, _)| &line[offset..])
- .trim_end()
- }),
- "\n",
- ));
+ // We don't trim trailing whitespace from doc comments, as multiple trailing spaces
+ // indicate a hard line break in Markdown.
+ let lines = doc.lines().map(|line| {
+ line.char_indices().nth(indent).map_or(line, |(offset, _)| &line[offset..])
+ });
+
+ buf.extend(Itertools::intersperse(lines, "\n"));
}
buf.push('\n');
}
diff --git a/crates/ide-db/src/path_transform.rs b/crates/ide-db/src/path_transform.rs
index fb4c0c1..8c1a6e6 100644
--- a/crates/ide-db/src/path_transform.rs
+++ b/crates/ide-db/src/path_transform.rs
@@ -82,6 +82,34 @@
}
}
+ pub fn impl_transformation(
+ target_scope: &'a SemanticsScope<'a>,
+ source_scope: &'a SemanticsScope<'a>,
+ impl_: hir::Impl,
+ generic_arg_list: ast::GenericArgList,
+ ) -> PathTransform<'a> {
+ PathTransform {
+ source_scope,
+ target_scope,
+ generic_def: Some(impl_.into()),
+ substs: get_type_args_from_arg_list(generic_arg_list).unwrap_or_default(),
+ }
+ }
+
+ pub fn adt_transformation(
+ target_scope: &'a SemanticsScope<'a>,
+ source_scope: &'a SemanticsScope<'a>,
+ adt: hir::Adt,
+ generic_arg_list: ast::GenericArgList,
+ ) -> PathTransform<'a> {
+ PathTransform {
+ source_scope,
+ target_scope,
+ generic_def: Some(adt.into()),
+ substs: get_type_args_from_arg_list(generic_arg_list).unwrap_or_default(),
+ }
+ }
+
pub fn generic_transformation(
target_scope: &'a SemanticsScope<'a>,
source_scope: &'a SemanticsScope<'a>,
diff --git a/crates/ide-db/src/symbol_index.rs b/crates/ide-db/src/symbol_index.rs
index 24d0850..f5f0f05 100644
--- a/crates/ide-db/src/symbol_index.rs
+++ b/crates/ide-db/src/symbol_index.rs
@@ -414,6 +414,12 @@
fn impl_fn() {}
}
+struct StructT<T>;
+
+impl <T> StructT<T> {
+ fn generic_impl_fn() {}
+}
+
trait Trait {
fn trait_fn(&self);
}
diff --git a/crates/ide-db/src/test_data/test_symbol_index_collection.txt b/crates/ide-db/src/test_data/test_symbol_index_collection.txt
index c9875c7..f0b9777 100644
--- a/crates/ide-db/src/test_data/test_symbol_index_collection.txt
+++ b/crates/ide-db/src/test_data/test_symbol_index_collection.txt
@@ -23,12 +23,12 @@
),
ptr: SyntaxNodePtr {
kind: TYPE_ALIAS,
- range: 397..417,
+ range: 470..490,
},
name_ptr: AstPtr(
SyntaxNodePtr {
kind: NAME,
- range: 402..407,
+ range: 475..480,
},
),
},
@@ -51,12 +51,12 @@
),
ptr: SyntaxNodePtr {
kind: CONST,
- range: 340..361,
+ range: 413..434,
},
name_ptr: AstPtr(
SyntaxNodePtr {
kind: NAME,
- range: 346..351,
+ range: 419..424,
},
),
},
@@ -79,12 +79,12 @@
),
ptr: SyntaxNodePtr {
kind: CONST,
- range: 520..592,
+ range: 593..665,
},
name_ptr: AstPtr(
SyntaxNodePtr {
kind: NAME,
- range: 526..542,
+ range: 599..615,
},
),
},
@@ -139,12 +139,12 @@
),
ptr: SyntaxNodePtr {
kind: USE_TREE,
- range: 654..676,
+ range: 727..749,
},
name_ptr: AstPtr(
SyntaxNodePtr {
kind: NAME,
- range: 663..676,
+ range: 736..749,
},
),
},
@@ -197,12 +197,12 @@
),
ptr: SyntaxNodePtr {
kind: STATIC,
- range: 362..396,
+ range: 435..469,
},
name_ptr: AstPtr(
SyntaxNodePtr {
kind: NAME,
- range: 369..375,
+ range: 442..448,
},
),
},
@@ -276,7 +276,7 @@
Struct(
Struct {
id: StructId(
- 4,
+ 5,
),
},
),
@@ -287,12 +287,12 @@
),
ptr: SyntaxNodePtr {
kind: STRUCT,
- range: 318..336,
+ range: 391..409,
},
name_ptr: AstPtr(
SyntaxNodePtr {
kind: NAME,
- range: 325..335,
+ range: 398..408,
},
),
},
@@ -308,7 +308,7 @@
Struct(
Struct {
id: StructId(
- 5,
+ 6,
),
},
),
@@ -319,12 +319,12 @@
),
ptr: SyntaxNodePtr {
kind: STRUCT,
- range: 555..581,
+ range: 628..654,
},
name_ptr: AstPtr(
SyntaxNodePtr {
kind: NAME,
- range: 562..580,
+ range: 635..653,
},
),
},
@@ -340,7 +340,7 @@
Struct(
Struct {
id: StructId(
- 6,
+ 7,
),
},
),
@@ -351,12 +351,42 @@
),
ptr: SyntaxNodePtr {
kind: STRUCT,
- range: 479..507,
+ range: 552..580,
},
name_ptr: AstPtr(
SyntaxNodePtr {
kind: NAME,
- range: 486..506,
+ range: 559..579,
+ },
+ ),
+ },
+ container_name: None,
+ is_alias: false,
+ is_assoc: false,
+ },
+ FileSymbol {
+ name: "StructT",
+ def: Adt(
+ Struct(
+ Struct {
+ id: StructId(
+ 2,
+ ),
+ },
+ ),
+ ),
+ loc: DeclarationLocation {
+ hir_file_id: FileId(
+ 0,
+ ),
+ ptr: SyntaxNodePtr {
+ kind: STRUCT,
+ range: 261..279,
+ },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 268..275,
},
),
},
@@ -379,12 +409,12 @@
),
ptr: SyntaxNodePtr {
kind: TRAIT,
- range: 261..300,
+ range: 334..373,
},
name_ptr: AstPtr(
SyntaxNodePtr {
kind: NAME,
- range: 267..272,
+ range: 340..345,
},
),
},
@@ -409,12 +439,12 @@
),
ptr: SyntaxNodePtr {
kind: USE_TREE,
- range: 682..696,
+ range: 755..769,
},
name_ptr: AstPtr(
SyntaxNodePtr {
kind: NAME,
- range: 691..696,
+ range: 764..769,
},
),
},
@@ -469,12 +499,12 @@
),
ptr: SyntaxNodePtr {
kind: MODULE,
- range: 419..457,
+ range: 492..530,
},
name_ptr: AstPtr(
SyntaxNodePtr {
kind: NAME,
- range: 423..428,
+ range: 496..501,
},
),
},
@@ -499,12 +529,12 @@
),
ptr: SyntaxNodePtr {
kind: MODULE,
- range: 594..604,
+ range: 667..677,
},
name_ptr: AstPtr(
SyntaxNodePtr {
kind: NAME,
- range: 598..603,
+ range: 671..676,
},
),
},
@@ -543,6 +573,36 @@
is_assoc: false,
},
FileSymbol {
+ name: "generic_impl_fn",
+ def: Function(
+ Function {
+ id: FunctionId(
+ 3,
+ ),
+ },
+ ),
+ loc: DeclarationLocation {
+ hir_file_id: FileId(
+ 0,
+ ),
+ ptr: SyntaxNodePtr {
+ kind: FN,
+ range: 307..330,
+ },
+ name_ptr: AstPtr(
+ SyntaxNodePtr {
+ kind: NAME,
+ range: 310..325,
+ },
+ ),
+ },
+ container_name: Some(
+ "StructT<T>",
+ ),
+ is_alias: false,
+ is_assoc: true,
+ },
+ FileSymbol {
name: "impl_fn",
def: Function(
Function {
@@ -566,7 +626,9 @@
},
),
},
- container_name: None,
+ container_name: Some(
+ "Struct",
+ ),
is_alias: false,
is_assoc: true,
},
@@ -615,12 +677,12 @@
),
ptr: SyntaxNodePtr {
kind: FN,
- range: 302..338,
+ range: 375..411,
},
name_ptr: AstPtr(
SyntaxNodePtr {
kind: NAME,
- range: 305..309,
+ range: 378..382,
},
),
},
@@ -645,12 +707,12 @@
),
ptr: SyntaxNodePtr {
kind: USE_TREE,
- range: 611..648,
+ range: 684..721,
},
name_ptr: AstPtr(
SyntaxNodePtr {
kind: NAME,
- range: 628..648,
+ range: 701..721,
},
),
},
@@ -673,12 +735,12 @@
),
ptr: SyntaxNodePtr {
kind: FN,
- range: 279..298,
+ range: 352..371,
},
name_ptr: AstPtr(
SyntaxNodePtr {
kind: NAME,
- range: 282..290,
+ range: 355..363,
},
),
},
@@ -705,7 +767,7 @@
Struct(
Struct {
id: StructId(
- 2,
+ 3,
),
},
),
@@ -716,12 +778,12 @@
),
ptr: SyntaxNodePtr {
kind: STRUCT,
- range: 435..455,
+ range: 508..528,
},
name_ptr: AstPtr(
SyntaxNodePtr {
kind: NAME,
- range: 442..454,
+ range: 515..527,
},
),
},
@@ -776,7 +838,7 @@
Struct(
Struct {
id: StructId(
- 3,
+ 4,
),
},
),
@@ -836,7 +898,7 @@
Struct(
Struct {
id: StructId(
- 3,
+ 4,
),
},
),
@@ -866,7 +928,7 @@
Struct(
Struct {
id: StructId(
- 3,
+ 4,
),
},
),
diff --git a/crates/ide-diagnostics/src/handlers/unresolved_method.rs b/crates/ide-diagnostics/src/handlers/unresolved_method.rs
index 464b0a7..60a45a0 100644
--- a/crates/ide-diagnostics/src/handlers/unresolved_method.rs
+++ b/crates/ide-diagnostics/src/handlers/unresolved_method.rs
@@ -1,11 +1,14 @@
-use hir::{db::ExpandDatabase, HirDisplay};
+use hir::{db::ExpandDatabase, AssocItem, HirDisplay, InFile};
use ide_db::{
assists::{Assist, AssistId, AssistKind},
base_db::FileRange,
label::Label,
source_change::SourceChange,
};
-use syntax::{ast, AstNode, TextRange};
+use syntax::{
+ ast::{self, make, HasArgList},
+ AstNode, SmolStr, TextRange,
+};
use text_edit::TextEdit;
use crate::{adjusted_display_range_new, Diagnostic, DiagnosticCode, DiagnosticsContext};
@@ -17,15 +20,17 @@
ctx: &DiagnosticsContext<'_>,
d: &hir::UnresolvedMethodCall,
) -> Diagnostic {
- let field_suffix = if d.field_with_same_name.is_some() {
+ let suffix = if d.field_with_same_name.is_some() {
", but a field with a similar name exists"
+ } else if d.assoc_func_with_same_name.is_some() {
+ ", but an associated function with a similar name exists"
} else {
""
};
Diagnostic::new(
DiagnosticCode::RustcHardError("E0599"),
format!(
- "no method `{}` on type `{}`{field_suffix}",
+ "no method `{}` on type `{}`{suffix}",
d.name.display(ctx.sema.db),
d.receiver.display(ctx.sema.db)
),
@@ -46,11 +51,27 @@
}
fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedMethodCall) -> Option<Vec<Assist>> {
- if let Some(ty) = &d.field_with_same_name {
+ let field_fix = if let Some(ty) = &d.field_with_same_name {
field_fix(ctx, d, ty)
} else {
// FIXME: add quickfix
None
+ };
+
+ let assoc_func_fix = assoc_func_fix(ctx, d);
+
+ let mut fixes = vec![];
+ if let Some(field_fix) = field_fix {
+ fixes.push(field_fix);
+ }
+ if let Some(assoc_func_fix) = assoc_func_fix {
+ fixes.push(assoc_func_fix);
+ }
+
+ if fixes.is_empty() {
+ None
+ } else {
+ Some(fixes)
}
}
@@ -58,7 +79,7 @@
ctx: &DiagnosticsContext<'_>,
d: &hir::UnresolvedMethodCall,
ty: &hir::Type,
-) -> Option<Vec<Assist>> {
+) -> Option<Assist> {
if !ty.impls_fnonce(ctx.sema.db) {
return None;
}
@@ -78,7 +99,7 @@
}
_ => return None,
};
- Some(vec![Assist {
+ Some(Assist {
id: AssistId("expected-method-found-field-fix", AssistKind::QuickFix),
label: Label::new("Use parentheses to call the value of the field".to_string()),
group: None,
@@ -88,7 +109,95 @@
(file_id, TextEdit::insert(range.end(), ")".to_owned())),
])),
trigger_signature_help: false,
- }])
+ })
+}
+
+fn assoc_func_fix(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedMethodCall) -> Option<Assist> {
+ if let Some(assoc_item_id) = d.assoc_func_with_same_name {
+ let db = ctx.sema.db;
+
+ let expr_ptr = &d.expr;
+ let root = db.parse_or_expand(expr_ptr.file_id);
+ let expr: ast::Expr = expr_ptr.value.to_node(&root);
+
+ let call = ast::MethodCallExpr::cast(expr.syntax().clone())?;
+ let range = InFile::new(expr_ptr.file_id, call.syntax().text_range())
+ .original_node_file_range_rooted(db)
+ .range;
+
+ let receiver = call.receiver()?;
+ let receiver_type = &ctx.sema.type_of_expr(&receiver)?.original;
+
+ let need_to_take_receiver_as_first_arg = match hir::AssocItem::from(assoc_item_id) {
+ AssocItem::Function(f) => {
+ let assoc_fn_params = f.assoc_fn_params(db);
+ if assoc_fn_params.is_empty() {
+ false
+ } else {
+ assoc_fn_params
+ .first()
+ .map(|first_arg| {
+ // For a generic type, say `Box`, take `Box::into_raw(b: Self)` as an example:
+ // the type of `b` is `Self`, i.e. `Box<T, A>`, which still contains unspecified generics,
+ // whereas the type of `receiver` is fully specified, e.g. `Box<i32, Global>`,
+ // so `first_arg.ty() == receiver_type` evaluates to `false` here.
+ // Adding `first_arg.ty().as_adt() == receiver_type.as_adt()` as a guard fixes this:
+ // applying `.as_adt()` to either `Box<T, A>` or `Box<i32, Global>` yields `Box`, so we get `true` here.
+
+ // FIXME: this fails when the type of `b` is a `Box` whose generic params differ from `receiver`'s
+ first_arg.ty() == receiver_type
+ || first_arg.ty().as_adt() == receiver_type.as_adt()
+ })
+ .unwrap_or(false)
+ }
+ }
+ _ => false,
+ };
+
+ let mut receiver_type_adt_name = receiver_type.as_adt()?.name(db).to_smol_str().to_string();
+
+ let generic_parameters: Vec<SmolStr> = receiver_type.generic_parameters(db).collect();
+ // If the receiver is passed as the first argument of the associated function,
+ // we can omit the generic parameters, because the compiler can deduce them.
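+ // Otherwise, spell them out, e.g. turn a call on a receiver of type `A<i32, &str>` into `A::<i32, &str>::foo()`.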
+ if !need_to_take_receiver_as_first_arg && !generic_parameters.is_empty() {
+ let generic_parameters = generic_parameters.join(", ").to_string();
+ receiver_type_adt_name =
+ format!("{}::<{}>", receiver_type_adt_name, generic_parameters);
+ }
+
+ let method_name = call.name_ref()?;
+ let assoc_func_call = format!("{}::{}()", receiver_type_adt_name, method_name);
+
+ let assoc_func_call = make::expr_path(make::path_from_text(&assoc_func_call));
+
+ let args: Vec<_> = if need_to_take_receiver_as_first_arg {
+ std::iter::once(receiver).chain(call.arg_list()?.args()).collect()
+ } else {
+ call.arg_list()?.args().collect()
+ };
+ let args = make::arg_list(args);
+
+ let assoc_func_call_expr_string = make::expr_call(assoc_func_call, args).to_string();
+
+ let file_id = ctx.sema.original_range_opt(call.receiver()?.syntax())?.file_id;
+
+ Some(Assist {
+ id: AssistId("method_call_to_assoc_func_call_fix", AssistKind::QuickFix),
+ label: Label::new(format!(
+ "Use associated func call instead: `{}`",
+ assoc_func_call_expr_string
+ )),
+ group: None,
+ target: range,
+ source_change: Some(SourceChange::from_text_edit(
+ file_id,
+ TextEdit::replace(range, assoc_func_call_expr_string),
+ )),
+ trigger_signature_help: false,
+ })
+ } else {
+ None
+ }
}
#[cfg(test)]
@@ -96,6 +205,85 @@
use crate::tests::{check_diagnostics, check_fix};
#[test]
+ fn test_assoc_func_fix() {
+ check_fix(
+ r#"
+struct A {}
+
+impl A {
+ fn hello() {}
+}
+fn main() {
+ let a = A{};
+ a.hello$0();
+}
+"#,
+ r#"
+struct A {}
+
+impl A {
+ fn hello() {}
+}
+fn main() {
+ let a = A{};
+ A::hello();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_assoc_func_diagnostic() {
+ check_diagnostics(
+ r#"
+struct A {}
+impl A {
+ fn hello() {}
+}
+fn main() {
+ let a = A{};
+ a.hello();
+ // ^^^^^ 💡 error: no method `hello` on type `A`, but an associated function with a similar name exists
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_assoc_func_fix_with_generic() {
+ check_fix(
+ r#"
+struct A<T, U> {
+ a: T,
+ b: U
+}
+
+impl<T, U> A<T, U> {
+ fn foo() {}
+}
+fn main() {
+ let a = A {a: 0, b: ""};
+ a.foo()$0;
+}
+"#,
+ r#"
+struct A<T, U> {
+ a: T,
+ b: U
+}
+
+impl<T, U> A<T, U> {
+ fn foo() {}
+}
+fn main() {
+ let a = A {a: 0, b: ""};
+ A::<i32, &str>::foo();
+}
+"#,
+ );
+ }
+
+ #[test]
fn smoke_test() {
check_diagnostics(
r#"
diff --git a/crates/ide/src/status.rs b/crates/ide/src/status.rs
index e7f97eb..b2b305c 100644
--- a/crates/ide/src/status.rs
+++ b/crates/ide/src/status.rs
@@ -10,7 +10,7 @@
debug::{DebugQueryTable, TableEntry},
Query, QueryTable,
},
- CrateId, FileId, FileTextQuery, ParseQuery, SourceDatabase, SourceRootId,
+ CrateData, FileId, FileTextQuery, ParseQuery, SourceDatabase, SourceRootId,
},
symbol_index::ModuleSymbolsQuery,
};
@@ -54,25 +54,54 @@
format_to!(buf, "{} block def maps\n", collect_query_count(BlockDefMapQuery.in_db(db)));
if let Some(file_id) = file_id {
- format_to!(buf, "\nFile info:\n");
+ format_to!(buf, "\nCrates for file {}:\n", file_id.index());
let crates = crate::parent_module::crates_for(db, file_id);
if crates.is_empty() {
format_to!(buf, "Does not belong to any crate");
}
let crate_graph = db.crate_graph();
- for krate in crates {
- let display_crate = |krate: CrateId| match &crate_graph[krate].display_name {
- Some(it) => format!("{it}({})", krate.into_raw()),
- None => format!("{}", krate.into_raw()),
- };
- format_to!(buf, "Crate: {}\n", display_crate(krate));
- format_to!(buf, "Enabled cfgs: {:?}\n", crate_graph[krate].cfg_options);
- let deps = crate_graph[krate]
- .dependencies
+ for crate_id in crates {
+ let CrateData {
+ root_file_id,
+ edition,
+ version,
+ display_name,
+ cfg_options,
+ potential_cfg_options,
+ env,
+ dependencies,
+ origin,
+ is_proc_macro,
+ target_layout,
+ toolchain,
+ } = &crate_graph[crate_id];
+ format_to!(
+ buf,
+ "Crate: {}\n",
+ match display_name {
+ Some(it) => format!("{it}({})", crate_id.into_raw()),
+ None => format!("{}", crate_id.into_raw()),
+ }
+ );
+ format_to!(buf, " Root module file id: {}\n", root_file_id.index());
+ format_to!(buf, " Edition: {}\n", edition);
+ format_to!(buf, " Version: {}\n", version.as_deref().unwrap_or("n/a"));
+ format_to!(buf, " Enabled cfgs: {:?}\n", cfg_options);
+ format_to!(buf, " Potential cfgs: {:?}\n", potential_cfg_options);
+ format_to!(buf, " Env: {:?}\n", env);
+ format_to!(buf, " Origin: {:?}\n", origin);
+ format_to!(buf, " Is a proc macro crate: {}\n", is_proc_macro);
+ format_to!(buf, " Workspace Target Layout: {:?}\n", target_layout);
+ format_to!(
+ buf,
+ " Workspace Toolchain: {}\n",
+ toolchain.as_ref().map_or_else(|| "n/a".into(), |v| v.to_string())
+ );
+ let deps = dependencies
.iter()
- .map(|dep| format!("{}={:?}", dep.name, dep.crate_id))
+ .map(|dep| format!("{}={}", dep.name, dep.crate_id.into_raw()))
.format(", ");
- format_to!(buf, "Dependencies: {}\n", deps);
+ format_to!(buf, " Dependencies: {}\n", deps);
}
}
diff --git a/crates/rust-analyzer/src/bin/main.rs b/crates/rust-analyzer/src/bin/main.rs
index 8472e49..6f40a4c 100644
--- a/crates/rust-analyzer/src/bin/main.rs
+++ b/crates/rust-analyzer/src/bin/main.rs
@@ -172,7 +172,15 @@
let (connection, io_threads) = Connection::stdio();
- let (initialize_id, initialize_params) = connection.initialize_start()?;
+ let (initialize_id, initialize_params) = match connection.initialize_start() {
+ Ok(it) => it,
+ Err(e) => {
+ if e.channel_is_disconnected() {
+ io_threads.join()?;
+ }
+ return Err(e.into());
+ }
+ };
tracing::info!("InitializeParams: {}", initialize_params);
let lsp_types::InitializeParams {
root_uri,
@@ -240,7 +248,12 @@
let initialize_result = serde_json::to_value(initialize_result).unwrap();
- connection.initialize_finish(initialize_id, initialize_result)?;
+ if let Err(e) = connection.initialize_finish(initialize_id, initialize_result) {
+ if e.channel_is_disconnected() {
+ io_threads.join()?;
+ }
+ return Err(e.into());
+ }
if !config.has_linked_projects() && config.detached_files().is_empty() {
config.rediscover_workspaces();
diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs
index 258f741..88fb370 100644
--- a/crates/rust-analyzer/src/config.rs
+++ b/crates/rust-analyzer/src/config.rs
@@ -7,7 +7,11 @@
//! configure the server itself, feature flags are passed into analysis, and
//! tweak things like automatic insertion of `()` in completions.
-use std::{fmt, iter, ops::Not, path::PathBuf};
+use std::{
+ fmt, iter,
+ ops::Not,
+ path::{Path, PathBuf},
+};
use cfg::{CfgAtom, CfgDiff};
use flycheck::FlycheckConfig;
@@ -105,6 +109,9 @@
/// ```
/// .
cargo_buildScripts_overrideCommand: Option<Vec<String>> = "null",
+ /// Rerun proc-macro builds and build-script runs when proc-macro
+ /// or build-script sources change and are saved.
+ cargo_buildScripts_rebuildOnSave: bool = "false",
/// Use `RUSTC_WRAPPER=rust-analyzer` when running build scripts to
/// avoid checking unnecessary things.
cargo_buildScripts_useRustcWrapper: bool = "true",
@@ -164,15 +171,15 @@
/// Specifies the working directory for running checks.
/// - "workspace": run checks for workspaces in the corresponding workspaces' root directories.
// FIXME: Ideally we would support this in some way
- /// This falls back to "root" if `#rust-analyzer.cargo.check.invocationStrategy#` is set to `once`.
+ /// This falls back to "root" if `#rust-analyzer.check.invocationStrategy#` is set to `once`.
/// - "root": run checks in the project's root directory.
- /// This config only has an effect when `#rust-analyzer.cargo.check.overrideCommand#`
+ /// This config only has an effect when `#rust-analyzer.check.overrideCommand#`
/// is set.
check_invocationLocation | checkOnSave_invocationLocation: InvocationLocation = "\"workspace\"",
/// Specifies the invocation strategy to use when running the check command.
/// If `per_workspace` is set, the command will be executed for each workspace.
/// If `once` is set, the command will be executed once.
- /// This config only has an effect when `#rust-analyzer.cargo.check.overrideCommand#`
+ /// This config only has an effect when `#rust-analyzer.check.overrideCommand#`
/// is set.
check_invocationStrategy | checkOnSave_invocationStrategy: InvocationStrategy = "\"per_workspace\"",
/// Whether to pass `--no-default-features` to Cargo. Defaults to
@@ -191,8 +198,8 @@
/// If there are multiple linked projects/workspaces, this command is invoked for
/// each of them, with the working directory being the workspace root
/// (i.e., the folder containing the `Cargo.toml`). This can be overwritten
- /// by changing `#rust-analyzer.cargo.check.invocationStrategy#` and
- /// `#rust-analyzer.cargo.check.invocationLocation#`.
+ /// by changing `#rust-analyzer.check.invocationStrategy#` and
+ /// `#rust-analyzer.check.invocationLocation#`.
///
/// An example command would be:
///
@@ -917,7 +924,19 @@
pub fn has_linked_projects(&self) -> bool {
!self.data.linkedProjects.is_empty()
}
- pub fn linked_projects(&self) -> Vec<LinkedProject> {
+ pub fn linked_manifests(&self) -> impl Iterator<Item = &Path> + '_ {
+ self.data.linkedProjects.iter().filter_map(|it| match it {
+ ManifestOrProjectJson::Manifest(p) => Some(&**p),
+ ManifestOrProjectJson::ProjectJson(_) => None,
+ })
+ }
+ pub fn has_linked_project_jsons(&self) -> bool {
+ self.data
+ .linkedProjects
+ .iter()
+ .any(|it| matches!(it, ManifestOrProjectJson::ProjectJson(_)))
+ }
+ pub fn linked_or_discovered_projects(&self) -> Vec<LinkedProject> {
match self.data.linkedProjects.as_slice() {
[] => {
let exclude_dirs: Vec<_> =
@@ -952,15 +971,6 @@
}
}
- pub fn add_linked_projects(&mut self, linked_projects: Vec<ProjectJsonData>) {
- let mut linked_projects = linked_projects
- .into_iter()
- .map(ManifestOrProjectJson::ProjectJson)
- .collect::<Vec<ManifestOrProjectJson>>();
-
- self.data.linkedProjects.append(&mut linked_projects);
- }
-
pub fn did_save_text_document_dynamic_registration(&self) -> bool {
let caps = try_or_def!(self.caps.text_document.as_ref()?.synchronization.clone()?);
caps.did_save == Some(true) && caps.dynamic_registration == Some(true)
@@ -1369,6 +1379,10 @@
self.data.checkOnSave
}
+ pub fn script_rebuild_on_save(&self) -> bool {
+ self.data.cargo_buildScripts_rebuildOnSave
+ }
+
pub fn runnables(&self) -> RunnablesConfig {
RunnablesConfig {
override_cargo: self.data.runnables_command.clone(),
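For reference, a minimal standalone sketch of the `filter_map` shape behind the new `linked_manifests` and `has_linked_project_jsons` helpers, using simplified stand-in types rather than the real `Config`/`ManifestOrProjectJson`:

```rust
use std::path::{Path, PathBuf};

// Simplified stand-ins for the config's linked-project entries
// (names are illustrative, not rust-analyzer's actual types).
enum LinkedProjectEntry {
    Manifest(PathBuf),
    ProjectJson(String),
}

struct Config {
    linked_projects: Vec<LinkedProjectEntry>,
}

impl Config {
    // Same shape as linked_manifests(): borrow only the manifest paths,
    // skipping inline project-json entries.
    fn linked_manifests(&self) -> impl Iterator<Item = &Path> + '_ {
        self.linked_projects.iter().filter_map(|it| match it {
            LinkedProjectEntry::Manifest(p) => Some(p.as_path()),
            LinkedProjectEntry::ProjectJson(_) => None,
        })
    }

    fn has_linked_project_jsons(&self) -> bool {
        self.linked_projects.iter().any(|it| matches!(it, LinkedProjectEntry::ProjectJson(_)))
    }
}

fn main() {
    let config = Config {
        linked_projects: vec![
            LinkedProjectEntry::Manifest(PathBuf::from("foo/Cargo.toml")),
            LinkedProjectEntry::ProjectJson("{}".to_string()),
        ],
    };
    for manifest in config.linked_manifests() {
        println!("{}", manifest.display());
    }
    assert!(config.has_linked_project_jsons());
}
```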
diff --git a/crates/rust-analyzer/src/handlers/notification.rs b/crates/rust-analyzer/src/handlers/notification.rs
index f9070d2..7e62199 100644
--- a/crates/rust-analyzer/src/handlers/notification.rs
+++ b/crates/rust-analyzer/src/handlers/notification.rs
@@ -130,6 +130,13 @@
state: &mut GlobalState,
params: DidSaveTextDocumentParams,
) -> anyhow::Result<()> {
+ if state.config.script_rebuild_on_save() && state.proc_macro_changed {
+ // reset the flag
+ state.proc_macro_changed = false;
+ // rebuild the proc macros
+ state.fetch_build_data_queue.request_op("ScriptRebuildOnSave".to_owned(), ());
+ }
+
if let Ok(vfs_path) = from_proto::vfs_path(¶ms.text_document.uri) {
// Re-fetch workspaces if a workspace related file has changed
if let Some(abs_path) = vfs_path.as_path() {
diff --git a/crates/rust-analyzer/src/handlers/request.rs b/crates/rust-analyzer/src/handlers/request.rs
index 6b7bc94..f1317ce 100644
--- a/crates/rust-analyzer/src/handlers/request.rs
+++ b/crates/rust-analyzer/src/handlers/request.rs
@@ -801,7 +801,7 @@
}
}
None => {
- if !snap.config.linked_projects().is_empty() {
+ if !snap.config.linked_or_discovered_projects().is_empty() {
res.push(lsp_ext::Runnable {
label: "cargo check --workspace".to_string(),
location: None,
diff --git a/crates/rust-analyzer/src/lsp/utils.rs b/crates/rust-analyzer/src/lsp/utils.rs
index b388b31..a4417e4 100644
--- a/crates/rust-analyzer/src/lsp/utils.rs
+++ b/crates/rust-analyzer/src/lsp/utils.rs
@@ -171,30 +171,19 @@
file_contents: impl FnOnce() -> String,
mut content_changes: Vec<lsp_types::TextDocumentContentChangeEvent>,
) -> String {
- // Skip to the last full document change, as it invalidates all previous changes anyways.
- let mut start = content_changes
- .iter()
- .rev()
- .position(|change| change.range.is_none())
- .map(|idx| content_changes.len() - idx - 1)
- .unwrap_or(0);
-
- let mut text: String = match content_changes.get_mut(start) {
- // peek at the first content change as an optimization
- Some(lsp_types::TextDocumentContentChangeEvent { range: None, text, .. }) => {
- let text = mem::take(text);
- start += 1;
-
- // The only change is a full document update
- if start == content_changes.len() {
- return text;
+ // If at least one of the changes is a full document change, use the last
+ // of them as the starting point and ignore all previous changes.
+ let (mut text, content_changes) =
+ match content_changes.iter().rposition(|change| change.range.is_none()) {
+ Some(idx) => {
+ let text = mem::take(&mut content_changes[idx].text);
+ (text, &content_changes[idx + 1..])
}
- text
- }
- Some(_) => file_contents(),
- // we received no content changes
- None => return file_contents(),
- };
+ None => (file_contents(), &content_changes[..]),
+ };
+ if content_changes.is_empty() {
+ return text;
+ }
let mut line_index = LineIndex {
// the index will be overwritten in the bottom loop's first iteration
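The new logic picks the last full-document change (if any) as the starting text and only replays the edits that come after it. A self-contained sketch of that selection step, with a simplified stand-in for `lsp_types::TextDocumentContentChangeEvent` and without the real line-index splicing:

```rust
// Simplified stand-in: `range: None` means "replace the whole document".
struct ContentChange {
    range: Option<(usize, usize)>,
    text: String,
}

fn apply_changes(file_contents: impl FnOnce() -> String, mut changes: Vec<ContentChange>) -> String {
    // Find the last full-document change; everything before it is dead weight.
    let (text, rest) = match changes.iter().rposition(|c| c.range.is_none()) {
        Some(idx) => {
            let text = std::mem::take(&mut changes[idx].text);
            (text, &changes[idx + 1..])
        }
        None => (file_contents(), &changes[..]),
    };
    // The real code walks `rest` with a line index and splices each incremental
    // range edit into `text`; this sketch just reports what would be applied.
    println!("starting from {} bytes, {} incremental edits to apply", text.len(), rest.len());
    text
}

fn main() {
    let changes = vec![
        ContentChange { range: Some((0, 1)), text: "ignored".into() },
        ContentChange { range: None, text: "fn main() {}".into() },
        ContentChange { range: Some((3, 4)), text: "x".into() },
    ];
    let text = apply_changes(|| String::from("old on-disk contents"), changes);
    assert_eq!(text, "fn main() {}");
}
```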
diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs
index cc7cb27..91dc6c2 100644
--- a/crates/rust-analyzer/src/reload.rs
+++ b/crates/rust-analyzer/src/reload.rs
@@ -80,7 +80,8 @@
&self.config.lru_query_capacities().cloned().unwrap_or_default(),
);
}
- if self.config.linked_projects() != old_config.linked_projects() {
+ if self.config.linked_or_discovered_projects() != old_config.linked_or_discovered_projects()
+ {
self.fetch_workspaces_queue.request_op("linked projects changed".to_string(), false)
} else if self.config.flycheck() != old_config.flycheck() {
self.reload_flycheck();
@@ -128,7 +129,7 @@
status.health = lsp_ext::Health::Warning;
message.push_str("Auto-reloading is disabled and the workspace has changed, a manual workspace reload is required.\n\n");
}
- if self.config.linked_projects().is_empty()
+ if self.config.linked_or_discovered_projects().is_empty()
&& self.config.detached_files().is_empty()
&& self.config.notifications().cargo_toml_not_found
{
@@ -174,7 +175,21 @@
if let Err(_) = self.fetch_workspace_error() {
status.health = lsp_ext::Health::Error;
- message.push_str("Failed to load workspaces.\n\n");
+ message.push_str("Failed to load workspaces.");
+
+ if self.config.has_linked_projects() {
+ message.push_str(
+ "`rust-analyzer.linkedProjects` have been specified, which may be incorrect. Specified project paths:\n",
+ );
+ message.push_str(&format!(
+ " {}",
+ self.config.linked_manifests().map(|it| it.display()).format("\n ")
+ ));
+ if self.config.has_linked_project_jsons() {
+ message.push_str("\nAdditionally, one or more project jsons are specified")
+ }
+ }
+ message.push_str("\n\n");
}
if !message.is_empty() {
@@ -187,7 +202,7 @@
tracing::info!(%cause, "will fetch workspaces");
self.task_pool.handle.spawn_with_sender(ThreadIntent::Worker, {
- let linked_projects = self.config.linked_projects();
+ let linked_projects = self.config.linked_or_discovered_projects();
let detached_files = self.config.detached_files().to_vec();
let cargo_config = self.config.cargo();
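The warning message above joins the manifest paths with itertools' lazy `format` adapter. A small standalone sketch of that joining pattern (assuming `itertools` as a dependency, as it is in this repo):

```rust
use itertools::Itertools;
use std::path::Path;

fn main() {
    let manifests = [Path::new("foo/Cargo.toml"), Path::new("bar/Cargo.toml")];
    // `format` produces a lazy Display adapter, so the paths are joined
    // directly into the final message without an intermediate Vec<String>.
    let message = format!(
        "Specified project paths:\n    {}",
        manifests.iter().map(|p| p.display()).format("\n    ")
    );
    println!("{message}");
}
```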
diff --git a/crates/syntax/src/ast/edit_in_place.rs b/crates/syntax/src/ast/edit_in_place.rs
index 916bbba..4c2878f 100644
--- a/crates/syntax/src/ast/edit_in_place.rs
+++ b/crates/syntax/src/ast/edit_in_place.rs
@@ -13,7 +13,7 @@
SyntaxNode, SyntaxToken,
};
-use super::{HasArgList, HasName};
+use super::{GenericParam, HasArgList, HasName};
pub trait GenericParamsOwnerEdit: ast::HasGenericParams {
fn get_or_create_generic_param_list(&self) -> ast::GenericParamList;
@@ -272,6 +272,36 @@
}
}
+ /// Finds the generic param corresponding to the given generic arg
+ pub fn find_generic_arg(&self, generic_arg: &ast::GenericArg) -> Option<GenericParam> {
+ self.generic_params().find_map(move |param| match (¶m, &generic_arg) {
+ (ast::GenericParam::LifetimeParam(a), ast::GenericArg::LifetimeArg(b)) => {
+ (a.lifetime()?.lifetime_ident_token()?.text()
+ == b.lifetime()?.lifetime_ident_token()?.text())
+ .then_some(param)
+ }
+ (ast::GenericParam::TypeParam(a), ast::GenericArg::TypeArg(b)) => {
+ debug_assert_eq!(b.syntax().first_token(), b.syntax().last_token());
+ (a.name()?.text() == b.syntax().first_token()?.text()).then_some(param)
+ }
+ (ast::GenericParam::ConstParam(a), ast::GenericArg::TypeArg(b)) => {
+ debug_assert_eq!(b.syntax().first_token(), b.syntax().last_token());
+ (a.name()?.text() == b.syntax().first_token()?.text()).then_some(param)
+ }
+ _ => None,
+ })
+ }
+
+ /// Removes and returns the generic param corresponding to the given generic arg, if found
+ pub fn remove_generic_arg(&self, generic_arg: &ast::GenericArg) -> Option<GenericParam> {
+ let param_to_remove = self.find_generic_arg(generic_arg);
+
+ if let Some(param) = ¶m_to_remove {
+ self.remove_generic_param(param.clone());
+ }
+ param_to_remove
+ }
+
/// Constructs a matching [`ast::GenericArgList`]
pub fn to_generic_args(&self) -> ast::GenericArgList {
let args = self.generic_params().filter_map(|param| match param {
@@ -300,6 +330,20 @@
}
ted::append_child(self.syntax(), predicate.syntax());
}
+
+ pub fn remove_predicate(&self, predicate: ast::WherePred) {
+ if let Some(previous) = predicate.syntax().prev_sibling() {
+ if let Some(next_token) = previous.next_sibling_or_token() {
+ ted::remove_all(next_token..=predicate.syntax().clone().into());
+ }
+ } else if let Some(next) = predicate.syntax().next_sibling() {
+ if let Some(next_token) = next.prev_sibling_or_token() {
+ ted::remove_all(predicate.syntax().clone().into()..=next_token);
+ }
+ } else {
+ ted::remove(predicate.syntax());
+ }
+ }
}
impl ast::TypeParam {
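The matching in `find_generic_arg` pairs each generic arg with a param of a compatible kind and the same name; a bare identifier in argument position may name either a type param or a const param, so both are checked. A simplified, self-contained sketch of that matching with stand-in enums instead of the `ast` types:

```rust
#[derive(Debug, Clone, PartialEq)]
enum Param {
    Lifetime(String),
    Type(String),
    Const(String),
}

#[derive(Debug)]
enum Arg {
    Lifetime(String),
    Type(String),
}

fn find_param<'a>(params: &'a [Param], arg: &Arg) -> Option<&'a Param> {
    params.iter().find_map(|param| match (param, arg) {
        (Param::Lifetime(a), Arg::Lifetime(b)) => (a == b).then_some(param),
        // A bare identifier in argument position can name either a type
        // param or a const param, so both kinds are tried.
        (Param::Type(a), Arg::Type(b)) | (Param::Const(a), Arg::Type(b)) => {
            (a == b).then_some(param)
        }
        _ => None,
    })
}

fn main() {
    let params = [
        Param::Lifetime("'a".into()),
        Param::Type("T".into()),
        Param::Const("N".into()),
    ];
    assert_eq!(
        find_param(&params, &Arg::Type("T".into())),
        Some(&Param::Type("T".into()))
    );
    assert_eq!(find_param(&params, &Arg::Lifetime("'b".into())), None);
}
```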
diff --git a/crates/syntax/src/ast/make.rs b/crates/syntax/src/ast/make.rs
index ad63cc5..2abbfc8 100644
--- a/crates/syntax/src/ast/make.rs
+++ b/crates/syntax/src/ast/make.rs
@@ -207,10 +207,28 @@
(None, Some(bs)) => Some(bs),
(Some(ps), None) => Some(ps),
(Some(ps), Some(bs)) => {
- for b in bs.generic_params() {
- ps.add_generic_param(b);
- }
- Some(ps)
+ // make sure lifetimes are placed before other generic params
+ let generic_params = ps.generic_params().merge_by(bs.generic_params(), |_, b| {
+ !matches!(b, ast::GenericParam::LifetimeParam(_))
+ });
+ Some(generic_param_list(generic_params))
+ }
+ }
+}
+
+fn merge_where_clause(
+ ps: Option<ast::WhereClause>,
+ bs: Option<ast::WhereClause>,
+) -> Option<ast::WhereClause> {
+ match (ps, bs) {
+ (None, None) => None,
+ (None, Some(bs)) => Some(bs),
+ (Some(ps), None) => Some(ps),
+ (Some(ps), Some(bs)) => {
+ let preds = where_clause(std::iter::empty()).clone_for_update();
+ ps.predicates().for_each(|p| preds.add_predicate(p));
+ bs.predicates().for_each(|p| preds.add_predicate(p));
+ Some(preds)
}
}
}
@@ -251,9 +269,9 @@
pub fn impl_trait(
is_unsafe: bool,
trait_gen_params: Option<ast::GenericParamList>,
- trait_gen_args: Option<ast::GenericParamList>,
+ trait_gen_args: Option<ast::GenericArgList>,
type_gen_params: Option<ast::GenericParamList>,
- type_gen_args: Option<ast::GenericParamList>,
+ type_gen_args: Option<ast::GenericArgList>,
is_negative: bool,
path_type: ast::Type,
ty: ast::Type,
@@ -262,15 +280,9 @@
body: Option<Vec<either::Either<ast::Attr, ast::AssocItem>>>,
) -> ast::Impl {
let is_unsafe = if is_unsafe { "unsafe " } else { "" };
- let ty_gen_args = match merge_gen_params(type_gen_params.clone(), type_gen_args) {
- Some(pars) => pars.to_generic_args().to_string(),
- None => String::new(),
- };
- let tr_gen_args = match merge_gen_params(trait_gen_params.clone(), trait_gen_args) {
- Some(pars) => pars.to_generic_args().to_string(),
- None => String::new(),
- };
+ let trait_gen_args = trait_gen_args.map(|args| args.to_string()).unwrap_or_default();
+ let type_gen_args = type_gen_args.map(|args| args.to_string()).unwrap_or_default();
let gen_params = match merge_gen_params(trait_gen_params, type_gen_params) {
Some(pars) => pars.to_string(),
@@ -279,25 +291,15 @@
let is_negative = if is_negative { "! " } else { "" };
- let where_clause = match (ty_where_clause, trait_where_clause) {
- (None, None) => " ".to_string(),
- (None, Some(tr)) => format!("\n{}\n", tr).to_string(),
- (Some(ty), None) => format!("\n{}\n", ty).to_string(),
- (Some(ty), Some(tr)) => {
- let updated = ty.clone_for_update();
- tr.predicates().for_each(|p| {
- ty.add_predicate(p);
- });
- format!("\n{}\n", updated).to_string()
- }
- };
+ let where_clause = merge_where_clause(ty_where_clause, trait_where_clause)
+ .map_or_else(|| " ".to_string(), |wc| format!("\n{}\n", wc));
let body = match body {
Some(bd) => bd.iter().map(|elem| elem.to_string()).join(""),
None => String::new(),
};
- ast_from_text(&format!("{is_unsafe}impl{gen_params} {is_negative}{path_type}{tr_gen_args} for {ty}{ty_gen_args}{where_clause}{{{}}}" , body))
+ ast_from_text(&format!("{is_unsafe}impl{gen_params} {is_negative}{path_type}{trait_gen_args} for {ty}{type_gen_args}{where_clause}{{{}}}" , body))
}
pub fn impl_trait_type(bounds: ast::TypeBoundList) -> ast::ImplTraitType {
@@ -922,6 +924,10 @@
ast_from_text(&format!("fn f<{name}{bounds}>() {{ }}"))
}
+pub fn const_param(name: ast::Name, ty: ast::Type) -> ast::ConstParam {
+ ast_from_text(&format!("fn f<const {name}: {ty}>() {{ }}"))
+}
+
pub fn lifetime_param(lifetime: ast::Lifetime) -> ast::LifetimeParam {
ast_from_text(&format!("fn f<{lifetime}>() {{ }}"))
}
@@ -948,9 +954,7 @@
ast_from_text(&format!("const S: T::<{args}> = ();"))
}
-pub(crate) fn generic_arg_list(
- args: impl IntoIterator<Item = ast::GenericArg>,
-) -> ast::GenericArgList {
+pub fn generic_arg_list(args: impl IntoIterator<Item = ast::GenericArg>) -> ast::GenericArgList {
let args = args.into_iter().join(", ");
ast_from_text(&format!("const S: T<{args}> = ();"))
}
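`merge_gen_params` now relies on itertools' `merge_by` to keep lifetimes ahead of the other generic params when combining two lists. A standalone sketch of that ordering trick with stand-in values (assuming `itertools` as a dependency, as it is in this repo):

```rust
use itertools::Itertools;

#[derive(Debug, PartialEq)]
enum Param {
    Lifetime(&'static str),
    Type(&'static str),
}

fn main() {
    // Stand-ins for two GenericParamLists being merged.
    let ps = vec![Param::Type("T")];
    let bs = vec![Param::Lifetime("'a"), Param::Type("U")];
    // merge_by takes from `ps` whenever the closure returns true; by only
    // yielding to `bs` while its next item is a lifetime, every lifetime
    // ends up ahead of the type parameters, mirroring merge_gen_params.
    let merged: Vec<_> = ps
        .into_iter()
        .merge_by(bs, |_, b| !matches!(b, Param::Lifetime(_)))
        .collect();
    assert_eq!(
        merged,
        vec![Param::Lifetime("'a"), Param::Type("T"), Param::Type("U")]
    );
}
```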
diff --git a/crates/syntax/src/ast/node_ext.rs b/crates/syntax/src/ast/node_ext.rs
index a253827..a7e4899 100644
--- a/crates/syntax/src/ast/node_ext.rs
+++ b/crates/syntax/src/ast/node_ext.rs
@@ -617,6 +617,16 @@
}
}
+impl ast::Type {
+ pub fn generic_arg_list(&self) -> Option<ast::GenericArgList> {
+ if let ast::Type::PathType(path_type) = self {
+ path_type.path()?.segment()?.generic_arg_list()
+ } else {
+ None
+ }
+ }
+}
+
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum FieldKind {
Name(ast::NameRef),
diff --git a/docs/dev/architecture.md b/docs/dev/architecture.md
index b7d585c..4303a80 100644
--- a/docs/dev/architecture.md
+++ b/docs/dev/architecture.md
@@ -134,29 +134,29 @@
**Architecture Invariant:** Syntax trees are by design incomplete and do not enforce well-formedness.
If an AST method returns an `Option`, it *can* be `None` at runtime, even if this is forbidden by the grammar.
-### `crates/base_db`
+### `crates/base-db`
We use the [salsa](https://github.com/salsa-rs/salsa) crate for incremental and on-demand computation.
Roughly, you can think of salsa as a key-value store, but it can also compute derived values using specified functions.
-The `base_db` crate provides basic infrastructure for interacting with salsa.
+The `base-db` crate provides basic infrastructure for interacting with salsa.
Crucially, it defines most of the "input" queries: facts supplied by the client of the analyzer.
Reading the docs of the `base_db::input` module should be useful: everything else is strictly derived from those inputs.
**Architecture Invariant:** particularities of the build system are *not* part of the ground state.
-In particular, `base_db` knows nothing about cargo.
+In particular, `base-db` knows nothing about cargo.
For example, `cfg` flags are a part of `base_db`, but `feature`s are not.
A `foo` feature is a Cargo-level concept, which is lowered by Cargo to `--cfg feature=foo` argument on the command line.
The `CrateGraph` structure is used to represent the dependencies between the crates abstractly.
-**Architecture Invariant:** `base_db` doesn't know about file system and file paths.
+**Architecture Invariant:** `base-db` doesn't know about file system and file paths.
Files are represented with opaque `FileId`, there's no operation to get an `std::path::Path` out of the `FileId`.
-### `crates/hir_expand`, `crates/hir_def`, `crates/hir_ty`
+### `crates/hir-expand`, `crates/hir-def`, `crates/hir-ty`
These crates are the *brain* of rust-analyzer.
This is the compiler part of the IDE.
-`hir_xxx` crates have a strong [ECS](https://en.wikipedia.org/wiki/Entity_component_system) flavor, in that they work with raw ids and directly query the database.
+`hir-xxx` crates have a strong [ECS](https://en.wikipedia.org/wiki/Entity_component_system) flavor, in that they work with raw ids and directly query the database.
There's little abstraction here.
These crates integrate deeply with salsa and chalk.
@@ -186,7 +186,7 @@
It wraps ECS-style internal API into a more OO-flavored API (with an extra `db` argument for each call).
**Architecture Invariant:** `hir` provides a static, fully resolved view of the code.
-While internal `hir_*` crates _compute_ things, `hir`, from the outside, looks like an inert data structure.
+While internal `hir-*` crates _compute_ things, `hir`, from the outside, looks like an inert data structure.
`hir` also handles the delicate task of going from syntax to the corresponding `hir`.
Remember that the mapping here is one-to-many.
@@ -200,7 +200,7 @@
This is the heart of many IDE features, like goto definition, which start with figuring out the hir node at the cursor.
This is some kind of (yet unnamed) uber-IDE pattern, as it is present in Roslyn and Kotlin as well.
-### `crates/ide`
+### `crates/ide`, `crates/ide-db`, `crates/ide-assists`, `crates/ide-completion`, `crates/ide-diagnostics`, `crates/ide-ssr`
The `ide` crate builds on top of `hir` semantic model to provide high-level IDE features like completion or goto definition.
It is an **API Boundary**.
@@ -217,8 +217,8 @@
`AnalysisHost` is a state to which you can transactionally `apply_change`.
`Analysis` is an immutable snapshot of the state.
-Internally, `ide` is split across several crates. `ide_assists`, `ide_completion` and `ide_ssr` implement large isolated features.
-`ide_db` implements common IDE functionality (notably, reference search is implemented here).
+Internally, `ide` is split across several crates. `ide-assists`, `ide-completion`, `ide-diagnostics` and `ide-ssr` implement large isolated features.
+`ide-db` implements common IDE functionality (notably, reference search is implemented here).
The `ide` contains a public API/façade, as well as implementation for a plethora of smaller features.
**Architecture Invariant:** `ide` crate strives to provide a _perfect_ API.
@@ -251,14 +251,14 @@
**Architecture Invariant:** `rust-analyzer` should be partially available even when the build is broken.
Reloading process should not prevent IDE features from working.
-### `crates/toolchain`, `crates/project_model`, `crates/flycheck`
+### `crates/toolchain`, `crates/project-model`, `crates/flycheck`
These crates deal with invoking `cargo` to learn about project structure and get compiler errors for the "check on save" feature.
-They use `crates/path` heavily instead of `std::path`.
+They use `crates/paths` heavily instead of `std::path`.
A single `rust-analyzer` process can serve many projects, so it is important that the server's current directory does not leak.
-### `crates/mbe`, `crates/tt`, `crates/proc_macro_api`, `crates/proc_macro_srv`
+### `crates/mbe`, `crates/tt`, `crates/proc-macro-api`, `crates/proc-macro-srv`, `crates/proc-macro-srv-cli`
These crates implement macros as token tree -> token tree transforms.
They are independent from the rest of the code.
@@ -268,8 +268,8 @@
And it also handles the actual parsing and expansion of declarative macro (a-la "Macros By Example" or mbe).
For proc macros, a client-server model is used.
-We start a separate process (`proc_macro_srv`) which loads and runs the proc-macros for us.
-And the client (`proc_macro_api`) provides an interface to talk to that server separately.
+We start a separate process (`proc-macro-srv-cli`) which loads and runs the proc-macros for us.
+And the client (`proc-macro-api`) provides an interface to talk to that server separately.
And then token trees are passed from the client, and the server loads the corresponding dynamic library (which is built by `cargo`).
And because the API for getting results from proc macros is always unstable in `rustc`,
@@ -283,7 +283,7 @@
This crate is responsible for parsing, evaluation and general definition of `cfg` attributes.
-### `crates/vfs`, `crates/vfs-notify`
+### `crates/vfs`, `crates/vfs-notify`, `crates/paths`
These crates implement a virtual file system.
They provide consistent snapshots of the underlying file system and insulate messy OS paths.
@@ -301,6 +301,25 @@
This crate contains utilities for CPU and memory profiling.
+### `crates/intern`
+
+This crate contains infrastructure for globally interning things via `Arc`.
+
+### `crates/load-cargo`
+
+This crate exposes several utilities for loading projects, used by the main `rust-analyzer` crate
+and other downstream consumers.
+
+### `crates/rustc-dependencies`
+
+This crate wraps the `rustc_*` crates rust-analyzer relies on and conditionally points them to
+mirrored crates-io releases such that rust-analyzer keeps building on stable.
+
+### `crates/span`
+
+This crate exposes types and functions related to rust-analyzer's span for macros.
+
+A span is effectively a text range relative to some item in a file with a given `SyntaxContext` (hygiene).
## Cross-Cutting Concerns
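To make the new `crates/span` description a bit more concrete, here is a rough, illustrative sketch of what such a span could carry; the type and field names are made up for the example and are not rust-analyzer's actual API:

```rust
// Illustrative only: a span ties a text range to an anchor item in a file,
// plus the hygiene (syntax context) the text was produced under.
#[derive(Debug, Clone, Copy)]
struct FileId(u32);
#[derive(Debug, Clone, Copy)]
struct ErasedAstId(u32);
#[derive(Debug, Clone, Copy)]
struct SyntaxContextId(u32);

#[derive(Debug, Clone, Copy)]
struct SpanAnchor {
    file_id: FileId,
    // the item the range is measured relative to
    ast_id: ErasedAstId,
}

#[derive(Debug, Clone, Copy)]
struct Span {
    // text range relative to the anchor, not to the start of the file
    range: (u32, u32),
    anchor: SpanAnchor,
    // hygiene information for macro expansion
    ctx: SyntaxContextId,
}

fn main() {
    let span = Span {
        range: (4, 12),
        anchor: SpanAnchor { file_id: FileId(0), ast_id: ErasedAstId(1) },
        ctx: SyntaxContextId(0),
    };
    println!("{span:?}");
}
```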
diff --git a/docs/user/generated_config.adoc b/docs/user/generated_config.adoc
index 8a2d080..c3f249e 100644
--- a/docs/user/generated_config.adoc
+++ b/docs/user/generated_config.adoc
@@ -71,6 +71,12 @@
```
.
--
+[[rust-analyzer.cargo.buildScripts.rebuildOnSave]]rust-analyzer.cargo.buildScripts.rebuildOnSave (default: `false`)::
++
+--
+Rerun proc-macro builds and build-script runs when their sources
+change and are saved.
+--
[[rust-analyzer.cargo.buildScripts.useRustcWrapper]]rust-analyzer.cargo.buildScripts.useRustcWrapper (default: `true`)::
+
--
@@ -179,9 +185,9 @@
--
Specifies the working directory for running checks.
- "workspace": run checks for workspaces in the corresponding workspaces' root directories.
- This falls back to "root" if `#rust-analyzer.cargo.check.invocationStrategy#` is set to `once`.
+ This falls back to "root" if `#rust-analyzer.check.invocationStrategy#` is set to `once`.
- "root": run checks in the project's root directory.
-This config only has an effect when `#rust-analyzer.cargo.check.overrideCommand#`
+This config only has an effect when `#rust-analyzer.check.overrideCommand#`
is set.
--
[[rust-analyzer.check.invocationStrategy]]rust-analyzer.check.invocationStrategy (default: `"per_workspace"`)::
@@ -190,7 +196,7 @@
Specifies the invocation strategy to use when running the check command.
If `per_workspace` is set, the command will be executed for each workspace.
If `once` is set, the command will be executed once.
-This config only has an effect when `#rust-analyzer.cargo.check.overrideCommand#`
+This config only has an effect when `#rust-analyzer.check.overrideCommand#`
is set.
--
[[rust-analyzer.check.noDefaultFeatures]]rust-analyzer.check.noDefaultFeatures (default: `null`)::
@@ -215,8 +221,8 @@
If there are multiple linked projects/workspaces, this command is invoked for
each of them, with the working directory being the workspace root
(i.e., the folder containing the `Cargo.toml`). This can be overwritten
-by changing `#rust-analyzer.cargo.check.invocationStrategy#` and
-`#rust-analyzer.cargo.check.invocationLocation#`.
+by changing `#rust-analyzer.check.invocationStrategy#` and
+`#rust-analyzer.check.invocationLocation#`.
An example command would be:
diff --git a/editors/code/package.json b/editors/code/package.json
index 89a7b92..27ed8ac 100644
--- a/editors/code/package.json
+++ b/editors/code/package.json
@@ -586,6 +586,11 @@
"type": "string"
}
},
+ "rust-analyzer.cargo.buildScripts.rebuildOnSave": {
+ "markdownDescription": "Rerun proc-macros building/build-scripts running when proc-macro\nor build-script sources change and are saved.",
+ "default": false,
+ "type": "boolean"
+ },
"rust-analyzer.cargo.buildScripts.useRustcWrapper": {
"markdownDescription": "Use `RUSTC_WRAPPER=rust-analyzer` when running build scripts to\navoid checking unnecessary things.",
"default": true,
@@ -731,7 +736,7 @@
"uniqueItems": true
},
"rust-analyzer.check.invocationLocation": {
- "markdownDescription": "Specifies the working directory for running checks.\n- \"workspace\": run checks for workspaces in the corresponding workspaces' root directories.\n This falls back to \"root\" if `#rust-analyzer.cargo.check.invocationStrategy#` is set to `once`.\n- \"root\": run checks in the project's root directory.\nThis config only has an effect when `#rust-analyzer.cargo.check.overrideCommand#`\nis set.",
+ "markdownDescription": "Specifies the working directory for running checks.\n- \"workspace\": run checks for workspaces in the corresponding workspaces' root directories.\n This falls back to \"root\" if `#rust-analyzer.check.invocationStrategy#` is set to `once`.\n- \"root\": run checks in the project's root directory.\nThis config only has an effect when `#rust-analyzer.check.overrideCommand#`\nis set.",
"default": "workspace",
"type": "string",
"enum": [
@@ -744,7 +749,7 @@
]
},
"rust-analyzer.check.invocationStrategy": {
- "markdownDescription": "Specifies the invocation strategy to use when running the check command.\nIf `per_workspace` is set, the command will be executed for each workspace.\nIf `once` is set, the command will be executed once.\nThis config only has an effect when `#rust-analyzer.cargo.check.overrideCommand#`\nis set.",
+ "markdownDescription": "Specifies the invocation strategy to use when running the check command.\nIf `per_workspace` is set, the command will be executed for each workspace.\nIf `once` is set, the command will be executed once.\nThis config only has an effect when `#rust-analyzer.check.overrideCommand#`\nis set.",
"default": "per_workspace",
"type": "string",
"enum": [
@@ -765,7 +770,7 @@
]
},
"rust-analyzer.check.overrideCommand": {
- "markdownDescription": "Override the command rust-analyzer uses instead of `cargo check` for\ndiagnostics on save. The command is required to output json and\nshould therefore include `--message-format=json` or a similar option\n(if your client supports the `colorDiagnosticOutput` experimental\ncapability, you can use `--message-format=json-diagnostic-rendered-ansi`).\n\nIf you're changing this because you're using some tool wrapping\nCargo, you might also want to change\n`#rust-analyzer.cargo.buildScripts.overrideCommand#`.\n\nIf there are multiple linked projects/workspaces, this command is invoked for\neach of them, with the working directory being the workspace root\n(i.e., the folder containing the `Cargo.toml`). This can be overwritten\nby changing `#rust-analyzer.cargo.check.invocationStrategy#` and\n`#rust-analyzer.cargo.check.invocationLocation#`.\n\nAn example command would be:\n\n```bash\ncargo check --workspace --message-format=json --all-targets\n```\n.",
+ "markdownDescription": "Override the command rust-analyzer uses instead of `cargo check` for\ndiagnostics on save. The command is required to output json and\nshould therefore include `--message-format=json` or a similar option\n(if your client supports the `colorDiagnosticOutput` experimental\ncapability, you can use `--message-format=json-diagnostic-rendered-ansi`).\n\nIf you're changing this because you're using some tool wrapping\nCargo, you might also want to change\n`#rust-analyzer.cargo.buildScripts.overrideCommand#`.\n\nIf there are multiple linked projects/workspaces, this command is invoked for\neach of them, with the working directory being the workspace root\n(i.e., the folder containing the `Cargo.toml`). This can be overwritten\nby changing `#rust-analyzer.check.invocationStrategy#` and\n`#rust-analyzer.check.invocationLocation#`.\n\nAn example command would be:\n\n```bash\ncargo check --workspace --message-format=json --all-targets\n```\n.",
"default": null,
"type": [
"null",
diff --git a/editors/code/src/run.ts b/editors/code/src/run.ts
index 778cbc5..fc3f1ac 100644
--- a/editors/code/src/run.ts
+++ b/editors/code/src/run.ts
@@ -43,7 +43,7 @@
return;
}
- // clear the list before we hook up listeners to to avoid invoking them
+ // clear the list before we hook up listeners to avoid invoking them
// if the user happens to accept the placeholder item
quickPick.items = [];
diff --git a/lib/lsp-server/Cargo.toml b/lib/lsp-server/Cargo.toml
index 14c917a..a89eb4b 100644
--- a/lib/lsp-server/Cargo.toml
+++ b/lib/lsp-server/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "lsp-server"
-version = "0.7.5"
+version = "0.7.6"
description = "Generic LSP server scaffold."
license = "MIT OR Apache-2.0"
repository = "https://github.com/rust-lang/rust-analyzer/tree/master/lib/lsp-server"
@@ -10,7 +10,7 @@
log = "0.4.17"
serde_json = "1.0.108"
serde = { version = "1.0.192", features = ["derive"] }
-crossbeam-channel = "0.5.6"
+crossbeam-channel = "0.5.8"
[dev-dependencies]
lsp-types = "=0.95"
diff --git a/lib/lsp-server/examples/goto_def.rs b/lib/lsp-server/examples/goto_def.rs
index 2f270af..71f6625 100644
--- a/lib/lsp-server/examples/goto_def.rs
+++ b/lib/lsp-server/examples/goto_def.rs
@@ -64,7 +64,15 @@
..Default::default()
})
.unwrap();
- let initialization_params = connection.initialize(server_capabilities)?;
+ let initialization_params = match connection.initialize(server_capabilities) {
+ Ok(it) => it,
+ Err(e) => {
+ if e.channel_is_disconnected() {
+ io_threads.join()?;
+ }
+ return Err(e.into());
+ }
+ };
main_loop(connection, initialization_params)?;
io_threads.join()?;
diff --git a/lib/lsp-server/src/error.rs b/lib/lsp-server/src/error.rs
index 755b3fd..ebdd153 100644
--- a/lib/lsp-server/src/error.rs
+++ b/lib/lsp-server/src/error.rs
@@ -3,7 +3,22 @@
use crate::{Notification, Request};
#[derive(Debug, Clone, PartialEq)]
-pub struct ProtocolError(pub(crate) String);
+pub struct ProtocolError(String, bool);
+
+impl ProtocolError {
+ pub(crate) fn new(msg: impl Into<String>) -> Self {
+ ProtocolError(msg.into(), false)
+ }
+
+ pub(crate) fn disconnected() -> ProtocolError {
+ ProtocolError("disconnected channel".into(), true)
+ }
+
+ /// Whether this error occurred due to a disconnected channel.
+ pub fn channel_is_disconnected(&self) -> bool {
+ self.1
+ }
+}
impl std::error::Error for ProtocolError {}
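For context, a self-contained sketch of the new `ProtocolError` pattern together with a caller-side check; the `Display` impl and messages below are illustrative, the real crate defines its own:

```rust
use std::{error::Error, fmt};

// An error that carries a flag so callers can distinguish "the peer hung up"
// from genuine protocol violations.
#[derive(Debug)]
pub struct ProtocolError(String, bool);

impl ProtocolError {
    fn new(msg: impl Into<String>) -> Self {
        ProtocolError(msg.into(), false)
    }
    fn disconnected() -> Self {
        ProtocolError("disconnected channel".into(), true)
    }
    pub fn channel_is_disconnected(&self) -> bool {
        self.1
    }
}

impl fmt::Display for ProtocolError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(&self.0)
    }
}

impl Error for ProtocolError {}

fn main() {
    let err = ProtocolError::disconnected();
    // A server can join its IO threads for a clean shutdown before bubbling
    // the error up, instead of treating every failure identically.
    if err.channel_is_disconnected() {
        eprintln!("client went away: {err}");
    }
    let _ = ProtocolError::new("expected initialize request, got something else");
}
```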
diff --git a/lib/lsp-server/src/lib.rs b/lib/lsp-server/src/lib.rs
index 2797a6b..6b732d4 100644
--- a/lib/lsp-server/src/lib.rs
+++ b/lib/lsp-server/src/lib.rs
@@ -17,7 +17,7 @@
net::{TcpListener, TcpStream, ToSocketAddrs},
};
-use crossbeam_channel::{Receiver, RecvTimeoutError, Sender};
+use crossbeam_channel::{Receiver, RecvError, RecvTimeoutError, Sender};
pub use crate::{
error::{ExtractError, ProtocolError},
@@ -158,11 +158,7 @@
Err(RecvTimeoutError::Timeout) => {
continue;
}
- Err(e) => {
- return Err(ProtocolError(format!(
- "expected initialize request, got error: {e}"
- )))
- }
+ Err(RecvTimeoutError::Disconnected) => return Err(ProtocolError::disconnected()),
};
match msg {
@@ -181,12 +177,14 @@
continue;
}
msg => {
- return Err(ProtocolError(format!("expected initialize request, got {msg:?}")));
+ return Err(ProtocolError::new(format!(
+ "expected initialize request, got {msg:?}"
+ )));
}
};
}
- return Err(ProtocolError(String::from(
+ return Err(ProtocolError::new(String::from(
"Initialization has been aborted during initialization",
)));
}
@@ -201,12 +199,10 @@
self.sender.send(resp.into()).unwrap();
match &self.receiver.recv() {
Ok(Message::Notification(n)) if n.is_initialized() => Ok(()),
- Ok(msg) => {
- Err(ProtocolError(format!(r#"expected initialized notification, got: {msg:?}"#)))
- }
- Err(e) => {
- Err(ProtocolError(format!("expected initialized notification, got error: {e}",)))
- }
+ Ok(msg) => Err(ProtocolError::new(format!(
+ r#"expected initialized notification, got: {msg:?}"#
+ ))),
+ Err(RecvError) => Err(ProtocolError::disconnected()),
}
}
@@ -231,10 +227,8 @@
Err(RecvTimeoutError::Timeout) => {
continue;
}
- Err(e) => {
- return Err(ProtocolError(format!(
- "expected initialized notification, got error: {e}",
- )));
+ Err(RecvTimeoutError::Disconnected) => {
+ return Err(ProtocolError::disconnected());
}
};
@@ -243,14 +237,14 @@
return Ok(());
}
msg => {
- return Err(ProtocolError(format!(
+ return Err(ProtocolError::new(format!(
r#"expected initialized notification, got: {msg:?}"#
)));
}
}
}
- return Err(ProtocolError(String::from(
+ return Err(ProtocolError::new(String::from(
"Initialization has been aborted during initialization",
)));
}
@@ -359,9 +353,18 @@
match &self.receiver.recv_timeout(std::time::Duration::from_secs(30)) {
Ok(Message::Notification(n)) if n.is_exit() => (),
Ok(msg) => {
- return Err(ProtocolError(format!("unexpected message during shutdown: {msg:?}")))
+ return Err(ProtocolError::new(format!(
+ "unexpected message during shutdown: {msg:?}"
+ )))
}
- Err(e) => return Err(ProtocolError(format!("unexpected error during shutdown: {e}"))),
+ Err(RecvTimeoutError::Timeout) => {
+ return Err(ProtocolError::new(format!("timed out waiting for exit notification")))
+ }
+ Err(RecvTimeoutError::Disconnected) => {
+ return Err(ProtocolError::new(format!(
+ "channel disconnected waiting for exit notification"
+ )))
+ }
}
Ok(true)
}
@@ -426,7 +429,7 @@
initialize_start_test(TestCase {
test_messages: vec![notification_msg.clone()],
- expected_resp: Err(ProtocolError(format!(
+ expected_resp: Err(ProtocolError::new(format!(
"expected initialize request, got {:?}",
notification_msg
))),
diff --git a/lib/lsp-server/src/msg.rs b/lib/lsp-server/src/msg.rs
index 730ad51..ba318dd 100644
--- a/lib/lsp-server/src/msg.rs
+++ b/lib/lsp-server/src/msg.rs
@@ -264,12 +264,12 @@
let mut parts = buf.splitn(2, ": ");
let header_name = parts.next().unwrap();
let header_value =
- parts.next().ok_or_else(|| invalid_data!("malformed header: {:?}", buf))?;
+ parts.next().ok_or_else(|| invalid_data(format!("malformed header: {:?}", buf)))?;
if header_name.eq_ignore_ascii_case("Content-Length") {
size = Some(header_value.parse::<usize>().map_err(invalid_data)?);
}
}
- let size: usize = size.ok_or_else(|| invalid_data!("no Content-Length"))?;
+ let size: usize = size.ok_or_else(|| invalid_data("no Content-Length".to_string()))?;
let mut buf = buf.into_bytes();
buf.resize(size, 0);
inp.read_exact(&mut buf)?;
diff --git a/lib/lsp-server/src/stdio.rs b/lib/lsp-server/src/stdio.rs
index e487b9b4..cea199d 100644
--- a/lib/lsp-server/src/stdio.rs
+++ b/lib/lsp-server/src/stdio.rs
@@ -15,8 +15,7 @@
let writer = thread::spawn(move || {
let stdout = stdout();
let mut stdout = stdout.lock();
- writer_receiver.into_iter().try_for_each(|it| it.write(&mut stdout))?;
- Ok(())
+ writer_receiver.into_iter().try_for_each(|it| it.write(&mut stdout))
});
let (reader_sender, reader_receiver) = bounded::<Message>(0);
let reader = thread::spawn(move || {