Auto merge of #141763 - lcnr:fixme-gamer, r=BoxyUwU
`FIXME(-Znext-solver)` triage
r? `@BoxyUwU`
diff --git a/.github/workflows/autopublish.yaml b/.github/workflows/autopublish.yaml
index d3f0499..6e2be7f 100644
--- a/.github/workflows/autopublish.yaml
+++ b/.github/workflows/autopublish.yaml
@@ -28,7 +28,7 @@
run: rustup update --no-self-update stable
- name: Install cargo-workspaces
- run: cargo install cargo-workspaces
+ run: cargo install cargo-workspaces --version "0.3.6"
- name: Publish Crates
env:
@@ -54,8 +54,8 @@
cargo workspaces rename --from project-model project_model
cargo workspaces rename --from test-fixture test_fixture
cargo workspaces rename --from test-utils test_utils
- # Remove library crates from the workspaces so we don't auto-publish them as well
- sed -i 's/ "lib\/\*",//' ./Cargo.toml
+ # Remove library crates and xtask from the workspaces so we don't auto-publish them as well
+ sed -i 's|^members = .*$|members = ["crates/*"]|' Cargo.toml
cargo workspaces rename ra_ap_%n
find crates/rust-analyzer -type f -name '*.rs' -exec sed -i 's/rust_analyzer/ra_ap_rust_analyzer/g' {} +
cargo workspaces publish --yes --force '*' --exact --no-git-commit --allow-dirty --skip-published custom 0.0.$(($RUN_NUMBER + 133))
diff --git a/.github/workflows/publish-libs.yaml b/.github/workflows/publish-libs.yaml
index 93ae567..f2c8b63 100644
--- a/.github/workflows/publish-libs.yaml
+++ b/.github/workflows/publish-libs.yaml
@@ -22,7 +22,7 @@
run: rustup update --no-self-update stable
- name: Install cargo-workspaces
- run: cargo install cargo-workspaces
+ run: cargo install cargo-workspaces --version "0.3.6"
- name: Publish Crates
env:
diff --git a/Cargo.toml b/Cargo.toml
index 8c50718..975fe27 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -21,6 +21,8 @@
text-size.opt-level = 3
serde.opt-level = 3
salsa.opt-level = 3
+dissimilar.opt-level = 3
+
# This speeds up `cargo xtask dist`.
miniz_oxide.opt-level = 3
diff --git a/crates/hir-def/src/dyn_map.rs b/crates/hir-def/src/dyn_map.rs
index eed1490..20018b6 100644
--- a/crates/hir-def/src/dyn_map.rs
+++ b/crates/hir-def/src/dyn_map.rs
@@ -67,8 +67,14 @@
pub const PROC_MACRO: Key<ast::Fn, ProcMacroId> = Key::new();
pub const MACRO_CALL: Key<ast::MacroCall, MacroCallId> = Key::new();
pub const ATTR_MACRO_CALL: Key<ast::Item, MacroCallId> = Key::new();
- pub const DERIVE_MACRO_CALL: Key<ast::Attr, (AttrId, MacroCallId, Box<[Option<MacroCallId>]>)> =
- Key::new();
+ pub const DERIVE_MACRO_CALL: Key<
+ ast::Attr,
+ (
+ AttrId,
+ /* derive() */ MacroCallId,
+ /* actual derive macros */ Box<[Option<MacroCallId>]>,
+ ),
+ > = Key::new();
/// XXX: AST Nodes and SyntaxNodes have identity equality semantics: nodes are
/// equal if they point to exactly the same object.
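
For context on the widened key: each `DERIVE_MACRO_CALL` entry now carries the attribute id, the call id of the pseudo `derive()` expansion, and one optional call id per derive macro in the list (unresolved derives stay `None`). A minimal sketch of consuming such an entry, with plain integer stand-ins for `AttrId`/`MacroCallId` (illustrative only, not the real API):

```rust
// Plain integer stand-ins for AttrId/MacroCallId (illustrative only).
type AttrId = u32;
type MacroCallId = u32;

// One entry per `#[derive(...)]` attribute on an ADT: the attribute id, the
// pseudo `derive()` expansion, and one slot per derive macro in the list.
type DeriveEntry = (AttrId, MacroCallId, Box<[Option<MacroCallId>]>);

// Yields every expansion recorded for the entry, skipping unresolved derives.
fn expansions_of(entry: &DeriveEntry) -> impl Iterator<Item = MacroCallId> + '_ {
    let (_attr, derive_attr_call, derives) = entry;
    std::iter::once(*derive_attr_call).chain(derives.iter().copied().flatten())
}

fn main() {
    let entry: DeriveEntry = (0, 100, vec![Some(101), None, Some(103)].into_boxed_slice());
    assert_eq!(expansions_of(&entry).collect::<Vec<_>>(), vec![100, 101, 103]);
}
```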
diff --git a/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs b/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs
index cb4fcd8..2cc3ca8 100644
--- a/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs
+++ b/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs
@@ -931,11 +931,12 @@
// PATH_TYPE@23..26
// PATH@23..26
// PATH_SEGMENT@23..26
-// L_ANGLE@23..24 "<"
-// PAREN_TYPE@24..26
-// L_PAREN@24..25 "("
-// ERROR@25..26
-// INT_NUMBER@25..26 "8"
+// TYPE_ANCHOR@23..26
+// L_ANGLE@23..24 "<"
+// PAREN_TYPE@24..26
+// L_PAREN@24..25 "("
+// ERROR@25..26
+// INT_NUMBER@25..26 "8"
// PLUS@26..27 "+"
// CONST_ARG@27..28
// LITERAL@27..28
diff --git a/crates/hir-expand/src/files.rs b/crates/hir-expand/src/files.rs
index 321ee8f..8024823 100644
--- a/crates/hir-expand/src/files.rs
+++ b/crates/hir-expand/src/files.rs
@@ -42,6 +42,49 @@
FilePositionWrapper { file_id: self.file_id.file_id(db), offset: self.offset }
}
}
+
+impl From<FileRange> for HirFileRange {
+ fn from(value: FileRange) -> Self {
+ HirFileRange { file_id: value.file_id.into(), range: value.range }
+ }
+}
+
+impl From<FilePosition> for HirFilePosition {
+ fn from(value: FilePosition) -> Self {
+ HirFilePosition { file_id: value.file_id.into(), offset: value.offset }
+ }
+}
+
+impl FilePositionWrapper<span::FileId> {
+ pub fn with_edition(self, db: &dyn ExpandDatabase, edition: span::Edition) -> FilePosition {
+ FilePositionWrapper {
+ file_id: EditionedFileId::new(db, self.file_id, edition),
+ offset: self.offset,
+ }
+ }
+}
+
+impl FileRangeWrapper<span::FileId> {
+ pub fn with_edition(self, db: &dyn ExpandDatabase, edition: span::Edition) -> FileRange {
+ FileRangeWrapper {
+ file_id: EditionedFileId::new(db, self.file_id, edition),
+ range: self.range,
+ }
+ }
+}
+
+impl<T> InFileWrapper<span::FileId, T> {
+ pub fn with_edition(self, db: &dyn ExpandDatabase, edition: span::Edition) -> InRealFile<T> {
+ InRealFile { file_id: EditionedFileId::new(db, self.file_id, edition), value: self.value }
+ }
+}
+
+impl HirFileRange {
+ pub fn file_range(self) -> Option<FileRange> {
+ Some(FileRange { file_id: self.file_id.file_id()?, range: self.range })
+ }
+}
+
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
pub struct FileRangeWrapper<FileKind> {
pub file_id: FileKind,
@@ -194,6 +237,9 @@
pub fn syntax(&self) -> InFileWrapper<FileId, &SyntaxNode> {
self.with_value(self.value.syntax())
}
+ pub fn node_file_range(&self) -> FileRangeWrapper<FileId> {
+ FileRangeWrapper { file_id: self.file_id, range: self.value.syntax().text_range() }
+ }
}
impl<FileId: Copy, N: AstNode> InFileWrapper<FileId, &N> {
@@ -204,9 +250,9 @@
}
// region:specific impls
-impl<SN: Borrow<SyntaxNode>> InRealFile<SN> {
- pub fn file_range(&self) -> FileRange {
- FileRange { file_id: self.file_id, range: self.value.borrow().text_range() }
+impl<FileId: Copy, SN: Borrow<SyntaxNode>> InFileWrapper<FileId, SN> {
+ pub fn file_range(&self) -> FileRangeWrapper<FileId> {
+ FileRangeWrapper { file_id: self.file_id, range: self.value.borrow().text_range() }
}
}
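
The `file_range` accessor moves from `InRealFile<SN>` to the fully generic `InFileWrapper<FileId, SN>`: its body only needs a `Copy` file id and a borrowable syntax node, so macro files get it for free. A self-contained sketch of why the generalization is free, with toy stand-ins for the wrapper types (simplified, not the real definitions):

```rust
use std::borrow::Borrow;

// Toy stand-ins for the wrapper types (simplified; not the real definitions).
#[derive(Clone, Copy, Debug, PartialEq)]
struct SyntaxNode { start: u32, end: u32 }

struct InFileWrapper<FileKind, T> { file_id: FileKind, value: T }
struct FileRangeWrapper<FileKind> { file_id: FileKind, range: (u32, u32) }

// Nothing in the body cares which concrete file-id kind we have, so the impl
// can be generic over `FileId: Copy` instead of being tied to real files.
impl<FileId: Copy, SN: Borrow<SyntaxNode>> InFileWrapper<FileId, SN> {
    fn file_range(&self) -> FileRangeWrapper<FileId> {
        let node = self.value.borrow();
        FileRangeWrapper { file_id: self.file_id, range: (node.start, node.end) }
    }
}

fn main() {
    let node = SyntaxNode { start: 0, end: 5 };
    // Works with owned or borrowed nodes, and any Copy file-id kind.
    let in_file = InFileWrapper { file_id: 42u32, value: &node };
    let fr = in_file.file_range();
    assert_eq!((fr.file_id, fr.range), (42, (0, 5)));
}
```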
diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs
index d844d8f..6ecac14 100644
--- a/crates/hir-expand/src/lib.rs
+++ b/crates/hir-expand/src/lib.rs
@@ -392,6 +392,10 @@
}
}
+ pub fn call_node(self, db: &dyn ExpandDatabase) -> Option<InFile<SyntaxNode>> {
+ Some(db.lookup_intern_macro_call(self.macro_file()?).to_node(db))
+ }
+
pub fn as_builtin_derive_attr_node(
&self,
db: &dyn ExpandDatabase,
@@ -848,7 +852,10 @@
map_node_range_up(db, &self.exp_map, range)
}
- /// Maps up the text range out of the expansion into is macro call.
+ /// Maps up the text range out of the expansion into its macro call.
+ ///
+ /// Note that this may return multiple ranges as we lose the precise association between input and output,
+ /// and as such we may consider inputs that are unrelated.
pub fn map_range_up_once(
&self,
db: &dyn ExpandDatabase,
@@ -864,11 +871,10 @@
InFile { file_id, value: smallvec::smallvec![span.range + anchor_offset] }
}
SpanMap::ExpansionSpanMap(arg_map) => {
- let arg_range = self
- .arg
- .value
- .as_ref()
- .map_or_else(|| TextRange::empty(TextSize::from(0)), |it| it.text_range());
+ let Some(arg_node) = &self.arg.value else {
+ return InFile::new(self.arg.file_id, smallvec::smallvec![]);
+ };
+ let arg_range = arg_node.text_range();
InFile::new(
self.arg.file_id,
arg_map
diff --git a/crates/hir-expand/src/prettify_macro_expansion_.rs b/crates/hir-expand/src/prettify_macro_expansion_.rs
index 11cc434..6134c3a 100644
--- a/crates/hir-expand/src/prettify_macro_expansion_.rs
+++ b/crates/hir-expand/src/prettify_macro_expansion_.rs
@@ -20,42 +20,46 @@
let span_offset = syn.text_range().start();
let target_crate = target_crate_id.data(db);
let mut syntax_ctx_id_to_dollar_crate_replacement = FxHashMap::default();
- syntax_bridge::prettify_macro_expansion::prettify_macro_expansion(syn, &mut |dollar_crate| {
- let ctx = span_map.span_at(dollar_crate.text_range().start() + span_offset).ctx;
- let replacement =
- syntax_ctx_id_to_dollar_crate_replacement.entry(ctx).or_insert_with(|| {
- let macro_call_id =
- ctx.outer_expn(db).expect("`$crate` cannot come from `SyntaxContextId::ROOT`");
- let macro_call = db.lookup_intern_macro_call(macro_call_id.into());
- let macro_def_crate = macro_call.def.krate;
- // First, if this is the same crate as the macro, nothing will work but `crate`.
- // If not, if the target trait has the macro's crate as a dependency, using the dependency name
- // will work in inserted code and match the user's expectation.
- // If not, the crate's display name is what the dependency name is likely to be once such dependency
- // is inserted, and also understandable to the user.
- // Lastly, if nothing else found, resort to leaving `$crate`.
- if target_crate_id == macro_def_crate {
- make::tokens::crate_kw()
- } else if let Some(dep) =
- target_crate.dependencies.iter().find(|dep| dep.crate_id == macro_def_crate)
- {
- make::tokens::ident(dep.name.as_str())
- } else if let Some(crate_name) = &macro_def_crate.extra_data(db).display_name {
- make::tokens::ident(crate_name.crate_name().as_str())
- } else {
- return dollar_crate.clone();
- }
- });
- if replacement.text() == "$crate" {
- // The parent may have many children, and looking for the token may yield incorrect results.
- return dollar_crate.clone();
- }
- // We need to `clone_subtree()` but rowan doesn't provide such operation for tokens.
- let parent = replacement.parent().unwrap().clone_subtree().clone_for_update();
- parent
- .children_with_tokens()
- .filter_map(NodeOrToken::into_token)
- .find(|it| it.kind() == replacement.kind())
- .unwrap()
- })
+ syntax_bridge::prettify_macro_expansion::prettify_macro_expansion(
+ syn,
+ &mut |dollar_crate| {
+ let ctx = span_map.span_at(dollar_crate.text_range().start() + span_offset).ctx;
+ let replacement =
+ syntax_ctx_id_to_dollar_crate_replacement.entry(ctx).or_insert_with(|| {
+ let macro_call_id = ctx
+ .outer_expn(db)
+ .expect("`$crate` cannot come from `SyntaxContextId::ROOT`");
+ let macro_call = db.lookup_intern_macro_call(macro_call_id.into());
+ let macro_def_crate = macro_call.def.krate;
+ // First, if this is the same crate as the macro, nothing will work but `crate`.
+ // If not, if the target trait has the macro's crate as a dependency, using the dependency name
+ // will work in inserted code and match the user's expectation.
+ // If not, the crate's display name is what the dependency name is likely to be once such dependency
+ // is inserted, and also understandable to the user.
+ // Lastly, if nothing else found, resort to leaving `$crate`.
+ if target_crate_id == macro_def_crate {
+ make::tokens::crate_kw()
+ } else if let Some(dep) =
+ target_crate.dependencies.iter().find(|dep| dep.crate_id == macro_def_crate)
+ {
+ make::tokens::ident(dep.name.as_str())
+ } else if let Some(crate_name) = &macro_def_crate.extra_data(db).display_name {
+ make::tokens::ident(crate_name.crate_name().as_str())
+ } else {
+ return dollar_crate.clone();
+ }
+ });
+ if replacement.text() == "$crate" {
+ // The parent may have many children, and looking for the token may yield incorrect results.
+ return None;
+ }
+ // We need to `clone_subtree()` but rowan doesn't provide such operation for tokens.
+ let parent = replacement.parent().unwrap().clone_subtree().clone_for_update();
+ parent
+ .children_with_tokens()
+ .filter_map(NodeOrToken::into_token)
+ .find(|it| it.kind() == replacement.kind())
+ },
+ |_| (),
+ )
}
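
The comment block above spells out the `$crate` replacement fallback chain: same crate as the macro → `crate`; the target depends on the macro's crate → the dependency name; a known display name → that name; otherwise leave `$crate` untouched. A standalone model of that chain over plain data (all types here are simplified stand-ins for the hir/rowan ones):

```rust
// Minimal model of the `$crate` replacement fallback chain.
struct Dep { crate_id: u32, name: &'static str }
struct Crate { id: u32, deps: Vec<Dep>, display_name: Option<&'static str> }

fn dollar_crate_replacement(target: &Crate, macro_def_crate: &Crate) -> String {
    if target.id == macro_def_crate.id {
        // Same crate as the macro: only `crate` can work.
        "crate".to_owned()
    } else if let Some(dep) = target.deps.iter().find(|d| d.crate_id == macro_def_crate.id) {
        // The target depends on the macro's crate: the dependency name will
        // work in inserted code and match the user's expectation.
        dep.name.to_owned()
    } else if let Some(name) = macro_def_crate.display_name {
        // Likely the dependency name once such a dependency is inserted.
        name.to_owned()
    } else {
        // Nothing better found: leave `$crate` as-is.
        "$crate".to_owned()
    }
}

fn main() {
    let serde = Crate { id: 1, deps: vec![], display_name: Some("serde") };
    let target = Crate { id: 2, deps: vec![], display_name: None };
    assert_eq!(dollar_crate_replacement(&target, &serde), "serde");
}
```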
diff --git a/crates/hir-ty/src/db.rs b/crates/hir-ty/src/db.rs
index 980ee26..1e985dc 100644
--- a/crates/hir-ty/src/db.rs
+++ b/crates/hir-ty/src/db.rs
@@ -31,6 +31,7 @@
#[query_group::query_group]
pub trait HirDatabase: DefDatabase + std::fmt::Debug {
#[salsa::invoke(crate::infer::infer_query)]
+ #[salsa::cycle(cycle_result = crate::infer::infer_cycle_result)]
fn infer(&self, def: DefWithBodyId) -> Arc<InferenceResult>;
// region:mir
@@ -132,6 +133,7 @@
// FIXME: Make this a non-interned query.
#[salsa::invoke_interned(crate::lower::const_param_ty_with_diagnostics_query)]
+ #[salsa::cycle(cycle_result = crate::lower::const_param_ty_with_diagnostics_cycle_result)]
fn const_param_ty_with_diagnostics(&self, def: ConstParamId) -> (Ty, Diagnostics);
#[salsa::invoke(crate::lower::const_param_ty_query)]
diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs
index e698fb2..14eb716 100644
--- a/crates/hir-ty/src/infer.rs
+++ b/crates/hir-ty/src/infer.rs
@@ -35,7 +35,8 @@
use either::Either;
use hir_def::{
AdtId, AssocItemId, ConstId, DefWithBodyId, FieldId, FunctionId, GenericDefId, GenericParamId,
- ImplId, ItemContainerId, Lookup, TraitId, TupleFieldId, TupleId, TypeAliasId, VariantId,
+ ImplId, ItemContainerId, LocalFieldId, Lookup, TraitId, TupleFieldId, TupleId, TypeAliasId,
+ VariantId,
builtin_type::{BuiltinInt, BuiltinType, BuiltinUint},
expr_store::{Body, ExpressionStore, HygieneId, path::Path},
hir::{BindingAnnotation, BindingId, ExprId, ExprOrPatId, LabelId, PatId},
@@ -135,6 +136,10 @@
Arc::new(ctx.resolve_all())
}
+pub(crate) fn infer_cycle_result(_: &dyn HirDatabase, _: DefWithBodyId) -> Arc<InferenceResult> {
+ Arc::new(InferenceResult { has_errors: true, ..Default::default() })
+}
+
/// Fully normalize all the types found within `ty` in context of `owner` body definition.
///
/// This is appropriate to use only after type-check: it assumes
@@ -203,7 +208,7 @@
pub enum InferenceDiagnostic {
NoSuchField {
field: ExprOrPatId,
- private: bool,
+ private: Option<LocalFieldId>,
variant: VariantId,
},
PrivateField {
@@ -558,6 +563,9 @@
ExprOrPatId::PatId(id) => self.type_of_pat.get(id),
}
}
+ pub fn is_erroneous(&self) -> bool {
+ self.has_errors && self.type_of_expr.iter().count() == 0
+ }
}
impl Index<ExprId> for InferenceResult {
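
`infer_cycle_result` is the salsa cycle fallback for `infer`: when the query participates in a cycle, it returns an empty `InferenceResult` flagged `has_errors: true` instead of panicking, and `is_erroneous` later lets MIR lowering refuse such bodies. A toy memoization sketch of the same cycle-fallback idea (this only models the concept behind `#[salsa::cycle(cycle_result = ...)]`; salsa's real machinery differs):

```rust
use std::collections::HashMap;

// Toy demand-driven query engine with a fallback result on cycles.
#[derive(Clone, Copy)]
enum Slot { InProgress, Done(i64) }

struct Db {
    deps: HashMap<&'static str, Vec<&'static str>>,
    memo: HashMap<&'static str, Slot>,
}

impl Db {
    fn infer(&mut self, def: &'static str) -> i64 {
        match self.memo.get(def) {
            Some(Slot::Done(v)) => return *v,
            // Re-entered while still computing: we are in a cycle, so hand
            // back the fallback "erroneous" result instead of recursing.
            Some(Slot::InProgress) => return -1,
            None => {}
        }
        self.memo.insert(def, Slot::InProgress);
        let deps = self.deps.get(def).cloned().unwrap_or_default();
        let v = deps.into_iter().map(|d| self.infer(d)).sum::<i64>() + 1;
        self.memo.insert(def, Slot::Done(v));
        v
    }
}

fn main() {
    let mut db = Db { deps: HashMap::new(), memo: HashMap::new() };
    db.deps.insert("a", vec!["b"]);
    db.deps.insert("b", vec!["a"]); // `a` and `b` form a query cycle
    let _ = db.infer("a"); // terminates thanks to the fallback result
}
```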
diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs
index 87b7f34..6403127 100644
--- a/crates/hir-ty/src/infer/expr.rs
+++ b/crates/hir-ty/src/infer/expr.rs
@@ -554,7 +554,7 @@
self.push_diagnostic(
InferenceDiagnostic::NoSuchField {
field: field.expr.into(),
- private: true,
+ private: Some(local_id),
variant: def,
},
);
@@ -564,7 +564,7 @@
None => {
self.push_diagnostic(InferenceDiagnostic::NoSuchField {
field: field.expr.into(),
- private: false,
+ private: None,
variant: def,
});
None
diff --git a/crates/hir-ty/src/infer/pat.rs b/crates/hir-ty/src/infer/pat.rs
index a9a3265..4bc3e16 100644
--- a/crates/hir-ty/src/infer/pat.rs
+++ b/crates/hir-ty/src/infer/pat.rs
@@ -143,7 +143,7 @@
{
self.push_diagnostic(InferenceDiagnostic::NoSuchField {
field: inner.into(),
- private: true,
+ private: Some(local_id),
variant: def,
});
}
@@ -157,7 +157,7 @@
None => {
self.push_diagnostic(InferenceDiagnostic::NoSuchField {
field: inner.into(),
- private: false,
+ private: None,
variant: def,
});
self.err_ty()
diff --git a/crates/hir-ty/src/lower.rs b/crates/hir-ty/src/lower.rs
index ea8e7cc..0a54676 100644
--- a/crates/hir-ty/src/lower.rs
+++ b/crates/hir-ty/src/lower.rs
@@ -1604,6 +1604,14 @@
)
}
+pub(crate) fn impl_self_ty_with_diagnostics_cycle_result(
+ db: &dyn HirDatabase,
+ impl_id: ImplId,
+) -> (Binders<Ty>, Diagnostics) {
+ let generics = generics(db, impl_id.into());
+ (make_binders(db, &generics, TyKind::Error.intern(Interner)), None)
+}
+
pub(crate) fn const_param_ty_query(db: &dyn HirDatabase, def: ConstParamId) -> Ty {
db.const_param_ty_with_diagnostics(def).0
}
@@ -1633,12 +1641,12 @@
(ty, create_diagnostics(ctx.diagnostics))
}
-pub(crate) fn impl_self_ty_with_diagnostics_cycle_result(
- db: &dyn HirDatabase,
- impl_id: ImplId,
-) -> (Binders<Ty>, Diagnostics) {
- let generics = generics(db, impl_id.into());
- (make_binders(db, &generics, TyKind::Error.intern(Interner)), None)
+pub(crate) fn const_param_ty_with_diagnostics_cycle_result(
+ _: &dyn HirDatabase,
+ _: crate::db::HirDatabaseData,
+ _: ConstParamId,
+) -> (Ty, Diagnostics) {
+ (TyKind::Error.intern(Interner), None)
}
pub(crate) fn impl_trait_query(db: &dyn HirDatabase, impl_id: ImplId) -> Option<Binders<TraitRef>> {
diff --git a/crates/hir-ty/src/mir/lower.rs b/crates/hir-ty/src/mir/lower.rs
index e6caf2d..99d9351 100644
--- a/crates/hir-ty/src/mir/lower.rs
+++ b/crates/hir-ty/src/mir/lower.rs
@@ -2182,7 +2182,7 @@
// need to take this input explicitly.
root_expr: ExprId,
) -> Result<MirBody> {
- if infer.type_mismatches().next().is_some() {
+ if infer.type_mismatches().next().is_some() || infer.is_erroneous() {
return Err(MirLowerError::HasErrors);
}
let mut ctx = MirLowerCtx::new(db, owner, body, infer);
diff --git a/crates/hir-ty/src/tests/incremental.rs b/crates/hir-ty/src/tests/incremental.rs
index 48474d2..e8e3812 100644
--- a/crates/hir-ty/src/tests/incremental.rs
+++ b/crates/hir-ty/src/tests/incremental.rs
@@ -106,3 +106,256 @@
assert_eq!(format!("{events:?}").matches("infer_shim").count(), 1, "{events:#?}")
}
}
+
+#[test]
+fn adding_struct_invalidates_infer() {
+ let (mut db, pos) = TestDB::with_position(
+ "
+//- /lib.rs
+fn foo() -> i32 {
+ 1 + 1
+}
+
+fn bar() -> f32 {
+ 2.0 * 3.0
+}
+$0",
+ );
+ {
+ let events = db.log_executed(|| {
+ let module = db.module_for_file(pos.file_id.file_id(&db));
+ let _crate_def_map = module.def_map(&db);
+ db.trait_impls_in_crate(module.krate());
+ });
+ assert!(format!("{events:?}").contains("trait_impls_in_crate_shim"))
+ }
+
+ let new_text = "
+fn foo() -> i32 {
+ 1 + 1
+}
+
+fn bar() -> f32 {
+ 2.0 * 3.0
+}
+
+pub struct NewStruct {
+ field: i32,
+}
+";
+
+ db.set_file_text(pos.file_id.file_id(&db), new_text);
+
+ {
+ let actual = db.log_executed(|| {
+ let module = db.module_for_file(pos.file_id.file_id(&db));
+ let _crate_def_map = module.def_map(&db);
+ db.trait_impls_in_crate(module.krate());
+ });
+
+ let expected = vec![
+ "parse_shim".to_owned(),
+ "ast_id_map_shim".to_owned(),
+ "file_item_tree_shim".to_owned(),
+ "real_span_map_shim".to_owned(),
+ "crate_local_def_map".to_owned(),
+ "trait_impls_in_crate_shim".to_owned(),
+ ];
+
+ assert_eq!(expected, actual);
+ }
+}
+
+#[test]
+fn adding_enum_query_log() {
+ let (mut db, pos) = TestDB::with_position(
+ "
+//- /lib.rs
+fn foo() -> i32 {
+ 1 + 1
+}
+
+fn bar() -> f32 {
+ 2.0 * 3.0
+}
+$0",
+ );
+ {
+ let events = db.log_executed(|| {
+ let module = db.module_for_file(pos.file_id.file_id(&db));
+ let _crate_def_map = module.def_map(&db);
+ db.trait_impls_in_crate(module.krate());
+ });
+ assert!(format!("{events:?}").contains("trait_impls_in_crate_shim"))
+ }
+
+ let new_text = "
+fn foo() -> i32 {
+ 1 + 1
+}
+
+fn bar() -> f32 {
+ 2.0 * 3.0
+}
+
+pub enum SomeEnum {
+ A,
+ B
+}
+";
+
+ db.set_file_text(pos.file_id.file_id(&db), new_text);
+
+ {
+ let actual = db.log_executed(|| {
+ let module = db.module_for_file(pos.file_id.file_id(&db));
+ let _crate_def_map = module.def_map(&db);
+ db.trait_impls_in_crate(module.krate());
+ });
+
+ let expected = vec![
+ "parse_shim".to_owned(),
+ "ast_id_map_shim".to_owned(),
+ "file_item_tree_shim".to_owned(),
+ "real_span_map_shim".to_owned(),
+ "crate_local_def_map".to_owned(),
+ "trait_impls_in_crate_shim".to_owned(),
+ ];
+
+ assert_eq!(expected, actual);
+ }
+}
+
+#[test]
+fn adding_use_query_log() {
+ let (mut db, pos) = TestDB::with_position(
+ "
+//- /lib.rs
+fn foo() -> i32 {
+ 1 + 1
+}
+
+fn bar() -> f32 {
+ 2.0 * 3.0
+}
+$0",
+ );
+ {
+ let events = db.log_executed(|| {
+ let module = db.module_for_file(pos.file_id.file_id(&db));
+ let _crate_def_map = module.def_map(&db);
+ db.trait_impls_in_crate(module.krate());
+ });
+ assert!(format!("{events:?}").contains("trait_impls_in_crate_shim"))
+ }
+
+ let new_text = "
+use std::collections::HashMap;
+
+fn foo() -> i32 {
+ 1 + 1
+}
+
+fn bar() -> f32 {
+ 2.0 * 3.0
+}
+";
+
+ db.set_file_text(pos.file_id.file_id(&db), new_text);
+
+ {
+ let actual = db.log_executed(|| {
+ let module = db.module_for_file(pos.file_id.file_id(&db));
+ let _crate_def_map = module.def_map(&db);
+ db.trait_impls_in_crate(module.krate());
+ });
+
+ let expected = vec![
+ "parse_shim".to_owned(),
+ "ast_id_map_shim".to_owned(),
+ "file_item_tree_shim".to_owned(),
+ "real_span_map_shim".to_owned(),
+ "crate_local_def_map".to_owned(),
+ "trait_impls_in_crate_shim".to_owned(),
+ ];
+
+ assert_eq!(expected, actual);
+ }
+}
+
+#[test]
+fn adding_impl_query_log() {
+ let (mut db, pos) = TestDB::with_position(
+ "
+//- /lib.rs
+fn foo() -> i32 {
+ 1 + 1
+}
+
+fn bar() -> f32 {
+ 2.0 * 3.0
+}
+
+pub struct SomeStruct {
+ field: i32,
+}
+$0",
+ );
+ {
+ let events = db.log_executed(|| {
+ let module = db.module_for_file(pos.file_id.file_id(&db));
+ let _crate_def_map = module.def_map(&db);
+ db.trait_impls_in_crate(module.krate());
+ });
+ assert!(format!("{events:?}").contains("trait_impls_in_crate_shim"))
+ }
+
+ let new_text = "
+fn foo() -> i32 {
+ 1 + 1
+}
+
+fn bar() -> f32 {
+ 2.0 * 3.0
+}
+
+pub struct SomeStruct {
+ field: i32,
+}
+
+impl SomeStruct {
+ pub fn new(value: i32) -> Self {
+ Self { field: value }
+ }
+}
+";
+
+ db.set_file_text(pos.file_id.file_id(&db), new_text);
+
+ {
+ let actual = db.log_executed(|| {
+ let module = db.module_for_file(pos.file_id.file_id(&db));
+ let _crate_def_map = module.def_map(&db);
+ db.trait_impls_in_crate(module.krate());
+ });
+
+ let expected = vec![
+ "parse_shim".to_owned(),
+ "ast_id_map_shim".to_owned(),
+ "file_item_tree_shim".to_owned(),
+ "real_span_map_shim".to_owned(),
+ "crate_local_def_map".to_owned(),
+ "trait_impls_in_crate_shim".to_owned(),
+ "attrs_shim".to_owned(),
+ "impl_trait_with_diagnostics_shim".to_owned(),
+ "impl_signature_shim".to_owned(),
+ "impl_signature_with_source_map_shim".to_owned(),
+ "impl_self_ty_with_diagnostics_shim".to_owned(),
+ "struct_signature_shim".to_owned(),
+ "struct_signature_with_source_map_shim".to_owned(),
+ "type_for_adt_tracked".to_owned(),
+ ];
+
+ assert_eq!(expected, actual);
+ }
+}
diff --git a/crates/hir-ty/src/tests/regression.rs b/crates/hir-ty/src/tests/regression.rs
index 47c695c..ff8adee 100644
--- a/crates/hir-ty/src/tests/regression.rs
+++ b/crates/hir-ty/src/tests/regression.rs
@@ -2301,3 +2301,51 @@
"#]],
);
}
+
+#[test]
+fn no_panic_on_recursive_const() {
+ check_infer(
+ r#"
+struct Foo<const N: usize> {}
+impl<const N: Foo<N>> Foo<N> {
+ fn foo(self) {}
+}
+
+fn test() {
+ let _ = N;
+}
+"#,
+ expect![[r#"
+ 72..76 'self': Foo<N>
+ 78..80 '{}': ()
+ 94..112 '{ ...= N; }': ()
+ 104..105 '_': {unknown}
+ 108..109 'N': {unknown}
+ "#]],
+ );
+
+ check_infer(
+ r#"
+struct Foo<const N: usize>;
+const N: Foo<N> = Foo;
+
+impl<const N: usize> Foo<N> {
+ fn foo(self) -> usize {
+ N
+ }
+}
+
+fn test() {
+ let _ = N;
+}
+"#,
+ expect![[r#"
+ 93..97 'self': Foo<N>
+ 108..125 '{ ... }': usize
+ 118..119 'N': usize
+ 139..157 '{ ...= N; }': ()
+ 149..150 '_': Foo<_>
+ 153..154 'N': Foo<_>
+ "#]],
+ );
+}
diff --git a/crates/hir/src/diagnostics.rs b/crates/hir/src/diagnostics.rs
index b6e3002..f7b140e 100644
--- a/crates/hir/src/diagnostics.rs
+++ b/crates/hir/src/diagnostics.rs
@@ -224,7 +224,7 @@
#[derive(Debug)]
pub struct NoSuchField {
pub field: InFile<AstPtr<Either<ast::RecordExprField, ast::RecordPatField>>>,
- pub private: bool,
+ pub private: Option<Field>,
pub variant: VariantId,
}
@@ -648,6 +648,7 @@
}
ExprOrPatId::PatId(pat) => source_map.pat_field_syntax(pat),
};
+ let private = private.map(|id| Field { id, parent: variant.into() });
NoSuchField { field: expr_or_pat, private, variant }.into()
}
&InferenceDiagnostic::MismatchedArgCount { call_expr, expected, found } => {
diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index aea2254..4a2e8e3 100644
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -24,7 +24,7 @@
attrs::collect_attrs,
builtin::{BuiltinFnLikeExpander, EagerExpander},
db::ExpandDatabase,
- files::{FileRangeWrapper, InRealFile},
+ files::{FileRangeWrapper, HirFileRange, InRealFile},
inert_attr_macro::find_builtin_attr_idx,
mod_path::{ModPath, PathKind},
name::AsName,
@@ -222,6 +222,21 @@
self.imp.descend_node_at_offset(node, offset).filter_map(|mut it| it.find_map(N::cast))
}
+ // FIXME: Rethink this API
+ pub fn find_namelike_at_offset_with_descend<'slf>(
+ &'slf self,
+ node: &SyntaxNode,
+ offset: TextSize,
+ ) -> impl Iterator<Item = ast::NameLike> + 'slf {
+ node.token_at_offset(offset)
+ .map(move |token| self.descend_into_macros_no_opaque(token))
+ .map(|descendants| descendants.into_iter().filter_map(move |it| it.value.parent()))
+ // re-order the tokens from token_at_offset by returning the ancestors with smaller nodes first
+ // See algo::ancestors_at_offset, which uses the same approach
+ .kmerge_by(|left, right| left.text_range().len().lt(&right.text_range().len()))
+ .filter_map(ast::NameLike::cast)
+ }
+
pub fn resolve_range_pat(&self, range_pat: &ast::RangePat) -> Option<Struct> {
self.imp.resolve_range_pat(range_pat).map(Struct::from)
}
@@ -262,6 +277,17 @@
self.imp.file_to_module_defs(file.into())
}
+ pub fn hir_file_to_module_def(&self, file: impl Into<HirFileId>) -> Option<Module> {
+ self.imp.hir_file_to_module_defs(file.into()).next()
+ }
+
+ pub fn hir_file_to_module_defs(
+ &self,
+ file: impl Into<HirFileId>,
+ ) -> impl Iterator<Item = Module> {
+ self.imp.hir_file_to_module_defs(file.into())
+ }
+
pub fn to_adt_def(&self, a: &ast::Adt) -> Option<Adt> {
self.imp.to_def(a)
}
@@ -357,6 +383,15 @@
tree
}
+ pub fn adjust_edition(&self, file_id: HirFileId) -> HirFileId {
+ if let Some(editioned_file_id) = file_id.file_id() {
+ self.attach_first_edition(editioned_file_id.file_id(self.db))
+ .map_or(file_id, Into::into)
+ } else {
+ file_id
+ }
+ }
+
pub fn find_parent_file(&self, file_id: HirFileId) -> Option<InFile<SyntaxNode>> {
match file_id {
HirFileId::FileId(file_id) => {
@@ -515,7 +550,7 @@
}
pub fn is_derive_annotated(&self, adt: InFile<&ast::Adt>) -> bool {
- self.with_ctx(|ctx| ctx.has_derives(adt))
+ self.with_ctx(|ctx| ctx.file_of_adt_has_derives(adt))
}
pub fn derive_helpers_in_scope(&self, adt: &ast::Adt) -> Option<Vec<(Symbol, Symbol)>> {
@@ -624,7 +659,7 @@
/// Checks if renaming `renamed` to `new_name` may introduce conflicts with other locals,
/// and returns the conflicting locals.
- pub fn rename_conflicts(&self, to_be_renamed: &Local, new_name: &str) -> Vec<Local> {
+ pub fn rename_conflicts(&self, to_be_renamed: &Local, new_name: &Name) -> Vec<Local> {
let body = self.db.body(to_be_renamed.parent);
let resolver = to_be_renamed.parent.resolver(self.db);
let starting_expr =
@@ -633,7 +668,7 @@
body: &body,
conflicts: FxHashSet::default(),
db: self.db,
- new_name: Symbol::intern(new_name),
+ new_name: new_name.symbol().clone(),
old_name: to_be_renamed.name(self.db).symbol().clone(),
owner: to_be_renamed.parent,
to_be_renamed: to_be_renamed.binding_id,
@@ -653,7 +688,7 @@
string: &ast::String,
) -> Option<Vec<(TextRange, Option<Either<PathResolution, InlineAsmOperand>>)>> {
let string_start = string.syntax().text_range().start();
- let token = self.wrap_token_infile(string.syntax().clone()).into_real_file().ok()?;
+ let token = self.wrap_token_infile(string.syntax().clone());
self.descend_into_macros_breakable(token, |token, _| {
(|| {
let token = token.value;
@@ -693,50 +728,95 @@
}
/// Retrieves the formatting part of the format_args! template string at the given offset.
+ ///
+ // FIXME: Type the return type
+ /// Returns the range (pre-expansion) in the string literal corresponding to the resolution,
+ /// the absolute file range (post-expansion) of the part in the format string,
+ /// the corresponding string token, and the resolution if it exists.
+ // FIXME: Remove this in favor of `check_for_format_args_template_with_file`
pub fn check_for_format_args_template(
&self,
original_token: SyntaxToken,
offset: TextSize,
- ) -> Option<(TextRange, Option<Either<PathResolution, InlineAsmOperand>>)> {
- let string_start = original_token.text_range().start();
- let original_token = self.wrap_token_infile(original_token).into_real_file().ok()?;
- self.descend_into_macros_breakable(original_token, |token, _| {
- (|| {
- let token = token.value;
- self.resolve_offset_in_format_args(
- ast::String::cast(token)?,
- offset.checked_sub(string_start)?,
- )
- .map(|(range, res)| (range + string_start, res))
- })()
- .map_or(ControlFlow::Continue(()), ControlFlow::Break)
- })
+ ) -> Option<(
+ TextRange,
+ HirFileRange,
+ ast::String,
+ Option<Either<PathResolution, InlineAsmOperand>>,
+ )> {
+ let original_token =
+ self.wrap_token_infile(original_token).map(ast::String::cast).transpose()?;
+ self.check_for_format_args_template_with_file(original_token, offset)
+ }
+
+ /// Retrieves the formatting part of the format_args! template string at the given offset.
+ ///
+ // FIXME: Type the return type
+ /// Returns the range (pre-expansion) in the string literal corresponding to the resolution,
+ /// the absolute file range (post-expansion) of the part in the format string,
+ /// the corresponding string token, and the resolution if it exists.
+ pub fn check_for_format_args_template_with_file(
+ &self,
+ original_token: InFile<ast::String>,
+ offset: TextSize,
+ ) -> Option<(
+ TextRange,
+ HirFileRange,
+ ast::String,
+ Option<Either<PathResolution, InlineAsmOperand>>,
+ )> {
+ let relative_offset =
+ offset.checked_sub(original_token.value.syntax().text_range().start())?;
+ self.descend_into_macros_breakable(
+ original_token.as_ref().map(|it| it.syntax().clone()),
+ |token, _| {
+ (|| {
+ let token = token.map(ast::String::cast).transpose()?;
+ self.resolve_offset_in_format_args(token.as_ref(), relative_offset).map(
+ |(range, res)| {
+ (
+ range + original_token.value.syntax().text_range().start(),
+ HirFileRange {
+ file_id: token.file_id,
+ range: range + token.value.syntax().text_range().start(),
+ },
+ token.value,
+ res,
+ )
+ },
+ )
+ })()
+ .map_or(ControlFlow::Continue(()), ControlFlow::Break)
+ },
+ )
}
fn resolve_offset_in_format_args(
&self,
- string: ast::String,
+ InFile { value: string, file_id }: InFile<&ast::String>,
offset: TextSize,
) -> Option<(TextRange, Option<Either<PathResolution, InlineAsmOperand>>)> {
debug_assert!(offset <= string.syntax().text_range().len());
let literal = string.syntax().parent().filter(|it| it.kind() == SyntaxKind::LITERAL)?;
let parent = literal.parent()?;
if let Some(format_args) = ast::FormatArgsExpr::cast(parent.clone()) {
- let source_analyzer = &self.analyze_no_infer(format_args.syntax())?;
- let format_args = self.wrap_node_infile(format_args);
+ let source_analyzer =
+ &self.analyze_impl(InFile::new(file_id, format_args.syntax()), None, false)?;
source_analyzer
- .resolve_offset_in_format_args(self.db, format_args.as_ref(), offset)
+ .resolve_offset_in_format_args(self.db, InFile::new(file_id, &format_args), offset)
.map(|(range, res)| (range, res.map(Either::Left)))
} else {
let asm = ast::AsmExpr::cast(parent)?;
- let source_analyzer = &self.analyze_no_infer(asm.syntax())?;
+ let source_analyzer =
+ self.analyze_impl(InFile::new(file_id, asm.syntax()), None, false)?;
let line = asm.template().position(|it| *it.syntax() == literal)?;
- let asm = self.wrap_node_infile(asm);
- source_analyzer.resolve_offset_in_asm_template(asm.as_ref(), line, offset).map(
- |(owner, (expr, range, index))| {
+ source_analyzer
+ .resolve_offset_in_asm_template(InFile::new(file_id, &asm), line, offset)
+ .map(|(owner, (expr, range, index))| {
(range, Some(Either::Right(InlineAsmOperand { owner, expr, index })))
- },
- )
+ })
}
}
@@ -809,14 +889,12 @@
None => return res,
};
let file = self.find_file(node.syntax());
- let Some(file_id) = file.file_id.file_id() else {
- return res;
- };
if first == last {
// node is just the token, so descend the token
- self.descend_into_macros_impl(
- InRealFile::new(file_id, first),
+ self.descend_into_macros_all(
+ InFile::new(file.file_id, first),
+ false,
&mut |InFile { value, .. }, _ctx| {
if let Some(node) = value
.parent_ancestors()
@@ -825,20 +903,21 @@
{
res.push(node)
}
- CONTINUE_NO_BREAKS
},
);
} else {
// Descend first and last token, then zip them to look for the node they belong to
let mut scratch: SmallVec<[_; 1]> = smallvec![];
- self.descend_into_macros_impl(InRealFile::new(file_id, first), &mut |token, _ctx| {
- scratch.push(token);
- CONTINUE_NO_BREAKS
- });
+ self.descend_into_macros_all(
+ InFile::new(file.file_id, first),
+ false,
+ &mut |token, _ctx| scratch.push(token),
+ );
let mut scratch = scratch.into_iter();
- self.descend_into_macros_impl(
- InRealFile::new(file_id, last),
+ self.descend_into_macros_all(
+ InFile::new(file.file_id, last),
+ false,
&mut |InFile { value: last, file_id: last_fid }, _ctx| {
if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() {
if first_fid == last_fid {
@@ -855,17 +934,18 @@
}
}
}
- CONTINUE_NO_BREAKS
},
);
}
res
}
- pub fn is_inside_macro_call(&self, token: InFile<&SyntaxToken>) -> bool {
- // FIXME: Maybe `ancestors_with_macros()` is more suitable here? Currently
- // this is only used on real (not macro) files so this is not a problem.
- token.value.parent_ancestors().any(|ancestor| {
+ /// Returns true if the given input is within a macro call.
+ ///
+ /// Note that it does not matter whether this token itself is within the context of a macro expansion.
+ /// That is, we strictly check if it lies inside the input of a macro call.
+ pub fn is_inside_macro_call(&self, token @ InFile { value, .. }: InFile<&SyntaxToken>) -> bool {
+ value.parent_ancestors().any(|ancestor| {
if ast::MacroCall::can_cast(ancestor.kind()) {
return true;
}
@@ -890,7 +970,7 @@
ast::Item::Union(it) => it.into(),
_ => return false,
};
- ctx.has_derives(token.with_value(&adt))
+ ctx.file_of_adt_has_derives(token.with_value(&adt))
})
})
}
@@ -900,22 +980,18 @@
token: SyntaxToken,
mut cb: impl FnMut(InFile<SyntaxToken>, SyntaxContext),
) {
- if let Ok(token) = self.wrap_token_infile(token).into_real_file() {
- self.descend_into_macros_impl(token, &mut |t, ctx| {
- cb(t, ctx);
- CONTINUE_NO_BREAKS
- });
- }
+ self.descend_into_macros_all(self.wrap_token_infile(token), false, &mut |t, ctx| {
+ cb(t, ctx)
+ });
}
pub fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
let mut res = smallvec![];
- if let Ok(token) = self.wrap_token_infile(token.clone()).into_real_file() {
- self.descend_into_macros_impl(token, &mut |t, _ctx| {
- res.push(t.value);
- CONTINUE_NO_BREAKS
- });
- }
+ self.descend_into_macros_all(
+ self.wrap_token_infile(token.clone()),
+ false,
+ &mut |t, _ctx| res.push(t.value),
+ );
if res.is_empty() {
res.push(token);
}
@@ -928,15 +1004,12 @@
) -> SmallVec<[InFile<SyntaxToken>; 1]> {
let mut res = smallvec![];
let token = self.wrap_token_infile(token);
- if let Ok(token) = token.clone().into_real_file() {
- self.descend_into_macros_impl(token, &mut |t, ctx| {
- if !ctx.is_opaque(self.db) {
- // Don't descend into opaque contexts
- res.push(t);
- }
- CONTINUE_NO_BREAKS
- });
- }
+ self.descend_into_macros_all(token.clone(), true, &mut |t, ctx| {
+ if !ctx.is_opaque(self.db) {
+ // Don't descend into opaque contexts
+ res.push(t);
+ }
+ });
if res.is_empty() {
res.push(token);
}
@@ -945,10 +1018,10 @@
pub fn descend_into_macros_breakable<T>(
&self,
- token: InRealFile<SyntaxToken>,
+ token: InFile<SyntaxToken>,
mut cb: impl FnMut(InFile<SyntaxToken>, SyntaxContext) -> ControlFlow<T>,
) -> Option<T> {
- self.descend_into_macros_impl(token, &mut cb)
+ self.descend_into_macros_impl(token, false, &mut cb)
}
/// Descends the token into expansions, returning the tokens that matches the input
@@ -974,67 +1047,103 @@
r
}
+ /// Descends the token into expansions, returning the tokens that match the input
+ /// token's [`SyntaxKind`] and text.
+ pub fn descend_into_macros_exact_with_file(
+ &self,
+ token: SyntaxToken,
+ ) -> SmallVec<[InFile<SyntaxToken>; 1]> {
+ let mut r = smallvec![];
+ let text = token.text();
+ let kind = token.kind();
+
+ self.descend_into_macros_cb(token.clone(), |InFile { value, file_id }, ctx| {
+ let mapped_kind = value.kind();
+ let any_ident_match = || kind.is_any_identifier() && value.kind().is_any_identifier();
+ let matches = (kind == mapped_kind || any_ident_match())
+ && text == value.text()
+ && !ctx.is_opaque(self.db);
+ if matches {
+ r.push(InFile { value, file_id });
+ }
+ });
+ if r.is_empty() {
+ r.push(self.wrap_token_infile(token));
+ }
+ r
+ }
+
/// Descends the token into expansions, returning the first token that matches the input
/// token's [`SyntaxKind`] and text.
pub fn descend_into_macros_single_exact(&self, token: SyntaxToken) -> SyntaxToken {
let text = token.text();
let kind = token.kind();
- if let Ok(token) = self.wrap_token_infile(token.clone()).into_real_file() {
- self.descend_into_macros_breakable(token, |InFile { value, file_id: _ }, _ctx| {
+ self.descend_into_macros_breakable(
+ self.wrap_token_infile(token.clone()),
+ |InFile { value, file_id: _ }, _ctx| {
let mapped_kind = value.kind();
let any_ident_match =
|| kind.is_any_identifier() && value.kind().is_any_identifier();
let matches = (kind == mapped_kind || any_ident_match()) && text == value.text();
if matches { ControlFlow::Break(value) } else { ControlFlow::Continue(()) }
- })
- } else {
- None
- }
+ },
+ )
.unwrap_or(token)
}
+ fn descend_into_macros_all(
+ &self,
+ token: InFile<SyntaxToken>,
+ always_descend_into_derives: bool,
+ f: &mut dyn FnMut(InFile<SyntaxToken>, SyntaxContext),
+ ) {
+ self.descend_into_macros_impl(token, always_descend_into_derives, &mut |tok, ctx| {
+ f(tok, ctx);
+ CONTINUE_NO_BREAKS
+ });
+ }
+
fn descend_into_macros_impl<T>(
&self,
- InRealFile { value: token, file_id }: InRealFile<SyntaxToken>,
+ InFile { value: token, file_id }: InFile<SyntaxToken>,
+ always_descend_into_derives: bool,
f: &mut dyn FnMut(InFile<SyntaxToken>, SyntaxContext) -> ControlFlow<T>,
) -> Option<T> {
let _p = tracing::info_span!("descend_into_macros_impl").entered();
- let span = self.db.real_span_map(file_id).span_for_range(token.text_range());
+ let db = self.db;
+ let span = db.span_map(file_id).span_for_range(token.text_range());
// Process the expansion of a call, pushing all tokens with our span in the expansion back onto our stack
- let process_expansion_for_token = |stack: &mut Vec<_>, macro_file| {
- let InMacroFile { file_id, value: mapped_tokens } = self.with_ctx(|ctx| {
- Some(
- ctx.cache
- .get_or_insert_expansion(ctx.db, macro_file)
- .map_range_down(span)?
- .map(SmallVec::<[_; 2]>::from_iter),
- )
- })?;
- // we have found a mapping for the token if the vec is non-empty
- let res = mapped_tokens.is_empty().not().then_some(());
- // requeue the tokens we got from mapping our current token down
- stack.push((HirFileId::from(file_id), mapped_tokens));
- res
- };
+ let process_expansion_for_token =
+ |ctx: &mut SourceToDefCtx<'_, '_>, stack: &mut Vec<_>, macro_file| {
+ let InMacroFile { file_id, value: mapped_tokens } = ctx
+ .cache
+ .get_or_insert_expansion(ctx.db, macro_file)
+ .map_range_down(span)?
+ .map(SmallVec::<[_; 2]>::from_iter);
+ // we have found a mapping for the token if the vec is non-empty
+ let res = mapped_tokens.is_empty().not().then_some(());
+ // requeue the tokens we got from mapping our current token down
+ stack.push((HirFileId::from(file_id), mapped_tokens));
+ res
+ };
// A stack of tokens to process, along with the file they came from
// These are tracked to know which macro calls we still have to look into
// the tokens themselves aren't that interesting as the span that is being used to map
// things down never changes.
let mut stack: Vec<(_, SmallVec<[_; 2]>)> = vec![];
- let include = self.s2d_cache.borrow_mut().get_or_insert_include_for(self.db, file_id);
+ let include = file_id
+ .file_id()
+ .and_then(|file_id| self.s2d_cache.borrow_mut().get_or_insert_include_for(db, file_id));
match include {
Some(include) => {
// include! inputs are always from real files, so they only need to be handled once upfront
- process_expansion_for_token(&mut stack, include)?;
+ self.with_ctx(|ctx| process_expansion_for_token(ctx, &mut stack, include))?;
}
None => {
- stack.push((
- file_id.into(),
- smallvec![(token, SyntaxContext::root(file_id.edition(self.db)))],
- ));
+ stack.push((file_id, smallvec![(token, span.ctx)]));
}
}
@@ -1053,62 +1162,120 @@
tokens.reverse();
while let Some((token, ctx)) = tokens.pop() {
let was_not_remapped = (|| {
- // First expand into attribute invocations
- let containing_attribute_macro_call = self.with_ctx(|ctx| {
- token.parent_ancestors().filter_map(ast::Item::cast).find_map(|item| {
- // Don't force populate the dyn cache for items that don't have an attribute anyways
- item.attrs().next()?;
- Some((ctx.item_to_macro_call(InFile::new(expansion, &item))?, item))
- })
- });
- if let Some((call_id, item)) = containing_attribute_macro_call {
- let attr_id = match self.db.lookup_intern_macro_call(call_id).kind {
- hir_expand::MacroCallKind::Attr { invoc_attr_index, .. } => {
- invoc_attr_index.ast_index()
- }
- _ => 0,
- };
- // FIXME: here, the attribute's text range is used to strip away all
- // entries from the start of the attribute "list" up the invoking
- // attribute. But in
- // ```
- // mod foo {
- // #![inner]
- // }
- // ```
- // we don't wanna strip away stuff in the `mod foo {` range, that is
- // here if the id corresponds to an inner attribute we got strip all
- // text ranges of the outer ones, and then all of the inner ones up
- // to the invoking attribute so that the inbetween is ignored.
- let text_range = item.syntax().text_range();
- let start = collect_attrs(&item)
- .nth(attr_id)
- .map(|attr| match attr.1 {
- Either::Left(it) => it.syntax().text_range().start(),
- Either::Right(it) => it.syntax().text_range().start(),
+ // First expand into attribute invocations; this has to be handled upfront,
+ // as any other macro call within will not semantically resolve unless it is
+ // also descended.
+ let res = self.with_ctx(|ctx| {
+ token
+ .parent_ancestors()
+ .filter_map(ast::Item::cast)
+ // FIXME: This might work incorrectly when we have a derive, followed by
+ // an attribute on an item, like:
+ // ```
+ // #[derive(Debug$0)]
+ // #[my_attr]
+ // struct MyStruct;
+ // ```
+ // here we should not consider the attribute at all, as our cursor
+ // technically lies outside of its expansion
+ .find_map(|item| {
+ // Don't force populate the dyn cache for items that don't have an attribute anyways
+ item.attrs().next()?;
+ ctx.item_to_macro_call(InFile::new(expansion, &item))
+ .zip(Some(item))
})
- .unwrap_or_else(|| text_range.start());
- let text_range = TextRange::new(start, text_range.end());
- filter_duplicates(tokens, text_range);
- return process_expansion_for_token(&mut stack, call_id);
+ .map(|(call_id, item)| {
+ let attr_id = match db.lookup_intern_macro_call(call_id).kind {
+ hir_expand::MacroCallKind::Attr {
+ invoc_attr_index, ..
+ } => invoc_attr_index.ast_index(),
+ _ => 0,
+ };
+ // FIXME: here, the attribute's text range is used to strip away all
+ // entries from the start of the attribute "list" up the invoking
+ // attribute. But in
+ // ```
+ // mod foo {
+ // #![inner]
+ // }
+ // ```
+ // we don't wanna strip away stuff in the `mod foo {` range; that is,
+ // here, if the id corresponds to an inner attribute we have to strip all
+ // text ranges of the outer ones, and then all of the inner ones up
+ // to the invoking attribute so that the text in between is ignored.
+ let text_range = item.syntax().text_range();
+ let start = collect_attrs(&item)
+ .nth(attr_id)
+ .map(|attr| match attr.1 {
+ Either::Left(it) => it.syntax().text_range().start(),
+ Either::Right(it) => it.syntax().text_range().start(),
+ })
+ .unwrap_or_else(|| text_range.start());
+ let text_range = TextRange::new(start, text_range.end());
+ filter_duplicates(tokens, text_range);
+ process_expansion_for_token(ctx, &mut stack, call_id)
+ })
+ });
+
+ if let Some(res) = res {
+ return res;
}
+ if always_descend_into_derives {
+ let res = self.with_ctx(|ctx| {
+ let (derives, adt) = token
+ .parent_ancestors()
+ .filter_map(ast::Adt::cast)
+ .find_map(|adt| {
+ Some((
+ ctx.derive_macro_calls(InFile::new(expansion, &adt))?
+ .map(|(a, b, c)| (a, b, c.to_owned()))
+ .collect::<SmallVec<[_; 2]>>(),
+ adt,
+ ))
+ })?;
+ let mut res = None;
+ for (_, derive_attr, derives) in derives {
+ // as there may be multiple derives registering the same helper
+ // name, we gotta make sure to call this for all of them!
+ // FIXME: We need to call `f` for all of them as well though!
+ res = res.or(process_expansion_for_token(
+ ctx,
+ &mut stack,
+ derive_attr,
+ ));
+ for derive in derives.into_iter().flatten() {
+ res = res
+ .or(process_expansion_for_token(ctx, &mut stack, derive));
+ }
+ }
+ // remove all tokens that are within the derives expansion
+ filter_duplicates(tokens, adt.syntax().text_range());
+ Some(res)
+ });
+ // if we found derives, we can early exit. There is no way we can be in any
+ // macro call at this point given we are not in a token tree
+ if let Some(res) = res {
+ return res;
+ }
+ }
// Then check for token trees, that means we are either in a function-like macro or
// secondary attribute inputs
let tt = token
.parent_ancestors()
.map_while(Either::<ast::TokenTree, ast::Meta>::cast)
.last()?;
+
match tt {
// function-like macro call
Either::Left(tt) => {
+ let macro_call = tt.syntax().parent().and_then(ast::MacroCall::cast)?;
if tt.left_delimiter_token().map_or(false, |it| it == token) {
return None;
}
if tt.right_delimiter_token().map_or(false, |it| it == token) {
return None;
}
- let macro_call = tt.syntax().parent().and_then(ast::MacroCall::cast)?;
let mcall = InFile::new(expansion, macro_call);
let file_id = match m_cache.get(&mcall) {
Some(&it) => it,
@@ -1121,13 +1288,16 @@
let text_range = tt.syntax().text_range();
filter_duplicates(tokens, text_range);
- process_expansion_for_token(&mut stack, file_id).or(file_id
- .eager_arg(self.db)
- .and_then(|arg| {
- // also descend into eager expansions
- process_expansion_for_token(&mut stack, arg)
- }))
+ self.with_ctx(|ctx| {
+ process_expansion_for_token(ctx, &mut stack, file_id).or(file_id
+ .eager_arg(db)
+ .and_then(|arg| {
+ // also descend into eager expansions
+ process_expansion_for_token(ctx, &mut stack, arg)
+ }))
+ })
}
+ Either::Right(_) if always_descend_into_derives => None,
// derive or derive helper
Either::Right(meta) => {
// attribute we failed expansion for earlier, this might be a derive invocation
@@ -1136,31 +1306,33 @@
let adt = match attr.syntax().parent().and_then(ast::Adt::cast) {
Some(adt) => {
// this might be a derive on an ADT
- let derive_call = self.with_ctx(|ctx| {
+ let res = self.with_ctx(|ctx| {
// so try downmapping the token into the pseudo derive expansion
// see [hir_expand::builtin_attr_macro] for how the pseudo derive expansion works
- ctx.attr_to_derive_macro_call(
- InFile::new(expansion, &adt),
- InFile::new(expansion, attr.clone()),
- )
- .map(|(_, call_id, _)| call_id)
- });
+ let derive_call = ctx
+ .attr_to_derive_macro_call(
+ InFile::new(expansion, &adt),
+ InFile::new(expansion, attr.clone()),
+ )?
+ .1;
- match derive_call {
- Some(call_id) => {
- // resolved to a derive
- let text_range = attr.syntax().text_range();
- // remove any other token in this macro input, all their mappings are the
- // same as this
- tokens.retain(|(t, _)| {
- !text_range.contains_range(t.text_range())
- });
- return process_expansion_for_token(
- &mut stack, call_id,
- );
- }
- None => Some(adt),
+ // resolved to a derive
+ let text_range = attr.syntax().text_range();
+ // remove any other token in this macro input, all their mappings are the
+ // same as this
+ tokens.retain(|(t, _)| {
+ !text_range.contains_range(t.text_range())
+ });
+ Some(process_expansion_for_token(
+ ctx,
+ &mut stack,
+ derive_call,
+ ))
+ });
+ if let Some(res) = res {
+ return res;
}
+ Some(adt)
}
None => {
// Otherwise this could be a derive helper on a variant or field
@@ -1174,12 +1346,9 @@
)
}
}?;
- if !self.with_ctx(|ctx| ctx.has_derives(InFile::new(expansion, &adt))) {
- return None;
- }
let attr_name =
attr.path().and_then(|it| it.as_single_name_ref())?.as_name();
- // Not an attribute, nor a derive, so it's either an intert attribute or a derive helper
+ // Not an attribute, nor a derive, so it's either an inert attribute or a derive helper
// Try to resolve to a derive helper and downmap
let resolver = &token
.parent()
@@ -1187,7 +1356,7 @@
self.analyze_impl(InFile::new(expansion, &parent), None, false)
})?
.resolver;
- let id = self.db.ast_id_map(expansion).ast_id(&adt);
+ let id = db.ast_id_map(expansion).ast_id(&adt);
let helpers = resolver
.def_map()
.derive_helpers_in_scope(InFile::new(expansion, id))?;
@@ -1198,20 +1367,22 @@
}
let mut res = None;
- for (.., derive) in
- helpers.iter().filter(|(helper, ..)| *helper == attr_name)
- {
- // as there may be multiple derives registering the same helper
- // name, we gotta make sure to call this for all of them!
- // FIXME: We need to call `f` for all of them as well though!
- res = res.or(process_expansion_for_token(&mut stack, *derive));
- }
- res
+ self.with_ctx(|ctx| {
+ for (.., derive) in
+ helpers.iter().filter(|(helper, ..)| *helper == attr_name)
+ {
+ // as there may be multiple derives registering the same helper
+ // name, we gotta make sure to call this for all of them!
+ // FIXME: We need to call `f` for all of them as well though!
+ res = res
+ .or(process_expansion_for_token(ctx, &mut stack, *derive));
+ }
+ res
+ })
}
}
})()
.is_none();
-
if was_not_remapped {
if let ControlFlow::Break(b) = f(InFile::new(expansion, token), ctx) {
return Some(b);
@@ -1300,25 +1471,31 @@
}
/// Iterates the ancestors of the given node, climbing up macro expansions while doing so.
+ // FIXME: Replace with `ancestors_with_macros_file` when all usages are updated.
pub fn ancestors_with_macros(
&self,
node: SyntaxNode,
) -> impl Iterator<Item = SyntaxNode> + Clone + '_ {
let node = self.find_file(&node);
- iter::successors(Some(node.cloned()), move |&InFile { file_id, ref value }| {
- match value.parent() {
- Some(parent) => Some(InFile::new(file_id, parent)),
- None => {
- let macro_file = file_id.macro_file()?;
+ self.ancestors_with_macros_file(node.cloned()).map(|it| it.value)
+ }
- self.with_ctx(|ctx| {
- let expansion_info = ctx.cache.get_or_insert_expansion(ctx.db, macro_file);
- expansion_info.arg().map(|node| node?.parent()).transpose()
- })
- }
+ /// Iterates the ancestors of the given node, climbing up macro expansions while doing so.
+ pub fn ancestors_with_macros_file(
+ &self,
+ node: InFile<SyntaxNode>,
+ ) -> impl Iterator<Item = InFile<SyntaxNode>> + Clone + '_ {
+ iter::successors(Some(node), move |&InFile { file_id, ref value }| match value.parent() {
+ Some(parent) => Some(InFile::new(file_id, parent)),
+ None => {
+ let macro_file = file_id.macro_file()?;
+
+ self.with_ctx(|ctx| {
+ let expansion_info = ctx.cache.get_or_insert_expansion(ctx.db, macro_file);
+ expansion_info.arg().map(|node| node?.parent()).transpose()
+ })
}
})
- .map(|it| it.value)
}
pub fn ancestors_at_offset_with_macros(
@@ -1678,6 +1855,11 @@
self.with_ctx(|ctx| ctx.file_to_def(file).to_owned()).into_iter().map(Module::from)
}
+ fn hir_file_to_module_defs(&self, file: HirFileId) -> impl Iterator<Item = Module> {
+ // FIXME: Do we need to care about inline modules for macro expansions?
+ self.file_to_module_defs(file.original_file_respecting_includes(self.db).file_id(self.db))
+ }
+
pub fn scope(&self, node: &SyntaxNode) -> Option<SemanticsScope<'db>> {
self.analyze_no_infer(node).map(|SourceAnalyzer { file_id, resolver, .. }| SemanticsScope {
db: self.db,
@@ -1766,18 +1948,12 @@
ChildContainer::TraitId(it) => {
return Some(SourceAnalyzer::new_generic_def(self.db, it.into(), node, offset));
}
- ChildContainer::TraitAliasId(it) => {
- return Some(SourceAnalyzer::new_generic_def(self.db, it.into(), node, offset));
- }
ChildContainer::ImplId(it) => {
return Some(SourceAnalyzer::new_generic_def(self.db, it.into(), node, offset));
}
ChildContainer::EnumId(it) => {
return Some(SourceAnalyzer::new_generic_def(self.db, it.into(), node, offset));
}
- ChildContainer::TypeAliasId(it) => {
- return Some(SourceAnalyzer::new_generic_def(self.db, it.into(), node, offset));
- }
ChildContainer::GenericDefId(it) => {
return Some(SourceAnalyzer::new_generic_def(self.db, it, node, offset));
}
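
The reworked `descend_into_macros_impl` above is a worklist algorithm: keep a stack of `(file, tokens)` pairs, map each token down into the expansion of whatever macro call contains it, requeue the mapped tokens, and hand tokens that no longer remap to the callback. A self-contained sketch of that shape, with a stubbed `expand` standing in for the span-based down-mapping (toy types, not the real `ExpansionInfo` API):

```rust
// Worklist sketch of token descent through macro expansions. `expand` is a
// stand-in for mapping a token down into an expansion via its span; the real
// code goes through `ExpansionInfo::map_range_down`.
type FileId = u32;
type Token = &'static str;

fn descend_all(
    root: (FileId, Token),
    expand: impl Fn(FileId, Token) -> Option<(FileId, Vec<Token>)>,
    mut f: impl FnMut(FileId, Token),
) {
    // A stack of (file, tokens) pairs: which expansions we still have to look
    // into, and the tokens that mapped down into them.
    let mut stack: Vec<(FileId, Vec<Token>)> = vec![(root.0, vec![root.1])];
    while let Some((file, mut tokens)) = stack.pop() {
        while let Some(token) = tokens.pop() {
            match expand(file, token) {
                // The token lies in a macro call: requeue its mapped images.
                Some(mapped) => stack.push(mapped),
                // Not remapped any further: report the final descendant.
                None => f(file, token),
            }
        }
    }
}

fn main() {
    // One macro call in file 0 whose expansion (file 1) contains `x`.
    let expand = |file: FileId, token: Token| match (file, token) {
        (0, "m!(x)") => Some((1, vec!["x"])),
        _ => None,
    };
    descend_all((0, "m!(x)"), expand, |file, token| println!("{file}: {token}"));
}
```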
diff --git a/crates/hir/src/semantics/child_by_source.rs b/crates/hir/src/semantics/child_by_source.rs
index 6accf9b..1a6d63c 100644
--- a/crates/hir/src/semantics/child_by_source.rs
+++ b/crates/hir/src/semantics/child_by_source.rs
@@ -17,6 +17,7 @@
DynMap,
keys::{self, Key},
},
+ hir::generics::GenericParams,
item_scope::ItemScope,
item_tree::ItemTreeNode,
nameres::DefMap,
@@ -49,6 +50,12 @@
data.items.iter().for_each(|&(_, item)| {
add_assoc_item(db, res, file_id, item);
});
+ let (_, source_map) = db.trait_signature_with_source_map(*self);
+ source_map.expansions().filter(|(ast, _)| ast.file_id == file_id).for_each(
+ |(ast, &exp_id)| {
+ res[keys::MACRO_CALL].insert(ast.value, exp_id);
+ },
+ );
}
}
@@ -68,6 +75,12 @@
data.items.iter().for_each(|&(_, item)| {
add_assoc_item(db, res, file_id, item);
});
+ let (_, source_map) = db.impl_signature_with_source_map(*self);
+ source_map.expansions().filter(|(ast, _)| ast.file_id == file_id).for_each(
+ |(ast, &exp_id)| {
+ res[keys::MACRO_CALL].insert(ast.value, exp_id);
+ },
+ );
}
}
@@ -178,6 +191,8 @@
Either::Right(source) => res[keys::RECORD_FIELD].insert(AstPtr::new(&source), id),
}
}
+ let (_, sm) = db.variant_fields_with_source_map(*self);
+ sm.expansions().for_each(|(ast, &exp_id)| res[keys::MACRO_CALL].insert(ast.value, exp_id));
}
}
@@ -195,6 +210,11 @@
res[keys::ENUM_VARIANT]
.insert(ast_id_map.get(tree[variant.lookup(db).id.value].ast_id), variant);
});
+ let (_, source_map) = db.enum_signature_with_source_map(*self);
+ source_map
+ .expansions()
+ .filter(|(ast, _)| ast.file_id == file_id)
+ .for_each(|(ast, &exp_id)| res[keys::MACRO_CALL].insert(ast.value, exp_id));
}
}
@@ -225,7 +245,8 @@
return;
}
- let generic_params = db.generic_params(*self);
+ let (generic_params, _, source_map) =
+ GenericParams::generic_params_and_store_and_source_map(db, *self);
let mut toc_idx_iter = generic_params.iter_type_or_consts().map(|(idx, _)| idx);
let lts_idx_iter = generic_params.iter_lt().map(|(idx, _)| idx);
@@ -253,6 +274,11 @@
res[keys::LIFETIME_PARAM].insert(AstPtr::new(&ast_param), id);
}
}
+
+ source_map
+ .expansions()
+ .filter(|(ast, _)| ast.file_id == file_id)
+ .for_each(|(ast, &exp_id)| res[keys::MACRO_CALL].insert(ast.value, exp_id));
}
}
diff --git a/crates/hir/src/semantics/source_to_def.rs b/crates/hir/src/semantics/source_to_def.rs
index 7f6c9af..71ee0f6 100644
--- a/crates/hir/src/semantics/source_to_def.rs
+++ b/crates/hir/src/semantics/source_to_def.rs
@@ -108,7 +108,7 @@
use stdx::impl_from;
use syntax::{
AstNode, AstPtr, SyntaxNode,
- ast::{self, HasName},
+ ast::{self, HasAttrs, HasName},
};
use tt::TextRange;
@@ -411,10 +411,25 @@
.map(|&(attr_id, call_id, ref ids)| (attr_id, call_id, &**ids))
}
- pub(super) fn has_derives(&mut self, adt: InFile<&ast::Adt>) -> bool {
+ // FIXME: Make this more fine grained! This should be a `adt_has_derives`!
+ pub(super) fn file_of_adt_has_derives(&mut self, adt: InFile<&ast::Adt>) -> bool {
self.dyn_map(adt).as_ref().is_some_and(|map| !map[keys::DERIVE_MACRO_CALL].is_empty())
}
+ pub(super) fn derive_macro_calls<'slf>(
+ &'slf mut self,
+ adt: InFile<&ast::Adt>,
+ ) -> Option<impl Iterator<Item = (AttrId, MacroCallId, &'slf [Option<MacroCallId>])> + use<'slf>>
+ {
+ self.dyn_map(adt).as_ref().map(|&map| {
+ let dyn_map = &map[keys::DERIVE_MACRO_CALL];
+ adt.value
+ .attrs()
+ .filter_map(move |attr| dyn_map.get(&AstPtr::new(&attr)))
+ .map(|&(attr_id, call_id, ref ids)| (attr_id, call_id, &**ids))
+ })
+ }
+
fn to_def<Ast: AstNode + 'static, ID: Copy + 'static>(
&mut self,
src: InFile<&Ast>,
@@ -616,14 +631,14 @@
match &item {
ast::Item::Module(it) => self.module_to_def(container.with_value(it))?.into(),
ast::Item::Trait(it) => self.trait_to_def(container.with_value(it))?.into(),
- ast::Item::TraitAlias(it) => {
- self.trait_alias_to_def(container.with_value(it))?.into()
- }
ast::Item::Impl(it) => self.impl_to_def(container.with_value(it))?.into(),
ast::Item::Enum(it) => self.enum_to_def(container.with_value(it))?.into(),
- ast::Item::TypeAlias(it) => {
- self.type_alias_to_def(container.with_value(it))?.into()
- }
+ ast::Item::TypeAlias(it) => ChildContainer::GenericDefId(
+ self.type_alias_to_def(container.with_value(it))?.into(),
+ ),
+ ast::Item::TraitAlias(it) => ChildContainer::GenericDefId(
+ self.trait_alias_to_def(container.with_value(it))?.into(),
+ ),
ast::Item::Struct(it) => {
let def = self.struct_to_def(container.with_value(it))?;
let is_in_body = it.field_list().is_some_and(|it| {
@@ -723,11 +738,9 @@
DefWithBodyId(DefWithBodyId),
ModuleId(ModuleId),
TraitId(TraitId),
- TraitAliasId(TraitAliasId),
ImplId(ImplId),
EnumId(EnumId),
VariantId(VariantId),
- TypeAliasId(TypeAliasId),
/// XXX: this might be the same def as, for example an `EnumId`. However,
/// here the children are generic parameters, and not, eg enum variants.
GenericDefId(GenericDefId),
@@ -736,11 +749,9 @@
DefWithBodyId,
ModuleId,
TraitId,
- TraitAliasId,
ImplId,
EnumId,
VariantId,
- TypeAliasId,
GenericDefId
for ChildContainer
}
@@ -752,11 +763,9 @@
ChildContainer::DefWithBodyId(it) => it.child_by_source(db, file_id),
ChildContainer::ModuleId(it) => it.child_by_source(db, file_id),
ChildContainer::TraitId(it) => it.child_by_source(db, file_id),
- ChildContainer::TraitAliasId(_) => DynMap::default(),
ChildContainer::ImplId(it) => it.child_by_source(db, file_id),
ChildContainer::EnumId(it) => it.child_by_source(db, file_id),
ChildContainer::VariantId(it) => it.child_by_source(db, file_id),
- ChildContainer::TypeAliasId(_) => DynMap::default(),
ChildContainer::GenericDefId(it) => it.child_by_source(db, file_id),
}
}
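The new `derive_macro_calls` accessor exposes per-attribute derive expansions. A hypothetical call site, just to show the iterator's shape (only the signature is taken from the diff; `ctx` and `adt_in_file` are assumptions):

```rust
if let Some(calls) = ctx.derive_macro_calls(adt_in_file) {
    for (attr_id, derive_attr_call, derive_macro_ids) in calls {
        // `derive_attr_call` is the expansion of the `#[derive(...)]`
        // attribute itself; `derive_macro_ids` has one slot per listed
        // derive, `None` where that derive failed to resolve.
        let _ = (attr_id, derive_attr_call);
        for id in derive_macro_ids.iter().flatten() {
            let _resolved_derive = id;
        }
    }
}
```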
diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs
index d22812d..ec2ccf8 100644
--- a/crates/hir/src/source_analyzer.rs
+++ b/crates/hir/src/source_analyzer.rs
@@ -1303,6 +1303,7 @@
false
}
+ /// Returns the range of the implicit template argument and its resolution at the given `offset`
pub(crate) fn resolve_offset_in_format_args(
&self,
db: &'db dyn HirDatabase,
diff --git a/crates/ide-assists/src/handlers/auto_import.rs b/crates/ide-assists/src/handlers/auto_import.rs
index d310e11..f3243d3 100644
--- a/crates/ide-assists/src/handlers/auto_import.rs
+++ b/crates/ide-assists/src/handlers/auto_import.rs
@@ -128,11 +128,7 @@
format!("Import `{import_name}`"),
range,
|builder| {
- let scope = match scope.clone() {
- ImportScope::File(it) => ImportScope::File(builder.make_mut(it)),
- ImportScope::Module(it) => ImportScope::Module(builder.make_mut(it)),
- ImportScope::Block(it) => ImportScope::Block(builder.make_mut(it)),
- };
+ let scope = builder.make_import_scope_mut(scope.clone());
insert_use(&scope, mod_path_to_ast(&import_path, edition), &ctx.config.insert_use);
},
);
@@ -153,11 +149,7 @@
format!("Import `{import_name} as _`"),
range,
|builder| {
- let scope = match scope.clone() {
- ImportScope::File(it) => ImportScope::File(builder.make_mut(it)),
- ImportScope::Module(it) => ImportScope::Module(builder.make_mut(it)),
- ImportScope::Block(it) => ImportScope::Block(builder.make_mut(it)),
- };
+ let scope = builder.make_import_scope_mut(scope.clone());
insert_use_as_alias(
&scope,
mod_path_to_ast(&import_path, edition),
@@ -1877,4 +1869,30 @@
",
);
}
+
+ #[test]
+ fn carries_cfg_attr() {
+ check_assist(
+ auto_import,
+ r#"
+mod m {
+ pub struct S;
+}
+
+#[cfg(test)]
+fn foo(_: S$0) {}
+"#,
+ r#"
+#[cfg(test)]
+use m::S;
+
+mod m {
+ pub struct S;
+}
+
+#[cfg(test)]
+fn foo(_: S) {}
+"#,
+ );
+ }
}
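The two hunks above are instances of a mechanical cleanup that recurs throughout this diff. The pattern, condensed (the helper itself is added in the `source_change.rs` hunk further down):

```rust
// Before: every caller re-implemented the per-variant `make_mut` dance.
let scope = match scope.clone() {
    ImportScope::File(it) => ImportScope::File(builder.make_mut(it)),
    ImportScope::Module(it) => ImportScope::Module(builder.make_mut(it)),
    ImportScope::Block(it) => ImportScope::Block(builder.make_mut(it)),
};
// After: one helper call, which also carries the new `required_cfgs` along so
// inserted imports can be cfg-annotated (see the `carries_cfg_attr` test).
let scope = builder.make_import_scope_mut(scope.clone());
```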
diff --git a/crates/ide-assists/src/handlers/convert_bool_to_enum.rs b/crates/ide-assists/src/handlers/convert_bool_to_enum.rs
index 00e9fdf..f73b8c4 100644
--- a/crates/ide-assists/src/handlers/convert_bool_to_enum.rs
+++ b/crates/ide-assists/src/handlers/convert_bool_to_enum.rs
@@ -312,12 +312,8 @@
}
// add imports across modules where needed
- if let Some((import_scope, path)) = import_data {
- let scope = match import_scope {
- ImportScope::File(it) => ImportScope::File(edit.make_mut(it)),
- ImportScope::Module(it) => ImportScope::Module(edit.make_mut(it)),
- ImportScope::Block(it) => ImportScope::Block(edit.make_mut(it)),
- };
+ if let Some((scope, path)) = import_data {
+ let scope = edit.make_import_scope_mut(scope);
delayed_mutations.push((scope, path));
}
},
diff --git a/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs b/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs
index ed8aad7..32c4ae2 100644
--- a/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs
+++ b/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs
@@ -56,14 +56,22 @@
// XXX: We don't currently provide this assist for struct definitions inside macros, but if we
// are to lift this limitation, don't forget to make `edit_struct_def()` consider macro files
// too.
- let name = ctx.find_node_at_offset::<ast::Name>()?;
- let strukt = name.syntax().parent().and_then(<Either<ast::Struct, ast::Variant>>::cast)?;
- let field_list = strukt.as_ref().either(|s| s.field_list(), |v| v.field_list())?;
+ let strukt_or_variant = ctx
+ .find_node_at_offset::<ast::Struct>()
+ .map(Either::Left)
+ .or_else(|| ctx.find_node_at_offset::<ast::Variant>().map(Either::Right))?;
+ let field_list = strukt_or_variant.as_ref().either(|s| s.field_list(), |v| v.field_list())?;
+
+ if ctx.offset() > field_list.syntax().text_range().start() {
+ // The assist would be distracting inside the braces
+ return None;
+ }
+
let record_fields = match field_list {
ast::FieldList::RecordFieldList(it) => it,
ast::FieldList::TupleFieldList(_) => return None,
};
- let strukt_def = match &strukt {
+ let strukt_def = match &strukt_or_variant {
Either::Left(s) => Either::Left(ctx.sema.to_def(s)?),
Either::Right(v) => Either::Right(ctx.sema.to_def(v)?),
};
@@ -71,11 +79,11 @@
acc.add(
AssistId::refactor_rewrite("convert_named_struct_to_tuple_struct"),
"Convert to tuple struct",
- strukt.syntax().text_range(),
+ strukt_or_variant.syntax().text_range(),
|edit| {
edit_field_references(ctx, edit, record_fields.fields());
edit_struct_references(ctx, edit, strukt_def);
- edit_struct_def(ctx, edit, &strukt, record_fields);
+ edit_struct_def(ctx, edit, &strukt_or_variant, record_fields);
},
)
}
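The applicability change above (mirrored in `convert_tuple_struct_to_named_struct` below) replaces the old name-anchored lookup with a node lookup plus an offset guard. A sketch of the resulting rule, with `$0` marking the cursor as in the test fixtures:

```rust
// struct$0 A { inner: Inner }     -> offered (cursor before the `{`)
// pub$0 struct A { inner: Inner } -> offered (any header position works now)
// struct A { $0inner: Inner }     -> not offered (inside the field list)
if ctx.offset() > field_list.syntax().text_range().start() {
    return None; // the assist would be distracting inside the braces
}
```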
@@ -294,6 +302,88 @@
}
#[test]
+ fn convert_simple_struct_cursor_on_struct_keyword() {
+ check_assist(
+ convert_named_struct_to_tuple_struct,
+ r#"
+struct Inner;
+struct$0 A { inner: Inner }
+
+impl A {
+ fn new(inner: Inner) -> A {
+ A { inner }
+ }
+
+ fn new_with_default() -> A {
+ A::new(Inner)
+ }
+
+ fn into_inner(self) -> Inner {
+ self.inner
+ }
+}"#,
+ r#"
+struct Inner;
+struct A(Inner);
+
+impl A {
+ fn new(inner: Inner) -> A {
+ A(inner)
+ }
+
+ fn new_with_default() -> A {
+ A::new(Inner)
+ }
+
+ fn into_inner(self) -> Inner {
+ self.0
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn convert_simple_struct_cursor_on_visibility_keyword() {
+ check_assist(
+ convert_named_struct_to_tuple_struct,
+ r#"
+struct Inner;
+pub$0 struct A { inner: Inner }
+
+impl A {
+ fn new(inner: Inner) -> A {
+ A { inner }
+ }
+
+ fn new_with_default() -> A {
+ A::new(Inner)
+ }
+
+ fn into_inner(self) -> Inner {
+ self.inner
+ }
+}"#,
+ r#"
+struct Inner;
+pub struct A(Inner);
+
+impl A {
+ fn new(inner: Inner) -> A {
+ A(inner)
+ }
+
+ fn new_with_default() -> A {
+ A::new(Inner)
+ }
+
+ fn into_inner(self) -> Inner {
+ self.0
+ }
+}"#,
+ );
+ }
+
+ #[test]
fn convert_struct_referenced_via_self_kw() {
check_assist(
convert_named_struct_to_tuple_struct,
@@ -996,7 +1086,8 @@
}
"#,
r#"
-pub struct Foo(#[my_custom_attr] u32);
+pub struct Foo(#[my_custom_attr]
+u32);
"#,
);
}
diff --git a/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs b/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs
index 777e366..8075619 100644
--- a/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs
+++ b/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs
@@ -51,18 +51,26 @@
acc: &mut Assists,
ctx: &AssistContext<'_>,
) -> Option<()> {
- let name = ctx.find_node_at_offset::<ast::Name>()?;
- let strukt = name.syntax().parent().and_then(<Either<ast::Struct, ast::Variant>>::cast)?;
- let field_list = strukt.as_ref().either(|s| s.field_list(), |v| v.field_list())?;
+ let strukt_or_variant = ctx
+ .find_node_at_offset::<ast::Struct>()
+ .map(Either::Left)
+ .or_else(|| ctx.find_node_at_offset::<ast::Variant>().map(Either::Right))?;
+ let field_list = strukt_or_variant.as_ref().either(|s| s.field_list(), |v| v.field_list())?;
+
+ if ctx.offset() > field_list.syntax().text_range().start() {
+ // The assist would be distracting inside the braces
+ return None;
+ }
+
let tuple_fields = match field_list {
ast::FieldList::TupleFieldList(it) => it,
ast::FieldList::RecordFieldList(_) => return None,
};
- let strukt_def = match &strukt {
+ let strukt_def = match &strukt_or_variant {
Either::Left(s) => Either::Left(ctx.sema.to_def(s)?),
Either::Right(v) => Either::Right(ctx.sema.to_def(v)?),
};
- let target = strukt.as_ref().either(|s| s.syntax(), |v| v.syntax()).text_range();
+ let target = strukt_or_variant.as_ref().either(|s| s.syntax(), |v| v.syntax()).text_range();
acc.add(
AssistId::refactor_rewrite("convert_tuple_struct_to_named_struct"),
@@ -72,7 +80,7 @@
let names = generate_names(tuple_fields.fields());
edit_field_references(ctx, edit, tuple_fields.fields(), &names);
edit_struct_references(ctx, edit, strukt_def, &names);
- edit_struct_def(ctx, edit, &strukt, tuple_fields, names);
+ edit_struct_def(ctx, edit, &strukt_or_variant, tuple_fields, names);
},
)
}
@@ -317,6 +325,88 @@
}
#[test]
+ fn convert_simple_struct_cursor_on_struct_keyword() {
+ check_assist(
+ convert_tuple_struct_to_named_struct,
+ r#"
+struct Inner;
+struct$0 A(Inner);
+
+impl A {
+ fn new(inner: Inner) -> A {
+ A(inner)
+ }
+
+ fn new_with_default() -> A {
+ A::new(Inner)
+ }
+
+ fn into_inner(self) -> Inner {
+ self.0
+ }
+}"#,
+ r#"
+struct Inner;
+struct A { field1: Inner }
+
+impl A {
+ fn new(inner: Inner) -> A {
+ A { field1: inner }
+ }
+
+ fn new_with_default() -> A {
+ A::new(Inner)
+ }
+
+ fn into_inner(self) -> Inner {
+ self.field1
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn convert_simple_struct_cursor_on_visibility_keyword() {
+ check_assist(
+ convert_tuple_struct_to_named_struct,
+ r#"
+struct Inner;
+pub$0 struct A(Inner);
+
+impl A {
+ fn new(inner: Inner) -> A {
+ A(inner)
+ }
+
+ fn new_with_default() -> A {
+ A::new(Inner)
+ }
+
+ fn into_inner(self) -> Inner {
+ self.0
+ }
+}"#,
+ r#"
+struct Inner;
+pub struct A { field1: Inner }
+
+impl A {
+ fn new(inner: Inner) -> A {
+ A { field1: inner }
+ }
+
+ fn new_with_default() -> A {
+ A::new(Inner)
+ }
+
+ fn into_inner(self) -> Inner {
+ self.field1
+ }
+}"#,
+ );
+ }
+
+ #[test]
fn convert_struct_referenced_via_self_kw() {
check_assist(
convert_tuple_struct_to_named_struct,
@@ -923,7 +1013,8 @@
pub struct $0Foo(#[my_custom_attr] u32);
"#,
r#"
-pub struct Foo { #[my_custom_attr] field1: u32 }
+pub struct Foo { #[my_custom_attr]
+field1: u32 }
"#,
);
}
diff --git a/crates/ide-assists/src/handlers/extract_function.rs b/crates/ide-assists/src/handlers/extract_function.rs
index e977798..cf45ea0 100644
--- a/crates/ide-assists/src/handlers/extract_function.rs
+++ b/crates/ide-assists/src/handlers/extract_function.rs
@@ -204,12 +204,7 @@
.kind
.is_some_and(|kind| matches!(kind, FlowKind::Break(_, _) | FlowKind::Continue(_)))
{
- let scope = match scope {
- ImportScope::File(it) => ImportScope::File(builder.make_mut(it)),
- ImportScope::Module(it) => ImportScope::Module(builder.make_mut(it)),
- ImportScope::Block(it) => ImportScope::Block(builder.make_mut(it)),
- };
-
+ let scope = builder.make_import_scope_mut(scope);
let control_flow_enum =
FamousDefs(&ctx.sema, module.krate()).core_ops_ControlFlow();
diff --git a/crates/ide-assists/src/handlers/fix_visibility.rs b/crates/ide-assists/src/handlers/fix_visibility.rs
index 19e0a73..3badc17 100644
--- a/crates/ide-assists/src/handlers/fix_visibility.rs
+++ b/crates/ide-assists/src/handlers/fix_visibility.rs
@@ -7,10 +7,10 @@
use crate::{AssistContext, AssistId, Assists};
-// FIXME: this really should be a fix for diagnostic, rather than an assist.
-
// Assist: fix_visibility
//
+// Note that there is some duplication between this and the no_such_field diagnostic.
+//
// Makes inaccessible item public.
//
// ```
@@ -32,7 +32,6 @@
// ```
pub(crate) fn fix_visibility(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
add_vis_to_referenced_module_def(acc, ctx)
- .or_else(|| add_vis_to_referenced_record_field(acc, ctx))
}
fn add_vis_to_referenced_module_def(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
@@ -88,59 +87,6 @@
})
}
-fn add_vis_to_referenced_record_field(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
- let record_field: ast::RecordExprField = ctx.find_node_at_offset()?;
- let (record_field_def, _, _) = ctx.sema.resolve_record_field(&record_field)?;
-
- let current_module = ctx.sema.scope(record_field.syntax())?.module();
- let current_edition = current_module.krate().edition(ctx.db());
- let visibility = record_field_def.visibility(ctx.db());
- if visibility.is_visible_from(ctx.db(), current_module.into()) {
- return None;
- }
-
- let parent = record_field_def.parent_def(ctx.db());
- let parent_name = parent.name(ctx.db());
- let target_module = parent.module(ctx.db());
-
- let in_file_source = record_field_def.source(ctx.db())?;
- let (vis_owner, target) = match in_file_source.value {
- hir::FieldSource::Named(it) => {
- let range = it.syntax().text_range();
- (ast::AnyHasVisibility::new(it), range)
- }
- hir::FieldSource::Pos(it) => {
- let range = it.syntax().text_range();
- (ast::AnyHasVisibility::new(it), range)
- }
- };
-
- let missing_visibility = if current_module.krate() == target_module.krate() {
- make::visibility_pub_crate()
- } else {
- make::visibility_pub()
- };
- let target_file = in_file_source.file_id.original_file(ctx.db());
-
- let target_name = record_field_def.name(ctx.db());
- let assist_label = format!(
- "Change visibility of {}.{} to {missing_visibility}",
- parent_name.display(ctx.db(), current_edition),
- target_name.display(ctx.db(), current_edition)
- );
-
- acc.add(AssistId::quick_fix("fix_visibility"), assist_label, target, |edit| {
- edit.edit_file(target_file.file_id(ctx.db()));
-
- let vis_owner = edit.make_mut(vis_owner);
- vis_owner.set_visibility(Some(missing_visibility.clone_for_update()));
-
- if let Some((cap, vis)) = ctx.config.snippet_cap.zip(vis_owner.visibility()) {
- edit.add_tabstop_before(cap, vis);
- }
- })
-}
-
fn target_data_for_def(
db: &dyn HirDatabase,
def: hir::ModuleDef,
@@ -294,44 +240,6 @@
}
#[test]
- fn fix_visibility_of_struct_field() {
- check_assist(
- fix_visibility,
- r"mod foo { pub struct Foo { bar: (), } }
- fn main() { foo::Foo { $0bar: () }; } ",
- r"mod foo { pub struct Foo { $0pub(crate) bar: (), } }
- fn main() { foo::Foo { bar: () }; } ",
- );
- check_assist(
- fix_visibility,
- r"
-//- /lib.rs
-mod foo;
-fn main() { foo::Foo { $0bar: () }; }
-//- /foo.rs
-pub struct Foo { bar: () }
-",
- r"pub struct Foo { $0pub(crate) bar: () }
-",
- );
- check_assist_not_applicable(
- fix_visibility,
- r"mod foo { pub struct Foo { pub bar: (), } }
- fn main() { foo::Foo { $0bar: () }; } ",
- );
- check_assist_not_applicable(
- fix_visibility,
- r"
-//- /lib.rs
-mod foo;
-fn main() { foo::Foo { $0bar: () }; }
-//- /foo.rs
-pub struct Foo { pub bar: () }
-",
- );
- }
-
- #[test]
fn fix_visibility_of_enum_variant_field() {
// Enum variants, as well as their fields, always get the enum's visibility. In fact, rustc
// rejects any visibility specifiers on them, so this assist should never fire on them.
@@ -368,44 +276,6 @@
}
#[test]
- fn fix_visibility_of_union_field() {
- check_assist(
- fix_visibility,
- r"mod foo { pub union Foo { bar: (), } }
- fn main() { foo::Foo { $0bar: () }; } ",
- r"mod foo { pub union Foo { $0pub(crate) bar: (), } }
- fn main() { foo::Foo { bar: () }; } ",
- );
- check_assist(
- fix_visibility,
- r"
-//- /lib.rs
-mod foo;
-fn main() { foo::Foo { $0bar: () }; }
-//- /foo.rs
-pub union Foo { bar: () }
-",
- r"pub union Foo { $0pub(crate) bar: () }
-",
- );
- check_assist_not_applicable(
- fix_visibility,
- r"mod foo { pub union Foo { pub bar: (), } }
- fn main() { foo::Foo { $0bar: () }; } ",
- );
- check_assist_not_applicable(
- fix_visibility,
- r"
-//- /lib.rs
-mod foo;
-fn main() { foo::Foo { $0bar: () }; }
-//- /foo.rs
-pub union Foo { pub bar: () }
-",
- );
- }
-
- #[test]
fn fix_visibility_of_const() {
check_assist(
fix_visibility,
@@ -572,19 +442,6 @@
r"$0pub struct Bar;
",
);
- check_assist(
- fix_visibility,
- r"
-//- /main.rs crate:a deps:foo
-fn main() {
- foo::Foo { $0bar: () };
-}
-//- /lib.rs crate:foo
-pub struct Foo { pub(crate) bar: () }
-",
- r"pub struct Foo { $0pub bar: () }
-",
- );
}
#[test]
diff --git a/crates/ide-assists/src/handlers/remove_underscore.rs b/crates/ide-assists/src/handlers/remove_underscore.rs
index 912e193..a8e2741 100644
--- a/crates/ide-assists/src/handlers/remove_underscore.rs
+++ b/crates/ide-assists/src/handlers/remove_underscore.rs
@@ -1,6 +1,7 @@
use ide_db::{
assists::AssistId,
defs::{Definition, NameClass, NameRefClass},
+ rename::RenameDefinition,
};
use syntax::{AstNode, ast};
@@ -61,7 +62,7 @@
"Remove underscore from a used variable",
text_range,
|builder| {
- let changes = def.rename(&ctx.sema, new_name).unwrap();
+ let changes = def.rename(&ctx.sema, new_name, RenameDefinition::Yes).unwrap();
builder.source_change = changes;
},
)
diff --git a/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs b/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs
index c067747..fa005a4 100644
--- a/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs
+++ b/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs
@@ -81,11 +81,7 @@
|builder| {
// Now that we've brought the name into scope, re-qualify all paths that could be
// affected (that is, all paths inside the node we added the `use` to).
- let scope = match scope {
- ImportScope::File(it) => ImportScope::File(builder.make_mut(it)),
- ImportScope::Module(it) => ImportScope::Module(builder.make_mut(it)),
- ImportScope::Block(it) => ImportScope::Block(builder.make_mut(it)),
- };
+ let scope = builder.make_import_scope_mut(scope);
shorten_paths(scope.as_syntax_node(), &original_path);
let path = drop_generic_args(&original_path);
let edition = ctx
diff --git a/crates/ide-assists/src/handlers/unqualify_method_call.rs b/crates/ide-assists/src/handlers/unqualify_method_call.rs
index ebb8ef9..1f89a3d 100644
--- a/crates/ide-assists/src/handlers/unqualify_method_call.rs
+++ b/crates/ide-assists/src/handlers/unqualify_method_call.rs
@@ -1,4 +1,3 @@
-use ide_db::imports::insert_use::ImportScope;
use syntax::{
TextRange,
ast::{self, AstNode, HasArgList, prec::ExprPrecedence},
@@ -114,11 +113,7 @@
);
if let Some(scope) = scope {
- let scope = match scope {
- ImportScope::File(it) => ImportScope::File(edit.make_mut(it)),
- ImportScope::Module(it) => ImportScope::Module(edit.make_mut(it)),
- ImportScope::Block(it) => ImportScope::Block(edit.make_mut(it)),
- };
+ let scope = edit.make_import_scope_mut(scope);
ide_db::imports::insert_use::insert_use(&scope, import, &ctx.config.insert_use);
}
}
diff --git a/crates/ide-completion/src/completions/attribute.rs b/crates/ide-completion/src/completions/attribute.rs
index 3c195f8..705402c 100644
--- a/crates/ide-completion/src/completions/attribute.rs
+++ b/crates/ide-completion/src/completions/attribute.rs
@@ -25,6 +25,7 @@
mod cfg;
mod derive;
+mod diagnostic;
mod lint;
mod macro_use;
mod repr;
@@ -40,23 +41,22 @@
extern_crate: Option<&ast::ExternCrate>,
) -> Option<()> {
let attribute = fake_attribute_under_caret;
- let name_ref = match attribute.path() {
- Some(p) => Some(p.as_single_name_ref()?),
- None => None,
- };
- let (path, tt) = name_ref.zip(attribute.token_tree())?;
- tt.l_paren_token()?;
+ let path = attribute.path()?;
+ let segments = path.segments().map(|s| s.name_ref()).collect::<Option<Vec<_>>>()?;
+ let segments = segments.iter().map(|n| n.text()).collect::<Vec<_>>();
+ let segments = segments.iter().map(|t| t.as_str()).collect::<Vec<_>>();
+ let tt = attribute.token_tree()?;
- match path.text().as_str() {
- "repr" => repr::complete_repr(acc, ctx, tt),
- "feature" => lint::complete_lint(
+ match segments.as_slice() {
+ ["repr"] => repr::complete_repr(acc, ctx, tt),
+ ["feature"] => lint::complete_lint(
acc,
ctx,
colon_prefix,
&parse_tt_as_comma_sep_paths(tt, ctx.edition)?,
FEATURES,
),
- "allow" | "expect" | "deny" | "forbid" | "warn" => {
+ ["allow"] | ["expect"] | ["deny"] | ["forbid"] | ["warn"] => {
let existing_lints = parse_tt_as_comma_sep_paths(tt, ctx.edition)?;
let lints: Vec<Lint> = CLIPPY_LINT_GROUPS
@@ -70,13 +70,14 @@
lint::complete_lint(acc, ctx, colon_prefix, &existing_lints, &lints);
}
- "cfg" => cfg::complete_cfg(acc, ctx),
- "macro_use" => macro_use::complete_macro_use(
+ ["cfg"] => cfg::complete_cfg(acc, ctx),
+ ["macro_use"] => macro_use::complete_macro_use(
acc,
ctx,
extern_crate,
&parse_tt_as_comma_sep_paths(tt, ctx.edition)?,
),
+ ["diagnostic", "on_unimplemented"] => diagnostic::complete_on_unimplemented(acc, ctx, tt),
_ => (),
}
Some(())
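Previously the dispatch required a single-segment attribute path (`path.as_single_name_ref()`), so tool attributes like `#[diagnostic::on_unimplemented]` could never reach it. Matching on a slice of segment texts keeps the old arms and admits multi-segment ones; condensed from the hunk above:

```rust
match segments.as_slice() {
    ["repr"] => { /* unchanged single-segment arms */ }
    ["diagnostic", "on_unimplemented"] => { /* new multi-segment arm */ }
    _ => (),
}
```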
@@ -139,6 +140,8 @@
}
Qualified::TypeAnchor { .. } | Qualified::With { .. } => {}
}
+ let qualifier_path =
+ if let Qualified::With { path, .. } = qualified { Some(path) } else { None };
let attributes = annotated_item_kind.and_then(|kind| {
if ast::Expr::can_cast(kind) {
@@ -149,18 +152,33 @@
});
let add_completion = |attr_completion: &AttrCompletion| {
- let mut item = CompletionItem::new(
- SymbolKind::Attribute,
- ctx.source_range(),
- attr_completion.label,
- ctx.edition,
- );
+ // If the completion's qualifiers are not already present in the typed path,
+ // add the missing parts to the label and snippet.
+ let mut label = attr_completion.label.to_owned();
+ let mut snippet = attr_completion.snippet.map(|s| s.to_owned());
+ let segments = qualifier_path.iter().flat_map(|q| q.segments()).collect::<Vec<_>>();
+ let qualifiers = attr_completion.qualifiers;
+ let matching_qualifiers = segments
+ .iter()
+ .zip(qualifiers)
+ .take_while(|(s, q)| s.name_ref().is_some_and(|t| t.text() == **q))
+ .count();
+ if matching_qualifiers != qualifiers.len() {
+ let prefix = qualifiers[matching_qualifiers..].join("::");
+ label = format!("{prefix}::{label}");
+ if let Some(s) = snippet.as_mut() {
+ *s = format!("{prefix}::{s}");
+ }
+ }
+
+ let mut item =
+ CompletionItem::new(SymbolKind::Attribute, ctx.source_range(), label, ctx.edition);
if let Some(lookup) = attr_completion.lookup {
item.lookup_by(lookup);
}
- if let Some((snippet, cap)) = attr_completion.snippet.zip(ctx.config.snippet_cap) {
+ if let Some((snippet, cap)) = snippet.zip(ctx.config.snippet_cap) {
item.insert_snippet(cap, snippet);
}
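The qualifier handling above can be restated as a small standalone function (hypothetical, for illustration; the real code works on `ast::PathSegment`s rather than plain strings):

```rust
/// Drop the qualifiers the user has already typed; prepend the rest.
fn prefixed_label(label: &str, qualifiers: &[&str], typed: &[&str]) -> String {
    let matching =
        typed.iter().zip(qualifiers).take_while(|(t, q)| t == q).count();
    if matching == qualifiers.len() {
        label.to_owned()
    } else {
        format!("{}::{}", qualifiers[matching..].join("::"), label)
    }
}

fn main() {
    // `#[$0]` -> nothing typed yet, so the full path is shown:
    assert_eq!(
        prefixed_label("on_unimplemented", &["diagnostic"], &[]),
        "diagnostic::on_unimplemented"
    );
    // `#[diagnostic::$0]` -> the qualifier is already there:
    assert_eq!(
        prefixed_label("on_unimplemented", &["diagnostic"], &["diagnostic"]),
        "on_unimplemented"
    );
}
```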
@@ -184,6 +202,7 @@
label: &'static str,
lookup: Option<&'static str>,
snippet: Option<&'static str>,
+ qualifiers: &'static [&'static str],
prefer_inner: bool,
}
@@ -192,6 +211,10 @@
self.lookup.unwrap_or(self.label)
}
+ const fn qualifiers(self, qualifiers: &'static [&'static str]) -> AttrCompletion {
+ AttrCompletion { qualifiers, ..self }
+ }
+
const fn prefer_inner(self) -> AttrCompletion {
AttrCompletion { prefer_inner: true, ..self }
}
@@ -202,7 +225,7 @@
lookup: Option<&'static str>,
snippet: Option<&'static str>,
) -> AttrCompletion {
- AttrCompletion { label, lookup, snippet, prefer_inner: false }
+ AttrCompletion { label, lookup, snippet, qualifiers: &[], prefer_inner: false }
}
macro_rules! attrs {
@@ -264,14 +287,14 @@
FN,
attrs!(
item, linkable,
- "cold", "ignore", "inline", "must_use", "panic_handler", "proc_macro",
+ "cold", "ignore", "inline", "panic_handler", "proc_macro",
"proc_macro_derive", "proc_macro_attribute", "should_panic", "target_feature",
"test", "track_caller"
),
),
(STATIC, attrs!(item, linkable, "global_allocator", "used")),
- (TRAIT, attrs!(item, "must_use")),
- (IMPL, attrs!(item, "automatically_derived")),
+ (TRAIT, attrs!(item, "diagnostic::on_unimplemented")),
+ (IMPL, attrs!(item, "automatically_derived", "diagnostic::do_not_recommend")),
(ASSOC_ITEM_LIST, attrs!(item)),
(EXTERN_BLOCK, attrs!(item, "link")),
(EXTERN_ITEM_LIST, attrs!(item, "link")),
@@ -311,6 +334,14 @@
attr("deny(…)", Some("deny"), Some("deny(${0:lint})")),
attr(r#"deprecated"#, Some("deprecated"), Some(r#"deprecated"#)),
attr("derive(…)", Some("derive"), Some(r#"derive(${0:Debug})"#)),
+ attr("do_not_recommend", Some("diagnostic::do_not_recommend"), None)
+ .qualifiers(&["diagnostic"]),
+ attr(
+ "on_unimplemented",
+ Some("diagnostic::on_unimplemented"),
+ Some(r#"on_unimplemented(${0:keys})"#),
+ )
+ .qualifiers(&["diagnostic"]),
attr(r#"doc = "…""#, Some("doc"), Some(r#"doc = "${0:docs}""#)),
attr(r#"doc(alias = "…")"#, Some("docalias"), Some(r#"doc(alias = "${0:docs}")"#)),
attr(r#"doc(hidden)"#, Some("dochidden"), Some(r#"doc(hidden)"#)),
diff --git a/crates/ide-completion/src/completions/attribute/diagnostic.rs b/crates/ide-completion/src/completions/attribute/diagnostic.rs
new file mode 100644
index 0000000..8adc974
--- /dev/null
+++ b/crates/ide-completion/src/completions/attribute/diagnostic.rs
@@ -0,0 +1,60 @@
+//! Completion for diagnostic attributes.
+
+use ide_db::SymbolKind;
+use syntax::ast;
+
+use crate::{CompletionItem, Completions, context::CompletionContext};
+
+use super::AttrCompletion;
+
+pub(super) fn complete_on_unimplemented(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ input: ast::TokenTree,
+) {
+ if let Some(existing_keys) = super::parse_comma_sep_expr(input) {
+ for attr in ATTRIBUTE_ARGS {
+ let already_annotated = existing_keys
+ .iter()
+ .filter_map(|expr| match expr {
+ ast::Expr::PathExpr(path) => path.path()?.as_single_name_ref(),
+ ast::Expr::BinExpr(bin)
+ if bin.op_kind() == Some(ast::BinaryOp::Assignment { op: None }) =>
+ {
+ match bin.lhs()? {
+ ast::Expr::PathExpr(path) => path.path()?.as_single_name_ref(),
+ _ => None,
+ }
+ }
+ _ => None,
+ })
+ .any(|it| {
+ let text = it.text();
+ attr.key() == text && text != "note"
+ });
+ if already_annotated {
+ continue;
+ }
+
+ let mut item = CompletionItem::new(
+ SymbolKind::BuiltinAttr,
+ ctx.source_range(),
+ attr.label,
+ ctx.edition,
+ );
+ if let Some(lookup) = attr.lookup {
+ item.lookup_by(lookup);
+ }
+ if let Some((snippet, cap)) = attr.snippet.zip(ctx.config.snippet_cap) {
+ item.insert_snippet(cap, snippet);
+ }
+ item.add_to(acc, ctx.db);
+ }
+ }
+}
+
+const ATTRIBUTE_ARGS: &[AttrCompletion] = &[
+ super::attr(r#"label = "…""#, Some("label"), Some(r#"label = "${0:label}""#)),
+ super::attr(r#"message = "…""#, Some("message"), Some(r#"message = "${0:message}""#)),
+ super::attr(r#"note = "…""#, Some("note"), Some(r#"note = "${0:note}""#)),
+];
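One subtlety in `complete_on_unimplemented` above: a key that is already present suppresses its own completion, except `note`, which `on_unimplemented` accepts multiple times. The rule, restated as a hypothetical helper over plain strings instead of AST nodes:

```rust
fn already_annotated(existing_keys: &[&str], key: &str) -> bool {
    existing_keys.iter().any(|&k| k == key && k != "note")
}

fn main() {
    assert!(already_annotated(&["message"], "message")); // suppressed
    assert!(!already_annotated(&["note"], "note")); // `note` is repeatable
    assert!(!already_annotated(&["message"], "label")); // still offered
}
```

This matches the `attr_diagnostic_on_unimplemented` test further down: with `note = "foo"` already written, `note` is still offered while `message` is not.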
diff --git a/crates/ide-completion/src/tests/attribute.rs b/crates/ide-completion/src/tests/attribute.rs
index 32d3b50..411902f 100644
--- a/crates/ide-completion/src/tests/attribute.rs
+++ b/crates/ide-completion/src/tests/attribute.rs
@@ -30,6 +30,8 @@
at deprecated
at derive macro derive
at derive(…)
+ at diagnostic::do_not_recommend
+ at diagnostic::on_unimplemented
at doc = "…"
at doc(alias = "…")
at doc(hidden)
@@ -472,13 +474,13 @@
at cfg_attr(…)
at deny(…)
at deprecated
+ at diagnostic::on_unimplemented
at doc = "…"
at doc(alias = "…")
at doc(hidden)
at expect(…)
at forbid(…)
at must_use
- at must_use
at no_mangle
at warn(…)
kw crate::
@@ -498,6 +500,7 @@
at cfg_attr(…)
at deny(…)
at deprecated
+ at diagnostic::do_not_recommend
at doc = "…"
at doc(alias = "…")
at doc(hidden)
@@ -533,6 +536,76 @@
}
#[test]
+fn attr_with_qualifier() {
+ check(
+ r#"#[diagnostic::$0] impl () {}"#,
+ expect![[r#"
+ at allow(…)
+ at automatically_derived
+ at cfg(…)
+ at cfg_attr(…)
+ at deny(…)
+ at deprecated
+ at do_not_recommend
+ at doc = "…"
+ at doc(alias = "…")
+ at doc(hidden)
+ at expect(…)
+ at forbid(…)
+ at must_use
+ at no_mangle
+ at warn(…)
+ "#]],
+ );
+ check(
+ r#"#[diagnostic::$0] trait Foo {}"#,
+ expect![[r#"
+ at allow(…)
+ at cfg(…)
+ at cfg_attr(…)
+ at deny(…)
+ at deprecated
+ at doc = "…"
+ at doc(alias = "…")
+ at doc(hidden)
+ at expect(…)
+ at forbid(…)
+ at must_use
+ at no_mangle
+ at on_unimplemented
+ at warn(…)
+ "#]],
+ );
+}
+
+#[test]
+fn attr_diagnostic_on_unimplemented() {
+ check(
+ r#"#[diagnostic::on_unimplemented($0)] trait Foo {}"#,
+ expect![[r#"
+ ba label = "…"
+ ba message = "…"
+ ba note = "…"
+ "#]],
+ );
+ check(
+ r#"#[diagnostic::on_unimplemented(message = "foo", $0)] trait Foo {}"#,
+ expect![[r#"
+ ba label = "…"
+ ba note = "…"
+ "#]],
+ );
+ check(
+ r#"#[diagnostic::on_unimplemented(note = "foo", $0)] trait Foo {}"#,
+ expect![[r#"
+ ba label = "…"
+ ba message = "…"
+ ba note = "…"
+ "#]],
+ );
+}
+
+#[test]
fn attr_on_extern_block() {
check(
r#"#[$0] extern {}"#,
@@ -619,7 +692,6 @@
at link_name = "…"
at link_section = "…"
at must_use
- at must_use
at no_mangle
at panic_handler
at proc_macro
@@ -649,6 +721,8 @@
at deny(…)
at deprecated
at derive(…)
+ at diagnostic::do_not_recommend
+ at diagnostic::on_unimplemented
at doc = "…"
at doc(alias = "…")
at doc(hidden)
diff --git a/crates/ide-db/src/generated/lints.rs b/crates/ide-db/src/generated/lints.rs
index f9ff392..de8a429 100644
--- a/crates/ide-db/src/generated/lints.rs
+++ b/crates/ide-db/src/generated/lints.rs
@@ -4458,20 +4458,6 @@
deny_since: None,
},
Lint {
- label: "const_eq_ignore_ascii_case",
- description: r##"# `const_eq_ignore_ascii_case`
-
-The tracking issue for this feature is: [#131719]
-
-[#131719]: https://github.com/rust-lang/rust/issues/131719
-
-------------------------
-"##,
- default_severity: Severity::Allow,
- warn_since: None,
- deny_since: None,
- },
- Lint {
label: "const_eval_select",
description: r##"# `const_eval_select`
diff --git a/crates/ide-db/src/imports/insert_use.rs b/crates/ide-db/src/imports/insert_use.rs
index d26e5d6..813f383 100644
--- a/crates/ide-db/src/imports/insert_use.rs
+++ b/crates/ide-db/src/imports/insert_use.rs
@@ -60,107 +60,87 @@
}
#[derive(Debug, Clone)]
-pub enum ImportScope {
+pub struct ImportScope {
+ pub kind: ImportScopeKind,
+ pub required_cfgs: Vec<ast::Attr>,
+}
+
+#[derive(Debug, Clone)]
+pub enum ImportScopeKind {
File(ast::SourceFile),
Module(ast::ItemList),
Block(ast::StmtList),
}
impl ImportScope {
- // FIXME: Remove this?
- #[cfg(test)]
- fn from(syntax: SyntaxNode) -> Option<Self> {
- use syntax::match_ast;
- fn contains_cfg_attr(attrs: &dyn HasAttrs) -> bool {
- attrs.attrs().any(|attr| attr.as_simple_call().is_some_and(|(ident, _)| ident == "cfg"))
- }
- match_ast! {
- match syntax {
- ast::Module(module) => module.item_list().map(ImportScope::Module),
- ast::SourceFile(file) => Some(ImportScope::File(file)),
- ast::Fn(func) => contains_cfg_attr(&func).then(|| func.body().and_then(|it| it.stmt_list().map(ImportScope::Block))).flatten(),
- ast::Const(konst) => contains_cfg_attr(&konst).then(|| match konst.body()? {
- ast::Expr::BlockExpr(block) => Some(block),
- _ => None,
- }).flatten().and_then(|it| it.stmt_list().map(ImportScope::Block)),
- ast::Static(statik) => contains_cfg_attr(&statik).then(|| match statik.body()? {
- ast::Expr::BlockExpr(block) => Some(block),
- _ => None,
- }).flatten().and_then(|it| it.stmt_list().map(ImportScope::Block)),
- _ => None,
-
- }
- }
- }
-
/// Determines the containing syntax node in which to insert a `use` statement affecting `position`.
/// Returns the original source node inside attributes.
pub fn find_insert_use_container(
position: &SyntaxNode,
sema: &Semantics<'_, RootDatabase>,
) -> Option<Self> {
- fn contains_cfg_attr(attrs: &dyn HasAttrs) -> bool {
- attrs.attrs().any(|attr| attr.as_simple_call().is_some_and(|(ident, _)| ident == "cfg"))
- }
-
+ // The closest block expression ancestor
+ let mut block = None;
+ let mut required_cfgs = Vec::new();
// Walk up the ancestor tree searching for a suitable node to do insertions on
// with special handling on cfg-gated items, in which case we want to insert imports locally
// or FIXME: annotate inserted imports with the same cfg
for syntax in sema.ancestors_with_macros(position.clone()) {
if let Some(file) = ast::SourceFile::cast(syntax.clone()) {
- return Some(ImportScope::File(file));
- } else if let Some(item) = ast::Item::cast(syntax) {
- return match item {
- ast::Item::Const(konst) if contains_cfg_attr(&konst) => {
- // FIXME: Instead of bailing out with None, we should note down that
- // this import needs an attribute added
- match sema.original_ast_node(konst)?.body()? {
- ast::Expr::BlockExpr(block) => block,
- _ => return None,
+ return Some(ImportScope { kind: ImportScopeKind::File(file), required_cfgs });
+ } else if let Some(module) = ast::Module::cast(syntax.clone()) {
+ // The early return is important here: if we can't find the original module
+ // in the input, there is no way for us to insert an import anywhere.
+ return sema
+ .original_ast_node(module)?
+ .item_list()
+ .map(ImportScopeKind::Module)
+ .map(|kind| ImportScope { kind, required_cfgs });
+ } else if let Some(has_attrs) = ast::AnyHasAttrs::cast(syntax) {
+ if block.is_none() {
+ if let Some(b) = ast::BlockExpr::cast(has_attrs.syntax().clone()) {
+ if let Some(b) = sema.original_ast_node(b) {
+ block = b.stmt_list();
}
- .stmt_list()
- .map(ImportScope::Block)
}
- ast::Item::Fn(func) if contains_cfg_attr(&func) => {
- // FIXME: Instead of bailing out with None, we should note down that
- // this import needs an attribute added
- sema.original_ast_node(func)?.body()?.stmt_list().map(ImportScope::Block)
+ }
+ if has_attrs
+ .attrs()
+ .any(|attr| attr.as_simple_call().is_some_and(|(ident, _)| ident == "cfg"))
+ {
+ if let Some(b) = block {
+ return Some(ImportScope {
+ kind: ImportScopeKind::Block(b),
+ required_cfgs,
+ });
}
- ast::Item::Static(statik) if contains_cfg_attr(&statik) => {
- // FIXME: Instead of bailing out with None, we should note down that
- // this import needs an attribute added
- match sema.original_ast_node(statik)?.body()? {
- ast::Expr::BlockExpr(block) => block,
- _ => return None,
- }
- .stmt_list()
- .map(ImportScope::Block)
- }
- ast::Item::Module(module) => {
- // early return is important here, if we can't find the original module
- // in the input there is no way for us to insert an import anywhere.
- sema.original_ast_node(module)?.item_list().map(ImportScope::Module)
- }
- _ => continue,
- };
+ required_cfgs.extend(has_attrs.attrs().filter(|attr| {
+ attr.as_simple_call().is_some_and(|(ident, _)| ident == "cfg")
+ }));
+ }
}
}
None
}
pub fn as_syntax_node(&self) -> &SyntaxNode {
- match self {
- ImportScope::File(file) => file.syntax(),
- ImportScope::Module(item_list) => item_list.syntax(),
- ImportScope::Block(block) => block.syntax(),
+ match &self.kind {
+ ImportScopeKind::File(file) => file.syntax(),
+ ImportScopeKind::Module(item_list) => item_list.syntax(),
+ ImportScopeKind::Block(block) => block.syntax(),
}
}
pub fn clone_for_update(&self) -> Self {
- match self {
- ImportScope::File(file) => ImportScope::File(file.clone_for_update()),
- ImportScope::Module(item_list) => ImportScope::Module(item_list.clone_for_update()),
- ImportScope::Block(block) => ImportScope::Block(block.clone_for_update()),
+ Self {
+ kind: match &self.kind {
+ ImportScopeKind::File(file) => ImportScopeKind::File(file.clone_for_update()),
+ ImportScopeKind::Module(item_list) => {
+ ImportScopeKind::Module(item_list.clone_for_update())
+ }
+ ImportScopeKind::Block(block) => ImportScopeKind::Block(block.clone_for_update()),
+ },
+ required_cfgs: self.required_cfgs.iter().map(|attr| attr.clone_for_update()).collect(),
}
}
}
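The rewritten `find_insert_use_container` walk is dense; here is a hypothetical condensation of its control flow, with booleans standing in for AST nodes and the `original_ast_node` fallibility elided:

```rust
/// Walk ancestors innermost-first. Remember the closest block expression; the
/// first cfg-gated ancestor encountered after a block pins the scope to that
/// block. Otherwise every `#[cfg]` on the way up is collected so the eventual
/// file- or module-scope import can be annotated with all of them.
fn scope_for(ancestors: &[(bool /* is block */, bool /* has #[cfg] */)]) -> (&'static str, usize) {
    let (mut block_seen, mut cfgs) = (false, 0);
    for &(is_block, has_cfg) in ancestors {
        if is_block {
            block_seen = true;
        }
        if has_cfg {
            if block_seen {
                return ("block", cfgs);
            }
            cfgs += 1;
        }
    }
    ("file", cfgs)
}

fn main() {
    // `fn f($0) {}` inside a cfg-gated impl: the cursor sits in the signature,
    // so no block is seen below the cfg-gated items and both cfgs are
    // collected (cf. the `respects_cfg_attr_multiple_layers` test below).
    assert_eq!(scope_for(&[(false, true), (false, true)]), ("file", 2));
    // `fn foo() { $0 }` under `#[cfg(test)]`: the body block is seen first,
    // so the import stays local to the block.
    assert_eq!(scope_for(&[(true, false), (false, true)]), ("block", 0));
}
```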
@@ -216,6 +196,11 @@
use_tree.wrap_in_tree_list();
}
let use_item = make::use_(None, use_tree).clone_for_update();
+ for attr in
+ scope.required_cfgs.iter().map(|attr| attr.syntax().clone_subtree().clone_for_update())
+ {
+ ted::insert(ted::Position::first_child_of(use_item.syntax()), attr);
+ }
// merge into existing imports if possible
if let Some(mb) = mb {
@@ -229,7 +214,6 @@
}
}
}
-
// either we weren't allowed to merge or there is no import that fits the merge conditions
// so look for the place we have to insert to
insert_use_(scope, use_item, cfg.group);
@@ -316,10 +300,10 @@
}
_ => None,
};
- let mut use_stmts = match scope {
- ImportScope::File(f) => f.items(),
- ImportScope::Module(m) => m.items(),
- ImportScope::Block(b) => b.items(),
+ let mut use_stmts = match &scope.kind {
+ ImportScopeKind::File(f) => f.items(),
+ ImportScopeKind::Module(m) => m.items(),
+ ImportScopeKind::Block(b) => b.items(),
}
.filter_map(use_stmt);
let mut res = ImportGranularityGuess::Unknown;
@@ -463,12 +447,12 @@
}
}
- let l_curly = match scope {
- ImportScope::File(_) => None,
+ let l_curly = match &scope.kind {
+ ImportScopeKind::File(_) => None,
// don't insert the imports before the item list/block expr's opening curly brace
- ImportScope::Module(item_list) => item_list.l_curly_token(),
+ ImportScopeKind::Module(item_list) => item_list.l_curly_token(),
// don't insert the imports before the item list's opening curly brace
- ImportScope::Block(block) => block.l_curly_token(),
+ ImportScopeKind::Block(block) => block.l_curly_token(),
};
// there are no imports in this file at all
// so put the import after all inner module attributes and possible license header comments
diff --git a/crates/ide-db/src/imports/insert_use/tests.rs b/crates/ide-db/src/imports/insert_use/tests.rs
index 428ba1d..4a00854 100644
--- a/crates/ide-db/src/imports/insert_use/tests.rs
+++ b/crates/ide-db/src/imports/insert_use/tests.rs
@@ -23,7 +23,7 @@
}
#[test]
-fn respects_cfg_attr_fn() {
+fn respects_cfg_attr_fn_body() {
check(
r"bar::Bar",
r#"
@@ -41,6 +41,25 @@
}
#[test]
+fn respects_cfg_attr_fn_sig() {
+ check(
+ r"bar::Bar",
+ r#"
+#[cfg(test)]
+fn foo($0) {}
+"#,
+ r#"
+#[cfg(test)]
+use bar::Bar;
+
+#[cfg(test)]
+fn foo() {}
+"#,
+ ImportGranularity::Crate,
+ );
+}
+
+#[test]
fn respects_cfg_attr_const() {
check(
r"bar::Bar",
@@ -59,6 +78,51 @@
}
#[test]
+fn respects_cfg_attr_impl() {
+ check(
+ r"bar::Bar",
+ r#"
+#[cfg(test)]
+impl () {$0}
+"#,
+ r#"
+#[cfg(test)]
+use bar::Bar;
+
+#[cfg(test)]
+impl () {}
+"#,
+ ImportGranularity::Crate,
+ );
+}
+
+#[test]
+fn respects_cfg_attr_multiple_layers() {
+ check(
+ r"bar::Bar",
+ r#"
+#[cfg(test)]
+impl () {
+ #[cfg(test2)]
+ fn f($0) {}
+}
+"#,
+ r#"
+#[cfg(test)]
+#[cfg(test2)]
+use bar::Bar;
+
+#[cfg(test)]
+impl () {
+ #[cfg(test2)]
+ fn f() {}
+}
+"#,
+ ImportGranularity::Crate,
+ );
+}
+
+#[test]
fn insert_skips_lone_glob_imports() {
check(
"use foo::baz::A",
@@ -813,7 +877,7 @@
}
#[test]
-fn merge_groups_skip_attributed() {
+fn merge_groups_cfg_vs_no_cfg() {
check_crate(
"std::io",
r#"
@@ -837,6 +901,25 @@
}
#[test]
+fn merge_groups_cfg_matching() {
+ check_crate(
+ "std::io",
+ r#"
+#[cfg(feature = "gated")] use std::fmt::{Result, Display};
+
+#[cfg(feature = "gated")]
+fn f($0) {}
+"#,
+ r#"
+#[cfg(feature = "gated")] use std::{fmt::{Display, Result}, io};
+
+#[cfg(feature = "gated")]
+fn f() {}
+"#,
+ );
+}
+
+#[test]
fn split_out_merge() {
// FIXME: This is suboptimal, we want to get `use std::fmt::{self, Result}`
// instead.
@@ -1259,12 +1342,14 @@
};
let sema = &Semantics::new(&db);
let source_file = sema.parse(file_id);
- let syntax = source_file.syntax().clone_for_update();
let file = pos
- .and_then(|pos| syntax.token_at_offset(pos.expect_offset()).next()?.parent())
+ .and_then(|pos| source_file.syntax().token_at_offset(pos.expect_offset()).next()?.parent())
.and_then(|it| ImportScope::find_insert_use_container(&it, sema))
- .or_else(|| ImportScope::from(syntax))
- .unwrap();
+ .unwrap_or_else(|| ImportScope {
+ kind: ImportScopeKind::File(source_file),
+ required_cfgs: vec![],
+ })
+ .clone_for_update();
let path = ast::SourceFile::parse(&format!("use {path};"), span::Edition::CURRENT)
.tree()
.syntax()
@@ -1349,7 +1434,7 @@
}
fn check_guess(#[rust_analyzer::rust_fixture] ra_fixture: &str, expected: ImportGranularityGuess) {
- let syntax = ast::SourceFile::parse(ra_fixture, span::Edition::CURRENT).tree().syntax().clone();
- let file = ImportScope::from(syntax).unwrap();
+ let syntax = ast::SourceFile::parse(ra_fixture, span::Edition::CURRENT).tree();
+ let file = ImportScope { kind: ImportScopeKind::File(syntax), required_cfgs: vec![] };
assert_eq!(super::guess_granularity_from_scope(&file), expected);
}
diff --git a/crates/ide-db/src/rename.rs b/crates/ide-db/src/rename.rs
index fa2a46a..4e737e2 100644
--- a/crates/ide-db/src/rename.rs
+++ b/crates/ide-db/src/rename.rs
@@ -20,7 +20,7 @@
//!
//! The correct behavior in such cases is probably to show a dialog to the user.
//! Our current behavior is ¯\_(ツ)_/¯.
-use std::fmt;
+use std::fmt::{self, Display};
use crate::{
source_change::ChangeAnnotation,
@@ -28,13 +28,12 @@
};
use base_db::AnchoredPathBuf;
use either::Either;
-use hir::{EditionedFileId, FieldSource, FileRange, InFile, ModuleSource, Semantics};
+use hir::{FieldSource, FileRange, InFile, ModuleSource, Name, Semantics, sym};
use span::{Edition, FileId, SyntaxContext};
use stdx::{TupleExt, never};
use syntax::{
AstNode, SyntaxKind, T, TextRange,
ast::{self, HasName},
- utils::is_raw_identifier,
};
use crate::{
@@ -70,26 +69,33 @@
}
pub use _bail as bail;
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub enum RenameDefinition {
+ Yes,
+ No,
+}
+
impl Definition {
pub fn rename(
&self,
sema: &Semantics<'_, RootDatabase>,
new_name: &str,
+ rename_definition: RenameDefinition,
) -> Result<SourceChange> {
- // We append `r#` if needed.
- let new_name = new_name.trim_start_matches("r#");
-
// self.krate() returns None if
// self is a built-in attr, built-in type or tool module.
// it is not allowed for these defs to be renamed.
// cases where self.krate() is None is handled below.
- if let Some(krate) = self.krate(sema.db) {
+ let edition = if let Some(krate) = self.krate(sema.db) {
// Can we not rename non-local items?
// Then bail if non-local
if !krate.origin(sema.db).is_local() {
bail!("Cannot rename a non-local definition")
}
- }
+ krate.edition(sema.db)
+ } else {
+ Edition::LATEST
+ };
match *self {
Definition::Module(module) => rename_mod(sema, module, new_name),
@@ -103,8 +109,10 @@
bail!("Cannot rename a builtin attr.")
}
Definition::SelfType(_) => bail!("Cannot rename `Self`"),
- Definition::Macro(mac) => rename_reference(sema, Definition::Macro(mac), new_name),
- def => rename_reference(sema, def, new_name),
+ Definition::Macro(mac) => {
+ rename_reference(sema, Definition::Macro(mac), new_name, rename_definition, edition)
+ }
+ def => rename_reference(sema, def, new_name, rename_definition, edition),
}
}
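`Definition::rename` grows a `RenameDefinition` flag so callers can patch references without renaming the definition itself. Hypothetical call sites (only the signature is taken from the diff; `sema` and `def` are assumed):

```rust
// Rename the definition and every reference (what `remove_underscore` and
// `incorrect_case` above pass):
let all = def.rename(&sema, "new_name", RenameDefinition::Yes)?;
// Update references only, leaving the definition's own name untouched:
let refs_only = def.rename(&sema, "new_name", RenameDefinition::No)?;
```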
@@ -237,10 +245,6 @@
module: hir::Module,
new_name: &str,
) -> Result<SourceChange> {
- if IdentifierKind::classify(new_name)? != IdentifierKind::Ident {
- bail!("Invalid name `{0}`: cannot rename module to {0}", new_name);
- }
-
let mut source_change = SourceChange::default();
if module.is_crate_root() {
@@ -248,6 +252,14 @@
}
let InFile { file_id, value: def_source } = module.definition_source(sema.db);
+ let edition = file_id.edition(sema.db);
+ let (new_name, kind) = IdentifierKind::classify(edition, new_name)?;
+ if kind != IdentifierKind::Ident {
+ bail!(
+ "Invalid name `{0}`: cannot rename module to {0}",
+ new_name.display(sema.db, edition)
+ );
+ }
if let ModuleSource::SourceFile(..) = def_source {
let anchor = file_id.original_file(sema.db).file_id(sema.db);
@@ -256,7 +268,7 @@
// Module exists in a named file
if !is_mod_rs {
- let path = format!("{new_name}.rs");
+ let path = format!("{}.rs", new_name.as_str());
let dst = AnchoredPathBuf { anchor, path };
source_change.push_file_system_edit(FileSystemEdit::MoveFile { src: anchor, dst })
}
@@ -267,11 +279,11 @@
let dir_paths = match (is_mod_rs, has_detached_child, module.name(sema.db)) {
// Go up one level since the anchor is inside the dir we're trying to rename
(true, _, Some(mod_name)) => {
- Some((format!("../{}", mod_name.as_str()), format!("../{new_name}")))
+ Some((format!("../{}", mod_name.as_str()), format!("../{}", new_name.as_str())))
}
// The anchor is on the same level as target dir
(false, true, Some(mod_name)) => {
- Some((mod_name.as_str().to_owned(), new_name.to_owned()))
+ Some((mod_name.as_str().to_owned(), new_name.as_str().to_owned()))
}
_ => None,
};
@@ -296,11 +308,7 @@
.original_file_range_opt(sema.db)
.map(TupleExt::head)
{
- let new_name = if is_raw_identifier(new_name, file_id.edition(sema.db)) {
- format!("r#{new_name}")
- } else {
- new_name.to_owned()
- };
+ let new_name = new_name.display(sema.db, edition).to_string();
source_change.insert_source_edit(
file_id.file_id(sema.db),
TextEdit::replace(file_range.range, new_name),
@@ -314,9 +322,10 @@
let def = Definition::Module(module);
let usages = def.usages(sema).all();
let ref_edits = usages.iter().map(|(file_id, references)| {
+ let edition = file_id.edition(sema.db);
(
file_id.file_id(sema.db),
- source_edit_from_references(references, def, new_name, file_id.edition(sema.db)),
+ source_edit_from_references(sema.db, references, def, &new_name, edition),
)
});
source_change.extend(ref_edits);
@@ -328,8 +337,10 @@
sema: &Semantics<'_, RootDatabase>,
def: Definition,
new_name: &str,
+ rename_definition: RenameDefinition,
+ edition: Edition,
) -> Result<SourceChange> {
- let ident_kind = IdentifierKind::classify(new_name)?;
+ let (mut new_name, ident_kind) = IdentifierKind::classify(edition, new_name)?;
if matches!(
def,
@@ -337,18 +348,34 @@
) {
match ident_kind {
IdentifierKind::Underscore => {
- bail!("Invalid name `{}`: not a lifetime identifier", new_name);
+ bail!(
+ "Invalid name `{}`: not a lifetime identifier",
+ new_name.display(sema.db, edition)
+ );
}
- _ => cov_mark::hit!(rename_lifetime),
+ IdentifierKind::Ident => {
+ new_name = Name::new_lifetime(&format!("'{}", new_name.as_str()))
+ }
+ IdentifierKind::Lifetime => (),
+ IdentifierKind::LowercaseSelf => bail!(
+ "Invalid name `{}`: not a lifetime identifier",
+ new_name.display(sema.db, edition)
+ ),
}
} else {
match ident_kind {
IdentifierKind::Lifetime => {
cov_mark::hit!(rename_not_an_ident_ref);
- bail!("Invalid name `{}`: not an identifier", new_name);
+ bail!("Invalid name `{}`: not an identifier", new_name.display(sema.db, edition));
}
IdentifierKind::Ident => cov_mark::hit!(rename_non_local),
IdentifierKind::Underscore => (),
+ IdentifierKind::LowercaseSelf => {
+ bail!(
+ "Invalid name `{}`: cannot rename to `self`",
+ new_name.display(sema.db, edition)
+ );
+ }
}
}
@@ -361,30 +388,29 @@
}
let mut source_change = SourceChange::default();
source_change.extend(usages.iter().map(|(file_id, references)| {
+ let edition = file_id.edition(sema.db);
(
file_id.file_id(sema.db),
- source_edit_from_references(references, def, new_name, file_id.edition(sema.db)),
+ source_edit_from_references(sema.db, references, def, &new_name, edition),
)
}));
-
- // This needs to come after the references edits, because we change the annotation of existing edits
- // if a conflict is detected.
- let (file_id, edit) = source_edit_from_def(sema, def, new_name, &mut source_change)?;
- source_change.insert_source_edit(file_id, edit);
+ if rename_definition == RenameDefinition::Yes {
+ // This needs to come after the reference edits, because we change the annotation of existing edits
+ // if a conflict is detected.
+ let (file_id, edit) = source_edit_from_def(sema, def, &new_name, &mut source_change)?;
+ source_change.insert_source_edit(file_id, edit);
+ }
Ok(source_change)
}
pub fn source_edit_from_references(
+ db: &RootDatabase,
references: &[FileReference],
def: Definition,
- new_name: &str,
+ new_name: &Name,
edition: Edition,
) -> TextEdit {
- let new_name = if is_raw_identifier(new_name, edition) {
- format!("r#{new_name}")
- } else {
- new_name.to_owned()
- };
+ let name_display = new_name.display(db, edition);
let mut edit = TextEdit::builder();
// macros can cause multiple refs to occur for the same text range, so keep track of what we have edited so far
let mut edited_ranges = Vec::new();
@@ -395,23 +421,15 @@
// to make special rewrites like shorthand syntax and such, so just rename the node in
// the macro input
FileReferenceNode::NameRef(name_ref) if name_range == range => {
- source_edit_from_name_ref(&mut edit, name_ref, &new_name, def)
+ source_edit_from_name_ref(&mut edit, name_ref, &name_display, def)
}
FileReferenceNode::Name(name) if name_range == range => {
- source_edit_from_name(&mut edit, name, &new_name)
+ source_edit_from_name(&mut edit, name, &name_display)
}
_ => false,
};
if !has_emitted_edit && !edited_ranges.contains(&range.start()) {
- let (range, new_name) = match name {
- FileReferenceNode::Lifetime(_) => (
- TextRange::new(range.start() + syntax::TextSize::from(1), range.end()),
- new_name.strip_prefix('\'').unwrap_or(&new_name).to_owned(),
- ),
- _ => (range, new_name.to_owned()),
- };
-
- edit.replace(range, new_name);
+ edit.replace(range, name_display.to_string());
edited_ranges.push(range.start());
}
}
@@ -419,7 +437,11 @@
edit.finish()
}
-fn source_edit_from_name(edit: &mut TextEditBuilder, name: &ast::Name, new_name: &str) -> bool {
+fn source_edit_from_name(
+ edit: &mut TextEditBuilder,
+ name: &ast::Name,
+ new_name: &dyn Display,
+) -> bool {
if ast::RecordPatField::for_field_name(name).is_some() {
if let Some(ident_pat) = name.syntax().parent().and_then(ast::IdentPat::cast) {
cov_mark::hit!(rename_record_pat_field_name_split);
@@ -439,7 +461,7 @@
fn source_edit_from_name_ref(
edit: &mut TextEditBuilder,
name_ref: &ast::NameRef,
- new_name: &str,
+ new_name: &dyn Display,
def: Definition,
) -> bool {
if name_ref.super_token().is_some() {
@@ -452,6 +474,7 @@
match &(rcf_name_ref, rcf_expr.and_then(|it| expr_as_name_ref(&it))) {
// field: init-expr, check if we can use a field init shorthand
(Some(field_name), Some(init)) => {
+ let new_name = new_name.to_string();
if field_name == name_ref {
if init.text() == new_name {
cov_mark::hit!(test_rename_field_put_init_shorthand);
@@ -507,6 +530,7 @@
{
// field name is being renamed
if let Some(name) = pat.name() {
+ let new_name = new_name.to_string();
if name.text() == new_name {
cov_mark::hit!(test_rename_field_put_init_shorthand_pat);
// Foo { field: ref mut local } -> Foo { ref mut field }
@@ -518,7 +542,7 @@
let s = field_name.syntax().text_range().start();
let e = pat.syntax().text_range().start();
edit.delete(TextRange::new(s, e));
- edit.replace(name.syntax().text_range(), new_name.to_owned());
+ edit.replace(name.syntax().text_range(), new_name);
return true;
}
}
@@ -532,16 +556,9 @@
fn source_edit_from_def(
sema: &Semantics<'_, RootDatabase>,
def: Definition,
- new_name: &str,
+ new_name: &Name,
source_change: &mut SourceChange,
) -> Result<(FileId, TextEdit)> {
- let new_name_edition_aware = |new_name: &str, file_id: EditionedFileId| {
- if is_raw_identifier(new_name, file_id.edition(sema.db)) {
- format!("r#{new_name}")
- } else {
- new_name.to_owned()
- }
- };
let mut edit = TextEdit::builder();
if let Definition::Local(local) = def {
let mut file_id = None;
@@ -573,7 +590,10 @@
{
Some(FileRange { file_id: file_id2, range }) => {
file_id = Some(file_id2);
- edit.replace(range, new_name_edition_aware(new_name, file_id2));
+ edit.replace(
+ range,
+ new_name.display(sema.db, file_id2.edition(sema.db)).to_string(),
+ );
continue;
}
None => {
@@ -587,7 +607,7 @@
// special cases required for renaming fields/locals in Record patterns
if let Some(pat_field) = pat.syntax().parent().and_then(ast::RecordPatField::cast) {
if let Some(name_ref) = pat_field.name_ref() {
- if new_name == name_ref.text().as_str().trim_start_matches("r#")
+ if new_name.as_str() == name_ref.text().as_str().trim_start_matches("r#")
&& pat.at_token().is_none()
{
// Foo { field: ref mut local } -> Foo { ref mut field }
@@ -607,7 +627,9 @@
// ^^^^^ replace this with `new_name`
edit.replace(
name_range,
- new_name_edition_aware(new_name, source.file_id),
+ new_name
+ .display(sema.db, source.file_id.edition(sema.db))
+ .to_string(),
);
}
} else {
@@ -618,10 +640,16 @@
pat.syntax().text_range().start(),
format!("{}: ", pat_field.field_name().unwrap()),
);
- edit.replace(name_range, new_name_edition_aware(new_name, source.file_id));
+ edit.replace(
+ name_range,
+ new_name.display(sema.db, source.file_id.edition(sema.db)).to_string(),
+ );
}
} else {
- edit.replace(name_range, new_name_edition_aware(new_name, source.file_id));
+ edit.replace(
+ name_range,
+ new_name.display(sema.db, source.file_id.edition(sema.db)).to_string(),
+ );
}
}
}
@@ -639,16 +667,13 @@
.range_for_rename(sema)
.ok_or_else(|| format_err!("No identifier available to rename"))?;
let (range, new_name) = match def {
- Definition::GenericParam(hir::GenericParam::LifetimeParam(_)) | Definition::Label(_) => (
- TextRange::new(range.start() + syntax::TextSize::from(1), range.end()),
- new_name.strip_prefix('\'').unwrap_or(new_name).to_owned(),
+ Definition::ExternCrateDecl(decl) if decl.alias(sema.db).is_none() => (
+ TextRange::empty(range.end()),
+ format!(" as {}", new_name.display(sema.db, file_id.edition(sema.db)),),
),
- Definition::ExternCrateDecl(decl) if decl.alias(sema.db).is_none() => {
- (TextRange::empty(range.end()), format!(" as {new_name}"))
- }
- _ => (range, new_name.to_owned()),
+ _ => (range, new_name.display(sema.db, file_id.edition(sema.db)).to_string()),
};
- edit.replace(range, new_name_edition_aware(&new_name, file_id));
+ edit.replace(range, new_name);
Ok((file_id.file_id(sema.db), edit.finish()))
}
@@ -657,26 +682,27 @@
Ident,
Lifetime,
Underscore,
+ LowercaseSelf,
}
impl IdentifierKind {
- pub fn classify(new_name: &str) -> Result<IdentifierKind> {
- let new_name = new_name.trim_start_matches("r#");
- match parser::LexedStr::single_token(Edition::LATEST, new_name) {
+ pub fn classify(edition: Edition, new_name: &str) -> Result<(Name, IdentifierKind)> {
+ match parser::LexedStr::single_token(edition, new_name) {
Some(res) => match res {
- (SyntaxKind::IDENT, _) => {
- if let Some(inner) = new_name.strip_prefix("r#") {
- if matches!(inner, "self" | "crate" | "super" | "Self") {
- bail!("Invalid name: `{}` cannot be a raw identifier", inner);
- }
- }
- Ok(IdentifierKind::Ident)
+ (SyntaxKind::IDENT, _) => Ok((Name::new_root(new_name), IdentifierKind::Ident)),
+ (T![_], _) => {
+ Ok((Name::new_symbol_root(sym::underscore), IdentifierKind::Underscore))
}
- (T![_], _) => Ok(IdentifierKind::Underscore),
(SyntaxKind::LIFETIME_IDENT, _) if new_name != "'static" && new_name != "'_" => {
- Ok(IdentifierKind::Lifetime)
+ Ok((Name::new_lifetime(new_name), IdentifierKind::Lifetime))
}
- _ if is_raw_identifier(new_name, Edition::LATEST) => Ok(IdentifierKind::Ident),
+ _ if SyntaxKind::from_keyword(new_name, edition).is_some() => match new_name {
+ "self" => Ok((Name::new_root(new_name), IdentifierKind::LowercaseSelf)),
+ "crate" | "super" | "Self" => {
+ bail!("Invalid name `{}`: cannot rename to a keyword", new_name)
+ }
+ _ => Ok((Name::new_root(new_name), IdentifierKind::Ident)),
+ },
(_, Some(syntax_error)) => bail!("Invalid name `{}`: {}", new_name, syntax_error),
(_, None) => bail!("Invalid name `{}`: not an identifier", new_name),
},
diff --git a/crates/ide-db/src/search.rs b/crates/ide-db/src/search.rs
index d4ab759..c5ad64e 100644
--- a/crates/ide-db/src/search.rs
+++ b/crates/ide-db/src/search.rs
@@ -961,12 +961,16 @@
// Search for occurrences of the items name
for offset in Self::match_indices(&text, finder, search_range) {
let ret = tree.token_at_offset(offset).any(|token| {
- let Some(str_token) = ast::String::cast(token.clone()) else { return false };
- if let Some((range, Some(nameres))) =
- sema.check_for_format_args_template(token, offset)
+ if let Some((range, _frange, string_token, Some(nameres))) =
+ sema.check_for_format_args_template(token.clone(), offset)
{
- return self
- .found_format_args_ref(file_id, range, str_token, nameres, sink);
+ return self.found_format_args_ref(
+ file_id,
+ range,
+ string_token,
+ nameres,
+ sink,
+ );
}
false
});
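`check_for_format_args_template` now also returns the string token (and its file range), so this caller drops its own `ast::String::cast`. The new shape, isolated (binding names other than the method are assumptions):

```rust
if let Some((range, _frange, string_token, Some(resolution))) =
    sema.check_for_format_args_template(token.clone(), offset)
{
    // `range`: text range of the captured identifier inside the template;
    // `string_token`: the `ast::String` literal holding the template;
    // `resolution`: what the captured identifier resolves to.
    let _ = (range, string_token, resolution);
}
```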
diff --git a/crates/ide-db/src/source_change.rs b/crates/ide-db/src/source_change.rs
index b1b58d6..16c0d8d 100644
--- a/crates/ide-db/src/source_change.rs
+++ b/crates/ide-db/src/source_change.rs
@@ -5,6 +5,7 @@
use std::{collections::hash_map::Entry, fmt, iter, mem};
+use crate::imports::insert_use::{ImportScope, ImportScopeKind};
use crate::text_edit::{TextEdit, TextEditBuilder};
use crate::{SnippetCap, assists::Command, syntax_helpers::tree_diff::diff};
use base_db::AnchoredPathBuf;
@@ -367,6 +368,17 @@
pub fn make_mut<N: AstNode>(&mut self, node: N) -> N {
self.mutated_tree.get_or_insert_with(|| TreeMutator::new(node.syntax())).make_mut(&node)
}
+
+ pub fn make_import_scope_mut(&mut self, scope: ImportScope) -> ImportScope {
+ ImportScope {
+ kind: match scope.kind.clone() {
+ ImportScopeKind::File(it) => ImportScopeKind::File(self.make_mut(it)),
+ ImportScopeKind::Module(it) => ImportScopeKind::Module(self.make_mut(it)),
+ ImportScopeKind::Block(it) => ImportScopeKind::Block(self.make_mut(it)),
+ },
+ required_cfgs: scope.required_cfgs.iter().map(|it| self.make_mut(it.clone())).collect(),
+ }
+ }
/// Returns a copy of the `node`, suitable for mutation.
///
/// Syntax trees in rust-analyzer are typically immutable, and mutating
diff --git a/crates/ide-diagnostics/src/handlers/incorrect_case.rs b/crates/ide-diagnostics/src/handlers/incorrect_case.rs
index 38f10c7..519ff19 100644
--- a/crates/ide-diagnostics/src/handlers/incorrect_case.rs
+++ b/crates/ide-diagnostics/src/handlers/incorrect_case.rs
@@ -1,5 +1,5 @@
use hir::{CaseType, InFile, db::ExpandDatabase};
-use ide_db::{assists::Assist, defs::NameClass};
+use ide_db::{assists::Assist, defs::NameClass, rename::RenameDefinition};
use syntax::AstNode;
use crate::{
@@ -44,7 +44,7 @@
let label = format!("Rename to {}", d.suggested_text);
let mut res = unresolved_fix("change_case", &label, frange.range);
if ctx.resolve.should_resolve(&res.id) {
- let source_change = def.rename(&ctx.sema, &d.suggested_text);
+ let source_change = def.rename(&ctx.sema, &d.suggested_text, RenameDefinition::Yes);
res.source_change = Some(source_change.ok().unwrap_or_default());
}
diff --git a/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs b/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs
index ac1b599..bf7ddda 100644
--- a/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs
+++ b/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs
@@ -135,13 +135,10 @@
"JSON syntax is not valid as a Rust item",
FileRange { file_id: vfs_file_id, range },
)
+ .stable()
.with_fixes(Some(vec![{
let mut scb = SourceChangeBuilder::new(vfs_file_id);
- let scope = match import_scope {
- ImportScope::File(it) => ImportScope::File(scb.make_mut(it)),
- ImportScope::Module(it) => ImportScope::Module(scb.make_mut(it)),
- ImportScope::Block(it) => ImportScope::Block(scb.make_mut(it)),
- };
+ let scope = scb.make_import_scope_mut(import_scope);
let current_module = semantics_scope.module();
let cfg = ImportPathConfig {
diff --git a/crates/ide-diagnostics/src/handlers/no_such_field.rs b/crates/ide-diagnostics/src/handlers/no_such_field.rs
index 84fb467..ef42f2d 100644
--- a/crates/ide-diagnostics/src/handlers/no_such_field.rs
+++ b/crates/ide-diagnostics/src/handlers/no_such_field.rs
@@ -1,4 +1,5 @@
use either::Either;
+use hir::{Field, HasCrate};
use hir::{HasSource, HirDisplay, Semantics, VariantId, db::ExpandDatabase};
use ide_db::text_edit::TextEdit;
use ide_db::{EditionedFileId, RootDatabase, source_change::SourceChange};
@@ -13,44 +14,69 @@
//
 // This diagnostic is triggered if the created structure does not have a field provided in the record.
pub(crate) fn no_such_field(ctx: &DiagnosticsContext<'_>, d: &hir::NoSuchField) -> Diagnostic {
- let node = d.field.map(Into::into);
- if d.private {
- // FIXME: quickfix to add required visibility
- Diagnostic::new_with_syntax_node_ptr(
- ctx,
- DiagnosticCode::RustcHardError("E0451"),
- "field is private",
- node,
- )
- .stable()
+ let (code, message) = if d.private.is_some() {
+ ("E0451", "field is private")
+ } else if let VariantId::EnumVariantId(_) = d.variant {
+ ("E0559", "no such field")
} else {
- Diagnostic::new_with_syntax_node_ptr(
- ctx,
- match d.variant {
- VariantId::EnumVariantId(_) => DiagnosticCode::RustcHardError("E0559"),
- _ => DiagnosticCode::RustcHardError("E0560"),
- },
- "no such field",
- node,
- )
+ ("E0560", "no such field")
+ };
+
+ let node = d.field.map(Into::into);
+ Diagnostic::new_with_syntax_node_ptr(ctx, DiagnosticCode::RustcHardError(code), message, node)
.stable()
.with_fixes(fixes(ctx, d))
- }
}
fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::NoSuchField) -> Option<Vec<Assist>> {
// FIXME: quickfix for pattern
let root = ctx.sema.db.parse_or_expand(d.field.file_id);
match &d.field.value.to_node(&root) {
- Either::Left(node) => missing_record_expr_field_fixes(
- &ctx.sema,
- d.field.file_id.original_file(ctx.sema.db),
- node,
- ),
+ Either::Left(node) => {
+ if let Some(private_field) = d.private {
+ field_is_private_fixes(
+ &ctx.sema,
+ d.field.file_id.original_file(ctx.sema.db),
+ node,
+ private_field,
+ )
+ } else {
+ missing_record_expr_field_fixes(
+ &ctx.sema,
+ d.field.file_id.original_file(ctx.sema.db),
+ node,
+ )
+ }
+ }
_ => None,
}
}
+fn field_is_private_fixes(
+ sema: &Semantics<'_, RootDatabase>,
+ usage_file_id: EditionedFileId,
+ record_expr_field: &ast::RecordExprField,
+ private_field: Field,
+) -> Option<Vec<Assist>> {
+ let def_crate = private_field.krate(sema.db);
+ let usage_crate = sema.file_to_module_def(usage_file_id.file_id(sema.db))?.krate();
+ let visibility = if usage_crate == def_crate { "pub(crate) " } else { "pub " };
+
+ let source = private_field.source(sema.db)?;
+ let (range, _) = source.syntax().original_file_range_opt(sema.db)?;
+ let source_change = SourceChange::from_text_edit(
+ range.file_id.file_id(sema.db),
+ TextEdit::insert(range.range.start(), visibility.into()),
+ );
+
+ Some(vec![fix(
+ "increase_field_visibility",
+ "Increase field visibility",
+ source_change,
+ sema.original_range(record_expr_field.syntax()).range,
+ )])
+}
+
fn missing_record_expr_field_fixes(
sema: &Semantics<'_, RootDatabase>,
usage_file_id: EditionedFileId,
@@ -118,7 +144,7 @@
"create_field",
"Create field",
source_change,
- record_expr_field.syntax().text_range(),
+ sema.original_range(record_expr_field.syntax()).range,
)]);
fn record_field_list(field_def_list: ast::FieldList) -> Option<ast::RecordFieldList> {
@@ -387,15 +413,15 @@
// assignee expression
m::Struct {
field: 0,
- //^^^^^^^^ error: field is private
+ //^^^^^^^^ 💡 error: field is private
field2
- //^^^^^^ error: field is private
+ //^^^^^^ 💡 error: field is private
} = s;
m::Struct {
field: 0,
- //^^^^^^^^ error: field is private
+ //^^^^^^^^ 💡 error: field is private
field2
- //^^^^^^ error: field is private
+ //^^^^^^ 💡 error: field is private
};
}
"#,
@@ -403,6 +429,77 @@
}
#[test]
+ fn test_struct_field_private_same_crate_fix() {
+ check_diagnostics(
+ r#"
+mod m {
+ pub struct Struct {
+ field: u32,
+ }
+}
+fn f() {
+ let _ = m::Struct {
+ field: 0,
+ //^^^^^^^^ 💡 error: field is private
+ };
+}
+"#,
+ );
+
+ check_fix(
+ r#"
+mod m {
+ pub struct Struct {
+ field: u32,
+ }
+}
+fn f() {
+ let _ = m::Struct {
+ field$0: 0,
+ };
+}
+"#,
+ r#"
+mod m {
+ pub struct Struct {
+ pub(crate) field: u32,
+ }
+}
+fn f() {
+ let _ = m::Struct {
+ field: 0,
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_struct_field_private_other_crate_fix() {
+ check_fix(
+ r#"
+//- /lib.rs crate:another_crate
+pub struct Struct {
+ field: u32,
+}
+//- /lib.rs crate:this_crate deps:another_crate
+use another_crate;
+
+fn f() {
+ let _ = another_crate::Struct {
+ field$0: 0,
+ };
+}
+"#,
+ r#"
+pub struct Struct {
+ pub field: u32,
+}
+"#,
+ );
+ }
+
+ #[test]
fn editions_between_macros() {
check_diagnostics(
r#"
diff --git a/crates/ide-diagnostics/src/handlers/unlinked_file.rs b/crates/ide-diagnostics/src/handlers/unlinked_file.rs
index af9126c..d96c658 100644
--- a/crates/ide-diagnostics/src/handlers/unlinked_file.rs
+++ b/crates/ide-diagnostics/src/handlers/unlinked_file.rs
@@ -69,6 +69,7 @@
FileRange { file_id, range },
)
.with_unused(unused)
+ .stable()
.with_fixes(fixes),
);
}
diff --git a/crates/ide/src/expand_macro.rs b/crates/ide/src/expand_macro.rs
index 7c39633..f31886b 100644
--- a/crates/ide/src/expand_macro.rs
+++ b/crates/ide/src/expand_macro.rs
@@ -800,4 +800,65 @@
foo();"#]],
);
}
+
+ #[test]
+ fn works_in_sig() {
+ check(
+ r#"
+macro_rules! foo {
+ () => { u32 };
+}
+fn foo() -> foo$0!() {
+ 42
+}
+"#,
+ expect![[r#"
+ foo!
+ u32"#]],
+ );
+ check(
+ r#"
+macro_rules! foo {
+ () => { u32 };
+}
+fn foo(_: foo$0!() ) {}
+"#,
+ expect![[r#"
+ foo!
+ u32"#]],
+ );
+ }
+
+ #[test]
+ fn works_in_generics() {
+ check(
+ r#"
+trait Trait {}
+macro_rules! foo {
+ () => { Trait };
+}
+impl<const C: foo$0!()> Trait for () {}
+"#,
+ expect![[r#"
+ foo!
+ Trait"#]],
+ );
+ }
+
+ #[test]
+ fn works_in_fields() {
+ check(
+ r#"
+macro_rules! foo {
+ () => { u32 };
+}
+struct S {
+ field: foo$0!(),
+}
+"#,
+ expect![[r#"
+ foo!
+ u32"#]],
+ );
+ }
}
diff --git a/crates/ide/src/goto_definition.rs b/crates/ide/src/goto_definition.rs
index c60ca35..7917aab 100644
--- a/crates/ide/src/goto_definition.rs
+++ b/crates/ide/src/goto_definition.rs
@@ -67,7 +67,7 @@
});
}
- if let Some((range, resolution)) =
+ if let Some((range, _, _, resolution)) =
sema.check_for_format_args_template(original_token.clone(), offset)
{
return Some(RangeInfo::new(
diff --git a/crates/ide/src/goto_type_definition.rs b/crates/ide/src/goto_type_definition.rs
index a78f5cd..a6c7ea2 100644
--- a/crates/ide/src/goto_type_definition.rs
+++ b/crates/ide/src/goto_type_definition.rs
@@ -53,7 +53,9 @@
}
});
};
- if let Some((range, resolution)) = sema.check_for_format_args_template(token.clone(), offset) {
+ if let Some((range, _, _, resolution)) =
+ sema.check_for_format_args_template(token.clone(), offset)
+ {
if let Some(ty) = resolution.and_then(|res| match Definition::from(res) {
Definition::Const(it) => Some(it.ty(db)),
Definition::Static(it) => Some(it.ty(db)),
diff --git a/crates/ide/src/highlight_related.rs b/crates/ide/src/highlight_related.rs
index 520ba39..aa94792 100644
--- a/crates/ide/src/highlight_related.rs
+++ b/crates/ide/src/highlight_related.rs
@@ -11,7 +11,6 @@
preorder_expr_with_ctx_checker,
},
};
-use span::FileId;
use syntax::{
AstNode,
SyntaxKind::{self, IDENT, INT_NUMBER},
@@ -61,13 +60,12 @@
let file_id = sema
.attach_first_edition(file_id)
.unwrap_or_else(|| EditionedFileId::current_edition(sema.db, file_id));
- let span_file_id = file_id.editioned_file_id(sema.db);
let syntax = sema.parse(file_id).syntax().clone();
let token = pick_best_token(syntax.token_at_offset(offset), |kind| match kind {
T![?] => 4, // prefer `?` when the cursor is sandwiched like in `await$0?`
T![->] => 4,
- kind if kind.is_keyword(span_file_id.edition()) => 3,
+ kind if kind.is_keyword(file_id.edition(sema.db)) => 3,
IDENT | INT_NUMBER => 2,
T![|] => 1,
_ => 0,
@@ -92,18 +90,11 @@
T![unsafe] if token.parent().and_then(ast::BlockExpr::cast).is_some() => {
highlight_unsafe_points(sema, token).remove(&file_id)
}
- T![|] if config.closure_captures => {
- highlight_closure_captures(sema, token, file_id, span_file_id.file_id())
+ T![|] if config.closure_captures => highlight_closure_captures(sema, token, file_id),
+ T![move] if config.closure_captures => highlight_closure_captures(sema, token, file_id),
+ _ if config.references => {
+ highlight_references(sema, token, FilePosition { file_id, offset })
}
- T![move] if config.closure_captures => {
- highlight_closure_captures(sema, token, file_id, span_file_id.file_id())
- }
- _ if config.references => highlight_references(
- sema,
- token,
- FilePosition { file_id, offset },
- span_file_id.file_id(),
- ),
_ => None,
}
}
@@ -112,7 +103,6 @@
sema: &Semantics<'_, RootDatabase>,
token: SyntaxToken,
file_id: EditionedFileId,
- vfs_file_id: FileId,
) -> Option<Vec<HighlightedRange>> {
let closure = token.parent_ancestors().take(2).find_map(ast::ClosureExpr::cast)?;
let search_range = closure.body()?.syntax().text_range();
@@ -145,7 +135,7 @@
.sources(sema.db)
.into_iter()
.flat_map(|x| x.to_nav(sema.db))
- .filter(|decl| decl.file_id == vfs_file_id)
+ .filter(|decl| decl.file_id == file_id.file_id(sema.db))
.filter_map(|decl| decl.focus_range)
.map(move |range| HighlightedRange { range, category })
.chain(usages)
@@ -158,9 +148,8 @@
sema: &Semantics<'_, RootDatabase>,
token: SyntaxToken,
FilePosition { file_id, offset }: FilePosition,
- vfs_file_id: FileId,
) -> Option<Vec<HighlightedRange>> {
- let defs = if let Some((range, resolution)) =
+ let defs = if let Some((range, _, _, resolution)) =
sema.check_for_format_args_template(token.clone(), offset)
{
match resolution.map(Definition::from) {
@@ -270,7 +259,7 @@
.sources(sema.db)
.into_iter()
.flat_map(|x| x.to_nav(sema.db))
- .filter(|decl| decl.file_id == vfs_file_id)
+ .filter(|decl| decl.file_id == file_id.file_id(sema.db))
.filter_map(|decl| decl.focus_range)
.map(|range| HighlightedRange { range, category })
.for_each(|x| {
@@ -288,7 +277,7 @@
},
};
for nav in navs {
- if nav.file_id != vfs_file_id {
+ if nav.file_id != file_id.file_id(sema.db) {
continue;
}
let hl_range = nav.focus_range.map(|range| {
diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs
index 8bb1c70..5404a9d 100644
--- a/crates/ide/src/hover.rs
+++ b/crates/ide/src/hover.rs
@@ -200,7 +200,7 @@
});
}
- if let Some((range, resolution)) =
+ if let Some((range, _, _, resolution)) =
sema.check_for_format_args_template(original_token.clone(), offset)
{
let res = hover_for_definition(
diff --git a/crates/ide/src/inlay_hints.rs b/crates/ide/src/inlay_hints.rs
index 82704af..b094b09 100644
--- a/crates/ide/src/inlay_hints.rs
+++ b/crates/ide/src/inlay_hints.rs
@@ -6,7 +6,7 @@
use either::Either;
use hir::{
ClosureStyle, DisplayTarget, EditionedFileId, HasVisibility, HirDisplay, HirDisplayError,
- HirWrite, ModuleDef, ModuleDefId, Semantics, sym,
+ HirWrite, InRealFile, ModuleDef, ModuleDefId, Semantics, sym,
};
use ide_db::{FileRange, RootDatabase, famous_defs::FamousDefs, text_edit::TextEditBuilder};
use ide_db::{FxHashSet, text_edit::TextEdit};
@@ -34,6 +34,7 @@
mod generic_param;
mod implicit_drop;
mod implicit_static;
+mod implied_dyn_trait;
mod lifetime;
mod param_name;
mod range_exclusive;
@@ -95,16 +96,16 @@
return acc;
};
let famous_defs = FamousDefs(&sema, scope.krate());
+ let display_target = famous_defs.1.to_display_target(sema.db);
let ctx = &mut InlayHintCtx::default();
let mut hints = |event| {
if let Some(node) = handle_event(ctx, event) {
- hints(&mut acc, ctx, &famous_defs, config, file_id, node);
+ hints(&mut acc, ctx, &famous_defs, config, file_id, display_target, node);
}
};
let mut preorder = file.preorder();
while let Some(event) = preorder.next() {
- // FIXME: This can miss some hints that require the parent of the range to calculate
if matches!((&event, range_limit), (WalkEvent::Enter(node), Some(range)) if range.intersect(node.text_range()).is_none())
{
preorder.skip_subtree();
@@ -144,10 +145,12 @@
let famous_defs = FamousDefs(&sema, scope.krate());
let mut acc = Vec::new();
+ let display_target = famous_defs.1.to_display_target(sema.db);
+
let ctx = &mut InlayHintCtx::default();
let mut hints = |event| {
if let Some(node) = handle_event(ctx, event) {
- hints(&mut acc, ctx, &famous_defs, config, file_id, node);
+ hints(&mut acc, ctx, &famous_defs, config, file_id, display_target, node);
}
};
@@ -202,17 +205,19 @@
fn hints(
hints: &mut Vec<InlayHint>,
ctx: &mut InlayHintCtx,
- famous_defs @ FamousDefs(sema, _): &FamousDefs<'_, '_>,
+ famous_defs @ FamousDefs(sema, _krate): &FamousDefs<'_, '_>,
config: &InlayHintsConfig,
file_id: EditionedFileId,
+ display_target: DisplayTarget,
node: SyntaxNode,
) {
- let file_id = file_id.editioned_file_id(sema.db);
- let Some(krate) = sema.first_crate(file_id.file_id()) else {
- return;
- };
- let display_target = krate.to_display_target(sema.db);
- closing_brace::hints(hints, sema, config, file_id, display_target, node.clone());
+ closing_brace::hints(
+ hints,
+ sema,
+ config,
+ display_target,
+ InRealFile { file_id, value: node.clone() },
+ );
if let Some(any_has_generic_args) = ast::AnyHasGenericArgs::cast(node.clone()) {
generic_param::hints(hints, famous_defs, config, any_has_generic_args);
}
@@ -231,18 +236,18 @@
closure_captures::hints(hints, famous_defs, config, it.clone());
closure_ret::hints(hints, famous_defs, config, display_target, it)
},
- ast::Expr::RangeExpr(it) => range_exclusive::hints(hints, famous_defs, config, file_id, it),
+ ast::Expr::RangeExpr(it) => range_exclusive::hints(hints, famous_defs, config, it),
_ => Some(()),
}
},
ast::Pat(it) => {
- binding_mode::hints(hints, famous_defs, config, file_id, &it);
+ binding_mode::hints(hints, famous_defs, config, &it);
match it {
ast::Pat::IdentPat(it) => {
bind_pat::hints(hints, famous_defs, config, display_target, &it);
}
ast::Pat::RangePat(it) => {
- range_exclusive::hints(hints, famous_defs, config, file_id, it);
+ range_exclusive::hints(hints, famous_defs, config, it);
}
_ => {}
}
@@ -250,30 +255,38 @@
},
ast::Item(it) => match it {
ast::Item::Fn(it) => {
- implicit_drop::hints(hints, famous_defs, config, file_id, &it);
+ implicit_drop::hints(hints, famous_defs, config, display_target, &it);
if let Some(extern_block) = &ctx.extern_block_parent {
- extern_block::fn_hints(hints, famous_defs, config, file_id, &it, extern_block);
+ extern_block::fn_hints(hints, famous_defs, config, &it, extern_block);
}
- lifetime::fn_hints(hints, ctx, famous_defs, config, file_id, it)
+ lifetime::fn_hints(hints, ctx, famous_defs, config, it)
},
ast::Item::Static(it) => {
if let Some(extern_block) = &ctx.extern_block_parent {
- extern_block::static_hints(hints, famous_defs, config, file_id, &it, extern_block);
+ extern_block::static_hints(hints, famous_defs, config, &it, extern_block);
}
- implicit_static::hints(hints, famous_defs, config, file_id, Either::Left(it))
+ implicit_static::hints(hints, famous_defs, config, Either::Left(it))
},
- ast::Item::Const(it) => implicit_static::hints(hints, famous_defs, config, file_id, Either::Right(it)),
- ast::Item::Enum(it) => discriminant::enum_hints(hints, famous_defs, config, file_id, it),
- ast::Item::ExternBlock(it) => extern_block::extern_block_hints(hints, famous_defs, config, file_id, it),
+ ast::Item::Const(it) => implicit_static::hints(hints, famous_defs, config, Either::Right(it)),
+ ast::Item::Enum(it) => discriminant::enum_hints(hints, famous_defs, config, it),
+ ast::Item::ExternBlock(it) => extern_block::extern_block_hints(hints, famous_defs, config, it),
_ => None,
},
// FIXME: trait object type elisions
ast::Type(ty) => match ty {
- ast::Type::FnPtrType(ptr) => lifetime::fn_ptr_hints(hints, ctx, famous_defs, config, file_id, ptr),
- ast::Type::PathType(path) => lifetime::fn_path_hints(hints, ctx, famous_defs, config, file_id, path),
+ ast::Type::FnPtrType(ptr) => lifetime::fn_ptr_hints(hints, ctx, famous_defs, config, ptr),
+ ast::Type::PathType(path) => {
+ lifetime::fn_path_hints(hints, ctx, famous_defs, config, &path);
+ implied_dyn_trait::hints(hints, famous_defs, config, Either::Left(path));
+ Some(())
+ },
+ ast::Type::DynTraitType(dyn_) => {
+ implied_dyn_trait::hints(hints, famous_defs, config, Either::Right(dyn_));
+ Some(())
+ },
_ => Some(()),
},
- ast::GenericParamList(it) => bounds::hints(hints, famous_defs, config, file_id, it),
+ ast::GenericParamList(it) => bounds::hints(hints, famous_defs, config, it),
_ => Some(()),
}
};
@@ -438,6 +451,7 @@
Parameter,
GenericParameter,
Type,
+ Dyn,
Drop,
RangeExclusive,
ExternUnsafety,
diff --git a/crates/ide/src/inlay_hints/binding_mode.rs b/crates/ide/src/inlay_hints/binding_mode.rs
index d291732..169ab92 100644
--- a/crates/ide/src/inlay_hints/binding_mode.rs
+++ b/crates/ide/src/inlay_hints/binding_mode.rs
@@ -8,7 +8,6 @@
use ide_db::famous_defs::FamousDefs;
use ide_db::text_edit::TextEditBuilder;
-use span::EditionedFileId;
use syntax::ast::{self, AstNode};
use crate::{InlayHint, InlayHintLabel, InlayHintPosition, InlayHintsConfig, InlayKind};
@@ -17,7 +16,6 @@
acc: &mut Vec<InlayHint>,
FamousDefs(sema, _): &FamousDefs<'_, '_>,
config: &InlayHintsConfig,
- _file_id: EditionedFileId,
pat: &ast::Pat,
) -> Option<()> {
if !config.binding_mode_hints {
diff --git a/crates/ide/src/inlay_hints/bounds.rs b/crates/ide/src/inlay_hints/bounds.rs
index 8ddbfae..b9a98f8 100644
--- a/crates/ide/src/inlay_hints/bounds.rs
+++ b/crates/ide/src/inlay_hints/bounds.rs
@@ -3,7 +3,6 @@
//! Currently this renders the implied `Sized` bound.
use ide_db::{FileRange, famous_defs::FamousDefs};
-use span::EditionedFileId;
use syntax::ast::{self, AstNode, HasTypeBounds};
use crate::{
@@ -15,7 +14,6 @@
acc: &mut Vec<InlayHint>,
famous_defs @ FamousDefs(sema, _): &FamousDefs<'_, '_>,
config: &InlayHintsConfig,
- _file_id: EditionedFileId,
params: ast::GenericParamList,
) -> Option<()> {
if !config.sized_bound {
diff --git a/crates/ide/src/inlay_hints/closing_brace.rs b/crates/ide/src/inlay_hints/closing_brace.rs
index 2ec85da..ca3a982 100644
--- a/crates/ide/src/inlay_hints/closing_brace.rs
+++ b/crates/ide/src/inlay_hints/closing_brace.rs
@@ -3,9 +3,8 @@
//! fn g() {
//! } /* fn g */
//! ```
-use hir::{DisplayTarget, HirDisplay, Semantics};
+use hir::{DisplayTarget, HirDisplay, InRealFile, Semantics};
use ide_db::{FileRange, RootDatabase};
-use span::EditionedFileId;
use syntax::{
SyntaxKind, SyntaxNode, T,
ast::{self, AstNode, HasLoopBody, HasName},
@@ -21,15 +20,14 @@
acc: &mut Vec<InlayHint>,
sema: &Semantics<'_, RootDatabase>,
config: &InlayHintsConfig,
- file_id: EditionedFileId,
display_target: DisplayTarget,
- original_node: SyntaxNode,
+ InRealFile { file_id, value: node }: InRealFile<SyntaxNode>,
) -> Option<()> {
let min_lines = config.closing_brace_hints_min_lines?;
let name = |it: ast::Name| it.syntax().text_range();
- let mut node = original_node.clone();
+ let mut node = node.clone();
let mut closing_token;
let (label, name_range) = if let Some(item_list) = ast::AssocItemList::cast(node.clone()) {
closing_token = item_list.r_curly_token()?;
@@ -44,7 +42,7 @@
let hint_text = match trait_ {
Some(tr) => format!(
"impl {} for {}",
- tr.name(sema.db).display(sema.db, file_id.edition()),
+ tr.name(sema.db).display(sema.db, display_target.edition),
                     ty.display_truncated(sema.db, config.max_length, display_target)
                 ),
None => format!("impl {}", ty.display_truncated(sema.db, config.max_length, display_target)),
@@ -142,7 +140,8 @@
return None;
}
- let linked_location = name_range.map(|range| FileRange { file_id: file_id.into(), range });
+ let linked_location =
+ name_range.map(|range| FileRange { file_id: file_id.file_id(sema.db), range });
acc.push(InlayHint {
range: closing_token.text_range(),
kind: InlayKind::ClosingBrace,
@@ -151,7 +150,7 @@
position: InlayHintPosition::After,
pad_left: true,
pad_right: false,
- resolve_parent: Some(original_node.text_range()),
+ resolve_parent: Some(node.text_range()),
});
None
diff --git a/crates/ide/src/inlay_hints/discriminant.rs b/crates/ide/src/inlay_hints/discriminant.rs
index 827a043..a2a7028 100644
--- a/crates/ide/src/inlay_hints/discriminant.rs
+++ b/crates/ide/src/inlay_hints/discriminant.rs
@@ -7,7 +7,6 @@
use hir::Semantics;
use ide_db::text_edit::TextEdit;
use ide_db::{RootDatabase, famous_defs::FamousDefs};
-use span::EditionedFileId;
use syntax::ast::{self, AstNode, HasName};
use crate::{
@@ -19,7 +18,6 @@
acc: &mut Vec<InlayHint>,
FamousDefs(sema, _): &FamousDefs<'_, '_>,
config: &InlayHintsConfig,
- _: EditionedFileId,
enum_: ast::Enum,
) -> Option<()> {
if let DiscriminantHints::Never = config.discriminant_hints {
diff --git a/crates/ide/src/inlay_hints/extern_block.rs b/crates/ide/src/inlay_hints/extern_block.rs
index 20f54b2..88152bf 100644
--- a/crates/ide/src/inlay_hints/extern_block.rs
+++ b/crates/ide/src/inlay_hints/extern_block.rs
@@ -1,6 +1,5 @@
//! Extern block hints
use ide_db::{famous_defs::FamousDefs, text_edit::TextEdit};
-use span::EditionedFileId;
use syntax::{AstNode, SyntaxToken, ast};
use crate::{InlayHint, InlayHintsConfig};
@@ -9,7 +8,6 @@
acc: &mut Vec<InlayHint>,
FamousDefs(sema, _): &FamousDefs<'_, '_>,
config: &InlayHintsConfig,
- _file_id: EditionedFileId,
extern_block: ast::ExternBlock,
) -> Option<()> {
if extern_block.unsafe_token().is_some() {
@@ -36,7 +34,6 @@
acc: &mut Vec<InlayHint>,
FamousDefs(sema, _): &FamousDefs<'_, '_>,
config: &InlayHintsConfig,
- _file_id: EditionedFileId,
fn_: &ast::Fn,
extern_block: &ast::ExternBlock,
) -> Option<()> {
@@ -55,7 +52,6 @@
acc: &mut Vec<InlayHint>,
FamousDefs(sema, _): &FamousDefs<'_, '_>,
config: &InlayHintsConfig,
- _file_id: EditionedFileId,
static_: &ast::Static,
extern_block: &ast::ExternBlock,
) -> Option<()> {
diff --git a/crates/ide/src/inlay_hints/implicit_drop.rs b/crates/ide/src/inlay_hints/implicit_drop.rs
index f52e279..bf4688e 100644
--- a/crates/ide/src/inlay_hints/implicit_drop.rs
+++ b/crates/ide/src/inlay_hints/implicit_drop.rs
@@ -12,7 +12,6 @@
};
use ide_db::{FileRange, famous_defs::FamousDefs};
-use span::EditionedFileId;
use syntax::{
ToSmolStr,
ast::{self, AstNode},
@@ -25,7 +24,7 @@
acc: &mut Vec<InlayHint>,
FamousDefs(sema, _): &FamousDefs<'_, '_>,
config: &InlayHintsConfig,
- file_id: EditionedFileId,
+ display_target: hir::DisplayTarget,
node: &ast::Fn,
) -> Option<()> {
if !config.implicit_drop_hints {
@@ -94,7 +93,7 @@
MirSpan::Unknown => continue,
};
let binding = &hir.bindings[binding_idx];
- let name = binding.name.display_no_db(file_id.edition()).to_smolstr();
+ let name = binding.name.display_no_db(display_target.edition).to_smolstr();
if name.starts_with("<ra@") {
continue; // Ignore desugared variables
}
diff --git a/crates/ide/src/inlay_hints/implicit_static.rs b/crates/ide/src/inlay_hints/implicit_static.rs
index f3be09f..7212efd 100644
--- a/crates/ide/src/inlay_hints/implicit_static.rs
+++ b/crates/ide/src/inlay_hints/implicit_static.rs
@@ -5,7 +5,6 @@
use either::Either;
use ide_db::famous_defs::FamousDefs;
use ide_db::text_edit::TextEdit;
-use span::EditionedFileId;
use syntax::{
SyntaxKind,
ast::{self, AstNode},
@@ -17,7 +16,6 @@
acc: &mut Vec<InlayHint>,
FamousDefs(_sema, _): &FamousDefs<'_, '_>,
config: &InlayHintsConfig,
- _file_id: EditionedFileId,
statik_or_const: Either<ast::Static, ast::Const>,
) -> Option<()> {
if config.lifetime_elision_hints != LifetimeElisionHints::Always {
diff --git a/crates/ide/src/inlay_hints/implied_dyn_trait.rs b/crates/ide/src/inlay_hints/implied_dyn_trait.rs
new file mode 100644
index 0000000..32d1305
--- /dev/null
+++ b/crates/ide/src/inlay_hints/implied_dyn_trait.rs
@@ -0,0 +1,133 @@
 +//! Implementation of implied `dyn` trait hints.
 +//!
 +//! Currently this renders the implied `dyn` keyword in front of bare trait
 +//! object types.
+use either::Either;
+use ide_db::{famous_defs::FamousDefs, text_edit::TextEdit};
+
+use syntax::ast::{self, AstNode};
+
+use crate::{InlayHint, InlayHintLabel, InlayHintPosition, InlayHintsConfig, InlayKind};
+
+pub(super) fn hints(
+ acc: &mut Vec<InlayHint>,
+ FamousDefs(sema, _): &FamousDefs<'_, '_>,
+ config: &InlayHintsConfig,
+ path: Either<ast::PathType, ast::DynTraitType>,
+) -> Option<()> {
+ let parent = path.syntax().parent()?;
+ let range = match path {
+ Either::Left(path) => {
+ let paren =
+ parent.ancestors().take_while(|it| ast::ParenType::can_cast(it.kind())).last();
+ let parent = paren.as_ref().and_then(|it| it.parent()).unwrap_or(parent);
+ if ast::TypeBound::can_cast(parent.kind())
+ || ast::TypeAnchor::can_cast(parent.kind())
+ || ast::Impl::cast(parent)
+ .and_then(|it| it.trait_())
+ .is_some_and(|it| it.syntax() == path.syntax())
+ {
+ return None;
+ }
+ sema.resolve_trait(&path.path()?)?;
+ paren.map_or_else(|| path.syntax().text_range(), |it| it.text_range())
+ }
+ Either::Right(dyn_) => {
+ if dyn_.dyn_token().is_some() {
+ return None;
+ }
+
+ dyn_.syntax().text_range()
+ }
+ };
+
+ acc.push(InlayHint {
+ range,
+ kind: InlayKind::Dyn,
+ label: InlayHintLabel::simple("dyn", None, None),
+ text_edit: Some(
+ config.lazy_text_edit(|| TextEdit::insert(range.start(), "dyn ".to_owned())),
+ ),
+ position: InlayHintPosition::Before,
+ pad_left: false,
+ pad_right: true,
+ resolve_parent: Some(range),
+ });
+
+ Some(())
+}
+
+#[cfg(test)]
+mod tests {
+
+ use expect_test::expect;
+
+ use crate::inlay_hints::InlayHintsConfig;
+
+ use crate::inlay_hints::tests::{DISABLED_CONFIG, check_edit, check_with_config};
+
+ #[track_caller]
+ fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str) {
+ check_with_config(InlayHintsConfig { sized_bound: true, ..DISABLED_CONFIG }, ra_fixture);
+ }
+
+ #[test]
+ fn path_works() {
+ check(
+ r#"
+struct S {}
+trait T {}
+fn foo(_: T, _: dyn T, _: S) {}
+ // ^ dyn
+fn foo(_: &T, _: for<'a> T) {}
+ // ^ dyn
+ // ^ dyn
+impl T {}
+ // ^ dyn
+impl T for (T) {}
+ // ^^^ dyn
+"#,
+ );
+ }
+
+ #[test]
+ fn missing_dyn_bounds() {
+ check(
+ r#"
+trait T {}
+fn foo(
+ _: T + T,
+ // ^^^^^ dyn
+ _: T + 'a,
+ // ^^^^^^ dyn
+ _: 'a + T,
+ // ^^^^^^ dyn
 +    _: &(T + T),
 +       // ^^^^^ dyn
 +    _: &mut (T + T),
 +          // ^^^^^ dyn
+ _: *mut (T),
+ // ^^^ dyn
+) {}
+"#,
+ );
+ }
+
+ #[test]
+ fn edit() {
+ check_edit(
+ DISABLED_CONFIG,
+ r#"
+trait T {}
+fn foo(
+ _: &mut T
+) {}
+"#,
+ expect![[r#"
+ trait T {}
+ fn foo(
+ _: &mut dyn T
+ ) {}
+ "#]],
+ );
+ }
+}
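
Rendered in an editor, the new hint prefixes a bare trait object type with a phantom `dyn` and carries a text edit making it explicit; roughly, assuming some trait `T` (hint shown in ⟨⟩, mirroring the `edit` test above):

    fn takes(_: &⟨dyn ⟩T) {}  // what the editor displays
    fn takes(_: &dyn T) {}    // the source after accepting the hint's edit
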
diff --git a/crates/ide/src/inlay_hints/lifetime.rs b/crates/ide/src/inlay_hints/lifetime.rs
index baba49a..0069452 100644
--- a/crates/ide/src/inlay_hints/lifetime.rs
+++ b/crates/ide/src/inlay_hints/lifetime.rs
@@ -6,7 +6,6 @@
use ide_db::{FxHashMap, famous_defs::FamousDefs, syntax_helpers::node_ext::walk_ty};
use itertools::Itertools;
-use span::EditionedFileId;
use syntax::{SmolStr, format_smolstr};
use syntax::{
SyntaxKind, SyntaxToken,
@@ -23,7 +22,6 @@
ctx: &mut InlayHintCtx,
fd: &FamousDefs<'_, '_>,
config: &InlayHintsConfig,
- file_id: EditionedFileId,
func: ast::Fn,
) -> Option<()> {
if config.lifetime_elision_hints == LifetimeElisionHints::Never {
@@ -40,7 +38,6 @@
ctx,
fd,
config,
- file_id,
param_list.params().filter_map(|it| {
Some((
it.pat().and_then(|it| match it {
@@ -74,7 +71,6 @@
ctx: &mut InlayHintCtx,
fd: &FamousDefs<'_, '_>,
config: &InlayHintsConfig,
- file_id: EditionedFileId,
func: ast::FnPtrType,
) -> Option<()> {
if config.lifetime_elision_hints == LifetimeElisionHints::Never {
@@ -97,7 +93,6 @@
ctx,
fd,
config,
- file_id,
param_list.params().filter_map(|it| {
Some((
it.pat().and_then(|it| match it {
@@ -140,8 +135,7 @@
ctx: &mut InlayHintCtx,
fd: &FamousDefs<'_, '_>,
config: &InlayHintsConfig,
- file_id: EditionedFileId,
- func: ast::PathType,
+ func: &ast::PathType,
) -> Option<()> {
if config.lifetime_elision_hints == LifetimeElisionHints::Never {
return None;
@@ -163,7 +157,6 @@
ctx,
fd,
config,
- file_id,
param_list.type_args().filter_map(|it| Some((None, it.ty()?))),
generic_param_list,
ret_type,
@@ -202,7 +195,6 @@
ctx: &mut InlayHintCtx,
FamousDefs(_, _): &FamousDefs<'_, '_>,
config: &InlayHintsConfig,
- _file_id: EditionedFileId,
params: impl Iterator<Item = (Option<ast::Name>, ast::Type)>,
generic_param_list: Option<ast::GenericParamList>,
ret_type: Option<ast::RetType>,
diff --git a/crates/ide/src/inlay_hints/range_exclusive.rs b/crates/ide/src/inlay_hints/range_exclusive.rs
index d67d845..47bd6d7 100644
--- a/crates/ide/src/inlay_hints/range_exclusive.rs
+++ b/crates/ide/src/inlay_hints/range_exclusive.rs
@@ -4,7 +4,6 @@
//! if let ../* < */100 = 50 {}
//! ```
use ide_db::famous_defs::FamousDefs;
-use span::EditionedFileId;
use syntax::{SyntaxToken, T, ast};
use crate::{InlayHint, InlayHintsConfig};
@@ -13,7 +12,6 @@
acc: &mut Vec<InlayHint>,
FamousDefs(_sema, _): &FamousDefs<'_, '_>,
config: &InlayHintsConfig,
- _file_id: EditionedFileId,
range: impl ast::RangeItem,
) -> Option<()> {
(config.range_exclusive_hints && range.end().is_some())
diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs
index d649dff..82dbcde 100644
--- a/crates/ide/src/lib.rs
+++ b/crates/ide/src/lib.rs
@@ -514,7 +514,6 @@
self.with_db(|db| goto_type_definition::goto_type_definition(db, position))
}
- /// Finds all usages of the reference at point.
pub fn find_all_refs(
&self,
position: FilePosition,
diff --git a/crates/ide/src/references.rs b/crates/ide/src/references.rs
index 4fa1164..c6a323d 100644
--- a/crates/ide/src/references.rs
+++ b/crates/ide/src/references.rs
@@ -8,6 +8,14 @@
//! for text occurrences of the identifier. If there's an `ast::NameRef`
//! at the index that the match starts at and its tree parent is
//! resolved to the search element definition, we get a reference.
+//!
+//! Special handling for constructors/initializations:
+//! When searching for references to a struct/enum/variant, if the cursor is positioned on:
+//! - `{` after a struct/enum/variant definition
+//! - `(` for tuple structs/variants
+//! - `;` for unit structs
+//! - The type name in a struct/enum/variant definition
+//! Then only constructor/initialization usages will be shown, filtering out other references.
use hir::{PathResolution, Semantics};
use ide_db::{
@@ -28,27 +36,76 @@
use crate::{FilePosition, HighlightedRange, NavigationTarget, TryToNav, highlight_related};
+/// Result of a reference search operation.
#[derive(Debug, Clone)]
pub struct ReferenceSearchResult {
+ /// Information about the declaration site of the searched item.
+ /// For ADTs (structs/enums), this points to the type definition.
+ /// May be None for primitives or items without clear declaration sites.
pub declaration: Option<Declaration>,
+ /// All references found, grouped by file.
+ /// For ADTs when searching from a constructor position (e.g. on '{', '(', ';'),
+ /// this only includes constructor/initialization usages.
+ /// The map key is the file ID, and the value is a vector of (range, category) pairs.
+ /// - range: The text range of the reference in the file
+ /// - category: Metadata about how the reference is used (read/write/etc)
pub references: IntMap<FileId, Vec<(TextRange, ReferenceCategory)>>,
}
+/// Information about the declaration site of a searched item.
#[derive(Debug, Clone)]
pub struct Declaration {
+ /// Navigation information to jump to the declaration
pub nav: NavigationTarget,
+ /// Whether the declared item is mutable (relevant for variables)
pub is_mut: bool,
}
// Feature: Find All References
//
-// Shows all references of the item at the cursor location
+// Shows all references of the item at the cursor location. This includes:
+// - Direct references to variables, functions, types, etc.
+// - Constructor/initialization references when cursor is on struct/enum definition tokens
+// - References in patterns and type contexts
+// - References through dereferencing and borrowing
+// - References in macro expansions
+//
+// Special handling for constructors:
+// - When the cursor is on `{`, `(`, or `;` in a struct/enum definition
+// - When the cursor is on the type name in a struct/enum definition
+// These cases will show only constructor/initialization usages of the type
//
// | Editor | Shortcut |
// |---------|----------|
// | VS Code | <kbd>Shift+Alt+F12</kbd> |
//
// ![Find All References]
+
+/// Find all references to the item at the given position.
+///
+/// # Arguments
+/// * `sema` - Semantic analysis context
+/// * `position` - Position in the file where to look for the item
+/// * `search_scope` - Optional scope to limit the search (e.g. current crate only)
+///
+/// # Returns
+/// Returns `None` if no valid item is found at the position.
+/// Otherwise returns a vector of `ReferenceSearchResult`, usually with one element.
+/// Multiple results can occur in case of ambiguity or when searching for trait items.
+///
+/// # Special cases
+/// - Control flow keywords (break, continue, etc): Shows all related jump points
+/// - Constructor search: When on struct/enum definition tokens (`{`, `(`, `;`), shows only initialization sites
+/// - Format string arguments: Shows template parameter usages
+/// - Lifetime parameters: Shows lifetime constraint usages
+///
+/// # Constructor search
+/// When the cursor is on specific tokens in a struct/enum definition:
+/// - `{` after struct/enum/variant: Shows record literal initializations
+/// - `(` after tuple struct/variant: Shows tuple literal initializations
+/// - `;` after unit struct: Shows unit literal initializations
+/// - Type name in definition: Shows all initialization usages
+/// In these cases, other kinds of references (like type references) are filtered out.
pub(crate) fn find_all_refs(
sema: &Semantics<'_, RootDatabase>,
position: FilePosition,
@@ -143,7 +200,7 @@
)
})?;
- if let Some((_, resolution)) = sema.check_for_format_args_template(token.clone(), offset) {
+ if let Some((.., resolution)) = sema.check_for_format_args_template(token.clone(), offset) {
return resolution.map(Definition::from).map(|it| vec![it]);
}
@@ -219,7 +276,19 @@
}
}
-/// Returns `Some` if the cursor is at a position for an item to search for all its constructor/literal usages
+/// Returns `Some` if the cursor is at a position where we should search for constructor/initialization usages.
+/// This is used to implement the special constructor search behavior when the cursor is on specific tokens
+/// in a struct/enum/variant definition.
+///
+/// # Returns
+/// - `Some(name)` if the cursor is on:
+/// - `{` after a struct/enum/variant definition
+/// - `(` for tuple structs/variants
+/// - `;` for unit structs
+/// - The type name in a struct/enum/variant definition
+/// - `None` otherwise
+///
+/// The returned name is the name of the type whose constructor usages should be searched for.
fn name_for_constructor_search(syntax: &SyntaxNode, position: FilePosition) -> Option<ast::Name> {
let token = syntax.token_at_offset(position.offset).right_biased()?;
let token_parent = token.parent()?;
@@ -257,6 +326,16 @@
}
}
+/// Checks if a name reference is part of an enum variant literal expression.
+/// Used to filter references when searching for enum variant constructors.
+///
+/// # Arguments
+/// * `sema` - Semantic analysis context
+/// * `enum_` - The enum type to check against
+/// * `name_ref` - The name reference to check
+///
+/// # Returns
+/// `true` if the name reference is used as part of constructing a variant of the given enum.
fn is_enum_lit_name_ref(
sema: &Semantics<'_, RootDatabase>,
enum_: hir::Enum,
@@ -284,12 +363,19 @@
.unwrap_or(false)
}
+/// Checks if a path ends with the given name reference.
+/// Helper function for checking constructor usage patterns.
fn path_ends_with(path: Option<ast::Path>, name_ref: &ast::NameRef) -> bool {
path.and_then(|path| path.segment())
.and_then(|segment| segment.name_ref())
.map_or(false, |segment| segment == *name_ref)
}
+/// Checks if a name reference is used in a literal (constructor) context.
+/// Used to filter references when searching for struct/variant constructors.
+///
+/// # Returns
+/// `true` if the name reference is used as part of a struct/variant literal expression.
fn is_lit_name_ref(name_ref: &ast::NameRef) -> bool {
name_ref.syntax().ancestors().find_map(|ancestor| {
match_ast! {
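
The constructor-search positions described above can be pictured with a small fixture; the `$0` cursor marks are illustrative (one per search), following the convention the tests in this PR use:

    struct Unit;$0         // on `;` of a unit struct: finds unit literal initializations
    struct Tuple$0(u32);   // on the name or `(`: finds tuple constructor calls
    struct Record {$0      // on `{`: finds record literal initializations
        field: u32,
    }
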
diff --git a/crates/ide/src/rename.rs b/crates/ide/src/rename.rs
index e6cda60..fb84e8e 100644
--- a/crates/ide/src/rename.rs
+++ b/crates/ide/src/rename.rs
@@ -4,11 +4,11 @@
//! tests. This module also implements a couple of magic tricks, like renaming
//! `self` and to `self` (to switch between associated function and method).
-use hir::{AsAssocItem, InFile, Semantics};
+use hir::{AsAssocItem, InFile, Name, Semantics, sym};
use ide_db::{
FileId, FileRange, RootDatabase,
defs::{Definition, NameClass, NameRefClass},
- rename::{IdentifierKind, bail, format_err, source_edit_from_references},
+ rename::{IdentifierKind, RenameDefinition, bail, format_err, source_edit_from_references},
source_change::SourceChangeBuilder,
};
use itertools::Itertools;
@@ -33,8 +33,8 @@
let source_file = sema.parse_guess_edition(position.file_id);
let syntax = source_file.syntax();
- let res = find_definitions(&sema, syntax, position)?
- .map(|(frange, kind, def)| {
+ let res = find_definitions(&sema, syntax, position, &Name::new_symbol_root(sym::underscore))?
+ .map(|(frange, kind, def, _, _)| {
// ensure all ranges are valid
if def.range_for_rename(&sema).is_none() {
@@ -88,22 +88,28 @@
let source_file = sema.parse(file_id);
let syntax = source_file.syntax();
- let defs = find_definitions(&sema, syntax, position)?;
- let alias_fallback = alias_fallback(syntax, position, new_name);
+ let edition = file_id.edition(db);
+ let (new_name, kind) = IdentifierKind::classify(edition, new_name)?;
+
+ let defs = find_definitions(&sema, syntax, position, &new_name)?;
+ let alias_fallback =
+ alias_fallback(syntax, position, &new_name.display(db, edition).to_string());
let ops: RenameResult<Vec<SourceChange>> = match alias_fallback {
Some(_) => defs
// FIXME: This can use the `ide_db::rename_reference` (or def.rename) method once we can
// properly find "direct" usages/references.
- .map(|(.., def)| {
- match IdentifierKind::classify(new_name)? {
+ .map(|(.., def, new_name, _)| {
+ match kind {
IdentifierKind::Ident => (),
IdentifierKind::Lifetime => {
bail!("Cannot alias reference to a lifetime identifier")
}
IdentifierKind::Underscore => bail!("Cannot alias reference to `_`"),
+ IdentifierKind::LowercaseSelf => {
+ bail!("Cannot rename alias reference to `self`")
+ }
};
-
let mut usages = def.usages(&sema).all();
// FIXME: hack - removes the usage that triggered this rename operation.
@@ -120,7 +126,7 @@
source_change.extend(usages.references.get_mut(&file_id).iter().map(|refs| {
(
position.file_id,
- source_edit_from_references(refs, def, new_name, file_id.edition(db)),
+ source_edit_from_references(db, refs, def, &new_name, edition),
)
}));
@@ -128,18 +134,18 @@
})
.collect(),
None => defs
- .map(|(.., def)| {
+ .map(|(.., def, new_name, rename_def)| {
if let Definition::Local(local) = def {
if let Some(self_param) = local.as_self_param(sema.db) {
cov_mark::hit!(rename_self_to_param);
- return rename_self_to_param(&sema, local, self_param, new_name);
+ return rename_self_to_param(&sema, local, self_param, &new_name, kind);
}
- if new_name == "self" {
+ if kind == IdentifierKind::LowercaseSelf {
cov_mark::hit!(rename_to_self);
return rename_to_self(&sema, local);
}
}
- def.rename(&sema, new_name)
+ def.rename(&sema, new_name.as_str(), rename_def)
})
.collect(),
};
@@ -159,7 +165,7 @@
let sema = Semantics::new(db);
let module = sema.file_to_module_def(file_id)?;
let def = Definition::Module(module);
- let mut change = def.rename(&sema, new_name_stem).ok()?;
+ let mut change = def.rename(&sema, new_name_stem, RenameDefinition::Yes).ok()?;
change.file_system_edits.clear();
Some(change)
}
@@ -200,22 +206,40 @@
sema: &Semantics<'_, RootDatabase>,
syntax: &SyntaxNode,
FilePosition { file_id, offset }: FilePosition,
-) -> RenameResult<impl Iterator<Item = (FileRange, SyntaxKind, Definition)>> {
- let token = syntax.token_at_offset(offset).find(|t| matches!(t.kind(), SyntaxKind::STRING));
+ new_name: &Name,
+) -> RenameResult<impl Iterator<Item = (FileRange, SyntaxKind, Definition, Name, RenameDefinition)>>
+{
+ let maybe_format_args =
+ syntax.token_at_offset(offset).find(|t| matches!(t.kind(), SyntaxKind::STRING));
- if let Some((range, Some(resolution))) =
- token.and_then(|token| sema.check_for_format_args_template(token, offset))
+ if let Some((range, _, _, Some(resolution))) =
+ maybe_format_args.and_then(|token| sema.check_for_format_args_template(token, offset))
{
return Ok(vec![(
FileRange { file_id, range },
SyntaxKind::STRING,
Definition::from(resolution),
+ new_name.clone(),
+ RenameDefinition::Yes,
)]
.into_iter());
}
+ let original_ident = syntax
+ .token_at_offset(offset)
+ .max_by_key(|t| {
+ t.kind().is_any_identifier() || matches!(t.kind(), SyntaxKind::LIFETIME_IDENT)
+ })
+ .map(|t| {
+ if t.kind() == SyntaxKind::LIFETIME_IDENT {
+ Name::new_lifetime(t.text())
+ } else {
+ Name::new_root(t.text())
+ }
+ })
+ .ok_or_else(|| format_err!("No references found at position"))?;
let symbols =
- sema.find_nodes_at_offset_with_descend::<ast::NameLike>(syntax, offset).map(|name_like| {
+ sema.find_namelike_at_offset_with_descend(syntax, offset).map(|name_like| {
let kind = name_like.syntax().kind();
let range = sema
.original_range_opt(name_like.syntax())
@@ -284,23 +308,28 @@
.ok_or_else(|| format_err!("No references found at position"))
}
};
- res.map(|def| (range, kind, def))
+ res.map(|def| {
+ let n = def.name(sema.db)?;
+ if n == original_ident {
+ Some((range, kind, def, new_name.clone(), RenameDefinition::Yes))
+ } else if let Some(suffix) = n.as_str().strip_prefix(original_ident.as_str()) {
+ Some((range, kind, def, Name::new_root(&format!("{}{suffix}", new_name.as_str())), RenameDefinition::No))
+ } else {
+ n.as_str().strip_suffix(original_ident.as_str().trim_start_matches('\''))
+ .map(|prefix| (range, kind, def, Name::new_root(&format!("{prefix}{}", new_name.as_str())), RenameDefinition::No))
+ }
+ })
});
- let res: RenameResult<Vec<_>> = symbols.collect();
+ let res: RenameResult<Vec<_>> = symbols.filter_map(Result::transpose).collect();
match res {
Ok(v) => {
- if v.is_empty() {
- // FIXME: some semantic duplication between "empty vec" and "Err()"
- Err(format_err!("No references found at position"))
- } else {
- // remove duplicates, comparing `Definition`s
- Ok(v.into_iter()
- .unique_by(|&(.., def)| def)
- .map(|(a, b, c)| (a.into_file_id(sema.db), b, c))
- .collect::<Vec<_>>()
- .into_iter())
- }
+ // remove duplicates, comparing `Definition`s
+ Ok(v.into_iter()
+ .unique_by(|&(.., def, _, _)| def)
+ .map(|(a, b, c, d, e)| (a.into_file_id(sema.db), b, c, d, e))
+ .collect::<Vec<_>>()
+ .into_iter())
}
Err(e) => Err(e),
}
@@ -370,7 +399,13 @@
source_change.extend(usages.iter().map(|(file_id, references)| {
(
file_id.file_id(sema.db),
- source_edit_from_references(references, def, "self", file_id.edition(sema.db)),
+ source_edit_from_references(
+ sema.db,
+ references,
+ def,
+ &Name::new_symbol_root(sym::self_),
+ file_id.edition(sema.db),
+ ),
)
}));
source_change.insert_source_edit(
@@ -384,23 +419,25 @@
sema: &Semantics<'_, RootDatabase>,
local: hir::Local,
self_param: hir::SelfParam,
- new_name: &str,
+ new_name: &Name,
+ identifier_kind: IdentifierKind,
) -> RenameResult<SourceChange> {
- if new_name == "self" {
+ if identifier_kind == IdentifierKind::LowercaseSelf {
// Let's do nothing rather than complain.
cov_mark::hit!(rename_self_to_self);
return Ok(SourceChange::default());
}
- let identifier_kind = IdentifierKind::classify(new_name)?;
-
let InFile { file_id, value: self_param } =
sema.source(self_param).ok_or_else(|| format_err!("cannot find function source"))?;
let def = Definition::Local(local);
let usages = def.usages(sema).all();
- let edit = text_edit_from_self_param(&self_param, new_name)
- .ok_or_else(|| format_err!("No target type found"))?;
+ let edit = text_edit_from_self_param(
+ &self_param,
+ new_name.display(sema.db, file_id.edition(sema.db)).to_string(),
+ )
+ .ok_or_else(|| format_err!("No target type found"))?;
if usages.len() > 1 && identifier_kind == IdentifierKind::Underscore {
bail!("Cannot rename reference to `_` as it is being referenced multiple times");
}
@@ -409,13 +446,19 @@
source_change.extend(usages.iter().map(|(file_id, references)| {
(
file_id.file_id(sema.db),
- source_edit_from_references(references, def, new_name, file_id.edition(sema.db)),
+ source_edit_from_references(
+ sema.db,
+ references,
+ def,
+ new_name,
+ file_id.edition(sema.db),
+ ),
)
}));
Ok(source_change)
}
-fn text_edit_from_self_param(self_param: &ast::SelfParam, new_name: &str) -> Option<TextEdit> {
+fn text_edit_from_self_param(self_param: &ast::SelfParam, new_name: String) -> Option<TextEdit> {
fn target_type_name(impl_def: &ast::Impl) -> Option<String> {
if let Some(ast::Type::PathType(p)) = impl_def.self_ty() {
return Some(p.path()?.segment()?.name_ref()?.text().to_string());
@@ -427,7 +470,7 @@
Some(impl_def) => {
let type_name = target_type_name(&impl_def)?;
- let mut replacement_text = String::from(new_name);
+ let mut replacement_text = new_name;
replacement_text.push_str(": ");
match (self_param.amp_token(), self_param.mut_token()) {
(Some(_), None) => replacement_text.push('&'),
@@ -440,7 +483,7 @@
}
None => {
cov_mark::hit!(rename_self_outside_of_methods);
- let mut replacement_text = String::from(new_name);
+ let mut replacement_text = new_name;
replacement_text.push_str(": _");
Some(TextEdit::replace(self_param.syntax().text_range(), replacement_text))
}
@@ -710,7 +753,7 @@
check(
"super",
r#"fn main() { let i$0 = 1; }"#,
- "error: Invalid name `super`: not an identifier",
+ "error: Invalid name `super`: cannot rename to a keyword",
);
}
@@ -759,7 +802,11 @@
#[test]
fn test_rename_mod_invalid_raw_ident() {
- check("r#self", r#"mod foo$0 {}"#, "error: Invalid name `self`: not an identifier");
+ check(
+ "r#self",
+ r#"mod foo$0 {}"#,
+ "error: Invalid name `self`: cannot rename module to self",
+ );
}
#[test]
@@ -2359,7 +2406,6 @@
#[test]
fn test_rename_lifetimes() {
- cov_mark::check!(rename_lifetime);
check(
"'yeeee",
r#"
@@ -2536,7 +2582,7 @@
x.0$0 = 5;
}
"#,
- "error: No identifier available to rename",
+ "error: No references found at position",
);
}
@@ -2566,7 +2612,7 @@
}
}
"#,
- "error: Cannot rename `Self`",
+ "error: No references found at position",
);
}
@@ -3262,4 +3308,100 @@
"#,
);
}
+
+ #[test]
+ fn rename_macro_generated_type_from_type_with_a_suffix() {
+ check(
+ "Bar",
+ r#"
+//- proc_macros: generate_suffixed_type
+#[proc_macros::generate_suffixed_type]
+struct Foo$0;
+fn usage(_: FooSuffix) {}
+usage(FooSuffix);
+"#,
+ r#"
+#[proc_macros::generate_suffixed_type]
+struct Bar;
+fn usage(_: BarSuffix) {}
+usage(BarSuffix);
+"#,
+ );
+ }
+
+ #[test]
+ // FIXME
+ #[should_panic]
+ fn rename_macro_generated_type_from_type_usage_with_a_suffix() {
+ check(
+ "Bar",
+ r#"
+//- proc_macros: generate_suffixed_type
+#[proc_macros::generate_suffixed_type]
+struct Foo;
+fn usage(_: FooSuffix) {}
+usage(FooSuffix);
+fn other_place() { Foo$0; }
+"#,
+ r#"
+#[proc_macros::generate_suffixed_type]
+struct Bar;
+fn usage(_: BarSuffix) {}
+usage(BarSuffix);
+fn other_place() { Bar; }
+"#,
+ );
+ }
+
+ #[test]
+ fn rename_macro_generated_type_from_variant_with_a_suffix() {
+ check(
+ "Bar",
+ r#"
+//- proc_macros: generate_suffixed_type
+#[proc_macros::generate_suffixed_type]
+enum Quux {
+ Foo$0,
+}
+fn usage(_: FooSuffix) {}
+usage(FooSuffix);
+"#,
+ r#"
+#[proc_macros::generate_suffixed_type]
+enum Quux {
+ Bar,
+}
+fn usage(_: BarSuffix) {}
+usage(BarSuffix);
+"#,
+ );
+ }
+
+ #[test]
+ // FIXME
+ #[should_panic]
+ fn rename_macro_generated_type_from_variant_usage_with_a_suffix() {
+ check(
+ "Bar",
+ r#"
+//- proc_macros: generate_suffixed_type
+#[proc_macros::generate_suffixed_type]
+enum Quux {
+ Foo,
+}
+fn usage(_: FooSuffix) {}
+usage(FooSuffix);
+fn other_place() { Quux::Foo$0; }
+"#,
+ r#"
+#[proc_macros::generate_suffixed_type]
+enum Quux {
+ Bar,
+}
+fn usage(_: BarSuffix) {}
+usage(BarSuffix);
+fn other_place() { Quux::Bar$0; }
+"#,
+ );
+ }
}
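
The prefix/suffix matching in `find_definitions` above is what lets a `Foo` -> `Bar` rename also rewrite the macro-generated `FooSuffix` in these tests; stripped of the rust-analyzer types, the mapping is roughly this free-standing sketch (names hypothetical):

    // Given the identifier under the cursor, a definition's name, and the
    // requested new name, compute what that definition should be renamed to.
    // `None` means the definition is unrelated to the original identifier.
    fn mapped_new_name(original: &str, def_name: &str, new_name: &str) -> Option<String> {
        if def_name == original {
            Some(new_name.to_owned())
        } else if let Some(suffix) = def_name.strip_prefix(original) {
            // renaming `Foo` to `Bar` turns `FooSuffix` into `BarSuffix`
            Some(format!("{new_name}{suffix}"))
        } else {
            def_name
                .strip_suffix(original.trim_start_matches('\''))
                .map(|prefix| format!("{prefix}{new_name}"))
        }
    }

    // mapped_new_name("Foo", "FooSuffix", "Bar") == Some("BarSuffix".into())
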
diff --git a/crates/ide/src/syntax_highlighting.rs b/crates/ide/src/syntax_highlighting.rs
index e1bc763..3ca1729 100644
--- a/crates/ide/src/syntax_highlighting.rs
+++ b/crates/ide/src/syntax_highlighting.rs
@@ -542,7 +542,7 @@
let mut t = None;
let mut r = 0;
- sema.descend_into_macros_breakable(token.clone(), |tok, _ctx| {
+ sema.descend_into_macros_breakable(token.clone().into(), |tok, _ctx| {
// FIXME: Consider checking ctx transparency for being opaque?
let my_rank = ranker.rank_token(&tok.value);
diff --git a/crates/mbe/src/tests.rs b/crates/mbe/src/tests.rs
index 3369dff..769455f 100644
--- a/crates/mbe/src/tests.rs
+++ b/crates/mbe/src/tests.rs
@@ -74,7 +74,8 @@
"{}",
syntax_bridge::prettify_macro_expansion::prettify_macro_expansion(
node.syntax_node(),
- &mut |it| it.clone()
+ &mut |_| None,
+ |_| ()
)
);
expect.assert_eq(&expect_res);
diff --git a/crates/parser/src/grammar/expressions/atom.rs b/crates/parser/src/grammar/expressions/atom.rs
index 8cc332d..8ed0fc6 100644
--- a/crates/parser/src/grammar/expressions/atom.rs
+++ b/crates/parser/src/grammar/expressions/atom.rs
@@ -562,8 +562,12 @@
let m = p.start();
+ // test closure_binder
+ // fn main() { for<'a> || (); }
if p.at(T![for]) {
+ let b = p.start();
types::for_binder(p);
+ b.complete(p, CLOSURE_BINDER);
}
// test const_closure
// fn main() { let cl = const || _ = 0; }
diff --git a/crates/parser/src/grammar/generic_params.rs b/crates/parser/src/grammar/generic_params.rs
index 9d4fdbf..ea5a3bc 100644
--- a/crates/parser/src/grammar/generic_params.rs
+++ b/crates/parser/src/grammar/generic_params.rs
@@ -201,6 +201,17 @@
}
if paths::is_use_path_start(p) {
types::path_type_bounds(p, false);
+ // test_err type_bounds_macro_call_recovery
+ // fn foo<T: T![], T: T!, T: T!{}>() -> Box<T! + T!{}> {}
+ if p.at(T![!]) {
+ let m = p.start();
+ p.bump(T![!]);
+ p.error("unexpected `!` in type path, macro calls are not allowed here");
+ if p.at_ts(TokenSet::new(&[T!['{'], T!['['], T!['(']])) {
+ items::token_tree(p);
+ }
+ m.complete(p, ERROR);
+ }
} else {
m.abandon(p);
return false;
diff --git a/crates/parser/src/grammar/paths.rs b/crates/parser/src/grammar/paths.rs
index 770827c..dfe7cb5 100644
--- a/crates/parser/src/grammar/paths.rs
+++ b/crates/parser/src/grammar/paths.rs
@@ -89,19 +89,22 @@
// test qual_paths
// type X = <A as B>::Output;
// fn foo() { <usize as Default>::default(); }
- if first && p.eat(T![<]) {
+ if first && p.at(T![<]) {
+ let m = p.start();
+ p.bump(T![<]);
// test_err angled_path_without_qual
// type X = <()>;
// type Y = <A as B>;
types::type_(p);
if p.eat(T![as]) {
if is_use_path_start(p) {
- types::path_type(p);
+ types::path_type_bounds(p, true);
} else {
p.error("expected a trait");
}
}
p.expect(T![>]);
+ m.complete(p, TYPE_ANCHOR);
if !p.at(T![::]) {
p.error("expected `::`");
}
diff --git a/crates/parser/src/grammar/types.rs b/crates/parser/src/grammar/types.rs
index 9d31e43..908440b 100644
--- a/crates/parser/src/grammar/types.rs
+++ b/crates/parser/src/grammar/types.rs
@@ -330,15 +330,6 @@
m.complete(p, DYN_TRAIT_TYPE);
}
-// test path_type
-// type A = Foo;
-// type B = ::Foo;
-// type C = self::Foo;
-// type D = super::Foo;
-pub(super) fn path_type(p: &mut Parser<'_>) {
- path_type_bounds(p, true);
-}
-
// test macro_call_type
// type A = foo!();
// type B = crate::foo!();
@@ -365,6 +356,11 @@
}
}
+// test path_type
+// type A = Foo;
+// type B = ::Foo;
+// type C = self::Foo;
+// type D = super::Foo;
pub(super) fn path_type_bounds(p: &mut Parser<'_>, allow_bounds: bool) {
assert!(paths::is_path_start(p));
let m = p.start();
diff --git a/crates/parser/src/syntax_kind/generated.rs b/crates/parser/src/syntax_kind/generated.rs
index b172750..f534546 100644
--- a/crates/parser/src/syntax_kind/generated.rs
+++ b/crates/parser/src/syntax_kind/generated.rs
@@ -291,6 +291,7 @@
TUPLE_STRUCT_PAT,
TUPLE_TYPE,
TYPE_ALIAS,
+ TYPE_ANCHOR,
TYPE_ARG,
TYPE_BOUND,
TYPE_BOUND_LIST,
@@ -463,6 +464,7 @@
| TUPLE_STRUCT_PAT
| TUPLE_TYPE
| TYPE_ALIAS
+ | TYPE_ANCHOR
| TYPE_ARG
| TYPE_BOUND
| TYPE_BOUND_LIST
diff --git a/crates/parser/test_data/generated/runner.rs b/crates/parser/test_data/generated/runner.rs
index 030d8e0..6ec4192 100644
--- a/crates/parser/test_data/generated/runner.rs
+++ b/crates/parser/test_data/generated/runner.rs
@@ -83,6 +83,10 @@
#[test]
fn cast_expr() { run_and_expect_no_errors("test_data/parser/inline/ok/cast_expr.rs"); }
#[test]
+ fn closure_binder() {
+ run_and_expect_no_errors("test_data/parser/inline/ok/closure_binder.rs");
+ }
+ #[test]
fn closure_body_underscore_assignment() {
run_and_expect_no_errors(
"test_data/parser/inline/ok/closure_body_underscore_assignment.rs",
@@ -872,6 +876,10 @@
run_and_expect_errors("test_data/parser/inline/err/tuple_pat_leading_comma.rs");
}
#[test]
+ fn type_bounds_macro_call_recovery() {
+ run_and_expect_errors("test_data/parser/inline/err/type_bounds_macro_call_recovery.rs");
+ }
+ #[test]
fn type_in_array_recover() {
run_and_expect_errors("test_data/parser/inline/err/type_in_array_recover.rs");
}
diff --git a/crates/parser/test_data/parser/err/0024_many_type_parens.rast b/crates/parser/test_data/parser/err/0024_many_type_parens.rast
index f0dbc9b..025c12e 100644
--- a/crates/parser/test_data/parser/err/0024_many_type_parens.rast
+++ b/crates/parser/test_data/parser/err/0024_many_type_parens.rast
@@ -186,13 +186,14 @@
TUPLE_EXPR
L_PAREN "("
CLOSURE_EXPR
- FOR_KW "for"
- GENERIC_PARAM_LIST
- L_ANGLE "<"
- LIFETIME_PARAM
- LIFETIME
- LIFETIME_IDENT "'a"
- R_ANGLE ">"
+ CLOSURE_BINDER
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
WHITESPACE " "
BIN_EXPR
BIN_EXPR
diff --git a/crates/parser/test_data/parser/inline/err/angled_path_without_qual.rast b/crates/parser/test_data/parser/inline/err/angled_path_without_qual.rast
index 0529e97..53fbe0b 100644
--- a/crates/parser/test_data/parser/inline/err/angled_path_without_qual.rast
+++ b/crates/parser/test_data/parser/inline/err/angled_path_without_qual.rast
@@ -10,11 +10,12 @@
PATH_TYPE
PATH
PATH_SEGMENT
- L_ANGLE "<"
- TUPLE_TYPE
- L_PAREN "("
- R_PAREN ")"
- R_ANGLE ">"
+ TYPE_ANCHOR
+ L_ANGLE "<"
+ TUPLE_TYPE
+ L_PAREN "("
+ R_PAREN ")"
+ R_ANGLE ">"
SEMICOLON ";"
WHITESPACE "\n"
TYPE_ALIAS
@@ -28,21 +29,22 @@
PATH_TYPE
PATH
PATH_SEGMENT
- L_ANGLE "<"
- PATH_TYPE
- PATH
- PATH_SEGMENT
- NAME_REF
- IDENT "A"
- WHITESPACE " "
- AS_KW "as"
- WHITESPACE " "
- PATH_TYPE
- PATH
- PATH_SEGMENT
- NAME_REF
- IDENT "B"
- R_ANGLE ">"
+ TYPE_ANCHOR
+ L_ANGLE "<"
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "A"
+ WHITESPACE " "
+ AS_KW "as"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "B"
+ R_ANGLE ">"
SEMICOLON ";"
WHITESPACE "\n"
error 13: expected `::`
diff --git a/crates/parser/test_data/parser/inline/err/type_bounds_macro_call_recovery.rast b/crates/parser/test_data/parser/inline/err/type_bounds_macro_call_recovery.rast
new file mode 100644
index 0000000..4722beb
--- /dev/null
+++ b/crates/parser/test_data/parser/inline/err/type_bounds_macro_call_recovery.rast
@@ -0,0 +1,112 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ ERROR
+ BANG "!"
+ TOKEN_TREE
+ L_BRACK "["
+ R_BRACK "]"
+ COMMA ","
+ WHITESPACE " "
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ ERROR
+ BANG "!"
+ COMMA ","
+ WHITESPACE " "
+ TYPE_PARAM
+ NAME
+ IDENT "T"
+ COLON ":"
+ WHITESPACE " "
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ ERROR
+ BANG "!"
+ TOKEN_TREE
+ L_CURLY "{"
+ R_CURLY "}"
+ R_ANGLE ">"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ RET_TYPE
+ THIN_ARROW "->"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Box"
+ GENERIC_ARG_LIST
+ L_ANGLE "<"
+ TYPE_ARG
+ DYN_TRAIT_TYPE
+ TYPE_BOUND_LIST
+ TYPE_BOUND
+ MACRO_TYPE
+ MACRO_CALL
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ BANG "!"
+ WHITESPACE " "
+ PLUS "+"
+ WHITESPACE " "
+ TYPE_BOUND
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ ERROR
+ BANG "!"
+ TOKEN_TREE
+ L_CURLY "{"
+ R_CURLY "}"
+ R_ANGLE ">"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ WHITESPACE "\n"
+error 12: unexpected `!` in type path, macro calls are not allowed here
+error 21: unexpected `!` in type path, macro calls are not allowed here
+error 28: unexpected `!` in type path, macro calls are not allowed here
+error 43: expected `{`, `[`, `(`
+error 48: unexpected `!` in type path, macro calls are not allowed here
diff --git a/crates/parser/test_data/parser/inline/err/type_bounds_macro_call_recovery.rs b/crates/parser/test_data/parser/inline/err/type_bounds_macro_call_recovery.rs
new file mode 100644
index 0000000..517404f
--- /dev/null
+++ b/crates/parser/test_data/parser/inline/err/type_bounds_macro_call_recovery.rs
@@ -0,0 +1 @@
+fn foo<T: T![], T: T!, T: T!{}>() -> Box<T! + T!{}> {}
diff --git a/crates/parser/test_data/parser/inline/ok/call_expr.rast b/crates/parser/test_data/parser/inline/ok/call_expr.rast
index 19cc8d5..7c1d894 100644
--- a/crates/parser/test_data/parser/inline/ok/call_expr.rast
+++ b/crates/parser/test_data/parser/inline/ok/call_expr.rast
@@ -88,13 +88,14 @@
PATH
PATH
PATH_SEGMENT
- L_ANGLE "<"
- PATH_TYPE
- PATH
- PATH_SEGMENT
- NAME_REF
- IDENT "Foo"
- R_ANGLE ">"
+ TYPE_ANCHOR
+ L_ANGLE "<"
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ R_ANGLE ">"
COLON2 "::"
PATH_SEGMENT
NAME_REF
@@ -119,21 +120,22 @@
PATH
PATH
PATH_SEGMENT
- L_ANGLE "<"
- PATH_TYPE
- PATH
- PATH_SEGMENT
- NAME_REF
- IDENT "Foo"
- WHITESPACE " "
- AS_KW "as"
- WHITESPACE " "
- PATH_TYPE
- PATH
- PATH_SEGMENT
- NAME_REF
- IDENT "Trait"
- R_ANGLE ">"
+ TYPE_ANCHOR
+ L_ANGLE "<"
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ WHITESPACE " "
+ AS_KW "as"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Trait"
+ R_ANGLE ">"
COLON2 "::"
PATH_SEGMENT
NAME_REF
diff --git a/crates/parser/test_data/parser/inline/ok/closure_binder.rast b/crates/parser/test_data/parser/inline/ok/closure_binder.rast
new file mode 100644
index 0000000..c04dbe1
--- /dev/null
+++ b/crates/parser/test_data/parser/inline/ok/closure_binder.rast
@@ -0,0 +1,36 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "main"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE " "
+ EXPR_STMT
+ CLOSURE_EXPR
+ CLOSURE_BINDER
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
+ WHITESPACE " "
+ PARAM_LIST
+ PIPE "|"
+ PIPE "|"
+ WHITESPACE " "
+ TUPLE_EXPR
+ L_PAREN "("
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE " "
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/crates/parser/test_data/parser/inline/ok/closure_binder.rs b/crates/parser/test_data/parser/inline/ok/closure_binder.rs
new file mode 100644
index 0000000..a6d8aaf
--- /dev/null
+++ b/crates/parser/test_data/parser/inline/ok/closure_binder.rs
@@ -0,0 +1 @@
+fn main() { for<'a> || (); }
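
Note: the new `CLOSURE_BINDER` node wraps the `for<...>` prefix of a closure instead of splatting its tokens directly into `CLOSURE_EXPR`, which is what the `.rast` updates above and below reflect. A minimal sketch of source that produces the node; closure lifetime binders are unstable, so compiling this requires a nightly toolchain:

```rust
#![feature(closure_lifetime_binder)]

fn main() {
    // Parses with an explicit CLOSURE_BINDER child on the CLOSURE_EXPR.
    // The feature requires fully annotated parameter and return types.
    let id = for<'a> |x: &'a str| -> &'a str { x };
    let _ = id("hi");
}
```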
diff --git a/crates/parser/test_data/parser/inline/ok/lambda_expr.rast b/crates/parser/test_data/parser/inline/ok/lambda_expr.rast
index c25ad84..ea401d2 100644
--- a/crates/parser/test_data/parser/inline/ok/lambda_expr.rast
+++ b/crates/parser/test_data/parser/inline/ok/lambda_expr.rast
@@ -202,13 +202,14 @@
WHITESPACE "\n "
EXPR_STMT
CLOSURE_EXPR
- FOR_KW "for"
- GENERIC_PARAM_LIST
- L_ANGLE "<"
- LIFETIME_PARAM
- LIFETIME
- LIFETIME_IDENT "'a"
- R_ANGLE ">"
+ CLOSURE_BINDER
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
WHITESPACE " "
PARAM_LIST
PIPE "|"
@@ -222,13 +223,14 @@
WHITESPACE "\n "
EXPR_STMT
CLOSURE_EXPR
- FOR_KW "for"
- GENERIC_PARAM_LIST
- L_ANGLE "<"
- LIFETIME_PARAM
- LIFETIME
- LIFETIME_IDENT "'a"
- R_ANGLE ">"
+ CLOSURE_BINDER
+ FOR_KW "for"
+ GENERIC_PARAM_LIST
+ L_ANGLE "<"
+ LIFETIME_PARAM
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ R_ANGLE ">"
WHITESPACE " "
MOVE_KW "move"
WHITESPACE " "
diff --git a/crates/parser/test_data/parser/inline/ok/qual_paths.rast b/crates/parser/test_data/parser/inline/ok/qual_paths.rast
index 8c66cfe..10f8a6a 100644
--- a/crates/parser/test_data/parser/inline/ok/qual_paths.rast
+++ b/crates/parser/test_data/parser/inline/ok/qual_paths.rast
@@ -11,21 +11,22 @@
PATH
PATH
PATH_SEGMENT
- L_ANGLE "<"
- PATH_TYPE
- PATH
- PATH_SEGMENT
- NAME_REF
- IDENT "A"
- WHITESPACE " "
- AS_KW "as"
- WHITESPACE " "
- PATH_TYPE
- PATH
- PATH_SEGMENT
- NAME_REF
- IDENT "B"
- R_ANGLE ">"
+ TYPE_ANCHOR
+ L_ANGLE "<"
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "A"
+ WHITESPACE " "
+ AS_KW "as"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "B"
+ R_ANGLE ">"
COLON2 "::"
PATH_SEGMENT
NAME_REF
@@ -51,21 +52,22 @@
PATH
PATH
PATH_SEGMENT
- L_ANGLE "<"
- PATH_TYPE
- PATH
- PATH_SEGMENT
- NAME_REF
- IDENT "usize"
- WHITESPACE " "
- AS_KW "as"
- WHITESPACE " "
- PATH_TYPE
- PATH
- PATH_SEGMENT
- NAME_REF
- IDENT "Default"
- R_ANGLE ">"
+ TYPE_ANCHOR
+ L_ANGLE "<"
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "usize"
+ WHITESPACE " "
+ AS_KW "as"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Default"
+ R_ANGLE ">"
COLON2 "::"
PATH_SEGMENT
NAME_REF
diff --git a/crates/parser/test_data/parser/inline/ok/type_path_in_pattern.rast b/crates/parser/test_data/parser/inline/ok/type_path_in_pattern.rast
index 297f757..3d27afa 100644
--- a/crates/parser/test_data/parser/inline/ok/type_path_in_pattern.rast
+++ b/crates/parser/test_data/parser/inline/ok/type_path_in_pattern.rast
@@ -19,10 +19,11 @@
PATH
PATH
PATH_SEGMENT
- L_ANGLE "<"
- INFER_TYPE
- UNDERSCORE "_"
- R_ANGLE ">"
+ TYPE_ANCHOR
+ L_ANGLE "<"
+ INFER_TYPE
+ UNDERSCORE "_"
+ R_ANGLE ">"
COLON2 "::"
PATH_SEGMENT
NAME_REF
diff --git a/crates/parser/test_data/parser/inline/ok/where_clause.rast b/crates/parser/test_data/parser/inline/ok/where_clause.rast
index a3cbe45..9adfe2c 100644
--- a/crates/parser/test_data/parser/inline/ok/where_clause.rast
+++ b/crates/parser/test_data/parser/inline/ok/where_clause.rast
@@ -84,21 +84,22 @@
PATH
PATH
PATH_SEGMENT
- L_ANGLE "<"
- PATH_TYPE
- PATH
- PATH_SEGMENT
- NAME_REF
- IDENT "T"
- WHITESPACE " "
- AS_KW "as"
- WHITESPACE " "
- PATH_TYPE
- PATH
- PATH_SEGMENT
- NAME_REF
- IDENT "Iterator"
- R_ANGLE ">"
+ TYPE_ANCHOR
+ L_ANGLE "<"
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ WHITESPACE " "
+ AS_KW "as"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Iterator"
+ R_ANGLE ">"
COLON2 "::"
PATH_SEGMENT
NAME_REF
diff --git a/crates/parser/test_data/parser/ok/0036_fully_qualified.rast b/crates/parser/test_data/parser/ok/0036_fully_qualified.rast
index 9382020..2fecb1c 100644
--- a/crates/parser/test_data/parser/ok/0036_fully_qualified.rast
+++ b/crates/parser/test_data/parser/ok/0036_fully_qualified.rast
@@ -45,21 +45,22 @@
PATH
PATH
PATH_SEGMENT
- L_ANGLE "<"
- PATH_TYPE
- PATH
- PATH_SEGMENT
- NAME_REF
- IDENT "S"
- WHITESPACE " "
- AS_KW "as"
- WHITESPACE " "
- PATH_TYPE
- PATH
- PATH_SEGMENT
- NAME_REF
- IDENT "Iterator"
- R_ANGLE ">"
+ TYPE_ANCHOR
+ L_ANGLE "<"
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "S"
+ WHITESPACE " "
+ AS_KW "as"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Iterator"
+ R_ANGLE ">"
COLON2 "::"
PATH_SEGMENT
NAME_REF
diff --git a/crates/parser/test_data/parser/ok/0042_ufcs_call_list.rast b/crates/parser/test_data/parser/ok/0042_ufcs_call_list.rast
index a536b0e..d1d1ffa 100644
--- a/crates/parser/test_data/parser/ok/0042_ufcs_call_list.rast
+++ b/crates/parser/test_data/parser/ok/0042_ufcs_call_list.rast
@@ -107,13 +107,14 @@
PATH
PATH
PATH_SEGMENT
- L_ANGLE "<"
- PATH_TYPE
- PATH
- PATH_SEGMENT
- NAME_REF
- IDENT "Foo"
- R_ANGLE ">"
+ TYPE_ANCHOR
+ L_ANGLE "<"
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "Foo"
+ R_ANGLE ">"
COLON2 "::"
PATH_SEGMENT
NAME_REF
diff --git a/crates/parser/test_data/parser/ok/0067_where_for_pred.rast b/crates/parser/test_data/parser/ok/0067_where_for_pred.rast
index cd3b21a..8bf1090 100644
--- a/crates/parser/test_data/parser/ok/0067_where_for_pred.rast
+++ b/crates/parser/test_data/parser/ok/0067_where_for_pred.rast
@@ -288,26 +288,27 @@
PATH
PATH
PATH_SEGMENT
- L_ANGLE "<"
- REF_TYPE
- AMP "&"
- LIFETIME
- LIFETIME_IDENT "'a"
+ TYPE_ANCHOR
+ L_ANGLE "<"
+ REF_TYPE
+ AMP "&"
+ LIFETIME
+ LIFETIME_IDENT "'a"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "T"
+ WHITESPACE " "
+ AS_KW "as"
WHITESPACE " "
PATH_TYPE
PATH
PATH_SEGMENT
NAME_REF
- IDENT "T"
- WHITESPACE " "
- AS_KW "as"
- WHITESPACE " "
- PATH_TYPE
- PATH
- PATH_SEGMENT
- NAME_REF
- IDENT "Baz"
- R_ANGLE ">"
+ IDENT "Baz"
+ R_ANGLE ">"
COLON2 "::"
PATH_SEGMENT
NAME_REF
diff --git a/crates/proc-macro-srv/src/server_impl.rs b/crates/proc-macro-srv/src/server_impl.rs
index 11dbd92..ad28599 100644
--- a/crates/proc-macro-srv/src/server_impl.rs
+++ b/crates/proc-macro-srv/src/server_impl.rs
@@ -10,6 +10,7 @@
use std::fmt;
+use intern::Symbol;
use proc_macro::bridge;
mod token_stream;
@@ -112,3 +113,135 @@
bridge::LitKind::ErrWithGuar => tt::LitKind::Err(()),
}
}
+
+pub(super) fn literal_from_str<Span: Copy>(
+ s: &str,
+ span: Span,
+) -> Result<bridge::Literal<Span, Symbol>, ()> {
+ use proc_macro::bridge::LitKind;
+ use rustc_lexer::{LiteralKind, Token, TokenKind};
+
+ let mut tokens = rustc_lexer::tokenize(s);
+ let minus_or_lit = tokens.next().unwrap_or(Token { kind: TokenKind::Eof, len: 0 });
+
+ let lit = if minus_or_lit.kind == TokenKind::Minus {
+ let lit = tokens.next().ok_or(())?;
+ if !matches!(
+ lit.kind,
+ TokenKind::Literal { kind: LiteralKind::Int { .. } | LiteralKind::Float { .. }, .. }
+ ) {
+ return Err(());
+ }
+ lit
+ } else {
+ minus_or_lit
+ };
+
+ if tokens.next().is_some() {
+ return Err(());
+ }
+
+ let TokenKind::Literal { kind, suffix_start } = lit.kind else { return Err(()) };
+ let (kind, start_offset, end_offset) = match kind {
+ LiteralKind::Int { .. } => (LitKind::Integer, 0, 0),
+ LiteralKind::Float { .. } => (LitKind::Float, 0, 0),
+ LiteralKind::Char { terminated } => (LitKind::Char, 1, terminated as usize),
+ LiteralKind::Byte { terminated } => (LitKind::Byte, 2, terminated as usize),
+ LiteralKind::Str { terminated } => (LitKind::Str, 1, terminated as usize),
+ LiteralKind::ByteStr { terminated } => (LitKind::ByteStr, 2, terminated as usize),
+ LiteralKind::CStr { terminated } => (LitKind::CStr, 2, terminated as usize),
+ LiteralKind::RawStr { n_hashes } => (
+ LitKind::StrRaw(n_hashes.unwrap_or_default()),
+ 2 + n_hashes.unwrap_or_default() as usize,
+ 1 + n_hashes.unwrap_or_default() as usize,
+ ),
+ LiteralKind::RawByteStr { n_hashes } => (
+ LitKind::ByteStrRaw(n_hashes.unwrap_or_default()),
+ 3 + n_hashes.unwrap_or_default() as usize,
+ 1 + n_hashes.unwrap_or_default() as usize,
+ ),
+ LiteralKind::RawCStr { n_hashes } => (
+ LitKind::CStrRaw(n_hashes.unwrap_or_default()),
+ 3 + n_hashes.unwrap_or_default() as usize,
+ 1 + n_hashes.unwrap_or_default() as usize,
+ ),
+ };
+
+ let (lit, suffix) = s.split_at(suffix_start as usize);
+ let lit = &lit[start_offset..lit.len() - end_offset];
+ let suffix = match suffix {
+ "" | "_" => None,
+ suffix => Some(Symbol::intern(suffix)),
+ };
+
+ Ok(bridge::Literal { kind, symbol: Symbol::intern(lit), suffix, span })
+}
+
+pub(super) fn from_token_tree<Span: Copy>(
+ tree: bridge::TokenTree<TokenStream<Span>, Span, Symbol>,
+) -> TokenStream<Span> {
+ match tree {
+ bridge::TokenTree::Group(group) => {
+ let group = TopSubtree::from_bridge(group);
+ TokenStream { token_trees: group.0 }
+ }
+
+ bridge::TokenTree::Ident(ident) => {
+ let text = ident.sym;
+ let ident: tt::Ident<Span> = tt::Ident {
+ sym: text,
+ span: ident.span,
+ is_raw: if ident.is_raw { tt::IdentIsRaw::Yes } else { tt::IdentIsRaw::No },
+ };
+ let leaf = tt::Leaf::from(ident);
+ let tree = tt::TokenTree::from(leaf);
+ TokenStream { token_trees: vec![tree] }
+ }
+
+ bridge::TokenTree::Literal(literal) => {
+ let token_trees =
+ if let Some((_minus, symbol)) = literal.symbol.as_str().split_once('-') {
+ let punct = tt::Punct {
+ spacing: tt::Spacing::Alone,
+ span: literal.span,
+ char: '-' as char,
+ };
+ let leaf: tt::Leaf<Span> = tt::Leaf::from(punct);
+ let minus_tree = tt::TokenTree::from(leaf);
+
+ let literal = tt::Literal {
+ symbol: Symbol::intern(symbol),
+ suffix: literal.suffix,
+ span: literal.span,
+ kind: literal_kind_to_internal(literal.kind),
+ };
+ let leaf: tt::Leaf<Span> = tt::Leaf::from(literal);
+ let tree = tt::TokenTree::from(leaf);
+ vec![minus_tree, tree]
+ } else {
+ let literal = tt::Literal {
+ symbol: literal.symbol,
+ suffix: literal.suffix,
+ span: literal.span,
+ kind: literal_kind_to_internal(literal.kind),
+ };
+
+ let leaf: tt::Leaf<Span> = tt::Leaf::from(literal);
+ let tree = tt::TokenTree::from(leaf);
+ vec![tree]
+ };
+ TokenStream { token_trees }
+ }
+
+ bridge::TokenTree::Punct(p) => {
+ let punct = tt::Punct {
+ char: p.ch as char,
+ spacing: if p.joint { tt::Spacing::Joint } else { tt::Spacing::Alone },
+ span: p.span,
+ };
+ let leaf = tt::Leaf::from(punct);
+ let tree = tt::TokenTree::from(leaf);
+ TokenStream { token_trees: vec![tree] }
+ }
+ }
+}
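
Note: `literal_from_str` and `from_token_tree` are now generic over the span type, so the `rust_analyzer_span` and `token_id` servers below can delegate to them instead of carrying two diverging copies. The one subtle part is the delimiter arithmetic: `start_offset`/`end_offset` strip the literal's prefix, quotes, and raw-string hashes before interning the symbol. A standalone sketch of that arithmetic with the raw-string case worked out by hand (not the crate's API):

```rust
// For a raw string with n hashes, the match above computes
//   start_offset = 2 + n  (skip `r`, the opening hashes, and `"`), and
//   end_offset   = 1 + n  (strip `"` and the closing hashes).
fn strip_delims(s: &str, start: usize, end: usize, suffix_start: usize) -> (&str, &str) {
    let (lit, suffix) = s.split_at(suffix_start);
    (&lit[start..lit.len() - end], suffix)
}

fn main() {
    // `r#"abc"#` is 8 chars with no suffix; n_hashes = 1.
    let (sym, suffix) = strip_delims(r##"r#"abc"#"##, 2 + 1, 1 + 1, 8);
    assert_eq!((sym, suffix), ("abc", ""));
}
```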
diff --git a/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs b/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs
index e0c6e68..5d1271b 100644
--- a/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs
+++ b/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs
@@ -14,16 +14,7 @@
use span::{FIXUP_ERASED_FILE_AST_ID_MARKER, Span};
use tt::{TextRange, TextSize};
-use crate::server_impl::{TopSubtree, literal_kind_to_internal, token_stream::TokenStreamBuilder};
-mod tt {
- pub use tt::*;
-
- pub type TokenTree = ::tt::TokenTree<super::Span>;
- pub type Leaf = ::tt::Leaf<super::Span>;
- pub type Literal = ::tt::Literal<super::Span>;
- pub type Punct = ::tt::Punct<super::Span>;
- pub type Ident = ::tt::Ident<super::Span>;
-}
+use crate::server_impl::{from_token_tree, literal_from_str, token_stream::TokenStreamBuilder};
type TokenStream = crate::server_impl::TokenStream<Span>;
@@ -62,66 +53,7 @@
&mut self,
s: &str,
) -> Result<bridge::Literal<Self::Span, Self::Symbol>, ()> {
- use proc_macro::bridge::LitKind;
- use rustc_lexer::{LiteralKind, Token, TokenKind};
-
- let mut tokens = rustc_lexer::tokenize(s);
- let minus_or_lit = tokens.next().unwrap_or(Token { kind: TokenKind::Eof, len: 0 });
-
- let lit = if minus_or_lit.kind == TokenKind::Minus {
- let lit = tokens.next().ok_or(())?;
- if !matches!(
- lit.kind,
- TokenKind::Literal {
- kind: LiteralKind::Int { .. } | LiteralKind::Float { .. },
- ..
- }
- ) {
- return Err(());
- }
- lit
- } else {
- minus_or_lit
- };
-
- if tokens.next().is_some() {
- return Err(());
- }
-
- let TokenKind::Literal { kind, suffix_start } = lit.kind else { return Err(()) };
- let (kind, start_offset, end_offset) = match kind {
- LiteralKind::Int { .. } => (LitKind::Integer, 0, 0),
- LiteralKind::Float { .. } => (LitKind::Float, 0, 0),
- LiteralKind::Char { terminated } => (LitKind::Char, 1, terminated as usize),
- LiteralKind::Byte { terminated } => (LitKind::Byte, 2, terminated as usize),
- LiteralKind::Str { terminated } => (LitKind::Str, 1, terminated as usize),
- LiteralKind::ByteStr { terminated } => (LitKind::ByteStr, 2, terminated as usize),
- LiteralKind::CStr { terminated } => (LitKind::CStr, 2, terminated as usize),
- LiteralKind::RawStr { n_hashes } => (
- LitKind::StrRaw(n_hashes.unwrap_or_default()),
- 2 + n_hashes.unwrap_or_default() as usize,
- 1 + n_hashes.unwrap_or_default() as usize,
- ),
- LiteralKind::RawByteStr { n_hashes } => (
- LitKind::ByteStrRaw(n_hashes.unwrap_or_default()),
- 3 + n_hashes.unwrap_or_default() as usize,
- 1 + n_hashes.unwrap_or_default() as usize,
- ),
- LiteralKind::RawCStr { n_hashes } => (
- LitKind::CStrRaw(n_hashes.unwrap_or_default()),
- 3 + n_hashes.unwrap_or_default() as usize,
- 1 + n_hashes.unwrap_or_default() as usize,
- ),
- };
-
- let (lit, suffix) = s.split_at(suffix_start as usize);
- let lit = &lit[start_offset..lit.len() - end_offset];
- let suffix = match suffix {
- "" | "_" => None,
- suffix => Some(Symbol::intern(suffix)),
- };
-
- Ok(bridge::Literal { kind, symbol: Symbol::intern(lit), suffix, span: self.call_site })
+ literal_from_str(s, self.call_site)
}
fn emit_diagnostic(&mut self, _: bridge::Diagnostic<Self::Span>) {
@@ -149,70 +81,7 @@
&mut self,
tree: bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>,
) -> Self::TokenStream {
- match tree {
- bridge::TokenTree::Group(group) => {
- let group = TopSubtree::from_bridge(group);
- TokenStream { token_trees: group.0 }
- }
-
- bridge::TokenTree::Ident(ident) => {
- let text = ident.sym;
- let ident: tt::Ident = tt::Ident {
- sym: text,
- span: ident.span,
- is_raw: if ident.is_raw { tt::IdentIsRaw::Yes } else { tt::IdentIsRaw::No },
- };
- let leaf = tt::Leaf::from(ident);
- let tree = tt::TokenTree::from(leaf);
- TokenStream { token_trees: vec![tree] }
- }
-
- bridge::TokenTree::Literal(literal) => {
- let token_trees =
- if let Some((_minus, symbol)) = literal.symbol.as_str().split_once('-') {
- let punct = tt::Punct {
- spacing: tt::Spacing::Alone,
- span: literal.span,
- char: '-' as char,
- };
- let leaf: tt::Leaf = tt::Leaf::from(punct);
- let minus_tree = tt::TokenTree::from(leaf);
-
- let literal = tt::Literal {
- symbol: Symbol::intern(symbol),
- suffix: literal.suffix,
- span: literal.span,
- kind: literal_kind_to_internal(literal.kind),
- };
- let leaf: tt::Leaf = tt::Leaf::from(literal);
- let tree = tt::TokenTree::from(leaf);
- vec![minus_tree, tree]
- } else {
- let literal = tt::Literal {
- symbol: literal.symbol,
- suffix: literal.suffix,
- span: literal.span,
- kind: literal_kind_to_internal(literal.kind),
- };
-
- let leaf: tt::Leaf = tt::Leaf::from(literal);
- let tree = tt::TokenTree::from(leaf);
- vec![tree]
- };
- TokenStream { token_trees }
- }
-
- bridge::TokenTree::Punct(p) => {
- let punct = tt::Punct {
- char: p.ch as char,
- spacing: if p.joint { tt::Spacing::Joint } else { tt::Spacing::Alone },
- span: p.span,
- };
- let leaf = tt::Leaf::from(punct);
- let tree = tt::TokenTree::from(leaf);
- TokenStream { token_trees: vec![tree] }
- }
- }
+ from_token_tree(tree)
}
fn expand_expr(&mut self, self_: &Self::TokenStream) -> Result<Self::TokenStream, ()> {
diff --git a/crates/proc-macro-srv/src/server_impl/token_id.rs b/crates/proc-macro-srv/src/server_impl/token_id.rs
index d55b269..b493b32 100644
--- a/crates/proc-macro-srv/src/server_impl/token_id.rs
+++ b/crates/proc-macro-srv/src/server_impl/token_id.rs
@@ -5,23 +5,9 @@
use intern::Symbol;
use proc_macro::bridge::{self, server};
-use crate::server_impl::{TopSubtree, literal_kind_to_internal, token_stream::TokenStreamBuilder};
-mod tt {
- pub use span::TokenId;
+use crate::server_impl::{from_token_tree, literal_from_str, token_stream::TokenStreamBuilder};
- pub use tt::*;
-
- pub type TokenTree = ::tt::TokenTree<TokenId>;
- pub type Leaf = ::tt::Leaf<TokenId>;
- pub type Literal = ::tt::Literal<TokenId>;
- pub type Punct = ::tt::Punct<TokenId>;
- pub type Ident = ::tt::Ident<TokenId>;
-}
-type TokenTree = tt::TokenTree;
-type Punct = tt::Punct;
-type Spacing = tt::Spacing;
-type Literal = tt::Literal;
-type Span = tt::TokenId;
+type Span = span::TokenId;
type TokenStream = crate::server_impl::TokenStream<Span>;
pub struct FreeFunctions;
@@ -49,67 +35,7 @@
&mut self,
s: &str,
) -> Result<bridge::Literal<Self::Span, Self::Symbol>, ()> {
- use proc_macro::bridge::LitKind;
- use rustc_lexer::{LiteralKind, Token, TokenKind};
-
- let mut tokens = rustc_lexer::tokenize(s);
- let minus_or_lit = tokens.next().unwrap_or(Token { kind: TokenKind::Eof, len: 0 });
-
- let lit = if minus_or_lit.kind == TokenKind::Minus {
- let lit = tokens.next().ok_or(())?;
- if !matches!(
- lit.kind,
- TokenKind::Literal {
- kind: LiteralKind::Int { .. } | LiteralKind::Float { .. },
- ..
- }
- ) {
- return Err(());
- }
- lit
- } else {
- minus_or_lit
- };
-
- if tokens.next().is_some() {
- return Err(());
- }
-
- let TokenKind::Literal { kind, suffix_start } = lit.kind else { return Err(()) };
-
- let (kind, start_offset, end_offset) = match kind {
- LiteralKind::Int { .. } => (LitKind::Integer, 0, 0),
- LiteralKind::Float { .. } => (LitKind::Float, 0, 0),
- LiteralKind::Char { terminated } => (LitKind::Char, 1, terminated as usize),
- LiteralKind::Byte { terminated } => (LitKind::Byte, 2, terminated as usize),
- LiteralKind::Str { terminated } => (LitKind::Str, 1, terminated as usize),
- LiteralKind::ByteStr { terminated } => (LitKind::ByteStr, 2, terminated as usize),
- LiteralKind::CStr { terminated } => (LitKind::CStr, 2, terminated as usize),
- LiteralKind::RawStr { n_hashes } => (
- LitKind::StrRaw(n_hashes.unwrap_or_default()),
- 2 + n_hashes.unwrap_or_default() as usize,
- 1 + n_hashes.unwrap_or_default() as usize,
- ),
- LiteralKind::RawByteStr { n_hashes } => (
- LitKind::ByteStrRaw(n_hashes.unwrap_or_default()),
- 3 + n_hashes.unwrap_or_default() as usize,
- 1 + n_hashes.unwrap_or_default() as usize,
- ),
- LiteralKind::RawCStr { n_hashes } => (
- LitKind::CStrRaw(n_hashes.unwrap_or_default()),
- 3 + n_hashes.unwrap_or_default() as usize,
- 1 + n_hashes.unwrap_or_default() as usize,
- ),
- };
-
- let (lit, suffix) = s.split_at(suffix_start as usize);
- let lit = &lit[start_offset..lit.len() - end_offset];
- let suffix = match suffix {
- "" | "_" => None,
- suffix => Some(Symbol::intern(suffix)),
- };
-
- Ok(bridge::Literal { kind, symbol: Symbol::intern(lit), suffix, span: self.call_site })
+ literal_from_str(s, self.call_site)
}
fn emit_diagnostic(&mut self, _: bridge::Diagnostic<Self::Span>) {}
@@ -135,69 +61,7 @@
&mut self,
tree: bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>,
) -> Self::TokenStream {
- match tree {
- bridge::TokenTree::Group(group) => {
- let group = TopSubtree::from_bridge(group);
- TokenStream { token_trees: group.0 }
- }
-
- bridge::TokenTree::Ident(ident) => {
- let ident: tt::Ident = tt::Ident {
- sym: ident.sym,
- span: ident.span,
- is_raw: if ident.is_raw { tt::IdentIsRaw::Yes } else { tt::IdentIsRaw::No },
- };
- let leaf = tt::Leaf::from(ident);
- let tree = TokenTree::from(leaf);
- TokenStream { token_trees: vec![tree] }
- }
-
- bridge::TokenTree::Literal(literal) => {
- let token_trees =
- if let Some((_minus, symbol)) = literal.symbol.as_str().split_once('-') {
- let punct = tt::Punct {
- spacing: tt::Spacing::Alone,
- span: literal.span,
- char: '-' as char,
- };
- let leaf: tt::Leaf = tt::Leaf::from(punct);
- let minus_tree = tt::TokenTree::from(leaf);
-
- let literal = Literal {
- symbol: Symbol::intern(symbol),
- suffix: literal.suffix,
- span: literal.span,
- kind: literal_kind_to_internal(literal.kind),
- };
- let leaf: tt::Leaf = tt::Leaf::from(literal);
- let tree = tt::TokenTree::from(leaf);
- vec![minus_tree, tree]
- } else {
- let literal = Literal {
- symbol: literal.symbol,
- suffix: literal.suffix,
- span: literal.span,
- kind: literal_kind_to_internal(literal.kind),
- };
-
- let leaf: tt::Leaf = tt::Leaf::from(literal);
- let tree = tt::TokenTree::from(leaf);
- vec![tree]
- };
- TokenStream { token_trees }
- }
-
- bridge::TokenTree::Punct(p) => {
- let punct = Punct {
- char: p.ch as char,
- spacing: if p.joint { Spacing::Joint } else { Spacing::Alone },
- span: p.span,
- };
- let leaf = tt::Leaf::from(punct);
- let tree = TokenTree::from(leaf);
- TokenStream { token_trees: vec![tree] }
- }
- }
+ from_token_tree(tree)
}
fn expand_expr(&mut self, self_: &Self::TokenStream) -> Result<Self::TokenStream, ()> {
@@ -337,6 +201,8 @@
#[cfg(test)]
mod tests {
+ use span::TokenId;
+
use super::*;
#[test]
@@ -345,18 +211,18 @@
token_trees: vec![
tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
sym: Symbol::intern("struct"),
- span: tt::TokenId(0),
+ span: TokenId(0),
is_raw: tt::IdentIsRaw::No,
})),
tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
sym: Symbol::intern("T"),
- span: tt::TokenId(0),
+ span: TokenId(0),
is_raw: tt::IdentIsRaw::No,
})),
tt::TokenTree::Subtree(tt::Subtree {
delimiter: tt::Delimiter {
- open: tt::TokenId(0),
- close: tt::TokenId(0),
+ open: TokenId(0),
+ close: TokenId(0),
kind: tt::DelimiterKind::Brace,
},
len: 0,
@@ -372,8 +238,8 @@
let subtree_paren_a = vec![
tt::TokenTree::Subtree(tt::Subtree {
delimiter: tt::Delimiter {
- open: tt::TokenId(0),
- close: tt::TokenId(0),
+ open: TokenId(0),
+ close: TokenId(0),
kind: tt::DelimiterKind::Parenthesis,
},
len: 1,
@@ -381,24 +247,24 @@
tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
is_raw: tt::IdentIsRaw::No,
sym: Symbol::intern("a"),
- span: tt::TokenId(0),
+ span: TokenId(0),
})),
];
- let t1 = TokenStream::from_str("(a)", tt::TokenId(0)).unwrap();
+ let t1 = TokenStream::from_str("(a)", TokenId(0)).unwrap();
assert_eq!(t1.token_trees.len(), 2);
assert!(t1.token_trees[0..2] == subtree_paren_a);
- let t2 = TokenStream::from_str("(a);", tt::TokenId(0)).unwrap();
+ let t2 = TokenStream::from_str("(a);", TokenId(0)).unwrap();
assert_eq!(t2.token_trees.len(), 3);
assert!(t2.token_trees[0..2] == subtree_paren_a);
- let underscore = TokenStream::from_str("_", tt::TokenId(0)).unwrap();
+ let underscore = TokenStream::from_str("_", TokenId(0)).unwrap();
assert!(
underscore.token_trees[0]
== tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
sym: Symbol::intern("_"),
- span: tt::TokenId(0),
+ span: TokenId(0),
is_raw: tt::IdentIsRaw::No,
}))
);
diff --git a/crates/project-model/src/cargo_workspace.rs b/crates/project-model/src/cargo_workspace.rs
index 6e730b1..bb02284 100644
--- a/crates/project-model/src/cargo_workspace.rs
+++ b/crates/project-model/src/cargo_workspace.rs
@@ -431,12 +431,6 @@
.ok_or(cargo_metadata::Error::NoJson)?;
Ok((cargo_metadata::MetadataCommand::parse(stdout)?, None))
})()
- .map(|(metadata, error)| {
- (
- metadata,
- error.map(|e| e.context(format!("Failed to run `{:?}`", meta.cargo_command()))),
- )
- })
.with_context(|| format!("Failed to run `{:?}`", meta.cargo_command()))
}
diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs
index d1ca8c1..5cbea9c 100644
--- a/crates/rust-analyzer/src/config.rs
+++ b/crates/rust-analyzer/src/config.rs
@@ -1192,7 +1192,7 @@
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum LinkedProject {
ProjectManifest(ProjectManifest),
- InlineJsonProject(ProjectJson),
+ InlineProjectJson(ProjectJson),
}
impl From<ProjectManifest> for LinkedProject {
@@ -1203,7 +1203,7 @@
impl From<ProjectJson> for LinkedProject {
fn from(v: ProjectJson) -> Self {
- LinkedProject::InlineJsonProject(v)
+ LinkedProject::InlineProjectJson(v)
}
}
@@ -1597,6 +1597,16 @@
term_search_borrowck: self.assist_termSearch_borrowcheck(source_root).to_owned(),
}
}
+
+ pub fn diagnostic_fixes(&self, source_root: Option<SourceRootId>) -> DiagnosticsConfig {
+ // We always want to show quickfixes for diagnostics, even when diagnostics/experimental diagnostics are disabled.
+ DiagnosticsConfig {
+ enabled: true,
+ disable_experimental: false,
+ ..self.diagnostics(source_root)
+ }
+ }
+
pub fn expand_proc_attr_macros(&self) -> bool {
self.procMacro_enable().to_owned() && self.procMacro_attributes_enable().to_owned()
}
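
Note: `diagnostic_fixes` exists so that code actions keep offering quickfixes even when the user has disabled diagnostics; it starts from the regular diagnostics config and force-enables the two gates via struct-update syntax. A reduced sketch of the pattern (field set trimmed down, not the real `DiagnosticsConfig`):

```rust
#[derive(Clone)]
struct DiagnosticsConfig {
    enabled: bool,
    disable_experimental: bool,
    proc_attr_macros_enabled: bool,
}

fn diagnostic_fixes(base: &DiagnosticsConfig) -> DiagnosticsConfig {
    // Force-enable both gates, keep everything else as configured.
    DiagnosticsConfig { enabled: true, disable_experimental: false, ..base.clone() }
}

fn main() {
    let user = DiagnosticsConfig {
        enabled: false,
        disable_experimental: true,
        proc_attr_macros_enabled: true,
    };
    let fixes = diagnostic_fixes(&user);
    assert!(fixes.enabled && !fixes.disable_experimental && fixes.proc_attr_macros_enabled);
}
```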
diff --git a/crates/rust-analyzer/src/handlers/dispatch.rs b/crates/rust-analyzer/src/handlers/dispatch.rs
index f04ada3..40d0556 100644
--- a/crates/rust-analyzer/src/handlers/dispatch.rs
+++ b/crates/rust-analyzer/src/handlers/dispatch.rs
@@ -6,7 +6,7 @@
use ide_db::base_db::{
DbPanicContext,
- salsa::{self, Cancelled},
+ salsa::{self, Cancelled, UnexpectedCycle},
};
use lsp_server::{ExtractError, Response, ResponseError};
use serde::{Serialize, de::DeserializeOwned};
@@ -349,11 +349,14 @@
let mut message = "request handler panicked".to_owned();
if let Some(panic_message) = panic_message {
message.push_str(": ");
- message.push_str(panic_message)
+ message.push_str(panic_message);
+ } else if let Some(cycle) = panic.downcast_ref::<UnexpectedCycle>() {
+ tracing::error!("{cycle}");
+ message.push_str(": unexpected cycle");
} else if let Ok(cancelled) = panic.downcast::<Cancelled>() {
tracing::error!("Cancellation propagated out of salsa! This is a bug");
return Err(HandlerCancelledError::Inner(*cancelled));
- }
+ };
Ok(lsp_server::Response::new_err(
id,
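
Note: the probe order in this handler matters. `UnexpectedCycle` is checked with the borrowing `downcast_ref` before `Cancelled` is checked with the consuming `Box::downcast`, because the latter takes ownership of the payload when it succeeds. A self-contained sketch of that triage order (marker types stand in for salsa's):

```rust
use std::any::Any;

struct UnexpectedCycle;
struct Cancelled;

fn triage(panic: Box<dyn Any + Send>) -> &'static str {
    // Borrowing probe first: the payload is still ours afterwards.
    if panic.downcast_ref::<UnexpectedCycle>().is_some() {
        return "unexpected cycle";
    }
    // Consuming probe last: on success the Box is moved out.
    match panic.downcast::<Cancelled>() {
        Ok(_cancelled) => "cancellation propagated out of salsa",
        Err(_other) => "request handler panicked",
    }
}

fn main() {
    assert_eq!(triage(Box::new(UnexpectedCycle)), "unexpected cycle");
    assert_eq!(triage(Box::new("boom")), "request handler panicked");
}
```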
diff --git a/crates/rust-analyzer/src/handlers/request.rs b/crates/rust-analyzer/src/handlers/request.rs
index 69983a6..6d46ce6 100644
--- a/crates/rust-analyzer/src/handlers/request.rs
+++ b/crates/rust-analyzer/src/handlers/request.rs
@@ -1439,7 +1439,7 @@
};
let assists = snap.analysis.assists_with_fixes(
&assists_config,
- &snap.config.diagnostics(Some(source_root)),
+ &snap.config.diagnostic_fixes(Some(source_root)),
resolve,
frange,
)?;
@@ -1530,7 +1530,7 @@
let assists = snap.analysis.assists_with_fixes(
&assists_config,
- &snap.config.diagnostics(Some(source_root)),
+ &snap.config.diagnostic_fixes(Some(source_root)),
AssistResolveStrategy::Single(assist_resolve),
frange,
)?;
diff --git a/crates/rust-analyzer/src/lsp/from_proto.rs b/crates/rust-analyzer/src/lsp/from_proto.rs
index fb8a983..0275761 100644
--- a/crates/rust-analyzer/src/lsp/from_proto.rs
+++ b/crates/rust-analyzer/src/lsp/from_proto.rs
@@ -103,6 +103,7 @@
pub(crate) fn assist_kind(kind: lsp_types::CodeActionKind) -> Option<AssistKind> {
let assist_kind = match &kind {
+ k if k == &lsp_types::CodeActionKind::EMPTY => AssistKind::Generate,
k if k == &lsp_types::CodeActionKind::QUICKFIX => AssistKind::QuickFix,
k if k == &lsp_types::CodeActionKind::REFACTOR => AssistKind::Refactor,
k if k == &lsp_types::CodeActionKind::REFACTOR_EXTRACT => AssistKind::RefactorExtract,
diff --git a/crates/rust-analyzer/src/lsp/utils.rs b/crates/rust-analyzer/src/lsp/utils.rs
index 673eaa5..5bea708 100644
--- a/crates/rust-analyzer/src/lsp/utils.rs
+++ b/crates/rust-analyzer/src/lsp/utils.rs
@@ -108,8 +108,7 @@
/// edge users from being upset!
pub(crate) fn poke_rust_analyzer_developer(&mut self, message: String) {
let from_source_build = option_env!("POKE_RA_DEVS").is_some();
- let profiling_enabled = std::env::var("RA_PROFILE").is_ok();
- if from_source_build || profiling_enabled {
+ if from_source_build {
self.show_and_log_error(message, None);
}
}
diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs
index ae9e3e9..4677880 100644
--- a/crates/rust-analyzer/src/reload.rs
+++ b/crates/rust-analyzer/src/reload.rs
@@ -292,7 +292,7 @@
if let (Some(_command), Some(path)) = (&discover_command, &path) {
let build = linked_projects.iter().find_map(|project| match project {
- LinkedProject::InlineJsonProject(it) => it.crate_by_buildfile(path),
+ LinkedProject::InlineProjectJson(it) => it.crate_by_buildfile(path),
_ => None,
});
@@ -318,7 +318,7 @@
&progress,
)
}
- LinkedProject::InlineJsonProject(it) => {
+ LinkedProject::InlineProjectJson(it) => {
let workspace = project_model::ProjectWorkspace::load_inline(
it.clone(),
&cargo_config,
diff --git a/crates/rust-analyzer/tests/slow-tests/main.rs b/crates/rust-analyzer/tests/slow-tests/main.rs
index f6bcb56..59073af 100644
--- a/crates/rust-analyzer/tests/slow-tests/main.rs
+++ b/crates/rust-analyzer/tests/slow-tests/main.rs
@@ -975,10 +975,6 @@
}
fn out_dirs_check_impl(root_contains_symlink: bool) {
- if skip_slow_tests() {
- return;
- }
-
let mut server = Project::with_fixture(
r###"
//- /Cargo.toml
@@ -1130,12 +1126,18 @@
#[test]
fn out_dirs_check() {
+ if skip_slow_tests() {
+ return;
+ }
out_dirs_check_impl(false);
}
#[test]
#[cfg(not(windows))] // windows requires elevated permissions to create symlinks
fn root_contains_symlink_out_dirs_check() {
+ if skip_slow_tests() {
+ return;
+ }
out_dirs_check_impl(true);
}
diff --git a/crates/rust-analyzer/tests/slow-tests/ratoml.rs b/crates/rust-analyzer/tests/slow-tests/ratoml.rs
index 485f322..cac7efd 100644
--- a/crates/rust-analyzer/tests/slow-tests/ratoml.rs
+++ b/crates/rust-analyzer/tests/slow-tests/ratoml.rs
@@ -439,6 +439,7 @@
}
#[test]
+#[ignore = "flaky test that tends to hang"]
fn ratoml_inherit_config_from_ws_root() {
if skip_slow_tests() {
return;
diff --git a/crates/span/src/lib.rs b/crates/span/src/lib.rs
index 54f9090..f81648a 100644
--- a/crates/span/src/lib.rs
+++ b/crates/span/src/lib.rs
@@ -112,7 +112,10 @@
impl fmt::Debug for EditionedFileId {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- f.debug_tuple("EditionedFileId").field(&self.file_id()).field(&self.edition()).finish()
+ f.debug_tuple("EditionedFileId")
+ .field(&self.file_id().index())
+ .field(&self.edition())
+ .finish()
}
}
diff --git a/crates/syntax-bridge/src/prettify_macro_expansion.rs b/crates/syntax-bridge/src/prettify_macro_expansion.rs
index e815e07..0a5c8df 100644
--- a/crates/syntax-bridge/src/prettify_macro_expansion.rs
+++ b/crates/syntax-bridge/src/prettify_macro_expansion.rs
@@ -7,6 +7,13 @@
ted::{self, Position},
};
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub enum PrettifyWsKind {
+ Space,
+ Indent(usize),
+ Newline,
+}
+
/// Renders a [`SyntaxNode`] with whitespace inserted between tokens that require them.
///
/// This is an internal API that is only exported because `mbe` needs it for tests and cannot depend
@@ -15,7 +22,8 @@
#[deprecated = "use `hir_expand::prettify_macro_expansion()` instead"]
pub fn prettify_macro_expansion(
syn: SyntaxNode,
- dollar_crate_replacement: &mut dyn FnMut(&SyntaxToken) -> SyntaxToken,
+ dollar_crate_replacement: &mut dyn FnMut(&SyntaxToken) -> Option<SyntaxToken>,
+ inspect_mods: impl FnOnce(&[(Position, PrettifyWsKind)]),
) -> SyntaxNode {
let mut indent = 0;
let mut last: Option<SyntaxKind> = None;
@@ -27,14 +35,12 @@
let after = Position::after;
let do_indent = |pos: fn(_) -> Position, token: &SyntaxToken, indent| {
- (pos(token.clone()), make::tokens::whitespace(&" ".repeat(4 * indent)))
+ (pos(token.clone()), PrettifyWsKind::Indent(indent))
};
- let do_ws = |pos: fn(_) -> Position, token: &SyntaxToken| {
- (pos(token.clone()), make::tokens::single_space())
- };
- let do_nl = |pos: fn(_) -> Position, token: &SyntaxToken| {
- (pos(token.clone()), make::tokens::single_newline())
- };
+ let do_ws =
+ |pos: fn(_) -> Position, token: &SyntaxToken| (pos(token.clone()), PrettifyWsKind::Space);
+ let do_nl =
+ |pos: fn(_) -> Position, token: &SyntaxToken| (pos(token.clone()), PrettifyWsKind::Newline);
for event in syn.preorder_with_tokens() {
let token = match event {
@@ -46,20 +52,19 @@
) =>
{
if indent > 0 {
- mods.push((
- Position::after(node.clone()),
- make::tokens::whitespace(&" ".repeat(4 * indent)),
- ));
+ mods.push((Position::after(node.clone()), PrettifyWsKind::Indent(indent)));
}
if node.parent().is_some() {
- mods.push((Position::after(node), make::tokens::single_newline()));
+ mods.push((Position::after(node), PrettifyWsKind::Newline));
}
continue;
}
_ => continue,
};
if token.kind() == SyntaxKind::IDENT && token.text() == "$crate" {
- dollar_crate_replacements.push((token.clone(), dollar_crate_replacement(&token)));
+ if let Some(replacement) = dollar_crate_replacement(&token) {
+ dollar_crate_replacements.push((token.clone(), replacement));
+ }
}
let tok = &token;
@@ -129,8 +134,16 @@
last = Some(tok.kind());
}
+ inspect_mods(&mods);
for (pos, insert) in mods {
- ted::insert(pos, insert);
+ ted::insert_raw(
+ pos,
+ match insert {
+ PrettifyWsKind::Space => make::tokens::single_space(),
+ PrettifyWsKind::Indent(indent) => make::tokens::whitespace(&" ".repeat(4 * indent)),
+ PrettifyWsKind::Newline => make::tokens::single_newline(),
+ },
+ );
}
for (old, new) in dollar_crate_replacements {
ted::replace(old, new);
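
Note: recording the pending edits as `PrettifyWsKind` values instead of eagerly built whitespace tokens lets callers observe position/kind pairs through `inspect_mods` before anything is inserted, presumably so they can account for the added ranges. Rendering then happens in one place; a freestanding sketch of that mapping (same shape as the match above):

```rust
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum PrettifyWsKind {
    Space,
    Indent(usize),
    Newline,
}

// One indent level is four spaces, matching `make::tokens::whitespace`.
fn render(ws: PrettifyWsKind) -> String {
    match ws {
        PrettifyWsKind::Space => " ".to_owned(),
        PrettifyWsKind::Indent(level) => " ".repeat(4 * level),
        PrettifyWsKind::Newline => "\n".to_owned(),
    }
}

fn main() {
    assert_eq!(render(PrettifyWsKind::Indent(2)).len(), 8);
}
```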
diff --git a/crates/syntax/rust.ungram b/crates/syntax/rust.ungram
index 10abca7..c81da06 100644
--- a/crates/syntax/rust.ungram
+++ b/crates/syntax/rust.ungram
@@ -39,7 +39,10 @@
| NameRef GenericArgList?
| NameRef ParenthesizedArgList RetType?
| NameRef ReturnTypeSyntax
-| '<' Type ('as' PathType)? '>'
+| TypeAnchor
+
+TypeAnchor =
+ '<' Type ('as' PathType)? '>'
ReturnTypeSyntax =
'(' '..' ')'
@@ -98,7 +101,7 @@
'where' predicates:(WherePred (',' WherePred)* ','?)
WherePred =
- ('for' GenericParamList)? (Lifetime | Type) ':' TypeBoundList?
+ ('for' GenericParamList)? (Lifetime | Type) ':' TypeBoundList?
//*************************//
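
Note: factoring `TypeAnchor` out of `PathSegment` gives the `<Type as Trait>` qualifier its own node, which is what all the `.rast` updates above reflect. Source forms that now contain a `TYPE_ANCHOR` (plain Rust, compiles as-is):

```rust
fn main() {
    // `<i32>::...`: anchor with a type but no trait.
    let n = <i32>::from(1u8);
    // `<Vec<i32> as Default>::...`: anchor with both type and trait.
    let v = <Vec<i32> as Default>::default();
    assert_eq!((n, v.len()), (1, 0));
}
```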
diff --git a/crates/syntax/src/ast/generated/nodes.rs b/crates/syntax/src/ast/generated/nodes.rs
index cd9f4db..04c7e8a 100644
--- a/crates/syntax/src/ast/generated/nodes.rs
+++ b/crates/syntax/src/ast/generated/nodes.rs
@@ -1232,21 +1232,13 @@
support::child(&self.syntax)
}
#[inline]
- pub fn path_type(&self) -> Option<PathType> { support::child(&self.syntax) }
- #[inline]
pub fn ret_type(&self) -> Option<RetType> { support::child(&self.syntax) }
#[inline]
pub fn return_type_syntax(&self) -> Option<ReturnTypeSyntax> { support::child(&self.syntax) }
#[inline]
- pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+ pub fn type_anchor(&self) -> Option<TypeAnchor> { support::child(&self.syntax) }
#[inline]
pub fn coloncolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![::]) }
- #[inline]
- pub fn l_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![<]) }
- #[inline]
- pub fn r_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![>]) }
- #[inline]
- pub fn as_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![as]) }
}
pub struct PathType {
pub(crate) syntax: SyntaxNode,
@@ -1739,6 +1731,21 @@
#[inline]
pub fn type_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![type]) }
}
+pub struct TypeAnchor {
+ pub(crate) syntax: SyntaxNode,
+}
+impl TypeAnchor {
+ #[inline]
+ pub fn path_type(&self) -> Option<PathType> { support::child(&self.syntax) }
+ #[inline]
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+ #[inline]
+ pub fn l_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![<]) }
+ #[inline]
+ pub fn r_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![>]) }
+ #[inline]
+ pub fn as_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![as]) }
+}
pub struct TypeArg {
pub(crate) syntax: SyntaxNode,
}
@@ -7108,6 +7115,42 @@
f.debug_struct("TypeAlias").field("syntax", &self.syntax).finish()
}
}
+impl AstNode for TypeAnchor {
+ #[inline]
+ fn kind() -> SyntaxKind
+ where
+ Self: Sized,
+ {
+ TYPE_ANCHOR
+ }
+ #[inline]
+ fn can_cast(kind: SyntaxKind) -> bool { kind == TYPE_ANCHOR }
+ #[inline]
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ #[inline]
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl hash::Hash for TypeAnchor {
+ fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); }
+}
+impl Eq for TypeAnchor {}
+impl PartialEq for TypeAnchor {
+ fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax }
+}
+impl Clone for TypeAnchor {
+ fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } }
+}
+impl fmt::Debug for TypeAnchor {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("TypeAnchor").field("syntax", &self.syntax).finish()
+ }
+}
impl AstNode for TypeArg {
#[inline]
fn kind() -> SyntaxKind
@@ -10624,6 +10667,11 @@
std::fmt::Display::fmt(self.syntax(), f)
}
}
+impl std::fmt::Display for TypeAnchor {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
impl std::fmt::Display for TypeArg {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
std::fmt::Display::fmt(self.syntax(), f)
diff --git a/crates/syntax/src/ast/node_ext.rs b/crates/syntax/src/ast/node_ext.rs
index b9ccd34..dcf8534 100644
--- a/crates/syntax/src/ast/node_ext.rs
+++ b/crates/syntax/src/ast/node_ext.rs
@@ -276,18 +276,15 @@
_ => PathSegmentKind::Name(name_ref),
}
} else {
- match self.syntax().first_child_or_token()?.kind() {
- T![<] => {
- // <T> or <T as Trait>
- // T is any TypeRef, Trait has to be a PathType
- let mut type_refs =
- self.syntax().children().filter(|node| ast::Type::can_cast(node.kind()));
- let type_ref = type_refs.next().and_then(ast::Type::cast);
- let trait_ref = type_refs.next().and_then(ast::PathType::cast);
- PathSegmentKind::Type { type_ref, trait_ref }
- }
- _ => return None,
- }
+ let anchor = self.type_anchor()?;
+ // FIXME: Move this over to `ast::TypeAnchor`
+ // <T> or <T as Trait>
+ // T is any TypeRef, Trait has to be a PathType
+ let mut type_refs =
+ anchor.syntax().children().filter(|node| ast::Type::can_cast(node.kind()));
+ let type_ref = type_refs.next().and_then(ast::Type::cast);
+ let trait_ref = type_refs.next().and_then(ast::PathType::cast);
+ PathSegmentKind::Type { type_ref, trait_ref }
};
Some(res)
}
@@ -473,7 +470,7 @@
// [#15778](https://github.com/rust-lang/rust-analyzer/issues/15778)
impl ast::PathSegment {
pub fn qualifying_trait(&self) -> Option<ast::PathType> {
- let mut path_types = support::children(self.syntax());
+ let mut path_types = support::children(self.type_anchor()?.syntax());
let first = path_types.next()?;
path_types.next().or(Some(first))
}
diff --git a/crates/syntax/src/ted.rs b/crates/syntax/src/ted.rs
index 64d5ea0..6fcbdd0 100644
--- a/crates/syntax/src/ted.rs
+++ b/crates/syntax/src/ted.rs
@@ -5,6 +5,7 @@
use std::{mem, ops::RangeInclusive};
use parser::T;
+use rowan::TextSize;
use crate::{
SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken,
@@ -74,6 +75,12 @@
};
Position { repr }
}
+ pub fn offset(&self) -> TextSize {
+ match &self.repr {
+ PositionRepr::FirstChild(node) => node.text_range().start(),
+ PositionRepr::After(elem) => elem.text_range().end(),
+ }
+ }
}
pub fn insert(position: Position, elem: impl Element) {
@@ -207,5 +214,12 @@
}
return Some(make::tokens::whitespace(&format!("\n{indent}")));
}
+ if left.kind() == SyntaxKind::ATTR {
+ let mut indent = IndentLevel::from_element(right);
+ if right.kind() == SyntaxKind::ATTR {
+ indent.0 = IndentLevel::from_element(left).0.max(indent.0);
+ }
+ return Some(make::tokens::whitespace(&format!("\n{indent}")));
+ }
Some(make::tokens::single_space())
}
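
Note: the new `ATTR` arm changes the default whitespace between an attribute and whatever follows from a single space to a newline at the follower's indent, taking the larger indent when two attributes meet. Sketched as before/after source (assumed effect on edits built through `ted`, illustrative only):

```rust
// Before this change, inserting an item after an attribute yielded
//     #[derive(Debug)] struct S;
// Afterwards the attribute is followed by a newline at the item's indent:
//     #[derive(Debug)]
//     struct S;
// When two attributes meet, the larger of their indent levels wins.
```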
diff --git a/crates/test-fixture/src/lib.rs b/crates/test-fixture/src/lib.rs
index 96e1301..8eb48f8 100644
--- a/crates/test-fixture/src/lib.rs
+++ b/crates/test-fixture/src/lib.rs
@@ -538,6 +538,21 @@
disabled: false,
},
),
+ (
+ r#"
+#[proc_macro_attribute]
+pub fn generate_suffixed_type(_attr: TokenStream, input: TokenStream) -> TokenStream {
+ input
+}
+"#
+ .into(),
+ ProcMacro {
+ name: Symbol::intern("generate_suffixed_type"),
+ kind: ProcMacroKind::Attr,
+ expander: sync::Arc::new(GenerateSuffixedTypeProcMacroExpander),
+ disabled: false,
+ },
+ ),
])
}
@@ -919,3 +934,57 @@
Ok(subtree.clone())
}
}
+
+// Generates a new type by adding a suffix to the original name
+#[derive(Debug)]
+struct GenerateSuffixedTypeProcMacroExpander;
+impl ProcMacroExpander for GenerateSuffixedTypeProcMacroExpander {
+ fn expand(
+ &self,
+ subtree: &TopSubtree,
+ _attrs: Option<&TopSubtree>,
+ _env: &Env,
+ _def_site: Span,
+ call_site: Span,
+ _mixed_site: Span,
+ _current_dir: String,
+ ) -> Result<TopSubtree, ProcMacroExpansionError> {
+ let TokenTree::Leaf(Leaf::Ident(ident)) = &subtree.0[1] else {
+            return Err(ProcMacroExpansionError::Panic("incorrect input".into()));
+ };
+
+ let ident = match ident.sym.as_str() {
+ "struct" => {
+ let TokenTree::Leaf(Leaf::Ident(ident)) = &subtree.0[2] else {
+                    return Err(ProcMacroExpansionError::Panic("incorrect input".into()));
+ };
+ ident
+ }
+
+ "enum" => {
+ let TokenTree::Leaf(Leaf::Ident(ident)) = &subtree.0[4] else {
+                    return Err(ProcMacroExpansionError::Panic("incorrect input".into()));
+ };
+ ident
+ }
+
+ _ => {
+                return Err(ProcMacroExpansionError::Panic("incorrect input".into()));
+ }
+ };
+
+ let generated_ident = tt::Ident {
+ sym: Symbol::intern(&format!("{}Suffix", ident.sym)),
+ span: ident.span,
+ is_raw: tt::IdentIsRaw::No,
+ };
+
+ let ret = quote! { call_site =>
+ #subtree
+
+ struct #generated_ident;
+ };
+
+ Ok(ret)
+ }
+}
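
Note: `GenerateSuffixedTypeProcMacroExpander` peeks at the item's leading tokens to find the `struct`/`enum` keyword and the item name, then re-emits the item followed by a `...Suffix` struct. Roughly, as source (illustrative expansion, not generated text):

```rust
// Input to the test-only attribute macro:
//     #[generate_suffixed_type]
//     struct Foo;
// Expansion, as produced by the `quote!` call above:
//     struct Foo;
//     struct FooSuffix;
```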
diff --git a/crates/vfs/src/vfs_path.rs b/crates/vfs/src/vfs_path.rs
index c35b7f2..7e2c787 100644
--- a/crates/vfs/src/vfs_path.rs
+++ b/crates/vfs/src/vfs_path.rs
@@ -39,6 +39,13 @@
}
}
+ pub fn into_abs_path(self) -> Option<AbsPathBuf> {
+ match self.0 {
+ VfsPathRepr::PathBuf(it) => Some(it),
+ VfsPathRepr::VirtualPath(_) => None,
+ }
+ }
+
/// Creates a new `VfsPath` with `path` adjoined to `self`.
pub fn join(&self, path: &str) -> Option<VfsPath> {
match &self.0 {
diff --git a/docs/book/src/non_cargo_based_projects.md b/docs/book/src/non_cargo_based_projects.md
index 151f875..bbdb48b 100644
--- a/docs/book/src/non_cargo_based_projects.md
+++ b/docs/book/src/non_cargo_based_projects.md
@@ -5,7 +5,7 @@
rust-analyzer in the `rust-project.json` format:
```typescript
-interface JsonProject {
+interface ProjectJson {
/// Path to the sysroot directory.
///
/// The sysroot is where rustc looks for the
diff --git a/editors/code/package-lock.json b/editors/code/package-lock.json
index 18fb097..ab43114 100644
--- a/editors/code/package-lock.json
+++ b/editors/code/package-lock.json
@@ -5500,9 +5500,9 @@
}
},
"node_modules/tar-fs": {
- "version": "2.1.2",
- "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.2.tgz",
- "integrity": "sha512-EsaAXwxmx8UB7FRKqeozqEPop69DXcmYwTQwXvyAPF352HJsPdkVhvTaDPYqfNgruveJIJy3TA2l+2zj8LJIJA==",
+ "version": "2.1.3",
+ "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.3.tgz",
+ "integrity": "sha512-090nwYJDmlhwFwEW3QQl+vaNnxsO2yVsd45eTKRBzSzu+hlb1w2K9inVq5b0ngXuLVqQ4ApvsUHHnu/zQNkWAg==",
"dev": true,
"license": "MIT",
"optional": true,
diff --git a/rust-version b/rust-version
index 5b47d1b..af0dd5c 100644
--- a/rust-version
+++ b/rust-version
@@ -1 +1 @@
-a8e4c68dcb4dc1e48a0db294c5323cab0227fcb9
+7c10378e1fee5ddc6573b916aeb884ab10e0de17
diff --git a/xtask/src/codegen.rs b/xtask/src/codegen.rs
index bba7ad7..19ca62e 100644
--- a/xtask/src/codegen.rs
+++ b/xtask/src/codegen.rs
@@ -24,8 +24,8 @@
grammar::generate(self.check);
assists_doc_tests::generate(self.check);
parser_inline_tests::generate(self.check);
- feature_docs::generate(self.check)
- // diagnostics_docs::generate(self.check) doesn't generate any tests
+ feature_docs::generate(self.check);
+ diagnostics_docs::generate(self.check);
// lints::generate(self.check) Updating clones the rust repo, so don't run it unless
// explicitly asked for
}