Auto merge of #17462 - Veykril:sema-attr-macro-res, r=Veykril

fix: Fix IDE features breaking in some attr macros

Fixes https://github.com/rust-lang/rust-analyzer/issues/17453, Fixes https://github.com/rust-lang/rust-analyzer/issues/17458
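
For context, a rough sketch of the kind of code involved (the attribute and item names below are placeholders, not taken from the linked issues): while the user is still typing inside an item annotated with an attribute macro, the body can contain incomplete syntax. The diff teaches syntax fixup to handle more of these shapes (an unclosed argument list, a trailing `path::`, `R { f: }`, a closure without a body) and stops basing spans on `FileId::BOGUS`, so IDE features keep working in such bodies.

```rust
// Hypothetical fixture sketch; `identity_attr` stands in for any proc-macro
// attribute and `$0` marks the cursor position, as in rust-analyzer's tests.
const FIXTURE: &str = r#"
#[identity_attr]
fn demo() {
    // An unclosed call like this used to break IDE features for the whole
    // item once it was re-emitted by the attribute macro.
    foo(a$0
}
"#;

fn main() {
    // Nothing to run; the constant just documents the repro shape.
    println!("{FIXTURE}");
}
```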
diff --git a/crates/flycheck/src/lib.rs b/crates/flycheck/src/lib.rs
index 36be514..4584400 100644
--- a/crates/flycheck/src/lib.rs
+++ b/crates/flycheck/src/lib.rs
@@ -386,6 +386,7 @@
                 "did  cancel flycheck"
             );
             command_handle.cancel();
+            self.command_receiver.take();
             self.report_progress(Progress::DidCancel);
             self.status = FlycheckStatus::Finished;
         }
diff --git a/crates/hir-def/src/body/lower.rs b/crates/hir-def/src/body/lower.rs
index 5a5a8d3..faba905 100644
--- a/crates/hir-def/src/body/lower.rs
+++ b/crates/hir-def/src/body/lower.rs
@@ -965,7 +965,7 @@
                     .resolve_path(
                         self.db,
                         module,
-                        &path,
+                        path,
                         crate::item_scope::BuiltinShadowMode::Other,
                         Some(MacroSubNs::Bang),
                     )
diff --git a/crates/hir-def/src/data.rs b/crates/hir-def/src/data.rs
index 077ded3..4338163 100644
--- a/crates/hir-def/src/data.rs
+++ b/crates/hir-def/src/data.rs
@@ -628,7 +628,7 @@
             'attrs: for attr in &*attrs {
                 let ast_id =
                     AstId::new(self.expander.current_file_id(), item.ast_id(item_tree).upcast());
-                let ast_id_with_path = AstIdWithPath { path: (*attr.path).clone(), ast_id };
+                let ast_id_with_path = AstIdWithPath { path: attr.path.clone(), ast_id };
 
                 match self.def_map.resolve_attr_macro(
                     self.db,
@@ -719,12 +719,12 @@
                 let MacroCall { ast_id, expand_to, ctxt, ref path } = item_tree[call];
                 let module = self.expander.module.local_id;
 
-                let resolver = |path| {
+                let resolver = |path: &_| {
                     self.def_map
                         .resolve_path(
                             self.db,
                             module,
-                            &path,
+                            path,
                             crate::item_scope::BuiltinShadowMode::Other,
                             Some(MacroSubNs::Bang),
                         )
diff --git a/crates/hir-def/src/expander.rs b/crates/hir-def/src/expander.rs
index 73ce942..dbf8e6b 100644
--- a/crates/hir-def/src/expander.rs
+++ b/crates/hir-def/src/expander.rs
@@ -56,7 +56,7 @@
         &mut self,
         db: &dyn DefDatabase,
         macro_call: ast::MacroCall,
-        resolver: impl Fn(ModPath) -> Option<MacroId>,
+        resolver: impl Fn(&ModPath) -> Option<MacroId>,
     ) -> Result<ExpandResult<Option<(Mark, Parse<T>)>>, UnresolvedMacro> {
         // FIXME: within_limit should support this, instead of us having to extract the error
         let mut unresolved_macro_err = None;
diff --git a/crates/hir-def/src/generics.rs b/crates/hir-def/src/generics.rs
index a23df7f..acead9d 100644
--- a/crates/hir-def/src/generics.rs
+++ b/crates/hir-def/src/generics.rs
@@ -403,12 +403,12 @@
                 let (def_map, expander) = &mut **exp;
 
                 let module = expander.module.local_id;
-                let resolver = |path| {
+                let resolver = |path: &_| {
                     def_map
                         .resolve_path(
                             db,
                             module,
-                            &path,
+                            path,
                             crate::item_scope::BuiltinShadowMode::Other,
                             Some(MacroSubNs::Bang),
                         )
diff --git a/crates/hir-def/src/hir.rs b/crates/hir-def/src/hir.rs
index 2f7ebbf..fd6f4a3 100644
--- a/crates/hir-def/src/hir.rs
+++ b/crates/hir-def/src/hir.rs
@@ -503,11 +503,11 @@
 
 #[derive(Debug, Clone, Eq, PartialEq)]
 pub enum BindingProblems {
-    /// https://doc.rust-lang.org/stable/error_codes/E0416.html
+    /// <https://doc.rust-lang.org/stable/error_codes/E0416.html>
     BoundMoreThanOnce,
-    /// https://doc.rust-lang.org/stable/error_codes/E0409.html
+    /// <https://doc.rust-lang.org/stable/error_codes/E0409.html>
     BoundInconsistently,
-    /// https://doc.rust-lang.org/stable/error_codes/E0408.html
+    /// <https://doc.rust-lang.org/stable/error_codes/E0408.html>
     NotBoundAcrossAll,
 }
 
diff --git a/crates/hir-def/src/lib.rs b/crates/hir-def/src/lib.rs
index b86703c..0dcd912 100644
--- a/crates/hir-def/src/lib.rs
+++ b/crates/hir-def/src/lib.rs
@@ -56,6 +56,7 @@
 pub mod import_map;
 pub mod visibility;
 
+use intern::Interned;
 pub use rustc_abi as layout;
 use triomphe::Arc;
 
@@ -72,7 +73,7 @@
 
 use base_db::{
     impl_intern_key,
-    salsa::{self, impl_intern_value_trivial},
+    salsa::{self, InternValueTrivial},
     CrateId,
 };
 use hir_expand::{
@@ -90,7 +91,7 @@
 use item_tree::ExternBlock;
 use la_arena::Idx;
 use nameres::DefMap;
-use span::{AstIdNode, Edition, FileAstId, FileId, SyntaxContextId};
+use span::{AstIdNode, Edition, FileAstId, SyntaxContextId};
 use stdx::impl_from;
 use syntax::{ast, AstNode};
 
@@ -186,7 +187,7 @@
 macro_rules! impl_intern {
     ($id:ident, $loc:ident, $intern:ident, $lookup:ident) => {
         impl_intern_key!($id);
-        impl_intern_value_trivial!($loc);
+        impl InternValueTrivial for $loc {}
         impl_intern_lookup!(DefDatabase, $id, $loc, $intern, $lookup);
     };
 }
@@ -534,7 +535,7 @@
     pub parent: GenericDefId,
     pub local_id: LocalTypeOrConstParamId,
 }
-impl_intern_value_trivial!(TypeOrConstParamId);
+impl InternValueTrivial for TypeOrConstParamId {}
 
 /// A TypeOrConstParamId with an invariant that it actually belongs to a type
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -596,7 +597,7 @@
     pub local_id: LocalLifetimeParamId,
 }
 pub type LocalLifetimeParamId = Idx<generics::LifetimeParamData>;
-impl_intern_value_trivial!(LifetimeParamId);
+impl InternValueTrivial for LifetimeParamId {}
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub enum ItemContainerId {
@@ -958,15 +959,14 @@
         match self {
             GenericDefId::FunctionId(it) => file_id_and_params_of_item_loc(db, it),
             GenericDefId::TypeAliasId(it) => file_id_and_params_of_item_loc(db, it),
-            GenericDefId::ConstId(_) => (FileId::BOGUS.into(), None),
             GenericDefId::AdtId(AdtId::StructId(it)) => file_id_and_params_of_item_loc(db, it),
             GenericDefId::AdtId(AdtId::UnionId(it)) => file_id_and_params_of_item_loc(db, it),
             GenericDefId::AdtId(AdtId::EnumId(it)) => file_id_and_params_of_item_loc(db, it),
             GenericDefId::TraitId(it) => file_id_and_params_of_item_loc(db, it),
             GenericDefId::TraitAliasId(it) => file_id_and_params_of_item_loc(db, it),
             GenericDefId::ImplId(it) => file_id_and_params_of_item_loc(db, it),
-            // We won't be using this ID anyway
-            GenericDefId::EnumVariantId(_) => (FileId::BOGUS.into(), None),
+            GenericDefId::ConstId(it) => (it.lookup(db).id.file_id(), None),
+            GenericDefId::EnumVariantId(it) => (it.lookup(db).id.file_id(), None),
         }
     }
 
@@ -1370,7 +1370,7 @@
         &self,
         db: &dyn ExpandDatabase,
         krate: CrateId,
-        resolver: impl Fn(path::ModPath) -> Option<MacroDefId> + Copy,
+        resolver: impl Fn(&path::ModPath) -> Option<MacroDefId> + Copy,
     ) -> Option<MacroCallId> {
         self.as_call_id_with_errors(db, krate, resolver).ok()?.value
     }
@@ -1379,7 +1379,7 @@
         &self,
         db: &dyn ExpandDatabase,
         krate: CrateId,
-        resolver: impl Fn(path::ModPath) -> Option<MacroDefId> + Copy,
+        resolver: impl Fn(&path::ModPath) -> Option<MacroDefId> + Copy,
     ) -> Result<ExpandResult<Option<MacroCallId>>, UnresolvedMacro>;
 }
 
@@ -1388,7 +1388,7 @@
         &self,
         db: &dyn ExpandDatabase,
         krate: CrateId,
-        resolver: impl Fn(path::ModPath) -> Option<MacroDefId> + Copy,
+        resolver: impl Fn(&path::ModPath) -> Option<MacroDefId> + Copy,
     ) -> Result<ExpandResult<Option<MacroCallId>>, UnresolvedMacro> {
         let expands_to = hir_expand::ExpandTo::from_call_site(self.value);
         let ast_id = AstId::new(self.file_id, db.ast_id_map(self.file_id).ast_id(self.value));
@@ -1408,7 +1408,8 @@
 
         macro_call_as_call_id_with_eager(
             db,
-            &AstIdWithPath::new(ast_id.file_id, ast_id.value, path),
+            ast_id,
+            &path,
             call_site.ctx,
             expands_to,
             krate,
@@ -1422,11 +1423,15 @@
 #[derive(Clone, Debug, Eq, PartialEq)]
 struct AstIdWithPath<T: AstIdNode> {
     ast_id: AstId<T>,
-    path: path::ModPath,
+    path: Interned<path::ModPath>,
 }
 
 impl<T: AstIdNode> AstIdWithPath<T> {
-    fn new(file_id: HirFileId, ast_id: FileAstId<T>, path: path::ModPath) -> AstIdWithPath<T> {
+    fn new(
+        file_id: HirFileId,
+        ast_id: FileAstId<T>,
+        path: Interned<path::ModPath>,
+    ) -> AstIdWithPath<T> {
         AstIdWithPath { ast_id: AstId::new(file_id, ast_id), path }
     }
 }
@@ -1437,30 +1442,39 @@
     call_site: SyntaxContextId,
     expand_to: ExpandTo,
     krate: CrateId,
-    resolver: impl Fn(path::ModPath) -> Option<MacroDefId> + Copy,
+    resolver: impl Fn(&path::ModPath) -> Option<MacroDefId> + Copy,
 ) -> Result<Option<MacroCallId>, UnresolvedMacro> {
-    macro_call_as_call_id_with_eager(db, call, call_site, expand_to, krate, resolver, resolver)
-        .map(|res| res.value)
+    macro_call_as_call_id_with_eager(
+        db,
+        call.ast_id,
+        &call.path,
+        call_site,
+        expand_to,
+        krate,
+        resolver,
+        resolver,
+    )
+    .map(|res| res.value)
 }
 
 fn macro_call_as_call_id_with_eager(
     db: &dyn ExpandDatabase,
-    call: &AstIdWithPath<ast::MacroCall>,
+    ast_id: AstId<ast::MacroCall>,
+    path: &path::ModPath,
     call_site: SyntaxContextId,
     expand_to: ExpandTo,
     krate: CrateId,
-    resolver: impl FnOnce(path::ModPath) -> Option<MacroDefId>,
-    eager_resolver: impl Fn(path::ModPath) -> Option<MacroDefId>,
+    resolver: impl FnOnce(&path::ModPath) -> Option<MacroDefId>,
+    eager_resolver: impl Fn(&path::ModPath) -> Option<MacroDefId>,
 ) -> Result<ExpandResult<Option<MacroCallId>>, UnresolvedMacro> {
-    let def =
-        resolver(call.path.clone()).ok_or_else(|| UnresolvedMacro { path: call.path.clone() })?;
+    let def = resolver(path).ok_or_else(|| UnresolvedMacro { path: path.clone() })?;
 
     let res = match def.kind {
         MacroDefKind::BuiltInEager(..) => expand_eager_macro_input(
             db,
             krate,
-            &call.ast_id.to_node(db),
-            call.ast_id,
+            &ast_id.to_node(db),
+            ast_id,
             def,
             call_site,
             &|path| eager_resolver(path).filter(MacroDefId::is_fn_like),
@@ -1469,12 +1483,12 @@
             value: Some(def.make_call(
                 db,
                 krate,
-                MacroCallKind::FnLike { ast_id: call.ast_id, expand_to, eager: None },
+                MacroCallKind::FnLike { ast_id, expand_to, eager: None },
                 call_site,
             )),
             err: None,
         },
-        _ => return Err(UnresolvedMacro { path: call.path.clone() }),
+        _ => return Err(UnresolvedMacro { path: path.clone() }),
     };
     Ok(res)
 }
diff --git a/crates/hir-def/src/macro_expansion_tests/mbe.rs b/crates/hir-def/src/macro_expansion_tests/mbe.rs
index c5c26e2..4058159 100644
--- a/crates/hir-def/src/macro_expansion_tests/mbe.rs
+++ b/crates/hir-def/src/macro_expansion_tests/mbe.rs
@@ -1883,3 +1883,41 @@
 "#]],
     );
 }
+
+#[test]
+fn test_pat_fragment_eof_17441() {
+    check(
+        r#"
+macro_rules! matches {
+    ($expression:expr, $pattern:pat $(if $guard:expr)? ) => {
+        match $expression {
+            $pattern $(if $guard)? => true,
+            _ => false
+        }
+    };
+}
+fn f() {
+    matches!(0, 10..);
+    matches!(0, 10.. if true);
+}
+ "#,
+        expect![[r#"
+macro_rules! matches {
+    ($expression:expr, $pattern:pat $(if $guard:expr)? ) => {
+        match $expression {
+            $pattern $(if $guard)? => true,
+            _ => false
+        }
+    };
+}
+fn f() {
+    match 0 {
+        10.. =>true , _=>false
+    };
+    match 0 {
+        10..if true =>true , _=>false
+    };
+}
+ "#]],
+    );
+}
diff --git a/crates/hir-def/src/macro_expansion_tests/mod.rs b/crates/hir-def/src/macro_expansion_tests/mod.rs
index 8904aca..dc964b3 100644
--- a/crates/hir-def/src/macro_expansion_tests/mod.rs
+++ b/crates/hir-def/src/macro_expansion_tests/mod.rs
@@ -96,7 +96,7 @@
         let res = macro_call
             .as_call_id_with_errors(&db, krate, |path| {
                 resolver
-                    .resolve_path_as_macro(&db, &path, Some(MacroSubNs::Bang))
+                    .resolve_path_as_macro(&db, path, Some(MacroSubNs::Bang))
                     .map(|(it, _)| db.macro_def(it))
             })
             .unwrap();
diff --git a/crates/hir-def/src/nameres.rs b/crates/hir-def/src/nameres.rs
index 256a37f..162b642 100644
--- a/crates/hir-def/src/nameres.rs
+++ b/crates/hir-def/src/nameres.rs
@@ -16,8 +16,8 @@
 //!
 //! This happens in the `raw` module, which parses a single source file into a
 //! set of top-level items. Nested imports are desugared to flat imports in this
-//! phase. Macro calls are represented as a triple of (Path, Option<Name>,
-//! TokenTree).
+//! phase. Macro calls are represented as a triple of `(Path, Option<Name>,
+//! TokenTree)`.
 //!
 //! ## Collecting Modules
 //!
diff --git a/crates/hir-def/src/nameres/attr_resolution.rs b/crates/hir-def/src/nameres/attr_resolution.rs
index 5829887..f842027 100644
--- a/crates/hir-def/src/nameres/attr_resolution.rs
+++ b/crates/hir-def/src/nameres/attr_resolution.rs
@@ -59,7 +59,7 @@
                     return Ok(ResolvedAttr::Other);
                 }
             }
-            None => return Err(UnresolvedMacro { path: ast_id.path }),
+            None => return Err(UnresolvedMacro { path: ast_id.path.as_ref().clone() }),
         };
 
         Ok(ResolvedAttr::Macro(attr_macro_as_call_id(
@@ -137,12 +137,12 @@
     derive_pos: u32,
     call_site: SyntaxContextId,
     krate: CrateId,
-    resolver: impl Fn(path::ModPath) -> Option<(MacroId, MacroDefId)>,
+    resolver: impl Fn(&path::ModPath) -> Option<(MacroId, MacroDefId)>,
     derive_macro_id: MacroCallId,
 ) -> Result<(MacroId, MacroDefId, MacroCallId), UnresolvedMacro> {
-    let (macro_id, def_id) = resolver(item_attr.path.clone())
+    let (macro_id, def_id) = resolver(&item_attr.path)
         .filter(|(_, def_id)| def_id.is_derive())
-        .ok_or_else(|| UnresolvedMacro { path: item_attr.path.clone() })?;
+        .ok_or_else(|| UnresolvedMacro { path: item_attr.path.as_ref().clone() })?;
     let call_id = def_id.make_call(
         db.upcast(),
         krate,
diff --git a/crates/hir-def/src/nameres/collector.rs b/crates/hir-def/src/nameres/collector.rs
index cf5e0c2..6d2eb71 100644
--- a/crates/hir-def/src/nameres/collector.rs
+++ b/crates/hir-def/src/nameres/collector.rs
@@ -17,11 +17,12 @@
     proc_macro::CustomProcMacroExpander,
     ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind, MacroDefId, MacroDefKind,
 };
+use intern::Interned;
 use itertools::{izip, Itertools};
 use la_arena::Idx;
 use limit::Limit;
 use rustc_hash::{FxHashMap, FxHashSet};
-use span::{Edition, ErasedFileAstId, FileAstId, Span, SyntaxContextId};
+use span::{Edition, ErasedFileAstId, FileAstId, SyntaxContextId};
 use syntax::ast;
 use triomphe::Arc;
 
@@ -75,36 +76,23 @@
 
     let proc_macros = if krate.is_proc_macro {
         match db.proc_macros().get(&def_map.krate) {
-            Some(Ok(proc_macros)) => {
-                Ok(proc_macros
-                    .iter()
-                    .enumerate()
-                    .map(|(idx, it)| {
-                        // FIXME: a hacky way to create a Name from string.
-                        let name = tt::Ident {
-                            text: it.name.clone(),
-                            span: Span {
-                                range: syntax::TextRange::empty(syntax::TextSize::new(0)),
-                                anchor: span::SpanAnchor {
-                                    file_id: FileId::BOGUS,
-                                    ast_id: span::ROOT_ERASED_FILE_AST_ID,
-                                },
-                                ctx: SyntaxContextId::ROOT,
-                            },
-                        };
-                        (
-                            name.as_name(),
-                            if it.disabled {
-                                CustomProcMacroExpander::disabled()
-                            } else {
-                                CustomProcMacroExpander::new(
-                                    hir_expand::proc_macro::ProcMacroId::new(idx as u32),
-                                )
-                            },
-                        )
-                    })
-                    .collect())
-            }
+            Some(Ok(proc_macros)) => Ok(proc_macros
+                .iter()
+                .enumerate()
+                .map(|(idx, it)| {
+                    let name = Name::new_text_dont_use(it.name.clone());
+                    (
+                        name,
+                        if it.disabled {
+                            CustomProcMacroExpander::disabled()
+                        } else {
+                            CustomProcMacroExpander::new(hir_expand::proc_macro::ProcMacroId::new(
+                                idx as u32,
+                            ))
+                        },
+                    )
+                })
+                .collect()),
             Some(Err(e)) => Err(e.clone().into_boxed_str()),
             None => Err("No proc-macros present for crate".to_owned().into_boxed_str()),
         }
@@ -1137,18 +1125,18 @@
                     MacroSubNs::Attr
                 }
             };
-            let resolver = |path| {
+            let resolver = |path: &_| {
                 let resolved_res = self.def_map.resolve_path_fp_with_macro(
                     self.db,
                     ResolveMode::Other,
                     directive.module_id,
-                    &path,
+                    path,
                     BuiltinShadowMode::Module,
                     Some(subns),
                 );
                 resolved_res.resolved_def.take_macros().map(|it| (it, self.db.macro_def(it)))
             };
-            let resolver_def_id = |path| resolver(path).map(|(_, it)| it);
+            let resolver_def_id = |path: &_| resolver(path).map(|(_, it)| it);
 
             match &directive.kind {
                 MacroDirectiveKind::FnLike { ast_id, expand_to, ctxt: call_site } => {
@@ -1251,7 +1239,7 @@
                         }
                     }
 
-                    let def = match resolver_def_id(path.clone()) {
+                    let def = match resolver_def_id(path) {
                         Some(def) if def.is_attribute() => def,
                         _ => return Resolved::No,
                     };
@@ -1300,7 +1288,11 @@
                                 let call_id = call_id();
                                 let mut len = 0;
                                 for (idx, (path, call_site)) in derive_macros.enumerate() {
-                                    let ast_id = AstIdWithPath::new(file_id, ast_id.value, path);
+                                    let ast_id = AstIdWithPath::new(
+                                        file_id,
+                                        ast_id.value,
+                                        Interned::new(path),
+                                    );
                                     self.unresolved_macros.push(MacroDirective {
                                         module_id: directive.module_id,
                                         depth: directive.depth + 1,
@@ -1439,7 +1431,7 @@
                                 self.db,
                                 ResolveMode::Other,
                                 directive.module_id,
-                                &path,
+                                path,
                                 BuiltinShadowMode::Module,
                                 Some(MacroSubNs::Bang),
                             );
@@ -1473,7 +1465,7 @@
                             derive_index: *derive_pos as u32,
                             derive_macro_id: *derive_macro_id,
                         },
-                        ast_id.path.clone(),
+                        ast_id.path.as_ref().clone(),
                     ));
                 }
                 // These are diagnosed by `reseed_with_unresolved_attribute`, as that function consumes them
@@ -2108,7 +2100,7 @@
             let ast_id = AstIdWithPath::new(
                 self.file_id(),
                 mod_item.ast_id(self.item_tree),
-                attr.path.as_ref().clone(),
+                attr.path.clone(),
             );
             self.def_collector.unresolved_macros.push(MacroDirective {
                 module_id: self.module_id,
@@ -2154,19 +2146,7 @@
             let name;
             let name = match attrs.by_key("rustc_builtin_macro").string_value() {
                 Some(it) => {
-                    // FIXME: a hacky way to create a Name from string.
-                    name = tt::Ident {
-                        text: it.into(),
-                        span: Span {
-                            range: syntax::TextRange::empty(syntax::TextSize::new(0)),
-                            anchor: span::SpanAnchor {
-                                file_id: FileId::BOGUS,
-                                ast_id: span::ROOT_ERASED_FILE_AST_ID,
-                            },
-                            ctx: SyntaxContextId::ROOT,
-                        },
-                    }
-                    .as_name();
+                    name = Name::new_text_dont_use(it.into());
                     &name
                 }
                 None => {
@@ -2302,7 +2282,7 @@
         &MacroCall { ref path, ast_id, expand_to, ctxt }: &MacroCall,
         container: ItemContainerId,
     ) {
-        let ast_id = AstIdWithPath::new(self.file_id(), ast_id, ModPath::clone(path));
+        let ast_id = AstIdWithPath::new(self.file_id(), ast_id, path.clone());
         let db = self.def_collector.db;
 
         // FIXME: Immediately expanding in "Case 1" is insufficient since "Case 2" may also define
@@ -2312,7 +2292,8 @@
         // Case 1: try to resolve macro calls with single-segment name and expand macro_rules
         if let Ok(res) = macro_call_as_call_id_with_eager(
             db.upcast(),
-            &ast_id,
+            ast_id.ast_id,
+            &ast_id.path,
             ctxt,
             expand_to,
             self.def_collector.def_map.krate,
@@ -2339,7 +2320,7 @@
                     db,
                     ResolveMode::Other,
                     self.module_id,
-                    &path,
+                    path,
                     BuiltinShadowMode::Module,
                     Some(MacroSubNs::Bang),
                 );
diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs
index 30042ca..ad25a11 100644
--- a/crates/hir-expand/src/db.rs
+++ b/crates/hir-expand/src/db.rs
@@ -146,13 +146,11 @@
     token_to_map: SyntaxToken,
 ) -> Option<(SyntaxNode, SyntaxToken)> {
     let loc = db.lookup_intern_macro_call(actual_macro_call);
-
-    // FIXME: This BOGUS here is dangerous once the proc-macro server can call back into the database!
-    let span_map = RealSpanMap::absolute(FileId::BOGUS);
-    let span_map = SpanMapRef::RealSpanMap(&span_map);
-
     let (_, _, span) = db.macro_arg_considering_derives(actual_macro_call, &loc.kind);
 
+    let span_map = RealSpanMap::absolute(span.anchor.file_id);
+    let span_map = SpanMapRef::RealSpanMap(&span_map);
+
     // Build the subtree and token mapping for the speculative args
     let (mut tt, undo_info) = match loc.kind {
         MacroCallKind::FnLike { .. } => (
diff --git a/crates/hir-expand/src/eager.rs b/crates/hir-expand/src/eager.rs
index 64e04bc..3e0d2df 100644
--- a/crates/hir-expand/src/eager.rs
+++ b/crates/hir-expand/src/eager.rs
@@ -39,7 +39,7 @@
     ast_id: AstId<ast::MacroCall>,
     def: MacroDefId,
     call_site: SyntaxContextId,
-    resolver: &dyn Fn(ModPath) -> Option<MacroDefId>,
+    resolver: &dyn Fn(&ModPath) -> Option<MacroDefId>,
 ) -> ExpandResult<Option<MacroCallId>> {
     let expand_to = ExpandTo::from_call_site(macro_call);
 
@@ -138,7 +138,7 @@
     curr: InFile<SyntaxNode>,
     krate: CrateId,
     call_site: SyntaxContextId,
-    macro_resolver: &dyn Fn(ModPath) -> Option<MacroDefId>,
+    macro_resolver: &dyn Fn(&ModPath) -> Option<MacroDefId>,
 ) -> ExpandResult<Option<(SyntaxNode, TextSize)>> {
     let original = curr.value.clone_for_update();
 
@@ -172,7 +172,7 @@
         let def = match call.path().and_then(|path| {
             ModPath::from_src(db, path, &mut |range| span_map.span_at(range.start()).ctx)
         }) {
-            Some(path) => match macro_resolver(path.clone()) {
+            Some(path) => match macro_resolver(&path) {
                 Some(def) => def,
                 None => {
                     error =
diff --git a/crates/hir-expand/src/fixup.rs b/crates/hir-expand/src/fixup.rs
index 9ec2a83..eadb2e1 100644
--- a/crates/hir-expand/src/fixup.rs
+++ b/crates/hir-expand/src/fixup.rs
@@ -4,7 +4,10 @@
 use mbe::DocCommentDesugarMode;
 use rustc_hash::{FxHashMap, FxHashSet};
 use smallvec::SmallVec;
-use span::{ErasedFileAstId, Span, SpanAnchor, FIXUP_ERASED_FILE_AST_ID_MARKER};
+use span::{
+    ErasedFileAstId, Span, SpanAnchor, SyntaxContextId, FIXUP_ERASED_FILE_AST_ID_MARKER,
+    ROOT_ERASED_FILE_AST_ID,
+};
 use stdx::never;
 use syntax::{
     ast::{self, AstNode, HasLoopBody},
@@ -88,7 +91,6 @@
             preorder.skip_subtree();
             continue;
         }
-
         // In some other situations, we can fix things by just appending some tokens.
         match_ast! {
             match node {
@@ -273,6 +275,62 @@
                         ]);
                     }
                 },
+                ast::RecordExprField(it) => {
+                    if let Some(colon) = it.colon_token() {
+                        if it.name_ref().is_some() {
+                            append.insert(colon.into(), vec![
+                                Leaf::Ident(Ident {
+                                    text: "__ra_fixup".into(),
+                                    span: fake_span(node_range)
+                                })
+                            ]);
+                        }
+                    }
+                },
+                ast::Path(it) => {
+                    if let Some(colon) = it.coloncolon_token() {
+                        if it.segment().is_none() {
+                            append.insert(colon.into(), vec![
+                                Leaf::Ident(Ident {
+                                    text: "__ra_fixup".into(),
+                                    span: fake_span(node_range)
+                                })
+                            ]);
+                        }
+                    }
+                },
+                ast::ArgList(it) => {
+                    if it.r_paren_token().is_none() {
+                        append.insert(node.into(), vec![
+                            Leaf::Punct(Punct {
+                                span: fake_span(node_range),
+                                char: ')',
+                                spacing: Spacing::Alone
+                            })
+                        ]);
+                    }
+                },
+                ast::ArgList(it) => {
+                    if it.r_paren_token().is_none() {
+                        append.insert(node.into(), vec![
+                            Leaf::Punct(Punct {
+                                span: fake_span(node_range),
+                                char: ')',
+                                spacing: Spacing::Alone
+                            })
+                        ]);
+                    }
+                },
+                ast::ClosureExpr(it) => {
+                    if it.body().is_none() {
+                        append.insert(node.into(), vec![
+                            Leaf::Ident(Ident {
+                                text: "__ra_fixup".into(),
+                                span: fake_span(node_range)
+                            })
+                        ]);
+                    }
+                },
                 _ => (),
             }
         }
@@ -307,8 +365,13 @@
         tt.delimiter.close.anchor.ast_id == FIXUP_DUMMY_AST_ID
             || tt.delimiter.open.anchor.ast_id == FIXUP_DUMMY_AST_ID
     ) {
-        tt.delimiter.close = Span::DUMMY;
-        tt.delimiter.open = Span::DUMMY;
+        let span = |file_id| Span {
+            range: TextRange::empty(TextSize::new(0)),
+            anchor: SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID },
+            ctx: SyntaxContextId::ROOT,
+        };
+        tt.delimiter.open = span(tt.delimiter.open.anchor.file_id);
+        tt.delimiter.close = span(tt.delimiter.close.anchor.file_id);
     }
     reverse_fixups_(tt, undo_info);
 }
@@ -751,4 +814,70 @@
 "#]],
         )
     }
+
+    #[test]
+    fn fixup_path() {
+        check(
+            r#"
+fn foo() {
+    path::
+}
+"#,
+            expect![[r#"
+fn foo () {path :: __ra_fixup}
+"#]],
+        )
+    }
+
+    #[test]
+    fn fixup_record_ctor_field() {
+        check(
+            r#"
+fn foo() {
+    R { f: }
+}
+"#,
+            expect![[r#"
+fn foo () {R {f : __ra_fixup}}
+"#]],
+        )
+    }
+
+    #[test]
+    fn fixup_arg_list() {
+        check(
+            r#"
+fn foo() {
+    foo(a
+}
+"#,
+            expect![[r#"
+fn foo () { foo ( a ) }
+"#]],
+        );
+        check(
+            r#"
+fn foo() {
+    bar.foo(a
+}
+"#,
+            expect![[r#"
+fn foo () { bar . foo ( a ) }
+"#]],
+        );
+    }
+
+    #[test]
+    fn fixup_closure() {
+        check(
+            r#"
+fn foo() {
+    ||
+}
+"#,
+            expect![[r#"
+fn foo () {|| __ra_fixup}
+"#]],
+        );
+    }
 }
diff --git a/crates/hir-expand/src/hygiene.rs b/crates/hir-expand/src/hygiene.rs
index 097e760..cc02332 100644
--- a/crates/hir-expand/src/hygiene.rs
+++ b/crates/hir-expand/src/hygiene.rs
@@ -4,7 +4,7 @@
 //! Expansion, and Definition Contexts,” *Journal of Functional Programming* 22, no. 2
 //! (March 1, 2012): 181–216, <https://doi.org/10.1017/S0956796812000093>.
 //!
-//! Also see https://rustc-dev-guide.rust-lang.org/macro-expansion.html#hygiene-and-hierarchies
+//! Also see <https://rustc-dev-guide.rust-lang.org/macro-expansion.html#hygiene-and-hierarchies>
 //!
 //! # The Expansion Order Hierarchy
 //!
diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs
index 3492162..a7150cf 100644
--- a/crates/hir-expand/src/lib.rs
+++ b/crates/hir-expand/src/lib.rs
@@ -31,7 +31,7 @@
 
 use std::{fmt, hash::Hash};
 
-use base_db::{salsa::impl_intern_value_trivial, CrateId, FileId};
+use base_db::{salsa::InternValueTrivial, CrateId, FileId};
 use either::Either;
 use span::{
     Edition, ErasedFileAstId, FileAstId, FileRange, HirFileIdRepr, Span, SpanAnchor,
@@ -173,7 +173,7 @@
     pub kind: MacroCallKind,
     pub ctxt: SyntaxContextId,
 }
-impl_intern_value_trivial!(MacroCallLoc);
+impl InternValueTrivial for MacroCallLoc {}
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub struct MacroDefId {
diff --git a/crates/hir-expand/src/quote.rs b/crates/hir-expand/src/quote.rs
index a31a111..8f1e323 100644
--- a/crates/hir-expand/src/quote.rs
+++ b/crates/hir-expand/src/quote.rs
@@ -231,7 +231,7 @@
 
     const DUMMY: tt::Span = tt::Span {
         range: TextRange::empty(TextSize::new(0)),
-        anchor: SpanAnchor { file_id: FileId::BOGUS, ast_id: ROOT_ERASED_FILE_AST_ID },
+        anchor: SpanAnchor { file_id: FileId::from_raw(0xe4e4e), ast_id: ROOT_ERASED_FILE_AST_ID },
         ctx: SyntaxContextId::ROOT,
     };
 
diff --git a/crates/hir-ty/src/db.rs b/crates/hir-ty/src/db.rs
index 90bf46b..000871e 100644
--- a/crates/hir-ty/src/db.rs
+++ b/crates/hir-ty/src/db.rs
@@ -5,7 +5,7 @@
 
 use base_db::{
     impl_intern_key,
-    salsa::{self, impl_intern_value_trivial},
+    salsa::{self, InternValueTrivial},
     CrateId, Upcast,
 };
 use hir_def::{
@@ -298,7 +298,8 @@
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub struct InternedClosure(pub DefWithBodyId, pub ExprId);
-impl_intern_value_trivial!(InternedClosure);
+
+impl InternValueTrivial for InternedClosure {}
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub struct InternedCoroutineId(salsa::InternId);
@@ -306,7 +307,7 @@
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub struct InternedCoroutine(pub DefWithBodyId, pub ExprId);
-impl_intern_value_trivial!(InternedCoroutine);
+impl InternValueTrivial for InternedCoroutine {}
 
 /// This exists just for Chalk, because Chalk just has a single `FnDefId` where
 /// we have different IDs for struct and enum variant constructors.
diff --git a/crates/hir-ty/src/display.rs b/crates/hir-ty/src/display.rs
index ff78fe0..cd31845 100644
--- a/crates/hir-ty/src/display.rs
+++ b/crates/hir-ty/src/display.rs
@@ -1426,6 +1426,7 @@
                     }
                     // otherwise, if the arg is equal to the param default, hide it (unless the
                     // default is an error which can happen for the trait Self type)
+                    #[allow(unstable_name_collisions)]
                     default_parameters.get(i).is_none_or(|default_parameter| {
                         // !is_err(default_parameter.skip_binders())
                         //     &&
diff --git a/crates/hir-ty/src/lib.rs b/crates/hir-ty/src/lib.rs
index 26a839f..9d596e9 100644
--- a/crates/hir-ty/src/lib.rs
+++ b/crates/hir-ty/src/lib.rs
@@ -52,7 +52,7 @@
     hash::{BuildHasherDefault, Hash},
 };
 
-use base_db::salsa::impl_intern_value_trivial;
+use base_db::salsa::InternValueTrivial;
 use chalk_ir::{
     fold::{Shift, TypeFoldable},
     interner::HasInterner,
@@ -606,7 +606,7 @@
     AssociatedTypeImplTrait(hir_def::TypeAliasId, ImplTraitIdx),
     AsyncBlockTypeImplTrait(hir_def::DefWithBodyId, ExprId),
 }
-impl_intern_value_trivial!(ImplTraitId);
+impl InternValueTrivial for ImplTraitId {}
 
 #[derive(PartialEq, Eq, Debug, Hash)]
 pub struct ImplTraits {
diff --git a/crates/hir-ty/src/lower.rs b/crates/hir-ty/src/lower.rs
index 04ace38..2e100d5 100644
--- a/crates/hir-ty/src/lower.rs
+++ b/crates/hir-ty/src/lower.rs
@@ -11,7 +11,7 @@
 };
 
 use base_db::{
-    salsa::{impl_intern_value_trivial, Cycle},
+    salsa::{Cycle, InternValueTrivial},
     CrateId,
 };
 use chalk_ir::{
@@ -416,9 +416,9 @@
                 };
                 let ty = {
                     let macro_call = macro_call.to_node(self.db.upcast());
-                    let resolver = |path| {
+                    let resolver = |path: &_| {
                         self.resolver
-                            .resolve_path_as_macro(self.db.upcast(), &path, Some(MacroSubNs::Bang))
+                            .resolve_path_as_macro(self.db.upcast(), path, Some(MacroSubNs::Bang))
                             .map(|(it, _)| it)
                     };
                     match expander.enter_expand::<ast::Type>(self.db.upcast(), macro_call, resolver)
@@ -1965,7 +1965,9 @@
     StructId(StructId),
     EnumVariantId(EnumVariantId),
 }
-impl_intern_value_trivial!(CallableDefId);
+
+impl InternValueTrivial for CallableDefId {}
+
 impl_from!(FunctionId, StructId, EnumVariantId for CallableDefId);
 impl From<CallableDefId> for ModuleDefId {
     fn from(def: CallableDefId) -> ModuleDefId {
diff --git a/crates/hir/src/has_source.rs b/crates/hir/src/has_source.rs
index 4b3b7ff..929c8b3 100644
--- a/crates/hir/src/has_source.rs
+++ b/crates/hir/src/has_source.rs
@@ -26,7 +26,7 @@
     ///
     /// The current some implementations can return `InFile` instead of `Option<InFile>`.
     /// But we made this method `Option` to support rlib in the future
-    /// by https://github.com/rust-lang/rust-analyzer/issues/6913
+    /// by <https://github.com/rust-lang/rust-analyzer/issues/6913>
     fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>>;
 }
 
diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index 5167dbf..f6c88ed 100644
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -469,7 +469,7 @@
         let macro_call = InFile::new(file_id, actual_macro_call);
         let krate = resolver.krate();
         let macro_call_id = macro_call.as_call_id(self.db.upcast(), krate, |path| {
-            resolver.resolve_path_as_macro_def(self.db.upcast(), &path, Some(MacroSubNs::Bang))
+            resolver.resolve_path_as_macro_def(self.db.upcast(), path, Some(MacroSubNs::Bang))
         })?;
         hir_expand::db::expand_speculative(
             self.db.upcast(),
diff --git a/crates/hir/src/semantics/source_to_def.rs b/crates/hir/src/semantics/source_to_def.rs
index 247d0ab..74ed264 100644
--- a/crates/hir/src/semantics/source_to_def.rs
+++ b/crates/hir/src/semantics/source_to_def.rs
@@ -26,19 +26,19 @@
 //!
 //! The actual algorithm to resolve syntax to def is curious in two aspects:
 //!
-//!     * It is recursive
-//!     * It uses the inverse algorithm (what is the syntax for this def?)
+//! * It is recursive
+//! * It uses the inverse algorithm (what is the syntax for this def?)
 //!
 //! Specifically, the algorithm goes like this:
 //!
-//!     1. Find the syntactic container for the syntax. For example, field's
-//!        container is the struct, and structs container is a module.
-//!     2. Recursively get the def corresponding to container.
-//!     3. Ask the container def for all child defs. These child defs contain
-//!        the answer and answer's siblings.
-//!     4. For each child def, ask for it's source.
-//!     5. The child def whose source is the syntax node we've started with
-//!        is the answer.
+//! 1. Find the syntactic container for the syntax. For example, a field's
+//!    container is the struct, and a struct's container is a module.
+//! 2. Recursively get the def corresponding to container.
+//! 3. Ask the container def for all child defs. These child defs contain
+//!    the answer and answer's siblings.
+//! 4. For each child def, ask for its source.
+//! 5. The child def whose source is the syntax node we've started with
+//!    is the answer.
 //!
 //! It's interesting that both Roslyn and Kotlin contain very similar code
 //! shape.
diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs
index 7ce64b4..8e71a54 100644
--- a/crates/hir/src/source_analyzer.rs
+++ b/crates/hir/src/source_analyzer.rs
@@ -826,7 +826,7 @@
         // FIXME: This causes us to parse, generally this is the wrong approach for resolving a
         // macro call to a macro call id!
         let macro_call_id = macro_call.as_call_id(db.upcast(), krate, |path| {
-            self.resolver.resolve_path_as_macro_def(db.upcast(), &path, Some(MacroSubNs::Bang))
+            self.resolver.resolve_path_as_macro_def(db.upcast(), path, Some(MacroSubNs::Bang))
         })?;
         // why the 64?
         Some(macro_call_id.as_macro_file()).filter(|it| it.expansion_level(db.upcast()) < 64)
diff --git a/crates/hir/src/term_search.rs b/crates/hir/src/term_search.rs
index 7b70cdf..aa046b0 100644
--- a/crates/hir/src/term_search.rs
+++ b/crates/hir/src/term_search.rs
@@ -325,6 +325,7 @@
     let mut solutions: Vec<Expr> = tactics::trivial(ctx, &defs, &mut lookup).collect();
     // Use well known types tactic before iterations as it does not depend on other tactics
     solutions.extend(tactics::famous_types(ctx, &defs, &mut lookup));
+    solutions.extend(tactics::assoc_const(ctx, &defs, &mut lookup));
 
     while should_continue() {
         lookup.new_round();
diff --git a/crates/hir/src/term_search/expr.rs b/crates/hir/src/term_search/expr.rs
index 8173427..bb687f5 100644
--- a/crates/hir/src/term_search/expr.rs
+++ b/crates/hir/src/term_search/expr.rs
@@ -9,8 +9,8 @@
 use itertools::Itertools;
 
 use crate::{
-    Adt, AsAssocItem, Const, ConstParam, Field, Function, GenericDef, Local, ModuleDef,
-    SemanticsScope, Static, Struct, StructKind, Trait, Type, Variant,
+    Adt, AsAssocItem, AssocItemContainer, Const, ConstParam, Field, Function, GenericDef, Local,
+    ModuleDef, SemanticsScope, Static, Struct, StructKind, Trait, Type, Variant,
 };
 
 /// Helper function to get path to `ModuleDef`
@@ -138,7 +138,17 @@
         let db = sema_scope.db;
         let mod_item_path_str = |s, def| mod_item_path_str(s, def, cfg);
         match self {
-            Expr::Const(it) => mod_item_path_str(sema_scope, &ModuleDef::Const(*it)),
+            Expr::Const(it) => match it.as_assoc_item(db).map(|it| it.container(db)) {
+                Some(container) => {
+                    let container_name = container_name(container, sema_scope, cfg)?;
+                    let const_name = it
+                        .name(db)
+                        .map(|c| c.display(db.upcast()).to_string())
+                        .unwrap_or(String::new());
+                    Ok(format!("{container_name}::{const_name}"))
+                }
+                None => mod_item_path_str(sema_scope, &ModuleDef::Const(*it)),
+            },
             Expr::Static(it) => mod_item_path_str(sema_scope, &ModuleDef::Static(*it)),
             Expr::Local(it) => Ok(it.name(db).display(db.upcast()).to_string()),
             Expr::ConstParam(it) => Ok(it.name(db).display(db.upcast()).to_string()),
@@ -153,22 +163,7 @@
 
                 match func.as_assoc_item(db).map(|it| it.container(db)) {
                     Some(container) => {
-                        let container_name = match container {
-                            crate::AssocItemContainer::Trait(trait_) => {
-                                mod_item_path_str(sema_scope, &ModuleDef::Trait(trait_))?
-                            }
-                            crate::AssocItemContainer::Impl(imp) => {
-                                let self_ty = imp.self_ty(db);
-                                // Should it be guaranteed that `mod_item_path` always exists?
-                                match self_ty
-                                    .as_adt()
-                                    .and_then(|adt| mod_item_path(sema_scope, &adt.into(), cfg))
-                                {
-                                    Some(path) => path.display(sema_scope.db.upcast()).to_string(),
-                                    None => self_ty.display(db).to_string(),
-                                }
-                            }
-                        };
+                        let container_name = container_name(container, sema_scope, cfg)?;
                         let fn_name = func.name(db).display(db.upcast()).to_string();
                         Ok(format!("{container_name}::{fn_name}({args})"))
                     }
@@ -414,3 +409,25 @@
         matches!(self, Expr::Many(_))
     }
 }
+
+/// Helper function to find name of container
+fn container_name(
+    container: AssocItemContainer,
+    sema_scope: &SemanticsScope<'_>,
+    cfg: ImportPathConfig,
+) -> Result<String, DisplaySourceCodeError> {
+    let container_name = match container {
+        crate::AssocItemContainer::Trait(trait_) => {
+            mod_item_path_str(sema_scope, &ModuleDef::Trait(trait_), cfg)?
+        }
+        crate::AssocItemContainer::Impl(imp) => {
+            let self_ty = imp.self_ty(sema_scope.db);
+            // Should it be guaranteed that `mod_item_path` always exists?
+            match self_ty.as_adt().and_then(|adt| mod_item_path(sema_scope, &adt.into(), cfg)) {
+                Some(path) => path.display(sema_scope.db.upcast()).to_string(),
+                None => self_ty.display(sema_scope.db).to_string(),
+            }
+        }
+    };
+    Ok(container_name)
+}
diff --git a/crates/hir/src/term_search/tactics.rs b/crates/hir/src/term_search/tactics.rs
index 7ac6356..b738e6a 100644
--- a/crates/hir/src/term_search/tactics.rs
+++ b/crates/hir/src/term_search/tactics.rs
@@ -80,7 +80,10 @@
         lookup.insert(ty.clone(), std::iter::once(expr.clone()));
 
         // Don't suggest local references as they are not valid for return
-        if matches!(expr, Expr::Local(_)) && ty.contains_reference(db) {
+        if matches!(expr, Expr::Local(_))
+            && ty.contains_reference(db)
+            && ctx.config.enable_borrowcheck
+        {
             return None;
         }
 
@@ -88,6 +91,52 @@
     })
 }
 
+/// # Associated constant tactic
+///
+/// Attempts to fulfill the goal by trying constants defined as associated items.
+/// Only considers them on types that are in scope.
+///
+/// # Arguments
+/// * `ctx` - Context for the term search
+/// * `defs` - Set of items in scope at term search target location
+/// * `lookup` - Lookup table for types
+///
+/// Returns iterator that yields elements that unify with `goal`.
+///
+/// _Note that there is no point in calling this tactic in every iteration, as the output does not
+/// depend on the current state of `lookup`._
+pub(super) fn assoc_const<'a, DB: HirDatabase>(
+    ctx: &'a TermSearchCtx<'a, DB>,
+    defs: &'a FxHashSet<ScopeDef>,
+    lookup: &'a mut LookupTable,
+) -> impl Iterator<Item = Expr> + 'a {
+    let db = ctx.sema.db;
+    let module = ctx.scope.module();
+
+    defs.iter()
+        .filter_map(|def| match def {
+            ScopeDef::ModuleDef(ModuleDef::Adt(it)) => Some(it),
+            _ => None,
+        })
+        .flat_map(|it| Impl::all_for_type(db, it.ty(db)))
+        .filter(|it| !it.is_unsafe(db))
+        .flat_map(|it| it.items(db))
+        .filter(move |it| it.is_visible_from(db, module))
+        .filter_map(AssocItem::as_const)
+        .filter_map(|it| {
+            let expr = Expr::Const(it);
+            let ty = it.ty(db);
+
+            if ty.contains_unknown() {
+                return None;
+            }
+
+            lookup.insert(ty.clone(), std::iter::once(expr.clone()));
+
+            ty.could_unify_with_deeply(db, &ctx.goal).then_some(expr)
+        })
+}
+
 /// # Data constructor tactic
 ///
 /// Attempts different data constructors for enums and structs in scope
diff --git a/crates/ide-assists/src/handlers/term_search.rs b/crates/ide-assists/src/handlers/term_search.rs
index 94e0519..8a9229c 100644
--- a/crates/ide-assists/src/handlers/term_search.rs
+++ b/crates/ide-assists/src/handlers/term_search.rs
@@ -290,4 +290,34 @@
 fn f() { let a = 1; let b: Foo<i32> = Foo(a); }"#,
         )
     }
+
+    #[test]
+    fn test_struct_assoc_item() {
+        check_assist(
+            term_search,
+            r#"//- minicore: todo, unimplemented
+struct Foo;
+impl Foo { const FOO: i32 = 0; }
+fn f() { let a: i32 = todo$0!(); }"#,
+            r#"struct Foo;
+impl Foo { const FOO: i32 = 0; }
+fn f() { let a: i32 = Foo::FOO; }"#,
+        )
+    }
+
+    #[test]
+    fn test_trait_assoc_item() {
+        check_assist(
+            term_search,
+            r#"//- minicore: todo, unimplemented
+struct Foo;
+trait Bar { const BAR: i32; }
+impl Bar for Foo { const BAR: i32 = 0; }
+fn f() { let a: i32 = todo$0!(); }"#,
+            r#"struct Foo;
+trait Bar { const BAR: i32; }
+impl Bar for Foo { const BAR: i32 = 0; }
+fn f() { let a: i32 = Foo::BAR; }"#,
+        )
+    }
 }
diff --git a/crates/ide-completion/src/completions/env_vars.rs b/crates/ide-completion/src/completions/env_vars.rs
index 4005753..23d93d3 100644
--- a/crates/ide-completion/src/completions/env_vars.rs
+++ b/crates/ide-completion/src/completions/env_vars.rs
@@ -1,4 +1,5 @@
-//! Completes environment variables defined by Cargo (https://doc.rust-lang.org/cargo/reference/environment-variables.html)
+//! Completes environment variables defined by Cargo
+//! (<https://doc.rust-lang.org/cargo/reference/environment-variables.html>)
 use hir::MacroFileIdExt;
 use ide_db::syntax_helpers::node_ext::macro_call_for_string_token;
 use syntax::{
diff --git a/crates/ide-completion/src/completions/field.rs b/crates/ide-completion/src/completions/field.rs
index 53fcb7c..b795bbd 100644
--- a/crates/ide-completion/src/completions/field.rs
+++ b/crates/ide-completion/src/completions/field.rs
@@ -20,9 +20,9 @@
     } = path_ctx
     {
         let mut add_keyword = |kw, snippet| acc.add_keyword_snippet(ctx, kw, snippet);
-        add_keyword("pub(crate)", "pub(crate)");
-        add_keyword("pub(super)", "pub(super)");
-        add_keyword("pub", "pub");
+        add_keyword("pub(crate)", "pub(crate) $0");
+        add_keyword("pub(super)", "pub(super) $0");
+        add_keyword("pub", "pub $0");
     }
 }
 
@@ -32,8 +32,8 @@
 ) {
     if ctx.qualifier_ctx.vis_node.is_none() {
         let mut add_keyword = |kw, snippet| acc.add_keyword_snippet(ctx, kw, snippet);
-        add_keyword("pub(crate)", "pub(crate)");
-        add_keyword("pub(super)", "pub(super)");
-        add_keyword("pub", "pub");
+        add_keyword("pub(crate)", "pub(crate) $0");
+        add_keyword("pub(super)", "pub(super) $0");
+        add_keyword("pub", "pub $0");
     }
 }
diff --git a/crates/ide-completion/src/completions/item_list.rs b/crates/ide-completion/src/completions/item_list.rs
index ce36681..02298b1 100644
--- a/crates/ide-completion/src/completions/item_list.rs
+++ b/crates/ide-completion/src/completions/item_list.rs
@@ -79,7 +79,7 @@
     let in_trait = matches!(kind, Some(ItemListKind::Trait));
     let in_trait_impl = matches!(kind, Some(ItemListKind::TraitImpl(_)));
     let in_inherent_impl = matches!(kind, Some(ItemListKind::Impl));
-    let no_qualifiers = ctx.qualifier_ctx.vis_node.is_none();
+    let no_vis_qualifiers = ctx.qualifier_ctx.vis_node.is_none();
     let in_block = kind.is_none();
 
     if !in_trait_impl {
@@ -89,7 +89,7 @@
             }
             if in_item_list {
                 add_keyword("trait", "trait $1 {\n    $0\n}");
-                if no_qualifiers {
+                if no_vis_qualifiers {
                     add_keyword("impl", "impl $1 {\n    $0\n}");
                 }
             }
@@ -100,19 +100,20 @@
             add_keyword("enum", "enum $1 {\n    $0\n}");
             add_keyword("mod", "mod $0");
             add_keyword("static", "static $0");
+            add_keyword("async", "async $0");
             add_keyword("struct", "struct $0");
             add_keyword("trait", "trait $1 {\n    $0\n}");
             add_keyword("union", "union $1 {\n    $0\n}");
             add_keyword("use", "use $0");
-            if no_qualifiers {
+            if no_vis_qualifiers {
                 add_keyword("impl", "impl $1 {\n    $0\n}");
             }
         }
 
-        if !in_trait && !in_block && no_qualifiers {
-            add_keyword("pub(crate)", "pub(crate)");
-            add_keyword("pub(super)", "pub(super)");
-            add_keyword("pub", "pub");
+        if !in_trait && !in_block && no_vis_qualifiers {
+            add_keyword("pub(crate)", "pub(crate) $0");
+            add_keyword("pub(super)", "pub(super) $0");
+            add_keyword("pub", "pub $0");
         }
 
         if in_extern_block {
@@ -126,7 +127,7 @@
             }
 
             add_keyword("fn", "fn $1($2) {\n    $0\n}");
-            add_keyword("unsafe", "unsafe");
+            add_keyword("unsafe", "unsafe $0");
             add_keyword("const", "const $0");
         }
     }
diff --git a/crates/ide-completion/src/completions/keyword.rs b/crates/ide-completion/src/completions/keyword.rs
index 1322c05..d79b539 100644
--- a/crates/ide-completion/src/completions/keyword.rs
+++ b/crates/ide-completion/src/completions/keyword.rs
@@ -14,9 +14,9 @@
     match keyword_item {
         Item::Impl(it) => {
             if it.for_token().is_none() && it.trait_().is_none() && it.self_ty().is_some() {
-                add_keyword("for", "for");
+                add_keyword("for", "for $0");
             }
-            add_keyword("where", "where");
+            add_keyword("where", "where $0");
         }
         Item::Enum(_)
         | Item::Fn(_)
@@ -24,7 +24,7 @@
         | Item::Trait(_)
         | Item::TypeAlias(_)
         | Item::Union(_) => {
-            add_keyword("where", "where");
+            add_keyword("where", "where $0");
         }
         _ => (),
     }
diff --git a/crates/ide-completion/src/completions/pattern.rs b/crates/ide-completion/src/completions/pattern.rs
index 40b2c83..60cfb7e 100644
--- a/crates/ide-completion/src/completions/pattern.rs
+++ b/crates/ide-completion/src/completions/pattern.rs
@@ -14,25 +14,27 @@
     ctx: &CompletionContext<'_>,
     pattern_ctx: &PatternContext,
 ) {
+    let mut add_keyword = |kw, snippet| acc.add_keyword_snippet(ctx, kw, snippet);
+
     match pattern_ctx.parent_pat.as_ref() {
         Some(Pat::RangePat(_) | Pat::BoxPat(_)) => (),
         Some(Pat::RefPat(r)) => {
             if r.mut_token().is_none() {
-                acc.add_keyword(ctx, "mut");
+                add_keyword("mut", "mut $0");
             }
         }
         _ => {
             let tok = ctx.token.text_range().start();
             match (pattern_ctx.ref_token.as_ref(), pattern_ctx.mut_token.as_ref()) {
                 (None, None) => {
-                    acc.add_keyword(ctx, "ref");
-                    acc.add_keyword(ctx, "mut");
+                    add_keyword("ref", "ref $0");
+                    add_keyword("mut", "mut $0");
                 }
                 (None, Some(m)) if tok < m.text_range().start() => {
-                    acc.add_keyword(ctx, "ref");
+                    add_keyword("ref", "ref $0");
                 }
                 (Some(r), None) if tok > r.text_range().end() => {
-                    acc.add_keyword(ctx, "mut");
+                    add_keyword("mut", "mut $0");
                 }
                 _ => (),
             }
diff --git a/crates/ide-completion/src/completions/postfix.rs b/crates/ide-completion/src/completions/postfix.rs
index a230edd..5041ef8 100644
--- a/crates/ide-completion/src/completions/postfix.rs
+++ b/crates/ide-completion/src/completions/postfix.rs
@@ -668,7 +668,7 @@
         check_edit(
             "unsafe",
             r#"fn main() { let x = true else {panic!()}.$0}"#,
-            r#"fn main() { let x = true else {panic!()}.unsafe}"#,
+            r#"fn main() { let x = true else {panic!()}.unsafe $0}"#,
         );
     }
 
diff --git a/crates/ide-completion/src/completions/vis.rs b/crates/ide-completion/src/completions/vis.rs
index e0a959a..0ea5157 100644
--- a/crates/ide-completion/src/completions/vis.rs
+++ b/crates/ide-completion/src/completions/vis.rs
@@ -33,7 +33,7 @@
         Qualified::No => {
             if !has_in_token {
                 cov_mark::hit!(kw_completion_in);
-                acc.add_keyword(ctx, "in");
+                acc.add_keyword_snippet(ctx, "in", "in $0");
             }
             acc.add_nameref_keywords(ctx);
         }
diff --git a/crates/ide-completion/src/render.rs b/crates/ide-completion/src/render.rs
index 905c7ee..ebdc813 100644
--- a/crates/ide-completion/src/render.rs
+++ b/crates/ide-completion/src/render.rs
@@ -1799,6 +1799,7 @@
 "#,
             expect![[r#"
                 lc world [type+name+local]
+                ex world [type]
                 st WorldSnapshot {…} []
                 st &WorldSnapshot {…} [type]
                 st WorldSnapshot []
diff --git a/crates/ide-completion/src/tests/expression.rs b/crates/ide-completion/src/tests/expression.rs
index a653314..545c2a2 100644
--- a/crates/ide-completion/src/tests/expression.rs
+++ b/crates/ide-completion/src/tests/expression.rs
@@ -141,6 +141,7 @@
             un Union        Union
             ev TupleV(…)    TupleV(u32)
             bt u32          u32
+            kw async
             kw const
             kw crate::
             kw enum
@@ -217,6 +218,7 @@
         expect![[r#"
             fn foo()       fn()
             bt u32         u32
+            kw async
             kw const
             kw crate::
             kw enum
@@ -264,6 +266,7 @@
         expect![[r#"
             fn foo()       fn()
             bt u32         u32
+            kw async
             kw const
             kw crate::
             kw else
@@ -336,6 +339,7 @@
         expect![[r#"
             fn my()        fn()
             bt u32         u32
+            kw async
             kw break
             kw const
             kw continue
@@ -799,6 +803,7 @@
         expect![[r#"
             fn foo()       fn()
             bt u32         u32
+            kw async
             kw const
             kw crate::
             kw else
@@ -839,6 +844,7 @@
         expect![[r#"
             fn foo()       fn()
             bt u32         u32
+            kw async
             kw const
             kw crate::
             kw else
@@ -927,6 +933,7 @@
         expect![[r#"
             fn foo()       fn()
             bt u32         u32
+            kw async
             kw const
             kw crate::
             kw else
@@ -967,6 +974,7 @@
         expect![[r#"
             fn foo()       fn()
             bt u32         u32
+            kw async
             kw const
             kw crate::
             kw else
@@ -1007,6 +1015,7 @@
         expect![[r#"
             fn foo()       fn()
             bt u32         u32
+            kw async
             kw const
             kw crate::
             kw else
@@ -1059,6 +1068,7 @@
             fn main()      fn()
             md std
             bt u32         u32
+            kw async
             kw const
             kw crate::
             kw enum
@@ -1111,6 +1121,7 @@
             md std
             st UnstableButWeAreOnNightlyAnyway UnstableButWeAreOnNightlyAnyway
             bt u32                    u32
+            kw async
             kw const
             kw crate::
             kw enum
diff --git a/crates/ide-completion/src/tests/item.rs b/crates/ide-completion/src/tests/item.rs
index de3fd05..09254ae 100644
--- a/crates/ide-completion/src/tests/item.rs
+++ b/crates/ide-completion/src/tests/item.rs
@@ -6,6 +6,8 @@
 
 use crate::tests::{completion_list, BASE_ITEMS_FIXTURE};
 
+use super::check_edit;
+
 fn check(ra_fixture: &str, expect: Expect) {
     let actual = completion_list(&format!("{BASE_ITEMS_FIXTURE}{ra_fixture}"));
     expect.assert_eq(&actual)
@@ -152,3 +154,90 @@
         "#]],
     )
 }
+
+#[test]
+fn add_space_after_vis_kw() {
+    check_edit(
+        "pub(crate)",
+        r"
+$0
+",
+        r#"
+pub(crate) $0
+"#,
+    );
+
+    check_edit(
+        "pub",
+        r"
+$0
+",
+        r#"
+pub $0
+"#,
+    );
+
+    check_edit(
+        "pub(super)",
+        r"
+$0
+",
+        r#"
+pub(super) $0
+"#,
+    );
+
+    check_edit(
+        "in",
+        r"
+pub($0)
+",
+        r#"
+pub(in $0)
+"#,
+    );
+}
+
+#[test]
+fn add_space_after_unsafe_kw() {
+    check_edit(
+        "unsafe",
+        r"
+$0
+",
+        r#"
+unsafe $0
+"#,
+    );
+}
+
+#[test]
+fn add_space_after_for_where_kw() {
+    check_edit(
+        "for",
+        r#"
+struct S {}
+
+impl Copy $0
+"#,
+        r#"
+struct S {}
+
+impl Copy for $0
+"#,
+    );
+
+    check_edit(
+        "where",
+        r#"
+struct S {}
+
+impl Copy for S $0
+"#,
+        r#"
+struct S {}
+
+impl Copy for S where $0
+"#,
+    );
+}
diff --git a/crates/ide-completion/src/tests/item_list.rs b/crates/ide-completion/src/tests/item_list.rs
index 2b5b4dd..c379004 100644
--- a/crates/ide-completion/src/tests/item_list.rs
+++ b/crates/ide-completion/src/tests/item_list.rs
@@ -14,6 +14,7 @@
         r#"mod tests { $0 }"#,
         expect![[r#"
             ma makro!(…)           macro_rules! makro
+            kw async
             kw const
             kw crate::
             kw enum
@@ -47,6 +48,7 @@
         expect![[r#"
             ma makro!(…)           macro_rules! makro
             md module
+            kw async
             kw const
             kw crate::
             kw enum
@@ -79,6 +81,7 @@
         expect![[r#"
             ma makro!(…)           macro_rules! makro
             md module
+            kw async
             kw const
             kw crate::
             kw enum
@@ -132,6 +135,7 @@
     check(
         r#"pub $0"#,
         expect![[r#"
+            kw async
             kw const
             kw enum
             kw extern
@@ -356,6 +360,7 @@
         expect![[r#"
             ma makro!(…)           macro_rules! makro
             md module
+            kw async
             kw const
             kw crate::
             kw enum
diff --git a/crates/ide-completion/src/tests/pattern.rs b/crates/ide-completion/src/tests/pattern.rs
index 67cf551..8720cb5 100644
--- a/crates/ide-completion/src/tests/pattern.rs
+++ b/crates/ide-completion/src/tests/pattern.rs
@@ -819,3 +819,34 @@
         "#]],
     );
 }
+
+#[test]
+fn add_space_after_mut_ref_kw() {
+    check_edit(
+        "mut",
+        r#"
+fn foo() {
+    let $0
+}
+"#,
+        r#"
+fn foo() {
+    let mut $0
+}
+"#,
+    );
+
+    check_edit(
+        "ref",
+        r#"
+fn foo() {
+    let $0
+}
+"#,
+        r#"
+fn foo() {
+    let ref $0
+}
+"#,
+    );
+}
diff --git a/crates/ide-completion/src/tests/record.rs b/crates/ide-completion/src/tests/record.rs
index e64ec74..56162bb 100644
--- a/crates/ide-completion/src/tests/record.rs
+++ b/crates/ide-completion/src/tests/record.rs
@@ -2,6 +2,8 @@
 
 use crate::tests::completion_list;
 
+use super::check_edit;
+
 fn check(ra_fixture: &str, expect: Expect) {
     let actual = completion_list(ra_fixture);
     expect.assert_eq(&actual);
@@ -301,3 +303,48 @@
         expect![[r#""#]],
     )
 }
+
+#[test]
+fn add_space_after_vis_kw() {
+    check_edit(
+        "pub(crate)",
+        r"
+pub(crate) struct S {
+    $0
+}
+",
+        r#"
+pub(crate) struct S {
+    pub(crate) $0
+}
+"#,
+    );
+
+    check_edit(
+        "pub",
+        r"
+pub struct S {
+    $0
+}
+",
+        r#"
+pub struct S {
+    pub $0
+}
+"#,
+    );
+
+    check_edit(
+        "pub(super)",
+        r"
+pub(super) struct S {
+    $0
+}
+",
+        r#"
+pub(super) struct S {
+    pub(super) $0
+}
+"#,
+    );
+}
diff --git a/crates/ide-completion/src/tests/special.rs b/crates/ide-completion/src/tests/special.rs
index 69d8fe9..2ae7d37 100644
--- a/crates/ide-completion/src/tests/special.rs
+++ b/crates/ide-completion/src/tests/special.rs
@@ -1017,6 +1017,7 @@
             fn here_we_go()    fn()
             st Foo (alias Bar) Foo
             bt u32             u32
+            kw async
             kw const
             kw crate::
             kw enum
@@ -1066,6 +1067,7 @@
             fn here_we_go()           fn()
             st Foo (alias Bar, Qux, Baz) Foo
             bt u32                    u32
+            kw async
             kw const
             kw crate::
             kw enum
@@ -1188,6 +1190,7 @@
             fn bar()             fn()
             fn foo() (alias qux) fn()
             bt u32               u32
+            kw async
             kw const
             kw crate::
             kw enum
@@ -1443,6 +1446,7 @@
         expect![[r#"
             fn foo()       fn()
             bt u32         u32
+            kw async
             kw const
             kw crate::
             kw enum
diff --git a/crates/ide-db/src/famous_defs.rs b/crates/ide-db/src/famous_defs.rs
index e445e9f..51ac0b7 100644
--- a/crates/ide-db/src/famous_defs.rs
+++ b/crates/ide-db/src/famous_defs.rs
@@ -15,7 +15,7 @@
 /// you'd want to include minicore (see `test_utils::MiniCore`) declaration at
 /// the start of your tests:
 ///
-/// ```
+/// ```text
 /// //- minicore: iterator, ord, derive
 /// ```
 pub struct FamousDefs<'a, 'b>(pub &'a Semantics<'b, RootDatabase>, pub Crate);
diff --git a/crates/ide-db/src/prime_caches.rs b/crates/ide-db/src/prime_caches.rs
index b4486d1..62104fb 100644
--- a/crates/ide-db/src/prime_caches.rs
+++ b/crates/ide-db/src/prime_caches.rs
@@ -11,9 +11,9 @@
 use crate::{
     base_db::{
         salsa::{Database, ParallelDatabase, Snapshot},
-        Cancelled, CrateGraph, CrateId, SourceDatabase, SourceDatabaseExt,
+        Cancelled, CrateId, SourceDatabase, SourceDatabaseExt,
     },
-    FxHashSet, FxIndexMap, RootDatabase,
+    FxIndexMap, RootDatabase,
 };
 
 /// We're indexing many crates.
@@ -36,19 +36,10 @@
 
     let graph = db.crate_graph();
     let mut crates_to_prime = {
-        let crate_ids = compute_crates_to_prime(db, &graph);
-
         let mut builder = topologic_sort::TopologicalSortIter::builder();
 
-        for &crate_id in &crate_ids {
-            let crate_data = &graph[crate_id];
-            let dependencies = crate_data
-                .dependencies
-                .iter()
-                .map(|d| d.crate_id)
-                .filter(|i| crate_ids.contains(i));
-
-            builder.add(crate_id, dependencies);
+        for crate_id in graph.iter() {
+            builder.add(crate_id, graph[crate_id].dependencies.iter().map(|d| d.crate_id));
         }
 
         builder.build()
@@ -62,13 +53,20 @@
     let (work_sender, progress_receiver) = {
         let (progress_sender, progress_receiver) = crossbeam_channel::unbounded();
         let (work_sender, work_receiver) = crossbeam_channel::unbounded();
+        let graph = graph.clone();
         let prime_caches_worker = move |db: Snapshot<RootDatabase>| {
             while let Ok((crate_id, crate_name)) = work_receiver.recv() {
                 progress_sender
                     .send(ParallelPrimeCacheWorkerProgress::BeginCrate { crate_id, crate_name })?;
 
-                // This also computes the DefMap
-                db.import_map(crate_id);
+                let file_id = graph[crate_id].root_file_id;
+                let root_id = db.file_source_root(file_id);
+                if db.source_root(root_id).is_library {
+                    db.crate_def_map(crate_id);
+                } else {
+                    // This also computes the DefMap
+                    db.import_map(crate_id);
+                }
 
                 progress_sender.send(ParallelPrimeCacheWorkerProgress::EndCrate { crate_id })?;
             }
@@ -76,13 +74,13 @@
             Ok::<_, crossbeam_channel::SendError<_>>(())
         };
 
-        for _ in 0..num_worker_threads {
+        for id in 0..num_worker_threads {
             let worker = prime_caches_worker.clone();
             let db = db.snapshot();
 
             stdx::thread::Builder::new(stdx::thread::ThreadIntent::Worker)
                 .allow_leak(true)
-                .name("PrimeCaches".to_owned())
+                .name(format!("PrimeCaches#{id}"))
                 .spawn(move || Cancelled::catch(|| worker(db)))
                 .expect("failed to spawn thread");
         }
@@ -96,7 +94,7 @@
     // an index map is used to preserve ordering so we can sort the progress report in order of
     // "longest crate to index" first
     let mut crates_currently_indexing =
-        FxIndexMap::with_capacity_and_hasher(num_worker_threads as _, Default::default());
+        FxIndexMap::with_capacity_and_hasher(num_worker_threads, Default::default());
 
     while crates_done < crates_total {
         db.unwind_if_cancelled();
@@ -144,19 +142,3 @@
         cb(progress);
     }
 }
-
-fn compute_crates_to_prime(db: &RootDatabase, graph: &CrateGraph) -> FxHashSet<CrateId> {
-    // We're only interested in the workspace crates and the `ImportMap`s of their direct
-    // dependencies, though in practice the latter also compute the `DefMap`s.
-    // We don't prime transitive dependencies because they're generally not visible in
-    // the current workspace.
-    graph
-        .iter()
-        .filter(|&id| {
-            let file_id = graph[id].root_file_id;
-            let root_id = db.file_source_root(file_id);
-            !db.source_root(root_id).is_library
-        })
-        .flat_map(|id| graph[id].dependencies.iter().map(|krate| krate.crate_id))
-        .collect()
-}
diff --git a/crates/ide-diagnostics/src/handlers/unlinked_file.rs b/crates/ide-diagnostics/src/handlers/unlinked_file.rs
index 15fb42e..cbf50d1 100644
--- a/crates/ide-diagnostics/src/handlers/unlinked_file.rs
+++ b/crates/ide-diagnostics/src/handlers/unlinked_file.rs
@@ -11,7 +11,7 @@
 use paths::Utf8Component;
 use syntax::{
     ast::{self, edit::IndentLevel, HasModuleItem, HasName},
-    AstNode,
+    AstNode, TextRange,
 };
 use text_edit::TextEdit;
 
@@ -35,7 +35,27 @@
         "file not included in module tree"
     };
 
-    let range = ctx.sema.db.parse(file_id).syntax_node().text_range();
+    let mut range = ctx.sema.db.parse(file_id).syntax_node().text_range();
+    let mut unused = true;
+
+    if fixes.is_none() {
+        // If we don't have a fix, the unlinked-file diagnostic is not
+        // actionable. This generally means that rust-analyzer hasn't
+        // finished startup, or we couldn't find the Cargo.toml.
+        //
+        // Only show this diagnostic on the first three characters of
+        // the file, to avoid overwhelming the user during startup.
+        range = FileLoader::file_text(ctx.sema.db, file_id)
+            .char_indices()
+            .take(3)
+            .last()
+            .map(|(i, _)| i)
+            .map(|i| TextRange::up_to(i.try_into().unwrap()))
+            .unwrap_or(range);
+        // Prefer a diagnostic underline over graying out the text,
+        // since we're only highlighting a small region.
+        unused = false;
+    }
 
     acc.push(
         Diagnostic::new(
@@ -43,7 +63,7 @@
             message,
             FileRange { file_id, range },
         )
-        .with_unused(true)
+        .with_unused(unused)
         .with_fixes(fixes),
     );
 }
diff --git a/crates/ide/src/extend_selection.rs b/crates/ide/src/extend_selection.rs
index b706e95..e8d6dc9 100644
--- a/crates/ide/src/extend_selection.rs
+++ b/crates/ide/src/extend_selection.rs
@@ -210,7 +210,13 @@
     let start_idx = before.rfind(non_word_char)? as u32;
     let end_idx = after.find(non_word_char).unwrap_or(after.len()) as u32;
 
-    let from: TextSize = (start_idx + 1).into();
+    // FIXME: use `ceil_char_boundary` from `std::str` when it gets stable
+    // https://github.com/rust-lang/rust/issues/93743
+    fn ceil_char_boundary(text: &str, index: u32) -> u32 {
+        (index..).find(|&index| text.is_char_boundary(index as usize)).unwrap_or(text.len() as u32)
+    }
+
+    let from: TextSize = ceil_char_boundary(text, start_idx + 1).into();
     let to: TextSize = (cursor_position + end_idx).into();
 
     let range = TextRange::new(from, to);
@@ -662,4 +668,18 @@
             ],
         );
     }
+
+    #[test]
+    fn extend_selection_inside_str_with_wide_char() {
+        // should not panic
+        do_check(
+            r#"fn main() { let x = "═$0═══════"; }"#,
+            &[
+                r#""════════""#,
+                r#"let x = "════════";"#,
+                r#"{ let x = "════════"; }"#,
+                r#"fn main() { let x = "════════"; }"#,
+            ],
+        );
+    }
 }
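
Not part of the diff: a small demonstration of why the byte offset must be rounded up to a character boundary before slicing. The helper mirrors the `ceil_char_boundary` added above; the example string is arbitrary.

```rust
fn ceil_char_boundary(text: &str, index: u32) -> u32 {
    // Advance until we land on a valid UTF-8 character boundary.
    (index..).find(|&index| text.is_char_boundary(index as usize)).unwrap_or(text.len() as u32)
}

fn main() {
    let text = "═══"; // each `═` is 3 bytes long in UTF-8
    // Byte offset 1 falls inside the first character...
    assert!(!text.is_char_boundary(1));
    // ...so it is rounded up to the next boundary at offset 3.
    assert_eq!(ceil_char_boundary(text, 1), 3);
    // `&text[3..]` is a valid slice, while `&text[1..]` would panic.
    assert_eq!(&text[3..], "══");
}
```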
diff --git a/crates/ide/src/inlay_hints.rs b/crates/ide/src/inlay_hints.rs
index e9c48ab..3f10bed 100644
--- a/crates/ide/src/inlay_hints.rs
+++ b/crates/ide/src/inlay_hints.rs
@@ -455,6 +455,23 @@
 // * elided lifetimes
 // * compiler inserted reborrows
 //
+// Note: inlay hints for function argument names are heuristically omitted to reduce noise and will not appear if
+// any of the
+// link:https://github.com/rust-lang/rust-analyzer/blob/6b8b8ff4c56118ddee6c531cde06add1aad4a6af/crates/ide/src/inlay_hints/param_name.rs#L92-L99[following criteria]
+// are met:
+//
+// * the parameter name is a suffix of the function's name
+// * the argument is a qualified constructing or call expression where the qualifier is an ADT
+// * exact argument<->parameter match (ignoring leading underscore), or the parameter is a prefix/suffix
+//   of the argument with `_` splitting it off
+// * the parameter name starts with `ra_fixture`
+// * the parameter name is a
+// link:https://github.com/rust-lang/rust-analyzer/blob/6b8b8ff4c56118ddee6c531cde06add1aad4a6af/crates/ide/src/inlay_hints/param_name.rs#L200[well known name]
+// in a unary function
+// * the parameter name is a
+// link:https://github.com/rust-lang/rust-analyzer/blob/6b8b8ff4c56118ddee6c531cde06add1aad4a6af/crates/ide/src/inlay_hints/param_name.rs#L201[single character]
+// in a unary function
+//
 // image::https://user-images.githubusercontent.com/48062697/113020660-b5f98b80-917a-11eb-8d70-3be3fd558cdd.png[]
 pub(crate) fn inlay_hints(
     db: &RootDatabase,
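
Not part of the diff, but to make the omission criteria above concrete, here is a short illustrative Rust snippet (the type and function names are hypothetical) showing call sites where, under those heuristics, a parameter-name hint would be suppressed:

```rust
struct Config;

impl Config {
    fn new() -> Config {
        Config
    }
}

fn apply(config: Config) {}
fn set_window_title(title: &str) {}
fn check(ra_fixture: &str) {}

fn main() {
    let title = "hello";
    // Parameter `title` is a suffix of `set_window_title` and matches the
    // argument name exactly: no `title:` hint is shown.
    set_window_title(title);
    // The argument is a qualified call whose qualifier is the ADT `Config`:
    // no `config:` hint is shown.
    apply(Config::new());
    // The parameter name starts with `ra_fixture`: no hint is shown.
    check("fn f() {}");
}
```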
diff --git a/crates/load-cargo/src/lib.rs b/crates/load-cargo/src/lib.rs
index cb78519..de68b86 100644
--- a/crates/load-cargo/src/lib.rs
+++ b/crates/load-cargo/src/lib.rs
@@ -285,28 +285,54 @@
     /// If a `SourceRoot` doesn't have a parent and is local then it is not contained in this mapping but it can be asserted that it is a root `SourceRoot`.
     pub fn source_root_parent_map(&self) -> FxHashMap<SourceRootId, SourceRootId> {
         let roots = self.fsc.roots();
-        roots
-            .iter()
-            .enumerate()
-            .filter(|(_, (_, id))| self.local_filesets.contains(id))
-            .filter_map(|(idx, (root, root_id))| {
-                // We are interested in parents if they are also local source roots.
-                // So instead of a non-local parent we may take a local ancestor as a parent to a node.
-                //
-                // Here paths in roots are sorted lexicographically, so if a root
-                // is a parent of another root, it will be before it in the list.
-                roots[..idx].iter().find_map(|(root2, root2_id)| {
-                    if self.local_filesets.contains(root2_id)
-                        && root.starts_with(root2)
-                        && root_id != root2_id
-                    {
-                        return Some((root_id, root2_id));
+
+        let mut map = FxHashMap::default();
+
+        // See https://github.com/rust-lang/rust-analyzer/issues/17409
+        //
+        // We can view the connections between roots as a graph. The problem is
+        // that this graph may contain cycles, so when adding edges, it is necessary
+        // to check whether it will lead to a cycle.
+        //
+        // Since we ensure that each node has at most one outgoing edge (because
+        // each SourceRoot can have only one parent), we can use a disjoint-set to
+        // maintain the connectivity between nodes. If an edge’s two nodes belong
+        // to the same set, they are already connected.
+        let mut dsu = FxHashMap::default();
+        fn find_parent(dsu: &mut FxHashMap<u64, u64>, id: u64) -> u64 {
+            if let Some(&parent) = dsu.get(&id) {
+                let parent = find_parent(dsu, parent);
+                dsu.insert(id, parent);
+                parent
+            } else {
+                id
+            }
+        }
+
+        for (idx, (root, root_id)) in roots.iter().enumerate() {
+            if !self.local_filesets.contains(root_id)
+                || map.contains_key(&SourceRootId(*root_id as u32))
+            {
+                continue;
+            }
+
+            for (root2, root2_id) in roots[..idx].iter().rev() {
+                if self.local_filesets.contains(root2_id)
+                    && root_id != root2_id
+                    && root.starts_with(root2)
+                {
+                    // check if the edge will create a cycle
+                    if find_parent(&mut dsu, *root_id) != find_parent(&mut dsu, *root2_id) {
+                        map.insert(SourceRootId(*root_id as u32), SourceRootId(*root2_id as u32));
+                        dsu.insert(*root_id, *root2_id);
                     }
-                    None
-                })
-            })
-            .map(|(&child, &parent)| (SourceRootId(child as u32), SourceRootId(parent as u32)))
-            .collect()
+
+                    break;
+                }
+            }
+        }
+
+        map
     }
 }
 
@@ -592,4 +618,20 @@
 
         assert_eq!(vc, vec![(SourceRootId(1), SourceRootId(0)),])
     }
+
+    #[test]
+    fn circular_reference() {
+        let mut builder = FileSetConfigBuilder::default();
+        builder.add_file_set(vec![
+            VfsPath::new_virtual_path("/ROOT/def".to_owned()),
+            VfsPath::new_virtual_path("/ROOT/def/abc/def".to_owned()),
+        ]);
+        builder.add_file_set(vec![VfsPath::new_virtual_path("/ROOT/def/abc".to_owned())]);
+        let fsc = builder.build();
+        let src = SourceRootConfig { fsc, local_filesets: vec![0, 1] };
+        let mut vc = src.source_root_parent_map().into_iter().collect::<Vec<_>>();
+        vc.sort_by(|x, y| x.0 .0.cmp(&y.0 .0));
+
+        assert_eq!(vc, vec![(SourceRootId(1), SourceRootId(0)),])
+    }
 }
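
Not part of the diff: a minimal standalone sketch of the disjoint-set (union-find with path compression) idea used in `source_root_parent_map` above to reject parent edges that would close a cycle; the ids and edges here are made up for illustration.

```rust
use std::collections::HashMap;

/// Follow parent links to the set representative, compressing the path as we go.
fn find(dsu: &mut HashMap<u64, u64>, id: u64) -> u64 {
    match dsu.get(&id).copied() {
        Some(parent) => {
            let root = find(dsu, parent);
            dsu.insert(id, root);
            root
        }
        None => id,
    }
}

fn main() {
    let mut dsu: HashMap<u64, u64> = HashMap::new();
    // Candidate parent edges, child -> parent.
    let edges = [(1u64, 0u64), (2, 1), (0, 2)];
    for (child, parent) in edges {
        // An edge whose endpoints already share a representative would
        // close a cycle, so it is skipped.
        if find(&mut dsu, child) != find(&mut dsu, parent) {
            dsu.insert(child, parent);
            println!("linked {child} -> {parent}");
        } else {
            println!("skipped {child} -> {parent} (would form a cycle)");
        }
    }
}
```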
diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs
index 412e492..c8ff8c3 100644
--- a/crates/mbe/src/syntax_bridge.rs
+++ b/crates/mbe/src/syntax_bridge.rs
@@ -47,7 +47,7 @@
     pub const DUMMY: Span = Span {
         range: TextRange::empty(TextSize::new(0)),
         anchor: span::SpanAnchor {
-            file_id: span::FileId::BOGUS,
+            file_id: span::FileId::from_raw(0xe4e4e),
             ast_id: span::ROOT_ERASED_FILE_AST_ID,
         },
         ctx: SyntaxContextId::ROOT,
@@ -60,7 +60,7 @@
             Span {
                 range,
                 anchor: span::SpanAnchor {
-                    file_id: span::FileId::BOGUS,
+                    file_id: span::FileId::from_raw(0xe4e4e),
                     ast_id: span::ROOT_ERASED_FILE_AST_ID,
                 },
                 ctx: SyntaxContextId::ROOT,
diff --git a/crates/parser/src/grammar.rs b/crates/parser/src/grammar.rs
index b342e35..2930190 100644
--- a/crates/parser/src/grammar.rs
+++ b/crates/parser/src/grammar.rs
@@ -13,7 +13,7 @@
 //! Code in this module also contains inline tests, which start with
 //! `// test name-of-the-test` comment and look like this:
 //!
-//! ```
+//! ```text
 //! // test function_with_zero_parameters
 //! // fn foo() {}
 //! ```
diff --git a/crates/parser/src/grammar/patterns.rs b/crates/parser/src/grammar/patterns.rs
index eff6b66..882c243 100644
--- a/crates/parser/src/grammar/patterns.rs
+++ b/crates/parser/src/grammar/patterns.rs
@@ -181,7 +181,7 @@
                 //       ^
                 if matches!(
                     p.current(),
-                    T![=] | T![,] | T![:] | T![')'] | T!['}'] | T![']'] | T![if]
+                    T![=] | T![,] | T![:] | T![')'] | T!['}'] | T![']'] | T![if] | EOF
                 ) {
                     // test half_open_range_pat
                     // fn f() {
diff --git a/crates/rust-analyzer/src/tracing/hprof.rs b/crates/rust-analyzer/src/tracing/hprof.rs
index 978dcd6..2d1604e 100644
--- a/crates/rust-analyzer/src/tracing/hprof.rs
+++ b/crates/rust-analyzer/src/tracing/hprof.rs
@@ -1,8 +1,8 @@
 //! Consumer of `tracing` data, which prints a hierarchical profile.
 //!
-//! Based on https://github.com/davidbarsky/tracing-tree, but does less, while
+//! Based on <https://github.com/davidbarsky/tracing-tree>, but does less, while
 //! actually printing timings for spans by default. The code here is vendored from
-//! https://github.com/matklad/tracing-span-tree.
+//! <https://github.com/matklad/tracing-span-tree>.
 //!
 //! Usage:
 //!
diff --git a/crates/salsa/src/interned.rs b/crates/salsa/src/interned.rs
index bfa9cc0..eef8bcc 100644
--- a/crates/salsa/src/interned.rs
+++ b/crates/salsa/src/interned.rs
@@ -98,26 +98,27 @@
     }
 }
 
-/// Implement [`InternValue`] trivially, that is without actually mapping at all.
-#[macro_export]
-macro_rules! impl_intern_value_trivial {
-    ($($ty:ty),*) => {
-        $(
-            impl $crate::InternValue for $ty {
-                type Key = $ty;
-                #[inline]
-                fn into_key(&self) -> Self::Key {
-                    self.clone()
-                }
-                #[inline]
-                fn with_key<F: FnOnce(&Self::Key) -> T, T>(&self, f: F) -> T {
-                    f(self)
-                }
-            }
-        )*
-    };
+pub trait InternValueTrivial
+where
+    Self: Eq + Hash + Debug + Clone,
+{
 }
-impl_intern_value_trivial!(String);
+
+/// Implement [`InternValue`] trivially, that is without actually mapping at all.
+impl<V: InternValueTrivial> InternValue for V {
+    type Key = Self;
+    #[inline]
+    fn into_key(&self) -> Self::Key {
+        self.clone()
+    }
+    #[inline]
+    fn with_key<F: FnOnce(&Self::Key) -> T, T>(&self, f: F) -> T {
+        f(self)
+    }
+}
+
+impl InternValueTrivial for String {}
+
 #[derive(Debug)]
 struct Slot<V> {
     /// DatabaseKeyIndex for this slot.
diff --git a/crates/salsa/src/lib.rs b/crates/salsa/src/lib.rs
index 9219a55..e11e6e2 100644
--- a/crates/salsa/src/lib.rs
+++ b/crates/salsa/src/lib.rs
@@ -40,7 +40,7 @@
 
 pub use crate::durability::Durability;
 pub use crate::intern_id::InternId;
-pub use crate::interned::{InternKey, InternValue};
+pub use crate::interned::{InternKey, InternValue, InternValueTrivial};
 pub use crate::runtime::Runtime;
 pub use crate::runtime::RuntimeId;
 pub use crate::storage::Storage;
@@ -284,7 +284,7 @@
     /// series of queries in parallel and arranging the results. Using
     /// this method for that purpose ensures that those queries will
     /// see a consistent view of the database (it is also advisable
-    /// for those queries to use the [`Runtime::unwind_if_cancelled`]
+    /// for those queries to use the [`Database::unwind_if_cancelled`]
     /// method to check for cancellation).
     ///
     /// # Panics
diff --git a/crates/span/src/hygiene.rs b/crates/span/src/hygiene.rs
index e4b0a26..e8c5583 100644
--- a/crates/span/src/hygiene.rs
+++ b/crates/span/src/hygiene.rs
@@ -4,7 +4,7 @@
 //! Expansion, and Definition Contexts,” *Journal of Functional Programming* 22, no. 2
 //! (March 1, 2012): 181–216, <https://doi.org/10.1017/S0956796812000093>.
 //!
-//! Also see https://rustc-dev-guide.rust-lang.org/macro-expansion.html#hygiene-and-hierarchies
+//! Also see <https://rustc-dev-guide.rust-lang.org/macro-expansion.html#hygiene-and-hierarchies>
 //!
 //! # The Expansion Order Hierarchy
 //!
diff --git a/crates/span/src/lib.rs b/crates/span/src/lib.rs
index 74693a2..bbaf1b2 100644
--- a/crates/span/src/lib.rs
+++ b/crates/span/src/lib.rs
@@ -86,15 +86,6 @@
     }
 }
 
-impl Span {
-    #[deprecated = "dummy spans will panic if surfaced incorrectly, as such they should be replaced appropriately"]
-    pub const DUMMY: Self = Self {
-        range: TextRange::empty(TextSize::new(0)),
-        anchor: SpanAnchor { file_id: FileId::BOGUS, ast_id: ROOT_ERASED_FILE_AST_ID },
-        ctx: SyntaxContextId::ROOT,
-    };
-}
-
 impl fmt::Display for Span {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         fmt::Debug::fmt(&self.anchor.file_id.index(), f)?;
@@ -178,6 +169,8 @@
 }
 
 impl MacroCallId {
+    pub const MAX_ID: u32 = 0x7fff_ffff;
+
     pub fn as_file(self) -> HirFileId {
         MacroFileId { macro_call_id: self }.into()
     }
diff --git a/crates/stdx/src/anymap.rs b/crates/stdx/src/anymap.rs
index d47b3d1..d189b56 100644
--- a/crates/stdx/src/anymap.rs
+++ b/crates/stdx/src/anymap.rs
@@ -1,6 +1,6 @@
-//! This file is a port of only the necessary features from https://github.com/chris-morgan/anymap version 1.0.0-beta.2 for use within rust-analyzer.
+//! This file is a port of only the necessary features from <https://github.com/chris-morgan/anymap> version 1.0.0-beta.2 for use within rust-analyzer.
 //! Copyright © 2014–2022 Chris Morgan.
-//! COPYING: https://github.com/chris-morgan/anymap/blob/master/COPYING
+//! COPYING: <https://github.com/chris-morgan/anymap/blob/master/COPYING>
 //! Note that the license is changed from Blue Oak Model 1.0.0 or MIT or Apache-2.0 to MIT OR Apache-2.0
 //!
 //! This implementation provides a safe and convenient store for one value of each type.
diff --git a/crates/syntax/src/lib.rs b/crates/syntax/src/lib.rs
index cb23755..b5d816b 100644
--- a/crates/syntax/src/lib.rs
+++ b/crates/syntax/src/lib.rs
@@ -442,7 +442,7 @@
     assert!(expr_syntax.siblings_with_tokens(Direction::Next).any(|it| it.kind() == T!['}']));
     assert_eq!(
         expr_syntax.descendants_with_tokens().count(),
-        8, // 5 tokens `1`, ` `, `+`, ` `, `!`
+        8, // 5 tokens `1`, ` `, `+`, ` `, `1`
            // 2 child literal expressions: `1`, `1`
            // 1 the node itself: `1 + 1`
     );
diff --git a/crates/test-utils/src/fixture.rs b/crates/test-utils/src/fixture.rs
index 9871fd2..54c9db7 100644
--- a/crates/test-utils/src/fixture.rs
+++ b/crates/test-utils/src/fixture.rs
@@ -143,14 +143,14 @@
     /// Specifies LLVM data layout to be used.
     ///
     /// You probably don't want to manually specify this. See LLVM manual for the
-    /// syntax, if you must: https://llvm.org/docs/LangRef.html#data-layout
+    /// syntax, if you must: <https://llvm.org/docs/LangRef.html#data-layout>
     pub target_data_layout: String,
 }
 
 impl FixtureWithProjectMeta {
     /// Parses text which looks like this:
     ///
-    ///  ```not_rust
+    ///  ```text
     ///  //- some meta
     ///  line 1
     ///  line 2
@@ -159,7 +159,7 @@
     ///
     /// Fixture can also start with a proc_macros and minicore declaration (in that order):
     ///
-    /// ```
+    /// ```text
     /// //- toolchain: nightly
     /// //- proc_macros: identity
     /// //- minicore: sized
diff --git a/crates/test-utils/src/lib.rs b/crates/test-utils/src/lib.rs
index b750107..43f62d0 100644
--- a/crates/test-utils/src/lib.rs
+++ b/crates/test-utils/src/lib.rs
@@ -224,7 +224,7 @@
 /// Annotations point to the last line that actually was long enough for the
 /// range, not counting annotations themselves. So overlapping annotations are
 /// possible:
-/// ```no_run
+/// ```text
 /// // stuff        other stuff
 /// // ^^ 'st'
 /// // ^^^^^ 'stuff'
diff --git a/crates/toolchain/src/lib.rs b/crates/toolchain/src/lib.rs
index b577723..2591ed1 100644
--- a/crates/toolchain/src/lib.rs
+++ b/crates/toolchain/src/lib.rs
@@ -23,7 +23,7 @@
     ///
     /// The current implementation checks three places for an executable to use:
     /// 1) `$CARGO_HOME/bin/<executable_name>`
-    ///      where $CARGO_HOME defaults to ~/.cargo (see https://doc.rust-lang.org/cargo/guide/cargo-home.html)
+    ///      where $CARGO_HOME defaults to ~/.cargo (see <https://doc.rust-lang.org/cargo/guide/cargo-home.html>)
     ///      example: for cargo, this tries $CARGO_HOME/bin/cargo, or ~/.cargo/bin/cargo if $CARGO_HOME is unset.
     ///      It seems that this is a reasonable place to try for cargo, rustc, and rustup
     /// 2) Appropriate environment variable (erroring if this is set but not a usable executable)
@@ -45,7 +45,7 @@
     ///      example: for cargo, this tries all paths in $PATH with appended `cargo`, returning the
     ///      first that exists
     /// 3) `$CARGO_HOME/bin/<executable_name>`
-    ///      where $CARGO_HOME defaults to ~/.cargo (see https://doc.rust-lang.org/cargo/guide/cargo-home.html)
+    ///      where $CARGO_HOME defaults to ~/.cargo (see <https://doc.rust-lang.org/cargo/guide/cargo-home.html>)
     ///      example: for cargo, this tries $CARGO_HOME/bin/cargo, or ~/.cargo/bin/cargo if $CARGO_HOME is unset.
     ///      It seems that this is a reasonable place to try for cargo, rustc, and rustup
     /// 4) If all else fails, we just try to use the executable name directly
diff --git a/crates/vfs/src/lib.rs b/crates/vfs/src/lib.rs
index eab66f1..18c8699 100644
--- a/crates/vfs/src/lib.rs
+++ b/crates/vfs/src/lib.rs
@@ -69,9 +69,6 @@
 // pub struct FileId(NonMaxU32);
 
 impl FileId {
-    /// Think twice about using this outside of tests. If this ends up in a wrong place it will cause panics!
-    // FIXME: To be removed once we get rid of all `SpanData::DUMMY` usages.
-    pub const BOGUS: FileId = FileId(0xe4e4e);
     pub const MAX_FILE_ID: u32 = 0x7fff_ffff;
 
     #[inline]
diff --git a/docs/dev/README.md b/docs/dev/README.md
index 8897f02..002b8ba 100644
--- a/docs/dev/README.md
+++ b/docs/dev/README.md
@@ -145,7 +145,7 @@
   ```
   env RA_LOG=lsp_server=debug code .
   ```
-* You can log on the client side, by enabling `"rust-analyzer.trace.server": "verbose"` workspace setting.
+* You can log on the client side, by using the `rust-analyzer: Toggle LSP Logs` command or by enabling the `"rust-analyzer.trace.server": "verbose"` workspace setting.
   These logs are shown in a separate tab in the output and could be used with LSP inspector.
   Kudos to [@DJMcNab](https://github.com/DJMcNab) for setting this awesome infra up!
 
diff --git a/docs/user/manual.adoc b/docs/user/manual.adoc
index 8e6c53d..e1c1c54 100644
--- a/docs/user/manual.adoc
+++ b/docs/user/manual.adoc
@@ -21,7 +21,7 @@
 To improve this document, send a pull request: +
 https://github.com/rust-lang/rust-analyzer/blob/master/docs/user/manual.adoc[https://github.com/rust-analyzer/.../manual.adoc]
 
-The manual is written in https://asciidoc.org[AsciiDoc] and includes some extra files which are generated from the source code. Run `cargo test` and `cargo test -p xtask` to create these and then `asciidoctor manual.adoc` to create an HTML copy.
+The manual is written in https://asciidoc.org[AsciiDoc] and includes some extra files which are generated from the source code. Run `cargo test` and `cargo xtask codegen` to create these and then `asciidoctor manual.adoc` to create an HTML copy.
 ====
 
 If you have questions about using rust-analyzer, please ask them in the https://users.rust-lang.org/c/ide/14["`IDEs and Editors`"] topic of Rust users forum.
@@ -591,7 +591,7 @@
 Log messages are printed to stderr, in VS Code you can see them in the `Output > Rust Analyzer Language Server` tab of the panel.
 To see more logs, set the `RA_LOG=info` environment variable. This can be done either by setting the environment variable manually or by using `rust-analyzer.server.extraEnv`; note that both approaches require the server to be restarted.
 
-To fully capture LSP messages between the editor and the server, set `"rust-analyzer.trace.server": "verbose"` config and check
+To fully capture LSP messages between the editor and the server, run the `rust-analyzer: Toggle LSP Logs` command and check
 `Output > Rust Analyzer Language Server Trace`.
 
 The root cause for many "`nothing works`" problems is that rust-analyzer fails to understand the project structure.
diff --git a/editors/code/package.json b/editors/code/package.json
index b447106..db2a9891 100644
--- a/editors/code/package.json
+++ b/editors/code/package.json
@@ -300,6 +300,11 @@
                 "command": "rust-analyzer.toggleCheckOnSave",
                 "title": "Toggle Check on Save",
                 "category": "rust-analyzer"
+            },
+            {
+                "command": "rust-analyzer.toggleLSPLogs",
+                "title": "Toggle LSP Logs",
+                "category": "rust-analyzer"
             }
         ],
         "keybindings": [
@@ -3123,6 +3128,10 @@
                 {
                     "command": "rust-analyzer.viewMemoryLayout",
                     "when": "inRustProject"
+                },
+                {
+                    "command": "rust-analyzer.toggleLSPLogs",
+                    "when": "inRustProject"
                 }
             ],
             "editor/context": [
diff --git a/editors/code/src/ast_inspector.ts b/editors/code/src/ast_inspector.ts
index 688c53a..35b705c 100644
--- a/editors/code/src/ast_inspector.ts
+++ b/editors/code/src/ast_inspector.ts
@@ -1,8 +1,7 @@
 import * as vscode from "vscode";
 
 import type { Ctx, Disposable } from "./ctx";
-import { type RustEditor, isRustEditor } from "./util";
-import { unwrapUndefinable } from "./undefinable";
+import { type RustEditor, isRustEditor, unwrapUndefinable } from "./util";
 
 // FIXME: consider implementing this via the Tree View API?
 // https://code.visualstudio.com/api/extension-guides/tree-view
diff --git a/editors/code/src/bootstrap.ts b/editors/code/src/bootstrap.ts
index 6cf3995..5a92b28 100644
--- a/editors/code/src/bootstrap.ts
+++ b/editors/code/src/bootstrap.ts
@@ -1,9 +1,9 @@
 import * as vscode from "vscode";
 import * as os from "os";
 import type { Config } from "./config";
-import { log, isValidExecutable } from "./util";
+import { type Env, log } from "./util";
 import type { PersistentState } from "./persistent_state";
-import { exec } from "child_process";
+import { exec, spawnSync } from "child_process";
 
 export async function bootstrap(
     context: vscode.ExtensionContext,
@@ -13,7 +13,7 @@
     const path = await getServer(context, config, state);
     if (!path) {
         throw new Error(
-            "Rust Analyzer Language Server is not available. " +
+            "rust-analyzer Language Server is not available. " +
                 "Please, ensure its [proper installation](https://rust-analyzer.github.io/manual.html#installation).",
         );
     }
@@ -21,12 +21,12 @@
     log.info("Using server binary at", path);
 
     if (!isValidExecutable(path, config.serverExtraEnv)) {
-        if (config.serverPath) {
-            throw new Error(`Failed to execute ${path} --version. \`config.server.path\` or \`config.serverPath\` has been set explicitly.\
-            Consider removing this config or making a valid server binary available at that path.`);
-        } else {
-            throw new Error(`Failed to execute ${path} --version`);
-        }
+        throw new Error(
+            `Failed to execute ${path} --version.` +
+                (config.serverPath
+                    ? ` \`config.server.path\` or \`config.serverPath\` has been set explicitly.\
+            Consider removing this config or making a valid server binary available at that path.`
+                    : ""),
+        );
     }
 
     return path;
@@ -54,27 +54,12 @@
     if (bundledExists) {
         let server = bundled;
         if (await isNixOs()) {
-            await vscode.workspace.fs.createDirectory(config.globalStorageUri).then();
-            const dest = vscode.Uri.joinPath(config.globalStorageUri, `rust-analyzer${ext}`);
-            let exists = await vscode.workspace.fs.stat(dest).then(
-                () => true,
-                () => false,
-            );
-            if (exists && config.package.version !== state.serverVersion) {
-                await vscode.workspace.fs.delete(dest);
-                exists = false;
-            }
-            if (!exists) {
-                await vscode.workspace.fs.copy(bundled, dest);
-                await patchelf(dest);
-            }
-            server = dest;
+            server = await getNixOsServer(config, ext, state, bundled, server);
+            await state.updateServerVersion(config.package.version);
         }
-        await state.updateServerVersion(config.package.version);
         return server.fsPath;
     }
 
-    await state.updateServerVersion(undefined);
     await vscode.window.showErrorMessage(
         "Unfortunately we don't ship binaries for your platform yet. " +
             "You need to manually clone the rust-analyzer repository and " +
@@ -86,6 +71,45 @@
     return undefined;
 }
 
+export function isValidExecutable(path: string, extraEnv: Env): boolean {
+    log.debug("Checking availability of a binary at", path);
+
+    const res = spawnSync(path, ["--version"], {
+        encoding: "utf8",
+        env: { ...process.env, ...extraEnv },
+    });
+
+    const printOutput = res.error ? log.warn : log.info;
+    printOutput(path, "--version:", res);
+
+    return res.status === 0;
+}
+
+async function getNixOsServer(
+    config: Config,
+    ext: string,
+    state: PersistentState,
+    bundled: vscode.Uri,
+    server: vscode.Uri,
+) {
+    await vscode.workspace.fs.createDirectory(config.globalStorageUri).then();
+    const dest = vscode.Uri.joinPath(config.globalStorageUri, `rust-analyzer${ext}`);
+    let exists = await vscode.workspace.fs.stat(dest).then(
+        () => true,
+        () => false,
+    );
+    if (exists && config.package.version !== state.serverVersion) {
+        await vscode.workspace.fs.delete(dest);
+        exists = false;
+    }
+    if (!exists) {
+        await vscode.workspace.fs.copy(bundled, dest);
+        await patchelf(dest);
+    }
+    server = dest;
+    return server;
+}
+
 async function isNixOs(): Promise<boolean> {
     try {
         const contents = (
diff --git a/editors/code/src/client.ts b/editors/code/src/client.ts
index f679c88..1c2a34b 100644
--- a/editors/code/src/client.ts
+++ b/editors/code/src/client.ts
@@ -3,73 +3,13 @@
 import * as vscode from "vscode";
 import * as ra from "../src/lsp_ext";
 import * as Is from "vscode-languageclient/lib/common/utils/is";
-import { assert } from "./util";
+import { assert, unwrapUndefinable } from "./util";
 import * as diagnostics from "./diagnostics";
 import { WorkspaceEdit } from "vscode";
 import { type Config, prepareVSCodeConfig } from "./config";
-import { randomUUID } from "crypto";
 import { sep as pathSeparator } from "path";
-import { unwrapUndefinable } from "./undefinable";
 import { RaLanguageClient } from "./lang_client";
 
-export interface Env {
-    [name: string]: string;
-}
-
-// Command URIs have a form of command:command-name?arguments, where
-// arguments is a percent-encoded array of data we want to pass along to
-// the command function. For "Show References" this is a list of all file
-// URIs with locations of every reference, and it can get quite long.
-//
-// To work around it we use an intermediary linkToCommand command. When
-// we render a command link, a reference to a command with all its arguments
-// is stored in a map, and instead a linkToCommand link is rendered
-// with the key to that map.
-export const LINKED_COMMANDS = new Map<string, ra.CommandLink>();
-
-// For now the map is cleaned up periodically (I've set it to every
-// 10 minutes). In general case we'll probably need to introduce TTLs or
-// flags to denote ephemeral links (like these in hover popups) and
-// persistent links and clean those separately. But for now simply keeping
-// the last few links in the map should be good enough. Likewise, we could
-// add code to remove a target command from the map after the link is
-// clicked, but assuming most links in hover sheets won't be clicked anyway
-// this code won't change the overall memory use much.
-setInterval(
-    function cleanupOlderCommandLinks() {
-        // keys are returned in insertion order, we'll keep a few
-        // of recent keys available, and clean the rest
-        const keys = [...LINKED_COMMANDS.keys()];
-        const keysToRemove = keys.slice(0, keys.length - 10);
-        for (const key of keysToRemove) {
-            LINKED_COMMANDS.delete(key);
-        }
-    },
-    10 * 60 * 1000,
-);
-
-function renderCommand(cmd: ra.CommandLink): string {
-    const commandId = randomUUID();
-    LINKED_COMMANDS.set(commandId, cmd);
-    return `[${cmd.title}](command:rust-analyzer.linkToCommand?${encodeURIComponent(
-        JSON.stringify([commandId]),
-    )} '${cmd.tooltip}')`;
-}
-
-function renderHoverActions(actions: ra.CommandLinkGroup[]): vscode.MarkdownString {
-    const text = actions
-        .map(
-            (group) =>
-                (group.title ? group.title + " " : "") +
-                group.commands.map(renderCommand).join(" | "),
-        )
-        .join(" | ");
-
-    const result = new vscode.MarkdownString(text);
-    result.isTrusted = true;
-    return result;
-}
-
 export async function createClient(
     traceOutputChannel: vscode.OutputChannel,
     outputChannel: vscode.OutputChannel,
@@ -450,3 +390,32 @@
         candidate.command === void 0
     );
 }
+
+// Command URIs have a form of command:command-name?arguments, where
+// arguments is a percent-encoded array of data we want to pass along to
+// the command function. For "Show References" this is a list of all file
+// URIs with locations of every reference, and it can get quite long.
+// So long in fact that it will fail rendering inside an `a` tag so we need
+// to proxy around that. We store the last hover's reference command link
+// here, as only one hover can be active at a time, and we don't need to
+// keep a history of these.
+export let HOVER_REFERENCE_COMMAND: ra.CommandLink | undefined = undefined;
+
+function renderCommand(cmd: ra.CommandLink): string {
+    HOVER_REFERENCE_COMMAND = cmd;
+    return `[${cmd.title}](command:rust-analyzer.hoverRefCommandProxy '${cmd.tooltip}')`;
+}
+
+function renderHoverActions(actions: ra.CommandLinkGroup[]): vscode.MarkdownString {
+    const text = actions
+        .map(
+            (group) =>
+                (group.title ? group.title + " " : "") +
+                group.commands.map(renderCommand).join(" | "),
+        )
+        .join(" | ");
+
+    const result = new vscode.MarkdownString(text);
+    result.isTrusted = true;
+    return result;
+}
diff --git a/editors/code/src/commands.ts b/editors/code/src/commands.ts
index 5cec2c6..f0f9fab 100644
--- a/editors/code/src/commands.ts
+++ b/editors/code/src/commands.ts
@@ -9,7 +9,12 @@
     applySnippetTextEdits,
     type SnippetTextDocumentEdit,
 } from "./snippets";
-import { type RunnableQuickPick, selectRunnable, createTask, createCargoArgs } from "./run";
+import {
+    type RunnableQuickPick,
+    selectRunnable,
+    createTaskFromRunnable,
+    createCargoArgs,
+} from "./run";
 import { AstInspector } from "./ast_inspector";
 import {
     isRustDocument,
@@ -19,12 +24,12 @@
     isRustEditor,
     type RustEditor,
     type RustDocument,
+    unwrapUndefinable,
 } from "./util";
 import { startDebugSession, makeDebugConfig } from "./debug";
 import type { LanguageClient } from "vscode-languageclient/node";
-import { LINKED_COMMANDS } from "./client";
+import { HOVER_REFERENCE_COMMAND } from "./client";
 import type { DependencyId } from "./dependencies_provider";
-import { unwrapUndefinable } from "./undefinable";
 import { log } from "./util";
 
 export * from "./ast_inspector";
@@ -1096,7 +1101,7 @@
 
         item.detail = "rerun";
         prevRunnable = item;
-        const task = await createTask(item.runnable, ctx.config);
+        const task = await createTaskFromRunnable(item.runnable, ctx.config);
         return await vscode.tasks.executeTask(task);
     };
 }
@@ -1139,7 +1144,7 @@
         const editor = ctx.activeRustEditor;
         if (!editor) return;
 
-        const task = await createTask(runnable, ctx.config);
+        const task = await createTaskFromRunnable(runnable, ctx.config);
         task.group = vscode.TaskGroup.Build;
         task.presentationOptions = {
             reveal: vscode.TaskRevealKind.Always,
@@ -1191,11 +1196,10 @@
     };
 }
 
-export function linkToCommand(_: Ctx): Cmd {
-    return async (commandId: string) => {
-        const link = LINKED_COMMANDS.get(commandId);
-        if (link) {
-            const { command, arguments: args = [] } = link;
+export function hoverRefCommandProxy(_: Ctx): Cmd {
+    return async () => {
+        if (HOVER_REFERENCE_COMMAND) {
+            const { command, arguments: args = [] } = HOVER_REFERENCE_COMMAND;
             await vscode.commands.executeCommand(command, ...args);
         }
     };
@@ -1485,3 +1489,16 @@
         ctx.refreshServerStatus();
     };
 }
+
+export function toggleLSPLogs(ctx: Ctx): Cmd {
+    return async () => {
+        const config = vscode.workspace.getConfiguration("rust-analyzer");
+        const targetValue =
+            config.get<string | undefined>("trace.server") === "verbose" ? undefined : "verbose";
+
+        await config.update("trace.server", targetValue, vscode.ConfigurationTarget.Workspace);
+        if (targetValue && ctx.client && ctx.client.traceOutputChannel) {
+            ctx.client.traceOutputChannel.show();
+        }
+    };
+}
diff --git a/editors/code/src/config.ts b/editors/code/src/config.ts
index e676bc0..1931cfe 100644
--- a/editors/code/src/config.ts
+++ b/editors/code/src/config.ts
@@ -2,9 +2,7 @@
 import * as os from "os";
 import * as path from "path";
 import * as vscode from "vscode";
-import type { Env } from "./client";
-import { log } from "./util";
-import { expectNotUndefined, unwrapUndefinable } from "./undefinable";
+import { type Env, log, unwrapUndefinable, expectNotUndefined } from "./util";
 import type { JsonProject } from "./rust_project";
 
 export type RunnableEnvCfgItem = {
diff --git a/editors/code/src/debug.ts b/editors/code/src/debug.ts
index eef2f6f..58fe1df 100644
--- a/editors/code/src/debug.ts
+++ b/editors/code/src/debug.ts
@@ -6,8 +6,7 @@
 import { Cargo, getRustcId, getSysroot } from "./toolchain";
 import type { Ctx } from "./ctx";
 import { prepareEnv } from "./run";
-import { unwrapUndefinable } from "./undefinable";
-import { isCargoRunnableArgs } from "./util";
+import { isCargoRunnableArgs, unwrapUndefinable } from "./util";
 
 const debugOutput = vscode.window.createOutputChannel("Debug");
 type DebugConfigProvider = (
@@ -136,7 +135,7 @@
     const workspaceQualifier = isMultiFolderWorkspace ? `:${workspace.name}` : "";
     function simplifyPath(p: string): string {
         // see https://github.com/rust-lang/rust-analyzer/pull/5513#issuecomment-663458818 for why this is needed
-        return path.normalize(p).replace(wsFolder, "${workspaceFolder" + workspaceQualifier + "}");
+        return path.normalize(p).replace(wsFolder, `\${workspaceFolder${workspaceQualifier}}`);
     }
 
     const env = prepareEnv(runnable.label, runnableArgs, ctx.config.runnablesExtraEnv);
diff --git a/editors/code/src/dependencies_provider.ts b/editors/code/src/dependencies_provider.ts
index 863ace0..203ef5c 100644
--- a/editors/code/src/dependencies_provider.ts
+++ b/editors/code/src/dependencies_provider.ts
@@ -4,7 +4,7 @@
 import type { CtxInit } from "./ctx";
 import * as ra from "./lsp_ext";
 import type { FetchDependencyListResult } from "./lsp_ext";
-import { unwrapUndefinable } from "./undefinable";
+import { unwrapUndefinable } from "./util";
 
 export class RustDependenciesProvider
     implements vscode.TreeDataProvider<Dependency | DependencyFile>
diff --git a/editors/code/src/diagnostics.ts b/editors/code/src/diagnostics.ts
index e31a1cd..9fb2993 100644
--- a/editors/code/src/diagnostics.ts
+++ b/editors/code/src/diagnostics.ts
@@ -8,7 +8,7 @@
     window,
 } from "vscode";
 import type { Ctx } from "./ctx";
-import { unwrapUndefinable } from "./undefinable";
+import { unwrapUndefinable } from "./util";
 
 export const URI_SCHEME = "rust-analyzer-diagnostics-view";
 
diff --git a/editors/code/src/lsp_ext.ts b/editors/code/src/lsp_ext.ts
index 2462f06..699052e 100644
--- a/editors/code/src/lsp_ext.ts
+++ b/editors/code/src/lsp_ext.ts
@@ -223,8 +223,16 @@
 export type Runnable = {
     label: string;
     location?: lc.LocationLink;
-    kind: "cargo" | "shell";
-    args: CargoRunnableArgs | ShellRunnableArgs;
+} & (RunnableCargo | RunnableShell);
+
+type RunnableCargo = {
+    kind: "cargo";
+    args: CargoRunnableArgs;
+};
+
+type RunnableShell = {
+    kind: "shell";
+    args: ShellRunnableArgs;
 };
 
 export type ShellRunnableArgs = {
diff --git a/editors/code/src/main.ts b/editors/code/src/main.ts
index 0af58fd..ff67bb7 100644
--- a/editors/code/src/main.ts
+++ b/editors/code/src/main.ts
@@ -177,12 +177,13 @@
         serverVersion: { enabled: commands.serverVersion },
         viewMemoryLayout: { enabled: commands.viewMemoryLayout },
         toggleCheckOnSave: { enabled: commands.toggleCheckOnSave },
+        toggleLSPLogs: { enabled: commands.toggleLSPLogs },
         // Internal commands which are invoked by the server.
         applyActionGroup: { enabled: commands.applyActionGroup },
         applySnippetWorkspaceEdit: { enabled: commands.applySnippetWorkspaceEditCommand },
         debugSingle: { enabled: commands.debugSingle },
         gotoLocation: { enabled: commands.gotoLocation },
-        linkToCommand: { enabled: commands.linkToCommand },
+        hoverRefCommandProxy: { enabled: commands.hoverRefCommandProxy },
         resolveCodeAction: { enabled: commands.resolveCodeAction },
         runSingle: { enabled: commands.runSingle },
         showReferences: { enabled: commands.showReferences },
@@ -203,14 +204,4 @@
             )
             .then(() => {}, console.error);
     }
-
-    if (vscode.extensions.getExtension("panicbit.cargo")) {
-        vscode.window
-            .showWarningMessage(
-                `You have both the rust-analyzer (rust-lang.rust-analyzer) and Cargo (panicbit.cargo) plugins enabled, ` +
-                    'you can disable it or set {"cargo.automaticCheck": false} in settings.json to avoid invoking cargo twice',
-                "Got it",
-            )
-            .then(() => {}, console.error);
-    }
 }
diff --git a/editors/code/src/nullable.ts b/editors/code/src/nullable.ts
deleted file mode 100644
index e973e16..0000000
--- a/editors/code/src/nullable.ts
+++ /dev/null
@@ -1,19 +0,0 @@
-export type NotNull<T> = T extends null ? never : T;
-
-export type Nullable<T> = T | null;
-
-function isNotNull<T>(input: Nullable<T>): input is NotNull<T> {
-    return input !== null;
-}
-
-function expectNotNull<T>(input: Nullable<T>, msg: string): NotNull<T> {
-    if (isNotNull(input)) {
-        return input;
-    }
-
-    throw new TypeError(msg);
-}
-
-export function unwrapNullable<T>(input: Nullable<T>): NotNull<T> {
-    return expectNotNull(input, `unwrapping \`null\``);
-}
diff --git a/editors/code/src/run.ts b/editors/code/src/run.ts
index 52117a4..7a9049a 100644
--- a/editors/code/src/run.ts
+++ b/editors/code/src/run.ts
@@ -6,9 +6,8 @@
 import type { CtxInit } from "./ctx";
 import { makeDebugConfig } from "./debug";
 import type { Config, RunnableEnvCfg, RunnableEnvCfgItem } from "./config";
-import { unwrapUndefinable } from "./undefinable";
 import type { LanguageClient } from "vscode-languageclient/node";
-import type { RustEditor } from "./util";
+import { unwrapUndefinable, type RustEditor } from "./util";
 import * as toolchain from "./toolchain";
 
 const quickPickButtons = [
@@ -110,10 +109,13 @@
     return env;
 }
 
-export async function createTask(runnable: ra.Runnable, config: Config): Promise<vscode.Task> {
+export async function createTaskFromRunnable(
+    runnable: ra.Runnable,
+    config: Config,
+): Promise<vscode.Task> {
     let definition: tasks.RustTargetDefinition;
     if (runnable.kind === "cargo") {
-        const runnableArgs = runnable.args as ra.CargoRunnableArgs;
+        const runnableArgs = runnable.args;
         let args = createCargoArgs(runnableArgs);
 
         let program: string;
@@ -128,17 +130,16 @@
         }
 
         definition = {
-            type: tasks.TASK_TYPE,
+            type: tasks.CARGO_TASK_TYPE,
             command: program,
             args,
             cwd: runnableArgs.workspaceRoot || ".",
             env: prepareEnv(runnable.label, runnableArgs, config.runnablesExtraEnv),
         };
     } else {
-        const runnableArgs = runnable.args as ra.ShellRunnableArgs;
-
+        const runnableArgs = runnable.args;
         definition = {
-            type: "shell",
+            type: tasks.SHELL_TASK_TYPE,
             command: runnableArgs.program,
             args: runnableArgs.args,
             cwd: runnableArgs.cwd,
@@ -146,15 +147,14 @@
         };
     }
 
-    // eslint-disable-next-line @typescript-eslint/no-unnecessary-type-assertion
-    const target = vscode.workspace.workspaceFolders![0]; // safe, see main activate()
+    const target = vscode.workspace.workspaceFolders?.[0];
+    const exec = await tasks.targetToExecution(definition, config.cargoRunner, true);
     const task = await tasks.buildRustTask(
         target,
         definition,
         runnable.label,
         config.problemMatcher,
-        config.cargoRunner,
-        true,
+        exec,
     );
 
     task.presentationOptions.clear = true;
diff --git a/editors/code/src/snippets.ts b/editors/code/src/snippets.ts
index b3982bd..a469a9c 100644
--- a/editors/code/src/snippets.ts
+++ b/editors/code/src/snippets.ts
@@ -1,7 +1,6 @@
 import * as vscode from "vscode";
 
-import { assert } from "./util";
-import { unwrapUndefinable } from "./undefinable";
+import { assert, unwrapUndefinable } from "./util";
 
 export type SnippetTextDocumentEdit = [vscode.Uri, (vscode.TextEdit | vscode.SnippetTextEdit)[]];
 
diff --git a/editors/code/src/tasks.ts b/editors/code/src/tasks.ts
index c28a919..6f4fbf9 100644
--- a/editors/code/src/tasks.ts
+++ b/editors/code/src/tasks.ts
@@ -1,28 +1,29 @@
 import * as vscode from "vscode";
-import * as toolchain from "./toolchain";
 import type { Config } from "./config";
-import { log } from "./util";
-import { unwrapUndefinable } from "./undefinable";
+import { log, unwrapUndefinable } from "./util";
+import * as toolchain from "./toolchain";
 
 // This ends up as the `type` key in tasks.json. RLS also uses `cargo` and
 // our configuration should be compatible with it so use the same key.
-export const TASK_TYPE = "cargo";
+export const CARGO_TASK_TYPE = "cargo";
+export const SHELL_TASK_TYPE = "shell";
 
-export const TASK_SOURCE = "rust";
+export const RUST_TASK_SOURCE = "rust";
 
-export interface RustTargetDefinition extends vscode.TaskDefinition {
-    // The cargo command, such as "run" or "check".
+export type RustTargetDefinition = {
+    readonly type: typeof CARGO_TASK_TYPE | typeof SHELL_TASK_TYPE;
+} & vscode.TaskDefinition &
+    RustTarget;
+export type RustTarget = {
+    // The command to run, usually `cargo`.
     command: string;
-    // Additional arguments passed to the cargo command.
+    // Additional arguments passed to the command.
     args?: string[];
-    // The working directory to run the cargo command in.
+    // The working directory to run the command in.
     cwd?: string;
     // The shell environment.
     env?: { [key: string]: string };
-    // Override the cargo executable name, such as
-    // "my_custom_cargo_bin".
-    overrideCargo?: string;
-}
+};
 
 class RustTaskProvider implements vscode.TaskProvider {
     private readonly config: Config;
@@ -32,6 +33,10 @@
     }
 
     async provideTasks(): Promise<vscode.Task[]> {
+        if (!vscode.workspace.workspaceFolders) {
+            return [];
+        }
+
         // Detect Rust tasks. Currently we do not do any actual detection
         // of tasks (e.g. aliases in .cargo/config) and just return a fixed
         // set of tasks that always exist. These tasks cannot be removed in
@@ -46,15 +51,23 @@
             { command: "run", group: undefined },
         ];
 
+        // FIXME: The server should provide this
+        const cargo = await toolchain.cargoPath();
+
         const tasks: vscode.Task[] = [];
-        for (const workspaceTarget of vscode.workspace.workspaceFolders || []) {
+        for (const workspaceTarget of vscode.workspace.workspaceFolders) {
             for (const def of defs) {
+                const definition = {
+                    command: cargo,
+                    args: [def.command],
+                };
+                const exec = await targetToExecution(definition, this.config.cargoRunner);
                 const vscodeTask = await buildRustTask(
                     workspaceTarget,
-                    { type: TASK_TYPE, command: def.command },
+                    { ...definition, type: CARGO_TASK_TYPE },
                     `cargo ${def.command}`,
                     this.config.problemMatcher,
-                    this.config.cargoRunner,
+                    exec,
                 );
                 vscodeTask.group = def.group;
                 tasks.push(vscodeTask);
@@ -68,16 +81,24 @@
         // VSCode calls this for every cargo task in the user's tasks.json,
         // we need to inform VSCode how to execute that command by creating
         // a ShellExecution for it.
-
-        const definition = task.definition as RustTargetDefinition;
-
-        if (definition.type === TASK_TYPE) {
+        if (task.definition.type === CARGO_TASK_TYPE) {
+            const taskDefinition = task.definition as RustTargetDefinition;
+            const cargo = await toolchain.cargoPath();
+            const exec = await targetToExecution(
+                {
+                    command: cargo,
+                    args: [taskDefinition.command].concat(taskDefinition.args || []),
+                    cwd: taskDefinition.cwd,
+                    env: taskDefinition.env,
+                },
+                this.config.cargoRunner,
+            );
             return await buildRustTask(
                 task.scope,
-                definition,
+                taskDefinition,
                 task.name,
                 this.config.problemMatcher,
-                this.config.cargoRunner,
+                exec,
             );
         }
 
@@ -90,34 +111,31 @@
     definition: RustTargetDefinition,
     name: string,
     problemMatcher: string[],
-    customRunner?: string,
-    throwOnError: boolean = false,
+    exec: vscode.ProcessExecution | vscode.ShellExecution,
 ): Promise<vscode.Task> {
-    const exec = await cargoToExecution(definition, customRunner, throwOnError);
-
     return new vscode.Task(
         definition,
         // scope can sometimes be undefined. in these situations we default to the workspace taskscope as
         // recommended by the official docs: https://code.visualstudio.com/api/extension-guides/task-provider#task-provider)
         scope ?? vscode.TaskScope.Workspace,
         name,
-        TASK_SOURCE,
+        RUST_TASK_SOURCE,
         exec,
         problemMatcher,
     );
 }
 
-async function cargoToExecution(
-    definition: RustTargetDefinition,
-    customRunner: string | undefined,
-    throwOnError: boolean,
+export async function targetToExecution(
+    definition: RustTarget,
+    customRunner?: string,
+    throwOnError: boolean = false,
 ): Promise<vscode.ProcessExecution | vscode.ShellExecution> {
     if (customRunner) {
         const runnerCommand = `${customRunner}.buildShellExecution`;
 
         try {
             const runnerArgs = {
-                kind: TASK_TYPE,
+                kind: CARGO_TASK_TYPE,
                 args: definition.args,
                 cwd: definition.cwd,
                 env: definition.env,
@@ -137,35 +155,14 @@
             // fallback to default processing
         }
     }
-
-    // this is a cargo task; do Cargo-esque processing
-    if (definition.type === TASK_TYPE) {
-        // Check whether we must use a user-defined substitute for cargo.
-        // Split on spaces to allow overrides like "wrapper cargo".
-        const cargoPath = await toolchain.cargoPath();
-        const cargoCommand = definition.overrideCargo?.split(" ") ?? [cargoPath];
-
-        const args = [definition.command].concat(definition.args ?? []);
-        const fullCommand = [...cargoCommand, ...args];
-        const processName = unwrapUndefinable(fullCommand[0]);
-
-        return new vscode.ProcessExecution(processName, fullCommand.slice(1), {
-            cwd: definition.cwd,
-            env: definition.env,
-        });
-    } else {
-        // we've been handed a process definition by rust-analyzer, trust all its inputs
-        // and make a shell execution.
-        const args = unwrapUndefinable(definition.args);
-
-        return new vscode.ProcessExecution(definition.command, args, {
-            cwd: definition.cwd,
-            env: definition.env,
-        });
-    }
+    const args = unwrapUndefinable(definition.args);
+    return new vscode.ProcessExecution(definition.command, args, {
+        cwd: definition.cwd,
+        env: definition.env,
+    });
 }
 
 export function activateTaskProvider(config: Config): vscode.Disposable {
     const provider = new RustTaskProvider(config);
-    return vscode.tasks.registerTaskProvider(TASK_TYPE, provider);
+    return vscode.tasks.registerTaskProvider(CARGO_TASK_TYPE, provider);
 }
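
To illustrate the new split, here is a minimal sketch (assumed example code, not part of this diff) of the two-step flow this refactor introduces: callers first turn a `RustTarget` definition into an execution via `targetToExecution`, then pass that execution to `buildRustTask` instead of letting it resolve cargo itself. The literal "cargo" command below is a placeholder; real callers resolve it through `toolchain.cargoPath()`.

import * as vscode from "vscode";
import * as tasks from "./tasks";

// Hypothetical example: build a `cargo check` task for the first workspace folder.
async function exampleCargoCheckTask(config: {
    problemMatcher: string[];
    cargoRunner?: string;
}): Promise<vscode.Task> {
    const definition = {
        type: tasks.CARGO_TASK_TYPE,
        command: "cargo", // placeholder; callers normally use `await toolchain.cargoPath()`
        args: ["check"],
    };
    // Step 1: resolve how the command is executed (custom runner or plain process execution).
    const exec = await tasks.targetToExecution(definition, config.cargoRunner);
    // Step 2: wrap definition + execution into a vscode.Task.
    return tasks.buildRustTask(
        vscode.workspace.workspaceFolders?.[0],
        definition,
        "cargo check",
        config.problemMatcher,
        exec,
    );
}
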
diff --git a/editors/code/src/toolchain.ts b/editors/code/src/toolchain.ts
index 58e5fc7..a48d2d9 100644
--- a/editors/code/src/toolchain.ts
+++ b/editors/code/src/toolchain.ts
@@ -3,9 +3,7 @@
 import * as path from "path";
 import * as readline from "readline";
 import * as vscode from "vscode";
-import { execute, log, memoizeAsync } from "./util";
-import { unwrapNullable } from "./nullable";
-import { unwrapUndefinable } from "./undefinable";
+import { execute, log, memoizeAsync, unwrapNullable, unwrapUndefinable } from "./util";
 
 interface CompilationArtifact {
     fileName: string;
@@ -151,12 +149,13 @@
 }
 
 /** Mirrors `toolchain::cargo()` implementation */
+// FIXME: The server should provide this
 export function cargoPath(): Promise<string> {
     return getPathForExecutable("cargo");
 }
 
 /** Mirrors `toolchain::get_path_for_executable()` implementation */
-export const getPathForExecutable = memoizeAsync(
+const getPathForExecutable = memoizeAsync(
     // We apply caching to decrease file-system interactions
     async (executableName: "cargo" | "rustc" | "rustup"): Promise<string> => {
         {
diff --git a/editors/code/src/undefinable.ts b/editors/code/src/undefinable.ts
deleted file mode 100644
index 813bac5..0000000
--- a/editors/code/src/undefinable.ts
+++ /dev/null
@@ -1,19 +0,0 @@
-export type NotUndefined<T> = T extends undefined ? never : T;
-
-export type Undefinable<T> = T | undefined;
-
-function isNotUndefined<T>(input: Undefinable<T>): input is NotUndefined<T> {
-    return input !== undefined;
-}
-
-export function expectNotUndefined<T>(input: Undefinable<T>, msg: string): NotUndefined<T> {
-    if (isNotUndefined(input)) {
-        return input;
-    }
-
-    throw new TypeError(msg);
-}
-
-export function unwrapUndefinable<T>(input: Undefinable<T>): NotUndefined<T> {
-    return expectNotUndefined(input, `unwrapping \`undefined\``);
-}
diff --git a/editors/code/src/util.ts b/editors/code/src/util.ts
index 868cb2b..dd1cbe3 100644
--- a/editors/code/src/util.ts
+++ b/editors/code/src/util.ts
@@ -1,9 +1,8 @@
 import * as vscode from "vscode";
 import { strict as nativeAssert } from "assert";
-import { exec, type ExecOptions, spawnSync } from "child_process";
+import { exec, type ExecOptions } from "child_process";
 import { inspect } from "util";
 import type { CargoRunnableArgs, ShellRunnableArgs } from "./lsp_ext";
-import type { Env } from "./client";
 
 export function assert(condition: boolean, explanation: string): asserts condition {
     try {
@@ -14,6 +13,10 @@
     }
 }
 
+export type Env = {
+    [name: string]: string;
+};
+
 export const log = new (class {
     private enabled = true;
     private readonly output = vscode.window.createOutputChannel("Rust Analyzer Client");
@@ -101,20 +104,6 @@
     return false;
 }
 
-export function isValidExecutable(path: string, extraEnv: Env): boolean {
-    log.debug("Checking availability of a binary at", path);
-
-    const res = spawnSync(path, ["--version"], {
-        encoding: "utf8",
-        env: { ...process.env, ...extraEnv },
-    });
-
-    const printOutput = res.error ? log.warn : log.info;
-    printOutput(path, "--version:", res);
-
-    return res.status === 0;
-}
-
 /** Sets ['when'](https://code.visualstudio.com/docs/getstarted/keybindings#_when-clause-contexts) clause contexts */
 export function setContextValue(key: string, value: any): Thenable<void> {
     return vscode.commands.executeCommand("setContext", key, value);
@@ -206,3 +195,42 @@
         }
     }
 }
+
+export type NotNull<T> = T extends null ? never : T;
+
+export type Nullable<T> = T | null;
+
+function isNotNull<T>(input: Nullable<T>): input is NotNull<T> {
+    return input !== null;
+}
+
+function expectNotNull<T>(input: Nullable<T>, msg: string): NotNull<T> {
+    if (isNotNull(input)) {
+        return input;
+    }
+
+    throw new TypeError(msg);
+}
+
+export function unwrapNullable<T>(input: Nullable<T>): NotNull<T> {
+    return expectNotNull(input, `unwrapping \`null\``);
+}
+export type NotUndefined<T> = T extends undefined ? never : T;
+
+export type Undefinable<T> = T | undefined;
+
+function isNotUndefined<T>(input: Undefinable<T>): input is NotUndefined<T> {
+    return input !== undefined;
+}
+
+export function expectNotUndefined<T>(input: Undefinable<T>, msg: string): NotUndefined<T> {
+    if (isNotUndefined(input)) {
+        return input;
+    }
+
+    throw new TypeError(msg);
+}
+
+export function unwrapUndefinable<T>(input: Undefinable<T>): NotUndefined<T> {
+    return expectNotUndefined(input, `unwrapping \`undefined\``);
+}
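
As a small usage sketch (assumed, not part of this diff), the nullable/undefinable helpers that previously lived in `nullable.ts` and `undefinable.ts` are now imported from `./util` alongside the other utilities:

import { unwrapNullable, unwrapUndefinable } from "./util";

// Both helpers narrow the type and throw a TypeError when the value is absent.
const firstArg: string = unwrapUndefinable(["cargo", "check"][0]);
const versionMatch: RegExpMatchArray = unwrapNullable("cargo 1.78.0".match(/\d+\.\d+\.\d+/));
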
diff --git a/rust-version b/rust-version
index 207ef6c..c605feb 100644
--- a/rust-version
+++ b/rust-version
@@ -1 +1 @@
-6579ed89f0fcc26da71afdd11d30d63f6f812a0a
+3d5d7a24f76006b391d8a53d903ae64c1b4a52d2