Merge pull request #19862 from ChayimFriedman2/item-resolve-macro-hir

fix: Fix IDE resolution of item macros
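
For context, a minimal sketch of the scenario exercised by the `cfg_attr_attr_macro` completion test added below in `crates/ide-completion/src/tests/expression.rs`: an item whose attribute macro is only reachable through `cfg_attr`. The removed name-text heuristic (`might_be_inside_macro_call`) matched attribute paths by text, which its own comment called hacky and incorrect; the replacement `is_inside_macro_call` resolves the attribute semantically via `item_to_macro_call`/`has_derives`. In the sketch, `proc_macros::identity` stands in for the identity attribute macro from the test fixtures.

```rust
// Sketch adapted from the new completion test. The attribute macro is applied
// through cfg_attr, so deciding whether the token under the cursor is inside a
// macro call requires def-map resolution rather than matching the attribute
// path text.
#[cfg_attr(test, proc_macros::identity)]
fn foo() {
    // Completing at the cursor here should offer `foo`, keywords, snippets, etc.,
    // as asserted by the expect test below.
}
```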
diff --git a/Cargo.lock b/Cargo.lock
index bd8146d..01de430 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -18,15 +18,6 @@
 checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627"
 
 [[package]]
-name = "aho-corasick"
-version = "1.1.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916"
-dependencies = [
- "memchr",
-]
-
-[[package]]
 name = "allocator-api2"
 version = "0.2.21"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -124,12 +115,9 @@
 
 [[package]]
 name = "boxcar"
-version = "0.2.11"
+version = "0.2.12"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6740c6e2fc6360fa57c35214c7493826aee95993926092606f27c983b40837be"
-dependencies = [
- "loom",
-]
+checksum = "66bb12751a83493ef4b8da1120451a262554e216a247f14b48cb5e8fe7ed8bdf"
 
 [[package]]
 name = "camino"
@@ -512,19 +500,6 @@
 checksum = "7ab85b9b05e3978cc9a9cf8fea7f01b494e1a09ed3037e16ba39edc7a29eb61a"
 
 [[package]]
-name = "generator"
-version = "0.8.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cc6bd114ceda131d3b1d665eba35788690ad37f5916457286b32ab6fd3c438dd"
-dependencies = [
- "cfg-if",
- "libc",
- "log",
- "rustversion",
- "windows 0.58.0",
-]
-
-[[package]]
 name = "getrandom"
 version = "0.2.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1214,19 +1189,6 @@
 checksum = "30bde2b3dc3671ae49d8e2e9f044c7c005836e7a023ee57cffa25ab82764bb9e"
 
 [[package]]
-name = "loom"
-version = "0.7.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "419e0dc8046cb947daa77eb95ae174acfbddb7673b4151f56d1eed8e93fbfaca"
-dependencies = [
- "cfg-if",
- "generator",
- "scoped-tls",
- "tracing",
- "tracing-subscriber",
-]
-
-[[package]]
 name = "lsp-server"
 version = "0.7.8"
 dependencies = [
@@ -1266,15 +1228,6 @@
 ]
 
 [[package]]
-name = "matchers"
-version = "0.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558"
-dependencies = [
- "regex-automata 0.1.10",
-]
-
-[[package]]
 name = "mbe"
 version = "0.0.0"
 dependencies = [
@@ -1402,16 +1355,6 @@
 
 [[package]]
 name = "nu-ansi-term"
-version = "0.46.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84"
-dependencies = [
- "overload",
- "winapi",
-]
-
-[[package]]
-name = "nu-ansi-term"
 version = "0.50.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d4a28e057d01f97e61255210fcff094d74ed0466038633e95017f5beb68e4399"
@@ -1472,12 +1415,6 @@
 checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d"
 
 [[package]]
-name = "overload"
-version = "0.1.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
-
-[[package]]
 name = "parking_lot"
 version = "0.12.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1648,7 +1585,7 @@
  "indexmap",
  "nix",
  "tracing",
- "windows 0.61.1",
+ "windows",
 ]
 
 [[package]]
@@ -1865,50 +1802,6 @@
 ]
 
 [[package]]
-name = "regex"
-version = "1.11.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191"
-dependencies = [
- "aho-corasick",
- "memchr",
- "regex-automata 0.4.9",
- "regex-syntax 0.8.5",
-]
-
-[[package]]
-name = "regex-automata"
-version = "0.1.10"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132"
-dependencies = [
- "regex-syntax 0.6.29",
-]
-
-[[package]]
-name = "regex-automata"
-version = "0.4.9"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908"
-dependencies = [
- "aho-corasick",
- "memchr",
- "regex-syntax 0.8.5",
-]
-
-[[package]]
-name = "regex-syntax"
-version = "0.6.29"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1"
-
-[[package]]
-name = "regex-syntax"
-version = "0.8.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c"
-
-[[package]]
 name = "rowan"
 version = "0.15.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2027,12 +1920,6 @@
 ]
 
 [[package]]
-name = "rustversion"
-version = "1.0.20"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "eded382c5f5f786b989652c49544c4877d9f015cc22e145a5ea8ea66c2921cd2"
-
-[[package]]
 name = "ryu"
 version = "1.0.20"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2040,9 +1927,9 @@
 
 [[package]]
 name = "salsa"
-version = "0.21.1"
+version = "0.22.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6f80d5cf3c3fcab2cef898012f242a670477a1baa609267376af9cb4409026c5"
+checksum = "c8fff508e3d6ef42a32607f7538e17171a877a12015e32036f46e99d00c95781"
 dependencies = [
  "boxcar",
  "crossbeam-queue",
@@ -2063,15 +1950,15 @@
 
 [[package]]
 name = "salsa-macro-rules"
-version = "0.21.1"
+version = "0.22.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "05303d72606fbf2b9c9523cda2039bb8ecb00304027a3cd7e52b02a65c7d9185"
+checksum = "8ea72b3c06f2ce6350fe3a0eeb7aaaf842d1d8352b706973c19c4f02e298a87c"
 
 [[package]]
 name = "salsa-macros"
-version = "0.21.1"
+version = "0.22.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "eb2f0e2a30c65cb3cd63440c491dde68d9af7e1be2b77832ac7057141107db50"
+checksum = "0ce92025bc160b27814a207cb78d680973af17f863c7f4fc56cf3a535e22f378"
 dependencies = [
  "heck",
  "proc-macro2",
@@ -2556,15 +2443,9 @@
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e8189decb5ac0fa7bc8b96b7cb9b2701d60d48805aca84a238004d665fcc4008"
 dependencies = [
- "matchers",
- "nu-ansi-term 0.46.0",
- "once_cell",
- "regex",
  "sharded-slab",
- "smallvec",
  "thread_local",
  "time",
- "tracing",
  "tracing-core",
  "tracing-log",
 ]
@@ -2575,7 +2456,7 @@
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "f459ca79f1b0d5f71c54ddfde6debfc59c8b6eeb46808ae492077f739dc7b49c"
 dependencies = [
- "nu-ansi-term 0.50.1",
+ "nu-ansi-term",
  "tracing-core",
  "tracing-log",
  "tracing-subscriber",
@@ -2710,22 +2591,6 @@
 checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
 
 [[package]]
-name = "winapi"
-version = "0.3.9"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
-dependencies = [
- "winapi-i686-pc-windows-gnu",
- "winapi-x86_64-pc-windows-gnu",
-]
-
-[[package]]
-name = "winapi-i686-pc-windows-gnu"
-version = "0.4.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
-
-[[package]]
 name = "winapi-util"
 version = "0.1.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2735,29 +2600,13 @@
 ]
 
 [[package]]
-name = "winapi-x86_64-pc-windows-gnu"
-version = "0.4.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
-
-[[package]]
-name = "windows"
-version = "0.58.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dd04d41d93c4992d421894c18c8b43496aa748dd4c081bac0dc93eb0489272b6"
-dependencies = [
- "windows-core 0.58.0",
- "windows-targets 0.52.6",
-]
-
-[[package]]
 name = "windows"
 version = "0.61.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "c5ee8f3d025738cb02bad7868bbb5f8a6327501e870bf51f1b455b0a2454a419"
 dependencies = [
  "windows-collections",
- "windows-core 0.61.0",
+ "windows-core",
  "windows-future",
  "windows-link",
  "windows-numerics",
@@ -2769,20 +2618,7 @@
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "3beeceb5e5cfd9eb1d76b381630e82c4241ccd0d27f1a39ed41b2760b255c5e8"
 dependencies = [
- "windows-core 0.61.0",
-]
-
-[[package]]
-name = "windows-core"
-version = "0.58.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6ba6d44ec8c2591c134257ce647b7ea6b20335bf6379a27dac5f1641fcf59f99"
-dependencies = [
- "windows-implement 0.58.0",
- "windows-interface 0.58.0",
- "windows-result 0.2.0",
- "windows-strings 0.1.0",
- "windows-targets 0.52.6",
+ "windows-core",
 ]
 
 [[package]]
@@ -2791,11 +2627,11 @@
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "4763c1de310c86d75a878046489e2e5ba02c649d185f21c67d4cf8a56d098980"
 dependencies = [
- "windows-implement 0.60.0",
- "windows-interface 0.59.1",
+ "windows-implement",
+ "windows-interface",
  "windows-link",
- "windows-result 0.3.2",
- "windows-strings 0.4.0",
+ "windows-result",
+ "windows-strings",
 ]
 
 [[package]]
@@ -2804,23 +2640,12 @@
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7a1d6bbefcb7b60acd19828e1bc965da6fcf18a7e39490c5f8be71e54a19ba32"
 dependencies = [
- "windows-core 0.61.0",
+ "windows-core",
  "windows-link",
 ]
 
 [[package]]
 name = "windows-implement"
-version = "0.58.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2bbd5b46c938e506ecbce286b6628a02171d56153ba733b6c741fc627ec9579b"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn",
-]
-
-[[package]]
-name = "windows-implement"
 version = "0.60.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "a47fddd13af08290e67f4acabf4b459f647552718f683a7b415d290ac744a836"
@@ -2832,17 +2657,6 @@
 
 [[package]]
 name = "windows-interface"
-version = "0.58.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "053c4c462dc91d3b1504c6fe5a726dd15e216ba718e84a0e46a88fbe5ded3515"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn",
-]
-
-[[package]]
-name = "windows-interface"
 version = "0.59.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "bd9211b69f8dcdfa817bfd14bf1c97c9188afa36f4750130fcdf3f400eca9fa8"
@@ -2864,21 +2678,12 @@
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "9150af68066c4c5c07ddc0ce30421554771e528bde427614c61038bc2c92c2b1"
 dependencies = [
- "windows-core 0.61.0",
+ "windows-core",
  "windows-link",
 ]
 
 [[package]]
 name = "windows-result"
-version = "0.2.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1d1043d8214f791817bab27572aaa8af63732e11bf84aa21a45a78d6c317ae0e"
-dependencies = [
- "windows-targets 0.52.6",
-]
-
-[[package]]
-name = "windows-result"
 version = "0.3.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "c64fd11a4fd95df68efcfee5f44a294fe71b8bc6a91993e2791938abcc712252"
@@ -2888,16 +2693,6 @@
 
 [[package]]
 name = "windows-strings"
-version = "0.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4cd9b125c486025df0eabcb585e62173c6c9eddcec5d117d3b6e8c30e2ee4d10"
-dependencies = [
- "windows-result 0.2.0",
- "windows-targets 0.52.6",
-]
-
-[[package]]
-name = "windows-strings"
 version = "0.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7a2ba9642430ee452d5a7aa78d72907ebe8cfda358e8cb7918a2050581322f97"
diff --git a/Cargo.toml b/Cargo.toml
index 07731ba..8c50718 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -132,11 +132,8 @@
 pulldown-cmark = { version = "0.9.6", default-features = false }
 rayon = "1.10.0"
 rowan = "=0.15.15"
-salsa = { version = "0.21.1", default-features = false, features = [
-  "rayon",
-  "salsa_unstable",
-] }
-salsa-macros = "0.21.1"
+salsa = { version = "0.22.0", default-features = false, features = ["rayon","salsa_unstable"] }
+salsa-macros = "0.22.0"
 semver = "1.0.26"
 serde = { version = "1.0.219" }
 serde_derive = { version = "1.0.219" }
diff --git a/crates/base-db/src/input.rs b/crates/base-db/src/input.rs
index d42d7e5..7452381 100644
--- a/crates/base-db/src/input.rs
+++ b/crates/base-db/src/input.rs
@@ -395,21 +395,21 @@
 pub type CratesIdMap = FxHashMap<CrateBuilderId, Crate>;
 
 #[salsa_macros::input]
-#[derive(Debug)]
+#[derive(Debug, PartialOrd, Ord)]
 pub struct Crate {
-    #[return_ref]
+    #[returns(ref)]
     pub data: BuiltCrateData,
     /// Crate data that is not needed for analysis.
     ///
     /// This is split into a separate field to increase incrementality.
-    #[return_ref]
+    #[returns(ref)]
     pub extra_data: ExtraCrateData,
     // This is in `Arc` because it is shared for all crates in a workspace.
-    #[return_ref]
+    #[returns(ref)]
     pub workspace_data: Arc<CrateWorkspaceData>,
-    #[return_ref]
+    #[returns(ref)]
     pub cfg_options: CfgOptions,
-    #[return_ref]
+    #[returns(ref)]
     pub env: Env,
 }
 
diff --git a/crates/base-db/src/lib.rs b/crates/base-db/src/lib.rs
index a67fbf7..4d4e6ca 100644
--- a/crates/base-db/src/lib.rs
+++ b/crates/base-db/src/lib.rs
@@ -32,6 +32,7 @@
 macro_rules! impl_intern_key {
     ($id:ident, $loc:ident) => {
         #[salsa_macros::interned(no_lifetime)]
+        #[derive(PartialOrd, Ord)]
         pub struct $id {
             pub loc: $loc,
         }
@@ -165,6 +166,7 @@
 }
 
 #[salsa_macros::interned(no_lifetime, debug, constructor=from_span)]
+#[derive(PartialOrd, Ord)]
 pub struct EditionedFileId {
     pub editioned_file_id: span::EditionedFileId,
 }
@@ -356,7 +358,7 @@
 }
 
 fn parse_errors(db: &dyn RootQueryDb, file_id: EditionedFileId) -> Option<&[SyntaxError]> {
-    #[salsa_macros::tracked(return_ref)]
+    #[salsa_macros::tracked(returns(ref))]
     fn parse_errors(db: &dyn RootQueryDb, file_id: EditionedFileId) -> Option<Box<[SyntaxError]>> {
         let errors = db.parse(file_id).errors();
         match &*errors {
diff --git a/crates/hir-def/src/lang_item.rs b/crates/hir-def/src/lang_item.rs
index 5934464..4ad4477 100644
--- a/crates/hir-def/src/lang_item.rs
+++ b/crates/hir-def/src/lang_item.rs
@@ -85,7 +85,7 @@
 }
 
 /// Salsa query. This will look for lang items in a specific crate.
-#[salsa_macros::tracked(return_ref)]
+#[salsa_macros::tracked(returns(ref))]
 pub fn crate_lang_items(db: &dyn DefDatabase, krate: Crate) -> Option<Box<LangItems>> {
     let _p = tracing::info_span!("crate_lang_items_query").entered();
 
diff --git a/crates/hir-def/src/nameres.rs b/crates/hir-def/src/nameres.rs
index d4b30a1..f337f83 100644
--- a/crates/hir-def/src/nameres.rs
+++ b/crates/hir-def/src/nameres.rs
@@ -381,15 +381,15 @@
     #[salsa_macros::tracked]
     pub(crate) struct DefMapPair<'db> {
         #[tracked]
-        #[return_ref]
+        #[returns(ref)]
         pub(crate) def_map: DefMap,
-        #[return_ref]
+        #[returns(ref)]
         pub(crate) local: LocalDefMap,
     }
 }
 pub(crate) use __::DefMapPair;
 
-#[salsa_macros::tracked(return_ref)]
+#[salsa_macros::tracked(returns(ref))]
 pub(crate) fn crate_local_def_map(db: &dyn DefDatabase, crate_id: Crate) -> DefMapPair<'_> {
     let krate = crate_id.data(db);
     let _p = tracing::info_span!(
@@ -420,7 +420,7 @@
     DefMapPair::new(db, def_map, local_def_map)
 }
 
-#[salsa_macros::tracked(return_ref)]
+#[salsa_macros::tracked(returns(ref))]
 pub fn block_def_map(db: &dyn DefDatabase, block_id: BlockId) -> DefMap {
     let BlockLoc { ast_id, module } = block_id.lookup(db);
 
diff --git a/crates/hir-def/src/test_db.rs b/crates/hir-def/src/test_db.rs
index 6c995ab..e30a5b6 100644
--- a/crates/hir-def/src/test_db.rs
+++ b/crates/hir-def/src/test_db.rs
@@ -30,9 +30,18 @@
 
 impl Default for TestDB {
     fn default() -> Self {
+        let events = <Arc<Mutex<Option<Vec<salsa::Event>>>>>::default();
         let mut this = Self {
-            storage: Default::default(),
-            events: Default::default(),
+            storage: salsa::Storage::new(Some(Box::new({
+                let events = events.clone();
+                move |event| {
+                    let mut events = events.lock().unwrap();
+                    if let Some(events) = &mut *events {
+                        events.push(event);
+                    }
+                }
+            }))),
+            events,
             files: Default::default(),
             crates_map: Default::default(),
         };
@@ -45,15 +54,7 @@
 }
 
 #[salsa_macros::db]
-impl salsa::Database for TestDB {
-    fn salsa_event(&self, event: &dyn std::ops::Fn() -> salsa::Event) {
-        let mut events = self.events.lock().unwrap();
-        if let Some(events) = &mut *events {
-            let event = event();
-            events.push(event);
-        }
-    }
-}
+impl salsa::Database for TestDB {}
 
 impl fmt::Debug for TestDB {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
diff --git a/crates/hir-ty/src/diagnostics/expr.rs b/crates/hir-ty/src/diagnostics/expr.rs
index e4a23cb..8665e9d 100644
--- a/crates/hir-ty/src/diagnostics/expr.rs
+++ b/crates/hir-ty/src/diagnostics/expr.rs
@@ -25,7 +25,7 @@
 use typed_arena::Arena;
 
 use crate::{
-    Adjust, InferenceResult, Interner, Ty, TyExt, TyKind,
+    Adjust, InferenceResult, Interner, TraitEnvironment, Ty, TyExt, TyKind,
     db::HirDatabase,
     diagnostics::match_check::{
         self,
@@ -74,8 +74,9 @@
         let _p = tracing::info_span!("BodyValidationDiagnostic::collect").entered();
         let infer = db.infer(owner);
         let body = db.body(owner);
+        let env = db.trait_environment_for_body(owner);
         let mut validator =
-            ExprValidator { owner, body, infer, diagnostics: Vec::new(), validate_lints };
+            ExprValidator { owner, body, infer, diagnostics: Vec::new(), validate_lints, env };
         validator.validate_body(db);
         validator.diagnostics
     }
@@ -85,6 +86,7 @@
     owner: DefWithBodyId,
     body: Arc<Body>,
     infer: Arc<InferenceResult>,
+    env: Arc<TraitEnvironment>,
     diagnostics: Vec<BodyValidationDiagnostic>,
     validate_lints: bool,
 }
@@ -190,7 +192,7 @@
             return;
         }
 
-        let cx = MatchCheckCtx::new(self.owner.module(db), self.owner, db);
+        let cx = MatchCheckCtx::new(self.owner.module(db), self.owner, db, self.env.clone());
 
         let pattern_arena = Arena::new();
         let mut m_arms = Vec::with_capacity(arms.len());
@@ -317,7 +319,7 @@
             return;
         };
         let pattern_arena = Arena::new();
-        let cx = MatchCheckCtx::new(self.owner.module(db), self.owner, db);
+        let cx = MatchCheckCtx::new(self.owner.module(db), self.owner, db, self.env.clone());
         for stmt in &**statements {
             let &Statement::Let { pat, initializer, else_branch: None, .. } = stmt else {
                 continue;
diff --git a/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs b/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs
index 785277d..dd82a0f 100644
--- a/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs
+++ b/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs
@@ -12,9 +12,10 @@
 };
 use smallvec::{SmallVec, smallvec};
 use stdx::never;
+use triomphe::Arc;
 
 use crate::{
-    AdtId, Interner, Scalar, Ty, TyExt, TyKind,
+    AdtId, Interner, Scalar, TraitEnvironment, Ty, TyExt, TyKind,
     db::HirDatabase,
     infer::normalize,
     inhabitedness::{is_enum_variant_uninhabited_from, is_ty_uninhabited_from},
@@ -69,13 +70,19 @@
     body: DefWithBodyId,
     pub(crate) db: &'db dyn HirDatabase,
     exhaustive_patterns: bool,
+    env: Arc<TraitEnvironment>,
 }
 
 impl<'db> MatchCheckCtx<'db> {
-    pub(crate) fn new(module: ModuleId, body: DefWithBodyId, db: &'db dyn HirDatabase) -> Self {
+    pub(crate) fn new(
+        module: ModuleId,
+        body: DefWithBodyId,
+        db: &'db dyn HirDatabase,
+        env: Arc<TraitEnvironment>,
+    ) -> Self {
         let def_map = module.crate_def_map(db);
         let exhaustive_patterns = def_map.is_unstable_feature_enabled(&sym::exhaustive_patterns);
-        Self { module, body, db, exhaustive_patterns }
+        Self { module, body, db, exhaustive_patterns, env }
     }
 
     pub(crate) fn compute_match_usefulness(
@@ -100,7 +107,7 @@
     }
 
     fn is_uninhabited(&self, ty: &Ty) -> bool {
-        is_ty_uninhabited_from(self.db, ty, self.module)
+        is_ty_uninhabited_from(self.db, ty, self.module, self.env.clone())
     }
 
     /// Returns whether the given ADT is from another crate declared `#[non_exhaustive]`.
@@ -459,8 +466,13 @@
                 } else {
                     let mut variants = IndexVec::with_capacity(enum_data.variants.len());
                     for &(variant, _) in enum_data.variants.iter() {
-                        let is_uninhabited =
-                            is_enum_variant_uninhabited_from(cx.db, variant, subst, cx.module);
+                        let is_uninhabited = is_enum_variant_uninhabited_from(
+                            cx.db,
+                            variant,
+                            subst,
+                            cx.module,
+                            self.env.clone(),
+                        );
                         let visibility = if is_uninhabited {
                             VariantVisibility::Empty
                         } else {
diff --git a/crates/hir-ty/src/inhabitedness.rs b/crates/hir-ty/src/inhabitedness.rs
index e0c3279..e81a5e3 100644
--- a/crates/hir-ty/src/inhabitedness.rs
+++ b/crates/hir-ty/src/inhabitedness.rs
@@ -7,17 +7,24 @@
 };
 use hir_def::{AdtId, EnumVariantId, ModuleId, VariantId, visibility::Visibility};
 use rustc_hash::FxHashSet;
+use triomphe::Arc;
 
 use crate::{
-    Binders, Interner, Substitution, Ty, TyKind, consteval::try_const_usize, db::HirDatabase,
+    AliasTy, Binders, Interner, Substitution, TraitEnvironment, Ty, TyKind,
+    consteval::try_const_usize, db::HirDatabase,
 };
 
 // FIXME: Turn this into a query, it can be quite slow
 /// Checks whether a type is visibly uninhabited from a particular module.
-pub(crate) fn is_ty_uninhabited_from(db: &dyn HirDatabase, ty: &Ty, target_mod: ModuleId) -> bool {
+pub(crate) fn is_ty_uninhabited_from(
+    db: &dyn HirDatabase,
+    ty: &Ty,
+    target_mod: ModuleId,
+    env: Arc<TraitEnvironment>,
+) -> bool {
     let _p = tracing::info_span!("is_ty_uninhabited_from", ?ty).entered();
     let mut uninhabited_from =
-        UninhabitedFrom { target_mod, db, max_depth: 500, recursive_ty: FxHashSet::default() };
+        UninhabitedFrom { target_mod, db, max_depth: 500, recursive_ty: FxHashSet::default(), env };
     let inhabitedness = ty.visit_with(&mut uninhabited_from, DebruijnIndex::INNERMOST);
     inhabitedness == BREAK_VISIBLY_UNINHABITED
 }
@@ -29,11 +36,12 @@
     variant: EnumVariantId,
     subst: &Substitution,
     target_mod: ModuleId,
+    env: Arc<TraitEnvironment>,
 ) -> bool {
     let _p = tracing::info_span!("is_enum_variant_uninhabited_from").entered();
 
     let mut uninhabited_from =
-        UninhabitedFrom { target_mod, db, max_depth: 500, recursive_ty: FxHashSet::default() };
+        UninhabitedFrom { target_mod, db, max_depth: 500, recursive_ty: FxHashSet::default(), env };
     let inhabitedness = uninhabited_from.visit_variant(variant.into(), subst);
     inhabitedness == BREAK_VISIBLY_UNINHABITED
 }
@@ -44,6 +52,7 @@
     // guard for preventing stack overflow in non trivial non terminating types
     max_depth: usize,
     db: &'a dyn HirDatabase,
+    env: Arc<TraitEnvironment>,
 }
 
 const CONTINUE_OPAQUELY_INHABITED: ControlFlow<VisiblyUninhabited> = Continue(());
@@ -78,6 +87,12 @@
                 Some(0) | None => CONTINUE_OPAQUELY_INHABITED,
                 Some(1..) => item_ty.super_visit_with(self, outer_binder),
             },
+            TyKind::Alias(AliasTy::Projection(projection)) => {
+                // FIXME: I think this currently isn't used for monomorphized bodies, so there is no need to handle
+                // `TyKind::AssociatedType`, but perhaps in the future it will.
+                let normalized = self.db.normalize_projection(projection.clone(), self.env.clone());
+                self.visit_ty(&normalized, outer_binder)
+            }
             _ => CONTINUE_OPAQUELY_INHABITED,
         };
         self.recursive_ty.remove(ty);
diff --git a/crates/hir-ty/src/mir/eval/shim.rs b/crates/hir-ty/src/mir/eval/shim.rs
index 7cf948b..90c52ee 100644
--- a/crates/hir-ty/src/mir/eval/shim.rs
+++ b/crates/hir-ty/src/mir/eval/shim.rs
@@ -1121,7 +1121,7 @@
                 // We don't call any drop glue yet, so there is nothing here
                 Ok(())
             }
-            "transmute" => {
+            "transmute" | "transmute_unchecked" => {
                 let [arg] = args else {
                     return Err(MirEvalError::InternalError(
                         "transmute arg is not provided".into(),
diff --git a/crates/hir-ty/src/mir/lower.rs b/crates/hir-ty/src/mir/lower.rs
index 7fcc89e..e6caf2d 100644
--- a/crates/hir-ty/src/mir/lower.rs
+++ b/crates/hir-ty/src/mir/lower.rs
@@ -25,7 +25,7 @@
 use triomphe::Arc;
 
 use crate::{
-    Adjust, Adjustment, AutoBorrow, CallableDefId, TyBuilder, TyExt,
+    Adjust, Adjustment, AutoBorrow, CallableDefId, TraitEnvironment, TyBuilder, TyExt,
     consteval::ConstEvalError,
     db::{HirDatabase, InternedClosure, InternedClosureId},
     display::{DisplayTarget, HirDisplay, hir_display_with_store},
@@ -79,6 +79,7 @@
     infer: &'db InferenceResult,
     resolver: Resolver<'db>,
     drop_scopes: Vec<DropScope>,
+    env: Arc<TraitEnvironment>,
 }
 
 // FIXME: Make this smaller, its stored in database queries
@@ -288,6 +289,7 @@
             closures: vec![],
         };
         let resolver = owner.resolver(db);
+        let env = db.trait_environment_for_body(owner);
 
         MirLowerCtx {
             result: mir,
@@ -300,6 +302,7 @@
             labeled_loop_blocks: Default::default(),
             discr_temp: None,
             drop_scopes: vec![DropScope::default()],
+            env,
         }
     }
 
@@ -944,10 +947,7 @@
                     let cast_kind = if source_ty.as_reference().is_some() {
                         CastKind::PointerCoercion(PointerCast::ArrayToPointer)
                     } else {
-                        let mut table = InferenceTable::new(
-                            self.db,
-                            self.db.trait_environment_for_body(self.owner),
-                        );
+                        let mut table = InferenceTable::new(self.db, self.env.clone());
                         cast_kind(&mut table, &source_ty, &target_ty)?
                     };
 
@@ -1412,11 +1412,8 @@
     }
 
     fn lower_literal_to_operand(&mut self, ty: Ty, l: &Literal) -> Result<Operand> {
-        let size = || {
-            self.db
-                .layout_of_ty(ty.clone(), self.db.trait_environment_for_body(self.owner))
-                .map(|it| it.size.bytes_usize())
-        };
+        let size =
+            || self.db.layout_of_ty(ty.clone(), self.env.clone()).map(|it| it.size.bytes_usize());
         const USIZE_SIZE: usize = size_of::<usize>();
         let bytes: Box<[_]> = match l {
             hir_def::hir::Literal::String(b) => {
@@ -1723,7 +1720,12 @@
     }
 
     fn is_uninhabited(&self, expr_id: ExprId) -> bool {
-        is_ty_uninhabited_from(self.db, &self.infer[expr_id], self.owner.module(self.db))
+        is_ty_uninhabited_from(
+            self.db,
+            &self.infer[expr_id],
+            self.owner.module(self.db),
+            self.env.clone(),
+        )
     }
 
     /// This function push `StorageLive` statement for the binding, and applies changes to add `StorageDead` and
diff --git a/crates/hir-ty/src/test_db.rs b/crates/hir-ty/src/test_db.rs
index 8f0d17c..d049c67 100644
--- a/crates/hir-ty/src/test_db.rs
+++ b/crates/hir-ty/src/test_db.rs
@@ -27,9 +27,18 @@
 
 impl Default for TestDB {
     fn default() -> Self {
+        let events = <Arc<Mutex<Option<Vec<salsa::Event>>>>>::default();
         let mut this = Self {
-            storage: Default::default(),
-            events: Default::default(),
+            storage: salsa::Storage::new(Some(Box::new({
+                let events = events.clone();
+                move |event| {
+                    let mut events = events.lock().unwrap();
+                    if let Some(events) = &mut *events {
+                        events.push(event);
+                    }
+                }
+            }))),
+            events,
             files: Default::default(),
             crates_map: Default::default(),
         };
@@ -103,14 +112,7 @@
 }
 
 #[salsa_macros::db]
-impl salsa::Database for TestDB {
-    fn salsa_event(&self, event: &dyn std::ops::Fn() -> salsa::Event) {
-        let mut events = self.events.lock().unwrap();
-        if let Some(events) = &mut *events {
-            events.push(event());
-        }
-    }
-}
+impl salsa::Database for TestDB {}
 
 impl panic::RefUnwindSafe for TestDB {}
 
diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index e479770..247d640 100644
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -407,14 +407,14 @@
         res
     }
 
-    pub fn expand_macro_call(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
+    pub fn expand_macro_call(&self, macro_call: &ast::MacroCall) -> Option<InFile<SyntaxNode>> {
         let sa = self.analyze_no_infer(macro_call.syntax())?;
 
         let macro_call = InFile::new(sa.file_id, macro_call);
         let file_id = sa.expansion(self.db, macro_call)?;
 
         let node = self.parse_or_expand(file_id.into());
-        Some(node)
+        Some(InFile::new(file_id.into(), node))
     }
 
     pub fn check_cfg_attr(&self, attr: &ast::TokenTree) -> Option<bool> {
@@ -468,10 +468,10 @@
     }
 
     /// If `item` has an attribute macro attached to it, expands it.
-    pub fn expand_attr_macro(&self, item: &ast::Item) -> Option<ExpandResult<SyntaxNode>> {
+    pub fn expand_attr_macro(&self, item: &ast::Item) -> Option<ExpandResult<InFile<SyntaxNode>>> {
         let src = self.wrap_node_infile(item.clone());
         let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(src.as_ref()))?;
-        Some(self.expand(macro_call_id))
+        Some(self.expand(macro_call_id).map(|it| InFile::new(macro_call_id.into(), it)))
     }
 
     pub fn expand_derive_as_pseudo_attr_macro(&self, attr: &ast::Attr) -> Option<SyntaxNode> {
@@ -846,49 +846,35 @@
         res
     }
 
-    // FIXME: This isn't quite right wrt to inner attributes
-    /// Does a syntactic traversal to check whether this token might be inside a macro call
-    pub fn might_be_inside_macro_call(&self, token: &SyntaxToken) -> bool {
-        token.parent_ancestors().any(|ancestor| {
+    pub fn is_inside_macro_call(&self, token: InFile<&SyntaxToken>) -> bool {
+        // FIXME: Maybe `ancestors_with_macros()` is more suitable here? Currently
+        // this is only used on real (not macro) files so this is not a problem.
+        token.value.parent_ancestors().any(|ancestor| {
             if ast::MacroCall::can_cast(ancestor.kind()) {
                 return true;
             }
-            // Check if it is an item (only items can have macro attributes) that has a non-builtin attribute.
-            let Some(item) = ast::Item::cast(ancestor) else { return false };
-            item.attrs().any(|attr| {
-                let Some(meta) = attr.meta() else { return false };
-                let Some(path) = meta.path() else { return false };
-                if let Some(attr_name) = path.as_single_name_ref() {
-                    let attr_name = attr_name.text();
-                    let attr_name = Symbol::intern(attr_name.as_str());
-                    if attr_name == sym::derive {
-                        return true;
-                    }
-                    // We ignore `#[test]` and friends in the def map, so we cannot expand them.
-                    // FIXME: We match by text. This is both hacky and incorrect (people can, and do, create
-                    // other macros named `test`). We cannot fix that unfortunately because we use this method
-                    // for speculative expansion in completion, which we cannot analyze. Fortunately, most macros
-                    // named `test` are test-like, meaning their expansion is not terribly important for IDE.
-                    if attr_name == sym::test
-                        || attr_name == sym::bench
-                        || attr_name == sym::test_case
-                        || find_builtin_attr_idx(&attr_name).is_some()
-                    {
-                        return false;
-                    }
+
+            let Some(item) = ast::Item::cast(ancestor) else {
+                return false;
+            };
+            // Optimization to skip the semantic check.
+            if item.attrs().all(|attr| {
+                attr.simple_name()
+                    .is_some_and(|attr| find_builtin_attr_idx(&Symbol::intern(&attr)).is_some())
+            }) {
+                return false;
+            }
+            self.with_ctx(|ctx| {
+                if ctx.item_to_macro_call(token.with_value(&item)).is_some() {
+                    return true;
                 }
-                let mut segments = path.segments();
-                let mut next_segment_text = || segments.next().and_then(|it| it.name_ref());
-                // `#[core::prelude::rust_2024::test]` or `#[std::prelude::rust_2024::test]`.
-                if next_segment_text().is_some_and(|it| matches!(&*it.text(), "core" | "std"))
-                    && next_segment_text().is_some_and(|it| it.text() == "prelude")
-                    && next_segment_text().is_some()
-                    && next_segment_text()
-                        .is_some_and(|it| matches!(&*it.text(), "test" | "bench" | "test_case"))
-                {
-                    return false;
-                }
-                true
+                let adt = match item {
+                    ast::Item::Struct(it) => it.into(),
+                    ast::Item::Enum(it) => it.into(),
+                    ast::Item::Union(it) => it.into(),
+                    _ => return false,
+                };
+                ctx.has_derives(token.with_value(&adt))
             })
         })
     }
diff --git a/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs b/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs
index 2ac960e..bab2ccf 100644
--- a/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs
+++ b/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs
@@ -1,7 +1,7 @@
 use ide_db::famous_defs::FamousDefs;
 use syntax::{
     AstNode,
-    ast::{self, make},
+    ast::{self, edit_in_place::Indent, make},
     ted,
 };
 
@@ -46,6 +46,7 @@
 // ```
 pub(crate) fn generate_mut_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
     let impl_def = ctx.find_node_at_offset::<ast::Impl>()?.clone_for_update();
+    let indent = impl_def.indent_level();
 
     let trait_ = impl_def.trait_()?;
     if let ast::Type::PathType(trait_path) = trait_ {
@@ -97,8 +98,8 @@
     })?;
 
     let assoc_list = make::assoc_item_list().clone_for_update();
-    assoc_list.add_item(syntax::ast::AssocItem::Fn(fn_));
     ted::replace(impl_def.assoc_item_list()?.syntax(), assoc_list.syntax());
+    impl_def.get_or_create_assoc_item_list().add_item(syntax::ast::AssocItem::Fn(fn_));
 
     let target = impl_def.syntax().text_range();
     acc.add(
@@ -106,7 +107,7 @@
         "Generate `IndexMut` impl from this `Index` trait",
         target,
         |edit| {
-            edit.insert(target.start(), format!("$0{impl_def}\n\n"));
+            edit.insert(target.start(), format!("$0{impl_def}\n\n{indent}"));
         },
     )
 }
@@ -190,6 +191,93 @@
     }
 
     #[test]
+    fn test_generate_mut_trait_impl_non_zero_indent() {
+        check_assist(
+            generate_mut_trait_impl,
+            r#"
+//- minicore: index
+mod foo {
+    pub enum Axis { X = 0, Y = 1, Z = 2 }
+
+    impl<T> core::ops::Index$0<Axis> for [T; 3] where T: Copy {
+        type Output = T;
+
+        fn index(&self, index: Axis) -> &Self::Output {
+            let var_name = &self[index as usize];
+            var_name
+        }
+    }
+}
+"#,
+            r#"
+mod foo {
+    pub enum Axis { X = 0, Y = 1, Z = 2 }
+
+    $0impl<T> core::ops::IndexMut<Axis> for [T; 3] where T: Copy {
+        fn index_mut(&mut self, index: Axis) -> &mut Self::Output {
+            let var_name = &self[index as usize];
+            var_name
+        }
+    }
+
+    impl<T> core::ops::Index<Axis> for [T; 3] where T: Copy {
+        type Output = T;
+
+        fn index(&self, index: Axis) -> &Self::Output {
+            let var_name = &self[index as usize];
+            var_name
+        }
+    }
+}
+"#,
+        );
+
+        check_assist(
+            generate_mut_trait_impl,
+            r#"
+//- minicore: index
+mod foo {
+    mod bar {
+        pub enum Axis { X = 0, Y = 1, Z = 2 }
+
+        impl<T> core::ops::Index$0<Axis> for [T; 3] where T: Copy {
+            type Output = T;
+
+            fn index(&self, index: Axis) -> &Self::Output {
+                let var_name = &self[index as usize];
+                var_name
+            }
+        }
+    }
+}
+"#,
+            r#"
+mod foo {
+    mod bar {
+        pub enum Axis { X = 0, Y = 1, Z = 2 }
+
+        $0impl<T> core::ops::IndexMut<Axis> for [T; 3] where T: Copy {
+            fn index_mut(&mut self, index: Axis) -> &mut Self::Output {
+                let var_name = &self[index as usize];
+                var_name
+            }
+        }
+
+        impl<T> core::ops::Index<Axis> for [T; 3] where T: Copy {
+            type Output = T;
+
+            fn index(&self, index: Axis) -> &Self::Output {
+                let var_name = &self[index as usize];
+                var_name
+            }
+        }
+    }
+}
+"#,
+        );
+    }
+
+    #[test]
     fn test_generate_mut_trait_impl_not_applicable() {
         check_assist_not_applicable(
             generate_mut_trait_impl,
diff --git a/crates/ide-assists/src/handlers/generate_new.rs b/crates/ide-assists/src/handlers/generate_new.rs
index f963f48..4837f92 100644
--- a/crates/ide-assists/src/handlers/generate_new.rs
+++ b/crates/ide-assists/src/handlers/generate_new.rs
@@ -129,17 +129,23 @@
 
         // Get the mutable version of the impl to modify
         let impl_def = if let Some(impl_def) = impl_def {
+            fn_.indent(impl_def.indent_level());
             builder.make_mut(impl_def)
         } else {
             // Generate a new impl to add the method to
             let impl_def = generate_impl(&ast::Adt::Struct(strukt.clone()));
+            let indent_level = strukt.indent_level();
+            fn_.indent(indent_level);
 
             // Insert it after the adt
             let strukt = builder.make_mut(strukt.clone());
 
             ted::insert_all_raw(
                 ted::Position::after(strukt.syntax()),
-                vec![make::tokens::blank_line().into(), impl_def.syntax().clone().into()],
+                vec![
+                    make::tokens::whitespace(&format!("\n\n{indent_level}")).into(),
+                    impl_def.syntax().clone().into(),
+                ],
             );
 
             impl_def
@@ -426,6 +432,135 @@
     }
 
     #[test]
+    fn non_zero_indent() {
+        check_assist(
+            generate_new,
+            r#"
+mod foo {
+    struct $0Foo {}
+}
+"#,
+            r#"
+mod foo {
+    struct Foo {}
+
+    impl Foo {
+        fn $0new() -> Self {
+            Self {  }
+        }
+    }
+}
+"#,
+        );
+        check_assist(
+            generate_new,
+            r#"
+mod foo {
+    mod bar {
+        struct $0Foo {}
+    }
+}
+"#,
+            r#"
+mod foo {
+    mod bar {
+        struct Foo {}
+
+        impl Foo {
+            fn $0new() -> Self {
+                Self {  }
+            }
+        }
+    }
+}
+"#,
+        );
+        check_assist(
+            generate_new,
+            r#"
+mod foo {
+    struct $0Foo {}
+
+    impl Foo {
+        fn some() {}
+    }
+}
+"#,
+            r#"
+mod foo {
+    struct Foo {}
+
+    impl Foo {
+        fn $0new() -> Self {
+            Self {  }
+        }
+
+        fn some() {}
+    }
+}
+"#,
+        );
+        check_assist(
+            generate_new,
+            r#"
+mod foo {
+    mod bar {
+        struct $0Foo {}
+
+        impl Foo {
+            fn some() {}
+        }
+    }
+}
+"#,
+            r#"
+mod foo {
+    mod bar {
+        struct Foo {}
+
+        impl Foo {
+            fn $0new() -> Self {
+                Self {  }
+            }
+
+            fn some() {}
+        }
+    }
+}
+"#,
+        );
+        check_assist(
+            generate_new,
+            r#"
+mod foo {
+    mod bar {
+struct $0Foo {}
+
+        impl Foo {
+            fn some() {}
+        }
+    }
+}
+"#,
+            r#"
+mod foo {
+    mod bar {
+struct Foo {}
+
+        impl Foo {
+            fn $0new() -> Self {
+                Self {  }
+            }
+
+            fn some() {}
+        }
+    }
+}
+"#,
+        );
+    }
+
+    #[test]
     fn check_visibility_of_new_fn_based_on_struct() {
         check_assist(
             generate_new,
diff --git a/crates/ide-completion/src/context.rs b/crates/ide-completion/src/context.rs
index 3baf1f3..5287627 100644
--- a/crates/ide-completion/src/context.rs
+++ b/crates/ide-completion/src/context.rs
@@ -8,8 +8,8 @@
 
 use base_db::RootQueryDb as _;
 use hir::{
-    DisplayTarget, HasAttrs, Local, ModuleDef, ModuleSource, Name, PathResolution, ScopeDef,
-    Semantics, SemanticsScope, Symbol, Type, TypeInfo,
+    DisplayTarget, HasAttrs, InFile, Local, ModuleDef, ModuleSource, Name, PathResolution,
+    ScopeDef, Semantics, SemanticsScope, Symbol, Type, TypeInfo,
 };
 use ide_db::{
     FilePosition, FxHashMap, FxHashSet, RootDatabase, famous_defs::FamousDefs,
@@ -751,7 +751,7 @@
             original_offset,
         } = expand_and_analyze(
             &sema,
-            original_file.syntax().clone(),
+            InFile::new(editioned_file_id.into(), original_file.syntax().clone()),
             file_with_fake_ident.syntax().clone(),
             offset,
             &original_token,
diff --git a/crates/ide-completion/src/context/analysis.rs b/crates/ide-completion/src/context/analysis.rs
index 284876f..7a2230b 100644
--- a/crates/ide-completion/src/context/analysis.rs
+++ b/crates/ide-completion/src/context/analysis.rs
@@ -1,7 +1,7 @@
 //! Module responsible for analyzing the code surrounding the cursor for completion.
 use std::iter;
 
-use hir::{ExpandResult, Semantics, Type, TypeInfo, Variant};
+use hir::{ExpandResult, InFile, Semantics, Type, TypeInfo, Variant};
 use ide_db::{RootDatabase, active_parameter::ActiveParameter};
 use itertools::Either;
 use syntax::{
@@ -50,7 +50,7 @@
 
 pub(super) fn expand_and_analyze(
     sema: &Semantics<'_, RootDatabase>,
-    original_file: SyntaxNode,
+    original_file: InFile<SyntaxNode>,
     speculative_file: SyntaxNode,
     offset: TextSize,
     original_token: &SyntaxToken,
@@ -72,7 +72,7 @@
         relative_offset,
     )
     .unwrap_or(ExpansionResult {
-        original_file,
+        original_file: original_file.value,
         speculative_file,
         original_offset: offset,
         speculative_offset: fake_ident_token.text_range().start(),
@@ -125,7 +125,7 @@
 /// the best we can do.
 fn expand_maybe_stop(
     sema: &Semantics<'_, RootDatabase>,
-    original_file: SyntaxNode,
+    original_file: InFile<SyntaxNode>,
     speculative_file: SyntaxNode,
     original_offset: TextSize,
     fake_ident_token: SyntaxToken,
@@ -142,17 +142,16 @@
         return result;
     }
 
-    // This needs to come after the recursive call, because our "inside macro" detection is subtly wrong
-    // with regard to attribute macros named `test` that are not std's test. So hopefully we will expand
-    // them successfully above and be able to analyze.
-    // Left biased since there may already be an identifier token there, and we appended to it.
-    if !sema.might_be_inside_macro_call(&fake_ident_token)
-        && token_at_offset_ignore_whitespace(&original_file, original_offset + relative_offset)
-            .is_some_and(|original_token| !sema.might_be_inside_macro_call(&original_token))
+    // We can't check whether the fake expansion is inside macro call, because that requires semantic info.
+    // But hopefully checking just the real one should be enough.
+    if token_at_offset_ignore_whitespace(&original_file.value, original_offset + relative_offset)
+        .is_some_and(|original_token| {
+            !sema.is_inside_macro_call(original_file.with_value(&original_token))
+        })
     {
         // Recursion base case.
         Some(ExpansionResult {
-            original_file,
+            original_file: original_file.value,
             speculative_file,
             original_offset,
             speculative_offset: fake_ident_token.text_range().start(),
@@ -166,7 +165,7 @@
 
 fn expand(
     sema: &Semantics<'_, RootDatabase>,
-    original_file: SyntaxNode,
+    original_file: InFile<SyntaxNode>,
     speculative_file: SyntaxNode,
     original_offset: TextSize,
     fake_ident_token: SyntaxToken,
@@ -176,7 +175,7 @@
 
     let parent_item =
         |item: &ast::Item| item.syntax().ancestors().skip(1).find_map(ast::Item::cast);
-    let original_node = token_at_offset_ignore_whitespace(&original_file, original_offset)
+    let original_node = token_at_offset_ignore_whitespace(&original_file.value, original_offset)
         .and_then(|token| token.parent_ancestors().find_map(ast::Item::cast));
     let ancestor_items = iter::successors(
         Option::zip(
@@ -249,7 +248,7 @@
     }
 
     // No attributes have been expanded, so look for macro_call! token trees or derive token trees
-    let orig_tt = ancestors_at_offset(&original_file, original_offset)
+    let orig_tt = ancestors_at_offset(&original_file.value, original_offset)
         .map_while(Either::<ast::TokenTree, ast::Meta>::cast)
         .last()?;
     let spec_tt = ancestors_at_offset(&speculative_file, fake_ident_token.text_range().start())
@@ -292,7 +291,7 @@
                 fake_mapped_tokens.into_iter().min_by_key(|(_, rank)| *rank)
             {
                 return Some(ExpansionResult {
-                    original_file,
+                    original_file: original_file.value,
                     speculative_file,
                     original_offset,
                     speculative_offset: fake_ident_token.text_range().start(),
@@ -349,7 +348,7 @@
                                 }
                                 let result = expand_maybe_stop(
                                     sema,
-                                    actual_expansion.clone(),
+                                    InFile::new(file.into(), actual_expansion.clone()),
                                     fake_expansion.clone(),
                                     new_offset,
                                     fake_mapped_token,
diff --git a/crates/ide-completion/src/tests/expression.rs b/crates/ide-completion/src/tests/expression.rs
index e546776..b46e4c3 100644
--- a/crates/ide-completion/src/tests/expression.rs
+++ b/crates/ide-completion/src/tests/expression.rs
@@ -2112,6 +2112,56 @@
 }
 
 #[test]
+fn cfg_attr_attr_macro() {
+    check(
+        r#"
+//- proc_macros: identity
+#[cfg_attr(test, proc_macros::identity)]
+fn foo() {
+    $0
+}
+    "#,
+        expect![[r#"
+            fn foo()  fn()
+            md proc_macros
+            bt u32     u32
+            kw async
+            kw const
+            kw crate::
+            kw enum
+            kw extern
+            kw false
+            kw fn
+            kw for
+            kw if
+            kw if let
+            kw impl
+            kw impl for
+            kw let
+            kw letm
+            kw loop
+            kw match
+            kw mod
+            kw return
+            kw self::
+            kw static
+            kw struct
+            kw trait
+            kw true
+            kw type
+            kw union
+            kw unsafe
+            kw use
+            kw while
+            kw while let
+            sn macro_rules
+            sn pd
+            sn ppd
+        "#]],
+    );
+}
+
+#[test]
 fn escaped_label() {
     check(
         r#"
diff --git a/crates/ide-db/src/lib.rs b/crates/ide-db/src/lib.rs
index 63cc7cd..c94be7e 100644
--- a/crates/ide-db/src/lib.rs
+++ b/crates/ide-db/src/lib.rs
@@ -92,9 +92,7 @@
 impl std::panic::RefUnwindSafe for RootDatabase {}
 
 #[salsa_macros::db]
-impl salsa::Database for RootDatabase {
-    fn salsa_event(&self, _event: &dyn Fn() -> salsa::Event) {}
-}
+impl salsa::Database for RootDatabase {}
 
 impl Drop for RootDatabase {
     fn drop(&mut self) {
diff --git a/crates/ide-db/src/search.rs b/crates/ide-db/src/search.rs
index 30be5bc..d4ab759 100644
--- a/crates/ide-db/src/search.rs
+++ b/crates/ide-db/src/search.rs
@@ -524,6 +524,7 @@
     fn find_nodes<'b>(
         sema: &'b Semantics<'_, RootDatabase>,
         name: &str,
+        file_id: EditionedFileId,
         node: &syntax::SyntaxNode,
         offset: TextSize,
     ) -> impl Iterator<Item = SyntaxNode> + 'b {
@@ -534,7 +535,7 @@
             })
             .into_iter()
             .flat_map(move |token| {
-                if sema.might_be_inside_macro_call(&token) {
+                if sema.is_inside_macro_call(InFile::new(file_id.into(), &token)) {
                     sema.descend_into_macros_exact(token)
                 } else {
                     <_>::from([token])
@@ -654,11 +655,14 @@
                     let tree = LazyCell::new(move || sema.parse(file_id).syntax().clone());
 
                     for offset in FindUsages::match_indices(&file_text, &finder, search_range) {
-                        let usages =
-                            FindUsages::find_nodes(sema, &current_to_process, &tree, offset)
-                                .filter(|it| {
-                                    matches!(it.kind(), SyntaxKind::NAME | SyntaxKind::NAME_REF)
-                                });
+                        let usages = FindUsages::find_nodes(
+                            sema,
+                            &current_to_process,
+                            file_id,
+                            &tree,
+                            offset,
+                        )
+                        .filter(|it| matches!(it.kind(), SyntaxKind::NAME | SyntaxKind::NAME_REF));
                         for usage in usages {
                             if let Some(alias) = usage.parent().and_then(|it| {
                                 let path = ast::PathSegment::cast(it)?.parent_path();
@@ -813,7 +817,7 @@
                 let tree = LazyCell::new(move || this.sema.parse(file_id).syntax().clone());
 
                 for offset in FindUsages::match_indices(&file_text, finder, search_range) {
-                    let usages = FindUsages::find_nodes(this.sema, name, &tree, offset)
+                    let usages = FindUsages::find_nodes(this.sema, name, file_id, &tree, offset)
                         .filter_map(ast::NameRef::cast);
                     for usage in usages {
                         let found_usage = usage
@@ -970,8 +974,8 @@
                     return;
                 }
 
-                for name in
-                    Self::find_nodes(sema, name, &tree, offset).filter_map(ast::NameLike::cast)
+                for name in Self::find_nodes(sema, name, file_id, &tree, offset)
+                    .filter_map(ast::NameLike::cast)
                 {
                     if match name {
                         ast::NameLike::NameRef(name_ref) => self.found_name_ref(&name_ref, sink),
@@ -985,8 +989,8 @@
             // Search for occurrences of the `Self` referring to our type
             if let Some((self_ty, finder)) = &include_self_kw_refs {
                 for offset in Self::match_indices(&text, finder, search_range) {
-                    for name_ref in
-                        Self::find_nodes(sema, "Self", &tree, offset).filter_map(ast::NameRef::cast)
+                    for name_ref in Self::find_nodes(sema, "Self", file_id, &tree, offset)
+                        .filter_map(ast::NameRef::cast)
                     {
                         if self.found_self_ty_name_ref(self_ty, &name_ref, sink) {
                             return;
@@ -1010,7 +1014,7 @@
                 let tree = LazyCell::new(move || sema.parse(file_id).syntax().clone());
 
                 for offset in Self::match_indices(&text, finder, search_range) {
-                    for name_ref in Self::find_nodes(sema, "super", &tree, offset)
+                    for name_ref in Self::find_nodes(sema, "super", file_id, &tree, offset)
                         .filter_map(ast::NameRef::cast)
                     {
                         if self.found_name_ref(&name_ref, sink) {
@@ -1020,7 +1024,7 @@
                 }
                 if let Some(finder) = &is_crate_root {
                     for offset in Self::match_indices(&text, finder, search_range) {
-                        for name_ref in Self::find_nodes(sema, "crate", &tree, offset)
+                        for name_ref in Self::find_nodes(sema, "crate", file_id, &tree, offset)
                             .filter_map(ast::NameRef::cast)
                         {
                             if self.found_name_ref(&name_ref, sink) {
@@ -1064,8 +1068,8 @@
                 let finder = &Finder::new("self");
 
                 for offset in Self::match_indices(&text, finder, search_range) {
-                    for name_ref in
-                        Self::find_nodes(sema, "self", &tree, offset).filter_map(ast::NameRef::cast)
+                    for name_ref in Self::find_nodes(sema, "self", file_id, &tree, offset)
+                        .filter_map(ast::NameRef::cast)
                     {
                         if self.found_self_module_name_ref(&name_ref, sink) {
                             return;
diff --git a/crates/ide-diagnostics/src/handlers/missing_unsafe.rs b/crates/ide-diagnostics/src/handlers/missing_unsafe.rs
index 364bead..6bd5417 100644
--- a/crates/ide-diagnostics/src/handlers/missing_unsafe.rs
+++ b/crates/ide-diagnostics/src/handlers/missing_unsafe.rs
@@ -915,4 +915,47 @@
             "#,
         );
     }
+
+    #[test]
+    fn regression_19823() {
+        check_diagnostics(
+            r#"
+pub trait FooTrait {
+    unsafe fn method1();
+    unsafe fn method2();
+}
+
+unsafe fn some_unsafe_fn() {}
+
+macro_rules! impl_foo {
+    () => {
+        unsafe fn method1() {
+            some_unsafe_fn();
+        }
+        unsafe fn method2() {
+            some_unsafe_fn();
+        }
+    };
+}
+
+pub struct S1;
+#[allow(unsafe_op_in_unsafe_fn)]
+impl FooTrait for S1 {
+    unsafe fn method1() {
+        some_unsafe_fn();
+    }
+
+    unsafe fn method2() {
+        some_unsafe_fn();
+    }
+}
+
+pub struct S2;
+#[allow(unsafe_op_in_unsafe_fn)]
+impl FooTrait for S2 {
+    impl_foo!();
+}
+        "#,
+        );
+    }
 }
diff --git a/crates/ide-diagnostics/src/handlers/non_exhaustive_let.rs b/crates/ide-diagnostics/src/handlers/non_exhaustive_let.rs
index 35cefd2..f20b6de 100644
--- a/crates/ide-diagnostics/src/handlers/non_exhaustive_let.rs
+++ b/crates/ide-diagnostics/src/handlers/non_exhaustive_let.rs
@@ -106,4 +106,29 @@
 "#,
         );
     }
+
+    #[test]
+    fn empty_patterns_normalize() {
+        check_diagnostics(
+            r#"
+enum Infallible {}
+
+trait Foo {
+    type Assoc;
+}
+enum Enum<T: Foo> {
+    A,
+    B(T::Assoc),
+}
+
+impl Foo for () {
+    type Assoc = Infallible;
+}
+
+fn foo(v: Enum<()>) {
+    let Enum::A = v;
+}
+        "#,
+        );
+    }
 }
diff --git a/crates/ide-diagnostics/src/lib.rs b/crates/ide-diagnostics/src/lib.rs
index 2af14ca..72bd66d 100644
--- a/crates/ide-diagnostics/src/lib.rs
+++ b/crates/ide-diagnostics/src/lib.rs
@@ -83,12 +83,11 @@
 #[cfg(test)]
 mod tests;
 
-use std::{collections::hash_map, iter, sync::LazyLock};
+use std::{iter, sync::LazyLock};
 
 use either::Either;
 use hir::{
-    Crate, DisplayTarget, HirFileId, InFile, Semantics, db::ExpandDatabase,
-    diagnostics::AnyDiagnostic,
+    Crate, DisplayTarget, InFile, Semantics, db::ExpandDatabase, diagnostics::AnyDiagnostic,
 };
 use ide_db::{
     EditionedFileId, FileId, FileRange, FxHashMap, FxHashSet, RootDatabase, Severity, SnippetCap,
@@ -513,13 +512,7 @@
 
     // The edition isn't accurate (each diagnostics may have its own edition due to macros),
     // but it's okay as it's only being used for error recovery.
-    handle_lints(
-        &ctx.sema,
-        &mut FxHashMap::default(),
-        &mut lints,
-        &mut Vec::new(),
-        editioned_file_id.edition(db),
-    );
+    handle_lints(&ctx.sema, &mut lints, editioned_file_id.edition(db));
 
     res.retain(|d| d.severity != Severity::Allow);
 
@@ -584,8 +577,6 @@
     true
 }
 
-// `__RA_EVERY_LINT` is a fake lint group to allow every lint in proc macros
-
 struct BuiltLint {
     lint: &'static Lint,
     groups: Vec<&'static str>,
@@ -629,9 +620,7 @@
 
 fn handle_lints(
     sema: &Semantics<'_, RootDatabase>,
-    cache: &mut FxHashMap<HirFileId, FxHashMap<SmolStr, SeverityAttr>>,
     diagnostics: &mut [(InFile<SyntaxNode>, &mut Diagnostic)],
-    cache_stack: &mut Vec<HirFileId>,
     edition: Edition,
 ) {
     for (node, diag) in diagnostics {
@@ -645,7 +634,8 @@
             diag.severity = default_severity;
         }
 
-        let mut diag_severity = fill_lint_attrs(sema, node, cache, cache_stack, diag, edition);
+        let mut diag_severity =
+            lint_severity_at(sema, node, &lint_groups(&diag.code, edition), edition);
 
         if let outline_diag_severity @ Some(_) =
             find_outline_mod_lint_severity(sema, node, diag, edition)
@@ -698,155 +688,22 @@
     result
 }
 
-#[derive(Debug, Clone, Copy)]
-struct SeverityAttr {
-    severity: Severity,
-    /// This field counts how far we are from the main node. Bigger values mean more far.
-    ///
-    /// Note this isn't accurate: there can be gaps between values (created when merging severity maps).
-    /// The important thing is that if an attr is closer to the main node, it will have smaller value.
-    ///
-    /// This is necessary even though we take care to never overwrite a value from deeper nesting
-    /// because of lint groups. For example, in the following code:
-    /// ```
-    /// #[warn(non_snake_case)]
-    /// mod foo {
-    ///     #[allow(nonstandard_style)]
-    ///     mod bar {}
-    /// }
-    /// ```
-    /// We want to not warn on non snake case inside `bar`. If we are traversing this for the first
-    /// time, everything will be fine, because we will set `diag_severity` on the first matching group
-    /// and never overwrite it since then. But if `bar` is cached, the cache will contain both
-    /// `#[warn(non_snake_case)]` and `#[allow(nonstandard_style)]`, and without this field, we have
-    /// no way of differentiating between the two.
-    depth: u32,
-}
-
-fn fill_lint_attrs(
+fn lint_severity_at(
     sema: &Semantics<'_, RootDatabase>,
     node: &InFile<SyntaxNode>,
-    cache: &mut FxHashMap<HirFileId, FxHashMap<SmolStr, SeverityAttr>>,
-    cache_stack: &mut Vec<HirFileId>,
-    diag: &Diagnostic,
+    lint_groups: &LintGroups,
     edition: Edition,
 ) -> Option<Severity> {
-    let mut collected_lint_attrs = FxHashMap::<SmolStr, SeverityAttr>::default();
-    let mut diag_severity = None;
-
-    let mut ancestors = node.value.ancestors().peekable();
-    let mut depth = 0;
-    loop {
-        let ancestor = ancestors.next().expect("we always return from top-level nodes");
-        depth += 1;
-
-        if ancestors.peek().is_none() {
-            // We don't want to insert too many nodes into cache, but top level nodes (aka. outline modules
-            // or macro expansions) need to touch the database so they seem like a good fit to cache.
-
-            if let Some(cached) = cache.get_mut(&node.file_id) {
-                // This node (and everything above it) is already cached; the attribute is either here or nowhere.
-
-                // Workaround for the borrow checker.
-                let cached = std::mem::take(cached);
-
-                cached.iter().for_each(|(lint, severity)| {
-                    for item in &*cache_stack {
-                        let node_cache_entry = cache
-                            .get_mut(item)
-                            .expect("we always insert cached nodes into the cache map");
-                        let lint_cache_entry = node_cache_entry.entry(lint.clone());
-                        if let hash_map::Entry::Vacant(lint_cache_entry) = lint_cache_entry {
-                            // Do not overwrite existing lint attributes, as we go bottom to top and bottom attrs
-                            // overwrite top attrs.
-                            lint_cache_entry.insert(SeverityAttr {
-                                severity: severity.severity,
-                                depth: severity.depth + depth,
-                            });
-                        }
-                    }
-                });
-
-                let lints = lint_groups(&diag.code, edition);
-                let all_matching_groups =
-                    lints.iter().filter_map(|lint_group| cached.get(lint_group));
-                let cached_severity =
-                    all_matching_groups.min_by_key(|it| it.depth).map(|it| it.severity);
-
-                cache.insert(node.file_id, cached);
-
-                return diag_severity.or(cached_severity);
-            }
-
-            // Insert this node's descendants' attributes into any outline descendant, but not including this node.
-            // This must come before inserting this node's own attributes to preserve order.
-            collected_lint_attrs.drain().for_each(|(lint, severity)| {
-                if diag_severity.is_none() && lint_groups(&diag.code, edition).contains(&lint) {
-                    diag_severity = Some(severity.severity);
-                }
-
-                for item in &*cache_stack {
-                    let node_cache_entry = cache
-                        .get_mut(item)
-                        .expect("we always insert cached nodes into the cache map");
-                    let lint_cache_entry = node_cache_entry.entry(lint.clone());
-                    if let hash_map::Entry::Vacant(lint_cache_entry) = lint_cache_entry {
-                        // Do not overwrite existing lint attributes, as we go bottom to top and bottom attrs
-                        // overwrite top attrs.
-                        lint_cache_entry.insert(severity);
-                    }
-                }
-            });
-
-            cache_stack.push(node.file_id);
-            cache.insert(node.file_id, FxHashMap::default());
-
-            if let Some(ancestor) = ast::AnyHasAttrs::cast(ancestor) {
-                // Insert this node's attributes into any outline descendant, including this node.
-                lint_attrs(sema, ancestor, edition).for_each(|(lint, severity)| {
-                    if diag_severity.is_none() && lint_groups(&diag.code, edition).contains(&lint) {
-                        diag_severity = Some(severity);
-                    }
-
-                    for item in &*cache_stack {
-                        let node_cache_entry = cache
-                            .get_mut(item)
-                            .expect("we always insert cached nodes into the cache map");
-                        let lint_cache_entry = node_cache_entry.entry(lint.clone());
-                        if let hash_map::Entry::Vacant(lint_cache_entry) = lint_cache_entry {
-                            // Do not overwrite existing lint attributes, as we go bottom to top and bottom attrs
-                            // overwrite top attrs.
-                            lint_cache_entry.insert(SeverityAttr { severity, depth });
-                        }
-                    }
-                });
-            }
-
-            let parent_node = sema.find_parent_file(node.file_id);
-            if let Some(parent_node) = parent_node {
-                let parent_severity =
-                    fill_lint_attrs(sema, &parent_node, cache, cache_stack, diag, edition);
-                if diag_severity.is_none() {
-                    diag_severity = parent_severity;
-                }
-            }
-            cache_stack.pop();
-            return diag_severity;
-        } else if let Some(ancestor) = ast::AnyHasAttrs::cast(ancestor) {
-            lint_attrs(sema, ancestor, edition).for_each(|(lint, severity)| {
-                if diag_severity.is_none() && lint_groups(&diag.code, edition).contains(&lint) {
-                    diag_severity = Some(severity);
-                }
-
-                let lint_cache_entry = collected_lint_attrs.entry(lint);
-                if let hash_map::Entry::Vacant(lint_cache_entry) = lint_cache_entry {
-                    // Do not overwrite existing lint attributes, as we go bottom to top and bottom attrs
-                    // overwrite top attrs.
-                    lint_cache_entry.insert(SeverityAttr { severity, depth });
-                }
-            });
-        }
-    }
+    node.value
+        .ancestors()
+        .filter_map(ast::AnyHasAttrs::cast)
+        .find_map(|ancestor| {
+            lint_attrs(sema, ancestor, edition)
+                .find_map(|(lint, severity)| lint_groups.contains(&lint).then_some(severity))
+        })
+        .or_else(|| {
+            lint_severity_at(sema, &sema.find_parent_file(node.file_id)?, lint_groups, edition)
+        })
 }
 
 fn lint_attrs<'a>(
@@ -945,10 +802,6 @@
     fn contains(&self, group: &str) -> bool {
         self.groups.contains(&group) || (self.inside_warnings && group == "warnings")
     }
-
-    fn iter(&self) -> impl Iterator<Item = &'static str> {
-        self.groups.iter().copied().chain(self.inside_warnings.then_some("warnings"))
-    }
 }
 
 fn lint_groups(lint: &DiagnosticCode, edition: Edition) -> LintGroups {
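
Note on the `ide-diagnostics/src/lib.rs` hunks: the cached, depth-tracking `fill_lint_attrs` walk (and its `SeverityAttr` bookkeeping) is replaced by `lint_severity_at`, which takes the first matching lint attribute on the nearest syntactic ancestor and, failing that, recurses into the file returned by `sema.find_parent_file` (the file containing the macro call or outline `mod`). A minimal, self-contained sketch of that strategy; `Node`, `ancestor_attrs`, and the lint names are toy stand-ins, not rust-analyzer's types:

```
// Toy model of the new lookup: a diagnostic's severity is decided by the
// nearest enclosing lint attribute, and if the current file has none, the
// search continues in its parent file.
#[derive(Clone, Copy, Debug, PartialEq)]
enum Severity {
    Allow,
    Warning,
}

struct Node<'a> {
    // Lint attributes on this node and its syntactic ancestors, innermost first.
    ancestor_attrs: &'a [(&'a str, Severity)],
    // The node of the macro call / outline `mod` in the parent file, if any.
    parent: Option<&'a Node<'a>>,
}

fn lint_severity_at(node: &Node<'_>, lint_groups: &[&str]) -> Option<Severity> {
    node.ancestor_attrs
        .iter()
        .find_map(|(lint, severity)| lint_groups.contains(lint).then_some(*severity))
        .or_else(|| lint_severity_at(node.parent?, lint_groups))
}

fn main() {
    // Same-file ancestors, innermost first, as in the example from the removed
    // doc comment:
    //     #[warn(non_snake_case)] mod foo { #[allow(nonstandard_style)] mod bar { ... } }
    let same_file_attrs =
        [("nonstandard_style", Severity::Allow), ("non_snake_case", Severity::Warning)];
    // The parent file is only consulted if nothing matched in this one.
    let parent_attrs = [("warnings", Severity::Allow)];
    let parent_file = Node { ancestor_attrs: &parent_attrs, parent: None };
    let node = Node { ancestor_attrs: &same_file_attrs, parent: Some(&parent_file) };

    // A non-snake-case diagnostic matches both groups; the innermost attribute
    // wins because the walk stops at the first hit.
    assert_eq!(
        lint_severity_at(&node, &["non_snake_case", "nonstandard_style"]),
        Some(Severity::Allow)
    );
}
```

Innermost-wins is exactly the property the removed `depth` field existed to preserve across the cache; with the cache gone, the iteration order provides it for free.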
diff --git a/crates/ide-ssr/src/lib.rs b/crates/ide-ssr/src/lib.rs
index 339c199..43c56ac 100644
--- a/crates/ide-ssr/src/lib.rs
+++ b/crates/ide-ssr/src/lib.rs
@@ -287,7 +287,7 @@
                 if let Some(expanded) = self.sema.expand_macro_call(&macro_call) {
                     if let Some(tt) = macro_call.token_tree() {
                         self.output_debug_for_nodes_at_range(
-                            &expanded,
+                            &expanded.value,
                             range,
                             &Some(self.sema.original_range(tt.syntax())),
                             out,
diff --git a/crates/ide-ssr/src/search.rs b/crates/ide-ssr/src/search.rs
index d89911f..9afbedb 100644
--- a/crates/ide-ssr/src/search.rs
+++ b/crates/ide-ssr/src/search.rs
@@ -194,7 +194,7 @@
                     // nodes that originated entirely from within the token tree of the macro call.
                     // i.e. we don't want to match something that came from the macro itself.
                     if let Some(range) = self.sema.original_range_opt(tt.syntax()) {
-                        self.slow_scan_node(&expanded, rule, &Some(range), matches_out);
+                        self.slow_scan_node(&expanded.value, rule, &Some(range), matches_out);
                     }
                 }
             }
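
Note: both SSR call sites (and the `highlight_related.rs` hunk further down) adjust to `Semantics::expand_macro_call` now returning the expansion wrapped in `InFile`, which records the macro file the node belongs to; only the syntax node is needed here, hence the added `.value`. A small sketch of the wrapper shapes follows the `expand_macro.rs` hunk below.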
diff --git a/crates/ide/src/expand_macro.rs b/crates/ide/src/expand_macro.rs
index 241a702..9beed2c 100644
--- a/crates/ide/src/expand_macro.rs
+++ b/crates/ide/src/expand_macro.rs
@@ -146,10 +146,11 @@
     offset_in_original_node: TextSize,
 ) -> Option<SyntaxNode> {
     let ExpandResult { value: expanded, err } = match macro_call {
-        item @ ast::Item::MacroCall(macro_call) => {
-            sema.expand_attr_macro(item).or_else(|| sema.expand_allowed_builtins(macro_call))?
-        }
-        item => sema.expand_attr_macro(item)?,
+        item @ ast::Item::MacroCall(macro_call) => sema
+            .expand_attr_macro(item)
+            .map(|it| it.map(|it| it.value))
+            .or_else(|| sema.expand_allowed_builtins(macro_call))?,
+        item => sema.expand_attr_macro(item)?.map(|it| it.value),
     };
     let expanded = expanded.clone_for_update();
     if let Some(err) = err {
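
Note: `expand_attr_macro` likewise now returns its node wrapped in `InFile`, and in this function the node additionally sits inside an `ExpandResult`, which is why the call site reads `.map(|it| it.map(|it| it.value))`. A self-contained sketch with toy stand-ins for the hir wrappers (the real types live in `hir`/`hir-expand` and carry more information):

```
// Toy stand-ins, only to show why the double `map` is needed.
struct InFile<T> {
    file_id: u32, // id of the (macro) file the value belongs to
    value: T,
}

struct ExpandResult<T> {
    value: T,
    err: Option<String>, // expansion may succeed with a recoverable error
}

impl<T> ExpandResult<T> {
    fn map<U>(self, f: impl FnOnce(T) -> U) -> ExpandResult<U> {
        ExpandResult { value: f(self.value), err: self.err }
    }
}

type SyntaxNode = String; // placeholder for the real syntax tree node

// Hypothetical producer with the new return shape.
fn expand_attr_macro() -> Option<ExpandResult<InFile<SyntaxNode>>> {
    Some(ExpandResult {
        value: InFile { file_id: 1, value: "expanded!".to_owned() },
        err: None,
    })
}

fn main() {
    // Outer `map` is Option::map, inner `map` is ExpandResult::map, and the
    // innermost closure keeps only the syntax node, dropping the InFile wrapper.
    let expanded: Option<ExpandResult<SyntaxNode>> =
        expand_attr_macro().map(|it| it.map(|it| it.value));
    assert_eq!(expanded.unwrap().value, "expanded!");
}
```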
diff --git a/crates/ide/src/highlight_related.rs b/crates/ide/src/highlight_related.rs
index fb8dbcf..520ba39 100644
--- a/crates/ide/src/highlight_related.rs
+++ b/crates/ide/src/highlight_related.rs
@@ -653,7 +653,7 @@
                             expr.macro_call().and_then(|call| self.sema.expand_macro_call(&call))
                         {
                             match_ast! {
-                                match expanded {
+                                match (expanded.value) {
                                     ast::MacroStmts(it) => {
                                         self.handle_expanded(it, cb);
                                     },
diff --git a/crates/query-group-macro/tests/logger_db.rs b/crates/query-group-macro/tests/logger_db.rs
index bade0c2..71af63a 100644
--- a/crates/query-group-macro/tests/logger_db.rs
+++ b/crates/query-group-macro/tests/logger_db.rs
@@ -1,33 +1,41 @@
 use std::sync::{Arc, Mutex};
 
 #[salsa_macros::db]
-#[derive(Default, Clone)]
+#[derive(Clone)]
 pub(crate) struct LoggerDb {
     storage: salsa::Storage<Self>,
     logger: Logger,
 }
 
+impl Default for LoggerDb {
+    fn default() -> Self {
+        let logger = Logger::default();
+        Self {
+            storage: salsa::Storage::new(Some(Box::new({
+                let logger = logger.clone();
+                move |event| match event.kind {
+                    salsa::EventKind::WillExecute { .. }
+                    | salsa::EventKind::WillCheckCancellation
+                    | salsa::EventKind::DidValidateMemoizedValue { .. }
+                    | salsa::EventKind::WillDiscardStaleOutput { .. }
+                    | salsa::EventKind::DidDiscard { .. } => {
+                        logger.logs.lock().unwrap().push(format!("salsa_event({:?})", event.kind));
+                    }
+                    _ => {}
+                }
+            }))),
+            logger,
+        }
+    }
+}
+
 #[derive(Default, Clone)]
 struct Logger {
     logs: Arc<Mutex<Vec<String>>>,
 }
 
 #[salsa_macros::db]
-impl salsa::Database for LoggerDb {
-    fn salsa_event(&self, event: &dyn Fn() -> salsa::Event) {
-        let event = event();
-        match event.kind {
-            salsa::EventKind::WillExecute { .. }
-            | salsa::EventKind::WillCheckCancellation
-            | salsa::EventKind::DidValidateMemoizedValue { .. }
-            | salsa::EventKind::WillDiscardStaleOutput { .. }
-            | salsa::EventKind::DidDiscard { .. } => {
-                self.push_log(format!("salsa_event({:?})", event.kind));
-            }
-            _ => {}
-        }
-    }
-}
+impl salsa::Database for LoggerDb {}
 
 impl LoggerDb {
     /// Log an event from inside a tracked function.
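
Note: with the newer salsa API used here, the event hook is supplied to `salsa::Storage::new` as a boxed callback instead of being implemented as `Database::salsa_event`, so `Default` has to be written by hand and the logger is cloned into the `move` closure; the `salsa::Database` impl can then stay empty, as in the hunk above. A minimal standalone illustration of the sharing pattern (plain Rust, no salsa; the callback and event string are stand-ins):

```
use std::sync::{Arc, Mutex};

// Cloning is cheap: both handles share the same Vec behind the Arc, so the
// copy captured by the callback and the copy kept on the database observe
// the same log.
#[derive(Default, Clone)]
struct Logger {
    logs: Arc<Mutex<Vec<String>>>,
}

fn main() {
    let logger = Logger::default();

    // Stand-in for the boxed event callback handed to `salsa::Storage::new`.
    let callback = {
        let logger = logger.clone();
        move |event_kind: &str| {
            logger.logs.lock().unwrap().push(format!("salsa_event({event_kind})"))
        }
    };

    callback("WillExecute");
    callback("WillCheckCancellation");

    // The handle kept outside the closure sees everything the callback pushed.
    assert_eq!(logger.logs.lock().unwrap().len(), 2);
}
```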
diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs
index 03e5b1f..f715823 100644
--- a/crates/rust-analyzer/src/config.rs
+++ b/crates/rust-analyzer/src/config.rs
@@ -544,7 +544,7 @@
          /// Whether to prefer import paths containing a `prelude` module.
         imports_preferPrelude: bool                       = false,
         /// The path structure for newly inserted paths to use.
-        imports_prefix: ImportPrefixDef               = ImportPrefixDef::Plain,
+        imports_prefix: ImportPrefixDef               = ImportPrefixDef::ByCrate,
         /// Whether to prefix external (including std, core) crate imports with `::`. e.g. "use ::std::io::Read;".
         imports_prefixExternPrelude: bool = false,
     }
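
Note: the default import path style moves from `plain` to `crate` (`ImportPrefixDef::ByCrate`); the generated docs and the VS Code `package.json` default below change to match. Roughly, and with hypothetical module names, an import inserted for an item defined elsewhere in the same crate now prefers a crate-rooted path:

```
// Hypothetical layout, only to illustrate the setting.
mod util {
    pub fn helper() {}
}

// `imports.prefix = "crate"` (new default): inserted paths are rooted at the crate.
use crate::util::helper;
// `imports.prefix = "plain"` (old default) inserts the path without a
// `crate::`/`self::` prefix, e.g. `use util::helper;` at the crate root.

fn main() {
    helper();
}
```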
diff --git a/docs/book/src/configuration_generated.md b/docs/book/src/configuration_generated.md
index 2ae73df..de1d0ea 100644
--- a/docs/book/src/configuration_generated.md
+++ b/docs/book/src/configuration_generated.md
@@ -835,7 +835,7 @@
 
 ## rust-analyzer.imports.prefix {#imports.prefix}
 
-Default: `"plain"`
+Default: `"crate"`
 
 The path structure for newly inserted paths to use.
 
diff --git a/docs/book/src/other_editors.md b/docs/book/src/other_editors.md
index 1eac7dd..896df52 100644
--- a/docs/book/src/other_editors.md
+++ b/docs/book/src/other_editors.md
@@ -364,30 +364,6 @@
 There are multiple rust-analyzer extensions for Visual Studio 2022 on
 Windows:
 
-### rust-analyzer.vs
-
-(License: Creative Commons Attribution-NonCommercial-ShareAlike 4.0
-International)
-
-[Visual Studio
-Marketplace](https://marketplace.visualstudio.com/items?itemName=kitamstudios.RustAnalyzer)
-
-[GitHub](https://github.com/kitamstudios/rust-analyzer/)
-
-Support for Rust development in the Visual Studio IDE is enabled by the
-[rust-analyzer](https://marketplace.visualstudio.com/items?itemName=kitamstudios.RustAnalyzer)
-package. Either click on the download link or install from IDE’s
-extension manager. For now [Visual Studio
-2022](https://visualstudio.microsoft.com/downloads/) is required. All
-editions are supported viz. Community, Professional & Enterprise. The
-package aims to provide 0-friction installation and therefore comes
-loaded with most things required including rust-analyzer binary. If
-anything it needs is missing, appropriate errors / warnings will guide
-the user. E.g. cargo.exe needs to be in path and the package will tell
-you as much. This package is under rapid active development. So if you
-encounter any issues please file it at
-[rust-analyzer.vs](https://github.com/kitamstudios/rust-analyzer/).
-
 ### VS RustAnalyzer
 
 (License: GPL)
diff --git a/editors/code/package.json b/editors/code/package.json
index a282eea..88a90aa 100644
--- a/editors/code/package.json
+++ b/editors/code/package.json
@@ -1927,7 +1927,7 @@
                 "properties": {
                     "rust-analyzer.imports.prefix": {
                         "markdownDescription": "The path structure for newly inserted paths to use.",
-                        "default": "plain",
+                        "default": "crate",
                         "type": "string",
                         "enum": [
                             "plain",