Auto merge of #16861 - Veykril:macro-diag-exceptions, r=Veykril

fix: Ignore some warnings if they originate from within macro expansions

These tend to be annoying noise, as we cannot handle `allow` attributes for them properly for the time being.
diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml
index ac536d0..dc0a6c2 100644
--- a/.github/workflows/release.yaml
+++ b/.github/workflows/release.yaml
@@ -36,7 +36,6 @@
           - os: ubuntu-20.04
             target: x86_64-unknown-linux-gnu
             code-target: linux-x64
-            container: ubuntu:18.04
           - os: ubuntu-20.04
             target: aarch64-unknown-linux-gnu
             code-target: linux-arm64
@@ -63,14 +62,6 @@
         with:
           fetch-depth: ${{ env.FETCH_DEPTH }}
 
-      - name: Install toolchain dependencies
-        if: matrix.container == 'ubuntu:18.04'
-        shell: bash
-        run: |
-          apt-get update && apt-get install -y build-essential curl
-          curl --proto '=https' --tlsv1.2 --retry 10 --retry-connrefused -fsSL "https://sh.rustup.rs" | sh -s -- --profile minimal --default-toolchain none -y
-          echo "${CARGO_HOME:-$HOME/.cargo}/bin" >> $GITHUB_PATH
-
       - name: Install Rust toolchain
         run: |
           rustup update --no-self-update stable
diff --git a/crates/flycheck/src/test_runner.rs b/crates/flycheck/src/test_runner.rs
index 6dac589..3137871 100644
--- a/crates/flycheck/src/test_runner.rs
+++ b/crates/flycheck/src/test_runner.rs
@@ -28,19 +28,20 @@
     },
     Suite,
     Finished,
+    Custom {
+        text: String,
+    },
 }
 
 impl ParseFromLine for CargoTestMessage {
-    fn from_line(line: &str, error: &mut String) -> Option<Self> {
+    fn from_line(line: &str, _: &mut String) -> Option<Self> {
         let mut deserializer = serde_json::Deserializer::from_str(line);
         deserializer.disable_recursion_limit();
         if let Ok(message) = CargoTestMessage::deserialize(&mut deserializer) {
             return Some(message);
         }
 
-        error.push_str(line);
-        error.push('\n');
-        None
+        Some(CargoTestMessage::Custom { text: line.to_owned() })
     }
 
     fn from_eof() -> Option<Self> {
diff --git a/crates/hir-def/src/data.rs b/crates/hir-def/src/data.rs
index d4c1db8..b815c9b 100644
--- a/crates/hir-def/src/data.rs
+++ b/crates/hir-def/src/data.rs
@@ -715,7 +715,7 @@
             }
             AssocItem::MacroCall(call) => {
                 let file_id = self.expander.current_file_id();
-                let MacroCall { ast_id, expand_to, call_site, ref path } = item_tree[call];
+                let MacroCall { ast_id, expand_to, ctxt, ref path } = item_tree[call];
                 let module = self.expander.module.local_id;
 
                 let resolver = |path| {
@@ -734,7 +734,7 @@
                 match macro_call_as_call_id(
                     self.db.upcast(),
                     &AstIdWithPath::new(file_id, ast_id, Clone::clone(path)),
-                    call_site,
+                    ctxt,
                     expand_to,
                     self.expander.module.krate(),
                     resolver,
@@ -745,6 +745,7 @@
                         self.collect_macro_items(res, &|| hir_expand::MacroCallKind::FnLike {
                             ast_id: InFile::new(file_id, ast_id),
                             expand_to: hir_expand::ExpandTo::Items,
+                            eager: None,
                         });
                     }
                     Ok(None) => (),
@@ -754,6 +755,7 @@
                             MacroCallKind::FnLike {
                                 ast_id: InFile::new(file_id, ast_id),
                                 expand_to,
+                                eager: None,
                             },
                             Clone::clone(path),
                         ));
diff --git a/crates/hir-def/src/data/adt.rs b/crates/hir-def/src/data/adt.rs
index 5790e60..a7461b7 100644
--- a/crates/hir-def/src/data/adt.rs
+++ b/crates/hir-def/src/data/adt.rs
@@ -191,9 +191,9 @@
         let krate = loc.container.krate;
         let item_tree = loc.id.item_tree(db);
         let repr = repr_from_value(db, krate, &item_tree, ModItem::from(loc.id.value).into());
-        let cfg_options = db.crate_graph()[loc.container.krate].cfg_options.clone();
+        let cfg_options = db.crate_graph()[krate].cfg_options.clone();
 
-        let attrs = item_tree.attrs(db, loc.container.krate, ModItem::from(loc.id.value).into());
+        let attrs = item_tree.attrs(db, krate, ModItem::from(loc.id.value).into());
 
         let mut flags = StructFlags::NO_FLAGS;
         if attrs.by_key("rustc_has_incoherent_inherent_impls").exists() {
@@ -248,9 +248,9 @@
         let krate = loc.container.krate;
         let item_tree = loc.id.item_tree(db);
         let repr = repr_from_value(db, krate, &item_tree, ModItem::from(loc.id.value).into());
-        let cfg_options = db.crate_graph()[loc.container.krate].cfg_options.clone();
+        let cfg_options = db.crate_graph()[krate].cfg_options.clone();
 
-        let attrs = item_tree.attrs(db, loc.container.krate, ModItem::from(loc.id.value).into());
+        let attrs = item_tree.attrs(db, krate, ModItem::from(loc.id.value).into());
         let mut flags = StructFlags::NO_FLAGS;
         if attrs.by_key("rustc_has_incoherent_inherent_impls").exists() {
             flags |= StructFlags::IS_RUSTC_HAS_INCOHERENT_INHERENT_IMPL;
diff --git a/crates/hir-def/src/db.rs b/crates/hir-def/src/db.rs
index 544ed6b..30d52d8 100644
--- a/crates/hir-def/src/db.rs
+++ b/crates/hir-def/src/db.rs
@@ -309,13 +309,9 @@
                 kind: kind(loc.expander, loc.id.file_id(), makro.ast_id.upcast()),
                 local_inner: false,
                 allow_internal_unsafe: loc.allow_internal_unsafe,
-                span: db
-                    .span_map(loc.id.file_id())
-                    .span_for_range(db.ast_id_map(loc.id.file_id()).get(makro.ast_id).text_range()),
                 edition: loc.edition,
             }
         }
-
         MacroId::MacroRulesId(it) => {
             let loc: MacroRulesLoc = it.lookup(db);
 
@@ -328,9 +324,6 @@
                 allow_internal_unsafe: loc
                     .flags
                     .contains(MacroRulesLocFlags::ALLOW_INTERNAL_UNSAFE),
-                span: db
-                    .span_map(loc.id.file_id())
-                    .span_for_range(db.ast_id_map(loc.id.file_id()).get(makro.ast_id).text_range()),
                 edition: loc.edition,
             }
         }
@@ -348,9 +341,6 @@
                 ),
                 local_inner: false,
                 allow_internal_unsafe: false,
-                span: db
-                    .span_map(loc.id.file_id())
-                    .span_for_range(db.ast_id_map(loc.id.file_id()).get(makro.ast_id).text_range()),
                 edition: loc.edition,
             }
         }
diff --git a/crates/hir-def/src/item_tree.rs b/crates/hir-def/src/item_tree.rs
index bd3d377..585e93c 100644
--- a/crates/hir-def/src/item_tree.rs
+++ b/crates/hir-def/src/item_tree.rs
@@ -49,7 +49,7 @@
 use la_arena::{Arena, Idx, IdxRange, RawIdx};
 use rustc_hash::FxHashMap;
 use smallvec::SmallVec;
-use span::{AstIdNode, FileAstId, Span};
+use span::{AstIdNode, FileAstId, SyntaxContextId};
 use stdx::never;
 use syntax::{ast, match_ast, SyntaxKind};
 use triomphe::Arc;
@@ -790,8 +790,7 @@
     pub path: Interned<ModPath>,
     pub ast_id: FileAstId<ast::MacroCall>,
     pub expand_to: ExpandTo,
-    // FIXME: We need to move this out. It invalidates the item tree when typing inside the macro call.
-    pub call_site: Span,
+    pub ctxt: SyntaxContextId,
 }
 
 #[derive(Debug, Clone, Eq, PartialEq)]
diff --git a/crates/hir-def/src/item_tree/lower.rs b/crates/hir-def/src/item_tree/lower.rs
index bf3d54f..f02163c 100644
--- a/crates/hir-def/src/item_tree/lower.rs
+++ b/crates/hir-def/src/item_tree/lower.rs
@@ -560,35 +560,32 @@
 
     fn lower_macro_call(&mut self, m: &ast::MacroCall) -> Option<FileItemTreeId<MacroCall>> {
         let span_map = self.span_map();
-        let path = Interned::new(ModPath::from_src(self.db.upcast(), m.path()?, &mut |range| {
+        let path = m.path()?;
+        let range = path.syntax().text_range();
+        let path = Interned::new(ModPath::from_src(self.db.upcast(), path, &mut |range| {
             span_map.span_for_range(range).ctx
         })?);
         let ast_id = self.source_ast_id_map.ast_id(m);
         let expand_to = hir_expand::ExpandTo::from_call_site(m);
-        let res = MacroCall {
-            path,
-            ast_id,
-            expand_to,
-            call_site: span_map.span_for_range(m.syntax().text_range()),
-        };
+        let res = MacroCall { path, ast_id, expand_to, ctxt: span_map.span_for_range(range).ctx };
         Some(id(self.data().macro_calls.alloc(res)))
     }
 
     fn lower_macro_rules(&mut self, m: &ast::MacroRules) -> Option<FileItemTreeId<MacroRules>> {
-        let name = m.name().map(|it| it.as_name())?;
+        let name = m.name()?;
         let ast_id = self.source_ast_id_map.ast_id(m);
 
-        let res = MacroRules { name, ast_id };
+        let res = MacroRules { name: name.as_name(), ast_id };
         Some(id(self.data().macro_rules.alloc(res)))
     }
 
     fn lower_macro_def(&mut self, m: &ast::MacroDef) -> Option<FileItemTreeId<Macro2>> {
-        let name = m.name().map(|it| it.as_name())?;
+        let name = m.name()?;
 
         let ast_id = self.source_ast_id_map.ast_id(m);
         let visibility = self.lower_visibility(m);
 
-        let res = Macro2 { name, ast_id, visibility };
+        let res = Macro2 { name: name.as_name(), ast_id, visibility };
         Some(id(self.data().macro_defs.alloc(res)))
     }
 
diff --git a/crates/hir-def/src/item_tree/pretty.rs b/crates/hir-def/src/item_tree/pretty.rs
index 87c90a4..953bf6b 100644
--- a/crates/hir-def/src/item_tree/pretty.rs
+++ b/crates/hir-def/src/item_tree/pretty.rs
@@ -487,12 +487,12 @@
                 }
             }
             ModItem::MacroCall(it) => {
-                let MacroCall { path, ast_id, expand_to, call_site } = &self.tree[it];
+                let MacroCall { path, ast_id, expand_to, ctxt } = &self.tree[it];
                 let _ = writeln!(
                     self,
-                    "// AstId: {:?}, Span: {}, ExpandTo: {:?}",
+                    "// AstId: {:?}, SyntaxContext: {}, ExpandTo: {:?}",
                     ast_id.erase().into_raw(),
-                    call_site,
+                    ctxt,
                     expand_to
                 );
                 wln!(self, "{}!(...);", path.display(self.db.upcast()));
diff --git a/crates/hir-def/src/item_tree/tests.rs b/crates/hir-def/src/item_tree/tests.rs
index 26f7b41..48da876 100644
--- a/crates/hir-def/src/item_tree/tests.rs
+++ b/crates/hir-def/src/item_tree/tests.rs
@@ -278,7 +278,7 @@
             // AstId: 2
             pub macro m2 { ... }
 
-            // AstId: 3, Span: 0:3@0..5#0, ExpandTo: Items
+            // AstId: 3, SyntaxContext: 0, ExpandTo: Items
             m!(...);
         "#]],
     );
diff --git a/crates/hir-def/src/lib.rs b/crates/hir-def/src/lib.rs
index d63f226..828842d 100644
--- a/crates/hir-def/src/lib.rs
+++ b/crates/hir-def/src/lib.rs
@@ -90,7 +90,7 @@
 use item_tree::ExternBlock;
 use la_arena::Idx;
 use nameres::DefMap;
-use span::{AstIdNode, FileAstId, FileId, Span};
+use span::{AstIdNode, FileAstId, FileId, SyntaxContextId};
 use stdx::impl_from;
 use syntax::{ast, AstNode};
 
@@ -1342,21 +1342,22 @@
         let ast_id = AstId::new(self.file_id, db.ast_id_map(self.file_id).ast_id(self.value));
         let span_map = db.span_map(self.file_id);
         let path = self.value.path().and_then(|path| {
-            path::ModPath::from_src(db, path, &mut |range| {
+            let range = path.syntax().text_range();
+            let mod_path = path::ModPath::from_src(db, path, &mut |range| {
                 span_map.as_ref().span_for_range(range).ctx
-            })
+            })?;
+            let call_site = span_map.span_for_range(range);
+            Some((call_site, mod_path))
         });
 
-        let Some(path) = path else {
+        let Some((call_site, path)) = path else {
             return Ok(ExpandResult::only_err(ExpandError::other("malformed macro invocation")));
         };
 
-        let call_site = span_map.span_for_range(self.value.syntax().text_range());
-
         macro_call_as_call_id_with_eager(
             db,
             &AstIdWithPath::new(ast_id.file_id, ast_id.value, path),
-            call_site,
+            call_site.ctx,
             expands_to,
             krate,
             resolver,
@@ -1381,7 +1382,7 @@
 fn macro_call_as_call_id(
     db: &dyn ExpandDatabase,
     call: &AstIdWithPath<ast::MacroCall>,
-    call_site: Span,
+    call_site: SyntaxContextId,
     expand_to: ExpandTo,
     krate: CrateId,
     resolver: impl Fn(path::ModPath) -> Option<MacroDefId> + Copy,
@@ -1393,7 +1394,7 @@
 fn macro_call_as_call_id_with_eager(
     db: &dyn ExpandDatabase,
     call: &AstIdWithPath<ast::MacroCall>,
-    call_site: Span,
+    call_site: SyntaxContextId,
     expand_to: ExpandTo,
     krate: CrateId,
     resolver: impl FnOnce(path::ModPath) -> Option<MacroDefId>,
@@ -1403,17 +1404,20 @@
         resolver(call.path.clone()).ok_or_else(|| UnresolvedMacro { path: call.path.clone() })?;
 
     let res = match def.kind {
-        MacroDefKind::BuiltInEager(..) => {
-            let macro_call = InFile::new(call.ast_id.file_id, call.ast_id.to_node(db));
-            expand_eager_macro_input(db, krate, macro_call, def, call_site, &|path| {
-                eager_resolver(path).filter(MacroDefId::is_fn_like)
-            })
-        }
+        MacroDefKind::BuiltInEager(..) => expand_eager_macro_input(
+            db,
+            krate,
+            &call.ast_id.to_node(db),
+            call.ast_id,
+            def,
+            call_site,
+            &|path| eager_resolver(path).filter(MacroDefId::is_fn_like),
+        ),
         _ if def.is_fn_like() => ExpandResult {
-            value: Some(def.as_lazy_macro(
+            value: Some(def.make_call(
                 db,
                 krate,
-                MacroCallKind::FnLike { ast_id: call.ast_id, expand_to },
+                MacroCallKind::FnLike { ast_id: call.ast_id, expand_to, eager: None },
                 call_site,
             )),
             err: None,
diff --git a/crates/hir-def/src/macro_expansion_tests/mbe.rs b/crates/hir-def/src/macro_expansion_tests/mbe.rs
index edc8247..965f329 100644
--- a/crates/hir-def/src/macro_expansion_tests/mbe.rs
+++ b/crates/hir-def/src/macro_expansion_tests/mbe.rs
@@ -171,7 +171,7 @@
     }
 
     fn main(foo: ()) {
-        /* error: unresolved macro unresolved */"helloworld!"#0:3@207..323#2#;
+        /* error: unresolved macro unresolved */"helloworld!"#0:3@236..321#0#;
     }
 }
 
diff --git a/crates/hir-def/src/macro_expansion_tests/mbe/matching.rs b/crates/hir-def/src/macro_expansion_tests/mbe/matching.rs
index 63f2110..23d8b02 100644
--- a/crates/hir-def/src/macro_expansion_tests/mbe/matching.rs
+++ b/crates/hir-def/src/macro_expansion_tests/mbe/matching.rs
@@ -33,7 +33,7 @@
 "#,
         expect![[r#"
 macro_rules! m { ($i:literal) => {}; }
-/* error: mismatched delimiters */"#]],
+/* error: expected literal */"#]],
     );
 }
 
diff --git a/crates/hir-def/src/macro_expansion_tests/mbe/tt_conversion.rs b/crates/hir-def/src/macro_expansion_tests/mbe/tt_conversion.rs
index 362c189..fb5797d 100644
--- a/crates/hir-def/src/macro_expansion_tests/mbe/tt_conversion.rs
+++ b/crates/hir-def/src/macro_expansion_tests/mbe/tt_conversion.rs
@@ -98,7 +98,7 @@
 macro_rules! m2 { ($x:ident) => {} }
 
 /* error: macro definition has parse errors */
-/* error: mismatched delimiters */
+/* error: expected ident */
 "#]],
     )
 }
diff --git a/crates/hir-def/src/nameres/attr_resolution.rs b/crates/hir-def/src/nameres/attr_resolution.rs
index 1cadae8..662c80e 100644
--- a/crates/hir-def/src/nameres/attr_resolution.rs
+++ b/crates/hir-def/src/nameres/attr_resolution.rs
@@ -5,7 +5,7 @@
     attrs::{Attr, AttrId, AttrInput},
     MacroCallId, MacroCallKind, MacroDefId,
 };
-use span::Span;
+use span::SyntaxContextId;
 use syntax::{ast, SmolStr};
 use triomphe::Arc;
 
@@ -109,14 +109,14 @@
     let arg = match macro_attr.input.as_deref() {
         Some(AttrInput::TokenTree(tt)) => {
             let mut tt = tt.as_ref().clone();
-            tt.delimiter = tt::Delimiter::invisible_spanned(macro_attr.span);
+            tt.delimiter.kind = tt::DelimiterKind::Invisible;
             Some(tt)
         }
 
         _ => None,
     };
 
-    def.as_lazy_macro(
+    def.make_call(
         db.upcast(),
         krate,
         MacroCallKind::Attr {
@@ -124,7 +124,7 @@
             attr_args: arg.map(Arc::new),
             invoc_attr_index: macro_attr.id,
         },
-        macro_attr.span,
+        macro_attr.ctxt,
     )
 }
 
@@ -133,14 +133,14 @@
     item_attr: &AstIdWithPath<ast::Adt>,
     derive_attr_index: AttrId,
     derive_pos: u32,
-    call_site: Span,
+    call_site: SyntaxContextId,
     krate: CrateId,
     resolver: impl Fn(path::ModPath) -> Option<(MacroId, MacroDefId)>,
 ) -> Result<(MacroId, MacroDefId, MacroCallId), UnresolvedMacro> {
     let (macro_id, def_id) = resolver(item_attr.path.clone())
         .filter(|(_, def_id)| def_id.is_derive())
         .ok_or_else(|| UnresolvedMacro { path: item_attr.path.clone() })?;
-    let call_id = def_id.as_lazy_macro(
+    let call_id = def_id.make_call(
         db.upcast(),
         krate,
         MacroCallKind::Derive {
diff --git a/crates/hir-def/src/nameres/collector.rs b/crates/hir-def/src/nameres/collector.rs
index f9fe6d3..3d02644 100644
--- a/crates/hir-def/src/nameres/collector.rs
+++ b/crates/hir-def/src/nameres/collector.rs
@@ -230,13 +230,13 @@
     FnLike {
         ast_id: AstIdWithPath<ast::MacroCall>,
         expand_to: ExpandTo,
-        call_site: Span,
+        ctxt: SyntaxContextId,
     },
     Derive {
         ast_id: AstIdWithPath<ast::Adt>,
         derive_attr: AttrId,
         derive_pos: usize,
-        call_site: Span,
+        ctxt: SyntaxContextId,
     },
     Attr {
         ast_id: AstIdWithPath<ast::Item>,
@@ -1126,7 +1126,7 @@
             let resolver_def_id = |path| resolver(path).map(|(_, it)| it);
 
             match &directive.kind {
-                MacroDirectiveKind::FnLike { ast_id, expand_to, call_site } => {
+                MacroDirectiveKind::FnLike { ast_id, expand_to, ctxt: call_site } => {
                     let call_id = macro_call_as_call_id(
                         self.db.upcast(),
                         ast_id,
@@ -1146,7 +1146,7 @@
                         return Resolved::Yes;
                     }
                 }
-                MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos, call_site } => {
+                MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos, ctxt: call_site } => {
                     let id = derive_macro_as_call_id(
                         self.db,
                         ast_id,
@@ -1266,7 +1266,7 @@
                                             ast_id,
                                             derive_attr: attr.id,
                                             derive_pos: idx,
-                                            call_site,
+                                            ctxt: call_site.ctx,
                                         },
                                         container: directive.container,
                                     });
@@ -1428,7 +1428,7 @@
 
         for directive in &self.unresolved_macros {
             match &directive.kind {
-                MacroDirectiveKind::FnLike { ast_id, expand_to, call_site } => {
+                MacroDirectiveKind::FnLike { ast_id, expand_to, ctxt: call_site } => {
                     // FIXME: we shouldn't need to re-resolve the macro here just to get the unresolved error!
                     let macro_call_as_call_id = macro_call_as_call_id(
                         self.db.upcast(),
@@ -1451,12 +1451,16 @@
                     if let Err(UnresolvedMacro { path }) = macro_call_as_call_id {
                         self.def_map.diagnostics.push(DefDiagnostic::unresolved_macro_call(
                             directive.module_id,
-                            MacroCallKind::FnLike { ast_id: ast_id.ast_id, expand_to: *expand_to },
+                            MacroCallKind::FnLike {
+                                ast_id: ast_id.ast_id,
+                                expand_to: *expand_to,
+                                eager: None,
+                            },
                             path,
                         ));
                     }
                 }
-                MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos, call_site: _ } => {
+                MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos, ctxt: _ } => {
                     self.def_map.diagnostics.push(DefDiagnostic::unresolved_macro_call(
                         directive.module_id,
                         MacroCallKind::Derive {
@@ -2285,7 +2289,7 @@
 
     fn collect_macro_call(
         &mut self,
-        &MacroCall { ref path, ast_id, expand_to, call_site }: &MacroCall,
+        &MacroCall { ref path, ast_id, expand_to, ctxt }: &MacroCall,
         container: ItemContainerId,
     ) {
         let ast_id = AstIdWithPath::new(self.file_id(), ast_id, ModPath::clone(path));
@@ -2299,7 +2303,7 @@
         if let Ok(res) = macro_call_as_call_id_with_eager(
             db.upcast(),
             &ast_id,
-            call_site,
+            ctxt,
             expand_to,
             self.def_collector.def_map.krate,
             |path| {
@@ -2357,7 +2361,7 @@
         self.def_collector.unresolved_macros.push(MacroDirective {
             module_id: self.module_id,
             depth: self.macro_depth + 1,
-            kind: MacroDirectiveKind::FnLike { ast_id, expand_to, call_site },
+            kind: MacroDirectiveKind::FnLike { ast_id, expand_to, ctxt },
             container,
         });
     }
diff --git a/crates/hir-expand/src/attrs.rs b/crates/hir-expand/src/attrs.rs
index 7793e99..af3ecdc 100644
--- a/crates/hir-expand/src/attrs.rs
+++ b/crates/hir-expand/src/attrs.rs
@@ -7,7 +7,7 @@
 use intern::Interned;
 use mbe::{syntax_node_to_token_tree, DelimiterKind, Punct};
 use smallvec::{smallvec, SmallVec};
-use span::Span;
+use span::{Span, SyntaxContextId};
 use syntax::{ast, match_ast, AstNode, AstToken, SmolStr, SyntaxNode};
 use triomphe::Arc;
 
@@ -53,7 +53,7 @@
                 id,
                 input: Some(Interned::new(AttrInput::Literal(SmolStr::new(doc)))),
                 path: Interned::new(ModPath::from(crate::name!(doc))),
-                span: span_map.span_for_range(comment.syntax().text_range()),
+                ctxt: span_map.span_for_range(comment.syntax().text_range()).ctx,
             }),
         });
         let entries: Arc<[Attr]> = Arc::from_iter(entries);
@@ -173,7 +173,7 @@
     pub id: AttrId,
     pub path: Interned<ModPath>,
     pub input: Option<Interned<AttrInput>>,
-    pub span: Span,
+    pub ctxt: SyntaxContextId,
 }
 
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
@@ -201,10 +201,12 @@
         span_map: SpanMapRef<'_>,
         id: AttrId,
     ) -> Option<Attr> {
-        let path = Interned::new(ModPath::from_src(db, ast.path()?, &mut |range| {
+        let path = ast.path()?;
+        let range = path.syntax().text_range();
+        let path = Interned::new(ModPath::from_src(db, path, &mut |range| {
             span_map.span_for_range(range).ctx
         })?);
-        let span = span_map.span_for_range(ast.syntax().text_range());
+        let span = span_map.span_for_range(range);
         let input = if let Some(ast::Expr::Literal(lit)) = ast.expr() {
             let value = match lit.kind() {
                 ast::LiteralKind::String(string) => string.value()?.into(),
@@ -217,11 +219,11 @@
         } else {
             None
         };
-        Some(Attr { id, path, input, span })
+        Some(Attr { id, path, input, ctxt: span.ctx })
     }
 
     fn from_tt(db: &dyn ExpandDatabase, tt: &[tt::TokenTree], id: AttrId) -> Option<Attr> {
-        let span = tt.first()?.first_span();
+        let ctxt = tt.first()?.first_span().ctx;
         let path_end = tt
             .iter()
             .position(|tt| {
@@ -253,7 +255,7 @@
             }
             _ => None,
         };
-        Some(Attr { id, path, input, span })
+        Some(Attr { id, path, input, ctxt })
     }
 
     pub fn path(&self) -> &ModPath {
diff --git a/crates/hir-expand/src/builtin_attr_macro.rs b/crates/hir-expand/src/builtin_attr_macro.rs
index a0102f3..9ff29b4 100644
--- a/crates/hir-expand/src/builtin_attr_macro.rs
+++ b/crates/hir-expand/src/builtin_attr_macro.rs
@@ -11,7 +11,7 @@
         }
 
         impl BuiltinAttrExpander {
-            pub fn expander(&self) -> fn (&dyn ExpandDatabase, MacroCallId, &tt::Subtree) -> ExpandResult<tt::Subtree>  {
+            pub fn expander(&self) -> fn (&dyn ExpandDatabase, MacroCallId, &tt::Subtree, Span) -> ExpandResult<tt::Subtree>  {
                 match *self {
                     $( BuiltinAttrExpander::$variant => $expand, )*
                 }
@@ -34,8 +34,9 @@
         db: &dyn ExpandDatabase,
         id: MacroCallId,
         tt: &tt::Subtree,
+        span: Span,
     ) -> ExpandResult<tt::Subtree> {
-        self.expander()(db, id, tt)
+        self.expander()(db, id, tt, span)
     }
 
     pub fn is_derive(self) -> bool {
@@ -71,6 +72,7 @@
     _db: &dyn ExpandDatabase,
     _id: MacroCallId,
     tt: &tt::Subtree,
+    _span: Span,
 ) -> ExpandResult<tt::Subtree> {
     ExpandResult::ok(tt.clone())
 }
@@ -100,6 +102,7 @@
     db: &dyn ExpandDatabase,
     id: MacroCallId,
     tt: &tt::Subtree,
+    span: Span,
 ) -> ExpandResult<tt::Subtree> {
     let loc = db.lookup_intern_macro_call(id);
     let derives = match &loc.kind {
@@ -107,17 +110,14 @@
             attr_args
         }
         _ => {
-            return ExpandResult::ok(tt::Subtree::empty(tt::DelimSpan {
-                open: loc.call_site,
-                close: loc.call_site,
-            }))
+            return ExpandResult::ok(tt::Subtree::empty(tt::DelimSpan { open: span, close: span }))
         }
     };
-    pseudo_derive_attr_expansion(tt, derives, loc.call_site)
+    pseudo_derive_attr_expansion(tt, derives, span)
 }
 
 pub fn pseudo_derive_attr_expansion(
-    tt: &tt::Subtree,
+    _: &tt::Subtree,
     args: &tt::Subtree,
     call_site: Span,
 ) -> ExpandResult<tt::Subtree> {
@@ -141,7 +141,7 @@
         token_trees.push(mk_leaf(']'));
     }
     ExpandResult::ok(tt::Subtree {
-        delimiter: tt.delimiter,
+        delimiter: args.delimiter,
         token_trees: token_trees.into_boxed_slice(),
     })
 }
diff --git a/crates/hir-expand/src/builtin_derive_macro.rs b/crates/hir-expand/src/builtin_derive_macro.rs
index 66dec7d..528038a 100644
--- a/crates/hir-expand/src/builtin_derive_macro.rs
+++ b/crates/hir-expand/src/builtin_derive_macro.rs
@@ -50,8 +50,8 @@
         db: &dyn ExpandDatabase,
         id: MacroCallId,
         tt: &tt::Subtree,
+        span: Span,
     ) -> ExpandResult<tt::Subtree> {
-        let span = db.lookup_intern_macro_call(id).call_site;
         let span = span_with_def_site_ctxt(db, span, id);
         self.expander()(span, tt)
     }
diff --git a/crates/hir-expand/src/builtin_fn_macro.rs b/crates/hir-expand/src/builtin_fn_macro.rs
index 0fd0c25..9fb6a0b 100644
--- a/crates/hir-expand/src/builtin_fn_macro.rs
+++ b/crates/hir-expand/src/builtin_fn_macro.rs
@@ -19,14 +19,14 @@
 };
 
 macro_rules! register_builtin {
-    ( LAZY: $(($name:ident, $kind: ident) => $expand:ident),* , EAGER: $(($e_name:ident, $e_kind: ident) => $e_expand:ident),*  ) => {
+    ( $LAZY:ident: $(($name:ident, $kind: ident) => $expand:ident),* , $EAGER:ident: $(($e_name:ident, $e_kind: ident) => $e_expand:ident),*  ) => {
         #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-        pub enum BuiltinFnLikeExpander {
+        pub enum $LAZY {
             $($kind),*
         }
 
         #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-        pub enum EagerExpander {
+        pub enum $EAGER {
             $($e_kind),*
         }
 
@@ -62,8 +62,8 @@
         db: &dyn ExpandDatabase,
         id: MacroCallId,
         tt: &tt::Subtree,
+        span: Span,
     ) -> ExpandResult<tt::Subtree> {
-        let span = db.lookup_intern_macro_call(id).call_site;
         let span = span_with_def_site_ctxt(db, span, id);
         self.expander()(db, id, tt, span)
     }
@@ -75,8 +75,8 @@
         db: &dyn ExpandDatabase,
         id: MacroCallId,
         tt: &tt::Subtree,
+        span: Span,
     ) -> ExpandResult<tt::Subtree> {
-        let span = db.lookup_intern_macro_call(id).call_site;
         let span = span_with_def_site_ctxt(db, span, id);
         self.expander()(db, id, tt, span)
     }
@@ -84,6 +84,17 @@
     pub fn is_include(&self) -> bool {
         matches!(self, EagerExpander::Include)
     }
+
+    pub fn is_include_like(&self) -> bool {
+        matches!(
+            self,
+            EagerExpander::Include | EagerExpander::IncludeStr | EagerExpander::IncludeBytes
+        )
+    }
+
+    pub fn is_env_or_option_env(&self) -> bool {
+        matches!(self, EagerExpander::Env | EagerExpander::OptionEnv)
+    }
 }
 
 pub fn find_builtin_macro(
@@ -93,7 +104,7 @@
 }
 
 register_builtin! {
-    LAZY:
+    BuiltinFnLikeExpander:
     (column, Column) => line_expand,
     (file, File) => file_expand,
     (line, Line) => line_expand,
@@ -114,7 +125,7 @@
     (format_args_nl, FormatArgsNl) => format_args_nl_expand,
     (quote, Quote) => quote_expand,
 
-    EAGER:
+    EagerExpander:
     (compile_error, CompileError) => compile_error_expand,
     (concat, Concat) => concat_expand,
     (concat_idents, ConcatIdents) => concat_idents_expand,
@@ -426,22 +437,25 @@
     }
 }
 
-fn unquote_str(lit: &tt::Literal) -> Option<String> {
+fn unquote_str(lit: &tt::Literal) -> Option<(String, Span)> {
+    let span = lit.span;
     let lit = ast::make::tokens::literal(&lit.to_string());
     let token = ast::String::cast(lit)?;
-    token.value().map(|it| it.into_owned())
+    token.value().map(|it| (it.into_owned(), span))
 }
 
-fn unquote_char(lit: &tt::Literal) -> Option<char> {
+fn unquote_char(lit: &tt::Literal) -> Option<(char, Span)> {
+    let span = lit.span;
     let lit = ast::make::tokens::literal(&lit.to_string());
     let token = ast::Char::cast(lit)?;
-    token.value()
+    token.value().zip(Some(span))
 }
 
-fn unquote_byte_string(lit: &tt::Literal) -> Option<Vec<u8>> {
+fn unquote_byte_string(lit: &tt::Literal) -> Option<(Vec<u8>, Span)> {
+    let span = lit.span;
     let lit = ast::make::tokens::literal(&lit.to_string());
     let token = ast::ByteString::cast(lit)?;
-    token.value().map(|it| it.into_owned())
+    token.value().map(|it| (it.into_owned(), span))
 }
 
 fn compile_error_expand(
@@ -452,7 +466,7 @@
 ) -> ExpandResult<tt::Subtree> {
     let err = match &*tt.token_trees {
         [tt::TokenTree::Leaf(tt::Leaf::Literal(it))] => match unquote_str(it) {
-            Some(unquoted) => ExpandError::other(unquoted.into_boxed_str()),
+            Some((unquoted, _)) => ExpandError::other(unquoted.into_boxed_str()),
             None => ExpandError::other("`compile_error!` argument must be a string"),
         },
         _ => ExpandError::other("`compile_error!` argument must be a string"),
@@ -465,10 +479,16 @@
     _db: &dyn ExpandDatabase,
     _arg_id: MacroCallId,
     tt: &tt::Subtree,
-    span: Span,
+    _: Span,
 ) -> ExpandResult<tt::Subtree> {
     let mut err = None;
     let mut text = String::new();
+    let mut span: Option<Span> = None;
+    let mut record_span = |s: Span| match &mut span {
+        Some(span) if span.anchor == s.anchor => span.range = span.range.cover(s.range),
+        Some(_) => (),
+        None => span = Some(s),
+    };
     for (i, mut t) in tt.token_trees.iter().enumerate() {
         // FIXME: hack on top of a hack: `$e:expr` captures get surrounded in parentheses
         // to ensure the right parsing order, so skip the parentheses here. Ideally we'd
@@ -486,11 +506,14 @@
                 // concat works with string and char literals, so remove any quotes.
                 // It also works with integer, float and boolean literals, so just use the rest
                 // as-is.
-                if let Some(c) = unquote_char(it) {
+                if let Some((c, span)) = unquote_char(it) {
                     text.push(c);
+                    record_span(span);
                 } else {
-                    let component = unquote_str(it).unwrap_or_else(|| it.text.to_string());
+                    let (component, span) =
+                        unquote_str(it).unwrap_or_else(|| (it.text.to_string(), it.span));
                     text.push_str(&component);
+                    record_span(span);
                 }
             }
             // handle boolean literals
@@ -498,6 +521,7 @@
                 if i % 2 == 0 && (id.text == "true" || id.text == "false") =>
             {
                 text.push_str(id.text.as_str());
+                record_span(id.span);
             }
             tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if i % 2 == 1 && punct.char == ',' => (),
             _ => {
@@ -505,6 +529,7 @@
             }
         }
     }
+    let span = span.unwrap_or(tt.delimiter.open);
     ExpandResult { value: quote!(span =>#text), err }
 }
 
@@ -512,18 +537,25 @@
     _db: &dyn ExpandDatabase,
     _arg_id: MacroCallId,
     tt: &tt::Subtree,
-    span: Span,
+    call_site: Span,
 ) -> ExpandResult<tt::Subtree> {
     let mut bytes = Vec::new();
     let mut err = None;
+    let mut span: Option<Span> = None;
+    let mut record_span = |s: Span| match &mut span {
+        Some(span) if span.anchor == s.anchor => span.range = span.range.cover(s.range),
+        Some(_) => (),
+        None => span = Some(s),
+    };
     for (i, t) in tt.token_trees.iter().enumerate() {
         match t {
             tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => {
                 let token = ast::make::tokens::literal(&lit.to_string());
+                record_span(lit.span);
                 match token.kind() {
                     syntax::SyntaxKind::BYTE => bytes.push(token.text().to_owned()),
                     syntax::SyntaxKind::BYTE_STRING => {
-                        let components = unquote_byte_string(lit).unwrap_or_default();
+                        let components = unquote_byte_string(lit).map_or(vec![], |(it, _)| it);
                         components.into_iter().for_each(|it| bytes.push(it.to_string()));
                     }
                     _ => {
@@ -534,7 +566,7 @@
             }
             tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if i % 2 == 1 && punct.char == ',' => (),
             tt::TokenTree::Subtree(tree) if tree.delimiter.kind == tt::DelimiterKind::Bracket => {
-                if let Err(e) = concat_bytes_expand_subtree(tree, &mut bytes) {
+                if let Err(e) = concat_bytes_expand_subtree(tree, &mut bytes, &mut record_span) {
                     err.get_or_insert(e);
                     break;
                 }
@@ -546,17 +578,24 @@
         }
     }
     let value = tt::Subtree {
-        delimiter: tt::Delimiter { open: span, close: span, kind: tt::DelimiterKind::Bracket },
+        delimiter: tt::Delimiter {
+            open: call_site,
+            close: call_site,
+            kind: tt::DelimiterKind::Bracket,
+        },
         token_trees: {
             Itertools::intersperse_with(
                 bytes.into_iter().map(|it| {
-                    tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { text: it.into(), span }))
+                    tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
+                        text: it.into(),
+                        span: span.unwrap_or(call_site),
+                    }))
                 }),
                 || {
                     tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
                         char: ',',
                         spacing: tt::Spacing::Alone,
-                        span,
+                        span: call_site,
                     }))
                 },
             )
@@ -569,13 +608,15 @@
 fn concat_bytes_expand_subtree(
     tree: &tt::Subtree,
     bytes: &mut Vec<String>,
+    mut record_span: impl FnMut(Span),
 ) -> Result<(), ExpandError> {
     for (ti, tt) in tree.token_trees.iter().enumerate() {
         match tt {
-            tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => {
-                let lit = ast::make::tokens::literal(&lit.to_string());
+            tt::TokenTree::Leaf(tt::Leaf::Literal(it)) => {
+                let lit = ast::make::tokens::literal(&it.to_string());
                 match lit.kind() {
                     syntax::SyntaxKind::BYTE | syntax::SyntaxKind::INT_NUMBER => {
+                        record_span(it.span);
                         bytes.push(lit.text().to_owned())
                     }
                     _ => {
@@ -635,7 +676,7 @@
     }
 }
 
-fn parse_string(tt: &tt::Subtree) -> Result<String, ExpandError> {
+fn parse_string(tt: &tt::Subtree) -> Result<(String, Span), ExpandError> {
     tt.token_trees
         .first()
         .and_then(|tt| match tt {
@@ -675,7 +716,7 @@
     arg_id: MacroCallId,
     arg: &tt::Subtree,
 ) -> Result<FileId, ExpandError> {
-    relative_file(db, arg_id, &parse_string(arg)?, false)
+    relative_file(db, arg_id, &parse_string(arg)?.0, false)
 }
 
 fn include_bytes_expand(
@@ -701,7 +742,7 @@
     tt: &tt::Subtree,
     span: Span,
 ) -> ExpandResult<tt::Subtree> {
-    let path = match parse_string(tt) {
+    let (path, span) = match parse_string(tt) {
         Ok(it) => it,
         Err(e) => {
             return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e)
@@ -736,7 +777,7 @@
     tt: &tt::Subtree,
     span: Span,
 ) -> ExpandResult<tt::Subtree> {
-    let key = match parse_string(tt) {
+    let (key, span) = match parse_string(tt) {
         Ok(it) => it,
         Err(e) => {
             return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e)
@@ -766,18 +807,24 @@
     db: &dyn ExpandDatabase,
     arg_id: MacroCallId,
     tt: &tt::Subtree,
-    span: Span,
+    call_site: Span,
 ) -> ExpandResult<tt::Subtree> {
-    let key = match parse_string(tt) {
+    let (key, span) = match parse_string(tt) {
         Ok(it) => it,
         Err(e) => {
-            return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e)
+            return ExpandResult::new(
+                tt::Subtree::empty(DelimSpan { open: call_site, close: call_site }),
+                e,
+            )
         }
     };
-    let dollar_crate = dollar_crate(span);
+    let dollar_crate = dollar_crate(call_site);
     let expanded = match get_env_inner(db, arg_id, &key) {
-        None => quote! {span => #dollar_crate::option::Option::None::<&str> },
-        Some(s) => quote! {span => #dollar_crate::option::Option::Some(#s) },
+        None => quote! {call_site => #dollar_crate::option::Option::None::<&str> },
+        Some(s) => {
+            let s = quote! (span => #s);
+            quote! {call_site => #dollar_crate::option::Option::Some(#s) }
+        }
     };
 
     ExpandResult::ok(expanded)
diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs
index a7469ae..ec68f2f 100644
--- a/crates/hir-expand/src/db.rs
+++ b/crates/hir-expand/src/db.rs
@@ -3,22 +3,19 @@
 use base_db::{salsa, CrateId, FileId, SourceDatabase};
 use either::Either;
 use limit::Limit;
-use mbe::{syntax_node_to_token_tree, ValueResult};
+use mbe::syntax_node_to_token_tree;
 use rustc_hash::FxHashSet;
-use span::{AstIdMap, SyntaxContextData, SyntaxContextId};
-use syntax::{
-    ast::{self, HasAttrs},
-    AstNode, Parse, SyntaxElement, SyntaxError, SyntaxNode, SyntaxToken, T,
-};
+use span::{AstIdMap, Span, SyntaxContextData, SyntaxContextId};
+use syntax::{ast, AstNode, Parse, SyntaxElement, SyntaxError, SyntaxNode, SyntaxToken, T};
 use triomphe::Arc;
 
 use crate::{
-    attrs::collect_attrs,
+    attrs::{collect_attrs, AttrId},
     builtin_attr_macro::pseudo_derive_attr_expansion,
     builtin_fn_macro::EagerExpander,
     cfg_process,
     declarative::DeclarativeMacroExpander,
-    fixup::{self, reverse_fixups, SyntaxFixupUndoInfo},
+    fixup::{self, SyntaxFixupUndoInfo},
     hygiene::{span_with_call_site_ctxt, span_with_def_site_ctxt, span_with_mixed_site_ctxt},
     proc_macro::ProcMacros,
     span_map::{RealSpanMap, SpanMap, SpanMapRef},
@@ -101,10 +98,7 @@
     /// Lowers syntactic macro call to a token tree representation. That's a firewall
     /// query, only typing in the macro call itself changes the returned
     /// subtree.
-    fn macro_arg(
-        &self,
-        id: MacroCallId,
-    ) -> ValueResult<(Arc<tt::Subtree>, SyntaxFixupUndoInfo), Arc<Box<[SyntaxError]>>>;
+    fn macro_arg(&self, id: MacroCallId) -> (Arc<tt::Subtree>, SyntaxFixupUndoInfo, Span);
     /// Fetches the expander for this macro.
     #[salsa::transparent]
     #[salsa::invoke(TokenExpander::macro_expander)]
@@ -121,6 +115,12 @@
     /// non-determinism breaks salsa in a very, very, very bad way.
     /// @edwin0cheng heroically debugged this once! See #4315 for details
     fn expand_proc_macro(&self, call: MacroCallId) -> ExpandResult<Arc<tt::Subtree>>;
+    /// Retrieves the span to be used for a proc-macro expansion's spans.
+    /// This is a firewall query as it requires parsing the file, which we don't want proc-macros to
+    /// directly depend on as that would cause too frequent invalidations, mainly because of the
+    /// parse queries being LRU cached. If they weren't, the invalidations would only happen if the
+    /// user wrote in the file that defines the proc-macro.
+    fn proc_macro_span(&self, fun: AstId<ast::Fn>) -> Span;
     /// Firewall query that returns the errors from the `parse_macro_expansion` query.
     fn parse_macro_expansion_error(
         &self,
@@ -140,20 +140,36 @@
 ) -> Option<(SyntaxNode, SyntaxToken)> {
     let loc = db.lookup_intern_macro_call(actual_macro_call);
 
+    // FIXME: This BOGUS here is dangerous once the proc-macro server can call back into the database!
     let span_map = RealSpanMap::absolute(FileId::BOGUS);
     let span_map = SpanMapRef::RealSpanMap(&span_map);
 
+    let (_, _, span) = db.macro_arg(actual_macro_call);
+
     // Build the subtree and token mapping for the speculative args
     let (mut tt, undo_info) = match loc.kind {
         MacroCallKind::FnLike { .. } => (
-            mbe::syntax_node_to_token_tree(speculative_args, span_map, loc.call_site),
+            mbe::syntax_node_to_token_tree(speculative_args, span_map, span),
             SyntaxFixupUndoInfo::NONE,
         ),
-        MacroCallKind::Derive { .. } | MacroCallKind::Attr { .. } => {
-            let censor = censor_for_macro_input(&loc, speculative_args);
+        MacroCallKind::Attr { .. } if loc.def.is_attribute_derive() => (
+            mbe::syntax_node_to_token_tree(speculative_args, span_map, span),
+            SyntaxFixupUndoInfo::NONE,
+        ),
+        MacroCallKind::Derive { derive_attr_index: index, .. }
+        | MacroCallKind::Attr { invoc_attr_index: index, .. } => {
+            let censor = if let MacroCallKind::Derive { .. } = loc.kind {
+                censor_derive_input(index, &ast::Adt::cast(speculative_args.clone())?)
+            } else {
+                attr_source(index, &ast::Item::cast(speculative_args.clone())?)
+                    .into_iter()
+                    .map(|it| it.syntax().clone().into())
+                    .collect()
+            };
+
             let censor_cfg =
                 cfg_process::process_cfg_attrs(speculative_args, &loc, db).unwrap_or_default();
-            let mut fixups = fixup::fixup_syntax(span_map, speculative_args, loc.call_site);
+            let mut fixups = fixup::fixup_syntax(span_map, speculative_args, span);
             fixups.append.retain(|it, _| match it {
                 syntax::NodeOrToken::Token(_) => true,
                 it => !censor.contains(it) && !censor_cfg.contains(it),
@@ -167,7 +183,7 @@
                     span_map,
                     fixups.append,
                     fixups.remove,
-                    loc.call_site,
+                    span,
                 ),
                 fixups.undo_info,
             )
@@ -189,9 +205,8 @@
             }?;
             match attr.token_tree() {
                 Some(token_tree) => {
-                    let mut tree =
-                        syntax_node_to_token_tree(token_tree.syntax(), span_map, loc.call_site);
-                    tree.delimiter = tt::Delimiter::invisible_spanned(loc.call_site);
+                    let mut tree = syntax_node_to_token_tree(token_tree.syntax(), span_map, span);
+                    tree.delimiter = tt::Delimiter::invisible_spanned(span);
 
                     Some(tree)
                 }
@@ -204,36 +219,36 @@
     // Do the actual expansion, we need to directly expand the proc macro due to the attribute args
     // Otherwise the expand query will fetch the non speculative attribute args and pass those instead.
     let mut speculative_expansion = match loc.def.kind {
-        MacroDefKind::ProcMacro(expander, ..) => {
-            tt.delimiter = tt::Delimiter::invisible_spanned(loc.call_site);
+        MacroDefKind::ProcMacro(expander, _, ast) => {
+            let span = db.proc_macro_span(ast);
+            tt.delimiter = tt::Delimiter::invisible_spanned(span);
             expander.expand(
                 db,
                 loc.def.krate,
                 loc.krate,
                 &tt,
                 attr_arg.as_ref(),
-                span_with_def_site_ctxt(db, loc.def.span, actual_macro_call),
-                span_with_call_site_ctxt(db, loc.def.span, actual_macro_call),
-                span_with_mixed_site_ctxt(db, loc.def.span, actual_macro_call),
+                span_with_def_site_ctxt(db, span, actual_macro_call),
+                span_with_call_site_ctxt(db, span, actual_macro_call),
+                span_with_mixed_site_ctxt(db, span, actual_macro_call),
             )
         }
         MacroDefKind::BuiltInAttr(BuiltinAttrExpander::Derive, _) => {
-            pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?, loc.call_site)
+            pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?, span)
         }
-        MacroDefKind::Declarative(it) => db.decl_macro_expander(loc.krate, it).expand_unhygienic(
-            db,
-            tt,
-            loc.def.krate,
-            loc.call_site,
-        ),
-        MacroDefKind::BuiltIn(it, _) => it.expand(db, actual_macro_call, &tt).map_err(Into::into),
+        MacroDefKind::Declarative(it) => {
+            db.decl_macro_expander(loc.krate, it).expand_unhygienic(db, tt, loc.def.krate, span)
+        }
+        MacroDefKind::BuiltIn(it, _) => {
+            it.expand(db, actual_macro_call, &tt, span).map_err(Into::into)
+        }
         MacroDefKind::BuiltInDerive(it, ..) => {
-            it.expand(db, actual_macro_call, &tt).map_err(Into::into)
+            it.expand(db, actual_macro_call, &tt, span).map_err(Into::into)
         }
         MacroDefKind::BuiltInEager(it, _) => {
-            it.expand(db, actual_macro_call, &tt).map_err(Into::into)
+            it.expand(db, actual_macro_call, &tt, span).map_err(Into::into)
         }
-        MacroDefKind::BuiltInAttr(it, _) => it.expand(db, actual_macro_call, &tt),
+        MacroDefKind::BuiltInAttr(it, _) => it.expand(db, actual_macro_call, &tt, span),
     };
 
     let expand_to = loc.expand_to();
@@ -324,185 +339,161 @@
     }
 }
 
-// FIXME: for derive attributes, this will return separate copies of the same structures!
+// FIXME: for derive attributes, this will return separate copies of the same structures! Though
+// they may differ in spans due to differing call sites...
 fn macro_arg(
     db: &dyn ExpandDatabase,
     id: MacroCallId,
-    // FIXME: consider the following by putting fixup info into eager call info args
-    // ) -> ValueResult<Arc<(tt::Subtree, SyntaxFixupUndoInfo)>, Arc<Box<[SyntaxError]>>> {
-) -> ValueResult<(Arc<tt::Subtree>, SyntaxFixupUndoInfo), Arc<Box<[SyntaxError]>>> {
+) -> (Arc<tt::Subtree>, SyntaxFixupUndoInfo, Span) {
     let loc = db.lookup_intern_macro_call(id);
-    if let Some(EagerCallInfo { arg, .. }) = matches!(loc.def.kind, MacroDefKind::BuiltInEager(..))
-        .then(|| loc.eager.as_deref())
-        .flatten()
+
+    if let MacroCallLoc {
+        def: MacroDefId { kind: MacroDefKind::BuiltInEager(..), .. },
+        kind: MacroCallKind::FnLike { eager: Some(eager), .. },
+        ..
+    } = &loc
     {
-        ValueResult::ok((arg.clone(), SyntaxFixupUndoInfo::NONE))
-    } else {
-        let (parse, map) = parse_with_map(db, loc.kind.file_id());
-        let root = parse.syntax_node();
-
-        let syntax = match loc.kind {
-            MacroCallKind::FnLike { ast_id, .. } => {
-                let dummy_tt = |kind| {
-                    (
-                        Arc::new(tt::Subtree {
-                            delimiter: tt::Delimiter {
-                                open: loc.call_site,
-                                close: loc.call_site,
-                                kind,
-                            },
-                            token_trees: Box::default(),
-                        }),
-                        SyntaxFixupUndoInfo::default(),
-                    )
-                };
-
-                let node = &ast_id.to_ptr(db).to_node(&root);
-                let offset = node.syntax().text_range().start();
-                let Some(tt) = node.token_tree() else {
-                    return ValueResult::new(
-                        dummy_tt(tt::DelimiterKind::Invisible),
-                        Arc::new(Box::new([SyntaxError::new_at_offset(
-                            "missing token tree".to_owned(),
-                            offset,
-                        )])),
-                    );
-                };
-                let first = tt.left_delimiter_token().map(|it| it.kind()).unwrap_or(T!['(']);
-                let last = tt.right_delimiter_token().map(|it| it.kind()).unwrap_or(T![.]);
-
-                let mismatched_delimiters = !matches!(
-                    (first, last),
-                    (T!['('], T![')']) | (T!['['], T![']']) | (T!['{'], T!['}'])
-                );
-                if mismatched_delimiters {
-                    // Don't expand malformed (unbalanced) macro invocations. This is
-                    // less than ideal, but trying to expand unbalanced  macro calls
-                    // sometimes produces pathological, deeply nested code which breaks
-                    // all kinds of things.
-                    //
-                    // So instead, we'll return an empty subtree here
-                    cov_mark::hit!(issue9358_bad_macro_stack_overflow);
-
-                    let kind = match first {
-                        _ if loc.def.is_proc_macro() => tt::DelimiterKind::Invisible,
-                        T!['('] => tt::DelimiterKind::Parenthesis,
-                        T!['['] => tt::DelimiterKind::Bracket,
-                        T!['{'] => tt::DelimiterKind::Brace,
-                        _ => tt::DelimiterKind::Invisible,
-                    };
-                    return ValueResult::new(
-                        dummy_tt(kind),
-                        Arc::new(Box::new([SyntaxError::new_at_offset(
-                            "mismatched delimiters".to_owned(),
-                            offset,
-                        )])),
-                    );
-                }
-                tt.syntax().clone()
-            }
-            MacroCallKind::Derive { ast_id, .. } => {
-                ast_id.to_ptr(db).to_node(&root).syntax().clone()
-            }
-            MacroCallKind::Attr { ast_id, .. } => ast_id.to_ptr(db).to_node(&root).syntax().clone(),
-        };
-        let (mut tt, undo_info) = match loc.kind {
-            MacroCallKind::FnLike { .. } => (
-                mbe::syntax_node_to_token_tree(&syntax, map.as_ref(), loc.call_site),
-                SyntaxFixupUndoInfo::NONE,
-            ),
-            MacroCallKind::Derive { .. } | MacroCallKind::Attr { .. } => {
-                let censor = censor_for_macro_input(&loc, &syntax);
-                let censor_cfg =
-                    cfg_process::process_cfg_attrs(&syntax, &loc, db).unwrap_or_default();
-                let mut fixups = fixup::fixup_syntax(map.as_ref(), &syntax, loc.call_site);
-                fixups.append.retain(|it, _| match it {
-                    syntax::NodeOrToken::Token(_) => true,
-                    it => !censor.contains(it) && !censor_cfg.contains(it),
-                });
-                fixups.remove.extend(censor);
-                fixups.remove.extend(censor_cfg);
-
-                {
-                    let mut tt = mbe::syntax_node_to_token_tree_modified(
-                        &syntax,
-                        map.as_ref(),
-                        fixups.append.clone(),
-                        fixups.remove.clone(),
-                        loc.call_site,
-                    );
-                    reverse_fixups(&mut tt, &fixups.undo_info);
-                }
-                (
-                    mbe::syntax_node_to_token_tree_modified(
-                        &syntax,
-                        map,
-                        fixups.append,
-                        fixups.remove,
-                        loc.call_site,
-                    ),
-                    fixups.undo_info,
-                )
-            }
-        };
-
-        if loc.def.is_proc_macro() {
-            // proc macros expect their inputs without parentheses, MBEs expect it with them included
-            tt.delimiter.kind = tt::DelimiterKind::Invisible;
-        }
-
-        if matches!(loc.def.kind, MacroDefKind::BuiltInEager(..)) {
-            match parse.errors() {
-                errors if errors.is_empty() => ValueResult::ok((Arc::new(tt), undo_info)),
-                errors => ValueResult::new(
-                    (Arc::new(tt), undo_info),
-                    // Box::<[_]>::from(res.errors()), not stable yet
-                    Arc::new(errors.to_vec().into_boxed_slice()),
-                ),
-            }
-        } else {
-            ValueResult::ok((Arc::new(tt), undo_info))
-        }
+        return (eager.arg.clone(), SyntaxFixupUndoInfo::NONE, eager.span);
     }
+
+    let (parse, map) = parse_with_map(db, loc.kind.file_id());
+    let root = parse.syntax_node();
+
+    let (censor, item_node, span) = match loc.kind {
+        MacroCallKind::FnLike { ast_id, .. } => {
+            let node = &ast_id.to_ptr(db).to_node(&root);
+            let path_range = node
+                .path()
+                .map_or_else(|| node.syntax().text_range(), |path| path.syntax().text_range());
+            let span = map.span_for_range(path_range);
+
+            let dummy_tt = |kind| {
+                (
+                    Arc::new(tt::Subtree {
+                        delimiter: tt::Delimiter { open: span, close: span, kind },
+                        token_trees: Box::default(),
+                    }),
+                    SyntaxFixupUndoInfo::default(),
+                    span,
+                )
+            };
+
+            let Some(tt) = node.token_tree() else {
+                return dummy_tt(tt::DelimiterKind::Invisible);
+            };
+            let first = tt.left_delimiter_token().map(|it| it.kind()).unwrap_or(T!['(']);
+            let last = tt.right_delimiter_token().map(|it| it.kind()).unwrap_or(T![.]);
+
+            let mismatched_delimiters = !matches!(
+                (first, last),
+                (T!['('], T![')']) | (T!['['], T![']']) | (T!['{'], T!['}'])
+            );
+            if mismatched_delimiters {
+                // Don't expand malformed (unbalanced) macro invocations. This is
+                // less than ideal, but trying to expand unbalanced macro calls
+                // sometimes produces pathological, deeply nested code which breaks
+                // all kinds of things.
+                //
+                // So instead, we'll return an empty subtree here
+                cov_mark::hit!(issue9358_bad_macro_stack_overflow);
+
+                let kind = match first {
+                    _ if loc.def.is_proc_macro() => tt::DelimiterKind::Invisible,
+                    T!['('] => tt::DelimiterKind::Parenthesis,
+                    T!['['] => tt::DelimiterKind::Bracket,
+                    T!['{'] => tt::DelimiterKind::Brace,
+                    _ => tt::DelimiterKind::Invisible,
+                };
+                return dummy_tt(kind);
+            }
+
+            let mut tt = mbe::syntax_node_to_token_tree(tt.syntax(), map.as_ref(), span);
+            if loc.def.is_proc_macro() {
+                // proc macros expect their inputs without parentheses, MBEs expect it with them included
+                tt.delimiter.kind = tt::DelimiterKind::Invisible;
+            }
+            return (Arc::new(tt), SyntaxFixupUndoInfo::NONE, span);
+        }
+        MacroCallKind::Derive { ast_id, derive_attr_index, .. } => {
+            let node = ast_id.to_ptr(db).to_node(&root);
+            let censor_derive_input = censor_derive_input(derive_attr_index, &node);
+            let item_node = node.into();
+            let attr_source = attr_source(derive_attr_index, &item_node);
+            // FIXME: This is wrong, this should point to the path of the derive attribute
+            let span =
+                map.span_for_range(attr_source.as_ref().and_then(|it| it.path()).map_or_else(
+                    || item_node.syntax().text_range(),
+                    |it| it.syntax().text_range(),
+                ));
+            (censor_derive_input, item_node, span)
+        }
+        MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
+            let node = ast_id.to_ptr(db).to_node(&root);
+            let attr_source = attr_source(invoc_attr_index, &node);
+            let span = map.span_for_range(
+                attr_source
+                    .as_ref()
+                    .and_then(|it| it.path())
+                    .map_or_else(|| node.syntax().text_range(), |it| it.syntax().text_range()),
+            );
+            (attr_source.into_iter().map(|it| it.syntax().clone().into()).collect(), node, span)
+        }
+    };
+
+    let (mut tt, undo_info) = {
+        let syntax = item_node.syntax();
+        let censor_cfg = cfg_process::process_cfg_attrs(syntax, &loc, db).unwrap_or_default();
+        let mut fixups = fixup::fixup_syntax(map.as_ref(), syntax, span);
+        fixups.append.retain(|it, _| match it {
+            syntax::NodeOrToken::Token(_) => true,
+            it => !censor.contains(it) && !censor_cfg.contains(it),
+        });
+        fixups.remove.extend(censor);
+        fixups.remove.extend(censor_cfg);
+
+        (
+            mbe::syntax_node_to_token_tree_modified(
+                syntax,
+                map,
+                fixups.append,
+                fixups.remove,
+                span,
+            ),
+            fixups.undo_info,
+        )
+    };
+
+    if loc.def.is_proc_macro() {
+        // proc macros expect their inputs without parentheses, MBEs expect it with them included
+        tt.delimiter.kind = tt::DelimiterKind::Invisible;
+    }
+
+    (Arc::new(tt), undo_info, span)
 }
 
 // FIXME: Censoring info should be calculated by the caller! Namely by name resolution
-/// Certain macro calls expect some nodes in the input to be preprocessed away, namely:
-/// - derives expect all `#[derive(..)]` invocations up to the currently invoked one to be stripped
-/// - attributes expect the invoking attribute to be stripped
-fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<SyntaxElement> {
+/// Derives expect all `#[derive(..)]` invocations up to (and including) the currently invoked one to be stripped
+fn censor_derive_input(derive_attr_index: AttrId, node: &ast::Adt) -> FxHashSet<SyntaxElement> {
     // FIXME: handle `cfg_attr`
-    (|| {
-        let censor = match loc.kind {
-            MacroCallKind::FnLike { .. } => return None,
-            MacroCallKind::Derive { derive_attr_index, .. } => {
-                cov_mark::hit!(derive_censoring);
-                ast::Item::cast(node.clone())?
-                    .attrs()
-                    .take(derive_attr_index.ast_index() + 1)
-                    // FIXME, this resolution should not be done syntactically
-                    // derive is a proper macro now, no longer builtin
-                    // But we do not have resolution at this stage, this means
-                    // we need to know about all macro calls for the given ast item here
-                    // so we require some kind of mapping...
-                    .filter(|attr| attr.simple_name().as_deref() == Some("derive"))
-                    .map(|it| it.syntax().clone().into())
-                    .collect()
-            }
-            MacroCallKind::Attr { .. } if loc.def.is_attribute_derive() => return None,
-            MacroCallKind::Attr { invoc_attr_index, .. } => {
-                cov_mark::hit!(attribute_macro_attr_censoring);
-                collect_attrs(&ast::Item::cast(node.clone())?)
-                    .nth(invoc_attr_index.ast_index())
-                    .and_then(|x| Either::left(x.1))
-                    .map(|attr| attr.syntax().clone().into())
-                    .into_iter()
-                    .collect()
-            }
-        };
-        Some(censor)
-    })()
-    .unwrap_or_default()
+    cov_mark::hit!(derive_censoring);
+    collect_attrs(node)
+        .take(derive_attr_index.ast_index() + 1)
+        .filter_map(|(_, attr)| Either::left(attr))
+        // FIXME, this resolution should not be done syntactically
+        // derive is a proper macro now, no longer builtin
+        // But we do not have resolution at this stage, this means
+        // we need to know about all macro calls for the given ast item here
+        // so we require some kind of mapping...
+        .filter(|attr| attr.simple_name().as_deref() == Some("derive"))
+        .map(|it| it.syntax().clone().into())
+        .collect()
+}
+
+/// Attributes expect the invoking attribute to be stripped
+fn attr_source(invoc_attr_index: AttrId, node: &ast::Item) -> Option<ast::Attr> {
+    // FIXME: handle `cfg_attr`
+    cov_mark::hit!(attribute_macro_attr_censoring);
+    collect_attrs(node).nth(invoc_attr_index.ast_index()).and_then(|(_, attr)| Either::left(attr))
 }
 
 impl TokenExpander {
@@ -532,74 +523,64 @@
 ) -> ExpandResult<CowArc<tt::Subtree>> {
     let _p = tracing::span!(tracing::Level::INFO, "macro_expand").entered();
 
-    let ExpandResult { value: tt, mut err } = match loc.def.kind {
+    let (ExpandResult { value: tt, err }, span) = match loc.def.kind {
         MacroDefKind::ProcMacro(..) => return db.expand_proc_macro(macro_call_id).map(CowArc::Arc),
         _ => {
-            let ValueResult { value: (macro_arg, undo_info), err } = db.macro_arg(macro_call_id);
-            let format_parse_err = |err: Arc<Box<[SyntaxError]>>| {
-                let mut buf = String::new();
-                for err in &**err {
-                    use std::fmt::Write;
-                    _ = write!(buf, "{}, ", err);
-                }
-                buf.pop();
-                buf.pop();
-                ExpandError::other(buf)
-            };
+            let (macro_arg, undo_info, span) = db.macro_arg(macro_call_id);
 
             let arg = &*macro_arg;
-            let res = match loc.def.kind {
-                MacroDefKind::Declarative(id) => {
-                    db.decl_macro_expander(loc.def.krate, id).expand(db, arg.clone(), macro_call_id)
-                }
-                MacroDefKind::BuiltIn(it, _) => {
-                    it.expand(db, macro_call_id, arg).map_err(Into::into)
-                }
-                // This might look a bit odd, but we do not expand the inputs to eager macros here.
-                // Eager macros inputs are expanded, well, eagerly when we collect the macro calls.
-                // That kind of expansion uses the ast id map of an eager macros input though which goes through
-                // the HirFileId machinery. As eager macro inputs are assigned a macro file id that query
-                // will end up going through here again, whereas we want to just want to inspect the raw input.
-                // As such we just return the input subtree here.
-                MacroDefKind::BuiltInEager(..) if loc.eager.is_none() => {
-                    return ExpandResult {
-                        value: CowArc::Arc(macro_arg.clone()),
-                        err: err.map(format_parse_err),
-                    };
-                }
-                MacroDefKind::BuiltInDerive(it, _) => {
-                    it.expand(db, macro_call_id, arg).map_err(Into::into)
-                }
-                MacroDefKind::BuiltInEager(it, _) => {
-                    it.expand(db, macro_call_id, arg).map_err(Into::into)
-                }
-                MacroDefKind::BuiltInAttr(it, _) => {
-                    let mut res = it.expand(db, macro_call_id, arg);
-                    fixup::reverse_fixups(&mut res.value, &undo_info);
-                    res
-                }
-                _ => unreachable!(),
-            };
-            ExpandResult {
-                value: res.value,
-                // if the arg had parse errors, show them instead of the expansion errors
-                err: err.map(format_parse_err).or(res.err),
-            }
+            let res =
+                match loc.def.kind {
+                    MacroDefKind::Declarative(id) => db
+                        .decl_macro_expander(loc.def.krate, id)
+                        .expand(db, arg.clone(), macro_call_id, span),
+                    MacroDefKind::BuiltIn(it, _) => {
+                        it.expand(db, macro_call_id, arg, span).map_err(Into::into)
+                    }
+                    MacroDefKind::BuiltInDerive(it, _) => {
+                        it.expand(db, macro_call_id, arg, span).map_err(Into::into)
+                    }
+                    MacroDefKind::BuiltInEager(it, _) => {
+                        // This might look a bit odd, but we do not expand the inputs to eager macros here.
+                        // Eager macro inputs are expanded, well, eagerly when we collect the macro calls.
+                        // That kind of expansion uses the ast id map of an eager macro's input though, which goes
+                        // through the HirFileId machinery. As eager macro inputs are assigned a macro file id, that
+                        // query will end up going through here again, whereas we just want to inspect the raw input.
+                        // As such we just return the input subtree here.
+                        let eager = match &loc.kind {
+                            MacroCallKind::FnLike { eager: None, .. } => {
+                                return ExpandResult::ok(CowArc::Arc(macro_arg.clone()));
+                            }
+                            MacroCallKind::FnLike { eager: Some(eager), .. } => Some(&**eager),
+                            _ => None,
+                        };
+
+                        let mut res = it.expand(db, macro_call_id, arg, span).map_err(Into::into);
+
+                        if let Some(EagerCallInfo { error, .. }) = eager {
+                            // FIXME: We should report both errors!
+                            res.err = error.clone().or(res.err);
+                        }
+                        res
+                    }
+                    MacroDefKind::BuiltInAttr(it, _) => {
+                        let mut res = it.expand(db, macro_call_id, arg, span);
+                        fixup::reverse_fixups(&mut res.value, &undo_info);
+                        res
+                    }
+                    _ => unreachable!(),
+                };
+            (ExpandResult { value: res.value, err: res.err }, span)
         }
     };
 
-    if let Some(EagerCallInfo { error, .. }) = loc.eager.as_deref() {
-        // FIXME: We should report both errors!
-        err = error.clone().or(err);
-    }
-
     // Skip checking token tree limit for include! macro call
     if !loc.def.is_include() {
         // Set a hard limit for the expanded tt
         if let Err(value) = check_tt_count(&tt) {
             return value.map(|()| {
                 CowArc::Owned(tt::Subtree {
-                    delimiter: tt::Delimiter::invisible_spanned(loc.call_site),
+                    delimiter: tt::Delimiter::invisible_spanned(span),
                     token_trees: Box::new([]),
                 })
             });
@@ -609,12 +590,23 @@
     ExpandResult { value: CowArc::Owned(tt), err }
 }
 
+fn proc_macro_span(db: &dyn ExpandDatabase, ast: AstId<ast::Fn>) -> Span {
+    let root = db.parse_or_expand(ast.file_id);
+    let ast_id_map = &db.ast_id_map(ast.file_id);
+    let span_map = &db.span_map(ast.file_id);
+
+    let node = ast_id_map.get(ast.value).to_node(&root);
+    let range = ast::HasName::name(&node)
+        .map_or_else(|| node.syntax().text_range(), |name| name.syntax().text_range());
+    span_map.span_for_range(range)
+}
+
 fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> {
     let loc = db.lookup_intern_macro_call(id);
-    let (macro_arg, undo_info) = db.macro_arg(id).value;
+    let (macro_arg, undo_info, span) = db.macro_arg(id);
 
-    let expander = match loc.def.kind {
-        MacroDefKind::ProcMacro(expander, ..) => expander,
+    let (expander, ast) = match loc.def.kind {
+        MacroDefKind::ProcMacro(expander, _, ast) => (expander, ast),
         _ => unreachable!(),
     };
 
@@ -623,22 +615,25 @@
         _ => None,
     };
 
-    let ExpandResult { value: mut tt, err } = expander.expand(
-        db,
-        loc.def.krate,
-        loc.krate,
-        &macro_arg,
-        attr_arg,
-        span_with_def_site_ctxt(db, loc.def.span, id),
-        span_with_call_site_ctxt(db, loc.def.span, id),
-        span_with_mixed_site_ctxt(db, loc.def.span, id),
-    );
+    let ExpandResult { value: mut tt, err } = {
+        let span = db.proc_macro_span(ast);
+        expander.expand(
+            db,
+            loc.def.krate,
+            loc.krate,
+            &macro_arg,
+            attr_arg,
+            span_with_def_site_ctxt(db, span, id),
+            span_with_call_site_ctxt(db, span, id),
+            span_with_mixed_site_ctxt(db, span, id),
+        )
+    };
 
     // Set a hard limit for the expanded tt
     if let Err(value) = check_tt_count(&tt) {
         return value.map(|()| {
             Arc::new(tt::Subtree {
-                delimiter: tt::Delimiter::invisible_spanned(loc.call_site),
+                delimiter: tt::Delimiter::invisible_spanned(span),
                 token_trees: Box::new([]),
             })
         });
diff --git a/crates/hir-expand/src/declarative.rs b/crates/hir-expand/src/declarative.rs
index 6874336..33643c0 100644
--- a/crates/hir-expand/src/declarative.rs
+++ b/crates/hir-expand/src/declarative.rs
@@ -29,6 +29,7 @@
         db: &dyn ExpandDatabase,
         tt: tt::Subtree,
         call_id: MacroCallId,
+        span: Span,
     ) -> ExpandResult<tt::Subtree> {
         let loc = db.lookup_intern_macro_call(call_id);
         let toolchain = db.toolchain(loc.def.krate);
@@ -45,7 +46,7 @@
         });
         match self.mac.err() {
             Some(_) => ExpandResult::new(
-                tt::Subtree::empty(tt::DelimSpan { open: loc.call_site, close: loc.call_site }),
+                tt::Subtree::empty(tt::DelimSpan { open: span, close: span }),
                 ExpandError::MacroDefinition,
             ),
             None => self
@@ -54,7 +55,7 @@
                     &tt,
                     |s| s.ctx = apply_mark(db, s.ctx, call_id, self.transparency),
                     new_meta_vars,
-                    loc.call_site,
+                    span,
                 )
                 .map_err(Into::into),
         }
diff --git a/crates/hir-expand/src/eager.rs b/crates/hir-expand/src/eager.rs
index 5337a5b..8b147c8 100644
--- a/crates/hir-expand/src/eager.rs
+++ b/crates/hir-expand/src/eager.rs
@@ -19,7 +19,7 @@
 //!
 //! See the full discussion : <https://rust-lang.zulipchat.com/#narrow/stream/131828-t-compiler/topic/Eager.20expansion.20of.20built-in.20macros>
 use base_db::CrateId;
-use span::Span;
+use span::SyntaxContextId;
 use syntax::{ted, Parse, SyntaxElement, SyntaxNode, TextSize, WalkEvent};
 use triomphe::Arc;
 
@@ -27,22 +27,20 @@
     ast::{self, AstNode},
     db::ExpandDatabase,
     mod_path::ModPath,
-    EagerCallInfo, ExpandError, ExpandResult, ExpandTo, ExpansionSpanMap, InFile, Intern,
+    AstId, EagerCallInfo, ExpandError, ExpandResult, ExpandTo, ExpansionSpanMap, InFile, Intern,
     MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind,
 };
 
 pub fn expand_eager_macro_input(
     db: &dyn ExpandDatabase,
     krate: CrateId,
-    macro_call: InFile<ast::MacroCall>,
+    macro_call: &ast::MacroCall,
+    ast_id: AstId<ast::MacroCall>,
     def: MacroDefId,
-    call_site: Span,
+    call_site: SyntaxContextId,
     resolver: &dyn Fn(ModPath) -> Option<MacroDefId>,
 ) -> ExpandResult<Option<MacroCallId>> {
-    let ast_map = db.ast_id_map(macro_call.file_id);
-    // the expansion which the ast id map is built upon has no whitespace, so the offsets are wrong as macro_call is from the token tree that has whitespace!
-    let call_id = InFile::new(macro_call.file_id, ast_map.ast_id(&macro_call.value));
-    let expand_to = ExpandTo::from_call_site(&macro_call.value);
+    let expand_to = ExpandTo::from_call_site(macro_call);
 
     // Note:
     // When `lazy_expand` is called, its *parent* file must already exist.
@@ -51,11 +49,11 @@
     let arg_id = MacroCallLoc {
         def,
         krate,
-        eager: None,
-        kind: MacroCallKind::FnLike { ast_id: call_id, expand_to: ExpandTo::Expr },
-        call_site,
+        kind: MacroCallKind::FnLike { ast_id, expand_to: ExpandTo::Expr, eager: None },
+        ctxt: call_site,
     }
     .intern(db);
+    let (_, _, span) = db.macro_arg(arg_id);
     let ExpandResult { value: (arg_exp, arg_exp_map), err: parse_err } =
         db.parse_macro_expansion(arg_id.as_macro_file());
 
@@ -82,16 +80,24 @@
         return ExpandResult { value: None, err };
     };
 
-    let mut subtree = mbe::syntax_node_to_token_tree(&expanded_eager_input, arg_map, call_site);
+    let mut subtree = mbe::syntax_node_to_token_tree(&expanded_eager_input, arg_map, span);
 
     subtree.delimiter.kind = crate::tt::DelimiterKind::Invisible;
 
     let loc = MacroCallLoc {
         def,
         krate,
-        eager: Some(Arc::new(EagerCallInfo { arg: Arc::new(subtree), arg_id, error: err.clone() })),
-        kind: MacroCallKind::FnLike { ast_id: call_id, expand_to },
-        call_site,
+        kind: MacroCallKind::FnLike {
+            ast_id,
+            expand_to,
+            eager: Some(Arc::new(EagerCallInfo {
+                arg: Arc::new(subtree),
+                arg_id,
+                error: err.clone(),
+                span,
+            })),
+        },
+        ctxt: call_site,
     };
 
     ExpandResult { value: Some(loc.intern(db)), err }
@@ -100,15 +106,18 @@
 fn lazy_expand(
     db: &dyn ExpandDatabase,
     def: &MacroDefId,
-    macro_call: InFile<ast::MacroCall>,
+    macro_call: &ast::MacroCall,
+    ast_id: AstId<ast::MacroCall>,
     krate: CrateId,
-    call_site: Span,
+    call_site: SyntaxContextId,
 ) -> ExpandResult<(InFile<Parse<SyntaxNode>>, Arc<ExpansionSpanMap>)> {
-    let ast_id = db.ast_id_map(macro_call.file_id).ast_id(&macro_call.value);
-
-    let expand_to = ExpandTo::from_call_site(&macro_call.value);
-    let ast_id = macro_call.with_value(ast_id);
-    let id = def.as_lazy_macro(db, krate, MacroCallKind::FnLike { ast_id, expand_to }, call_site);
+    let expand_to = ExpandTo::from_call_site(macro_call);
+    let id = def.make_call(
+        db,
+        krate,
+        MacroCallKind::FnLike { ast_id, expand_to, eager: None },
+        call_site,
+    );
     let macro_file = id.as_macro_file();
 
     db.parse_macro_expansion(macro_file)
@@ -122,7 +131,7 @@
     mut offset: TextSize,
     curr: InFile<SyntaxNode>,
     krate: CrateId,
-    call_site: Span,
+    call_site: SyntaxContextId,
     macro_resolver: &dyn Fn(ModPath) -> Option<MacroDefId>,
 ) -> ExpandResult<Option<(SyntaxNode, TextSize)>> {
     let original = curr.value.clone_for_update();
@@ -172,12 +181,14 @@
                 continue;
             }
         };
+        let ast_id = db.ast_id_map(curr.file_id).ast_id(&call);
         let ExpandResult { value, err } = match def.kind {
             MacroDefKind::BuiltInEager(..) => {
                 let ExpandResult { value, err } = expand_eager_macro_input(
                     db,
                     krate,
-                    curr.with_value(call.clone()),
+                    &call,
+                    curr.with_value(ast_id),
                     def,
                     call_site,
                     macro_resolver,
@@ -207,7 +218,7 @@
             | MacroDefKind::BuiltInDerive(..)
             | MacroDefKind::ProcMacro(..) => {
                 let ExpandResult { value: (parse, tm), err } =
-                    lazy_expand(db, &def, curr.with_value(call.clone()), krate, call_site);
+                    lazy_expand(db, &def, &call, curr.with_value(ast_id), krate, call_site);
 
                 // replace macro inside
                 let ExpandResult { value, err: error } = eager_macro_recur(
diff --git a/crates/hir-expand/src/fixup.rs b/crates/hir-expand/src/fixup.rs
index b44feb5..959595a 100644
--- a/crates/hir-expand/src/fixup.rs
+++ b/crates/hir-expand/src/fixup.rs
@@ -3,7 +3,7 @@
 
 use rustc_hash::{FxHashMap, FxHashSet};
 use smallvec::SmallVec;
-use span::{ErasedFileAstId, Span, SpanAnchor, SpanData, FIXUP_ERASED_FILE_AST_ID_MARKER};
+use span::{ErasedFileAstId, Span, SpanAnchor, FIXUP_ERASED_FILE_AST_ID_MARKER};
 use stdx::never;
 use syntax::{
     ast::{self, AstNode, HasLoopBody},
@@ -57,7 +57,7 @@
     let dummy_range = FIXUP_DUMMY_RANGE;
     let fake_span = |range| {
         let span = span_map.span_for_range(range);
-        SpanData {
+        Span {
             range: dummy_range,
             anchor: SpanAnchor { ast_id: FIXUP_DUMMY_AST_ID, ..span.anchor },
             ctx: span.ctx,
@@ -76,7 +76,7 @@
             let span = span_map.span_for_range(node_range);
             let replacement = Leaf::Ident(Ident {
                 text: "__ra_fixup".into(),
-                span: SpanData {
+                span: Span {
                     range: TextRange::new(TextSize::new(idx), FIXUP_DUMMY_RANGE_END),
                     anchor: SpanAnchor { ast_id: FIXUP_DUMMY_AST_ID, ..span.anchor },
                     ctx: span.ctx,
@@ -305,8 +305,8 @@
         tt.delimiter.close.anchor.ast_id == FIXUP_DUMMY_AST_ID
             || tt.delimiter.open.anchor.ast_id == FIXUP_DUMMY_AST_ID
     ) {
-        tt.delimiter.close = SpanData::DUMMY;
-        tt.delimiter.open = SpanData::DUMMY;
+        tt.delimiter.close = Span::DUMMY;
+        tt.delimiter.open = Span::DUMMY;
     }
     reverse_fixups_(tt, undo_info);
 }
diff --git a/crates/hir-expand/src/hygiene.rs b/crates/hir-expand/src/hygiene.rs
index ac2bab2..097e760 100644
--- a/crates/hir-expand/src/hygiene.rs
+++ b/crates/hir-expand/src/hygiene.rs
@@ -65,7 +65,7 @@
         return apply_mark_internal(db, ctxt, call_id, transparency);
     }
 
-    let call_site_ctxt = db.lookup_intern_macro_call(call_id).call_site.ctx;
+    let call_site_ctxt = db.lookup_intern_macro_call(call_id).ctxt;
     let mut call_site_ctxt = if transparency == Transparency::SemiTransparent {
         call_site_ctxt.normalize_to_macros_2_0(db)
     } else {
@@ -205,11 +205,10 @@
         let id = e.key;
         let expn_data = e.value.as_ref().unwrap();
         s.push_str(&format!(
-            "\n{:?}: parent: {:?}, call_site_ctxt: {:?}, def_site_ctxt: {:?}, kind: {:?}",
+            "\n{:?}: parent: {:?}, call_site_ctxt: {:?}, kind: {:?}",
             id,
             expn_data.kind.file_id(),
-            expn_data.call_site,
-            SyntaxContextId::ROOT, // FIXME expn_data.def_site,
+            expn_data.ctxt,
             expn_data.kind.descr(),
         ));
     }
diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs
index 924f0da..5d4f7dc 100644
--- a/crates/hir-expand/src/lib.rs
+++ b/crates/hir-expand/src/lib.rs
@@ -170,13 +170,8 @@
 pub struct MacroCallLoc {
     pub def: MacroDefId,
     pub krate: CrateId,
-    /// Some if this is a macro call for an eager macro. Note that this is `None`
-    /// for the eager input macro file.
-    // FIXME: This is being interned, subtrees can vary quickly differ just slightly causing
-    // leakage problems here
-    eager: Option<Arc<EagerCallInfo>>,
     pub kind: MacroCallKind,
-    pub call_site: Span,
+    pub ctxt: SyntaxContextId,
 }
 impl_intern_value_trivial!(MacroCallLoc);
 
@@ -187,7 +182,6 @@
     pub kind: MacroDefKind,
     pub local_inner: bool,
     pub allow_internal_unsafe: bool,
-    pub span: Span,
 }
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -207,6 +201,8 @@
     /// Call id of the eager macro's input file (this is the macro file for its fully expanded input).
     arg_id: MacroCallId,
     error: Option<ExpandError>,
+    /// Span of the eager macro's input, as computed by `macro_arg` for the eager input file
+    /// (see `expand_eager_macro_input`); NOTE(review): confirm intended semantics — was `TODO: Doc`.
+    span: Span,
 }
 
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
@@ -214,6 +210,11 @@
     FnLike {
         ast_id: AstId<ast::MacroCall>,
         expand_to: ExpandTo,
+        /// Some if this is a macro call for an eager macro. Note that this is `None`
+        /// for the eager input macro file.
+        // FIXME: This is being interned; subtrees can quickly differ just slightly, causing
+        // leakage problems here
+        eager: Option<Arc<EagerCallInfo>>,
     },
     Derive {
         ast_id: AstId<ast::Adt>,
@@ -275,7 +276,7 @@
                 HirFileIdRepr::MacroFile(file) => {
                     let loc = db.lookup_intern_macro_call(file.macro_call_id);
                     if loc.def.is_include() {
-                        if let Some(eager) = &loc.eager {
+                        if let MacroCallKind::FnLike { eager: Some(eager), .. } = &loc.kind {
                             if let Ok(it) = builtin_fn_macro::include_input_to_file_id(
                                 db,
                                 file.macro_call_id,
@@ -322,6 +323,9 @@
 }
 
 pub trait MacroFileIdExt {
+    fn is_env_or_option_env(&self, db: &dyn ExpandDatabase) -> bool;
+    fn is_include_like_macro(&self, db: &dyn ExpandDatabase) -> bool;
+    fn eager_arg(&self, db: &dyn ExpandDatabase) -> Option<MacroCallId>;
     fn expansion_level(self, db: &dyn ExpandDatabase) -> u32;
     /// If this is a macro call, returns the syntax node of the call.
     fn call_node(self, db: &dyn ExpandDatabase) -> InFile<SyntaxNode>;
@@ -388,31 +392,47 @@
         db.lookup_intern_macro_call(self.macro_call_id).def.is_include()
     }
 
+    fn is_include_like_macro(&self, db: &dyn ExpandDatabase) -> bool {
+        db.lookup_intern_macro_call(self.macro_call_id).def.is_include_like()
+    }
+
+    fn is_env_or_option_env(&self, db: &dyn ExpandDatabase) -> bool {
+        db.lookup_intern_macro_call(self.macro_call_id).def.is_env_or_option_env()
+    }
+
     fn is_eager(&self, db: &dyn ExpandDatabase) -> bool {
-        let loc: MacroCallLoc = db.lookup_intern_macro_call(self.macro_call_id);
+        let loc = db.lookup_intern_macro_call(self.macro_call_id);
         matches!(loc.def.kind, MacroDefKind::BuiltInEager(..))
     }
 
+    fn eager_arg(&self, db: &dyn ExpandDatabase) -> Option<MacroCallId> {
+        let loc = db.lookup_intern_macro_call(self.macro_call_id);
+        match &loc.kind {
+            MacroCallKind::FnLike { eager, .. } => eager.as_ref().map(|it| it.arg_id),
+            _ => None,
+        }
+    }
+
     fn is_attr_macro(&self, db: &dyn ExpandDatabase) -> bool {
-        let loc: MacroCallLoc = db.lookup_intern_macro_call(self.macro_call_id);
+        let loc = db.lookup_intern_macro_call(self.macro_call_id);
         matches!(loc.kind, MacroCallKind::Attr { .. })
     }
 
     fn is_derive_attr_pseudo_expansion(&self, db: &dyn ExpandDatabase) -> bool {
-        let loc: MacroCallLoc = db.lookup_intern_macro_call(self.macro_call_id);
+        let loc = db.lookup_intern_macro_call(self.macro_call_id);
         loc.def.is_attribute_derive()
     }
 }
 
 impl MacroDefId {
-    pub fn as_lazy_macro(
+    pub fn make_call(
         self,
         db: &dyn ExpandDatabase,
         krate: CrateId,
         kind: MacroCallKind,
-        call_site: Span,
+        ctxt: SyntaxContextId,
     ) -> MacroCallId {
-        MacroCallLoc { def: self, krate, eager: None, kind, call_site }.intern(db)
+        MacroCallLoc { def: self, krate, kind, ctxt }.intern(db)
     }
 
     pub fn definition_range(&self, db: &dyn ExpandDatabase) -> InFile<TextRange> {
@@ -477,6 +497,14 @@
     pub fn is_include(&self) -> bool {
         matches!(self.kind, MacroDefKind::BuiltInEager(expander, ..) if expander.is_include())
     }
+
+    pub fn is_include_like(&self) -> bool {
+        matches!(self.kind, MacroDefKind::BuiltInEager(expander, ..) if expander.is_include_like())
+    }
+
+    pub fn is_env_or_option_env(&self) -> bool {
+        matches!(self.kind, MacroDefKind::BuiltInEager(expander, ..) if expander.is_env_or_option_env())
+    }
 }
 
 impl MacroCallLoc {
@@ -534,7 +562,7 @@
         macro_call_id: MacroCallId,
     ) -> Option<FileId> {
         if self.def.is_include() {
-            if let Some(eager) = &self.eager {
+            if let MacroCallKind::FnLike { eager: Some(eager), .. } = &self.kind {
                 if let Ok(it) =
                     builtin_fn_macro::include_input_to_file_id(db, macro_call_id, &eager.arg)
                 {
@@ -658,7 +686,7 @@
 /// ExpansionInfo mainly describes how to map text range between src and expanded macro
 // FIXME: can be expensive to create, we should check the use sites and maybe replace them with
 // simpler function calls if the map is only used once
-#[derive(Debug, Clone, PartialEq, Eq)]
+#[derive(Clone, Debug, PartialEq, Eq)]
 pub struct ExpansionInfo {
     pub expanded: InMacroFile<SyntaxNode>,
     /// The argument TokenTree or item for attributes
@@ -688,6 +716,22 @@
     /// Maps the passed in file range down into a macro expansion if it is the input to a macro call.
     ///
     /// Note this does a linear search through the entire backing vector of the spanmap.
+    pub fn map_range_down_exact(
+        &self,
+        span: Span,
+    ) -> Option<InMacroFile<impl Iterator<Item = SyntaxToken> + '_>> {
+        let tokens = self
+            .exp_map
+            .ranges_with_span_exact(span)
+            .flat_map(move |range| self.expanded.value.covering_element(range).into_token());
+
+        Some(InMacroFile::new(self.expanded.file_id, tokens))
+    }
+
+    /// Maps the passed in file range down into a macro expansion if it is the input to a macro call.
+    /// Unlike [`map_range_down_exact`], this will consider spans that contain the given span.
+    ///
+    /// Note this does a linear search through the entire backing vector of the spanmap.
     pub fn map_range_down(
         &self,
         span: Span,
@@ -744,7 +788,7 @@
                 InFile::new(
                     self.arg.file_id,
                     arg_map
-                        .ranges_with_span(span)
+                        .ranges_with_span_exact(span)
                         .filter(|range| range.intersect(arg_range).is_some())
                         .collect(),
                 )
@@ -762,7 +806,7 @@
         let (parse, exp_map) = db.parse_macro_expansion(macro_file).value;
         let expanded = InMacroFile { file_id: macro_file, value: parse.syntax_node() };
 
-        let (macro_arg, _) = db.macro_arg(macro_file.macro_call_id).value;
+        let (macro_arg, _, _) = db.macro_arg(macro_file.macro_call_id);
 
         let def = loc.def.ast_id().left().and_then(|id| {
             let def_tt = match id.to_node(db) {
diff --git a/crates/hir-expand/src/quote.rs b/crates/hir-expand/src/quote.rs
index c1930c9..a31a111 100644
--- a/crates/hir-expand/src/quote.rs
+++ b/crates/hir-expand/src/quote.rs
@@ -266,10 +266,11 @@
 
         let quoted = quote!(DUMMY =>#a);
         assert_eq!(quoted.to_string(), "hello");
-        let t = format!("{quoted:?}");
+        let t = format!("{quoted:#?}");
         expect![[r#"
-            SUBTREE $$ SpanData { range: 0..0, anchor: SpanAnchor(FileId(937550), 0), ctx: SyntaxContextId(0) } SpanData { range: 0..0, anchor: SpanAnchor(FileId(937550), 0), ctx: SyntaxContextId(0) }
-              IDENT   hello SpanData { range: 0..0, anchor: SpanAnchor(FileId(937550), 0), ctx: SyntaxContextId(0) }"#]].assert_eq(&t);
+            SUBTREE $$ 937550:0@0..0#0 937550:0@0..0#0
+              IDENT   hello 937550:0@0..0#0"#]]
+        .assert_eq(&t);
     }
 
     #[test]
diff --git a/crates/hir-expand/src/span_map.rs b/crates/hir-expand/src/span_map.rs
index 29fec16..eae2c8f 100644
--- a/crates/hir-expand/src/span_map.rs
+++ b/crates/hir-expand/src/span_map.rs
@@ -1,11 +1,13 @@
 //! Span maps for real files and macro expansions.
+
 use span::{FileId, HirFileId, HirFileIdRepr, MacroFileId, Span, SyntaxContextId};
-use syntax::{AstNode, TextRange};
+use stdx::TupleExt;
+use syntax::{ast, AstNode, TextRange};
 use triomphe::Arc;
 
 pub use span::RealSpanMap;
 
-use crate::db::ExpandDatabase;
+use crate::{attrs::collect_attrs, db::ExpandDatabase};
 
 pub type ExpansionSpanMap = span::SpanMap<SyntaxContextId>;
 
@@ -82,13 +84,54 @@
     let mut pairs = vec![(syntax::TextSize::new(0), span::ROOT_ERASED_FILE_AST_ID)];
     let ast_id_map = db.ast_id_map(file_id.into());
     let tree = db.parse(file_id).tree();
-    // FIXME: Descend into modules and other item containing items that are not annotated with attributes
-    // and allocate pairs for those as well. This gives us finer grained span anchors resulting in
-    // better incrementality
-    pairs.extend(
-        tree.items()
-            .map(|item| (item.syntax().text_range().start(), ast_id_map.ast_id(&item).erase())),
-    );
+    // This is an incrementality layer. Basically we can't use absolute ranges for our spans as that
+    // would mean we'd invalidate everything whenever we type. So instead we make the text ranges
+    // relative to some AstIds reducing the risk of invalidation as typing somewhere no longer
+    // affects all following spans in the file.
+    // There is some stuff to bear in mind here though, for one, the more "anchors" we create, the
+    // easier it gets to invalidate things again as spans are as stable as their anchor's ID.
+    // The other problem is proc-macros. Proc-macros have a `Span::join` api that allows them
+    // to join two spans that come from the same file. rust-analyzer's proc-macro server
+    // can only join two spans if they belong to the same anchor though, as the spans are relative
+    // to that anchor. To do cross-anchor joining we'd need access to the ast id map to resolve
+    // them again, something we might get access to in the future. But even then, proc-macros doing
+    // this kind of joining makes them as stable as the AstIdMap (which is basically changing on
+    // every input of the file)…
+
+    let item_to_entry =
+        |item: ast::Item| (item.syntax().text_range().start(), ast_id_map.ast_id(&item).erase());
+    // Top level items make for great anchors as they are the most stable and a decent boundary
+    pairs.extend(tree.items().map(item_to_entry));
+    // Unfortunately, assoc items are very common in Rust, so descend into those as well and make
+    // them anchors too, but only if they have no attributes attached, as those might be proc-macros
+    // and using different anchors inside of them will prevent spans from being joinable.
+    tree.items().for_each(|item| match &item {
+        ast::Item::ExternBlock(it)
+            if !collect_attrs(it).map(TupleExt::tail).any(|it| it.is_left()) =>
+        {
+            if let Some(extern_item_list) = it.extern_item_list() {
+                pairs.extend(
+                    extern_item_list.extern_items().map(ast::Item::from).map(item_to_entry),
+                );
+            }
+        }
+        ast::Item::Impl(it) if !collect_attrs(it).map(TupleExt::tail).any(|it| it.is_left()) => {
+            if let Some(assoc_item_list) = it.assoc_item_list() {
+                pairs.extend(assoc_item_list.assoc_items().map(ast::Item::from).map(item_to_entry));
+            }
+        }
+        ast::Item::Module(it) if !collect_attrs(it).map(TupleExt::tail).any(|it| it.is_left()) => {
+            if let Some(item_list) = it.item_list() {
+                pairs.extend(item_list.items().map(item_to_entry));
+            }
+        }
+        ast::Item::Trait(it) if !collect_attrs(it).map(TupleExt::tail).any(|it| it.is_left()) => {
+            if let Some(assoc_item_list) = it.assoc_item_list() {
+                pairs.extend(assoc_item_list.assoc_items().map(ast::Item::from).map(item_to_entry));
+            }
+        }
+        _ => (),
+    });
 
     Arc::new(RealSpanMap::from_file(
         file_id,
diff --git a/crates/hir-ty/src/method_resolution.rs b/crates/hir-ty/src/method_resolution.rs
index a679a11..0885d92 100644
--- a/crates/hir-ty/src/method_resolution.rs
+++ b/crates/hir-ty/src/method_resolution.rs
@@ -966,7 +966,7 @@
             // the methods by autoderef order of *receiver types*, not *self
             // types*.
 
-            let mut table = InferenceTable::new(db, env.clone());
+            let mut table = InferenceTable::new(db, env);
             let ty = table.instantiate_canonical(ty.clone());
             let deref_chain = autoderef_method_receiver(&mut table, ty);
 
@@ -1044,7 +1044,7 @@
     let ref_muted = Canonical {
         value: TyKind::Ref(Mutability::Mut, static_lifetime(), receiver_ty.value.clone())
             .intern(Interner),
-        binders: receiver_ty.binders.clone(),
+        binders: receiver_ty.binders,
     };
 
     iterate_method_candidates_by_receiver(ref_muted, first_adjustment.with_autoref(Mutability::Mut))
@@ -1060,7 +1060,7 @@
     name: Option<&Name>,
     mut callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId, bool) -> ControlFlow<()>,
 ) -> ControlFlow<()> {
-    let receiver_ty = table.instantiate_canonical(receiver_ty.clone());
+    let receiver_ty = table.instantiate_canonical(receiver_ty);
     // We're looking for methods with *receiver* type receiver_ty. These could
     // be found in any of the derefs of receiver_ty, so we have to go through
     // that, including raw derefs.
@@ -1456,7 +1456,7 @@
                 if let Some(receiver_ty) = receiver_ty {
                     check_that!(data.has_self_param());
 
-                    let fn_subst = TyBuilder::subst_for_def(db, fn_id, Some(impl_subst.clone()))
+                    let fn_subst = TyBuilder::subst_for_def(db, fn_id, Some(impl_subst))
                         .fill_with_inference_vars(table)
                         .build();
 
diff --git a/crates/hir-ty/src/tests.rs b/crates/hir-ty/src/tests.rs
index d699067..2a46bec 100644
--- a/crates/hir-ty/src/tests.rs
+++ b/crates/hir-ty/src/tests.rs
@@ -298,7 +298,7 @@
             if let Some(syntax_ptr) = body_source_map.self_param_syntax() {
                 let root = db.parse_or_expand(syntax_ptr.file_id);
                 let node = syntax_ptr.map(|ptr| ptr.to_node(&root).syntax().clone());
-                types.push((node.clone(), ty));
+                types.push((node, ty));
             }
         }
 
diff --git a/crates/hir/src/display.rs b/crates/hir/src/display.rs
index cdc0db8..c5d44c1 100644
--- a/crates/hir/src/display.rs
+++ b/crates/hir/src/display.rs
@@ -159,6 +159,7 @@
 impl HirDisplay for Struct {
     fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
         let module_id = self.module(f.db).id;
+        // FIXME: Render repr if it's set explicitly?
         write_visibility(module_id, self.visibility(f.db), f)?;
         f.write_str("struct ")?;
         write!(f, "{}", self.name(f.db).display(f.db.upcast()))?;
@@ -166,37 +167,40 @@
         write_generic_params(def_id, f)?;
 
         let variant_data = self.variant_data(f.db);
-        if let StructKind::Tuple = variant_data.kind() {
-            f.write_char('(')?;
-            let mut it = variant_data.fields().iter().peekable();
+        match variant_data.kind() {
+            StructKind::Tuple => {
+                f.write_char('(')?;
+                let mut it = variant_data.fields().iter().peekable();
 
-            while let Some((id, _)) = it.next() {
-                let field = Field { parent: (*self).into(), id };
-                write_visibility(module_id, field.visibility(f.db), f)?;
-                field.ty(f.db).hir_fmt(f)?;
-                if it.peek().is_some() {
-                    f.write_str(", ")?;
+                while let Some((id, _)) = it.next() {
+                    let field = Field { parent: (*self).into(), id };
+                    write_visibility(module_id, field.visibility(f.db), f)?;
+                    field.ty(f.db).hir_fmt(f)?;
+                    if it.peek().is_some() {
+                        f.write_str(", ")?;
+                    }
+                }
+
+                f.write_char(')')?;
+                write_where_clause(def_id, f)?;
+            }
+            StructKind::Record => {
+                let has_where_clause = write_where_clause(def_id, f)?;
+                let fields = self.fields(f.db);
+                f.write_char(if !has_where_clause { ' ' } else { '\n' })?;
+                if fields.is_empty() {
+                    f.write_str("{}")?;
+                } else {
+                    f.write_str("{\n")?;
+                    for field in self.fields(f.db) {
+                        f.write_str("    ")?;
+                        field.hir_fmt(f)?;
+                        f.write_str(",\n")?;
+                    }
+                    f.write_str("}")?;
                 }
             }
-
-            f.write_str(");")?;
-        }
-
-        write_where_clause(def_id, f)?;
-
-        if let StructKind::Record = variant_data.kind() {
-            let fields = self.fields(f.db);
-            if fields.is_empty() {
-                f.write_str(" {}")?;
-            } else {
-                f.write_str(" {\n")?;
-                for field in self.fields(f.db) {
-                    f.write_str("    ")?;
-                    field.hir_fmt(f)?;
-                    f.write_str(",\n")?;
-                }
-                f.write_str("}")?;
-            }
+            StructKind::Unit => _ = write_where_clause(def_id, f)?,
         }
 
         Ok(())
@@ -210,11 +214,12 @@
         write!(f, "{}", self.name(f.db).display(f.db.upcast()))?;
         let def_id = GenericDefId::AdtId(AdtId::EnumId(self.id));
         write_generic_params(def_id, f)?;
-        write_where_clause(def_id, f)?;
+        let has_where_clause = write_where_clause(def_id, f)?;
 
         let variants = self.variants(f.db);
         if !variants.is_empty() {
-            f.write_str(" {\n")?;
+            f.write_char(if !has_where_clause { ' ' } else { '\n' })?;
+            f.write_str("{\n")?;
             for variant in variants {
                 f.write_str("    ")?;
                 variant.hir_fmt(f)?;
@@ -234,11 +239,12 @@
         write!(f, "{}", self.name(f.db).display(f.db.upcast()))?;
         let def_id = GenericDefId::AdtId(AdtId::UnionId(self.id));
         write_generic_params(def_id, f)?;
-        write_where_clause(def_id, f)?;
+        let has_where_clause = write_where_clause(def_id, f)?;
 
         let fields = self.fields(f.db);
         if !fields.is_empty() {
-            f.write_str(" {\n")?;
+            f.write_char(if !has_where_clause { ' ' } else { '\n' })?;
+            f.write_str("{\n")?;
             for field in self.fields(f.db) {
                 f.write_str("    ")?;
                 field.hir_fmt(f)?;
@@ -446,7 +452,10 @@
     Ok(())
 }
 
-fn write_where_clause(def: GenericDefId, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+fn write_where_clause(
+    def: GenericDefId,
+    f: &mut HirFormatter<'_>,
+) -> Result<bool, HirDisplayError> {
     let params = f.db.generic_params(def);
 
     // unnamed type targets are displayed inline with the argument itself, e.g. `f: impl Y`.
@@ -465,7 +474,7 @@
         });
 
     if !has_displayable_predicate {
-        return Ok(());
+        return Ok(false);
     }
 
     let write_target = |target: &WherePredicateTypeTarget, f: &mut HirFormatter<'_>| match target {
@@ -543,7 +552,7 @@
     // End of final predicate. There must be at least one predicate here.
     f.write_char(',')?;
 
-    Ok(())
+    Ok(true)
 }
 
 impl HirDisplay for Const {
@@ -594,19 +603,20 @@
         write!(f, "trait {}", data.name.display(f.db.upcast()))?;
         let def_id = GenericDefId::TraitId(self.id);
         write_generic_params(def_id, f)?;
-        write_where_clause(def_id, f)?;
+        let has_where_clause = write_where_clause(def_id, f)?;
 
         if let Some(limit) = f.entity_limit {
             let assoc_items = self.items(f.db);
             let count = assoc_items.len().min(limit);
+            f.write_char(if !has_where_clause { ' ' } else { '\n' })?;
             if count == 0 {
                 if assoc_items.is_empty() {
-                    f.write_str(" {}")?;
+                    f.write_str("{}")?;
                 } else {
-                    f.write_str(" { /* … */ }")?;
+                    f.write_str("{ /* … */ }")?;
                 }
             } else {
-                f.write_str(" {\n")?;
+                f.write_str("{\n")?;
                 for item in &assoc_items[..count] {
                     f.write_str("    ")?;
                     match item {
@@ -651,7 +661,6 @@
         write!(f, "type {}", data.name.display(f.db.upcast()))?;
         let def_id = GenericDefId::TypeAliasId(self.id);
         write_generic_params(def_id, f)?;
-        write_where_clause(def_id, f)?;
         if !data.bounds.is_empty() {
             f.write_str(": ")?;
             f.write_joined(data.bounds.iter(), " + ")?;
@@ -660,6 +669,7 @@
             f.write_str(" = ")?;
             ty.hir_fmt(f)?;
         }
+        write_where_clause(def_id, f)?;
         Ok(())
     }
 }
diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs
index b16ec55..4f9697f 100644
--- a/crates/hir/src/lib.rs
+++ b/crates/hir/src/lib.rs
@@ -1099,7 +1099,7 @@
             VariantDef::Union(it) => it.id.into(),
             VariantDef::Variant(it) => it.parent_enum(db).id.into(),
         };
-        let mut generics = generics.map(|it| it.ty.clone());
+        let mut generics = generics.map(|it| it.ty);
         let substs = TyBuilder::subst_for_def(db, def_id, None)
             .fill(|x| match x {
                 ParamKind::Type => {
@@ -1440,7 +1440,7 @@
     /// the greatest API, FIXME find a better one.
     pub fn ty_with_args(self, db: &dyn HirDatabase, args: impl Iterator<Item = Type>) -> Type {
         let id = AdtId::from(self);
-        let mut it = args.map(|t| t.ty.clone());
+        let mut it = args.map(|t| t.ty);
         let ty = TyBuilder::def_ty(db, id.into(), None)
             .fill(|x| {
                 let r = it.next().unwrap_or_else(|| TyKind::Error.intern(Interner));
@@ -1859,7 +1859,7 @@
             ItemContainerId::TraitId(it) => Some(it.into()),
             ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => None,
         };
-        let mut generics = generics.map(|it| it.ty.clone());
+        let mut generics = generics.map(|it| it.ty);
         let mut filler = |x: &_| match x {
             ParamKind::Type => {
                 generics.next().unwrap_or_else(|| TyKind::Error.intern(Interner)).cast(Interner)
@@ -1954,7 +1954,7 @@
             ItemContainerId::TraitId(it) => Some(it.into()),
             ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => None,
         };
-        let mut generics = generics.map(|it| it.ty.clone());
+        let mut generics = generics.map(|it| it.ty);
         let parent_substs = parent_id.map(|id| {
             TyBuilder::subst_for_def(db, id, None)
                 .fill(|x| match x {
@@ -2215,7 +2215,7 @@
             }
         };
 
-        let mut generics = generics.map(|it| it.ty.clone());
+        let mut generics = generics.map(|it| it.ty);
         let mut filler = |x: &_| match x {
             ParamKind::Type => {
                 generics.next().unwrap_or_else(|| TyKind::Error.intern(Interner)).cast(Interner)
@@ -2617,6 +2617,15 @@
         }
     }
 
+    pub fn is_env_or_option_env(&self, db: &dyn HirDatabase) -> bool {
+        match self.id {
+            MacroId::Macro2Id(it) => {
+                matches!(it.lookup(db.upcast()).expander, MacroExpander::BuiltInEager(eager) if eager.is_env_or_option_env())
+            }
+            MacroId::MacroRulesId(_) | MacroId::ProcMacroId(_) => false,
+        }
+    }
+
     pub fn is_attr(&self, db: &dyn HirDatabase) -> bool {
         matches!(self.kind(db), MacroKind::Attr)
     }
diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index ca47b37..9796009 100644
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -681,28 +681,29 @@
             .filter(|&(_, include_file_id)| include_file_id == file_id)
         {
             let macro_file = invoc.as_macro_file();
-            let expansion_info = cache
-                .entry(macro_file)
-                .or_insert_with(|| macro_file.expansion_info(self.db.upcast()));
+            let expansion_info = cache.entry(macro_file).or_insert_with(|| {
+                let exp_info = macro_file.expansion_info(self.db.upcast());
+
+                let InMacroFile { file_id, value } = exp_info.expanded();
+                self.cache(value, file_id.into());
+
+                exp_info
+            });
 
             // Create the source analyzer for the macro call scope
             let Some(sa) = self.analyze_no_infer(&self.parse_or_expand(expansion_info.call_file()))
             else {
                 continue;
             };
-            {
-                let InMacroFile { file_id: macro_file, value } = expansion_info.expanded();
-                self.cache(value, macro_file.into());
-            }
 
             // get mapped token in the include! macro file
-            let span = span::SpanData {
+            let span = span::Span {
                 range: token.text_range(),
                 anchor: span::SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID },
                 ctx: SyntaxContextId::ROOT,
             };
             let Some(InMacroFile { file_id, value: mut mapped_tokens }) =
-                expansion_info.map_range_down(span)
+                expansion_info.map_range_down_exact(span)
             else {
                 continue;
             };
@@ -753,22 +754,20 @@
         let def_map = sa.resolver.def_map();
 
         let mut stack: Vec<(_, SmallVec<[_; 2]>)> = vec![(file_id, smallvec![token])];
-
         let mut process_expansion_for_token = |stack: &mut Vec<_>, macro_file| {
-            let expansion_info = cache
-                .entry(macro_file)
-                .or_insert_with(|| macro_file.expansion_info(self.db.upcast()));
+            let exp_info = cache.entry(macro_file).or_insert_with(|| {
+                let exp_info = macro_file.expansion_info(self.db.upcast());
 
-            {
-                let InMacroFile { file_id, value } = expansion_info.expanded();
+                let InMacroFile { file_id, value } = exp_info.expanded();
                 self.cache(value, file_id.into());
-            }
 
-            let InMacroFile { file_id, value: mapped_tokens } =
-                expansion_info.map_range_down(span)?;
+                exp_info
+            });
+
+            let InMacroFile { file_id, value: mapped_tokens } = exp_info.map_range_down(span)?;
             let mapped_tokens: SmallVec<[_; 2]> = mapped_tokens.collect();
 
-            // if the length changed we have found a mapping for the token
+            // we have found a mapping for the token if the vec is non-empty
             let res = mapped_tokens.is_empty().not().then_some(());
             // requeue the tokens we got from mapping our current token down
             stack.push((HirFileId::from(file_id), mapped_tokens));
@@ -851,7 +850,13 @@
                         // remove any other token in this macro input, all their mappings are the
                         // same as this one
                         tokens.retain(|t| !text_range.contains_range(t.text_range()));
-                        process_expansion_for_token(&mut stack, file_id)
+
+                        process_expansion_for_token(&mut stack, file_id).or(file_id
+                            .eager_arg(self.db.upcast())
+                            .and_then(|arg| {
+                                // also descend into eager expansions
+                                process_expansion_for_token(&mut stack, arg.as_macro_file())
+                            }))
                     } else if let Some(meta) = ast::Meta::cast(parent) {
                         // attribute we failed expansion for earlier, this might be a derive invocation
                         // or derive helper attribute
diff --git a/crates/hir/src/term_search/tactics.rs b/crates/hir/src/term_search/tactics.rs
index 102e0ca..0ccca21 100644
--- a/crates/hir/src/term_search/tactics.rs
+++ b/crates/hir/src/term_search/tactics.rs
@@ -177,7 +177,7 @@
                 // Note that we need special case for 0 param constructors because of multi cartesian
                 // product
                 let variant_exprs: Vec<Expr> = if param_exprs.is_empty() {
-                    vec![Expr::Variant { variant, generics: generics.clone(), params: Vec::new() }]
+                    vec![Expr::Variant { variant, generics, params: Vec::new() }]
                 } else {
                     param_exprs
                         .into_iter()
diff --git a/crates/ide-assists/src/handlers/extract_module.rs b/crates/ide-assists/src/handlers/extract_module.rs
index af834c8..42f9356 100644
--- a/crates/ide-assists/src/handlers/extract_module.rs
+++ b/crates/ide-assists/src/handlers/extract_module.rs
@@ -1,5 +1,6 @@
 use std::iter;
 
+use either::Either;
 use hir::{HasSource, HirFileIdExt, ModuleSource};
 use ide_db::{
     assists::{AssistId, AssistKind},
@@ -10,17 +11,16 @@
 };
 use itertools::Itertools;
 use smallvec::SmallVec;
-use stdx::format_to;
 use syntax::{
     algo::find_node_at_range,
     ast::{
         self,
         edit::{AstNodeEdit, IndentLevel},
-        make, HasName, HasVisibility,
+        make, HasVisibility,
     },
-    match_ast, ted, AstNode, SourceFile,
+    match_ast, ted, AstNode,
     SyntaxKind::{self, WHITESPACE},
-    SyntaxNode, TextRange,
+    SyntaxNode, TextRange, TextSize,
 };
 
 use crate::{AssistContext, Assists};
@@ -109,76 +109,35 @@
 
             //We are getting item usages and record_fields together, record_fields
             //for change_visibility and usages for first point mentioned above in the process
-            let (usages_to_be_processed, record_fields) = module.get_usages_and_record_fields(ctx);
+
+            let (usages_to_be_processed, record_fields, use_stmts_to_be_inserted) =
+                module.get_usages_and_record_fields(ctx);
+
+            builder.edit_file(ctx.file_id());
+            use_stmts_to_be_inserted.into_iter().for_each(|(_, use_stmt)| {
+                builder.insert(ctx.selection_trimmed().end(), format!("\n{use_stmt}"));
+            });
 
             let import_paths_to_be_removed = module.resolve_imports(curr_parent_module, ctx);
             module.change_visibility(record_fields);
 
-            let mut body_items: Vec<String> = Vec::new();
-            let mut items_to_be_processed: Vec<ast::Item> = module.body_items.clone();
+            let module_def = generate_module_def(&impl_parent, &mut module, old_item_indent);
 
-            let new_item_indent = if impl_parent.is_some() {
-                old_item_indent + 2
-            } else {
-                items_to_be_processed = [module.use_items.clone(), items_to_be_processed].concat();
-                old_item_indent + 1
-            };
-
-            for item in items_to_be_processed {
-                let item = item.indent(IndentLevel(1));
-                let mut indented_item = String::new();
-                format_to!(indented_item, "{new_item_indent}{item}");
-                body_items.push(indented_item);
-            }
-
-            let mut body = body_items.join("\n\n");
-
-            if let Some(impl_) = &impl_parent {
-                let mut impl_body_def = String::new();
-
-                if let Some(self_ty) = impl_.self_ty() {
-                    {
-                        let impl_indent = old_item_indent + 1;
-                        format_to!(
-                            impl_body_def,
-                            "{impl_indent}impl {self_ty} {{\n{body}\n{impl_indent}}}",
-                        );
-                    }
-                    body = impl_body_def;
-
-                    // Add the import for enum/struct corresponding to given impl block
-                    module.make_use_stmt_of_node_with_super(self_ty.syntax());
-                    for item in module.use_items {
-                        let item_indent = old_item_indent + 1;
-                        body = format!("{item_indent}{item}\n\n{body}");
-                    }
-                }
-            }
-
-            let mut module_def = String::new();
-
-            let module_name = module.name;
-            format_to!(module_def, "mod {module_name} {{\n{body}\n{old_item_indent}}}");
-
-            let mut usages_to_be_updated_for_curr_file = vec![];
-            for usages_to_be_updated_for_file in usages_to_be_processed {
-                if usages_to_be_updated_for_file.0 == ctx.file_id() {
-                    usages_to_be_updated_for_curr_file = usages_to_be_updated_for_file.1;
+            let mut usages_to_be_processed_for_cur_file = vec![];
+            for (file_id, usages) in usages_to_be_processed {
+                if file_id == ctx.file_id() {
+                    usages_to_be_processed_for_cur_file = usages;
                     continue;
                 }
-                builder.edit_file(usages_to_be_updated_for_file.0);
-                for usage_to_be_processed in usages_to_be_updated_for_file.1 {
-                    builder.replace(usage_to_be_processed.0, usage_to_be_processed.1)
+                builder.edit_file(file_id);
+                for (text_range, usage) in usages {
+                    builder.replace(text_range, usage)
                 }
             }
 
             builder.edit_file(ctx.file_id());
-            for usage_to_be_processed in usages_to_be_updated_for_curr_file {
-                builder.replace(usage_to_be_processed.0, usage_to_be_processed.1)
-            }
-
-            for import_path_text_range in import_paths_to_be_removed {
-                builder.delete(import_path_text_range);
+            for (text_range, usage) in usages_to_be_processed_for_cur_file {
+                builder.replace(text_range, usage);
             }
 
             if let Some(impl_) = impl_parent {
@@ -199,12 +158,51 @@
 
                 builder.insert(impl_.syntax().text_range().end(), format!("\n\n{module_def}"));
             } else {
+                for import_path_text_range in import_paths_to_be_removed {
+                    if module.text_range.intersect(import_path_text_range).is_some() {
+                        module.text_range = module.text_range.cover(import_path_text_range);
+                    } else {
+                        builder.delete(import_path_text_range);
+                    }
+                }
+
                 builder.replace(module.text_range, module_def)
             }
         },
     )
 }
 
+fn generate_module_def(
+    parent_impl: &Option<ast::Impl>,
+    module: &mut Module,
+    old_indent: IndentLevel,
+) -> String {
+    let (items_to_be_processed, new_item_indent) = if parent_impl.is_some() {
+        (Either::Left(module.body_items.iter()), old_indent + 2)
+    } else {
+        (Either::Right(module.use_items.iter().chain(module.body_items.iter())), old_indent + 1)
+    };
+
+    let mut body = items_to_be_processed
+        .map(|item| item.indent(IndentLevel(1)))
+        .map(|item| format!("{new_item_indent}{item}"))
+        .join("\n\n");
+
+    if let Some(self_ty) = parent_impl.as_ref().and_then(|imp| imp.self_ty()) {
+        let impl_indent = old_indent + 1;
+        body = format!("{impl_indent}impl {self_ty} {{\n{body}\n{impl_indent}}}");
+
+        // Add the import for enum/struct corresponding to given impl block
+        module.make_use_stmt_of_node_with_super(self_ty.syntax());
+        for item in module.use_items.iter() {
+            body = format!("{impl_indent}{item}\n\n{body}");
+        }
+    }
+
+    let module_name = module.name;
+    format!("mod {module_name} {{\n{body}\n{old_indent}}}")
+}
+
 #[derive(Debug)]
 struct Module {
     text_range: TextRange,
@@ -233,20 +231,24 @@
     fn get_usages_and_record_fields(
         &self,
         ctx: &AssistContext<'_>,
-    ) -> (FxHashMap<FileId, Vec<(TextRange, String)>>, Vec<SyntaxNode>) {
+    ) -> (FxHashMap<FileId, Vec<(TextRange, String)>>, Vec<SyntaxNode>, FxHashMap<TextSize, ast::Use>)
+    {
         let mut adt_fields = Vec::new();
         let mut refs: FxHashMap<FileId, Vec<(TextRange, String)>> = FxHashMap::default();
+        // use `TextSize` as key to avoid repeated use stmts
+        let mut use_stmts_to_be_inserted = FxHashMap::default();
 
         //Here impl is not included as each item inside impl will be tied to the parent of
         //implementing block(a struct, enum, etc), if the parent is in selected module, it will
         //get updated by ADT section given below or if it is not, then we dont need to do any operation
+
         for item in &self.body_items {
             match_ast! {
                 match (item.syntax()) {
                     ast::Adt(it) => {
                         if let Some( nod ) = ctx.sema.to_def(&it) {
                             let node_def = Definition::Adt(nod);
-                            self.expand_and_group_usages_file_wise(ctx, node_def, &mut refs);
+                            self.expand_and_group_usages_file_wise(ctx, node_def, &mut refs, &mut use_stmts_to_be_inserted);
 
                             //Enum Fields are not allowed to explicitly specify pub, it is implied
                             match it {
@@ -280,30 +282,30 @@
                     ast::TypeAlias(it) => {
                         if let Some( nod ) = ctx.sema.to_def(&it) {
                             let node_def = Definition::TypeAlias(nod);
-                            self.expand_and_group_usages_file_wise(ctx, node_def, &mut refs);
+                            self.expand_and_group_usages_file_wise(ctx, node_def, &mut refs, &mut use_stmts_to_be_inserted);
                         }
                     },
                     ast::Const(it) => {
                         if let Some( nod ) = ctx.sema.to_def(&it) {
                             let node_def = Definition::Const(nod);
-                            self.expand_and_group_usages_file_wise(ctx, node_def, &mut refs);
+                            self.expand_and_group_usages_file_wise(ctx, node_def, &mut refs, &mut use_stmts_to_be_inserted);
                         }
                     },
                     ast::Static(it) => {
                         if let Some( nod ) = ctx.sema.to_def(&it) {
                             let node_def = Definition::Static(nod);
-                            self.expand_and_group_usages_file_wise(ctx, node_def, &mut refs);
+                            self.expand_and_group_usages_file_wise(ctx, node_def, &mut refs, &mut use_stmts_to_be_inserted);
                         }
                     },
                     ast::Fn(it) => {
                         if let Some( nod ) = ctx.sema.to_def(&it) {
                             let node_def = Definition::Function(nod);
-                            self.expand_and_group_usages_file_wise(ctx, node_def, &mut refs);
+                            self.expand_and_group_usages_file_wise(ctx, node_def, &mut refs, &mut use_stmts_to_be_inserted);
                         }
                     },
                     ast::Macro(it) => {
                         if let Some(nod) = ctx.sema.to_def(&it) {
-                            self.expand_and_group_usages_file_wise(ctx, Definition::Macro(nod), &mut refs);
+                            self.expand_and_group_usages_file_wise(ctx, Definition::Macro(nod), &mut refs, &mut use_stmts_to_be_inserted);
                         }
                     },
                     _ => (),
@@ -311,7 +313,7 @@
             }
         }
 
-        (refs, adt_fields)
+        (refs, adt_fields, use_stmts_to_be_inserted)
     }
 
     fn expand_and_group_usages_file_wise(
@@ -319,49 +321,62 @@
         ctx: &AssistContext<'_>,
         node_def: Definition,
         refs_in_files: &mut FxHashMap<FileId, Vec<(TextRange, String)>>,
+        use_stmts_to_be_inserted: &mut FxHashMap<TextSize, ast::Use>,
     ) {
-        for (file_id, references) in node_def.usages(&ctx.sema).all() {
+        let mod_name = self.name;
+        let covering_node = match ctx.covering_element() {
+            syntax::NodeOrToken::Node(node) => node,
+            syntax::NodeOrToken::Token(tok) => tok.parent().unwrap(), // won't panic
+        };
+        let out_of_sel = |node: &SyntaxNode| !self.text_range.contains_range(node.text_range());
+        let mut use_stmts_set = FxHashSet::default();
+
+        for (file_id, refs) in node_def.usages(&ctx.sema).all() {
             let source_file = ctx.sema.parse(file_id);
-            let usages_in_file = references
-                .into_iter()
-                .filter_map(|usage| self.get_usage_to_be_processed(&source_file, usage));
-            refs_in_files.entry(file_id).or_default().extend(usages_in_file);
-        }
-    }
+            let usages = refs.into_iter().filter_map(|FileReference { range, .. }| {
+                // handle normal usages
+                let name_ref = find_node_at_range::<ast::NameRef>(source_file.syntax(), range)?;
 
-    fn get_usage_to_be_processed(
-        &self,
-        source_file: &SourceFile,
-        FileReference { range, name, .. }: FileReference,
-    ) -> Option<(TextRange, String)> {
-        let path: ast::Path = find_node_at_range(source_file.syntax(), range)?;
-
-        for desc in path.syntax().descendants() {
-            if desc.to_string() == name.syntax().to_string()
-                && !self.text_range.contains_range(desc.text_range())
-            {
-                if let Some(name_ref) = ast::NameRef::cast(desc) {
-                    let mod_name = self.name;
-                    return Some((
-                        name_ref.syntax().text_range(),
-                        format!("{mod_name}::{name_ref}"),
-                    ));
+                if out_of_sel(name_ref.syntax()) {
+                    let new_ref = format!("{mod_name}::{name_ref}");
+                    return Some((range, new_ref));
+                } else if let Some(use_) = name_ref.syntax().ancestors().find_map(ast::Use::cast) {
+                    // handle usages in use_stmts which are in_sel
+                    // check if `use` is top stmt in selection
+                    if use_.syntax().parent().is_some_and(|parent| parent == covering_node)
+                        && use_stmts_set.insert(use_.syntax().text_range().start())
+                    {
+                        let use_ = use_stmts_to_be_inserted
+                            .entry(use_.syntax().text_range().start())
+                            .or_insert_with(|| use_.clone_subtree().clone_for_update());
+                        for seg in use_
+                            .syntax()
+                            .descendants()
+                            .filter_map(ast::NameRef::cast)
+                            .filter(|seg| seg.syntax().to_string() == name_ref.to_string())
+                        {
+                            let new_ref = make::path_from_text(&format!("{mod_name}::{seg}"))
+                                .clone_for_update();
+                            ted::replace(seg.syntax().parent()?, new_ref.syntax());
+                        }
+                    }
                 }
-            }
-        }
 
-        None
+                None
+            });
+            refs_in_files.entry(file_id).or_default().extend(usages);
+        }
     }
 
     fn change_visibility(&mut self, record_fields: Vec<SyntaxNode>) {
         let (mut replacements, record_field_parents, impls) =
             get_replacements_for_visibility_change(&mut self.body_items, false);
 
-        let mut impl_items: Vec<ast::Item> = impls
+        let mut impl_items = impls
             .into_iter()
             .flat_map(|impl_| impl_.syntax().descendants())
             .filter_map(ast::Item::cast)
-            .collect();
+            .collect_vec();
 
         let (mut impl_item_replacements, _, _) =
             get_replacements_for_visibility_change(&mut impl_items, true);
@@ -394,133 +409,88 @@
 
     fn resolve_imports(
         &mut self,
-        curr_parent_module: Option<ast::Module>,
+        module: Option<ast::Module>,
         ctx: &AssistContext<'_>,
     ) -> Vec<TextRange> {
-        let mut import_paths_to_be_removed: Vec<TextRange> = vec![];
-        let mut node_set: FxHashSet<String> = FxHashSet::default();
+        let mut imports_to_remove = vec![];
+        let mut node_set = FxHashSet::default();
 
         for item in self.body_items.clone() {
-            for x in item.syntax().descendants() {
-                if let Some(name) = ast::Name::cast(x.clone()) {
-                    if let Some(name_classify) = NameClass::classify(&ctx.sema, &name) {
-                        //Necessary to avoid two same names going through
-                        if !node_set.contains(&name.syntax().to_string()) {
-                            node_set.insert(name.syntax().to_string());
-                            let def_opt: Option<Definition> = match name_classify {
-                                NameClass::Definition(def) => Some(def),
-                                _ => None,
-                            };
-
-                            if let Some(def) = def_opt {
-                                if let Some(import_path) = self
-                                    .process_names_and_namerefs_for_import_resolve(
-                                        def,
-                                        name.syntax(),
-                                        &curr_parent_module,
-                                        ctx,
-                                    )
-                                {
-                                    check_intersection_and_push(
-                                        &mut import_paths_to_be_removed,
-                                        import_path,
-                                    );
-                                }
-                            }
+            item.syntax()
+                .descendants()
+                .filter_map(|x| {
+                    if let Some(name) = ast::Name::cast(x.clone()) {
+                        NameClass::classify(&ctx.sema, &name).and_then(|nc| match nc {
+                            NameClass::Definition(def) => Some((name.syntax().clone(), def)),
+                            _ => None,
+                        })
+                    } else if let Some(name_ref) = ast::NameRef::cast(x) {
+                        NameRefClass::classify(&ctx.sema, &name_ref).and_then(|nc| match nc {
+                            NameRefClass::Definition(def) => Some((name_ref.syntax().clone(), def)),
+                            _ => None,
+                        })
+                    } else {
+                        None
+                    }
+                })
+                .for_each(|(node, def)| {
+                    if node_set.insert(node.to_string()) {
+                        if let Some(import) = self.process_def_in_sel(def, &node, &module, ctx) {
+                            check_intersection_and_push(&mut imports_to_remove, import);
                         }
                     }
-                }
-
-                if let Some(name_ref) = ast::NameRef::cast(x) {
-                    if let Some(name_classify) = NameRefClass::classify(&ctx.sema, &name_ref) {
-                        //Necessary to avoid two same names going through
-                        if !node_set.contains(&name_ref.syntax().to_string()) {
-                            node_set.insert(name_ref.syntax().to_string());
-                            let def_opt: Option<Definition> = match name_classify {
-                                NameRefClass::Definition(def) => Some(def),
-                                _ => None,
-                            };
-
-                            if let Some(def) = def_opt {
-                                if let Some(import_path) = self
-                                    .process_names_and_namerefs_for_import_resolve(
-                                        def,
-                                        name_ref.syntax(),
-                                        &curr_parent_module,
-                                        ctx,
-                                    )
-                                {
-                                    check_intersection_and_push(
-                                        &mut import_paths_to_be_removed,
-                                        import_path,
-                                    );
-                                }
-                            }
-                        }
-                    }
-                }
-            }
+                })
         }
 
-        import_paths_to_be_removed
+        imports_to_remove
     }
 
-    fn process_names_and_namerefs_for_import_resolve(
+    fn process_def_in_sel(
         &mut self,
         def: Definition,
-        node_syntax: &SyntaxNode,
+        use_node: &SyntaxNode,
         curr_parent_module: &Option<ast::Module>,
         ctx: &AssistContext<'_>,
     ) -> Option<TextRange> {
         //We only need to find in the current file
         let selection_range = ctx.selection_trimmed();
-        let curr_file_id = ctx.file_id();
-        let search_scope = SearchScope::single_file(curr_file_id);
-        let usage_res = def.usages(&ctx.sema).in_scope(&search_scope).all();
-        let file = ctx.sema.parse(curr_file_id);
+        let file_id = ctx.file_id();
+        let usage_res = def.usages(&ctx.sema).in_scope(&SearchScope::single_file(file_id)).all();
+        let file = ctx.sema.parse(file_id);
 
-        let mut exists_inside_sel = false;
-        let mut exists_outside_sel = false;
-        for (_, refs) in usage_res.iter() {
-            let mut non_use_nodes_itr = refs.iter().filter_map(|x| {
-                if find_node_at_range::<ast::Use>(file.syntax(), x.range).is_none() {
-                    let path_opt = find_node_at_range::<ast::Path>(file.syntax(), x.range);
-                    return path_opt;
-                }
-
-                None
-            });
-
-            if non_use_nodes_itr
-                .clone()
-                .any(|x| !selection_range.contains_range(x.syntax().text_range()))
+        // track usages that do not appear inside a `Use` item
+        let mut uses_exist_in_sel = false;
+        let mut uses_exist_out_sel = false;
+        'outside: for (_, refs) in usage_res.iter() {
+            for x in refs
+                .iter()
+                .filter(|x| find_node_at_range::<ast::Use>(file.syntax(), x.range).is_none())
+                .filter_map(|x| find_node_at_range::<ast::Path>(file.syntax(), x.range))
             {
-                exists_outside_sel = true;
-            }
-            if non_use_nodes_itr.any(|x| selection_range.contains_range(x.syntax().text_range())) {
-                exists_inside_sel = true;
+                let in_selection = selection_range.contains_range(x.syntax().text_range());
+                uses_exist_in_sel |= in_selection;
+                uses_exist_out_sel |= !in_selection;
+
+                if uses_exist_in_sel && uses_exist_out_sel {
+                    break 'outside;
+                }
             }
         }
 
-        let source_exists_outside_sel_in_same_mod = does_source_exists_outside_sel_in_same_mod(
-            def,
-            ctx,
-            curr_parent_module,
-            selection_range,
-            curr_file_id,
-        );
+        let (def_in_mod, def_out_sel) =
+            check_def_in_mod_and_out_sel(def, ctx, curr_parent_module, selection_range, file_id);
 
-        let use_stmt_opt: Option<ast::Use> = usage_res.into_iter().find_map(|(file_id, refs)| {
-            if file_id == curr_file_id {
-                refs.into_iter()
-                    .rev()
-                    .find_map(|fref| find_node_at_range(file.syntax(), fref.range))
-            } else {
-                None
-            }
+        // Find a use stmt that uses the def in the current file
+        let use_stmt: Option<ast::Use> = usage_res
+            .into_iter()
+            .filter(|(use_file_id, _)| *use_file_id == file_id)
+            .flat_map(|(_, refs)| refs.into_iter().rev())
+            .find_map(|fref| find_node_at_range(file.syntax(), fref.range));
+        let use_stmt_not_in_sel = use_stmt.as_ref().is_some_and(|use_stmt| {
+            !selection_range.contains_range(use_stmt.syntax().text_range())
         });
 
-        let mut use_tree_str_opt: Option<Vec<ast::Path>> = None;
+        let mut use_tree_paths: Option<Vec<ast::Path>> = None;
         //Exists inside and outside selection
         // - Use stmt for item is present -> get the use_tree_str and reconstruct the path in new
         // module
@@ -534,37 +504,37 @@
         //get the use_tree_str, reconstruct the use stmt in new module
 
         let mut import_path_to_be_removed: Option<TextRange> = None;
-        if exists_inside_sel && exists_outside_sel {
+        if uses_exist_in_sel && uses_exist_out_sel {
             //Changes to be made only inside new module
 
             //If use_stmt exists, find the use_tree_str, reconstruct it inside new module
             //If not, insert a use stmt with super and the given nameref
-            if let Some((use_tree_str, _)) =
-                self.process_use_stmt_for_import_resolve(use_stmt_opt, node_syntax)
-            {
-                use_tree_str_opt = Some(use_tree_str);
-            } else if source_exists_outside_sel_in_same_mod {
-                //Considered only after use_stmt is not present
-                //source_exists_outside_sel_in_same_mod | exists_outside_sel(exists_inside_sel =
-                //true for all cases)
-                // false | false -> Do nothing
-                // false | true -> If source is in selection -> nothing to do, If source is outside
-                // mod -> ust_stmt transversal
-                // true  | false -> super import insertion
-                // true  | true -> super import insertion
-                self.make_use_stmt_of_node_with_super(node_syntax);
+            match self.process_use_stmt_for_import_resolve(use_stmt, use_node) {
+                Some((use_tree_str, _)) => use_tree_paths = Some(use_tree_str),
+                None if def_in_mod && def_out_sel => {
+                    //Only considered when no use_stmt is present
+                    //def_in_mod && def_out_sel | exists_outside_sel (exists_inside_sel is
+                    //true in all cases)
+                    // false | false -> Do nothing
+                    // false | true -> If source is in selection -> nothing to do, if source is outside
+                    // mod -> use_stmt traversal
+                    // true  | false -> super import insertion
+                    // true  | true -> super import insertion
+                    self.make_use_stmt_of_node_with_super(use_node);
+                }
+                None => {}
             }
-        } else if exists_inside_sel && !exists_outside_sel {
+        } else if uses_exist_in_sel && !uses_exist_out_sel {
             //Changes to be made inside new module, and remove import from outside
 
             if let Some((mut use_tree_str, text_range_opt)) =
-                self.process_use_stmt_for_import_resolve(use_stmt_opt, node_syntax)
+                self.process_use_stmt_for_import_resolve(use_stmt, use_node)
             {
                 if let Some(text_range) = text_range_opt {
                     import_path_to_be_removed = Some(text_range);
                 }
 
-                if source_exists_outside_sel_in_same_mod {
+                if def_in_mod && def_out_sel {
                     if let Some(first_path_in_use_tree) = use_tree_str.last() {
                         let first_path_in_use_tree_str = first_path_in_use_tree.to_string();
                         if !first_path_in_use_tree_str.contains("super")
@@ -576,31 +546,43 @@
                     }
                 }
 
-                use_tree_str_opt = Some(use_tree_str);
-            } else if source_exists_outside_sel_in_same_mod {
-                self.make_use_stmt_of_node_with_super(node_syntax);
+                use_tree_paths = Some(use_tree_str);
+            } else if def_in_mod && def_out_sel {
+                self.make_use_stmt_of_node_with_super(use_node);
             }
         }
 
-        if let Some(use_tree_str) = use_tree_str_opt {
-            let mut use_tree_str = use_tree_str;
-            use_tree_str.reverse();
+        if let Some(mut use_tree_paths) = use_tree_paths {
+            use_tree_paths.reverse();
 
-            if !(!exists_outside_sel && exists_inside_sel && source_exists_outside_sel_in_same_mod)
-            {
-                if let Some(first_path_in_use_tree) = use_tree_str.first() {
-                    let first_path_in_use_tree_str = first_path_in_use_tree.to_string();
-                    if first_path_in_use_tree_str.contains("super") {
-                        let super_path = make::ext::ident_path("super");
-                        use_tree_str.insert(0, super_path)
+            if uses_exist_out_sel || !uses_exist_in_sel || !def_in_mod || !def_out_sel {
+                if let Some(first_path_in_use_tree) = use_tree_paths.first() {
+                    if first_path_in_use_tree.to_string().contains("super") {
+                        use_tree_paths.insert(0, make::ext::ident_path("super"));
                     }
                 }
             }
 
-            let use_ =
-                make::use_(None, make::use_tree(make::join_paths(use_tree_str), None, None, false));
-            let item = ast::Item::from(use_);
-            self.use_items.insert(0, item);
+            let is_item = matches!(
+                def,
+                Definition::Macro(_)
+                    | Definition::Module(_)
+                    | Definition::Function(_)
+                    | Definition::Adt(_)
+                    | Definition::Const(_)
+                    | Definition::Static(_)
+                    | Definition::Trait(_)
+                    | Definition::TraitAlias(_)
+                    | Definition::TypeAlias(_)
+            );
+
+            if (def_out_sel || !is_item) && use_stmt_not_in_sel {
+                let use_ = make::use_(
+                    None,
+                    make::use_tree(make::join_paths(use_tree_paths), None, None, false),
+                );
+                self.use_items.insert(0, ast::Item::from(use_));
+            }
         }
 
         import_path_to_be_removed
@@ -621,33 +603,26 @@
 
     fn process_use_stmt_for_import_resolve(
         &self,
-        use_stmt_opt: Option<ast::Use>,
+        use_stmt: Option<ast::Use>,
         node_syntax: &SyntaxNode,
     ) -> Option<(Vec<ast::Path>, Option<TextRange>)> {
-        if let Some(use_stmt) = use_stmt_opt {
-            for desc in use_stmt.syntax().descendants() {
-                if let Some(path_seg) = ast::PathSegment::cast(desc) {
-                    if path_seg.syntax().to_string() == node_syntax.to_string() {
-                        let mut use_tree_str = vec![path_seg.parent_path()];
-                        get_use_tree_paths_from_path(path_seg.parent_path(), &mut use_tree_str);
-                        for ancs in path_seg.syntax().ancestors() {
-                            //Here we are looking for use_tree with same string value as node
-                            //passed above as the range_to_remove function looks for a comma and
-                            //then includes it in the text range to remove it. But the comma only
-                            //appears at the use_tree level
-                            if let Some(use_tree) = ast::UseTree::cast(ancs) {
-                                if use_tree.syntax().to_string() == node_syntax.to_string() {
-                                    return Some((
-                                        use_tree_str,
-                                        Some(range_to_remove(use_tree.syntax())),
-                                    ));
-                                }
-                            }
-                        }
+        let use_stmt = use_stmt?;
+        for path_seg in use_stmt.syntax().descendants().filter_map(ast::PathSegment::cast) {
+            if path_seg.syntax().to_string() == node_syntax.to_string() {
+                let mut use_tree_str = vec![path_seg.parent_path()];
+                get_use_tree_paths_from_path(path_seg.parent_path(), &mut use_tree_str);
 
-                        return Some((use_tree_str, None));
+                //Look for the use_tree whose string value matches the node passed
+                //above, because range_to_remove looks for a comma and includes it
+                //in the text range to be removed — and the comma only appears at
+                //the use_tree level
+                for use_tree in path_seg.syntax().ancestors().filter_map(ast::UseTree::cast) {
+                    if use_tree.syntax().to_string() == node_syntax.to_string() {
+                        return Some((use_tree_str, Some(range_to_remove(use_tree.syntax()))));
                     }
                 }
+
+                return Some((use_tree_str, None));
             }
         }
 
@@ -676,145 +651,58 @@
     import_paths_to_be_removed.push(import_path);
 }
 
-fn does_source_exists_outside_sel_in_same_mod(
+fn check_def_in_mod_and_out_sel(
     def: Definition,
     ctx: &AssistContext<'_>,
     curr_parent_module: &Option<ast::Module>,
     selection_range: TextRange,
     curr_file_id: FileId,
-) -> bool {
-    let mut source_exists_outside_sel_in_same_mod = false;
+) -> (bool, bool) {
+    macro_rules! check_item {
+        ($x:ident) => {
+            if let Some(source) = $x.source(ctx.db()) {
+                let have_same_parent = if let Some(ast_module) = &curr_parent_module {
+                    ctx.sema.to_module_def(ast_module).is_some_and(|it| it == $x.module(ctx.db()))
+                } else {
+                    source.file_id.original_file(ctx.db()) == curr_file_id
+                };
+
+                let in_sel = !selection_range.contains_range(source.value.syntax().text_range());
+                return (have_same_parent, in_sel);
+            }
+        };
+    }
+
     match def {
         Definition::Module(x) => {
             let source = x.definition_source(ctx.db());
-            let have_same_parent = if let Some(ast_module) = &curr_parent_module {
-                if let Some(hir_module) = x.parent(ctx.db()) {
-                    compare_hir_and_ast_module(ast_module, hir_module, ctx).is_some()
-                } else {
-                    let source_file_id = source.file_id.original_file(ctx.db());
-                    source_file_id == curr_file_id
+            let have_same_parent = match (&curr_parent_module, x.parent(ctx.db())) {
+                (Some(ast_module), Some(hir_module)) => {
+                    ctx.sema.to_module_def(ast_module).is_some_and(|it| it == hir_module)
                 }
-            } else {
-                let source_file_id = source.file_id.original_file(ctx.db());
-                source_file_id == curr_file_id
+                _ => source.file_id.original_file(ctx.db()) == curr_file_id,
             };
 
             if have_same_parent {
                 if let ModuleSource::Module(module_) = source.value {
-                    source_exists_outside_sel_in_same_mod =
-                        !selection_range.contains_range(module_.syntax().text_range());
+                    let in_sel = !selection_range.contains_range(module_.syntax().text_range());
+                    return (have_same_parent, in_sel);
                 }
             }
-        }
-        Definition::Function(x) => {
-            if let Some(source) = x.source(ctx.db()) {
-                let have_same_parent = if let Some(ast_module) = &curr_parent_module {
-                    compare_hir_and_ast_module(ast_module, x.module(ctx.db()), ctx).is_some()
-                } else {
-                    let source_file_id = source.file_id.original_file(ctx.db());
-                    source_file_id == curr_file_id
-                };
 
-                if have_same_parent {
-                    source_exists_outside_sel_in_same_mod =
-                        !selection_range.contains_range(source.value.syntax().text_range());
-                }
-            }
+            return (have_same_parent, false);
         }
-        Definition::Adt(x) => {
-            if let Some(source) = x.source(ctx.db()) {
-                let have_same_parent = if let Some(ast_module) = &curr_parent_module {
-                    compare_hir_and_ast_module(ast_module, x.module(ctx.db()), ctx).is_some()
-                } else {
-                    let source_file_id = source.file_id.original_file(ctx.db());
-                    source_file_id == curr_file_id
-                };
-
-                if have_same_parent {
-                    source_exists_outside_sel_in_same_mod =
-                        !selection_range.contains_range(source.value.syntax().text_range());
-                }
-            }
-        }
-        Definition::Variant(x) => {
-            if let Some(source) = x.source(ctx.db()) {
-                let have_same_parent = if let Some(ast_module) = &curr_parent_module {
-                    compare_hir_and_ast_module(ast_module, x.module(ctx.db()), ctx).is_some()
-                } else {
-                    let source_file_id = source.file_id.original_file(ctx.db());
-                    source_file_id == curr_file_id
-                };
-
-                if have_same_parent {
-                    source_exists_outside_sel_in_same_mod =
-                        !selection_range.contains_range(source.value.syntax().text_range());
-                }
-            }
-        }
-        Definition::Const(x) => {
-            if let Some(source) = x.source(ctx.db()) {
-                let have_same_parent = if let Some(ast_module) = &curr_parent_module {
-                    compare_hir_and_ast_module(ast_module, x.module(ctx.db()), ctx).is_some()
-                } else {
-                    let source_file_id = source.file_id.original_file(ctx.db());
-                    source_file_id == curr_file_id
-                };
-
-                if have_same_parent {
-                    source_exists_outside_sel_in_same_mod =
-                        !selection_range.contains_range(source.value.syntax().text_range());
-                }
-            }
-        }
-        Definition::Static(x) => {
-            if let Some(source) = x.source(ctx.db()) {
-                let have_same_parent = if let Some(ast_module) = &curr_parent_module {
-                    compare_hir_and_ast_module(ast_module, x.module(ctx.db()), ctx).is_some()
-                } else {
-                    let source_file_id = source.file_id.original_file(ctx.db());
-                    source_file_id == curr_file_id
-                };
-
-                if have_same_parent {
-                    source_exists_outside_sel_in_same_mod =
-                        !selection_range.contains_range(source.value.syntax().text_range());
-                }
-            }
-        }
-        Definition::Trait(x) => {
-            if let Some(source) = x.source(ctx.db()) {
-                let have_same_parent = if let Some(ast_module) = &curr_parent_module {
-                    compare_hir_and_ast_module(ast_module, x.module(ctx.db()), ctx).is_some()
-                } else {
-                    let source_file_id = source.file_id.original_file(ctx.db());
-                    source_file_id == curr_file_id
-                };
-
-                if have_same_parent {
-                    source_exists_outside_sel_in_same_mod =
-                        !selection_range.contains_range(source.value.syntax().text_range());
-                }
-            }
-        }
-        Definition::TypeAlias(x) => {
-            if let Some(source) = x.source(ctx.db()) {
-                let have_same_parent = if let Some(ast_module) = &curr_parent_module {
-                    compare_hir_and_ast_module(ast_module, x.module(ctx.db()), ctx).is_some()
-                } else {
-                    let source_file_id = source.file_id.original_file(ctx.db());
-                    source_file_id == curr_file_id
-                };
-
-                if have_same_parent {
-                    source_exists_outside_sel_in_same_mod =
-                        !selection_range.contains_range(source.value.syntax().text_range());
-                }
-            }
-        }
+        Definition::Function(x) => check_item!(x),
+        Definition::Adt(x) => check_item!(x),
+        Definition::Variant(x) => check_item!(x),
+        Definition::Const(x) => check_item!(x),
+        Definition::Static(x) => check_item!(x),
+        Definition::Trait(x) => check_item!(x),
+        Definition::TypeAlias(x) => check_item!(x),
         _ => {}
     }
 
-    source_exists_outside_sel_in_same_mod
+    (false, false)
 }
 
 fn get_replacements_for_visibility_change(
@@ -834,24 +722,30 @@
             *item = item.clone_for_update();
         }
         //Use stmts are ignored
+        macro_rules! push_to_replacement {
+            ($it:ident) => {
+                replacements.push(($it.visibility(), $it.syntax().clone()))
+            };
+        }
+
         match item {
-            ast::Item::Const(it) => replacements.push((it.visibility(), it.syntax().clone())),
-            ast::Item::Enum(it) => replacements.push((it.visibility(), it.syntax().clone())),
-            ast::Item::ExternCrate(it) => replacements.push((it.visibility(), it.syntax().clone())),
-            ast::Item::Fn(it) => replacements.push((it.visibility(), it.syntax().clone())),
+            ast::Item::Const(it) => push_to_replacement!(it),
+            ast::Item::Enum(it) => push_to_replacement!(it),
+            ast::Item::ExternCrate(it) => push_to_replacement!(it),
+            ast::Item::Fn(it) => push_to_replacement!(it),
             //Associated item's visibility should not be changed
             ast::Item::Impl(it) if it.for_token().is_none() => impls.push(it.clone()),
-            ast::Item::MacroDef(it) => replacements.push((it.visibility(), it.syntax().clone())),
-            ast::Item::Module(it) => replacements.push((it.visibility(), it.syntax().clone())),
-            ast::Item::Static(it) => replacements.push((it.visibility(), it.syntax().clone())),
+            ast::Item::MacroDef(it) => push_to_replacement!(it),
+            ast::Item::Module(it) => push_to_replacement!(it),
+            ast::Item::Static(it) => push_to_replacement!(it),
             ast::Item::Struct(it) => {
-                replacements.push((it.visibility(), it.syntax().clone()));
+                push_to_replacement!(it);
                 record_field_parents.push((it.visibility(), it.syntax().clone()));
             }
-            ast::Item::Trait(it) => replacements.push((it.visibility(), it.syntax().clone())),
-            ast::Item::TypeAlias(it) => replacements.push((it.visibility(), it.syntax().clone())),
+            ast::Item::Trait(it) => push_to_replacement!(it),
+            ast::Item::TypeAlias(it) => push_to_replacement!(it),
             ast::Item::Union(it) => {
-                replacements.push((it.visibility(), it.syntax().clone()));
+                push_to_replacement!(it);
                 record_field_parents.push((it.visibility(), it.syntax().clone()));
             }
             _ => (),
@@ -865,8 +759,11 @@
     path: ast::Path,
     use_tree_str: &mut Vec<ast::Path>,
 ) -> Option<&mut Vec<ast::Path>> {
-    path.syntax().ancestors().filter(|x| x.to_string() != path.to_string()).find_map(|x| {
-        if let Some(use_tree) = ast::UseTree::cast(x) {
+    path.syntax()
+        .ancestors()
+        .filter(|x| x.to_string() != path.to_string())
+        .filter_map(ast::UseTree::cast)
+        .find_map(|use_tree| {
             if let Some(upper_tree_path) = use_tree.path() {
                 if upper_tree_path.to_string() != path.to_string() {
                     use_tree_str.push(upper_tree_path.clone());
@@ -874,9 +771,8 @@
                     return Some(use_tree);
                 }
             }
-        }
-        None
-    })?;
+            None
+        })?;
 
     Some(use_tree_str)
 }
@@ -890,20 +786,6 @@
     }
 }
 
-fn compare_hir_and_ast_module(
-    ast_module: &ast::Module,
-    hir_module: hir::Module,
-    ctx: &AssistContext<'_>,
-) -> Option<()> {
-    let hir_mod_name = hir_module.name(ctx.db())?;
-    let ast_mod_name = ast_module.name()?;
-    if hir_mod_name.display(ctx.db()).to_string() != ast_mod_name.to_string() {
-        return None;
-    }
-
-    Some(())
-}
-
 fn indent_range_before_given_node(node: &SyntaxNode) -> Option<TextRange> {
     node.siblings_with_tokens(syntax::Direction::Prev)
         .find(|x| x.kind() == WHITESPACE)
@@ -1802,4 +1684,52 @@
 "#,
         );
     }
+
+    #[test]
+    fn test_remove_import_path_inside_selection() {
+        check_assist(
+            extract_module,
+            r#"
+$0struct Point;
+impl Point {
+    pub const fn direction(self, other: Self) -> Option<Direction> {
+        Some(Vertical)
+    }
+}
+
+pub enum Direction {
+    Horizontal,
+    Vertical,
+}
+use Direction::{Horizontal, Vertical};$0
+
+fn main() {
+    let x = Vertical;
+}
+"#,
+            r#"
+mod modname {
+    use Direction::{Horizontal, Vertical};
+
+    pub(crate) struct Point;
+
+    impl Point {
+        pub const fn direction(self, other: Self) -> Option<Direction> {
+            Some(Vertical)
+        }
+    }
+
+    pub enum Direction {
+        Horizontal,
+        Vertical,
+    }
+}
+use modname::Direction::{Horizontal, Vertical};
+
+fn main() {
+    let x = Vertical;
+}
+"#,
+        );
+    }
 }
diff --git a/crates/ide-completion/src/completions/env_vars.rs b/crates/ide-completion/src/completions/env_vars.rs
index 35e6b97..4005753 100644
--- a/crates/ide-completion/src/completions/env_vars.rs
+++ b/crates/ide-completion/src/completions/env_vars.rs
@@ -1,7 +1,10 @@
 //! Completes environment variables defined by Cargo (https://doc.rust-lang.org/cargo/reference/environment-variables.html)
-use hir::Semantics;
-use ide_db::{syntax_helpers::node_ext::macro_call_for_string_token, RootDatabase};
-use syntax::ast::{self, IsString};
+use hir::MacroFileIdExt;
+use ide_db::syntax_helpers::node_ext::macro_call_for_string_token;
+use syntax::{
+    ast::{self, IsString},
+    AstToken,
+};
 
 use crate::{
     completions::Completions, context::CompletionContext, CompletionItem, CompletionItemKind,
@@ -32,10 +35,24 @@
 pub(crate) fn complete_cargo_env_vars(
     acc: &mut Completions,
     ctx: &CompletionContext<'_>,
+    original: &ast::String,
     expanded: &ast::String,
 ) -> Option<()> {
-    guard_env_macro(expanded, &ctx.sema)?;
-    let range = expanded.text_range_between_quotes()?;
+    let is_in_env_expansion = ctx
+        .sema
+        .hir_file_for(&expanded.syntax().parent()?)
+        .macro_file()
+        .map_or(false, |it| it.is_env_or_option_env(ctx.sema.db));
+    if !is_in_env_expansion {
+        let call = macro_call_for_string_token(expanded)?;
+        let makro = ctx.sema.resolve_macro_call(&call)?;
+        // We won't map into `option_env` as that generates `None` for non-existent env vars
+        // so fall back to this lookup
+        if !makro.is_env_or_option_env(ctx.sema.db) {
+            return None;
+        }
+    }
+    let range = original.text_range_between_quotes()?;
 
     CARGO_DEFINED_VARS.iter().for_each(|&(var, detail)| {
         let mut item = CompletionItem::new(CompletionItemKind::Keyword, range, var);
@@ -46,18 +63,6 @@
     Some(())
 }
 
-fn guard_env_macro(string: &ast::String, semantics: &Semantics<'_, RootDatabase>) -> Option<()> {
-    let call = macro_call_for_string_token(string)?;
-    let name = call.path()?.segment()?.name_ref()?;
-    let makro = semantics.resolve_macro_call(&call)?;
-    let db = semantics.db;
-
-    match name.text().as_str() {
-        "env" | "option_env" if makro.kind(db) == hir::MacroKind::BuiltIn => Some(()),
-        _ => None,
-    }
-}
-
 #[cfg(test)]
 mod tests {
     use crate::tests::{check_edit, completion_list};
@@ -68,7 +73,7 @@
             &format!(
                 r#"
             #[rustc_builtin_macro]
-            macro_rules! {macro_name} {{
+            macro {macro_name} {{
                 ($var:literal) => {{ 0 }}
             }}
 
@@ -80,7 +85,7 @@
             &format!(
                 r#"
             #[rustc_builtin_macro]
-            macro_rules! {macro_name} {{
+            macro {macro_name} {{
                 ($var:literal) => {{ 0 }}
             }}
 
diff --git a/crates/ide-completion/src/lib.rs b/crates/ide-completion/src/lib.rs
index 912f2fb..d89cfc8 100644
--- a/crates/ide-completion/src/lib.rs
+++ b/crates/ide-completion/src/lib.rs
@@ -207,7 +207,7 @@
             CompletionAnalysis::String { original, expanded: Some(expanded) } => {
                 completions::extern_abi::complete_extern_abi(acc, ctx, expanded);
                 completions::format_string::format_string(acc, ctx, original, expanded);
-                completions::env_vars::complete_cargo_env_vars(acc, ctx, expanded);
+                completions::env_vars::complete_cargo_env_vars(acc, ctx, original, expanded);
             }
             CompletionAnalysis::UnexpandedAttrTT {
                 colon_prefix,
diff --git a/crates/ide-completion/src/render.rs b/crates/ide-completion/src/render.rs
index 6d1a5a0..e88b36c 100644
--- a/crates/ide-completion/src/render.rs
+++ b/crates/ide-completion/src/render.rs
@@ -312,7 +312,7 @@
         None => ctx.source_range(),
     };
 
-    let mut item = CompletionItem::new(CompletionItemKind::Expression, source_range, label.clone());
+    let mut item = CompletionItem::new(CompletionItemKind::Expression, source_range, label);
 
     let snippet = format!(
         "{}$0",
diff --git a/crates/ide-db/src/defs.rs b/crates/ide-db/src/defs.rs
index 33970de..c0f0fab 100644
--- a/crates/ide-db/src/defs.rs
+++ b/crates/ide-db/src/defs.rs
@@ -407,7 +407,7 @@
     }
 
     pub fn classify(sema: &Semantics<'_, RootDatabase>, name: &ast::Name) -> Option<NameClass> {
-        let _p = tracing::span!(tracing::Level::INFO, "classify_name").entered();
+        let _p = tracing::span!(tracing::Level::INFO, "NameClass::classify").entered();
 
         let parent = name.syntax().parent()?;
 
@@ -499,7 +499,8 @@
         sema: &Semantics<'_, RootDatabase>,
         lifetime: &ast::Lifetime,
     ) -> Option<NameClass> {
-        let _p = tracing::span!(tracing::Level::INFO, "classify_lifetime", ?lifetime).entered();
+        let _p = tracing::span!(tracing::Level::INFO, "NameClass::classify_lifetime", ?lifetime)
+            .entered();
         let parent = lifetime.syntax().parent()?;
 
         if let Some(it) = ast::LifetimeParam::cast(parent.clone()) {
@@ -590,7 +591,8 @@
         sema: &Semantics<'_, RootDatabase>,
         name_ref: &ast::NameRef,
     ) -> Option<NameRefClass> {
-        let _p = tracing::span!(tracing::Level::INFO, "classify_name_ref", ?name_ref).entered();
+        let _p =
+            tracing::span!(tracing::Level::INFO, "NameRefClass::classify", ?name_ref).entered();
 
         let parent = name_ref.syntax().parent()?;
 
@@ -689,7 +691,8 @@
         sema: &Semantics<'_, RootDatabase>,
         lifetime: &ast::Lifetime,
     ) -> Option<NameRefClass> {
-        let _p = tracing::span!(tracing::Level::INFO, "classify_lifetime_ref", ?lifetime).entered();
+        let _p = tracing::span!(tracing::Level::INFO, "NameRefClass::classify_lifetime", ?lifetime)
+            .entered();
         let parent = lifetime.syntax().parent()?;
         match parent.kind() {
             SyntaxKind::BREAK_EXPR | SyntaxKind::CONTINUE_EXPR => {
diff --git a/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs b/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs
index 073287d..448df1c 100644
--- a/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs
+++ b/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs
@@ -44,7 +44,7 @@
 fn fixes(ctx: &DiagnosticsContext<'_>, d: &RemoveUnnecessaryElse) -> Option<Vec<Assist>> {
     let root = ctx.sema.db.parse_or_expand(d.if_expr.file_id);
     let if_expr = d.if_expr.value.to_node(&root);
-    let if_expr = ctx.sema.original_ast_node(if_expr.clone())?;
+    let if_expr = ctx.sema.original_ast_node(if_expr)?;
 
     let mut indent = IndentLevel::from_node(if_expr.syntax());
     let has_parent_if_expr = if_expr.syntax().parent().and_then(ast::IfExpr::cast).is_some();
diff --git a/crates/ide-diagnostics/src/lib.rs b/crates/ide-diagnostics/src/lib.rs
index c5e52d1..dea8022 100644
--- a/crates/ide-diagnostics/src/lib.rs
+++ b/crates/ide-diagnostics/src/lib.rs
@@ -410,9 +410,9 @@
         .iter_mut()
         .filter_map(|it| {
             Some((
-                it.main_node
-                    .map(|ptr| ptr.map(|node| node.to_node(&ctx.sema.parse_or_expand(ptr.file_id))))
-                    .clone()?,
+                it.main_node.map(|ptr| {
+                    ptr.map(|node| node.to_node(&ctx.sema.parse_or_expand(ptr.file_id)))
+                })?,
                 it,
             ))
         })
diff --git a/crates/ide/src/goto_definition.rs b/crates/ide/src/goto_definition.rs
index 1bda152..ddeeca5 100644
--- a/crates/ide/src/goto_definition.rs
+++ b/crates/ide/src/goto_definition.rs
@@ -1,10 +1,10 @@
-use std::mem::discriminant;
+use std::{iter, mem::discriminant};
 
 use crate::{
     doc_links::token_as_doc_comment, navigation_target::ToNav, FilePosition, NavigationTarget,
     RangeInfo, TryToNav,
 };
-use hir::{AsAssocItem, AssocItem, DescendPreference, ModuleDef, Semantics};
+use hir::{AsAssocItem, AssocItem, DescendPreference, MacroFileIdExt, ModuleDef, Semantics};
 use ide_db::{
     base_db::{AnchoredPath, FileId, FileLoader},
     defs::{Definition, IdentClass},
@@ -74,11 +74,13 @@
         .filter_map(|token| {
             let parent = token.parent()?;
 
-            if let Some(tt) = ast::TokenTree::cast(parent.clone()) {
-                if let Some(x) = try_lookup_include_path(sema, tt, token.clone(), file_id) {
+            if let Some(token) = ast::String::cast(token.clone()) {
+                if let Some(x) = try_lookup_include_path(sema, token, file_id) {
                     return Some(vec![x]);
                 }
+            }
 
+            if ast::TokenTree::can_cast(parent.kind()) {
                 if let Some(x) = try_lookup_macro_def_in_macro_use(sema, token) {
                     return Some(vec![x]);
                 }
@@ -111,24 +113,17 @@
 
 fn try_lookup_include_path(
     sema: &Semantics<'_, RootDatabase>,
-    tt: ast::TokenTree,
-    token: SyntaxToken,
+    token: ast::String,
     file_id: FileId,
 ) -> Option<NavigationTarget> {
-    let token = ast::String::cast(token)?;
-    let path = token.value()?.into_owned();
-    let macro_call = tt.syntax().parent().and_then(ast::MacroCall::cast)?;
-    let name = macro_call.path()?.segment()?.name_ref()?;
-    if !matches!(&*name.text(), "include" | "include_str" | "include_bytes") {
+    let file = sema.hir_file_for(&token.syntax().parent()?).macro_file()?;
+    if !iter::successors(Some(file), |file| file.parent(sema.db).macro_file())
+        // Check that we are in the eager argument expansion of an include macro
+        .any(|file| file.is_include_like_macro(sema.db) && file.eager_arg(sema.db).is_none())
+    {
         return None;
     }
-
-    // Ignore non-built-in macros to account for shadowing
-    if let Some(it) = sema.resolve_macro_call(&macro_call) {
-        if !matches!(it.kind(sema.db), hir::MacroKind::BuiltIn) {
-            return None;
-        }
-    }
+    let path = token.value()?;
 
     let file_id = sema.db.resolve_path(AnchoredPath { anchor: file_id, path: &path })?;
     let size = sema.db.file_text(file_id).len().try_into().ok()?;
@@ -1532,6 +1527,26 @@
     }
 
     #[test]
+    fn goto_include_has_eager_input() {
+        check(
+            r#"
+//- /main.rs
+#[rustc_builtin_macro]
+macro_rules! include_str {}
+#[rustc_builtin_macro]
+macro_rules! concat {}
+
+fn main() {
+    let str = include_str!(concat!("foo", ".tx$0t"));
+}
+//- /foo.txt
+// empty
+//^file
+"#,
+        );
+    }
+
+    #[test]
     fn goto_doc_include_str() {
         check(
             r#"
diff --git a/crates/ide/src/hover/tests.rs b/crates/ide/src/hover/tests.rs
index 051a962..4451e31 100644
--- a/crates/ide/src/hover/tests.rs
+++ b/crates/ide/src/hover/tests.rs
@@ -819,7 +819,7 @@
 fn hover_tuple_struct() {
     check(
         r#"
-struct Foo$0(pub u32)
+struct Foo$0(pub u32) where u32: Copy;
 "#,
         expect![[r#"
             *Foo*
@@ -830,7 +830,99 @@
 
             ```rust
             // size = 4, align = 4
-            struct Foo(pub u32);
+            struct Foo(pub u32)
+            where
+                u32: Copy,
+            ```
+        "#]],
+    );
+}
+
+#[test]
+fn hover_record_struct() {
+    check(
+        r#"
+struct Foo$0 { field: u32 }
+"#,
+        expect![[r#"
+            *Foo*
+
+            ```rust
+            test
+            ```
+
+            ```rust
+            // size = 4, align = 4
+            struct Foo {
+                field: u32,
+            }
+            ```
+        "#]],
+    );
+    check(
+        r#"
+struct Foo$0 where u32: Copy { field: u32 }
+"#,
+        expect![[r#"
+            *Foo*
+
+            ```rust
+            test
+            ```
+
+            ```rust
+            // size = 4, align = 4
+            struct Foo
+            where
+                u32: Copy,
+            {
+                field: u32,
+            }
+            ```
+        "#]],
+    );
+}
+
+#[test]
+fn hover_unit_struct() {
+    check(
+        r#"
+struct Foo$0 where u32: Copy;
+"#,
+        expect![[r#"
+            *Foo*
+
+            ```rust
+            test
+            ```
+
+            ```rust
+            // size = 0, align = 1
+            struct Foo
+            where
+                u32: Copy,
+            ```
+        "#]],
+    );
+}
+
+#[test]
+fn hover_type_alias() {
+    check(
+        r#"
+type Fo$0o: Trait = S where T: Trait;
+"#,
+        expect![[r#"
+            *Foo*
+
+            ```rust
+            test
+            ```
+
+            ```rust
+            type Foo: Trait = S
+            where
+                T: Trait,
             ```
         "#]],
     );
@@ -2540,7 +2632,7 @@
                                 focus_range: 7..10,
                                 name: "Arg",
                                 kind: Struct,
-                                description: "struct Arg(u32);",
+                                description: "struct Arg(u32)",
                             },
                         },
                         HoverGotoTypeData {
@@ -2599,7 +2691,7 @@
                                 focus_range: 7..10,
                                 name: "Arg",
                                 kind: Struct,
-                                description: "struct Arg(u32);",
+                                description: "struct Arg(u32)",
                             },
                         },
                         HoverGotoTypeData {
@@ -2648,7 +2740,7 @@
                                 focus_range: 7..8,
                                 name: "A",
                                 kind: Struct,
-                                description: "struct A(u32);",
+                                description: "struct A(u32)",
                             },
                         },
                         HoverGotoTypeData {
@@ -2661,7 +2753,7 @@
                                 focus_range: 22..23,
                                 name: "B",
                                 kind: Struct,
-                                description: "struct B(u32);",
+                                description: "struct B(u32)",
                             },
                         },
                         HoverGotoTypeData {
@@ -2675,7 +2767,7 @@
                                 name: "C",
                                 kind: Struct,
                                 container_name: "M",
-                                description: "pub struct C(u32);",
+                                description: "pub struct C(u32)",
                             },
                         },
                     ],
@@ -3331,26 +3423,26 @@
 impl<const BAR: Bar> Foo<BAR$0> {}
 "#,
         expect![[r#"
-                [
-                    GoToType(
-                        [
-                            HoverGotoTypeData {
-                                mod_path: "test::Bar",
-                                nav: NavigationTarget {
-                                    file_id: FileId(
-                                        0,
-                                    ),
-                                    full_range: 0..11,
-                                    focus_range: 7..10,
-                                    name: "Bar",
-                                    kind: Struct,
-                                    description: "struct Bar",
-                                },
+            [
+                GoToType(
+                    [
+                        HoverGotoTypeData {
+                            mod_path: "test::Bar",
+                            nav: NavigationTarget {
+                                file_id: FileId(
+                                    0,
+                                ),
+                                full_range: 0..11,
+                                focus_range: 7..10,
+                                name: "Bar",
+                                kind: Struct,
+                                description: "struct Bar",
                             },
-                        ],
-                    ),
-                ]
-            "#]],
+                        },
+                    ],
+                ),
+            ]
+        "#]],
     );
 }
 
@@ -3396,26 +3488,26 @@
 }
 "#,
         expect![[r#"
-                [
-                    GoToType(
-                        [
-                            HoverGotoTypeData {
-                                mod_path: "test::Foo",
-                                nav: NavigationTarget {
-                                    file_id: FileId(
-                                        0,
-                                    ),
-                                    full_range: 0..11,
-                                    focus_range: 7..10,
-                                    name: "Foo",
-                                    kind: Struct,
-                                    description: "struct Foo",
-                                },
+            [
+                GoToType(
+                    [
+                        HoverGotoTypeData {
+                            mod_path: "test::Foo",
+                            nav: NavigationTarget {
+                                file_id: FileId(
+                                    0,
+                                ),
+                                full_range: 0..11,
+                                focus_range: 7..10,
+                                name: "Foo",
+                                kind: Struct,
+                                description: "struct Foo",
                             },
-                        ],
-                    ),
-                ]
-            "#]],
+                        },
+                    ],
+                ),
+            ]
+        "#]],
     );
 }
 
@@ -3498,7 +3590,7 @@
             ```
 
             ```rust
-            struct ST<const C: usize = 1, T = Foo>(T);
+            struct ST<const C: usize = 1, T = Foo>(T)
             ```
         "#]],
     );
@@ -3519,7 +3611,7 @@
             ```
 
             ```rust
-            struct ST<const C: usize = {const}, T = Foo>(T);
+            struct ST<const C: usize = {const}, T = Foo>(T)
             ```
         "#]],
     );
@@ -3541,7 +3633,7 @@
             ```
 
             ```rust
-            struct ST<const C: usize = VAL, T = Foo>(T);
+            struct ST<const C: usize = VAL, T = Foo>(T)
             ```
         "#]],
     );
@@ -5931,26 +6023,26 @@
 }
 "#,
         expect![[r#"
-                [
-                    GoToType(
-                        [
-                            HoverGotoTypeData {
-                                mod_path: "test::Foo",
-                                nav: NavigationTarget {
-                                    file_id: FileId(
-                                        0,
-                                    ),
-                                    full_range: 0..11,
-                                    focus_range: 7..10,
-                                    name: "Foo",
-                                    kind: Struct,
-                                    description: "struct Foo",
-                                },
+            [
+                GoToType(
+                    [
+                        HoverGotoTypeData {
+                            mod_path: "test::Foo",
+                            nav: NavigationTarget {
+                                file_id: FileId(
+                                    0,
+                                ),
+                                full_range: 0..11,
+                                focus_range: 7..10,
+                                name: "Foo",
+                                kind: Struct,
+                                description: "struct Foo",
                             },
-                        ],
-                    ),
-                ]
-            "#]],
+                        },
+                    ],
+                ),
+            ]
+        "#]],
     );
 }
 
@@ -6166,7 +6258,7 @@
 
             ```rust
             // size = 4, align = 4
-            pub struct Foo(i32);
+            pub struct Foo(i32)
             ```
 
             ---
@@ -6191,7 +6283,7 @@
             ```
 
             ```rust
-            pub struct Foo<T>(T);
+            pub struct Foo<T>(T)
             ```
 
             ---
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html b/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html
index bcfe542..32ac6a9 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html
@@ -94,7 +94,7 @@
 <span class="brace">}</span>
 
 
-<span class="macro default_library library">include</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="none macro">concat</span><span class="punctuation macro">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"foo/"</span><span class="comma macro">,</span> <span class="string_literal macro">"foo.rs"</span><span class="parenthesis macro">)</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
+<span class="macro default_library library">include</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="macro default_library library macro">concat</span><span class="macro_bang macro">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"foo/"</span><span class="string_literal macro">,</span> <span class="string_literal macro">"foo.rs"</span><span class="parenthesis macro">)</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
 
 <span class="keyword">fn</span> <span class="function declaration">main</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
     <span class="macro default_library library">format_args</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"Hello, </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">!"</span><span class="comma macro">,</span> <span class="parenthesis macro">(</span><span class="numeric_literal macro">92</span><span class="comma macro">,</span><span class="parenthesis macro">)</span><span class="operator macro">.</span><span class="field library macro">0</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
diff --git a/crates/ide/src/test_explorer.rs b/crates/ide/src/test_explorer.rs
index 2e74102..ca47139 100644
--- a/crates/ide/src/test_explorer.rs
+++ b/crates/ide/src/test_explorer.rs
@@ -11,7 +11,7 @@
 
 #[derive(Debug)]
 pub enum TestItemKind {
-    Crate,
+    Crate(CrateId),
     Module,
     Function,
 }
@@ -32,15 +32,17 @@
     crate_graph
         .iter()
         .filter(|&id| crate_graph[id].origin.is_local())
-        .filter_map(|id| Some(crate_graph[id].display_name.as_ref()?.to_string()))
-        .map(|id| TestItem {
-            kind: TestItemKind::Crate,
-            label: id.clone(),
-            id,
-            parent: None,
-            file: None,
-            text_range: None,
-            runnable: None,
+        .filter_map(|id| {
+            let test_id = crate_graph[id].display_name.as_ref()?.to_string();
+            Some(TestItem {
+                kind: TestItemKind::Crate(id),
+                label: test_id.clone(),
+                id: test_id,
+                parent: None,
+                file: None,
+                text_range: None,
+                runnable: None,
+            })
         })
         .collect()
 }
@@ -118,12 +120,13 @@
     let Some(crate_test_id) = &crate_graph[crate_id].display_name else {
         return vec![];
     };
+    let kind = TestItemKind::Crate(crate_id);
     let crate_test_id = crate_test_id.to_string();
     let crate_id: Crate = crate_id.into();
     let module = crate_id.root_module();
     let mut r = vec![TestItem {
         id: crate_test_id.clone(),
-        kind: TestItemKind::Crate,
+        kind,
         label: crate_test_id.clone(),
         parent: None,
         file: None,
diff --git a/crates/mbe/src/benchmark.rs b/crates/mbe/src/benchmark.rs
index d946ecc..ac7f071 100644
--- a/crates/mbe/src/benchmark.rs
+++ b/crates/mbe/src/benchmark.rs
@@ -1,6 +1,7 @@
 //! This module add real world mbe example for benchmark tests
 
 use rustc_hash::FxHashMap;
+use span::Span;
 use syntax::{
     ast::{self, HasName},
     AstNode, SmolStr,
@@ -9,7 +10,7 @@
 
 use crate::{
     parser::{MetaVarKind, Op, RepeatKind, Separator},
-    syntax_node_to_token_tree, DeclarativeMacro, DummyTestSpanData, DummyTestSpanMap, DUMMY,
+    syntax_node_to_token_tree, DeclarativeMacro, DummyTestSpanMap, DUMMY,
 };
 
 #[test]
@@ -50,14 +51,14 @@
     assert_eq!(hash, 69413);
 }
 
-fn macro_rules_fixtures() -> FxHashMap<String, DeclarativeMacro<DummyTestSpanData>> {
+fn macro_rules_fixtures() -> FxHashMap<String, DeclarativeMacro<Span>> {
     macro_rules_fixtures_tt()
         .into_iter()
         .map(|(id, tt)| (id, DeclarativeMacro::parse_macro_rules(&tt, true, true)))
         .collect()
 }
 
-fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree<DummyTestSpanData>> {
+fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree<Span>> {
     let fixture = bench_fixture::numerous_macro_rules();
     let source_file = ast::SourceFile::parse(&fixture).ok().unwrap();
 
@@ -79,8 +80,8 @@
 
 /// Generate random invocation fixtures from rules
 fn invocation_fixtures(
-    rules: &FxHashMap<String, DeclarativeMacro<DummyTestSpanData>>,
-) -> Vec<(String, tt::Subtree<DummyTestSpanData>)> {
+    rules: &FxHashMap<String, DeclarativeMacro<Span>>,
+) -> Vec<(String, tt::Subtree<Span>)> {
     let mut seed = 123456789;
     let mut res = Vec::new();
 
@@ -128,8 +129,8 @@
     return res;
 
     fn collect_from_op(
-        op: &Op<DummyTestSpanData>,
-        token_trees: &mut Vec<tt::TokenTree<DummyTestSpanData>>,
+        op: &Op<Span>,
+        token_trees: &mut Vec<tt::TokenTree<Span>>,
         seed: &mut usize,
     ) {
         return match op {
@@ -221,19 +222,19 @@
             *seed = usize::wrapping_add(usize::wrapping_mul(*seed, a), c);
             *seed
         }
-        fn make_ident(ident: &str) -> tt::TokenTree<DummyTestSpanData> {
+        fn make_ident(ident: &str) -> tt::TokenTree<Span> {
             tt::Leaf::Ident(tt::Ident { span: DUMMY, text: SmolStr::new(ident) }).into()
         }
-        fn make_punct(char: char) -> tt::TokenTree<DummyTestSpanData> {
+        fn make_punct(char: char) -> tt::TokenTree<Span> {
             tt::Leaf::Punct(tt::Punct { span: DUMMY, char, spacing: tt::Spacing::Alone }).into()
         }
-        fn make_literal(lit: &str) -> tt::TokenTree<DummyTestSpanData> {
+        fn make_literal(lit: &str) -> tt::TokenTree<Span> {
             tt::Leaf::Literal(tt::Literal { span: DUMMY, text: SmolStr::new(lit) }).into()
         }
         fn make_subtree(
             kind: tt::DelimiterKind,
-            token_trees: Option<Vec<tt::TokenTree<DummyTestSpanData>>>,
-        ) -> tt::TokenTree<DummyTestSpanData> {
+            token_trees: Option<Vec<tt::TokenTree<Span>>>,
+        ) -> tt::TokenTree<Span> {
             tt::Subtree {
                 delimiter: tt::Delimiter { open: DUMMY, close: DUMMY, kind },
                 token_trees: token_trees.map(Vec::into_boxed_slice).unwrap_or_default(),
diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs
index cb8f8df..57d6082 100644
--- a/crates/mbe/src/syntax_bridge.rs
+++ b/crates/mbe/src/syntax_bridge.rs
@@ -41,32 +41,30 @@
 /// Dummy things for testing where spans don't matter.
 pub(crate) mod dummy_test_span_utils {
 
+    use span::{Span, SyntaxContextId};
+
     use super::*;
 
-    pub type DummyTestSpanData = span::SpanData<DummyTestSyntaxContext>;
-    pub const DUMMY: DummyTestSpanData = span::SpanData {
+    pub const DUMMY: Span = Span {
         range: TextRange::empty(TextSize::new(0)),
         anchor: span::SpanAnchor {
             file_id: span::FileId::BOGUS,
             ast_id: span::ROOT_ERASED_FILE_AST_ID,
         },
-        ctx: DummyTestSyntaxContext,
+        ctx: SyntaxContextId::ROOT,
     };
 
-    #[derive(Debug, Copy, Clone, PartialEq, Eq)]
-    pub struct DummyTestSyntaxContext;
-
     pub struct DummyTestSpanMap;
 
-    impl SpanMapper<span::SpanData<DummyTestSyntaxContext>> for DummyTestSpanMap {
-        fn span_for(&self, range: syntax::TextRange) -> span::SpanData<DummyTestSyntaxContext> {
-            span::SpanData {
+    impl SpanMapper<Span> for DummyTestSpanMap {
+        fn span_for(&self, range: syntax::TextRange) -> Span {
+            Span {
                 range,
                 anchor: span::SpanAnchor {
                     file_id: span::FileId::BOGUS,
                     ast_id: span::ROOT_ERASED_FILE_AST_ID,
                 },
-                ctx: DummyTestSyntaxContext,
+                ctx: SyntaxContextId::ROOT,
             }
         }
     }
diff --git a/crates/mbe/src/syntax_bridge/tests.rs b/crates/mbe/src/syntax_bridge/tests.rs
index 11d1a72..a261b1d 100644
--- a/crates/mbe/src/syntax_bridge/tests.rs
+++ b/crates/mbe/src/syntax_bridge/tests.rs
@@ -1,4 +1,5 @@
 use rustc_hash::FxHashMap;
+use span::Span;
 use syntax::{ast, AstNode};
 use test_utils::extract_annotations;
 use tt::{
@@ -6,7 +7,7 @@
     Leaf, Punct, Spacing,
 };
 
-use crate::{syntax_node_to_token_tree, DummyTestSpanData, DummyTestSpanMap, DUMMY};
+use crate::{syntax_node_to_token_tree, DummyTestSpanMap, DUMMY};
 
 fn check_punct_spacing(fixture: &str) {
     let source_file = ast::SourceFile::parse(fixture).ok().unwrap();
@@ -28,7 +29,7 @@
     while !cursor.eof() {
         while let Some(token_tree) = cursor.token_tree() {
             if let TokenTreeRef::Leaf(
-                Leaf::Punct(Punct { spacing, span: DummyTestSpanData { range, .. }, .. }),
+                Leaf::Punct(Punct { spacing, span: Span { range, .. }, .. }),
                 _,
             ) = token_tree
             {
diff --git a/crates/proc-macro-srv/src/tests/mod.rs b/crates/proc-macro-srv/src/tests/mod.rs
index 54a2035..11b008f 100644
--- a/crates/proc-macro-srv/src/tests/mod.rs
+++ b/crates/proc-macro-srv/src/tests/mod.rs
@@ -8,7 +8,12 @@
 
 #[test]
 fn test_derive_empty() {
-    assert_expand("DeriveEmpty", r#"struct S;"#, expect!["SUBTREE $$ 1 1"], expect!["SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"]);
+    assert_expand(
+        "DeriveEmpty",
+        r#"struct S;"#,
+        expect!["SUBTREE $$ 1 1"],
+        expect!["SUBTREE $$ 42:2@0..100#0 42:2@0..100#0"],
+    );
 }
 
 #[test]
@@ -21,15 +26,15 @@
               IDENT   compile_error 1
               PUNCH   ! [alone] 1
               SUBTREE () 1 1
-                LITERAL "#[derive(DeriveError)] struct S ;" 1
+                LITERAL "#[derive(DeriveError)] struct S ;"1
               PUNCH   ; [alone] 1"##]],
         expect![[r##"
-            SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
-              IDENT   compile_error SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
-              PUNCH   ! [alone] SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
-              SUBTREE () SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
-                LITERAL "#[derive(DeriveError)] struct S ;" SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
-              PUNCH   ; [alone] SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"##]],
+            SUBTREE $$ 42:2@0..100#0 42:2@0..100#0
+              IDENT   compile_error 42:2@0..100#0
+              PUNCH   ! [alone] 42:2@0..100#0
+              SUBTREE () 42:2@0..100#0 42:2@0..100#0
+                LITERAL "#[derive(DeriveError)] struct S ;"42:2@0..100#0
+              PUNCH   ; [alone] 42:2@0..100#0"##]],
     );
 }
 
@@ -42,20 +47,20 @@
             SUBTREE $$ 1 1
               IDENT   ident 1
               PUNCH   , [alone] 1
-              LITERAL 0 1
+              LITERAL 01
               PUNCH   , [alone] 1
-              LITERAL 1 1
+              LITERAL 11
               PUNCH   , [alone] 1
               SUBTREE [] 1 1"#]],
         expect![[r#"
-            SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
-              IDENT   ident SpanData { range: 0..5, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
-              PUNCH   , [alone] SpanData { range: 5..6, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
-              LITERAL 0 SpanData { range: 7..8, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
-              PUNCH   , [alone] SpanData { range: 8..9, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
-              LITERAL 1 SpanData { range: 10..11, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
-              PUNCH   , [alone] SpanData { range: 11..12, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
-              SUBTREE [] SpanData { range: 13..14, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 14..15, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"#]],
+            SUBTREE $$ 42:2@0..100#0 42:2@0..100#0
+              IDENT   ident 42:2@0..5#0
+              PUNCH   , [alone] 42:2@5..6#0
+              LITERAL 042:2@7..8#0
+              PUNCH   , [alone] 42:2@8..9#0
+              LITERAL 142:2@10..11#0
+              PUNCH   , [alone] 42:2@11..12#0
+              SUBTREE [] 42:2@13..14#0 42:2@14..15#0"#]],
     );
 }
 
@@ -70,10 +75,10 @@
               PUNCH   , [alone] 1
               SUBTREE [] 1 1"#]],
         expect![[r#"
-            SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
-              IDENT   ident SpanData { range: 0..5, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
-              PUNCH   , [alone] SpanData { range: 5..6, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
-              SUBTREE [] SpanData { range: 7..8, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 7..8, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"#]],
+            SUBTREE $$ 42:2@0..100#0 42:2@0..100#0
+              IDENT   ident 42:2@0..5#0
+              PUNCH   , [alone] 42:2@5..6#0
+              SUBTREE [] 42:2@7..8#0 42:2@7..8#0"#]],
     );
 }
 
@@ -86,8 +91,8 @@
             SUBTREE $$ 1 1
               IDENT   r#async 1"#]],
         expect![[r#"
-            SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
-              IDENT   r#async SpanData { range: 0..7, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"#]],
+            SUBTREE $$ 42:2@0..100#0 42:2@0..100#0
+              IDENT   r#async 42:2@0..7#0"#]],
     );
 }
 
@@ -100,8 +105,8 @@
             SUBTREE $$ 1 1
               IDENT   r#joined 1"#]],
         expect![[r#"
-            SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
-              IDENT   r#joined SpanData { range: 0..11, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"#]],
+            SUBTREE $$ 42:2@0..100#0 42:2@0..100#0
+              IDENT   r#joined 42:2@0..11#0"#]],
     );
 }
 
@@ -116,10 +121,10 @@
               IDENT   resolved_at_def_site 1
               IDENT   start_span 1"#]],
         expect![[r#"
-            SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
-              IDENT   set_def_site SpanData { range: 0..150, anchor: SpanAnchor(FileId(41), 1), ctx: SyntaxContextId(0) }
-              IDENT   resolved_at_def_site SpanData { range: 13..33, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
-              IDENT   start_span SpanData { range: 34..34, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"#]],
+            SUBTREE $$ 42:2@0..100#0 42:2@0..100#0
+              IDENT   set_def_site 41:1@0..150#0
+              IDENT   resolved_at_def_site 42:2@13..33#0
+              IDENT   start_span 42:2@34..34#0"#]],
     );
 }
 
@@ -130,22 +135,22 @@
         r#""#,
         expect![[r#"
             SUBTREE $$ 1 1
-              LITERAL b"byte_string" 1
-              LITERAL 'c' 1
-              LITERAL "string" 1
-              LITERAL 3.14f64 1
-              LITERAL 3.14 1
-              LITERAL 123i64 1
-              LITERAL 123 1"#]],
+              LITERAL b"byte_string"1
+              LITERAL 'c'1
+              LITERAL "string"1
+              LITERAL 3.14f641
+              LITERAL 3.141
+              LITERAL 123i641
+              LITERAL 1231"#]],
         expect![[r#"
-            SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
-              LITERAL b"byte_string" SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
-              LITERAL 'c' SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
-              LITERAL "string" SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
-              LITERAL 3.14f64 SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
-              LITERAL 3.14 SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
-              LITERAL 123i64 SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
-              LITERAL 123 SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"#]],
+            SUBTREE $$ 42:2@0..100#0 42:2@0..100#0
+              LITERAL b"byte_string"42:2@0..100#0
+              LITERAL 'c'42:2@0..100#0
+              LITERAL "string"42:2@0..100#0
+              LITERAL 3.14f6442:2@0..100#0
+              LITERAL 3.1442:2@0..100#0
+              LITERAL 123i6442:2@0..100#0
+              LITERAL 12342:2@0..100#0"#]],
     );
 }
 
@@ -159,9 +164,9 @@
               IDENT   standard 1
               IDENT   r#raw 1"#]],
         expect![[r#"
-            SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
-              IDENT   standard SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
-              IDENT   r#raw SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"#]],
+            SUBTREE $$ 42:2@0..100#0 42:2@0..100#0
+              IDENT   standard 42:2@0..100#0
+              IDENT   r#raw 42:2@0..100#0"#]],
     );
 }
 
@@ -172,48 +177,48 @@
         r###"1u16, 2_u32, -4i64, 3.14f32, "hello bridge", "suffixed"suffix, r##"raw"##, 'a', b'b', c"null""###,
         expect![[r###"
             SUBTREE $$ 1 1
-              LITERAL 1u16 1
+              LITERAL 1u161
               PUNCH   , [alone] 1
-              LITERAL 2_u32 1
+              LITERAL 2_u321
               PUNCH   , [alone] 1
               PUNCH   - [alone] 1
-              LITERAL 4i64 1
+              LITERAL 4i641
               PUNCH   , [alone] 1
-              LITERAL 3.14f32 1
+              LITERAL 3.14f321
               PUNCH   , [alone] 1
-              LITERAL "hello bridge" 1
+              LITERAL "hello bridge"1
               PUNCH   , [alone] 1
-              LITERAL "suffixed"suffix 1
+              LITERAL "suffixed"suffix1
               PUNCH   , [alone] 1
-              LITERAL r##"raw"## 1
+              LITERAL r##"raw"##1
               PUNCH   , [alone] 1
-              LITERAL 'a' 1
+              LITERAL 'a'1
               PUNCH   , [alone] 1
-              LITERAL b'b' 1
+              LITERAL b'b'1
               PUNCH   , [alone] 1
-              LITERAL c"null" 1"###]],
+              LITERAL c"null"1"###]],
         expect![[r###"
-            SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
-              LITERAL 1u16 SpanData { range: 0..4, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
-              PUNCH   , [alone] SpanData { range: 4..5, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
-              LITERAL 2_u32 SpanData { range: 6..11, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
-              PUNCH   , [alone] SpanData { range: 11..12, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
-              PUNCH   - [alone] SpanData { range: 13..14, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
-              LITERAL 4i64 SpanData { range: 14..18, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
-              PUNCH   , [alone] SpanData { range: 18..19, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
-              LITERAL 3.14f32 SpanData { range: 20..27, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
-              PUNCH   , [alone] SpanData { range: 27..28, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
-              LITERAL "hello bridge" SpanData { range: 29..43, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
-              PUNCH   , [alone] SpanData { range: 43..44, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
-              LITERAL "suffixed"suffix SpanData { range: 45..61, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
-              PUNCH   , [alone] SpanData { range: 61..62, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
-              LITERAL r##"raw"## SpanData { range: 63..73, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
-              PUNCH   , [alone] SpanData { range: 73..74, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
-              LITERAL 'a' SpanData { range: 75..78, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
-              PUNCH   , [alone] SpanData { range: 78..79, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
-              LITERAL b'b' SpanData { range: 80..84, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
-              PUNCH   , [alone] SpanData { range: 84..85, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
-              LITERAL c"null" SpanData { range: 86..93, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"###]],
+            SUBTREE $$ 42:2@0..100#0 42:2@0..100#0
+              LITERAL 1u1642:2@0..4#0
+              PUNCH   , [alone] 42:2@4..5#0
+              LITERAL 2_u3242:2@6..11#0
+              PUNCH   , [alone] 42:2@11..12#0
+              PUNCH   - [alone] 42:2@13..14#0
+              LITERAL 4i6442:2@14..18#0
+              PUNCH   , [alone] 42:2@18..19#0
+              LITERAL 3.14f3242:2@20..27#0
+              PUNCH   , [alone] 42:2@27..28#0
+              LITERAL "hello bridge"42:2@29..43#0
+              PUNCH   , [alone] 42:2@43..44#0
+              LITERAL "suffixed"suffix42:2@45..61#0
+              PUNCH   , [alone] 42:2@61..62#0
+              LITERAL r##"raw"##42:2@63..73#0
+              PUNCH   , [alone] 42:2@73..74#0
+              LITERAL 'a'42:2@75..78#0
+              PUNCH   , [alone] 42:2@78..79#0
+              LITERAL b'b'42:2@80..84#0
+              PUNCH   , [alone] 42:2@84..85#0
+              LITERAL c"null"42:2@86..93#0"###]],
     );
 }
 
@@ -231,15 +236,15 @@
               IDENT   compile_error 1
               PUNCH   ! [alone] 1
               SUBTREE () 1 1
-                LITERAL "#[attr_error(some arguments)] mod m {}" 1
+                LITERAL "#[attr_error(some arguments)] mod m {}"1
               PUNCH   ; [alone] 1"##]],
         expect![[r##"
-            SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
-              IDENT   compile_error SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
-              PUNCH   ! [alone] SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
-              SUBTREE () SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
-                LITERAL "#[attr_error(some arguments)] mod m {}" SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
-              PUNCH   ; [alone] SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"##]],
+            SUBTREE $$ 42:2@0..100#0 42:2@0..100#0
+              IDENT   compile_error 42:2@0..100#0
+              PUNCH   ! [alone] 42:2@0..100#0
+              SUBTREE () 42:2@0..100#0 42:2@0..100#0
+                LITERAL "#[attr_error(some arguments)] mod m {}"42:2@0..100#0
+              PUNCH   ; [alone] 42:2@0..100#0"##]],
     );
 }
 
diff --git a/crates/proc-macro-srv/src/tests/utils.rs b/crates/proc-macro-srv/src/tests/utils.rs
index 9a1311d..6050bc9 100644
--- a/crates/proc-macro-srv/src/tests/utils.rs
+++ b/crates/proc-macro-srv/src/tests/utils.rs
@@ -91,7 +91,7 @@
     let res = expander
         .expand(macro_name, fixture.into_subtree(call_site), attr, def_site, call_site, mixed_site)
         .unwrap();
-    expect_s.assert_eq(&format!("{res:?}"));
+    expect_s.assert_eq(&format!("{res:#?}"));
 }
 
 pub(crate) fn list() -> Vec<String> {
diff --git a/crates/rust-analyzer/src/cli/rustc_tests.rs b/crates/rust-analyzer/src/cli/rustc_tests.rs
index 84f2e60..eeec13a 100644
--- a/crates/rust-analyzer/src/cli/rustc_tests.rs
+++ b/crates/rust-analyzer/src/cli/rustc_tests.rs
@@ -76,7 +76,7 @@
         );
 
         let workspace = ProjectWorkspace::DetachedFiles {
-            files: vec![tmp_file.clone()],
+            files: vec![tmp_file],
             sysroot,
             rustc_cfg: vec![],
             toolchain: None,
diff --git a/crates/rust-analyzer/src/integrated_benchmarks.rs b/crates/rust-analyzer/src/integrated_benchmarks.rs
index ae58e6b..1c5a862 100644
--- a/crates/rust-analyzer/src/integrated_benchmarks.rs
+++ b/crates/rust-analyzer/src/integrated_benchmarks.rs
@@ -56,8 +56,6 @@
         vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {path}"))
     };
 
-    let _g = crate::tracing::hprof::init("*>150");
-
     {
         let _it = stdx::timeit("initial");
         let analysis = host.analysis();
@@ -67,13 +65,16 @@
     {
         let _it = stdx::timeit("change");
         let mut text = host.analysis().file_text(file_id).unwrap().to_string();
-        text.push_str("\npub fn _dummy() {}\n");
+        text = text.replace(
+            "self.data.cargo_buildScripts_rebuildOnSave",
+            "self. data. cargo_buildScripts_rebuildOnSave",
+        );
         let mut change = ChangeWithProcMacros::new();
         change.change_file(file_id, Some(text));
         host.apply_change(change);
     }
 
-    let _g = crate::tracing::hprof::init("*>50");
+    let _g = crate::tracing::hprof::init("*>20");
 
     {
         let _it = stdx::timeit("after change");
diff --git a/crates/rust-analyzer/src/lsp/ext.rs b/crates/rust-analyzer/src/lsp/ext.rs
index 86ab652..710ce7f 100644
--- a/crates/rust-analyzer/src/lsp/ext.rs
+++ b/crates/rust-analyzer/src/lsp/ext.rs
@@ -234,6 +234,13 @@
     const METHOD: &'static str = "experimental/endRunTest";
 }
 
+pub enum AppendOutputToRunTest {}
+
+impl Notification for AppendOutputToRunTest {
+    type Params = String;
+    const METHOD: &'static str = "experimental/appendOutputToRunTest";
+}
+
 pub enum AbortRunTest {}
 
 impl Notification for AbortRunTest {
diff --git a/crates/rust-analyzer/src/lsp/to_proto.rs b/crates/rust-analyzer/src/lsp/to_proto.rs
index 0423b2f..e77d0c1 100644
--- a/crates/rust-analyzer/src/lsp/to_proto.rs
+++ b/crates/rust-analyzer/src/lsp/to_proto.rs
@@ -1519,13 +1519,28 @@
         id: test_item.id,
         label: test_item.label,
         kind: match test_item.kind {
-            ide::TestItemKind::Crate => lsp_ext::TestItemKind::Package,
+            ide::TestItemKind::Crate(id) => 'b: {
+                let Some((cargo_ws, target)) = snap.cargo_target_for_crate_root(id) else {
+                    break 'b lsp_ext::TestItemKind::Package;
+                };
+                let target = &cargo_ws[target];
+                match target.kind {
+                    project_model::TargetKind::Bin
+                    | project_model::TargetKind::Lib { .. }
+                    | project_model::TargetKind::Example
+                    | project_model::TargetKind::BuildScript
+                    | project_model::TargetKind::Other => lsp_ext::TestItemKind::Package,
+                    project_model::TargetKind::Test | project_model::TargetKind::Bench => {
+                        lsp_ext::TestItemKind::Test
+                    }
+                }
+            }
             ide::TestItemKind::Module => lsp_ext::TestItemKind::Module,
             ide::TestItemKind::Function => lsp_ext::TestItemKind::Test,
         },
         can_resolve_children: matches!(
             test_item.kind,
-            ide::TestItemKind::Crate | ide::TestItemKind::Module
+            ide::TestItemKind::Crate(_) | ide::TestItemKind::Module
         ),
         parent: test_item.parent,
         text_document: test_item
diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs
index e106a85..ffe56e4 100644
--- a/crates/rust-analyzer/src/main_loop.rs
+++ b/crates/rust-analyzer/src/main_loop.rs
@@ -799,6 +799,9 @@
                 self.send_notification::<lsp_ext::EndRunTest>(());
                 self.test_run_session = None;
             }
+            flycheck::CargoTestMessage::Custom { text } => {
+                self.send_notification::<lsp_ext::AppendOutputToRunTest>(text);
+            }
         }
     }
 
diff --git a/crates/span/src/hygiene.rs b/crates/span/src/hygiene.rs
index 4f6d792..e4b0a26 100644
--- a/crates/span/src/hygiene.rs
+++ b/crates/span/src/hygiene.rs
@@ -26,9 +26,19 @@
 use crate::MacroCallId;
 
 /// Interned [`SyntaxContextData`].
-#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
 pub struct SyntaxContextId(InternId);
 
+impl fmt::Debug for SyntaxContextId {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        if f.alternate() {
+            write!(f, "{}", self.0.as_u32())
+        } else {
+            f.debug_tuple("SyntaxContextId").field(&self.0).finish()
+        }
+    }
+}
+
 impl salsa::InternKey for SyntaxContextId {
     fn from_intern_id(v: salsa::InternId) -> Self {
         SyntaxContextId(v)
diff --git a/crates/span/src/lib.rs b/crates/span/src/lib.rs
index b262476..6b849ce 100644
--- a/crates/span/src/lib.rs
+++ b/crates/span/src/lib.rs
@@ -44,7 +44,10 @@
 
 pub type Span = SpanData<SyntaxContextId>;
 
-#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
+/// Spans represent a region of code, used by the IDE to be able to link macro inputs and
+/// outputs together. Positions in spans are relative to some [`SpanAnchor`] to make them more
+/// incrementality-friendly.
+#[derive(Clone, Copy, PartialEq, Eq, Hash)]
 pub struct SpanData<Ctx> {
     /// The text range of this span, relative to the anchor.
     /// We need the anchor for incrementality, as storing absolute ranges will require
@@ -56,6 +59,26 @@
     pub ctx: Ctx,
 }
 
+impl<Ctx: fmt::Debug> fmt::Debug for SpanData<Ctx> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        if f.alternate() {
+            fmt::Debug::fmt(&self.anchor.file_id.index(), f)?;
+            f.write_char(':')?;
+            fmt::Debug::fmt(&self.anchor.ast_id.into_raw(), f)?;
+            f.write_char('@')?;
+            fmt::Debug::fmt(&self.range, f)?;
+            f.write_char('#')?;
+            self.ctx.fmt(f)
+        } else {
+            f.debug_struct("SpanData")
+                .field("range", &self.range)
+                .field("anchor", &self.anchor)
+                .field("ctx", &self.ctx)
+                .finish()
+        }
+    }
+}
+
 impl<Ctx: Copy> SpanData<Ctx> {
     pub fn eq_ignoring_ctx(self, other: Self) -> bool {
         self.anchor == other.anchor && self.range == other.range
@@ -64,7 +87,7 @@
 
 impl Span {
     #[deprecated = "dummy spans will panic if surfaced incorrectly, as such they should be replaced appropriately"]
-    pub const DUMMY: Self = SpanData {
+    pub const DUMMY: Self = Self {
         range: TextRange::empty(TextSize::new(0)),
         anchor: SpanAnchor { file_id: FileId::BOGUS, ast_id: ROOT_ERASED_FILE_AST_ID },
         ctx: SyntaxContextId::ROOT,
diff --git a/crates/span/src/map.rs b/crates/span/src/map.rs
index 7b42551..1f396a1 100644
--- a/crates/span/src/map.rs
+++ b/crates/span/src/map.rs
@@ -1,7 +1,7 @@
 //! A map that maps a span to every position in a file. Usually maps a span to some range of positions.
 //! Allows bidirectional lookup.
 
-use std::hash::Hash;
+use std::{fmt, hash::Hash};
 
 use stdx::{always, itertools::Itertools};
 use syntax::{TextRange, TextSize};
@@ -52,7 +52,7 @@
     /// Returns all [`TextRange`]s that correspond to the given span.
     ///
     /// Note this does a linear search through the entire backing vector.
-    pub fn ranges_with_span(&self, span: SpanData<S>) -> impl Iterator<Item = TextRange> + '_
+    pub fn ranges_with_span_exact(&self, span: SpanData<S>) -> impl Iterator<Item = TextRange> + '_
     where
         S: Copy,
     {
@@ -65,6 +65,25 @@
         })
     }
 
+    /// Returns all [`TextRange`]s whose spans contain the given span.
+    ///
+    /// Note this does a linear search through the entire backing vector.
+    pub fn ranges_with_span(&self, span: SpanData<S>) -> impl Iterator<Item = TextRange> + '_
+    where
+        S: Copy,
+    {
+        self.spans.iter().enumerate().filter_map(move |(idx, &(end, s))| {
+            if s.anchor != span.anchor {
+                return None;
+            }
+            if !s.range.contains_range(span.range) {
+                return None;
+            }
+            let start = idx.checked_sub(1).map_or(TextSize::new(0), |prev| self.spans[prev].0);
+            Some(TextRange::new(start, end))
+        })
+    }
+
     /// Returns the span at the given position.
     pub fn span_at(&self, offset: TextSize) -> SpanData<S> {
         let entry = self.spans.partition_point(|&(it, _)| it <= offset);
@@ -94,6 +113,16 @@
     end: TextSize,
 }
 
+impl fmt::Display for RealSpanMap {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        writeln!(f, "RealSpanMap({:?}):", self.file_id)?;
+        for span in self.pairs.iter() {
+            writeln!(f, "{}: {}", u32::from(span.0), span.1.into_raw().into_u32())?;
+        }
+        Ok(())
+    }
+}
+
 impl RealSpanMap {
     /// Creates a real file span map that returns absolute ranges (relative ranges to the root ast id).
     pub fn absolute(file_id: FileId) -> Self {
diff --git a/crates/stdx/src/anymap.rs b/crates/stdx/src/anymap.rs
index 899cd8a..d47b3d1 100644
--- a/crates/stdx/src/anymap.rs
+++ b/crates/stdx/src/anymap.rs
@@ -194,21 +194,6 @@
 mod tests {
     use super::*;
 
-    #[derive(Clone, Debug, PartialEq)]
-    struct A(i32);
-    #[derive(Clone, Debug, PartialEq)]
-    struct B(i32);
-    #[derive(Clone, Debug, PartialEq)]
-    struct C(i32);
-    #[derive(Clone, Debug, PartialEq)]
-    struct D(i32);
-    #[derive(Clone, Debug, PartialEq)]
-    struct E(i32);
-    #[derive(Clone, Debug, PartialEq)]
-    struct F(i32);
-    #[derive(Clone, Debug, PartialEq)]
-    struct J(i32);
-
     #[test]
     fn test_varieties() {
         fn assert_send<T: Send>() {}
diff --git a/crates/syntax/src/ast/node_ext.rs b/crates/syntax/src/ast/node_ext.rs
index 1bc1ef8..c3d6f50 100644
--- a/crates/syntax/src/ast/node_ext.rs
+++ b/crates/syntax/src/ast/node_ext.rs
@@ -139,6 +139,17 @@
     }
 }
 
+impl From<ast::ExternItem> for ast::Item {
+    fn from(extern_item: ast::ExternItem) -> Self {
+        match extern_item {
+            ast::ExternItem::Static(it) => ast::Item::Static(it),
+            ast::ExternItem::Fn(it) => ast::Item::Fn(it),
+            ast::ExternItem::MacroCall(it) => ast::Item::MacroCall(it),
+            ast::ExternItem::TypeAlias(it) => ast::Item::TypeAlias(it),
+        }
+    }
+}
+
 #[derive(Debug, Copy, Clone, PartialEq, Eq)]
 pub enum AttrKind {
     Inner,
diff --git a/crates/tt/src/lib.rs b/crates/tt/src/lib.rs
index eec88f8..28289a6 100644
--- a/crates/tt/src/lib.rs
+++ b/crates/tt/src/lib.rs
@@ -177,17 +177,19 @@
     let align = "  ".repeat(level);
 
     let Delimiter { kind, open, close } = &subtree.delimiter;
-    let aux = match kind {
-        DelimiterKind::Invisible => format!("$$ {:?} {:?}", open, close),
-        DelimiterKind::Parenthesis => format!("() {:?} {:?}", open, close),
-        DelimiterKind::Brace => format!("{{}} {:?} {:?}", open, close),
-        DelimiterKind::Bracket => format!("[] {:?} {:?}", open, close),
+    let delim = match kind {
+        DelimiterKind::Invisible => "$$",
+        DelimiterKind::Parenthesis => "()",
+        DelimiterKind::Brace => "{}",
+        DelimiterKind::Bracket => "[]",
     };
 
-    if subtree.token_trees.is_empty() {
-        write!(f, "{align}SUBTREE {aux}")?;
-    } else {
-        writeln!(f, "{align}SUBTREE {aux}")?;
+    write!(f, "{align}SUBTREE {delim} ",)?;
+    fmt::Debug::fmt(&open, f)?;
+    write!(f, " ")?;
+    fmt::Debug::fmt(&close, f)?;
+    if !subtree.token_trees.is_empty() {
+        writeln!(f)?;
         for (idx, child) in subtree.token_trees.iter().enumerate() {
             print_debug_token(f, child, level + 1)?;
             if idx != subtree.token_trees.len() - 1 {
@@ -208,16 +210,24 @@
 
     match tkn {
         TokenTree::Leaf(leaf) => match leaf {
-            Leaf::Literal(lit) => write!(f, "{}LITERAL {} {:?}", align, lit.text, lit.span)?,
-            Leaf::Punct(punct) => write!(
-                f,
-                "{}PUNCH   {} [{}] {:?}",
-                align,
-                punct.char,
-                if punct.spacing == Spacing::Alone { "alone" } else { "joint" },
-                punct.span
-            )?,
-            Leaf::Ident(ident) => write!(f, "{}IDENT   {} {:?}", align, ident.text, ident.span)?,
+            Leaf::Literal(lit) => {
+                write!(f, "{}LITERAL {}", align, lit.text)?;
+                fmt::Debug::fmt(&lit.span, f)?;
+            }
+            Leaf::Punct(punct) => {
+                write!(
+                    f,
+                    "{}PUNCH   {} [{}] ",
+                    align,
+                    punct.char,
+                    if punct.spacing == Spacing::Alone { "alone" } else { "joint" },
+                )?;
+                fmt::Debug::fmt(&punct.span, f)?;
+            }
+            Leaf::Ident(ident) => {
+                write!(f, "{}IDENT   {} ", align, ident.text)?;
+                fmt::Debug::fmt(&ident.span, f)?;
+            }
         },
         TokenTree::Subtree(subtree) => {
             print_debug_subtree(f, subtree, level)?;
diff --git a/docs/dev/lsp-extensions.md b/docs/dev/lsp-extensions.md
index af5b4e5..cf9ad5f 100644
--- a/docs/dev/lsp-extensions.md
+++ b/docs/dev/lsp-extensions.md
@@ -1,5 +1,5 @@
 <!---
-lsp/ext.rs hash: 6bc140531b403717
+lsp/ext.rs hash: 61f485497d6e8e88
 
 If you need to change the above hash to make the test pass, please check if you
 need to adjust this doc as well and ping this issue:
@@ -509,6 +509,13 @@
 }
 ```
 
+**Method:** `experimental/appendOutputToRunTest`
+
+**Notification:** `string`
+
+This notification is used for reporting messages that are independent of any single test and that
+relate to the run session in general, e.g. cargo compilation progress messages or warnings.
+
 ## Open External Documentation
 
 This request is sent from the client to the server to obtain web and local URL(s) for documentation related to the symbol under the cursor, if available.
diff --git a/editors/code/package-lock.json b/editors/code/package-lock.json
index 291cef9..bd8b0e9 100644
--- a/editors/code/package-lock.json
+++ b/editors/code/package-lock.json
@@ -2290,9 +2290,9 @@
             "dev": true
         },
         "node_modules/follow-redirects": {
-            "version": "1.15.4",
-            "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.4.tgz",
-            "integrity": "sha512-Cr4D/5wlrb0z9dgERpUL3LrmPKVDsETIJhaCMeDfuFYcqa5bldGV6wBsAN6X/vxlXQtFBMrXdXxdL8CbDTGniw==",
+            "version": "1.15.6",
+            "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.6.tgz",
+            "integrity": "sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==",
             "dev": true,
             "funding": [
                 {
diff --git a/editors/code/src/lsp_ext.ts b/editors/code/src/lsp_ext.ts
index 31ac3d9..ca81063 100644
--- a/editors/code/src/lsp_ext.ts
+++ b/editors/code/src/lsp_ext.ts
@@ -100,6 +100,9 @@
 export const runTest = new lc.RequestType<RunTestParams, void, void>("experimental/runTest");
 export const abortRunTest = new lc.NotificationType0("experimental/abortRunTest");
 export const endRunTest = new lc.NotificationType0("experimental/endRunTest");
+export const appendOutputToRunTest = new lc.NotificationType<string>(
+    "experimental/appendOutputToRunTest",
+);
 export const changeTestState = new lc.NotificationType<ChangeTestStateParams>(
     "experimental/changeTestState",
 );
diff --git a/editors/code/src/test_explorer.ts b/editors/code/src/test_explorer.ts
index 2f0b4d5..ac4ffb1 100644
--- a/editors/code/src/test_explorer.ts
+++ b/editors/code/src/test_explorer.ts
@@ -142,6 +142,12 @@
     );
 
     ctx.pushClientCleanup(
+        client.onNotification(ra.appendOutputToRunTest, (output) => {
+            currentTestRun!.appendOutput(`${output}\r\n`);
+        }),
+    );
+
+    ctx.pushClientCleanup(
         client.onNotification(ra.changeTestState, (results) => {
             const test = idToTestMap.get(results.testId)!;
             if (results.state.tag === "failed") {