Auto merge of #129066 - weihanglo:update-cargo, r=weihanglo

Update cargo

7 commits in 0d8d22f83b066503f6b2b755925197e959e58b4f..2f738d617c6ead388f899802dd1a7fd66858a691
2024-08-08 12:54:24 +0000 to 2024-08-13 10:57:52 +0000
- chore: downgrade to openssl v1.1.1 (again) (rust-lang/cargo#14391)
- feat(trim-paths): rustdoc supports trim-paths for diagnostics (rust-lang/cargo#14389)
- Use longhand gitoxide path-spec patterns (rust-lang/cargo#14380)
- feat: Add `info` cargo subcommand (rust-lang/cargo#14141)
- CI: Switch macos aarch64 to nightly (rust-lang/cargo#14382)
- Use context instead of with_context (rust-lang/cargo#14377)
- Fix: `cargo package` failed on bare commit git repo. (rust-lang/cargo#14359)

r? ghost
diff --git a/.github/workflows/publish-libs.yaml b/.github/workflows/publish-libs.yaml
index 34ca53e..f1533bf 100644
--- a/.github/workflows/publish-libs.yaml
+++ b/.github/workflows/publish-libs.yaml
@@ -30,7 +30,6 @@
         run: |
           git config --global user.email "runner@gha.local"
           git config --global user.name "GitHub Action"
-          # Remove r-a crates from the workspaces so we don't auto-publish them as well
-          sed -i 's/ "crates\/\*"//' ./Cargo.toml
-          sed -i 's/ "xtask\/"//' ./Cargo.toml
+          # Only publish the crates under lib/
+          sed -i 's|^members = .*$|members = ["lib/*"]|' Cargo.toml
           cargo workspaces publish --yes --exact --from-git --no-git-commit --allow-dirty
diff --git a/Cargo.lock b/Cargo.lock
index b98a119..41dc440 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -52,16 +52,16 @@
 
 [[package]]
 name = "backtrace"
-version = "0.3.72"
+version = "0.3.73"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "17c6a35df3749d2e8bb1b7b21a976d82b15548788d2735b9d82f329268f71a11"
+checksum = "5cc23269a4f8976d0a4d2e7109211a419fe30e8d88d677cd60b6bc79c5732e0a"
 dependencies = [
  "addr2line",
  "cc",
  "cfg-if",
  "libc",
  "miniz_oxide",
- "object 0.35.0",
+ "object 0.36.3",
  "rustc-demangle",
 ]
 
@@ -92,9 +92,9 @@
 
 [[package]]
 name = "bitflags"
-version = "2.5.0"
+version = "2.6.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cf4b9d6a944f767f8e5e0db018570623c85f3d925ac718db4e06d0187adb21c1"
+checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de"
 
 [[package]]
 name = "byteorder"
@@ -136,9 +136,9 @@
 
 [[package]]
 name = "cc"
-version = "1.0.98"
+version = "1.1.10"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "41c270e7540d725e65ac7f1b212ac8ce349719624d7bcff99f8e2e488e8cf03f"
+checksum = "e9e8aabfac534be767c909e0690571677d49f41bd8465ae876fe043d52ba5292"
 
 [[package]]
 name = "cfg"
@@ -148,10 +148,10 @@
  "derive_arbitrary",
  "expect-test",
  "intern",
- "mbe",
  "oorandom",
  "rustc-hash",
  "syntax",
+ "syntax-bridge",
  "tt",
 ]
 
@@ -185,7 +185,7 @@
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d5f2eb1cd6054da221bd1ac0197fb2fe5e2caf3dcb93619398fc1433f8f09093"
 dependencies = [
- "bitflags 2.5.0",
+ "bitflags 2.6.0",
  "chalk-derive",
 ]
 
@@ -226,9 +226,9 @@
 
 [[package]]
 name = "cov-mark"
-version = "2.0.0-pre.1"
+version = "2.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0d48d8f76bd9331f19fe2aaf3821a9f9fb32c3963e1e3d6ce82a8c09cef7444a"
+checksum = "0570650661aa447e7335f1d5e4f499d8e58796e617bedc9267d971e51c8b49d4"
 
 [[package]]
 name = "crc32fast"
@@ -366,9 +366,9 @@
 
 [[package]]
 name = "either"
-version = "1.12.0"
+version = "1.13.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3dca9240753cf90908d7e4aac30f630662b02aebaa1b58a3cadabdb23385b58b"
+checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0"
 
 [[package]]
 name = "ena"
@@ -397,14 +397,14 @@
 
 [[package]]
 name = "filetime"
-version = "0.2.23"
+version = "0.2.24"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1ee447700ac8aa0b2f2bd7bc4462ad686ba06baa6727ac149a2d6277f0d240fd"
+checksum = "bf401df4a4e3872c4fe8151134cf483738e74b67fc934d6532c882b3d24a4550"
 dependencies = [
  "cfg-if",
  "libc",
- "redox_syscall 0.4.1",
- "windows-sys 0.52.0",
+ "libredox",
+ "windows-sys 0.59.0",
 ]
 
 [[package]]
@@ -415,32 +415,15 @@
 
 [[package]]
 name = "flate2"
-version = "1.0.30"
+version = "1.0.31"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5f54427cfd1c7829e2a139fcefea601bf088ebca651d2bf53ebc600eac295dae"
+checksum = "7f211bbe8e69bbd0cfdea405084f128ae8b4aaa6b0b522fc8f2b009084797920"
 dependencies = [
  "crc32fast",
  "miniz_oxide",
 ]
 
 [[package]]
-name = "flycheck"
-version = "0.0.0"
-dependencies = [
- "cargo_metadata",
- "crossbeam-channel",
- "paths",
- "process-wrap",
- "project-model",
- "rustc-hash",
- "serde",
- "serde_json",
- "stdx",
- "toolchain",
- "tracing",
-]
-
-[[package]]
 name = "form_urlencoded"
 version = "1.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -529,7 +512,7 @@
 dependencies = [
  "arrayvec",
  "base-db",
- "bitflags 2.5.0",
+ "bitflags 2.6.0",
  "cfg",
  "cov-mark",
  "dashmap",
@@ -554,6 +537,7 @@
  "span",
  "stdx",
  "syntax",
+ "syntax-bridge",
  "test-fixture",
  "test-utils",
  "tracing",
@@ -582,6 +566,7 @@
  "span",
  "stdx",
  "syntax",
+ "syntax-bridge",
  "tracing",
  "triomphe",
  "tt",
@@ -593,7 +578,7 @@
 dependencies = [
  "arrayvec",
  "base-db",
- "bitflags 2.5.0",
+ "bitflags 2.6.0",
  "chalk-derive",
  "chalk-ir",
  "chalk-recursive",
@@ -722,7 +707,7 @@
 dependencies = [
  "arrayvec",
  "base-db",
- "bitflags 2.5.0",
+ "bitflags 2.6.0",
  "cov-mark",
  "crossbeam-channel",
  "either",
@@ -803,9 +788,9 @@
 
 [[package]]
 name = "indexmap"
-version = "2.2.6"
+version = "2.3.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26"
+checksum = "de3fc2e30ba82dd1b3911c8de1ffc143c74a914a14e99514d7637e3099df5ea0"
 dependencies = [
  "equivalent",
  "hashbrown",
@@ -895,9 +880,9 @@
 
 [[package]]
 name = "lazy_static"
-version = "1.4.0"
+version = "1.5.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
+checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
 
 [[package]]
 name = "libc"
@@ -907,19 +892,19 @@
 
 [[package]]
 name = "libloading"
-version = "0.8.3"
+version = "0.8.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0c2a198fb6b0eada2a8df47933734e6d35d350665a33a3593d7164fa52c75c19"
+checksum = "4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4"
 dependencies = [
  "cfg-if",
- "windows-targets 0.52.5",
+ "windows-targets 0.52.6",
 ]
 
 [[package]]
 name = "libmimalloc-sys"
-version = "0.1.38"
+version = "0.1.39"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0e7bb23d733dfcc8af652a78b7bf232f0e967710d044732185e561e47c0336b6"
+checksum = "23aa6811d3bd4deb8a84dde645f943476d13b248d818edcf8ce0b2f37f036b44"
 dependencies = [
  "cc",
  "libc",
@@ -931,8 +916,9 @@
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d"
 dependencies = [
- "bitflags 2.5.0",
+ "bitflags 2.6.0",
  "libc",
+ "redox_syscall",
 ]
 
 [[package]]
@@ -996,9 +982,9 @@
 
 [[package]]
 name = "log"
-version = "0.4.21"
+version = "0.4.22"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "90ed8c1e510134f979dbc4f070f87d4313098b704861a105fe34231c70a3901c"
+checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24"
 
 [[package]]
 name = "lsp-server"
@@ -1056,6 +1042,7 @@
  "span",
  "stdx",
  "syntax",
+ "syntax-bridge",
  "test-utils",
  "tracing",
  "tt",
@@ -1063,9 +1050,9 @@
 
 [[package]]
 name = "memchr"
-version = "2.7.2"
+version = "2.7.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6c8640c5d730cb13ebd907d8d04b52f55ac9a2eec55b440c8892f40d56c76c1d"
+checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3"
 
 [[package]]
 name = "memmap2"
@@ -1087,18 +1074,18 @@
 
 [[package]]
 name = "mimalloc"
-version = "0.1.42"
+version = "0.1.43"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e9186d86b79b52f4a77af65604b51225e8db1d6ee7e3f41aec1e40829c71a176"
+checksum = "68914350ae34959d83f732418d51e2427a794055d0b9529f48259ac07af65633"
 dependencies = [
  "libmimalloc-sys",
 ]
 
 [[package]]
 name = "miniz_oxide"
-version = "0.7.3"
+version = "0.7.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "87dfd01fe195c66b572b37921ad8803d010623c0aca821bea2302239d155cdae"
+checksum = "b8a240ddb74feaf34a79a7add65a741f3167852fba007066dcac1ca548d89c08"
 dependencies = [
  "adler",
 ]
@@ -1130,7 +1117,7 @@
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "ab2156c4fce2f8df6c499cc1c763e4394b7482525bf2a9701c9d79d215f519e4"
 dependencies = [
- "bitflags 2.5.0",
+ "bitflags 2.6.0",
  "cfg-if",
  "cfg_aliases",
  "libc",
@@ -1148,7 +1135,7 @@
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "6205bd8bb1e454ad2e27422015fb5e4f2bcc7e08fa8f27058670d208324a4d2d"
 dependencies = [
- "bitflags 2.5.0",
+ "bitflags 2.6.0",
  "crossbeam-channel",
  "filetime",
  "fsevent-sys",
@@ -1163,11 +1150,11 @@
 
 [[package]]
 name = "nu-ansi-term"
-version = "0.50.0"
+version = "0.50.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dd2800e1520bdc966782168a627aa5d1ad92e33b984bf7c7615d31280c83ff14"
+checksum = "d4a28e057d01f97e61255210fcff094d74ed0466038633e95017f5beb68e4399"
 dependencies = [
- "windows-sys 0.48.0",
+ "windows-sys 0.52.0",
 ]
 
 [[package]]
@@ -1197,9 +1184,9 @@
 
 [[package]]
 name = "object"
-version = "0.35.0"
+version = "0.36.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b8ec7ab813848ba4522158d5517a6093db1ded27575b070f4177b8d12b41db5e"
+checksum = "27b64972346851a39438c60b341ebc01bba47464ae329e55cf343eb93964efd9"
 dependencies = [
  "memchr",
 ]
@@ -1212,9 +1199,9 @@
 
 [[package]]
 name = "oorandom"
-version = "11.1.3"
+version = "11.1.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0ab1bc2a289d34bd04a330323ac98a1b4bc82c9d9fcb1e66b63caa84da26b575"
+checksum = "b410bbe7e14ab526a0e86877eb47c6996a2bd7746f027ba551028c925390e4e9"
 
 [[package]]
 name = "option-ext"
@@ -1240,9 +1227,9 @@
 dependencies = [
  "cfg-if",
  "libc",
- "redox_syscall 0.5.1",
+ "redox_syscall",
  "smallvec",
- "windows-targets 0.52.5",
+ "windows-targets 0.52.6",
 ]
 
 [[package]]
@@ -1268,6 +1255,7 @@
 version = "0.0.0"
 dependencies = [
  "camino",
+ "serde",
 ]
 
 [[package]]
@@ -1319,9 +1307,12 @@
 
 [[package]]
 name = "ppv-lite86"
-version = "0.2.17"
+version = "0.2.20"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de"
+checksum = "77957b295656769bb8ad2b6a6b09d897d94f05c41b069aede1fcdaa675eaea04"
+dependencies = [
+ "zerocopy",
+]
 
 [[package]]
 name = "proc-macro-api"
@@ -1330,14 +1321,12 @@
  "base-db",
  "indexmap",
  "intern",
- "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "paths",
  "rustc-hash",
  "serde",
  "serde_json",
  "span",
  "stdx",
- "text-size",
  "tracing",
  "tt",
 ]
@@ -1350,7 +1339,6 @@
  "expect-test",
  "intern",
  "libloading",
- "mbe",
  "memmap2",
  "object 0.33.0",
  "paths",
@@ -1360,6 +1348,7 @@
  "snap",
  "span",
  "stdx",
+ "syntax-bridge",
  "tt",
 ]
 
@@ -1380,9 +1369,9 @@
 
 [[package]]
 name = "proc-macro2"
-version = "1.0.85"
+version = "1.0.86"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "22244ce15aa966053a896d1accb3a6e68469b97c7f33f284b99f0d576879fc23"
+checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77"
 dependencies = [
  "unicode-ident",
 ]
@@ -1460,7 +1449,7 @@
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "57206b407293d2bcd3af849ce869d52068623f19e1b5ff8e8778e3309439682b"
 dependencies = [
- "bitflags 2.5.0",
+ "bitflags 2.6.0",
  "memchr",
  "unicase",
 ]
@@ -1485,20 +1474,20 @@
 
 [[package]]
 name = "ra-ap-rustc_abi"
-version = "0.53.0"
+version = "0.63.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "80b1d613eee933486c0613a7bc26e515e46f43adf479d1edd5e537f983e9ce46"
+checksum = "b011c39d409940a890414e3a7b239762ac16d88029ad71b050a8374831b93790"
 dependencies = [
- "bitflags 2.5.0",
+ "bitflags 2.6.0",
  "ra-ap-rustc_index",
  "tracing",
 ]
 
 [[package]]
 name = "ra-ap-rustc_index"
-version = "0.53.0"
+version = "0.63.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f072060ac77e9e1a02cc20028095993af7e72cc0804779c68bcbf47b16de49c9"
+checksum = "9027acdee649b0b27eb10b7db5be833efee3362d394935c5eed8f0745a9d43ce"
 dependencies = [
  "arrayvec",
  "ra-ap-rustc_index_macros",
@@ -1507,21 +1496,20 @@
 
 [[package]]
 name = "ra-ap-rustc_index_macros"
-version = "0.53.0"
+version = "0.63.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "82f3d6dcb30a66905388e14756b8f2216131d9f8004922c07f13335840e058d1"
+checksum = "540b86dc0384141ac8e825fc2874cd44bffd4277d99d8ec63ee416f1a98d5997"
 dependencies = [
  "proc-macro2",
  "quote",
  "syn",
- "synstructure",
 ]
 
 [[package]]
 name = "ra-ap-rustc_lexer"
-version = "0.53.0"
+version = "0.63.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dbd8a2b0bdcba9892cbce0b25f6c953d31b0febc1f3420fc692884fce5a23ad8"
+checksum = "3bdf98bb457b47b9ae4aeebf867d0ca440c86925e0b6381658c4a02589748c9d"
 dependencies = [
  "unicode-properties",
  "unicode-xid",
@@ -1529,9 +1517,9 @@
 
 [[package]]
 name = "ra-ap-rustc_parse_format"
-version = "0.53.0"
+version = "0.63.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "70dad7a491c2554590222e0c9212dcb7c2e7aceb668875075012a35ea780d135"
+checksum = "e8fe3556ab6311bb775220563a300e2bf62ec56404521fe0c511a583937683d5"
 dependencies = [
  "ra-ap-rustc_index",
  "ra-ap-rustc_lexer",
@@ -1539,9 +1527,9 @@
 
 [[package]]
 name = "ra-ap-rustc_pattern_analysis"
-version = "0.53.0"
+version = "0.63.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "34768e1faf88c31f2e9ad57b48318a52b507dafac0cddbf01b5d63bfc0b0a365"
+checksum = "1709080fdeb5db630e1c2644026c2962aaa32416cd92f0190c04b0c21e114b91"
 dependencies = [
  "ra-ap-rustc_index",
  "rustc-hash",
@@ -1602,20 +1590,11 @@
 
 [[package]]
 name = "redox_syscall"
-version = "0.4.1"
+version = "0.5.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa"
+checksum = "2a908a6e00f1fdd0dfd9c0eb08ce85126f6d8bbda50017e74bc4a4b7d4a926a4"
 dependencies = [
- "bitflags 1.3.2",
-]
-
-[[package]]
-name = "redox_syscall"
-version = "0.5.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "469052894dcb553421e483e4209ee581a45100d31b4018de03e5a7ad86374a7e"
-dependencies = [
- "bitflags 2.5.0",
+ "bitflags 2.6.0",
 ]
 
 [[package]]
@@ -1648,12 +1627,12 @@
 dependencies = [
  "always-assert",
  "anyhow",
+ "cargo_metadata",
  "cfg",
  "crossbeam-channel",
  "dirs",
  "dissimilar",
  "expect-test",
- "flycheck",
  "hir",
  "hir-def",
  "hir-ty",
@@ -1665,7 +1644,6 @@
  "load-cargo",
  "lsp-server 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)",
  "lsp-types",
- "mbe",
  "memchr",
  "mimalloc",
  "nohash-hasher",
@@ -1675,6 +1653,7 @@
  "parser",
  "paths",
  "proc-macro-api",
+ "process-wrap",
  "profile",
  "project-model",
  "rayon",
@@ -1685,6 +1664,7 @@
  "serde_json",
  "stdx",
  "syntax",
+ "syntax-bridge",
  "test-fixture",
  "test-utils",
  "tikv-jemallocator",
@@ -1716,9 +1696,9 @@
 
 [[package]]
 name = "rustc_apfloat"
-version = "0.2.0+llvm-462a31f5a5ab"
+version = "0.2.1+llvm-462a31f5a5ab"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "465187772033a5ee566f69fe008df03628fce549a0899aae76f0a0c2e34696be"
+checksum = "886d94c63c812a8037c4faca2607453a0fa4cf82f734665266876b022244543f"
 dependencies = [
  "bitflags 1.3.2",
  "smallvec",
@@ -1801,18 +1781,18 @@
 
 [[package]]
 name = "serde"
-version = "1.0.203"
+version = "1.0.206"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7253ab4de971e72fb7be983802300c30b5a7f0c2e56fab8abfc6a214307c0094"
+checksum = "5b3e4cd94123dd520a128bcd11e34d9e9e423e7e3e50425cb1b4b1e3549d0284"
 dependencies = [
  "serde_derive",
 ]
 
 [[package]]
 name = "serde_derive"
-version = "1.0.203"
+version = "1.0.206"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "500cbc0ebeb6f46627f50f3f5811ccf6bf00643be300b4c3eabc0ef55dc5b5ba"
+checksum = "fabfb6138d2383ea8208cf98ccf69cdfb1aff4088460681d84189aa259762f97"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -1821,12 +1801,13 @@
 
 [[package]]
 name = "serde_json"
-version = "1.0.117"
+version = "1.0.124"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "455182ea6142b14f93f4bc5320a2b31c1f266b66a4a5c858b013302a5d8cbfc3"
+checksum = "66ad62847a56b3dba58cc891acd13884b9c61138d330c0d7b6181713d4fce38d"
 dependencies = [
  "indexmap",
  "itoa",
+ "memchr",
  "ryu",
  "serde",
 ]
@@ -1844,9 +1825,9 @@
 
 [[package]]
 name = "serde_spanned"
-version = "0.6.6"
+version = "0.6.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "79e674e01f999af37c49f70a6ede167a8a60b2503e56c5599532a65baa5969a0"
+checksum = "eb5b1b31579f3811bf615c144393417496f152e12ac8b7663bf664f4a815306d"
 dependencies = [
  "serde",
 ]
@@ -1923,9 +1904,9 @@
 
 [[package]]
 name = "syn"
-version = "2.0.66"
+version = "2.0.74"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c42f3f41a2de00b01c0aaad383c5a45241efc8b2d1eda5661812fda5f3cdcff5"
+checksum = "1fceb41e3d546d0bd83421d3409b1460cc7444cd389341a4c880fe7a042cb3d7"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -1968,6 +1949,21 @@
 ]
 
 [[package]]
+name = "syntax-bridge"
+version = "0.0.0"
+dependencies = [
+ "intern",
+ "parser",
+ "rustc-hash",
+ "span",
+ "stdx",
+ "syntax",
+ "test-utils",
+ "tracing",
+ "tt",
+]
+
+[[package]]
 name = "test-fixture"
 version = "0.0.0"
 dependencies = [
@@ -1987,6 +1983,7 @@
 version = "0.0.0"
 dependencies = [
  "dissimilar",
+ "paths",
  "profile",
  "rustc-hash",
  "stdx",
@@ -2010,18 +2007,18 @@
 
 [[package]]
 name = "thiserror"
-version = "1.0.61"
+version = "1.0.63"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c546c80d6be4bc6a00c0f01730c08df82eaa7a7a61f11d656526506112cc1709"
+checksum = "c0342370b38b6a11b6cc11d6a805569958d54cfa061a29969c3b5ce2ea405724"
 dependencies = [
  "thiserror-impl",
 ]
 
 [[package]]
 name = "thiserror-impl"
-version = "1.0.61"
+version = "1.0.63"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "46c3384250002a6d5af4d114f2845d37b57521033f30d5c3f46c4d70e1197533"
+checksum = "a4558b58466b9ad7ca0f102865eccc95938dca1a74a856f2b57b6629050da261"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -2090,9 +2087,9 @@
 
 [[package]]
 name = "tinyvec"
-version = "1.6.0"
+version = "1.8.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50"
+checksum = "445e881f4f6d382d5f27c034e25eb92edd7c784ceab92a0937db7f2e9471b938"
 dependencies = [
  "tinyvec_macros",
 ]
@@ -2105,9 +2102,9 @@
 
 [[package]]
 name = "toml"
-version = "0.8.14"
+version = "0.8.19"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6f49eb2ab21d2f26bd6db7bf383edc527a7ebaee412d17af4d40fdccd442f335"
+checksum = "a1ed1f98e3fdc28d6d910e6737ae6ab1a93bf1985935a1193e68f93eeb68d24e"
 dependencies = [
  "serde",
  "serde_spanned",
@@ -2117,18 +2114,18 @@
 
 [[package]]
 name = "toml_datetime"
-version = "0.6.6"
+version = "0.6.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4badfd56924ae69bcc9039335b2e017639ce3f9b001c393c1b2d1ef846ce2cbf"
+checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41"
 dependencies = [
  "serde",
 ]
 
 [[package]]
 name = "toml_edit"
-version = "0.22.14"
+version = "0.22.20"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f21c7aaf97f1bd9ca9d4f9e73b0a6c74bd5afef56f2bc931943a6e1c37e04e38"
+checksum = "583c44c02ad26b0c3f3066fe629275e50627026c51ac2e595cca4c230ce1ce1d"
 dependencies = [
  "indexmap",
  "serde",
@@ -2214,9 +2211,9 @@
 
 [[package]]
 name = "triomphe"
-version = "0.1.12"
+version = "0.1.13"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1b2cb4fbb9995eeb36ac86fadf24031ccd58f99d6b4b2d7b911db70bddb80d90"
+checksum = "e6631e42e10b40c0690bf92f404ebcfe6e1fdb480391d15f17cc8e96eeed5369"
 dependencies = [
  "serde",
  "stable_deref_trait",
@@ -2289,9 +2286,9 @@
 
 [[package]]
 name = "url"
-version = "2.5.0"
+version = "2.5.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "31e6302e3bb753d46e83516cae55ae196fc0c309407cf11ab35cc51a4c2a4633"
+checksum = "22784dbdf76fdde8af1aeda5622b546b422b6fc585325248a2bf9f5e41e94d6c"
 dependencies = [
  "form_urlencoded",
  "idna",
@@ -2307,14 +2304,15 @@
 
 [[package]]
 name = "version_check"
-version = "0.9.4"
+version = "0.9.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
+checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a"
 
 [[package]]
 name = "vfs"
 version = "0.0.0"
 dependencies = [
+ "crossbeam-channel",
  "fst",
  "indexmap",
  "nohash-hasher",
@@ -2331,6 +2329,8 @@
  "crossbeam-channel",
  "notify",
  "paths",
+ "rayon",
+ "rustc-hash",
  "stdx",
  "tracing",
  "vfs",
@@ -2355,11 +2355,11 @@
 
 [[package]]
 name = "winapi-util"
-version = "0.1.8"
+version = "0.1.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4d4cc384e1e73b93bafa6fb4f1df8c41695c8a91cf9c4c64358067d15a7b6c6b"
+checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb"
 dependencies = [
- "windows-sys 0.52.0",
+ "windows-sys 0.59.0",
 ]
 
 [[package]]
@@ -2369,7 +2369,7 @@
 checksum = "1de69df01bdf1ead2f4ac895dc77c9351aefff65b2f3db429a343f9cbf05e132"
 dependencies = [
  "windows-core",
- "windows-targets 0.52.5",
+ "windows-targets 0.52.6",
 ]
 
 [[package]]
@@ -2381,7 +2381,7 @@
  "windows-implement",
  "windows-interface",
  "windows-result",
- "windows-targets 0.52.5",
+ "windows-targets 0.52.6",
 ]
 
 [[package]]
@@ -2408,11 +2408,11 @@
 
 [[package]]
 name = "windows-result"
-version = "0.1.1"
+version = "0.1.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "749f0da9cc72d82e600d8d2e44cadd0b9eedb9038f71a1c58556ac1c5791813b"
+checksum = "5e383302e8ec8515204254685643de10811af0ed97ea37210dc26fb0032647f8"
 dependencies = [
- "windows-targets 0.52.5",
+ "windows-targets 0.52.6",
 ]
 
 [[package]]
@@ -2430,7 +2430,16 @@
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
 dependencies = [
- "windows-targets 0.52.5",
+ "windows-targets 0.52.6",
+]
+
+[[package]]
+name = "windows-sys"
+version = "0.59.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b"
+dependencies = [
+ "windows-targets 0.52.6",
 ]
 
 [[package]]
@@ -2450,18 +2459,18 @@
 
 [[package]]
 name = "windows-targets"
-version = "0.52.5"
+version = "0.52.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6f0713a46559409d202e70e28227288446bf7841d3211583a4b53e3f6d96e7eb"
+checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973"
 dependencies = [
- "windows_aarch64_gnullvm 0.52.5",
- "windows_aarch64_msvc 0.52.5",
- "windows_i686_gnu 0.52.5",
+ "windows_aarch64_gnullvm 0.52.6",
+ "windows_aarch64_msvc 0.52.6",
+ "windows_i686_gnu 0.52.6",
  "windows_i686_gnullvm",
- "windows_i686_msvc 0.52.5",
- "windows_x86_64_gnu 0.52.5",
- "windows_x86_64_gnullvm 0.52.5",
- "windows_x86_64_msvc 0.52.5",
+ "windows_i686_msvc 0.52.6",
+ "windows_x86_64_gnu 0.52.6",
+ "windows_x86_64_gnullvm 0.52.6",
+ "windows_x86_64_msvc 0.52.6",
 ]
 
 [[package]]
@@ -2472,9 +2481,9 @@
 
 [[package]]
 name = "windows_aarch64_gnullvm"
-version = "0.52.5"
+version = "0.52.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7088eed71e8b8dda258ecc8bac5fb1153c5cffaf2578fc8ff5d61e23578d3263"
+checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3"
 
 [[package]]
 name = "windows_aarch64_msvc"
@@ -2484,9 +2493,9 @@
 
 [[package]]
 name = "windows_aarch64_msvc"
-version = "0.52.5"
+version = "0.52.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9985fd1504e250c615ca5f281c3f7a6da76213ebd5ccc9561496568a2752afb6"
+checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469"
 
 [[package]]
 name = "windows_i686_gnu"
@@ -2496,15 +2505,15 @@
 
 [[package]]
 name = "windows_i686_gnu"
-version = "0.52.5"
+version = "0.52.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "88ba073cf16d5372720ec942a8ccbf61626074c6d4dd2e745299726ce8b89670"
+checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b"
 
 [[package]]
 name = "windows_i686_gnullvm"
-version = "0.52.5"
+version = "0.52.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "87f4261229030a858f36b459e748ae97545d6f1ec60e5e0d6a3d32e0dc232ee9"
+checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66"
 
 [[package]]
 name = "windows_i686_msvc"
@@ -2514,9 +2523,9 @@
 
 [[package]]
 name = "windows_i686_msvc"
-version = "0.52.5"
+version = "0.52.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "db3c2bf3d13d5b658be73463284eaf12830ac9a26a90c717b7f771dfe97487bf"
+checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66"
 
 [[package]]
 name = "windows_x86_64_gnu"
@@ -2526,9 +2535,9 @@
 
 [[package]]
 name = "windows_x86_64_gnu"
-version = "0.52.5"
+version = "0.52.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4e4246f76bdeff09eb48875a0fd3e2af6aada79d409d33011886d3e1581517d9"
+checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78"
 
 [[package]]
 name = "windows_x86_64_gnullvm"
@@ -2538,9 +2547,9 @@
 
 [[package]]
 name = "windows_x86_64_gnullvm"
-version = "0.52.5"
+version = "0.52.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "852298e482cd67c356ddd9570386e2862b5673c85bd5f88df9ab6802b334c596"
+checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d"
 
 [[package]]
 name = "windows_x86_64_msvc"
@@ -2550,15 +2559,15 @@
 
 [[package]]
 name = "windows_x86_64_msvc"
-version = "0.52.5"
+version = "0.52.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bec47e5bfd1bff0eeaf6d8b485cc1074891a197ab4225d504cb7a1ab88b02bf0"
+checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
 
 [[package]]
 name = "winnow"
-version = "0.6.11"
+version = "0.6.18"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "56c52728401e1dc672a56e81e593e912aa54c78f40246869f78359a2bf24d29d"
+checksum = "68a9bda4691f099d435ad181000724da8e5899daa10713c2d432552b9ccd3a6f"
 dependencies = [
  "memchr",
 ]
@@ -2619,6 +2628,27 @@
 ]
 
 [[package]]
+name = "zerocopy"
+version = "0.7.35"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0"
+dependencies = [
+ "byteorder",
+ "zerocopy-derive",
+]
+
+[[package]]
+name = "zerocopy-derive"
+version = "0.7.35"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
 name = "zip"
 version = "0.6.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
diff --git a/Cargo.toml b/Cargo.toml
index c2f601a..56e80e1 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -4,10 +4,11 @@
 resolver = "2"
 
 [workspace.package]
-rust-version = "1.78"
+rust-version = "1.80"
 edition = "2021"
 license = "MIT OR Apache-2.0"
 authors = ["rust-analyzer team"]
+repository = "https://github.com/rust-lang/rust-analyzer"
 
 [profile.dev]
 debug = 1
@@ -51,7 +52,6 @@
 # local crates
 base-db = { path = "./crates/base-db", version = "0.0.0" }
 cfg = { path = "./crates/cfg", version = "0.0.0", features = ["tt"] }
-flycheck = { path = "./crates/flycheck", version = "0.0.0" }
 hir = { path = "./crates/hir", version = "0.0.0" }
 hir-def = { path = "./crates/hir-def", version = "0.0.0" }
 hir-expand = { path = "./crates/hir-expand", version = "0.0.0" }
@@ -77,17 +77,18 @@
 span = { path = "./crates/span", version = "0.0.0" }
 stdx = { path = "./crates/stdx", version = "0.0.0" }
 syntax = { path = "./crates/syntax", version = "0.0.0" }
+syntax-bridge = { path = "./crates/syntax-bridge", version = "0.0.0" }
 text-edit = { path = "./crates/text-edit", version = "0.0.0" }
 toolchain = { path = "./crates/toolchain", version = "0.0.0" }
 tt = { path = "./crates/tt", version = "0.0.0" }
 vfs-notify = { path = "./crates/vfs-notify", version = "0.0.0" }
 vfs = { path = "./crates/vfs", version = "0.0.0" }
 
-ra-ap-rustc_lexer = { version = "0.53.0", default-features = false }
-ra-ap-rustc_parse_format = { version = "0.53.0", default-features = false }
-ra-ap-rustc_index = { version = "0.53.0", default-features = false }
-ra-ap-rustc_abi = { version = "0.53.0", default-features = false }
-ra-ap-rustc_pattern_analysis = { version = "0.53.0", default-features = false }
+ra-ap-rustc_lexer = { version = "0.63.0", default-features = false }
+ra-ap-rustc_parse_format = { version = "0.63.0", default-features = false }
+ra-ap-rustc_index = { version = "0.63.0", default-features = false }
+ra-ap-rustc_abi = { version = "0.63.0", default-features = false }
+ra-ap-rustc_pattern_analysis = { version = "0.63.0", default-features = false }
 
 # local crates that aren't published to crates.io. These should not have versions.
 test-fixture = { path = "./crates/test-fixture" }
@@ -124,11 +125,11 @@
 nohash-hasher = "0.2.0"
 oorandom = "11.1.3"
 object = { version = "0.33.0", default-features = false, features = [
-    "std",
-    "read_core",
-    "elf",
-    "macho",
-    "pe",
+  "std",
+  "read_core",
+  "elf",
+  "macho",
+  "pe",
 ] }
 process-wrap = { version = "8.0.2", features = ["std"] }
 pulldown-cmark-to-cmark = "10.0.4"
@@ -158,7 +159,6 @@
 xshell = "0.2.5"
 
 
-
 # We need to freeze the version of the crate, as the raw-api feature is considered unstable
 dashmap = { version = "=5.5.3", features = ["raw-api"] }
 
diff --git a/crates/base-db/Cargo.toml b/crates/base-db/Cargo.toml
index 1b1ee03..b17b08a 100644
--- a/crates/base-db/Cargo.toml
+++ b/crates/base-db/Cargo.toml
@@ -1,7 +1,8 @@
 [package]
 name = "base-db"
 version = "0.0.0"
-description = "TBD"
+repository.workspace = true
+description = "Basic database traits for rust-analyzer. The concrete DB is defined by `ide` (aka `ra_ap_ide`)."
 
 authors.workspace = true
 edition.workspace = true
diff --git a/crates/base-db/src/change.rs b/crates/base-db/src/change.rs
index 0fd54e1..a9d91d6 100644
--- a/crates/base-db/src/change.rs
+++ b/crates/base-db/src/change.rs
@@ -7,7 +7,7 @@
 use triomphe::Arc;
 use vfs::FileId;
 
-use crate::{CrateGraph, SourceDatabaseExt, SourceDatabaseExt2, SourceRoot, SourceRootId};
+use crate::{CrateGraph, SourceDatabaseFileInputExt, SourceRoot, SourceRootDatabase, SourceRootId};
 
 /// Encapsulate a bunch of raw `.set` calls on the database.
 #[derive(Default)]
@@ -50,7 +50,7 @@
         self.crate_graph = Some(graph);
     }
 
-    pub fn apply(self, db: &mut dyn SourceDatabaseExt) {
+    pub fn apply(self, db: &mut dyn SourceRootDatabase) {
         let _p = tracing::info_span!("FileChange::apply").entered();
         if let Some(roots) = self.roots {
             for (idx, root) in roots.into_iter().enumerate() {
diff --git a/crates/base-db/src/input.rs b/crates/base-db/src/input.rs
index 460581f..3616fa9 100644
--- a/crates/base-db/src/input.rs
+++ b/crates/base-db/src/input.rs
@@ -690,6 +690,14 @@
     pub fn extend_from_other(&mut self, other: &Env) {
         self.entries.extend(other.entries.iter().map(|(x, y)| (x.to_owned(), y.to_owned())));
     }
+
+    pub fn is_empty(&self) -> bool {
+        self.entries.is_empty()
+    }
+
+    pub fn insert(&mut self, k: impl Into<String>, v: impl Into<String>) -> Option<String> {
+        self.entries.insert(k.into(), v.into())
+    }
 }
 
 impl From<Env> for Vec<(String, String)> {
@@ -700,6 +708,15 @@
     }
 }
 
+impl<'a> IntoIterator for &'a Env {
+    type Item = (&'a String, &'a String);
+    type IntoIter = std::collections::hash_map::Iter<'a, String, String>;
+
+    fn into_iter(self) -> Self::IntoIter {
+        self.entries.iter()
+    }
+}
+
 #[derive(Debug)]
 pub struct CyclicDependenciesError {
     path: Vec<(CrateId, Option<CrateDisplayName>)>,
diff --git a/crates/base-db/src/lib.rs b/crates/base-db/src/lib.rs
index f319f98..20ef45d 100644
--- a/crates/base-db/src/lib.rs
+++ b/crates/base-db/src/lib.rs
@@ -1,5 +1,5 @@
 //! base_db defines basic database traits. The concrete DB is defined by ide.
-
+// FIXME: Rename this crate; "base-db" is not descriptive.
 mod change;
 mod input;
 
@@ -47,8 +47,6 @@
 pub const DEFAULT_BORROWCK_LRU_CAP: u16 = 2024;
 
 pub trait FileLoader {
-    /// Text of the file.
-    fn file_text(&self, file_id: FileId) -> Arc<str>;
     fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId>;
     /// Crates whose root's source root is the same as the source root of `file_id`
     fn relevant_crates(&self, file_id: FileId) -> Arc<[CrateId]>;
@@ -58,6 +56,13 @@
 /// model. Everything else in rust-analyzer is derived from these queries.
 #[salsa::query_group(SourceDatabaseStorage)]
 pub trait SourceDatabase: FileLoader + std::fmt::Debug {
+    #[salsa::input]
+    fn compressed_file_text(&self, file_id: FileId) -> Arc<[u8]>;
+
+    /// Text of the file.
+    #[salsa::lru]
+    fn file_text(&self, file_id: FileId) -> Arc<str>;
+
     /// Parses the file into the syntax tree.
     #[salsa::lru]
     fn parse(&self, file_id: EditionedFileId) -> Parse<ast::SourceFile>;
@@ -99,16 +104,18 @@
     }
 }
 
+fn file_text(db: &dyn SourceDatabase, file_id: FileId) -> Arc<str> {
+    let bytes = db.compressed_file_text(file_id);
+    let bytes =
+        lz4_flex::decompress_size_prepended(&bytes).expect("lz4 decompression should not fail");
+    let text = std::str::from_utf8(&bytes).expect("file contents should be valid UTF-8");
+    Arc::from(text)
+}
+
 /// We don't want to give HIR knowledge of source roots, hence we extract these
 /// methods into a separate DB.
-#[salsa::query_group(SourceDatabaseExtStorage)]
-pub trait SourceDatabaseExt: SourceDatabase {
-    #[salsa::input]
-    fn compressed_file_text(&self, file_id: FileId) -> Arc<[u8]>;
-
-    #[salsa::lru]
-    fn file_text(&self, file_id: FileId) -> Arc<str>;
-
+#[salsa::query_group(SourceRootDatabaseStorage)]
+pub trait SourceRootDatabase: SourceDatabase {
     /// Path to a file, relative to the root of its source root.
     /// Source root of the file.
     #[salsa::input]
@@ -121,15 +128,7 @@
     fn source_root_crates(&self, id: SourceRootId) -> Arc<[CrateId]>;
 }
 
-fn file_text(db: &dyn SourceDatabaseExt, file_id: FileId) -> Arc<str> {
-    let bytes = db.compressed_file_text(file_id);
-    let bytes =
-        lz4_flex::decompress_size_prepended(&bytes).expect("lz4 decompression should not fail");
-    let text = std::str::from_utf8(&bytes).expect("file contents should be valid UTF-8");
-    Arc::from(text)
-}
-
-pub trait SourceDatabaseExt2 {
+pub trait SourceDatabaseFileInputExt {
     fn set_file_text(&mut self, file_id: FileId, text: &str) {
         self.set_file_text_with_durability(file_id, text, Durability::LOW);
     }
@@ -142,7 +141,7 @@
     );
 }
 
-impl<Db: ?Sized + SourceDatabaseExt> SourceDatabaseExt2 for Db {
+impl<Db: ?Sized + SourceRootDatabase> SourceDatabaseFileInputExt for Db {
     fn set_file_text_with_durability(
         &mut self,
         file_id: FileId,
@@ -159,7 +158,7 @@
     }
 }
 
-fn source_root_crates(db: &dyn SourceDatabaseExt, id: SourceRootId) -> Arc<[CrateId]> {
+fn source_root_crates(db: &dyn SourceRootDatabase, id: SourceRootId) -> Arc<[CrateId]> {
     let graph = db.crate_graph();
     let mut crates = graph
         .iter()
@@ -173,13 +172,12 @@
     crates.into_iter().collect()
 }
 
-/// Silly workaround for cyclic deps between the traits
+// FIXME: Would be nice to get rid of this somehow
+/// Silly workaround for cyclic deps due to the SourceRootDatabase and SourceDatabase split
+/// regarding FileLoader
 pub struct FileLoaderDelegate<T>(pub T);
 
-impl<T: SourceDatabaseExt> FileLoader for FileLoaderDelegate<&'_ T> {
-    fn file_text(&self, file_id: FileId) -> Arc<str> {
-        SourceDatabaseExt::file_text(self.0, file_id)
-    }
+impl<T: SourceRootDatabase> FileLoader for FileLoaderDelegate<&'_ T> {
     fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
         // FIXME: this *somehow* should be platform agnostic...
         let source_root = self.0.file_source_root(path.anchor);
diff --git a/crates/cfg/Cargo.toml b/crates/cfg/Cargo.toml
index faf93f6..29b7ad6 100644
--- a/crates/cfg/Cargo.toml
+++ b/crates/cfg/Cargo.toml
@@ -1,7 +1,8 @@
 [package]
 name = "cfg"
 version = "0.0.0"
-description = "TBD"
+repository.workspace = true
+description = "Conditional compiling options, `cfg` attribute parser and evaluator for rust-analyzer."
 
 authors.workspace = true
 edition.workspace = true
@@ -28,7 +29,7 @@
 derive_arbitrary = "1.3.2"
 
 # local deps
-mbe.workspace = true
+syntax-bridge.workspace = true
 syntax.workspace = true
 
 [lints]
diff --git a/crates/cfg/src/lib.rs b/crates/cfg/src/lib.rs
index 6d46dfb..e9daaf7 100644
--- a/crates/cfg/src/lib.rs
+++ b/crates/cfg/src/lib.rs
@@ -108,6 +108,14 @@
     }
 }
 
+impl FromIterator<CfgAtom> for CfgOptions {
+    fn from_iter<T: IntoIterator<Item = CfgAtom>>(iter: T) -> Self {
+        let mut options = CfgOptions::default();
+        options.extend(iter);
+        options
+    }
+}
+
 #[derive(Default, Clone, Debug, PartialEq, Eq)]
 pub struct CfgDiff {
     // Invariants: No duplicates, no atom that's both in `enable` and `disable`.
diff --git a/crates/cfg/src/tests.rs b/crates/cfg/src/tests.rs
index 597023a..6d87d83 100644
--- a/crates/cfg/src/tests.rs
+++ b/crates/cfg/src/tests.rs
@@ -1,8 +1,11 @@
 use arbitrary::{Arbitrary, Unstructured};
 use expect_test::{expect, Expect};
 use intern::Symbol;
-use mbe::{syntax_node_to_token_tree, DocCommentDesugarMode, DummyTestSpanMap, DUMMY};
 use syntax::{ast, AstNode, Edition};
+use syntax_bridge::{
+    dummy_test_span_utils::{DummyTestSpanMap, DUMMY},
+    syntax_node_to_token_tree, DocCommentDesugarMode,
+};
 
 use crate::{CfgAtom, CfgExpr, CfgOptions, DnfExpr};
 
diff --git a/crates/flycheck/Cargo.toml b/crates/flycheck/Cargo.toml
deleted file mode 100644
index bb3a94c..0000000
--- a/crates/flycheck/Cargo.toml
+++ /dev/null
@@ -1,30 +0,0 @@
-[package]
-name = "flycheck"
-version = "0.0.0"
-description = "TBD"
-
-authors.workspace = true
-edition.workspace = true
-license.workspace = true
-rust-version.workspace = true
-
-[lib]
-doctest = false
-
-[dependencies]
-cargo_metadata.workspace = true
-crossbeam-channel.workspace = true
-tracing.workspace = true
-rustc-hash.workspace = true
-serde_json.workspace = true
-serde.workspace = true
-process-wrap.workspace = true
-
-# local deps
-paths.workspace = true
-stdx.workspace = true
-toolchain.workspace = true
-project-model.workspace = true
-
-[lints]
-workspace = true
\ No newline at end of file
diff --git a/crates/hir-def/Cargo.toml b/crates/hir-def/Cargo.toml
index 8ac2d00..5b9d227 100644
--- a/crates/hir-def/Cargo.toml
+++ b/crates/hir-def/Cargo.toml
@@ -1,7 +1,8 @@
 [package]
 name = "hir-def"
 version = "0.0.0"
-description = "TBD"
+repository.workspace = true
+description = "Item and macro definitions (the HIR definition layer) for rust-analyzer."
 
 authors.workspace = true
 edition.workspace = true
@@ -52,7 +53,7 @@
 # local deps
 test-utils.workspace = true
 test-fixture.workspace = true
-
+syntax-bridge.workspace = true
 [features]
 in-rust-tree = ["hir-expand/in-rust-tree"]
 
diff --git a/crates/hir-def/src/attr.rs b/crates/hir-def/src/attr.rs
index ba88495..198dc93 100644
--- a/crates/hir-def/src/attr.rs
+++ b/crates/hir-def/src/attr.rs
@@ -657,9 +657,9 @@
     use triomphe::Arc;
 
     use hir_expand::span_map::{RealSpanMap, SpanMap};
-    use mbe::{syntax_node_to_token_tree, DocCommentDesugarMode};
     use span::FileId;
     use syntax::{ast, AstNode, TextRange};
+    use syntax_bridge::{syntax_node_to_token_tree, DocCommentDesugarMode};
 
     use crate::attr::{DocAtom, DocExpr};
 
diff --git a/crates/hir-def/src/body.rs b/crates/hir-def/src/body.rs
index d3c134f..a988317 100644
--- a/crates/hir-def/src/body.rs
+++ b/crates/hir-def/src/body.rs
@@ -118,6 +118,7 @@
     MacroError { node: InFile<AstPtr<ast::MacroCall>>, err: ExpandError },
     UnresolvedMacroCall { node: InFile<AstPtr<ast::MacroCall>>, path: ModPath },
     UnreachableLabel { node: InFile<AstPtr<ast::Lifetime>>, name: Name },
+    AwaitOutsideOfAsync { node: InFile<AstPtr<ast::AwaitExpr>>, location: String },
     UndeclaredLabel { node: InFile<AstPtr<ast::Lifetime>>, name: Name },
 }
 
@@ -157,7 +158,7 @@
                             }),
                         )
                     });
-                    is_async_fn = data.has_async_kw();
+                    is_async_fn = data.is_async();
                     src.map(|it| it.body().map(ast::Expr::from))
                 }
                 DefWithBodyId::ConstId(c) => {
diff --git a/crates/hir-def/src/body/lower.rs b/crates/hir-def/src/body/lower.rs
index 9e30aff..abf7895 100644
--- a/crates/hir-def/src/body/lower.rs
+++ b/crates/hir-def/src/body/lower.rs
@@ -72,6 +72,7 @@
         is_lowering_coroutine: false,
         label_ribs: Vec::new(),
         current_binding_owner: None,
+        awaitable_context: None,
     }
     .collect(params, body, is_async_fn)
 }
@@ -100,6 +101,8 @@
     // resolution
     label_ribs: Vec<LabelRib>,
     current_binding_owner: Option<ExprId>,
+
+    awaitable_context: Option<Awaitable>,
 }
 
 #[derive(Clone, Debug)]
@@ -135,6 +138,11 @@
     }
 }
 
+enum Awaitable {
+    Yes,
+    No(&'static str),
+}
+
 #[derive(Debug, Default)]
 struct BindingList {
     map: FxHashMap<Name, BindingId>,
@@ -180,6 +188,18 @@
         body: Option<ast::Expr>,
         is_async_fn: bool,
     ) -> (Body, BodySourceMap) {
+        self.awaitable_context.replace(if is_async_fn {
+            Awaitable::Yes
+        } else {
+            match self.owner {
+                DefWithBodyId::FunctionId(..) => Awaitable::No("non-async function"),
+                DefWithBodyId::StaticId(..) => Awaitable::No("static"),
+                DefWithBodyId::ConstId(..) | DefWithBodyId::InTypeConstId(..) => {
+                    Awaitable::No("constant")
+                }
+                DefWithBodyId::VariantId(..) => Awaitable::No("enum variant"),
+            }
+        });
         if let Some((param_list, mut attr_enabled)) = param_list {
             let mut params = vec![];
             if let Some(self_param) =
@@ -280,31 +300,40 @@
                 }
                 Some(ast::BlockModifier::Async(_)) => {
                     self.with_label_rib(RibKind::Closure, |this| {
-                        this.collect_block_(e, |id, statements, tail| Expr::Async {
-                            id,
-                            statements,
-                            tail,
+                        this.with_awaitable_block(Awaitable::Yes, |this| {
+                            this.collect_block_(e, |id, statements, tail| Expr::Async {
+                                id,
+                                statements,
+                                tail,
+                            })
                         })
                     })
                 }
                 Some(ast::BlockModifier::Const(_)) => {
                     self.with_label_rib(RibKind::Constant, |this| {
-                        let (result_expr_id, prev_binding_owner) =
-                            this.initialize_binding_owner(syntax_ptr);
-                        let inner_expr = this.collect_block(e);
-                        let it = this.db.intern_anonymous_const(ConstBlockLoc {
-                            parent: this.owner,
-                            root: inner_expr,
-                        });
-                        this.body.exprs[result_expr_id] = Expr::Const(it);
-                        this.current_binding_owner = prev_binding_owner;
-                        result_expr_id
+                        this.with_awaitable_block(Awaitable::No("constant block"), |this| {
+                            let (result_expr_id, prev_binding_owner) =
+                                this.initialize_binding_owner(syntax_ptr);
+                            let inner_expr = this.collect_block(e);
+                            let it = this.db.intern_anonymous_const(ConstBlockLoc {
+                                parent: this.owner,
+                                root: inner_expr,
+                            });
+                            this.body.exprs[result_expr_id] = Expr::Const(it);
+                            this.current_binding_owner = prev_binding_owner;
+                            result_expr_id
+                        })
                     })
                 }
                 // FIXME
-                Some(ast::BlockModifier::AsyncGen(_)) | Some(ast::BlockModifier::Gen(_)) | None => {
-                    self.collect_block(e)
+                Some(ast::BlockModifier::AsyncGen(_)) => {
+                    self.with_awaitable_block(Awaitable::Yes, |this| this.collect_block(e))
                 }
+                Some(ast::BlockModifier::Gen(_)) => self
+                    .with_awaitable_block(Awaitable::No("non-async gen block"), |this| {
+                        this.collect_block(e)
+                    }),
+                None => self.collect_block(e),
             },
             ast::Expr::LoopExpr(e) => {
                 let label = e.label().map(|label| self.collect_label(label));
@@ -469,6 +498,12 @@
             }
             ast::Expr::AwaitExpr(e) => {
                 let expr = self.collect_expr_opt(e.expr());
+                if let Awaitable::No(location) = self.is_lowering_awaitable_block() {
+                    self.source_map.diagnostics.push(BodyDiagnostic::AwaitOutsideOfAsync {
+                        node: InFile::new(self.expander.current_file_id(), AstPtr::new(&e)),
+                        location: location.to_string(),
+                    });
+                }
                 self.alloc_expr(Expr::Await { expr }, syntax_ptr)
             }
             ast::Expr::TryExpr(e) => self.collect_try_operator(syntax_ptr, e),
@@ -527,7 +562,13 @@
                 let prev_is_lowering_coroutine = mem::take(&mut this.is_lowering_coroutine);
                 let prev_try_block_label = this.current_try_block_label.take();
 
-                let body = this.collect_expr_opt(e.body());
+                let awaitable = if e.async_token().is_some() {
+                    Awaitable::Yes
+                } else {
+                    Awaitable::No("non-async closure")
+                };
+                let body =
+                    this.with_awaitable_block(awaitable, |this| this.collect_expr_opt(e.body()));
 
                 let closure_kind = if this.is_lowering_coroutine {
                     let movability = if e.static_token().is_some() {
@@ -2082,6 +2123,21 @@
     fn alloc_label_desugared(&mut self, label: Label) -> LabelId {
         self.body.labels.alloc(label)
     }
+
+    fn is_lowering_awaitable_block(&self) -> &Awaitable {
+        self.awaitable_context.as_ref().unwrap_or(&Awaitable::No("unknown"))
+    }
+
+    fn with_awaitable_block<T>(
+        &mut self,
+        awaitable: Awaitable,
+        f: impl FnOnce(&mut Self) -> T,
+    ) -> T {
+        let orig = self.awaitable_context.replace(awaitable);
+        let res = f(self);
+        self.awaitable_context = orig;
+        res
+    }
 }
 
 fn comma_follows_token(t: Option<syntax::SyntaxToken>) -> bool {
diff --git a/crates/hir-def/src/data.rs b/crates/hir-def/src/data.rs
index c3c2e51..d17ebd7 100644
--- a/crates/hir-def/src/data.rs
+++ b/crates/hir-def/src/data.rs
@@ -94,6 +94,12 @@
             .filter(|it| !it.is_empty())
             .map(Box::new);
         let rustc_allow_incoherent_impl = attrs.by_key(&sym::rustc_allow_incoherent_impl).exists();
+        if flags.contains(FnFlags::HAS_UNSAFE_KW)
+            && !crate_graph[krate].edition.at_least_2024()
+            && attrs.by_key(&sym::rustc_deprecated_safe_2024).exists()
+        {
+            flags.remove(FnFlags::HAS_UNSAFE_KW);
+        }
 
         Arc::new(FunctionData {
             name: func.name.clone(),
@@ -126,19 +132,19 @@
         self.flags.contains(FnFlags::HAS_SELF_PARAM)
     }
 
-    pub fn has_default_kw(&self) -> bool {
+    pub fn is_default(&self) -> bool {
         self.flags.contains(FnFlags::HAS_DEFAULT_KW)
     }
 
-    pub fn has_const_kw(&self) -> bool {
+    pub fn is_const(&self) -> bool {
         self.flags.contains(FnFlags::HAS_CONST_KW)
     }
 
-    pub fn has_async_kw(&self) -> bool {
+    pub fn is_async(&self) -> bool {
         self.flags.contains(FnFlags::HAS_ASYNC_KW)
     }
 
-    pub fn has_unsafe_kw(&self) -> bool {
+    pub fn is_unsafe(&self) -> bool {
         self.flags.contains(FnFlags::HAS_UNSAFE_KW)
     }
 
diff --git a/crates/hir-def/src/db.rs b/crates/hir-def/src/db.rs
index 56feb01..b1103d3 100644
--- a/crates/hir-def/src/db.rs
+++ b/crates/hir-def/src/db.rs
@@ -160,7 +160,7 @@
     fn const_data(&self, konst: ConstId) -> Arc<ConstData>;
 
     #[salsa::invoke(StaticData::static_data_query)]
-    fn static_data(&self, konst: StaticId) -> Arc<StaticData>;
+    fn static_data(&self, statik: StaticId) -> Arc<StaticData>;
 
     #[salsa::invoke(Macro2Data::macro2_data_query)]
     fn macro2_data(&self, makro: Macro2Id) -> Arc<Macro2Data>;
@@ -240,14 +240,14 @@
 
     fn crate_supports_no_std(&self, crate_id: CrateId) -> bool;
 
-    fn include_macro_invoc(&self, crate_id: CrateId) -> Vec<(MacroCallId, EditionedFileId)>;
+    fn include_macro_invoc(&self, crate_id: CrateId) -> Arc<[(MacroCallId, EditionedFileId)]>;
 }
 
 // return: macro call id and include file id
 fn include_macro_invoc(
     db: &dyn DefDatabase,
     krate: CrateId,
-) -> Vec<(MacroCallId, EditionedFileId)> {
+) -> Arc<[(MacroCallId, EditionedFileId)]> {
     db.crate_def_map(krate)
         .modules
         .values()
diff --git a/crates/hir-def/src/find_path.rs b/crates/hir-def/src/find_path.rs
index 91594ae..5a3a3e9 100644
--- a/crates/hir-def/src/find_path.rs
+++ b/crates/hir-def/src/find_path.rs
@@ -50,13 +50,13 @@
             prefix: prefix_kind,
             cfg,
             ignore_local_imports,
+            is_std_item: db.crate_graph()[item_module.krate()].origin.is_lang(),
             from,
             from_def_map: &from.def_map(db),
             fuel: Cell::new(FIND_PATH_FUEL),
         },
         item,
         MAX_PATH_LEN,
-        db.crate_graph()[item_module.krate()].origin.is_lang(),
     )
 }
 
@@ -98,20 +98,16 @@
     prefix: PrefixKind,
     cfg: ImportPathConfig,
     ignore_local_imports: bool,
+    is_std_item: bool,
     from: ModuleId,
     from_def_map: &'db DefMap,
     fuel: Cell<usize>,
 }
 
 /// Attempts to find a path to refer to the given `item` visible from the `from` ModuleId
-fn find_path_inner(
-    ctx: &FindPathCtx<'_>,
-    item: ItemInNs,
-    max_len: usize,
-    is_std_item: bool,
-) -> Option<ModPath> {
+fn find_path_inner(ctx: &FindPathCtx<'_>, item: ItemInNs, max_len: usize) -> Option<ModPath> {
     // - if the item is a module, jump straight to module search
-    if !is_std_item {
+    if !ctx.is_std_item {
         if let ItemInNs::Types(ModuleDefId::ModuleId(module_id)) = item {
             return find_path_for_module(ctx, &mut FxHashSet::default(), module_id, true, max_len)
                 .map(|choice| choice.path);
@@ -138,12 +134,9 @@
 
     if let Some(ModuleDefId::EnumVariantId(variant)) = item.as_module_def_id() {
         // - if the item is an enum variant, refer to it via the enum
-        if let Some(mut path) = find_path_inner(
-            ctx,
-            ItemInNs::Types(variant.lookup(ctx.db).parent.into()),
-            max_len,
-            is_std_item,
-        ) {
+        if let Some(mut path) =
+            find_path_inner(ctx, ItemInNs::Types(variant.lookup(ctx.db).parent.into()), max_len)
+        {
             path.push_segment(ctx.db.enum_variant_data(variant).name.clone());
             return Some(path);
         }
@@ -152,16 +145,6 @@
         // variant somewhere
     }
 
-    if is_std_item {
-        // The item we are searching for comes from the sysroot libraries, so skip prefer looking in
-        // the sysroot libraries directly.
-        // We do need to fallback as the item in question could be re-exported by another crate
-        // while not being a transitive dependency of the current crate.
-        if let Some(choice) = find_in_sysroot(ctx, &mut FxHashSet::default(), item, max_len) {
-            return Some(choice.path);
-        }
-    }
-
     let mut best_choice = None;
     calculate_best_path(ctx, &mut FxHashSet::default(), item, max_len, &mut best_choice);
     best_choice.map(|choice| choice.path)
@@ -366,6 +349,12 @@
         // Item was defined in the same crate that wants to import it. It cannot be found in any
         // dependency in this case.
         calculate_best_path_local(ctx, visited_modules, item, max_len, best_choice)
+    } else if ctx.is_std_item {
+        // The item we are searching for comes from the sysroot libraries, so prefer looking in
+        // the sysroot libraries directly.
+        // We still need to fall back, as the item in question could be re-exported by another
+        // crate while not being a transitive dependency of the current crate.
+        find_in_sysroot(ctx, visited_modules, item, max_len, best_choice)
     } else {
         // Item was defined in some upstream crate. This means that it must be exported from one,
         // too (unless we can't name it at all). It could *also* be (re)exported by the same crate
@@ -382,10 +371,10 @@
     visited_modules: &mut FxHashSet<(ItemInNs, ModuleId)>,
     item: ItemInNs,
     max_len: usize,
-) -> Option<Choice> {
+    best_choice: &mut Option<Choice>,
+) {
     let crate_graph = ctx.db.crate_graph();
     let dependencies = &crate_graph[ctx.from.krate].dependencies;
-    let mut best_choice = None;
     let mut search = |lang, best_choice: &mut _| {
         if let Some(dep) = dependencies.iter().filter(|it| it.is_sysroot()).find(|dep| {
             match crate_graph[dep.crate_id].origin {
@@ -397,29 +386,31 @@
         }
     };
     if ctx.cfg.prefer_no_std {
-        search(LangCrateOrigin::Core, &mut best_choice);
+        search(LangCrateOrigin::Core, best_choice);
         if matches!(best_choice, Some(Choice { stability: Stable, .. })) {
-            return best_choice;
+            return;
         }
-        search(LangCrateOrigin::Std, &mut best_choice);
+        search(LangCrateOrigin::Std, best_choice);
         if matches!(best_choice, Some(Choice { stability: Stable, .. })) {
-            return best_choice;
+            return;
         }
     } else {
-        search(LangCrateOrigin::Std, &mut best_choice);
+        search(LangCrateOrigin::Std, best_choice);
         if matches!(best_choice, Some(Choice { stability: Stable, .. })) {
-            return best_choice;
+            return;
         }
-        search(LangCrateOrigin::Core, &mut best_choice);
+        search(LangCrateOrigin::Core, best_choice);
         if matches!(best_choice, Some(Choice { stability: Stable, .. })) {
-            return best_choice;
+            return;
         }
     }
-    let mut best_choice = None;
-    dependencies.iter().filter(|it| it.is_sysroot()).for_each(|dep| {
-        find_in_dep(ctx, visited_modules, item, max_len, &mut best_choice, dep.crate_id);
-    });
-    best_choice
+    dependencies
+        .iter()
+        .filter(|it| it.is_sysroot())
+        .chain(dependencies.iter().filter(|it| !it.is_sysroot()))
+        .for_each(|dep| {
+            find_in_dep(ctx, visited_modules, item, max_len, best_choice, dep.crate_id);
+        });
 }
 
 fn find_in_dep(
@@ -491,6 +482,7 @@
     );
 }
 
+#[derive(Debug)]
 struct Choice {
     path: ModPath,
     /// The length in characters of the path
@@ -676,6 +668,7 @@
         path: &str,
         prefer_prelude: bool,
         prefer_absolute: bool,
+        prefer_no_std: bool,
         expect: Expect,
     ) {
         let (db, pos) = TestDB::with_position(ra_fixture);
@@ -717,7 +710,7 @@
                 module,
                 prefix,
                 ignore_local_imports,
-                ImportPathConfig { prefer_no_std: false, prefer_prelude, prefer_absolute },
+                ImportPathConfig { prefer_no_std, prefer_prelude, prefer_absolute },
             );
             format_to!(
                 res,
@@ -732,15 +725,19 @@
     }
 
     fn check_found_path(ra_fixture: &str, path: &str, expect: Expect) {
-        check_found_path_(ra_fixture, path, false, false, expect);
+        check_found_path_(ra_fixture, path, false, false, false, expect);
     }
 
     fn check_found_path_prelude(ra_fixture: &str, path: &str, expect: Expect) {
-        check_found_path_(ra_fixture, path, true, false, expect);
+        check_found_path_(ra_fixture, path, true, false, false, expect);
     }
 
     fn check_found_path_absolute(ra_fixture: &str, path: &str, expect: Expect) {
-        check_found_path_(ra_fixture, path, false, true, expect);
+        check_found_path_(ra_fixture, path, false, true, false, expect);
+    }
+
+    fn check_found_path_prefer_no_std(ra_fixture: &str, path: &str, expect: Expect) {
+        check_found_path_(ra_fixture, path, false, false, true, expect);
     }
 
     #[test]
@@ -1361,9 +1358,66 @@
             "#]],
         );
     }
+    #[test]
+    fn prefer_core_paths_over_std_for_mod_reexport() {
+        check_found_path_prefer_no_std(
+            r#"
+//- /main.rs crate:main deps:core,std
+
+$0
+
+//- /stdlib.rs crate:std deps:core
+
+pub use core::pin;
+
+//- /corelib.rs crate:core
+
+pub mod pin {
+    pub struct Pin;
+}
+            "#,
+            "std::pin::Pin",
+            expect![[r#"
+                Plain  (imports ✔): core::pin::Pin
+                Plain  (imports ✖): core::pin::Pin
+                ByCrate(imports ✔): core::pin::Pin
+                ByCrate(imports ✖): core::pin::Pin
+                BySelf (imports ✔): core::pin::Pin
+                BySelf (imports ✖): core::pin::Pin
+            "#]],
+        );
+    }
 
     #[test]
     fn prefer_core_paths_over_std() {
+        check_found_path_prefer_no_std(
+            r#"
+//- /main.rs crate:main deps:core,std
+
+$0
+
+//- /std.rs crate:std deps:core
+
+pub mod fmt {
+    pub use core::fmt::Error;
+}
+
+//- /zzz.rs crate:core
+
+pub mod fmt {
+    pub struct Error;
+}
+        "#,
+            "core::fmt::Error",
+            expect![[r#"
+                Plain  (imports ✔): core::fmt::Error
+                Plain  (imports ✖): core::fmt::Error
+                ByCrate(imports ✔): core::fmt::Error
+                ByCrate(imports ✖): core::fmt::Error
+                BySelf (imports ✔): core::fmt::Error
+                BySelf (imports ✖): core::fmt::Error
+            "#]],
+        );
         check_found_path(
             r#"
 //- /main.rs crate:main deps:core,std
@@ -1878,10 +1932,9 @@
 
     #[test]
     fn respect_unstable_modules() {
-        check_found_path(
+        check_found_path_prefer_no_std(
             r#"
 //- /main.rs crate:main deps:std,core
-#![no_std]
 extern crate std;
 $0
 //- /longer.rs crate:std deps:core
diff --git a/crates/hir-def/src/lib.rs b/crates/hir-def/src/lib.rs
index 66412b2..4ced30c 100644
--- a/crates/hir-def/src/lib.rs
+++ b/crates/hir-def/src/lib.rs
@@ -105,7 +105,7 @@
 
 type FxIndexMap<K, V> =
     indexmap::IndexMap<K, V, std::hash::BuildHasherDefault<rustc_hash::FxHasher>>;
-/// A wrapper around two booleans, [`ImportPathConfig::prefer_no_std`] and [`ImportPathConfig::prefer_prelude`].
+/// A wrapper around three booleans: [`ImportPathConfig::prefer_no_std`], [`ImportPathConfig::prefer_prelude`] and [`ImportPathConfig::prefer_absolute`].
 #[derive(Debug, Clone, PartialEq, Eq, Hash, Copy)]
 pub struct ImportPathConfig {
     /// If true, prefer to unconditionally use imports of the `core` and `alloc` crate
diff --git a/crates/hir-def/src/macro_expansion_tests/mbe.rs b/crates/hir-def/src/macro_expansion_tests/mbe.rs
index 64b37d2..fc14608 100644
--- a/crates/hir-def/src/macro_expansion_tests/mbe.rs
+++ b/crates/hir-def/src/macro_expansion_tests/mbe.rs
@@ -1201,7 +1201,6 @@
 
 #[test]
 fn test_meta_doc_comments() {
-    cov_mark::check!(test_meta_doc_comments);
     check(
         r#"
 macro_rules! m {
diff --git a/crates/hir-def/src/macro_expansion_tests/mod.rs b/crates/hir-def/src/macro_expansion_tests/mod.rs
index d34f0af..b430e2c 100644
--- a/crates/hir-def/src/macro_expansion_tests/mod.rs
+++ b/crates/hir-def/src/macro_expansion_tests/mod.rs
@@ -317,9 +317,9 @@
         _: Span,
         _: Span,
     ) -> Result<Subtree, ProcMacroExpansionError> {
-        let (parse, _) = ::mbe::token_tree_to_syntax_node(
+        let (parse, _) = syntax_bridge::token_tree_to_syntax_node(
             subtree,
-            ::mbe::TopEntryPoint::MacroItems,
+            syntax_bridge::TopEntryPoint::MacroItems,
             span::Edition::CURRENT,
         );
         if parse.errors().is_empty() {
diff --git a/crates/hir-def/src/nameres/collector.rs b/crates/hir-def/src/nameres/collector.rs
index 483bffc..debc5a4 100644
--- a/crates/hir-def/src/nameres/collector.rs
+++ b/crates/hir-def/src/nameres/collector.rs
@@ -56,7 +56,6 @@
 };
 
 static GLOB_RECURSION_LIMIT: Limit = Limit::new(100);
-static EXPANSION_DEPTH_LIMIT: Limit = Limit::new(128);
 static FIXED_POINT_LIMIT: Limit = Limit::new(8192);
 
 pub(super) fn collect_defs(db: &dyn DefDatabase, def_map: DefMap, tree_id: TreeId) -> DefMap {
@@ -1440,7 +1439,14 @@
         depth: usize,
         container: ItemContainerId,
     ) {
-        if EXPANSION_DEPTH_LIMIT.check(depth).is_err() {
+        let recursion_limit = self.def_map.recursion_limit() as usize;
+        let recursion_limit = Limit::new(if cfg!(test) {
+            // Without this, `body::tests::your_stack_belongs_to_me` stack-overflows in debug
+            std::cmp::min(32, recursion_limit)
+        } else {
+            recursion_limit
+        });
+        if recursion_limit.check(depth).is_err() {
             cov_mark::hit!(macro_expansion_overflow);
             tracing::warn!("macro expansion is too deep");
             return;
@@ -2003,7 +2009,7 @@
                             Err(cfg) => {
                                 self.emit_unconfigured_diagnostic(
                                     self.tree_id,
-                                    AttrOwner::TopLevel,
+                                    AttrOwner::ModItem(module_id.into()),
                                     &cfg,
                                 );
                             }
diff --git a/crates/hir-def/src/nameres/tests/incremental.rs b/crates/hir-def/src/nameres/tests/incremental.rs
index e82af31..d319831 100644
--- a/crates/hir-def/src/nameres/tests/incremental.rs
+++ b/crates/hir-def/src/nameres/tests/incremental.rs
@@ -1,4 +1,4 @@
-use base_db::{SourceDatabase, SourceDatabaseExt2 as _};
+use base_db::{SourceDatabase, SourceDatabaseFileInputExt as _};
 use test_fixture::WithFixture;
 
 use crate::{db::DefDatabase, nameres::tests::TestDB, AdtId, ModuleDefId};
diff --git a/crates/hir-def/src/path/lower.rs b/crates/hir-def/src/path/lower.rs
index 7c39773..70918a9 100644
--- a/crates/hir-def/src/path/lower.rs
+++ b/crates/hir-def/src/path/lower.rs
@@ -194,6 +194,11 @@
         match generic_arg {
             ast::GenericArg::TypeArg(type_arg) => {
                 let type_ref = TypeRef::from_ast_opt(lower_ctx, type_arg.ty());
+                type_ref.walk(&mut |tr| {
+                    if let TypeRef::ImplTrait(bounds) = tr {
+                        lower_ctx.update_impl_traits_bounds(bounds.clone());
+                    }
+                });
                 args.push(GenericArg::Type(type_ref));
             }
             ast::GenericArg::AssocTypeArg(assoc_type_arg) => {
diff --git a/crates/hir-def/src/test_db.rs b/crates/hir-def/src/test_db.rs
index f44472e..df9dec6 100644
--- a/crates/hir-def/src/test_db.rs
+++ b/crates/hir-def/src/test_db.rs
@@ -19,7 +19,7 @@
 };
 
 #[salsa::database(
-    base_db::SourceDatabaseExtStorage,
+    base_db::SourceRootDatabaseStorage,
     base_db::SourceDatabaseStorage,
     hir_expand::db::ExpandDatabaseStorage,
     crate::db::InternDatabaseStorage,
@@ -69,9 +69,6 @@
 impl panic::RefUnwindSafe for TestDB {}
 
 impl FileLoader for TestDB {
-    fn file_text(&self, file_id: FileId) -> Arc<str> {
-        FileLoaderDelegate(self).file_text(file_id)
-    }
     fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
         FileLoaderDelegate(self).resolve_path(path)
     }
diff --git a/crates/hir-expand/Cargo.toml b/crates/hir-expand/Cargo.toml
index ca05618..03a9d54 100644
--- a/crates/hir-expand/Cargo.toml
+++ b/crates/hir-expand/Cargo.toml
@@ -1,7 +1,8 @@
 [package]
 name = "hir-expand"
 version = "0.0.0"
-description = "TBD"
+repository.workspace = true
+description = "Macro expansion for rust-analyzer."
 
 authors.workspace = true
 edition.workspace = true
@@ -33,6 +34,7 @@
 limit.workspace = true
 span.workspace = true
 parser.workspace = true
+syntax-bridge.workspace = true
 
 [dev-dependencies]
 expect-test = "1.4.0"
diff --git a/crates/hir-expand/src/attrs.rs b/crates/hir-expand/src/attrs.rs
index 777e415..79cfeb4 100644
--- a/crates/hir-expand/src/attrs.rs
+++ b/crates/hir-expand/src/attrs.rs
@@ -6,14 +6,12 @@
 use either::Either;
 use intern::{sym, Interned, Symbol};
 
-use mbe::{
-    desugar_doc_comment_text, syntax_node_to_token_tree, DelimiterKind, DocCommentDesugarMode,
-    Punct,
-};
+use mbe::{DelimiterKind, Punct};
 use smallvec::{smallvec, SmallVec};
 use span::{Span, SyntaxContextId};
 use syntax::unescape;
 use syntax::{ast, match_ast, AstNode, AstToken, SyntaxNode};
+use syntax_bridge::{desugar_doc_comment_text, syntax_node_to_token_tree, DocCommentDesugarMode};
 use triomphe::ThinArc;
 
 use crate::name::Name;
diff --git a/crates/hir-expand/src/builtin/derive_macro.rs b/crates/hir-expand/src/builtin/derive_macro.rs
index f560d3b..7d3e8de 100644
--- a/crates/hir-expand/src/builtin/derive_macro.rs
+++ b/crates/hir-expand/src/builtin/derive_macro.rs
@@ -2,10 +2,10 @@
 
 use intern::sym;
 use itertools::izip;
-use mbe::DocCommentDesugarMode;
 use rustc_hash::FxHashSet;
 use span::{MacroCallId, Span};
 use stdx::never;
+use syntax_bridge::DocCommentDesugarMode;
 use tracing::debug;
 
 use crate::{
@@ -209,9 +209,9 @@
 }
 
 fn parse_adt(tt: &tt::Subtree, call_site: Span) -> Result<BasicAdtInfo, ExpandError> {
-    let (parsed, tm) = &mbe::token_tree_to_syntax_node(
+    let (parsed, tm) = &syntax_bridge::token_tree_to_syntax_node(
         tt,
-        mbe::TopEntryPoint::MacroItems,
+        syntax_bridge::TopEntryPoint::MacroItems,
         parser::Edition::CURRENT_FIXME,
     );
     let macro_items = ast::MacroItems::cast(parsed.syntax_node())
@@ -268,7 +268,7 @@
                 match this {
                     Some(it) => {
                         param_type_set.insert(it.as_name());
-                        mbe::syntax_node_to_token_tree(
+                        syntax_bridge::syntax_node_to_token_tree(
                             it.syntax(),
                             tm,
                             call_site,
@@ -282,7 +282,7 @@
             };
             let bounds = match &param {
                 ast::TypeOrConstParam::Type(it) => it.type_bound_list().map(|it| {
-                    mbe::syntax_node_to_token_tree(
+                    syntax_bridge::syntax_node_to_token_tree(
                         it.syntax(),
                         tm,
                         call_site,
@@ -295,7 +295,7 @@
                 let ty = param
                     .ty()
                     .map(|ty| {
-                        mbe::syntax_node_to_token_tree(
+                        syntax_bridge::syntax_node_to_token_tree(
                             ty.syntax(),
                             tm,
                             call_site,
@@ -316,7 +316,7 @@
     let where_clause = if let Some(w) = where_clause {
         w.predicates()
             .map(|it| {
-                mbe::syntax_node_to_token_tree(
+                syntax_bridge::syntax_node_to_token_tree(
                     it.syntax(),
                     tm,
                     call_site,
@@ -353,7 +353,7 @@
             param_type_set.contains(&name).then_some(p)
         })
         .map(|it| {
-            mbe::syntax_node_to_token_tree(
+            syntax_bridge::syntax_node_to_token_tree(
                 it.syntax(),
                 tm,
                 call_site,
diff --git a/crates/hir-expand/src/builtin/fn_macro.rs b/crates/hir-expand/src/builtin/fn_macro.rs
index 7903ac0..795d9b1 100644
--- a/crates/hir-expand/src/builtin/fn_macro.rs
+++ b/crates/hir-expand/src/builtin/fn_macro.rs
@@ -4,13 +4,14 @@
 use cfg::CfgExpr;
 use either::Either;
 use intern::{sym, Symbol};
-use mbe::{parse_exprs_with_sep, parse_to_token_tree, DelimiterKind};
+use mbe::{expect_fragment, DelimiterKind};
 use span::{Edition, EditionedFileId, Span, SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID};
 use stdx::format_to;
 use syntax::{
     format_smolstr,
     unescape::{unescape_byte, unescape_char, unescape_unicode, Mode},
 };
+use syntax_bridge::parse_to_token_tree;
 
 use crate::{
     builtin::quote::{dollar_crate, quote},
@@ -228,20 +229,22 @@
     span: Span,
 ) -> ExpandResult<tt::Subtree> {
     let call_site_span = span_with_call_site_ctxt(db, span, id);
-    let args = parse_exprs_with_sep(tt, ',', call_site_span, Edition::CURRENT_FIXME);
+
+    let mut iter = ::tt::iter::TtIter::new(tt);
+
+    let cond = expect_fragment(
+        &mut iter,
+        parser::PrefixEntryPoint::Expr,
+        db.crate_graph()[id.lookup(db).krate].edition,
+        tt::DelimSpan { open: tt.delimiter.open, close: tt.delimiter.close },
+    );
+    _ = iter.expect_char(',');
+    let rest = iter.as_slice();
+
     let dollar_crate = dollar_crate(span);
-    let expanded = match &*args {
-        [cond, panic_args @ ..] => {
-            let comma = tt::Subtree {
-                delimiter: tt::Delimiter::invisible_spanned(call_site_span),
-                token_trees: Box::new([tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
-                    char: ',',
-                    spacing: tt::Spacing::Alone,
-                    span: call_site_span,
-                }))]),
-            };
-            let cond = cond.clone();
-            let panic_args = itertools::Itertools::intersperse(panic_args.iter().cloned(), comma);
+    let expanded = match cond.value {
+        Some(cond) => {
+            let panic_args = rest.iter().cloned();
             let mac = if use_panic_2021(db, span) {
                 quote! {call_site_span => #dollar_crate::panic::panic_2021!(##panic_args) }
             } else {
@@ -253,10 +256,13 @@
                 }
             }}
         }
-        [] => quote! {call_site_span =>{}},
+        None => quote! {call_site_span =>{}},
     };
 
-    ExpandResult::ok(expanded)
+    match cond.err {
+        Some(err) => ExpandResult::new(expanded, err.into()),
+        None => ExpandResult::ok(expanded),
+    }
 }
 
 fn file_expand(
diff --git a/crates/hir-expand/src/change.rs b/crates/hir-expand/src/change.rs
index 1a3dd0e..8b3f69d 100644
--- a/crates/hir-expand/src/change.rs
+++ b/crates/hir-expand/src/change.rs
@@ -1,7 +1,7 @@
 //! Defines a unit of change that can applied to the database to get the next
 //! state. Changes are transactional.
 use base_db::{
-    salsa::Durability, CrateGraph, CrateId, FileChange, SourceDatabaseExt, SourceRoot,
+    salsa::Durability, CrateGraph, CrateId, FileChange, SourceRoot, SourceRootDatabase,
     TargetLayoutLoadResult, Version,
 };
 use la_arena::RawIdx;
@@ -23,7 +23,7 @@
         Self::default()
     }
 
-    pub fn apply(self, db: &mut (impl ExpandDatabase + SourceDatabaseExt)) {
+    pub fn apply(self, db: &mut (impl ExpandDatabase + SourceRootDatabase)) {
         self.source_change.apply(db);
         if let Some(proc_macros) = self.proc_macros {
             db.set_proc_macros_with_durability(Arc::new(proc_macros), Durability::HIGH);
diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs
index 01a3566..584f963 100644
--- a/crates/hir-expand/src/db.rs
+++ b/crates/hir-expand/src/db.rs
@@ -3,10 +3,11 @@
 use base_db::{salsa, CrateId, SourceDatabase};
 use either::Either;
 use limit::Limit;
-use mbe::{syntax_node_to_token_tree, DocCommentDesugarMode, MatchedArmIndex};
+use mbe::MatchedArmIndex;
 use rustc_hash::FxHashSet;
 use span::{AstIdMap, EditionedFileId, Span, SyntaxContextData, SyntaxContextId};
 use syntax::{ast, AstNode, Parse, SyntaxElement, SyntaxError, SyntaxNode, SyntaxToken, T};
+use syntax_bridge::{syntax_node_to_token_tree, DocCommentDesugarMode};
 use triomphe::Arc;
 
 use crate::{
@@ -165,7 +166,7 @@
     // Build the subtree and token mapping for the speculative args
     let (mut tt, undo_info) = match loc.kind {
         MacroCallKind::FnLike { .. } => (
-            mbe::syntax_node_to_token_tree(
+            syntax_bridge::syntax_node_to_token_tree(
                 speculative_args,
                 span_map,
                 span,
@@ -178,7 +179,7 @@
             SyntaxFixupUndoInfo::NONE,
         ),
         MacroCallKind::Attr { .. } if loc.def.is_attribute_derive() => (
-            mbe::syntax_node_to_token_tree(
+            syntax_bridge::syntax_node_to_token_tree(
                 speculative_args,
                 span_map,
                 span,
@@ -213,7 +214,7 @@
             fixups.remove.extend(censor_cfg);
 
             (
-                mbe::syntax_node_to_token_tree_modified(
+                syntax_bridge::syntax_node_to_token_tree_modified(
                     speculative_args,
                     span_map,
                     fixups.append,
@@ -459,7 +460,7 @@
                 return dummy_tt(kind);
             }
 
-            let mut tt = mbe::syntax_node_to_token_tree(
+            let mut tt = syntax_bridge::syntax_node_to_token_tree(
                 tt.syntax(),
                 map.as_ref(),
                 span,
@@ -515,7 +516,7 @@
         fixups.remove.extend(censor_cfg);
 
         (
-            mbe::syntax_node_to_token_tree_modified(
+            syntax_bridge::syntax_node_to_token_tree_modified(
                 syntax,
                 map,
                 fixups.append,
@@ -720,13 +721,13 @@
     edition: parser::Edition,
 ) -> (Parse<SyntaxNode>, ExpansionSpanMap) {
     let entry_point = match expand_to {
-        ExpandTo::Statements => mbe::TopEntryPoint::MacroStmts,
-        ExpandTo::Items => mbe::TopEntryPoint::MacroItems,
-        ExpandTo::Pattern => mbe::TopEntryPoint::Pattern,
-        ExpandTo::Type => mbe::TopEntryPoint::Type,
-        ExpandTo::Expr => mbe::TopEntryPoint::Expr,
+        ExpandTo::Statements => syntax_bridge::TopEntryPoint::MacroStmts,
+        ExpandTo::Items => syntax_bridge::TopEntryPoint::MacroItems,
+        ExpandTo::Pattern => syntax_bridge::TopEntryPoint::Pattern,
+        ExpandTo::Type => syntax_bridge::TopEntryPoint::Type,
+        ExpandTo::Expr => syntax_bridge::TopEntryPoint::Expr,
     };
-    mbe::token_tree_to_syntax_node(tt, entry_point, edition)
+    syntax_bridge::token_tree_to_syntax_node(tt, entry_point, edition)
 }
 
 fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<()>> {
diff --git a/crates/hir-expand/src/declarative.rs b/crates/hir-expand/src/declarative.rs
index 48851af..b1a6eed 100644
--- a/crates/hir-expand/src/declarative.rs
+++ b/crates/hir-expand/src/declarative.rs
@@ -2,10 +2,10 @@
 
 use base_db::CrateId;
 use intern::sym;
-use mbe::DocCommentDesugarMode;
 use span::{Edition, MacroCallId, Span, SyntaxContextId};
 use stdx::TupleExt;
 use syntax::{ast, AstNode};
+use syntax_bridge::DocCommentDesugarMode;
 use triomphe::Arc;
 
 use crate::{
@@ -112,7 +112,7 @@
             ast::Macro::MacroRules(macro_rules) => (
                 match macro_rules.token_tree() {
                     Some(arg) => {
-                        let tt = mbe::syntax_node_to_token_tree(
+                        let tt = syntax_bridge::syntax_node_to_token_tree(
                             arg.syntax(),
                             map.as_ref(),
                             map.span_for_range(
@@ -135,14 +135,14 @@
                         let span =
                             map.span_for_range(macro_def.macro_token().unwrap().text_range());
                         let args = macro_def.args().map(|args| {
-                            mbe::syntax_node_to_token_tree(
+                            syntax_bridge::syntax_node_to_token_tree(
                                 args.syntax(),
                                 map.as_ref(),
                                 span,
                                 DocCommentDesugarMode::Mbe,
                             )
                         });
-                        let body = mbe::syntax_node_to_token_tree(
+                        let body = syntax_bridge::syntax_node_to_token_tree(
                             body.syntax(),
                             map.as_ref(),
                             span,
diff --git a/crates/hir-expand/src/eager.rs b/crates/hir-expand/src/eager.rs
index 5385b44..3528b2d 100644
--- a/crates/hir-expand/src/eager.rs
+++ b/crates/hir-expand/src/eager.rs
@@ -19,9 +19,9 @@
 //!
 //! See the full discussion : <https://rust-lang.zulipchat.com/#narrow/stream/131828-t-compiler/topic/Eager.20expansion.20of.20built-in.20macros>
 use base_db::CrateId;
-use mbe::DocCommentDesugarMode;
 use span::SyntaxContextId;
 use syntax::{ted, Parse, SyntaxElement, SyntaxNode, TextSize, WalkEvent};
+use syntax_bridge::DocCommentDesugarMode;
 use triomphe::Arc;
 
 use crate::{
@@ -82,7 +82,7 @@
         return ExpandResult { value: None, err };
     };
 
-    let mut subtree = mbe::syntax_node_to_token_tree(
+    let mut subtree = syntax_bridge::syntax_node_to_token_tree(
         &expanded_eager_input,
         arg_map,
         span,
diff --git a/crates/hir-expand/src/fixup.rs b/crates/hir-expand/src/fixup.rs
index 71579d2..b6d5828 100644
--- a/crates/hir-expand/src/fixup.rs
+++ b/crates/hir-expand/src/fixup.rs
@@ -2,7 +2,6 @@
 //! fix up syntax errors in the code we're passing to them.
 
 use intern::sym;
-use mbe::DocCommentDesugarMode;
 use rustc_hash::{FxHashMap, FxHashSet};
 use smallvec::SmallVec;
 use span::{
@@ -14,6 +13,7 @@
     ast::{self, AstNode, HasLoopBody},
     match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange, TextSize,
 };
+use syntax_bridge::DocCommentDesugarMode;
 use triomphe::Arc;
 use tt::Spacing;
 
@@ -76,7 +76,8 @@
         if can_handle_error(&node) && has_error_to_handle(&node) {
             remove.insert(node.clone().into());
             // the node contains an error node, we have to completely replace it by something valid
-            let original_tree = mbe::syntax_node_to_token_tree(&node, span_map, call_site, mode);
+            let original_tree =
+                syntax_bridge::syntax_node_to_token_tree(&node, span_map, call_site, mode);
             let idx = original.len() as u32;
             original.push(original_tree);
             let span = span_map.span_for_range(node_range);
@@ -434,9 +435,9 @@
 #[cfg(test)]
 mod tests {
     use expect_test::{expect, Expect};
-    use mbe::DocCommentDesugarMode;
     use span::{Edition, EditionedFileId, FileId};
     use syntax::TextRange;
+    use syntax_bridge::DocCommentDesugarMode;
     use triomphe::Arc;
 
     use crate::{
@@ -483,7 +484,7 @@
             span_map.span_for_range(TextRange::empty(0.into())),
             DocCommentDesugarMode::Mbe,
         );
-        let mut tt = mbe::syntax_node_to_token_tree_modified(
+        let mut tt = syntax_bridge::syntax_node_to_token_tree_modified(
             &parsed.syntax_node(),
             span_map.as_ref(),
             fixups.append,
@@ -498,9 +499,9 @@
         expect.assert_eq(&actual);
 
         // the fixed-up tree should be syntactically valid
-        let (parse, _) = mbe::token_tree_to_syntax_node(
+        let (parse, _) = syntax_bridge::token_tree_to_syntax_node(
             &tt,
-            ::mbe::TopEntryPoint::MacroItems,
+            syntax_bridge::TopEntryPoint::MacroItems,
             parser::Edition::CURRENT,
         );
         assert!(
@@ -513,7 +514,7 @@
 
         // the fixed-up + reversed version should be equivalent to the original input
         // modulo token IDs and `Punct`s' spacing.
-        let original_as_tt = mbe::syntax_node_to_token_tree(
+        let original_as_tt = syntax_bridge::syntax_node_to_token_tree(
             &parsed.syntax_node(),
             span_map.as_ref(),
             span_map.span_for_range(TextRange::empty(0.into())),
diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs
index 18da77d..2bea902 100644
--- a/crates/hir-expand/src/lib.rs
+++ b/crates/hir-expand/src/lib.rs
@@ -176,7 +176,12 @@
             &ExpandErrorKind::MissingProcMacroExpander(def_crate) => {
                 match db.proc_macros().get_error_for_crate(def_crate) {
                     Some((e, hard_err)) => (e.to_owned(), hard_err),
-                    None => ("missing expander".to_owned(), true),
+                    None => (
+                        format!(
+                            "internal error: proc-macro map is missing error entry for crate {def_crate:?}"
+                        ),
+                        true,
+                    ),
                 }
             }
             ExpandErrorKind::MacroDefinition => {
diff --git a/crates/hir-expand/src/span_map.rs b/crates/hir-expand/src/span_map.rs
index 3be88ee..740c27b 100644
--- a/crates/hir-expand/src/span_map.rs
+++ b/crates/hir-expand/src/span_map.rs
@@ -28,13 +28,13 @@
     RealSpanMap(&'a RealSpanMap),
 }
 
-impl mbe::SpanMapper<Span> for SpanMap {
+impl syntax_bridge::SpanMapper<Span> for SpanMap {
     fn span_for(&self, range: TextRange) -> Span {
         self.span_for_range(range)
     }
 }
 
-impl mbe::SpanMapper<Span> for SpanMapRef<'_> {
+impl syntax_bridge::SpanMapper<Span> for SpanMapRef<'_> {
     fn span_for(&self, range: TextRange) -> Span {
         self.span_for_range(range)
     }
diff --git a/crates/hir-ty/Cargo.toml b/crates/hir-ty/Cargo.toml
index b6c3368..b079b56 100644
--- a/crates/hir-ty/Cargo.toml
+++ b/crates/hir-ty/Cargo.toml
@@ -1,7 +1,8 @@
 [package]
 name = "hir-ty"
 version = "0.0.0"
-description = "TBD"
+repository.workspace = true
+description = "The type system for rust-analyzer."
 
 authors.workspace = true
 edition.workspace = true
diff --git a/crates/hir-ty/src/chalk_db.rs b/crates/hir-ty/src/chalk_db.rs
index d506e00..a151ee0 100644
--- a/crates/hir-ty/src/chalk_db.rs
+++ b/crates/hir-ty/src/chalk_db.rs
@@ -275,7 +275,7 @@
                 };
                 chalk_ir::Binders::new(binders, bound)
             }
-            crate::ImplTraitId::AssociatedTypeImplTrait(alias, idx) => {
+            crate::ImplTraitId::TypeAliasImplTrait(alias, idx) => {
                 let datas = self
                     .db
                     .type_alias_impl_traits(alias)
diff --git a/crates/hir-ty/src/chalk_ext.rs b/crates/hir-ty/src/chalk_ext.rs
index 5765262..3025581 100644
--- a/crates/hir-ty/src/chalk_ext.rs
+++ b/crates/hir-ty/src/chalk_ext.rs
@@ -276,7 +276,7 @@
                             data.substitute(Interner, &subst).into_value_and_skipped_binders().0
                         })
                     }
-                    ImplTraitId::AssociatedTypeImplTrait(alias, idx) => {
+                    ImplTraitId::TypeAliasImplTrait(alias, idx) => {
                         db.type_alias_impl_traits(alias).map(|it| {
                             let data =
                                 (*it).as_ref().map(|rpit| rpit.impl_traits[idx].bounds.clone());
@@ -295,7 +295,7 @@
                             data.substitute(Interner, &opaque_ty.substitution)
                         })
                     }
-                    ImplTraitId::AssociatedTypeImplTrait(alias, idx) => {
+                    ImplTraitId::TypeAliasImplTrait(alias, idx) => {
                         db.type_alias_impl_traits(alias).map(|it| {
                             let data =
                                 (*it).as_ref().map(|rpit| rpit.impl_traits[idx].bounds.clone());
diff --git a/crates/hir-ty/src/diagnostics/expr.rs b/crates/hir-ty/src/diagnostics/expr.rs
index e52fae0..6e5a7cc 100644
--- a/crates/hir-ty/src/diagnostics/expr.rs
+++ b/crates/hir-ty/src/diagnostics/expr.rs
@@ -4,20 +4,25 @@
 
 use std::fmt;
 
+use chalk_solve::rust_ir::AdtKind;
 use either::Either;
-use hir_def::lang_item::LangItem;
-use hir_def::{resolver::HasResolver, AdtId, AssocItemId, DefWithBodyId, HasModule};
-use hir_def::{ItemContainerId, Lookup};
+use hir_def::{
+    lang_item::LangItem,
+    resolver::{HasResolver, ValueNs},
+    AdtId, AssocItemId, DefWithBodyId, HasModule, ItemContainerId, Lookup,
+};
 use intern::sym;
 use itertools::Itertools;
 use rustc_hash::FxHashSet;
 use rustc_pattern_analysis::constructor::Constructor;
-use syntax::{ast, AstNode};
+use syntax::{
+    ast::{self, UnaryOp},
+    AstNode,
+};
 use tracing::debug;
 use triomphe::Arc;
 use typed_arena::Arena;
 
-use crate::Interner;
 use crate::{
     db::HirDatabase,
     diagnostics::match_check::{
@@ -25,7 +30,7 @@
         pat_analysis::{self, DeconstructedPat, MatchCheckCtx, WitnessPat},
     },
     display::HirDisplay,
-    InferenceResult, Ty, TyExt,
+    Adjust, InferenceResult, Interner, Ty, TyExt, TyKind,
 };
 
 pub(crate) use hir_def::{
@@ -117,7 +122,7 @@
                 Expr::If { .. } => {
                     self.check_for_unnecessary_else(id, expr, db);
                 }
-                Expr::Block { .. } => {
+                Expr::Block { .. } | Expr::Async { .. } | Expr::Unsafe { .. } => {
                     self.validate_block(db, expr);
                 }
                 _ => {}
@@ -236,7 +241,12 @@
             return;
         }
 
-        let report = match cx.compute_match_usefulness(m_arms.as_slice(), scrut_ty.clone()) {
+        let known_valid_scrutinee = Some(self.is_known_valid_scrutinee(scrutinee_expr, db));
+        let report = match cx.compute_match_usefulness(
+            m_arms.as_slice(),
+            scrut_ty.clone(),
+            known_valid_scrutinee,
+        ) {
             Ok(report) => report,
             Err(()) => return,
         };
@@ -253,8 +263,52 @@
         }
     }
 
+    // [rustc's `is_known_valid_scrutinee`](https://github.com/rust-lang/rust/blob/c9bd03cb724e13cca96ad320733046cbdb16fbbe/compiler/rustc_mir_build/src/thir/pattern/check_match.rs#L288)
+    //
+    // While the above function in rustc uses thir exprs, r-a doesn't have them.
+    // So, the logic here is getting same result as "hir lowering + match with lowered thir"
+    // with "hir only"
+    fn is_known_valid_scrutinee(&self, scrutinee_expr: ExprId, db: &dyn HirDatabase) -> bool {
+        if self
+            .infer
+            .expr_adjustments
+            .get(&scrutinee_expr)
+            .is_some_and(|adjusts| adjusts.iter().any(|a| matches!(a.kind, Adjust::Deref(..))))
+        {
+            return false;
+        }
+
+        match &self.body[scrutinee_expr] {
+            Expr::UnaryOp { op: UnaryOp::Deref, .. } => false,
+            Expr::Path(path) => {
+                let value_or_partial = self
+                    .owner
+                    .resolver(db.upcast())
+                    .resolve_path_in_value_ns_fully(db.upcast(), path);
+                value_or_partial.map_or(true, |v| !matches!(v, ValueNs::StaticId(_)))
+            }
+            Expr::Field { expr, .. } => match self.infer.type_of_expr[*expr].kind(Interner) {
+                TyKind::Adt(adt, ..)
+                    if db.adt_datum(self.owner.krate(db.upcast()), *adt).kind == AdtKind::Union =>
+                {
+                    false
+                }
+                _ => self.is_known_valid_scrutinee(*expr, db),
+            },
+            Expr::Index { base, .. } => self.is_known_valid_scrutinee(*base, db),
+            Expr::Cast { expr, .. } => self.is_known_valid_scrutinee(*expr, db),
+            Expr::Missing => false,
+            _ => true,
+        }
+    }
+
     fn validate_block(&mut self, db: &dyn HirDatabase, expr: &Expr) {
-        let Expr::Block { statements, .. } = expr else { return };
+        let (Expr::Block { statements, .. }
+        | Expr::Async { statements, .. }
+        | Expr::Unsafe { statements, .. }) = expr
+        else {
+            return;
+        };
         let pattern_arena = Arena::new();
         let cx = MatchCheckCtx::new(self.owner.module(db.upcast()), self.owner, db);
         for stmt in &**statements {
@@ -280,7 +334,7 @@
                 has_guard: false,
                 arm_data: (),
             };
-            let report = match cx.compute_match_usefulness(&[match_arm], ty.clone()) {
+            let report = match cx.compute_match_usefulness(&[match_arm], ty.clone(), None) {
                 Ok(v) => v,
                 Err(e) => {
                     debug!(?e, "match usefulness error");
diff --git a/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs b/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs
index a12e201..7b3abf5 100644
--- a/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs
+++ b/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs
@@ -69,22 +69,20 @@
     body: DefWithBodyId,
     pub(crate) db: &'db dyn HirDatabase,
     exhaustive_patterns: bool,
-    min_exhaustive_patterns: bool,
 }
 
 impl<'db> MatchCheckCtx<'db> {
     pub(crate) fn new(module: ModuleId, body: DefWithBodyId, db: &'db dyn HirDatabase) -> Self {
         let def_map = db.crate_def_map(module.krate());
         let exhaustive_patterns = def_map.is_unstable_feature_enabled(&sym::exhaustive_patterns);
-        let min_exhaustive_patterns =
-            def_map.is_unstable_feature_enabled(&sym::min_exhaustive_patterns);
-        Self { module, body, db, exhaustive_patterns, min_exhaustive_patterns }
+        Self { module, body, db, exhaustive_patterns }
     }
 
     pub(crate) fn compute_match_usefulness(
         &self,
         arms: &[MatchArm<'db>],
         scrut_ty: Ty,
+        known_valid_scrutinee: Option<bool>,
     ) -> Result<UsefulnessReport<'db, Self>, ()> {
         if scrut_ty.contains_unknown() {
             return Err(());
@@ -95,8 +93,7 @@
             }
         }
 
-        // FIXME: Determine place validity correctly. For now, err on the safe side.
-        let place_validity = PlaceValidity::MaybeInvalid;
+        let place_validity = PlaceValidity::from_bool(known_valid_scrutinee.unwrap_or(true));
         // Measured to take ~100ms on modern hardware.
         let complexity_limit = Some(500000);
         compute_match_usefulness(self, arms, scrut_ty, place_validity, complexity_limit)
@@ -307,7 +304,8 @@
             &Str(void) => match void {},
             Wildcard | NonExhaustive | Hidden | PrivateUninhabited => PatKind::Wild,
             Never => PatKind::Never,
-            Missing | F32Range(..) | F64Range(..) | Opaque(..) | Or => {
+            Missing | F16Range(..) | F32Range(..) | F64Range(..) | F128Range(..) | Opaque(..)
+            | Or => {
                 never!("can't convert to pattern: {:?}", pat.ctor());
                 PatKind::Wild
             }
@@ -327,9 +325,6 @@
     fn is_exhaustive_patterns_feature_on(&self) -> bool {
         self.exhaustive_patterns
     }
-    fn is_min_exhaustive_patterns_feature_on(&self) -> bool {
-        self.min_exhaustive_patterns
-    }
 
     fn ctor_arity(
         &self,
@@ -356,8 +351,9 @@
             },
             Ref => 1,
             Slice(..) => unimplemented!(),
-            Never | Bool(..) | IntRange(..) | F32Range(..) | F64Range(..) | Str(..)
-            | Opaque(..) | NonExhaustive | PrivateUninhabited | Hidden | Missing | Wildcard => 0,
+            Never | Bool(..) | IntRange(..) | F16Range(..) | F32Range(..) | F64Range(..)
+            | F128Range(..) | Str(..) | Opaque(..) | NonExhaustive | PrivateUninhabited
+            | Hidden | Missing | Wildcard => 0,
             Or => {
                 never!("The `Or` constructor doesn't have a fixed arity");
                 0
@@ -419,8 +415,9 @@
                 }
             },
             Slice(_) => unreachable!("Found a `Slice` constructor in match checking"),
-            Never | Bool(..) | IntRange(..) | F32Range(..) | F64Range(..) | Str(..)
-            | Opaque(..) | NonExhaustive | PrivateUninhabited | Hidden | Missing | Wildcard => {
+            Never | Bool(..) | IntRange(..) | F16Range(..) | F32Range(..) | F64Range(..)
+            | F128Range(..) | Str(..) | Opaque(..) | NonExhaustive | PrivateUninhabited
+            | Hidden | Missing | Wildcard => {
                 smallvec![]
             }
             Or => {
diff --git a/crates/hir-ty/src/diagnostics/unsafe_check.rs b/crates/hir-ty/src/diagnostics/unsafe_check.rs
index 22aa5c6..3f54cdd 100644
--- a/crates/hir-ty/src/diagnostics/unsafe_check.rs
+++ b/crates/hir-ty/src/diagnostics/unsafe_check.rs
@@ -17,7 +17,7 @@
 
     let mut res = Vec::new();
     let is_unsafe = match def {
-        DefWithBodyId::FunctionId(it) => db.function_data(it).has_unsafe_kw(),
+        DefWithBodyId::FunctionId(it) => db.function_data(it).is_unsafe(),
         DefWithBodyId::StaticId(_)
         | DefWithBodyId::ConstId(_)
         | DefWithBodyId::VariantId(_)
diff --git a/crates/hir-ty/src/display.rs b/crates/hir-ty/src/display.rs
index a433ecf..7c48195 100644
--- a/crates/hir-ty/src/display.rs
+++ b/crates/hir-ty/src/display.rs
@@ -1022,16 +1022,16 @@
                     // We print all params except implicit impl Trait params. Still a bit weird; should we leave out parent and self?
                     if parameters.len() - impl_ > 0 {
                         // `parameters` are in the order of fn's params (including impl traits), fn's lifetimes
+                        let without_impl = self_param as usize + type_ + const_ + lifetime;
                         // parent's params (those from enclosing impl or trait, if any).
-                        let (fn_params, other) =
-                            parameters.split_at(self_param as usize + type_ + const_ + lifetime);
-                        let (_impl, parent_params) = other.split_at(impl_);
+                        let (fn_params, parent_params) = parameters.split_at(without_impl + impl_);
                         debug_assert_eq!(parent_params.len(), parent_len);
 
                         let parent_params =
                             generic_args_sans_defaults(f, Some(generic_def_id), parent_params);
                         let fn_params =
-                            generic_args_sans_defaults(f, Some(generic_def_id), fn_params);
+                            &generic_args_sans_defaults(f, Some(generic_def_id), fn_params)
+                                [0..without_impl];
 
                         write!(f, "<")?;
                         hir_fmt_generic_arguments(f, parent_params, None)?;
@@ -1069,6 +1069,7 @@
                             module_id,
                             PrefixKind::Plain,
                             false,
+                            // FIXME: no_std Cfg?
                             ImportPathConfig {
                                 prefer_no_std: false,
                                 prefer_prelude: true,
@@ -1151,11 +1152,10 @@
                         )?;
                         // FIXME: it would maybe be good to distinguish this from the alias type (when debug printing), and to show the substitution
                     }
-                    ImplTraitId::AssociatedTypeImplTrait(alias, idx) => {
+                    ImplTraitId::TypeAliasImplTrait(alias, idx) => {
                         let datas =
                             db.type_alias_impl_traits(alias).expect("impl trait id without data");
-                        let data =
-                            (*datas).as_ref().map(|rpit| rpit.impl_traits[idx].bounds.clone());
+                        let data = (*datas).as_ref().map(|it| it.impl_traits[idx].bounds.clone());
                         let bounds = data.substitute(Interner, &parameters);
                         let krate = alias.krate(db.upcast());
                         write_bounds_like_dyn_trait_with_prefix(
@@ -1338,7 +1338,7 @@
                             SizedByDefault::Sized { anchor: krate },
                         )?;
                     }
-                    ImplTraitId::AssociatedTypeImplTrait(alias, idx) => {
+                    ImplTraitId::TypeAliasImplTrait(alias, idx) => {
                         let datas =
                             db.type_alias_impl_traits(alias).expect("impl trait id without data");
                         let data =
diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs
index 804bc53..45d423d 100644
--- a/crates/hir-ty/src/infer.rs
+++ b/crates/hir-ty/src/infer.rs
@@ -36,15 +36,14 @@
     body::Body,
     builtin_type::{BuiltinInt, BuiltinType, BuiltinUint},
     data::{ConstData, StaticData},
-    hir::LabelId,
-    hir::{BindingAnnotation, BindingId, ExprId, ExprOrPatId, PatId},
+    hir::{BindingAnnotation, BindingId, ExprId, ExprOrPatId, LabelId, PatId},
     lang_item::{LangItem, LangItemTarget},
     layout::Integer,
     path::{ModPath, Path},
     resolver::{HasResolver, ResolveValueResult, Resolver, TypeNs, ValueNs},
     type_ref::{LifetimeRef, TypeRef},
-    AdtId, AssocItemId, DefWithBodyId, FieldId, FunctionId, ItemContainerId, Lookup, TraitId,
-    TupleFieldId, TupleId, TypeAliasId, VariantId,
+    AdtId, AssocItemId, DefWithBodyId, FieldId, FunctionId, ImplId, ItemContainerId, Lookup,
+    TraitId, TupleFieldId, TupleId, TypeAliasId, VariantId,
 };
 use hir_expand::name::Name;
 use indexmap::IndexSet;
@@ -785,14 +784,19 @@
     fn collect_const(&mut self, data: &ConstData) {
         let return_ty = self.make_ty(&data.type_ref);
 
-        // Constants might be associated items that define ATPITs.
-        self.insert_atpit_coercion_table(iter::once(&return_ty));
+        // Constants might be defining usage sites of TAITs.
+        self.make_tait_coercion_table(iter::once(&return_ty));
 
         self.return_ty = return_ty;
     }
 
     fn collect_static(&mut self, data: &StaticData) {
-        self.return_ty = self.make_ty(&data.type_ref);
+        let return_ty = self.make_ty(&data.type_ref);
+
+        // Statics might be defining usage sites of TAITs.
+        self.make_tait_coercion_table(iter::once(&return_ty));
+
+        self.return_ty = return_ty;
     }
 
     fn collect_fn(&mut self, func: FunctionId) {
@@ -857,11 +861,11 @@
         self.return_ty = self.normalize_associated_types_in(return_ty);
         self.return_coercion = Some(CoerceMany::new(self.return_ty.clone()));
 
-        // Functions might be associated items that define ATPITs.
-        // To define an ATPITs, that ATPIT must appear in the function's signatures.
+        // Functions might be defining usage sites of TAITs.
+        // To define a TAIT, that TAIT must appear in the function's signatures.
         // So, it suffices to check for params and return types.
         params_and_ret_tys.push(self.return_ty.clone());
-        self.insert_atpit_coercion_table(params_and_ret_tys.iter());
+        self.make_tait_coercion_table(params_and_ret_tys.iter());
     }
 
     fn insert_inference_vars_for_impl_trait<T>(&mut self, t: T, placeholders: Substitution) -> T
@@ -880,7 +884,7 @@
                         ImplTraitId::ReturnTypeImplTrait(def, idx) => {
                             (self.db.return_type_impl_traits(def), idx)
                         }
-                        ImplTraitId::AssociatedTypeImplTrait(def, idx) => {
+                        ImplTraitId::TypeAliasImplTrait(def, idx) => {
                             (self.db.type_alias_impl_traits(def), idx)
                         }
                         _ => unreachable!(),
@@ -909,23 +913,25 @@
     }
 
     /// The coercion of a non-inference var into an opaque type should fail,
-    /// but not in the defining sites of the ATPITs.
-    /// In such cases, we insert an proxy inference var for each ATPIT,
-    /// and coerce into it instead of ATPIT itself.
+    /// but not in the defining sites of the TAITs.
+    /// In such cases, we insert a proxy inference var for each TAIT,
+    /// and coerce into it instead of TAIT itself.
     ///
     /// The inference var stretagy is effective because;
     ///
-    /// - It can still unify types that coerced into ATPIT
+    /// - It can still unify types that coerced into TAITs
     /// - We are pushing `impl Trait` bounds into it
     ///
     /// This function inserts a map that maps the opaque type to that proxy inference var.
-    fn insert_atpit_coercion_table<'b>(&mut self, tys: impl Iterator<Item = &'b Ty>) {
-        struct OpaqueTyCollector<'a, 'b> {
+    fn make_tait_coercion_table<'b>(&mut self, tait_candidates: impl Iterator<Item = &'b Ty>) {
+        struct TypeAliasImplTraitCollector<'a, 'b> {
+            db: &'b dyn HirDatabase,
             table: &'b mut InferenceTable<'a>,
-            opaque_tys: FxHashMap<OpaqueTyId, Ty>,
+            assocs: FxHashMap<OpaqueTyId, (ImplId, Ty)>,
+            non_assocs: FxHashMap<OpaqueTyId, Ty>,
         }
 
-        impl<'a, 'b> TypeVisitor<Interner> for OpaqueTyCollector<'a, 'b> {
+        impl<'a, 'b> TypeVisitor<Interner> for TypeAliasImplTraitCollector<'a, 'b> {
             type BreakTy = ();
 
             fn as_dyn(&mut self) -> &mut dyn TypeVisitor<Interner, BreakTy = Self::BreakTy> {
@@ -944,59 +950,105 @@
                 let ty = self.table.resolve_ty_shallow(ty);
 
                 if let TyKind::OpaqueType(id, _) = ty.kind(Interner) {
-                    self.opaque_tys.insert(*id, ty.clone());
+                    if let ImplTraitId::TypeAliasImplTrait(alias_id, _) =
+                        self.db.lookup_intern_impl_trait_id((*id).into())
+                    {
+                        let loc = self.db.lookup_intern_type_alias(alias_id);
+                        match loc.container {
+                            ItemContainerId::ImplId(impl_id) => {
+                                self.assocs.insert(*id, (impl_id, ty.clone()));
+                            }
+                            ItemContainerId::ModuleId(..) | ItemContainerId::ExternBlockId(..) => {
+                                self.non_assocs.insert(*id, ty.clone());
+                            }
+                            _ => {}
+                        }
+                    }
                 }
 
                 ty.super_visit_with(self, outer_binder)
             }
         }
 
-        // Early return if this is not happening inside the impl block
-        let impl_id = if let Some(impl_id) = self.resolver.impl_def() {
-            impl_id
-        } else {
-            return;
+        let mut collector = TypeAliasImplTraitCollector {
+            db: self.db,
+            table: &mut self.table,
+            assocs: FxHashMap::default(),
+            non_assocs: FxHashMap::default(),
         };
-
-        let assoc_tys: FxHashSet<_> = self
-            .db
-            .impl_data(impl_id)
-            .items
-            .iter()
-            .filter_map(|item| match item {
-                AssocItemId::TypeAliasId(alias) => Some(*alias),
-                _ => None,
-            })
-            .collect();
-        if assoc_tys.is_empty() {
-            return;
-        }
-
-        let mut collector =
-            OpaqueTyCollector { table: &mut self.table, opaque_tys: FxHashMap::default() };
-        for ty in tys {
+        for ty in tait_candidates {
             ty.visit_with(collector.as_dyn(), DebruijnIndex::INNERMOST);
         }
-        let atpit_coercion_table: FxHashMap<_, _> = collector
-            .opaque_tys
-            .into_iter()
-            .filter_map(|(opaque_ty_id, ty)| {
-                if let ImplTraitId::AssociatedTypeImplTrait(alias_id, _) =
-                    self.db.lookup_intern_impl_trait_id(opaque_ty_id.into())
-                {
-                    if assoc_tys.contains(&alias_id) {
-                        let alias_placeholders = TyBuilder::placeholder_subst(self.db, alias_id);
-                        let ty = self.insert_inference_vars_for_impl_trait(ty, alias_placeholders);
-                        return Some((opaque_ty_id, ty));
-                    }
-                }
 
-                None
+        // Non-assoc TAITs can be define-used everywhere as long as they are
+        // in function signatures or const types, etc
+        let mut taits = collector.non_assocs;
+
+        // assoc TAITs (ATPITs) can only be define-used inside their impl block.
+        // They cannot be define-used in inner items like in the following:
+        //
+        // ```
+        // impl Trait for Struct {
+        //     type Assoc = impl Default;
+        //
+        //     fn assoc_fn() -> Self::Assoc {
+        //         let foo: Self::Assoc = true; // Allowed here
+        //
+        //         fn inner() -> Self::Assoc {
+        //              false                   // Not allowed here
+        //         }
+        //
+        //         foo
+        //     }
+        // }
+        // ```
+        let impl_id = match self.owner {
+            DefWithBodyId::FunctionId(it) => {
+                let loc = self.db.lookup_intern_function(it);
+                if let ItemContainerId::ImplId(impl_id) = loc.container {
+                    Some(impl_id)
+                } else {
+                    None
+                }
+            }
+            DefWithBodyId::ConstId(it) => {
+                let loc = self.db.lookup_intern_const(it);
+                if let ItemContainerId::ImplId(impl_id) = loc.container {
+                    Some(impl_id)
+                } else {
+                    None
+                }
+            }
+            _ => None,
+        };
+
+        if let Some(impl_id) = impl_id {
+            taits.extend(collector.assocs.into_iter().filter_map(|(id, (impl_, ty))| {
+                if impl_ == impl_id {
+                    Some((id, ty))
+                } else {
+                    None
+                }
+            }));
+        }
+
+        let tait_coercion_table: FxHashMap<_, _> = taits
+            .into_iter()
+            .filter_map(|(id, ty)| {
+                if let ImplTraitId::TypeAliasImplTrait(alias_id, _) =
+                    self.db.lookup_intern_impl_trait_id(id.into())
+                {
+                    let subst = TyBuilder::placeholder_subst(self.db, alias_id);
+                    let ty = self.insert_inference_vars_for_impl_trait(ty, subst);
+                    Some((id, ty))
+                } else {
+                    None
+                }
             })
             .collect();
 
-        if !atpit_coercion_table.is_empty() {
-            self.table.atpit_coercion_table = Some(atpit_coercion_table);
+        if !tait_coercion_table.is_empty() {
+            self.table.tait_coercion_table = Some(tait_coercion_table);
         }
     }
 
diff --git a/crates/hir-ty/src/infer/coerce.rs b/crates/hir-ty/src/infer/coerce.rs
index 7292885..6f85a4a 100644
--- a/crates/hir-ty/src/infer/coerce.rs
+++ b/crates/hir-ty/src/infer/coerce.rs
@@ -276,16 +276,16 @@
             return success(simple(Adjust::NeverToAny)(to_ty.clone()), to_ty.clone(), vec![]);
         }
 
-        // If we are coercing into an ATPIT, coerce into its proxy inference var, instead.
+        // If we are coercing into a TAIT, coerce into its proxy inference var, instead.
         let mut to_ty = to_ty;
         let _to;
-        if let Some(atpit_table) = &self.atpit_coercion_table {
+        if let Some(tait_table) = &self.tait_coercion_table {
             if let TyKind::OpaqueType(opaque_ty_id, _) = to_ty.kind(Interner) {
                 if !matches!(
                     from_ty.kind(Interner),
                     TyKind::InferenceVar(..) | TyKind::OpaqueType(..)
                 ) {
-                    if let Some(ty) = atpit_table.get(opaque_ty_id) {
+                    if let Some(ty) = tait_table.get(opaque_ty_id) {
                         _to = ty.clone();
                         to_ty = &_to;
                     }
diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs
index 3d762b1..f5eb37f 100644
--- a/crates/hir-ty/src/infer/expr.rs
+++ b/crates/hir-ty/src/infer/expr.rs
@@ -1759,13 +1759,14 @@
         skip_indices: &[u32],
         is_varargs: bool,
     ) {
-        if args.len() != param_tys.len() + skip_indices.len() && !is_varargs {
+        let arg_count_mismatch = args.len() != param_tys.len() + skip_indices.len() && !is_varargs;
+        if arg_count_mismatch {
             self.push_diagnostic(InferenceDiagnostic::MismatchedArgCount {
                 call_expr: expr,
                 expected: param_tys.len() + skip_indices.len(),
                 found: args.len(),
             });
-        }
+        };
 
         // Quoting https://github.com/rust-lang/rust/blob/6ef275e6c3cb1384ec78128eceeb4963ff788dca/src/librustc_typeck/check/mod.rs#L3325 --
         // We do this in a pretty awful way: first we type-check any arguments
@@ -1819,7 +1820,7 @@
                 // The function signature may contain some unknown types, so we need to insert
                 // type vars here to avoid type mismatch false positive.
                 let coercion_target = self.insert_type_vars(coercion_target);
-                if self.coerce(Some(arg), &ty, &coercion_target).is_err() {
+                if self.coerce(Some(arg), &ty, &coercion_target).is_err() && !arg_count_mismatch {
                     self.result.type_mismatches.insert(
                         arg.into(),
                         TypeMismatch { expected: coercion_target, actual: ty.clone() },
diff --git a/crates/hir-ty/src/infer/unify.rs b/crates/hir-ty/src/infer/unify.rs
index 7ee63af..3e3578b 100644
--- a/crates/hir-ty/src/infer/unify.rs
+++ b/crates/hir-ty/src/infer/unify.rs
@@ -224,7 +224,7 @@
 pub(crate) struct InferenceTable<'a> {
     pub(crate) db: &'a dyn HirDatabase,
     pub(crate) trait_env: Arc<TraitEnvironment>,
-    pub(crate) atpit_coercion_table: Option<FxHashMap<OpaqueTyId, Ty>>,
+    pub(crate) tait_coercion_table: Option<FxHashMap<OpaqueTyId, Ty>>,
     var_unification_table: ChalkInferenceTable,
     type_variable_table: SmallVec<[TypeVariableFlags; 16]>,
     pending_obligations: Vec<Canonicalized<InEnvironment<Goal>>>,
@@ -244,7 +244,7 @@
         InferenceTable {
             db,
             trait_env,
-            atpit_coercion_table: None,
+            tait_coercion_table: None,
             var_unification_table: ChalkInferenceTable::new(),
             type_variable_table: SmallVec::new(),
             pending_obligations: Vec::new(),
diff --git a/crates/hir-ty/src/layout.rs b/crates/hir-ty/src/layout.rs
index 034b9c7..47cc2a2 100644
--- a/crates/hir-ty/src/layout.rs
+++ b/crates/hir-ty/src/layout.rs
@@ -391,7 +391,7 @@
                     let infer = db.infer(func.into());
                     return db.layout_of_ty(infer.type_of_rpit[idx].clone(), trait_env);
                 }
-                crate::ImplTraitId::AssociatedTypeImplTrait(..) => {
+                crate::ImplTraitId::TypeAliasImplTrait(..) => {
                     return Err(LayoutError::NotImplemented);
                 }
                 crate::ImplTraitId::AsyncBlockTypeImplTrait(_, _) => {
diff --git a/crates/hir-ty/src/lib.rs b/crates/hir-ty/src/lib.rs
index 2f93ce3..4c9e0a1 100644
--- a/crates/hir-ty/src/lib.rs
+++ b/crates/hir-ty/src/lib.rs
@@ -595,7 +595,7 @@
 #[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)]
 pub enum ImplTraitId {
     ReturnTypeImplTrait(hir_def::FunctionId, ImplTraitIdx),
-    AssociatedTypeImplTrait(hir_def::TypeAliasId, ImplTraitIdx),
+    TypeAliasImplTrait(hir_def::TypeAliasId, ImplTraitIdx),
     AsyncBlockTypeImplTrait(hir_def::DefWithBodyId, ExprId),
 }
 impl InternValueTrivial for ImplTraitId {}
diff --git a/crates/hir-ty/src/lower.rs b/crates/hir-ty/src/lower.rs
index 444628f..67cdb99 100644
--- a/crates/hir-ty/src/lower.rs
+++ b/crates/hir-ty/src/lower.rs
@@ -341,7 +341,7 @@
 
                         let impl_trait_id = origin.either(
                             |f| ImplTraitId::ReturnTypeImplTrait(f, idx),
-                            |a| ImplTraitId::AssociatedTypeImplTrait(a, idx),
+                            |a| ImplTraitId::TypeAliasImplTrait(a, idx),
                         );
                         let opaque_ty_id = self.db.intern_impl_trait_id(impl_trait_id).into();
                         let generics =
@@ -1857,7 +1857,7 @@
         params,
         ret,
         data.is_varargs(),
-        if data.has_unsafe_kw() { Safety::Unsafe } else { Safety::Safe },
+        if data.is_unsafe() { Safety::Unsafe } else { Safety::Safe },
         data.abi.as_ref().map_or(FnAbi::Rust, FnAbi::from_symbol),
     );
     make_binders(db, &generics, sig)
@@ -2131,7 +2131,6 @@
     if let Some(type_ref) = &data.type_ref {
         let _ty = ctx.lower_ty(type_ref);
     }
-    let generics = generics(db.upcast(), def.into());
     let type_alias_impl_traits = ImplTraits {
         impl_traits: match ctx.impl_trait_mode {
             ImplTraitLoweringState::Opaque(x) => x.into_inner(),
@@ -2141,6 +2140,7 @@
     if type_alias_impl_traits.impl_traits.is_empty() {
         None
     } else {
+        let generics = generics(db.upcast(), def.into());
         Some(Arc::new(make_binders(db, &generics, type_alias_impl_traits)))
     }
 }
diff --git a/crates/hir-ty/src/mir/monomorphization.rs b/crates/hir-ty/src/mir/monomorphization.rs
index 172dea0..8f6582b 100644
--- a/crates/hir-ty/src/mir/monomorphization.rs
+++ b/crates/hir-ty/src/mir/monomorphization.rs
@@ -82,8 +82,8 @@
                         };
                         filler.try_fold_ty(infer.type_of_rpit[idx].clone(), outer_binder)
                     }
-                    crate::ImplTraitId::AssociatedTypeImplTrait(..) => {
-                        not_supported!("associated type impl trait");
+                    crate::ImplTraitId::TypeAliasImplTrait(..) => {
+                        not_supported!("type alias impl trait");
                     }
                     crate::ImplTraitId::AsyncBlockTypeImplTrait(_, _) => {
                         not_supported!("async block impl trait");
diff --git a/crates/hir-ty/src/test_db.rs b/crates/hir-ty/src/test_db.rs
index 108ae19..0efb9c5 100644
--- a/crates/hir-ty/src/test_db.rs
+++ b/crates/hir-ty/src/test_db.rs
@@ -15,7 +15,7 @@
 use triomphe::Arc;
 
 #[salsa::database(
-    base_db::SourceDatabaseExtStorage,
+    base_db::SourceRootDatabaseStorage,
     base_db::SourceDatabaseStorage,
     hir_expand::db::ExpandDatabaseStorage,
     hir_def::db::InternDatabaseStorage,
@@ -75,9 +75,6 @@
 impl panic::RefUnwindSafe for TestDB {}
 
 impl FileLoader for TestDB {
-    fn file_text(&self, file_id: FileId) -> Arc<str> {
-        FileLoaderDelegate(self).file_text(file_id)
-    }
     fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
         FileLoaderDelegate(self).resolve_path(path)
     }
diff --git a/crates/hir-ty/src/tests.rs b/crates/hir-ty/src/tests.rs
index e67124d..0fcd789 100644
--- a/crates/hir-ty/src/tests.rs
+++ b/crates/hir-ty/src/tests.rs
@@ -9,10 +9,11 @@
 mod regression;
 mod simple;
 mod traits;
+mod type_alias_impl_traits;
 
 use std::env;
 
-use base_db::SourceDatabaseExt2 as _;
+use base_db::SourceDatabaseFileInputExt as _;
 use expect_test::Expect;
 use hir_def::{
     body::{Body, BodySourceMap, SyntheticSyntax},
diff --git a/crates/hir-ty/src/tests/incremental.rs b/crates/hir-ty/src/tests/incremental.rs
index e9c62d3..0a24eeb 100644
--- a/crates/hir-ty/src/tests/incremental.rs
+++ b/crates/hir-ty/src/tests/incremental.rs
@@ -1,4 +1,4 @@
-use base_db::SourceDatabaseExt2 as _;
+use base_db::SourceDatabaseFileInputExt as _;
 use test_fixture::WithFixture;
 
 use crate::{db::HirDatabase, test_db::TestDB};
diff --git a/crates/hir-ty/src/tests/regression.rs b/crates/hir-ty/src/tests/regression.rs
index ac2dfea..2819838 100644
--- a/crates/hir-ty/src/tests/regression.rs
+++ b/crates/hir-ty/src/tests/regression.rs
@@ -2122,3 +2122,22 @@
 "#,
     )
 }
+
+#[test]
+fn issue_17191() {
+    check_types(
+        r#"
+trait A {
+    type Item;
+}
+
+trait B<T> {}
+
+fn foo<T: B<impl A>>() {}
+
+fn test() {
+    let f = foo;
+      //^ fn foo<{unknown}>()
+}"#,
+    );
+}
diff --git a/crates/hir-ty/src/tests/traits.rs b/crates/hir-ty/src/tests/traits.rs
index fb07e71..a98cff2 100644
--- a/crates/hir-ty/src/tests/traits.rs
+++ b/crates/hir-ty/src/tests/traits.rs
@@ -4692,119 +4692,6 @@
 }
 
 #[test]
-fn associated_type_impl_trait() {
-    check_types(
-        r#"
-trait Foo {}
-struct S1;
-impl Foo for S1 {}
-
-trait Bar {
-    type Item;
-    fn bar(&self) -> Self::Item;
-}
-struct S2;
-impl Bar for S2 {
-    type Item = impl Foo;
-    fn bar(&self) -> Self::Item {
-        S1
-    }
-}
-
-fn test() {
-    let x = S2.bar();
-      //^ impl Foo + ?Sized
-}
-        "#,
-    );
-}
-
-#[test]
-fn associated_type_impl_traits_complex() {
-    check_types(
-        r#"
-struct Unary<T>(T);
-struct Binary<T, U>(T, U);
-
-trait Foo {}
-struct S1;
-impl Foo for S1 {}
-
-trait Bar {
-    type Item;
-    fn bar(&self) -> Unary<Self::Item>;
-}
-struct S2;
-impl Bar for S2 {
-    type Item = Unary<impl Foo>;
-    fn bar(&self) -> Unary<<Self as Bar>::Item> {
-        Unary(Unary(S1))
-    }
-}
-
-trait Baz {
-    type Target1;
-    type Target2;
-    fn baz(&self) -> Binary<Self::Target1, Self::Target2>;
-}
-struct S3;
-impl Baz for S3 {
-    type Target1 = impl Foo;
-    type Target2 = Unary<impl Bar>;
-    fn baz(&self) -> Binary<Self::Target1, Self::Target2> {
-        Binary(S1, Unary(S2))
-    }
-}
-
-fn test() {
-    let x = S3.baz();
-      //^ Binary<impl Foo + ?Sized, Unary<impl Bar + ?Sized>>
-    let y = x.1.0.bar();
-      //^ Unary<Bar::Item<impl Bar + ?Sized>>
-}
-        "#,
-    );
-}
-
-#[test]
-fn associated_type_with_impl_trait_in_tuple() {
-    check_no_mismatches(
-        r#"
-pub trait Iterator {
-    type Item;
-}
-
-pub trait Value {}
-
-fn bar<I: Iterator<Item = (usize, impl Value)>>() {}
-
-fn foo() {
-    bar();
-}
-"#,
-    );
-}
-
-#[test]
-fn associated_type_with_impl_trait_in_nested_tuple() {
-    check_no_mismatches(
-        r#"
-pub trait Iterator {
-    type Item;
-}
-
-pub trait Value {}
-
-fn bar<I: Iterator<Item = ((impl Value, usize), u32)>>() {}
-
-fn foo() {
-    bar();
-}
-"#,
-    );
-}
-
-#[test]
 fn dyn_trait_with_lifetime_in_rpit() {
     check_types(
         r#"
diff --git a/crates/hir-ty/src/tests/type_alias_impl_traits.rs b/crates/hir-ty/src/tests/type_alias_impl_traits.rs
new file mode 100644
index 0000000..e2b7bf3
--- /dev/null
+++ b/crates/hir-ty/src/tests/type_alias_impl_traits.rs
@@ -0,0 +1,161 @@
+use expect_test::expect;
+
+use super::{check_infer_with_mismatches, check_no_mismatches, check_types};
+
+#[test]
+fn associated_type_impl_trait() {
+    check_types(
+        r#"
+trait Foo {}
+struct S1;
+impl Foo for S1 {}
+
+trait Bar {
+    type Item;
+    fn bar(&self) -> Self::Item;
+}
+struct S2;
+impl Bar for S2 {
+    type Item = impl Foo;
+    fn bar(&self) -> Self::Item {
+        S1
+    }
+}
+
+fn test() {
+    let x = S2.bar();
+      //^ impl Foo + ?Sized
+}
+        "#,
+    );
+}
+
+#[test]
+fn associated_type_impl_traits_complex() {
+    check_types(
+        r#"
+struct Unary<T>(T);
+struct Binary<T, U>(T, U);
+
+trait Foo {}
+struct S1;
+impl Foo for S1 {}
+
+trait Bar {
+    type Item;
+    fn bar(&self) -> Unary<Self::Item>;
+}
+struct S2;
+impl Bar for S2 {
+    type Item = Unary<impl Foo>;
+    fn bar(&self) -> Unary<<Self as Bar>::Item> {
+        Unary(Unary(S1))
+    }
+}
+
+trait Baz {
+    type Target1;
+    type Target2;
+    fn baz(&self) -> Binary<Self::Target1, Self::Target2>;
+}
+struct S3;
+impl Baz for S3 {
+    type Target1 = impl Foo;
+    type Target2 = Unary<impl Bar>;
+    fn baz(&self) -> Binary<Self::Target1, Self::Target2> {
+        Binary(S1, Unary(S2))
+    }
+}
+
+fn test() {
+    let x = S3.baz();
+      //^ Binary<impl Foo + ?Sized, Unary<impl Bar + ?Sized>>
+    let y = x.1.0.bar();
+      //^ Unary<Bar::Item<impl Bar + ?Sized>>
+}
+        "#,
+    );
+}
+
+#[test]
+fn associated_type_with_impl_trait_in_tuple() {
+    check_no_mismatches(
+        r#"
+pub trait Iterator {
+    type Item;
+}
+
+pub trait Value {}
+
+fn bar<I: Iterator<Item = (usize, impl Value)>>() {}
+
+fn foo() {
+    bar();
+}
+"#,
+    );
+}
+
+#[test]
+fn associated_type_with_impl_trait_in_nested_tuple() {
+    check_no_mismatches(
+        r#"
+pub trait Iterator {
+    type Item;
+}
+
+pub trait Value {}
+
+fn bar<I: Iterator<Item = ((impl Value, usize), u32)>>() {}
+
+fn foo() {
+    bar();
+}
+"#,
+    );
+}
+
+#[test]
+fn type_alias_impl_trait_simple() {
+    check_no_mismatches(
+        r#"
+trait Trait {}
+
+struct Struct;
+
+impl Trait for Struct {}
+
+type AliasTy = impl Trait;
+
+static ALIAS: AliasTy = {
+    let res: AliasTy = Struct;
+    res
+};
+"#,
+    );
+
+    check_infer_with_mismatches(
+        r#"
+trait Trait {}
+
+struct Struct;
+
+impl Trait for Struct {}
+
+type AliasTy = impl Trait;
+
+static ALIAS: i32 = {
+    // TAITs cannot be define-used if not in signature or type annotations
+    let _a: AliasTy = Struct;
+    5
+};
+"#,
+        expect![[r#"
+            106..220 '{     ...   5 }': i32
+            191..193 '_a': impl Trait + ?Sized
+            205..211 'Struct': Struct
+            217..218 '5': i32
+            205..211: expected impl Trait + ?Sized, got Struct
+        "#]],
+    )
+}
diff --git a/crates/hir-ty/src/utils.rs b/crates/hir-ty/src/utils.rs
index fbec332..d1ce68d 100644
--- a/crates/hir-ty/src/utils.rs
+++ b/crates/hir-ty/src/utils.rs
@@ -253,12 +253,7 @@
 
 pub fn is_fn_unsafe_to_call(db: &dyn HirDatabase, func: FunctionId) -> bool {
     let data = db.function_data(func);
-    if data.has_unsafe_kw() {
-        // Functions that are `#[rustc_deprecated_safe_2024]` are safe to call before 2024.
-        if db.attrs(func.into()).by_key(&sym::rustc_deprecated_safe_2024).exists() {
-            // FIXME: Properly check the caller span and mark it as unsafe after 2024.
-            return false;
-        }
+    if data.is_unsafe() {
         return true;
     }
 
diff --git a/crates/hir/Cargo.toml b/crates/hir/Cargo.toml
index edf26a0..324fa1c 100644
--- a/crates/hir/Cargo.toml
+++ b/crates/hir/Cargo.toml
@@ -1,7 +1,8 @@
 [package]
 name = "hir"
 version = "0.0.0"
-description = "TBD"
+repository.workspace = true
+description = "A high-level object-oriented access to Rust code for rust-analyzer."
 
 authors.workspace = true
 edition.workspace = true
diff --git a/crates/hir/src/diagnostics.rs b/crates/hir/src/diagnostics.rs
index 4bb8c14..ffb9724 100644
--- a/crates/hir/src/diagnostics.rs
+++ b/crates/hir/src/diagnostics.rs
@@ -48,6 +48,7 @@
 // ]
 
 diagnostics![
+    AwaitOutsideOfAsync,
     BreakOutsideOfLoop,
     ExpectedFunction,
     InactiveCode,
@@ -135,6 +136,12 @@
     pub name: Name,
 }
 
+#[derive(Debug)]
+pub struct AwaitOutsideOfAsync {
+    pub node: InFile<AstPtr<ast::AwaitExpr>>,
+    pub location: String,
+}
+
 #[derive(Debug, Clone, Eq, PartialEq)]
 pub struct UndeclaredLabel {
     pub node: InFile<AstPtr<ast::Lifetime>>,
diff --git a/crates/hir/src/display.rs b/crates/hir/src/display.rs
index 7def828..12dd8b5 100644
--- a/crates/hir/src/display.rs
+++ b/crates/hir/src/display.rs
@@ -69,13 +69,13 @@
 
         write_visibility(module_id, self.visibility(db), f)?;
 
-        if data.has_default_kw() {
+        if data.is_default() {
             f.write_str("default ")?;
         }
-        if data.has_const_kw() {
+        if data.is_const() {
             f.write_str("const ")?;
         }
-        if data.has_async_kw() {
+        if data.is_async() {
             f.write_str("async ")?;
         }
         if self.is_unsafe_to_call(db) {
@@ -125,7 +125,7 @@
         // `FunctionData::ret_type` will be `::core::future::Future<Output = ...>` for async fns.
         // Use ugly pattern match to strip the Future trait.
         // Better way?
-        let ret_type = if !data.has_async_kw() {
+        let ret_type = if !data.is_async() {
             &data.ret_type
         } else {
             match &*data.ret_type {
diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs
index 266ef2a..1a3becd 100644
--- a/crates/hir/src/lib.rs
+++ b/crates/hir/src/lib.rs
@@ -1,4 +1,4 @@
-//! HIR (previously known as descriptors) provides a high-level object oriented
+//! HIR (previously known as descriptors) provides a high-level object-oriented
 //! access to Rust code.
 //!
 //! The principal difference between HIR and syntax trees is that HIR is bound
@@ -1828,6 +1828,9 @@
                     is_bang: true,
                 }
                 .into(),
+                BodyDiagnostic::AwaitOutsideOfAsync { node, location } => {
+                    AwaitOutsideOfAsync { node: *node, location: location.clone() }.into()
+                }
                 BodyDiagnostic::UnreachableLabel { node, name } => {
                     UnreachableLabel { node: *node, name: name.clone() }.into()
                 }
@@ -2186,11 +2189,11 @@
     }
 
     pub fn is_const(self, db: &dyn HirDatabase) -> bool {
-        db.function_data(self.id).has_const_kw()
+        db.function_data(self.id).is_const()
     }
 
     pub fn is_async(self, db: &dyn HirDatabase) -> bool {
-        db.function_data(self.id).has_async_kw()
+        db.function_data(self.id).is_async()
     }
 
     /// Does this function have `#[test]` attribute?
diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index 29f9897..a377163 100644
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -28,7 +28,7 @@
 use itertools::Itertools;
 use rustc_hash::{FxHashMap, FxHashSet};
 use smallvec::{smallvec, SmallVec};
-use span::{EditionedFileId, FileId, Span, SyntaxContextId, ROOT_ERASED_FILE_AST_ID};
+use span::{EditionedFileId, FileId};
 use stdx::TupleExt;
 use syntax::{
     algo::skip_trivia_token,
@@ -757,81 +757,9 @@
         res
     }
 
-    // return:
-    // SourceAnalyzer(file_id that original call include!)
-    // macro file id
-    // token in include! macro mapped from token in params
-    // span for the mapped token
-    fn is_from_include_file(
-        &self,
-        token: SyntaxToken,
-    ) -> Option<(SourceAnalyzer, HirFileId, SyntaxToken, Span)> {
-        let parent = token.parent()?;
-        let file_id = self.find_file(&parent).file_id.file_id()?;
-
-        // iterate related crates and find all include! invocations that include_file_id matches
-        for (invoc, _) in self
-            .db
-            .relevant_crates(file_id.file_id())
-            .iter()
-            .flat_map(|krate| self.db.include_macro_invoc(*krate))
-            .filter(|&(_, include_file_id)| include_file_id == file_id)
-        {
-            let macro_file = invoc.as_macro_file();
-            let expansion_info = {
-                self.with_ctx(|ctx| {
-                    ctx.cache
-                        .expansion_info_cache
-                        .entry(macro_file)
-                        .or_insert_with(|| {
-                            let exp_info = macro_file.expansion_info(self.db.upcast());
-
-                            let InMacroFile { file_id, value } = exp_info.expanded();
-                            if let InFile { file_id, value: Some(value) } = exp_info.arg() {
-                                self.cache(value.ancestors().last().unwrap(), file_id);
-                            }
-                            self.cache(value, file_id.into());
-
-                            exp_info
-                        })
-                        .clone()
-                })
-            };
-
-            // FIXME: uncached parse
-            // Create the source analyzer for the macro call scope
-            let Some(sa) = expansion_info
-                .arg()
-                .value
-                .and_then(|it| self.analyze_no_infer(&it.ancestors().last().unwrap()))
-            else {
-                continue;
-            };
-
-            // get mapped token in the include! macro file
-            let span = span::Span {
-                range: token.text_range(),
-                anchor: span::SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID },
-                ctx: SyntaxContextId::ROOT,
-            };
-            let Some(InMacroFile { file_id, value: mut mapped_tokens }) =
-                expansion_info.map_range_down_exact(span)
-            else {
-                continue;
-            };
-
-            // if we find one, then return
-            if let Some(t) = mapped_tokens.next() {
-                return Some((sa, file_id.into(), t, span));
-            }
-        }
-
-        None
-    }
-
     fn descend_into_macros_impl(
         &self,
-        mut token: SyntaxToken,
+        token: SyntaxToken,
         f: &mut dyn FnMut(InFile<SyntaxToken>) -> ControlFlow<()>,
     ) {
         let _p = tracing::info_span!("descend_into_macros_impl").entered();
@@ -848,17 +776,7 @@
                         return;
                     }
                 },
-                None => {
-                    // if we cannot find a source analyzer for this token, then we try to find out
-                    // whether this file is an included file and treat that as the include input
-                    let Some((it, macro_file_id, mapped_token, s)) =
-                        self.is_from_include_file(token)
-                    else {
-                        return;
-                    };
-                    token = mapped_token;
-                    (it, s, macro_file_id)
-                }
+                None => return,
             };
 
         let mut m_cache = self.macro_call_cache.borrow_mut();
diff --git a/crates/hir/src/semantics/source_to_def.rs b/crates/hir/src/semantics/source_to_def.rs
index 1376ddd..09df639 100644
--- a/crates/hir/src/semantics/source_to_def.rs
+++ b/crates/hir/src/semantics/source_to_def.rs
@@ -94,8 +94,9 @@
     },
     hir::{BindingId, LabelId},
     AdtId, BlockId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId, ExternCrateId,
-    FieldId, FunctionId, GenericDefId, GenericParamId, ImplId, LifetimeParamId, MacroId, ModuleId,
-    StaticId, StructId, TraitAliasId, TraitId, TypeAliasId, TypeParamId, UnionId, UseId, VariantId,
+    FieldId, FunctionId, GenericDefId, GenericParamId, ImplId, LifetimeParamId, Lookup, MacroId,
+    ModuleId, StaticId, StructId, TraitAliasId, TraitId, TypeAliasId, TypeParamId, UnionId, UseId,
+    VariantId,
 };
 use hir_expand::{
     attrs::AttrId, name::AsName, ExpansionInfo, HirFileId, HirFileIdExt, MacroCallId,
@@ -131,11 +132,30 @@
             for &crate_id in self.db.relevant_crates(file).iter() {
                 // Note: `mod` declarations in block modules cannot be supported here
                 let crate_def_map = self.db.crate_def_map(crate_id);
-                mods.extend(
+                let n_mods = mods.len();
+                let modules = |file| {
                     crate_def_map
                         .modules_for_file(file)
-                        .map(|local_id| crate_def_map.module_id(local_id)),
-                )
+                        .map(|local_id| crate_def_map.module_id(local_id))
+                };
+                mods.extend(modules(file));
+                if mods.len() == n_mods {
+                    mods.extend(
+                        self.db
+                            .include_macro_invoc(crate_id)
+                            .iter()
+                            .filter(|&&(_, file_id)| file_id == file)
+                            .flat_map(|(call, _)| {
+                                modules(
+                                    call.lookup(self.db.upcast())
+                                        .kind
+                                        .file_id()
+                                        .original_file(self.db.upcast())
+                                        .file_id(),
+                                )
+                            }),
+                    );
+                }
             }
             if mods.is_empty() {
                 // FIXME: detached file
diff --git a/crates/ide-assists/Cargo.toml b/crates/ide-assists/Cargo.toml
index b1e7609..df52562 100644
--- a/crates/ide-assists/Cargo.toml
+++ b/crates/ide-assists/Cargo.toml
@@ -1,7 +1,8 @@
 [package]
 name = "ide-assists"
 version = "0.0.0"
-description = "TBD"
+repository.workspace = true
+description = "Code assists for rust-analyzer."
 
 authors.workspace = true
 edition.workspace = true
diff --git a/crates/ide-assists/src/handlers/add_missing_impl_members.rs b/crates/ide-assists/src/handlers/add_missing_impl_members.rs
index ebfbb83..4cd15f1 100644
--- a/crates/ide-assists/src/handlers/add_missing_impl_members.rs
+++ b/crates/ide-assists/src/handlers/add_missing_impl_members.rs
@@ -2280,4 +2280,29 @@
             "#,
         )
     }
+
+    #[test]
+    fn impl_with_type_param_with_former_param_as_default() {
+        check_assist(
+            add_missing_impl_members,
+            r#"
+pub trait Test<'a, T, U = T> {
+    fn test(item: &'a T) -> U;
+}
+impl<'a> Test<'a, i32> for bool {
+    $0
+}
+"#,
+            r#"
+pub trait Test<'a, T, U = T> {
+    fn test(item: &'a T) -> U;
+}
+impl<'a> Test<'a, i32> for bool {
+    fn test(item: &'a i32) -> i32 {
+        ${0:todo!()}
+    }
+}
+"#,
+        );
+    }
 }
diff --git a/crates/ide-assists/src/handlers/bind_unused_param.rs b/crates/ide-assists/src/handlers/bind_unused_param.rs
index 839ffa2..8f053f4 100644
--- a/crates/ide-assists/src/handlers/bind_unused_param.rs
+++ b/crates/ide-assists/src/handlers/bind_unused_param.rs
@@ -43,7 +43,7 @@
 
     acc.add(
         AssistId("bind_unused_param", AssistKind::QuickFix),
-        &format!("Bind as `let _ = {ident_pat};`"),
+        format!("Bind as `let _ = {ident_pat};`"),
         param.syntax().text_range(),
         |builder| {
             let line_index = ctx.db().line_index(ctx.file_id().into());
diff --git a/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs b/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs
index 44f31dc..c72bd41 100644
--- a/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs
+++ b/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs
@@ -2,7 +2,7 @@
 use ide_db::defs::{Definition, NameRefClass};
 use syntax::{
     ast::{self, AstNode, HasGenericParams, HasVisibility},
-    match_ast, SyntaxNode,
+    match_ast, SyntaxKind, SyntaxNode,
 };
 
 use crate::{assist_context::SourceChangeBuilder, AssistContext, AssistId, AssistKind, Assists};
@@ -100,7 +100,9 @@
                 ast::make::tokens::single_newline().text(),
             );
             edit.insert(tuple_fields_text_range.start(), w.syntax().text());
-            edit.insert(tuple_fields_text_range.start(), ",");
+            if !w.syntax().last_token().is_some_and(|t| t.kind() == SyntaxKind::COMMA) {
+                edit.insert(tuple_fields_text_range.start(), ",");
+            }
             edit.insert(
                 tuple_fields_text_range.start(),
                 ast::make::tokens::single_newline().text(),
@@ -882,4 +884,27 @@
 "#,
         );
     }
+
+    #[test]
+    fn where_clause_with_trailing_comma() {
+        check_assist(
+            convert_tuple_struct_to_named_struct,
+            r#"
+trait Foo {}
+
+struct Bar$0<T>(pub T)
+where
+    T: Foo,;
+"#,
+            r#"
+trait Foo {}
+
+struct Bar<T>
+where
+    T: Foo,
+{ pub field1: T }
+
+"#,
+        );
+    }
 }
diff --git a/crates/ide-assists/src/tests.rs b/crates/ide-assists/src/tests.rs
index e42be63..6469957 100644
--- a/crates/ide-assists/src/tests.rs
+++ b/crates/ide-assists/src/tests.rs
@@ -3,7 +3,7 @@
 use expect_test::expect;
 use hir::{FileRange, Semantics};
 use ide_db::{
-    base_db::SourceDatabaseExt,
+    base_db::{SourceDatabase, SourceRootDatabase},
     imports::insert_use::{ImportGranularity, InsertUseConfig},
     source_change::FileSystemEdit,
     EditionedFileId, RootDatabase, SnippetCap,
diff --git a/crates/ide-completion/Cargo.toml b/crates/ide-completion/Cargo.toml
index 6a4c70d4..035b2fc 100644
--- a/crates/ide-completion/Cargo.toml
+++ b/crates/ide-completion/Cargo.toml
@@ -1,7 +1,8 @@
 [package]
 name = "ide-completion"
 version = "0.0.0"
-description = "TBD"
+repository.workspace = true
+description = "Utilities for generating completions of user input for rust-analyzer."
 
 authors.workspace = true
 edition.workspace = true
diff --git a/crates/ide-completion/src/completions.rs b/crates/ide-completion/src/completions.rs
index 414b096..58e9b72 100644
--- a/crates/ide-completion/src/completions.rs
+++ b/crates/ide-completion/src/completions.rs
@@ -24,7 +24,7 @@
 
 use std::iter;
 
-use hir::{sym, HasAttrs, ImportPathConfig, Name, ScopeDef, Variant};
+use hir::{sym, HasAttrs, Name, ScopeDef, Variant};
 use ide_db::{imports::import_assets::LocatedImport, RootDatabase, SymbolKind};
 use syntax::{ast, SmolStr, ToSmolStr};
 
@@ -645,11 +645,7 @@
         if let Some(path) = ctx.module.find_path(
             ctx.db,
             hir::ModuleDef::from(variant),
-            ImportPathConfig {
-                prefer_no_std: ctx.config.prefer_no_std,
-                prefer_prelude: ctx.config.prefer_prelude,
-                prefer_absolute: ctx.config.prefer_absolute,
-            },
+            ctx.config.import_path_config(),
         ) {
             // Variants with trivial paths are already added by the existing completion logic,
             // so we should avoid adding these twice
diff --git a/crates/ide-completion/src/completions/expr.rs b/crates/ide-completion/src/completions/expr.rs
index 71ff6b5..ff2c8da 100644
--- a/crates/ide-completion/src/completions/expr.rs
+++ b/crates/ide-completion/src/completions/expr.rs
@@ -1,6 +1,6 @@
 //! Completion of names from the current scope in expression position.
 
-use hir::{sym, ImportPathConfig, Name, ScopeDef};
+use hir::{sym, Name, ScopeDef};
 use syntax::ast;
 
 use crate::{
@@ -174,11 +174,7 @@
                             .find_path(
                                 ctx.db,
                                 hir::ModuleDef::from(strukt),
-                                ImportPathConfig {
-                                    prefer_no_std: ctx.config.prefer_no_std,
-                                    prefer_prelude: ctx.config.prefer_prelude,
-                                    prefer_absolute: ctx.config.prefer_absolute,
-                                },
+                                ctx.config.import_path_config(),
                             )
                             .filter(|it| it.len() > 1);
 
@@ -200,11 +196,7 @@
                             .find_path(
                                 ctx.db,
                                 hir::ModuleDef::from(un),
-                                ImportPathConfig {
-                                    prefer_no_std: ctx.config.prefer_no_std,
-                                    prefer_prelude: ctx.config.prefer_prelude,
-                                    prefer_absolute: ctx.config.prefer_absolute,
-                                },
+                                ctx.config.import_path_config(),
                             )
                             .filter(|it| it.len() > 1);
 
diff --git a/crates/ide-completion/src/completions/flyimport.rs b/crates/ide-completion/src/completions/flyimport.rs
index e803072..fdce7c5 100644
--- a/crates/ide-completion/src/completions/flyimport.rs
+++ b/crates/ide-completion/src/completions/flyimport.rs
@@ -1,5 +1,5 @@
 //! See [`import_on_the_fly`].
-use hir::{ImportPathConfig, ItemInNs, ModuleDef};
+use hir::{ItemInNs, ModuleDef};
 use ide_db::imports::{
     import_assets::{ImportAssets, LocatedImport},
     insert_use::ImportScope,
@@ -256,11 +256,7 @@
     };
     let user_input_lowercased = potential_import_name.to_lowercase();
 
-    let import_cfg = ImportPathConfig {
-        prefer_no_std: ctx.config.prefer_no_std,
-        prefer_prelude: ctx.config.prefer_prelude,
-        prefer_absolute: ctx.config.prefer_absolute,
-    };
+    let import_cfg = ctx.config.import_path_config();
 
     import_assets
         .search_for_imports(&ctx.sema, import_cfg, ctx.config.insert_use.prefix_kind)
@@ -306,12 +302,7 @@
         ItemInNs::Values(def) => matches!(def, hir::ModuleDef::Const(_)),
     };
     let user_input_lowercased = potential_import_name.to_lowercase();
-
-    let cfg = ImportPathConfig {
-        prefer_no_std: ctx.config.prefer_no_std,
-        prefer_prelude: ctx.config.prefer_prelude,
-        prefer_absolute: ctx.config.prefer_absolute,
-    };
+    let cfg = ctx.config.import_path_config();
 
     import_assets
         .search_for_imports(&ctx.sema, cfg, ctx.config.insert_use.prefix_kind)
@@ -353,11 +344,7 @@
 
     let user_input_lowercased = potential_import_name.to_lowercase();
 
-    let cfg = ImportPathConfig {
-        prefer_no_std: ctx.config.prefer_no_std,
-        prefer_prelude: ctx.config.prefer_prelude,
-        prefer_absolute: ctx.config.prefer_absolute,
-    };
+    let cfg = ctx.config.import_path_config();
 
     import_assets
         .search_for_imports(&ctx.sema, cfg, ctx.config.insert_use.prefix_kind)
diff --git a/crates/ide-completion/src/completions/mod_.rs b/crates/ide-completion/src/completions/mod_.rs
index 713968c..d9a1089 100644
--- a/crates/ide-completion/src/completions/mod_.rs
+++ b/crates/ide-completion/src/completions/mod_.rs
@@ -4,7 +4,7 @@
 
 use hir::{HirFileIdExt, Module};
 use ide_db::{
-    base_db::{SourceDatabaseExt, VfsPath},
+    base_db::{SourceRootDatabase, VfsPath},
     FxHashSet, RootDatabase, SymbolKind,
 };
 use stdx::IsNoneOr;
diff --git a/crates/ide-completion/src/completions/postfix.rs b/crates/ide-completion/src/completions/postfix.rs
index d919609..977e0d8 100644
--- a/crates/ide-completion/src/completions/postfix.rs
+++ b/crates/ide-completion/src/completions/postfix.rs
@@ -2,7 +2,7 @@
 
 mod format_like;
 
-use hir::{ImportPathConfig, ItemInNs};
+use hir::ItemInNs;
 use ide_db::{
     documentation::{Documentation, HasDocs},
     imports::insert_use::ImportScope,
@@ -60,11 +60,7 @@
         None => return,
     };
 
-    let cfg = ImportPathConfig {
-        prefer_no_std: ctx.config.prefer_no_std,
-        prefer_prelude: ctx.config.prefer_prelude,
-        prefer_absolute: ctx.config.prefer_absolute,
-    };
+    let cfg = ctx.config.import_path_config();
 
     if let Some(drop_trait) = ctx.famous_defs().core_ops_Drop() {
         if receiver_ty.impls_trait(ctx.db, drop_trait, &[]) {
diff --git a/crates/ide-completion/src/config.rs b/crates/ide-completion/src/config.rs
index 7d062cb..d885b82 100644
--- a/crates/ide-completion/src/config.rs
+++ b/crates/ide-completion/src/config.rs
@@ -4,6 +4,7 @@
 //! module, and we use to statically check that we only produce snippet
 //! completions if we are allowed to.
 
+use hir::ImportPathConfig;
 use ide_db::{imports::insert_use::InsertUseConfig, SnippetCap};
 
 use crate::snippet::Snippet;
@@ -45,4 +46,12 @@
             .iter()
             .flat_map(|snip| snip.prefix_triggers.iter().map(move |trigger| (&**trigger, snip)))
     }
+
+    pub fn import_path_config(&self) -> ImportPathConfig {
+        ImportPathConfig {
+            prefer_no_std: self.prefer_no_std,
+            prefer_prelude: self.prefer_prelude,
+            prefer_absolute: self.prefer_absolute,
+        }
+    }
 }
diff --git a/crates/ide-completion/src/lib.rs b/crates/ide-completion/src/lib.rs
index 424f944..90c1728 100644
--- a/crates/ide-completion/src/lib.rs
+++ b/crates/ide-completion/src/lib.rs
@@ -10,7 +10,6 @@
 #[cfg(test)]
 mod tests;
 
-use hir::ImportPathConfig;
 use ide_db::{
     helpers::mod_path_to_ast,
     imports::{
@@ -249,11 +248,7 @@
     let new_ast = scope.clone_for_update();
     let mut import_insert = TextEdit::builder();
 
-    let cfg = ImportPathConfig {
-        prefer_no_std: config.prefer_no_std,
-        prefer_prelude: config.prefer_prelude,
-        prefer_absolute: config.prefer_absolute,
-    };
+    let cfg = config.import_path_config();
 
     imports.into_iter().for_each(|(full_import_path, imported_name)| {
         let items_with_name = items_locator::items_with_name(
diff --git a/crates/ide-completion/src/render.rs b/crates/ide-completion/src/render.rs
index abcff62..02d667c 100644
--- a/crates/ide-completion/src/render.rs
+++ b/crates/ide-completion/src/render.rs
@@ -10,7 +10,7 @@
 pub(crate) mod union_literal;
 pub(crate) mod variant;
 
-use hir::{sym, AsAssocItem, HasAttrs, HirDisplay, ImportPathConfig, ModuleDef, ScopeDef, Type};
+use hir::{sym, AsAssocItem, HasAttrs, HirDisplay, ModuleDef, ScopeDef, Type};
 use ide_db::{
     documentation::{Documentation, HasDocs},
     helpers::item_name,
@@ -294,11 +294,7 @@
             .unwrap_or_else(|| String::from("..."))
     };
 
-    let cfg = ImportPathConfig {
-        prefer_no_std: ctx.config.prefer_no_std,
-        prefer_prelude: ctx.config.prefer_prelude,
-        prefer_absolute: ctx.config.prefer_absolute,
-    };
+    let cfg = ctx.config.import_path_config();
 
     let label = expr.gen_source_code(&ctx.scope, &mut label_formatter, cfg).ok()?;
 
diff --git a/crates/ide-completion/src/snippet.rs b/crates/ide-completion/src/snippet.rs
index 1eb8c57..5265aa8 100644
--- a/crates/ide-completion/src/snippet.rs
+++ b/crates/ide-completion/src/snippet.rs
@@ -100,7 +100,6 @@
 // }
 // ----
 
-use hir::ImportPathConfig;
 use ide_db::imports::import_assets::LocatedImport;
 use itertools::Itertools;
 use syntax::{ast, AstNode, GreenNode, SyntaxNode};
@@ -169,11 +168,7 @@
 }
 
 fn import_edits(ctx: &CompletionContext<'_>, requires: &[GreenNode]) -> Option<Vec<LocatedImport>> {
-    let import_cfg = ImportPathConfig {
-        prefer_no_std: ctx.config.prefer_no_std,
-        prefer_prelude: ctx.config.prefer_prelude,
-        prefer_absolute: ctx.config.prefer_absolute,
-    };
+    let import_cfg = ctx.config.import_path_config();
 
     let resolve = |import: &GreenNode| {
         let path = ast::Path::cast(SyntaxNode::new_root(import.clone()))?;
diff --git a/crates/ide-completion/src/tests.rs b/crates/ide-completion/src/tests.rs
index f6274cf..415f2af 100644
--- a/crates/ide-completion/src/tests.rs
+++ b/crates/ide-completion/src/tests.rs
@@ -23,10 +23,10 @@
 mod use_tree;
 mod visibility;
 
+use base_db::SourceDatabase;
 use expect_test::Expect;
 use hir::PrefixKind;
 use ide_db::{
-    base_db::FileLoader,
     imports::insert_use::{ImportGranularity, InsertUseConfig},
     FilePosition, RootDatabase, SnippetCap,
 };
diff --git a/crates/ide-db/Cargo.toml b/crates/ide-db/Cargo.toml
index 9a6826a..8f3cae2 100644
--- a/crates/ide-db/Cargo.toml
+++ b/crates/ide-db/Cargo.toml
@@ -1,7 +1,8 @@
 [package]
 name = "ide-db"
 version = "0.0.0"
-description = "TBD"
+repository.workspace = true
+description = "Core data structure representing IDE state for rust-analyzer."
 
 authors.workspace = true
 edition.workspace = true
@@ -13,7 +14,7 @@
 
 [dependencies]
 cov-mark = "2.0.0-pre.1"
-crossbeam-channel = "0.5.5"
+crossbeam-channel.workspace = true
 tracing.workspace = true
 rayon.workspace = true
 fst = { version = "0.4.7", default-features = false }
diff --git a/crates/ide-db/src/helpers.rs b/crates/ide-db/src/helpers.rs
index f6a7819..e6638dd 100644
--- a/crates/ide-db/src/helpers.rs
+++ b/crates/ide-db/src/helpers.rs
@@ -2,7 +2,7 @@
 
 use std::collections::VecDeque;
 
-use base_db::SourceDatabaseExt;
+use base_db::SourceRootDatabase;
 use hir::{Crate, DescendPreference, ItemInNs, ModuleDef, Name, Semantics};
 use span::FileId;
 use syntax::{
diff --git a/crates/ide-db/src/lib.rs b/crates/ide-db/src/lib.rs
index 4c52ba3..3cf2998 100644
--- a/crates/ide-db/src/lib.rs
+++ b/crates/ide-db/src/lib.rs
@@ -1,4 +1,4 @@
-//! This crate defines the core datastructure representing IDE state -- `RootDatabase`.
+//! This crate defines the core data structure representing IDE state -- `RootDatabase`.
 //!
 //! It is mainly a `HirDatabase` for semantic analysis, plus a `SymbolsDatabase`, for fuzzy search.
 
@@ -74,7 +74,7 @@
 pub type FileRange = FileRangeWrapper<FileId>;
 
 #[salsa::database(
-    base_db::SourceDatabaseExtStorage,
+    base_db::SourceRootDatabaseStorage,
     base_db::SourceDatabaseStorage,
     hir::db::ExpandDatabaseStorage,
     hir::db::DefDatabaseStorage,
@@ -125,9 +125,6 @@
 }
 
 impl FileLoader for RootDatabase {
-    fn file_text(&self, file_id: FileId) -> Arc<str> {
-        FileLoaderDelegate(self).file_text(file_id)
-    }
     fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
         FileLoaderDelegate(self).resolve_path(path)
     }
diff --git a/crates/ide-db/src/path_transform.rs b/crates/ide-db/src/path_transform.rs
index 84a388a..0afa916 100644
--- a/crates/ide-db/src/path_transform.rs
+++ b/crates/ide-db/src/path_transform.rs
@@ -7,7 +7,7 @@
 use rustc_hash::FxHashMap;
 use syntax::{
     ast::{self, make, AstNode, HasGenericArgs},
-    ted, SyntaxNode,
+    ted, NodeOrToken, SyntaxNode,
 };
 
 #[derive(Default)]
@@ -328,10 +328,26 @@
                         let qualified = make::path_from_segments(std::iter::once(segment), false);
                         ted::replace(path.syntax(), qualified.clone_for_update().syntax());
                     } else if let Some(path_ty) = ast::PathType::cast(parent) {
-                        ted::replace(
-                            path_ty.syntax(),
-                            subst.clone_subtree().clone_for_update().syntax(),
-                        );
+                        let old = path_ty.syntax();
+
+                        if old.parent().is_some() {
+                            ted::replace(old, subst.clone_subtree().clone_for_update().syntax());
+                        } else {
+                            // Some `path_ty` has no parent, especially ones made for default value
+                            // of type parameters.
+                            // In this case, `ted` cannot replace `path_ty` with `subst` directly.
+                            // So, just replace its children as long as the `subst` is the same type.
+                            let new = subst.clone_subtree().clone_for_update();
+                            if !matches!(new, ast::Type::PathType(..)) {
+                                return None;
+                            }
+                            let start = path_ty.syntax().first_child().map(NodeOrToken::Node)?;
+                            let end = path_ty.syntax().last_child().map(NodeOrToken::Node)?;
+                            ted::replace_all(
+                                start..=end,
+                                new.syntax().children().map(NodeOrToken::Node).collect::<Vec<_>>(),
+                            );
+                        }
                     } else {
                         ted::replace(
                             path.syntax(),
diff --git a/crates/ide-db/src/prime_caches.rs b/crates/ide-db/src/prime_caches.rs
index 62104fb..bb121f4 100644
--- a/crates/ide-db/src/prime_caches.rs
+++ b/crates/ide-db/src/prime_caches.rs
@@ -11,7 +11,7 @@
 use crate::{
     base_db::{
         salsa::{Database, ParallelDatabase, Snapshot},
-        Cancelled, CrateId, SourceDatabase, SourceDatabaseExt,
+        Cancelled, CrateId, SourceDatabase, SourceRootDatabase,
     },
     FxIndexMap, RootDatabase,
 };
diff --git a/crates/ide-db/src/search.rs b/crates/ide-db/src/search.rs
index 05b32e2..9e01a6d 100644
--- a/crates/ide-db/src/search.rs
+++ b/crates/ide-db/src/search.rs
@@ -6,7 +6,7 @@
 
 use std::mem;
 
-use base_db::{salsa::Database, SourceDatabase, SourceDatabaseExt};
+use base_db::{salsa::Database, SourceDatabase, SourceRootDatabase};
 use hir::{
     sym, AsAssocItem, DefWithBody, DescendPreference, FileRange, HasAttrs, HasSource, HirFileIdExt,
     InFile, InRealFile, ModuleSource, PathResolution, Semantics, Visibility,
@@ -663,9 +663,16 @@
         name_ref: &ast::NameRef,
         sink: &mut dyn FnMut(EditionedFileId, FileReference) -> bool,
     ) -> bool {
+        // See https://github.com/rust-lang/rust-analyzer/pull/15864/files/e0276dc5ddc38c65240edb408522bb869f15afb4#r1389848845
+        let ty_eq = |ty: hir::Type| match (ty.as_adt(), self_ty.as_adt()) {
+            (Some(ty), Some(self_ty)) => ty == self_ty,
+            (None, None) => ty == *self_ty,
+            _ => false,
+        };
+
         match NameRefClass::classify(self.sema, name_ref) {
             Some(NameRefClass::Definition(Definition::SelfType(impl_)))
-                if impl_.self_ty(self.sema.db).as_adt() == self_ty.as_adt() =>
+                if ty_eq(impl_.self_ty(self.sema.db)) =>
             {
                 let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax());
                 let reference = FileReference {
diff --git a/crates/ide-db/src/symbol_index.rs b/crates/ide-db/src/symbol_index.rs
index c70aed4..209b147 100644
--- a/crates/ide-db/src/symbol_index.rs
+++ b/crates/ide-db/src/symbol_index.rs
@@ -29,7 +29,7 @@
 
 use base_db::{
     salsa::{self, ParallelDatabase},
-    SourceDatabaseExt, SourceRootId, Upcast,
+    SourceRootDatabase, SourceRootId, Upcast,
 };
 use fst::{raw::IndexedValue, Automaton, Streamer};
 use hir::{
@@ -100,7 +100,7 @@
 }
 
 #[salsa::query_group(SymbolsDatabaseStorage)]
-pub trait SymbolsDatabase: HirDatabase + SourceDatabaseExt + Upcast<dyn HirDatabase> {
+pub trait SymbolsDatabase: HirDatabase + SourceRootDatabase + Upcast<dyn HirDatabase> {
     /// The symbol index for a given module. These modules should only be in source roots that
     /// are inside local_roots.
     fn module_symbols(&self, module: Module) -> Arc<SymbolIndex>;
diff --git a/crates/ide-diagnostics/Cargo.toml b/crates/ide-diagnostics/Cargo.toml
index edd0500..9c3a279 100644
--- a/crates/ide-diagnostics/Cargo.toml
+++ b/crates/ide-diagnostics/Cargo.toml
@@ -1,7 +1,8 @@
 [package]
 name = "ide-diagnostics"
 version = "0.0.0"
-description = "TBD"
+repository.workspace = true
+description = "Diagnostics rendering and fixits for rust-analyzer."
 
 authors.workspace = true
 edition.workspace = true
diff --git a/crates/ide-diagnostics/src/handlers/await_outside_of_async.rs b/crates/ide-diagnostics/src/handlers/await_outside_of_async.rs
new file mode 100644
index 0000000..92b6e74
--- /dev/null
+++ b/crates/ide-diagnostics/src/handlers/await_outside_of_async.rs
@@ -0,0 +1,101 @@
+use crate::{adjusted_display_range, Diagnostic, DiagnosticsContext};
+
+// Diagnostic: await-outside-of-async
+//
+// This diagnostic is triggered if the `await` keyword is used outside of an async function or block
+pub(crate) fn await_outside_of_async(
+    ctx: &DiagnosticsContext<'_>,
+    d: &hir::AwaitOutsideOfAsync,
+) -> Diagnostic {
+    let display_range =
+        adjusted_display_range(ctx, d.node, &|node| Some(node.await_token()?.text_range()));
+    Diagnostic::new(
+        crate::DiagnosticCode::RustcHardError("E0728"),
+        format!("`await` is used inside {}, which is not an `async` context", d.location),
+        display_range,
+    )
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::tests::check_diagnostics;
+
+    #[test]
+    fn await_inside_non_async_fn() {
+        check_diagnostics(
+            r#"
+async fn foo() {}
+
+fn bar() {
+    foo().await;
+        //^^^^^ error: `await` is used inside non-async function, which is not an `async` context
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn await_inside_async_fn() {
+        check_diagnostics(
+            r#"
+async fn foo() {}
+
+async fn bar() {
+    foo().await;
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn await_inside_closure() {
+        check_diagnostics(
+            r#"
+async fn foo() {}
+
+async fn bar() {
+    let _a = || { foo().await };
+                      //^^^^^ error: `await` is used inside non-async closure, which is not an `async` context
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn await_inside_async_block() {
+        check_diagnostics(
+            r#"
+async fn foo() {}
+
+fn bar() {
+    let _a = async { foo().await };
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn await_in_complex_context() {
+        check_diagnostics(
+            r#"
+async fn foo() {}
+
+fn bar() {
+    async fn baz() {
+        let a = foo().await;
+    }
+
+    let x = || {
+        let y = async {
+            baz().await;
+            let z = || {
+                baz().await;
+                    //^^^^^ error: `await` is used inside non-async closure, which is not an `async` context
+            };
+        };
+    };
+}
+"#,
+        );
+    }
+}
diff --git a/crates/ide-diagnostics/src/handlers/inactive_code.rs b/crates/ide-diagnostics/src/handlers/inactive_code.rs
index acff811..1f8f805 100644
--- a/crates/ide-diagnostics/src/handlers/inactive_code.rs
+++ b/crates/ide-diagnostics/src/handlers/inactive_code.rs
@@ -42,7 +42,10 @@
     use crate::{tests::check_diagnostics_with_config, DiagnosticsConfig};
 
     pub(crate) fn check(ra_fixture: &str) {
-        let config = DiagnosticsConfig::test_sample();
+        let config = DiagnosticsConfig {
+            disabled: std::iter::once("unlinked-file".to_owned()).collect(),
+            ..DiagnosticsConfig::test_sample()
+        };
         check_diagnostics_with_config(config, ra_fixture)
     }
 
@@ -171,4 +174,25 @@
 "#,
         );
     }
+
+    #[test]
+    fn modules() {
+        check(
+            r#"
+//- /main.rs
+  #[cfg(outline)] mod outline;
+//^^^^^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: outline is disabled
+
+  mod outline_inner;
+//^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: outline_inner is disabled
+
+  #[cfg(inline)] mod inline {}
+//^^^^^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: inline is disabled
+
+//- /outline_inner.rs
+#![cfg(outline_inner)]
+//- /outline.rs
+"#,
+        );
+    }
 }
diff --git a/crates/ide-diagnostics/src/handlers/macro_error.rs b/crates/ide-diagnostics/src/handlers/macro_error.rs
index e59b63f..6a97669 100644
--- a/crates/ide-diagnostics/src/handlers/macro_error.rs
+++ b/crates/ide-diagnostics/src/handlers/macro_error.rs
@@ -273,11 +273,7 @@
 
     #[test]
     fn include_does_not_break_diagnostics() {
-        let mut config = DiagnosticsConfig::test_sample();
-        config.disabled.insert("inactive-code".to_owned());
-        config.disabled.insert("unlinked-file".to_owned());
-        check_diagnostics_with_config(
-            config,
+        check_diagnostics(
             r#"
 //- minicore: include
 //- /lib.rs crate:lib
diff --git a/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs b/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs
index 56ec45c..7126617 100644
--- a/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs
+++ b/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs
@@ -472,4 +472,18 @@
 "#,
         )
     }
+
+    #[test]
+    fn no_type_mismatches_when_arg_count_mismatch() {
+        check_diagnostics(
+            r#"
+fn foo((): (), (): ()) {
+    foo(1, 2, 3);
+           // ^^ error: expected 2 arguments, found 3
+    foo(1);
+      // ^ error: expected 2 arguments, found 1
+}
+"#,
+        );
+    }
 }
diff --git a/crates/ide-diagnostics/src/handlers/missing_match_arms.rs b/crates/ide-diagnostics/src/handlers/missing_match_arms.rs
index 9729627..f39738f 100644
--- a/crates/ide-diagnostics/src/handlers/missing_match_arms.rs
+++ b/crates/ide-diagnostics/src/handlers/missing_match_arms.rs
@@ -1032,6 +1032,44 @@
         check_diagnostics_no_bails(&code);
     }
 
+    #[test]
+    fn min_exhaustive() {
+        check_diagnostics(
+            r#"
+//- minicore: result
+fn test(x: Result<i32, !>) {
+    match x {
+        Ok(_y) => {}
+    }
+}
+"#,
+        );
+        check_diagnostics(
+            r#"
+//- minicore: result
+fn test(ptr: *const Result<i32, !>) {
+    unsafe {
+        match *ptr {
+            //^^^^ error: missing match arm: `Err(!)` not covered
+            Ok(_x) => {}
+        }
+    }
+}
+"#,
+        );
+        check_diagnostics(
+            r#"
+//- minicore: result
+fn test(x: Result<i32, &'static !>) {
+    match x {
+        //^ error: missing match arm: `Err(_)` not covered
+        Ok(_y) => {}
+    }
+}
+"#,
+        );
+    }
+
     mod rust_unstable {
         use super::*;
 
diff --git a/crates/ide-diagnostics/src/handlers/missing_unsafe.rs b/crates/ide-diagnostics/src/handlers/missing_unsafe.rs
index 30dd26a..af8ac60 100644
--- a/crates/ide-diagnostics/src/handlers/missing_unsafe.rs
+++ b/crates/ide-diagnostics/src/handlers/missing_unsafe.rs
@@ -487,4 +487,28 @@
             "#,
         )
     }
+
+    #[test]
+    fn rustc_deprecated_safe_2024() {
+        check_diagnostics(
+            r#"
+//- /ed2021.rs crate:ed2021 edition:2021
+#[rustc_deprecated_safe_2024]
+unsafe fn safe() -> u8 {
+    0
+}
+//- /ed2024.rs crate:ed2024 edition:2024
+#[rustc_deprecated_safe_2024]
+unsafe fn not_safe() -> u8 {
+    0
+}
+//- /main.rs crate:main deps:ed2021,ed2024
+fn main() {
+    ed2021::safe();
+    ed2024::not_safe();
+  //^^^^^^^^^^^^^^^^^^💡 error: this operation is unsafe and requires an unsafe function or block
+}
+            "#,
+        )
+    }
 }
diff --git a/crates/ide-diagnostics/src/handlers/non_exhaustive_let.rs b/crates/ide-diagnostics/src/handlers/non_exhaustive_let.rs
index 1a4d287..ff1eeb0 100644
--- a/crates/ide-diagnostics/src/handlers/non_exhaustive_let.rs
+++ b/crates/ide-diagnostics/src/handlers/non_exhaustive_let.rs
@@ -44,4 +44,65 @@
 "#,
         );
     }
+
+    #[test]
+    fn option_nonexhaustive_inside_blocks() {
+        check_diagnostics(
+            r#"
+//- minicore: option
+fn main() {
+    '_a: {
+        let None = Some(5);
+          //^^^^ error: non-exhaustive pattern: `Some(_)` not covered
+    }
+}
+"#,
+        );
+
+        check_diagnostics(
+            r#"
+//- minicore: future, option
+fn main() {
+    let _ = async {
+        let None = Some(5);
+          //^^^^ error: non-exhaustive pattern: `Some(_)` not covered
+    };
+}
+"#,
+        );
+
+        check_diagnostics(
+            r#"
+//- minicore: option
+fn main() {
+    unsafe {
+        let None = Some(5);
+          //^^^^ error: non-exhaustive pattern: `Some(_)` not covered
+    }
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn min_exhaustive() {
+        check_diagnostics(
+            r#"
+//- minicore: result
+fn test(x: Result<i32, !>) {
+    let Ok(_y) = x;
+}
+"#,
+        );
+
+        check_diagnostics(
+            r#"
+//- minicore: result
+fn test(x: Result<i32, &'static !>) {
+    let Ok(_y) = x;
+      //^^^^^^ error: non-exhaustive pattern: `Err(_)` not covered
+}
+"#,
+        );
+    }
 }
diff --git a/crates/ide-diagnostics/src/handlers/type_mismatch.rs b/crates/ide-diagnostics/src/handlers/type_mismatch.rs
index 4e52d28..6f5c68d 100644
--- a/crates/ide-diagnostics/src/handlers/type_mismatch.rs
+++ b/crates/ide-diagnostics/src/handlers/type_mismatch.rs
@@ -2,8 +2,12 @@
 use hir::{db::ExpandDatabase, ClosureStyle, HirDisplay, HirFileIdExt, InFile, Type};
 use ide_db::{famous_defs::FamousDefs, source_change::SourceChange};
 use syntax::{
-    ast::{self, BlockExpr, ExprStmt},
-    AstNode, AstPtr,
+    ast::{
+        self,
+        edit::{AstNodeEdit, IndentLevel},
+        BlockExpr, Expr, ExprStmt,
+    },
+    AstNode, AstPtr, TextSize,
 };
 use text_edit::TextEdit;
 
@@ -119,6 +123,38 @@
         return None;
     }
 
+    if d.actual.is_unit() {
+        if let Expr::BlockExpr(block) = &expr {
+            if block.tail_expr().is_none() {
+                let mut builder = TextEdit::builder();
+                let block_indent = block.indent_level();
+
+                if block.statements().count() == 0 {
+                    // Empty block
+                    let indent = block_indent + 1;
+                    builder.insert(
+                        block.syntax().text_range().start() + TextSize::from(1),
+                        format!("\n{indent}{variant_name}(())\n{block_indent}"),
+                    );
+                } else {
+                    let indent = IndentLevel::from(1);
+                    builder.insert(
+                        block.syntax().text_range().end() - TextSize::from(1),
+                        format!("{indent}{variant_name}(())\n{block_indent}"),
+                    );
+                }
+
+                let source_change = SourceChange::from_text_edit(
+                    expr_ptr.file_id.original_file(ctx.sema.db),
+                    builder.finish(),
+                );
+                let name = format!("Insert {variant_name}(()) as the tail of this block");
+                acc.push(fix("insert_wrapped_unit", &name, source_change, expr_range));
+            }
+            return Some(());
+        }
+    }
+
     let mut builder = TextEdit::builder();
     builder.insert(expr.syntax().text_range().start(), format!("{variant_name}("));
     builder.insert(expr.syntax().text_range().end(), ")".to_owned());
@@ -534,6 +570,36 @@
     }
 
     #[test]
+    fn test_wrapped_unit_as_block_tail_expr() {
+        check_fix(
+            r#"
+//- minicore: result
+fn foo() -> Result<(), ()> {
+    foo();
+}$0
+            "#,
+            r#"
+fn foo() -> Result<(), ()> {
+    foo();
+    Ok(())
+}
+            "#,
+        );
+
+        check_fix(
+            r#"
+//- minicore: result
+fn foo() -> Result<(), ()> {}$0
+            "#,
+            r#"
+fn foo() -> Result<(), ()> {
+    Ok(())
+}
+            "#,
+        );
+    }
+
+    #[test]
     fn test_in_const_and_static() {
         check_fix(
             r#"
diff --git a/crates/ide-diagnostics/src/handlers/unlinked_file.rs b/crates/ide-diagnostics/src/handlers/unlinked_file.rs
index 1b71a3a..a1573ba 100644
--- a/crates/ide-diagnostics/src/handlers/unlinked_file.rs
+++ b/crates/ide-diagnostics/src/handlers/unlinked_file.rs
@@ -4,7 +4,7 @@
 
 use hir::{db::DefDatabase, DefMap, InFile, ModuleSource};
 use ide_db::{
-    base_db::{FileLoader, SourceDatabaseExt},
+    base_db::{FileLoader, SourceDatabase, SourceRootDatabase},
     source_change::SourceChange,
     FileId, FileRange, LineIndexDatabase,
 };
@@ -47,7 +47,7 @@
         //
         // Only show this diagnostic on the first three characters of
         // the file, to avoid overwhelming the user during startup.
-        range = FileLoader::file_text(ctx.sema.db, file_id)
+        range = SourceDatabase::file_text(ctx.sema.db, file_id)
             .char_indices()
             .take(3)
             .last()
@@ -502,4 +502,16 @@
 "#,
         );
     }
+
+    #[test]
+    fn include_macro_works() {
+        check_diagnostics(
+            r#"
+//- minicore: include
+//- /main.rs
+include!("bar/foo/mod.rs");
+//- /bar/foo/mod.rs
+"#,
+        );
+    }
 }
diff --git a/crates/ide-diagnostics/src/lib.rs b/crates/ide-diagnostics/src/lib.rs
index 263ab74..a61c5f0 100644
--- a/crates/ide-diagnostics/src/lib.rs
+++ b/crates/ide-diagnostics/src/lib.rs
@@ -24,6 +24,7 @@
 //! don't yet have a great pattern for how to do them properly.
 
 mod handlers {
+    pub(crate) mod await_outside_of_async;
     pub(crate) mod break_outside_of_loop;
     pub(crate) mod expected_function;
     pub(crate) mod inactive_code;
@@ -96,6 +97,7 @@
 #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
 pub enum DiagnosticCode {
     RustcHardError(&'static str),
+    SyntaxError,
     RustcLint(&'static str),
     Clippy(&'static str),
     Ra(&'static str, Severity),
@@ -107,6 +109,9 @@
             DiagnosticCode::RustcHardError(e) => {
                 format!("https://doc.rust-lang.org/stable/error_codes/{e}.html")
             }
+            DiagnosticCode::SyntaxError => {
+                String::from("https://doc.rust-lang.org/stable/reference/")
+            }
             DiagnosticCode::RustcLint(e) => {
                 format!("https://doc.rust-lang.org/rustc/?search={e}")
             }
@@ -125,6 +130,7 @@
             | DiagnosticCode::RustcLint(r)
             | DiagnosticCode::Clippy(r)
             | DiagnosticCode::Ra(r, _) => r,
+            DiagnosticCode::SyntaxError => "syntax-error",
         }
     }
 }
@@ -154,7 +160,7 @@
             message,
             range: range.into(),
             severity: match code {
-                DiagnosticCode::RustcHardError(_) => Severity::Error,
+                DiagnosticCode::RustcHardError(_) | DiagnosticCode::SyntaxError => Severity::Error,
                 // FIXME: Rustc lints are not always warning, but the ones that are currently implemented are all warnings.
                 DiagnosticCode::RustcLint(_) => Severity::Warning,
                 // FIXME: We can make this configurable, and if the user uses `cargo clippy` on flycheck, we can
@@ -297,31 +303,54 @@
     }
 }
 
-/// Request diagnostics for the given [`FileId`]. The produced diagnostics may point to other files
+/// Request parser level diagnostics for the given [`FileId`].
+pub fn syntax_diagnostics(
+    db: &RootDatabase,
+    config: &DiagnosticsConfig,
+    file_id: FileId,
+) -> Vec<Diagnostic> {
+    let _p = tracing::info_span!("syntax_diagnostics").entered();
+
+    if config.disabled.contains("syntax-error") {
+        return Vec::new();
+    }
+
+    let sema = Semantics::new(db);
+    let file_id = sema
+        .attach_first_edition(file_id)
+        .unwrap_or_else(|| EditionedFileId::current_edition(file_id));
+
+    // [#34344] Only take first 128 errors to prevent slowing down editor/ide, the number 128 is chosen arbitrarily.
+    db.parse_errors(file_id)
+        .as_deref()
+        .into_iter()
+        .flatten()
+        .take(128)
+        .map(|err| {
+            Diagnostic::new(
+                DiagnosticCode::SyntaxError,
+                format!("Syntax Error: {err}"),
+                FileRange { file_id: file_id.into(), range: err.range() },
+            )
+        })
+        .collect()
+}
+
+/// Request semantic diagnostics for the given [`FileId`]. The produced diagnostics may point to other files
 /// due to macros.
-pub fn diagnostics(
+pub fn semantic_diagnostics(
     db: &RootDatabase,
     config: &DiagnosticsConfig,
     resolve: &AssistResolveStrategy,
     file_id: FileId,
 ) -> Vec<Diagnostic> {
-    let _p = tracing::info_span!("diagnostics").entered();
+    let _p = tracing::info_span!("semantic_diagnostics").entered();
     let sema = Semantics::new(db);
     let file_id = sema
         .attach_first_edition(file_id)
         .unwrap_or_else(|| EditionedFileId::current_edition(file_id));
     let mut res = Vec::new();
 
-    // [#34344] Only take first 128 errors to prevent slowing down editor/ide, the number 128 is chosen arbitrarily.
-    res.extend(db.parse_errors(file_id).as_deref().into_iter().flatten().take(128).map(|err| {
-        Diagnostic::new(
-            DiagnosticCode::RustcHardError("syntax-error"),
-            format!("Syntax Error: {err}"),
-            FileRange { file_id: file_id.into(), range: err.range() },
-        )
-    }));
-    let parse_errors = res.len();
-
     let parse = sema.parse(file_id);
 
     // FIXME: This iterates the entire file which is a rather expensive operation.
@@ -341,13 +370,17 @@
     match module {
         // A bunch of parse errors in a file indicate some bigger structural parse changes in the
         // file, so we skip semantic diagnostics so we can show these faster.
-        Some(m) if parse_errors < 16 => m.diagnostics(db, &mut diags, config.style_lints),
-        Some(_) => (),
+        Some(m) => {
+            if !db.parse_errors(file_id).as_deref().is_some_and(|es| es.len() >= 16) {
+                m.diagnostics(db, &mut diags, config.style_lints);
+            }
+        }
         None => handlers::unlinked_file::unlinked_file(&ctx, &mut res, file_id.file_id()),
     }
 
     for diag in diags {
         let d = match diag {
+            AnyDiagnostic::AwaitOutsideOfAsync(d) => handlers::await_outside_of_async::await_outside_of_async(&ctx, &d),
             AnyDiagnostic::ExpectedFunction(d) => handlers::expected_function::expected_function(&ctx, &d),
             AnyDiagnostic::InactiveCode(d) => match handlers::inactive_code::inactive_code(&ctx, &d) {
                 Some(it) => it,
@@ -363,7 +396,7 @@
                 res.extend(d.errors.iter().take(16).map(|err| {
                     {
                         Diagnostic::new(
-                            DiagnosticCode::RustcHardError("syntax-error"),
+                            DiagnosticCode::SyntaxError,
                             format!("Syntax Error in Expansion: {err}"),
                             ctx.resolve_precise_location(&d.node.clone(), d.precise_location),
                         )
@@ -464,6 +497,19 @@
     res
 }
 
+/// Request both syntax and semantic diagnostics for the given [`FileId`].
+pub fn full_diagnostics(
+    db: &RootDatabase,
+    config: &DiagnosticsConfig,
+    resolve: &AssistResolveStrategy,
+    file_id: FileId,
+) -> Vec<Diagnostic> {
+    let mut res = syntax_diagnostics(db, config, file_id);
+    let sema = semantic_diagnostics(db, config, resolve, file_id);
+    res.extend(sema);
+    res
+}
+
 // `__RA_EVERY_LINT` is a fake lint group to allow every lint in proc macros
 
 static RUSTC_LINT_GROUPS_DICT: Lazy<FxHashMap<&str, Vec<&str>>> =
diff --git a/crates/ide-diagnostics/src/tests.rs b/crates/ide-diagnostics/src/tests.rs
index e56fca1..ec74640 100644
--- a/crates/ide-diagnostics/src/tests.rs
+++ b/crates/ide-diagnostics/src/tests.rs
@@ -1,7 +1,7 @@
 #![allow(clippy::print_stderr)]
 
 use ide_db::{
-    assists::AssistResolveStrategy, base_db::SourceDatabaseExt, LineIndexDatabase, RootDatabase,
+    assists::AssistResolveStrategy, base_db::SourceDatabase, LineIndexDatabase, RootDatabase,
 };
 use itertools::Itertools;
 use stdx::trim_indent;
@@ -59,10 +59,14 @@
     let after = trim_indent(ra_fixture_after);
 
     let (db, file_position) = RootDatabase::with_position(ra_fixture_before);
-    let diagnostic =
-        super::diagnostics(&db, &config, &AssistResolveStrategy::All, file_position.file_id.into())
-            .pop()
-            .expect("no diagnostics");
+    let diagnostic = super::full_diagnostics(
+        &db,
+        &config,
+        &AssistResolveStrategy::All,
+        file_position.file_id.into(),
+    )
+    .pop()
+    .expect("no diagnostics");
     let fix = &diagnostic
         .fixes
         .unwrap_or_else(|| panic!("{:?} diagnostic misses fixes", diagnostic.code))[nth];
@@ -102,37 +106,39 @@
     let (db, file_position) = RootDatabase::with_position(ra_fixture_before);
     let mut conf = DiagnosticsConfig::test_sample();
     conf.expr_fill_default = ExprFillDefaultMode::Default;
-    let fix =
-        super::diagnostics(&db, &conf, &AssistResolveStrategy::All, file_position.file_id.into())
-            .into_iter()
-            .find(|d| {
-                d.fixes
-                    .as_ref()
-                    .and_then(|fixes| {
-                        fixes.iter().find(|fix| {
-                            if !fix.target.contains_inclusive(file_position.offset) {
-                                return false;
-                            }
-                            let actual = {
-                                let source_change = fix.source_change.as_ref().unwrap();
-                                let file_id =
-                                    *source_change.source_file_edits.keys().next().unwrap();
-                                let mut actual = db.file_text(file_id).to_string();
+    let fix = super::full_diagnostics(
+        &db,
+        &conf,
+        &AssistResolveStrategy::All,
+        file_position.file_id.into(),
+    )
+    .into_iter()
+    .find(|d| {
+        d.fixes
+            .as_ref()
+            .and_then(|fixes| {
+                fixes.iter().find(|fix| {
+                    if !fix.target.contains_inclusive(file_position.offset) {
+                        return false;
+                    }
+                    let actual = {
+                        let source_change = fix.source_change.as_ref().unwrap();
+                        let file_id = *source_change.source_file_edits.keys().next().unwrap();
+                        let mut actual = db.file_text(file_id).to_string();
 
-                                for (edit, snippet_edit) in source_change.source_file_edits.values()
-                                {
-                                    edit.apply(&mut actual);
-                                    if let Some(snippet_edit) = snippet_edit {
-                                        snippet_edit.apply(&mut actual);
-                                    }
-                                }
-                                actual
-                            };
-                            after == actual
-                        })
-                    })
-                    .is_some()
-            });
+                        for (edit, snippet_edit) in source_change.source_file_edits.values() {
+                            edit.apply(&mut actual);
+                            if let Some(snippet_edit) = snippet_edit {
+                                snippet_edit.apply(&mut actual);
+                            }
+                        }
+                        actual
+                    };
+                    after == actual
+                })
+            })
+            .is_some()
+    });
     assert!(fix.is_some(), "no diagnostic with desired fix");
 }
 
@@ -144,38 +150,40 @@
     let mut conf = DiagnosticsConfig::test_sample();
     conf.expr_fill_default = ExprFillDefaultMode::Default;
     let mut n_fixes = 0;
-    let fix =
-        super::diagnostics(&db, &conf, &AssistResolveStrategy::All, file_position.file_id.into())
-            .into_iter()
-            .find(|d| {
-                d.fixes
-                    .as_ref()
-                    .and_then(|fixes| {
-                        n_fixes += fixes.len();
-                        fixes.iter().find(|fix| {
-                            if !fix.target.contains_inclusive(file_position.offset) {
-                                return false;
-                            }
-                            let actual = {
-                                let source_change = fix.source_change.as_ref().unwrap();
-                                let file_id =
-                                    *source_change.source_file_edits.keys().next().unwrap();
-                                let mut actual = db.file_text(file_id).to_string();
+    let fix = super::full_diagnostics(
+        &db,
+        &conf,
+        &AssistResolveStrategy::All,
+        file_position.file_id.into(),
+    )
+    .into_iter()
+    .find(|d| {
+        d.fixes
+            .as_ref()
+            .and_then(|fixes| {
+                n_fixes += fixes.len();
+                fixes.iter().find(|fix| {
+                    if !fix.target.contains_inclusive(file_position.offset) {
+                        return false;
+                    }
+                    let actual = {
+                        let source_change = fix.source_change.as_ref().unwrap();
+                        let file_id = *source_change.source_file_edits.keys().next().unwrap();
+                        let mut actual = db.file_text(file_id).to_string();
 
-                                for (edit, snippet_edit) in source_change.source_file_edits.values()
-                                {
-                                    edit.apply(&mut actual);
-                                    if let Some(snippet_edit) = snippet_edit {
-                                        snippet_edit.apply(&mut actual);
-                                    }
-                                }
-                                actual
-                            };
-                            after == actual
-                        })
-                    })
-                    .is_some()
-            });
+                        for (edit, snippet_edit) in source_change.source_file_edits.values() {
+                            edit.apply(&mut actual);
+                            if let Some(snippet_edit) = snippet_edit {
+                                snippet_edit.apply(&mut actual);
+                            }
+                        }
+                        actual
+                    };
+                    after == actual
+                })
+            })
+            .is_some()
+    });
     assert!(fix.is_some(), "no diagnostic with desired fix");
     assert!(n_fixes == 1, "Too many fixes suggested");
 }
@@ -183,7 +191,7 @@
 /// Checks that there's a diagnostic *without* fix at `$0`.
 pub(crate) fn check_no_fix(ra_fixture: &str) {
     let (db, file_position) = RootDatabase::with_position(ra_fixture);
-    let diagnostic = super::diagnostics(
+    let diagnostic = super::full_diagnostics(
         &db,
         &DiagnosticsConfig::test_sample(),
         &AssistResolveStrategy::All,
@@ -215,7 +223,7 @@
         .iter()
         .copied()
         .flat_map(|file_id| {
-            super::diagnostics(&db, &config, &AssistResolveStrategy::All, file_id.into())
+            super::full_diagnostics(&db, &config, &AssistResolveStrategy::All, file_id.into())
                 .into_iter()
                 .map(|d| {
                     let mut annotation = String::new();
@@ -243,6 +251,12 @@
         let mut actual = annotations.remove(&file_id).unwrap_or_default();
         let expected = extract_annotations(&db.file_text(file_id));
         actual.sort_by_key(|(range, _)| range.start());
+        // FIXME: We should panic on duplicates instead, but includes currently cause us to report
+        // diagnostics twice for the calling module when both files are queried.
+        actual.dedup();
+        // actual.iter().duplicates().for_each(|(range, msg)| {
+        //     panic!("duplicate diagnostic at {:?}: {msg:?}", line_index.line_col(range.start()))
+        // });
         if expected.is_empty() {
             // makes minicore smoke test debuggable
             for (e, _) in &actual {
@@ -277,10 +291,10 @@
     let (db, file_id) = RootDatabase::with_single_file(r#"mod foo;"#);
     let file_id = file_id.into();
 
-    let diagnostics = super::diagnostics(&db, &config, &AssistResolveStrategy::All, file_id);
+    let diagnostics = super::full_diagnostics(&db, &config, &AssistResolveStrategy::All, file_id);
     assert!(diagnostics.is_empty());
 
-    let diagnostics = super::diagnostics(
+    let diagnostics = super::full_diagnostics(
         &db,
         &DiagnosticsConfig::test_sample(),
         &AssistResolveStrategy::All,
diff --git a/crates/ide-ssr/Cargo.toml b/crates/ide-ssr/Cargo.toml
index 57b1f94..fad62fa 100644
--- a/crates/ide-ssr/Cargo.toml
+++ b/crates/ide-ssr/Cargo.toml
@@ -1,8 +1,8 @@
 [package]
 name = "ide-ssr"
 version = "0.0.0"
-description = "Structural search and replace of Rust code"
-repository = "https://github.com/rust-lang/rust-analyzer"
+repository.workspace = true
+description = "Structural search and replace of Rust code for rust-analyzer."
 
 authors.workspace = true
 edition.workspace = true
diff --git a/crates/ide-ssr/src/lib.rs b/crates/ide-ssr/src/lib.rs
index e62ef60..54236ea 100644
--- a/crates/ide-ssr/src/lib.rs
+++ b/crates/ide-ssr/src/lib.rs
@@ -84,7 +84,7 @@
 
 use crate::{errors::bail, matching::MatchFailureReason};
 use hir::{FileRange, Semantics};
-use ide_db::{EditionedFileId, FileId, FxHashMap, RootDatabase};
+use ide_db::{base_db::SourceDatabase, EditionedFileId, FileId, FxHashMap, RootDatabase};
 use resolving::ResolvedRule;
 use syntax::{ast, AstNode, SyntaxNode, TextRange};
 use text_edit::TextEdit;
@@ -141,7 +141,7 @@
 
     /// Constructs an instance using the start of the first file in `db` as the lookup context.
     pub fn at_first_file(db: &'db ide_db::RootDatabase) -> Result<MatchFinder<'db>, SsrError> {
-        use ide_db::base_db::SourceDatabaseExt;
+        use ide_db::base_db::SourceRootDatabase;
         use ide_db::symbol_index::SymbolsDatabase;
         if let Some(first_file_id) =
             db.local_roots().iter().next().and_then(|root| db.source_root(*root).iter().next())
@@ -172,7 +172,6 @@
 
     /// Finds matches for all added rules and returns edits for all found matches.
     pub fn edits(&self) -> FxHashMap<FileId, TextEdit> {
-        use ide_db::base_db::SourceDatabaseExt;
         let mut matches_by_file = FxHashMap::default();
         for m in self.matches().matches {
             matches_by_file
@@ -228,7 +227,6 @@
         file_id: EditionedFileId,
         snippet: &str,
     ) -> Vec<MatchDebugInfo> {
-        use ide_db::base_db::SourceDatabaseExt;
         let file = self.sema.parse(file_id);
         let mut res = Vec::new();
         let file_text = self.sema.db.file_text(file_id.into());
diff --git a/crates/ide-ssr/src/search.rs b/crates/ide-ssr/src/search.rs
index 8323866..241de10 100644
--- a/crates/ide-ssr/src/search.rs
+++ b/crates/ide-ssr/src/search.rs
@@ -156,7 +156,7 @@
     fn search_files_do(&self, mut callback: impl FnMut(FileId)) {
         if self.restrict_ranges.is_empty() {
             // Unrestricted search.
-            use ide_db::base_db::SourceDatabaseExt;
+            use ide_db::base_db::SourceRootDatabase;
             use ide_db::symbol_index::SymbolsDatabase;
             for &root in self.sema.db.local_roots().iter() {
                 let sr = self.sema.db.source_root(root);
diff --git a/crates/ide-ssr/src/tests.rs b/crates/ide-ssr/src/tests.rs
index 4477a26..4293088 100644
--- a/crates/ide-ssr/src/tests.rs
+++ b/crates/ide-ssr/src/tests.rs
@@ -1,7 +1,7 @@
 use expect_test::{expect, Expect};
 use hir::{FilePosition, FileRange};
 use ide_db::{
-    base_db::{salsa::Durability, SourceDatabaseExt},
+    base_db::{salsa::Durability, SourceDatabase},
     EditionedFileId, FxHashSet,
 };
 use test_utils::RangeOrOffset;
diff --git a/crates/ide/Cargo.toml b/crates/ide/Cargo.toml
index 9d8400b..d976d60 100644
--- a/crates/ide/Cargo.toml
+++ b/crates/ide/Cargo.toml
@@ -1,7 +1,8 @@
 [package]
 name = "ide"
 version = "0.0.0"
-description = "TBD"
+repository.workspace = true
+description = "IDE-centric APIs for rust-analyzer."
 
 authors.workspace = true
 edition.workspace = true
diff --git a/crates/ide/src/goto_definition.rs b/crates/ide/src/goto_definition.rs
index d0701a4..8a8bc07 100644
--- a/crates/ide/src/goto_definition.rs
+++ b/crates/ide/src/goto_definition.rs
@@ -10,7 +10,7 @@
     Semantics,
 };
 use ide_db::{
-    base_db::{AnchoredPath, FileLoader},
+    base_db::{AnchoredPath, FileLoader, SourceDatabase},
     defs::{Definition, IdentClass},
     helpers::pick_best_token,
     RootDatabase, SymbolKind,
diff --git a/crates/ide/src/hover/tests.rs b/crates/ide/src/hover/tests.rs
index 3257305..516e32e 100644
--- a/crates/ide/src/hover/tests.rs
+++ b/crates/ide/src/hover/tests.rs
@@ -1,5 +1,5 @@
 use expect_test::{expect, Expect};
-use ide_db::{base_db::FileLoader, FileRange};
+use ide_db::{base_db::SourceDatabase, FileRange};
 use syntax::TextRange;
 
 use crate::{
@@ -8579,3 +8579,26 @@
         "#]],
     );
 }
+
+#[test]
+fn hover_fn_with_impl_trait_arg() {
+    check(
+        r#"
+trait Foo {}
+impl Foo for bool {}
+fn bar<const WIDTH: u8>(_: impl Foo) {}
+fn test() {
+    let f = bar::<3>;
+    f$0(true);
+}
+"#,
+        expect![[r#"
+            *f*
+
+            ```rust
+            // size = 0, align = 1
+            let f: fn bar<3>(bool)
+            ```
+        "#]],
+    );
+}
diff --git a/crates/ide/src/inlay_hints/bind_pat.rs b/crates/ide/src/inlay_hints/bind_pat.rs
index 5775aba..7310852 100644
--- a/crates/ide/src/inlay_hints/bind_pat.rs
+++ b/crates/ide/src/inlay_hints/bind_pat.rs
@@ -1147,4 +1147,19 @@
 }"#,
         );
     }
+
+    #[test]
+    fn works_in_included_file() {
+        check_types(
+            r#"
+//- minicore: include
+//- /main.rs
+include!("foo.rs");
+//- /foo.rs
+fn main() {
+    let _x = 42;
+      //^^ i32
+}"#,
+        );
+    }
 }
diff --git a/crates/ide/src/inlay_hints/closing_brace.rs b/crates/ide/src/inlay_hints/closing_brace.rs
index ea96c95..8f2777f 100644
--- a/crates/ide/src/inlay_hints/closing_brace.rs
+++ b/crates/ide/src/inlay_hints/closing_brace.rs
@@ -7,7 +7,7 @@
 use ide_db::{FileRange, RootDatabase};
 use span::EditionedFileId;
 use syntax::{
-    ast::{self, AstNode, HasName},
+    ast::{self, AstNode, HasLoopBody, HasName},
     match_ast, SyntaxKind, SyntaxNode, T,
 };
 
@@ -57,9 +57,24 @@
         // the actual number of lines in this case should be the line count of the parent BlockExpr,
         // which the `min_lines` config cares about
         node = node.parent()?;
-        let block = label.syntax().parent().and_then(ast::BlockExpr::cast)?;
-        closing_token = block.stmt_list()?.r_curly_token()?;
+
+        let parent = label.syntax().parent()?;
+        let block;
+        match_ast! {
+            match parent {
+                ast::BlockExpr(block_expr) => {
+                    block = block_expr.stmt_list()?;
+                },
+                ast::AnyHasLoopBody(loop_expr) => {
+                    block = loop_expr.loop_body()?.stmt_list()?;
+                },
+                _ => return None,
+            }
+        }
+        closing_token = block.r_curly_token()?;
+
         let lifetime = label.lifetime().map_or_else(String::new, |it| it.to_string());
+
         (lifetime, Some(label.syntax().text_range()))
     } else if let Some(block) = ast::BlockExpr::cast(node.clone()) {
         closing_token = block.stmt_list()?.r_curly_token()?;
@@ -219,6 +234,19 @@
       //^ 'do_a
     }
   //^ 'end
+
+    'a: loop {
+        'b: for i in 0..5 {
+            'c: while true {
+
+
+            }
+          //^ 'c
+        }
+      //^ 'b
+    }
+  //^ 'a
+
   }
 //^ fn test
 "#,
diff --git a/crates/ide/src/interpret_function.rs b/crates/ide/src/interpret_function.rs
index aeb3c8c..ff1317d 100644
--- a/crates/ide/src/interpret_function.rs
+++ b/crates/ide/src/interpret_function.rs
@@ -1,5 +1,5 @@
 use hir::Semantics;
-use ide_db::{base_db::SourceDatabaseExt, FilePosition, LineIndexDatabase, RootDatabase};
+use ide_db::{base_db::SourceRootDatabase, FilePosition, LineIndexDatabase, RootDatabase};
 use std::{fmt::Write, time::Instant};
 use syntax::{algo::ancestors_at_offset, ast, AstNode, TextRange};
 
diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs
index 8cb81a9..eff4bc3 100644
--- a/crates/ide/src/lib.rs
+++ b/crates/ide/src/lib.rs
@@ -65,7 +65,7 @@
 use ide_db::{
     base_db::{
         salsa::{self, ParallelDatabase},
-        CrateOrigin, Env, FileLoader, FileSet, SourceDatabase, SourceDatabaseExt, VfsPath,
+        CrateOrigin, Env, FileLoader, FileSet, SourceDatabase, SourceRootDatabase, VfsPath,
     },
     prime_caches, symbol_index, FxHashMap, FxIndexSet, LineIndexDatabase,
 };
@@ -286,7 +286,7 @@
 
     /// Gets the text of the source file.
     pub fn file_text(&self, file_id: FileId) -> Cancellable<Arc<str>> {
-        self.with_db(|db| SourceDatabaseExt::file_text(db, file_id))
+        self.with_db(|db| SourceDatabase::file_text(db, file_id))
     }
 
     /// Gets the syntax tree of the file.
@@ -672,14 +672,33 @@
             .unwrap_or_default())
     }
 
-    /// Computes the set of diagnostics for the given file.
-    pub fn diagnostics(
+    /// Computes the set of parser level diagnostics for the given file.
+    pub fn syntax_diagnostics(
+        &self,
+        config: &DiagnosticsConfig,
+        file_id: FileId,
+    ) -> Cancellable<Vec<Diagnostic>> {
+        self.with_db(|db| ide_diagnostics::syntax_diagnostics(db, config, file_id))
+    }
+
+    /// Computes the set of semantic diagnostics for the given file.
+    pub fn semantic_diagnostics(
         &self,
         config: &DiagnosticsConfig,
         resolve: AssistResolveStrategy,
         file_id: FileId,
     ) -> Cancellable<Vec<Diagnostic>> {
-        self.with_db(|db| ide_diagnostics::diagnostics(db, config, &resolve, file_id))
+        self.with_db(|db| ide_diagnostics::semantic_diagnostics(db, config, &resolve, file_id))
+    }
+
+    /// Computes the set of both syntax and semantic diagnostics for the given file.
+    pub fn full_diagnostics(
+        &self,
+        config: &DiagnosticsConfig,
+        resolve: AssistResolveStrategy,
+        file_id: FileId,
+    ) -> Cancellable<Vec<Diagnostic>> {
+        self.with_db(|db| ide_diagnostics::full_diagnostics(db, config, &resolve, file_id))
     }
 
     /// Convenience function to return assists + quick fixes for diagnostics
@@ -697,7 +716,7 @@
 
         self.with_db(|db| {
             let diagnostic_assists = if diagnostics_config.enabled && include_fixes {
-                ide_diagnostics::diagnostics(db, diagnostics_config, &resolve, frange.file_id)
+                ide_diagnostics::full_diagnostics(db, diagnostics_config, &resolve, frange.file_id)
                     .into_iter()
                     .flat_map(|it| it.fixes.unwrap_or_default())
                     .filter(|it| it.target.intersect(frange.range).is_some())
diff --git a/crates/ide/src/references.rs b/crates/ide/src/references.rs
index 46c2d47..64b82b3 100644
--- a/crates/ide/src/references.rs
+++ b/crates/ide/src/references.rs
@@ -60,7 +60,6 @@
         move |def: Definition| {
             let mut usages =
                 def.usages(sema).set_scope(search_scope.as_ref()).include_self_refs().all();
-
             if literal_search {
                 retain_adt_literal_usages(&mut usages, def, sema);
             }
@@ -818,6 +817,30 @@
     }
 
     #[test]
+    fn test_self_inside_not_adt_impl() {
+        check(
+            r#"
+pub trait TestTrait {
+    type Assoc;
+    fn stuff() -> Self;
+}
+impl TestTrait for () {
+    type Assoc$0 = u8;
+    fn stuff() -> Self {
+        let me: Self = ();
+        me
+    }
+}
+"#,
+            expect![[r#"
+                Assoc TypeAlias FileId(0) 92..108 97..102
+
+                FileId(0) 31..36
+            "#]],
+        )
+    }
+
+    #[test]
     fn test_find_all_refs_two_modules() {
         check(
             r#"
diff --git a/crates/ide/src/static_index.rs b/crates/ide/src/static_index.rs
index cd9b7ae..eaccee0 100644
--- a/crates/ide/src/static_index.rs
+++ b/crates/ide/src/static_index.rs
@@ -3,9 +3,12 @@
 
 use hir::{db::HirDatabase, Crate, HirFileIdExt, Module, Semantics};
 use ide_db::{
-    base_db::SourceDatabaseExt, defs::Definition, documentation::Documentation,
-    famous_defs::FamousDefs, helpers::get_definition, FileId, FileRange, FxHashMap, FxHashSet,
-    RootDatabase,
+    base_db::{SourceRootDatabase, VfsPath},
+    defs::Definition,
+    documentation::Documentation,
+    famous_defs::FamousDefs,
+    helpers::get_definition,
+    FileId, FileRange, FxHashMap, FxHashSet, RootDatabase,
 };
 use syntax::{AstNode, SyntaxKind::*, SyntaxNode, TextRange, T};
 
@@ -227,13 +230,16 @@
         self.files.push(result);
     }
 
-    pub fn compute(analysis: &Analysis) -> StaticIndex<'_> {
+    pub fn compute<'a>(analysis: &'a Analysis, workspace_root: &VfsPath) -> StaticIndex<'a> {
         let db = &*analysis.db;
         let work = all_modules(db).into_iter().filter(|module| {
             let file_id = module.definition_source_file_id(db).original_file(db);
             let source_root = db.file_source_root(file_id.into());
             let source_root = db.source_root(source_root);
-            !source_root.is_library
+            let is_vendored = source_root
+                .path_for_file(&file_id.into())
+                .is_some_and(|module_path| module_path.starts_with(workspace_root));
+            !source_root.is_library || is_vendored
         });
         let mut this = StaticIndex {
             files: vec![],
@@ -259,12 +265,13 @@
 #[cfg(test)]
 mod tests {
     use crate::{fixture, StaticIndex};
-    use ide_db::{FileRange, FxHashSet};
+    use ide_db::{base_db::VfsPath, FileRange, FxHashSet};
     use syntax::TextSize;
 
     fn check_all_ranges(ra_fixture: &str) {
         let (analysis, ranges) = fixture::annotations_without_marker(ra_fixture);
-        let s = StaticIndex::compute(&analysis);
+        let s =
+            StaticIndex::compute(&analysis, &VfsPath::new_virtual_path("/workspace".to_owned()));
         let mut range_set: FxHashSet<_> = ranges.iter().map(|it| it.0).collect();
         for f in s.files {
             for (range, _) in f.tokens {
@@ -283,7 +290,8 @@
     #[track_caller]
     fn check_definitions(ra_fixture: &str) {
         let (analysis, ranges) = fixture::annotations_without_marker(ra_fixture);
-        let s = StaticIndex::compute(&analysis);
+        let s =
+            StaticIndex::compute(&analysis, &VfsPath::new_virtual_path("/workspace".to_owned()));
         let mut range_set: FxHashSet<_> = ranges.iter().map(|it| it.0).collect();
         for (_, t) in s.tokens.iter() {
             if let Some(t) = t.definition {
@@ -326,7 +334,7 @@
     fn multi_crate() {
         check_definitions(
             r#"
-//- /main.rs crate:main deps:foo
+//- /workspace/main.rs crate:main deps:foo
 
 
 use foo::func;
@@ -335,7 +343,7 @@
  //^^^^
     func();
 }
-//- /foo/lib.rs crate:foo
+//- /workspace/foo/lib.rs crate:foo
 
 pub func() {
 
@@ -345,6 +353,24 @@
     }
 
     #[test]
+    fn vendored_crate() {
+        check_all_ranges(
+            r#"
+//- /workspace/main.rs crate:main deps:external,vendored
+struct Main(i32);
+     //^^^^ ^^^
+
+//- /external/lib.rs new_source_root:library crate:external@0.1.0,https://a.b/foo.git library
+struct ExternalLibrary(i32);
+
+//- /workspace/vendored/lib.rs new_source_root:library crate:vendored@0.1.0,https://a.b/bar.git library
+struct VendoredLibrary(i32);
+     //^^^^^^^^^^^^^^^ ^^^
+"#,
+        );
+    }
+
+    #[test]
     fn derives() {
         check_all_ranges(
             r#"
diff --git a/crates/ide/src/view_crate_graph.rs b/crates/ide/src/view_crate_graph.rs
index 7270121..9ff099f 100644
--- a/crates/ide/src/view_crate_graph.rs
+++ b/crates/ide/src/view_crate_graph.rs
@@ -1,6 +1,6 @@
 use dot::{Id, LabelText};
 use ide_db::{
-    base_db::{CrateGraph, CrateId, Dependency, SourceDatabase, SourceDatabaseExt},
+    base_db::{CrateGraph, CrateId, Dependency, SourceDatabase, SourceRootDatabase},
     FxHashSet, RootDatabase,
 };
 use triomphe::Arc;
diff --git a/crates/intern/Cargo.toml b/crates/intern/Cargo.toml
index c08ecb5..5e7ee54 100644
--- a/crates/intern/Cargo.toml
+++ b/crates/intern/Cargo.toml
@@ -1,7 +1,8 @@
 [package]
 name = "intern"
 version = "0.0.0"
-description = "TBD"
+repository.workspace = true
+description = "Global `Arc`-based object interning infrastructure for rust-analyzer."
 
 authors.workspace = true
 edition.workspace = true
diff --git a/crates/limit/Cargo.toml b/crates/limit/Cargo.toml
index c1a7688..30666f5 100644
--- a/crates/limit/Cargo.toml
+++ b/crates/limit/Cargo.toml
@@ -1,7 +1,8 @@
 [package]
 name = "limit"
 version = "0.0.0"
-description = "TBD"
+repository.workspace = true
+description = "A struct to enforce limits for rust-analyzer."
 
 authors.workspace = true
 edition.workspace = true
diff --git a/crates/load-cargo/Cargo.toml b/crates/load-cargo/Cargo.toml
index 64ed93b..23fd50a 100644
--- a/crates/load-cargo/Cargo.toml
+++ b/crates/load-cargo/Cargo.toml
@@ -1,7 +1,8 @@
 [package]
 name = "load-cargo"
 version = "0.0.0"
-description = "TBD"
+repository.workspace = true
+description = "Loads a Cargo project into a static instance of rust-analyzer for analysis."
 
 rust-version.workspace = true
 edition.workspace = true
diff --git a/crates/load-cargo/src/lib.rs b/crates/load-cargo/src/lib.rs
index 8737f22..abad7e9 100644
--- a/crates/load-cargo/src/lib.rs
+++ b/crates/load-cargo/src/lib.rs
@@ -16,11 +16,16 @@
 use itertools::Itertools;
 use proc_macro_api::{MacroDylib, ProcMacroServer};
 use project_model::{
-    CargoConfig, ManifestPath, PackageRoot, ProjectManifest, ProjectWorkspace, ProjectWorkspaceKind,
+    CargoConfig, PackageRoot, ProjectManifest, ProjectWorkspace, ProjectWorkspaceKind,
 };
 use span::Span;
-use vfs::{file_set::FileSetConfig, loader::Handle, AbsPath, AbsPathBuf, VfsPath};
+use vfs::{
+    file_set::FileSetConfig,
+    loader::{Handle, LoadingProgress},
+    AbsPath, AbsPathBuf, VfsPath,
+};
 
+#[derive(Debug)]
 pub struct LoadCargoConfig {
     pub load_out_dirs_from_check: bool,
     pub with_proc_macro_server: ProcMacroServerChoice,
@@ -60,11 +65,11 @@
     let (sender, receiver) = unbounded();
     let mut vfs = vfs::Vfs::default();
     let mut loader = {
-        let loader =
-            vfs_notify::NotifyHandle::spawn(Box::new(move |msg| sender.send(msg).unwrap()));
+        let loader = vfs_notify::NotifyHandle::spawn(sender);
         Box::new(loader)
     };
 
+    tracing::debug!(?load_config, "LoadCargoConfig");
     let proc_macro_server = match &load_config.with_proc_macro_server {
         ProcMacroServerChoice::Sysroot => ws
             .find_sysroot_proc_macro_srv()
@@ -77,6 +82,14 @@
             Err((anyhow::format_err!("proc macro server disabled"), false))
         }
     };
+    match &proc_macro_server {
+        Ok(server) => {
+            tracing::info!(path=%server.path(), "Proc-macro server started")
+        }
+        Err((e, _)) => {
+            tracing::info!(%e, "Failed to start proc-macro server")
+        }
+    }
 
     let (crate_graph, proc_macros) = ws.to_crate_graph(
         &mut |path: &AbsPath| {
@@ -247,7 +260,7 @@
             let mut file_set_roots: Vec<VfsPath> = vec![];
             let mut entries = vec![];
 
-            if let Some(manifest) = ws.manifest().map(ManifestPath::as_ref) {
+            if let Some(manifest) = ws.manifest().map(|it| it.to_path_buf()) {
                 file_set_roots.push(VfsPath::from(manifest.to_owned()));
                 entries.push(manifest.to_owned());
             }
@@ -409,8 +422,8 @@
     // wait until Vfs has loaded all roots
     for task in receiver {
         match task {
-            vfs::loader::Message::Progress { n_done, n_total, .. } => {
-                if n_done == Some(n_total) {
+            vfs::loader::Message::Progress { n_done, .. } => {
+                if n_done == LoadingProgress::Finished {
                     break;
                 }
             }
diff --git a/crates/mbe/Cargo.toml b/crates/mbe/Cargo.toml
index 5783462..756d42e 100644
--- a/crates/mbe/Cargo.toml
+++ b/crates/mbe/Cargo.toml
@@ -1,7 +1,8 @@
 [package]
 name = "mbe"
 version = "0.0.0"
-description = "TBD"
+repository.workspace = true
+description = "Handling of `macro_rules` macros for rust-analyzer."
 
 authors.workspace = true
 edition.workspace = true
@@ -25,6 +26,7 @@
 stdx.workspace = true
 span.workspace = true
 intern.workspace = true
+syntax-bridge.workspace = true
 
 [dev-dependencies]
 test-utils.workspace = true
diff --git a/crates/mbe/src/benchmark.rs b/crates/mbe/src/benchmark.rs
index b6db4d2..43604eb 100644
--- a/crates/mbe/src/benchmark.rs
+++ b/crates/mbe/src/benchmark.rs
@@ -7,11 +7,15 @@
     ast::{self, HasName},
     AstNode,
 };
+use syntax_bridge::{
+    dummy_test_span_utils::{DummyTestSpanMap, DUMMY},
+    syntax_node_to_token_tree, DocCommentDesugarMode,
+};
 use test_utils::{bench, bench_fixture, skip_slow_tests};
 
 use crate::{
     parser::{MetaVarKind, Op, RepeatKind, Separator},
-    syntax_node_to_token_tree, DeclarativeMacro, DocCommentDesugarMode, DummyTestSpanMap, DUMMY,
+    DeclarativeMacro,
 };
 
 #[test]
diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs
index 568490d..8878553 100644
--- a/crates/mbe/src/lib.rs
+++ b/crates/mbe/src/lib.rs
@@ -8,13 +8,12 @@
 
 mod expander;
 mod parser;
-mod syntax_bridge;
-mod to_parser_input;
 
 #[cfg(test)]
 mod benchmark;
 
 use span::{Edition, Span, SyntaxContextId};
+use syntax_bridge::to_parser_input;
 use tt::iter::TtIter;
 use tt::DelimSpan;
 
@@ -23,18 +22,8 @@
 
 use crate::parser::{MetaTemplate, MetaVarKind, Op};
 
-// FIXME: we probably should re-think  `token_tree_to_syntax_node` interfaces
-pub use ::parser::TopEntryPoint;
 pub use tt::{Delimiter, DelimiterKind, Punct};
 
-pub use crate::syntax_bridge::{
-    desugar_doc_comment_text, parse_exprs_with_sep, parse_to_token_tree,
-    parse_to_token_tree_static_span, syntax_node_to_token_tree, syntax_node_to_token_tree_modified,
-    token_tree_to_syntax_node, DocCommentDesugarMode, SpanMapper,
-};
-
-pub use crate::syntax_bridge::dummy_test_span_utils::*;
-
 #[derive(Debug, PartialEq, Eq, Clone)]
 pub enum ParseError {
     UnexpectedToken(Box<str>),
@@ -361,7 +350,7 @@
     }
 }
 
-fn expect_fragment(
+pub fn expect_fragment(
     tt_iter: &mut TtIter<'_, Span>,
     entry_point: ::parser::PrefixEntryPoint,
     edition: ::parser::Edition,
@@ -369,7 +358,7 @@
 ) -> ExpandResult<Option<tt::TokenTree<Span>>> {
     use ::parser;
     let buffer = tt::buffer::TokenBuffer::from_tokens(tt_iter.as_slice());
-    let parser_input = to_parser_input::to_parser_input(edition, &buffer);
+    let parser_input = to_parser_input(edition, &buffer);
     let tree_traversal = entry_point.parse(&parser_input, edition);
     let mut cursor = buffer.begin();
     let mut error = false;
diff --git a/crates/parser/Cargo.toml b/crates/parser/Cargo.toml
index 54b57c2..d525566 100644
--- a/crates/parser/Cargo.toml
+++ b/crates/parser/Cargo.toml
@@ -1,7 +1,8 @@
 [package]
 name = "parser"
 version = "0.0.0"
-description = "TBD"
+repository.workspace = true
+description = "The Rust parser for rust-analyzer."
 
 authors.workspace = true
 edition.workspace = true
diff --git a/crates/paths/Cargo.toml b/crates/paths/Cargo.toml
index 59a4ad9..d4b0a54 100644
--- a/crates/paths/Cargo.toml
+++ b/crates/paths/Cargo.toml
@@ -1,7 +1,8 @@
 [package]
 name = "paths"
 version = "0.0.0"
-description = "TBD"
+repository.workspace = true
+description = "Path wrappers for absolute and relative paths used by rust-analyzer."
 
 authors.workspace = true
 edition.workspace = true
@@ -13,13 +14,10 @@
 
 [dependencies]
 camino.workspace = true
-# Adding this dep sadly puts a lot of rust-analyzer crates after the
-# serde-derive crate. Even though we don't activate the derive feature here,
-# someone else in the crate graph certainly does!
-# serde.workspace = true
+serde = { workspace = true, optional = true }
 
 [features]
-serde1 = ["camino/serde1"]
+serde1 = ["camino/serde1", "dep:serde"]
 
 [lints]
 workspace = true
diff --git a/crates/paths/src/lib.rs b/crates/paths/src/lib.rs
index 885f071..0084244 100644
--- a/crates/paths/src/lib.rs
+++ b/crates/paths/src/lib.rs
@@ -1,4 +1,4 @@
-//! Thin wrappers around `std::path`/`camino::path`, distinguishing between absolute and
+//! Thin wrappers around [`camino::path`], distinguishing between absolute and
 //! relative paths.
 
 use std::{
@@ -8,9 +8,9 @@
     path::{Path, PathBuf},
 };
 
-pub use camino::*;
+pub use camino::{Utf8Component, Utf8Components, Utf8Path, Utf8PathBuf, Utf8Prefix};
 
-/// Wrapper around an absolute [`Utf8PathBuf`].
+/// A [`Utf8PathBuf`] that is guaranteed to be absolute.
 #[derive(Debug, Clone, Ord, PartialOrd, Eq, Hash)]
 pub struct AbsPathBuf(Utf8PathBuf);
 
@@ -73,16 +73,6 @@
     }
 }
 
-impl TryFrom<PathBuf> for AbsPathBuf {
-    type Error = PathBuf;
-    fn try_from(path_buf: PathBuf) -> Result<AbsPathBuf, PathBuf> {
-        if !path_buf.is_absolute() {
-            return Err(path_buf);
-        }
-        Ok(AbsPathBuf(Utf8PathBuf::from_path_buf(path_buf)?))
-    }
-}
-
 impl TryFrom<&str> for AbsPathBuf {
     type Error = Utf8PathBuf;
     fn try_from(path: &str) -> Result<AbsPathBuf, Utf8PathBuf> {
@@ -151,6 +141,10 @@
     pub fn push<P: AsRef<Utf8Path>>(&mut self, suffix: P) {
         self.0.push(suffix)
     }
+
+    pub fn join(&self, path: impl AsRef<Utf8Path>) -> Self {
+        Self(self.0.join(path))
+    }
 }
 
 impl fmt::Display for AbsPathBuf {
diff --git a/crates/proc-macro-api/Cargo.toml b/crates/proc-macro-api/Cargo.toml
index 345fb9f..84b877f 100644
--- a/crates/proc-macro-api/Cargo.toml
+++ b/crates/proc-macro-api/Cargo.toml
@@ -1,7 +1,8 @@
 [package]
 name = "proc-macro-api"
 version = "0.0.0"
-description = "TBD"
+repository.workspace = true
+description = "RPC API for the `proc-macro-srv` crate of rust-analyzer."
 
 authors.workspace = true
 edition.workspace = true
@@ -22,12 +23,11 @@
 paths = { workspace = true, features = ["serde1"] }
 tt.workspace = true
 stdx.workspace = true
-text-size.workspace = true
-span.workspace = true
 # Ideally this crate would not depend on salsa things, but we need span information here which wraps
 # InternIds for the syntax context
+span.workspace = true
+# only here due to the `Env` newtype :/
 base-db.workspace = true
-la-arena.workspace = true
 intern.workspace = true
 
 [lints]
diff --git a/crates/proc-macro-api/src/msg.rs b/crates/proc-macro-api/src/msg.rs
index 6a99b5e..8835285 100644
--- a/crates/proc-macro-api/src/msg.rs
+++ b/crates/proc-macro-api/src/msg.rs
@@ -158,9 +158,7 @@
 #[cfg(test)]
 mod tests {
     use intern::{sym, Symbol};
-    use la_arena::RawIdx;
-    use span::{ErasedFileAstId, Span, SpanAnchor, SyntaxContextId};
-    use text_size::{TextRange, TextSize};
+    use span::{ErasedFileAstId, Span, SpanAnchor, SyntaxContextId, TextRange, TextSize};
     use tt::{Delimiter, DelimiterKind, Ident, Leaf, Literal, Punct, Spacing, Subtree, TokenTree};
 
     use super::*;
@@ -171,7 +169,7 @@
                 span::FileId::from_raw(0xe4e4e),
                 span::Edition::CURRENT,
             ),
-            ast_id: ErasedFileAstId::from_raw(RawIdx::from(0)),
+            ast_id: ErasedFileAstId::from_raw(0),
         };
 
         let token_trees = Box::new([
diff --git a/crates/proc-macro-api/src/msg/flat.rs b/crates/proc-macro-api/src/msg/flat.rs
index a8661f5..88256e9 100644
--- a/crates/proc-macro-api/src/msg/flat.rs
+++ b/crates/proc-macro-api/src/msg/flat.rs
@@ -38,11 +38,9 @@
 use std::collections::VecDeque;
 
 use intern::Symbol;
-use la_arena::RawIdx;
 use rustc_hash::FxHashMap;
 use serde::{Deserialize, Serialize};
-use span::{EditionedFileId, ErasedFileAstId, Span, SpanAnchor, SyntaxContextId};
-use text_size::TextRange;
+use span::{EditionedFileId, ErasedFileAstId, Span, SpanAnchor, SyntaxContextId, TextRange};
 
 use crate::msg::{ENCODE_CLOSE_SPAN_VERSION, EXTENDED_LEAF_DATA};
 
@@ -54,7 +52,7 @@
         .flat_map(|span| {
             [
                 span.anchor.file_id.as_u32(),
-                span.anchor.ast_id.into_raw().into_u32(),
+                span.anchor.ast_id.into_raw(),
                 span.range.start().into(),
                 span.range.end().into(),
                 span.ctx.into_u32(),
@@ -71,7 +69,7 @@
             Span {
                 anchor: SpanAnchor {
                     file_id: EditionedFileId::from_raw(file_id),
-                    ast_id: ErasedFileAstId::from_raw(RawIdx::from_u32(ast_id)),
+                    ast_id: ErasedFileAstId::from_raw(ast_id),
                 },
                 range: TextRange::new(start.into(), end.into()),
                 ctx: SyntaxContextId::from_u32(e),
diff --git a/crates/proc-macro-srv-cli/Cargo.toml b/crates/proc-macro-srv-cli/Cargo.toml
index a559ba0..1c39451 100644
--- a/crates/proc-macro-srv-cli/Cargo.toml
+++ b/crates/proc-macro-srv-cli/Cargo.toml
@@ -1,7 +1,8 @@
 [package]
 name = "proc-macro-srv-cli"
 version = "0.0.0"
-description = "TBD"
+repository.workspace = true
+description = "A standalone binary for the `proc-macro-srv` crate of rust-analyzer."
 
 authors.workspace = true
 edition.workspace = true
diff --git a/crates/proc-macro-srv/Cargo.toml b/crates/proc-macro-srv/Cargo.toml
index 673b5bd..e8d9677 100644
--- a/crates/proc-macro-srv/Cargo.toml
+++ b/crates/proc-macro-srv/Cargo.toml
@@ -1,7 +1,8 @@
 [package]
 name = "proc-macro-srv"
 version = "0.0.0"
-description = "TBD"
+repository.workspace = true
+description = "Proc-macro server for rust-analyzer."
 
 authors.workspace = true
 edition.workspace = true
@@ -19,23 +20,24 @@
 
 stdx.workspace = true
 tt.workspace = true
-mbe.workspace = true
+syntax-bridge.workspace = true
 paths.workspace = true
 base-db.workspace = true
 span.workspace = true
 proc-macro-api.workspace = true
-ra-ap-rustc_lexer.workspace = true
 intern.workspace = true
 
+ra-ap-rustc_lexer.workspace = true
+
 [dev-dependencies]
-expect-test = "1.4.0"
+expect-test.workspace = true
 
 # used as proc macro test targets
 proc-macro-test.path = "./proc-macro-test"
 
 [features]
 sysroot-abi = []
-in-rust-tree = ["mbe/in-rust-tree", "tt/in-rust-tree","sysroot-abi"]
+in-rust-tree = ["syntax-bridge/in-rust-tree", "tt/in-rust-tree", "sysroot-abi"]
 
 [lints]
 workspace = true
diff --git a/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs b/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs
index 8b9eb3b..552d99f 100644
--- a/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs
+++ b/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs
@@ -479,7 +479,7 @@
             range: TextRange::empty(TextSize::new(0)),
             anchor: span::SpanAnchor {
                 file_id: EditionedFileId::current_edition(FileId::from_raw(0)),
-                ast_id: span::ErasedFileAstId::from_raw(0.into()),
+                ast_id: span::ErasedFileAstId::from_raw(0),
             },
             ctx: SyntaxContextId::ROOT,
         };
@@ -515,7 +515,7 @@
             range: TextRange::empty(TextSize::new(0)),
             anchor: span::SpanAnchor {
                 file_id: EditionedFileId::current_edition(FileId::from_raw(0)),
-                ast_id: span::ErasedFileAstId::from_raw(0.into()),
+                ast_id: span::ErasedFileAstId::from_raw(0),
             },
             ctx: SyntaxContextId::ROOT,
         };
diff --git a/crates/proc-macro-srv/src/server_impl/token_stream.rs b/crates/proc-macro-srv/src/server_impl/token_stream.rs
index cdf93fa..4d8d496 100644
--- a/crates/proc-macro-srv/src/server_impl/token_stream.rs
+++ b/crates/proc-macro-srv/src/server_impl/token_stream.rs
@@ -126,9 +126,12 @@
     /// change these errors into `LexError`s later.
     impl<S: Copy + fmt::Debug> TokenStream<S> {
         pub(crate) fn from_str(src: &str, call_site: S) -> Result<TokenStream<S>, String> {
-            let subtree =
-                mbe::parse_to_token_tree_static_span(span::Edition::CURRENT_FIXME, call_site, src)
-                    .ok_or("lexing error")?;
+            let subtree = syntax_bridge::parse_to_token_tree_static_span(
+                span::Edition::CURRENT_FIXME,
+                call_site,
+                src,
+            )
+            .ok_or("lexing error")?;
 
             Ok(TokenStream::with_subtree(subtree))
         }
diff --git a/crates/proc-macro-srv/src/tests/utils.rs b/crates/proc-macro-srv/src/tests/utils.rs
index 70eff51..4f1a18c 100644
--- a/crates/proc-macro-srv/src/tests/utils.rs
+++ b/crates/proc-macro-srv/src/tests/utils.rs
@@ -9,7 +9,8 @@
 
 fn parse_string(call_site: TokenId, src: &str) -> crate::server_impl::TokenStream<TokenId> {
     crate::server_impl::TokenStream::with_subtree(
-        mbe::parse_to_token_tree_static_span(span::Edition::CURRENT, call_site, src).unwrap(),
+        syntax_bridge::parse_to_token_tree_static_span(span::Edition::CURRENT, call_site, src)
+            .unwrap(),
     )
 }
 
@@ -19,7 +20,7 @@
     src: &str,
 ) -> crate::server_impl::TokenStream<Span> {
     crate::server_impl::TokenStream::with_subtree(
-        mbe::parse_to_token_tree(span::Edition::CURRENT, anchor, call_site, src).unwrap(),
+        syntax_bridge::parse_to_token_tree(span::Edition::CURRENT, anchor, call_site, src).unwrap(),
     )
 }
 
@@ -69,7 +70,7 @@
         range: TextRange::new(0.into(), 150.into()),
         anchor: SpanAnchor {
             file_id: EditionedFileId::current_edition(FileId::from_raw(41)),
-            ast_id: ErasedFileAstId::from_raw(From::from(1)),
+            ast_id: ErasedFileAstId::from_raw(1),
         },
         ctx: SyntaxContextId::ROOT,
     };
@@ -77,7 +78,7 @@
         range: TextRange::new(0.into(), 100.into()),
         anchor: SpanAnchor {
             file_id: EditionedFileId::current_edition(FileId::from_raw(42)),
-            ast_id: ErasedFileAstId::from_raw(From::from(2)),
+            ast_id: ErasedFileAstId::from_raw(2),
         },
         ctx: SyntaxContextId::ROOT,
     };
diff --git a/crates/profile/Cargo.toml b/crates/profile/Cargo.toml
index 5989dc6..2e3413f 100644
--- a/crates/profile/Cargo.toml
+++ b/crates/profile/Cargo.toml
@@ -1,7 +1,8 @@
 [package]
 name = "profile"
 version = "0.0.0"
-description = "TBD"
+repository.workspace = true
+description = "A collection of tools for profiling rust-analyzer."
 
 authors.workspace = true
 edition.workspace = true
diff --git a/crates/project-model/Cargo.toml b/crates/project-model/Cargo.toml
index 8b34bd3..68e0e1b 100644
--- a/crates/project-model/Cargo.toml
+++ b/crates/project-model/Cargo.toml
@@ -1,7 +1,8 @@
 [package]
 name = "project-model"
 version = "0.0.0"
-description = "TBD"
+repository.workspace = true
+description = "A representation for a Cargo project for rust-analyzer."
 
 authors.workspace = true
 edition.workspace = true
diff --git a/crates/project-model/src/build_scripts.rs b/crates/project-model/src/build_dependencies.rs
similarity index 89%
rename from crates/project-model/src/build_scripts.rs
rename to crates/project-model/src/build_dependencies.rs
index 839d8e5..e7a4b8f 100644
--- a/crates/project-model/src/build_scripts.rs
+++ b/crates/project-model/src/build_dependencies.rs
@@ -1,28 +1,25 @@
-//! Workspace information we get from cargo consists of two pieces. The first is
-//! the output of `cargo metadata`. The second is the output of running
-//! `build.rs` files (`OUT_DIR` env var, extra cfg flags) and compiling proc
-//! macro.
+//! Logic to invoke `cargo` for building build-dependencies (build scripts and proc-macros) as well as
+//! executing the build scripts to fetch required dependency information (`OUT_DIR` env var, extra
+//! cfg flags, etc).
 //!
-//! This module implements this second part. We use "build script" terminology
-//! here, but it covers procedural macros as well.
+//! In essence this just invokes `cargo` with the appropriate output format which we consume,
+//! but if enabled we will also use `RUSTC_WRAPPER` to only compile the build scripts and
+//! proc-macros and skip everything else.
 
-use std::{
-    cell::RefCell,
-    io, mem,
-    path::{self, PathBuf},
-    process::Command,
-};
+use std::{cell::RefCell, io, mem, process::Command};
 
+use base_db::Env;
 use cargo_metadata::{camino::Utf8Path, Message};
+use cfg::CfgAtom;
 use itertools::Itertools;
 use la_arena::ArenaMap;
-use paths::{AbsPath, AbsPathBuf};
+use paths::{AbsPath, AbsPathBuf, Utf8PathBuf};
 use rustc_hash::{FxHashMap, FxHashSet};
 use serde::Deserialize;
 use toolchain::Tool;
 
 use crate::{
-    cfg::CfgFlag, utf8_stdout, CargoConfig, CargoFeatures, CargoWorkspace, InvocationLocation,
+    utf8_stdout, CargoConfig, CargoFeatures, CargoWorkspace, InvocationLocation,
     InvocationStrategy, ManifestPath, Package, Sysroot, TargetKind,
 };
 
@@ -37,12 +34,12 @@
 #[derive(Debug, Clone, Default, PartialEq, Eq)]
 pub(crate) struct BuildScriptOutput {
     /// List of config flags defined by this package's build script.
-    pub(crate) cfgs: Vec<CfgFlag>,
+    pub(crate) cfgs: Vec<CfgAtom>,
     /// List of cargo-related environment variables with their value.
     ///
     /// If the package has a build script which defines environment variables,
     /// they can also be found here.
-    pub(crate) envs: Vec<(String, String)>,
+    pub(crate) envs: Env,
     /// Directory where a build script might place its output.
     pub(crate) out_dir: Option<AbsPathBuf>,
     /// Path to the proc-macro library file if this package exposes proc-macros.
@@ -50,7 +47,7 @@
 }
 
 impl BuildScriptOutput {
-    fn is_unchanged(&self) -> bool {
+    fn is_empty(&self) -> bool {
         self.cfgs.is_empty()
             && self.envs.is_empty()
             && self.out_dir.is_none()
@@ -59,10 +56,348 @@
 }
 
 impl WorkspaceBuildScripts {
+    /// Runs the build scripts for the given workspace
+    pub(crate) fn run_for_workspace(
+        config: &CargoConfig,
+        workspace: &CargoWorkspace,
+        progress: &dyn Fn(String),
+        sysroot: &Sysroot,
+    ) -> io::Result<WorkspaceBuildScripts> {
+        let current_dir = match &config.invocation_location {
+            InvocationLocation::Root(root) if config.run_build_script_command.is_some() => root,
+            _ => workspace.workspace_root(),
+        };
+
+        let allowed_features = workspace.workspace_features();
+        let cmd = Self::build_command(
+            config,
+            &allowed_features,
+            workspace.manifest_path(),
+            current_dir,
+            sysroot,
+        )?;
+        Self::run_per_ws(cmd, workspace, progress)
+    }
+
+    /// Runs the build scripts by invoking the configured command *once*.
+    /// This populates the outputs for all passed in workspaces.
+    pub(crate) fn run_once(
+        config: &CargoConfig,
+        workspaces: &[&CargoWorkspace],
+        progress: &dyn Fn(String),
+        workspace_root: &AbsPathBuf,
+    ) -> io::Result<Vec<WorkspaceBuildScripts>> {
+        assert_eq!(config.invocation_strategy, InvocationStrategy::Once);
+
+        let current_dir = match &config.invocation_location {
+            InvocationLocation::Root(root) => root,
+            InvocationLocation::Workspace => {
+                return Err(io::Error::new(
+                    io::ErrorKind::Other,
+                    "Cannot run build scripts from workspace with invocation strategy `once`",
+                ))
+            }
+        };
+        let cmd = Self::build_command(
+            config,
+            &Default::default(),
+            // This is not gonna be used anyways, so just construct a dummy here
+            &ManifestPath::try_from(workspace_root.clone()).unwrap(),
+            current_dir,
+            &Sysroot::empty(),
+        )?;
+        // NB: Cargo.toml could have been modified between `cargo metadata` and
+        // `cargo check`. We shouldn't assume that package ids we see here are
+        // exactly those from `config`.
+        let mut by_id = FxHashMap::default();
+        // some workspaces might depend on the same crates, so we need to duplicate the outputs
+        // to those collisions
+        let mut collisions = Vec::new();
+        let mut res: Vec<_> = workspaces
+            .iter()
+            .enumerate()
+            .map(|(idx, workspace)| {
+                let mut res = WorkspaceBuildScripts::default();
+                for package in workspace.packages() {
+                    res.outputs.insert(package, BuildScriptOutput::default());
+                    if by_id.contains_key(&workspace[package].id) {
+                        collisions.push((&workspace[package].id, idx, package));
+                    } else {
+                        by_id.insert(workspace[package].id.clone(), (package, idx));
+                    }
+                }
+                res
+            })
+            .collect();
+
+        let errors = Self::run_command(
+            cmd,
+            |package, cb| {
+                if let Some(&(package, workspace)) = by_id.get(package) {
+                    cb(&workspaces[workspace][package].name, &mut res[workspace].outputs[package]);
+                }
+            },
+            progress,
+        )?;
+        res.iter_mut().for_each(|it| it.error.clone_from(&errors));
+        collisions.into_iter().for_each(|(id, workspace, package)| {
+            if let Some(&(p, w)) = by_id.get(id) {
+                res[workspace].outputs[package] = res[w].outputs[p].clone();
+            }
+        });
+
+        if tracing::enabled!(tracing::Level::INFO) {
+            for (idx, workspace) in workspaces.iter().enumerate() {
+                for package in workspace.packages() {
+                    let package_build_data = &mut res[idx].outputs[package];
+                    if !package_build_data.is_empty() {
+                        tracing::info!(
+                            "{}: {package_build_data:?}",
+                            workspace[package].manifest.parent(),
+                        );
+                    }
+                }
+            }
+        }
+
+        Ok(res)
+    }
+
+    pub fn error(&self) -> Option<&str> {
+        self.error.as_deref()
+    }
+
+    pub(crate) fn get_output(&self, idx: Package) -> Option<&BuildScriptOutput> {
+        self.outputs.get(idx)
+    }
+
+    /// Assembles build script outputs for the rustc crates via `--print target-libdir`.
+    pub(crate) fn rustc_crates(
+        rustc: &CargoWorkspace,
+        current_dir: &AbsPath,
+        extra_env: &FxHashMap<String, String>,
+        sysroot: &Sysroot,
+    ) -> Self {
+        let mut bs = WorkspaceBuildScripts::default();
+        for p in rustc.packages() {
+            bs.outputs.insert(p, BuildScriptOutput::default());
+        }
+        let res = (|| {
+            let target_libdir = (|| {
+                let mut cargo_config = sysroot.tool(Tool::Cargo);
+                cargo_config.envs(extra_env);
+                cargo_config
+                    .current_dir(current_dir)
+                    .args(["rustc", "-Z", "unstable-options", "--print", "target-libdir"])
+                    .env("RUSTC_BOOTSTRAP", "1");
+                if let Ok(it) = utf8_stdout(cargo_config) {
+                    return Ok(it);
+                }
+                let mut cmd = sysroot.tool(Tool::Rustc);
+                cmd.envs(extra_env);
+                cmd.args(["--print", "target-libdir"]);
+                utf8_stdout(cmd)
+            })()?;
+
+            let target_libdir = AbsPathBuf::try_from(Utf8PathBuf::from(target_libdir))
+                .map_err(|_| anyhow::format_err!("target-libdir was not an absolute path"))?;
+            tracing::info!("Loading rustc proc-macro paths from {target_libdir}");
+
+            let proc_macro_dylibs: Vec<(String, AbsPathBuf)> = std::fs::read_dir(target_libdir)?
+                .filter_map(|entry| {
+                    let dir_entry = entry.ok()?;
+                    if dir_entry.file_type().ok()?.is_file() {
+                        let path = dir_entry.path();
+                        let extension = path.extension()?;
+                        if extension == std::env::consts::DLL_EXTENSION {
+                            let name = path.file_stem()?.to_str()?.split_once('-')?.0.to_owned();
+                            let path = AbsPathBuf::try_from(Utf8PathBuf::from_path_buf(path).ok()?)
+                                .ok()?;
+                            return Some((name, path));
+                        }
+                    }
+                    None
+                })
+                .collect();
+            for p in rustc.packages() {
+                let package = &rustc[p];
+                if package
+                    .targets
+                    .iter()
+                    .any(|&it| matches!(rustc[it].kind, TargetKind::Lib { is_proc_macro: true }))
+                {
+                    if let Some((_, path)) = proc_macro_dylibs
+                        .iter()
+                        .find(|(name, _)| *name.trim_start_matches("lib") == package.name)
+                    {
+                        bs.outputs[p].proc_macro_dylib_path = Some(path.clone());
+                    }
+                }
+            }
+
+            if tracing::enabled!(tracing::Level::INFO) {
+                for package in rustc.packages() {
+                    let package_build_data = &bs.outputs[package];
+                    if !package_build_data.is_empty() {
+                        tracing::info!(
+                            "{}: {package_build_data:?}",
+                            rustc[package].manifest.parent(),
+                        );
+                    }
+                }
+            }
+            Ok(())
+        })();
+        if let Err::<_, anyhow::Error>(e) = res {
+            bs.error = Some(e.to_string());
+        }
+        bs
+    }
+
+    fn run_per_ws(
+        cmd: Command,
+        workspace: &CargoWorkspace,
+        progress: &dyn Fn(String),
+    ) -> io::Result<WorkspaceBuildScripts> {
+        let mut res = WorkspaceBuildScripts::default();
+        let outputs = &mut res.outputs;
+        // NB: Cargo.toml could have been modified between `cargo metadata` and
+        // `cargo check`. We shouldn't assume that package ids we see here are
+        // exactly those from `config`.
+        let mut by_id: FxHashMap<String, Package> = FxHashMap::default();
+        for package in workspace.packages() {
+            outputs.insert(package, BuildScriptOutput::default());
+            by_id.insert(workspace[package].id.clone(), package);
+        }
+
+        res.error = Self::run_command(
+            cmd,
+            |package, cb| {
+                if let Some(&package) = by_id.get(package) {
+                    cb(&workspace[package].name, &mut outputs[package]);
+                }
+            },
+            progress,
+        )?;
+
+        if tracing::enabled!(tracing::Level::INFO) {
+            for package in workspace.packages() {
+                let package_build_data = &outputs[package];
+                if !package_build_data.is_empty() {
+                    tracing::info!(
+                        "{}: {package_build_data:?}",
+                        workspace[package].manifest.parent(),
+                    );
+                }
+            }
+        }
+
+        Ok(res)
+    }
+
+    fn run_command(
+        cmd: Command,
+        // ideally this would be something like:
+        // with_output_for: impl FnMut(&str, dyn FnOnce(&mut BuildScriptOutput)),
+        // but owned trait objects aren't a thing
+        mut with_output_for: impl FnMut(&str, &mut dyn FnMut(&str, &mut BuildScriptOutput)),
+        progress: &dyn Fn(String),
+    ) -> io::Result<Option<String>> {
+        let errors = RefCell::new(String::new());
+        let push_err = |err: &str| {
+            let mut e = errors.borrow_mut();
+            e.push_str(err);
+            e.push('\n');
+        };
+
+        tracing::info!("Running build scripts: {:?}", cmd);
+        let output = stdx::process::spawn_with_streaming_output(
+            cmd,
+            &mut |line| {
+                // Copy-pasted from existing cargo_metadata. It seems like we
+                // should be using serde_stacker here?
+                let mut deserializer = serde_json::Deserializer::from_str(line);
+                deserializer.disable_recursion_limit();
+                let message = Message::deserialize(&mut deserializer)
+                    .unwrap_or_else(|_| Message::TextLine(line.to_owned()));
+
+                match message {
+                    Message::BuildScriptExecuted(mut message) => {
+                        with_output_for(&message.package_id.repr, &mut |name, data| {
+                            progress(format!("running build-script: {name}"));
+                            let cfgs = {
+                                let mut acc = Vec::new();
+                                for cfg in &message.cfgs {
+                                    match crate::parse_cfg(cfg) {
+                                        Ok(it) => acc.push(it),
+                                        Err(err) => {
+                                            push_err(&format!(
+                                                "invalid cfg from cargo-metadata: {err}"
+                                            ));
+                                            return;
+                                        }
+                                    };
+                                }
+                                acc
+                            };
+                            data.envs.extend(message.env.drain(..));
+                            // cargo_metadata crate returns default (empty) path for
+                            // older cargos, which is not absolute, so work around that.
+                            let out_dir = mem::take(&mut message.out_dir);
+                            if !out_dir.as_str().is_empty() {
+                                let out_dir = AbsPathBuf::assert(out_dir);
+                                // inject_cargo_env(package, package_build_data);
+                                data.envs.insert("OUT_DIR", out_dir.as_str());
+                                data.out_dir = Some(out_dir);
+                                data.cfgs = cfgs;
+                            }
+                        });
+                    }
+                    Message::CompilerArtifact(message) => {
+                        with_output_for(&message.package_id.repr, &mut |name, data| {
+                            progress(format!("building proc-macros: {name}"));
+                            if message.target.kind.iter().any(|k| k == "proc-macro") {
+                                // Skip rmeta file
+                                if let Some(filename) =
+                                    message.filenames.iter().find(|file| is_dylib(file))
+                                {
+                                    let filename = AbsPath::assert(filename);
+                                    data.proc_macro_dylib_path = Some(filename.to_owned());
+                                }
+                            }
+                        });
+                    }
+                    Message::CompilerMessage(message) => {
+                        progress(message.target.name);
+
+                        if let Some(diag) = message.message.rendered.as_deref() {
+                            push_err(diag);
+                        }
+                    }
+                    Message::BuildFinished(_) => {}
+                    Message::TextLine(_) => {}
+                    _ => {}
+                }
+            },
+            &mut |line| {
+                push_err(line);
+            },
+        )?;
+
+        let errors = if !output.status.success() {
+            let errors = errors.into_inner();
+            Some(if errors.is_empty() { "cargo check failed".to_owned() } else { errors })
+        } else {
+            None
+        };
+        Ok(errors)
+    }
+
     fn build_command(
         config: &CargoConfig,
         allowed_features: &FxHashSet<String>,
         manifest_path: &ManifestPath,
+        current_dir: &AbsPath,
         sysroot: &Sysroot,
     ) -> io::Result<Command> {
         let mut cmd = match config.run_build_script_command.as_deref() {
@@ -78,7 +413,7 @@
                 cmd.args(&config.extra_args);
 
                 cmd.arg("--manifest-path");
-                cmd.arg(manifest_path.as_ref());
+                cmd.arg(manifest_path);
 
                 if let Some(target_dir) = &config.target_dir {
                     cmd.arg("--target-dir").arg(target_dir);
@@ -125,6 +460,7 @@
             }
         };
 
+        cmd.current_dir(current_dir);
         cmd.envs(&config.extra_env);
         if config.wrap_rustc_in_build_scripts {
             // Setup RUSTC_WRAPPER to point to `rust-analyzer` binary itself. We use
@@ -137,345 +473,6 @@
 
         Ok(cmd)
     }
-
-    /// Runs the build scripts for the given workspace
-    pub(crate) fn run_for_workspace(
-        config: &CargoConfig,
-        workspace: &CargoWorkspace,
-        progress: &dyn Fn(String),
-        sysroot: &Sysroot,
-    ) -> io::Result<WorkspaceBuildScripts> {
-        let current_dir = match &config.invocation_location {
-            InvocationLocation::Root(root) if config.run_build_script_command.is_some() => {
-                root.as_path()
-            }
-            _ => workspace.workspace_root(),
-        }
-        .as_ref();
-
-        let allowed_features = workspace.workspace_features();
-        let cmd =
-            Self::build_command(config, &allowed_features, workspace.manifest_path(), sysroot)?;
-        Self::run_per_ws(cmd, workspace, current_dir, progress)
-    }
-
-    /// Runs the build scripts by invoking the configured command *once*.
-    /// This populates the outputs for all passed in workspaces.
-    pub(crate) fn run_once(
-        config: &CargoConfig,
-        workspaces: &[&CargoWorkspace],
-        progress: &dyn Fn(String),
-        workspace_root: &AbsPathBuf,
-    ) -> io::Result<Vec<WorkspaceBuildScripts>> {
-        assert_eq!(config.invocation_strategy, InvocationStrategy::Once);
-
-        let current_dir = match &config.invocation_location {
-            InvocationLocation::Root(root) => root,
-            InvocationLocation::Workspace => {
-                return Err(io::Error::new(
-                    io::ErrorKind::Other,
-                    "Cannot run build scripts from workspace with invocation strategy `once`",
-                ))
-            }
-        };
-        let cmd = Self::build_command(
-            config,
-            &Default::default(),
-            // This is not gonna be used anyways, so just construct a dummy here
-            &ManifestPath::try_from(workspace_root.clone()).unwrap(),
-            &Sysroot::empty(),
-        )?;
-        // NB: Cargo.toml could have been modified between `cargo metadata` and
-        // `cargo check`. We shouldn't assume that package ids we see here are
-        // exactly those from `config`.
-        let mut by_id = FxHashMap::default();
-        // some workspaces might depend on the same crates, so we need to duplicate the outputs
-        // to those collisions
-        let mut collisions = Vec::new();
-        let mut res: Vec<_> = workspaces
-            .iter()
-            .enumerate()
-            .map(|(idx, workspace)| {
-                let mut res = WorkspaceBuildScripts::default();
-                for package in workspace.packages() {
-                    res.outputs.insert(package, BuildScriptOutput::default());
-                    if by_id.contains_key(&workspace[package].id) {
-                        collisions.push((&workspace[package].id, idx, package));
-                    } else {
-                        by_id.insert(workspace[package].id.clone(), (package, idx));
-                    }
-                }
-                res
-            })
-            .collect();
-
-        let errors = Self::run_command(
-            cmd,
-            current_dir.as_path().as_ref(),
-            |package, cb| {
-                if let Some(&(package, workspace)) = by_id.get(package) {
-                    cb(&workspaces[workspace][package].name, &mut res[workspace].outputs[package]);
-                }
-            },
-            progress,
-        )?;
-        res.iter_mut().for_each(|it| it.error.clone_from(&errors));
-        collisions.into_iter().for_each(|(id, workspace, package)| {
-            if let Some(&(p, w)) = by_id.get(id) {
-                res[workspace].outputs[package] = res[w].outputs[p].clone();
-            }
-        });
-
-        if tracing::enabled!(tracing::Level::INFO) {
-            for (idx, workspace) in workspaces.iter().enumerate() {
-                for package in workspace.packages() {
-                    let package_build_data = &mut res[idx].outputs[package];
-                    if !package_build_data.is_unchanged() {
-                        tracing::info!(
-                            "{}: {package_build_data:?}",
-                            workspace[package].manifest.parent(),
-                        );
-                    }
-                }
-            }
-        }
-
-        Ok(res)
-    }
-
-    fn run_per_ws(
-        cmd: Command,
-        workspace: &CargoWorkspace,
-        current_dir: &path::Path,
-        progress: &dyn Fn(String),
-    ) -> io::Result<WorkspaceBuildScripts> {
-        let mut res = WorkspaceBuildScripts::default();
-        let outputs = &mut res.outputs;
-        // NB: Cargo.toml could have been modified between `cargo metadata` and
-        // `cargo check`. We shouldn't assume that package ids we see here are
-        // exactly those from `config`.
-        let mut by_id: FxHashMap<String, Package> = FxHashMap::default();
-        for package in workspace.packages() {
-            outputs.insert(package, BuildScriptOutput::default());
-            by_id.insert(workspace[package].id.clone(), package);
-        }
-
-        res.error = Self::run_command(
-            cmd,
-            current_dir,
-            |package, cb| {
-                if let Some(&package) = by_id.get(package) {
-                    cb(&workspace[package].name, &mut outputs[package]);
-                }
-            },
-            progress,
-        )?;
-
-        if tracing::enabled!(tracing::Level::INFO) {
-            for package in workspace.packages() {
-                let package_build_data = &outputs[package];
-                if !package_build_data.is_unchanged() {
-                    tracing::info!(
-                        "{}: {package_build_data:?}",
-                        workspace[package].manifest.parent(),
-                    );
-                }
-            }
-        }
-
-        Ok(res)
-    }
-
-    fn run_command(
-        mut cmd: Command,
-        current_dir: &path::Path,
-        // ideally this would be something like:
-        // with_output_for: impl FnMut(&str, dyn FnOnce(&mut BuildScriptOutput)),
-        // but owned trait objects aren't a thing
-        mut with_output_for: impl FnMut(&str, &mut dyn FnMut(&str, &mut BuildScriptOutput)),
-        progress: &dyn Fn(String),
-    ) -> io::Result<Option<String>> {
-        let errors = RefCell::new(String::new());
-        let push_err = |err: &str| {
-            let mut e = errors.borrow_mut();
-            e.push_str(err);
-            e.push('\n');
-        };
-
-        tracing::info!("Running build scripts in {}: {:?}", current_dir.display(), cmd);
-        cmd.current_dir(current_dir);
-        let output = stdx::process::spawn_with_streaming_output(
-            cmd,
-            &mut |line| {
-                // Copy-pasted from existing cargo_metadata. It seems like we
-                // should be using serde_stacker here?
-                let mut deserializer = serde_json::Deserializer::from_str(line);
-                deserializer.disable_recursion_limit();
-                let message = Message::deserialize(&mut deserializer)
-                    .unwrap_or_else(|_| Message::TextLine(line.to_owned()));
-
-                match message {
-                    Message::BuildScriptExecuted(mut message) => {
-                        with_output_for(&message.package_id.repr, &mut |name, data| {
-                            progress(format!("running build-script: {name}"));
-                            let cfgs = {
-                                let mut acc = Vec::new();
-                                for cfg in &message.cfgs {
-                                    match cfg.parse::<CfgFlag>() {
-                                        Ok(it) => acc.push(it),
-                                        Err(err) => {
-                                            push_err(&format!(
-                                                "invalid cfg from cargo-metadata: {err}"
-                                            ));
-                                            return;
-                                        }
-                                    };
-                                }
-                                acc
-                            };
-                            if !message.env.is_empty() {
-                                data.envs = mem::take(&mut message.env);
-                            }
-                            // cargo_metadata crate returns default (empty) path for
-                            // older cargos, which is not absolute, so work around that.
-                            let out_dir = mem::take(&mut message.out_dir);
-                            if !out_dir.as_str().is_empty() {
-                                let out_dir = AbsPathBuf::assert(out_dir);
-                                // inject_cargo_env(package, package_build_data);
-                                data.envs.push(("OUT_DIR".to_owned(), out_dir.as_str().to_owned()));
-                                data.out_dir = Some(out_dir);
-                                data.cfgs = cfgs;
-                            }
-                        });
-                    }
-                    Message::CompilerArtifact(message) => {
-                        with_output_for(&message.package_id.repr, &mut |name, data| {
-                            progress(format!("building proc-macros: {name}"));
-                            if message.target.kind.iter().any(|k| k == "proc-macro") {
-                                // Skip rmeta file
-                                if let Some(filename) =
-                                    message.filenames.iter().find(|name| is_dylib(name))
-                                {
-                                    let filename = AbsPath::assert(filename);
-                                    data.proc_macro_dylib_path = Some(filename.to_owned());
-                                }
-                            }
-                        });
-                    }
-                    Message::CompilerMessage(message) => {
-                        progress(message.target.name);
-
-                        if let Some(diag) = message.message.rendered.as_deref() {
-                            push_err(diag);
-                        }
-                    }
-                    Message::BuildFinished(_) => {}
-                    Message::TextLine(_) => {}
-                    _ => {}
-                }
-            },
-            &mut |line| {
-                push_err(line);
-            },
-        )?;
-
-        let errors = if !output.status.success() {
-            let errors = errors.into_inner();
-            Some(if errors.is_empty() { "cargo check failed".to_owned() } else { errors })
-        } else {
-            None
-        };
-        Ok(errors)
-    }
-
-    pub fn error(&self) -> Option<&str> {
-        self.error.as_deref()
-    }
-
-    pub(crate) fn get_output(&self, idx: Package) -> Option<&BuildScriptOutput> {
-        self.outputs.get(idx)
-    }
-
-    pub(crate) fn rustc_crates(
-        rustc: &CargoWorkspace,
-        current_dir: &AbsPath,
-        extra_env: &FxHashMap<String, String>,
-        sysroot: &Sysroot,
-    ) -> Self {
-        let mut bs = WorkspaceBuildScripts::default();
-        for p in rustc.packages() {
-            bs.outputs.insert(p, BuildScriptOutput::default());
-        }
-        let res = (|| {
-            let target_libdir = (|| {
-                let mut cargo_config = sysroot.tool(Tool::Cargo);
-                cargo_config.envs(extra_env);
-                cargo_config
-                    .current_dir(current_dir)
-                    .args(["rustc", "-Z", "unstable-options", "--print", "target-libdir"])
-                    .env("RUSTC_BOOTSTRAP", "1");
-                if let Ok(it) = utf8_stdout(cargo_config) {
-                    return Ok(it);
-                }
-                let mut cmd = sysroot.tool(Tool::Rustc);
-                cmd.envs(extra_env);
-                cmd.args(["--print", "target-libdir"]);
-                utf8_stdout(cmd)
-            })()?;
-
-            let target_libdir = AbsPathBuf::try_from(PathBuf::from(target_libdir))
-                .map_err(|_| anyhow::format_err!("target-libdir was not an absolute path"))?;
-            tracing::info!("Loading rustc proc-macro paths from {target_libdir}");
-
-            let proc_macro_dylibs: Vec<(String, AbsPathBuf)> = std::fs::read_dir(target_libdir)?
-                .filter_map(|entry| {
-                    let dir_entry = entry.ok()?;
-                    if dir_entry.file_type().ok()?.is_file() {
-                        let path = dir_entry.path();
-                        let extension = path.extension()?;
-                        if extension == std::env::consts::DLL_EXTENSION {
-                            let name = path.file_stem()?.to_str()?.split_once('-')?.0.to_owned();
-                            let path = AbsPathBuf::try_from(path).ok()?;
-                            return Some((name, path));
-                        }
-                    }
-                    None
-                })
-                .collect();
-            for p in rustc.packages() {
-                let package = &rustc[p];
-                if package
-                    .targets
-                    .iter()
-                    .any(|&it| matches!(rustc[it].kind, TargetKind::Lib { is_proc_macro: true }))
-                {
-                    if let Some((_, path)) = proc_macro_dylibs
-                        .iter()
-                        .find(|(name, _)| *name.trim_start_matches("lib") == package.name)
-                    {
-                        bs.outputs[p].proc_macro_dylib_path = Some(path.clone());
-                    }
-                }
-            }
-
-            if tracing::enabled!(tracing::Level::INFO) {
-                for package in rustc.packages() {
-                    let package_build_data = &bs.outputs[package];
-                    if !package_build_data.is_unchanged() {
-                        tracing::info!(
-                            "{}: {package_build_data:?}",
-                            rustc[package].manifest.parent(),
-                        );
-                    }
-                }
-            }
-            Ok(())
-        })();
-        if let Err::<_, anyhow::Error>(e) = res {
-            bs.error = Some(e.to_string());
-        }
-        bs
-    }
 }
 
 // FIXME: Find a better way to know if it is a dylib.
diff --git a/crates/project-model/src/cargo_workspace.rs b/crates/project-model/src/cargo_workspace.rs
index 632ba1c..38eeede 100644
--- a/crates/project-model/src/cargo_workspace.rs
+++ b/crates/project-model/src/cargo_workspace.rs
@@ -85,8 +85,6 @@
     pub target: Option<String>,
     /// Sysroot loading behavior
     pub sysroot: Option<RustLibSource>,
-    /// Whether to invoke `cargo metadata` on the sysroot crate.
-    pub sysroot_query_metadata: bool,
     pub sysroot_src: Option<AbsPathBuf>,
     /// rustc private crate source
     pub rustc_source: Option<RustLibSource>,
@@ -259,6 +257,7 @@
         current_dir: &AbsPath,
         config: &CargoConfig,
         sysroot: &Sysroot,
+        locked: bool,
         progress: &dyn Fn(String),
     ) -> anyhow::Result<cargo_metadata::Metadata> {
         let targets = find_list_of_build_targets(config, cargo_toml, sysroot);
@@ -312,6 +311,9 @@
             // opt into it themselves.
             other_options.push("-Zscript".to_owned());
         }
+        if locked {
+            other_options.push("--locked".to_owned());
+        }
         meta.other_options(other_options);
 
         // FIXME: Fetching metadata is a slow process, as it might require
diff --git a/crates/project-model/src/cfg.rs b/crates/project-model/src/cfg.rs
deleted file mode 100644
index e921e3d..0000000
--- a/crates/project-model/src/cfg.rs
+++ /dev/null
@@ -1,100 +0,0 @@
-//! Parsing of CfgFlags as command line arguments, as in
-//!
-//! rustc main.rs --cfg foo --cfg 'feature="bar"'
-use std::{fmt, str::FromStr};
-
-use cfg::{CfgDiff, CfgOptions};
-use intern::Symbol;
-use rustc_hash::FxHashMap;
-use serde::Serialize;
-
-#[derive(Clone, Eq, PartialEq, Debug, Serialize)]
-pub enum CfgFlag {
-    Atom(String),
-    KeyValue { key: String, value: String },
-}
-
-impl FromStr for CfgFlag {
-    type Err = String;
-    fn from_str(s: &str) -> Result<Self, Self::Err> {
-        let res = match s.split_once('=') {
-            Some((key, value)) => {
-                if !(value.starts_with('"') && value.ends_with('"')) {
-                    return Err(format!("Invalid cfg ({s:?}), value should be in quotes"));
-                }
-                let key = key.to_owned();
-                let value = value[1..value.len() - 1].to_string();
-                CfgFlag::KeyValue { key, value }
-            }
-            None => CfgFlag::Atom(s.into()),
-        };
-        Ok(res)
-    }
-}
-
-impl<'de> serde::Deserialize<'de> for CfgFlag {
-    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
-    where
-        D: serde::Deserializer<'de>,
-    {
-        String::deserialize(deserializer)?.parse().map_err(serde::de::Error::custom)
-    }
-}
-
-impl Extend<CfgFlag> for CfgOptions {
-    fn extend<T: IntoIterator<Item = CfgFlag>>(&mut self, iter: T) {
-        for cfg_flag in iter {
-            match cfg_flag {
-                CfgFlag::Atom(it) => self.insert_atom(Symbol::intern(&it)),
-                CfgFlag::KeyValue { key, value } => {
-                    self.insert_key_value(Symbol::intern(&key), Symbol::intern(&value))
-                }
-            }
-        }
-    }
-}
-
-impl FromIterator<CfgFlag> for CfgOptions {
-    fn from_iter<T: IntoIterator<Item = CfgFlag>>(iter: T) -> Self {
-        let mut this = CfgOptions::default();
-        this.extend(iter);
-        this
-    }
-}
-
-impl fmt::Display for CfgFlag {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        match self {
-            CfgFlag::Atom(atom) => f.write_str(atom),
-            CfgFlag::KeyValue { key, value } => {
-                f.write_str(key)?;
-                f.write_str("=")?;
-                f.write_str(value)
-            }
-        }
-    }
-}
-
-/// A set of cfg-overrides per crate.
-#[derive(Default, Debug, Clone, Eq, PartialEq)]
-pub struct CfgOverrides {
-    /// A global set of overrides matching all crates.
-    pub global: CfgDiff,
-    /// A set of overrides matching specific crates.
-    pub selective: FxHashMap<String, CfgDiff>,
-}
-
-impl CfgOverrides {
-    pub fn len(&self) -> usize {
-        self.global.len() + self.selective.values().map(|it| it.len()).sum::<usize>()
-    }
-
-    pub fn apply(&self, cfg_options: &mut CfgOptions, name: &str) {
-        if !self.global.is_empty() {
-            cfg_options.apply_diff(self.global.clone());
-        };
-        if let Some(diff) = self.selective.get(name) {
-            cfg_options.apply_diff(diff.clone());
-        };
-    }
-}
diff --git a/crates/project-model/src/env.rs b/crates/project-model/src/env.rs
index 049acc2..ac7246a 100644
--- a/crates/project-model/src/env.rs
+++ b/crates/project-model/src/env.rs
@@ -90,10 +90,13 @@
     stdout
         .lines()
         .filter_map(|l| l.strip_prefix("env."))
-        .filter_map(|l| {
-            l.split_once(" = ")
-                // cargo used to report it with this, keep it for a couple releases around
-                .or_else(|| l.split_once(".value = "))
+        .filter_map(|l| l.split_once(" = "))
+        .filter_map(|(k, v)| {
+            if k.contains('.') {
+                k.strip_suffix(".value").zip(Some(v))
+            } else {
+                Some((k, v))
+            }
         })
         .map(|(key, value)| (key.to_owned(), value.trim_matches('"').to_owned()))
         .collect()
diff --git a/crates/project-model/src/lib.rs b/crates/project-model/src/lib.rs
index 92bf6a0..4fa7050 100644
--- a/crates/project-model/src/lib.rs
+++ b/crates/project-model/src/lib.rs
@@ -15,9 +15,8 @@
 //!   procedural macros).
 //! * Lowering of concrete model to a [`base_db::CrateGraph`]
 
-mod build_scripts;
+mod build_dependencies;
 mod cargo_workspace;
-mod cfg;
 mod env;
 mod manifest_path;
 pub mod project_json;
@@ -37,16 +36,15 @@
 };
 
 use anyhow::{bail, format_err, Context};
-use paths::{AbsPath, AbsPathBuf};
+use paths::{AbsPath, AbsPathBuf, Utf8PathBuf};
 use rustc_hash::FxHashSet;
 
 pub use crate::{
-    build_scripts::WorkspaceBuildScripts,
+    build_dependencies::WorkspaceBuildScripts,
     cargo_workspace::{
         CargoConfig, CargoFeatures, CargoWorkspace, Package, PackageData, PackageDependency,
         RustLibSource, Target, TargetData, TargetKind,
     },
-    cfg::CfgOverrides,
     manifest_path::ManifestPath,
     project_json::{ProjectJson, ProjectJsonData},
     sysroot::Sysroot,
@@ -68,6 +66,9 @@
         if path.file_name().unwrap_or_default() == "rust-project.json" {
             return Ok(ProjectManifest::ProjectJson(path));
         }
+        if path.file_name().unwrap_or_default() == ".rust-project.json" {
+            return Ok(ProjectManifest::ProjectJson(path));
+        }
         if path.file_name().unwrap_or_default() == "Cargo.toml" {
             return Ok(ProjectManifest::CargoToml(path));
         }
@@ -94,6 +95,9 @@
         if let Some(project_json) = find_in_parent_dirs(path, "rust-project.json") {
             return Ok(vec![ProjectManifest::ProjectJson(project_json)]);
         }
+        if let Some(project_json) = find_in_parent_dirs(path, ".rust-project.json") {
+            return Ok(vec![ProjectManifest::ProjectJson(project_json)]);
+        }
         return find_cargo_toml(path)
             .map(|paths| paths.into_iter().map(ProjectManifest::CargoToml).collect());
 
@@ -132,8 +136,11 @@
                 .filter_map(Result::ok)
                 .map(|it| it.path().join("Cargo.toml"))
                 .filter(|it| it.exists())
+                .map(Utf8PathBuf::from_path_buf)
+                .filter_map(Result::ok)
                 .map(AbsPathBuf::try_from)
-                .filter_map(|it| it.ok()?.try_into().ok())
+                .filter_map(Result::ok)
+                .filter_map(|it| it.try_into().ok())
                 .collect()
         }
     }
@@ -192,3 +199,42 @@
     #[default]
     Workspace,
 }
+
+/// A set of cfg-overrides per crate.
+#[derive(Default, Debug, Clone, Eq, PartialEq)]
+pub struct CfgOverrides {
+    /// A global set of overrides matching all crates.
+    pub global: cfg::CfgDiff,
+    /// A set of overrides matching specific crates.
+    pub selective: rustc_hash::FxHashMap<String, cfg::CfgDiff>,
+}
+
+impl CfgOverrides {
+    pub fn len(&self) -> usize {
+        self.global.len() + self.selective.values().map(|it| it.len()).sum::<usize>()
+    }
+
+    pub fn apply(&self, cfg_options: &mut cfg::CfgOptions, name: &str) {
+        if !self.global.is_empty() {
+            cfg_options.apply_diff(self.global.clone());
+        };
+        if let Some(diff) = self.selective.get(name) {
+            cfg_options.apply_diff(diff.clone());
+        };
+    }
+}
+
+fn parse_cfg(s: &str) -> Result<cfg::CfgAtom, String> {
+    let res = match s.split_once('=') {
+        Some((key, value)) => {
+            if !(value.starts_with('"') && value.ends_with('"')) {
+                return Err(format!("Invalid cfg ({s:?}), value should be in quotes"));
+            }
+            let key = intern::Symbol::intern(key);
+            let value = intern::Symbol::intern(&value[1..value.len() - 1]);
+            cfg::CfgAtom::KeyValue { key, value }
+        }
+        None => cfg::CfgAtom::Flag(intern::Symbol::intern(s)),
+    };
+    Ok(res)
+}
diff --git a/crates/project-model/src/manifest_path.rs b/crates/project-model/src/manifest_path.rs
index 2331c0c..a8be5df 100644
--- a/crates/project-model/src/manifest_path.rs
+++ b/crates/project-model/src/manifest_path.rs
@@ -64,6 +64,18 @@
     }
 }
 
+impl AsRef<std::path::Path> for ManifestPath {
+    fn as_ref(&self) -> &std::path::Path {
+        self.file.as_ref()
+    }
+}
+
+impl AsRef<std::ffi::OsStr> for ManifestPath {
+    fn as_ref(&self) -> &std::ffi::OsStr {
+        self.file.as_ref()
+    }
+}
+
 impl Borrow<AbsPath> for ManifestPath {
     fn borrow(&self) -> &AbsPath {
         self.file.borrow()
diff --git a/crates/project-model/src/project_json.rs b/crates/project-model/src/project_json.rs
index cf0a6ad..7dea0c3 100644
--- a/crates/project-model/src/project_json.rs
+++ b/crates/project-model/src/project_json.rs
@@ -50,12 +50,13 @@
 //! rust-project.json over time via configuration request!)
 
 use base_db::{CrateDisplayName, CrateName};
+use cfg::CfgAtom;
 use paths::{AbsPath, AbsPathBuf, Utf8PathBuf};
 use rustc_hash::FxHashMap;
 use serde::{de, Deserialize, Serialize};
 use span::Edition;
 
-use crate::{cfg::CfgFlag, ManifestPath, TargetKind};
+use crate::{ManifestPath, TargetKind};
 
 /// Roots and crates that compose this Rust project.
 #[derive(Clone, Debug, Eq, PartialEq)]
@@ -73,106 +74,6 @@
     runnables: Vec<Runnable>,
 }
 
-/// A crate points to the root module of a crate and lists the dependencies of the crate. This is
-/// useful in creating the crate graph.
-#[derive(Clone, Debug, Eq, PartialEq)]
-pub struct Crate {
-    pub(crate) display_name: Option<CrateDisplayName>,
-    pub root_module: AbsPathBuf,
-    pub(crate) edition: Edition,
-    pub(crate) version: Option<String>,
-    pub(crate) deps: Vec<Dep>,
-    pub(crate) cfg: Vec<CfgFlag>,
-    pub(crate) target: Option<String>,
-    pub(crate) env: FxHashMap<String, String>,
-    pub(crate) proc_macro_dylib_path: Option<AbsPathBuf>,
-    pub(crate) is_workspace_member: bool,
-    pub(crate) include: Vec<AbsPathBuf>,
-    pub(crate) exclude: Vec<AbsPathBuf>,
-    pub(crate) is_proc_macro: bool,
-    pub(crate) repository: Option<String>,
-    pub build: Option<Build>,
-}
-
-/// Additional, build-specific data about a crate.
-#[derive(Clone, Debug, Eq, PartialEq)]
-pub struct Build {
-    /// The name associated with this crate.
-    ///
-    /// This is determined by the build system that produced
-    /// the `rust-project.json` in question. For instance, if buck were used,
-    /// the label might be something like `//ide/rust/rust-analyzer:rust-analyzer`.
-    ///
-    /// Do not attempt to parse the contents of this string; it is a build system-specific
-    /// identifier similar to [`Crate::display_name`].
-    pub label: String,
-    /// Path corresponding to the build system-specific file defining the crate.
-    ///
-    /// It is roughly analogous to [`ManifestPath`], but it should *not* be used with
-    /// [`crate::ProjectManifest::from_manifest_file`], as the build file may not be
-    /// be in the `rust-project.json`.
-    pub build_file: Utf8PathBuf,
-    /// The kind of target.
-    ///
-    /// Examples (non-exhaustively) include [`TargetKind::Bin`], [`TargetKind::Lib`],
-    /// and [`TargetKind::Test`]. This information is used to determine what sort
-    /// of runnable codelens to provide, if any.
-    pub target_kind: TargetKind,
-}
-
-/// A template-like structure for describing runnables.
-///
-/// These are used for running and debugging binaries and tests without encoding
-/// build system-specific knowledge into rust-analyzer.
-///
-/// # Example
-///
-/// Below is an example of a test runnable. `{label}` and `{test_id}`
-/// are explained in [`Runnable::args`]'s documentation.
-///
-/// ```json
-/// {
-///     "program": "buck",
-///     "args": [
-///         "test",
-///          "{label}",
-///          "--",
-///          "{test_id}",
-///          "--print-passing-details"
-///     ],
-///     "cwd": "/home/user/repo-root/",
-///     "kind": "testOne"
-/// }
-/// ```
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub struct Runnable {
-    /// The program invoked by the runnable.
-    ///
-    /// For example, this might be `cargo`, `buck`, or `bazel`.
-    pub program: String,
-    /// The arguments passed to [`Runnable::program`].
-    ///
-    /// The args can contain two template strings: `{label}` and `{test_id}`.
-    /// rust-analyzer will find and replace `{label}` with [`Build::label`] and
-    /// `{test_id}` with the test name.
-    pub args: Vec<String>,
-    /// The current working directory of the runnable.
-    pub cwd: Utf8PathBuf,
-    pub kind: RunnableKind,
-}
-
-/// The kind of runnable.
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub enum RunnableKind {
-    Check,
-
-    /// Can run a binary.
-    Run,
-
-    /// Run a single test.
-    TestOne,
-}
-
 impl ProjectJson {
     /// Create a new ProjectJson instance.
     ///
@@ -301,6 +202,106 @@
     }
 }
 
+/// A crate points to the root module of a crate and lists the dependencies of the crate. This is
+/// useful in creating the crate graph.
+#[derive(Clone, Debug, Eq, PartialEq)]
+pub struct Crate {
+    pub(crate) display_name: Option<CrateDisplayName>,
+    pub root_module: AbsPathBuf,
+    pub(crate) edition: Edition,
+    pub(crate) version: Option<String>,
+    pub(crate) deps: Vec<Dep>,
+    pub(crate) cfg: Vec<CfgAtom>,
+    pub(crate) target: Option<String>,
+    pub(crate) env: FxHashMap<String, String>,
+    pub(crate) proc_macro_dylib_path: Option<AbsPathBuf>,
+    pub(crate) is_workspace_member: bool,
+    pub(crate) include: Vec<AbsPathBuf>,
+    pub(crate) exclude: Vec<AbsPathBuf>,
+    pub(crate) is_proc_macro: bool,
+    pub(crate) repository: Option<String>,
+    pub build: Option<Build>,
+}
+
+/// Additional, build-specific data about a crate.
+#[derive(Clone, Debug, Eq, PartialEq)]
+pub struct Build {
+    /// The name associated with this crate.
+    ///
+    /// This is determined by the build system that produced
+    /// the `rust-project.json` in question. For instance, if buck were used,
+    /// the label might be something like `//ide/rust/rust-analyzer:rust-analyzer`.
+    ///
+    /// Do not attempt to parse the contents of this string; it is a build system-specific
+    /// identifier similar to [`Crate::display_name`].
+    pub label: String,
+    /// Path corresponding to the build system-specific file defining the crate.
+    ///
+    /// It is roughly analogous to [`ManifestPath`], but it should *not* be used with
+    /// [`crate::ProjectManifest::from_manifest_file`], as the build file may not
+    /// be in the `rust-project.json`.
+    pub build_file: Utf8PathBuf,
+    /// The kind of target.
+    ///
+    /// Examples (non-exhaustively) include [`TargetKind::Bin`], [`TargetKind::Lib`],
+    /// and [`TargetKind::Test`]. This information is used to determine what sort
+    /// of runnable codelens to provide, if any.
+    pub target_kind: TargetKind,
+}
+
+/// A template-like structure for describing runnables.
+///
+/// These are used for running and debugging binaries and tests without encoding
+/// build system-specific knowledge into rust-analyzer.
+///
+/// # Example
+///
+/// Below is an example of a test runnable. `{label}` and `{test_id}`
+/// are explained in [`Runnable::args`]'s documentation.
+///
+/// ```json
+/// {
+///     "program": "buck",
+///     "args": [
+///         "test",
+///          "{label}",
+///          "--",
+///          "{test_id}",
+///          "--print-passing-details"
+///     ],
+///     "cwd": "/home/user/repo-root/",
+///     "kind": "testOne"
+/// }
+/// ```
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct Runnable {
+    /// The program invoked by the runnable.
+    ///
+    /// For example, this might be `cargo`, `buck`, or `bazel`.
+    pub program: String,
+    /// The arguments passed to [`Runnable::program`].
+    ///
+    /// The args can contain two template strings: `{label}` and `{test_id}`.
+    /// rust-analyzer will find and replace `{label}` with [`Build::label`] and
+    /// `{test_id}` with the test name.
+    pub args: Vec<String>,
+    /// The current working directory of the runnable.
+    pub cwd: Utf8PathBuf,
+    pub kind: RunnableKind,
+}
+
+/// The kind of runnable.
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum RunnableKind {
+    Check,
+
+    /// Can run a binary.
+    Run,
+
+    /// Run a single test.
+    TestOne,
+}
+
 #[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)]
 pub struct ProjectJsonData {
     sysroot: Option<Utf8PathBuf>,
@@ -319,7 +320,8 @@
     version: Option<semver::Version>,
     deps: Vec<Dep>,
     #[serde(default)]
-    cfg: Vec<CfgFlag>,
+    #[serde(with = "cfg_")]
+    cfg: Vec<CfgAtom>,
     target: Option<String>,
     #[serde(default)]
     env: FxHashMap<String, String>,
@@ -334,6 +336,33 @@
     build: Option<BuildData>,
 }
 
+mod cfg_ {
+    use cfg::CfgAtom;
+    use serde::{Deserialize, Serialize};
+
+    pub(super) fn deserialize<'de, D>(deserializer: D) -> Result<Vec<CfgAtom>, D::Error>
+    where
+        D: serde::Deserializer<'de>,
+    {
+        let cfg: Vec<String> = Vec::deserialize(deserializer)?;
+        cfg.into_iter().map(|it| crate::parse_cfg(&it).map_err(serde::de::Error::custom)).collect()
+    }
+    pub(super) fn serialize<S>(cfg: &[CfgAtom], serializer: S) -> Result<S::Ok, S::Error>
+    where
+        S: serde::Serializer,
+    {
+        cfg.iter()
+            .map(|cfg| match cfg {
+                CfgAtom::Flag(flag) => flag.as_str().to_owned(),
+                CfgAtom::KeyValue { key, value } => {
+                    format!("{}=\"{}\"", key.as_str(), value.as_str())
+                }
+            })
+            .collect::<Vec<String>>()
+            .serialize(serializer)
+    }
+}
+
 #[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)]
 #[serde(rename = "edition")]
 enum EditionData {
@@ -378,6 +407,29 @@
     Lib,
     Test,
 }
+/// Identifies a crate by position in the crates array.
+///
+/// This will differ from `CrateId` when multiple `ProjectJson`
+/// workspaces are loaded.
+#[derive(Serialize, Deserialize, Debug, Clone, Copy, Eq, PartialEq, Hash)]
+#[serde(transparent)]
+pub struct CrateArrayIdx(pub usize);
+
+#[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)]
+pub(crate) struct Dep {
+    /// Identifies a crate by position in the crates array.
+    #[serde(rename = "crate")]
+    pub(crate) krate: CrateArrayIdx,
+    #[serde(serialize_with = "serialize_crate_name")]
+    #[serde(deserialize_with = "deserialize_crate_name")]
+    pub(crate) name: CrateName,
+}
+
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
+struct CrateSource {
+    include_dirs: Vec<Utf8PathBuf>,
+    exclude_dirs: Vec<Utf8PathBuf>,
+}
 
 impl From<TargetKindData> for TargetKind {
     fn from(data: TargetKindData) -> Self {
@@ -416,30 +468,6 @@
     }
 }
 
-/// Identifies a crate by position in the crates array.
-///
-/// This will differ from `CrateId` when multiple `ProjectJson`
-/// workspaces are loaded.
-#[derive(Serialize, Deserialize, Debug, Clone, Copy, Eq, PartialEq, Hash)]
-#[serde(transparent)]
-pub struct CrateArrayIdx(pub usize);
-
-#[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)]
-pub(crate) struct Dep {
-    /// Identifies a crate by position in the crates array.
-    #[serde(rename = "crate")]
-    pub(crate) krate: CrateArrayIdx,
-    #[serde(serialize_with = "serialize_crate_name")]
-    #[serde(deserialize_with = "deserialize_crate_name")]
-    pub(crate) name: CrateName,
-}
-
-#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
-struct CrateSource {
-    include_dirs: Vec<Utf8PathBuf>,
-    exclude_dirs: Vec<Utf8PathBuf>,
-}
-
 fn deserialize_crate_name<'de, D>(de: D) -> std::result::Result<CrateName, D::Error>
 where
     D: de::Deserializer<'de>,
diff --git a/crates/project-model/src/rustc_cfg.rs b/crates/project-model/src/rustc_cfg.rs
index 599897f..aa73ff8 100644
--- a/crates/project-model/src/rustc_cfg.rs
+++ b/crates/project-model/src/rustc_cfg.rs
@@ -1,10 +1,12 @@
 //! Runs `rustc --print cfg` to get built-in cfg flags.
 
 use anyhow::Context;
+use cfg::CfgAtom;
+use intern::Symbol;
 use rustc_hash::FxHashMap;
 use toolchain::Tool;
 
-use crate::{cfg::CfgFlag, utf8_stdout, ManifestPath, Sysroot};
+use crate::{utf8_stdout, ManifestPath, Sysroot};
 
 /// Determines how `rustc --print cfg` is discovered and invoked.
 pub(crate) enum RustcCfgConfig<'a> {
@@ -20,15 +22,15 @@
     target: Option<&str>,
     extra_env: &FxHashMap<String, String>,
     config: RustcCfgConfig<'_>,
-) -> Vec<CfgFlag> {
+) -> Vec<CfgAtom> {
     let _p = tracing::info_span!("rustc_cfg::get").entered();
-    let mut res = Vec::with_capacity(6 * 2 + 1);
+    let mut res: Vec<_> = Vec::with_capacity(6 * 2 + 1);
 
     // Some nightly-only cfgs, which are required for stdlib
-    res.push(CfgFlag::Atom("target_thread_local".into()));
+    res.push(CfgAtom::Flag(Symbol::intern("target_thread_local")));
     for ty in ["8", "16", "32", "64", "cas", "ptr"] {
         for key in ["target_has_atomic", "target_has_atomic_load_store"] {
-            res.push(CfgFlag::KeyValue { key: key.to_owned(), value: ty.into() });
+            res.push(CfgAtom::KeyValue { key: Symbol::intern(key), value: Symbol::intern(ty) });
         }
     }
 
@@ -42,8 +44,7 @@
         }
     };
 
-    let rustc_cfgs =
-        rustc_cfgs.lines().map(|it| it.parse::<CfgFlag>()).collect::<Result<Vec<_>, _>>();
+    let rustc_cfgs = rustc_cfgs.lines().map(crate::parse_cfg).collect::<Result<Vec<_>, _>>();
 
     match rustc_cfgs {
         Ok(rustc_cfgs) => {
diff --git a/crates/project-model/src/sysroot.rs b/crates/project-model/src/sysroot.rs
index 1eeec4c..419fac3 100644
--- a/crates/project-model/src/sysroot.rs
+++ b/crates/project-model/src/sysroot.rs
@@ -123,32 +123,27 @@
 // FIXME: Expose a builder api as loading the sysroot got way too modular and complicated.
 impl Sysroot {
     /// Attempts to discover the toolchain's sysroot from the given `dir`.
-    pub fn discover(
-        dir: &AbsPath,
-        extra_env: &FxHashMap<String, String>,
-        metadata: bool,
-    ) -> Sysroot {
+    pub fn discover(dir: &AbsPath, extra_env: &FxHashMap<String, String>) -> Sysroot {
         let sysroot_dir = discover_sysroot_dir(dir, extra_env);
         let sysroot_src_dir = sysroot_dir.as_ref().ok().map(|sysroot_dir| {
             discover_sysroot_src_dir_or_add_component(sysroot_dir, dir, extra_env)
         });
-        Sysroot::load_core_check(Some(sysroot_dir), sysroot_src_dir, metadata)
+        Sysroot::load_core_check(Some(sysroot_dir), sysroot_src_dir)
     }
 
     pub fn discover_with_src_override(
         current_dir: &AbsPath,
         extra_env: &FxHashMap<String, String>,
         sysroot_src_dir: AbsPathBuf,
-        metadata: bool,
     ) -> Sysroot {
         let sysroot_dir = discover_sysroot_dir(current_dir, extra_env);
-        Sysroot::load_core_check(Some(sysroot_dir), Some(Ok(sysroot_src_dir)), metadata)
+        Sysroot::load_core_check(Some(sysroot_dir), Some(Ok(sysroot_src_dir)))
     }
 
-    pub fn discover_sysroot_src_dir(sysroot_dir: AbsPathBuf, metadata: bool) -> Sysroot {
+    pub fn discover_sysroot_src_dir(sysroot_dir: AbsPathBuf) -> Sysroot {
         let sysroot_src_dir = discover_sysroot_src_dir(&sysroot_dir)
             .ok_or_else(|| format_err!("can't find standard library sources in {sysroot_dir}"));
-        Sysroot::load_core_check(Some(Ok(sysroot_dir)), Some(sysroot_src_dir), metadata)
+        Sysroot::load_core_check(Some(Ok(sysroot_dir)), Some(sysroot_src_dir))
     }
 
     pub fn discover_rustc_src(&self) -> Option<ManifestPath> {
@@ -191,20 +186,15 @@
             })
     }
 
-    pub fn load(
-        sysroot_dir: Option<AbsPathBuf>,
-        sysroot_src_dir: Option<AbsPathBuf>,
-        metadata: bool,
-    ) -> Sysroot {
-        Self::load_core_check(sysroot_dir.map(Ok), sysroot_src_dir.map(Ok), metadata)
+    pub fn load(sysroot_dir: Option<AbsPathBuf>, sysroot_src_dir: Option<AbsPathBuf>) -> Sysroot {
+        Self::load_core_check(sysroot_dir.map(Ok), sysroot_src_dir.map(Ok))
     }
 
     fn load_core_check(
         sysroot_dir: Option<Result<AbsPathBuf, anyhow::Error>>,
         sysroot_src_dir: Option<Result<AbsPathBuf, anyhow::Error>>,
-        metadata: bool,
     ) -> Sysroot {
-        let mut sysroot = Self::load_(sysroot_dir, sysroot_src_dir, metadata);
+        let mut sysroot = Self::load_(sysroot_dir, sysroot_src_dir);
         if sysroot.error.is_none() {
             if let Some(src_root) = &sysroot.src_root {
                 let has_core = match &sysroot.mode {
@@ -230,7 +220,6 @@
     fn load_(
         sysroot_dir: Option<Result<AbsPathBuf, anyhow::Error>>,
         sysroot_src_dir: Option<Result<AbsPathBuf, anyhow::Error>>,
-        metadata: bool,
     ) -> Sysroot {
         let sysroot_dir = match sysroot_dir {
             Some(Ok(sysroot_dir)) => Some(sysroot_dir),
@@ -263,119 +252,16 @@
                 }
             }
         };
-        if metadata {
-            let sysroot: Option<_> = (|| {
-                let sysroot_cargo_toml = ManifestPath::try_from(
-                    AbsPathBuf::try_from(&*format!("{sysroot_src_dir}/sysroot/Cargo.toml")).ok()?,
-                )
-                .ok()?;
-                let current_dir =
-                    AbsPathBuf::try_from(&*format!("{sysroot_src_dir}/sysroot")).ok()?;
-
-                let mut cargo_config = CargoConfig::default();
-                // the sysroot uses `public-dependency`, so we make cargo think it's a nightly
-                cargo_config.extra_env.insert(
-                    "__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS".to_owned(),
-                    "nightly".to_owned(),
-                );
-
-                let res = CargoWorkspace::fetch_metadata(
-                    &sysroot_cargo_toml,
-                    &current_dir,
-                    &cargo_config,
-                    &Sysroot::empty(),
-                    &|_| (),
-                )
-                .map_err(|e| {
-                    tracing::error!(
-                        "failed to load sysroot `{sysroot_src_dir}/sysroot/Cargo.toml`: {}",
-                        e
-                    );
-                    e
-                });
-                if let Err(e) =
-                    std::fs::remove_file(format!("{sysroot_src_dir}/sysroot/Cargo.lock"))
-                {
-                    tracing::error!(
-                        "failed to remove sysroot `{sysroot_src_dir}/sysroot/Cargo.lock`: {}",
-                        e
-                    )
-                }
-                let mut res = res.ok()?;
-
-                // Patch out `rustc-std-workspace-*` crates to point to the real crates.
-                // This is done prior to `CrateGraph` construction to avoid having duplicate `std` targets.
-
-                let mut fake_core = None;
-                let mut fake_alloc = None;
-                let mut fake_std = None;
-                let mut real_core = None;
-                let mut real_alloc = None;
-                let mut real_std = None;
-                res.packages.iter().enumerate().for_each(|(idx, package)| {
-                    match package.name.strip_prefix("rustc-std-workspace-") {
-                        Some("core") => fake_core = Some((idx, package.id.clone())),
-                        Some("alloc") => fake_alloc = Some((idx, package.id.clone())),
-                        Some("std") => fake_std = Some((idx, package.id.clone())),
-                        Some(_) => {
-                            tracing::warn!("unknown rustc-std-workspace-* crate: {}", package.name)
-                        }
-                        None => match &*package.name {
-                            "core" => real_core = Some(package.id.clone()),
-                            "alloc" => real_alloc = Some(package.id.clone()),
-                            "std" => real_std = Some(package.id.clone()),
-                            _ => (),
-                        },
-                    }
-                });
-
-                let patches =
-                    [fake_core.zip(real_core), fake_alloc.zip(real_alloc), fake_std.zip(real_std)]
-                        .into_iter()
-                        .flatten();
-
-                let resolve = res.resolve.as_mut().expect("metadata executed with deps");
-                let mut remove_nodes = vec![];
-                for (idx, node) in resolve.nodes.iter_mut().enumerate() {
-                    // Replace them in the dependency list
-                    node.deps.iter_mut().for_each(|dep| {
-                        if let Some((_, real)) =
-                            patches.clone().find(|((_, fake_id), _)| *fake_id == dep.pkg)
-                        {
-                            dep.pkg = real;
-                        }
-                    });
-                    if patches.clone().any(|((_, fake), _)| fake == node.id) {
-                        remove_nodes.push(idx);
-                    }
-                }
-                // Remove the fake ones from the resolve data
-                remove_nodes.into_iter().rev().for_each(|r| {
-                    resolve.nodes.remove(r);
-                });
-                // Remove the fake ones from the packages
-                patches.map(|((r, _), _)| r).sorted().rev().for_each(|r| {
-                    res.packages.remove(r);
-                });
-
-                res.workspace_members = res
-                    .packages
-                    .iter()
-                    .filter(|&package| RELEVANT_SYSROOT_CRATES.contains(&&*package.name))
-                    .map(|package| package.id.clone())
-                    .collect();
-                let cargo_workspace = CargoWorkspace::new(res, sysroot_cargo_toml);
-                Some(Sysroot {
-                    root: sysroot_dir.clone(),
-                    src_root: Some(sysroot_src_dir.clone()),
-                    mode: SysrootMode::Workspace(cargo_workspace),
-                    error: None,
-                })
-            })();
-            if let Some(sysroot) = sysroot {
+        let library_manifest = ManifestPath::try_from(sysroot_src_dir.join("Cargo.toml")).unwrap();
+        if fs::metadata(&library_manifest).is_ok() {
+            if let Some(sysroot) =
+                Self::load_library_via_cargo(library_manifest, &sysroot_dir, &sysroot_src_dir)
+            {
                 return sysroot;
             }
         }
+        tracing::debug!("Stitching sysroot library: {sysroot_src_dir}");
+
         let mut stitched = Stitched { crates: Arena::default() };
 
         for path in SYSROOT_CRATES.trim().lines() {
@@ -384,7 +270,7 @@
                 .into_iter()
                 .map(|it| sysroot_src_dir.join(it))
                 .filter_map(|it| ManifestPath::try_from(it).ok())
-                .find(|it| fs::metadata(it.as_ref()).is_ok());
+                .find(|it| fs::metadata(it).is_ok());
 
             if let Some(root) = root {
                 stitched.crates.alloc(SysrootCrateData {
@@ -425,6 +311,92 @@
             error: None,
         }
     }
+
+    fn load_library_via_cargo(
+        library_manifest: ManifestPath,
+        sysroot_dir: &Option<AbsPathBuf>,
+        sysroot_src_dir: &AbsPathBuf,
+    ) -> Option<Sysroot> {
+        tracing::debug!("Loading library metadata: {library_manifest}");
+        let mut cargo_config = CargoConfig::default();
+        // the sysroot uses `public-dependency`, so we make cargo think it's a nightly
+        cargo_config.extra_env.insert(
+            "__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS".to_owned(),
+            "nightly".to_owned(),
+        );
+
+        let mut res = match CargoWorkspace::fetch_metadata(
+            &library_manifest,
+            sysroot_src_dir,
+            &cargo_config,
+            &Sysroot::empty(),
+            // Make sure we never attempt to write to the sysroot
+            true,
+            &|_| (),
+        ) {
+            Ok(it) => it,
+            Err(e) => {
+                tracing::error!("`cargo metadata` failed on `{library_manifest}` : {e}");
+                return None;
+            }
+        };
+
+        // Patch out `rustc-std-workspace-*` crates to point to the real crates.
+        // This is done prior to `CrateGraph` construction to prevent de-duplication logic from failing.
+        let patches = {
+            let mut fake_core = None;
+            let mut fake_alloc = None;
+            let mut fake_std = None;
+            let mut real_core = None;
+            let mut real_alloc = None;
+            let mut real_std = None;
+            res.packages.iter().enumerate().for_each(|(idx, package)| {
+                match package.name.strip_prefix("rustc-std-workspace-") {
+                    Some("core") => fake_core = Some((idx, package.id.clone())),
+                    Some("alloc") => fake_alloc = Some((idx, package.id.clone())),
+                    Some("std") => fake_std = Some((idx, package.id.clone())),
+                    Some(_) => {
+                        tracing::warn!("unknown rustc-std-workspace-* crate: {}", package.name)
+                    }
+                    None => match &*package.name {
+                        "core" => real_core = Some(package.id.clone()),
+                        "alloc" => real_alloc = Some(package.id.clone()),
+                        "std" => real_std = Some(package.id.clone()),
+                        _ => (),
+                    },
+                }
+            });
+
+            [fake_core.zip(real_core), fake_alloc.zip(real_alloc), fake_std.zip(real_std)]
+                .into_iter()
+                .flatten()
+        };
+
+        let resolve = res.resolve.as_mut().expect("metadata executed with deps");
+        resolve.nodes.retain_mut(|node| {
+            // Replace `rustc-std-workspace` crate with the actual one in the dependency list
+            node.deps.iter_mut().for_each(|dep| {
+                let real_pkg = patches.clone().find(|((_, fake_id), _)| *fake_id == dep.pkg);
+                if let Some((_, real)) = real_pkg {
+                    dep.pkg = real;
+                }
+            });
+            // Remove this node if it's a fake one
+            !patches.clone().any(|((_, fake), _)| fake == node.id)
+        });
+        // Remove the fake ones from the package list
+        patches.map(|((idx, _), _)| idx).sorted().rev().for_each(|idx| {
+            res.packages.remove(idx);
+        });
+
+        let cargo_workspace = CargoWorkspace::new(res, library_manifest);
+        Some(Sysroot {
+            root: sysroot_dir.clone(),
+            src_root: Some(sysroot_src_dir.clone()),
+            mode: SysrootMode::Workspace(cargo_workspace),
+            error: None,
+        })
+    }
 }
 
 fn discover_sysroot_dir(
@@ -471,13 +443,13 @@
             get_rust_src(sysroot_path)
         })
         .ok_or_else(|| {
-            format_err!(
-                "\
+            let error = "\
 can't load standard library from sysroot
 {sysroot_path}
 (discovered via `rustc --print sysroot`)
-try installing the Rust source the same way you installed rustc",
-            )
+try installing the Rust source the same way you installed rustc";
+            tracing::error!(error);
+            format_err!(error)
         })
 }
 
@@ -485,7 +457,7 @@
     let rustc_src = sysroot_path.join("lib/rustlib/rustc-src/rust/compiler/rustc/Cargo.toml");
     let rustc_src = ManifestPath::try_from(rustc_src).ok()?;
     tracing::debug!("checking for rustc source code: {rustc_src}");
-    if fs::metadata(rustc_src.as_ref()).is_ok() {
+    if fs::metadata(&rustc_src).is_ok() {
         Some(rustc_src)
     } else {
         None
@@ -531,5 +503,3 @@
 const PROC_MACRO_DEPS: &str = "
 std
 core";
-
-const RELEVANT_SYSROOT_CRATES: &[&str] = &["core", "alloc", "std", "test", "proc_macro"];
diff --git a/crates/project-model/src/tests.rs b/crates/project-model/src/tests.rs
index 8f5457b..e3bc81e 100644
--- a/crates/project-model/src/tests.rs
+++ b/crates/project-model/src/tests.rs
@@ -12,8 +12,8 @@
 use triomphe::Arc;
 
 use crate::{
-    workspace::ProjectWorkspaceKind, CargoWorkspace, CfgOverrides, ManifestPath, ProjectJson,
-    ProjectJsonData, ProjectWorkspace, Sysroot, WorkspaceBuildScripts,
+    sysroot::SysrootMode, workspace::ProjectWorkspaceKind, CargoWorkspace, CfgOverrides,
+    ManifestPath, ProjectJson, ProjectJsonData, ProjectWorkspace, Sysroot, WorkspaceBuildScripts,
 };
 
 fn load_cargo(file: &str) -> (CrateGraph, ProcMacroPaths) {
@@ -146,7 +146,7 @@
     // fake sysroot, so we give them both the same path:
     let sysroot_dir = AbsPathBuf::assert(sysroot_path);
     let sysroot_src_dir = sysroot_dir.clone();
-    Sysroot::load(Some(sysroot_dir), Some(sysroot_src_dir), false)
+    Sysroot::load(Some(sysroot_dir), Some(sysroot_src_dir))
 }
 
 fn rooted_project_json(data: ProjectJsonData) -> ProjectJson {
@@ -274,10 +274,9 @@
 }
 
 #[test]
+// FIXME Remove the ignore
+#[ignore = "requires nightly until the sysroot ships a cargo workspace for library on stable"]
 fn smoke_test_real_sysroot_cargo() {
-    if std::env::var("SYSROOT_CARGO_METADATA").is_err() {
-        return;
-    }
     let file_map = &mut FxHashMap::<AbsPathBuf, FileId>::default();
     let meta: Metadata = get_test_json_file("hello-world-metadata.json");
     let manifest_path =
@@ -286,8 +285,8 @@
     let sysroot = Sysroot::discover(
         AbsPath::assert(Utf8Path::new(env!("CARGO_MANIFEST_DIR"))),
         &Default::default(),
-        true,
     );
+    assert!(matches!(sysroot.mode(), SysrootMode::Workspace(_)));
 
     let project_workspace = ProjectWorkspace {
         kind: ProjectWorkspaceKind::Cargo {
diff --git a/crates/project-model/src/workspace.rs b/crates/project-model/src/workspace.rs
index 31d1c77..5620dfa 100644
--- a/crates/project-model/src/workspace.rs
+++ b/crates/project-model/src/workspace.rs
@@ -20,16 +20,15 @@
 use triomphe::Arc;
 
 use crate::{
-    build_scripts::BuildScriptOutput,
+    build_dependencies::BuildScriptOutput,
     cargo_workspace::{DepKind, PackageData, RustLibSource},
-    cfg::{CfgFlag, CfgOverrides},
     env::{cargo_config_env, inject_cargo_env, inject_cargo_package_env, inject_rustc_tool_env},
     project_json::{Crate, CrateArrayIdx},
     rustc_cfg::{self, RustcCfgConfig},
     sysroot::{SysrootCrate, SysrootMode},
     target_data_layout::{self, RustcDataLayoutConfig},
-    utf8_stdout, CargoConfig, CargoWorkspace, InvocationStrategy, ManifestPath, Package,
-    ProjectJson, ProjectManifest, Sysroot, TargetData, TargetKind, WorkspaceBuildScripts,
+    utf8_stdout, CargoConfig, CargoWorkspace, CfgOverrides, InvocationStrategy, ManifestPath,
+    Package, ProjectJson, ProjectManifest, Sysroot, TargetData, TargetKind, WorkspaceBuildScripts,
 };
 use tracing::{debug, error, info};
 
@@ -55,7 +54,7 @@
     /// `rustc --print cfg`.
     // FIXME: make this a per-crate map, as, eg, build.rs might have a
     // different target.
-    pub rustc_cfg: Vec<CfgFlag>,
+    pub rustc_cfg: Vec<CfgAtom>,
     /// The toolchain version used by this workspace.
     pub toolchain: Option<Version>,
     /// The target data layout queried for workspace.
@@ -194,7 +193,7 @@
     ) -> anyhow::Result<ProjectWorkspace> {
         let res = match manifest {
             ProjectManifest::ProjectJson(project_json) => {
-                let file = fs::read_to_string(project_json.as_ref())
+                let file = fs::read_to_string(project_json)
                     .with_context(|| format!("Failed to read json file {project_json}"))?;
                 let data = serde_json::from_str(&file)
                     .with_context(|| format!("Failed to deserialize json file {project_json}"))?;
@@ -213,28 +212,22 @@
             }
             ProjectManifest::CargoToml(cargo_toml) => {
                 let sysroot = match (&config.sysroot, &config.sysroot_src) {
-                    (Some(RustLibSource::Discover), None) => Sysroot::discover(
-                        cargo_toml.parent(),
-                        &config.extra_env,
-                        config.sysroot_query_metadata,
-                    ),
+                    (Some(RustLibSource::Discover), None) => {
+                        Sysroot::discover(cargo_toml.parent(), &config.extra_env)
+                    }
                     (Some(RustLibSource::Discover), Some(sysroot_src)) => {
                         Sysroot::discover_with_src_override(
                             cargo_toml.parent(),
                             &config.extra_env,
                             sysroot_src.clone(),
-                            config.sysroot_query_metadata,
                         )
                     }
-                    (Some(RustLibSource::Path(path)), None) => Sysroot::discover_sysroot_src_dir(
-                        path.clone(),
-                        config.sysroot_query_metadata,
-                    ),
-                    (Some(RustLibSource::Path(sysroot)), Some(sysroot_src)) => Sysroot::load(
-                        Some(sysroot.clone()),
-                        Some(sysroot_src.clone()),
-                        config.sysroot_query_metadata,
-                    ),
+                    (Some(RustLibSource::Path(path)), None) => {
+                        Sysroot::discover_sysroot_src_dir(path.clone())
+                    }
+                    (Some(RustLibSource::Path(sysroot)), Some(sysroot_src)) => {
+                        Sysroot::load(Some(sysroot.clone()), Some(sysroot_src.clone()))
+                    }
                     (None, _) => Sysroot::empty(),
                 };
                 tracing::info!(workspace = %cargo_toml, src_root = ?sysroot.src_root(), root = ?sysroot.root(), "Using sysroot");
@@ -260,6 +253,7 @@
                             ..config.clone()
                         },
                         &sysroot,
+                        false,
                         progress,
                     ) {
                         Ok(meta) => {
@@ -312,7 +306,8 @@
                     cargo_toml.parent(),
                     config,
                     &sysroot,
-                    progress,
+                        false,
+                        progress,
                 )
                 .with_context(|| {
                     format!(
@@ -350,8 +345,7 @@
         extra_env: &FxHashMap<String, String>,
         cfg_overrides: &CfgOverrides,
     ) -> ProjectWorkspace {
-        let sysroot =
-            Sysroot::load(project_json.sysroot.clone(), project_json.sysroot_src.clone(), false);
+        let sysroot = Sysroot::load(project_json.sysroot.clone(), project_json.sysroot_src.clone());
         let cfg_config = RustcCfgConfig::Rustc(&sysroot);
         let data_layout_config = RustcDataLayoutConfig::Rustc(&sysroot);
         let toolchain = match get_toolchain_version(
@@ -386,12 +380,8 @@
     ) -> anyhow::Result<ProjectWorkspace> {
         let dir = detached_file.parent();
         let sysroot = match &config.sysroot {
-            Some(RustLibSource::Path(path)) => {
-                Sysroot::discover_sysroot_src_dir(path.clone(), config.sysroot_query_metadata)
-            }
-            Some(RustLibSource::Discover) => {
-                Sysroot::discover(dir, &config.extra_env, config.sysroot_query_metadata)
-            }
+            Some(RustLibSource::Path(path)) => Sysroot::discover_sysroot_src_dir(path.clone()),
+            Some(RustLibSource::Discover) => Sysroot::discover(dir, &config.extra_env),
             None => Sysroot::empty(),
         };
 
@@ -412,14 +402,14 @@
         );
 
         let cargo_script =
-            CargoWorkspace::fetch_metadata(detached_file, dir, config, &sysroot, &|_| ()).ok().map(
-                |ws| {
+            CargoWorkspace::fetch_metadata(detached_file, dir, config, &sysroot, false, &|_| ())
+                .ok()
+                .map(|ws| {
                     (
                         CargoWorkspace::new(ws, detached_file.clone()),
                         WorkspaceBuildScripts::default(),
                     )
-                },
-            );
+                });
 
         let cargo_config_extra_env = cargo_config_env(detached_file, &config.extra_env, &sysroot);
         Ok(ProjectWorkspace {
@@ -651,7 +641,7 @@
             ProjectWorkspaceKind::DetachedFile { file, cargo: cargo_script, .. } => {
                 iter::once(PackageRoot {
                     is_local: true,
-                    include: vec![file.as_ref().to_owned()],
+                    include: vec![file.to_path_buf()],
                     exclude: Vec::new(),
                 })
                 .chain(cargo_script.iter().flat_map(|(cargo, build_scripts)| {
@@ -851,7 +841,7 @@
 
 #[instrument(skip_all)]
 fn project_json_to_crate_graph(
-    rustc_cfg: Vec<CfgFlag>,
+    rustc_cfg: Vec<CfgAtom>,
     load: FileLoader<'_>,
     project: &ProjectJson,
     sysroot: &Sysroot,
@@ -863,8 +853,8 @@
     let (public_deps, libproc_macro) =
         sysroot_to_crate_graph(crate_graph, sysroot, rustc_cfg.clone(), load);
 
-    let r_a_cfg_flag = CfgFlag::Atom("rust_analyzer".to_owned());
-    let mut cfg_cache: FxHashMap<&str, Vec<CfgFlag>> = FxHashMap::default();
+    let r_a_cfg_flag = CfgAtom::Flag(sym::rust_analyzer.clone());
+    let mut cfg_cache: FxHashMap<&str, Vec<CfgAtom>> = FxHashMap::default();
 
     let idx_to_crate_id: FxHashMap<CrateArrayIdx, CrateId> = project
         .crates()
@@ -971,7 +961,7 @@
     rustc: Option<&(CargoWorkspace, WorkspaceBuildScripts)>,
     cargo: &CargoWorkspace,
     sysroot: &Sysroot,
-    rustc_cfg: Vec<CfgFlag>,
+    rustc_cfg: Vec<CfgAtom>,
     override_cfg: &CfgOverrides,
     build_scripts: &WorkspaceBuildScripts,
 ) -> (CrateGraph, ProcMacroPaths) {
@@ -1154,7 +1144,7 @@
 }
 
 fn detached_file_to_crate_graph(
-    rustc_cfg: Vec<CfgFlag>,
+    rustc_cfg: Vec<CfgAtom>,
     load: FileLoader<'_>,
     detached_file: &ManifestPath,
     sysroot: &Sysroot,
@@ -1317,11 +1307,10 @@
         None
     } else {
         let mut potential_cfg_options = cfg_options.clone();
-        potential_cfg_options.extend(
-            pkg.features
-                .iter()
-                .map(|feat| CfgFlag::KeyValue { key: "feature".into(), value: feat.0.into() }),
-        );
+        potential_cfg_options.extend(pkg.features.iter().map(|feat| CfgAtom::KeyValue {
+            key: sym::feature.clone(),
+            value: Symbol::intern(feat.0),
+        }));
         Some(potential_cfg_options)
     };
     let cfg_options = {
@@ -1358,12 +1347,13 @@
     );
     if let TargetKind::Lib { is_proc_macro: true } = kind {
         let proc_macro = match build_data.as_ref().map(|it| it.proc_macro_dylib_path.as_ref()) {
-            Some(it) => it.cloned().map(|path| Ok((cargo_name.to_owned(), path))),
-            None => Some(Err("proc-macro crate is missing its build data".to_owned())),
+            Some(it) => match it {
+                Some(path) => Ok((cargo_name.to_owned(), path.clone())),
+                None => Err("proc-macro crate build data is missing dylib path".to_owned()),
+            },
+            None => Err("proc-macro crate is missing its build data".to_owned()),
         };
-        if let Some(proc_macro) = proc_macro {
-            proc_macros.insert(crate_id, proc_macro);
-        }
+        proc_macros.insert(crate_id, proc_macro);
     }
 
     crate_id
@@ -1386,7 +1376,7 @@
 fn sysroot_to_crate_graph(
     crate_graph: &mut CrateGraph,
     sysroot: &Sysroot,
-    rustc_cfg: Vec<CfgFlag>,
+    rustc_cfg: Vec<CfgAtom>,
     load: FileLoader<'_>,
 ) -> (SysrootPublicDeps, Option<CrateId>) {
     let _p = tracing::info_span!("sysroot_to_crate_graph").entered();
diff --git a/crates/rust-analyzer/Cargo.toml b/crates/rust-analyzer/Cargo.toml
index bc1b13a..eb95f42 100644
--- a/crates/rust-analyzer/Cargo.toml
+++ b/crates/rust-analyzer/Cargo.toml
@@ -2,7 +2,7 @@
 name = "rust-analyzer"
 version = "0.0.0"
 homepage = "https://rust-analyzer.github.io/"
-repository = "https://github.com/rust-analyzer/rust-analyzer"
+repository.workspace = true
 description = "A language server for the Rust programming language"
 documentation = "https://rust-analyzer.github.io/manual.html"
 autobins = false
@@ -21,7 +21,7 @@
 
 [dependencies]
 anyhow.workspace = true
-crossbeam-channel = "0.5.5"
+crossbeam-channel.workspace = true
 dirs = "5.0.1"
 dissimilar.workspace = true
 itertools.workspace = true
@@ -47,9 +47,10 @@
 walkdir = "2.3.2"
 semver.workspace = true
 memchr = "2.7.1"
+cargo_metadata.workspace = true
+process-wrap.workspace = true
 
 cfg.workspace = true
-flycheck.workspace = true
 hir-def.workspace = true
 hir-ty.workspace = true
 hir.workspace = true
@@ -82,20 +83,20 @@
 
 test-utils.workspace = true
 test-fixture.workspace = true
-mbe.workspace = true
+syntax-bridge.workspace = true
 
 [features]
 jemalloc = ["jemallocator", "profile/jemalloc"]
 force-always-assert = ["always-assert/force"]
 sysroot-abi = []
 in-rust-tree = [
-    "sysroot-abi",
-    "syntax/in-rust-tree",
-    "parser/in-rust-tree",
-    "hir/in-rust-tree",
-    "hir-def/in-rust-tree",
-    "hir-ty/in-rust-tree",
-    "load-cargo/in-rust-tree",
+  "sysroot-abi",
+  "syntax/in-rust-tree",
+  "parser/in-rust-tree",
+  "hir/in-rust-tree",
+  "hir-def/in-rust-tree",
+  "hir-ty/in-rust-tree",
+  "load-cargo/in-rust-tree",
 ]
 
 [lints]
diff --git a/crates/rust-analyzer/src/bin/main.rs b/crates/rust-analyzer/src/bin/main.rs
index 6a980a1..42953d3 100644
--- a/crates/rust-analyzer/src/bin/main.rs
+++ b/crates/rust-analyzer/src/bin/main.rs
@@ -14,6 +14,7 @@
 
 use anyhow::Context;
 use lsp_server::Connection;
+use paths::Utf8PathBuf;
 use rust_analyzer::{
     cli::flags,
     config::{Config, ConfigChange, ConfigErrors},
@@ -189,6 +190,7 @@
     let root_path = match root_uri
         .and_then(|it| it.to_file_path().ok())
         .map(patch_path_prefix)
+        .and_then(|it| Utf8PathBuf::from_path_buf(it).ok())
         .and_then(|it| AbsPathBuf::try_from(it).ok())
     {
         Some(it) => it,
@@ -218,6 +220,7 @@
                 .into_iter()
                 .filter_map(|it| it.uri.to_file_path().ok())
                 .map(patch_path_prefix)
+                .filter_map(|it| Utf8PathBuf::from_path_buf(it).ok())
                 .filter_map(|it| AbsPathBuf::try_from(it).ok())
                 .collect::<Vec<_>>()
         })
diff --git a/crates/rust-analyzer/src/cli/analysis_stats.rs b/crates/rust-analyzer/src/cli/analysis_stats.rs
index 380105d..06f4ba8 100644
--- a/crates/rust-analyzer/src/cli/analysis_stats.rs
+++ b/crates/rust-analyzer/src/cli/analysis_stats.rs
@@ -23,7 +23,7 @@
 use ide_db::{
     base_db::{
         salsa::{self, debug::DebugQueryTable, ParallelDatabase},
-        SourceDatabase, SourceDatabaseExt,
+        SourceDatabase, SourceRootDatabase,
     },
     EditionedFileId, LineIndexDatabase, SnippetCap,
 };
@@ -64,7 +64,6 @@
                 true => None,
                 false => Some(RustLibSource::Discover),
             },
-            sysroot_query_metadata: self.query_sysroot_metadata,
             ..Default::default()
         };
         let no_progress = &|_| ();
@@ -977,7 +976,7 @@
         let mut sw = self.stop_watch();
 
         for &file_id in &file_ids {
-            _ = analysis.diagnostics(
+            _ = analysis.full_diagnostics(
                 &DiagnosticsConfig {
                     enabled: true,
                     proc_macros_enabled: true,
diff --git a/crates/rust-analyzer/src/cli/diagnostics.rs b/crates/rust-analyzer/src/cli/diagnostics.rs
index 4ddeb4a..cdac0e5 100644
--- a/crates/rust-analyzer/src/cli/diagnostics.rs
+++ b/crates/rust-analyzer/src/cli/diagnostics.rs
@@ -6,7 +6,7 @@
 
 use hir::{db::HirDatabase, Crate, HirFileIdExt, Module};
 use ide::{AnalysisHost, AssistResolveStrategy, Diagnostic, DiagnosticsConfig, Severity};
-use ide_db::{base_db::SourceDatabaseExt, LineIndexDatabase};
+use ide_db::{base_db::SourceRootDatabase, LineIndexDatabase};
 use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice};
 
 use crate::cli::flags;
@@ -63,7 +63,7 @@
                     _vfs.file_path(file_id.into())
                 );
                 for diagnostic in analysis
-                    .diagnostics(
+                    .full_diagnostics(
                         &DiagnosticsConfig::test_sample(),
                         AssistResolveStrategy::None,
                         file_id.into(),
diff --git a/crates/rust-analyzer/src/cli/flags.rs b/crates/rust-analyzer/src/cli/flags.rs
index b3b8ab9..2a3e74c 100644
--- a/crates/rust-analyzer/src/cli/flags.rs
+++ b/crates/rust-analyzer/src/cli/flags.rs
@@ -71,9 +71,6 @@
             optional --with-deps
             /// Don't load sysroot crates (`std`, `core` & friends).
             optional --no-sysroot
-            /// Run cargo metadata on the sysroot to analyze its third-party dependencies.
-            /// Requires --no-sysroot to not be set.
-            optional --query-sysroot-metadata
 
             /// Don't run build scripts or load `OUT_DIR` values by running `cargo check` before analysis.
             optional --disable-build-scripts
@@ -214,7 +211,6 @@
     pub only: Option<String>,
     pub with_deps: bool,
     pub no_sysroot: bool,
-    pub query_sysroot_metadata: bool,
     pub disable_build_scripts: bool,
     pub disable_proc_macros: bool,
     pub skip_lowering: bool,
diff --git a/crates/rust-analyzer/src/cli/lsif.rs b/crates/rust-analyzer/src/cli/lsif.rs
index 3ff9be7..016f0ed 100644
--- a/crates/rust-analyzer/src/cli/lsif.rs
+++ b/crates/rust-analyzer/src/cli/lsif.rs
@@ -276,7 +276,7 @@
         eprintln!("Generating LSIF started...");
         let now = Instant::now();
         let cargo_config =
-            CargoConfig { sysroot: Some(RustLibSource::Discover), ..Default::default() };
+            &CargoConfig { sysroot: Some(RustLibSource::Discover), ..Default::default() };
         let no_progress = &|_| ();
         let load_cargo_config = LoadCargoConfig {
             load_out_dirs_from_check: true,
@@ -284,9 +284,11 @@
             prefill_caches: false,
         };
         let path = AbsPathBuf::assert_utf8(env::current_dir()?.join(self.path));
-        let manifest = ProjectManifest::discover_single(&path)?;
+        let root = ProjectManifest::discover_single(&path)?;
+        let mut workspace = ProjectWorkspace::load(root, cargo_config, no_progress)?;
 
-        let workspace = ProjectWorkspace::load(manifest, &cargo_config, no_progress)?;
+        let build_scripts = workspace.run_build_scripts(cargo_config, no_progress)?;
+        workspace.set_build_scripts(build_scripts);
 
         let (db, vfs, _proc_macro) =
             load_workspace(workspace, &cargo_config.extra_env, &load_cargo_config)?;
@@ -294,7 +296,7 @@
         let db = host.raw_database();
         let analysis = host.analysis();
 
-        let si = StaticIndex::compute(&analysis);
+        let si = StaticIndex::compute(&analysis, &path.clone().into());
 
         let mut lsif = LsifManager::new(&analysis, db, &vfs);
         lsif.add_vertex(lsif::Vertex::MetaData(lsif::MetaData {
diff --git a/crates/rust-analyzer/src/cli/run_tests.rs b/crates/rust-analyzer/src/cli/run_tests.rs
index 10cb2d5..157ef43 100644
--- a/crates/rust-analyzer/src/cli/run_tests.rs
+++ b/crates/rust-analyzer/src/cli/run_tests.rs
@@ -2,7 +2,7 @@
 
 use hir::{Crate, Module};
 use hir_ty::db::HirDatabase;
-use ide_db::{base_db::SourceDatabaseExt, LineIndexDatabase};
+use ide_db::{base_db::SourceRootDatabase, LineIndexDatabase};
 use profile::StopWatch;
 use project_model::{CargoConfig, RustLibSource};
 use syntax::TextRange;
diff --git a/crates/rust-analyzer/src/cli/rustc_tests.rs b/crates/rust-analyzer/src/cli/rustc_tests.rs
index 3156587..75efdfd 100644
--- a/crates/rust-analyzer/src/cli/rustc_tests.rs
+++ b/crates/rust-analyzer/src/cli/rustc_tests.rs
@@ -8,6 +8,7 @@
 use hir::{ChangeWithProcMacros, Crate};
 use ide::{AnalysisHost, DiagnosticCode, DiagnosticsConfig};
 use itertools::Either;
+use paths::Utf8PathBuf;
 use profile::StopWatch;
 use project_model::target_data_layout::RustcDataLayoutConfig;
 use project_model::{
@@ -64,12 +65,12 @@
     fn new() -> Result<Self> {
         let mut path = std::env::temp_dir();
         path.push("ra-rustc-test.rs");
-        let tmp_file = AbsPathBuf::try_from(path).unwrap();
+        let tmp_file = AbsPathBuf::try_from(Utf8PathBuf::from_path_buf(path).unwrap()).unwrap();
         std::fs::write(&tmp_file, "")?;
         let cargo_config =
             CargoConfig { sysroot: Some(RustLibSource::Discover), ..Default::default() };
 
-        let sysroot = Sysroot::discover(tmp_file.parent().unwrap(), &cargo_config.extra_env, false);
+        let sysroot = Sysroot::discover(tmp_file.parent().unwrap(), &cargo_config.extra_env);
         let data_layout = target_data_layout::get(
             RustcDataLayoutConfig::Rustc(&sysroot),
             None,
@@ -154,7 +155,7 @@
                     let root_file = self.root_file;
                     move || {
                         let res = std::panic::catch_unwind(move || {
-                            analysis.diagnostics(
+                            analysis.full_diagnostics(
                                 diagnostic_config,
                                 ide::AssistResolveStrategy::None,
                                 root_file,
diff --git a/crates/rust-analyzer/src/cli/scip.rs b/crates/rust-analyzer/src/cli/scip.rs
index ee134b6..2fc0ef8 100644
--- a/crates/rust-analyzer/src/cli/scip.rs
+++ b/crates/rust-analyzer/src/cli/scip.rs
@@ -63,7 +63,7 @@
         let db = host.raw_database();
         let analysis = host.analysis();
 
-        let si = StaticIndex::compute(&analysis);
+        let si = StaticIndex::compute(&analysis, &root.clone().into());
 
         let metadata = scip_types::Metadata {
             version: scip_types::ProtocolVersion::UnspecifiedProtocolVersion.into(),
@@ -334,6 +334,7 @@
     use ide::{FilePosition, TextSize};
     use scip::symbol::format_symbol;
     use test_fixture::ChangeFixture;
+    use vfs::VfsPath;
 
     fn position(ra_fixture: &str) -> (AnalysisHost, FilePosition) {
         let mut host = AnalysisHost::default();
@@ -351,7 +352,8 @@
         let (host, position) = position(ra_fixture);
 
         let analysis = host.analysis();
-        let si = StaticIndex::compute(&analysis);
+        let si =
+            StaticIndex::compute(&analysis, &VfsPath::new_virtual_path("/workspace".to_owned()));
 
         let FilePosition { file_id, offset } = position;
 
@@ -384,7 +386,7 @@
     fn basic() {
         check_symbol(
             r#"
-//- /lib.rs crate:main deps:foo
+//- /workspace/lib.rs crate:main deps:foo
 use foo::example_mod::func;
 fn main() {
     func$0();
@@ -485,7 +487,7 @@
     fn symbol_for_field() {
         check_symbol(
             r#"
-    //- /lib.rs crate:main deps:foo
+    //- /workspace/lib.rs crate:main deps:foo
     use foo::St;
     fn main() {
         let x = St { a$0: 2 };
@@ -503,7 +505,7 @@
     fn symbol_for_param() {
         check_symbol(
             r#"
-//- /lib.rs crate:main deps:foo
+//- /workspace/lib.rs crate:main deps:foo
 use foo::example_mod::func;
 fn main() {
     func(42);
@@ -521,7 +523,7 @@
     fn symbol_for_closure_param() {
         check_symbol(
             r#"
-//- /lib.rs crate:main deps:foo
+//- /workspace/lib.rs crate:main deps:foo
 use foo::example_mod::func;
 fn main() {
     func();
@@ -541,7 +543,7 @@
     fn local_symbol_for_local() {
         check_symbol(
             r#"
-    //- /lib.rs crate:main deps:foo
+    //- /workspace/lib.rs crate:main deps:foo
     use foo::module::func;
     fn main() {
         func();
@@ -561,13 +563,13 @@
     fn global_symbol_for_pub_struct() {
         check_symbol(
             r#"
-    //- /lib.rs crate:main
+    //- /workspace/lib.rs crate:main
     mod foo;
 
     fn main() {
         let _bar = foo::Bar { i: 0 };
     }
-    //- /foo.rs
+    //- /workspace/foo.rs
     pub struct Bar$0 {
         pub i: i32,
     }
@@ -580,13 +582,13 @@
     fn global_symbol_for_pub_struct_reference() {
         check_symbol(
             r#"
-    //- /lib.rs crate:main
+    //- /workspace/lib.rs crate:main
     mod foo;
 
     fn main() {
         let _bar = foo::Bar$0 { i: 0 };
     }
-    //- /foo.rs
+    //- /workspace/foo.rs
     pub struct Bar {
         pub i: i32,
     }
@@ -599,7 +601,7 @@
     fn symbol_for_for_type_alias() {
         check_symbol(
             r#"
-    //- /lib.rs crate:main
+    //- /workspace/lib.rs crate:main
     pub type MyTypeAlias$0 = u8;
     "#,
             "rust-analyzer cargo main . MyTypeAlias#",
@@ -615,7 +617,8 @@
         host.raw_database_mut().apply_change(change_fixture.change);
 
         let analysis = host.analysis();
-        let si = StaticIndex::compute(&analysis);
+        let si =
+            StaticIndex::compute(&analysis, &VfsPath::new_virtual_path("/workspace".to_owned()));
 
         let file = si.files.first().unwrap();
         let (_, token_id) = file.tokens.first().unwrap();
diff --git a/crates/rust-analyzer/src/cli/ssr.rs b/crates/rust-analyzer/src/cli/ssr.rs
index 7f24fa2..3caa487 100644
--- a/crates/rust-analyzer/src/cli/ssr.rs
+++ b/crates/rust-analyzer/src/cli/ssr.rs
@@ -1,7 +1,7 @@
 //! Applies structured search replace rules from the command line.
 
 use anyhow::Context;
-use ide_db::EditionedFileId;
+use ide_db::{base_db::SourceDatabase, EditionedFileId};
 use ide_ssr::MatchFinder;
 use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice};
 use project_model::{CargoConfig, RustLibSource};
@@ -10,7 +10,6 @@
 
 impl flags::Ssr {
     pub fn run(self) -> anyhow::Result<()> {
-        use ide_db::base_db::SourceDatabaseExt;
         let cargo_config =
             CargoConfig { sysroot: Some(RustLibSource::Discover), ..Default::default() };
         let load_cargo_config = LoadCargoConfig {
@@ -46,7 +45,7 @@
     /// `debug_snippet`. This is intended for debugging and probably isn't in it's current form useful
     /// for much else.
     pub fn run(self) -> anyhow::Result<()> {
-        use ide_db::base_db::SourceDatabaseExt;
+        use ide_db::base_db::SourceRootDatabase;
         use ide_db::symbol_index::SymbolsDatabase;
         let cargo_config = CargoConfig::default();
         let load_cargo_config = LoadCargoConfig {
diff --git a/crates/flycheck/src/command.rs b/crates/rust-analyzer/src/command.rs
similarity index 98%
rename from crates/flycheck/src/command.rs
rename to crates/rust-analyzer/src/command.rs
index 38c7c81..f1009eb 100644
--- a/crates/flycheck/src/command.rs
+++ b/crates/rust-analyzer/src/command.rs
@@ -1,5 +1,5 @@
-//! Utilities for running a cargo command like `cargo check` or `cargo test` in a separate thread and
-//! parse its stdout/stderr.
+//! Utilities for running a cargo command like `cargo check` or `cargo test` in a separate thread
+//! and parse its stdout/stderr.
 
 use std::{
     ffi::OsString,
diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs
index b9b8cfd..02f5d75 100644
--- a/crates/rust-analyzer/src/config.rs
+++ b/crates/rust-analyzer/src/config.rs
@@ -7,7 +7,6 @@
 
 use cfg::{CfgAtom, CfgDiff};
 use dirs::config_dir;
-use flycheck::{CargoOptions, FlycheckConfig};
 use hir::Symbol;
 use ide::{
     AssistConfig, CallableSnippets, CompletionConfig, DiagnosticsConfig, ExprFillDefaultMode,
@@ -35,8 +34,9 @@
 use vfs::{AbsPath, AbsPathBuf, VfsPath};
 
 use crate::{
-    capabilities::ClientCapabilities,
     diagnostics::DiagnosticsMapConfig,
+    flycheck::{CargoOptions, FlycheckConfig},
+    lsp::capabilities::ClientCapabilities,
     lsp_ext::{WorkspaceSymbolSearchKind, WorkspaceSymbolSearchScope},
 };
 
@@ -143,13 +143,6 @@
         ///
         /// This option does not take effect until rust-analyzer is restarted.
         cargo_sysroot: Option<String>    = Some("discover".to_owned()),
-        /// Whether to run cargo metadata on the sysroot library allowing rust-analyzer to analyze
-        /// third-party dependencies of the standard libraries.
-        ///
-        /// This will cause `cargo` to create a lockfile in your sysroot directory. rust-analyzer
-        /// will attempt to clean up afterwards, but nevertheless requires the location to be
-        /// writable to.
-        cargo_sysrootQueryMetadata: bool     = false,
         /// Relative path to the sysroot library sources. If left unset, this will default to
         /// `{cargo.sysroot}/lib/rustlib/src/rust/library`.
         ///
@@ -1839,7 +1832,6 @@
         });
         let sysroot_src =
             self.cargo_sysrootSrc(None).as_ref().map(|sysroot| self.root_path.join(sysroot));
-        let sysroot_query_metadata = self.cargo_sysrootQueryMetadata(None);
 
         CargoConfig {
             all_targets: *self.cargo_allTargets(None),
@@ -1852,7 +1844,6 @@
             },
             target: self.cargo_target(None).clone(),
             sysroot,
-            sysroot_query_metadata: *sysroot_query_metadata,
             sysroot_src,
             rustc_source,
             cfg_overrides: project_model::CfgOverrides {
@@ -1908,7 +1899,7 @@
         *self.check_workspace(None)
     }
 
-    pub fn cargo_test_options(&self) -> CargoOptions {
+    pub(crate) fn cargo_test_options(&self) -> CargoOptions {
         CargoOptions {
             target_triples: self.cargo_target(None).clone().into_iter().collect(),
             all_targets: false,
@@ -1924,7 +1915,7 @@
         }
     }
 
-    pub fn flycheck(&self) -> FlycheckConfig {
+    pub(crate) fn flycheck(&self) -> FlycheckConfig {
         match &self.check_overrideCommand(None) {
             Some(args) if !args.is_empty() => {
                 let mut args = args.clone();
@@ -1934,16 +1925,18 @@
                     args,
                     extra_env: self.check_extra_env(),
                     invocation_strategy: match self.check_invocationStrategy(None) {
-                        InvocationStrategy::Once => flycheck::InvocationStrategy::Once,
+                        InvocationStrategy::Once => crate::flycheck::InvocationStrategy::Once,
                         InvocationStrategy::PerWorkspace => {
-                            flycheck::InvocationStrategy::PerWorkspace
+                            crate::flycheck::InvocationStrategy::PerWorkspace
                         }
                     },
                     invocation_location: match self.check_invocationLocation(None) {
                         InvocationLocation::Root => {
-                            flycheck::InvocationLocation::Root(self.root_path.clone())
+                            crate::flycheck::InvocationLocation::Root(self.root_path.clone())
                         }
-                        InvocationLocation::Workspace => flycheck::InvocationLocation::Workspace,
+                        InvocationLocation::Workspace => {
+                            crate::flycheck::InvocationLocation::Workspace
+                        }
                     },
                 }
             }
@@ -3450,7 +3443,7 @@
         let s = remove_ws(&schema);
         if !p.contains(&s) {
             package_json.replace_range(start..end, &schema);
-            ensure_file_contents(&package_json_path, &package_json)
+            ensure_file_contents(package_json_path.as_std_path(), &package_json)
         }
     }
 
@@ -3458,7 +3451,7 @@
     fn generate_config_documentation() {
         let docs_path = project_root().join("docs/user/generated_config.adoc");
         let expected = FullConfigInput::manual();
-        ensure_file_contents(&docs_path, &expected);
+        ensure_file_contents(docs_path.as_std_path(), &expected);
     }
 
     fn remove_ws(text: &str) -> String {
@@ -3467,13 +3460,8 @@
 
     #[test]
     fn proc_macro_srv_null() {
-        let mut config = Config::new(
-            AbsPathBuf::try_from(project_root()).unwrap(),
-            Default::default(),
-            vec![],
-            None,
-            None,
-        );
+        let mut config =
+            Config::new(AbsPathBuf::assert(project_root()), Default::default(), vec![], None, None);
 
         let mut change = ConfigChange::default();
         change.change_client_config(serde_json::json!({
@@ -3487,32 +3475,22 @@
 
     #[test]
     fn proc_macro_srv_abs() {
-        let mut config = Config::new(
-            AbsPathBuf::try_from(project_root()).unwrap(),
-            Default::default(),
-            vec![],
-            None,
-            None,
-        );
+        let mut config =
+            Config::new(AbsPathBuf::assert(project_root()), Default::default(), vec![], None, None);
         let mut change = ConfigChange::default();
         change.change_client_config(serde_json::json!({
         "procMacro" : {
-            "server": project_root().display().to_string(),
+            "server": project_root().to_string(),
         }}));
 
         (config, _, _) = config.apply_change(change);
-        assert_eq!(config.proc_macro_srv(), Some(AbsPathBuf::try_from(project_root()).unwrap()));
+        assert_eq!(config.proc_macro_srv(), Some(AbsPathBuf::assert(project_root())));
     }
 
     #[test]
     fn proc_macro_srv_rel() {
-        let mut config = Config::new(
-            AbsPathBuf::try_from(project_root()).unwrap(),
-            Default::default(),
-            vec![],
-            None,
-            None,
-        );
+        let mut config =
+            Config::new(AbsPathBuf::assert(project_root()), Default::default(), vec![], None, None);
 
         let mut change = ConfigChange::default();
 
@@ -3531,13 +3509,8 @@
 
     #[test]
     fn cargo_target_dir_unset() {
-        let mut config = Config::new(
-            AbsPathBuf::try_from(project_root()).unwrap(),
-            Default::default(),
-            vec![],
-            None,
-            None,
-        );
+        let mut config =
+            Config::new(AbsPathBuf::assert(project_root()), Default::default(), vec![], None, None);
 
         let mut change = ConfigChange::default();
 
@@ -3554,13 +3527,8 @@
 
     #[test]
     fn cargo_target_dir_subdir() {
-        let mut config = Config::new(
-            AbsPathBuf::try_from(project_root()).unwrap(),
-            Default::default(),
-            vec![],
-            None,
-            None,
-        );
+        let mut config =
+            Config::new(AbsPathBuf::assert(project_root()), Default::default(), vec![], None, None);
 
         let mut change = ConfigChange::default();
         change.change_client_config(serde_json::json!({
@@ -3577,13 +3545,8 @@
 
     #[test]
     fn cargo_target_dir_relative_dir() {
-        let mut config = Config::new(
-            AbsPathBuf::try_from(project_root()).unwrap(),
-            Default::default(),
-            vec![],
-            None,
-            None,
-        );
+        let mut config =
+            Config::new(AbsPathBuf::assert(project_root()), Default::default(), vec![], None, None);
 
         let mut change = ConfigChange::default();
         change.change_client_config(serde_json::json!({
@@ -3603,13 +3566,8 @@
 
     #[test]
     fn toml_unknown_key() {
-        let config = Config::new(
-            AbsPathBuf::try_from(project_root()).unwrap(),
-            Default::default(),
-            vec![],
-            None,
-            None,
-        );
+        let config =
+            Config::new(AbsPathBuf::assert(project_root()), Default::default(), vec![], None, None);
 
         let mut change = ConfigChange::default();
 
diff --git a/crates/rust-analyzer/src/diagnostics.rs b/crates/rust-analyzer/src/diagnostics.rs
index b23e7b7..034c49c 100644
--- a/crates/rust-analyzer/src/diagnostics.rs
+++ b/crates/rust-analyzer/src/diagnostics.rs
@@ -11,7 +11,7 @@
 use stdx::iter_eq_by;
 use triomphe::Arc;
 
-use crate::{global_state::GlobalStateSnapshot, lsp, lsp_ext};
+use crate::{global_state::GlobalStateSnapshot, lsp, lsp_ext, main_loop::DiagnosticsTaskKind};
 
 pub(crate) type CheckFixes = Arc<IntMap<usize, IntMap<FileId, Vec<Fix>>>>;
 
@@ -28,7 +28,8 @@
 #[derive(Debug, Default, Clone)]
 pub(crate) struct DiagnosticCollection {
     // FIXME: should be IntMap<FileId, Vec<ra_id::Diagnostic>>
-    pub(crate) native: IntMap<FileId, (DiagnosticsGeneration, Vec<lsp_types::Diagnostic>)>,
+    pub(crate) native_syntax: IntMap<FileId, (DiagnosticsGeneration, Vec<lsp_types::Diagnostic>)>,
+    pub(crate) native_semantic: IntMap<FileId, (DiagnosticsGeneration, Vec<lsp_types::Diagnostic>)>,
     // FIXME: should be Vec<flycheck::Diagnostic>
     pub(crate) check: IntMap<usize, IntMap<FileId, Vec<lsp_types::Diagnostic>>>,
     pub(crate) check_fixes: CheckFixes,
@@ -64,7 +65,8 @@
     }
 
     pub(crate) fn clear_native_for(&mut self, file_id: FileId) {
-        self.native.remove(&file_id);
+        self.native_syntax.remove(&file_id);
+        self.native_semantic.remove(&file_id);
         self.changes.insert(file_id);
     }
 
@@ -88,43 +90,51 @@
         self.changes.insert(file_id);
     }
 
-    pub(crate) fn set_native_diagnostics(
-        &mut self,
-        generation: DiagnosticsGeneration,
-        file_id: FileId,
-        mut diagnostics: Vec<lsp_types::Diagnostic>,
-    ) {
-        diagnostics.sort_by_key(|it| (it.range.start, it.range.end));
-        if let Some((old_gen, existing_diagnostics)) = self.native.get_mut(&file_id) {
-            if existing_diagnostics.len() == diagnostics.len()
-                && iter_eq_by(&diagnostics, &*existing_diagnostics, |new, existing| {
-                    are_diagnostics_equal(new, existing)
-                })
-            {
-                // don't signal an update if the diagnostics are the same
-                return;
+    pub(crate) fn set_native_diagnostics(&mut self, kind: DiagnosticsTaskKind) {
+        let (generation, diagnostics, target) = match kind {
+            DiagnosticsTaskKind::Syntax(generation, diagnostics) => {
+                (generation, diagnostics, &mut self.native_syntax)
             }
-            if *old_gen < generation || generation == 0 {
-                self.native.insert(file_id, (generation, diagnostics));
+            DiagnosticsTaskKind::Semantic(generation, diagnostics) => {
+                (generation, diagnostics, &mut self.native_semantic)
+            }
+        };
+
+        for (file_id, mut diagnostics) in diagnostics {
+            diagnostics.sort_by_key(|it| (it.range.start, it.range.end));
+
+            if let Some((old_gen, existing_diagnostics)) = target.get_mut(&file_id) {
+                if existing_diagnostics.len() == diagnostics.len()
+                    && iter_eq_by(&diagnostics, &*existing_diagnostics, |new, existing| {
+                        are_diagnostics_equal(new, existing)
+                    })
+                {
+                    // don't signal an update if the diagnostics are the same
+                    continue;
+                }
+                if *old_gen < generation || generation == 0 {
+                    target.insert(file_id, (generation, diagnostics));
+                } else {
+                    existing_diagnostics.extend(diagnostics);
+                    // FIXME: Doing the merge step of a merge sort here would be a bit more performant
+                    // but eh
+                    existing_diagnostics.sort_by_key(|it| (it.range.start, it.range.end))
+                }
             } else {
-                existing_diagnostics.extend(diagnostics);
-                // FIXME: Doing the merge step of a merge sort here would be a bit more performant
-                // but eh
-                existing_diagnostics.sort_by_key(|it| (it.range.start, it.range.end))
+                target.insert(file_id, (generation, diagnostics));
             }
-        } else {
-            self.native.insert(file_id, (generation, diagnostics));
+            self.changes.insert(file_id);
         }
-        self.changes.insert(file_id);
     }
 
     pub(crate) fn diagnostics_for(
         &self,
         file_id: FileId,
     ) -> impl Iterator<Item = &lsp_types::Diagnostic> {
-        let native = self.native.get(&file_id).into_iter().flat_map(|(_, d)| d);
+        let native_syntax = self.native_syntax.get(&file_id).into_iter().flat_map(|(_, d)| d);
+        let native_semantic = self.native_semantic.get(&file_id).into_iter().flat_map(|(_, d)| d);
         let check = self.check.values().filter_map(move |it| it.get(&file_id)).flatten();
-        native.chain(check)
+        native_syntax.chain(native_semantic).chain(check)
     }
 
     pub(crate) fn take_changes(&mut self) -> Option<IntSet<FileId>> {
@@ -147,10 +157,16 @@
         && left.message == right.message
 }
 
+pub(crate) enum NativeDiagnosticsFetchKind {
+    Syntax,
+    Semantic,
+}
+
 pub(crate) fn fetch_native_diagnostics(
-    snapshot: GlobalStateSnapshot,
+    snapshot: &GlobalStateSnapshot,
     subscriptions: std::sync::Arc<[FileId]>,
     slice: std::ops::Range<usize>,
+    kind: NativeDiagnosticsFetchKind,
 ) -> Vec<(FileId, Vec<lsp_types::Diagnostic>)> {
     let _p = tracing::info_span!("fetch_native_diagnostics").entered();
     let _ctx = stdx::panic_context::enter("fetch_native_diagnostics".to_owned());
@@ -180,14 +196,17 @@
             let line_index = snapshot.file_line_index(file_id).ok()?;
             let source_root = snapshot.analysis.source_root_id(file_id).ok()?;
 
-            let diagnostics = snapshot
-                .analysis
-                .diagnostics(
-                    &snapshot.config.diagnostics(Some(source_root)),
-                    ide::AssistResolveStrategy::None,
-                    file_id,
-                )
-                .ok()?
+            let config = &snapshot.config.diagnostics(Some(source_root));
+            let diagnostics = match kind {
+                NativeDiagnosticsFetchKind::Syntax => {
+                    snapshot.analysis.syntax_diagnostics(config, file_id).ok()?
+                }
+                NativeDiagnosticsFetchKind::Semantic => snapshot
+                    .analysis
+                    .semantic_diagnostics(config, ide::AssistResolveStrategy::None, file_id)
+                    .ok()?,
+            };
+            let diagnostics = diagnostics
                 .into_iter()
                 .filter_map(|d| {
                     if d.range.file_id == file_id {
diff --git a/crates/rust-analyzer/src/diagnostics/to_proto.rs b/crates/rust-analyzer/src/diagnostics/to_proto.rs
index defa464..208a70b 100644
--- a/crates/rust-analyzer/src/diagnostics/to_proto.rs
+++ b/crates/rust-analyzer/src/diagnostics/to_proto.rs
@@ -1,7 +1,7 @@
 //! This module provides the functionality needed to convert diagnostics from
 //! `cargo check` json format to the LSP diagnostic format.
 
-use flycheck::{Applicability, DiagnosticLevel, DiagnosticSpan};
+use crate::flycheck::{Applicability, DiagnosticLevel, DiagnosticSpan};
 use itertools::Itertools;
 use rustc_hash::FxHashMap;
 use stdx::format_to;
@@ -17,8 +17,8 @@
 /// Determines the LSP severity from a diagnostic
 fn diagnostic_severity(
     config: &DiagnosticsMapConfig,
-    level: flycheck::DiagnosticLevel,
-    code: Option<flycheck::DiagnosticCode>,
+    level: crate::flycheck::DiagnosticLevel,
+    code: Option<crate::flycheck::DiagnosticCode>,
 ) -> Option<lsp_types::DiagnosticSeverity> {
     let res = match level {
         DiagnosticLevel::Ice => lsp_types::DiagnosticSeverity::ERROR,
@@ -181,7 +181,7 @@
 fn map_rust_child_diagnostic(
     config: &DiagnosticsMapConfig,
     workspace_root: &AbsPath,
-    rd: &flycheck::Diagnostic,
+    rd: &crate::flycheck::Diagnostic,
     snap: &GlobalStateSnapshot,
 ) -> MappedRustChildDiagnostic {
     let spans: Vec<&DiagnosticSpan> = rd.spans.iter().filter(|s| s.is_primary).collect();
@@ -284,7 +284,7 @@
 /// If the diagnostic has no primary span this will return `None`
 pub(crate) fn map_rust_diagnostic_to_lsp(
     config: &DiagnosticsMapConfig,
-    rd: &flycheck::Diagnostic,
+    rd: &crate::flycheck::Diagnostic,
     workspace_root: &AbsPath,
     snap: &GlobalStateSnapshot,
 ) -> Vec<MappedRustDiagnostic> {
@@ -537,7 +537,8 @@
     }
 
     fn check_with_config(config: DiagnosticsMapConfig, diagnostics_json: &str, expect: ExpectFile) {
-        let diagnostic: flycheck::Diagnostic = serde_json::from_str(diagnostics_json).unwrap();
+        let diagnostic: crate::flycheck::Diagnostic =
+            serde_json::from_str(diagnostics_json).unwrap();
         let workspace_root: &AbsPath = Utf8Path::new("/test/").try_into().unwrap();
         let (sender, _) = crossbeam_channel::unbounded();
         let state = GlobalState::new(
diff --git a/crates/rust-analyzer/src/diff.rs b/crates/rust-analyzer/src/diff.rs
deleted file mode 100644
index 3fcfb4a..0000000
--- a/crates/rust-analyzer/src/diff.rs
+++ /dev/null
@@ -1,53 +0,0 @@
-//! Generate minimal `TextEdit`s from different text versions
-use dissimilar::Chunk;
-use ide::{TextEdit, TextRange, TextSize};
-
-pub(crate) fn diff(left: &str, right: &str) -> TextEdit {
-    let chunks = dissimilar::diff(left, right);
-    textedit_from_chunks(chunks)
-}
-
-fn textedit_from_chunks(chunks: Vec<dissimilar::Chunk<'_>>) -> TextEdit {
-    let mut builder = TextEdit::builder();
-    let mut pos = TextSize::default();
-
-    let mut chunks = chunks.into_iter().peekable();
-    while let Some(chunk) = chunks.next() {
-        if let (Chunk::Delete(deleted), Some(&Chunk::Insert(inserted))) = (chunk, chunks.peek()) {
-            chunks.next().unwrap();
-            let deleted_len = TextSize::of(deleted);
-            builder.replace(TextRange::at(pos, deleted_len), inserted.into());
-            pos += deleted_len;
-            continue;
-        }
-
-        match chunk {
-            Chunk::Equal(text) => {
-                pos += TextSize::of(text);
-            }
-            Chunk::Delete(deleted) => {
-                let deleted_len = TextSize::of(deleted);
-                builder.delete(TextRange::at(pos, deleted_len));
-                pos += deleted_len;
-            }
-            Chunk::Insert(inserted) => {
-                builder.insert(pos, inserted.into());
-            }
-        }
-    }
-    builder.finish()
-}
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-
-    #[test]
-    fn diff_applies() {
-        let mut original = String::from("fn foo(a:u32){\n}");
-        let result = "fn foo(a: u32) {}";
-        let edit = diff(&original, result);
-        edit.apply(&mut original);
-        assert_eq!(original, result);
-    }
-}
diff --git a/crates/flycheck/src/project_json.rs b/crates/rust-analyzer/src/discover.rs
similarity index 85%
rename from crates/flycheck/src/project_json.rs
rename to crates/rust-analyzer/src/discover.rs
index b6e4495..7e9162e 100644
--- a/crates/flycheck/src/project_json.rs
+++ b/crates/rust-analyzer/src/discover.rs
@@ -1,4 +1,5 @@
-//! A `cargo-metadata`-equivalent for non-Cargo build systems.
+//! Infrastructure for lazy project discovery. Currently only support rust-project.json discovery
+//! via a custom discover command.
 use std::{io, process::Command};
 
 use crossbeam_channel::Sender;
@@ -9,19 +10,19 @@
 
 use crate::command::{CommandHandle, ParseFromLine};
 
-pub const ARG_PLACEHOLDER: &str = "{arg}";
+pub(crate) const ARG_PLACEHOLDER: &str = "{arg}";
 
 /// A command wrapper for getting a `rust-project.json`.
 ///
-/// This is analogous to `cargo-metadata`, but for non-Cargo build systems.
-pub struct Discover {
+/// This is analogous to discovering a cargo project + running `cargo-metadata` on it, but for non-Cargo build systems.
+pub(crate) struct DiscoverCommand {
     command: Vec<String>,
     sender: Sender<DiscoverProjectMessage>,
 }
 
 #[derive(PartialEq, Clone, Debug, Serialize)]
 #[serde(rename_all = "camelCase")]
-pub enum DiscoverArgument {
+pub(crate) enum DiscoverArgument {
     Path(#[serde(serialize_with = "serialize_abs_pathbuf")] AbsPathBuf),
     Buildfile(#[serde(serialize_with = "serialize_abs_pathbuf")] AbsPathBuf),
 }
@@ -34,14 +35,14 @@
     se.serialize_str(path.as_str())
 }
 
-impl Discover {
-    /// Create a new [Discover].
-    pub fn new(sender: Sender<DiscoverProjectMessage>, command: Vec<String>) -> Self {
+impl DiscoverCommand {
+    /// Create a new [DiscoverCommand].
+    pub(crate) fn new(sender: Sender<DiscoverProjectMessage>, command: Vec<String>) -> Self {
         Self { sender, command }
     }
 
     /// Spawn the command inside [Discover] and report progress, if any.
-    pub fn spawn(&self, discover_arg: DiscoverArgument) -> io::Result<DiscoverHandle> {
+    pub(crate) fn spawn(&self, discover_arg: DiscoverArgument) -> io::Result<DiscoverHandle> {
         let command = &self.command[0];
         let args = &self.command[1..];
 
@@ -65,7 +66,7 @@
 
 /// A handle to a spawned [Discover].
 #[derive(Debug)]
-pub struct DiscoverHandle {
+pub(crate) struct DiscoverHandle {
     _handle: CommandHandle<DiscoverProjectMessage>,
 }
 
@@ -81,7 +82,7 @@
 }
 
 #[derive(Debug, PartialEq, Clone)]
-pub enum DiscoverProjectMessage {
+pub(crate) enum DiscoverProjectMessage {
     Finished { project: ProjectJsonData, buildfile: AbsPathBuf },
     Error { error: String, source: Option<String> },
     Progress { message: String },
diff --git a/crates/flycheck/src/lib.rs b/crates/rust-analyzer/src/flycheck.rs
similarity index 80%
rename from crates/flycheck/src/lib.rs
rename to crates/rust-analyzer/src/flycheck.rs
index 3dd2a91..8f2e7d1 100644
--- a/crates/flycheck/src/lib.rs
+++ b/crates/rust-analyzer/src/flycheck.rs
@@ -1,59 +1,48 @@
-//! Flycheck provides the functionality needed to run `cargo check` or
-//! another compatible command (f.x. clippy) in a background thread and provide
+//! Flycheck provides the functionality needed to run `cargo check` to provide
 //! LSP diagnostics based on the output of the command.
 
-// FIXME: This crate now handles running `cargo test` needed in the test explorer in
-// addition to `cargo check`. Either split it into 3 crates (one for test, one for check
-// and one common utilities) or change its name and docs to reflect the current state.
-
 use std::{fmt, io, process::Command, time::Duration};
 
-use crossbeam_channel::{never, select, unbounded, Receiver, Sender};
+use crossbeam_channel::{select_biased, unbounded, Receiver, Sender};
 use paths::{AbsPath, AbsPathBuf, Utf8PathBuf};
 use rustc_hash::FxHashMap;
 use serde::Deserialize;
 
-pub use cargo_metadata::diagnostic::{
+pub(crate) use cargo_metadata::diagnostic::{
     Applicability, Diagnostic, DiagnosticCode, DiagnosticLevel, DiagnosticSpan,
-    DiagnosticSpanMacroExpansion,
 };
 use toolchain::Tool;
 
-mod command;
-pub mod project_json;
-mod test_runner;
-
-use command::{CommandHandle, ParseFromLine};
-pub use test_runner::{CargoTestHandle, CargoTestMessage, TestState, TestTarget};
+use crate::command::{CommandHandle, ParseFromLine};
 
 #[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
-pub enum InvocationStrategy {
+pub(crate) enum InvocationStrategy {
     Once,
     #[default]
     PerWorkspace,
 }
 
 #[derive(Clone, Debug, Default, PartialEq, Eq)]
-pub enum InvocationLocation {
+pub(crate) enum InvocationLocation {
     Root(AbsPathBuf),
     #[default]
     Workspace,
 }
 
 #[derive(Clone, Debug, PartialEq, Eq)]
-pub struct CargoOptions {
-    pub target_triples: Vec<String>,
-    pub all_targets: bool,
-    pub no_default_features: bool,
-    pub all_features: bool,
-    pub features: Vec<String>,
-    pub extra_args: Vec<String>,
-    pub extra_env: FxHashMap<String, String>,
-    pub target_dir: Option<Utf8PathBuf>,
+pub(crate) struct CargoOptions {
+    pub(crate) target_triples: Vec<String>,
+    pub(crate) all_targets: bool,
+    pub(crate) no_default_features: bool,
+    pub(crate) all_features: bool,
+    pub(crate) features: Vec<String>,
+    pub(crate) extra_args: Vec<String>,
+    pub(crate) extra_env: FxHashMap<String, String>,
+    pub(crate) target_dir: Option<Utf8PathBuf>,
 }
 
 impl CargoOptions {
-    fn apply_on_command(&self, cmd: &mut Command) {
+    pub(crate) fn apply_on_command(&self, cmd: &mut Command) {
         for target in &self.target_triples {
             cmd.args(["--target", target.as_str()]);
         }
@@ -79,7 +68,7 @@
 }
 
 #[derive(Clone, Debug, PartialEq, Eq)]
-pub enum FlycheckConfig {
+pub(crate) enum FlycheckConfig {
     CargoCommand {
         command: String,
         options: CargoOptions,
@@ -110,7 +99,7 @@
 /// diagnostics based on the output.
 /// The spawned thread is shut down when this struct is dropped.
 #[derive(Debug)]
-pub struct FlycheckHandle {
+pub(crate) struct FlycheckHandle {
     // XXX: drop order is significant
     sender: Sender<StateChange>,
     _thread: stdx::thread::JoinHandle,
@@ -118,9 +107,9 @@
 }
 
 impl FlycheckHandle {
-    pub fn spawn(
+    pub(crate) fn spawn(
         id: usize,
-        sender: Box<dyn Fn(Message) + Send>,
+        sender: Sender<FlycheckMessage>,
         config: FlycheckConfig,
         sysroot_root: Option<AbsPathBuf>,
         workspace_root: AbsPathBuf,
@@ -137,28 +126,28 @@
     }
 
     /// Schedule a re-start of the cargo check worker to do a workspace wide check.
-    pub fn restart_workspace(&self, saved_file: Option<AbsPathBuf>) {
+    pub(crate) fn restart_workspace(&self, saved_file: Option<AbsPathBuf>) {
         self.sender.send(StateChange::Restart { package: None, saved_file }).unwrap();
     }
 
     /// Schedule a re-start of the cargo check worker to do a package wide check.
-    pub fn restart_for_package(&self, package: String) {
+    pub(crate) fn restart_for_package(&self, package: String) {
         self.sender
             .send(StateChange::Restart { package: Some(package), saved_file: None })
             .unwrap();
     }
 
     /// Stop this cargo check worker.
-    pub fn cancel(&self) {
+    pub(crate) fn cancel(&self) {
         self.sender.send(StateChange::Cancel).unwrap();
     }
 
-    pub fn id(&self) -> usize {
+    pub(crate) fn id(&self) -> usize {
         self.id
     }
 }
 
-pub enum Message {
+pub(crate) enum FlycheckMessage {
     /// Request adding a diagnostic with fixes included to a file
     AddDiagnostic { id: usize, workspace_root: AbsPathBuf, diagnostic: Diagnostic },
 
@@ -173,19 +162,19 @@
     },
 }
 
-impl fmt::Debug for Message {
+impl fmt::Debug for FlycheckMessage {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match self {
-            Message::AddDiagnostic { id, workspace_root, diagnostic } => f
+            FlycheckMessage::AddDiagnostic { id, workspace_root, diagnostic } => f
                 .debug_struct("AddDiagnostic")
                 .field("id", id)
                 .field("workspace_root", workspace_root)
                 .field("diagnostic_code", &diagnostic.code.as_ref().map(|it| &it.code))
                 .finish(),
-            Message::ClearDiagnostics { id } => {
+            FlycheckMessage::ClearDiagnostics { id } => {
                 f.debug_struct("ClearDiagnostics").field("id", id).finish()
             }
-            Message::Progress { id, progress } => {
+            FlycheckMessage::Progress { id, progress } => {
                 f.debug_struct("Progress").field("id", id).field("progress", progress).finish()
             }
         }
@@ -193,7 +182,7 @@
 }
 
 #[derive(Debug)]
-pub enum Progress {
+pub(crate) enum Progress {
     DidStart,
     DidCheckCrate(String),
     DidFinish(io::Result<()>),
@@ -210,7 +199,7 @@
 struct FlycheckActor {
     /// The workspace id of this flycheck instance.
     id: usize,
-    sender: Box<dyn Fn(Message) + Send>,
+    sender: Sender<FlycheckMessage>,
     config: FlycheckConfig,
     manifest_path: Option<AbsPathBuf>,
     /// Either the workspace root of the workspace we are flychecking,
@@ -241,12 +230,12 @@
     Finished,
 }
 
-pub const SAVED_FILE_PLACEHOLDER: &str = "$saved_file";
+pub(crate) const SAVED_FILE_PLACEHOLDER: &str = "$saved_file";
 
 impl FlycheckActor {
     fn new(
         id: usize,
-        sender: Box<dyn Fn(Message) + Send>,
+        sender: Sender<FlycheckMessage>,
         config: FlycheckConfig,
         sysroot_root: Option<AbsPathBuf>,
         workspace_root: AbsPathBuf,
@@ -267,17 +256,18 @@
     }
 
     fn report_progress(&self, progress: Progress) {
-        self.send(Message::Progress { id: self.id, progress });
+        self.send(FlycheckMessage::Progress { id: self.id, progress });
     }
 
     fn next_event(&self, inbox: &Receiver<StateChange>) -> Option<Event> {
-        if let Ok(msg) = inbox.try_recv() {
-            // give restarts a preference so check outputs don't block a restart or stop
-            return Some(Event::RequestStateChange(msg));
-        }
-        select! {
+        let Some(command_receiver) = &self.command_receiver else {
+            return inbox.recv().ok().map(Event::RequestStateChange);
+        };
+
+        // Biased to give restarts a preference so check outputs don't block a restart or stop
+        select_biased! {
             recv(inbox) -> msg => msg.ok().map(Event::RequestStateChange),
-            recv(self.command_receiver.as_ref().unwrap_or(&never())) -> msg => Some(Event::CheckEvent(msg.ok())),
+            recv(command_receiver) -> msg => Some(Event::CheckEvent(msg.ok())),
         }
     }
 
@@ -340,7 +330,7 @@
                         );
                     }
                     if self.status == FlycheckStatus::Started {
-                        self.send(Message::ClearDiagnostics { id: self.id });
+                        self.send(FlycheckMessage::ClearDiagnostics { id: self.id });
                     }
                     self.report_progress(Progress::DidFinish(res));
                     self.status = FlycheckStatus::Finished;
@@ -362,9 +352,9 @@
                             "diagnostic received"
                         );
                         if self.status == FlycheckStatus::Started {
-                            self.send(Message::ClearDiagnostics { id: self.id });
+                            self.send(FlycheckMessage::ClearDiagnostics { id: self.id });
                         }
-                        self.send(Message::AddDiagnostic {
+                        self.send(FlycheckMessage::AddDiagnostic {
                             id: self.id,
                             workspace_root: self.root.clone(),
                             diagnostic: msg,
@@ -399,7 +389,7 @@
         package: Option<&str>,
         saved_file: Option<&AbsPath>,
     ) -> Option<Command> {
-        let (mut cmd, args) = match &self.config {
+        match &self.config {
             FlycheckConfig::CargoCommand { command, options, ansi_color_output } => {
                 let mut cmd = Command::new(Tool::Cargo.path());
                 if let Some(sysroot_root) = &self.sysroot_root {
@@ -430,7 +420,8 @@
                 cmd.arg("--keep-going");
 
                 options.apply_on_command(&mut cmd);
-                (cmd, options.extra_args.clone())
+                cmd.args(&options.extra_args);
+                Some(cmd)
             }
             FlycheckConfig::CustomCommand {
                 command,
@@ -459,38 +450,36 @@
                     }
                 }
 
-                if args.contains(&SAVED_FILE_PLACEHOLDER.to_owned()) {
-                    // If the custom command has a $saved_file placeholder, and
-                    // we're saving a file, replace the placeholder in the arguments.
-                    if let Some(saved_file) = saved_file {
-                        let args = args
-                            .iter()
-                            .map(|arg| {
-                                if arg == SAVED_FILE_PLACEHOLDER {
-                                    saved_file.to_string()
-                                } else {
-                                    arg.clone()
-                                }
-                            })
-                            .collect();
-                        (cmd, args)
-                    } else {
-                        // The custom command has a $saved_file placeholder,
-                        // but we had an IDE event that wasn't a file save. Do nothing.
-                        return None;
+                // If the custom command has a $saved_file placeholder, and
+                // we're saving a file, replace the placeholder in the arguments.
+                if let Some(saved_file) = saved_file {
+                    for arg in args {
+                        if arg == SAVED_FILE_PLACEHOLDER {
+                            cmd.arg(saved_file);
+                        } else {
+                            cmd.arg(arg);
+                        }
                     }
                 } else {
-                    (cmd, args.clone())
-                }
-            }
-        };
+                    for arg in args {
+                        if arg == SAVED_FILE_PLACEHOLDER {
+                            // The custom command has a $saved_file placeholder,
+                            // but we had an IDE event that wasn't a file save. Do nothing.
+                            return None;
+                        }
 
-        cmd.args(args);
-        Some(cmd)
+                        cmd.arg(arg);
+                    }
+                }
+
+                Some(cmd)
+            }
+        }
     }
 
-    fn send(&self, check_task: Message) {
-        (self.sender)(check_task);
+    #[track_caller]
+    fn send(&self, check_task: FlycheckMessage) {
+        self.sender.send(check_task).unwrap();
     }
 }
 
diff --git a/crates/rust-analyzer/src/global_state.rs b/crates/rust-analyzer/src/global_state.rs
index f1dde10..7a7ec1d 100644
--- a/crates/rust-analyzer/src/global_state.rs
+++ b/crates/rust-analyzer/src/global_state.rs
@@ -6,10 +6,9 @@
 use std::{ops::Not as _, time::Instant};
 
 use crossbeam_channel::{unbounded, Receiver, Sender};
-use flycheck::{project_json, FlycheckHandle};
 use hir::ChangeWithProcMacros;
 use ide::{Analysis, AnalysisHost, Cancellable, FileId, SourceRootId};
-use ide_db::base_db::{CrateId, ProcMacroPaths, SourceDatabaseExt};
+use ide_db::base_db::{CrateId, ProcMacroPaths, SourceDatabase, SourceRootDatabase};
 use itertools::Itertools;
 use load_cargo::SourceRootConfig;
 use lsp_types::{SemanticTokens, Url};
@@ -28,15 +27,18 @@
 use crate::{
     config::{Config, ConfigChange, ConfigErrors, RatomlFileKind},
     diagnostics::{CheckFixes, DiagnosticCollection},
+    discover,
+    flycheck::{FlycheckHandle, FlycheckMessage},
     line_index::{LineEndings, LineIndex},
     lsp::{from_proto, to_proto::url_from_abs_path},
     lsp_ext,
     main_loop::Task,
     mem_docs::MemDocs,
-    op_queue::OpQueue,
+    op_queue::{Cause, OpQueue},
     reload,
     target_spec::{CargoTargetSpec, ProjectJsonTargetSpec, TargetSpec},
     task_pool::{TaskPool, TaskQueue},
+    test_runner::{CargoTestHandle, CargoTestMessage},
 };
 
 pub(crate) struct FetchWorkspaceRequest {
@@ -88,28 +90,28 @@
 
     // Flycheck
     pub(crate) flycheck: Arc<[FlycheckHandle]>,
-    pub(crate) flycheck_sender: Sender<flycheck::Message>,
-    pub(crate) flycheck_receiver: Receiver<flycheck::Message>,
+    pub(crate) flycheck_sender: Sender<FlycheckMessage>,
+    pub(crate) flycheck_receiver: Receiver<FlycheckMessage>,
     pub(crate) last_flycheck_error: Option<String>,
 
     // Test explorer
-    pub(crate) test_run_session: Option<Vec<flycheck::CargoTestHandle>>,
-    pub(crate) test_run_sender: Sender<flycheck::CargoTestMessage>,
-    pub(crate) test_run_receiver: Receiver<flycheck::CargoTestMessage>,
+    pub(crate) test_run_session: Option<Vec<CargoTestHandle>>,
+    pub(crate) test_run_sender: Sender<CargoTestMessage>,
+    pub(crate) test_run_receiver: Receiver<CargoTestMessage>,
     pub(crate) test_run_remaining_jobs: usize,
 
     // Project loading
-    pub(crate) discover_handle: Option<project_json::DiscoverHandle>,
-    pub(crate) discover_sender: Sender<project_json::DiscoverProjectMessage>,
-    pub(crate) discover_receiver: Receiver<project_json::DiscoverProjectMessage>,
+    pub(crate) discover_handle: Option<discover::DiscoverHandle>,
+    pub(crate) discover_sender: Sender<discover::DiscoverProjectMessage>,
+    pub(crate) discover_receiver: Receiver<discover::DiscoverProjectMessage>,
 
     // VFS
     pub(crate) loader: Handle<Box<dyn vfs::loader::Handle>, Receiver<vfs::loader::Message>>,
     pub(crate) vfs: Arc<RwLock<(vfs::Vfs, IntMap<FileId, LineEndings>)>>,
     pub(crate) vfs_config_version: u32,
     pub(crate) vfs_progress_config_version: u32,
-    pub(crate) vfs_progress_n_total: usize,
-    pub(crate) vfs_progress_n_done: usize,
+    pub(crate) vfs_done: bool,
+    pub(crate) wants_to_switch: Option<Cause>,
 
     /// `workspaces` field stores the data we actually use, while the `OpQueue`
     /// stores the result of the last fetch.
@@ -183,8 +185,7 @@
     pub(crate) fn new(sender: Sender<lsp_server::Message>, config: Config) -> GlobalState {
         let loader = {
             let (sender, receiver) = unbounded::<vfs::loader::Message>();
-            let handle: vfs_notify::NotifyHandle =
-                vfs::loader::Handle::spawn(Box::new(move |msg| sender.send(msg).unwrap()));
+            let handle: vfs_notify::NotifyHandle = vfs::loader::Handle::spawn(sender);
             let handle = Box::new(handle) as Box<dyn vfs::loader::Handle>;
             Handle { handle, receiver }
         };
@@ -252,8 +253,8 @@
             vfs: Arc::new(RwLock::new((vfs::Vfs::default(), IntMap::default()))),
             vfs_config_version: 0,
             vfs_progress_config_version: 0,
-            vfs_progress_n_total: 0,
-            vfs_progress_n_done: 0,
+            vfs_done: true,
+            wants_to_switch: None,
 
             workspaces: Arc::from(Vec::new()),
             crate_graph_file_dependencies: FxHashSet::default(),
@@ -458,6 +459,11 @@
             }
         }
 
+        // FIXME: `workspace_structure_change` is computed from `should_refresh_for_change` which is
+        // path syntax based. That is not sufficient for all cases so we should lift that check out
+        // into a `QueuedTask`, see `handle_did_save_text_document`.
+        // Or maybe instead of replacing that check, kick off a semantic one if the syntactic one
+        // didn't find anything (to make up for the lack of precision).
         {
             if !matches!(&workspace_structure_change, Some((.., true))) {
                 _ = self
@@ -557,8 +563,52 @@
         self.req_queue.incoming.is_completed(&request.id)
     }
 
+    #[track_caller]
     fn send(&self, message: lsp_server::Message) {
-        self.sender.send(message).unwrap()
+        self.sender.send(message).unwrap();
+    }
+
+    pub(crate) fn publish_diagnostics(
+        &mut self,
+        uri: Url,
+        version: Option<i32>,
+        mut diagnostics: Vec<lsp_types::Diagnostic>,
+    ) {
+        // We put this on a separate thread to avoid blocking the main thread with serialization work
+        self.task_pool.handle.spawn_with_sender(stdx::thread::ThreadIntent::Worker, {
+            let sender = self.sender.clone();
+            move |_| {
+                // VSCode assumes diagnostic messages to be non-empty strings, so we need to patch
+                // empty diagnostics. Neither the docs of VSCode nor the LSP spec say whether
+                // diagnostic messages are actually allowed to be empty or not and patching this
+                // in the VSCode client does not work as the assertion happens in the protocol
+                // conversion. So this hack is here to stay, and will be considered a hack
+                // until the LSP decides to state that empty messages are allowed.
+
+                // See https://github.com/rust-lang/rust-analyzer/issues/11404
+                // See https://github.com/rust-lang/rust-analyzer/issues/13130
+                let patch_empty = |message: &mut String| {
+                    if message.is_empty() {
+                        " ".clone_into(message);
+                    }
+                };
+
+                for d in &mut diagnostics {
+                    patch_empty(&mut d.message);
+                    if let Some(dri) = &mut d.related_information {
+                        for dri in dri {
+                            patch_empty(&mut dri.message);
+                        }
+                    }
+                }
+
+                let not = lsp_server::Notification::new(
+                    <lsp_types::notification::PublishDiagnostics as lsp_types::notification::Notification>::METHOD.to_owned(),
+                    lsp_types::PublishDiagnosticsParams { uri, diagnostics, version },
+                );
+                _ = sender.send(not.into());
+            }
+        });
     }
 }
 
diff --git a/crates/rust-analyzer/src/dispatch.rs b/crates/rust-analyzer/src/handlers/dispatch.rs
similarity index 87%
rename from crates/rust-analyzer/src/dispatch.rs
rename to crates/rust-analyzer/src/handlers/dispatch.rs
index ebdc196..a105ec6 100644
--- a/crates/rust-analyzer/src/dispatch.rs
+++ b/crates/rust-analyzer/src/handlers/dispatch.rs
@@ -97,16 +97,45 @@
         self
     }
 
-    /// Dispatches a non-latency-sensitive request onto the thread pool.
+    /// Dispatches a non-latency-sensitive request onto the thread pool. When the VFS is marked not
+    /// ready this will return a default constructed [`R::Result`].
     pub(crate) fn on<const ALLOW_RETRYING: bool, R>(
         &mut self,
         f: fn(GlobalStateSnapshot, R::Params) -> anyhow::Result<R::Result>,
     ) -> &mut Self
     where
-        R: lsp_types::request::Request + 'static,
-        R::Params: DeserializeOwned + panic::UnwindSafe + Send + fmt::Debug,
-        R::Result: Serialize,
+        R: lsp_types::request::Request<
+                Params: DeserializeOwned + panic::UnwindSafe + Send + fmt::Debug,
+                Result: Serialize + Default,
+            > + 'static,
     {
+        if !self.global_state.vfs_done {
+            if let Some(lsp_server::Request { id, .. }) =
+                self.req.take_if(|it| it.method == R::METHOD)
+            {
+                self.global_state.respond(lsp_server::Response::new_ok(id, R::Result::default()));
+            }
+            return self;
+        }
+        self.on_with_thread_intent::<true, ALLOW_RETRYING, R>(ThreadIntent::Worker, f)
+    }
+
+    /// Dispatches a non-latency-sensitive request onto the thread pool. When the VFS is marked not
+    /// ready this will return the parameter as is.
+    pub(crate) fn on_identity<const ALLOW_RETRYING: bool, R, Params>(
+        &mut self,
+        f: fn(GlobalStateSnapshot, Params) -> anyhow::Result<R::Result>,
+    ) -> &mut Self
+    where
+        R: lsp_types::request::Request<Params = Params, Result = Params> + 'static,
+        Params: Serialize + DeserializeOwned + panic::UnwindSafe + Send + fmt::Debug,
+    {
+        if !self.global_state.vfs_done {
+            if let Some((request, params, _)) = self.parse::<R>() {
+                self.global_state.respond(lsp_server::Response::new_ok(request.id, &params))
+            }
+            return self;
+        }
         self.on_with_thread_intent::<true, ALLOW_RETRYING, R>(ThreadIntent::Worker, f)
     }
 
@@ -198,11 +227,7 @@
         R: lsp_types::request::Request,
         R::Params: DeserializeOwned + fmt::Debug,
     {
-        let req = match &self.req {
-            Some(req) if req.method == R::METHOD => self.req.take()?,
-            _ => return None,
-        };
-
+        let req = self.req.take_if(|it| it.method == R::METHOD)?;
         let res = crate::from_json(R::METHOD, &req.params);
         match res {
             Ok(params) => {
diff --git a/crates/rust-analyzer/src/handlers/notification.rs b/crates/rust-analyzer/src/handlers/notification.rs
index 4b14dcf..de5d1f2 100644
--- a/crates/rust-analyzer/src/handlers/notification.rs
+++ b/crates/rust-analyzer/src/handlers/notification.rs
@@ -9,6 +9,7 @@
     DidChangeWatchedFilesParams, DidChangeWorkspaceFoldersParams, DidCloseTextDocumentParams,
     DidOpenTextDocumentParams, DidSaveTextDocumentParams, WorkDoneProgressCancelParams,
 };
+use paths::Utf8PathBuf;
 use triomphe::Arc;
 use vfs::{AbsPathBuf, ChangeKind, VfsPath};
 
@@ -157,6 +158,8 @@
                 .map(|cfg| cfg.files_to_watch.iter().map(String::as_str).collect::<Vec<&str>>())
                 .unwrap_or_default();
 
+            // FIXME: We should move this check into a QueuedTask and do semantic resolution of
+            // the files. There is only so much we can tell syntactically from the path.
             if reload::should_refresh_for_change(path, ChangeKind::Modify, additional_files) {
                 state.fetch_workspaces_queue.request_op(
                     format!("workspace vfs file change saved {path}"),
@@ -240,6 +243,7 @@
 
     for workspace in params.event.removed {
         let Ok(path) = workspace.uri.to_file_path() else { continue };
+        let Ok(path) = Utf8PathBuf::from_path_buf(path) else { continue };
         let Ok(path) = AbsPathBuf::try_from(path) else { continue };
         config.remove_workspace(&path);
     }
@@ -249,6 +253,7 @@
         .added
         .into_iter()
         .filter_map(|it| it.uri.to_file_path().ok())
+        .filter_map(|it| Utf8PathBuf::from_path_buf(it).ok())
         .filter_map(|it| AbsPathBuf::try_from(it).ok());
     config.add_workspaces(added);
 
diff --git a/crates/rust-analyzer/src/handlers/request.rs b/crates/rust-analyzer/src/handlers/request.rs
index eca139d..34325ac 100644
--- a/crates/rust-analyzer/src/handlers/request.rs
+++ b/crates/rust-analyzer/src/handlers/request.rs
@@ -36,7 +36,6 @@
 
 use crate::{
     config::{Config, RustfmtConfig, WorkspaceSymbolConfig},
-    diff::diff,
     global_state::{FetchWorkspaceRequest, GlobalState, GlobalStateSnapshot},
     hack_recover_crate_name,
     line_index::LineEndings,
@@ -51,6 +50,7 @@
         FetchDependencyListResult, PositionOrRange, ViewCrateGraphParams, WorkspaceSymbolParams,
     },
     target_spec::{CargoTargetSpec, TargetSpec},
+    test_runner::{CargoTestHandle, TestTarget},
 };
 
 pub(crate) fn handle_workspace_reload(state: &mut GlobalState, _: ()) -> anyhow::Result<()> {
@@ -246,15 +246,15 @@
         if let ProjectWorkspaceKind::Cargo { cargo, .. } = &ws.kind {
             let test_target = if let Some(namespace_root) = namespace_root {
                 if let Some(package_name) = find_package_name(namespace_root, cargo) {
-                    flycheck::TestTarget::Package(package_name)
+                    TestTarget::Package(package_name)
                 } else {
-                    flycheck::TestTarget::Workspace
+                    TestTarget::Workspace
                 }
             } else {
-                flycheck::TestTarget::Workspace
+                TestTarget::Workspace
             };
 
-            let handle = flycheck::CargoTestHandle::new(
+            let handle = CargoTestHandle::new(
                 test_path,
                 state.config.cargo_test_options(),
                 cargo.workspace_root(),
@@ -781,9 +781,12 @@
     if let Ok(file_path) = &params.text_document.uri.to_file_path() {
         if file_path.file_name().unwrap_or_default() == "Cargo.toml" {
             // search workspaces for parent packages or fallback to workspace root
-            let abs_path_buf = match AbsPathBuf::try_from(file_path.to_path_buf()).ok() {
-                Some(abs_path_buf) => abs_path_buf,
-                None => return Ok(None),
+            let abs_path_buf = match Utf8PathBuf::from_path_buf(file_path.to_path_buf())
+                .ok()
+                .map(AbsPathBuf::try_from)
+            {
+                Some(Ok(abs_path_buf)) => abs_path_buf,
+                _ => return Ok(None),
             };
 
             let manifest_path = match ManifestPath::try_from(abs_path_buf).ok() {
@@ -2366,3 +2369,47 @@
         ResourceOp::Delete(_) => ResourceOperationKind::Delete,
     }
 }
+
+pub(crate) fn diff(left: &str, right: &str) -> TextEdit {
+    use dissimilar::Chunk;
+
+    let chunks = dissimilar::diff(left, right);
+
+    let mut builder = TextEdit::builder();
+    let mut pos = TextSize::default();
+
+    let mut chunks = chunks.into_iter().peekable();
+    while let Some(chunk) = chunks.next() {
+        if let (Chunk::Delete(deleted), Some(&Chunk::Insert(inserted))) = (chunk, chunks.peek()) {
+            chunks.next().unwrap();
+            let deleted_len = TextSize::of(deleted);
+            builder.replace(TextRange::at(pos, deleted_len), inserted.into());
+            pos += deleted_len;
+            continue;
+        }
+
+        match chunk {
+            Chunk::Equal(text) => {
+                pos += TextSize::of(text);
+            }
+            Chunk::Delete(deleted) => {
+                let deleted_len = TextSize::of(deleted);
+                builder.delete(TextRange::at(pos, deleted_len));
+                pos += deleted_len;
+            }
+            Chunk::Insert(inserted) => {
+                builder.insert(pos, inserted.into());
+            }
+        }
+    }
+    builder.finish()
+}
+
+#[test]
+fn diff_smoke_test() {
+    let mut original = String::from("fn foo(a:u32){\n}");
+    let result = "fn foo(a: u32) {}";
+    let edit = diff(&original, result);
+    edit.apply(&mut original);
+    assert_eq!(original, result);
+}
diff --git a/crates/rust-analyzer/src/integrated_benchmarks.rs b/crates/rust-analyzer/src/integrated_benchmarks.rs
index ff8eb6c..28f4b80 100644
--- a/crates/rust-analyzer/src/integrated_benchmarks.rs
+++ b/crates/rust-analyzer/src/integrated_benchmarks.rs
@@ -46,13 +46,19 @@
 
     let (db, vfs, _proc_macro) = {
         let _it = stdx::timeit("workspace loading");
-        load_workspace_at(&workspace_to_load, &cargo_config, &load_cargo_config, &|_| {}).unwrap()
+        load_workspace_at(
+            workspace_to_load.as_std_path(),
+            &cargo_config,
+            &load_cargo_config,
+            &|_| {},
+        )
+        .unwrap()
     };
     let mut host = AnalysisHost::with_database(db);
 
     let file_id = {
         let file = workspace_to_load.join(file);
-        let path = VfsPath::from(AbsPathBuf::assert_utf8(file));
+        let path = VfsPath::from(AbsPathBuf::assert(file));
         vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {path}"))
     };
 
@@ -106,13 +112,19 @@
 
     let (db, vfs, _proc_macro) = {
         let _it = stdx::timeit("workspace loading");
-        load_workspace_at(&workspace_to_load, &cargo_config, &load_cargo_config, &|_| {}).unwrap()
+        load_workspace_at(
+            workspace_to_load.as_std_path(),
+            &cargo_config,
+            &load_cargo_config,
+            &|_| {},
+        )
+        .unwrap()
     };
     let mut host = AnalysisHost::with_database(db);
 
     let file_id = {
         let file = workspace_to_load.join(file);
-        let path = VfsPath::from(AbsPathBuf::assert_utf8(file));
+        let path = VfsPath::from(AbsPathBuf::assert(file));
         vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {path}"))
     };
 
@@ -274,13 +286,19 @@
 
     let (db, vfs, _proc_macro) = {
         let _it = stdx::timeit("workspace loading");
-        load_workspace_at(&workspace_to_load, &cargo_config, &load_cargo_config, &|_| {}).unwrap()
+        load_workspace_at(
+            workspace_to_load.as_std_path(),
+            &cargo_config,
+            &load_cargo_config,
+            &|_| {},
+        )
+        .unwrap()
     };
     let mut host = AnalysisHost::with_database(db);
 
     let file_id = {
         let file = workspace_to_load.join(file);
-        let path = VfsPath::from(AbsPathBuf::assert_utf8(file));
+        let path = VfsPath::from(AbsPathBuf::assert(file));
         vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {path}"))
     };
 
@@ -307,7 +325,7 @@
         term_search_borrowck: true,
     };
     host.analysis()
-        .diagnostics(&diagnostics_config, ide::AssistResolveStrategy::None, file_id)
+        .full_diagnostics(&diagnostics_config, ide::AssistResolveStrategy::None, file_id)
         .unwrap();
 
     let _g = crate::tracing::hprof::init("*");
@@ -325,7 +343,7 @@
         let _p = tracing::info_span!("diagnostics").entered();
         let _span = profile::cpu_span();
         host.analysis()
-            .diagnostics(&diagnostics_config, ide::AssistResolveStrategy::None, file_id)
+            .full_diagnostics(&diagnostics_config, ide::AssistResolveStrategy::None, file_id)
             .unwrap();
     }
 }
diff --git a/crates/rust-analyzer/src/lib.rs b/crates/rust-analyzer/src/lib.rs
index 174979e..714991e 100644
--- a/crates/rust-analyzer/src/lib.rs
+++ b/crates/rust-analyzer/src/lib.rs
@@ -11,10 +11,10 @@
 
 pub mod cli;
 
-mod capabilities;
+mod command;
 mod diagnostics;
-mod diff;
-mod dispatch;
+mod discover;
+mod flycheck;
 mod hack_recover_crate_name;
 mod line_index;
 mod main_loop;
@@ -23,9 +23,11 @@
 mod reload;
 mod target_spec;
 mod task_pool;
+mod test_runner;
 mod version;
 
 mod handlers {
+    pub(crate) mod dispatch;
     pub(crate) mod notification;
     pub(crate) mod request;
 }
@@ -47,7 +49,7 @@
 use serde::de::DeserializeOwned;
 
 pub use crate::{
-    capabilities::server_capabilities, main_loop::main_loop, reload::ws_to_crate_graph,
+    lsp::capabilities::server_capabilities, main_loop::main_loop, reload::ws_to_crate_graph,
     version::version,
 };
 
diff --git a/crates/rust-analyzer/src/lsp.rs b/crates/rust-analyzer/src/lsp.rs
index 9e0d42f..122ad20 100644
--- a/crates/rust-analyzer/src/lsp.rs
+++ b/crates/rust-analyzer/src/lsp.rs
@@ -3,6 +3,8 @@
 use core::fmt;
 
 pub mod ext;
+
+pub(crate) mod capabilities;
 pub(crate) mod from_proto;
 pub(crate) mod semantic_tokens;
 pub(crate) mod to_proto;
diff --git a/crates/rust-analyzer/src/capabilities.rs b/crates/rust-analyzer/src/lsp/capabilities.rs
similarity index 100%
rename from crates/rust-analyzer/src/capabilities.rs
rename to crates/rust-analyzer/src/lsp/capabilities.rs
diff --git a/crates/rust-analyzer/src/lsp/ext.rs b/crates/rust-analyzer/src/lsp/ext.rs
index 1fcb636..8d1a686 100644
--- a/crates/rust-analyzer/src/lsp/ext.rs
+++ b/crates/rust-analyzer/src/lsp/ext.rs
@@ -61,7 +61,7 @@
 #[serde(rename_all = "camelCase")]
 pub struct FetchDependencyListParams {}
 
-#[derive(Deserialize, Serialize, Debug)]
+#[derive(Deserialize, Serialize, Debug, Default)]
 #[serde(rename_all = "camelCase")]
 pub struct FetchDependencyListResult {
     pub crates: Vec<CrateInfoResult>,
@@ -194,7 +194,7 @@
     pub runnable: Option<Runnable>,
 }
 
-#[derive(Deserialize, Serialize, Debug)]
+#[derive(Deserialize, Serialize, Debug, Default)]
 #[serde(rename_all = "camelCase")]
 pub struct DiscoverTestResults {
     pub tests: Vec<TestItem>,
@@ -690,6 +690,12 @@
     WithLocal(ExternalDocsPair),
 }
 
+impl Default for ExternalDocsResponse {
+    fn default() -> Self {
+        ExternalDocsResponse::Simple(None)
+    }
+}
+
 #[derive(Debug, Default, PartialEq, Serialize, Deserialize, Clone)]
 #[serde(rename_all = "camelCase")]
 pub struct ExternalDocsPair {
diff --git a/crates/rust-analyzer/src/lsp/from_proto.rs b/crates/rust-analyzer/src/lsp/from_proto.rs
index aea4242..1f45448 100644
--- a/crates/rust-analyzer/src/lsp/from_proto.rs
+++ b/crates/rust-analyzer/src/lsp/from_proto.rs
@@ -2,6 +2,7 @@
 use anyhow::format_err;
 use ide::{Annotation, AnnotationKind, AssistKind, LineCol};
 use ide_db::{line_index::WideLineCol, FileId, FilePosition, FileRange};
+use paths::Utf8PathBuf;
 use syntax::{TextRange, TextSize};
 use vfs::AbsPathBuf;
 
@@ -13,7 +14,7 @@
 
 pub(crate) fn abs_path(url: &lsp_types::Url) -> anyhow::Result<AbsPathBuf> {
     let path = url.to_file_path().map_err(|()| anyhow::format_err!("url is not a file"))?;
-    Ok(AbsPathBuf::try_from(path).unwrap())
+    Ok(AbsPathBuf::try_from(Utf8PathBuf::from_path_buf(path).unwrap()).unwrap())
 }
 
 pub(crate) fn vfs_path(url: &lsp_types::Url) -> anyhow::Result<vfs::VfsPath> {
diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs
index 9c82074..1d4ee71 100644
--- a/crates/rust-analyzer/src/main_loop.rs
+++ b/crates/rust-analyzer/src/main_loop.rs
@@ -9,26 +9,28 @@
 
 use always_assert::always;
 use crossbeam_channel::{select, Receiver};
-use flycheck::project_json;
-use ide_db::base_db::{SourceDatabase, SourceDatabaseExt, VfsPath};
+use ide_db::base_db::{SourceDatabase, SourceRootDatabase, VfsPath};
 use lsp_server::{Connection, Notification, Request};
 use lsp_types::{notification::Notification as _, TextDocumentIdentifier};
 use stdx::thread::ThreadIntent;
 use tracing::{error, span, Level};
-use vfs::{AbsPathBuf, FileId};
+use vfs::{loader::LoadingProgress, AbsPathBuf, FileId};
 
 use crate::{
     config::Config,
-    diagnostics::{fetch_native_diagnostics, DiagnosticsGeneration},
-    dispatch::{NotificationDispatcher, RequestDispatcher},
+    diagnostics::{fetch_native_diagnostics, DiagnosticsGeneration, NativeDiagnosticsFetchKind},
+    discover::{DiscoverArgument, DiscoverCommand, DiscoverProjectMessage},
+    flycheck::{self, FlycheckMessage},
     global_state::{file_id_to_url, url_to_file_id, FetchWorkspaceRequest, GlobalState},
     hack_recover_crate_name,
+    handlers::dispatch::{NotificationDispatcher, RequestDispatcher},
     lsp::{
         from_proto, to_proto,
         utils::{notification_is, Progress},
     },
     lsp_ext,
     reload::{BuildDataProgress, ProcMacroProgress, ProjectWorkspaceProgress},
+    test_runner::{CargoTestMessage, TestState},
 };
 
 pub fn main_loop(config: Config, connection: Connection) -> anyhow::Result<()> {
@@ -61,9 +63,9 @@
     Task(Task),
     QueuedTask(QueuedTask),
     Vfs(vfs::loader::Message),
-    Flycheck(flycheck::Message),
-    TestResult(flycheck::CargoTestMessage),
-    DiscoverProject(project_json::DiscoverProjectMessage),
+    Flycheck(FlycheckMessage),
+    TestResult(CargoTestMessage),
+    DiscoverProject(DiscoverProjectMessage),
 }
 
 impl fmt::Display for Event {
@@ -87,16 +89,23 @@
 }
 
 #[derive(Debug)]
+pub(crate) enum DiagnosticsTaskKind {
+    Syntax(DiagnosticsGeneration, Vec<(FileId, Vec<lsp_types::Diagnostic>)>),
+    Semantic(DiagnosticsGeneration, Vec<(FileId, Vec<lsp_types::Diagnostic>)>),
+}
+
+#[derive(Debug)]
 pub(crate) enum Task {
     Response(lsp_server::Response),
     DiscoverLinkedProjects(DiscoverProjectParam),
     Retry(lsp_server::Request),
-    Diagnostics(DiagnosticsGeneration, Vec<(FileId, Vec<lsp_types::Diagnostic>)>),
+    Diagnostics(DiagnosticsTaskKind),
     DiscoverTest(lsp_ext::DiscoverTestResults),
     PrimeCaches(PrimeCachesProgress),
     FetchWorkspace(ProjectWorkspaceProgress),
     FetchBuildData(BuildDataProgress),
     LoadProcMacros(ProcMacroProgress),
+    // FIXME: Remove this in favor of a more general QueuedTask, see `handle_did_save_text_document`
     BuildDepsHaveChanged,
 }
 
@@ -164,8 +173,10 @@
         }
 
         if self.config.discover_workspace_config().is_none() {
-            let req = FetchWorkspaceRequest { path: None, force_crate_graph_reload: false };
-            self.fetch_workspaces_queue.request_op("startup".to_owned(), req);
+            self.fetch_workspaces_queue.request_op(
+                "startup".to_owned(),
+                FetchWorkspaceRequest { path: None, force_crate_graph_reload: false },
+            );
             if let Some((cause, FetchWorkspaceRequest { path, force_crate_graph_reload })) =
                 self.fetch_workspaces_queue.should_start_op()
             {
@@ -173,7 +184,10 @@
             }
         }
 
-        while let Some(event) = self.next_event(&inbox) {
+        while let Ok(event) = self.next_event(&inbox) {
+            let Some(event) = event else {
+                anyhow::bail!("client exited without proper shutdown sequence");
+            };
             if matches!(
                 &event,
                 Event::Lsp(lsp_server::Message::Notification(Notification { method, .. }))
@@ -184,7 +198,7 @@
             self.handle_event(event)?;
         }
 
-        anyhow::bail!("client exited without proper shutdown sequence")
+        Err(anyhow::anyhow!("A receiver has been dropped, something panicked!"))
     }
 
     fn register_did_save_capability(&mut self, additional_patterns: impl Iterator<Item = String>) {
@@ -231,37 +245,40 @@
         );
     }
 
-    fn next_event(&self, inbox: &Receiver<lsp_server::Message>) -> Option<Event> {
+    fn next_event(
+        &self,
+        inbox: &Receiver<lsp_server::Message>,
+    ) -> Result<Option<Event>, crossbeam_channel::RecvError> {
         select! {
             recv(inbox) -> msg =>
-                msg.ok().map(Event::Lsp),
+                return Ok(msg.ok().map(Event::Lsp)),
 
             recv(self.task_pool.receiver) -> task =>
-                Some(Event::Task(task.unwrap())),
+                task.map(Event::Task),
 
             recv(self.deferred_task_queue.receiver) -> task =>
-                Some(Event::QueuedTask(task.unwrap())),
+                task.map(Event::QueuedTask),
 
             recv(self.fmt_pool.receiver) -> task =>
-                Some(Event::Task(task.unwrap())),
+                task.map(Event::Task),
 
             recv(self.loader.receiver) -> task =>
-                Some(Event::Vfs(task.unwrap())),
+                task.map(Event::Vfs),
 
             recv(self.flycheck_receiver) -> task =>
-                Some(Event::Flycheck(task.unwrap())),
+                task.map(Event::Flycheck),
 
             recv(self.test_run_receiver) -> task =>
-                Some(Event::TestResult(task.unwrap())),
+                task.map(Event::TestResult),
 
             recv(self.discover_receiver) -> task =>
-                Some(Event::DiscoverProject(task.unwrap())),
+                task.map(Event::DiscoverProject),
         }
+        .map(Some)
     }
 
     fn handle_event(&mut self, event: Event) -> anyhow::Result<()> {
         let loop_start = Instant::now();
-        // NOTE: don't count blocking select! call as a loop-turn time
         let _p = tracing::info_span!("GlobalState::handle_event", event = %event).entered();
 
         let event_dbg_msg = format!("{event:?}");
@@ -375,9 +392,14 @@
             }
         }
         let event_handling_duration = loop_start.elapsed();
-
-        let state_changed = self.process_changes();
-        let memdocs_added_or_removed = self.mem_docs.take_changes();
+        let (state_changed, memdocs_added_or_removed) = if self.vfs_done {
+            if let Some(cause) = self.wants_to_switch.take() {
+                self.switch_workspaces(cause);
+            }
+            (self.process_changes(), self.mem_docs.take_changes())
+        } else {
+            (false, false)
+        };
 
         if self.is_quiescent() {
             let became_quiescent = !was_quiescent;
@@ -423,40 +445,13 @@
         if let Some(diagnostic_changes) = self.diagnostics.take_changes() {
             for file_id in diagnostic_changes {
                 let uri = file_id_to_url(&self.vfs.read().0, file_id);
-                let mut diagnostics =
-                    self.diagnostics.diagnostics_for(file_id).cloned().collect::<Vec<_>>();
-
-                // VSCode assumes diagnostic messages to be non-empty strings, so we need to patch
-                // empty diagnostics. Neither the docs of VSCode nor the LSP spec say whether
-                // diagnostic messages are actually allowed to be empty or not and patching this
-                // in the VSCode client does not work as the assertion happens in the protocol
-                // conversion. So this hack is here to stay, and will be considered a hack
-                // until the LSP decides to state that empty messages are allowed.
-
-                // See https://github.com/rust-lang/rust-analyzer/issues/11404
-                // See https://github.com/rust-lang/rust-analyzer/issues/13130
-                let patch_empty = |message: &mut String| {
-                    if message.is_empty() {
-                        " ".clone_into(message);
-                    }
-                };
-
-                for d in &mut diagnostics {
-                    patch_empty(&mut d.message);
-                    if let Some(dri) = &mut d.related_information {
-                        for dri in dri {
-                            patch_empty(&mut dri.message);
-                        }
-                    }
-                }
-
                 let version = from_proto::vfs_path(&uri)
-                    .map(|path| self.mem_docs.get(&path).map(|it| it.version))
-                    .unwrap_or_default();
+                    .ok()
+                    .and_then(|path| self.mem_docs.get(&path).map(|it| it.version));
 
-                self.send_notification::<lsp_types::notification::PublishDiagnostics>(
-                    lsp_types::PublishDiagnosticsParams { uri, diagnostics, version },
-                );
+                let diagnostics =
+                    self.diagnostics.diagnostics_for(file_id).cloned().collect::<Vec<_>>();
+                self.publish_diagnostics(uri, version, diagnostics);
             }
         }
 
@@ -549,14 +544,37 @@
             }
             // Diagnostics are triggered by the user typing
             // so we run them on a latency sensitive thread.
-            self.task_pool.handle.spawn(ThreadIntent::LatencySensitive, {
-                let snapshot = self.snapshot();
+            let snapshot = self.snapshot();
+            self.task_pool.handle.spawn_with_sender(ThreadIntent::LatencySensitive, {
                 let subscriptions = subscriptions.clone();
-                move || {
-                    Task::Diagnostics(
-                        generation,
-                        fetch_native_diagnostics(snapshot, subscriptions, slice),
-                    )
+                // Do not fetch semantic diagnostics (and populate query results) if we haven't even
+                // loaded the initial workspace yet.
+                let fetch_semantic =
+                    self.vfs_done && self.fetch_workspaces_queue.last_op_result().is_some();
+                move |sender| {
+                    let diags = fetch_native_diagnostics(
+                        &snapshot,
+                        subscriptions.clone(),
+                        slice.clone(),
+                        NativeDiagnosticsFetchKind::Syntax,
+                    );
+                    sender
+                        .send(Task::Diagnostics(DiagnosticsTaskKind::Syntax(generation, diags)))
+                        .unwrap();
+
+                    if fetch_semantic {
+                        let diags = fetch_native_diagnostics(
+                            &snapshot,
+                            subscriptions,
+                            slice,
+                            NativeDiagnosticsFetchKind::Semantic,
+                        );
+                        sender
+                            .send(Task::Diagnostics(DiagnosticsTaskKind::Semantic(
+                                generation, diags,
+                            )))
+                            .unwrap();
+                    }
                 }
             });
             start = end;
@@ -564,6 +582,9 @@
     }
 
     fn update_tests(&mut self) {
+        if !self.vfs_done {
+            return;
+        }
         let db = self.analysis_host.raw_database();
         let subscriptions = self
             .mem_docs
@@ -644,10 +665,8 @@
             // Only retry requests that haven't been cancelled. Otherwise we do unnecessary work.
             Task::Retry(req) if !self.is_completed(&req) => self.on_request(req),
             Task::Retry(_) => (),
-            Task::Diagnostics(generation, diagnostics_per_file) => {
-                for (file_id, diagnostics) in diagnostics_per_file {
-                    self.diagnostics.set_native_diagnostics(generation, file_id, diagnostics)
-                }
+            Task::Diagnostics(kind) => {
+                self.diagnostics.set_native_diagnostics(kind);
             }
             Task::PrimeCaches(progress) => match progress {
                 PrimeCachesProgress::Begin => prime_caches_progress.push(progress),
@@ -672,7 +691,7 @@
                         if let Err(e) = self.fetch_workspace_error() {
                             error!("FetchWorkspaceError:\n{e}");
                         }
-                        self.switch_workspaces("fetched workspace".to_owned());
+                        self.wants_to_switch = Some("fetched workspace".to_owned());
                         (Progress::End, None)
                     }
                 };
@@ -686,8 +705,7 @@
                         // `self.report_progress` is called later
                         let title = &cfg.progress_label.clone();
                         let command = cfg.command.clone();
-                        let discover =
-                            project_json::Discover::new(self.discover_sender.clone(), command);
+                        let discover = DiscoverCommand::new(self.discover_sender.clone(), command);
 
                         self.report_progress(title, Progress::Begin, None, None, None);
                         self.discover_workspace_queue
@@ -695,12 +713,8 @@
                         let _ = self.discover_workspace_queue.should_start_op();
 
                         let arg = match arg {
-                            DiscoverProjectParam::Buildfile(it) => {
-                                project_json::DiscoverArgument::Buildfile(it)
-                            }
-                            DiscoverProjectParam::Path(it) => {
-                                project_json::DiscoverArgument::Path(it)
-                            }
+                            DiscoverProjectParam::Buildfile(it) => DiscoverArgument::Buildfile(it),
+                            DiscoverProjectParam::Path(it) => DiscoverArgument::Path(it),
                         };
 
                         let handle = discover.spawn(arg).unwrap();
@@ -718,8 +732,9 @@
                             error!("FetchBuildDataError:\n{e}");
                         }
 
-                        self.switch_workspaces("fetched build data".to_owned());
-
+                        if self.wants_to_switch.is_none() {
+                            self.wants_to_switch = Some("fetched build data".to_owned());
+                        }
                         (Some(Progress::End), None)
                     }
                 };
@@ -772,16 +787,14 @@
                 let _p = tracing::info_span!("GlobalState::handle_vfs_mgs/progress").entered();
                 always!(config_version <= self.vfs_config_version);
 
-                let state = match n_done {
-                    None => Progress::Begin,
-                    Some(done) if done == n_total => Progress::End,
-                    Some(_) => Progress::Report,
+                let (n_done, state) = match n_done {
+                    LoadingProgress::Started => (0, Progress::Begin),
+                    LoadingProgress::Progress(n_done) => (n_done.min(n_total), Progress::Report),
+                    LoadingProgress::Finished => (n_total, Progress::End),
                 };
-                let n_done = n_done.unwrap_or_default();
 
                 self.vfs_progress_config_version = config_version;
-                self.vfs_progress_n_total = n_total;
-                self.vfs_progress_n_done = n_done;
+                self.vfs_done = state == Progress::End;
 
                 let mut message = format!("{n_done}/{n_total}");
                 if let Some(dir) = dir {
@@ -850,14 +863,14 @@
         }
     }
 
-    fn handle_discover_msg(&mut self, message: project_json::DiscoverProjectMessage) {
+    fn handle_discover_msg(&mut self, message: DiscoverProjectMessage) {
         let title = self
             .config
             .discover_workspace_config()
             .map(|cfg| cfg.progress_label.clone())
             .expect("No title could be found; this is a bug");
         match message {
-            project_json::DiscoverProjectMessage::Finished { project, buildfile } => {
+            DiscoverProjectMessage::Finished { project, buildfile } => {
                 self.report_progress(&title, Progress::End, None, None, None);
                 self.discover_workspace_queue.op_completed(());
 
@@ -865,10 +878,10 @@
                 config.add_linked_projects(project, buildfile);
                 self.update_configuration(config);
             }
-            project_json::DiscoverProjectMessage::Progress { message } => {
+            DiscoverProjectMessage::Progress { message } => {
                 self.report_progress(&title, Progress::Report, Some(message), None, None)
             }
-            project_json::DiscoverProjectMessage::Error { error, source } => {
+            DiscoverProjectMessage::Error { error, source } => {
                 let message = format!("Project discovery failed: {error}");
                 self.discover_workspace_queue.op_completed(());
                 self.show_and_log_error(message.clone(), source);
@@ -877,16 +890,14 @@
         }
     }
 
-    fn handle_cargo_test_msg(&mut self, message: flycheck::CargoTestMessage) {
+    fn handle_cargo_test_msg(&mut self, message: CargoTestMessage) {
         match message {
-            flycheck::CargoTestMessage::Test { name, state } => {
+            CargoTestMessage::Test { name, state } => {
                 let state = match state {
-                    flycheck::TestState::Started => lsp_ext::TestState::Started,
-                    flycheck::TestState::Ignored => lsp_ext::TestState::Skipped,
-                    flycheck::TestState::Ok => lsp_ext::TestState::Passed,
-                    flycheck::TestState::Failed { stdout } => {
-                        lsp_ext::TestState::Failed { message: stdout }
-                    }
+                    TestState::Started => lsp_ext::TestState::Started,
+                    TestState::Ignored => lsp_ext::TestState::Skipped,
+                    TestState::Ok => lsp_ext::TestState::Passed,
+                    TestState::Failed { stdout } => lsp_ext::TestState::Failed { message: stdout },
                 };
                 let Some(test_id) = hack_recover_crate_name::lookup_name(name) else {
                     return;
@@ -895,23 +906,23 @@
                     lsp_ext::ChangeTestStateParams { test_id, state },
                 );
             }
-            flycheck::CargoTestMessage::Suite => (),
-            flycheck::CargoTestMessage::Finished => {
+            CargoTestMessage::Suite => (),
+            CargoTestMessage::Finished => {
                 self.test_run_remaining_jobs = self.test_run_remaining_jobs.saturating_sub(1);
                 if self.test_run_remaining_jobs == 0 {
                     self.send_notification::<lsp_ext::EndRunTest>(());
                     self.test_run_session = None;
                 }
             }
-            flycheck::CargoTestMessage::Custom { text } => {
+            CargoTestMessage::Custom { text } => {
                 self.send_notification::<lsp_ext::AppendOutputToRunTest>(text);
             }
         }
     }
 
-    fn handle_flycheck_msg(&mut self, message: flycheck::Message) {
+    fn handle_flycheck_msg(&mut self, message: FlycheckMessage) {
         match message {
-            flycheck::Message::AddDiagnostic { id, workspace_root, diagnostic } => {
+            FlycheckMessage::AddDiagnostic { id, workspace_root, diagnostic } => {
                 let snap = self.snapshot();
                 let diagnostics = crate::diagnostics::to_proto::map_rust_diagnostic_to_lsp(
                     &self.config.diagnostics_map(),
@@ -937,9 +948,9 @@
                 }
             }
 
-            flycheck::Message::ClearDiagnostics { id } => self.diagnostics.clear_check(id),
+            FlycheckMessage::ClearDiagnostics { id } => self.diagnostics.clear_check(id),
 
-            flycheck::Message::Progress { id, progress } => {
+            FlycheckMessage::Progress { id, progress } => {
                 let (state, message) = match progress {
                     flycheck::Progress::DidStart => (Progress::Begin, None),
                     flycheck::Progress::DidCheckCrate(target) => (Progress::Report, Some(target)),
@@ -1054,9 +1065,9 @@
             .on::<NO_RETRY, lsp_request::GotoImplementation>(handlers::handle_goto_implementation)
             .on::<NO_RETRY, lsp_request::GotoTypeDefinition>(handlers::handle_goto_type_definition)
             .on::<NO_RETRY, lsp_request::InlayHintRequest>(handlers::handle_inlay_hints)
-            .on::<NO_RETRY, lsp_request::InlayHintResolveRequest>(handlers::handle_inlay_hints_resolve)
+            .on_identity::<NO_RETRY, lsp_request::InlayHintResolveRequest, _>(handlers::handle_inlay_hints_resolve)
             .on::<NO_RETRY, lsp_request::CodeLensRequest>(handlers::handle_code_lens)
-            .on::<NO_RETRY, lsp_request::CodeLensResolve>(handlers::handle_code_lens_resolve)
+            .on_identity::<NO_RETRY, lsp_request::CodeLensResolve, _>(handlers::handle_code_lens_resolve)
             .on::<NO_RETRY, lsp_request::PrepareRenameRequest>(handlers::handle_prepare_rename)
             .on::<NO_RETRY, lsp_request::Rename>(handlers::handle_rename)
             .on::<NO_RETRY, lsp_request::References>(handlers::handle_references)
@@ -1083,7 +1094,7 @@
             .on::<NO_RETRY, lsp_ext::Runnables>(handlers::handle_runnables)
             .on::<NO_RETRY, lsp_ext::RelatedTests>(handlers::handle_related_tests)
             .on::<NO_RETRY, lsp_ext::CodeActionRequest>(handlers::handle_code_action)
-            .on::<RETRY, lsp_ext::CodeActionResolveRequest>(handlers::handle_code_action_resolve)
+            .on_identity::<RETRY, lsp_ext::CodeActionResolveRequest, _>(handlers::handle_code_action_resolve)
             .on::<NO_RETRY, lsp_ext::HoverRequest>(handlers::handle_hover)
             .on::<NO_RETRY, lsp_ext::ExternalDocs>(handlers::handle_open_docs)
             .on::<NO_RETRY, lsp_ext::OpenCargoToml>(handlers::handle_open_cargo_toml)
diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs
index 5c95ccd..dee34b1 100644
--- a/crates/rust-analyzer/src/reload.rs
+++ b/crates/rust-analyzer/src/reload.rs
@@ -15,7 +15,6 @@
 // FIXME: This is a mess that needs some untangling work
 use std::{iter, mem};
 
-use flycheck::{FlycheckConfig, FlycheckHandle};
 use hir::{db::DefDatabase, ChangeWithProcMacros, ProcMacros, ProcMacrosBuilder};
 use ide_db::{
     base_db::{salsa::Durability, CrateGraph, ProcMacroPaths, Version},
@@ -32,6 +31,7 @@
 
 use crate::{
     config::{Config, FilesWatcher, LinkedProject},
+    flycheck::{FlycheckConfig, FlycheckHandle},
     global_state::{FetchWorkspaceRequest, GlobalState},
     lsp_ext,
     main_loop::{DiscoverProjectParam, Task},
@@ -62,13 +62,13 @@
 
 impl GlobalState {
     pub(crate) fn is_quiescent(&self) -> bool {
-        !(self.last_reported_status.is_none()
-            || self.fetch_workspaces_queue.op_in_progress()
-            || self.fetch_build_data_queue.op_in_progress()
-            || self.fetch_proc_macros_queue.op_in_progress()
-            || self.discover_workspace_queue.op_in_progress()
-            || self.vfs_progress_config_version < self.vfs_config_version
-            || self.vfs_progress_n_done < self.vfs_progress_n_total)
+        self.vfs_done
+            && self.last_reported_status.is_some()
+            && !self.fetch_workspaces_queue.op_in_progress()
+            && !self.fetch_build_data_queue.op_in_progress()
+            && !self.fetch_proc_macros_queue.op_in_progress()
+            && !self.discover_workspace_queue.op_in_progress()
+            && self.vfs_progress_config_version >= self.vfs_config_version
     }
 
     pub(crate) fn update_configuration(&mut self, config: Config) {
@@ -102,15 +102,13 @@
     }
 
     pub(crate) fn current_status(&self) -> lsp_ext::ServerStatusParams {
-        let mut status = lsp_ext::ServerStatusParams {
-            health: lsp_ext::Health::Ok,
-            quiescent: self.is_quiescent(),
-            message: None,
-        };
+        let quiescent = self.is_quiescent();
+        let mut status =
+            lsp_ext::ServerStatusParams { health: lsp_ext::Health::Ok, quiescent, message: None };
         let mut message = String::new();
 
         if !self.config.cargo_autoreload(None)
-            && self.is_quiescent()
+            && quiescent
             && self.fetch_workspaces_queue.op_requested()
             && self.config.discover_workspace_config().is_none()
         {
@@ -242,7 +240,7 @@
             let discover_command = self.config.discover_workspace_config().cloned();
             let is_quiescent = !(self.discover_workspace_queue.op_in_progress()
                 || self.vfs_progress_config_version < self.vfs_config_version
-                || self.vfs_progress_n_done < self.vfs_progress_n_total);
+                || !self.vfs_done);
 
             move |sender| {
                 let progress = {
@@ -751,20 +749,22 @@
         let config = self.config.flycheck();
         let sender = self.flycheck_sender.clone();
         let invocation_strategy = match config {
-            FlycheckConfig::CargoCommand { .. } => flycheck::InvocationStrategy::PerWorkspace,
+            FlycheckConfig::CargoCommand { .. } => {
+                crate::flycheck::InvocationStrategy::PerWorkspace
+            }
             FlycheckConfig::CustomCommand { invocation_strategy, .. } => invocation_strategy,
         };
 
         self.flycheck = match invocation_strategy {
-            flycheck::InvocationStrategy::Once => vec![FlycheckHandle::spawn(
+            crate::flycheck::InvocationStrategy::Once => vec![FlycheckHandle::spawn(
                 0,
-                Box::new(move |msg| sender.send(msg).unwrap()),
+                sender,
                 config,
                 None,
                 self.config.root_path().clone(),
                 None,
             )],
-            flycheck::InvocationStrategy::PerWorkspace => {
+            crate::flycheck::InvocationStrategy::PerWorkspace => {
                 self.workspaces
                     .iter()
                     .enumerate()
@@ -793,10 +793,9 @@
                         ))
                     })
                     .map(|(id, (root, manifest_path), sysroot_root)| {
-                        let sender = sender.clone();
                         FlycheckHandle::spawn(
                             id,
-                            Box::new(move |msg| sender.send(msg).unwrap()),
+                            sender.clone(),
                             config.clone(),
                             sysroot_root,
                             root.to_path_buf(),
diff --git a/crates/rust-analyzer/src/target_spec.rs b/crates/rust-analyzer/src/target_spec.rs
index 67e1bad..965fd41 100644
--- a/crates/rust-analyzer/src/target_spec.rs
+++ b/crates/rust-analyzer/src/target_spec.rs
@@ -263,11 +263,14 @@
     use super::*;
 
     use ide::Edition;
-    use mbe::{syntax_node_to_token_tree, DocCommentDesugarMode, DummyTestSpanMap, DUMMY};
     use syntax::{
         ast::{self, AstNode},
         SmolStr,
     };
+    use syntax_bridge::{
+        dummy_test_span_utils::{DummyTestSpanMap, DUMMY},
+        syntax_node_to_token_tree, DocCommentDesugarMode,
+    };
 
     fn check(cfg: &str, expected_features: &[&str]) {
         let cfg_expr = {
diff --git a/crates/flycheck/src/test_runner.rs b/crates/rust-analyzer/src/test_runner.rs
similarity index 93%
rename from crates/flycheck/src/test_runner.rs
rename to crates/rust-analyzer/src/test_runner.rs
index 74ebca3..293cff4 100644
--- a/crates/flycheck/src/test_runner.rs
+++ b/crates/rust-analyzer/src/test_runner.rs
@@ -10,12 +10,12 @@
 
 use crate::{
     command::{CommandHandle, ParseFromLine},
-    CargoOptions,
+    flycheck::CargoOptions,
 };
 
 #[derive(Debug, Deserialize)]
 #[serde(tag = "event", rename_all = "camelCase")]
-pub enum TestState {
+pub(crate) enum TestState {
     Started,
     Ok,
     Ignored,
@@ -24,7 +24,7 @@
 
 #[derive(Debug, Deserialize)]
 #[serde(tag = "type", rename_all = "camelCase")]
-pub enum CargoTestMessage {
+pub(crate) enum CargoTestMessage {
     Test {
         name: String,
         #[serde(flatten)]
@@ -54,7 +54,7 @@
 }
 
 #[derive(Debug)]
-pub struct CargoTestHandle {
+pub(crate) struct CargoTestHandle {
     _handle: CommandHandle<CargoTestMessage>,
 }
 
@@ -64,13 +64,13 @@
 // cargo test --package my-package --no-fail-fast -- module::func -Z unstable-options --format=json
 
 #[derive(Debug)]
-pub enum TestTarget {
+pub(crate) enum TestTarget {
     Workspace,
     Package(String),
 }
 
 impl CargoTestHandle {
-    pub fn new(
+    pub(crate) fn new(
         path: Option<&str>,
         options: CargoOptions,
         root: &AbsPath,
diff --git a/crates/rust-analyzer/tests/crate_graph.rs b/crates/rust-analyzer/tests/crate_graph.rs
index 66481d3..b8a82fd 100644
--- a/crates/rust-analyzer/tests/crate_graph.rs
+++ b/crates/rust-analyzer/tests/crate_graph.rs
@@ -69,7 +69,7 @@
     // fake sysroot, so we give them both the same path:
     let sysroot_dir = AbsPathBuf::assert_utf8(sysroot_path);
     let sysroot_src_dir = sysroot_dir.clone();
-    Sysroot::load(Some(sysroot_dir), Some(sysroot_src_dir), false)
+    Sysroot::load(Some(sysroot_dir), Some(sysroot_src_dir))
 }
 
 #[test]
diff --git a/crates/rust-analyzer/tests/slow-tests/main.rs b/crates/rust-analyzer/tests/slow-tests/main.rs
index b1ef483..54cd27f 100644
--- a/crates/rust-analyzer/tests/slow-tests/main.rs
+++ b/crates/rust-analyzer/tests/slow-tests/main.rs
@@ -750,7 +750,7 @@
 
     let code = format!(
         r#"
-//- /rust-project.json
+//- /.rust-project.json
 {project}
 
 //- /src/lib.rs
@@ -909,7 +909,7 @@
 
 fn out_dirs_check_impl(root_contains_symlink: bool) {
     if skip_slow_tests() {
-        // return;
+        return;
     }
 
     let mut server = Project::with_fixture(
@@ -1084,7 +1084,6 @@
     let sysroot = project_model::Sysroot::discover(
         &AbsPathBuf::assert_utf8(std::env::current_dir().unwrap()),
         &Default::default(),
-        false,
     );
 
     let proc_macro_server_path = sysroot.discover_proc_macro_srv().unwrap();
@@ -1125,7 +1124,6 @@
 proc-macro = true
 
 //- /bar/src/lib.rs
-extern crate proc_macro;
 use proc_macro::{Delimiter, Group, Ident, Span, TokenStream, TokenTree};
 macro_rules! t {
     ($n:literal) => {
diff --git a/crates/span/Cargo.toml b/crates/span/Cargo.toml
index 9f85f01..3381dac 100644
--- a/crates/span/Cargo.toml
+++ b/crates/span/Cargo.toml
@@ -1,7 +1,8 @@
 [package]
 name = "span"
 version = "0.0.0"
-description = "TBD"
+repository.workspace = true
+description = "File and span related types for rust-analyzer."
 
 rust-version.workspace = true
 edition.workspace = true
diff --git a/crates/span/src/ast_id.rs b/crates/span/src/ast_id.rs
index b61baa2..0ebd72e 100644
--- a/crates/span/src/ast_id.rs
+++ b/crates/span/src/ast_id.rs
@@ -18,7 +18,28 @@
 
 /// See crates\hir-expand\src\ast_id_map.rs
 /// This is a type erased FileAstId.
-pub type ErasedFileAstId = la_arena::Idx<syntax::SyntaxNodePtr>;
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct ErasedFileAstId(u32);
+
+impl ErasedFileAstId {
+    pub const fn into_raw(self) -> u32 {
+        self.0
+    }
+    pub const fn from_raw(u32: u32) -> Self {
+        Self(u32)
+    }
+}
+
+impl fmt::Display for ErasedFileAstId {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        self.0.fmt(f)
+    }
+}
+impl fmt::Debug for ErasedFileAstId {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        self.0.fmt(f)
+    }
+}
 
 /// `AstId` points to an AST node in a specific file.
 pub struct FileAstId<N: AstIdNode> {
@@ -47,7 +68,7 @@
 
 impl<N: AstIdNode> fmt::Debug for FileAstId<N> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(f, "FileAstId::<{}>({})", type_name::<N>(), self.raw.into_raw())
+        write!(f, "FileAstId::<{}>({})", type_name::<N>(), self.raw)
     }
 }
 
@@ -176,7 +197,10 @@
         let ptr = ptr.syntax_node_ptr();
         let hash = hash_ptr(&ptr);
         match self.map.raw_entry().from_hash(hash, |&idx| self.arena[idx] == ptr) {
-            Some((&raw, &())) => FileAstId { raw, covariant: PhantomData },
+            Some((&raw, &())) => FileAstId {
+                raw: ErasedFileAstId(raw.into_raw().into_u32()),
+                covariant: PhantomData,
+            },
             None => panic!(
                 "Can't find {:?} in AstIdMap:\n{:?}",
                 ptr,
@@ -186,18 +210,19 @@
     }
 
     pub fn get<N: AstIdNode>(&self, id: FileAstId<N>) -> AstPtr<N> {
-        AstPtr::try_from_raw(self.arena[id.raw]).unwrap()
+        AstPtr::try_from_raw(self.arena[Idx::from_raw(RawIdx::from_u32(id.raw.into_raw()))])
+            .unwrap()
     }
 
     pub fn get_erased(&self, id: ErasedFileAstId) -> SyntaxNodePtr {
-        self.arena[id]
+        self.arena[Idx::from_raw(RawIdx::from_u32(id.into_raw()))]
     }
 
     fn erased_ast_id(&self, item: &SyntaxNode) -> ErasedFileAstId {
         let ptr = SyntaxNodePtr::new(item);
         let hash = hash_ptr(&ptr);
         match self.map.raw_entry().from_hash(hash, |&idx| self.arena[idx] == ptr) {
-            Some((&idx, &())) => idx,
+            Some((&idx, &())) => ErasedFileAstId(idx.into_raw().into_u32()),
             None => panic!(
                 "Can't find {:?} in AstIdMap:\n{:?}",
                 item,
@@ -207,7 +232,7 @@
     }
 
     fn alloc(&mut self, item: &SyntaxNode) -> ErasedFileAstId {
-        self.arena.alloc(SyntaxNodePtr::new(item))
+        ErasedFileAstId(self.arena.alloc(SyntaxNodePtr::new(item)).into_raw().into_u32())
     }
 }
 
diff --git a/crates/span/src/hygiene.rs b/crates/span/src/hygiene.rs
index e8c5583..874480c 100644
--- a/crates/span/src/hygiene.rs
+++ b/crates/span/src/hygiene.rs
@@ -79,6 +79,10 @@
 #[derive(Copy, Clone, Hash, PartialEq, Eq)]
 pub struct SyntaxContextData {
     /// Invariant: Only [`SyntaxContextId::ROOT`] has a [`None`] outer expansion.
+    // FIXME: The None case needs to encode the context crate id. We can encode that as the MSB of
+    // MacroCallId is reserved anyways so we can do bit tagging here just fine.
+    // The bigger issue is that that will cause interning to now create completely separate chains
+    // per crate. Though that is likely not a problem as `MacroCallId`s are already crate calling dependent.
     pub outer_expn: Option<MacroCallId>,
     pub outer_transparency: Transparency,
     pub parent: SyntaxContextId,
diff --git a/crates/span/src/lib.rs b/crates/span/src/lib.rs
index b4e21d6..61e4c98 100644
--- a/crates/span/src/lib.rs
+++ b/crates/span/src/lib.rs
@@ -21,15 +21,14 @@
 /// The root ast id always points to the encompassing file, using this in spans is discouraged as
 /// any range relative to it will be effectively absolute, ruining the entire point of anchored
 /// relative text ranges.
-pub const ROOT_ERASED_FILE_AST_ID: ErasedFileAstId =
-    la_arena::Idx::from_raw(la_arena::RawIdx::from_u32(0));
+pub const ROOT_ERASED_FILE_AST_ID: ErasedFileAstId = ErasedFileAstId::from_raw(0);
 
 /// FileId used as the span for syntax node fixups. Any Span containing this file id is to be
 /// considered fake.
 pub const FIXUP_ERASED_FILE_AST_ID_MARKER: ErasedFileAstId =
-    // we pick the second to last for this in case we every consider making this a NonMaxU32, this
+    // we pick the second to last for this in case we ever consider making this a NonMaxU32, this
     // is required to be stable for the proc-macro-server
-    la_arena::Idx::from_raw(la_arena::RawIdx::from_u32(!0 - 1));
+    ErasedFileAstId::from_raw(!0 - 1);
 
 pub type Span = SpanData<SyntaxContextId>;
 
diff --git a/crates/span/src/map.rs b/crates/span/src/map.rs
index 6269f4c..c539754 100644
--- a/crates/span/src/map.rs
+++ b/crates/span/src/map.rs
@@ -119,7 +119,7 @@
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         writeln!(f, "RealSpanMap({:?}):", self.file_id)?;
         for span in self.pairs.iter() {
-            writeln!(f, "{}: {}", u32::from(span.0), span.1.into_raw().into_u32())?;
+            writeln!(f, "{}: {}", u32::from(span.0), span.1.into_raw())?;
         }
         Ok(())
     }
diff --git a/crates/stdx/Cargo.toml b/crates/stdx/Cargo.toml
index 99824df..bf0d6df 100644
--- a/crates/stdx/Cargo.toml
+++ b/crates/stdx/Cargo.toml
@@ -1,7 +1,8 @@
 [package]
 name = "stdx"
 version = "0.0.0"
-description = "TBD"
+repository.workspace = true
+description = "Missing batteries for standard libraries for rust-analyzer."
 
 authors.workspace = true
 edition.workspace = true
@@ -16,7 +17,7 @@
 always-assert = { version = "0.2.0", features = ["tracing"] }
 jod-thread = "0.1.2"
 libc.workspace = true
-crossbeam-channel = "0.5.5"
+crossbeam-channel.workspace = true
 itertools.workspace = true
 # Think twice before adding anything here
 
diff --git a/crates/syntax-bridge/Cargo.toml b/crates/syntax-bridge/Cargo.toml
new file mode 100644
index 0000000..e995ff3
--- /dev/null
+++ b/crates/syntax-bridge/Cargo.toml
@@ -0,0 +1,34 @@
+[package]
+name = "syntax-bridge"
+version = "0.0.0"
+repository.workspace = true
+description = "Conversions between syntax nodes and token trees for rust-analyzer."
+
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+rust-version.workspace = true
+
+[lib]
+doctest = false
+
+[dependencies]
+rustc-hash.workspace = true
+tracing.workspace = true
+
+# local deps
+syntax.workspace = true
+parser.workspace = true
+tt.workspace = true
+stdx.workspace = true
+span.workspace = true
+intern.workspace = true
+
+[dev-dependencies]
+test-utils.workspace = true
+
+[features]
+in-rust-tree = ["parser/in-rust-tree", "tt/in-rust-tree", "syntax/in-rust-tree"]
+
+[lints]
+workspace = true
diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/syntax-bridge/src/lib.rs
similarity index 95%
rename from crates/mbe/src/syntax_bridge.rs
rename to crates/syntax-bridge/src/lib.rs
index a29efdd..b0afd24 100644
--- a/crates/mbe/src/syntax_bridge.rs
+++ b/crates/syntax-bridge/src/lib.rs
@@ -14,11 +14,13 @@
 };
 use tt::{
     buffer::{Cursor, TokenBuffer},
-    iter::TtIter,
     token_to_literal,
 };
 
-use crate::to_parser_input::to_parser_input;
+mod to_parser_input;
+pub use to_parser_input::to_parser_input;
+// FIXME: we probably should re-think  `token_tree_to_syntax_node` interfaces
+pub use ::parser::TopEntryPoint;
 
 #[cfg(test)]
 mod tests;
@@ -43,7 +45,7 @@
 }
 
 /// Dummy things for testing where spans don't matter.
-pub(crate) mod dummy_test_span_utils {
+pub mod dummy_test_span_utils {
 
     use span::{Span, SyntaxContextId};
 
@@ -211,50 +213,6 @@
     Some(convert_tokens(&mut conv))
 }
 
-/// Split token tree with separate expr: $($e:expr)SEP*
-pub fn parse_exprs_with_sep(
-    tt: &tt::Subtree<span::Span>,
-    sep: char,
-    span: span::Span,
-    edition: Edition,
-) -> Vec<tt::Subtree<span::Span>> {
-    if tt.token_trees.is_empty() {
-        return Vec::new();
-    }
-
-    let mut iter = TtIter::new(tt);
-    let mut res = Vec::new();
-
-    while iter.peek_n(0).is_some() {
-        let expanded = crate::expect_fragment(
-            &mut iter,
-            parser::PrefixEntryPoint::Expr,
-            edition,
-            tt::DelimSpan { open: tt.delimiter.open, close: tt.delimiter.close },
-        );
-
-        res.push(match expanded.value {
-            None => break,
-            Some(tt) => tt.subtree_or_wrap(tt::DelimSpan { open: span, close: span }),
-        });
-
-        let mut fork = iter.clone();
-        if fork.expect_char(sep).is_err() {
-            break;
-        }
-        iter = fork;
-    }
-
-    if iter.peek_n(0).is_some() {
-        res.push(tt::Subtree {
-            delimiter: tt::Delimiter::invisible_spanned(span),
-            token_trees: iter.cloned().collect(),
-        });
-    }
-
-    res
-}
-
 fn convert_tokens<S, C>(conv: &mut C) -> tt::Subtree<S>
 where
     C: TokenConverter<S>,
@@ -479,7 +437,6 @@
     span: S,
     mode: DocCommentDesugarMode,
 ) -> Option<Vec<tt::TokenTree<S>>> {
-    cov_mark::hit!(test_meta_doc_comments);
     let comment = ast::Comment::cast(token.clone())?;
     let doc = comment.kind().doc?;
 
diff --git a/crates/syntax-bridge/src/tests.rs b/crates/syntax-bridge/src/tests.rs
new file mode 100644
index 0000000..7b8e3f2
--- /dev/null
+++ b/crates/syntax-bridge/src/tests.rs
@@ -0,0 +1,104 @@
+use rustc_hash::FxHashMap;
+use span::Span;
+use syntax::{ast, AstNode};
+use test_utils::extract_annotations;
+use tt::{
+    buffer::{TokenBuffer, TokenTreeRef},
+    Leaf, Punct, Spacing,
+};
+
+use crate::{
+    dummy_test_span_utils::{DummyTestSpanMap, DUMMY},
+    syntax_node_to_token_tree, DocCommentDesugarMode,
+};
+
+fn check_punct_spacing(fixture: &str) {
+    let source_file = ast::SourceFile::parse(fixture, span::Edition::CURRENT).ok().unwrap();
+    let subtree = syntax_node_to_token_tree(
+        source_file.syntax(),
+        DummyTestSpanMap,
+        DUMMY,
+        DocCommentDesugarMode::Mbe,
+    );
+    let mut annotations: FxHashMap<_, _> = extract_annotations(fixture)
+        .into_iter()
+        .map(|(range, annotation)| {
+            let spacing = match annotation.as_str() {
+                "Alone" => Spacing::Alone,
+                "Joint" => Spacing::Joint,
+                a => panic!("unknown annotation: {a}"),
+            };
+            (range, spacing)
+        })
+        .collect();
+
+    let buf = TokenBuffer::from_subtree(&subtree);
+    let mut cursor = buf.begin();
+    while !cursor.eof() {
+        while let Some(token_tree) = cursor.token_tree() {
+            if let TokenTreeRef::Leaf(
+                Leaf::Punct(Punct { spacing, span: Span { range, .. }, .. }),
+                _,
+            ) = token_tree
+            {
+                if let Some(expected) = annotations.remove(range) {
+                    assert_eq!(expected, *spacing);
+                }
+            }
+            cursor = cursor.bump_subtree();
+        }
+        cursor = cursor.bump();
+    }
+
+    assert!(annotations.is_empty(), "unchecked annotations: {annotations:?}");
+}
+
+#[test]
+fn punct_spacing() {
+    check_punct_spacing(
+        r#"
+fn main() {
+    0+0;
+   //^ Alone
+    0+(0);
+   //^ Alone
+    0<=0;
+   //^ Joint
+   // ^ Alone
+    0<=(0);
+   // ^ Alone
+    a=0;
+   //^ Alone
+    a=(0);
+   //^ Alone
+    a+=0;
+   //^ Joint
+   // ^ Alone
+    a+=(0);
+   // ^ Alone
+    a&&b;
+   //^ Joint
+   // ^ Alone
+    a&&(b);
+   // ^ Alone
+    foo::bar;
+   //  ^ Joint
+   //   ^ Alone
+    use foo::{bar,baz,};
+   //       ^ Alone
+   //            ^ Alone
+   //                ^ Alone
+    struct Struct<'a> {};
+   //            ^ Joint
+   //             ^ Joint
+    Struct::<0>;
+   //       ^ Alone
+    Struct::<{0}>;
+   //       ^ Alone
+    ;;
+  //^ Joint
+  // ^ Alone
+}
+        "#,
+    );
+}
diff --git a/crates/mbe/src/to_parser_input.rs b/crates/syntax-bridge/src/to_parser_input.rs
similarity index 98%
rename from crates/mbe/src/to_parser_input.rs
rename to crates/syntax-bridge/src/to_parser_input.rs
index c35b285..2c54899 100644
--- a/crates/mbe/src/to_parser_input.rs
+++ b/crates/syntax-bridge/src/to_parser_input.rs
@@ -8,7 +8,7 @@
 
 use tt::buffer::TokenBuffer;
 
-pub(crate) fn to_parser_input<S: Copy + fmt::Debug>(
+pub fn to_parser_input<S: Copy + fmt::Debug>(
     edition: Edition,
     buffer: &TokenBuffer<'_, S>,
 ) -> parser::Input {
diff --git a/crates/syntax/Cargo.toml b/crates/syntax/Cargo.toml
index b371ec6..994c214 100644
--- a/crates/syntax/Cargo.toml
+++ b/crates/syntax/Cargo.toml
@@ -1,8 +1,8 @@
 [package]
 name = "syntax"
 version = "0.0.0"
-description = "Comment and whitespace preserving parser for the Rust language"
-repository = "https://github.com/rust-lang/rust-analyzer"
+repository.workspace = true
+description = "Concrete syntax tree definitions for rust-analyzer."
 
 authors.workspace = true
 edition.workspace = true
diff --git a/crates/syntax/src/tests.rs b/crates/syntax/src/tests.rs
index f0d58ef..b50489c 100644
--- a/crates/syntax/src/tests.rs
+++ b/crates/syntax/src/tests.rs
@@ -79,7 +79,7 @@
     let crates_dir = project_root().join("crates");
 
     let mut files = Vec::new();
-    let mut work = vec![crates_dir.to_path_buf()];
+    let mut work = vec![crates_dir.into_std_path_buf()];
     while let Some(dir) = work.pop() {
         for entry in dir.read_dir().unwrap() {
             let entry = entry.unwrap();
@@ -127,7 +127,7 @@
 }
 
 fn test_data_dir() -> PathBuf {
-    project_root().join("crates/syntax/test_data")
+    project_root().into_std_path_buf().join("crates/syntax/test_data")
 }
 
 fn assert_errors_are_present(errors: &[SyntaxError], path: &Path) {
diff --git a/crates/test-fixture/src/lib.rs b/crates/test-fixture/src/lib.rs
index e1f40f5..03e85a8 100644
--- a/crates/test-fixture/src/lib.rs
+++ b/crates/test-fixture/src/lib.rs
@@ -3,7 +3,7 @@
 
 use base_db::{
     CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, Env, FileChange,
-    FileSet, LangCrateOrigin, SourceDatabaseExt, SourceRoot, Version, VfsPath,
+    FileSet, LangCrateOrigin, SourceRoot, SourceRootDatabase, Version, VfsPath,
 };
 use cfg::CfgOptions;
 use hir_expand::{
@@ -26,7 +26,7 @@
 
 pub const WORKSPACE: base_db::SourceRootId = base_db::SourceRootId(0);
 
-pub trait WithFixture: Default + ExpandDatabase + SourceDatabaseExt + 'static {
+pub trait WithFixture: Default + ExpandDatabase + SourceRootDatabase + 'static {
     #[track_caller]
     fn with_single_file(ra_fixture: &str) -> (Self, EditionedFileId) {
         let fixture = ChangeFixture::parse(ra_fixture);
@@ -101,7 +101,7 @@
     }
 }
 
-impl<DB: ExpandDatabase + SourceDatabaseExt + Default + 'static> WithFixture for DB {}
+impl<DB: ExpandDatabase + SourceRootDatabase + Default + 'static> WithFixture for DB {}
 
 pub struct ChangeFixture {
     pub file_position: Option<(EditionedFileId, RangeOrOffset)>,
diff --git a/crates/test-utils/Cargo.toml b/crates/test-utils/Cargo.toml
index 2ff1fad..b145772 100644
--- a/crates/test-utils/Cargo.toml
+++ b/crates/test-utils/Cargo.toml
@@ -1,7 +1,8 @@
 [package]
 name = "test-utils"
 version = "0.0.0"
-description = "TBD"
+repository.workspace = true
+description = "Assorted testing utilities for rust-analyzer."
 
 authors.workspace = true
 edition.workspace = true
@@ -18,6 +19,7 @@
 tracing.workspace = true
 rustc-hash.workspace = true
 
+paths.workspace = true
 stdx.workspace = true
 profile.workspace = true
 
diff --git a/crates/test-utils/src/lib.rs b/crates/test-utils/src/lib.rs
index 088817b..36be993 100644
--- a/crates/test-utils/src/lib.rs
+++ b/crates/test-utils/src/lib.rs
@@ -18,6 +18,7 @@
     path::{Path, PathBuf},
 };
 
+use paths::Utf8PathBuf;
 use profile::StopWatch;
 use stdx::is_ci;
 use text_size::{TextRange, TextSize};
@@ -402,9 +403,10 @@
 }
 
 /// Returns the path to the root directory of `rust-analyzer` project.
-pub fn project_root() -> PathBuf {
+pub fn project_root() -> Utf8PathBuf {
     let dir = env!("CARGO_MANIFEST_DIR");
-    PathBuf::from(dir).parent().unwrap().parent().unwrap().to_owned()
+    Utf8PathBuf::from_path_buf(PathBuf::from(dir).parent().unwrap().parent().unwrap().to_owned())
+        .unwrap()
 }
 
 pub fn format_diff(chunks: Vec<dissimilar::Chunk<'_>>) -> String {
diff --git a/crates/text-edit/Cargo.toml b/crates/text-edit/Cargo.toml
index f745674..dc6b3d3 100644
--- a/crates/text-edit/Cargo.toml
+++ b/crates/text-edit/Cargo.toml
@@ -1,7 +1,8 @@
 [package]
 name = "text-edit"
 version = "0.0.0"
-description = "TBD"
+repository.workspace = true
+description = "Representation of a `TextEdit` for rust-analyzer."
 
 authors.workspace = true
 edition.workspace = true
diff --git a/crates/toolchain/Cargo.toml b/crates/toolchain/Cargo.toml
index c85efd4..87e8efb 100644
--- a/crates/toolchain/Cargo.toml
+++ b/crates/toolchain/Cargo.toml
@@ -1,7 +1,8 @@
 [package]
 name = "toolchain"
 version = "0.0.0"
-description = "TBD"
+repository.workspace = true
+description = "Discovery of `cargo` & `rustc` executables for rust-analyzer."
 
 authors.workspace = true
 edition.workspace = true
diff --git a/crates/tt/Cargo.toml b/crates/tt/Cargo.toml
index cea1519..82e7c24 100644
--- a/crates/tt/Cargo.toml
+++ b/crates/tt/Cargo.toml
@@ -1,7 +1,8 @@
 [package]
 name = "tt"
 version = "0.0.0"
-description = "TBD"
+repository.workspace = true
+description = "A `TokenTree` data structure for rust-analyzer."
 
 authors.workspace = true
 edition.workspace = true
diff --git a/crates/vfs-notify/Cargo.toml b/crates/vfs-notify/Cargo.toml
index a6d5027..09296dc 100644
--- a/crates/vfs-notify/Cargo.toml
+++ b/crates/vfs-notify/Cargo.toml
@@ -1,7 +1,8 @@
 [package]
 name = "vfs-notify"
 version = "0.0.0"
-description = "TBD"
+repository.workspace = true
+description = "Implementation of `loader::Handle` for rust-analyzer."
 
 authors.workspace = true
 edition.workspace = true
@@ -14,12 +15,14 @@
 [dependencies]
 tracing.workspace = true
 walkdir = "2.3.2"
-crossbeam-channel = "0.5.5"
+crossbeam-channel.workspace = true
 notify = "6.1.1"
+rayon = "1.10.0"
 
 stdx.workspace = true
 vfs.workspace = true
 paths.workspace = true
+rustc-hash.workspace = true
 
 [lints]
-workspace = true
\ No newline at end of file
+workspace = true
diff --git a/crates/vfs-notify/src/lib.rs b/crates/vfs-notify/src/lib.rs
index 7e0f9af..0ae8b7b 100644
--- a/crates/vfs-notify/src/lib.rs
+++ b/crates/vfs-notify/src/lib.rs
@@ -10,12 +10,15 @@
 use std::{
     fs,
     path::{Component, Path},
+    sync::atomic::AtomicUsize,
 };
 
-use crossbeam_channel::{never, select, unbounded, Receiver, Sender};
-use notify::{Config, RecommendedWatcher, RecursiveMode, Watcher};
-use paths::{AbsPath, AbsPathBuf};
-use vfs::loader;
+use crossbeam_channel::{select, unbounded, Receiver, Sender};
+use notify::{Config, EventKind, RecommendedWatcher, RecursiveMode, Watcher};
+use paths::{AbsPath, AbsPathBuf, Utf8PathBuf};
+use rayon::iter::{IndexedParallelIterator as _, IntoParallelIterator as _, ParallelIterator};
+use rustc_hash::FxHashSet;
+use vfs::loader::{self, LoadingProgress};
 use walkdir::WalkDir;
 
 #[derive(Debug)]
@@ -59,7 +62,8 @@
 
 struct NotifyActor {
     sender: loader::Sender,
-    watched_entries: Vec<loader::Entry>,
+    watched_file_entries: FxHashSet<AbsPathBuf>,
+    watched_dir_entries: Vec<loader::Directories>,
     // Drop order is significant.
     watcher: Option<(RecommendedWatcher, Receiver<NotifyEvent>)>,
 }
@@ -72,14 +76,22 @@
 
 impl NotifyActor {
     fn new(sender: loader::Sender) -> NotifyActor {
-        NotifyActor { sender, watched_entries: Vec::new(), watcher: None }
+        NotifyActor {
+            sender,
+            watched_dir_entries: Vec::new(),
+            watched_file_entries: FxHashSet::default(),
+            watcher: None,
+        }
     }
 
     fn next_event(&self, receiver: &Receiver<Message>) -> Option<Event> {
-        let watcher_receiver = self.watcher.as_ref().map(|(_, receiver)| receiver);
+        let Some((_, watcher_receiver)) = &self.watcher else {
+            return receiver.recv().ok().map(Event::Message);
+        };
+
         select! {
             recv(receiver) -> it => it.ok().map(Event::Message),
-            recv(watcher_receiver.unwrap_or(&never())) -> it => Some(Event::NotifyEvent(it.unwrap())),
+            recv(watcher_receiver) -> it => Some(Event::NotifyEvent(it.unwrap())),
         }
     }
 
@@ -94,7 +106,10 @@
                             let (watcher_sender, watcher_receiver) = unbounded();
                             let watcher = log_notify_error(RecommendedWatcher::new(
                                 move |event| {
-                                    watcher_sender.send(event).unwrap();
+                                    // we don't care about the error. If sending fails that usually
+                                    // means we were dropped, so unwrapping will just add to the
+                                    // panic noise.
+                                    _ = watcher_sender.send(event);
                                 },
                                 Config::default(),
                             ));
@@ -104,35 +119,74 @@
                         let config_version = config.version;
 
                         let n_total = config.load.len();
+                        self.watched_dir_entries.clear();
+                        self.watched_file_entries.clear();
+
                         self.send(loader::Message::Progress {
                             n_total,
-                            n_done: None,
+                            n_done: LoadingProgress::Started,
                             config_version,
                             dir: None,
                         });
 
-                        self.watched_entries.clear();
+                        let (entry_tx, entry_rx) = unbounded();
+                        let (watch_tx, watch_rx) = unbounded();
+                        let processed = AtomicUsize::new(0);
 
-                        for (i, entry) in config.load.into_iter().enumerate() {
-                            let watch = config.watch.contains(&i);
-                            if watch {
-                                self.watched_entries.push(entry.clone());
+                        config.load.into_par_iter().enumerate().for_each(|(i, entry)| {
+                            let do_watch = config.watch.contains(&i);
+                            if do_watch {
+                                _ = entry_tx.send(entry.clone());
                             }
-                            let files =
-                                self.load_entry(entry, watch, |file| loader::Message::Progress {
-                                    n_total,
-                                    n_done: Some(i),
-                                    dir: Some(file),
-                                    config_version,
-                                });
+                            let files = Self::load_entry(
+                                |f| _ = watch_tx.send(f.to_owned()),
+                                entry,
+                                do_watch,
+                                |file| {
+                                    self.send(loader::Message::Progress {
+                                        n_total,
+                                        n_done: LoadingProgress::Progress(
+                                            processed.load(std::sync::atomic::Ordering::Relaxed),
+                                        ),
+                                        dir: Some(file),
+                                        config_version,
+                                    });
+                                },
+                            );
                             self.send(loader::Message::Loaded { files });
                             self.send(loader::Message::Progress {
                                 n_total,
-                                n_done: Some(i + 1),
+                                n_done: LoadingProgress::Progress(
+                                    processed.fetch_add(1, std::sync::atomic::Ordering::AcqRel) + 1,
+                                ),
                                 config_version,
                                 dir: None,
                             });
+                        });
+
+                        drop(watch_tx);
+                        for path in watch_rx {
+                            self.watch(&path);
                         }
+
+                        drop(entry_tx);
+                        for entry in entry_rx {
+                            match entry {
+                                loader::Entry::Files(files) => {
+                                    self.watched_file_entries.extend(files)
+                                }
+                                loader::Entry::Directories(dir) => {
+                                    self.watched_dir_entries.push(dir)
+                                }
+                            }
+                        }
+
+                        self.send(loader::Message::Progress {
+                            n_total,
+                            n_done: LoadingProgress::Finished,
+                            config_version,
+                            dir: None,
+                        });
                     }
                     Message::Invalidate(path) => {
                         let contents = read(path.as_path());
@@ -142,55 +196,69 @@
                 },
                 Event::NotifyEvent(event) => {
                     if let Some(event) = log_notify_error(event) {
-                        let files = event
-                            .paths
-                            .into_iter()
-                            .map(|path| AbsPathBuf::try_from(path).unwrap())
-                            .filter_map(|path| {
-                                let meta = fs::metadata(&path).ok()?;
-                                if meta.file_type().is_dir()
-                                    && self
-                                        .watched_entries
-                                        .iter()
-                                        .any(|entry| entry.contains_dir(&path))
-                                {
-                                    self.watch(path);
-                                    return None;
-                                }
+                        if let EventKind::Create(_) | EventKind::Modify(_) | EventKind::Remove(_) =
+                            event.kind
+                        {
+                            let files = event
+                                .paths
+                                .into_iter()
+                                .filter_map(|path| {
+                                    Some(
+                                        AbsPathBuf::try_from(
+                                            Utf8PathBuf::from_path_buf(path).ok()?,
+                                        )
+                                        .expect("path is absolute"),
+                                    )
+                                })
+                                .filter_map(|path| -> Option<(AbsPathBuf, Option<Vec<u8>>)> {
+                                    let meta = fs::metadata(&path).ok()?;
+                                    if meta.file_type().is_dir()
+                                        && self
+                                            .watched_dir_entries
+                                            .iter()
+                                            .any(|dir| dir.contains_dir(&path))
+                                    {
+                                        self.watch(path.as_ref());
+                                        return None;
+                                    }
 
-                                if !meta.file_type().is_file() {
-                                    return None;
-                                }
-                                if !self
-                                    .watched_entries
-                                    .iter()
-                                    .any(|entry| entry.contains_file(&path))
-                                {
-                                    return None;
-                                }
+                                    if !meta.file_type().is_file() {
+                                        return None;
+                                    }
 
-                                let contents = read(&path);
-                                Some((path, contents))
-                            })
-                            .collect();
-                        self.send(loader::Message::Changed { files });
+                                    if !(self.watched_file_entries.contains(&path)
+                                        || self
+                                            .watched_dir_entries
+                                            .iter()
+                                            .any(|dir| dir.contains_file(&path)))
+                                    {
+                                        return None;
+                                    }
+
+                                    let contents = read(&path);
+                                    Some((path, contents))
+                                })
+                                .collect();
+                            self.send(loader::Message::Changed { files });
+                        }
                     }
                 }
             }
         }
     }
+
     fn load_entry(
-        &mut self,
+        mut watch: impl FnMut(&Path),
         entry: loader::Entry,
-        watch: bool,
-        make_message: impl Fn(AbsPathBuf) -> loader::Message,
+        do_watch: bool,
+        send_message: impl Fn(AbsPathBuf),
     ) -> Vec<(AbsPathBuf, Option<Vec<u8>>)> {
         match entry {
             loader::Entry::Files(files) => files
                 .into_iter()
                 .map(|file| {
-                    if watch {
-                        self.watch(file.clone());
+                    if do_watch {
+                        watch(file.as_ref());
                     }
                     let contents = read(file.as_path());
                     (file, contents)
@@ -200,7 +268,7 @@
                 let mut res = Vec::new();
 
                 for root in &dirs.include {
-                    self.send(make_message(root.clone()));
+                    send_message(root.clone());
                     let walkdir =
                         WalkDir::new(root).follow_links(true).into_iter().filter_entry(|entry| {
                             if !entry.file_type().is_dir() {
@@ -208,7 +276,7 @@
                             }
                             let path = entry.path();
 
-                            if path_is_parent_symlink(path) {
+                            if path_might_be_cyclic(path) {
                                 return false;
                             }
 
@@ -220,12 +288,15 @@
                         let depth = entry.depth();
                         let is_dir = entry.file_type().is_dir();
                         let is_file = entry.file_type().is_file();
-                        let abs_path = AbsPathBuf::try_from(entry.into_path()).ok()?;
+                        let abs_path = AbsPathBuf::try_from(
+                            Utf8PathBuf::from_path_buf(entry.into_path()).ok()?,
+                        )
+                        .ok()?;
                         if depth < 2 && is_dir {
-                            self.send(make_message(abs_path.clone()));
+                            send_message(abs_path.clone());
                         }
-                        if is_dir && watch {
-                            self.watch(abs_path.clone());
+                        if is_dir && do_watch {
+                            watch(abs_path.as_ref());
                         }
                         if !is_file {
                             return None;
@@ -247,13 +318,15 @@
         }
     }
 
-    fn watch(&mut self, path: AbsPathBuf) {
+    fn watch(&mut self, path: &Path) {
         if let Some((watcher, _)) = &mut self.watcher {
-            log_notify_error(watcher.watch(path.as_ref(), RecursiveMode::NonRecursive));
+            log_notify_error(watcher.watch(path, RecursiveMode::NonRecursive));
         }
     }
-    fn send(&mut self, msg: loader::Message) {
-        (self.sender)(msg);
+
+    #[track_caller]
+    fn send(&self, msg: loader::Message) {
+        self.sender.send(msg).unwrap();
     }
 }
 
@@ -271,7 +344,7 @@
 /// heuristic is not sufficient to catch all symlink cycles (it's
 /// possible to construct cycle using two or more symlinks), but it
 /// catches common cases.
-fn path_is_parent_symlink(path: &Path) -> bool {
+fn path_might_be_cyclic(path: &Path) -> bool {
     let Ok(destination) = std::fs::read_link(path) else {
         return false;
     };
diff --git a/crates/vfs/Cargo.toml b/crates/vfs/Cargo.toml
index 84f2110..e8a6195 100644
--- a/crates/vfs/Cargo.toml
+++ b/crates/vfs/Cargo.toml
@@ -1,7 +1,8 @@
 [package]
 name = "vfs"
 version = "0.0.0"
-description = "TBD"
+repository.workspace = true
+description = "A virtual file system for rust-analyzer."
 
 authors.workspace = true
 edition.workspace = true
@@ -17,6 +18,7 @@
 fst = "0.4.7"
 indexmap.workspace = true
 nohash-hasher.workspace = true
+crossbeam-channel.workspace = true
 
 paths.workspace = true
 stdx.workspace = true
diff --git a/crates/vfs/src/lib.rs b/crates/vfs/src/lib.rs
index 77f890f..bc40e03 100644
--- a/crates/vfs/src/lib.rs
+++ b/crates/vfs/src/lib.rs
@@ -201,8 +201,8 @@
     pub fn set_file_contents(&mut self, path: VfsPath, contents: Option<Vec<u8>>) -> bool {
         let _p = span!(Level::INFO, "Vfs::set_file_contents").entered();
         let file_id = self.alloc_file_id(path);
-        let state = self.get(file_id);
-        let change_kind = match (state, contents) {
+        let state: FileState = self.get(file_id);
+        let change = match (state, contents) {
             (FileState::Deleted, None) => return false,
             (FileState::Deleted, Some(v)) => {
                 let hash = hash_once::<FxHasher>(&*v);
@@ -225,7 +225,7 @@
             };
         };
 
-        let changed_file = ChangedFile { file_id, change: change_kind };
+        let changed_file = ChangedFile { file_id, change };
         match self.changes.entry(file_id) {
             // two changes to the same file in one cycle, merge them appropriately
             Entry::Occupied(mut o) => {
diff --git a/crates/vfs/src/loader.rs b/crates/vfs/src/loader.rs
index 3af91b1..f24354c 100644
--- a/crates/vfs/src/loader.rs
+++ b/crates/vfs/src/loader.rs
@@ -43,6 +43,13 @@
     pub watch: Vec<usize>,
 }
 
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+pub enum LoadingProgress {
+    Started,
+    Progress(usize),
+    Finished,
+}
+
 /// Message about an action taken by a [`Handle`].
 pub enum Message {
     /// Indicate a gradual progress.
@@ -52,7 +59,7 @@
         /// The total files to be loaded.
         n_total: usize,
         /// The files that have been loaded successfully.
-        n_done: Option<usize>,
+        n_done: LoadingProgress,
         /// The dir being loaded, `None` if its for a file.
         dir: Option<AbsPathBuf>,
         /// The [`Config`] version.
@@ -65,7 +72,7 @@
 }
 
 /// Type that will receive [`Messages`](Message) from a [`Handle`].
-pub type Sender = Box<dyn Fn(Message) + Send>;
+pub type Sender = crossbeam_channel::Sender<Message>;
 
 /// Interface for reading and watching files.
 pub trait Handle: fmt::Debug {
diff --git a/docs/dev/lsp-extensions.md b/docs/dev/lsp-extensions.md
index e559f88..4786bd5 100644
--- a/docs/dev/lsp-extensions.md
+++ b/docs/dev/lsp-extensions.md
@@ -1,5 +1,5 @@
 <!---
-lsp/ext.rs hash: e92e1f12229b0071
+lsp/ext.rs hash: 3429c08745984b3d
 
 If you need to change the above hash to make the test pass, please check if you
 need to adjust this doc as well and ping this issue:
diff --git a/docs/user/generated_config.adoc b/docs/user/generated_config.adoc
index ac95767..2be338d 100644
--- a/docs/user/generated_config.adoc
+++ b/docs/user/generated_config.adoc
@@ -144,16 +144,6 @@
 
 This option does not take effect until rust-analyzer is restarted.
 --
-[[rust-analyzer.cargo.sysrootQueryMetadata]]rust-analyzer.cargo.sysrootQueryMetadata (default: `false`)::
-+
---
-Whether to run cargo metadata on the sysroot library allowing rust-analyzer to analyze
-third-party dependencies of the standard libraries.
-
-This will cause `cargo` to create a lockfile in your sysroot directory. rust-analyzer
-will attempt to clean up afterwards, but nevertheless requires the location to be
-writable to.
---
 [[rust-analyzer.cargo.sysrootSrc]]rust-analyzer.cargo.sysrootSrc (default: `null`)::
 +
 --
diff --git a/docs/user/manual.adoc b/docs/user/manual.adoc
index a177720..703ec66 100644
--- a/docs/user/manual.adoc
+++ b/docs/user/manual.adoc
@@ -203,6 +203,12 @@
 $ brew install rust-analyzer
 ----
 
+==== Windows
+
+It is recommended to install the latest Microsoft Visual C++ Redistributable prior to installation.
+Download links can be found
+https://learn.microsoft.com/en-us/cpp/windows/latest-supported-vc-redist[here].
+
 === VS Code or VSCodium in Flatpak
 
 Setting up `rust-analyzer` with a Flatpak version of Code is not trivial because of the Flatpak sandbox.
diff --git a/editors/code/package.json b/editors/code/package.json
index 4b59412..bf9c4a3 100644
--- a/editors/code/package.json
+++ b/editors/code/package.json
@@ -71,7 +71,9 @@
         "workspaceContains:Cargo.toml",
         "workspaceContains:*/Cargo.toml",
         "workspaceContains:rust-project.json",
-        "workspaceContains:*/rust-project.json"
+        "workspaceContains:*/rust-project.json",
+        "workspaceContains:.rust-project.json",
+        "workspaceContains:*/.rust-project.json"
     ],
     "main": "./out/main",
     "contributes": {
@@ -829,16 +831,6 @@
             {
                 "title": "cargo",
                 "properties": {
-                    "rust-analyzer.cargo.sysrootQueryMetadata": {
-                        "markdownDescription": "Whether to run cargo metadata on the sysroot library allowing rust-analyzer to analyze\nthird-party dependencies of the standard libraries.\n\nThis will cause `cargo` to create a lockfile in your sysroot directory. rust-analyzer\nwill attempt to clean up afterwards, but nevertheless requires the location to be\nwritable to.",
-                        "default": false,
-                        "type": "boolean"
-                    }
-                }
-            },
-            {
-                "title": "cargo",
-                "properties": {
                     "rust-analyzer.cargo.sysrootSrc": {
                         "markdownDescription": "Relative path to the sysroot library sources. If left unset, this will default to\n`{cargo.sysroot}/lib/rustlib/src/rust/library`.\n\nThis option does not take effect until rust-analyzer is restarted.",
                         "default": null,
@@ -3221,6 +3213,10 @@
             {
                 "fileMatch": "rust-project.json",
                 "url": "https://json.schemastore.org/rust-project.json"
+            },
+            {
+                "fileMatch": ".rust-project.json",
+                "url": "https://json.schemastore.org/rust-project.json"
             }
         ],
         "walkthroughs": [
diff --git a/editors/code/src/bootstrap.ts b/editors/code/src/bootstrap.ts
index 42dd076..daead47 100644
--- a/editors/code/src/bootstrap.ts
+++ b/editors/code/src/bootstrap.ts
@@ -4,6 +4,7 @@
 import { type Env, log } from "./util";
 import type { PersistentState } from "./persistent_state";
 import { exec, spawnSync } from "child_process";
+import { TextDecoder } from "node:util";
 
 export async function bootstrap(
     context: vscode.ExtensionContext,
@@ -50,26 +51,35 @@
         }
         return explicitPath;
     }
-    if (packageJson.releaseTag === null) return "rust-analyzer";
 
-    if (vscode.workspace.workspaceFolders?.length === 1) {
-        // otherwise check if there is a toolchain override for the current vscode workspace
-        // and if the toolchain of this override has a rust-analyzer component
-        // if so, use the rust-analyzer component
-        const toolchainTomlExists = await fileExists(
-            vscode.Uri.joinPath(vscode.workspace.workspaceFolders[0]!.uri, "rust-toolchain.toml"),
-        );
-        if (toolchainTomlExists) {
-            const res = spawnSync("rustup", ["which", "rust-analyzer"], {
-                encoding: "utf8",
-                env: { ...process.env },
-                cwd: vscode.workspace.workspaceFolders[0]!.uri.fsPath,
-            });
-            if (!res.error && res.status === 0) {
-                return res.stdout.trim();
+    let toolchainServerPath = undefined;
+    if (vscode.workspace.workspaceFolders) {
+        for (const workspaceFolder of vscode.workspace.workspaceFolders) {
+            // otherwise check if there is a toolchain override for the current vscode workspace
+            // and if the toolchain of this override has a rust-analyzer component
+            // if so, use the rust-analyzer component
+            const toolchainUri = vscode.Uri.joinPath(workspaceFolder.uri, "rust-toolchain.toml");
+            if (await hasToolchainFileWithRaDeclared(toolchainUri)) {
+                const res = spawnSync("rustup", ["which", "rust-analyzer"], {
+                    encoding: "utf8",
+                    env: { ...process.env },
+                    cwd: workspaceFolder.uri.fsPath,
+                });
+                if (!res.error && res.status === 0) {
+                    toolchainServerPath = earliestToolchainPath(
+                        toolchainServerPath,
+                        res.stdout.trim(),
+                        raVersionResolver,
+                    );
+                }
             }
         }
     }
+    if (toolchainServerPath) {
+        return toolchainServerPath;
+    }
+
+    if (packageJson.releaseTag === null) return "rust-analyzer";
 
     // finally, use the bundled one
     const ext = process.platform === "win32" ? ".exe" : "";
@@ -102,6 +112,57 @@
     return undefined;
 }
 
+// Given a path to a rust-analyzer executable, resolve its version and return it.
+function raVersionResolver(path: string): string | undefined {
+    const res = spawnSync(path, ["--version"], {
+        encoding: "utf8",
+    });
+    if (!res.error && res.status === 0) {
+        return res.stdout;
+    } else {
+        return undefined;
+    }
+}
+
+// Given paths to two rust-analyzer executables, return the earliest one by release date.
+function earliestToolchainPath(
+    path0: string | undefined,
+    path1: string,
+    raVersionResolver: (path: string) => string | undefined,
+): string {
+    if (path0) {
+        if (orderFromPath(path0, raVersionResolver) < orderFromPath(path1, raVersionResolver)) {
+            return path0;
+        } else {
+            return path1;
+        }
+    } else {
+        return path1;
+    }
+}
+
+// In addition to extracting a date for comparison, determine the order of a toolchain as follows:
+//  Highest - nightly
+//  Medium  - versioned
+//  Lowest  - stable
+// Example paths:
+//  nightly   - /Users/myuser/.rustup/toolchains/nightly-2022-11-22-aarch64-apple-darwin/bin/rust-analyzer
+//  versioned - /Users/myuser/.rustup/toolchains/1.72.1-aarch64-apple-darwin/bin/rust-analyzer
+//  stable    - /Users/myuser/.rustup/toolchains/stable-aarch64-apple-darwin/bin/rust-analyzer
+function orderFromPath(
+    path: string,
+    raVersionResolver: (path: string) => string | undefined,
+): string {
+    const raVersion = raVersionResolver(path);
+    const raDate = raVersion?.match(/^rust-analyzer .*\(.* (\d{4}-\d{2}-\d{2})\)$/);
+    if (raDate?.length === 2) {
+        const precedence = path.includes("nightly-") ? "0" : "1";
+        return "0-" + raDate[1] + "/" + precedence;
+    } else {
+        return "2";
+    }
+}
+
 async function fileExists(uri: vscode.Uri) {
     return await vscode.workspace.fs.stat(uri).then(
         () => true,
@@ -109,6 +170,19 @@
     );
 }
 
+async function hasToolchainFileWithRaDeclared(uri: vscode.Uri): Promise<boolean> {
+    try {
+        const toolchainFileContents = new TextDecoder().decode(
+            await vscode.workspace.fs.readFile(uri),
+        );
+        return (
+            toolchainFileContents.match(/components\s*=\s*\[.*\"rust-analyzer\".*\]/g)?.length === 1
+        );
+    } catch (e) {
+        return false;
+    }
+}
+
 export function isValidExecutable(path: string, extraEnv: Env): boolean {
     log.debug("Checking availability of a binary at", path);
 
@@ -207,3 +281,8 @@
         },
     );
 }
+
+export const _private = {
+    earliestToolchainPath,
+    orderFromPath,
+};
diff --git a/editors/code/src/debug.ts b/editors/code/src/debug.ts
index d9622b4..3aae0f9 100644
--- a/editors/code/src/debug.ts
+++ b/editors/code/src/debug.ts
@@ -7,21 +7,15 @@
 import type { Ctx } from "./ctx";
 import { prepareEnv } from "./run";
 import { execute, isCargoRunnableArgs, unwrapUndefinable } from "./util";
+import type { Config } from "./config";
 
 const debugOutput = vscode.window.createOutputChannel("Debug");
-type DebugConfigProvider = (
-    runnable: ra.Runnable,
-    runnableArgs: ra.CargoRunnableArgs,
-    executable: string,
-    env: Record<string, string>,
-    sourceFileMap?: Record<string, string>,
-) => vscode.DebugConfiguration;
 
 export async function makeDebugConfig(ctx: Ctx, runnable: ra.Runnable): Promise<void> {
     const scope = ctx.activeRustEditor?.document.uri;
     if (!scope) return;
 
-    const debugConfig = await getDebugConfiguration(ctx, runnable);
+    const debugConfig = await getDebugConfiguration(ctx.config, runnable, false);
     if (!debugConfig) return;
 
     const wsLaunchSection = vscode.workspace.getConfiguration("launch", scope);
@@ -57,7 +51,7 @@
         message = " (from launch.json)";
         debugOutput.clear();
     } else {
-        debugConfig = await getDebugConfiguration(ctx, runnable);
+        debugConfig = await getDebugConfiguration(ctx.config, runnable);
     }
 
     if (!debugConfig) return false;
@@ -74,35 +68,35 @@
 }
 
 async function getDebugConfiguration(
-    ctx: Ctx,
+    config: Config,
     runnable: ra.Runnable,
+    inheritEnv: boolean = true,
 ): Promise<vscode.DebugConfiguration | undefined> {
     if (!isCargoRunnableArgs(runnable.args)) {
         return;
     }
     const runnableArgs: ra.CargoRunnableArgs = runnable.args;
 
-    const editor = ctx.activeRustEditor;
-    if (!editor) return;
+    const debugOptions = config.debug;
 
-    const knownEngines: Record<string, DebugConfigProvider> = {
-        "vadimcn.vscode-lldb": getCodeLldbDebugConfig,
-        "ms-vscode.cpptools": getCCppDebugConfig,
-        "webfreak.debug": getNativeDebugConfig,
-    };
-    const debugOptions = ctx.config.debug;
+    let provider: null | KnownEnginesType = null;
 
-    let debugEngine = null;
     if (debugOptions.engine === "auto") {
-        for (var engineId in knownEngines) {
-            debugEngine = vscode.extensions.getExtension(engineId);
-            if (debugEngine) break;
+        for (const engineId in knownEngines) {
+            const debugEngine = vscode.extensions.getExtension(engineId);
+            if (debugEngine) {
+                provider = knownEngines[engineId as keyof typeof knownEngines];
+                break;
+            }
         }
     } else if (debugOptions.engine) {
-        debugEngine = vscode.extensions.getExtension(debugOptions.engine);
+        const debugEngine = vscode.extensions.getExtension(debugOptions.engine);
+        if (debugEngine && Object.keys(knownEngines).includes(debugOptions.engine)) {
+            provider = knownEngines[debugOptions.engine as keyof typeof knownEngines];
+        }
     }
 
-    if (!debugEngine) {
+    if (!provider) {
         const commandCCpp: string = createCommandLink("ms-vscode.cpptools");
         const commandCodeLLDB: string = createCommandLink("vadimcn.vscode-lldb");
         const commandNativeDebug: string = createCommandLink("webfreak.debug");
@@ -116,7 +110,7 @@
     }
 
     debugOutput.clear();
-    if (ctx.config.debug.openDebugPane) {
+    if (config.debug.openDebugPane) {
         debugOutput.show(true);
     }
     // folder exists or RA is not active.
@@ -131,37 +125,36 @@
               firstWorkspace;
 
     const workspace = unwrapUndefinable(maybeWorkspace);
-    const wsFolder = path.normalize(workspace.uri.fsPath);
+    let wsFolder = path.normalize(workspace.uri.fsPath);
+    if (os.platform() === "win32") {
+        // On Windows, the drive letter's casing can vary in VSCode, so normalize it to uppercase first
+        wsFolder = wsFolder.replace(/^[a-z]:\\/, (c) => c.toUpperCase());
+    }
+
     const workspaceQualifier = isMultiFolderWorkspace ? `:${workspace.name}` : "";
     function simplifyPath(p: string): string {
+        // On Windows, the drive letter's casing can vary in VSCode, so normalize it to uppercase first
+        if (os.platform() === "win32") {
+            p = p.replace(/^[a-z]:\\/, (c) => c.toUpperCase());
+        }
         // see https://github.com/rust-lang/rust-analyzer/pull/5513#issuecomment-663458818 for why this is needed
         return path.normalize(p).replace(wsFolder, `\${workspaceFolder${workspaceQualifier}}`);
     }
 
-    const env = prepareEnv(runnable.label, runnableArgs, ctx.config.runnablesExtraEnv);
+    const env = prepareEnv(inheritEnv, runnable.label, runnableArgs, config.runnablesExtraEnv);
     const executable = await getDebugExecutable(runnableArgs, env);
     let sourceFileMap = debugOptions.sourceFileMap;
     if (sourceFileMap === "auto") {
         sourceFileMap = {};
-        const sysroot = env["RUSTC_TOOLCHAIN"];
-        if (sysroot) {
-            // let's try to use the default toolchain
-            const data = await execute(`rustc -V -v`, { cwd: wsFolder, env });
-            const rx = /commit-hash:\s(.*)$/m;
-
-            const commitHash = rx.exec(data)?.[1];
-            if (commitHash) {
-                const rustlib = path.normalize(sysroot + "/lib/rustlib/src/rust");
-                sourceFileMap[`/rustc/${commitHash}/`] = rustlib;
-            }
-        }
+        await discoverSourceFileMap(sourceFileMap, env, wsFolder);
     }
 
-    const provider = unwrapUndefinable(knownEngines[debugEngine.id]);
-    const debugConfig = provider(
+    const debugConfig = getDebugConfig(
+        provider,
+        simplifyPath,
         runnable,
         runnableArgs,
-        simplifyPath(executable),
+        executable,
         env,
         sourceFileMap,
     );
@@ -186,6 +179,92 @@
     return debugConfig;
 }
 
+async function discoverSourceFileMap(
+    sourceFileMap: Record<string, string>,
+    env: Record<string, string>,
+    cwd: string,
+) {
+    const sysroot = env["RUSTC_TOOLCHAIN"];
+    if (sysroot) {
+        // let's try to use the default toolchain
+        const data = await execute(`rustc -V -v`, { cwd, env });
+        const rx = /commit-hash:\s(.*)$/m;
+
+        const commitHash = rx.exec(data)?.[1];
+        if (commitHash) {
+            const rustlib = path.normalize(sysroot + "/lib/rustlib/src/rust");
+            sourceFileMap[`/rustc/${commitHash}/`] = rustlib;
+        }
+    }
+}
+
+type PropertyFetcher<Config, Input, Key extends keyof Config> = (
+    input: Input,
+) => [Key, Config[Key]];
+
+type DebugConfigProvider<Type extends string, DebugConfig extends BaseDebugConfig<Type>> = {
+    executableProperty: keyof DebugConfig;
+    environmentProperty: PropertyFetcher<DebugConfig, Record<string, string>, keyof DebugConfig>;
+    runnableArgsProperty: PropertyFetcher<DebugConfig, ra.CargoRunnableArgs, keyof DebugConfig>;
+    sourceFileMapProperty?: keyof DebugConfig;
+    type: Type;
+    additional?: Record<string, unknown>;
+};
+
+type KnownEnginesType = (typeof knownEngines)[keyof typeof knownEngines];
+const knownEngines: {
+    "vadimcn.vscode-lldb": DebugConfigProvider<"lldb", CodeLldbDebugConfig>;
+    "ms-vscode.cpptools": DebugConfigProvider<"cppvsdbg" | "cppdbg", CCppDebugConfig>;
+    "webfreak.debug": DebugConfigProvider<"gdb", NativeDebugConfig>;
+} = {
+    "vadimcn.vscode-lldb": {
+        type: "lldb",
+        executableProperty: "program",
+        environmentProperty: (env) => ["env", env],
+        runnableArgsProperty: (runnableArgs: ra.CargoRunnableArgs) => [
+            "args",
+            runnableArgs.executableArgs,
+        ],
+        sourceFileMapProperty: "sourceMap",
+        additional: {
+            sourceLanguages: ["rust"],
+        },
+    },
+    "ms-vscode.cpptools": {
+        type: os.platform() === "win32" ? "cppvsdbg" : "cppdbg",
+        executableProperty: "program",
+        environmentProperty: (env) => [
+            "environment",
+            Object.entries(env).map((entry) => ({
+                name: entry[0],
+                value: entry[1],
+            })),
+        ],
+        runnableArgsProperty: (runnableArgs: ra.CargoRunnableArgs) => [
+            "args",
+            runnableArgs.executableArgs,
+        ],
+        sourceFileMapProperty: "sourceFileMap",
+        additional: {
+            osx: {
+                MIMode: "lldb",
+            },
+        },
+    },
+    "webfreak.debug": {
+        type: "gdb",
+        executableProperty: "target",
+        runnableArgsProperty: (runnableArgs: ra.CargoRunnableArgs) => [
+            "arguments",
+            quote(runnableArgs.executableArgs),
+        ],
+        environmentProperty: (env) => ["env", env],
+        additional: {
+            valuesFormatting: "prettyPrinters",
+        },
+    },
+};
+
 async function getDebugExecutable(
     runnableArgs: ra.CargoRunnableArgs,
     env: Record<string, string>,
@@ -197,71 +276,74 @@
     return executable;
 }
 
-function getCCppDebugConfig(
+type BaseDebugConfig<type extends string> = {
+    type: type;
+    request: "launch";
+    name: string;
+    cwd: string;
+};
+
+function getDebugConfig(
+    provider: KnownEnginesType,
+    simplifyPath: (p: string) => string,
     runnable: ra.Runnable,
     runnableArgs: ra.CargoRunnableArgs,
     executable: string,
     env: Record<string, string>,
     sourceFileMap?: Record<string, string>,
 ): vscode.DebugConfiguration {
+    const {
+        environmentProperty,
+        executableProperty,
+        runnableArgsProperty,
+        type,
+        additional,
+        sourceFileMapProperty,
+    } = provider;
+    const [envProperty, envValue] = environmentProperty(env);
+    const [argsProperty, argsValue] = runnableArgsProperty(runnableArgs);
     return {
-        type: os.platform() === "win32" ? "cppvsdbg" : "cppdbg",
+        type,
         request: "launch",
         name: runnable.label,
-        program: executable,
-        args: runnableArgs.executableArgs,
-        cwd: runnable.args.cwd || runnableArgs.workspaceRoot || ".",
-        sourceFileMap,
-        environment: Object.entries(env).map((entry) => ({
-            name: entry[0],
-            value: entry[1],
-        })),
-        // See https://github.com/rust-lang/rust-analyzer/issues/16901#issuecomment-2024486941
-        osx: {
-            MIMode: "lldb",
-        },
+        cwd: simplifyPath(runnable.args.cwd || runnableArgs.workspaceRoot || "."),
+        [executableProperty]: simplifyPath(executable),
+        [envProperty]: envValue,
+        [argsProperty]: argsValue,
+        ...(sourceFileMapProperty ? { [sourceFileMapProperty]: sourceFileMap } : {}),
+        ...additional,
     };
 }
 
-function getCodeLldbDebugConfig(
-    runnable: ra.Runnable,
-    runnableArgs: ra.CargoRunnableArgs,
-    executable: string,
-    env: Record<string, string>,
-    sourceFileMap?: Record<string, string>,
-): vscode.DebugConfiguration {
-    return {
-        type: "lldb",
-        request: "launch",
-        name: runnable.label,
-        program: executable,
-        args: runnableArgs.executableArgs,
-        cwd: runnable.args.cwd || runnableArgs.workspaceRoot || ".",
-        sourceMap: sourceFileMap,
-        sourceLanguages: ["rust"],
-        env,
+type CCppDebugConfig = {
+    program: string;
+    args: string[];
+    sourceFileMap: Record<string, string> | undefined;
+    environment: {
+        name: string;
+        value: string;
+    }[];
+    // See https://github.com/rust-lang/rust-analyzer/issues/16901#issuecomment-2024486941
+    osx: {
+        MIMode: "lldb";
     };
-}
+} & BaseDebugConfig<"cppvsdbg" | "cppdbg">;
 
-function getNativeDebugConfig(
-    runnable: ra.Runnable,
-    runnableArgs: ra.CargoRunnableArgs,
-    executable: string,
-    env: Record<string, string>,
-    _sourceFileMap?: Record<string, string>,
-): vscode.DebugConfiguration {
-    return {
-        type: "gdb",
-        request: "launch",
-        name: runnable.label,
-        target: executable,
-        // See https://github.com/WebFreak001/code-debug/issues/359
-        arguments: quote(runnableArgs.executableArgs),
-        cwd: runnable.args.cwd || runnableArgs.workspaceRoot || ".",
-        env,
-        valuesFormatting: "prettyPrinters",
-    };
-}
+type CodeLldbDebugConfig = {
+    program: string;
+    args: string[];
+    sourceMap: Record<string, string> | undefined;
+    sourceLanguages: ["rust"];
+    env: Record<string, string>;
+} & BaseDebugConfig<"lldb">;
+
+type NativeDebugConfig = {
+    target: string;
+    // See https://github.com/WebFreak001/code-debug/issues/359
+    arguments: string;
+    env: Record<string, string>;
+    valuesFormatting: "prettyPrinters";
+} & BaseDebugConfig<"gdb">;
 
 // Based on https://github.com/ljharb/shell-quote/blob/main/quote.js
 function quote(xs: string[]) {
diff --git a/editors/code/src/run.ts b/editors/code/src/run.ts
index 7179eb3..dd0da6b 100644
--- a/editors/code/src/run.ts
+++ b/editors/code/src/run.ts
@@ -65,9 +65,14 @@
     }
 }
 
-export function prepareBaseEnv(base?: Record<string, string>): Record<string, string> {
+export function prepareBaseEnv(
+    inheritEnv: boolean,
+    base?: Record<string, string>,
+): Record<string, string> {
     const env: Record<string, string> = { RUST_BACKTRACE: "short" };
-    Object.assign(env, process.env);
+    if (inheritEnv) {
+        Object.assign(env, process.env);
+    }
     if (base) {
         Object.assign(env, base);
     }
@@ -75,11 +80,12 @@
 }
 
 export function prepareEnv(
+    inheritEnv: boolean,
     label: string,
     runnableArgs: ra.CargoRunnableArgs,
     runnableEnvCfg?: RunnableEnvCfg,
 ): Record<string, string> {
-    const env = prepareBaseEnv(runnableArgs.environment);
+    const env = prepareBaseEnv(inheritEnv, runnableArgs.environment);
     const platform = process.platform;
 
     const checkPlatform = (it: RunnableEnvCfgItem) => {
@@ -134,7 +140,7 @@
         };
         options = {
             cwd: runnableArgs.workspaceRoot || ".",
-            env: prepareEnv(runnable.label, runnableArgs, config.runnablesExtraEnv),
+            env: prepareEnv(true, runnable.label, runnableArgs, config.runnablesExtraEnv),
         };
     } else {
         const runnableArgs = runnable.args;
@@ -145,7 +151,7 @@
         };
         options = {
             cwd: runnableArgs.cwd,
-            env: prepareBaseEnv(),
+            env: prepareBaseEnv(true),
         };
     }
 
diff --git a/editors/code/tests/unit/bootstrap.test.ts b/editors/code/tests/unit/bootstrap.test.ts
new file mode 100644
index 0000000..8aeb721
--- /dev/null
+++ b/editors/code/tests/unit/bootstrap.test.ts
@@ -0,0 +1,96 @@
+import * as assert from "assert";
+import { _private } from "../../src/bootstrap";
+import type { Context } from ".";
+
+export async function getTests(ctx: Context) {
+    await ctx.suite("Bootstrap/Select toolchain RA", (suite) => {
+        suite.addTest("Order of nightly RA", async () => {
+            assert.deepStrictEqual(
+                _private.orderFromPath(
+                    "/Users/myuser/.rustup/toolchains/nightly-2022-11-22-aarch64-apple-darwin/bin/rust-analyzer",
+                    function (path: string) {
+                        assert.deepStrictEqual(
+                            path,
+                            "/Users/myuser/.rustup/toolchains/nightly-2022-11-22-aarch64-apple-darwin/bin/rust-analyzer",
+                        );
+                        return "rust-analyzer 1.67.0-nightly (b7bc90fe 2022-11-21)";
+                    },
+                ),
+                "0-2022-11-21/0",
+            );
+        });
+
+        suite.addTest("Order of versioned RA", async () => {
+            assert.deepStrictEqual(
+                _private.orderFromPath(
+                    "/Users/myuser/.rustup/toolchains/1.72.1-aarch64-apple-darwin/bin/rust-analyzer",
+                    function (path: string) {
+                        assert.deepStrictEqual(
+                            path,
+                            "/Users/myuser/.rustup/toolchains/1.72.1-aarch64-apple-darwin/bin/rust-analyzer",
+                        );
+                        return "rust-analyzer 1.72.1 (d5c2e9c3 2023-09-13)";
+                    },
+                ),
+                "0-2023-09-13/1",
+            );
+        });
+
+        suite.addTest("Order of versioned RA when unable to obtain version date", async () => {
+            assert.deepStrictEqual(
+                _private.orderFromPath(
+                    "/Users/myuser/.rustup/toolchains/1.72.1-aarch64-apple-darwin/bin/rust-analyzer",
+                    function () {
+                        return "rust-analyzer 1.72.1";
+                    },
+                ),
+                "2",
+            );
+        });
+
+        suite.addTest("Order of stable RA", async () => {
+            assert.deepStrictEqual(
+                _private.orderFromPath(
+                    "/Users/myuser/.rustup/toolchains/stable-aarch64-apple-darwin/bin/rust-analyzer",
+                    function (path: string) {
+                        assert.deepStrictEqual(
+                            path,
+                            "/Users/myuser/.rustup/toolchains/stable-aarch64-apple-darwin/bin/rust-analyzer",
+                        );
+                        return "rust-analyzer 1.79.0 (129f3b99 2024-06-10)";
+                    },
+                ),
+                "0-2024-06-10/1",
+            );
+        });
+
+        suite.addTest("Order with invalid path to RA", async () => {
+            assert.deepStrictEqual(
+                _private.orderFromPath("some-weird-path", function () {
+                    return undefined;
+                }),
+                "2",
+            );
+        });
+
+        suite.addTest("Earliest RA between nightly and stable", async () => {
+            assert.deepStrictEqual(
+                _private.earliestToolchainPath(
+                    "/Users/myuser/.rustup/toolchains/stable-aarch64-apple-darwin/bin/rust-analyzer",
+                    "/Users/myuser/.rustup/toolchains/nightly-2022-11-22-aarch64-apple-darwin/bin/rust-analyzer",
+                    function (path: string) {
+                        if (
+                            path ===
+                            "/Users/myuser/.rustup/toolchains/nightly-2022-11-22-aarch64-apple-darwin/bin/rust-analyzer"
+                        ) {
+                            return "rust-analyzer 1.67.0-nightly (b7bc90fe 2022-11-21)";
+                        } else {
+                            return "rust-analyzer 1.79.0 (129f3b99 2024-06-10)";
+                        }
+                    },
+                ),
+                "/Users/myuser/.rustup/toolchains/nightly-2022-11-22-aarch64-apple-darwin/bin/rust-analyzer",
+            );
+        });
+    });
+}
diff --git a/editors/code/tests/unit/runnable_env.test.ts b/editors/code/tests/unit/runnable_env.test.ts
index 81850e0..f0a62a3 100644
--- a/editors/code/tests/unit/runnable_env.test.ts
+++ b/editors/code/tests/unit/runnable_env.test.ts
@@ -19,7 +19,7 @@
 function fakePrepareEnv(runnableName: string, config?: RunnableEnvCfg): Record<string, string> {
     const runnable = makeRunnable(runnableName);
     const runnableArgs = runnable.args as ra.CargoRunnableArgs;
-    return prepareEnv(runnable.label, runnableArgs, config);
+    return prepareEnv(false, runnable.label, runnableArgs, config);
 }
 
 export async function getTests(ctx: Context) {
diff --git a/lib/lsp-server/Cargo.toml b/lib/lsp-server/Cargo.toml
index a89eb4b..fb3411c 100644
--- a/lib/lsp-server/Cargo.toml
+++ b/lib/lsp-server/Cargo.toml
@@ -10,11 +10,11 @@
 log = "0.4.17"
 serde_json = "1.0.108"
 serde = { version = "1.0.192", features = ["derive"] }
-crossbeam-channel = "0.5.8"
+crossbeam-channel.workspace = true
 
 [dev-dependencies]
 lsp-types = "=0.95"
 ctrlc = "3.4.1"
 
 [lints]
-workspace = true
\ No newline at end of file
+workspace = true
diff --git a/rust-version b/rust-version
index 001b900..d4f1703 100644
--- a/rust-version
+++ b/rust-version
@@ -1 +1 @@
-1b51d80027919563004918eaadfa0d890ac0eb93
+80eb5a8e910e5185d47cdefe3732d839c78a5e7e
diff --git a/xtask/src/metrics.rs b/xtask/src/metrics.rs
index 21001c2..6555f22 100644
--- a/xtask/src/metrics.rs
+++ b/xtask/src/metrics.rs
@@ -117,11 +117,7 @@
     ) -> anyhow::Result<()> {
         assert!(Path::new(path).exists(), "unable to find bench in {path}");
         eprintln!("\nMeasuring analysis-stats/{name}");
-        let output = cmd!(
-            sh,
-            "./target/release/rust-analyzer -q analysis-stats {path} --query-sysroot-metadata"
-        )
-        .read()?;
+        let output = cmd!(sh, "./target/release/rust-analyzer -q analysis-stats {path}").read()?;
         for (metric, value, unit) in parse_metrics(&output) {
             self.report(&format!("analysis-stats/{name}/{metric}"), value, unit.into());
         }
diff --git a/xtask/src/tidy.rs b/xtask/src/tidy.rs
index e85f518..ea51d33 100644
--- a/xtask/src/tidy.rs
+++ b/xtask/src/tidy.rs
@@ -134,6 +134,7 @@
 Apache-2.0 WITH LLVM-exception
 Apache-2.0 WITH LLVM-exception OR Apache-2.0 OR MIT
 Apache-2.0/MIT
+BSD-2-Clause OR Apache-2.0 OR MIT
 BSD-3-Clause
 CC0-1.0
 ISC