Auto merge of #141210 - RalfJung:miri-std-doctests, r=saethlin
tools-aux CI runner: also cross-test doctests in Miri
Miri now supports running doctests across different targets. Let's use that to run the std doctests on aarch64-apple-darwin and i686-pc-windows-msvc.
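A minimal sketch of the Miri capability this builds on (plain `cargo miri` invocations against an arbitrary crate, assuming a nightly toolchain with the miri component installed; these are illustrative, not the exact commands the tools-aux runner executes):

    # run only the doctests, cross-interpreting them for the given target
    cargo +nightly miri test --doc --target aarch64-apple-darwin
    cargo +nightly miri test --doc --target i686-pc-windows-msvc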
try-job: x86_64-gnu-aux
diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml
new file mode 100644
index 0000000..466672e
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/config.yml
@@ -0,0 +1,4 @@
+contact_links:
+ - name: Questions regarding rust-analyzer
+ url: https://github.com/rust-lang/rust-analyzer/discussions
+ about: Please ask and answer questions here instead of opening an issue
diff --git a/.github/ISSUE_TEMPLATE/critical_nightly_regression.md b/.github/ISSUE_TEMPLATE/critical_nightly_regression.md
index 23c4344..2b44bdc 100644
--- a/.github/ISSUE_TEMPLATE/critical_nightly_regression.md
+++ b/.github/ISSUE_TEMPLATE/critical_nightly_regression.md
@@ -12,5 +12,3 @@
Please try to provide information which will help us to fix the issue faster. Minimal reproducible examples with few dependencies are especially lovely <3.
-->
-
-This is a serious regression in nightly and it's important to fix it before the next release.
diff --git a/.github/ISSUE_TEMPLATE/question.md b/.github/ISSUE_TEMPLATE/question.md
deleted file mode 100644
index a90ade8..0000000
--- a/.github/ISSUE_TEMPLATE/question.md
+++ /dev/null
@@ -1,8 +0,0 @@
----
-name: Support Question
-about: A question regarding functionality of rust-analyzer.
-title: ''
-labels: 'C-support'
-assignees: ''
-
----
diff --git a/.github/workflows/metrics.yaml b/.github/workflows/metrics.yaml
index a4146d6..dc2f432 100644
--- a/.github/workflows/metrics.yaml
+++ b/.github/workflows/metrics.yaml
@@ -18,9 +18,9 @@
steps:
- name: Install Rust toolchain
run: |
- rustup update --no-self-update stable
- rustup default stable
- rustup component add --toolchain stable rust-src
+ rustup update --no-self-update beta
+ rustup default beta
+ rustup component add --toolchain beta rust-src
- name: Checkout repository
uses: actions/checkout@v4
@@ -61,9 +61,9 @@
steps:
- name: Install Rust toolchain
run: |
- rustup update --no-self-update stable
- rustup default stable
- rustup component add --toolchain stable rust-src
+ rustup update --no-self-update beta
+ rustup default beta
+ rustup component add --toolchain beta rust-src
- name: Checkout repository
uses: actions/checkout@v4
diff --git a/Cargo.lock b/Cargo.lock
index 8d6c828..01de430 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -18,15 +18,6 @@
checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627"
[[package]]
-name = "aho-corasick"
-version = "1.1.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916"
-dependencies = [
- "memchr",
-]
-
-[[package]]
name = "allocator-api2"
version = "0.2.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -61,9 +52,9 @@
[[package]]
name = "backtrace"
-version = "0.3.74"
+version = "0.3.75"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8d82cb332cdfaed17ae235a638438ac4d4839913cc2af585c3c6746e8f8bee1a"
+checksum = "6806a6321ec58106fea15becdad98371e28d92ccbc7c8f1b3b6dd724fe8f1002"
dependencies = [
"addr2line",
"cfg-if",
@@ -80,6 +71,7 @@
dependencies = [
"cfg",
"dashmap",
+ "indexmap",
"intern",
"la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"query-group-macro",
@@ -123,12 +115,9 @@
[[package]]
name = "boxcar"
-version = "0.2.11"
+version = "0.2.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6740c6e2fc6360fa57c35214c7493826aee95993926092606f27c983b40837be"
-dependencies = [
- "loom",
-]
+checksum = "66bb12751a83493ef4b8da1120451a262554e216a247f14b48cb5e8fe7ed8bdf"
[[package]]
name = "camino"
@@ -316,9 +305,9 @@
[[package]]
name = "ctrlc"
-version = "3.4.5"
+version = "3.4.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "90eeab0aa92f3f9b4e87f258c72b139c207d251f9cbc1080a0086b86a8870dd3"
+checksum = "46f93780a459b7d656ef7f071fe699c4d3d2cb201c4b24d085b6ddc505276e73"
dependencies = [
"nix",
"windows-sys 0.59.0",
@@ -472,9 +461,9 @@
[[package]]
name = "flate2"
-version = "1.1.0"
+version = "1.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "11faaf5a5236997af9848be0bef4db95824b1d534ebc64d0f0c6cf3e67bd38dc"
+checksum = "7ced92e76e966ca2fd84c8f7aa01a4aea65b0eb6648d72f7c8f3e2764a67fece"
dependencies = [
"crc32fast",
"miniz_oxide",
@@ -511,19 +500,6 @@
checksum = "7ab85b9b05e3978cc9a9cf8fea7f01b494e1a09ed3037e16ba39edc7a29eb61a"
[[package]]
-name = "generator"
-version = "0.8.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cc6bd114ceda131d3b1d665eba35788690ad37f5916457286b32ab6fd3c438dd"
-dependencies = [
- "cfg-if",
- "libc",
- "log",
- "rustversion",
- "windows 0.58.0",
-]
-
-[[package]]
name = "getrandom"
version = "0.2.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1010,9 +986,9 @@
[[package]]
name = "indexmap"
-version = "2.8.0"
+version = "2.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3954d50fe15b02142bf25d3b8bdadb634ec3948f103d04ffe3031bc8fe9d7058"
+checksum = "cea70ddb795996207ad57735b50c5982d8844f38ba9ee5f1aedcfb708a2aa11e"
dependencies = [
"equivalent",
"hashbrown 0.15.2",
@@ -1123,12 +1099,12 @@
[[package]]
name = "libloading"
-version = "0.8.6"
+version = "0.8.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fc2f4eb4bc735547cfed7c0a4922cbd04a4655978c09b54f1f7b228750664c34"
+checksum = "6a793df0d7afeac54f95b471d3af7f0d4fb975699f972341a4b76988d49cdf0c"
dependencies = [
"cfg-if",
- "windows-targets 0.52.6",
+ "windows-targets 0.53.0",
]
[[package]]
@@ -1213,19 +1189,6 @@
checksum = "30bde2b3dc3671ae49d8e2e9f044c7c005836e7a023ee57cffa25ab82764bb9e"
[[package]]
-name = "loom"
-version = "0.7.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "419e0dc8046cb947daa77eb95ae174acfbddb7673b4151f56d1eed8e93fbfaca"
-dependencies = [
- "cfg-if",
- "generator",
- "scoped-tls",
- "tracing",
- "tracing-subscriber",
-]
-
-[[package]]
name = "lsp-server"
version = "0.7.8"
dependencies = [
@@ -1265,15 +1228,6 @@
]
[[package]]
-name = "matchers"
-version = "0.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558"
-dependencies = [
- "regex-automata 0.1.10",
-]
-
-[[package]]
name = "mbe"
version = "0.0.0"
dependencies = [
@@ -1358,9 +1312,9 @@
[[package]]
name = "nix"
-version = "0.29.0"
+version = "0.30.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "71e2746dc3a24dd78b3cfcb7be93368c6de9963d30f43a6a73998a9cf4b17b46"
+checksum = "74523f3a35e05aba87a1d978330aef40f67b0304ac79c1c00b294c9830543db6"
dependencies = [
"bitflags 2.9.0",
"cfg-if",
@@ -1401,16 +1355,6 @@
[[package]]
name = "nu-ansi-term"
-version = "0.46.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84"
-dependencies = [
- "overload",
- "winapi",
-]
-
-[[package]]
-name = "nu-ansi-term"
version = "0.50.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d4a28e057d01f97e61255210fcff094d74ed0466038633e95017f5beb68e4399"
@@ -1471,12 +1415,6 @@
checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d"
[[package]]
-name = "overload"
-version = "0.1.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
-
-[[package]]
name = "parking_lot"
version = "0.12.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1640,14 +1578,14 @@
[[package]]
name = "process-wrap"
-version = "8.2.0"
+version = "8.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d35f4dc9988d1326b065b4def5e950c3ed727aa03e3151b86cc9e2aec6b03f54"
+checksum = "a3ef4f2f0422f23a82ec9f628ea2acd12871c81a9362b02c43c1aa86acfc3ba1"
dependencies = [
"indexmap",
"nix",
"tracing",
- "windows 0.59.0",
+ "windows",
]
[[package]]
@@ -1749,9 +1687,9 @@
[[package]]
name = "ra-ap-rustc_abi"
-version = "0.110.0"
+version = "0.113.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "912228bd8ed3beff1f6f9e5e2d4b37c0827ba3e2070060bf3858a311d0e29e30"
+checksum = "c33b8fa229789975647ca5426be432c7c327ebde89ab15889928185dbcee3230"
dependencies = [
"bitflags 2.9.0",
"ra-ap-rustc_hashes",
@@ -1761,18 +1699,18 @@
[[package]]
name = "ra-ap-rustc_hashes"
-version = "0.110.0"
+version = "0.113.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ba520764daf057a9d963fa769f4762eaf87ac5d4900ae76195eeead64cd35afd"
+checksum = "0d68a3e389927002f552938a90b04787f6435f55b46fc5691360470d1cb2e99d"
dependencies = [
"rustc-stable-hash",
]
[[package]]
name = "ra-ap-rustc_index"
-version = "0.110.0"
+version = "0.113.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b76b5f9ee55f2d0e5a65bea23f6d738893349ce8d3d17a6720933e647ab04978"
+checksum = "32502273df2838d0ca13f1c67e2a48feef940e591f9771869f07e2db2acede53"
dependencies = [
"ra-ap-rustc_index_macros",
"smallvec",
@@ -1780,9 +1718,9 @@
[[package]]
name = "ra-ap-rustc_index_macros"
-version = "0.110.0"
+version = "0.113.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ddd972eb1face2fcaa0d94c01d97862fb955b5561d4f5932003bce8a6cadd8c6"
+checksum = "8a32f081864ae34c7ae6634edfa7a95ab9260ba85015e8b1d347580eda79d14f"
dependencies = [
"proc-macro2",
"quote",
@@ -1791,9 +1729,9 @@
[[package]]
name = "ra-ap-rustc_lexer"
-version = "0.110.0"
+version = "0.113.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ba3a9876456fb2521097deef33ddeac1c18260c8eafb68054d986f8b9d6ce9fa"
+checksum = "ed34c51974718c5bd90d876d1364d9725159fc8030c2382b9cb837034152ed68"
dependencies = [
"memchr",
"unicode-properties",
@@ -1802,9 +1740,9 @@
[[package]]
name = "ra-ap-rustc_parse_format"
-version = "0.110.0"
+version = "0.113.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8e85de58dfcc60a5f9d5ec0157a657e3f84abd8f22c8a0c4d707cfb42c9011f4"
+checksum = "ff0440e5d27facbf4ff13ea651e48c2f6e360b3dbfc56251b41d60719b965fb8"
dependencies = [
"ra-ap-rustc_lexer",
"rustc-literal-escaper",
@@ -1812,9 +1750,9 @@
[[package]]
name = "ra-ap-rustc_pattern_analysis"
-version = "0.110.0"
+version = "0.113.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ceadf9db550db67deff7eff2e2765109b860c9d7e5bdfca144863020289c823d"
+checksum = "a6056efa57aba3aa0cc69a0bf1a8281624c23ad25b05748d11ebcd4668037bfc"
dependencies = [
"ra-ap-rustc_index",
"rustc-hash 2.1.1",
@@ -1864,50 +1802,6 @@
]
[[package]]
-name = "regex"
-version = "1.11.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191"
-dependencies = [
- "aho-corasick",
- "memchr",
- "regex-automata 0.4.9",
- "regex-syntax 0.8.5",
-]
-
-[[package]]
-name = "regex-automata"
-version = "0.1.10"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132"
-dependencies = [
- "regex-syntax 0.6.29",
-]
-
-[[package]]
-name = "regex-automata"
-version = "0.4.9"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908"
-dependencies = [
- "aho-corasick",
- "memchr",
- "regex-syntax 0.8.5",
-]
-
-[[package]]
-name = "regex-syntax"
-version = "0.6.29"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1"
-
-[[package]]
-name = "regex-syntax"
-version = "0.8.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c"
-
-[[package]]
name = "rowan"
version = "0.15.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2026,12 +1920,6 @@
]
[[package]]
-name = "rustversion"
-version = "1.0.20"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "eded382c5f5f786b989652c49544c4877d9f015cc22e145a5ea8ea66c2921cd2"
-
-[[package]]
name = "ryu"
version = "1.0.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2039,9 +1927,9 @@
[[package]]
name = "salsa"
-version = "0.21.1"
+version = "0.22.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6f80d5cf3c3fcab2cef898012f242a670477a1baa609267376af9cb4409026c5"
+checksum = "c8fff508e3d6ef42a32607f7538e17171a877a12015e32036f46e99d00c95781"
dependencies = [
"boxcar",
"crossbeam-queue",
@@ -2062,15 +1950,15 @@
[[package]]
name = "salsa-macro-rules"
-version = "0.21.1"
+version = "0.22.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "05303d72606fbf2b9c9523cda2039bb8ecb00304027a3cd7e52b02a65c7d9185"
+checksum = "8ea72b3c06f2ce6350fe3a0eeb7aaaf842d1d8352b706973c19c4f02e298a87c"
[[package]]
name = "salsa-macros"
-version = "0.21.1"
+version = "0.22.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "eb2f0e2a30c65cb3cd63440c491dde68d9af7e1be2b77832ac7057141107db50"
+checksum = "0ce92025bc160b27814a207cb78d680973af17f863c7f4fc56cf3a535e22f378"
dependencies = [
"heck",
"proc-macro2",
@@ -2228,6 +2116,7 @@
dependencies = [
"backtrace",
"crossbeam-channel",
+ "crossbeam-utils",
"itertools 0.14.0",
"jod-thread",
"libc",
@@ -2554,15 +2443,9 @@
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e8189decb5ac0fa7bc8b96b7cb9b2701d60d48805aca84a238004d665fcc4008"
dependencies = [
- "matchers",
- "nu-ansi-term 0.46.0",
- "once_cell",
- "regex",
"sharded-slab",
- "smallvec",
"thread_local",
"time",
- "tracing",
"tracing-core",
"tracing-log",
]
@@ -2573,7 +2456,7 @@
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f459ca79f1b0d5f71c54ddfde6debfc59c8b6eeb46808ae492077f739dc7b49c"
dependencies = [
- "nu-ansi-term 0.50.1",
+ "nu-ansi-term",
"tracing-core",
"tracing-log",
"tracing-subscriber",
@@ -2708,22 +2591,6 @@
checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
[[package]]
-name = "winapi"
-version = "0.3.9"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
-dependencies = [
- "winapi-i686-pc-windows-gnu",
- "winapi-x86_64-pc-windows-gnu",
-]
-
-[[package]]
-name = "winapi-i686-pc-windows-gnu"
-version = "0.4.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
-
-[[package]]
name = "winapi-util"
version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2733,73 +2600,55 @@
]
[[package]]
-name = "winapi-x86_64-pc-windows-gnu"
-version = "0.4.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
-
-[[package]]
name = "windows"
-version = "0.58.0"
+version = "0.61.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dd04d41d93c4992d421894c18c8b43496aa748dd4c081bac0dc93eb0489272b6"
+checksum = "c5ee8f3d025738cb02bad7868bbb5f8a6327501e870bf51f1b455b0a2454a419"
dependencies = [
- "windows-core 0.58.0",
- "windows-targets 0.52.6",
+ "windows-collections",
+ "windows-core",
+ "windows-future",
+ "windows-link",
+ "windows-numerics",
]
[[package]]
-name = "windows"
-version = "0.59.0"
+name = "windows-collections"
+version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7f919aee0a93304be7f62e8e5027811bbba96bcb1de84d6618be56e43f8a32a1"
+checksum = "3beeceb5e5cfd9eb1d76b381630e82c4241ccd0d27f1a39ed41b2760b255c5e8"
dependencies = [
- "windows-core 0.59.0",
- "windows-targets 0.53.0",
+ "windows-core",
]
[[package]]
name = "windows-core"
-version = "0.58.0"
+version = "0.61.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6ba6d44ec8c2591c134257ce647b7ea6b20335bf6379a27dac5f1641fcf59f99"
+checksum = "4763c1de310c86d75a878046489e2e5ba02c649d185f21c67d4cf8a56d098980"
dependencies = [
- "windows-implement 0.58.0",
- "windows-interface 0.58.0",
- "windows-result 0.2.0",
- "windows-strings 0.1.0",
- "windows-targets 0.52.6",
+ "windows-implement",
+ "windows-interface",
+ "windows-link",
+ "windows-result",
+ "windows-strings",
]
[[package]]
-name = "windows-core"
-version = "0.59.0"
+name = "windows-future"
+version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "810ce18ed2112484b0d4e15d022e5f598113e220c53e373fb31e67e21670c1ce"
+checksum = "7a1d6bbefcb7b60acd19828e1bc965da6fcf18a7e39490c5f8be71e54a19ba32"
dependencies = [
- "windows-implement 0.59.0",
- "windows-interface 0.59.0",
- "windows-result 0.3.1",
- "windows-strings 0.3.1",
- "windows-targets 0.53.0",
+ "windows-core",
+ "windows-link",
]
[[package]]
name = "windows-implement"
-version = "0.58.0"
+version = "0.60.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2bbd5b46c938e506ecbce286b6628a02171d56153ba733b6c741fc627ec9579b"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn",
-]
-
-[[package]]
-name = "windows-implement"
-version = "0.59.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "83577b051e2f49a058c308f17f273b570a6a758386fc291b5f6a934dd84e48c1"
+checksum = "a47fddd13af08290e67f4acabf4b459f647552718f683a7b415d290ac744a836"
dependencies = [
"proc-macro2",
"quote",
@@ -2808,20 +2657,9 @@
[[package]]
name = "windows-interface"
-version = "0.58.0"
+version = "0.59.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "053c4c462dc91d3b1504c6fe5a726dd15e216ba718e84a0e46a88fbe5ded3515"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn",
-]
-
-[[package]]
-name = "windows-interface"
-version = "0.59.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cb26fd936d991781ea39e87c3a27285081e3c0da5ca0fcbc02d368cc6f52ff01"
+checksum = "bd9211b69f8dcdfa817bfd14bf1c97c9188afa36f4750130fcdf3f400eca9fa8"
dependencies = [
"proc-macro2",
"quote",
@@ -2830,43 +2668,34 @@
[[package]]
name = "windows-link"
-version = "0.1.0"
+version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6dccfd733ce2b1753b03b6d3c65edf020262ea35e20ccdf3e288043e6dd620e3"
+checksum = "76840935b766e1b0a05c0066835fb9ec80071d4c09a16f6bd5f7e655e3c14c38"
[[package]]
-name = "windows-result"
+name = "windows-numerics"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1d1043d8214f791817bab27572aaa8af63732e11bf84aa21a45a78d6c317ae0e"
+checksum = "9150af68066c4c5c07ddc0ce30421554771e528bde427614c61038bc2c92c2b1"
dependencies = [
- "windows-targets 0.52.6",
+ "windows-core",
+ "windows-link",
]
[[package]]
name = "windows-result"
-version = "0.3.1"
+version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "06374efe858fab7e4f881500e6e86ec8bc28f9462c47e5a9941a0142ad86b189"
+checksum = "c64fd11a4fd95df68efcfee5f44a294fe71b8bc6a91993e2791938abcc712252"
dependencies = [
"windows-link",
]
[[package]]
name = "windows-strings"
-version = "0.1.0"
+version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4cd9b125c486025df0eabcb585e62173c6c9eddcec5d117d3b6e8c30e2ee4d10"
-dependencies = [
- "windows-result 0.2.0",
- "windows-targets 0.52.6",
-]
-
-[[package]]
-name = "windows-strings"
-version = "0.3.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "87fa48cc5d406560701792be122a10132491cff9d0aeb23583cc2dcafc847319"
+checksum = "7a2ba9642430ee452d5a7aa78d72907ebe8cfda358e8cb7918a2050581322f97"
dependencies = [
"windows-link",
]
@@ -3230,17 +3059,14 @@
[[package]]
name = "zip"
-version = "2.4.2"
+version = "3.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fabe6324e908f85a1c52063ce7aa26b68dcb7eb6dbc83a2d148403c9bc3eba50"
+checksum = "12598812502ed0105f607f941c386f43d441e00148fce9dec3ca5ffb0bde9308"
dependencies = [
"arbitrary",
"crc32fast",
- "crossbeam-utils",
- "displaydoc",
"flate2",
"indexmap",
"memchr",
- "thiserror 2.0.12",
"time",
]
diff --git a/Cargo.toml b/Cargo.toml
index c4c2fdf..8c50718 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -85,11 +85,11 @@
vfs = { path = "./crates/vfs", version = "0.0.0" }
edition = { path = "./crates/edition", version = "0.0.0" }
-ra-ap-rustc_lexer = { version = "0.110", default-features = false }
-ra-ap-rustc_parse_format = { version = "0.110", default-features = false }
-ra-ap-rustc_index = { version = "0.110", default-features = false }
-ra-ap-rustc_abi = { version = "0.110", default-features = false }
-ra-ap-rustc_pattern_analysis = { version = "0.110", default-features = false }
+ra-ap-rustc_lexer = { version = "0.113", default-features = false }
+ra-ap-rustc_parse_format = { version = "0.113", default-features = false }
+ra-ap-rustc_index = { version = "0.113", default-features = false }
+ra-ap-rustc_abi = { version = "0.113", default-features = false }
+ra-ap-rustc_pattern_analysis = { version = "0.113", default-features = false }
# local crates that aren't published to crates.io. These should not have versions.
@@ -132,8 +132,8 @@
pulldown-cmark = { version = "0.9.6", default-features = false }
rayon = "1.10.0"
rowan = "=0.15.15"
-salsa = { version = "0.21.1", default-features = false, features = ["rayon","salsa_unstable"] }
-salsa-macros = "0.21.1"
+salsa = { version = "0.22.0", default-features = false, features = ["rayon","salsa_unstable"] }
+salsa-macros = "0.22.0"
semver = "1.0.26"
serde = { version = "1.0.219" }
serde_derive = { version = "1.0.219" }
diff --git a/crates/base-db/Cargo.toml b/crates/base-db/Cargo.toml
index e2e3253..3b423a8 100644
--- a/crates/base-db/Cargo.toml
+++ b/crates/base-db/Cargo.toml
@@ -21,6 +21,7 @@
triomphe.workspace = true
semver.workspace = true
tracing.workspace = true
+indexmap.workspace = true
# local deps
cfg.workspace = true
diff --git a/crates/base-db/src/input.rs b/crates/base-db/src/input.rs
index 9660e6e..7452381 100644
--- a/crates/base-db/src/input.rs
+++ b/crates/base-db/src/input.rs
@@ -14,7 +14,7 @@
use dashmap::mapref::entry::Entry;
use intern::Symbol;
use la_arena::{Arena, Idx, RawIdx};
-use rustc_hash::{FxHashMap, FxHashSet, FxHasher};
+use rustc_hash::{FxBuildHasher, FxHashMap, FxHashSet, FxHasher};
use salsa::{Durability, Setter};
use span::Edition;
use triomphe::Arc;
@@ -24,6 +24,8 @@
pub type ProcMacroPaths = FxHashMap<CrateBuilderId, Result<(String, AbsPathBuf), String>>;
+type FxIndexSet<T> = indexmap::IndexSet<T, FxBuildHasher>;
+
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct SourceRootId(pub u32);
@@ -393,21 +395,21 @@
pub type CratesIdMap = FxHashMap<CrateBuilderId, Crate>;
#[salsa_macros::input]
-#[derive(Debug)]
+#[derive(Debug, PartialOrd, Ord)]
pub struct Crate {
- #[return_ref]
+ #[returns(ref)]
pub data: BuiltCrateData,
/// Crate data that is not needed for analysis.
///
/// This is split into a separate field to increase incrementality.
- #[return_ref]
+ #[returns(ref)]
pub extra_data: ExtraCrateData,
// This is in `Arc` because it is shared for all crates in a workspace.
- #[return_ref]
+ #[returns(ref)]
pub workspace_data: Arc<CrateWorkspaceData>,
- #[return_ref]
+ #[returns(ref)]
pub cfg_options: CfgOptions,
- #[return_ref]
+ #[returns(ref)]
pub env: Env,
}
@@ -474,7 +476,9 @@
}
pub fn set_in_db(self, db: &mut dyn RootQueryDb) -> CratesIdMap {
- let mut all_crates = Vec::with_capacity(self.arena.len());
+ // For some reason in some repositories we have duplicate crates, so we use a set and not `Vec`.
+ // We use an `IndexSet` because the list needs to be topologically sorted.
+ let mut all_crates = FxIndexSet::with_capacity_and_hasher(self.arena.len(), FxBuildHasher);
let mut visited = FxHashMap::default();
let mut visited_root_files = FxHashSet::default();
@@ -494,9 +498,11 @@
);
}
- if **old_all_crates != *all_crates {
+ if old_all_crates.len() != all_crates.len()
+ || old_all_crates.iter().any(|&krate| !all_crates.contains(&krate))
+ {
db.set_all_crates_with_durability(
- Arc::new(all_crates.into_boxed_slice()),
+ Arc::new(Vec::from_iter(all_crates).into_boxed_slice()),
Durability::MEDIUM,
);
}
@@ -509,7 +515,7 @@
crates_map: &CratesMap,
visited: &mut FxHashMap<CrateBuilderId, Crate>,
visited_root_files: &mut FxHashSet<FileId>,
- all_crates: &mut Vec<Crate>,
+ all_crates: &mut FxIndexSet<Crate>,
source: CrateBuilderId,
) -> Crate {
if let Some(&crate_id) = visited.get(&source) {
@@ -597,7 +603,7 @@
input
}
};
- all_crates.push(crate_input);
+ all_crates.insert(crate_input);
visited.insert(source, crate_input);
crate_input
}
diff --git a/crates/base-db/src/lib.rs b/crates/base-db/src/lib.rs
index a67fbf7..4d4e6ca 100644
--- a/crates/base-db/src/lib.rs
+++ b/crates/base-db/src/lib.rs
@@ -32,6 +32,7 @@
macro_rules! impl_intern_key {
($id:ident, $loc:ident) => {
#[salsa_macros::interned(no_lifetime)]
+ #[derive(PartialOrd, Ord)]
pub struct $id {
pub loc: $loc,
}
@@ -165,6 +166,7 @@
}
#[salsa_macros::interned(no_lifetime, debug, constructor=from_span)]
+#[derive(PartialOrd, Ord)]
pub struct EditionedFileId {
pub editioned_file_id: span::EditionedFileId,
}
@@ -356,7 +358,7 @@
}
fn parse_errors(db: &dyn RootQueryDb, file_id: EditionedFileId) -> Option<&[SyntaxError]> {
- #[salsa_macros::tracked(return_ref)]
+ #[salsa_macros::tracked(returns(ref))]
fn parse_errors(db: &dyn RootQueryDb, file_id: EditionedFileId) -> Option<Box<[SyntaxError]>> {
let errors = db.parse(file_id).errors();
match &*errors {
diff --git a/crates/hir-def/src/db.rs b/crates/hir-def/src/db.rs
index 2cbdbe1..4a9a3b1 100644
--- a/crates/hir-def/src/db.rs
+++ b/crates/hir-def/src/db.rs
@@ -24,8 +24,8 @@
item_tree::{AttrOwner, ItemTree},
lang_item::{self, LangItem},
nameres::{
- DefMap, LocalDefMap,
assoc::{ImplItems, TraitItems},
+ crate_def_map,
diagnostics::DefDiagnostics,
},
signatures::{
@@ -111,16 +111,6 @@
#[salsa::invoke(ItemTree::block_item_tree_query)]
fn block_item_tree(&self, block_id: BlockId) -> Arc<ItemTree>;
- #[salsa::invoke(DefMap::crate_local_def_map_query)]
- fn crate_local_def_map(&self, krate: Crate) -> (Arc<DefMap>, Arc<LocalDefMap>);
-
- #[salsa::invoke(DefMap::crate_def_map_query)]
- fn crate_def_map(&self, krate: Crate) -> Arc<DefMap>;
-
- /// Computes the block-level `DefMap`.
- #[salsa::invoke(DefMap::block_def_map_query)]
- fn block_def_map(&self, block: BlockId) -> Arc<DefMap>;
-
/// Turns a MacroId into a MacroDefId, describing the macro's definition post name resolution.
#[salsa::invoke(macro_def)]
fn macro_def(&self, m: MacroId) -> MacroDefId;
@@ -363,7 +353,7 @@
db: &dyn DefDatabase,
krate: Crate,
) -> Arc<[(MacroCallId, EditionedFileId)]> {
- db.crate_def_map(krate)
+ crate_def_map(db, krate)
.modules
.values()
.flat_map(|m| m.scope.iter_macro_invoc())
diff --git a/crates/hir-def/src/expr_store.rs b/crates/hir-def/src/expr_store.rs
index e3775c4..f617c32 100644
--- a/crates/hir-def/src/expr_store.rs
+++ b/crates/hir-def/src/expr_store.rs
@@ -19,7 +19,6 @@
use smallvec::SmallVec;
use span::{Edition, SyntaxContext};
use syntax::{AstPtr, SyntaxNodePtr, ast};
-use triomphe::Arc;
use tt::TextRange;
use crate::{
@@ -30,7 +29,7 @@
Array, AsmOperand, Binding, BindingId, Expr, ExprId, ExprOrPatId, Label, LabelId, Pat,
PatId, RecordFieldPat, Statement,
},
- nameres::DefMap,
+ nameres::{DefMap, block_def_map},
type_ref::{LifetimeRef, LifetimeRefId, PathId, TypeRef, TypeRefId},
};
@@ -225,8 +224,8 @@
pub fn blocks<'a>(
&'a self,
db: &'a dyn DefDatabase,
- ) -> impl Iterator<Item = (BlockId, Arc<DefMap>)> + 'a {
- self.block_scopes.iter().map(move |&block| (block, db.block_def_map(block)))
+ ) -> impl Iterator<Item = (BlockId, &'a DefMap)> + 'a {
+ self.block_scopes.iter().map(move |&block| (block, block_def_map(db, block)))
}
pub fn walk_bindings_in_pat(&self, pat_id: PatId, mut f: impl FnMut(BindingId)) {
@@ -299,17 +298,16 @@
Expr::InlineAsm(it) => it.operands.iter().for_each(|(_, op)| match op {
AsmOperand::In { expr, .. }
| AsmOperand::Out { expr: Some(expr), .. }
- | AsmOperand::InOut { expr, .. } => f(*expr),
+ | AsmOperand::InOut { expr, .. }
+ | AsmOperand::Const(expr)
+ | AsmOperand::Label(expr) => f(*expr),
AsmOperand::SplitInOut { in_expr, out_expr, .. } => {
f(*in_expr);
if let Some(out_expr) = out_expr {
f(*out_expr);
}
}
- AsmOperand::Out { expr: None, .. }
- | AsmOperand::Const(_)
- | AsmOperand::Label(_)
- | AsmOperand::Sym(_) => (),
+ AsmOperand::Out { expr: None, .. } | AsmOperand::Sym(_) => (),
}),
Expr::If { condition, then_branch, else_branch } => {
f(*condition);
@@ -436,17 +434,16 @@
Expr::InlineAsm(it) => it.operands.iter().for_each(|(_, op)| match op {
AsmOperand::In { expr, .. }
| AsmOperand::Out { expr: Some(expr), .. }
- | AsmOperand::InOut { expr, .. } => f(*expr),
+ | AsmOperand::InOut { expr, .. }
+ | AsmOperand::Const(expr)
+ | AsmOperand::Label(expr) => f(*expr),
AsmOperand::SplitInOut { in_expr, out_expr, .. } => {
f(*in_expr);
if let Some(out_expr) = out_expr {
f(*out_expr);
}
}
- AsmOperand::Out { expr: None, .. }
- | AsmOperand::Const(_)
- | AsmOperand::Label(_)
- | AsmOperand::Sym(_) => (),
+ AsmOperand::Out { expr: None, .. } | AsmOperand::Sym(_) => (),
}),
Expr::If { condition, then_branch, else_branch } => {
f(*condition);
diff --git a/crates/hir-def/src/expr_store/lower.rs b/crates/hir-def/src/expr_store/lower.rs
index 50505d5..29871f5 100644
--- a/crates/hir-def/src/expr_store/lower.rs
+++ b/crates/hir-def/src/expr_store/lower.rs
@@ -56,7 +56,7 @@
item_scope::BuiltinShadowMode,
item_tree::FieldsShape,
lang_item::LangItem,
- nameres::{DefMap, LocalDefMap, MacroSubNs},
+ nameres::{DefMap, LocalDefMap, MacroSubNs, block_def_map},
type_ref::{
ArrayType, ConstRef, FnType, LifetimeRef, LifetimeRefId, Mutability, PathId, Rawness,
RefType, TraitBoundModifier, TraitRef, TypeBound, TypeRef, TypeRefId, UseArgRef,
@@ -436,8 +436,8 @@
db: &'db dyn DefDatabase,
cfg_options: &'db CfgOptions,
expander: Expander,
- def_map: Arc<DefMap>,
- local_def_map: Arc<LocalDefMap>,
+ def_map: &'db DefMap,
+ local_def_map: &'db LocalDefMap,
module: ModuleId,
pub store: ExpressionStoreBuilder,
pub(crate) source_map: ExpressionStoreSourceMap,
@@ -544,7 +544,7 @@
current_file_id: HirFileId,
) -> ExprCollector<'_> {
let (def_map, local_def_map) = module.local_def_map(db);
- let expander = Expander::new(db, current_file_id, &def_map);
+ let expander = Expander::new(db, current_file_id, def_map);
ExprCollector {
db,
cfg_options: module.krate().cfg_options(db),
@@ -1947,7 +1947,7 @@
let resolver = |path: &_| {
self.def_map
.resolve_path(
- &self.local_def_map,
+ self.local_def_map,
self.db,
module,
path,
@@ -2163,12 +2163,12 @@
};
let (module, def_map) =
- match block_id.map(|block_id| (self.db.block_def_map(block_id), block_id)) {
+ match block_id.map(|block_id| (block_def_map(self.db, block_id), block_id)) {
Some((def_map, block_id)) => {
self.store.block_scopes.push(block_id);
(def_map.module_id(DefMap::ROOT), def_map)
}
- None => (self.module, self.def_map.clone()),
+ None => (self.module, self.def_map),
};
let prev_def_map = mem::replace(&mut self.def_map, def_map);
let prev_local_module = mem::replace(&mut self.module, module);
@@ -2247,7 +2247,7 @@
// This could also be a single-segment path pattern. To
// decide that, we need to try resolving the name.
let (resolved, _) = self.def_map.resolve_path(
- &self.local_def_map,
+ self.local_def_map,
self.db,
self.module.local_id,
&name.clone().into(),
diff --git a/crates/hir-def/src/expr_store/lower/path.rs b/crates/hir-def/src/expr_store/lower/path.rs
index 629d1f2..be006c9 100644
--- a/crates/hir-def/src/expr_store/lower/path.rs
+++ b/crates/hir-def/src/expr_store/lower/path.rs
@@ -232,6 +232,14 @@
.with_borrow_mut(|map| map.extend(ast_segments.into_iter().zip(ast_segments_offset..)));
}
+ if let Some(last_segment_args @ Some(GenericArgs { has_self_type: true, .. })) =
+ generic_args.last_mut()
+ {
+ // Well-formed code cannot have `<T as Trait>` without an associated item after,
+ // and this causes panics in hir-ty lowering.
+ *last_segment_args = None;
+ }
+
let mod_path = Interned::new(ModPath::from_segments(kind, segments));
if type_anchor.is_none() && generic_args.is_empty() {
return Some(Path::BarePath(mod_path));
diff --git a/crates/hir-def/src/expr_store/lower/path/tests.rs b/crates/hir-def/src/expr_store/lower/path/tests.rs
index 337cb10..8fd81c7 100644
--- a/crates/hir-def/src/expr_store/lower/path/tests.rs
+++ b/crates/hir-def/src/expr_store/lower/path/tests.rs
@@ -4,7 +4,6 @@
use test_fixture::WithFixture;
use crate::{
- db::DefDatabase,
expr_store::{
ExpressionStore,
lower::{
@@ -14,13 +13,15 @@
path::Path,
pretty,
},
+ nameres::crate_def_map,
test_db::TestDB,
};
fn lower_path(path: ast::Path) -> (TestDB, ExpressionStore, Option<Path>) {
let (db, file_id) = TestDB::with_single_file("");
let krate = db.fetch_test_crate();
- let mut ctx = ExprCollector::new(&db, db.crate_def_map(krate).root_module_id(), file_id.into());
+ let mut ctx =
+ ExprCollector::new(&db, crate_def_map(&db, krate).root_module_id(), file_id.into());
let lowered_path = ctx.lower_path(path, &mut ExprCollector::impl_trait_allocator);
let store = ctx.store.finish();
(db, store, lowered_path)
diff --git a/crates/hir-def/src/expr_store/scope.rs b/crates/hir-def/src/expr_store/scope.rs
index 431ea9e..a46711c 100644
--- a/crates/hir-def/src/expr_store/scope.rs
+++ b/crates/hir-def/src/expr_store/scope.rs
@@ -324,11 +324,13 @@
use test_fixture::WithFixture;
use test_utils::{assert_eq_text, extract_offset};
- use crate::{FunctionId, ModuleDefId, db::DefDatabase, test_db::TestDB};
+ use crate::{
+ FunctionId, ModuleDefId, db::DefDatabase, nameres::crate_def_map, test_db::TestDB,
+ };
fn find_function(db: &TestDB, file_id: FileId) -> FunctionId {
let krate = db.test_crate();
- let crate_def_map = db.crate_def_map(krate);
+ let crate_def_map = crate_def_map(db, krate);
let module = crate_def_map.modules_for_file(db, file_id).next().unwrap();
let (_, def) = crate_def_map[module].scope.entries().next().unwrap();
diff --git a/crates/hir-def/src/expr_store/tests/body.rs b/crates/hir-def/src/expr_store/tests/body.rs
index d6645dc..29e249b 100644
--- a/crates/hir-def/src/expr_store/tests/body.rs
+++ b/crates/hir-def/src/expr_store/tests/body.rs
@@ -1,9 +1,10 @@
mod block;
-use crate::{DefWithBodyId, ModuleDefId, hir::MatchArm, test_db::TestDB};
+use crate::{DefWithBodyId, ModuleDefId, hir::MatchArm, nameres::crate_def_map, test_db::TestDB};
use expect_test::{Expect, expect};
use la_arena::RawIdx;
use test_fixture::WithFixture;
+use triomphe::Arc;
use super::super::*;
@@ -11,7 +12,7 @@
let db = TestDB::with_files(ra_fixture);
let krate = db.fetch_test_crate();
- let def_map = db.crate_def_map(krate);
+ let def_map = crate_def_map(&db, krate);
let mut fn_def = None;
'outer: for (_, module) in def_map.modules() {
for decl in module.scope.declarations() {
diff --git a/crates/hir-def/src/expr_store/tests/body/block.rs b/crates/hir-def/src/expr_store/tests/body/block.rs
index da3b65d..5f7b510 100644
--- a/crates/hir-def/src/expr_store/tests/body/block.rs
+++ b/crates/hir-def/src/expr_store/tests/body/block.rs
@@ -189,8 +189,8 @@
}
"#,
expect![[r#"
- BlockId(3801) in BlockRelativeModuleId { block: Some(BlockId(3800)), local_id: Idx::<ModuleData>(1) }
- BlockId(3800) in BlockRelativeModuleId { block: None, local_id: Idx::<ModuleData>(0) }
+ BlockId(3c01) in BlockRelativeModuleId { block: Some(BlockId(3c00)), local_id: Idx::<ModuleData>(1) }
+ BlockId(3c00) in BlockRelativeModuleId { block: None, local_id: Idx::<ModuleData>(0) }
crate scope
"#]],
);
diff --git a/crates/hir-def/src/expr_store/tests/signatures.rs b/crates/hir-def/src/expr_store/tests/signatures.rs
index 80561d6..efb558a 100644
--- a/crates/hir-def/src/expr_store/tests/signatures.rs
+++ b/crates/hir-def/src/expr_store/tests/signatures.rs
@@ -1,6 +1,7 @@
use crate::{
GenericDefId, ModuleDefId,
expr_store::pretty::{print_function, print_struct},
+ nameres::crate_def_map,
test_db::TestDB,
};
use expect_test::{Expect, expect};
@@ -12,7 +13,7 @@
let db = TestDB::with_files(ra_fixture);
let krate = db.fetch_test_crate();
- let def_map = db.crate_def_map(krate);
+ let def_map = crate_def_map(&db, krate);
let mut defs = vec![];
for (_, module) in def_map.modules() {
for decl in module.scope.declarations() {
diff --git a/crates/hir-def/src/find_path.rs b/crates/hir-def/src/find_path.rs
index 9d62d9c..bb75621 100644
--- a/crates/hir-def/src/find_path.rs
+++ b/crates/hir-def/src/find_path.rs
@@ -52,7 +52,7 @@
ignore_local_imports,
is_std_item: item_module.krate().data(db).origin.is_lang(),
from,
- from_def_map: &from.def_map(db),
+ from_def_map: from.def_map(db),
fuel: Cell::new(FIND_PATH_FUEL),
},
item,
@@ -691,7 +691,7 @@
let (def_map, local_def_map) = module.local_def_map(&db);
let resolved = def_map
.resolve_path(
- &local_def_map,
+ local_def_map,
&db,
module.local_id,
&mod_path,
diff --git a/crates/hir-def/src/hir/format_args.rs b/crates/hir-def/src/hir/format_args.rs
index f27a406..271484d 100644
--- a/crates/hir-def/src/hir/format_args.rs
+++ b/crates/hir-def/src/hir/format_args.rs
@@ -297,7 +297,8 @@
unfinished_literal.clear();
}
- let span = parser.arg_places.get(placeholder_index).and_then(|s| to_span(s.clone()));
+ let span =
+ parser.arg_places.get(placeholder_index).and_then(|s| to_span(s.clone()));
placeholder_index += 1;
let position_span = to_span(position_span);
diff --git a/crates/hir-def/src/import_map.rs b/crates/hir-def/src/import_map.rs
index db571f0..a6138fb 100644
--- a/crates/hir-def/src/import_map.rs
+++ b/crates/hir-def/src/import_map.rs
@@ -16,7 +16,7 @@
AssocItemId, AttrDefId, Complete, FxIndexMap, ModuleDefId, ModuleId, TraitId,
db::DefDatabase,
item_scope::{ImportOrExternCrate, ItemInNs},
- nameres::DefMap,
+ nameres::{DefMap, crate_def_map},
visibility::Visibility,
};
@@ -129,7 +129,7 @@
fn collect_import_map(db: &dyn DefDatabase, krate: Crate) -> ImportMapIndex {
let _p = tracing::info_span!("collect_import_map").entered();
- let def_map = db.crate_def_map(krate);
+ let def_map = crate_def_map(db, krate);
let mut map = FxIndexMap::default();
// We look only into modules that are public(ly reexported), starting with the crate root.
diff --git a/crates/hir-def/src/lang_item.rs b/crates/hir-def/src/lang_item.rs
index 51a833b..4ad4477 100644
--- a/crates/hir-def/src/lang_item.rs
+++ b/crates/hir-def/src/lang_item.rs
@@ -10,6 +10,7 @@
use crate::{
AdtId, AssocItemId, AttrDefId, Crate, EnumId, EnumVariantId, FunctionId, ImplId, ModuleDefId,
StaticId, StructId, TraitId, TypeAliasId, UnionId, db::DefDatabase, expr_store::path::Path,
+ nameres::crate_def_map,
};
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -84,13 +85,13 @@
}
/// Salsa query. This will look for lang items in a specific crate.
-#[salsa_macros::tracked(return_ref)]
+#[salsa_macros::tracked(returns(ref))]
pub fn crate_lang_items(db: &dyn DefDatabase, krate: Crate) -> Option<Box<LangItems>> {
let _p = tracing::info_span!("crate_lang_items_query").entered();
let mut lang_items = LangItems::default();
- let crate_def_map = db.crate_def_map(krate);
+ let crate_def_map = crate_def_map(db, krate);
for (_, module_data) in crate_def_map.modules() {
for impl_def in module_data.scope.impls() {
@@ -209,7 +210,7 @@
let mut traits = Vec::new();
- let crate_def_map = db.crate_def_map(krate);
+ let crate_def_map = crate_def_map(db, krate);
for (_, module_data) in crate_def_map.modules() {
for def in module_data.scope.declarations() {
diff --git a/crates/hir-def/src/lib.rs b/crates/hir-def/src/lib.rs
index 28011bd..b41ff02 100644
--- a/crates/hir-def/src/lib.rs
+++ b/crates/hir-def/src/lib.rs
@@ -92,7 +92,7 @@
Const, Enum, ExternCrate, Function, Impl, ItemTreeId, ItemTreeNode, Macro2, MacroRules,
Static, Struct, Trait, TraitAlias, TypeAlias, Union, Use, Variant,
},
- nameres::LocalDefMap,
+ nameres::{LocalDefMap, block_def_map, crate_def_map, crate_local_def_map},
signatures::VariantFields,
};
@@ -324,12 +324,13 @@
}
impl CrateRootModuleId {
- pub fn def_map(&self, db: &dyn DefDatabase) -> Arc<DefMap> {
- db.crate_def_map(self.krate)
+ pub fn def_map(self, db: &dyn DefDatabase) -> &DefMap {
+ crate_def_map(db, self.krate)
}
- pub(crate) fn local_def_map(&self, db: &dyn DefDatabase) -> (Arc<DefMap>, Arc<LocalDefMap>) {
- db.crate_local_def_map(self.krate)
+ pub(crate) fn local_def_map(self, db: &dyn DefDatabase) -> (&DefMap, &LocalDefMap) {
+ let def_map = crate_local_def_map(db, self.krate);
+ (def_map.def_map(db), def_map.local(db))
}
pub fn krate(self) -> Crate {
@@ -390,26 +391,29 @@
}
impl ModuleId {
- pub fn def_map(self, db: &dyn DefDatabase) -> Arc<DefMap> {
+ pub fn def_map(self, db: &dyn DefDatabase) -> &DefMap {
match self.block {
- Some(block) => db.block_def_map(block),
- None => db.crate_def_map(self.krate),
+ Some(block) => block_def_map(db, block),
+ None => crate_def_map(db, self.krate),
}
}
- pub(crate) fn local_def_map(self, db: &dyn DefDatabase) -> (Arc<DefMap>, Arc<LocalDefMap>) {
+ pub(crate) fn local_def_map(self, db: &dyn DefDatabase) -> (&DefMap, &LocalDefMap) {
match self.block {
- Some(block) => (db.block_def_map(block), self.only_local_def_map(db)),
- None => db.crate_local_def_map(self.krate),
+ Some(block) => (block_def_map(db, block), self.only_local_def_map(db)),
+ None => {
+ let def_map = crate_local_def_map(db, self.krate);
+ (def_map.def_map(db), def_map.local(db))
+ }
}
}
- pub(crate) fn only_local_def_map(self, db: &dyn DefDatabase) -> Arc<LocalDefMap> {
- db.crate_local_def_map(self.krate).1
+ pub(crate) fn only_local_def_map(self, db: &dyn DefDatabase) -> &LocalDefMap {
+ crate_local_def_map(db, self.krate).local(db)
}
- pub fn crate_def_map(self, db: &dyn DefDatabase) -> Arc<DefMap> {
- db.crate_def_map(self.krate)
+ pub fn crate_def_map(self, db: &dyn DefDatabase) -> &DefMap {
+ crate_def_map(db, self.krate)
}
pub fn krate(self) -> Crate {
@@ -701,6 +705,16 @@
// casting them, and somehow making the constructors private, which would be annoying.
impl_from!(FunctionId, ConstId, TypeAliasId for AssocItemId);
+impl From<AssocItemId> for ModuleDefId {
+ fn from(item: AssocItemId) -> Self {
+ match item {
+ AssocItemId::FunctionId(f) => f.into(),
+ AssocItemId::ConstId(c) => c.into(),
+ AssocItemId::TypeAliasId(t) => t.into(),
+ }
+ }
+}
+
#[derive(Debug, PartialOrd, Ord, Clone, Copy, PartialEq, Eq, Hash, salsa_macros::Supertype)]
pub enum GenericDefId {
AdtId(AdtId),
diff --git a/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs b/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs
index e21d141..293868d 100644
--- a/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs
+++ b/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs
@@ -454,13 +454,13 @@
#[rustc_builtin_macro]
macro_rules! concat {}
-fn main() { concat!("fo", "o", 0, r#""bar""#, "\n", false, '"', '\0'); }
+fn main() { concat!("fo", "o", 0, r#""bar""#, "\n", false, '"', -4, - 4, '\0'); }
"##,
expect![[r##"
#[rustc_builtin_macro]
macro_rules! concat {}
-fn main() { "foo0\"bar\"\nfalse\"\u{0}"; }
+fn main() { "foo0\"bar\"\nfalse\"-4-4\u{0}"; }
"##]],
);
}
@@ -510,24 +510,6 @@
}
#[test]
-fn test_concat_idents_expand() {
- check(
- r##"
-#[rustc_builtin_macro]
-macro_rules! concat_idents {}
-
-fn main() { concat_idents!(foo, bar); }
-"##,
- expect![[r##"
-#[rustc_builtin_macro]
-macro_rules! concat_idents {}
-
-fn main() { foobar; }
-"##]],
- );
-}
-
-#[test]
fn test_quote_string() {
check(
r##"
diff --git a/crates/hir-def/src/macro_expansion_tests/mod.rs b/crates/hir-def/src/macro_expansion_tests/mod.rs
index 800c96e..dc4334e 100644
--- a/crates/hir-def/src/macro_expansion_tests/mod.rs
+++ b/crates/hir-def/src/macro_expansion_tests/mod.rs
@@ -39,7 +39,7 @@
use crate::{
AdtId, Lookup, ModuleDefId,
db::DefDatabase,
- nameres::{DefMap, ModuleSource},
+ nameres::{DefMap, ModuleSource, crate_def_map},
src::HasSource,
test_db::TestDB,
tt::TopSubtree,
@@ -49,7 +49,7 @@
fn check_errors(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) {
let db = TestDB::with_files(ra_fixture);
let krate = db.fetch_test_crate();
- let def_map = db.crate_def_map(krate);
+ let def_map = crate_def_map(&db, krate);
let errors = def_map
.modules()
.flat_map(|module| module.1.scope.all_macro_calls())
@@ -113,7 +113,7 @@
let (body, sm) = db.body_with_source_map(body);
if let Some(it) =
- body.blocks(db).find_map(|block| resolve(db, &block.1, ast_id, ast_ptr))
+ body.blocks(db).find_map(|block| resolve(db, block.1, ast_id, ast_ptr))
{
return Some(it);
}
@@ -127,7 +127,7 @@
let db = TestDB::with_files_extra_proc_macros(ra_fixture, extra_proc_macros);
let krate = db.fetch_test_crate();
- let def_map = db.crate_def_map(krate);
+ let def_map = crate_def_map(&db, krate);
let local_id = DefMap::ROOT;
let source = def_map[local_id].definition_source(&db);
let source_file = match source.value {
@@ -142,7 +142,7 @@
let ast_id = db.ast_id_map(source.file_id).ast_id(&macro_call_node);
let ast_id = InFile::new(source.file_id, ast_id);
let ptr = InFile::new(source.file_id, AstPtr::new(&macro_call_node));
- let macro_call_id = resolve(&db, &def_map, ast_id, ptr)
+ let macro_call_id = resolve(&db, def_map, ast_id, ptr)
.unwrap_or_else(|| panic!("unable to find semantic macro call {macro_call_node}"));
let expansion_result = db.parse_macro_expansion(macro_call_id);
expansions.push((macro_call_node.clone(), expansion_result));
@@ -380,8 +380,4 @@
panic!("got invalid macro input: {:?}", parse.errors());
}
}
-
- fn eq_dyn(&self, other: &dyn ProcMacroExpander) -> bool {
- other.as_any().type_id() == std::any::TypeId::of::<Self>()
- }
}
diff --git a/crates/hir-def/src/nameres.rs b/crates/hir-def/src/nameres.rs
index fc66d8e..f337f83 100644
--- a/crates/hir-def/src/nameres.rs
+++ b/crates/hir-def/src/nameres.rs
@@ -112,6 +112,18 @@
extern_prelude: FxIndexMap<Name, (CrateRootModuleId, Option<ExternCrateId>)>,
}
+impl std::hash::Hash for LocalDefMap {
+ fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
+ let LocalDefMap { extern_prelude } = self;
+ extern_prelude.len().hash(state);
+ for (name, (crate_root, extern_crate)) in extern_prelude {
+ name.hash(state);
+ crate_root.hash(state);
+ extern_crate.hash(state);
+ }
+ }
+}
+
impl LocalDefMap {
pub(crate) const EMPTY: &Self =
&Self { extern_prelude: FxIndexMap::with_hasher(rustc_hash::FxBuildHasher) };
@@ -250,7 +262,7 @@
}
impl BlockRelativeModuleId {
- fn def_map(self, db: &dyn DefDatabase, krate: Crate) -> Arc<DefMap> {
+ fn def_map(self, db: &dyn DefDatabase, krate: Crate) -> &DefMap {
self.into_module(krate).def_map(db)
}
@@ -358,6 +370,87 @@
pub scope: ItemScope,
}
+#[inline]
+pub fn crate_def_map(db: &dyn DefDatabase, crate_id: Crate) -> &DefMap {
+ crate_local_def_map(db, crate_id).def_map(db)
+}
+
+#[allow(unused_lifetimes)]
+mod __ {
+ use super::*;
+ #[salsa_macros::tracked]
+ pub(crate) struct DefMapPair<'db> {
+ #[tracked]
+ #[returns(ref)]
+ pub(crate) def_map: DefMap,
+ #[returns(ref)]
+ pub(crate) local: LocalDefMap,
+ }
+}
+pub(crate) use __::DefMapPair;
+
+#[salsa_macros::tracked(returns(ref))]
+pub(crate) fn crate_local_def_map(db: &dyn DefDatabase, crate_id: Crate) -> DefMapPair<'_> {
+ let krate = crate_id.data(db);
+ let _p = tracing::info_span!(
+ "crate_def_map_query",
+ name=?crate_id
+ .extra_data(db)
+ .display_name
+ .as_ref()
+ .map(|it| it.crate_name().to_smolstr())
+ .unwrap_or_default()
+ )
+ .entered();
+
+ let module_data = ModuleData::new(
+ ModuleOrigin::CrateRoot { definition: krate.root_file_id(db) },
+ Visibility::Public,
+ );
+
+ let def_map =
+ DefMap::empty(crate_id, Arc::new(DefMapCrateData::new(krate.edition)), module_data, None);
+ let (def_map, local_def_map) = collector::collect_defs(
+ db,
+ def_map,
+ TreeId::new(krate.root_file_id(db).into(), None),
+ None,
+ );
+
+ DefMapPair::new(db, def_map, local_def_map)
+}
+
+#[salsa_macros::tracked(returns(ref))]
+pub fn block_def_map(db: &dyn DefDatabase, block_id: BlockId) -> DefMap {
+ let BlockLoc { ast_id, module } = block_id.lookup(db);
+
+ let visibility = Visibility::Module(
+ ModuleId { krate: module.krate, local_id: DefMap::ROOT, block: module.block },
+ VisibilityExplicitness::Implicit,
+ );
+ let module_data =
+ ModuleData::new(ModuleOrigin::BlockExpr { block: ast_id, id: block_id }, visibility);
+
+ let local_def_map = crate_local_def_map(db, module.krate);
+ let def_map = DefMap::empty(
+ module.krate,
+ local_def_map.def_map(db).data.clone(),
+ module_data,
+ Some(BlockInfo {
+ block: block_id,
+ parent: BlockRelativeModuleId { block: module.block, local_id: module.local_id },
+ }),
+ );
+
+ let (def_map, _) = collector::collect_defs(
+ db,
+ def_map,
+ TreeId::new(ast_id.file_id, Some(block_id)),
+ Some(local_def_map.local(db)),
+ );
+ def_map
+}
+
impl DefMap {
/// The module id of a crate or block root.
pub const ROOT: LocalModuleId = LocalModuleId::from_raw(la_arena::RawIdx::from_u32(0));
@@ -366,77 +459,6 @@
self.data.edition
}
- pub(crate) fn crate_def_map_query(db: &dyn DefDatabase, crate_id: Crate) -> Arc<DefMap> {
- db.crate_local_def_map(crate_id).0
- }
-
- pub(crate) fn crate_local_def_map_query(
- db: &dyn DefDatabase,
- crate_id: Crate,
- ) -> (Arc<DefMap>, Arc<LocalDefMap>) {
- let krate = crate_id.data(db);
- let _p = tracing::info_span!(
- "crate_def_map_query",
- name=?crate_id
- .extra_data(db)
- .display_name
- .as_ref()
- .map(|it| it.crate_name().to_smolstr())
- .unwrap_or_default()
- )
- .entered();
-
- let module_data = ModuleData::new(
- ModuleOrigin::CrateRoot { definition: krate.root_file_id(db) },
- Visibility::Public,
- );
-
- let def_map = DefMap::empty(
- crate_id,
- Arc::new(DefMapCrateData::new(krate.edition)),
- module_data,
- None,
- );
- let (def_map, local_def_map) = collector::collect_defs(
- db,
- def_map,
- TreeId::new(krate.root_file_id(db).into(), None),
- None,
- );
-
- (Arc::new(def_map), Arc::new(local_def_map))
- }
-
- pub(crate) fn block_def_map_query(db: &dyn DefDatabase, block_id: BlockId) -> Arc<DefMap> {
- let BlockLoc { ast_id, module } = block_id.lookup(db);
-
- let visibility = Visibility::Module(
- ModuleId { krate: module.krate, local_id: Self::ROOT, block: module.block },
- VisibilityExplicitness::Implicit,
- );
- let module_data =
- ModuleData::new(ModuleOrigin::BlockExpr { block: ast_id, id: block_id }, visibility);
-
- let (crate_map, crate_local_map) = db.crate_local_def_map(module.krate);
- let def_map = DefMap::empty(
- module.krate,
- crate_map.data.clone(),
- module_data,
- Some(BlockInfo {
- block: block_id,
- parent: BlockRelativeModuleId { block: module.block, local_id: module.local_id },
- }),
- );
-
- let (def_map, _) = collector::collect_defs(
- db,
- def_map,
- TreeId::new(ast_id.file_id, Some(block_id)),
- Some(crate_local_map),
- );
- Arc::new(def_map)
- }
-
fn empty(
krate: Crate,
crate_data: Arc<DefMapCrateData>,
@@ -595,7 +617,7 @@
go(&mut buf, db, current_map, "block scope", Self::ROOT);
buf.push('\n');
arc = block.parent.def_map(db, self.krate);
- current_map = &arc;
+ current_map = arc;
}
go(&mut buf, db, current_map, "crate", Self::ROOT);
return buf;
@@ -628,7 +650,7 @@
while let Some(block) = current_map.block {
format_to!(buf, "{:?} in {:?}\n", block.block, block.parent);
arc = block.parent.def_map(db, self.krate);
- current_map = &arc;
+ current_map = arc;
}
format_to!(buf, "crate scope\n");
@@ -708,7 +730,7 @@
let mut block = self.block;
while let Some(block_info) = block {
let parent = block_info.parent.def_map(db, self.krate);
- if let Some(it) = f(&parent, block_info.parent.local_id) {
+ if let Some(it) = f(parent, block_info.parent.local_id) {
return Some(it);
}
block = parent.block;
diff --git a/crates/hir-def/src/nameres/assoc.rs b/crates/hir-def/src/nameres/assoc.rs
index 448b908..86225d3 100644
--- a/crates/hir-def/src/nameres/assoc.rs
+++ b/crates/hir-def/src/nameres/assoc.rs
@@ -66,7 +66,16 @@
})
}
- pub fn attribute_calls(&self) -> impl Iterator<Item = (AstId<ast::Item>, MacroCallId)> + '_ {
+ pub fn assoc_item_by_name(&self, name: &Name) -> Option<AssocItemId> {
+ self.items.iter().find_map(|&(ref item_name, item)| match item {
+ AssocItemId::FunctionId(_) if item_name == name => Some(item),
+ AssocItemId::TypeAliasId(_) if item_name == name => Some(item),
+ AssocItemId::ConstId(_) if item_name == name => Some(item),
+ _ => None,
+ })
+ }
+
+ pub fn macro_calls(&self) -> impl Iterator<Item = (AstId<ast::Item>, MacroCallId)> + '_ {
self.macro_calls.iter().flat_map(|it| it.iter()).copied()
}
}
@@ -100,7 +109,7 @@
(Arc::new(ImplItems { items, macro_calls }), DefDiagnostics::new(diagnostics))
}
- pub fn attribute_calls(&self) -> impl Iterator<Item = (AstId<ast::Item>, MacroCallId)> + '_ {
+ pub fn macro_calls(&self) -> impl Iterator<Item = (AstId<ast::Item>, MacroCallId)> + '_ {
self.macro_calls.iter().flat_map(|it| it.iter()).copied()
}
}
@@ -108,8 +117,8 @@
struct AssocItemCollector<'a> {
db: &'a dyn DefDatabase,
module_id: ModuleId,
- def_map: Arc<DefMap>,
- local_def_map: Arc<LocalDefMap>,
+ def_map: &'a DefMap,
+ local_def_map: &'a LocalDefMap,
diagnostics: Vec<DefDiagnostic>,
container: ItemContainerId,
@@ -174,7 +183,7 @@
let ast_id_with_path = AstIdWithPath { path: attr.path.clone(), ast_id };
match self.def_map.resolve_attr_macro(
- &self.local_def_map,
+ self.local_def_map,
self.db,
self.module_id.local_id,
ast_id_with_path,
@@ -246,7 +255,7 @@
let resolver = |path: &_| {
self.def_map
.resolve_path(
- &self.local_def_map,
+ self.local_def_map,
self.db,
self.module_id.local_id,
path,
diff --git a/crates/hir-def/src/nameres/collector.rs b/crates/hir-def/src/nameres/collector.rs
index 8df0f09..350c97c 100644
--- a/crates/hir-def/src/nameres/collector.rs
+++ b/crates/hir-def/src/nameres/collector.rs
@@ -26,7 +26,7 @@
use triomphe::Arc;
use crate::{
- AdtId, AstId, AstIdWithPath, ConstLoc, CrateRootModuleId, EnumLoc, ExternBlockLoc,
+ AdtId, AssocItemId, AstId, AstIdWithPath, ConstLoc, CrateRootModuleId, EnumLoc, ExternBlockLoc,
ExternCrateId, ExternCrateLoc, FunctionId, FunctionLoc, ImplLoc, Intern, ItemContainerId,
LocalModuleId, Lookup, Macro2Id, Macro2Loc, MacroExpander, MacroId, MacroRulesId,
MacroRulesLoc, MacroRulesLocFlags, ModuleDefId, ModuleId, ProcMacroId, ProcMacroLoc, StaticLoc,
@@ -43,9 +43,10 @@
nameres::{
BuiltinShadowMode, DefMap, LocalDefMap, MacroSubNs, ModuleData, ModuleOrigin, ResolveMode,
attr_resolution::{attr_macro_as_call_id, derive_macro_as_call_id},
+ crate_def_map,
diagnostics::DefDiagnostic,
mod_resolution::ModDir,
- path_resolution::ReachedFixedPoint,
+ path_resolution::{ReachedFixedPoint, ResolvePathResult},
proc_macro::{ProcMacroDef, ProcMacroKind, parse_macro_name_and_helper_attrs},
sub_namespace_match,
},
@@ -61,7 +62,7 @@
db: &dyn DefDatabase,
def_map: DefMap,
tree_id: TreeId,
- crate_local_def_map: Option<Arc<LocalDefMap>>,
+ crate_local_def_map: Option<&LocalDefMap>,
) -> (DefMap, LocalDefMap) {
let krate = &def_map.krate.data(db);
let cfg_options = def_map.krate.cfg_options(db);
@@ -216,7 +217,7 @@
def_map: DefMap,
local_def_map: LocalDefMap,
/// Set only in case of blocks.
- crate_local_def_map: Option<Arc<LocalDefMap>>,
+ crate_local_def_map: Option<&'a LocalDefMap>,
// The dependencies of the current crate, including optional deps like `test`.
deps: FxHashMap<Name, BuiltDependency>,
glob_imports: FxHashMap<LocalModuleId, Vec<(LocalModuleId, Visibility, GlobId)>>,
@@ -533,7 +534,7 @@
);
let (per_ns, _) = self.def_map.resolve_path(
- self.crate_local_def_map.as_deref().unwrap_or(&self.local_def_map),
+ self.crate_local_def_map.unwrap_or(&self.local_def_map),
self.db,
DefMap::ROOT,
&path,
@@ -556,7 +557,7 @@
}
fn local_def_map(&mut self) -> &LocalDefMap {
- self.crate_local_def_map.as_deref().unwrap_or(&self.local_def_map)
+ self.crate_local_def_map.unwrap_or(&self.local_def_map)
}
/// Adds a definition of procedural macro `name` to the root module.
@@ -688,7 +689,7 @@
let vis = self
.def_map
.resolve_visibility(
- self.crate_local_def_map.as_deref().unwrap_or(&self.local_def_map),
+ self.crate_local_def_map.unwrap_or(&self.local_def_map),
self.db,
module_id,
vis,
@@ -731,7 +732,7 @@
names: Option<Vec<Name>>,
extern_crate: Option<ExternCrateId>,
) {
- let def_map = self.db.crate_def_map(krate);
+ let def_map = crate_def_map(self.db, krate);
// `#[macro_use]` brings macros into macro_use prelude. Yes, even non-`macro_rules!`
// macros.
let root_scope = &def_map[DefMap::ROOT].scope;
@@ -811,32 +812,35 @@
let _p = tracing::info_span!("resolve_import", import_path = %import.path.display(self.db, Edition::LATEST))
.entered();
tracing::debug!("resolving import: {:?} ({:?})", import, self.def_map.data.edition);
- let res = self.def_map.resolve_path_fp_with_macro(
- self.crate_local_def_map.as_deref().unwrap_or(&self.local_def_map),
- self.db,
- ResolveMode::Import,
- module_id,
- &import.path,
- BuiltinShadowMode::Module,
- None, // An import may resolve to any kind of macro.
- );
+ let ResolvePathResult { resolved_def, segment_index, reached_fixedpoint, prefix_info } =
+ self.def_map.resolve_path_fp_with_macro(
+ self.crate_local_def_map.unwrap_or(&self.local_def_map),
+ self.db,
+ ResolveMode::Import,
+ module_id,
+ &import.path,
+ BuiltinShadowMode::Module,
+ None, // An import may resolve to any kind of macro.
+ );
- let def = res.resolved_def;
- if res.reached_fixedpoint == ReachedFixedPoint::No || def.is_none() {
+ if reached_fixedpoint == ReachedFixedPoint::No
+ || resolved_def.is_none()
+ || segment_index.is_some()
+ {
return PartialResolvedImport::Unresolved;
}
- if res.prefix_info.differing_crate {
+ if prefix_info.differing_crate {
return PartialResolvedImport::Resolved(
- def.filter_visibility(|v| matches!(v, Visibility::Public)),
+ resolved_def.filter_visibility(|v| matches!(v, Visibility::Public)),
);
}
// Check whether all namespaces are resolved.
- if def.is_full() {
- PartialResolvedImport::Resolved(def)
+ if resolved_def.is_full() {
+ PartialResolvedImport::Resolved(resolved_def)
} else {
- PartialResolvedImport::Indeterminate(def)
+ PartialResolvedImport::Indeterminate(resolved_def)
}
}
@@ -849,7 +853,7 @@
let vis = self
.def_map
.resolve_visibility(
- self.crate_local_def_map.as_deref().unwrap_or(&self.local_def_map),
+ self.crate_local_def_map.unwrap_or(&self.local_def_map),
self.db,
module_id,
&directive.import.visibility,
@@ -986,6 +990,43 @@
Some(ImportOrExternCrate::Glob(glob)),
);
}
+ Some(ModuleDefId::TraitId(it)) => {
+ // FIXME: Implement this correctly
+ // We can't actually call `trait_items` here: if macro calls occur, they will call
+ // back into the def map that we might be computing right now, resulting in a cycle.
+ // To implement this properly, trait item collection needs to be done as part of
+ // def map collection...
+ let resolutions = if true {
+ vec![]
+ } else {
+ self.db
+ .trait_items(it)
+ .items
+ .iter()
+ .map(|&(ref name, variant)| {
+ let res = match variant {
+ AssocItemId::FunctionId(it) => {
+ PerNs::values(it.into(), vis, None)
+ }
+ AssocItemId::ConstId(it) => {
+ PerNs::values(it.into(), vis, None)
+ }
+ AssocItemId::TypeAliasId(it) => {
+ PerNs::types(it.into(), vis, None)
+ }
+ };
+ (Some(name.clone()), res)
+ })
+ .collect::<Vec<_>>()
+ };
+ self.update(
+ module_id,
+ &resolutions,
+ vis,
+ Some(ImportOrExternCrate::Glob(glob)),
+ );
+ }
Some(d) => {
tracing::debug!("glob import {:?} from non-module/enum {:?}", import, d);
}
@@ -1240,7 +1281,7 @@
};
let resolver = |path: &_| {
let resolved_res = self.def_map.resolve_path_fp_with_macro(
- self.crate_local_def_map.as_deref().unwrap_or(&self.local_def_map),
+ self.crate_local_def_map.unwrap_or(&self.local_def_map),
self.db,
ResolveMode::Other,
directive.module_id,
@@ -1307,7 +1348,7 @@
);
// Record its helper attributes.
if def_id.krate != self.def_map.krate {
- let def_map = self.db.crate_def_map(def_id.krate);
+ let def_map = crate_def_map(self.db, def_id.krate);
if let Some(helpers) = def_map.data.exported_derives.get(&def_id) {
self.def_map
.derive_helpers_in_scope
@@ -1553,7 +1594,7 @@
self.def_map.krate,
|path| {
let resolved_res = self.def_map.resolve_path_fp_with_macro(
- self.crate_local_def_map.as_deref().unwrap_or(&self.local_def_map),
+ self.crate_local_def_map.unwrap_or(&self.local_def_map),
self.db,
ResolveMode::Other,
directive.module_id,
@@ -1702,11 +1743,8 @@
let module = self.def_collector.def_map.module_id(module_id);
let def_map = &mut self.def_collector.def_map;
- let local_def_map = self
- .def_collector
- .crate_local_def_map
- .as_deref()
- .unwrap_or(&self.def_collector.local_def_map);
+ let local_def_map =
+ self.def_collector.crate_local_def_map.unwrap_or(&self.def_collector.local_def_map);
match item {
ModItem::Mod(m) => self.collect_module(m, &attrs),
@@ -2133,10 +2171,7 @@
let def_map = &mut self.def_collector.def_map;
let vis = def_map
.resolve_visibility(
- self.def_collector
- .crate_local_def_map
- .as_deref()
- .unwrap_or(&self.def_collector.local_def_map),
+ self.def_collector.crate_local_def_map.unwrap_or(&self.def_collector.local_def_map),
self.def_collector.db,
self.module_id,
visibility,
diff --git a/crates/hir-def/src/nameres/path_resolution.rs b/crates/hir-def/src/nameres/path_resolution.rs
index a49155d..74ce33a 100644
--- a/crates/hir-def/src/nameres/path_resolution.rs
+++ b/crates/hir-def/src/nameres/path_resolution.rs
@@ -17,14 +17,17 @@
name::Name,
};
use span::Edition;
-use triomphe::Arc;
+use stdx::TupleExt;
use crate::{
AdtId, LocalModuleId, ModuleDefId,
db::DefDatabase,
item_scope::{BUILTIN_SCOPE, ImportOrExternCrate},
item_tree::FieldsShape,
- nameres::{BlockInfo, BuiltinShadowMode, DefMap, LocalDefMap, MacroSubNs, sub_namespace_match},
+ nameres::{
+ BlockInfo, BuiltinShadowMode, DefMap, LocalDefMap, MacroSubNs, crate_def_map,
+ sub_namespace_match,
+ },
per_ns::PerNs,
visibility::{RawVisibility, Visibility},
};
@@ -44,6 +47,7 @@
#[derive(Debug, Clone)]
pub(super) struct ResolvePathResult {
pub(super) resolved_def: PerNs,
+ /// The index of the last resolved segment, or `None` if the full path has been resolved.
pub(super) segment_index: Option<usize>,
pub(super) reached_fixedpoint: ReachedFixedPoint,
pub(super) prefix_info: ResolvePathResultPrefixInfo,
@@ -173,7 +177,6 @@
return result;
}
- let mut arc;
let mut current_map = self;
let mut merge = |new: ResolvePathResult| {
@@ -195,8 +198,7 @@
Some(block) if original_module == Self::ROOT => {
// Block modules "inherit" names from its parent module.
original_module = block.parent.local_id;
- arc = block.parent.def_map(db, current_map.krate);
- current_map = &arc;
+ current_map = block.parent.def_map(db, current_map.krate);
}
// Proper (non-block) modules, including those in block `DefMap`s, don't.
_ => {
@@ -204,8 +206,7 @@
// A module inside a block. Do not resolve items declared in upper blocks, but we do need to get
// the prelude items (which are not inserted into blocks because they can be overridden there).
original_module = Self::ROOT;
- arc = db.crate_def_map(self.krate);
- current_map = &arc;
+ current_map = crate_def_map(db, self.krate);
let new = current_map.resolve_path_fp_in_all_preludes(
local_def_map,
@@ -253,7 +254,7 @@
cov_mark::hit!(macro_dollar_crate_self);
PerNs::types(self.crate_root().into(), Visibility::Public, None)
} else {
- let def_map = db.crate_def_map(krate);
+ let def_map = crate_def_map(db, krate);
let module = def_map.module_id(Self::ROOT);
cov_mark::hit!(macro_dollar_crate_other);
PerNs::types(module.into(), Visibility::Public, None)
@@ -312,7 +313,7 @@
// Adjust `local_id` to `self`, i.e. the nearest non-block module.
if def_map.module_id(local_id).is_block_module() {
(ext, local_id) = adjust_to_nearest_non_block_module(db, def_map, local_id);
- def_map = &ext;
+ def_map = ext;
}
// Go up the module tree but skip block modules as `super` always refers to the
@@ -325,7 +326,7 @@
if def_map.module_id(local_id).is_block_module() {
(ext, local_id) =
adjust_to_nearest_non_block_module(db, def_map, local_id);
- def_map = &ext;
+ def_map = ext;
}
} else {
stdx::always!(def_map.block.is_none());
@@ -364,7 +365,15 @@
},
};
- self.resolve_remaining_segments(segments, curr_per_ns, path, db, shadow, original_module)
+ self.resolve_remaining_segments(
+ db,
+ mode,
+ segments,
+ curr_per_ns,
+ path,
+ shadow,
+ original_module,
+ )
}
/// Resolves a path only in the preludes, without accounting for item scopes.
@@ -413,7 +422,15 @@
}
};
- self.resolve_remaining_segments(segments, curr_per_ns, path, db, shadow, original_module)
+ self.resolve_remaining_segments(
+ db,
+ mode,
+ segments,
+ curr_per_ns,
+ path,
+ shadow,
+ original_module,
+ )
}
/// 2018-style absolute path -- only extern prelude
@@ -441,10 +458,11 @@
fn resolve_remaining_segments<'a>(
&self,
+ db: &dyn DefDatabase,
+ mode: ResolveMode,
mut segments: impl Iterator<Item = (usize, &'a Name)>,
mut curr_per_ns: PerNs,
path: &ModPath,
- db: &dyn DefDatabase,
shadow: BuiltinShadowMode,
original_module: LocalModuleId,
) -> ResolvePathResult {
@@ -465,6 +483,7 @@
curr_per_ns = match curr.def {
ModuleDefId::ModuleId(module) => {
if module.krate != self.krate {
+ // FIXME: Inefficient
let path = ModPath::from_segments(
PathKind::SELF,
path.segments()[i..].iter().cloned(),
@@ -478,7 +497,7 @@
let resolution = defp_map.resolve_path_fp_with_macro(
LocalDefMap::EMPTY,
db,
- ResolveMode::Other,
+ mode,
module.local_id,
&path,
shadow,
@@ -553,6 +572,44 @@
),
};
}
+ def @ ModuleDefId::TraitId(t) if mode == ResolveMode::Import => {
+ // FIXME: Implement this correctly
+ // We can't actually call `trait_items` here: if macro calls occur, they will call
+ // back into the def map that we might be computing right now, resulting in a cycle.
+ // To implement this properly, trait item collection needs to be done as part of
+ // def map collection...
+ let item =
+ if true { None } else { db.trait_items(t).assoc_item_by_name(segment) };
+ return match item {
+ Some(item) => ResolvePathResult::new(
+ match item {
+ crate::AssocItemId::FunctionId(function_id) => PerNs::values(
+ function_id.into(),
+ curr.vis,
+ curr.import.and_then(|it| it.import_or_glob()),
+ ),
+ crate::AssocItemId::ConstId(const_id) => PerNs::values(
+ const_id.into(),
+ curr.vis,
+ curr.import.and_then(|it| it.import_or_glob()),
+ ),
+ crate::AssocItemId::TypeAliasId(type_alias_id) => {
+ PerNs::types(type_alias_id.into(), curr.vis, curr.import)
+ }
+ },
+ ReachedFixedPoint::Yes,
+ segments.next().map(TupleExt::head),
+ ResolvePathResultPrefixInfo::default(),
+ ),
+ None => ResolvePathResult::new(
+ PerNs::types(def, curr.vis, curr.import),
+ ReachedFixedPoint::Yes,
+ Some(i),
+ ResolvePathResultPrefixInfo::default(),
+ ),
+ };
+ }
s => {
// could be an inherent method call in UFCS form
// (`Struct::method`), or some other kind of associated item
@@ -715,7 +772,7 @@
} else {
// Extend lifetime
keep = prelude.def_map(db);
- &keep
+ keep
};
def_map[prelude.local_id].scope.get(name)
} else {
@@ -725,25 +782,23 @@
}
/// Given a block module, returns its nearest non-block module and the `DefMap` it belongs to.
-fn adjust_to_nearest_non_block_module(
- db: &dyn DefDatabase,
- def_map: &DefMap,
+fn adjust_to_nearest_non_block_module<'db>(
+ db: &'db dyn DefDatabase,
+ def_map: &'db DefMap,
mut local_id: LocalModuleId,
-) -> (Arc<DefMap>, LocalModuleId) {
+) -> (&'db DefMap, LocalModuleId) {
// INVARIANT: `local_id` in `def_map` must be a block module.
stdx::always!(def_map.module_id(local_id).is_block_module());
- let mut ext;
// This needs to be a local variable due to our mighty lifetime.
let mut def_map = def_map;
loop {
let BlockInfo { parent, .. } = def_map.block.expect("block module without parent module");
- ext = parent.def_map(db, def_map.krate);
- def_map = &ext;
+ def_map = parent.def_map(db, def_map.krate);
local_id = parent.local_id;
if !parent.is_block_module() {
- return (ext, local_id);
+ return (def_map, local_id);
}
}
}
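
With the def map queries in `path_resolution.rs` now handing out `&DefMap` tied to the database lifetime, `adjust_to_nearest_non_block_module` can walk the parent chain by reassigning a reference instead of keeping an `Arc` alive in a local. A self-contained sketch of that borrowing pattern, with invented `Db`/`Node` types standing in for the database and `DefMap`:

    struct Db { nodes: Vec<Node> }
    struct Node { parent: Option<usize>, is_block: bool }

    // Each lookup borrows from `db`, not from the previous node, so the loop can
    // simply reassign the reference; no temporary is needed to extend a lifetime.
    fn nearest_non_block<'db>(db: &'db Db, mut node: &'db Node) -> &'db Node {
        loop {
            let parent_idx = match node.parent {
                Some(idx) => idx,
                None => return node,
            };
            node = &db.nodes[parent_idx];
            if !node.is_block {
                return node;
            }
        }
    }

    fn main() {
        let db = Db {
            nodes: vec![
                Node { parent: None, is_block: false },   // crate root
                Node { parent: Some(0), is_block: true }, // block module
                Node { parent: Some(1), is_block: true }, // nested block
            ],
        };
        let found = nearest_non_block(&db, &db.nodes[2]);
        assert!(!found.is_block);
    }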
diff --git a/crates/hir-def/src/nameres/tests.rs b/crates/hir-def/src/nameres/tests.rs
index 3fd095a..4a7974c 100644
--- a/crates/hir-def/src/nameres/tests.rs
+++ b/crates/hir-def/src/nameres/tests.rs
@@ -7,20 +7,25 @@
use base_db::RootQueryDb;
use expect_test::{Expect, expect};
use test_fixture::WithFixture;
-use triomphe::Arc;
-use crate::{db::DefDatabase, nameres::DefMap, test_db::TestDB};
+use crate::{
+ nameres::{DefMap, crate_def_map},
+ test_db::TestDB,
+};
-fn compute_crate_def_map(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> Arc<DefMap> {
+fn compute_crate_def_map(
+ #[rust_analyzer::rust_fixture] ra_fixture: &str,
+ cb: impl FnOnce(&DefMap),
+) {
let db = TestDB::with_files(ra_fixture);
let krate = db.fetch_test_crate();
- db.crate_def_map(krate)
+ cb(crate_def_map(&db, krate));
}
fn render_crate_def_map(#[rust_analyzer::rust_fixture] ra_fixture: &str) -> String {
let db = TestDB::with_files(ra_fixture);
let krate = db.fetch_test_crate();
- db.crate_def_map(krate).dump(&db)
+ crate_def_map(&db, krate).dump(&db)
}
fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) {
diff --git a/crates/hir-def/src/nameres/tests/incremental.rs b/crates/hir-def/src/nameres/tests/incremental.rs
index 179a9c8..948e8be 100644
--- a/crates/hir-def/src/nameres/tests/incremental.rs
+++ b/crates/hir-def/src/nameres/tests/incremental.rs
@@ -7,24 +7,37 @@
use test_fixture::WithFixture;
use triomphe::Arc;
-use crate::{AdtId, ModuleDefId, db::DefDatabase, nameres::tests::TestDB};
+use crate::{
+ AdtId, ModuleDefId,
+ db::DefDatabase,
+ nameres::{crate_def_map, tests::TestDB},
+};
-fn check_def_map_is_not_recomputed(ra_fixture_initial: &str, ra_fixture_change: &str) {
+fn check_def_map_is_not_recomputed(
+ #[rust_analyzer::rust_fixture] ra_fixture_initial: &str,
+ #[rust_analyzer::rust_fixture] ra_fixture_change: &str,
+) {
let (mut db, pos) = TestDB::with_position(ra_fixture_initial);
let krate = db.fetch_test_crate();
{
let events = db.log_executed(|| {
- db.crate_def_map(krate);
+ crate_def_map(&db, krate);
});
- assert!(format!("{events:?}").contains("crate_def_map"), "{events:#?}")
+ assert!(
+ format!("{events:?}").contains("crate_local_def_map"),
+ "no crate def map computed:\n{events:#?}",
+ )
}
db.set_file_text(pos.file_id.file_id(&db), ra_fixture_change);
{
let events = db.log_executed(|| {
- db.crate_def_map(krate);
+ crate_def_map(&db, krate);
});
- assert!(!format!("{events:?}").contains("crate_def_map"), "{events:#?}")
+ assert!(
+ !format!("{events:?}").contains("crate_local_def_map"),
+ "crate def map invalidated:\n{events:#?}",
+ )
}
}
@@ -44,7 +57,7 @@
);
for &krate in db.all_crates().iter() {
- db.crate_def_map(krate);
+ crate_def_map(&db, krate);
}
let all_crates_before = db.all_crates();
@@ -94,11 +107,11 @@
let events = db.log_executed(|| {
for &krate in db.all_crates().iter() {
- db.crate_def_map(krate);
+ crate_def_map(&db, krate);
}
});
let invalidated_def_maps =
- events.iter().filter(|event| event.contains("crate_def_map")).count();
+ events.iter().filter(|event| event.contains("crate_local_def_map")).count();
assert_eq!(invalidated_def_maps, 1, "{events:#?}")
}
@@ -330,7 +343,7 @@
let krate = db.test_crate();
{
let events = db.log_executed(|| {
- let crate_def_map = db.crate_def_map(krate);
+ let crate_def_map = crate_def_map(&db, krate);
let (_, module_data) = crate_def_map.modules.iter().last().unwrap();
assert_eq!(module_data.scope.resolutions().count(), 4);
});
@@ -352,7 +365,7 @@
{
let events = db.log_executed(|| {
- let crate_def_map = db.crate_def_map(krate);
+ let crate_def_map = crate_def_map(&db, krate);
let (_, module_data) = crate_def_map.modules.iter().last().unwrap();
assert_eq!(module_data.scope.resolutions().count(), 4);
});
@@ -403,7 +416,7 @@
{
let events = db.log_executed(|| {
- let crate_def_map = db.crate_def_map(krate);
+ let crate_def_map = crate_def_map(&db, krate);
let (_, module_data) = crate_def_map.modules.iter().last().unwrap();
assert_eq!(module_data.scope.resolutions().count(), 8);
assert_eq!(module_data.scope.impls().count(), 1);
diff --git a/crates/hir-def/src/nameres/tests/macros.rs b/crates/hir-def/src/nameres/tests/macros.rs
index 5f8a015..3cba88e 100644
--- a/crates/hir-def/src/nameres/tests/macros.rs
+++ b/crates/hir-def/src/nameres/tests/macros.rs
@@ -736,7 +736,7 @@
#[test]
fn macro_dollar_crate_is_correct_in_derive_meta() {
- let map = compute_crate_def_map(
+ compute_crate_def_map(
r#"
//- minicore: derive, clone
//- /main.rs crate:main deps:lib
@@ -753,13 +753,13 @@
pub use core::clone::Clone;
"#,
+ |map| assert_eq!(map.modules[DefMap::ROOT].scope.impls().len(), 1),
);
- assert_eq!(map.modules[DefMap::ROOT].scope.impls().len(), 1);
}
#[test]
fn expand_derive() {
- let map = compute_crate_def_map(
+ compute_crate_def_map(
r#"
//- /main.rs crate:main deps:core
use core::Copy;
@@ -775,8 +775,8 @@
#[rustc_builtin_macro]
pub macro Clone {}
"#,
+ |map| assert_eq!(map.modules[DefMap::ROOT].scope.impls().len(), 2),
);
- assert_eq!(map.modules[DefMap::ROOT].scope.impls().len(), 2);
}
#[test]
@@ -803,7 +803,7 @@
fn builtin_derive_with_unresolved_attributes_fall_back() {
// Tests that we still resolve derives after ignoring an unresolved attribute.
cov_mark::check!(unresolved_attribute_fallback);
- let map = compute_crate_def_map(
+ compute_crate_def_map(
r#"
//- /main.rs crate:main deps:core
use core::{Clone, derive};
@@ -818,8 +818,8 @@
#[rustc_builtin_macro]
pub macro Clone {}
"#,
+ |map| assert_eq!(map.modules[DefMap::ROOT].scope.impls().len(), 1),
);
- assert_eq!(map.modules[DefMap::ROOT].scope.impls().len(), 1);
}
#[test]
@@ -1096,7 +1096,7 @@
"#,
);
let krate = *db.all_crates().last().expect("no crate graph present");
- let def_map = db.crate_def_map(krate);
+ let def_map = crate_def_map(&db, krate);
assert_eq!(def_map.data.exported_derives.len(), 1);
match def_map.data.exported_derives.values().next() {
@@ -1446,7 +1446,7 @@
"#,
);
let krate = *db.all_crates().last().expect("no crate graph present");
- let def_map = db.crate_def_map(krate);
+ let def_map = crate_def_map(&db, krate);
let root_module = &def_map[DefMap::ROOT].scope;
assert!(
@@ -1544,7 +1544,7 @@
#[test]
fn macro_sub_namespace() {
- let map = compute_crate_def_map(
+ compute_crate_def_map(
r#"
//- minicore: derive, clone
macro_rules! Clone { () => {} }
@@ -1553,8 +1553,8 @@
#[derive(Clone)]
struct S;
"#,
+ |map| assert_eq!(map.modules[DefMap::ROOT].scope.impls().len(), 1),
);
- assert_eq!(map.modules[DefMap::ROOT].scope.impls().len(), 1);
}
#[test]
diff --git a/crates/hir-def/src/nameres/tests/mod_resolution.rs b/crates/hir-def/src/nameres/tests/mod_resolution.rs
index 071b55c..9c97e42 100644
--- a/crates/hir-def/src/nameres/tests/mod_resolution.rs
+++ b/crates/hir-def/src/nameres/tests/mod_resolution.rs
@@ -839,6 +839,7 @@
#[path = "./foo.rs"]
mod foo;
"#,
+ |_| (),
);
compute_crate_def_map(
@@ -852,6 +853,7 @@
#[path = "./foo.rs"]
mod foo;
"#,
+ |_| (),
);
}
@@ -894,3 +896,149 @@
"#]],
)
}
+
+#[test]
+fn invalid_imports() {
+ check(
+ r#"
+//- /main.rs
+mod module;
+
+use self::module::S::new;
+use self::module::unresolved;
+use self::module::C::const_based;
+use self::module::Enum::Variant::NoAssoc;
+
+//- /module.rs
+pub struct S;
+impl S {
+ pub fn new() {}
+}
+pub const C: () = ();
+pub enum Enum {
+ Variant,
+}
+ "#,
+ expect![[r#"
+ crate
+ NoAssoc: _
+ const_based: _
+ module: t
+ new: _
+ unresolved: _
+
+ crate::module
+ C: v
+ Enum: t
+ S: t v
+ "#]],
+ );
+}
+
+#[test]
+fn trait_item_imports_same_crate() {
+ check(
+ r#"
+//- /main.rs
+mod module;
+
+use self::module::Trait::{AssocType, ASSOC_CONST, MACRO_CONST, method};
+
+//- /module.rs
+macro_rules! m {
+ ($name:ident) => { const $name: () = (); };
+}
+pub trait Trait {
+ type AssocType;
+ const ASSOC_CONST: ();
+ fn method(&self);
+ m!(MACRO_CONST);
+}
+ "#,
+ expect![[r#"
+ crate
+ ASSOC_CONST: _
+ AssocType: _
+ MACRO_CONST: _
+ method: _
+ module: t
+
+ crate::module
+ Trait: t
+ "#]],
+ );
+ check(
+ r#"
+//- /main.rs
+mod module;
+
+use self::module::Trait::*;
+
+//- /module.rs
+macro_rules! m {
+ ($name:ident) => { const $name: () = (); };
+}
+pub trait Trait {
+ type AssocType;
+ const ASSOC_CONST: ();
+ fn method(&self);
+ m!(MACRO_CONST);
+}
+ "#,
+ expect![[r#"
+ crate
+ module: t
+
+ crate::module
+ Trait: t
+ "#]],
+ );
+}
+
+#[test]
+fn trait_item_imports_differing_crate() {
+ check(
+ r#"
+//- /main.rs deps:lib crate:main
+use lib::Trait::{AssocType, ASSOC_CONST, MACRO_CONST, method};
+
+//- /lib.rs crate:lib
+macro_rules! m {
+ ($name:ident) => { const $name: () = (); };
+}
+pub trait Trait {
+ type AssocType;
+ const ASSOC_CONST: ();
+ fn method(&self);
+ m!(MACRO_CONST);
+}
+ "#,
+ expect![[r#"
+ crate
+ ASSOC_CONST: _
+ AssocType: _
+ MACRO_CONST: _
+ method: _
+ "#]],
+ );
+ check(
+ r#"
+//- /main.rs deps:lib crate:main
+use lib::Trait::*;
+
+//- /lib.rs crate:lib
+macro_rules! m {
+ ($name:ident) => { const $name: () = (); };
+}
+pub trait Trait {
+ type AssocType;
+ const ASSOC_CONST: ();
+ fn method(&self);
+ m!(MACRO_CONST);
+}
+ "#,
+ expect![[r#"
+ crate
+ "#]],
+ );
+}
diff --git a/crates/hir-def/src/resolver.rs b/crates/hir-def/src/resolver.rs
index 8a8d170..16988dd 100644
--- a/crates/hir-def/src/resolver.rs
+++ b/crates/hir-def/src/resolver.rs
@@ -34,30 +34,30 @@
item_scope::{BUILTIN_SCOPE, BuiltinShadowMode, ImportOrExternCrate, ImportOrGlob, ItemScope},
item_tree::ImportAlias,
lang_item::LangItemTarget,
- nameres::{DefMap, LocalDefMap, MacroSubNs, ResolvePathResultPrefixInfo},
+ nameres::{DefMap, LocalDefMap, MacroSubNs, ResolvePathResultPrefixInfo, block_def_map},
per_ns::PerNs,
type_ref::LifetimeRef,
visibility::{RawVisibility, Visibility},
};
#[derive(Debug, Clone)]
-pub struct Resolver {
+pub struct Resolver<'db> {
/// The stack of scopes, where the inner-most scope is the last item.
///
/// When using, you generally want to process the scopes in reverse order,
/// there's `scopes` *method* for that.
- scopes: Vec<Scope>,
- module_scope: ModuleItemMap,
+ scopes: Vec<Scope<'db>>,
+ module_scope: ModuleItemMap<'db>,
}
#[derive(Clone)]
-struct ModuleItemMap {
- def_map: Arc<DefMap>,
- local_def_map: Arc<LocalDefMap>,
+struct ModuleItemMap<'db> {
+ def_map: &'db DefMap,
+ local_def_map: &'db LocalDefMap,
module_id: LocalModuleId,
}
-impl fmt::Debug for ModuleItemMap {
+impl fmt::Debug for ModuleItemMap<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("ModuleItemMap").field("module_id", &self.module_id).finish()
}
@@ -80,9 +80,9 @@
}
#[derive(Debug, Clone)]
-enum Scope {
+enum Scope<'db> {
/// All the items and imported names of a module
- BlockScope(ModuleItemMap),
+ BlockScope(ModuleItemMap<'db>),
/// Brings the generic parameters of an item into scope as well as the `Self` type alias /
/// generic for ADTs and impls.
GenericParams { def: GenericDefId, params: Arc<GenericParams> },
@@ -133,7 +133,7 @@
LifetimeParam(LifetimeParamId),
}
-impl Resolver {
+impl<'db> Resolver<'db> {
/// Resolve known trait from std, like `std::futures::Future`
pub fn resolve_known_trait(&self, db: &dyn DefDatabase, path: &ModPath) -> Option<TraitId> {
let res = self.resolve_module_path(db, path, BuiltinShadowMode::Other).take_types()?;
@@ -580,7 +580,7 @@
for scope in self.scopes() {
scope.process_names(&mut res, db);
}
- let ModuleItemMap { ref def_map, module_id, ref local_def_map } = self.module_scope;
+ let ModuleItemMap { def_map, module_id, local_def_map } = self.module_scope;
// FIXME: should we provide `self` here?
// f(
// Name::self_param(),
@@ -842,14 +842,14 @@
#[must_use]
pub fn update_to_inner_scope(
&mut self,
- db: &dyn DefDatabase,
+ db: &'db dyn DefDatabase,
owner: DefWithBodyId,
expr_id: ExprId,
) -> UpdateGuard {
#[inline(always)]
- fn append_expr_scope(
- db: &dyn DefDatabase,
- resolver: &mut Resolver,
+ fn append_expr_scope<'db>(
+ db: &'db dyn DefDatabase,
+ resolver: &mut Resolver<'db>,
owner: DefWithBodyId,
expr_scopes: &Arc<ExprScopes>,
scope_id: ScopeId,
@@ -863,7 +863,7 @@
scope_id,
}));
if let Some(block) = expr_scopes.block(scope_id) {
- let def_map = db.block_def_map(block);
+ let def_map = block_def_map(db, block);
let local_def_map = block.lookup(db).module.only_local_def_map(db);
resolver.scopes.push(Scope::BlockScope(ModuleItemMap {
def_map,
@@ -945,8 +945,8 @@
pub struct UpdateGuard(usize);
-impl Resolver {
- fn scopes(&self) -> impl Iterator<Item = &Scope> {
+impl<'db> Resolver<'db> {
+ fn scopes(&self) -> impl Iterator<Item = &Scope<'db>> {
self.scopes.iter().rev()
}
@@ -970,12 +970,12 @@
fn item_scope_(&self) -> (&DefMap, &LocalDefMap, LocalModuleId) {
self.scopes()
.find_map(|scope| match scope {
- Scope::BlockScope(m) => Some((&*m.def_map, &*m.local_def_map, m.module_id)),
+ Scope::BlockScope(m) => Some((m.def_map, m.local_def_map, m.module_id)),
_ => None,
})
.unwrap_or((
- &self.module_scope.def_map,
- &self.module_scope.local_def_map,
+ self.module_scope.def_map,
+ self.module_scope.local_def_map,
self.module_scope.module_id,
))
}
@@ -992,8 +992,8 @@
Label(LabelId),
}
-impl Scope {
- fn process_names(&self, acc: &mut ScopeNames, db: &dyn DefDatabase) {
+impl<'db> Scope<'db> {
+ fn process_names(&self, acc: &mut ScopeNames, db: &'db dyn DefDatabase) {
match self {
Scope::BlockScope(m) => {
m.def_map[m.module_id].scope.entries().for_each(|(name, def)| {
@@ -1047,7 +1047,11 @@
}
}
-pub fn resolver_for_expr(db: &dyn DefDatabase, owner: DefWithBodyId, expr_id: ExprId) -> Resolver {
+pub fn resolver_for_expr(
+ db: &dyn DefDatabase,
+ owner: DefWithBodyId,
+ expr_id: ExprId,
+) -> Resolver<'_> {
let r = owner.resolver(db);
let scopes = db.expr_scopes(owner);
let scope_id = scopes.scope_for(expr_id);
@@ -1058,25 +1062,25 @@
db: &dyn DefDatabase,
owner: DefWithBodyId,
scope_id: Option<ScopeId>,
-) -> Resolver {
+) -> Resolver<'_> {
let r = owner.resolver(db);
let scopes = db.expr_scopes(owner);
resolver_for_scope_(db, scopes, scope_id, r, owner)
}
-fn resolver_for_scope_(
- db: &dyn DefDatabase,
+fn resolver_for_scope_<'db>(
+ db: &'db dyn DefDatabase,
scopes: Arc<ExprScopes>,
scope_id: Option<ScopeId>,
- mut r: Resolver,
+ mut r: Resolver<'db>,
owner: DefWithBodyId,
-) -> Resolver {
+) -> Resolver<'db> {
let scope_chain = scopes.scope_chain(scope_id).collect::<Vec<_>>();
r.scopes.reserve(scope_chain.len());
for scope in scope_chain.into_iter().rev() {
if let Some(block) = scopes.block(scope) {
- let def_map = db.block_def_map(block);
+ let def_map = block_def_map(db, block);
let local_def_map = block.lookup(db).module.only_local_def_map(db);
r = r.push_block_scope(def_map, local_def_map);
// FIXME: This adds as many module scopes as there are blocks, but resolving in each
@@ -1092,18 +1096,26 @@
r
}
-impl Resolver {
- fn push_scope(mut self, scope: Scope) -> Resolver {
+impl<'db> Resolver<'db> {
+ fn push_scope(mut self, scope: Scope<'db>) -> Resolver<'db> {
self.scopes.push(scope);
self
}
- fn push_generic_params_scope(self, db: &dyn DefDatabase, def: GenericDefId) -> Resolver {
+ fn push_generic_params_scope(
+ self,
+ db: &'db dyn DefDatabase,
+ def: GenericDefId,
+ ) -> Resolver<'db> {
let params = db.generic_params(def);
self.push_scope(Scope::GenericParams { def, params })
}
- fn push_block_scope(self, def_map: Arc<DefMap>, local_def_map: Arc<LocalDefMap>) -> Resolver {
+ fn push_block_scope(
+ self,
+ def_map: &'db DefMap,
+ local_def_map: &'db LocalDefMap,
+ ) -> Resolver<'db> {
self.push_scope(Scope::BlockScope(ModuleItemMap {
def_map,
local_def_map,
@@ -1116,19 +1128,19 @@
owner: DefWithBodyId,
expr_scopes: Arc<ExprScopes>,
scope_id: ScopeId,
- ) -> Resolver {
+ ) -> Resolver<'db> {
self.push_scope(Scope::ExprScope(ExprScope { owner, expr_scopes, scope_id }))
}
}
-impl ModuleItemMap {
+impl<'db> ModuleItemMap<'db> {
fn resolve_path_in_value_ns(
&self,
- db: &dyn DefDatabase,
+ db: &'db dyn DefDatabase,
path: &ModPath,
) -> Option<(ResolveValueResult, ResolvePathResultPrefixInfo)> {
let (module_def, unresolved_idx, prefix_info) = self.def_map.resolve_path_locally(
- &self.local_def_map,
+ self.local_def_map,
db,
self.module_id,
path,
@@ -1167,7 +1179,7 @@
) -> Option<(TypeNs, Option<usize>, Option<ImportOrExternCrate>, ResolvePathResultPrefixInfo)>
{
let (module_def, idx, prefix_info) = self.def_map.resolve_path_locally(
- &self.local_def_map,
+ self.local_def_map,
db,
self.module_id,
path,
@@ -1263,11 +1275,11 @@
pub trait HasResolver: Copy {
/// Builds a resolver for type references inside this def.
- fn resolver(self, db: &dyn DefDatabase) -> Resolver;
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver<'_>;
}
impl HasResolver for ModuleId {
- fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver<'_> {
let (mut def_map, local_def_map) = self.local_def_map(db);
let mut module_id = self.local_id;
@@ -1289,21 +1301,17 @@
}
let mut resolver = Resolver {
scopes: Vec::with_capacity(modules.len()),
- module_scope: ModuleItemMap {
- def_map,
- local_def_map: local_def_map.clone(),
- module_id,
- },
+ module_scope: ModuleItemMap { def_map, local_def_map, module_id },
};
for def_map in modules.into_iter().rev() {
- resolver = resolver.push_block_scope(def_map, local_def_map.clone());
+ resolver = resolver.push_block_scope(def_map, local_def_map);
}
resolver
}
}
impl HasResolver for CrateRootModuleId {
- fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver<'_> {
let (def_map, local_def_map) = self.local_def_map(db);
Resolver {
scopes: vec![],
@@ -1313,75 +1321,75 @@
}
impl HasResolver for TraitId {
- fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver<'_> {
lookup_resolver(db, self).push_generic_params_scope(db, self.into())
}
}
impl HasResolver for TraitAliasId {
- fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver<'_> {
lookup_resolver(db, self).push_generic_params_scope(db, self.into())
}
}
impl<T: Into<AdtId> + Copy> HasResolver for T {
- fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver<'_> {
let def = self.into();
def.module(db).resolver(db).push_generic_params_scope(db, def.into())
}
}
impl HasResolver for FunctionId {
- fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver<'_> {
lookup_resolver(db, self).push_generic_params_scope(db, self.into())
}
}
impl HasResolver for ConstId {
- fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver<'_> {
lookup_resolver(db, self)
}
}
impl HasResolver for StaticId {
- fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver<'_> {
lookup_resolver(db, self)
}
}
impl HasResolver for TypeAliasId {
- fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver<'_> {
lookup_resolver(db, self).push_generic_params_scope(db, self.into())
}
}
impl HasResolver for ImplId {
- fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver<'_> {
self.lookup(db).container.resolver(db).push_generic_params_scope(db, self.into())
}
}
impl HasResolver for ExternBlockId {
- fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver<'_> {
// Same as parent's
lookup_resolver(db, self)
}
}
impl HasResolver for ExternCrateId {
- fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver<'_> {
lookup_resolver(db, self)
}
}
impl HasResolver for UseId {
- fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver<'_> {
lookup_resolver(db, self)
}
}
impl HasResolver for DefWithBodyId {
- fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver<'_> {
match self {
DefWithBodyId::ConstId(c) => c.resolver(db),
DefWithBodyId::FunctionId(f) => f.resolver(db),
@@ -1392,7 +1400,7 @@
}
impl HasResolver for ItemContainerId {
- fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver<'_> {
match self {
ItemContainerId::ModuleId(it) => it.resolver(db),
ItemContainerId::TraitId(it) => it.resolver(db),
@@ -1403,7 +1411,7 @@
}
impl HasResolver for GenericDefId {
- fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver<'_> {
match self {
GenericDefId::FunctionId(inner) => inner.resolver(db),
GenericDefId::AdtId(adt) => adt.resolver(db),
@@ -1418,13 +1426,13 @@
}
impl HasResolver for EnumVariantId {
- fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver<'_> {
self.lookup(db).parent.resolver(db)
}
}
impl HasResolver for VariantId {
- fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver<'_> {
match self {
VariantId::EnumVariantId(it) => it.resolver(db),
VariantId::StructId(it) => it.resolver(db),
@@ -1434,7 +1442,7 @@
}
impl HasResolver for MacroId {
- fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver<'_> {
match self {
MacroId::Macro2Id(it) => it.resolver(db),
MacroId::MacroRulesId(it) => it.resolver(db),
@@ -1444,29 +1452,29 @@
}
impl HasResolver for Macro2Id {
- fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver<'_> {
lookup_resolver(db, self)
}
}
impl HasResolver for ProcMacroId {
- fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver<'_> {
lookup_resolver(db, self)
}
}
impl HasResolver for MacroRulesId {
- fn resolver(self, db: &dyn DefDatabase) -> Resolver {
+ fn resolver(self, db: &dyn DefDatabase) -> Resolver<'_> {
lookup_resolver(db, self)
}
}
-fn lookup_resolver<'db>(
- db: &(dyn DefDatabase + 'db),
+fn lookup_resolver(
+ db: &dyn DefDatabase,
lookup: impl Lookup<
Database = dyn DefDatabase,
Data = impl ItemTreeLoc<Container = impl HasResolver>,
>,
-) -> Resolver {
+) -> Resolver<'_> {
lookup.lookup(db).container().resolver(db)
}
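
Threading a `'db` lifetime through `Resolver` means every `HasResolver` impl in `resolver.rs` now returns `Resolver<'_>`, with the elided lifetime tied to the `db` borrow. A simplified, self-contained sketch of that signature shape (the trait and types here are stand-ins, not the real `DefDatabase`):

    trait Database { fn def_map(&self) -> &str; }

    struct Resolver<'db> { scope: &'db str }

    trait HasResolver: Copy {
        // `'_` resolves to the lifetime of `db`, the only input lifetime.
        fn resolver(self, db: &dyn Database) -> Resolver<'_>;
    }

    #[derive(Copy, Clone)]
    struct ModuleId;

    impl HasResolver for ModuleId {
        fn resolver(self, db: &dyn Database) -> Resolver<'_> {
            Resolver { scope: db.def_map() }
        }
    }

    struct Db;
    impl Database for Db {
        fn def_map(&self) -> &str { "crate" }
    }

    fn main() {
        let db = Db;
        let resolver = ModuleId.resolver(&db);
        assert_eq!(resolver.scope, "crate");
    }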
diff --git a/crates/hir-def/src/test_db.rs b/crates/hir-def/src/test_db.rs
index 4709754..e30a5b6 100644
--- a/crates/hir-def/src/test_db.rs
+++ b/crates/hir-def/src/test_db.rs
@@ -15,7 +15,7 @@
use crate::{
LocalModuleId, Lookup, ModuleDefId, ModuleId,
db::DefDatabase,
- nameres::{DefMap, ModuleSource},
+ nameres::{DefMap, ModuleSource, block_def_map, crate_def_map},
src::HasSource,
};
@@ -30,9 +30,18 @@
impl Default for TestDB {
fn default() -> Self {
+ let events = <Arc<Mutex<Option<Vec<salsa::Event>>>>>::default();
let mut this = Self {
- storage: Default::default(),
- events: Default::default(),
+ storage: salsa::Storage::new(Some(Box::new({
+ let events = events.clone();
+ move |event| {
+ let mut events = events.lock().unwrap();
+ if let Some(events) = &mut *events {
+ events.push(event);
+ }
+ }
+ }))),
+ events,
files: Default::default(),
crates_map: Default::default(),
};
@@ -45,15 +54,7 @@
}
#[salsa_macros::db]
-impl salsa::Database for TestDB {
- fn salsa_event(&self, event: &dyn std::ops::Fn() -> salsa::Event) {
- let mut events = self.events.lock().unwrap();
- if let Some(events) = &mut *events {
- let event = event();
- events.push(event);
- }
- }
-}
+impl salsa::Database for TestDB {}
impl fmt::Debug for TestDB {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
@@ -133,7 +134,7 @@
pub(crate) fn module_for_file(&self, file_id: FileId) -> ModuleId {
for &krate in self.relevant_crates(file_id).iter() {
- let crate_def_map = self.crate_def_map(krate);
+ let crate_def_map = crate_def_map(self, krate);
for (local_id, data) in crate_def_map.modules() {
if data.origin.file_id().map(|file_id| file_id.file_id(self)) == Some(file_id) {
return crate_def_map.module_id(local_id);
@@ -146,16 +147,16 @@
pub(crate) fn module_at_position(&self, position: FilePosition) -> ModuleId {
let file_module = self.module_for_file(position.file_id.file_id(self));
let mut def_map = file_module.def_map(self);
- let module = self.mod_at_position(&def_map, position);
+ let module = self.mod_at_position(def_map, position);
- def_map = match self.block_at_position(&def_map, position) {
+ def_map = match self.block_at_position(def_map, position) {
Some(it) => it,
None => return def_map.module_id(module),
};
loop {
- let new_map = self.block_at_position(&def_map, position);
+ let new_map = self.block_at_position(def_map, position);
match new_map {
- Some(new_block) if !Arc::ptr_eq(&new_block, &def_map) => {
+ Some(new_block) if !std::ptr::eq(new_block, def_map) => {
def_map = new_block;
}
_ => {
@@ -206,7 +207,7 @@
res
}
- fn block_at_position(&self, def_map: &DefMap, position: FilePosition) -> Option<Arc<DefMap>> {
+ fn block_at_position(&self, def_map: &DefMap, position: FilePosition) -> Option<&DefMap> {
// Find the smallest (innermost) function in `def_map` containing the cursor.
let mut size = None;
let mut fn_def = None;
@@ -263,7 +264,7 @@
let mut containing_blocks =
scopes.scope_chain(Some(scope)).filter_map(|scope| scopes.block(scope));
- if let Some(block) = containing_blocks.next().map(|block| self.block_def_map(block)) {
+ if let Some(block) = containing_blocks.next().map(|block| block_def_map(self, block)) {
return Some(block);
}
}
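
The `TestDB` change above replaces the overridden `salsa_event` hook with an event callback passed into `salsa::Storage::new`, recording events into a shared buffer. A rough standalone sketch of just the callback-capture pattern; `Storage` and `Event` below are invented stand-ins, not the salsa API:

    use std::sync::{Arc, Mutex};

    struct Event(&'static str);

    struct Storage { on_event: Option<Box<dyn Fn(Event) + Send + Sync>> }

    impl Storage {
        fn new(on_event: Option<Box<dyn Fn(Event) + Send + Sync>>) -> Self {
            Storage { on_event }
        }
        fn emit(&self, event: Event) {
            if let Some(cb) = &self.on_event { cb(event); }
        }
    }

    fn main() {
        // Shared buffer; `None` means "not currently logging".
        let events: Arc<Mutex<Option<Vec<Event>>>> = Arc::new(Mutex::new(Some(Vec::new())));
        let storage = Storage::new(Some(Box::new({
            let events = events.clone();
            move |event| {
                let mut events = events.lock().unwrap();
                if let Some(events) = &mut *events {
                    events.push(event);
                }
            }
        })));
        storage.emit(Event("crate_local_def_map"));
        assert_eq!(events.lock().unwrap().as_ref().unwrap()[0].0, "crate_local_def_map");
    }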
diff --git a/crates/hir-def/src/visibility.rs b/crates/hir-def/src/visibility.rs
index b42c8d3..3c67ee9 100644
--- a/crates/hir-def/src/visibility.rs
+++ b/crates/hir-def/src/visibility.rs
@@ -28,7 +28,7 @@
impl Visibility {
pub fn resolve(
db: &dyn DefDatabase,
- resolver: &crate::resolver::Resolver,
+ resolver: &crate::resolver::Resolver<'_>,
raw_vis: &RawVisibility,
) -> Self {
// we fall back to public visibility (i.e. fail open) if the path can't be resolved
@@ -50,7 +50,7 @@
return false;
}
let def_map = from_module.def_map(db);
- Self::is_visible_from_def_map_(db, &def_map, to_module, from_module.local_id)
+ Self::is_visible_from_def_map_(db, def_map, to_module, from_module.local_id)
}
pub(crate) fn is_visible_from_def_map(
@@ -116,7 +116,7 @@
match def_map.parent() {
Some(module) => {
parent_arc = module.def_map(db);
- def_map = &*parent_arc;
+ def_map = parent_arc;
from_module = module.local_id;
}
// Reached the root module, nothing left to check.
@@ -257,7 +257,7 @@
}
#[inline]
-fn trait_vis(db: &dyn DefDatabase, resolver: &Resolver, trait_id: TraitId) -> Visibility {
+fn trait_vis(db: &dyn DefDatabase, resolver: &Resolver<'_>, trait_id: TraitId) -> Visibility {
let ItemLoc { id: tree_id, .. } = trait_id.lookup(db);
let item_tree = tree_id.item_tree(db);
let tr_def = &item_tree[tree_id.value];
diff --git a/crates/hir-expand/src/attrs.rs b/crates/hir-expand/src/attrs.rs
index bb17eb0..94c9771 100644
--- a/crates/hir-expand/src/attrs.rs
+++ b/crates/hir-expand/src/attrs.rs
@@ -1,4 +1,5 @@
//! A higher level attributes based on TokenTree, with also some shortcuts.
+use std::iter;
use std::{borrow::Cow, fmt, ops};
use base_db::Crate;
@@ -122,16 +123,15 @@
(None, entries @ Some(_)) => Self { entries },
(Some(entries), None) => Self { entries: Some(entries.clone()) },
(Some(a), Some(b)) => {
- let last_ast_index = a.slice.last().map_or(0, |it| it.id.ast_index() + 1) as u32;
+ let last_ast_index = a.slice.last().map_or(0, |it| it.id.ast_index() + 1);
let items = a
.slice
.iter()
.cloned()
.chain(b.slice.iter().map(|it| {
let mut it = it.clone();
- it.id.id = (it.id.ast_index() as u32 + last_ast_index)
- | ((it.id.cfg_attr_index().unwrap_or(0) as u32)
- << AttrId::AST_INDEX_BITS);
+ let id = it.id.ast_index() + last_ast_index;
+ it.id = AttrId::new(id, it.id.is_inner_attr());
it
}))
.collect::<Vec<_>>();
@@ -175,25 +175,20 @@
// FIXME: This only handles a single level of cfg_attr nesting
// that is `#[cfg_attr(all(), cfg_attr(all(), cfg(any())))]` breaks again
impl AttrId {
- const CFG_ATTR_BITS: usize = 7;
- const AST_INDEX_MASK: usize = 0x00FF_FFFF;
- const AST_INDEX_BITS: usize = Self::AST_INDEX_MASK.count_ones() as usize;
- const CFG_ATTR_SET_BITS: u32 = 1 << 31;
+ const INNER_ATTR_SET_BIT: u32 = 1 << 31;
+
+ pub fn new(id: usize, is_inner: bool) -> Self {
+ assert!(id <= !Self::INNER_ATTR_SET_BIT as usize);
+ let id = id as u32;
+ Self { id: if is_inner { id | Self::INNER_ATTR_SET_BIT } else { id } }
+ }
pub fn ast_index(&self) -> usize {
- self.id as usize & Self::AST_INDEX_MASK
+ (self.id & !Self::INNER_ATTR_SET_BIT) as usize
}
- pub fn cfg_attr_index(&self) -> Option<usize> {
- if self.id & Self::CFG_ATTR_SET_BITS == 0 {
- None
- } else {
- Some(self.id as usize >> Self::AST_INDEX_BITS)
- }
- }
-
- pub fn with_cfg_attr(self, idx: usize) -> AttrId {
- AttrId { id: self.id | ((idx as u32) << Self::AST_INDEX_BITS) | Self::CFG_ATTR_SET_BITS }
+ pub fn is_inner_attr(&self) -> bool {
+ self.id & Self::INNER_ATTR_SET_BIT != 0
}
}
@@ -333,10 +328,7 @@
None => return smallvec![self.clone()],
};
let index = self.id;
- let attrs = parts
- .enumerate()
- .take(1 << AttrId::CFG_ATTR_BITS)
- .filter_map(|(idx, attr)| Attr::from_tt(db, attr, index.with_cfg_attr(idx)));
+ let attrs = parts.filter_map(|attr| Attr::from_tt(db, attr, index));
let cfg = TopSubtree::from_token_trees(subtree.top_subtree().delimiter, cfg);
let cfg = CfgExpr::parse(&cfg);
@@ -467,13 +459,18 @@
pub fn collect_attrs(
owner: &dyn ast::HasAttrs,
) -> impl Iterator<Item = (AttrId, Either<ast::Attr, ast::Comment>)> {
- let inner_attrs = inner_attributes(owner.syntax()).into_iter().flatten();
- let outer_attrs =
- ast::AttrDocCommentIter::from_syntax_node(owner.syntax()).filter(|el| match el {
+ let inner_attrs =
+ inner_attributes(owner.syntax()).into_iter().flatten().zip(iter::repeat(true));
+ let outer_attrs = ast::AttrDocCommentIter::from_syntax_node(owner.syntax())
+ .filter(|el| match el {
Either::Left(attr) => attr.kind().is_outer(),
Either::Right(comment) => comment.is_outer(),
- });
- outer_attrs.chain(inner_attrs).enumerate().map(|(id, attr)| (AttrId { id: id as u32 }, attr))
+ })
+ .zip(iter::repeat(false));
+ outer_attrs
+ .chain(inner_attrs)
+ .enumerate()
+ .map(|(id, (attr, is_inner))| (AttrId::new(id, is_inner), attr))
}
fn inner_attributes(
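
The reworked `AttrId` above drops the cfg_attr-index packing and keeps only the AST index plus an inner-attribute flag in the top bit. The encoding can be exercised on its own roughly like this (same shape as the diff, standalone types):

    #[derive(Copy, Clone, Debug, PartialEq, Eq)]
    struct AttrId { id: u32 }

    impl AttrId {
        const INNER_ATTR_SET_BIT: u32 = 1 << 31;

        fn new(id: usize, is_inner: bool) -> Self {
            // The index must fit in the low 31 bits.
            assert!(id <= !Self::INNER_ATTR_SET_BIT as usize);
            let id = id as u32;
            Self { id: if is_inner { id | Self::INNER_ATTR_SET_BIT } else { id } }
        }

        fn ast_index(self) -> usize {
            (self.id & !Self::INNER_ATTR_SET_BIT) as usize
        }

        fn is_inner_attr(self) -> bool {
            self.id & Self::INNER_ATTR_SET_BIT != 0
        }
    }

    fn main() {
        let outer = AttrId::new(3, false);
        let inner = AttrId::new(3, true);
        assert_eq!(outer.ast_index(), 3);
        assert_eq!(inner.ast_index(), 3);
        assert!(inner.is_inner_attr() && !outer.is_inner_attr());
    }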
diff --git a/crates/hir-expand/src/builtin/derive_macro.rs b/crates/hir-expand/src/builtin/derive_macro.rs
index 68283b9..d135584 100644
--- a/crates/hir-expand/src/builtin/derive_macro.rs
+++ b/crates/hir-expand/src/builtin/derive_macro.rs
@@ -1,5 +1,6 @@
//! Builtin derives.
+use either::Either;
use intern::sym;
use itertools::{Itertools, izip};
use parser::SyntaxKind;
@@ -1179,10 +1180,10 @@
};
new_predicates.push(
make::where_pred(
- make::ty_path(make::path_from_segments(
+ Either::Right(make::ty_path(make::path_from_segments(
[make::path_segment(new_bounds_target)],
false,
- )),
+ ))),
new_bounds,
)
.clone_for_update(),
@@ -1245,7 +1246,9 @@
substitute_type_in_bound(ty, &pointee_param_name.text(), ADDED_PARAM)
})
});
- new_predicates.push(make::where_pred(pred_target, new_bounds).clone_for_update());
+ new_predicates.push(
+ make::where_pred(Either::Right(pred_target), new_bounds).clone_for_update(),
+ );
}
}
@@ -1260,10 +1263,10 @@
// Find the `#[pointee]` parameter and add an `Unsize<__S>` bound to it.
where_clause.add_predicate(
make::where_pred(
- make::ty_path(make::path_from_segments(
+ Either::Right(make::ty_path(make::path_from_segments(
[make::path_segment(make::name_ref(&pointee_param_name.text()))],
false,
- )),
+ ))),
[make::type_bound(make::ty_path(make::path_from_segments(
[
make::path_segment(make::name_ref("core")),
diff --git a/crates/hir-expand/src/builtin/fn_macro.rs b/crates/hir-expand/src/builtin/fn_macro.rs
index 621e174..3180b8d 100644
--- a/crates/hir-expand/src/builtin/fn_macro.rs
+++ b/crates/hir-expand/src/builtin/fn_macro.rs
@@ -140,7 +140,6 @@
EagerExpander:
(compile_error, CompileError) => compile_error_expand,
(concat, Concat) => concat_expand,
- (concat_idents, ConcatIdents) => concat_idents_expand,
(concat_bytes, ConcatBytes) => concat_bytes_expand,
(include, Include) => include_expand,
(include_bytes, IncludeBytes) => include_bytes_expand,
@@ -452,7 +451,10 @@
Some(_) => (),
None => span = Some(s),
};
- for (i, mut t) in tt.iter().enumerate() {
+
+ let mut i = 0;
+ let mut iter = tt.iter();
+ while let Some(mut t) = iter.next() {
// FIXME: hack on top of a hack: `$e:expr` captures get surrounded in parentheses
// to ensure the right parsing order, so skip the parentheses here. Ideally we'd
// implement rustc's model. cc https://github.com/rust-lang/rust-analyzer/pull/10623
@@ -504,10 +506,40 @@
record_span(id.span);
}
TtElement::Leaf(tt::Leaf::Punct(punct)) if i % 2 == 1 && punct.char == ',' => (),
+ // handle negative numbers
+ TtElement::Leaf(tt::Leaf::Punct(punct)) if i % 2 == 0 && punct.char == '-' => {
+ let t = match iter.next() {
+ Some(t) => t,
+ None => {
+ err.get_or_insert(ExpandError::other(
+ call_site,
+ "unexpected end of input after '-'",
+ ));
+ break;
+ }
+ };
+
+ match t {
+ TtElement::Leaf(tt::Leaf::Literal(it))
+ if matches!(it.kind, tt::LitKind::Integer | tt::LitKind::Float) =>
+ {
+ format_to!(text, "-{}", it.symbol.as_str());
+ record_span(punct.span.cover(it.span));
+ }
+ _ => {
+ err.get_or_insert(ExpandError::other(
+ call_site,
+ "expected integer or floating pointer number after '-'",
+ ));
+ break;
+ }
+ }
+ }
_ => {
err.get_or_insert(ExpandError::other(call_site, "unexpected token"));
}
}
+ i += 1;
}
let span = span.unwrap_or_else(|| tt.top_subtree().delimiter.open);
ExpandResult { value: quote!(span =>#text), err }
@@ -627,30 +659,6 @@
Ok(())
}
-fn concat_idents_expand(
- _db: &dyn ExpandDatabase,
- _arg_id: MacroCallId,
- tt: &tt::TopSubtree,
- span: Span,
-) -> ExpandResult<tt::TopSubtree> {
- let mut err = None;
- let mut ident = String::new();
- for (i, t) in tt.iter().enumerate() {
- match t {
- TtElement::Leaf(tt::Leaf::Ident(id)) => {
- ident.push_str(id.sym.as_str());
- }
- TtElement::Leaf(tt::Leaf::Punct(punct)) if i % 2 == 1 && punct.char == ',' => (),
- _ => {
- err.get_or_insert(ExpandError::other(span, "unexpected token"));
- }
- }
- }
- // FIXME merge spans
- let ident = tt::Ident { sym: Symbol::intern(&ident), span, is_raw: tt::IdentIsRaw::No };
- ExpandResult { value: quote!(span =>#ident), err }
-}
-
fn relative_file(
db: &dyn ExpandDatabase,
call_id: MacroCallId,
diff --git a/crates/hir-expand/src/proc_macro.rs b/crates/hir-expand/src/proc_macro.rs
index 8a1a33d..1cd975b 100644
--- a/crates/hir-expand/src/proc_macro.rs
+++ b/crates/hir-expand/src/proc_macro.rs
@@ -19,18 +19,8 @@
Attr,
}
-pub trait AsAny: Any {
- fn as_any(&self) -> &dyn Any;
-}
-
-impl<T: Any> AsAny for T {
- fn as_any(&self) -> &dyn Any {
- self
- }
-}
-
/// A proc-macro expander implementation.
-pub trait ProcMacroExpander: fmt::Debug + Send + Sync + RefUnwindSafe + AsAny {
+pub trait ProcMacroExpander: fmt::Debug + Send + Sync + RefUnwindSafe + Any {
/// Run the expander with the given input subtree, optional attribute input subtree (for
/// [`ProcMacroKind::Attr`]), environment variables, and span information.
fn expand(
@@ -44,7 +34,9 @@
current_dir: String,
) -> Result<tt::TopSubtree, ProcMacroExpansionError>;
- fn eq_dyn(&self, other: &dyn ProcMacroExpander) -> bool;
+ fn eq_dyn(&self, other: &dyn ProcMacroExpander) -> bool {
+ other.type_id() == self.type_id()
+ }
}
impl PartialEq for dyn ProcMacroExpander {
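
With `Any` as a supertrait, `eq_dyn` above gets a default body that compares `TypeId`s, so the hand-rolled `AsAny` helper trait and per-impl `eq_dyn` bodies go away. A standalone sketch of the same trick with a stand-in trait:

    use std::any::Any;

    trait Expander: Any {
        // Two expanders are "equal" if they are the same concrete type.
        fn eq_dyn(&self, other: &dyn Expander) -> bool {
            other.type_id() == self.type_id()
        }
    }

    struct A;
    struct B;
    impl Expander for A {}
    impl Expander for B {}

    impl PartialEq for dyn Expander {
        fn eq(&self, other: &Self) -> bool {
            self.eq_dyn(other)
        }
    }

    fn main() {
        let a1: Box<dyn Expander> = Box::new(A);
        let a2: Box<dyn Expander> = Box::new(A);
        let b: Box<dyn Expander> = Box::new(B);
        assert!(a1.eq_dyn(&*a2));
        assert!(!a1.eq_dyn(&*b));
    }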
diff --git a/crates/hir-ty/src/chalk_db.rs b/crates/hir-ty/src/chalk_db.rs
index cd799c0..22b96b5 100644
--- a/crates/hir-ty/src/chalk_db.rs
+++ b/crates/hir-ty/src/chalk_db.rs
@@ -259,7 +259,7 @@
}
fn well_known_trait_id(
&self,
- well_known_trait: rust_ir::WellKnownTrait,
+ well_known_trait: WellKnownTrait,
) -> Option<chalk_ir::TraitId<Interner>> {
let lang_attr = lang_item_from_well_known_trait(well_known_trait);
let trait_ = lang_attr.resolve_trait(self.db, self.krate)?;
diff --git a/crates/hir-ty/src/consteval.rs b/crates/hir-ty/src/consteval.rs
index d1a1e13..f903b06 100644
--- a/crates/hir-ty/src/consteval.rs
+++ b/crates/hir-ty/src/consteval.rs
@@ -91,7 +91,7 @@
pub(crate) fn path_to_const<'g>(
db: &dyn HirDatabase,
- resolver: &Resolver,
+ resolver: &Resolver<'_>,
path: &Path,
mode: ParamLoweringMode,
args: impl FnOnce() -> &'g Generics,
diff --git a/crates/hir-ty/src/diagnostics/expr.rs b/crates/hir-ty/src/diagnostics/expr.rs
index 5710641..9eb7ffe 100644
--- a/crates/hir-ty/src/diagnostics/expr.rs
+++ b/crates/hir-ty/src/diagnostics/expr.rs
@@ -25,7 +25,7 @@
use typed_arena::Arena;
use crate::{
- Adjust, InferenceResult, Interner, Ty, TyExt, TyKind,
+ Adjust, InferenceResult, Interner, TraitEnvironment, Ty, TyExt, TyKind,
db::HirDatabase,
diagnostics::match_check::{
self,
@@ -74,8 +74,9 @@
let _p = tracing::info_span!("BodyValidationDiagnostic::collect").entered();
let infer = db.infer(owner);
let body = db.body(owner);
+ let env = db.trait_environment_for_body(owner);
let mut validator =
- ExprValidator { owner, body, infer, diagnostics: Vec::new(), validate_lints };
+ ExprValidator { owner, body, infer, diagnostics: Vec::new(), validate_lints, env };
validator.validate_body(db);
validator.diagnostics
}
@@ -85,6 +86,7 @@
owner: DefWithBodyId,
body: Arc<Body>,
infer: Arc<InferenceResult>,
+ env: Arc<TraitEnvironment>,
diagnostics: Vec<BodyValidationDiagnostic>,
validate_lints: bool,
}
@@ -190,7 +192,7 @@
return;
}
- let cx = MatchCheckCtx::new(self.owner.module(db), self.owner, db);
+ let cx = MatchCheckCtx::new(self.owner.module(db), self.owner, db, self.env.clone());
let pattern_arena = Arena::new();
let mut m_arms = Vec::with_capacity(arms.len());
@@ -317,11 +319,14 @@
return;
};
let pattern_arena = Arena::new();
- let cx = MatchCheckCtx::new(self.owner.module(db), self.owner, db);
+ let cx = MatchCheckCtx::new(self.owner.module(db), self.owner, db, self.env.clone());
for stmt in &**statements {
let &Statement::Let { pat, initializer, else_branch: None, .. } = stmt else {
continue;
};
+ if self.infer.type_mismatch_for_pat(pat).is_some() {
+ continue;
+ }
let Some(initializer) = initializer else { continue };
let ty = &self.infer[initializer];
if ty.contains_unknown() {
@@ -480,7 +485,7 @@
}
impl FilterMapNextChecker {
- fn new(resolver: &hir_def::resolver::Resolver, db: &dyn HirDatabase) -> Self {
+ fn new(resolver: &hir_def::resolver::Resolver<'_>, db: &dyn HirDatabase) -> Self {
// Find and store the FunctionIds for Iterator::filter_map and Iterator::next
let (next_function_id, filter_map_function_id) = match LangItem::IteratorNext
.resolve_function(db, resolver.krate())
diff --git a/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs b/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs
index 068fc22..dd82a0f 100644
--- a/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs
+++ b/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs
@@ -12,9 +12,10 @@
};
use smallvec::{SmallVec, smallvec};
use stdx::never;
+use triomphe::Arc;
use crate::{
- AdtId, Interner, Scalar, Ty, TyExt, TyKind,
+ AdtId, Interner, Scalar, TraitEnvironment, Ty, TyExt, TyKind,
db::HirDatabase,
infer::normalize,
inhabitedness::{is_enum_variant_uninhabited_from, is_ty_uninhabited_from},
@@ -69,13 +70,19 @@
body: DefWithBodyId,
pub(crate) db: &'db dyn HirDatabase,
exhaustive_patterns: bool,
+ env: Arc<TraitEnvironment>,
}
impl<'db> MatchCheckCtx<'db> {
- pub(crate) fn new(module: ModuleId, body: DefWithBodyId, db: &'db dyn HirDatabase) -> Self {
- let def_map = db.crate_def_map(module.krate());
+ pub(crate) fn new(
+ module: ModuleId,
+ body: DefWithBodyId,
+ db: &'db dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ ) -> Self {
+ let def_map = module.crate_def_map(db);
let exhaustive_patterns = def_map.is_unstable_feature_enabled(&sym::exhaustive_patterns);
- Self { module, body, db, exhaustive_patterns }
+ Self { module, body, db, exhaustive_patterns, env }
}
pub(crate) fn compute_match_usefulness(
@@ -100,7 +107,7 @@
}
fn is_uninhabited(&self, ty: &Ty) -> bool {
- is_ty_uninhabited_from(self.db, ty, self.module)
+ is_ty_uninhabited_from(self.db, ty, self.module, self.env.clone())
}
/// Returns whether the given ADT is from another crate declared `#[non_exhaustive]`.
@@ -459,8 +466,13 @@
} else {
let mut variants = IndexVec::with_capacity(enum_data.variants.len());
for &(variant, _) in enum_data.variants.iter() {
- let is_uninhabited =
- is_enum_variant_uninhabited_from(cx.db, variant, subst, cx.module);
+ let is_uninhabited = is_enum_variant_uninhabited_from(
+ cx.db,
+ variant,
+ subst,
+ cx.module,
+ self.env.clone(),
+ );
let visibility = if is_uninhabited {
VariantVisibility::Empty
} else {
diff --git a/crates/hir-ty/src/diagnostics/unsafe_check.rs b/crates/hir-ty/src/diagnostics/unsafe_check.rs
index 73b99db..20cf3c7 100644
--- a/crates/hir-ty/src/diagnostics/unsafe_check.rs
+++ b/crates/hir-ty/src/diagnostics/unsafe_check.rs
@@ -7,7 +7,7 @@
use hir_def::{
AdtId, DefWithBodyId, FieldId, FunctionId, VariantId,
expr_store::{Body, path::Path},
- hir::{Expr, ExprId, ExprOrPatId, Pat, PatId, Statement, UnaryOp},
+ hir::{AsmOperand, Expr, ExprId, ExprOrPatId, Pat, PatId, Statement, UnaryOp},
resolver::{HasResolver, ResolveValueResult, Resolver, ValueNs},
signatures::StaticFlags,
type_ref::Rawness,
@@ -131,28 +131,28 @@
visitor.walk_expr(current);
}
-struct UnsafeVisitor<'a> {
- db: &'a dyn HirDatabase,
- infer: &'a InferenceResult,
- body: &'a Body,
- resolver: Resolver,
+struct UnsafeVisitor<'db> {
+ db: &'db dyn HirDatabase,
+ infer: &'db InferenceResult,
+ body: &'db Body,
+ resolver: Resolver<'db>,
def: DefWithBodyId,
inside_unsafe_block: InsideUnsafeBlock,
inside_assignment: bool,
inside_union_destructure: bool,
- callback: &'a mut dyn FnMut(UnsafeDiagnostic),
+ callback: &'db mut dyn FnMut(UnsafeDiagnostic),
def_target_features: TargetFeatures,
// FIXME: This needs to be the edition of the span of each call.
edition: Edition,
}
-impl<'a> UnsafeVisitor<'a> {
+impl<'db> UnsafeVisitor<'db> {
fn new(
- db: &'a dyn HirDatabase,
- infer: &'a InferenceResult,
- body: &'a Body,
+ db: &'db dyn HirDatabase,
+ infer: &'db InferenceResult,
+ body: &'db Body,
def: DefWithBodyId,
- unsafe_expr_cb: &'a mut dyn FnMut(UnsafeDiagnostic),
+ unsafe_expr_cb: &'db mut dyn FnMut(UnsafeDiagnostic),
) -> Self {
let resolver = def.resolver(db);
let def_target_features = match def {
@@ -199,6 +199,17 @@
}
}
+ fn with_inside_unsafe_block<R>(
+ &mut self,
+ inside_unsafe_block: InsideUnsafeBlock,
+ f: impl FnOnce(&mut Self) -> R,
+ ) -> R {
+ let old = mem::replace(&mut self.inside_unsafe_block, inside_unsafe_block);
+ let result = f(self);
+ self.inside_unsafe_block = old;
+ result
+ }
+
fn walk_pats_top(&mut self, pats: impl Iterator<Item = PatId>, parent_expr: ExprId) {
let guard = self.resolver.update_to_inner_scope(self.db, self.def, parent_expr);
pats.for_each(|pat| self.walk_pat(pat));
@@ -303,7 +314,29 @@
self.walk_pats_top(std::iter::once(target), current);
self.inside_assignment = old_inside_assignment;
}
- Expr::InlineAsm(_) => self.on_unsafe_op(current.into(), UnsafetyReason::InlineAsm),
+ Expr::InlineAsm(asm) => {
+ self.on_unsafe_op(current.into(), UnsafetyReason::InlineAsm);
+ asm.operands.iter().for_each(|(_, op)| match op {
+ AsmOperand::In { expr, .. }
+ | AsmOperand::Out { expr: Some(expr), .. }
+ | AsmOperand::InOut { expr, .. }
+ | AsmOperand::Const(expr) => self.walk_expr(*expr),
+ AsmOperand::SplitInOut { in_expr, out_expr, .. } => {
+ self.walk_expr(*in_expr);
+ if let Some(out_expr) = out_expr {
+ self.walk_expr(*out_expr);
+ }
+ }
+ AsmOperand::Out { expr: None, .. } | AsmOperand::Sym(_) => (),
+ AsmOperand::Label(expr) => {
+ // Inline asm label blocks are not treated as being inside the surrounding unsafe block.
+ self.with_inside_unsafe_block(InsideUnsafeBlock::No, |this| {
+ this.walk_expr(*expr)
+ });
+ }
+ });
+ return;
+ }
// rustc allows union assignment to propagate through field accesses and casts.
Expr::Cast { .. } => self.inside_assignment = inside_assignment,
Expr::Field { .. } => {
@@ -317,17 +350,16 @@
}
}
Expr::Unsafe { statements, .. } => {
- let old_inside_unsafe_block =
- mem::replace(&mut self.inside_unsafe_block, InsideUnsafeBlock::Yes);
- self.walk_pats_top(
- statements.iter().filter_map(|statement| match statement {
- &Statement::Let { pat, .. } => Some(pat),
- _ => None,
- }),
- current,
- );
- self.body.walk_child_exprs_without_pats(current, |child| self.walk_expr(child));
- self.inside_unsafe_block = old_inside_unsafe_block;
+ self.with_inside_unsafe_block(InsideUnsafeBlock::Yes, |this| {
+ this.walk_pats_top(
+ statements.iter().filter_map(|statement| match statement {
+ &Statement::Let { pat, .. } => Some(pat),
+ _ => None,
+ }),
+ current,
+ );
+ this.body.walk_child_exprs_without_pats(current, |child| this.walk_expr(child));
+ });
return;
}
Expr::Block { statements, .. } | Expr::Async { statements, .. } => {
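
The new `with_inside_unsafe_block` helper above centralizes the save/restore of the flag via `mem::replace`, instead of repeating it at every call site. A minimal standalone version of that helper with a stripped-down visitor type:

    use std::mem;

    #[derive(Copy, Clone, PartialEq, Debug)]
    enum InsideUnsafeBlock { No, Yes }

    struct Visitor { inside_unsafe_block: InsideUnsafeBlock }

    impl Visitor {
        fn with_inside_unsafe_block<R>(
            &mut self,
            inside_unsafe_block: InsideUnsafeBlock,
            f: impl FnOnce(&mut Self) -> R,
        ) -> R {
            // Swap the flag in, run the closure, then restore the old value.
            let old = mem::replace(&mut self.inside_unsafe_block, inside_unsafe_block);
            let result = f(self);
            self.inside_unsafe_block = old;
            result
        }
    }

    fn main() {
        let mut v = Visitor { inside_unsafe_block: InsideUnsafeBlock::No };
        let seen = v.with_inside_unsafe_block(InsideUnsafeBlock::Yes, |this| {
            this.inside_unsafe_block
        });
        assert_eq!(seen, InsideUnsafeBlock::Yes);
        assert_eq!(v.inside_unsafe_block, InsideUnsafeBlock::No);
    }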
diff --git a/crates/hir-ty/src/display.rs b/crates/hir-ty/src/display.rs
index f0989d9..f210dd8 100644
--- a/crates/hir-ty/src/display.rs
+++ b/crates/hir-ty/src/display.rs
@@ -1463,6 +1463,8 @@
}
if f.closure_style == ClosureStyle::RANotation || !sig.ret().is_unit() {
write!(f, " -> ")?;
+ // FIXME: We display `AsyncFn` as `-> impl Future`, but this is hard to fix because
+ // we don't have a trait environment here, required to normalize `<Ret as Future>::Output`.
sig.ret().hir_fmt(f)?;
}
} else {
diff --git a/crates/hir-ty/src/dyn_compatibility.rs b/crates/hir-ty/src/dyn_compatibility.rs
index 106b996..ed8d8dc 100644
--- a/crates/hir-ty/src/dyn_compatibility.rs
+++ b/crates/hir-ty/src/dyn_compatibility.rs
@@ -9,8 +9,8 @@
};
use chalk_solve::rust_ir::InlineBound;
use hir_def::{
- AssocItemId, ConstId, FunctionId, GenericDefId, HasModule, TraitId, TypeAliasId,
- lang_item::LangItem, signatures::TraitFlags,
+ AssocItemId, ConstId, CrateRootModuleId, FunctionId, GenericDefId, HasModule, TraitId,
+ TypeAliasId, lang_item::LangItem, signatures::TraitFlags,
};
use rustc_hash::FxHashSet;
use smallvec::SmallVec;
@@ -343,7 +343,7 @@
})
}
AssocItemId::TypeAliasId(it) => {
- let def_map = db.crate_def_map(trait_.krate(db));
+ let def_map = CrateRootModuleId::from(trait_.krate(db)).def_map(db);
if def_map.is_unstable_feature_enabled(&intern::sym::generic_associated_type_extended) {
ControlFlow::Continue(())
} else {
diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs
index f0ec31d..e698fb2 100644
--- a/crates/hir-ty/src/infer.rs
+++ b/crates/hir-ty/src/infer.rs
@@ -594,16 +594,16 @@
/// The inference context contains all information needed during type inference.
#[derive(Clone, Debug)]
-pub(crate) struct InferenceContext<'a> {
- pub(crate) db: &'a dyn HirDatabase,
+pub(crate) struct InferenceContext<'db> {
+ pub(crate) db: &'db dyn HirDatabase,
pub(crate) owner: DefWithBodyId,
- pub(crate) body: &'a Body,
+ pub(crate) body: &'db Body,
/// Generally you should not resolve things via this resolver. Instead create a TyLoweringContext
/// and resolve the path via its methods. This will ensure proper error reporting.
- pub(crate) resolver: Resolver,
+ pub(crate) resolver: Resolver<'db>,
generic_def: GenericDefId,
generics: OnceCell<Generics>,
- table: unify::InferenceTable<'a>,
+ table: unify::InferenceTable<'db>,
/// The traits in scope, disregarding block modules. This is used for caching purposes.
traits_in_scope: FxHashSet<TraitId>,
pub(crate) result: InferenceResult,
@@ -695,12 +695,12 @@
TypeAlias,
}
-impl<'a> InferenceContext<'a> {
+impl<'db> InferenceContext<'db> {
fn new(
- db: &'a dyn HirDatabase,
+ db: &'db dyn HirDatabase,
owner: DefWithBodyId,
- body: &'a Body,
- resolver: Resolver,
+ body: &'db Body,
+ resolver: Resolver<'db>,
) -> Self {
let trait_env = db.trait_environment_for_body(owner);
InferenceContext {
diff --git a/crates/hir-ty/src/infer/closure.rs b/crates/hir-ty/src/infer/closure.rs
index 800897c..bd57ca8 100644
--- a/crates/hir-ty/src/infer/closure.rs
+++ b/crates/hir-ty/src/infer/closure.rs
@@ -38,7 +38,7 @@
infer::{BreakableKind, CoerceMany, Diverges, coerce::CoerceNever},
make_binders,
mir::{BorrowKind, MirSpan, MutBorrowKind, ProjectionElem},
- to_chalk_trait_id,
+ to_assoc_type_id, to_chalk_trait_id,
traits::FnTrait,
utils::{self, elaborate_clause_supertraits},
};
@@ -245,7 +245,7 @@
}
fn deduce_closure_kind_from_predicate_clauses(
- &self,
+ &mut self,
expected_ty: &Ty,
clauses: impl DoubleEndedIterator<Item = WhereClause>,
closure_kind: ClosureKind,
@@ -378,7 +378,7 @@
}
fn deduce_sig_from_projection(
- &self,
+ &mut self,
closure_kind: ClosureKind,
projection_ty: &ProjectionTy,
projected_ty: &Ty,
@@ -392,13 +392,16 @@
// For now, we only do signature deduction based off of the `Fn` and `AsyncFn` traits,
// for closures and async closures, respectively.
- match closure_kind {
- ClosureKind::Closure | ClosureKind::Async
- if self.fn_trait_kind_from_trait_id(trait_).is_some() =>
- {
- self.extract_sig_from_projection(projection_ty, projected_ty)
- }
- _ => None,
+ let fn_trait_kind = self.fn_trait_kind_from_trait_id(trait_)?;
+ if !matches!(closure_kind, ClosureKind::Closure | ClosureKind::Async) {
+ return None;
+ }
+ if fn_trait_kind.is_async() {
+ // If the expected trait is `AsyncFn(...) -> X`, we don't know what the return type is,
+ // but we do know it must implement `Future<Output = X>`.
+ self.extract_async_fn_sig_from_projection(projection_ty, projected_ty)
+ } else {
+ self.extract_sig_from_projection(projection_ty, projected_ty)
}
}
@@ -424,6 +427,39 @@
)))
}
+ fn extract_async_fn_sig_from_projection(
+ &mut self,
+ projection_ty: &ProjectionTy,
+ projected_ty: &Ty,
+ ) -> Option<FnSubst<Interner>> {
+ let arg_param_ty = projection_ty.substitution.as_slice(Interner)[1].assert_ty_ref(Interner);
+
+ let TyKind::Tuple(_, input_tys) = arg_param_ty.kind(Interner) else {
+ return None;
+ };
+
+ let ret_param_future_output = projected_ty;
+ let ret_param_future = self.table.new_type_var();
+ let future_output =
+ LangItem::FutureOutput.resolve_type_alias(self.db, self.resolver.krate())?;
+ let future_projection = crate::AliasTy::Projection(crate::ProjectionTy {
+ associated_ty_id: to_assoc_type_id(future_output),
+ substitution: Substitution::from1(Interner, ret_param_future.clone()),
+ });
+ self.table.register_obligation(
+ crate::AliasEq { alias: future_projection, ty: ret_param_future_output.clone() }
+ .cast(Interner),
+ );
+
+ Some(FnSubst(Substitution::from_iter(
+ Interner,
+ input_tys.iter(Interner).map(|t| t.cast(Interner)).chain(Some(GenericArg::new(
+ Interner,
+ chalk_ir::GenericArgData::Ty(ret_param_future),
+ ))),
+ )))
+ }
+
fn fn_trait_kind_from_trait_id(&self, trait_id: hir_def::TraitId) -> Option<FnTrait> {
FnTrait::from_lang_item(self.db.lang_attr(trait_id.into())?)
}
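
The new `extract_async_fn_sig_from_projection` deduces a closure signature from an expected `AsyncFn(Args) -> Ret` bound: the closure's return type becomes a fresh inference variable, constrained by an `AliasEq` obligation `<?ret as Future>::Output == Ret`. A small user-level illustration of what this enables (assumes a toolchain with stable async closures, Rust 1.85+; `takes` is illustrative, not from the codebase):

    // `takes` is illustrative; requires a toolchain with stable async closures (Rust 1.85+).
    fn takes<F: std::ops::AsyncFn() -> i32>(f: F) {
        let _ = f;
    }

    fn main() {
        // The closure's signature is deduced as `fn() -> ?ret` with the obligation
        // `<?ret as Future>::Output == i32`, so the body is checked against `i32`.
        takes(async || 1);
    }
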
diff --git a/crates/hir-ty/src/infer/diagnostics.rs b/crates/hir-ty/src/infer/diagnostics.rs
index e3c4f55..003364d 100644
--- a/crates/hir-ty/src/infer/diagnostics.rs
+++ b/crates/hir-ty/src/infer/diagnostics.rs
@@ -61,7 +61,7 @@
#[inline]
pub(super) fn new(
db: &'a dyn HirDatabase,
- resolver: &'a Resolver,
+ resolver: &'a Resolver<'_>,
store: &'a ExpressionStore,
diagnostics: &'a Diagnostics,
source: InferenceTyDiagnosticSource,
diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs
index 8084b39..87b7f34 100644
--- a/crates/hir-ty/src/infer/expr.rs
+++ b/crates/hir-ty/src/infer/expr.rs
@@ -959,8 +959,8 @@
}
Expr::OffsetOf(_) => TyKind::Scalar(Scalar::Uint(UintTy::Usize)).intern(Interner),
Expr::InlineAsm(asm) => {
- let mut check_expr_asm_operand = |expr, is_input: bool| {
- let ty = self.infer_expr_no_expect(expr, ExprIsRead::Yes);
+ let check_expr_asm_operand = |this: &mut Self, expr, is_input: bool| {
+ let ty = this.infer_expr_no_expect(expr, ExprIsRead::Yes);
// If this is an input value, we require its type to be fully resolved
// at this point. This allows us to provide helpful coercions which help
@@ -970,18 +970,18 @@
// allows them to be inferred based on how they are used later in the
// function.
if is_input {
- let ty = self.resolve_ty_shallow(&ty);
+ let ty = this.resolve_ty_shallow(&ty);
match ty.kind(Interner) {
TyKind::FnDef(def, parameters) => {
let fnptr_ty = TyKind::Function(
- CallableSig::from_def(self.db, *def, parameters).to_fn_ptr(),
+ CallableSig::from_def(this.db, *def, parameters).to_fn_ptr(),
)
.intern(Interner);
- _ = self.coerce(Some(expr), &ty, &fnptr_ty, CoerceNever::Yes);
+ _ = this.coerce(Some(expr), &ty, &fnptr_ty, CoerceNever::Yes);
}
TyKind::Ref(mutbl, _, base_ty) => {
let ptr_ty = TyKind::Raw(*mutbl, base_ty.clone()).intern(Interner);
- _ = self.coerce(Some(expr), &ty, &ptr_ty, CoerceNever::Yes);
+ _ = this.coerce(Some(expr), &ty, &ptr_ty, CoerceNever::Yes);
}
_ => {}
}
@@ -990,22 +990,28 @@
let diverge = asm.options.contains(AsmOptions::NORETURN);
asm.operands.iter().for_each(|(_, operand)| match *operand {
- AsmOperand::In { expr, .. } => check_expr_asm_operand(expr, true),
+ AsmOperand::In { expr, .. } => check_expr_asm_operand(self, expr, true),
AsmOperand::Out { expr: Some(expr), .. } | AsmOperand::InOut { expr, .. } => {
- check_expr_asm_operand(expr, false)
+ check_expr_asm_operand(self, expr, false)
}
AsmOperand::Out { expr: None, .. } => (),
AsmOperand::SplitInOut { in_expr, out_expr, .. } => {
- check_expr_asm_operand(in_expr, true);
+ check_expr_asm_operand(self, in_expr, true);
if let Some(out_expr) = out_expr {
- check_expr_asm_operand(out_expr, false);
+ check_expr_asm_operand(self, out_expr, false);
}
}
- // FIXME
- AsmOperand::Label(_) => (),
- // FIXME
- AsmOperand::Const(_) => (),
- // FIXME
+ AsmOperand::Label(expr) => {
+ self.infer_expr(
+ expr,
+ &Expectation::HasType(self.result.standard_types.unit.clone()),
+ ExprIsRead::No,
+ );
+ }
+ AsmOperand::Const(expr) => {
+ self.infer_expr(expr, &Expectation::None, ExprIsRead::No);
+ }
+ // FIXME: `sym` should report an error for operands that are not functions or statics.
AsmOperand::Sym(_) => (),
});
if diverge {
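
The asm-operand check above was changed from a closure that captured `&mut self` to one that takes the receiver explicitly (`this: &mut Self`), so `self` stays usable for the `infer_expr` calls in the surrounding match arms. A standalone sketch of that borrow-checker pattern (names are illustrative):

    struct Ctx { count: u32 }

    impl Ctx {
        fn bump(&mut self) { self.count += 1; }

        fn demo(&mut self) {
            // A closure capturing `&mut self` would keep `self` borrowed for as long as the
            // closure lives; taking the receiver as a parameter leaves `self` usable in between.
            let check = |this: &mut Self, label: &str| {
                this.bump();
                let _ = label;
            };
            check(self, "in");
            self.bump(); // `self` is still free here
            check(self, "out");
        }
    }

    fn main() {
        let mut ctx = Ctx { count: 0 };
        ctx.demo();
        assert_eq!(ctx.count, 3);
    }
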
diff --git a/crates/hir-ty/src/inhabitedness.rs b/crates/hir-ty/src/inhabitedness.rs
index e0c3279..e81a5e3 100644
--- a/crates/hir-ty/src/inhabitedness.rs
+++ b/crates/hir-ty/src/inhabitedness.rs
@@ -7,17 +7,24 @@
};
use hir_def::{AdtId, EnumVariantId, ModuleId, VariantId, visibility::Visibility};
use rustc_hash::FxHashSet;
+use triomphe::Arc;
use crate::{
- Binders, Interner, Substitution, Ty, TyKind, consteval::try_const_usize, db::HirDatabase,
+ AliasTy, Binders, Interner, Substitution, TraitEnvironment, Ty, TyKind,
+ consteval::try_const_usize, db::HirDatabase,
};
// FIXME: Turn this into a query, it can be quite slow
/// Checks whether a type is visibly uninhabited from a particular module.
-pub(crate) fn is_ty_uninhabited_from(db: &dyn HirDatabase, ty: &Ty, target_mod: ModuleId) -> bool {
+pub(crate) fn is_ty_uninhabited_from(
+ db: &dyn HirDatabase,
+ ty: &Ty,
+ target_mod: ModuleId,
+ env: Arc<TraitEnvironment>,
+) -> bool {
let _p = tracing::info_span!("is_ty_uninhabited_from", ?ty).entered();
let mut uninhabited_from =
- UninhabitedFrom { target_mod, db, max_depth: 500, recursive_ty: FxHashSet::default() };
+ UninhabitedFrom { target_mod, db, max_depth: 500, recursive_ty: FxHashSet::default(), env };
let inhabitedness = ty.visit_with(&mut uninhabited_from, DebruijnIndex::INNERMOST);
inhabitedness == BREAK_VISIBLY_UNINHABITED
}
@@ -29,11 +36,12 @@
variant: EnumVariantId,
subst: &Substitution,
target_mod: ModuleId,
+ env: Arc<TraitEnvironment>,
) -> bool {
let _p = tracing::info_span!("is_enum_variant_uninhabited_from").entered();
let mut uninhabited_from =
- UninhabitedFrom { target_mod, db, max_depth: 500, recursive_ty: FxHashSet::default() };
+ UninhabitedFrom { target_mod, db, max_depth: 500, recursive_ty: FxHashSet::default(), env };
let inhabitedness = uninhabited_from.visit_variant(variant.into(), subst);
inhabitedness == BREAK_VISIBLY_UNINHABITED
}
@@ -44,6 +52,7 @@
// guard for preventing stack overflow in non-trivial, non-terminating types
max_depth: usize,
db: &'a dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
}
const CONTINUE_OPAQUELY_INHABITED: ControlFlow<VisiblyUninhabited> = Continue(());
@@ -78,6 +87,12 @@
Some(0) | None => CONTINUE_OPAQUELY_INHABITED,
Some(1..) => item_ty.super_visit_with(self, outer_binder),
},
+ TyKind::Alias(AliasTy::Projection(projection)) => {
+ // FIXME: I think this currently isn't used for monomorphized bodies, so there is no need to handle
+ // `TyKind::AssociatedType`, but it may be in the future.
+ let normalized = self.db.normalize_projection(projection.clone(), self.env.clone());
+ self.visit_ty(&normalized, outer_binder)
+ }
_ => CONTINUE_OPAQUELY_INHABITED,
};
self.recursive_ty.remove(ty);
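
`is_ty_uninhabited_from` now threads a `TraitEnvironment` through so that projection types can be normalized before the inhabitedness check. A minimal illustration of why that matters (illustrative names, plain language semantics rather than the hir API):

    enum Never {}

    trait Pick {
        type Out;
    }

    struct A;
    impl Pick for A {
        type Out = Never;
    }

    // The parameter type is only visibly uninhabited once `<A as Pick>::Out` is
    // normalized to `Never`, and normalization needs a trait environment.
    fn _demo(x: <A as Pick>::Out) {
        match x {}
    }

    fn main() {}
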
diff --git a/crates/hir-ty/src/lower.rs b/crates/hir-ty/src/lower.rs
index 9def39d..ea8e7cc 100644
--- a/crates/hir-ty/src/lower.rs
+++ b/crates/hir-ty/src/lower.rs
@@ -42,7 +42,6 @@
use hir_expand::name::Name;
use la_arena::{Arena, ArenaMap};
use rustc_hash::FxHashSet;
-use rustc_pattern_analysis::Captures;
use stdx::{impl_from, never};
use triomphe::{Arc, ThinArc};
@@ -151,10 +150,10 @@
}
#[derive(Debug)]
-pub struct TyLoweringContext<'a> {
- pub db: &'a dyn HirDatabase,
- resolver: &'a Resolver,
- store: &'a ExpressionStore,
+pub struct TyLoweringContext<'db> {
+ pub db: &'db dyn HirDatabase,
+ resolver: &'db Resolver<'db>,
+ store: &'db ExpressionStore,
def: GenericDefId,
generics: OnceCell<Generics>,
in_binders: DebruijnIndex,
@@ -170,11 +169,11 @@
lifetime_elision: LifetimeElisionKind,
}
-impl<'a> TyLoweringContext<'a> {
+impl<'db> TyLoweringContext<'db> {
pub fn new(
- db: &'a dyn HirDatabase,
- resolver: &'a Resolver,
- store: &'a ExpressionStore,
+ db: &'db dyn HirDatabase,
+ resolver: &'db Resolver<'db>,
+ store: &'db ExpressionStore,
def: GenericDefId,
lifetime_elision: LifetimeElisionKind,
) -> Self {
@@ -1176,13 +1175,13 @@
/// Generate implicit `: Sized` predicates for all generics that have no `?Sized` bound.
/// The exception is the `Self` parameter of a trait def.
-fn implicitly_sized_clauses<'a, 'subst: 'a>(
- db: &dyn HirDatabase,
+fn implicitly_sized_clauses<'db, 'a, 'subst: 'a>(
+ db: &'db dyn HirDatabase,
def: GenericDefId,
explicitly_unsized_tys: &'a FxHashSet<Ty>,
substitution: &'subst Substitution,
- resolver: &Resolver,
-) -> Option<impl Iterator<Item = WhereClause> + Captures<'a> + Captures<'subst>> {
+ resolver: &Resolver<'db>,
+) -> Option<impl Iterator<Item = WhereClause>> {
let sized_trait = LangItem::Sized.resolve_trait(db, resolver.krate()).map(to_chalk_trait_id)?;
let trait_self_idx = trait_self_param_idx(db, def);
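
The dropped `Captures<'a> + Captures<'subst>` bounds were the usual workaround for older RPIT capture rules, under which a returned `impl Iterator` could not borrow from a lifetime that did not appear in its bounds; removing them suggests the opaque type is now allowed to capture the in-scope lifetimes directly. A sketch of the old trick for context (illustrative, not the removed code verbatim):

    // The workaround: an empty trait implemented for everything, used only to make the
    // lifetime appear in the opaque return type's bounds.
    trait Captures<'a> {}
    impl<'a, T: ?Sized> Captures<'a> for T {}

    // On pre-2024 editions, dropping `+ Captures<'a>` here would reject the function:
    // the hidden iterator type borrows `data`, but `'a` would not be captured.
    fn evens<'a>(data: &'a [u32]) -> impl Iterator<Item = u32> + Captures<'a> {
        data.iter().copied().filter(|n| n % 2 == 0)
    }

    fn main() {
        let data = vec![1, 2, 3, 4];
        assert_eq!(evens(&data).collect::<Vec<_>>(), [2, 4]);
    }
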
diff --git a/crates/hir-ty/src/method_resolution.rs b/crates/hir-ty/src/method_resolution.rs
index 8e549ca..3b295d4 100644
--- a/crates/hir-ty/src/method_resolution.rs
+++ b/crates/hir-ty/src/method_resolution.rs
@@ -10,7 +10,7 @@
use hir_def::{
AssocItemId, BlockId, ConstId, FunctionId, HasModule, ImplId, ItemContainerId, Lookup,
ModuleId, TraitId,
- nameres::{DefMap, assoc::ImplItems},
+ nameres::{DefMap, assoc::ImplItems, block_def_map, crate_def_map},
signatures::{ConstFlags, EnumFlags, FnFlags, StructFlags, TraitFlags, TypeAliasFlags},
};
use hir_expand::name::Name;
@@ -152,7 +152,7 @@
let _p = tracing::info_span!("trait_impls_in_crate_query", ?krate).entered();
let mut impls = FxHashMap::default();
- Self::collect_def_map(db, &mut impls, &db.crate_def_map(krate));
+ Self::collect_def_map(db, &mut impls, crate_def_map(db, krate));
Arc::new(Self::finish(impls))
}
@@ -164,7 +164,7 @@
let _p = tracing::info_span!("trait_impls_in_block_query").entered();
let mut impls = FxHashMap::default();
- Self::collect_def_map(db, &mut impls, &db.block_def_map(block));
+ Self::collect_def_map(db, &mut impls, block_def_map(db, block));
if impls.is_empty() { None } else { Some(Arc::new(Self::finish(impls))) }
}
@@ -214,7 +214,7 @@
for konst in module_data.scope.unnamed_consts() {
let body = db.body(konst.into());
for (_, block_def_map) in body.blocks(db) {
- Self::collect_def_map(db, map, &block_def_map);
+ Self::collect_def_map(db, map, block_def_map);
}
}
}
@@ -280,8 +280,8 @@
let _p = tracing::info_span!("inherent_impls_in_crate_query", ?krate).entered();
let mut impls = Self { map: FxHashMap::default(), invalid_impls: Vec::default() };
- let crate_def_map = db.crate_def_map(krate);
- impls.collect_def_map(db, &crate_def_map);
+ let crate_def_map = crate_def_map(db, krate);
+ impls.collect_def_map(db, crate_def_map);
impls.shrink_to_fit();
Arc::new(impls)
@@ -294,8 +294,8 @@
let _p = tracing::info_span!("inherent_impls_in_block_query").entered();
let mut impls = Self { map: FxHashMap::default(), invalid_impls: Vec::default() };
- let block_def_map = db.block_def_map(block);
- impls.collect_def_map(db, &block_def_map);
+ let block_def_map = block_def_map(db, block);
+ impls.collect_def_map(db, block_def_map);
impls.shrink_to_fit();
if impls.map.is_empty() && impls.invalid_impls.is_empty() {
@@ -337,7 +337,7 @@
for konst in module_data.scope.unnamed_consts() {
let body = db.body(konst.into());
for (_, block_def_map) in body.blocks(db) {
- self.collect_def_map(db, &block_def_map);
+ self.collect_def_map(db, block_def_map);
}
}
}
@@ -1399,7 +1399,7 @@
)?;
}
- block = db.block_def_map(block_id).parent().and_then(|module| module.containing_block());
+ block = block_def_map(db, block_id).parent().and_then(|module| module.containing_block());
}
for krate in def_crates {
diff --git a/crates/hir-ty/src/mir/eval/shim.rs b/crates/hir-ty/src/mir/eval/shim.rs
index 26ef95d..90c52ee 100644
--- a/crates/hir-ty/src/mir/eval/shim.rs
+++ b/crates/hir-ty/src/mir/eval/shim.rs
@@ -5,6 +5,7 @@
use chalk_ir::TyKind;
use hir_def::{
+ CrateRootModuleId,
builtin_type::{BuiltinInt, BuiltinUint},
resolver::HasResolver,
};
@@ -153,7 +154,7 @@
) -> Result<Option<FunctionId>> {
// `PanicFmt` is redirected to `ConstPanicFmt`
if let Some(LangItem::PanicFmt) = self.db.lang_attr(def.into()) {
- let resolver = self.db.crate_def_map(self.crate_id).crate_root().resolver(self.db);
+ let resolver = CrateRootModuleId::from(self.crate_id).resolver(self.db);
let Some(const_panic_fmt) =
LangItem::ConstPanicFmt.resolve_function(self.db, resolver.krate())
@@ -1120,7 +1121,7 @@
// We don't call any drop glue yet, so there is nothing here
Ok(())
}
- "transmute" => {
+ "transmute" | "transmute_unchecked" => {
let [arg] = args else {
return Err(MirEvalError::InternalError(
"transmute arg is not provided".into(),
diff --git a/crates/hir-ty/src/mir/lower.rs b/crates/hir-ty/src/mir/lower.rs
index 7b48b15..e6caf2d 100644
--- a/crates/hir-ty/src/mir/lower.rs
+++ b/crates/hir-ty/src/mir/lower.rs
@@ -25,7 +25,7 @@
use triomphe::Arc;
use crate::{
- Adjust, Adjustment, AutoBorrow, CallableDefId, TyBuilder, TyExt,
+ Adjust, Adjustment, AutoBorrow, CallableDefId, TraitEnvironment, TyBuilder, TyExt,
consteval::ConstEvalError,
db::{HirDatabase, InternedClosure, InternedClosureId},
display::{DisplayTarget, HirDisplay, hir_display_with_store},
@@ -68,17 +68,18 @@
locals: Vec<LocalId>,
}
-struct MirLowerCtx<'a> {
+struct MirLowerCtx<'db> {
result: MirBody,
owner: DefWithBodyId,
current_loop_blocks: Option<LoopBlocks>,
labeled_loop_blocks: FxHashMap<LabelId, LoopBlocks>,
discr_temp: Option<Place>,
- db: &'a dyn HirDatabase,
- body: &'a Body,
- infer: &'a InferenceResult,
- resolver: Resolver,
+ db: &'db dyn HirDatabase,
+ body: &'db Body,
+ infer: &'db InferenceResult,
+ resolver: Resolver<'db>,
drop_scopes: Vec<DropScope>,
+ env: Arc<TraitEnvironment>,
}
// FIXME: Make this smaller; it's stored in database queries
@@ -288,6 +289,7 @@
closures: vec![],
};
let resolver = owner.resolver(db);
+ let env = db.trait_environment_for_body(owner);
MirLowerCtx {
result: mir,
@@ -300,6 +302,7 @@
labeled_loop_blocks: Default::default(),
discr_temp: None,
drop_scopes: vec![DropScope::default()],
+ env,
}
}
@@ -944,10 +947,7 @@
let cast_kind = if source_ty.as_reference().is_some() {
CastKind::PointerCoercion(PointerCast::ArrayToPointer)
} else {
- let mut table = InferenceTable::new(
- self.db,
- self.db.trait_environment_for_body(self.owner),
- );
+ let mut table = InferenceTable::new(self.db, self.env.clone());
cast_kind(&mut table, &source_ty, &target_ty)?
};
@@ -1412,11 +1412,8 @@
}
fn lower_literal_to_operand(&mut self, ty: Ty, l: &Literal) -> Result<Operand> {
- let size = || {
- self.db
- .layout_of_ty(ty.clone(), self.db.trait_environment_for_body(self.owner))
- .map(|it| it.size.bytes_usize())
- };
+ let size =
+ || self.db.layout_of_ty(ty.clone(), self.env.clone()).map(|it| it.size.bytes_usize());
const USIZE_SIZE: usize = size_of::<usize>();
let bytes: Box<[_]> = match l {
hir_def::hir::Literal::String(b) => {
@@ -1723,7 +1720,12 @@
}
fn is_uninhabited(&self, expr_id: ExprId) -> bool {
- is_ty_uninhabited_from(self.db, &self.infer[expr_id], self.owner.module(self.db))
+ is_ty_uninhabited_from(
+ self.db,
+ &self.infer[expr_id],
+ self.owner.module(self.db),
+ self.env.clone(),
+ )
}
/// This function pushes a `StorageLive` statement for the binding, and applies changes to add `StorageDead` and
diff --git a/crates/hir-ty/src/test_db.rs b/crates/hir-ty/src/test_db.rs
index bcd8aa6..d049c67 100644
--- a/crates/hir-ty/src/test_db.rs
+++ b/crates/hir-ty/src/test_db.rs
@@ -7,7 +7,7 @@
SourceRoot, SourceRootId, SourceRootInput,
};
-use hir_def::{ModuleId, db::DefDatabase};
+use hir_def::{ModuleId, db::DefDatabase, nameres::crate_def_map};
use hir_expand::EditionedFileId;
use rustc_hash::FxHashMap;
use salsa::{AsDynDatabase, Durability};
@@ -27,9 +27,18 @@
impl Default for TestDB {
fn default() -> Self {
+ let events = <Arc<Mutex<Option<Vec<salsa::Event>>>>>::default();
let mut this = Self {
- storage: Default::default(),
- events: Default::default(),
+ storage: salsa::Storage::new(Some(Box::new({
+ let events = events.clone();
+ move |event| {
+ let mut events = events.lock().unwrap();
+ if let Some(events) = &mut *events {
+ events.push(event);
+ }
+ }
+ }))),
+ events,
files: Default::default(),
crates_map: Default::default(),
};
@@ -103,14 +112,7 @@
}
#[salsa_macros::db]
-impl salsa::Database for TestDB {
- fn salsa_event(&self, event: &dyn std::ops::Fn() -> salsa::Event) {
- let mut events = self.events.lock().unwrap();
- if let Some(events) = &mut *events {
- events.push(event());
- }
- }
-}
+impl salsa::Database for TestDB {}
impl panic::RefUnwindSafe for TestDB {}
@@ -118,7 +120,7 @@
pub(crate) fn module_for_file_opt(&self, file_id: impl Into<FileId>) -> Option<ModuleId> {
let file_id = file_id.into();
for &krate in self.relevant_crates(file_id).iter() {
- let crate_def_map = self.crate_def_map(krate);
+ let crate_def_map = crate_def_map(self, krate);
for (local_id, data) in crate_def_map.modules() {
if data.origin.file_id().map(|file_id| file_id.file_id(self)) == Some(file_id) {
return Some(crate_def_map.module_id(local_id));
@@ -137,7 +139,7 @@
) -> FxHashMap<EditionedFileId, Vec<(TextRange, String)>> {
let mut files = Vec::new();
for &krate in self.all_crates().iter() {
- let crate_def_map = self.crate_def_map(krate);
+ let crate_def_map = crate_def_map(self, krate);
for (module_id, _) in crate_def_map.modules() {
let file_id = crate_def_map[module_id].origin.file_id();
files.extend(file_id)
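
The `TestDB` change replaces the removed `salsa_event` trait method with an event callback handed to `salsa::Storage::new`, while keeping the same toggleable log: the shared slot stays `None` until a test enables logging. A salsa-free sketch of that toggle (the `Log`/`make_sink` names are illustrative):

    use std::sync::{Arc, Mutex};

    // Shared slot: `None` while logging is disabled; a test flips it to `Some(vec![])`,
    // runs its queries, then takes the recorded events back out.
    type Log<E> = Arc<Mutex<Option<Vec<E>>>>;

    fn make_sink<E>(log: Log<E>) -> impl FnMut(E) {
        move |event| {
            if let Some(events) = &mut *log.lock().unwrap() {
                events.push(event);
            }
        }
    }

    fn main() {
        let log: Log<&'static str> = Arc::default();
        let mut sink = make_sink(log.clone());
        sink("ignored: logging is off");
        *log.lock().unwrap() = Some(Vec::new());
        sink("recorded");
        assert_eq!(log.lock().unwrap().take().unwrap(), ["recorded"]);
    }
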
diff --git a/crates/hir-ty/src/tests.rs b/crates/hir-ty/src/tests.rs
index cc37f65..2b75bd6 100644
--- a/crates/hir-ty/src/tests.rs
+++ b/crates/hir-ty/src/tests.rs
@@ -132,7 +132,7 @@
None => continue,
};
let def_map = module.def_map(&db);
- visit_module(&db, &def_map, module.local_id, &mut |it| {
+ visit_module(&db, def_map, module.local_id, &mut |it| {
let def = match it {
ModuleDefId::FunctionId(it) => it.into(),
ModuleDefId::EnumVariantId(it) => it.into(),
@@ -391,7 +391,7 @@
let def_map = module.def_map(&db);
let mut defs: Vec<(DefWithBodyId, Crate)> = Vec::new();
- visit_module(&db, &def_map, module.local_id, &mut |it| {
+ visit_module(&db, def_map, module.local_id, &mut |it| {
let def = match it {
ModuleDefId::FunctionId(it) => it.into(),
ModuleDefId::EnumVariantId(it) => it.into(),
@@ -504,7 +504,7 @@
fn visit_body(db: &TestDB, body: &Body, cb: &mut dyn FnMut(ModuleDefId)) {
for (_, def_map) in body.blocks(db) {
for (mod_id, _) in def_map.modules() {
- visit_module(db, &def_map, mod_id, cb);
+ visit_module(db, def_map, mod_id, cb);
}
}
}
@@ -570,7 +570,7 @@
let module = db.module_for_file(pos.file_id.file_id(&db));
let crate_def_map = module.def_map(&db);
- visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
+ visit_module(&db, crate_def_map, module.local_id, &mut |def| {
db.infer(match def {
ModuleDefId::FunctionId(it) => it.into(),
ModuleDefId::EnumVariantId(it) => it.into(),
@@ -609,7 +609,7 @@
let module = db.module_for_file(pos.file_id.file_id(&db));
let crate_def_map = module.def_map(&db);
- visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
+ visit_module(&db, crate_def_map, module.local_id, &mut |def| {
db.infer(match def {
ModuleDefId::FunctionId(it) => it.into(),
ModuleDefId::EnumVariantId(it) => it.into(),
diff --git a/crates/hir-ty/src/tests/closure_captures.rs b/crates/hir-ty/src/tests/closure_captures.rs
index 73f1ae5..88d21be 100644
--- a/crates/hir-ty/src/tests/closure_captures.rs
+++ b/crates/hir-ty/src/tests/closure_captures.rs
@@ -20,7 +20,7 @@
let def_map = module.def_map(&db);
let mut defs = Vec::new();
- visit_module(&db, &def_map, module.local_id, &mut |it| defs.push(it));
+ visit_module(&db, def_map, module.local_id, &mut |it| defs.push(it));
let mut captures_info = Vec::new();
for def in defs {
diff --git a/crates/hir-ty/src/tests/incremental.rs b/crates/hir-ty/src/tests/incremental.rs
index 0542be0..48474d2 100644
--- a/crates/hir-ty/src/tests/incremental.rs
+++ b/crates/hir-ty/src/tests/incremental.rs
@@ -19,7 +19,7 @@
let events = db.log_executed(|| {
let module = db.module_for_file(pos.file_id.file_id(&db));
let crate_def_map = module.def_map(&db);
- visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
+ visit_module(&db, crate_def_map, module.local_id, &mut |def| {
if let ModuleDefId::FunctionId(it) = def {
db.infer(it.into());
}
@@ -41,7 +41,7 @@
let events = db.log_executed(|| {
let module = db.module_for_file(pos.file_id.file_id(&db));
let crate_def_map = module.def_map(&db);
- visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
+ visit_module(&db, crate_def_map, module.local_id, &mut |def| {
if let ModuleDefId::FunctionId(it) = def {
db.infer(it.into());
}
@@ -70,7 +70,7 @@
let events = db.log_executed(|| {
let module = db.module_for_file(pos.file_id.file_id(&db));
let crate_def_map = module.def_map(&db);
- visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
+ visit_module(&db, crate_def_map, module.local_id, &mut |def| {
if let ModuleDefId::FunctionId(it) = def {
db.infer(it.into());
}
@@ -97,7 +97,7 @@
let events = db.log_executed(|| {
let module = db.module_for_file(pos.file_id.file_id(&db));
let crate_def_map = module.def_map(&db);
- visit_module(&db, &crate_def_map, module.local_id, &mut |def| {
+ visit_module(&db, crate_def_map, module.local_id, &mut |def| {
if let ModuleDefId::FunctionId(it) = def {
db.infer(it.into());
}
diff --git a/crates/hir-ty/src/tests/macros.rs b/crates/hir-ty/src/tests/macros.rs
index 446f0b2..ea7a113 100644
--- a/crates/hir-ty/src/tests/macros.rs
+++ b/crates/hir-ty/src/tests/macros.rs
@@ -1505,6 +1505,10 @@
!119..120 'o': i32
293..294 'o': i32
308..317 'thread_id': usize
+ !314..320 'OffPtr': usize
+ !333..338 'OffFn': usize
+ !354..355 '0': i32
+ !371..382 'MEM_RELEASE': usize
"#]],
)
}
diff --git a/crates/hir-ty/src/tests/simple.rs b/crates/hir-ty/src/tests/simple.rs
index eeebe38..cf51671 100644
--- a/crates/hir-ty/src/tests/simple.rs
+++ b/crates/hir-ty/src/tests/simple.rs
@@ -3902,3 +3902,66 @@
"#]],
);
}
+
+#[test]
+fn regression_19734() {
+ check_infer(
+ r#"
+trait Foo {
+ type Gat<'o>;
+}
+
+trait Bar {
+ fn baz() -> <Self::Xyz as Foo::Gat<'_>>;
+}
+
+fn foo<T: Bar>() {
+ T::baz();
+}
+ "#,
+ expect![[r#"
+ 110..127 '{ ...z(); }': ()
+ 116..122 'T::baz': fn baz<T>() -> <{unknown} as Foo>::Gat<'?>
+ 116..124 'T::baz()': Foo::Gat<'?, {unknown}>
+ "#]],
+ );
+}
+
+#[test]
+fn asm_const_label() {
+ check_infer(
+ r#"
+//- minicore: asm
+const fn bar() -> i32 { 123 }
+fn baz(s: &str) {}
+
+fn foo() {
+ unsafe {
+ core::arch::asm!(
+ "mov eax, {}",
+ "jmp {}",
+ const bar(),
+ label {
+ baz("hello");
+ },
+ );
+ }
+}
+ "#,
+ expect![[r#"
+ 22..29 '{ 123 }': i32
+ 24..27 '123': i32
+ 37..38 's': &'? str
+ 46..48 '{}': ()
+ !0..68 'builti...");},)': ()
+ !40..43 'bar': fn bar() -> i32
+ !40..45 'bar()': i32
+ !51..66 '{baz("hello");}': ()
+ !52..55 'baz': fn baz(&'? str)
+ !52..64 'baz("hello")': ()
+ !56..63 '"hello"': &'static str
+ 59..257 '{ ... } }': ()
+ 65..255 'unsafe... }': ()
+ "#]],
+ );
+}
diff --git a/crates/hir-ty/src/tests/traits.rs b/crates/hir-ty/src/tests/traits.rs
index 1413760..e5d1fbe 100644
--- a/crates/hir-ty/src/tests/traits.rs
+++ b/crates/hir-ty/src/tests/traits.rs
@@ -4884,3 +4884,49 @@
"#]],
);
}
+
+#[test]
+fn import_trait_items() {
+ check_infer(
+ r#"
+//- minicore: default
+use core::default::Default::default;
+fn main() {
+ let a: i32 = default();
+}
+ "#,
+ expect![[r#"
+ 47..78 '{ ...t(); }': ()
+ 57..58 'a': i32
+ 66..73 'default': {unknown}
+ 66..75 'default()': i32
+ "#]],
+ );
+}
+
+#[test]
+fn async_fn_return_type() {
+ check_infer(
+ r#"
+//- minicore: async_fn
+fn foo<F: AsyncFn() -> R, R>(_: F) -> R {
+ loop {}
+}
+
+fn main() {
+ foo(async move || ());
+}
+ "#,
+ expect![[r#"
+ 29..30 '_': F
+ 40..55 '{ loop {} }': R
+ 46..53 'loop {}': !
+ 51..53 '{}': ()
+ 67..97 '{ ...()); }': ()
+ 73..76 'foo': fn foo<impl AsyncFn() -> impl Future<Output = ()>, ()>(impl AsyncFn() -> impl Future<Output = ()>)
+ 73..94 'foo(as...|| ())': ()
+ 77..93 'async ... || ()': impl AsyncFn() -> impl Future<Output = ()>
+ 91..93 '()': ()
+ "#]],
+ );
+}
diff --git a/crates/hir-ty/src/traits.rs b/crates/hir-ty/src/traits.rs
index f9f8776..7414b4f 100644
--- a/crates/hir-ty/src/traits.rs
+++ b/crates/hir-ty/src/traits.rs
@@ -291,4 +291,9 @@
pub fn get_id(self, db: &dyn HirDatabase, krate: Crate) -> Option<TraitId> {
self.lang_item().resolve_trait(db, krate)
}
+
+ #[inline]
+ pub(crate) fn is_async(self) -> bool {
+ matches!(self, FnTrait::AsyncFn | FnTrait::AsyncFnMut | FnTrait::AsyncFnOnce)
+ }
}
diff --git a/crates/hir-ty/src/variance.rs b/crates/hir-ty/src/variance.rs
index 6e1cd9a..d6b43ae 100644
--- a/crates/hir-ty/src/variance.rs
+++ b/crates/hir-ty/src/variance.rs
@@ -984,7 +984,7 @@
let mut defs: Vec<GenericDefId> = Vec::new();
let module = db.module_for_file_opt(file_id.file_id(&db)).unwrap();
let def_map = module.def_map(&db);
- crate::tests::visit_module(&db, &def_map, module.local_id, &mut |it| {
+ crate::tests::visit_module(&db, def_map, module.local_id, &mut |it| {
defs.push(match it {
ModuleDefId::FunctionId(it) => it.into(),
ModuleDefId::AdtId(it) => it.into(),
diff --git a/crates/hir/src/attrs.rs b/crates/hir/src/attrs.rs
index b1c478d..b1cf30b 100644
--- a/crates/hir/src/attrs.rs
+++ b/crates/hir/src/attrs.rs
@@ -105,11 +105,12 @@
/// Resolves the item `link` points to in the scope of `def`.
pub fn resolve_doc_path_on(
db: &dyn HirDatabase,
- def: impl HasAttrs,
+ def: impl HasAttrs + Copy,
link: &str,
ns: Option<Namespace>,
+ is_inner_doc: bool,
) -> Option<DocLinkDef> {
- resolve_doc_path_on_(db, link, def.attr_id(), ns)
+ resolve_doc_path_on_(db, link, def.attr_id(), ns, is_inner_doc)
}
fn resolve_doc_path_on_(
@@ -117,9 +118,18 @@
link: &str,
attr_id: AttrDefId,
ns: Option<Namespace>,
+ is_inner_doc: bool,
) -> Option<DocLinkDef> {
let resolver = match attr_id {
- AttrDefId::ModuleId(it) => it.resolver(db),
+ AttrDefId::ModuleId(it) => {
+ if is_inner_doc {
+ it.resolver(db)
+ } else if let Some(parent) = Module::from(it).parent(db) {
+ parent.id.resolver(db)
+ } else {
+ it.resolver(db)
+ }
+ }
AttrDefId::FieldId(it) => it.parent.resolver(db),
AttrDefId::AdtId(it) => it.resolver(db),
AttrDefId::FunctionId(it) => it.resolver(db),
@@ -160,7 +170,7 @@
fn resolve_assoc_or_field(
db: &dyn HirDatabase,
- resolver: Resolver,
+ resolver: Resolver<'_>,
path: ModPath,
name: Name,
ns: Option<Namespace>,
@@ -248,7 +258,7 @@
fn resolve_impl_trait_item(
db: &dyn HirDatabase,
- resolver: Resolver,
+ resolver: Resolver<'_>,
ty: &Type,
name: &Name,
ns: Option<Namespace>,
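
With `is_inner_doc`, doc links on a module are resolved in the scope where the comment is written: outer `///` docs use the parent module's resolver, while inner `//!` docs use the module's own resolver (the crate root keeps its own resolver either way). Roughly, for links like these (illustrative example):

    pub struct Outside;

    /// Outer docs are written in the parent module, so with this change a link like
    /// [`Outside`] resolves here without any path prefix.
    pub mod stuff {
        //! Inner docs are resolved inside `stuff`, so the same item needs
        //! [`super::Outside`] (or `crate::Outside`) here.
        pub struct Inside;
    }
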
diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs
index 3f1d5bb..e8218cf 100644
--- a/crates/hir/src/lib.rs
+++ b/crates/hir/src/lib.rs
@@ -97,7 +97,8 @@
diagnostics::*,
has_source::HasSource,
semantics::{
- PathResolution, Semantics, SemanticsImpl, SemanticsScope, TypeInfo, VisibleTraits,
+ PathResolution, PathResolutionPerNs, Semantics, SemanticsImpl, SemanticsScope, TypeInfo,
+ VisibleTraits,
},
};
@@ -119,7 +120,7 @@
find_path::PrefixKind,
import_map,
lang_item::LangItem,
- nameres::{DefMap, ModuleSource},
+ nameres::{DefMap, ModuleSource, crate_def_map},
per_ns::Namespace,
type_ref::{Mutability, TypeRef},
visibility::Visibility,
@@ -227,7 +228,7 @@
}
pub fn modules(self, db: &dyn HirDatabase) -> Vec<Module> {
- let def_map = db.crate_def_map(self.id);
+ let def_map = crate_def_map(db, self.id);
def_map.modules().map(|(id, _)| def_map.module_id(id).into()).collect()
}
@@ -528,7 +529,7 @@
/// might be missing `krate`. This can happen if a module's file is not included
/// in the module tree of any target in `Cargo.toml`.
pub fn crate_root(self, db: &dyn HirDatabase) -> Module {
- let def_map = db.crate_def_map(self.id.krate());
+ let def_map = crate_def_map(db, self.id.krate());
Module { id: def_map.crate_root().into() }
}
@@ -2468,7 +2469,7 @@
{
return None;
}
- let def_map = db.crate_def_map(HasModule::krate(&self.id, db));
+ let def_map = crate_def_map(db, HasModule::krate(&self.id, db));
def_map.fn_as_proc_macro(self.id).map(|id| Macro { id: id.into() })
}
@@ -4015,8 +4016,7 @@
if let builtin @ Some(_) = Self::builtin(name) {
return builtin;
}
- let idx = db
- .crate_def_map(krate.id)
+ let idx = crate_def_map(db, krate.id)
.registered_attrs()
.iter()
.position(|it| it.as_str() == name)? as u32;
@@ -4031,7 +4031,7 @@
pub fn name(&self, db: &dyn HirDatabase) -> Name {
match self.krate {
Some(krate) => Name::new_symbol_root(
- db.crate_def_map(krate).registered_attrs()[self.idx as usize].clone(),
+ crate_def_map(db, krate).registered_attrs()[self.idx as usize].clone(),
),
None => Name::new_symbol_root(Symbol::intern(
hir_expand::inert_attr_macro::INERT_ATTRIBUTES[self.idx as usize].name,
@@ -4059,14 +4059,14 @@
pub(crate) fn by_name(db: &dyn HirDatabase, krate: Crate, name: &str) -> Option<Self> {
let krate = krate.id;
let idx =
- db.crate_def_map(krate).registered_tools().iter().position(|it| it.as_str() == name)?
+ crate_def_map(db, krate).registered_tools().iter().position(|it| it.as_str() == name)?
as u32;
Some(ToolModule { krate, idx })
}
pub fn name(&self, db: &dyn HirDatabase) -> Name {
Name::new_symbol_root(
- db.crate_def_map(self.krate).registered_tools()[self.idx as usize].clone(),
+ crate_def_map(db, self.krate).registered_tools()[self.idx as usize].clone(),
)
}
@@ -4488,7 +4488,7 @@
MacroCallKind::Derive { ast_id, derive_attr_index, derive_index, .. } => {
let module_id = self.id.lookup(db).container;
(
- db.crate_def_map(module_id.krate())[module_id.local_id]
+ crate_def_map(db, module_id.krate())[module_id.local_id]
.scope
.derive_macro_invoc(ast_id, derive_attr_index)?,
derive_index,
@@ -4530,7 +4530,7 @@
impl TraitRef {
pub(crate) fn new_with_resolver(
db: &dyn HirDatabase,
- resolver: &Resolver,
+ resolver: &Resolver<'_>,
trait_ref: hir_ty::TraitRef,
) -> TraitRef {
let env = resolver
@@ -4752,13 +4752,13 @@
}
impl Type {
- pub(crate) fn new_with_resolver(db: &dyn HirDatabase, resolver: &Resolver, ty: Ty) -> Type {
+ pub(crate) fn new_with_resolver(db: &dyn HirDatabase, resolver: &Resolver<'_>, ty: Ty) -> Type {
Type::new_with_resolver_inner(db, resolver, ty)
}
pub(crate) fn new_with_resolver_inner(
db: &dyn HirDatabase,
- resolver: &Resolver,
+ resolver: &Resolver<'_>,
ty: Ty,
) -> Type {
let environment = resolver
@@ -5972,6 +5972,59 @@
}
}
+ pub fn tail_padding(&self, field_size: &mut impl FnMut(usize) -> Option<u64>) -> Option<u64> {
+ match self.0.fields {
+ layout::FieldsShape::Primitive => None,
+ layout::FieldsShape::Union(_) => None,
+ layout::FieldsShape::Array { stride, count } => count.checked_sub(1).and_then(|tail| {
+ let tail_field_size = field_size(tail as usize)?;
+ let offset = stride.bytes() * tail;
+ self.0.size.bytes().checked_sub(offset)?.checked_sub(tail_field_size)
+ }),
+ layout::FieldsShape::Arbitrary { ref offsets, ref memory_index } => {
+ let tail = memory_index.last_index()?;
+ let tail_field_size = field_size(tail.0.into_raw().into_u32() as usize)?;
+ let offset = offsets.get(tail)?.bytes();
+ self.0.size.bytes().checked_sub(offset)?.checked_sub(tail_field_size)
+ }
+ }
+ }
+
+ pub fn largest_padding(
+ &self,
+ field_size: &mut impl FnMut(usize) -> Option<u64>,
+ ) -> Option<u64> {
+ match self.0.fields {
+ layout::FieldsShape::Primitive => None,
+ layout::FieldsShape::Union(_) => None,
+ layout::FieldsShape::Array { stride: _, count: 0 } => None,
+ layout::FieldsShape::Array { stride, .. } => {
+ let size = field_size(0)?;
+ stride.bytes().checked_sub(size)
+ }
+ layout::FieldsShape::Arbitrary { ref offsets, ref memory_index } => {
+ let mut reverse_index = vec![None; memory_index.len()];
+ for (src, (mem, offset)) in memory_index.iter().zip(offsets.iter()).enumerate() {
+ reverse_index[*mem as usize] = Some((src, offset.bytes()));
+ }
+ if reverse_index.iter().any(|it| it.is_none()) {
+ stdx::never!();
+ return None;
+ }
+ reverse_index
+ .into_iter()
+ .flatten()
+ .chain(std::iter::once((0, self.0.size.bytes())))
+ .tuple_windows()
+ .filter_map(|((i, start), (_, end))| {
+ let size = field_size(i)?;
+ end.checked_sub(start)?.checked_sub(size)
+ })
+ .max()
+ }
+ }
+ }
+
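
`tail_padding` and `largest_padding` read the computed layout: the gap after the last field up to the type's size, and the largest gap between consecutive fields in memory order. For intuition, a small worked example using plain `std::mem` arithmetic on a `#[repr(C)]` struct (not the hir layout API):

    #[allow(dead_code)]
    #[repr(C)]
    struct S {
        a: u8,  // offset 0
        b: u32, // offset 4 -> 3 bytes of padding after `a`
        c: u16, // offset 8 -> 2 bytes of tail padding (size 12, align 4)
    }

    fn main() {
        use std::mem::{align_of, offset_of, size_of};
        assert_eq!(offset_of!(S, b), 4);
        assert_eq!(offset_of!(S, c), 8);
        assert_eq!((size_of::<S>(), align_of::<S>()), (12, 4));
        // largest inter-field gap: 4 - (0 + 1) = 3; tail padding: 12 - (8 + 2) = 2.
    }
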
pub fn enum_tag_size(&self) -> Option<usize> {
let tag_size =
if let layout::Variants::Multiple { tag, tag_encoding, .. } = &self.0.variants {
@@ -6400,7 +6453,7 @@
})
.filter_map(|&krate| {
let segments = segments.clone();
- let mut def_map = db.crate_def_map(krate);
+ let mut def_map = crate_def_map(db, krate);
let mut module = &def_map[DefMap::ROOT];
let mut segments = segments.with_position().peekable();
while let Some((_, segment)) = segments.next_if(|&(position, _)| {
diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index 4d092c1..aea2254 100644
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -15,7 +15,7 @@
DefWithBodyId, FunctionId, MacroId, StructId, TraitId, VariantId,
expr_store::{Body, ExprOrPatSource, path::Path},
hir::{BindingId, Expr, ExprId, ExprOrPatId, Pat},
- nameres::ModuleOrigin,
+ nameres::{ModuleOrigin, crate_def_map},
resolver::{self, HasResolver, Resolver, TypeNs},
type_ref::Mutability,
};
@@ -103,6 +103,26 @@
}
}
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+pub struct PathResolutionPerNs {
+ pub type_ns: Option<PathResolution>,
+ pub value_ns: Option<PathResolution>,
+ pub macro_ns: Option<PathResolution>,
+}
+
+impl PathResolutionPerNs {
+ pub fn new(
+ type_ns: Option<PathResolution>,
+ value_ns: Option<PathResolution>,
+ macro_ns: Option<PathResolution>,
+ ) -> Self {
+ PathResolutionPerNs { type_ns, value_ns, macro_ns }
+ }
+ pub fn any(&self) -> Option<PathResolution> {
+ self.type_ns.or(self.value_ns).or(self.macro_ns)
+ }
+}
+
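
`PathResolutionPerNs` keeps one optional resolution per namespace, and `any()` applies a fixed precedence: types, then values, then macros. A standalone mirror of that precedence (illustrative types, not the real `PathResolution`):

    #[derive(Debug, Clone, Copy, PartialEq)]
    enum Res { Type, Value, Macro }

    #[derive(Default)]
    struct PerNs {
        type_ns: Option<Res>,
        value_ns: Option<Res>,
        macro_ns: Option<Res>,
    }

    impl PerNs {
        // Same precedence as `PathResolutionPerNs::any()`: type, then value, then macro.
        fn any(&self) -> Option<Res> {
            self.type_ns.or(self.value_ns).or(self.macro_ns)
        }
    }

    fn main() {
        let r = PerNs { value_ns: Some(Res::Value), macro_ns: Some(Res::Macro), ..Default::default() };
        assert_eq!(r.any(), Some(Res::Value));
        let t = PerNs { type_ns: Some(Res::Type), ..Default::default() };
        assert_eq!(t.any(), Some(Res::Type));
    }
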
#[derive(Debug)]
pub struct TypeInfo {
/// The original type of the expression or pattern.
@@ -127,7 +147,7 @@
}
/// Primary API to get semantic information, like types, from syntax trees.
-pub struct Semantics<'db, DB> {
+pub struct Semantics<'db, DB: ?Sized> {
pub db: &'db DB,
imp: SemanticsImpl<'db>,
}
@@ -341,7 +361,7 @@
match file_id {
HirFileId::FileId(file_id) => {
let module = self.file_to_module_defs(file_id.file_id(self.db)).next()?;
- let def_map = self.db.crate_def_map(module.krate().id);
+ let def_map = crate_def_map(self.db, module.krate().id);
match def_map[module.id.local_id].origin {
ModuleOrigin::CrateRoot { .. } => None,
ModuleOrigin::File { declaration, declaration_tree_id, .. } => {
@@ -387,14 +407,10 @@
res
}
- pub fn expand_macro_call(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
- let sa = self.analyze_no_infer(macro_call.syntax())?;
-
- let macro_call = InFile::new(sa.file_id, macro_call);
- let file_id = sa.expand(self.db, macro_call)?;
-
+ pub fn expand_macro_call(&self, macro_call: &ast::MacroCall) -> Option<InFile<SyntaxNode>> {
+ let file_id = self.to_def(macro_call)?;
let node = self.parse_or_expand(file_id.into());
- Some(node)
+ Some(InFile::new(file_id.into(), node))
}
pub fn check_cfg_attr(&self, attr: &ast::TokenTree) -> Option<bool> {
@@ -414,10 +430,7 @@
&self,
macro_call: &ast::MacroCall,
) -> Option<ExpandResult<SyntaxNode>> {
- let sa = self.analyze_no_infer(macro_call.syntax())?;
-
- let macro_call = InFile::new(sa.file_id, macro_call);
- let file_id = sa.expand(self.db, macro_call)?;
+ let file_id = self.to_def(macro_call)?;
let macro_call = self.db.lookup_intern_macro_call(file_id);
let skip = matches!(
@@ -448,10 +461,10 @@
}
/// If `item` has an attribute macro attached to it, expands it.
- pub fn expand_attr_macro(&self, item: &ast::Item) -> Option<ExpandResult<SyntaxNode>> {
+ pub fn expand_attr_macro(&self, item: &ast::Item) -> Option<ExpandResult<InFile<SyntaxNode>>> {
let src = self.wrap_node_infile(item.clone());
let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(src.as_ref()))?;
- Some(self.expand(macro_call_id))
+ Some(self.expand(macro_call_id).map(|it| InFile::new(macro_call_id.into(), it)))
}
pub fn expand_derive_as_pseudo_attr_macro(&self, attr: &ast::Attr) -> Option<SyntaxNode> {
@@ -554,9 +567,7 @@
speculative_args: &ast::TokenTree,
token_to_map: SyntaxToken,
) -> Option<(SyntaxNode, Vec<(SyntaxToken, u8)>)> {
- let analyzer = self.analyze_no_infer(actual_macro_call.syntax())?;
- let macro_call = InFile::new(analyzer.file_id, actual_macro_call);
- let macro_file = analyzer.expansion(macro_call)?;
+ let macro_file = self.to_def(actual_macro_call)?;
hir_expand::db::expand_speculative(
self.db,
macro_file,
@@ -758,6 +769,31 @@
})
}
+ /// Descends the token into the include expansion, if its file is an included file.
+ pub fn descend_token_into_include_expansion(
+ &self,
+ tok: InRealFile<SyntaxToken>,
+ ) -> InFile<SyntaxToken> {
+ let Some(include) =
+ self.s2d_cache.borrow_mut().get_or_insert_include_for(self.db, tok.file_id)
+ else {
+ return tok.into();
+ };
+ let span = self.db.real_span_map(tok.file_id).span_for_range(tok.value.text_range());
+ let Some(InMacroFile { file_id, value: mut mapped_tokens }) = self.with_ctx(|ctx| {
+ Some(
+ ctx.cache
+ .get_or_insert_expansion(ctx.db, include)
+ .map_range_down(span)?
+ .map(SmallVec::<[_; 2]>::from_iter),
+ )
+ }) else {
+ return tok.into();
+ };
+ // We expect at most one result
+ mapped_tokens.pop().map_or_else(|| tok.into(), |(tok, _)| InFile::new(file_id.into(), tok))
+ }
+
/// Maps a node down by mapping its first and last token down.
pub fn descend_node_into_attributes<N: AstNode>(&self, node: N) -> SmallVec<[N; 1]> {
// This might not be the correct way to do this, but it works for now
@@ -826,49 +862,35 @@
res
}
- // FIXME: This isn't quite right wrt to inner attributes
- /// Does a syntactic traversal to check whether this token might be inside a macro call
- pub fn might_be_inside_macro_call(&self, token: &SyntaxToken) -> bool {
- token.parent_ancestors().any(|ancestor| {
+ pub fn is_inside_macro_call(&self, token: InFile<&SyntaxToken>) -> bool {
+ // FIXME: Maybe `ancestors_with_macros()` is more suitable here? Currently
+ // this is only used on real (not macro) files so this is not a problem.
+ token.value.parent_ancestors().any(|ancestor| {
if ast::MacroCall::can_cast(ancestor.kind()) {
return true;
}
- // Check if it is an item (only items can have macro attributes) that has a non-builtin attribute.
- let Some(item) = ast::Item::cast(ancestor) else { return false };
- item.attrs().any(|attr| {
- let Some(meta) = attr.meta() else { return false };
- let Some(path) = meta.path() else { return false };
- if let Some(attr_name) = path.as_single_name_ref() {
- let attr_name = attr_name.text();
- let attr_name = Symbol::intern(attr_name.as_str());
- if attr_name == sym::derive {
- return true;
- }
- // We ignore `#[test]` and friends in the def map, so we cannot expand them.
- // FIXME: We match by text. This is both hacky and incorrect (people can, and do, create
- // other macros named `test`). We cannot fix that unfortunately because we use this method
- // for speculative expansion in completion, which we cannot analyze. Fortunately, most macros
- // named `test` are test-like, meaning their expansion is not terribly important for IDE.
- if attr_name == sym::test
- || attr_name == sym::bench
- || attr_name == sym::test_case
- || find_builtin_attr_idx(&attr_name).is_some()
- {
- return false;
- }
+
+ let Some(item) = ast::Item::cast(ancestor) else {
+ return false;
+ };
+ // Optimization to skip the semantic check.
+ if item.attrs().all(|attr| {
+ attr.simple_name()
+ .is_some_and(|attr| find_builtin_attr_idx(&Symbol::intern(&attr)).is_some())
+ }) {
+ return false;
+ }
+ self.with_ctx(|ctx| {
+ if ctx.item_to_macro_call(token.with_value(&item)).is_some() {
+ return true;
}
- let mut segments = path.segments();
- let mut next_segment_text = || segments.next().and_then(|it| it.name_ref());
- // `#[core::prelude::rust_2024::test]` or `#[std::prelude::rust_2024::test]`.
- if next_segment_text().is_some_and(|it| matches!(&*it.text(), "core" | "std"))
- && next_segment_text().is_some_and(|it| it.text() == "prelude")
- && next_segment_text().is_some()
- && next_segment_text()
- .is_some_and(|it| matches!(&*it.text(), "test" | "bench" | "test_case"))
- {
- return false;
- }
- true
+ let adt = match item {
+ ast::Item::Struct(it) => it.into(),
+ ast::Item::Enum(it) => it.into(),
+ ast::Item::Union(it) => it.into(),
+ _ => return false,
+ };
+ ctx.has_derives(token.with_value(&adt))
})
})
}
@@ -1091,16 +1113,7 @@
let file_id = match m_cache.get(&mcall) {
Some(&it) => it,
None => {
- let it = token
- .parent()
- .and_then(|parent| {
- self.analyze_impl(
- InFile::new(expansion, &parent),
- None,
- false,
- )
- })?
- .expand(self.db, mcall.as_ref())?;
+ let it = ast::MacroCall::to_def(self, mcall.as_ref())?;
m_cache.insert(mcall, it);
it
}
@@ -1540,14 +1553,9 @@
}
pub fn resolve_macro_call2(&self, macro_call: InFile<&ast::MacroCall>) -> Option<Macro> {
- self.with_ctx(|ctx| {
- ctx.macro_call_to_macro_call(macro_call)
- .and_then(|call| macro_call_to_macro_id(ctx, call))
- .map(Into::into)
- })
- .or_else(|| {
- self.analyze(macro_call.value.syntax())?.resolve_macro_call(self.db, macro_call)
- })
+ self.to_def2(macro_call)
+ .and_then(|call| self.with_ctx(|ctx| macro_call_to_macro_id(ctx, call)))
+ .map(Into::into)
}
pub fn is_proc_macro_call(&self, macro_call: InFile<&ast::MacroCall>) -> bool {
@@ -1556,14 +1564,8 @@
}
pub fn resolve_macro_call_arm(&self, macro_call: &ast::MacroCall) -> Option<u32> {
- let sa = self.analyze(macro_call.syntax())?;
- self.db
- .parse_macro_expansion(
- sa.expand(self.db, self.wrap_node_infile(macro_call.clone()).as_ref())?,
- )
- .value
- .1
- .matched_arm
+ let file_id = self.to_def(macro_call)?;
+ self.db.parse_macro_expansion(file_id).value.1.matched_arm
}
pub fn get_unsafe_ops(&self, def: DefWithBody) -> FxHashSet<ExprOrPatSource> {
@@ -1606,6 +1608,10 @@
self.resolve_path_with_subst(path).map(|(it, _)| it)
}
+ pub fn resolve_path_per_ns(&self, path: &ast::Path) -> Option<PathResolutionPerNs> {
+ self.analyze(path.syntax())?.resolve_hir_path_per_ns(self.db, path)
+ }
+
pub fn resolve_path_with_subst(
&self,
path: &ast::Path,
@@ -1664,6 +1670,10 @@
T::to_def(self, src)
}
+ pub fn to_def2<T: ToDef>(&self, src: InFile<&T>) -> Option<T::Def> {
+ T::to_def(self, src)
+ }
+
fn file_to_module_defs(&self, file: FileId) -> impl Iterator<Item = Module> {
self.with_ctx(|ctx| ctx.file_to_def(file).to_owned()).into_iter().map(Module::from)
}
@@ -1711,13 +1721,13 @@
}
/// Returns none if the file of the node is not part of a crate.
- fn analyze(&self, node: &SyntaxNode) -> Option<SourceAnalyzer> {
+ fn analyze(&self, node: &SyntaxNode) -> Option<SourceAnalyzer<'db>> {
let node = self.find_file(node);
self.analyze_impl(node, None, true)
}
/// Returns none if the file of the node is not part of a crate.
- fn analyze_no_infer(&self, node: &SyntaxNode) -> Option<SourceAnalyzer> {
+ fn analyze_no_infer(&self, node: &SyntaxNode) -> Option<SourceAnalyzer<'db>> {
let node = self.find_file(node);
self.analyze_impl(node, None, false)
}
@@ -1726,7 +1736,7 @@
&self,
node: &SyntaxNode,
offset: TextSize,
- ) -> Option<SourceAnalyzer> {
+ ) -> Option<SourceAnalyzer<'db>> {
let node = self.find_file(node);
self.analyze_impl(node, Some(offset), false)
}
@@ -1737,7 +1747,7 @@
offset: Option<TextSize>,
// replace this, just make the inference result a `LazyCell`
infer_body: bool,
- ) -> Option<SourceAnalyzer> {
+ ) -> Option<SourceAnalyzer<'db>> {
let _p = tracing::info_span!("SemanticsImpl::analyze_impl").entered();
let container = self.with_ctx(|ctx| ctx.find_container(node))?;
@@ -1984,13 +1994,13 @@
/// Note that if you are wondering "what does this specific existing name mean?",
/// you'd better use the `resolve_` family of methods.
#[derive(Debug)]
-pub struct SemanticsScope<'a> {
- pub db: &'a dyn HirDatabase,
+pub struct SemanticsScope<'db> {
+ pub db: &'db dyn HirDatabase,
file_id: HirFileId,
- resolver: Resolver,
+ resolver: Resolver<'db>,
}
-impl SemanticsScope<'_> {
+impl<'db> SemanticsScope<'db> {
pub fn module(&self) -> Module {
Module { id: self.resolver.module() }
}
@@ -2006,7 +2016,7 @@
})
}
- pub(crate) fn resolver(&self) -> &Resolver {
+ pub(crate) fn resolver(&self) -> &Resolver<'db> {
&self.resolver
}
@@ -2133,7 +2143,7 @@
struct RenameConflictsVisitor<'a> {
db: &'a dyn HirDatabase,
owner: DefWithBodyId,
- resolver: Resolver,
+ resolver: Resolver<'a>,
body: &'a Body,
to_be_renamed: BindingId,
new_name: Symbol,
diff --git a/crates/hir/src/semantics/child_by_source.rs b/crates/hir/src/semantics/child_by_source.rs
index 9393d08..6accf9b 100644
--- a/crates/hir/src/semantics/child_by_source.rs
+++ b/crates/hir/src/semantics/child_by_source.rs
@@ -36,9 +36,14 @@
fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) {
let data = db.trait_items(*self);
- data.attribute_calls().filter(|(ast_id, _)| ast_id.file_id == file_id).for_each(
+ data.macro_calls().filter(|(ast_id, _)| ast_id.file_id == file_id).for_each(
|(ast_id, call_id)| {
- res[keys::ATTR_MACRO_CALL].insert(ast_id.to_ptr(db), call_id);
+ let ptr = ast_id.to_ptr(db);
+ if let Some(ptr) = ptr.cast::<ast::MacroCall>() {
+ res[keys::MACRO_CALL].insert(ptr, call_id);
+ } else {
+ res[keys::ATTR_MACRO_CALL].insert(ptr, call_id);
+ }
},
);
data.items.iter().for_each(|&(_, item)| {
@@ -50,10 +55,14 @@
impl ChildBySource for ImplId {
fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) {
let data = db.impl_items(*self);
- // FIXME: Macro calls
- data.attribute_calls().filter(|(ast_id, _)| ast_id.file_id == file_id).for_each(
+ data.macro_calls().filter(|(ast_id, _)| ast_id.file_id == file_id).for_each(
|(ast_id, call_id)| {
- res[keys::ATTR_MACRO_CALL].insert(ast_id.to_ptr(db), call_id);
+ let ptr = ast_id.to_ptr(db);
+ if let Some(ptr) = ptr.cast::<ast::MacroCall>() {
+ res[keys::MACRO_CALL].insert(ptr, call_id);
+ } else {
+ res[keys::ATTR_MACRO_CALL].insert(ptr, call_id);
+ }
},
);
data.items.iter().for_each(|&(_, item)| {
diff --git a/crates/hir/src/semantics/source_to_def.rs b/crates/hir/src/semantics/source_to_def.rs
index 587c51d..7f6c9af 100644
--- a/crates/hir/src/semantics/source_to_def.rs
+++ b/crates/hir/src/semantics/source_to_def.rs
@@ -96,6 +96,7 @@
keys::{self, Key},
},
hir::{BindingId, Expr, LabelId},
+ nameres::{block_def_map, crate_def_map},
};
use hir_expand::{
EditionedFileId, ExpansionInfo, HirFileId, InMacroFile, MacroCallId, attrs::AttrId,
@@ -180,7 +181,7 @@
for &crate_id in self.db.relevant_crates(file).iter() {
// Note: `mod` declarations in block modules cannot be supported here
- let crate_def_map = self.db.crate_def_map(crate_id);
+ let crate_def_map = crate_def_map(self.db, crate_id);
let n_mods = mods.len();
let modules = |file| {
crate_def_map
@@ -226,7 +227,7 @@
let parent_module = match parent_declaration {
Some(Either::Right(parent_block)) => self
.block_to_def(parent_block.as_ref())
- .map(|block| self.db.block_def_map(block).root_module_id()),
+ .map(|block| block_def_map(self.db, block).root_module_id()),
Some(Either::Left(parent_declaration)) => {
self.module_to_def(parent_declaration.as_ref())
}
@@ -398,19 +399,6 @@
Some((container, label?))
}
- pub(super) fn item_to_macro_call(&mut self, src: InFile<&ast::Item>) -> Option<MacroCallId> {
- let map = self.dyn_map(src)?;
- map[keys::ATTR_MACRO_CALL].get(&AstPtr::new(src.value)).copied()
- }
-
- pub(super) fn macro_call_to_macro_call(
- &mut self,
- src: InFile<&ast::MacroCall>,
- ) -> Option<MacroCallId> {
- let map = self.dyn_map(src)?;
- map[keys::MACRO_CALL].get(&AstPtr::new(src.value)).copied()
- }
-
/// (AttrId, derive attribute call id, derive call ids)
pub(super) fn attr_to_derive_macro_call(
&mut self,
@@ -448,6 +436,17 @@
.or_insert_with(|| container.child_by_source(db, file_id))
}
+ pub(super) fn item_to_macro_call(&mut self, src: InFile<&ast::Item>) -> Option<MacroCallId> {
+ self.to_def(src, keys::ATTR_MACRO_CALL)
+ }
+
+ pub(super) fn macro_call_to_macro_call(
+ &mut self,
+ src: InFile<&ast::MacroCall>,
+ ) -> Option<MacroCallId> {
+ self.to_def(src, keys::MACRO_CALL)
+ }
+
pub(super) fn type_param_to_def(
&mut self,
src: InFile<&ast::TypeParam>,
diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs
index c1a75ce..d22812d 100644
--- a/crates/hir/src/source_analyzer.rs
+++ b/crates/hir/src/source_analyzer.rs
@@ -10,7 +10,9 @@
use crate::{
Adt, AssocItem, BindingMode, BuiltinAttr, BuiltinType, Callable, Const, DeriveHelper, Field,
Function, GenericSubstitution, Local, Macro, ModuleDef, Static, Struct, ToolModule, Trait,
- TraitAlias, TupleField, Type, TypeAlias, Variant, db::HirDatabase, semantics::PathResolution,
+ TraitAlias, TupleField, Type, TypeAlias, Variant,
+ db::HirDatabase,
+ semantics::{PathResolution, PathResolutionPerNs},
};
use either::Either;
use hir_def::{
@@ -29,7 +31,7 @@
type_ref::{Mutability, TypeRefId},
};
use hir_expand::{
- HirFileId, InFile, MacroCallId,
+ HirFileId, InFile,
mod_path::{ModPath, PathKind, path},
name::{AsName, Name},
};
@@ -57,9 +59,9 @@
/// `SourceAnalyzer` is a convenience wrapper which exposes HIR API in terms of
/// original source files. It should not be used inside the HIR itself.
#[derive(Debug)]
-pub(crate) struct SourceAnalyzer {
+pub(crate) struct SourceAnalyzer<'db> {
pub(crate) file_id: HirFileId,
- pub(crate) resolver: Resolver,
+ pub(crate) resolver: Resolver<'db>,
pub(crate) body_or_sig: Option<BodyOrSig>,
}
@@ -85,32 +87,32 @@
},
}
-impl SourceAnalyzer {
+impl<'db> SourceAnalyzer<'db> {
pub(crate) fn new_for_body(
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
def: DefWithBodyId,
node: InFile<&SyntaxNode>,
offset: Option<TextSize>,
- ) -> SourceAnalyzer {
+ ) -> SourceAnalyzer<'db> {
Self::new_for_body_(db, def, node, offset, Some(db.infer(def)))
}
pub(crate) fn new_for_body_no_infer(
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
def: DefWithBodyId,
node: InFile<&SyntaxNode>,
offset: Option<TextSize>,
- ) -> SourceAnalyzer {
+ ) -> SourceAnalyzer<'db> {
Self::new_for_body_(db, def, node, offset, None)
}
pub(crate) fn new_for_body_(
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
def: DefWithBodyId,
node @ InFile { file_id, .. }: InFile<&SyntaxNode>,
offset: Option<TextSize>,
infer: Option<Arc<InferenceResult>>,
- ) -> SourceAnalyzer {
+ ) -> SourceAnalyzer<'db> {
let (body, source_map) = db.body_with_source_map(def);
let scopes = db.expr_scopes(def);
let scope = match offset {
@@ -134,11 +136,11 @@
}
pub(crate) fn new_generic_def(
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
def: GenericDefId,
InFile { file_id, .. }: InFile<&SyntaxNode>,
_offset: Option<TextSize>,
- ) -> SourceAnalyzer {
+ ) -> SourceAnalyzer<'db> {
let (_params, store, source_map) = db.generic_params_and_store_and_source_map(def);
let resolver = def.resolver(db);
SourceAnalyzer {
@@ -149,11 +151,11 @@
}
pub(crate) fn new_variant_body(
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
def: VariantId,
InFile { file_id, .. }: InFile<&SyntaxNode>,
_offset: Option<TextSize>,
- ) -> SourceAnalyzer {
+ ) -> SourceAnalyzer<'db> {
let (fields, source_map) = db.variant_fields_with_source_map(def);
let resolver = def.resolver(db);
SourceAnalyzer {
@@ -168,9 +170,9 @@
}
pub(crate) fn new_for_resolver(
- resolver: Resolver,
+ resolver: Resolver<'db>,
node: InFile<&SyntaxNode>,
- ) -> SourceAnalyzer {
+ ) -> SourceAnalyzer<'db> {
SourceAnalyzer { resolver, body_or_sig: None, file_id: node.file_id }
}
@@ -216,11 +218,7 @@
})
}
- pub(crate) fn expansion(&self, node: InFile<&ast::MacroCall>) -> Option<MacroCallId> {
- self.store_sm()?.expansion(node)
- }
-
- fn trait_environment(&self, db: &dyn HirDatabase) -> Arc<TraitEnvironment> {
+ fn trait_environment(&self, db: &'db dyn HirDatabase) -> Arc<TraitEnvironment> {
self.body_().map(|(def, ..)| def).map_or_else(
|| TraitEnvironment::empty(self.resolver.krate()),
|def| db.trait_environment_for_body(def),
@@ -259,7 +257,7 @@
infer.expr_adjustments.get(&expr_id).map(|v| &**v)
}
- pub(crate) fn type_of_type(&self, db: &dyn HirDatabase, ty: &ast::Type) -> Option<Type> {
+ pub(crate) fn type_of_type(&self, db: &'db dyn HirDatabase, ty: &ast::Type) -> Option<Type> {
let type_ref = self.type_id(ty)?;
let ty = TyLoweringContext::new(
db,
@@ -277,7 +275,7 @@
pub(crate) fn type_of_expr(
&self,
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
expr: &ast::Expr,
) -> Option<(Type, Option<Type>)> {
let expr_id = self.expr_id(expr.clone())?;
@@ -293,7 +291,7 @@
pub(crate) fn type_of_pat(
&self,
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
pat: &ast::Pat,
) -> Option<(Type, Option<Type>)> {
let expr_or_pat_id = self.pat_id(pat)?;
@@ -316,7 +314,7 @@
pub(crate) fn type_of_binding_in_pat(
&self,
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
pat: &ast::IdentPat,
) -> Option<Type> {
let binding_id = self.binding_id_of_pat(pat)?;
@@ -328,7 +326,7 @@
pub(crate) fn type_of_self(
&self,
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
_param: &ast::SelfParam,
) -> Option<Type> {
let binding = self.body()?.self_param?;
@@ -338,7 +336,7 @@
pub(crate) fn binding_mode_of_pat(
&self,
- _db: &dyn HirDatabase,
+ _db: &'db dyn HirDatabase,
pat: &ast::IdentPat,
) -> Option<BindingMode> {
let id = self.pat_id(&pat.clone().into())?;
@@ -353,7 +351,7 @@
}
pub(crate) fn pattern_adjustments(
&self,
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
pat: &ast::Pat,
) -> Option<SmallVec<[Type; 1]>> {
let pat_id = self.pat_id(pat)?;
@@ -370,7 +368,7 @@
pub(crate) fn resolve_method_call_as_callable(
&self,
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
call: &ast::MethodCallExpr,
) -> Option<Callable> {
let expr_id = self.expr_id(call.clone().into())?.as_expr()?;
@@ -384,7 +382,7 @@
pub(crate) fn resolve_method_call(
&self,
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
call: &ast::MethodCallExpr,
) -> Option<Function> {
let expr_id = self.expr_id(call.clone().into())?.as_expr()?;
@@ -395,7 +393,7 @@
pub(crate) fn resolve_method_call_fallback(
&self,
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
call: &ast::MethodCallExpr,
) -> Option<(Either<Function, Field>, Option<GenericSubstitution>)> {
let expr_id = self.expr_id(call.clone().into())?.as_expr()?;
@@ -419,7 +417,7 @@
pub(crate) fn resolve_expr_as_callable(
&self,
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
call: &ast::Expr,
) -> Option<Callable> {
let (orig, adjusted) = self.type_of_expr(db, &call.clone())?;
@@ -441,7 +439,7 @@
&self,
field_expr: ExprId,
infer: &InferenceResult,
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
) -> Option<GenericSubstitution> {
let body = self.store()?;
if let Expr::Field { expr: object_expr, name: _ } = body[field_expr] {
@@ -457,7 +455,7 @@
pub(crate) fn resolve_field_fallback(
&self,
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
field: &ast::FieldExpr,
) -> Option<(Either<Either<Field, TupleField>, Function>, Option<GenericSubstitution>)> {
let (def, ..) = self.body_()?;
@@ -490,7 +488,7 @@
pub(crate) fn resolve_range_pat(
&self,
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
range_pat: &ast::RangePat,
) -> Option<StructId> {
let path: ModPath = match (range_pat.op_kind()?, range_pat.start(), range_pat.end()) {
@@ -509,7 +507,7 @@
pub(crate) fn resolve_range_expr(
&self,
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
range_expr: &ast::RangeExpr,
) -> Option<StructId> {
let path: ModPath = match (range_expr.op_kind()?, range_expr.start(), range_expr.end()) {
@@ -529,7 +527,7 @@
pub(crate) fn resolve_await_to_poll(
&self,
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
await_expr: &ast::AwaitExpr,
) -> Option<FunctionId> {
let mut ty = self.ty_of_expr(await_expr.expr()?)?.clone();
@@ -566,7 +564,7 @@
pub(crate) fn resolve_prefix_expr(
&self,
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
prefix_expr: &ast::PrefixExpr,
) -> Option<FunctionId> {
let (op_trait, op_fn) = match prefix_expr.op_kind()? {
@@ -608,7 +606,7 @@
pub(crate) fn resolve_index_expr(
&self,
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
index_expr: &ast::IndexExpr,
) -> Option<FunctionId> {
let base_ty = self.ty_of_expr(index_expr.base()?)?;
@@ -640,7 +638,7 @@
pub(crate) fn resolve_bin_expr(
&self,
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
binop_expr: &ast::BinExpr,
) -> Option<FunctionId> {
let op = binop_expr.op_kind()?;
@@ -661,7 +659,7 @@
pub(crate) fn resolve_try_expr(
&self,
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
try_expr: &ast::TryExpr,
) -> Option<FunctionId> {
let ty = self.ty_of_expr(try_expr.expr()?)?;
@@ -680,7 +678,7 @@
pub(crate) fn resolve_record_field(
&self,
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
field: &ast::RecordExprField,
) -> Option<(Field, Option<Local>, Type, GenericSubstitution)> {
let record_expr = ast::RecordExpr::cast(field.syntax().parent().and_then(|p| p.parent())?)?;
@@ -724,7 +722,7 @@
pub(crate) fn resolve_record_pat_field(
&self,
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
field: &ast::RecordPatField,
) -> Option<(Field, Type, GenericSubstitution)> {
let field_name = field.field_name()?.as_name();
@@ -743,25 +741,9 @@
))
}
- pub(crate) fn resolve_macro_call(
- &self,
- db: &dyn HirDatabase,
- macro_call: InFile<&ast::MacroCall>,
- ) -> Option<Macro> {
- let bs = self.store_sm()?;
- bs.expansion(macro_call).and_then(|it| {
- // FIXME: Block def maps
- let def = it.lookup(db).def;
- db.crate_def_map(def.krate)
- .macro_def_to_macro_id
- .get(&def.kind.erased_ast_id())
- .map(|it| (*it).into())
- })
- }
-
pub(crate) fn resolve_bind_pat_to_const(
&self,
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
pat: &ast::IdentPat,
) -> Option<ModuleDef> {
let expr_or_pat_id = self.pat_id(&pat.clone().into())?;
@@ -795,7 +777,7 @@
pub(crate) fn resolve_offset_of_field(
&self,
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
name_ref: &ast::NameRef,
) -> Option<(Either<crate::Variant, crate::Field>, GenericSubstitution)> {
let offset_of_expr = ast::OffsetOfExpr::cast(name_ref.syntax().parent()?)?;
@@ -867,7 +849,7 @@
pub(crate) fn resolve_path(
&self,
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
path: &ast::Path,
) -> Option<(PathResolution, Option<GenericSubstitution>)> {
let parent = path.syntax().parent();
@@ -1159,7 +1141,9 @@
prefer_value_ns,
name_hygiene(db, InFile::new(self.file_id, path.syntax())),
Some(&store),
- )?;
+ false,
+ )
+ .any()?;
let subst = (|| {
let parent = parent()?;
let ty = if let Some(expr) = ast::Expr::cast(parent.clone()) {
@@ -1209,9 +1193,29 @@
}
}
- pub(crate) fn record_literal_missing_fields(
+ pub(crate) fn resolve_hir_path_per_ns(
&self,
db: &dyn HirDatabase,
+ path: &ast::Path,
+ ) -> Option<PathResolutionPerNs> {
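+ // Lower the AST path to a HIR path and resolve it in the type, value and macro namespaces at once.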
+ let mut collector = ExprCollector::new(db, self.resolver.module(), self.file_id);
+ let hir_path =
+ collector.lower_path(path.clone(), &mut ExprCollector::impl_trait_error_allocator)?;
+ let store = collector.store.finish();
+ Some(resolve_hir_path_(
+ db,
+ &self.resolver,
+ &hir_path,
+ false,
+ name_hygiene(db, InFile::new(self.file_id, path.syntax())),
+ Some(&store),
+ true,
+ ))
+ }
+
+ pub(crate) fn record_literal_missing_fields(
+ &self,
+ db: &'db dyn HirDatabase,
literal: &ast::RecordExpr,
) -> Option<Vec<(Field, Type)>> {
let body = self.store()?;
@@ -1234,7 +1238,7 @@
pub(crate) fn record_pattern_missing_fields(
&self,
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
pattern: &ast::RecordPat,
) -> Option<Vec<(Field, Type)>> {
let body = self.store()?;
@@ -1251,7 +1255,7 @@
fn missing_fields(
&self,
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
substs: &Substitution,
variant: VariantId,
missing_fields: Vec<LocalFieldId>,
@@ -1268,18 +1272,6 @@
.collect()
}
- pub(crate) fn expand(
- &self,
- db: &dyn HirDatabase,
- macro_call: InFile<&ast::MacroCall>,
- ) -> Option<MacroCallId> {
- self.store_sm().and_then(|bs| bs.expansion(macro_call)).or_else(|| {
- self.resolver.item_scope().macro_invoc(
- macro_call.with_value(db.ast_id_map(macro_call.file_id).ast_id(macro_call.value)),
- )
- })
- }
-
pub(crate) fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<VariantId> {
let infer = self.infer()?;
let expr_id = self.expr_id(record_lit.into())?;
@@ -1288,7 +1280,7 @@
pub(crate) fn is_unsafe_macro_call_expr(
&self,
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
macro_expr: InFile<&ast::MacroExpr>,
) -> bool {
if let Some((def, body, sm, Some(infer))) = self.body_() {
@@ -1313,7 +1305,7 @@
pub(crate) fn resolve_offset_in_format_args(
&self,
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
format_args: InFile<&ast::FormatArgsExpr>,
offset: TextSize,
) -> Option<(TextRange, Option<PathResolution>)> {
@@ -1384,7 +1376,7 @@
fn resolve_impl_method_or_trait_def(
&self,
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
func: FunctionId,
substs: Substitution,
) -> FunctionId {
@@ -1393,7 +1385,7 @@
fn resolve_impl_method_or_trait_def_with_subst(
&self,
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
func: FunctionId,
substs: Substitution,
) -> (FunctionId, Substitution) {
@@ -1407,7 +1399,7 @@
fn resolve_impl_const_or_trait_def_with_subst(
&self,
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
const_id: ConstId,
subs: Substitution,
) -> (ConstId, Substitution) {
@@ -1421,7 +1413,7 @@
fn lang_trait_fn(
&self,
- db: &dyn HirDatabase,
+ db: &'db dyn HirDatabase,
lang_trait: LangItem,
method_name: &Name,
) -> Option<(TraitId, FunctionId)> {
@@ -1527,18 +1519,18 @@
#[inline]
pub(crate) fn resolve_hir_path(
db: &dyn HirDatabase,
- resolver: &Resolver,
+ resolver: &Resolver<'_>,
path: &Path,
hygiene: HygieneId,
store: Option<&ExpressionStore>,
) -> Option<PathResolution> {
- resolve_hir_path_(db, resolver, path, false, hygiene, store)
+ resolve_hir_path_(db, resolver, path, false, hygiene, store, false).any()
}
#[inline]
pub(crate) fn resolve_hir_path_as_attr_macro(
db: &dyn HirDatabase,
- resolver: &Resolver,
+ resolver: &Resolver<'_>,
path: &Path,
) -> Option<Macro> {
resolver
@@ -1549,12 +1541,13 @@
fn resolve_hir_path_(
db: &dyn HirDatabase,
- resolver: &Resolver,
+ resolver: &Resolver<'_>,
path: &Path,
prefer_value_ns: bool,
hygiene: HygieneId,
store: Option<&ExpressionStore>,
-) -> Option<PathResolution> {
+ resolve_per_ns: bool,
+) -> PathResolutionPerNs {
let types = || {
let (ty, unresolved) = match path.type_anchor() {
Some(type_ref) => resolver.generic_def().and_then(|def| {
@@ -1635,14 +1628,36 @@
.map(|(def, _)| PathResolution::Def(ModuleDef::Macro(def.into())))
};
- if prefer_value_ns { values().or_else(types) } else { types().or_else(values) }
- .or_else(items)
- .or_else(macros)
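+ // With per-namespace resolution requested, report candidates from all three namespaces instead of just the first match.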
+ if resolve_per_ns {
+ PathResolutionPerNs {
+ type_ns: types().or_else(items),
+ value_ns: values(),
+ macro_ns: macros(),
+ }
+ } else {
+ let res = if prefer_value_ns {
+ values()
+ .map(|value_ns| PathResolutionPerNs::new(None, Some(value_ns), None))
+ .unwrap_or_else(|| PathResolutionPerNs::new(types(), None, None))
+ } else {
+ types()
+ .map(|type_ns| PathResolutionPerNs::new(Some(type_ns), None, None))
+ .unwrap_or_else(|| PathResolutionPerNs::new(None, values(), None))
+ };
+
+ if res.any().is_some() {
+ res
+ } else if let Some(type_ns) = items() {
+ PathResolutionPerNs::new(Some(type_ns), None, None)
+ } else {
+ PathResolutionPerNs::new(None, None, macros())
+ }
+ }
}
fn resolve_hir_value_path(
db: &dyn HirDatabase,
- resolver: &Resolver,
+ resolver: &Resolver<'_>,
body_owner: Option<DefWithBodyId>,
path: &Path,
hygiene: HygieneId,
@@ -1680,7 +1695,7 @@
/// then we know that `foo` in `my::foo::Bar` refers to the module, not the function.
fn resolve_hir_path_qualifier(
db: &dyn HirDatabase,
- resolver: &Resolver,
+ resolver: &Resolver<'_>,
path: &Path,
store: &ExpressionStore,
) -> Option<PathResolution> {
diff --git a/crates/ide-assists/src/handlers/desugar_try_expr.rs b/crates/ide-assists/src/handlers/desugar_try_expr.rs
new file mode 100644
index 0000000..efadde9
--- /dev/null
+++ b/crates/ide-assists/src/handlers/desugar_try_expr.rs
@@ -0,0 +1,281 @@
+use std::iter;
+
+use ide_db::{
+ assists::{AssistId, ExprFillDefaultMode},
+ ty_filter::TryEnum,
+};
+use syntax::{
+ AstNode, T,
+ ast::{
+ self,
+ edit::{AstNodeEdit, IndentLevel},
+ make,
+ syntax_factory::SyntaxFactory,
+ },
+};
+
+use crate::assist_context::{AssistContext, Assists};
+
+// Assist: desugar_try_expr_match
+//
+// Replaces a `try` expression with a `match` expression.
+//
+// ```
+// # //- minicore: try, option
+// fn handle() {
+// let pat = Some(true)$0?;
+// }
+// ```
+// ->
+// ```
+// fn handle() {
+// let pat = match Some(true) {
+// Some(it) => it,
+// None => return None,
+// };
+// }
+// ```
+
+// Assist: desugar_try_expr_let_else
+//
+// Replaces a `try` expression with a `let else` statement.
+//
+// ```
+// # //- minicore: try, option
+// fn handle() {
+// let pat = Some(true)$0?;
+// }
+// ```
+// ->
+// ```
+// fn handle() {
+// let Some(pat) = Some(true) else {
+// return None;
+// };
+// }
+// ```
+pub(crate) fn desugar_try_expr(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let question_tok = ctx.find_token_syntax_at_offset(T![?])?;
+ let try_expr = question_tok.parent().and_then(ast::TryExpr::cast)?;
+
+ let expr = try_expr.expr()?;
+ let expr_type_info = ctx.sema.type_of_expr(&expr)?;
+
+ let try_enum = TryEnum::from_ty(&ctx.sema, &expr_type_info.original)?;
+
+ let target = try_expr.syntax().text_range();
+ acc.add(
+ AssistId::refactor_rewrite("desugar_try_expr_match"),
+ "Replace try expression with match",
+ target,
+ |edit| {
+ let sad_pat = match try_enum {
+ TryEnum::Option => make::path_pat(make::ext::ident_path("None")),
+ TryEnum::Result => make::tuple_struct_pat(
+ make::ext::ident_path("Err"),
+ iter::once(make::path_pat(make::ext::ident_path("err"))),
+ )
+ .into(),
+ };
+ let sad_expr = match try_enum {
+ TryEnum::Option => {
+ make::expr_return(Some(make::expr_path(make::ext::ident_path("None"))))
+ }
+ TryEnum::Result => make::expr_return(Some(
+ make::expr_call(
+ make::expr_path(make::ext::ident_path("Err")),
+ make::arg_list(iter::once(make::expr_path(make::ext::ident_path("err")))),
+ )
+ .into(),
+ )),
+ };
+
+ let happy_arm = make::match_arm(
+ try_enum.happy_pattern(make::ident_pat(false, false, make::name("it")).into()),
+ None,
+ make::expr_path(make::ext::ident_path("it")),
+ );
+ let sad_arm = make::match_arm(sad_pat, None, sad_expr);
+
+ let match_arm_list = make::match_arm_list([happy_arm, sad_arm]);
+
+ let expr_match = make::expr_match(expr.clone(), match_arm_list)
+ .indent(IndentLevel::from_node(try_expr.syntax()));
+
+ edit.replace_ast::<ast::Expr>(try_expr.clone().into(), expr_match.into());
+ },
+ );
+
+ if let Some(let_stmt) = try_expr.syntax().parent().and_then(ast::LetStmt::cast) {
+ if let_stmt.let_else().is_none() {
+ let pat = let_stmt.pat()?;
+ acc.add(
+ AssistId::refactor_rewrite("desugar_try_expr_let_else"),
+ "Replace try expression with let else",
+ target,
+ |builder| {
+ let make = SyntaxFactory::with_mappings();
+ let mut editor = builder.make_editor(let_stmt.syntax());
+
+ let indent_level = IndentLevel::from_node(let_stmt.syntax());
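+ // Build `let <happy pattern> = <expr> else { return ...; };`; for `Result`, the `Err` payload follows the configured expression fill mode.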
+ let new_let_stmt = make.let_else_stmt(
+ try_enum.happy_pattern(pat),
+ let_stmt.ty(),
+ expr,
+ make.block_expr(
+ iter::once(
+ make.expr_stmt(
+ make.expr_return(Some(match try_enum {
+ TryEnum::Option => make.expr_path(make.ident_path("None")),
+ TryEnum::Result => make
+ .expr_call(
+ make.expr_path(make.ident_path("Err")),
+ make.arg_list(iter::once(
+ match ctx.config.expr_fill_default {
+ ExprFillDefaultMode::Todo => make
+ .expr_macro(
+ make.ident_path("todo"),
+ make.token_tree(
+ syntax::SyntaxKind::L_PAREN,
+ [],
+ ),
+ )
+ .into(),
+ ExprFillDefaultMode::Underscore => {
+ make.expr_underscore().into()
+ }
+ ExprFillDefaultMode::Default => make
+ .expr_macro(
+ make.ident_path("todo"),
+ make.token_tree(
+ syntax::SyntaxKind::L_PAREN,
+ [],
+ ),
+ )
+ .into(),
+ },
+ )),
+ )
+ .into(),
+ }))
+ .indent(indent_level + 1)
+ .into(),
+ )
+ .into(),
+ ),
+ None,
+ )
+ .indent(indent_level),
+ );
+ editor.replace(let_stmt.syntax(), new_let_stmt.syntax());
+ editor.add_mappings(make.finish_with_mappings());
+ builder.add_file_edits(ctx.vfs_file_id(), editor);
+ },
+ );
+ }
+ }
+ Some(())
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use crate::tests::{check_assist, check_assist_by_label, check_assist_not_applicable};
+
+ #[test]
+ fn test_desugar_try_expr_not_applicable() {
+ check_assist_not_applicable(
+ desugar_try_expr,
+ r#"
+ fn test() {
+ let pat: u32 = 25$0;
+ }
+ "#,
+ );
+ }
+
+ #[test]
+ fn test_desugar_try_expr_option() {
+ check_assist(
+ desugar_try_expr,
+ r#"
+//- minicore: try, option
+fn test() {
+ let pat = Some(true)$0?;
+}
+ "#,
+ r#"
+fn test() {
+ let pat = match Some(true) {
+ Some(it) => it,
+ None => return None,
+ };
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn test_desugar_try_expr_result() {
+ check_assist(
+ desugar_try_expr,
+ r#"
+//- minicore: try, from, result
+fn test() {
+ let pat = Ok(true)$0?;
+}
+ "#,
+ r#"
+fn test() {
+ let pat = match Ok(true) {
+ Ok(it) => it,
+ Err(err) => return Err(err),
+ };
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn test_desugar_try_expr_option_let_else() {
+ check_assist_by_label(
+ desugar_try_expr,
+ r#"
+//- minicore: try, option
+fn test() {
+ let pat = Some(true)$0?;
+}
+ "#,
+ r#"
+fn test() {
+ let Some(pat) = Some(true) else {
+ return None;
+ };
+}
+ "#,
+ "Replace try expression with let else",
+ );
+ }
+
+ #[test]
+ fn test_desugar_try_expr_result_let_else() {
+ check_assist_by_label(
+ desugar_try_expr,
+ r#"
+//- minicore: try, from, result
+fn test() {
+ let pat = Ok(true)$0?;
+}
+ "#,
+ r#"
+fn test() {
+ let Ok(pat) = Ok(true) else {
+ return Err(todo!());
+ };
+}
+ "#,
+ "Replace try expression with let else",
+ );
+ }
+}
diff --git a/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs b/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs
index 2ac960e..bab2ccf 100644
--- a/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs
+++ b/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs
@@ -1,7 +1,7 @@
use ide_db::famous_defs::FamousDefs;
use syntax::{
AstNode,
- ast::{self, make},
+ ast::{self, edit_in_place::Indent, make},
ted,
};
@@ -46,6 +46,7 @@
// ```
pub(crate) fn generate_mut_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let impl_def = ctx.find_node_at_offset::<ast::Impl>()?.clone_for_update();
+ let indent = impl_def.indent_level();
let trait_ = impl_def.trait_()?;
if let ast::Type::PathType(trait_path) = trait_ {
@@ -97,8 +98,8 @@
})?;
let assoc_list = make::assoc_item_list().clone_for_update();
- assoc_list.add_item(syntax::ast::AssocItem::Fn(fn_));
ted::replace(impl_def.assoc_item_list()?.syntax(), assoc_list.syntax());
+ impl_def.get_or_create_assoc_item_list().add_item(syntax::ast::AssocItem::Fn(fn_));
let target = impl_def.syntax().text_range();
acc.add(
@@ -106,7 +107,7 @@
"Generate `IndexMut` impl from this `Index` trait",
target,
|edit| {
- edit.insert(target.start(), format!("$0{impl_def}\n\n"));
+ edit.insert(target.start(), format!("$0{impl_def}\n\n{indent}"));
},
)
}
@@ -190,6 +191,93 @@
}
#[test]
+ fn test_generate_mut_trait_impl_non_zero_indent() {
+ check_assist(
+ generate_mut_trait_impl,
+ r#"
+//- minicore: index
+mod foo {
+ pub enum Axis { X = 0, Y = 1, Z = 2 }
+
+ impl<T> core::ops::Index$0<Axis> for [T; 3] where T: Copy {
+ type Output = T;
+
+ fn index(&self, index: Axis) -> &Self::Output {
+ let var_name = &self[index as usize];
+ var_name
+ }
+ }
+}
+"#,
+ r#"
+mod foo {
+ pub enum Axis { X = 0, Y = 1, Z = 2 }
+
+ $0impl<T> core::ops::IndexMut<Axis> for [T; 3] where T: Copy {
+ fn index_mut(&mut self, index: Axis) -> &mut Self::Output {
+ let var_name = &self[index as usize];
+ var_name
+ }
+ }
+
+ impl<T> core::ops::Index<Axis> for [T; 3] where T: Copy {
+ type Output = T;
+
+ fn index(&self, index: Axis) -> &Self::Output {
+ let var_name = &self[index as usize];
+ var_name
+ }
+ }
+}
+"#,
+ );
+
+ check_assist(
+ generate_mut_trait_impl,
+ r#"
+//- minicore: index
+mod foo {
+ mod bar {
+ pub enum Axis { X = 0, Y = 1, Z = 2 }
+
+ impl<T> core::ops::Index$0<Axis> for [T; 3] where T: Copy {
+ type Output = T;
+
+ fn index(&self, index: Axis) -> &Self::Output {
+ let var_name = &self[index as usize];
+ var_name
+ }
+ }
+ }
+}
+"#,
+ r#"
+mod foo {
+ mod bar {
+ pub enum Axis { X = 0, Y = 1, Z = 2 }
+
+ $0impl<T> core::ops::IndexMut<Axis> for [T; 3] where T: Copy {
+ fn index_mut(&mut self, index: Axis) -> &mut Self::Output {
+ let var_name = &self[index as usize];
+ var_name
+ }
+ }
+
+ impl<T> core::ops::Index<Axis> for [T; 3] where T: Copy {
+ type Output = T;
+
+ fn index(&self, index: Axis) -> &Self::Output {
+ let var_name = &self[index as usize];
+ var_name
+ }
+ }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
fn test_generate_mut_trait_impl_not_applicable() {
check_assist_not_applicable(
generate_mut_trait_impl,
diff --git a/crates/ide-assists/src/handlers/generate_new.rs b/crates/ide-assists/src/handlers/generate_new.rs
index f963f48..4837f92 100644
--- a/crates/ide-assists/src/handlers/generate_new.rs
+++ b/crates/ide-assists/src/handlers/generate_new.rs
@@ -129,17 +129,23 @@
// Get the mutable version of the impl to modify
let impl_def = if let Some(impl_def) = impl_def {
+ fn_.indent(impl_def.indent_level());
builder.make_mut(impl_def)
} else {
// Generate a new impl to add the method to
let impl_def = generate_impl(&ast::Adt::Struct(strukt.clone()));
+ let indent_level = strukt.indent_level();
+ fn_.indent(indent_level);
// Insert it after the adt
let strukt = builder.make_mut(strukt.clone());
ted::insert_all_raw(
ted::Position::after(strukt.syntax()),
- vec![make::tokens::blank_line().into(), impl_def.syntax().clone().into()],
+ vec![
+ make::tokens::whitespace(&format!("\n\n{indent_level}")).into(),
+ impl_def.syntax().clone().into(),
+ ],
);
impl_def
@@ -426,6 +432,135 @@
}
#[test]
+ fn non_zero_indent() {
+ check_assist(
+ generate_new,
+ r#"
+mod foo {
+ struct $0Foo {}
+}
+"#,
+ r#"
+mod foo {
+ struct Foo {}
+
+ impl Foo {
+ fn $0new() -> Self {
+ Self { }
+ }
+ }
+}
+"#,
+ );
+ check_assist(
+ generate_new,
+ r#"
+mod foo {
+ mod bar {
+ struct $0Foo {}
+ }
+}
+"#,
+ r#"
+mod foo {
+ mod bar {
+ struct Foo {}
+
+ impl Foo {
+ fn $0new() -> Self {
+ Self { }
+ }
+ }
+ }
+}
+"#,
+ );
+ check_assist(
+ generate_new,
+ r#"
+mod foo {
+ struct $0Foo {}
+
+ impl Foo {
+ fn some() {}
+ }
+}
+"#,
+ r#"
+mod foo {
+ struct Foo {}
+
+ impl Foo {
+ fn $0new() -> Self {
+ Self { }
+ }
+
+ fn some() {}
+ }
+}
+"#,
+ );
+ check_assist(
+ generate_new,
+ r#"
+mod foo {
+ mod bar {
+ struct $0Foo {}
+
+ impl Foo {
+ fn some() {}
+ }
+ }
+}
+"#,
+ r#"
+mod foo {
+ mod bar {
+ struct Foo {}
+
+ impl Foo {
+ fn $0new() -> Self {
+ Self { }
+ }
+
+ fn some() {}
+ }
+ }
+}
+"#,
+ );
+ check_assist(
+ generate_new,
+ r#"
+mod foo {
+ mod bar {
+struct $0Foo {}
+
+ impl Foo {
+ fn some() {}
+ }
+ }
+}
+"#,
+ r#"
+mod foo {
+ mod bar {
+struct Foo {}
+
+ impl Foo {
+ fn $0new() -> Self {
+ Self { }
+ }
+
+ fn some() {}
+ }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
fn check_visibility_of_new_fn_based_on_struct() {
check_assist(
generate_new,
diff --git a/crates/ide-assists/src/handlers/move_bounds.rs b/crates/ide-assists/src/handlers/move_bounds.rs
index 7e8735b..a9df6f6 100644
--- a/crates/ide-assists/src/handlers/move_bounds.rs
+++ b/crates/ide-assists/src/handlers/move_bounds.rs
@@ -1,3 +1,4 @@
+use either::Either;
use syntax::{
ast::{
self, AstNode, HasName, HasTypeBounds,
@@ -30,10 +31,11 @@
) -> Option<()> {
let type_param_list = ctx.find_node_at_offset::<ast::GenericParamList>()?;
- let mut type_params = type_param_list.type_or_const_params();
+ let mut type_params = type_param_list.generic_params();
if type_params.all(|p| match p {
- ast::TypeOrConstParam::Type(t) => t.type_bound_list().is_none(),
- ast::TypeOrConstParam::Const(_) => true,
+ ast::GenericParam::TypeParam(t) => t.type_bound_list().is_none(),
+ ast::GenericParam::LifetimeParam(l) => l.type_bound_list().is_none(),
+ ast::GenericParam::ConstParam(_) => true,
}) {
return None;
}
@@ -53,20 +55,23 @@
match parent {
ast::Fn(it) => it.get_or_create_where_clause(),
ast::Trait(it) => it.get_or_create_where_clause(),
+ ast::TraitAlias(it) => it.get_or_create_where_clause(),
ast::Impl(it) => it.get_or_create_where_clause(),
ast::Enum(it) => it.get_or_create_where_clause(),
ast::Struct(it) => it.get_or_create_where_clause(),
+ ast::TypeAlias(it) => it.get_or_create_where_clause(),
_ => return,
}
};
- for toc_param in type_param_list.type_or_const_params() {
- let type_param = match toc_param {
- ast::TypeOrConstParam::Type(x) => x,
- ast::TypeOrConstParam::Const(_) => continue,
+ for generic_param in type_param_list.generic_params() {
+ let param: &dyn HasTypeBounds = match &generic_param {
+ ast::GenericParam::TypeParam(t) => t,
+ ast::GenericParam::LifetimeParam(l) => l,
+ ast::GenericParam::ConstParam(_) => continue,
};
- if let Some(tbl) = type_param.type_bound_list() {
- if let Some(predicate) = build_predicate(type_param) {
+ if let Some(tbl) = param.type_bound_list() {
+ if let Some(predicate) = build_predicate(generic_param) {
where_clause.add_predicate(predicate)
}
tbl.remove()
@@ -76,9 +81,23 @@
)
}
-fn build_predicate(param: ast::TypeParam) -> Option<ast::WherePred> {
- let path = make::ext::ident_path(&param.name()?.syntax().to_string());
- let predicate = make::where_pred(make::ty_path(path), param.type_bound_list()?.bounds());
+fn build_predicate(param: ast::GenericParam) -> Option<ast::WherePred> {
+ let target = match &param {
+ ast::GenericParam::TypeParam(t) => {
+ Either::Right(make::ty_path(make::ext::ident_path(&t.name()?.to_string())))
+ }
+ ast::GenericParam::LifetimeParam(l) => Either::Left(l.lifetime()?),
+ ast::GenericParam::ConstParam(_) => return None,
+ };
+ let predicate = make::where_pred(
+ target,
+ match param {
+ ast::GenericParam::TypeParam(t) => t.type_bound_list()?,
+ ast::GenericParam::LifetimeParam(l) => l.type_bound_list()?,
+ ast::GenericParam::ConstParam(_) => return None,
+ }
+ .bounds(),
+ );
Some(predicate.clone_for_update())
}
@@ -123,4 +142,13 @@
r#"struct Pair<T>(T, T) where T: u32;"#,
);
}
+
+ #[test]
+ fn move_bounds_to_where_clause_trait() {
+ check_assist(
+ move_bounds_to_where_clause,
+ r#"trait T<'a: 'static, $0T: u32> {}"#,
+ r#"trait T<'a, T> where 'a: 'static, T: u32 {}"#,
+ );
+ }
}
diff --git a/crates/ide-assists/src/handlers/remove_unused_imports.rs b/crates/ide-assists/src/handlers/remove_unused_imports.rs
index 1baf814..16debc4 100644
--- a/crates/ide-assists/src/handlers/remove_unused_imports.rs
+++ b/crates/ide-assists/src/handlers/remove_unused_imports.rs
@@ -1,6 +1,9 @@
use std::collections::hash_map::Entry;
-use hir::{FileRange, InFile, InRealFile, Module, ModuleSource};
+use hir::{
+ FileRange, InFile, InRealFile, Module, ModuleDef, ModuleSource, PathResolution,
+ PathResolutionPerNs,
+};
use ide_db::text_edit::TextRange;
use ide_db::{
FxHashMap, RootDatabase,
@@ -77,22 +80,17 @@
};
// Get the actual definition associated with this use item.
- let res = match ctx.sema.resolve_path(&path) {
- Some(x) => x,
- None => {
+ let res = match ctx.sema.resolve_path_per_ns(&path) {
+ Some(x) if x.any().is_some() => x,
+ Some(_) | None => {
return None;
}
};
- let def = match res {
- hir::PathResolution::Def(d) => Definition::from(d),
- _ => return None,
- };
-
if u.star_token().is_some() {
// Check if any of the children of this module are used
- let def_mod = match def {
- Definition::Module(module) => module,
+ let def_mod = match res.type_ns {
+ Some(PathResolution::Def(ModuleDef::Module(module))) => module,
_ => return None,
};
@@ -105,21 +103,13 @@
})
.any(|d| used_once_in_scope(ctx, d, u.rename(), scope))
{
- return Some(u);
+ Some(u)
+ } else {
+ None
}
- } else if let Definition::Trait(ref t) = def {
- // If the trait or any item is used.
- if !std::iter::once((def, u.rename()))
- .chain(t.items(ctx.db()).into_iter().map(|item| (item.into(), None)))
- .any(|(d, rename)| used_once_in_scope(ctx, d, rename, scope))
- {
- return Some(u);
- }
- } else if !used_once_in_scope(ctx, def, u.rename(), scope) {
- return Some(u);
+ } else {
+ is_path_per_ns_unused_in_scope(ctx, &u, scope, &res).then_some(u)
}
-
- None
})
.peekable();
@@ -141,6 +131,52 @@
}
}
+fn is_path_per_ns_unused_in_scope(
+ ctx: &AssistContext<'_>,
+ u: &ast::UseTree,
+ scope: &mut Vec<SearchScope>,
+ path: &PathResolutionPerNs,
+) -> bool {
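+ // A trait import may be used only through its items, so check those before falling back to the value and macro namespaces.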
+ if let Some(PathResolution::Def(ModuleDef::Trait(ref t))) = path.type_ns {
+ if is_trait_unused_in_scope(ctx, u, scope, t) {
+ let path = [path.value_ns, path.macro_ns];
+ is_path_unused_in_scope(ctx, u, scope, &path)
+ } else {
+ false
+ }
+ } else {
+ let path = [path.type_ns, path.value_ns, path.macro_ns];
+ is_path_unused_in_scope(ctx, u, scope, &path)
+ }
+}
+
+fn is_path_unused_in_scope(
+ ctx: &AssistContext<'_>,
+ u: &ast::UseTree,
+ scope: &mut Vec<SearchScope>,
+ path: &[Option<PathResolution>],
+) -> bool {
+ !path
+ .iter()
+ .filter_map(|path| *path)
+ .filter_map(|res| match res {
+ PathResolution::Def(d) => Some(Definition::from(d)),
+ _ => None,
+ })
+ .any(|def| used_once_in_scope(ctx, def, u.rename(), scope))
+}
+
+fn is_trait_unused_in_scope(
+ ctx: &AssistContext<'_>,
+ u: &ast::UseTree,
+ scope: &mut Vec<SearchScope>,
+ t: &hir::Trait,
+) -> bool {
+ !std::iter::once((Definition::Trait(*t), u.rename()))
+ .chain(t.items(ctx.db()).into_iter().map(|item| (item.into(), None)))
+ .any(|(d, rename)| used_once_in_scope(ctx, d, rename, scope))
+}
+
fn used_once_in_scope(
ctx: &AssistContext<'_>,
def: Definition,
@@ -1012,4 +1048,110 @@
"#,
);
}
+
+ #[test]
+ fn test_unused_macro() {
+ check_assist(
+ remove_unused_imports,
+ r#"
+//- /foo.rs crate:foo
+#[macro_export]
+macro_rules! m { () => {} }
+
+//- /main.rs crate:main deps:foo
+use foo::m;$0
+fn main() {}
+"#,
+ r#"
+fn main() {}
+"#,
+ );
+
+ check_assist_not_applicable(
+ remove_unused_imports,
+ r#"
+//- /foo.rs crate:foo
+#[macro_export]
+macro_rules! m { () => {} }
+
+//- /main.rs crate:main deps:foo
+use foo::m;$0
+fn main() {
+ m!();
+}
+"#,
+ );
+
+ check_assist_not_applicable(
+ remove_unused_imports,
+ r#"
+//- /foo.rs crate:foo
+#[macro_export]
+macro_rules! m { () => {} }
+
+//- /bar.rs crate:bar deps:foo
+pub use foo::m;
+fn m() {}
+
+
+//- /main.rs crate:main deps:bar
+use bar::m;$0
+fn main() {
+ m!();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_conflict_derive_macro() {
+ check_assist_not_applicable(
+ remove_unused_imports,
+ r#"
+//- proc_macros: derive_identity
+//- minicore: derive
+//- /bar.rs crate:bar
+pub use proc_macros::DeriveIdentity;
+pub trait DeriveIdentity {}
+
+//- /main.rs crate:main deps:bar
+$0use bar::DeriveIdentity;$0
+#[derive(DeriveIdentity)]
+struct S;
+"#,
+ );
+
+ check_assist_not_applicable(
+ remove_unused_imports,
+ r#"
+//- proc_macros: derive_identity
+//- minicore: derive
+//- /bar.rs crate:bar
+pub use proc_macros::DeriveIdentity;
+pub fn DeriveIdentity() {}
+
+//- /main.rs crate:main deps:bar
+$0use bar::DeriveIdentity;$0
+#[derive(DeriveIdentity)]
+struct S;
+"#,
+ );
+
+ check_assist_not_applicable(
+ remove_unused_imports,
+ r#"
+//- proc_macros: derive_identity
+//- minicore: derive
+//- /bar.rs crate:bar
+pub use proc_macros::DeriveIdentity;
+pub fn DeriveIdentity() {}
+
+//- /main.rs crate:main deps:bar
+$0use bar::DeriveIdentity;$0
+fn main() {
+ DeriveIdentity();
+}
+"#,
+ );
+ }
}
diff --git a/crates/ide-assists/src/handlers/replace_try_expr_with_match.rs b/crates/ide-assists/src/handlers/replace_try_expr_with_match.rs
deleted file mode 100644
index c6e864f..0000000
--- a/crates/ide-assists/src/handlers/replace_try_expr_with_match.rs
+++ /dev/null
@@ -1,148 +0,0 @@
-use std::iter;
-
-use ide_db::{assists::AssistId, ty_filter::TryEnum};
-use syntax::{
- AstNode, T,
- ast::{
- self,
- edit::{AstNodeEdit, IndentLevel},
- make,
- },
-};
-
-use crate::assist_context::{AssistContext, Assists};
-
-// Assist: replace_try_expr_with_match
-//
-// Replaces a `try` expression with a `match` expression.
-//
-// ```
-// # //- minicore: try, option
-// fn handle() {
-// let pat = Some(true)$0?;
-// }
-// ```
-// ->
-// ```
-// fn handle() {
-// let pat = match Some(true) {
-// Some(it) => it,
-// None => return None,
-// };
-// }
-// ```
-pub(crate) fn replace_try_expr_with_match(
- acc: &mut Assists,
- ctx: &AssistContext<'_>,
-) -> Option<()> {
- let qm_kw = ctx.find_token_syntax_at_offset(T![?])?;
- let qm_kw_parent = qm_kw.parent().and_then(ast::TryExpr::cast)?;
-
- let expr = qm_kw_parent.expr()?;
- let expr_type_info = ctx.sema.type_of_expr(&expr)?;
-
- let try_enum = TryEnum::from_ty(&ctx.sema, &expr_type_info.original)?;
-
- let target = qm_kw_parent.syntax().text_range();
- acc.add(
- AssistId::refactor_rewrite("replace_try_expr_with_match"),
- "Replace try expression with match",
- target,
- |edit| {
- let sad_pat = match try_enum {
- TryEnum::Option => make::path_pat(make::ext::ident_path("None")),
- TryEnum::Result => make::tuple_struct_pat(
- make::ext::ident_path("Err"),
- iter::once(make::path_pat(make::ext::ident_path("err"))),
- )
- .into(),
- };
- let sad_expr = match try_enum {
- TryEnum::Option => {
- make::expr_return(Some(make::expr_path(make::ext::ident_path("None"))))
- }
- TryEnum::Result => make::expr_return(Some(
- make::expr_call(
- make::expr_path(make::ext::ident_path("Err")),
- make::arg_list(iter::once(make::expr_path(make::ext::ident_path("err")))),
- )
- .into(),
- )),
- };
-
- let happy_arm = make::match_arm(
- try_enum.happy_pattern(make::ident_pat(false, false, make::name("it")).into()),
- None,
- make::expr_path(make::ext::ident_path("it")),
- );
- let sad_arm = make::match_arm(sad_pat, None, sad_expr);
-
- let match_arm_list = make::match_arm_list([happy_arm, sad_arm]);
-
- let expr_match = make::expr_match(expr, match_arm_list)
- .indent(IndentLevel::from_node(qm_kw_parent.syntax()));
- edit.replace_ast::<ast::Expr>(qm_kw_parent.into(), expr_match.into());
- },
- )
-}
-
-#[cfg(test)]
-mod tests {
- use super::*;
-
- use crate::tests::{check_assist, check_assist_not_applicable};
-
- #[test]
- fn test_replace_try_expr_with_match_not_applicable() {
- check_assist_not_applicable(
- replace_try_expr_with_match,
- r#"
- fn test() {
- let pat: u32 = 25$0;
- }
- "#,
- );
- }
-
- #[test]
- fn test_replace_try_expr_with_match_option() {
- check_assist(
- replace_try_expr_with_match,
- r#"
-//- minicore: try, option
-fn test() {
- let pat = Some(true)$0?;
-}
- "#,
- r#"
-fn test() {
- let pat = match Some(true) {
- Some(it) => it,
- None => return None,
- };
-}
- "#,
- );
- }
-
- #[test]
- fn test_replace_try_expr_with_match_result() {
- check_assist(
- replace_try_expr_with_match,
- r#"
-//- minicore: try, from, result
-fn test() {
- let pat = Ok(true)$0?;
-}
- "#,
- r#"
-fn test() {
- let pat = match Ok(true) {
- Ok(it) => it,
- Err(err) => return Err(err),
- };
-}
- "#,
- );
- }
-}
diff --git a/crates/ide-assists/src/handlers/unwrap_type_to_generic_arg.rs b/crates/ide-assists/src/handlers/unwrap_type_to_generic_arg.rs
new file mode 100644
index 0000000..7b5adc1
--- /dev/null
+++ b/crates/ide-assists/src/handlers/unwrap_type_to_generic_arg.rs
@@ -0,0 +1,156 @@
+use ide_db::assists::AssistId;
+use syntax::{
+ AstNode,
+ ast::{self, GenericArg, HasGenericArgs},
+};
+
+use crate::{AssistContext, Assists};
+
+// Assist: unwrap_type_to_generic_arg
+//
+// This assist unwraps a type into its generic type argument.
+//
+// ```
+// fn foo() -> $0Option<i32> {
+// todo!()
+// }
+// ```
+// ->
+// ```
+// fn foo() -> i32 {
+// todo!()
+// }
+// ```
+pub(crate) fn unwrap_type_to_generic_arg(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let path_type = ctx.find_node_at_offset::<ast::PathType>()?;
+ let path = path_type.path()?;
+ let segment = path.segment()?;
+ let args_list = segment.generic_arg_list()?;
+
+ let mut generic_arg = None;
+
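+ // Accept exactly one type argument; lifetime and const arguments are ignored, anything else bails out.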
+ for arg in args_list.generic_args() {
+ match arg {
+ GenericArg::ConstArg(_) | GenericArg::LifetimeArg(_) => (),
+ GenericArg::TypeArg(arg) if generic_arg.is_none() => {
+ generic_arg = Some(arg);
+ }
+ _ => return None,
+ }
+ }
+
+ let generic_arg = generic_arg?;
+
+ acc.add(
+ AssistId::refactor_extract("unwrap_type_to_generic_arg"),
+ format!("Unwrap type to type argument {generic_arg}"),
+ path_type.syntax().text_range(),
+ |builder| {
+ let mut editor = builder.make_editor(path_type.syntax());
+ editor.replace(path_type.syntax(), generic_arg.syntax());
+
+ builder.add_file_edits(ctx.vfs_file_id(), editor);
+ },
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ #[test]
+ fn test_unwrap_type_to_generic_arg() {
+ check_assist(
+ unwrap_type_to_generic_arg,
+ r#"
+//- minicore: option
+fn foo() -> $0Option<i32> {
+ todo!()
+}
+"#,
+ r#"
+fn foo() -> i32 {
+ todo!()
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unwrap_type_to_generic_arg_not_applicable_for_non_generic_arg_list() {
+ check_assist_not_applicable(
+ unwrap_type_to_generic_arg,
+ r#"
+fn foo() -> $0i32 {}
+"#,
+ );
+ }
+
+ #[test]
+ fn unwrap_type_to_generic_arg_not_applicable_for_multiple_generic_args() {
+ check_assist_not_applicable(
+ unwrap_type_to_generic_arg,
+ r#"
+//- minicore: result
+fn foo() -> $0Result<i32, ()> {
+ todo!()
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unwrap_type_to_generic_arg_with_lifetime_and_const() {
+ check_assist(
+ unwrap_type_to_generic_arg,
+ r#"
+enum Foo<'a, T, const N: usize> {
+ Bar(T),
+ Baz(&'a [T; N]),
+}
+
+fn test<'a>() -> $0Foo<'a, i32, 3> {
+ todo!()
+}
+"#,
+ r#"
+enum Foo<'a, T, const N: usize> {
+ Bar(T),
+ Baz(&'a [T; N]),
+}
+
+fn test<'a>() -> i32 {
+ todo!()
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unwrap_type_to_generic_arg_in_let_stmt() {
+ check_assist(
+ unwrap_type_to_generic_arg,
+ r#"
+enum Foo<T> {
+ Bar(T),
+ Baz,
+}
+
+fn test() {
+ let foo: $0Foo<i32> = todo!();
+}
+"#,
+ r#"
+enum Foo<T> {
+ Bar(T),
+ Baz,
+}
+
+fn test() {
+ let foo: i32 = todo!();
+}
+"#,
+ );
+ }
+}
diff --git a/crates/ide-assists/src/lib.rs b/crates/ide-assists/src/lib.rs
index 627ed37..c260443 100644
--- a/crates/ide-assists/src/lib.rs
+++ b/crates/ide-assists/src/lib.rs
@@ -139,6 +139,7 @@
mod destructure_struct_binding;
mod destructure_tuple_binding;
mod desugar_doc_comment;
+ mod desugar_try_expr;
mod expand_glob_import;
mod expand_rest_pattern;
mod extract_expressions_from_format_string;
@@ -214,7 +215,6 @@
mod replace_named_generic_with_impl;
mod replace_qualified_name_with_use;
mod replace_string_with_char;
- mod replace_try_expr_with_match;
mod replace_turbofish_with_explicit_type;
mod sort_items;
mod split_import;
@@ -229,6 +229,7 @@
mod unwrap_block;
mod unwrap_return_type;
mod unwrap_tuple;
+ mod unwrap_type_to_generic_arg;
mod wrap_return_type;
mod wrap_unwrap_cfg_attr;
@@ -272,6 +273,7 @@
destructure_struct_binding::destructure_struct_binding,
destructure_tuple_binding::destructure_tuple_binding,
desugar_doc_comment::desugar_doc_comment,
+ desugar_try_expr::desugar_try_expr,
expand_glob_import::expand_glob_import,
expand_glob_import::expand_glob_reexport,
expand_rest_pattern::expand_rest_pattern,
@@ -353,7 +355,6 @@
replace_method_eager_lazy::replace_with_lazy_method,
replace_named_generic_with_impl::replace_named_generic_with_impl,
replace_qualified_name_with_use::replace_qualified_name_with_use,
- replace_try_expr_with_match::replace_try_expr_with_match,
replace_turbofish_with_explicit_type::replace_turbofish_with_explicit_type,
sort_items::sort_items,
split_import::split_import,
@@ -369,6 +370,7 @@
unwrap_block::unwrap_block,
unwrap_return_type::unwrap_return_type,
unwrap_tuple::unwrap_tuple,
+ unwrap_type_to_generic_arg::unwrap_type_to_generic_arg,
wrap_return_type::wrap_return_type,
wrap_unwrap_cfg_attr::wrap_unwrap_cfg_attr,
diff --git a/crates/ide-assists/src/tests/generated.rs b/crates/ide-assists/src/tests/generated.rs
index 01ab0be..72f7195 100644
--- a/crates/ide-assists/src/tests/generated.rs
+++ b/crates/ide-assists/src/tests/generated.rs
@@ -930,6 +930,47 @@
}
#[test]
+fn doctest_desugar_try_expr_let_else() {
+ check_doc_test(
+ "desugar_try_expr_let_else",
+ r#####"
+//- minicore: try, option
+fn handle() {
+ let pat = Some(true)$0?;
+}
+"#####,
+ r#####"
+fn handle() {
+ let Some(pat) = Some(true) else {
+ return None;
+ };
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_desugar_try_expr_match() {
+ check_doc_test(
+ "desugar_try_expr_match",
+ r#####"
+//- minicore: try, option
+fn handle() {
+ let pat = Some(true)$0?;
+}
+"#####,
+ r#####"
+fn handle() {
+ let pat = match Some(true) {
+ Some(it) => it,
+ None => return None,
+ };
+}
+"#####,
+ )
+}
+
+#[test]
fn doctest_expand_glob_import() {
check_doc_test(
"expand_glob_import",
@@ -3097,27 +3138,6 @@
}
#[test]
-fn doctest_replace_try_expr_with_match() {
- check_doc_test(
- "replace_try_expr_with_match",
- r#####"
-//- minicore: try, option
-fn handle() {
- let pat = Some(true)$0?;
-}
-"#####,
- r#####"
-fn handle() {
- let pat = match Some(true) {
- Some(it) => it,
- None => return None,
- };
-}
-"#####,
- )
-}
-
-#[test]
fn doctest_replace_turbofish_with_explicit_type() {
check_doc_test(
"replace_turbofish_with_explicit_type",
@@ -3482,6 +3502,23 @@
}
#[test]
+fn doctest_unwrap_type_to_generic_arg() {
+ check_doc_test(
+ "unwrap_type_to_generic_arg",
+ r#####"
+fn foo() -> $0Option<i32> {
+ todo!()
+}
+"#####,
+ r#####"
+fn foo() -> i32 {
+ todo!()
+}
+"#####,
+ )
+}
+
+#[test]
fn doctest_wrap_return_type_in_option() {
check_doc_test(
"wrap_return_type_in_option",
diff --git a/crates/ide-completion/src/completions/env_vars.rs b/crates/ide-completion/src/completions/env_vars.rs
index cd18b3d..92cbf41 100644
--- a/crates/ide-completion/src/completions/env_vars.rs
+++ b/crates/ide-completion/src/completions/env_vars.rs
@@ -13,6 +13,7 @@
const CARGO_DEFINED_VARS: &[(&str, &str)] = &[
("CARGO", "Path to the cargo binary performing the build"),
("CARGO_MANIFEST_DIR", "The directory containing the manifest of your package"),
+ ("CARGO_MANIFEST_PATH", "The path to the manifest of your package"),
("CARGO_PKG_VERSION", "The full version of your package"),
("CARGO_PKG_VERSION_MAJOR", "The major version of your package"),
("CARGO_PKG_VERSION_MINOR", "The minor version of your package"),
diff --git a/crates/ide-completion/src/completions/postfix.rs b/crates/ide-completion/src/completions/postfix.rs
index 54be7d2..3cdf211 100644
--- a/crates/ide-completion/src/completions/postfix.rs
+++ b/crates/ide-completion/src/completions/postfix.rs
@@ -311,6 +311,8 @@
let mut prefix = String::new();
+ let mut found_ref_or_deref = false;
+
while let Some(parent_deref_element) =
resulting_element.syntax().parent().and_then(ast::PrefixExpr::cast)
{
@@ -318,27 +320,26 @@
break;
}
+ found_ref_or_deref = true;
resulting_element = ast::Expr::from(parent_deref_element);
prefix.insert(0, '*');
}
- if let Some(first_ref_expr) = resulting_element.syntax().parent().and_then(ast::RefExpr::cast) {
- if let Some(expr) = first_ref_expr.expr() {
- resulting_element = expr;
- }
+ while let Some(parent_ref_element) =
+ resulting_element.syntax().parent().and_then(ast::RefExpr::cast)
+ {
+ found_ref_or_deref = true;
+ let exclusive = parent_ref_element.mut_token().is_some();
+ resulting_element = ast::Expr::from(parent_ref_element);
- while let Some(parent_ref_element) =
- resulting_element.syntax().parent().and_then(ast::RefExpr::cast)
- {
- let exclusive = parent_ref_element.mut_token().is_some();
- resulting_element = ast::Expr::from(parent_ref_element);
+ prefix.insert_str(0, if exclusive { "&mut " } else { "&" });
+ }
- prefix.insert_str(0, if exclusive { "&mut " } else { "&" });
- }
- } else {
- // If we do not find any ref expressions, restore
+ if !found_ref_or_deref {
+ // If we do not find any ref/deref expressions, restore
// all the progress of tree climbing
+ prefix.clear();
resulting_element = initial_element.clone();
}
diff --git a/crates/ide-completion/src/context.rs b/crates/ide-completion/src/context.rs
index 3baf1f3..5287627 100644
--- a/crates/ide-completion/src/context.rs
+++ b/crates/ide-completion/src/context.rs
@@ -8,8 +8,8 @@
use base_db::RootQueryDb as _;
use hir::{
- DisplayTarget, HasAttrs, Local, ModuleDef, ModuleSource, Name, PathResolution, ScopeDef,
- Semantics, SemanticsScope, Symbol, Type, TypeInfo,
+ DisplayTarget, HasAttrs, InFile, Local, ModuleDef, ModuleSource, Name, PathResolution,
+ ScopeDef, Semantics, SemanticsScope, Symbol, Type, TypeInfo,
};
use ide_db::{
FilePosition, FxHashMap, FxHashSet, RootDatabase, famous_defs::FamousDefs,
@@ -751,7 +751,7 @@
original_offset,
} = expand_and_analyze(
&sema,
- original_file.syntax().clone(),
+ InFile::new(editioned_file_id.into(), original_file.syntax().clone()),
file_with_fake_ident.syntax().clone(),
offset,
&original_token,
diff --git a/crates/ide-completion/src/context/analysis.rs b/crates/ide-completion/src/context/analysis.rs
index 391e237..7a2230b 100644
--- a/crates/ide-completion/src/context/analysis.rs
+++ b/crates/ide-completion/src/context/analysis.rs
@@ -1,7 +1,7 @@
//! Module responsible for analyzing the code surrounding the cursor for completion.
use std::iter;
-use hir::{ExpandResult, Semantics, Type, TypeInfo, Variant};
+use hir::{ExpandResult, InFile, Semantics, Type, TypeInfo, Variant};
use ide_db::{RootDatabase, active_parameter::ActiveParameter};
use itertools::Either;
use syntax::{
@@ -50,7 +50,7 @@
pub(super) fn expand_and_analyze(
sema: &Semantics<'_, RootDatabase>,
- original_file: SyntaxNode,
+ original_file: InFile<SyntaxNode>,
speculative_file: SyntaxNode,
offset: TextSize,
original_token: &SyntaxToken,
@@ -72,7 +72,7 @@
relative_offset,
)
.unwrap_or(ExpansionResult {
- original_file,
+ original_file: original_file.value,
speculative_file,
original_offset: offset,
speculative_offset: fake_ident_token.text_range().start(),
@@ -125,7 +125,7 @@
/// the best we can do.
fn expand_maybe_stop(
sema: &Semantics<'_, RootDatabase>,
- original_file: SyntaxNode,
+ original_file: InFile<SyntaxNode>,
speculative_file: SyntaxNode,
original_offset: TextSize,
fake_ident_token: SyntaxToken,
@@ -142,17 +142,16 @@
return result;
}
- // This needs to come after the recursive call, because our "inside macro" detection is subtly wrong
- // with regard to attribute macros named `test` that are not std's test. So hopefully we will expand
- // them successfully above and be able to analyze.
- // Left biased since there may already be an identifier token there, and we appended to it.
- if !sema.might_be_inside_macro_call(&fake_ident_token)
- && token_at_offset_ignore_whitespace(&original_file, original_offset + relative_offset)
- .is_some_and(|original_token| !sema.might_be_inside_macro_call(&original_token))
+ // We can't check whether the fake expansion is inside a macro call, because that requires semantic info.
+ // But hopefully checking just the real one should be enough.
+ if token_at_offset_ignore_whitespace(&original_file.value, original_offset + relative_offset)
+ .is_some_and(|original_token| {
+ !sema.is_inside_macro_call(original_file.with_value(&original_token))
+ })
{
// Recursion base case.
Some(ExpansionResult {
- original_file,
+ original_file: original_file.value,
speculative_file,
original_offset,
speculative_offset: fake_ident_token.text_range().start(),
@@ -166,7 +165,7 @@
fn expand(
sema: &Semantics<'_, RootDatabase>,
- original_file: SyntaxNode,
+ original_file: InFile<SyntaxNode>,
speculative_file: SyntaxNode,
original_offset: TextSize,
fake_ident_token: SyntaxToken,
@@ -176,7 +175,7 @@
let parent_item =
|item: &ast::Item| item.syntax().ancestors().skip(1).find_map(ast::Item::cast);
- let original_node = token_at_offset_ignore_whitespace(&original_file, original_offset)
+ let original_node = token_at_offset_ignore_whitespace(&original_file.value, original_offset)
.and_then(|token| token.parent_ancestors().find_map(ast::Item::cast));
let ancestor_items = iter::successors(
Option::zip(
@@ -249,7 +248,7 @@
}
// No attributes have been expanded, so look for macro_call! token trees or derive token trees
- let orig_tt = ancestors_at_offset(&original_file, original_offset)
+ let orig_tt = ancestors_at_offset(&original_file.value, original_offset)
.map_while(Either::<ast::TokenTree, ast::Meta>::cast)
.last()?;
let spec_tt = ancestors_at_offset(&speculative_file, fake_ident_token.text_range().start())
@@ -292,7 +291,7 @@
fake_mapped_tokens.into_iter().min_by_key(|(_, rank)| *rank)
{
return Some(ExpansionResult {
- original_file,
+ original_file: original_file.value,
speculative_file,
original_offset,
speculative_offset: fake_ident_token.text_range().start(),
@@ -349,7 +348,7 @@
}
let result = expand_maybe_stop(
sema,
- actual_expansion.clone(),
+ InFile::new(file.into(), actual_expansion.clone()),
fake_expansion.clone(),
new_offset,
fake_mapped_token,
@@ -883,9 +882,10 @@
},
ast::MethodCallExpr(method) => {
let receiver = find_opt_node_in_file(original_file, method.receiver());
+ let has_parens = has_parens(&method);
let kind = NameRefKind::DotAccess(DotAccess {
receiver_ty: receiver.as_ref().and_then(|it| sema.type_of_expr(it)),
- kind: DotAccessKind::Method { has_parens: method.arg_list().is_some_and(|it| it.l_paren_token().is_some()) },
+ kind: DotAccessKind::Method { has_parens },
receiver,
ctx: DotAccessExprCtx { in_block_expr: is_in_block(method.syntax()), in_breakable: is_in_breakable(method.syntax()) }
});
@@ -1372,7 +1372,7 @@
}
}
- path_ctx.has_call_parens = it.syntax().parent().is_some_and(|it| ast::CallExpr::can_cast(it.kind()));
+ path_ctx.has_call_parens = it.syntax().parent().is_some_and(|it| ast::CallExpr::cast(it).is_some_and(|it| has_parens(&it)));
make_path_kind_expr(it.into())
},
@@ -1401,7 +1401,7 @@
match parent {
ast::PathType(it) => make_path_kind_type(it.into()),
ast::PathExpr(it) => {
- path_ctx.has_call_parens = it.syntax().parent().is_some_and(|it| ast::CallExpr::can_cast(it.kind()));
+ path_ctx.has_call_parens = it.syntax().parent().is_some_and(|it| ast::CallExpr::cast(it).is_some_and(|it| has_parens(&it)));
make_path_kind_expr(it.into())
},
@@ -1559,6 +1559,30 @@
Some((NameRefContext { nameref, kind: NameRefKind::Path(path_ctx) }, qualifier_ctx))
}
+/// When writing in the middle of some code, the following situation commonly occurs (`|` denotes the cursor):
+/// ```ignore
+/// value.method|
+/// (1, 2, 3)
+/// ```
+/// Here, we want to complete the method parentheses and arguments (if the corresponding settings are on),
+/// but the code is parsed as a method call with parentheses. Therefore we use a heuristic: if the parentheses
+/// are on the next line, we consider them non-existent.
+fn has_parens(node: &dyn HasArgList) -> bool {
+ let Some(arg_list) = node.arg_list() else { return false };
+ if arg_list.l_paren_token().is_none() {
+ return false;
+ }
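+ // Treat the parentheses as real only if no whitespace containing a newline separates them from the callee.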
+ let prev_siblings = iter::successors(arg_list.syntax().prev_sibling_or_token(), |it| {
+ it.prev_sibling_or_token()
+ });
+ prev_siblings
+ .take_while(|syntax| syntax.kind().is_trivia())
+ .filter_map(|syntax| {
+ syntax.into_token().filter(|token| token.kind() == SyntaxKind::WHITESPACE)
+ })
+ .all(|whitespace| !whitespace.text().contains('\n'))
+}
+
fn pattern_context_for(
sema: &Semantics<'_, RootDatabase>,
original_file: &SyntaxNode,
diff --git a/crates/ide-completion/src/tests/expression.rs b/crates/ide-completion/src/tests/expression.rs
index d513794..b46e4c3 100644
--- a/crates/ide-completion/src/tests/expression.rs
+++ b/crates/ide-completion/src/tests/expression.rs
@@ -2112,6 +2112,56 @@
}
#[test]
+fn cfg_attr_attr_macro() {
+ check(
+ r#"
+//- proc_macros: identity
+#[cfg_attr(test, proc_macros::identity)]
+fn foo() {
+ $0
+}
+ "#,
+ expect![[r#"
+ fn foo() fn()
+ md proc_macros
+ bt u32 u32
+ kw async
+ kw const
+ kw crate::
+ kw enum
+ kw extern
+ kw false
+ kw fn
+ kw for
+ kw if
+ kw if let
+ kw impl
+ kw impl for
+ kw let
+ kw letm
+ kw loop
+ kw match
+ kw mod
+ kw return
+ kw self::
+ kw static
+ kw struct
+ kw trait
+ kw true
+ kw type
+ kw union
+ kw unsafe
+ kw use
+ kw while
+ kw while let
+ sn macro_rules
+ sn pd
+ sn ppd
+ "#]],
+ );
+}
+
+#[test]
fn escaped_label() {
check(
r#"
@@ -2126,3 +2176,70 @@
"#]],
);
}
+
+#[test]
+fn call_parens_with_newline() {
+ check_edit(
+ "foo",
+ r#"
+fn foo(v: i32) {}
+
+fn bar() {
+ foo$0
+ ()
+}
+ "#,
+ r#"
+fn foo(v: i32) {}
+
+fn bar() {
+ foo(${1:v});$0
+ ()
+}
+ "#,
+ );
+ check_edit(
+ "foo",
+ r#"
+struct Foo;
+impl Foo {
+ fn foo(&self, v: i32) {}
+}
+
+fn bar() {
+ Foo.foo$0
+ ()
+}
+ "#,
+ r#"
+struct Foo;
+impl Foo {
+ fn foo(&self, v: i32) {}
+}
+
+fn bar() {
+ Foo.foo(${1:v});$0
+ ()
+}
+ "#,
+ );
+}
+
+#[test]
+fn dbg_too_many_asterisks() {
+ check_edit(
+ "dbg",
+ r#"
+fn main() {
+ let x = &42;
+ let y = *x.$0;
+}
+ "#,
+ r#"
+fn main() {
+ let x = &42;
+ let y = dbg!(*x);
+}
+ "#,
+ );
+}
diff --git a/crates/ide-completion/src/tests/item.rs b/crates/ide-completion/src/tests/item.rs
index 5568903..ed87b33 100644
--- a/crates/ide-completion/src/tests/item.rs
+++ b/crates/ide-completion/src/tests/item.rs
@@ -4,7 +4,7 @@
//! in [crate::completions::mod_].
use expect_test::expect;
-use crate::tests::{check_edit, check_with_base_items};
+use crate::tests::{check, check_edit, check_with_base_items};
#[test]
fn target_type_or_trait_in_impl_block() {
@@ -308,3 +308,39 @@
"#]],
);
}
+
+#[test]
+fn expression_in_item_macro() {
+ check(
+ r#"
+fn foo() -> u8 { 0 }
+
+macro_rules! foo {
+ ($expr:expr) => {
+ const BAR: u8 = $expr;
+ };
+}
+
+foo!(f$0);
+ "#,
+ expect![[r#"
+ ct BAR u8
+ fn foo() fn() -> u8
+ ma foo!(…) macro_rules! foo
+ bt u32 u32
+ kw const
+ kw crate::
+ kw false
+ kw for
+ kw if
+ kw if let
+ kw loop
+ kw match
+ kw self::
+ kw true
+ kw unsafe
+ kw while
+ kw while let
+ "#]],
+ );
+}
diff --git a/crates/ide-db/src/defs.rs b/crates/ide-db/src/defs.rs
index bf4f541..d5db1c4 100644
--- a/crates/ide-db/src/defs.rs
+++ b/crates/ide-db/src/defs.rs
@@ -6,7 +6,7 @@
// FIXME: this badly needs rename/rewrite (matklad, 2020-02-06).
use crate::RootDatabase;
-use crate::documentation::{Documentation, HasDocs};
+use crate::documentation::{DocsRangeMap, Documentation, HasDocs};
use crate::famous_defs::FamousDefs;
use arrayvec::ArrayVec;
use either::Either;
@@ -21,7 +21,7 @@
use span::Edition;
use stdx::{format_to, impl_from};
use syntax::{
- SyntaxKind, SyntaxNode, SyntaxToken,
+ SyntaxKind, SyntaxNode, SyntaxToken, TextSize,
ast::{self, AstNode},
match_ast,
};
@@ -210,29 +210,40 @@
famous_defs: Option<&FamousDefs<'_, '_>>,
display_target: DisplayTarget,
) -> Option<Documentation> {
+ self.docs_with_rangemap(db, famous_defs, display_target).map(|(docs, _)| docs)
+ }
+
+ pub fn docs_with_rangemap(
+ &self,
+ db: &RootDatabase,
+ famous_defs: Option<&FamousDefs<'_, '_>>,
+ display_target: DisplayTarget,
+ ) -> Option<(Documentation, Option<DocsRangeMap>)> {
let docs = match self {
- Definition::Macro(it) => it.docs(db),
- Definition::Field(it) => it.docs(db),
- Definition::Module(it) => it.docs(db),
- Definition::Crate(it) => it.docs(db),
- Definition::Function(it) => it.docs(db),
- Definition::Adt(it) => it.docs(db),
- Definition::Variant(it) => it.docs(db),
- Definition::Const(it) => it.docs(db),
- Definition::Static(it) => it.docs(db),
- Definition::Trait(it) => it.docs(db),
- Definition::TraitAlias(it) => it.docs(db),
+ Definition::Macro(it) => it.docs_with_rangemap(db),
+ Definition::Field(it) => it.docs_with_rangemap(db),
+ Definition::Module(it) => it.docs_with_rangemap(db),
+ Definition::Crate(it) => it.docs_with_rangemap(db),
+ Definition::Function(it) => it.docs_with_rangemap(db),
+ Definition::Adt(it) => it.docs_with_rangemap(db),
+ Definition::Variant(it) => it.docs_with_rangemap(db),
+ Definition::Const(it) => it.docs_with_rangemap(db),
+ Definition::Static(it) => it.docs_with_rangemap(db),
+ Definition::Trait(it) => it.docs_with_rangemap(db),
+ Definition::TraitAlias(it) => it.docs_with_rangemap(db),
Definition::TypeAlias(it) => {
- it.docs(db).or_else(|| {
+ it.docs_with_rangemap(db).or_else(|| {
// docs are missing, try to fall back to the docs of the aliased item.
let adt = it.ty(db).as_adt()?;
- let docs = adt.docs(db)?;
- let docs = format!(
- "*This is the documentation for* `{}`\n\n{}",
- adt.display(db, display_target),
- docs.as_str()
+ let (docs, range_map) = adt.docs_with_rangemap(db)?;
+ let header_docs = format!(
+ "*This is the documentation for* `{}`\n\n",
+ adt.display(db, display_target)
);
- Some(Documentation::new(docs))
+ let offset = TextSize::new(header_docs.len() as u32);
+ let range_map = range_map.shift_docstring_line_range(offset);
+ let docs = header_docs + docs.as_str();
+ Some((Documentation::new(docs), range_map))
})
}
Definition::BuiltinType(it) => {
@@ -241,17 +252,17 @@
let primitive_mod =
format!("prim_{}", it.name().display(fd.0.db, display_target.edition));
let doc_owner = find_std_module(fd, &primitive_mod, display_target.edition)?;
- doc_owner.docs(fd.0.db)
+ doc_owner.docs_with_rangemap(fd.0.db)
})
}
Definition::BuiltinLifetime(StaticLifetime) => None,
Definition::Local(_) => None,
Definition::SelfType(impl_def) => {
- impl_def.self_ty(db).as_adt().map(|adt| adt.docs(db))?
+ impl_def.self_ty(db).as_adt().map(|adt| adt.docs_with_rangemap(db))?
}
Definition::GenericParam(_) => None,
Definition::Label(_) => None,
- Definition::ExternCrateDecl(it) => it.docs(db),
+ Definition::ExternCrateDecl(it) => it.docs_with_rangemap(db),
Definition::BuiltinAttr(it) => {
let name = it.name(db);
@@ -276,7 +287,8 @@
name_value_str
);
}
- Some(Documentation::new(docs.replace('*', "\\*")))
+
+ return Some((Documentation::new(docs.replace('*', "\\*")), None));
}
Definition::ToolModule(_) => None,
Definition::DeriveHelper(_) => None,
@@ -291,8 +303,9 @@
let trait_ = assoc.implemented_trait(db)?;
let name = Some(assoc.name(db)?);
let item = trait_.items(db).into_iter().find(|it| it.name(db) == name)?;
- item.docs(db)
+ item.docs_with_rangemap(db)
})
+ .map(|(docs, range_map)| (docs, Some(range_map)))
}
pub fn label(&self, db: &RootDatabase, display_target: DisplayTarget) -> String {
diff --git a/crates/ide-db/src/documentation.rs b/crates/ide-db/src/documentation.rs
index ef2c839..30c355f 100644
--- a/crates/ide-db/src/documentation.rs
+++ b/crates/ide-db/src/documentation.rs
@@ -34,11 +34,13 @@
pub trait HasDocs: HasAttrs {
fn docs(self, db: &dyn HirDatabase) -> Option<Documentation>;
+ fn docs_with_rangemap(self, db: &dyn HirDatabase) -> Option<(Documentation, DocsRangeMap)>;
fn resolve_doc_path(
self,
db: &dyn HirDatabase,
link: &str,
ns: Option<hir::Namespace>,
+ is_inner_doc: bool,
) -> Option<hir::DocLinkDef>;
}
/// A struct to map text ranges from [`Documentation`] back to TextRanges in the syntax tree.
@@ -53,7 +55,7 @@
impl DocsRangeMap {
/// Maps a [`TextRange`] relative to the documentation string back to its AST range
- pub fn map(&self, range: TextRange) -> Option<InFile<TextRange>> {
+ pub fn map(&self, range: TextRange) -> Option<(InFile<TextRange>, AttrId)> {
let found = self.mapping.binary_search_by(|(probe, ..)| probe.ordering(range)).ok()?;
let (line_docs_range, idx, original_line_src_range) = self.mapping[found];
if !line_docs_range.contains_range(range) {
@@ -71,7 +73,7 @@
text_range.end() + original_line_src_range.start() + relative_range.start(),
string.syntax().text_range().len().min(range.len()),
);
- Some(InFile { file_id, value: range })
+ Some((InFile { file_id, value: range }, idx))
}
Either::Right(comment) => {
let text_range = comment.syntax().text_range();
@@ -82,10 +84,22 @@
+ relative_range.start(),
text_range.len().min(range.len()),
);
- Some(InFile { file_id, value: range })
+ Some((InFile { file_id, value: range }, idx))
}
}
}
+
+ pub fn shift_docstring_line_range(self, offset: TextSize) -> DocsRangeMap {
+ let mapping = self
+ .mapping
+ .into_iter()
+ .map(|(buf_offset, id, base_offset)| {
+ let buf_offset = buf_offset.checked_add(offset).unwrap();
+ (buf_offset, id, base_offset)
+ })
+ .collect_vec();
+ DocsRangeMap { source_map: self.source_map, mapping }
+ }
}
pub fn docs_with_rangemap(
@@ -161,13 +175,20 @@
fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
docs_from_attrs(&self.attrs(db)).map(Documentation)
}
+ fn docs_with_rangemap(
+ self,
+ db: &dyn HirDatabase,
+ ) -> Option<(Documentation, DocsRangeMap)> {
+ docs_with_rangemap(db, &self.attrs(db))
+ }
fn resolve_doc_path(
self,
db: &dyn HirDatabase,
link: &str,
- ns: Option<hir::Namespace>
+ ns: Option<hir::Namespace>,
+ is_inner_doc: bool,
) -> Option<hir::DocLinkDef> {
- resolve_doc_path_on(db, self, link, ns)
+ resolve_doc_path_on(db, self, link, ns, is_inner_doc)
}
}
)*};
@@ -184,13 +205,21 @@
fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
hir::$enum::$variant(self).docs(db)
}
+
+ fn docs_with_rangemap(
+ self,
+ db: &dyn HirDatabase,
+ ) -> Option<(Documentation, DocsRangeMap)> {
+ hir::$enum::$variant(self).docs_with_rangemap(db)
+ }
fn resolve_doc_path(
self,
db: &dyn HirDatabase,
link: &str,
- ns: Option<hir::Namespace>
+ ns: Option<hir::Namespace>,
+ is_inner_doc: bool,
) -> Option<hir::DocLinkDef> {
- hir::$enum::$variant(self).resolve_doc_path(db, link, ns)
+ hir::$enum::$variant(self).resolve_doc_path(db, link, ns, is_inner_doc)
}
}
)*};
@@ -207,16 +236,25 @@
}
}
+ fn docs_with_rangemap(self, db: &dyn HirDatabase) -> Option<(Documentation, DocsRangeMap)> {
+ match self {
+ hir::AssocItem::Function(it) => it.docs_with_rangemap(db),
+ hir::AssocItem::Const(it) => it.docs_with_rangemap(db),
+ hir::AssocItem::TypeAlias(it) => it.docs_with_rangemap(db),
+ }
+ }
+
fn resolve_doc_path(
self,
db: &dyn HirDatabase,
link: &str,
ns: Option<hir::Namespace>,
+ is_inner_doc: bool,
) -> Option<hir::DocLinkDef> {
match self {
- hir::AssocItem::Function(it) => it.resolve_doc_path(db, link, ns),
- hir::AssocItem::Const(it) => it.resolve_doc_path(db, link, ns),
- hir::AssocItem::TypeAlias(it) => it.resolve_doc_path(db, link, ns),
+ hir::AssocItem::Function(it) => it.resolve_doc_path(db, link, ns, is_inner_doc),
+ hir::AssocItem::Const(it) => it.resolve_doc_path(db, link, ns, is_inner_doc),
+ hir::AssocItem::TypeAlias(it) => it.resolve_doc_path(db, link, ns, is_inner_doc),
}
}
}
@@ -238,13 +276,36 @@
}
.map(Documentation::new)
}
+
+ fn docs_with_rangemap(self, db: &dyn HirDatabase) -> Option<(Documentation, DocsRangeMap)> {
+ let crate_docs = docs_with_rangemap(db, &self.resolved_crate(db)?.root_module().attrs(db));
+ let decl_docs = docs_with_rangemap(db, &self.attrs(db));
+ match (decl_docs, crate_docs) {
+ (None, None) => None,
+ (Some(decl_docs), None) => Some(decl_docs),
+ (None, Some(crate_docs)) => Some(crate_docs),
+ (
+ Some((Documentation(mut decl_docs), mut decl_range_map)),
+ Some((Documentation(crate_docs), crate_range_map)),
+ ) => {
+ decl_docs.push('\n');
+ decl_docs.push('\n');
+ let offset = TextSize::new(decl_docs.len() as u32);
+ decl_docs += &crate_docs;
+ let crate_range_map = crate_range_map.shift_docstring_line_range(offset);
+ decl_range_map.mapping.extend(crate_range_map.mapping);
+ Some((Documentation(decl_docs), decl_range_map))
+ }
+ }
+ }
fn resolve_doc_path(
self,
db: &dyn HirDatabase,
link: &str,
ns: Option<hir::Namespace>,
+ is_inner_doc: bool,
) -> Option<hir::DocLinkDef> {
- resolve_doc_path_on(db, self, link, ns)
+ resolve_doc_path_on(db, self, link, ns, is_inner_doc)
}
}
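
The `ExternCrateDecl` implementation above concatenates the declaration's own docs with the crate-root docs and then shifts the crate-root `DocsRangeMap` by the length of the prepended text. The standalone sketch below is not part of the patch; `MappedLine` and `shift_by` are simplified illustrative stand-ins, not rust-analyzer types. It only shows why the shift is needed: without it, ranges recorded against the second string would point into the wrong part of the combined documentation.

```rust
/// Simplified stand-in for a `(range-in-docs, range-in-source)` mapping entry.
#[derive(Debug, Clone, Copy)]
struct MappedLine {
    /// Start offset of this line inside the combined documentation string.
    docs_start: u32,
    /// Length of the line.
    len: u32,
}

/// Shift every entry so it stays valid after `offset` bytes were prepended,
/// mirroring what `shift_docstring_line_range` does for `DocsRangeMap`.
fn shift_by(mapping: Vec<MappedLine>, offset: u32) -> Vec<MappedLine> {
    mapping
        .into_iter()
        .map(|line| MappedLine { docs_start: line.docs_start + offset, ..line })
        .collect()
}

fn main() {
    let decl_docs = "Docs on the `extern crate` declaration.";
    let crate_docs = "Docs on the crate root.";
    // Ranges for `crate_docs` are initially relative to `crate_docs` itself.
    let crate_map = vec![MappedLine { docs_start: 0, len: crate_docs.len() as u32 }];

    // Concatenate with a blank line in between, as the patch does.
    let combined = format!("{decl_docs}\n\n{crate_docs}");
    let offset = (decl_docs.len() + 2) as u32;
    let shifted = shift_by(crate_map, offset);

    // The shifted range now points at `crate_docs` inside the combined string.
    let line = shifted[0];
    let slice = &combined[line.docs_start as usize..(line.docs_start + line.len) as usize];
    assert_eq!(slice, crate_docs);
    println!("shifted mapping: {shifted:?}");
}
```
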
diff --git a/crates/ide-db/src/lib.rs b/crates/ide-db/src/lib.rs
index 63cc7cd..c94be7e 100644
--- a/crates/ide-db/src/lib.rs
+++ b/crates/ide-db/src/lib.rs
@@ -92,9 +92,7 @@
impl std::panic::RefUnwindSafe for RootDatabase {}
#[salsa_macros::db]
-impl salsa::Database for RootDatabase {
- fn salsa_event(&self, _event: &dyn Fn() -> salsa::Event) {}
-}
+impl salsa::Database for RootDatabase {}
impl Drop for RootDatabase {
fn drop(&mut self) {
diff --git a/crates/ide-db/src/prime_caches.rs b/crates/ide-db/src/prime_caches.rs
index cbe3140..5356614 100644
--- a/crates/ide-db/src/prime_caches.rs
+++ b/crates/ide-db/src/prime_caches.rs
@@ -2,12 +2,10 @@
//! sometimes is counter productive when, for example, the first goto definition
//! request takes longer to compute. This module implements prepopulation of
//! various caches, it's not really advanced at the moment.
-mod topologic_sort;
-
-use std::time::Duration;
+use std::panic::AssertUnwindSafe;
use hir::{Symbol, db::DefDatabase};
-use itertools::Itertools;
+use rustc_hash::FxHashMap;
use salsa::{Cancelled, Database};
use crate::{
@@ -35,59 +33,114 @@
) {
let _p = tracing::info_span!("parallel_prime_caches").entered();
- let mut crates_to_prime = {
- // FIXME: We already have the crate list topologically sorted (but without the things
- // `TopologicalSortIter` gives us). Maybe there is a way to avoid using it and rip it out
- // of the codebase?
- let mut builder = topologic_sort::TopologicalSortIter::builder();
-
- for &crate_id in db.all_crates().iter() {
- builder.add(crate_id, crate_id.data(db).dependencies.iter().map(|d| d.crate_id));
- }
-
- builder.build()
- };
-
enum ParallelPrimeCacheWorkerProgress {
- BeginCrate { crate_id: Crate, crate_name: Symbol },
- EndCrate { crate_id: Crate },
+ BeginCrateDefMap { crate_id: Crate, crate_name: Symbol },
+ EndCrateDefMap { crate_id: Crate },
+ EndCrateImportMap,
+ EndModuleSymbols,
Cancelled(Cancelled),
}
- // We split off def map computation from other work,
- // as the def map is the relevant one. Once the defmaps are computed
- // the project is ready to go, the other indices are just nice to have for some IDE features.
- #[derive(PartialOrd, Ord, PartialEq, Eq, Copy, Clone)]
- enum PrimingPhase {
- DefMap,
- ImportMap,
- CrateSymbols,
- }
+ // The setup here is a bit complicated. We try to make the best use of compute resources.
+ // The idea is that if we have a def map available to compute, we should do that first.
+ // This is because the def map is a dependency of both the import map and the symbols.
+ // So if we have, e.g., a def map task and a symbols task, and we compute the def map
+ // first, then once it completes we can compute the def maps of dependent crates, the
+ // existing symbols, and the symbols of the new crate, all in parallel. But if we compute
+ // the symbols first, afterwards only the def map remains to compute, and the rest of the
+ // CPU cores sit idle, which is not good.
+ // However, it's better to compute a symbols/import map than a def map that isn't ready
+ // yet because one of its dependencies hasn't completed its own def map. Such a def map
+ // will just block on the dependency, which is wasted time. So it's better to spend that
+ // time computing the symbols/import map of an already computed def map.
- let (work_sender, progress_receiver) = {
- let (progress_sender, progress_receiver) = crossbeam_channel::unbounded();
- let (work_sender, work_receiver) = crossbeam_channel::unbounded();
- let prime_caches_worker = move |db: RootDatabase| {
- while let Ok((crate_id, crate_name, kind)) = work_receiver.recv() {
- progress_sender
- .send(ParallelPrimeCacheWorkerProgress::BeginCrate { crate_id, crate_name })?;
-
- let cancelled = Cancelled::catch(|| match kind {
- PrimingPhase::DefMap => _ = db.crate_def_map(crate_id),
- PrimingPhase::ImportMap => _ = db.import_map(crate_id),
- PrimingPhase::CrateSymbols => _ = db.crate_symbols(crate_id.into()),
- });
-
- match cancelled {
- Ok(()) => progress_sender
- .send(ParallelPrimeCacheWorkerProgress::EndCrate { crate_id })?,
- Err(cancelled) => progress_sender
- .send(ParallelPrimeCacheWorkerProgress::Cancelled(cancelled))?,
- }
+ let (reverse_deps, mut to_be_done_deps) = {
+ let all_crates = db.all_crates();
+ let to_be_done_deps = all_crates
+ .iter()
+ .map(|&krate| (krate, krate.data(db).dependencies.len() as u32))
+ .collect::<FxHashMap<_, _>>();
+ let mut reverse_deps =
+ all_crates.iter().map(|&krate| (krate, Vec::new())).collect::<FxHashMap<_, _>>();
+ for &krate in &*all_crates {
+ for dep in &krate.data(db).dependencies {
+ reverse_deps.get_mut(&dep.crate_id).unwrap().push(krate);
}
+ }
+ (reverse_deps, to_be_done_deps)
+ };
- Ok::<_, crossbeam_channel::SendError<_>>(())
- };
+ let (def_map_work_sender, import_map_work_sender, symbols_work_sender, progress_receiver) = {
+ let (progress_sender, progress_receiver) = crossbeam_channel::unbounded();
+ let (def_map_work_sender, def_map_work_receiver) = crossbeam_channel::unbounded();
+ let (import_map_work_sender, import_map_work_receiver) = crossbeam_channel::unbounded();
+ let (symbols_work_sender, symbols_work_receiver) = crossbeam_channel::unbounded();
+ let prime_caches_worker =
+ move |db: RootDatabase| {
+ let handle_def_map = |crate_id, crate_name| {
+ progress_sender.send(ParallelPrimeCacheWorkerProgress::BeginCrateDefMap {
+ crate_id,
+ crate_name,
+ })?;
+
+ let cancelled = Cancelled::catch(|| _ = hir::crate_def_map(&db, crate_id));
+
+ match cancelled {
+ Ok(()) => progress_sender
+ .send(ParallelPrimeCacheWorkerProgress::EndCrateDefMap { crate_id })?,
+ Err(cancelled) => progress_sender
+ .send(ParallelPrimeCacheWorkerProgress::Cancelled(cancelled))?,
+ }
+
+ Ok::<_, crossbeam_channel::SendError<_>>(())
+ };
+ let handle_import_map = |crate_id| {
+ let cancelled = Cancelled::catch(|| _ = db.import_map(crate_id));
+
+ match cancelled {
+ Ok(()) => progress_sender
+ .send(ParallelPrimeCacheWorkerProgress::EndCrateImportMap)?,
+ Err(cancelled) => progress_sender
+ .send(ParallelPrimeCacheWorkerProgress::Cancelled(cancelled))?,
+ }
+
+ Ok::<_, crossbeam_channel::SendError<_>>(())
+ };
+ let handle_symbols = |module| {
+ let cancelled =
+ Cancelled::catch(AssertUnwindSafe(|| _ = db.module_symbols(module)));
+
+ match cancelled {
+ Ok(()) => progress_sender
+ .send(ParallelPrimeCacheWorkerProgress::EndModuleSymbols)?,
+ Err(cancelled) => progress_sender
+ .send(ParallelPrimeCacheWorkerProgress::Cancelled(cancelled))?,
+ }
+
+ Ok::<_, crossbeam_channel::SendError<_>>(())
+ };
+
+ loop {
+ db.unwind_if_revision_cancelled();
+
+ // Biased because we want to prefer def maps.
+ crossbeam_channel::select_biased! {
+ recv(def_map_work_receiver) -> work => {
+ let Ok((crate_id, crate_name)) = work else { break };
+ handle_def_map(crate_id, crate_name)?;
+ }
+ recv(import_map_work_receiver) -> work => {
+ let Ok(crate_id) = work else { break };
+ handle_import_map(crate_id)?;
+ }
+ recv(symbols_work_receiver) -> work => {
+ let Ok(module) = work else { break };
+ handle_symbols(module)?;
+ }
+ }
+ }
+ Ok::<_, crossbeam_channel::SendError<_>>(())
+ };
for id in 0..num_worker_threads {
stdx::thread::Builder::new(
@@ -103,138 +156,121 @@
.expect("failed to spawn thread");
}
- (work_sender, progress_receiver)
+ (def_map_work_sender, import_map_work_sender, symbols_work_sender, progress_receiver)
};
- let crates_total = crates_to_prime.pending();
- let mut crates_done = 0;
+ let crate_def_maps_total = db.all_crates().len();
+ let mut crate_def_maps_done = 0;
+ let (mut crate_import_maps_total, mut crate_import_maps_done) = (0usize, 0usize);
+ let (mut module_symbols_total, mut module_symbols_done) = (0usize, 0usize);
// an index map is used to preserve ordering so we can sort the progress report in order of
// "longest crate to index" first
let mut crates_currently_indexing =
FxIndexMap::with_capacity_and_hasher(num_worker_threads, Default::default());
- let mut additional_phases = vec![];
-
- while crates_done < crates_total {
- db.unwind_if_revision_cancelled();
-
- for krate in &mut crates_to_prime {
- let name = krate.extra_data(db).display_name.as_deref().cloned().unwrap_or_else(|| {
- Symbol::integer(salsa::plumbing::AsId::as_id(&krate).as_u32() as usize)
- });
- let origin = &krate.data(db).origin;
- if origin.is_lang() {
- additional_phases.push((krate, name.clone(), PrimingPhase::ImportMap));
- } else if origin.is_local() {
- // Compute the symbol search index.
- // This primes the cache for `ide_db::symbol_index::world_symbols()`.
- //
- // We do this for workspace crates only (members of local_roots), because doing it
- // for all dependencies could be *very* unnecessarily slow in a large project.
- //
- // FIXME: We should do it unconditionally if the configuration is set to default to
- // searching dependencies (rust-analyzer.workspace.symbol.search.scope), but we
- // would need to pipe that configuration information down here.
- additional_phases.push((krate, name.clone(), PrimingPhase::CrateSymbols));
- }
-
- work_sender.send((krate, name, PrimingPhase::DefMap)).ok();
+ for (&krate, &to_be_done_deps) in &to_be_done_deps {
+ if to_be_done_deps != 0 {
+ continue;
}
- // recv_timeout is somewhat a hack, we need a way to from this thread check to see if the current salsa revision
- // is cancelled on a regular basis. workers will only exit if they are processing a task that is cancelled, or
- // if this thread exits, and closes the work channel.
- let worker_progress = match progress_receiver.recv_timeout(Duration::from_millis(10)) {
- Ok(p) => p,
- Err(crossbeam_channel::RecvTimeoutError::Timeout) => {
- continue;
- }
- Err(crossbeam_channel::RecvTimeoutError::Disconnected) => {
- // all our workers have exited, mark us as finished and exit
- cb(ParallelPrimeCachesProgress {
- crates_currently_indexing: vec![],
- crates_done,
- crates_total: crates_done,
- work_type: "Indexing",
- });
- return;
- }
- };
- match worker_progress {
- ParallelPrimeCacheWorkerProgress::BeginCrate { crate_id, crate_name } => {
- crates_currently_indexing.insert(crate_id, crate_name);
- }
- ParallelPrimeCacheWorkerProgress::EndCrate { crate_id } => {
- crates_currently_indexing.swap_remove(&crate_id);
- crates_to_prime.mark_done(crate_id);
- crates_done += 1;
- }
- ParallelPrimeCacheWorkerProgress::Cancelled(cancelled) => {
- // Cancelled::throw should probably be public
- std::panic::resume_unwind(Box::new(cancelled));
- }
- };
+ let name = crate_name(db, krate);
+ def_map_work_sender.send((krate, name)).ok();
+ }
+
+ while crate_def_maps_done < crate_def_maps_total
+ || crate_import_maps_done < crate_import_maps_total
+ || module_symbols_done < module_symbols_total
+ {
+ db.unwind_if_revision_cancelled();
let progress = ParallelPrimeCachesProgress {
crates_currently_indexing: crates_currently_indexing.values().cloned().collect(),
- crates_done,
- crates_total,
+ crates_done: crate_def_maps_done,
+ crates_total: crate_def_maps_total,
work_type: "Indexing",
};
cb(progress);
- }
- let mut crates_done = 0;
- let crates_total = additional_phases.len();
- for w in additional_phases.into_iter().sorted_by_key(|&(_, _, phase)| phase) {
- work_sender.send(w).ok();
- }
-
- while crates_done < crates_total {
- db.unwind_if_revision_cancelled();
-
- // recv_timeout is somewhat a hack, we need a way to from this thread check to see if the current salsa revision
- // is cancelled on a regular basis. workers will only exit if they are processing a task that is cancelled, or
- // if this thread exits, and closes the work channel.
- let worker_progress = match progress_receiver.recv_timeout(Duration::from_millis(10)) {
+ // Biased to prefer progress updates (and because it's faster).
+ let progress = match progress_receiver.recv() {
Ok(p) => p,
- Err(crossbeam_channel::RecvTimeoutError::Timeout) => {
- continue;
- }
- Err(crossbeam_channel::RecvTimeoutError::Disconnected) => {
+ Err(crossbeam_channel::RecvError) => {
// all our workers have exited, mark us as finished and exit
cb(ParallelPrimeCachesProgress {
crates_currently_indexing: vec![],
- crates_done,
- crates_total: crates_done,
- work_type: "Populating symbols",
+ crates_done: crate_def_maps_done,
+ crates_total: crate_def_maps_done,
+ work_type: "Done",
});
return;
}
};
- match worker_progress {
- ParallelPrimeCacheWorkerProgress::BeginCrate { crate_id, crate_name } => {
+
+ match progress {
+ ParallelPrimeCacheWorkerProgress::BeginCrateDefMap { crate_id, crate_name } => {
crates_currently_indexing.insert(crate_id, crate_name);
}
- ParallelPrimeCacheWorkerProgress::EndCrate { crate_id } => {
+ ParallelPrimeCacheWorkerProgress::EndCrateDefMap { crate_id } => {
crates_currently_indexing.swap_remove(&crate_id);
- crates_done += 1;
+ crate_def_maps_done += 1;
+
+ // Fire ready dependencies.
+ for &dep in &reverse_deps[&crate_id] {
+ let to_be_done = to_be_done_deps.get_mut(&dep).unwrap();
+ *to_be_done -= 1;
+ if *to_be_done == 0 {
+ let dep_name = crate_name(db, dep);
+ def_map_work_sender.send((dep, dep_name)).ok();
+ }
+ }
+
+ if crate_def_maps_done == crate_def_maps_total {
+ cb(ParallelPrimeCachesProgress {
+ crates_currently_indexing: vec![],
+ crates_done: crate_def_maps_done,
+ crates_total: crate_def_maps_done,
+ work_type: "Collecting Symbols",
+ });
+ }
+
+ let origin = &crate_id.data(db).origin;
+ if origin.is_lang() {
+ crate_import_maps_total += 1;
+ import_map_work_sender.send(crate_id).ok();
+ } else if origin.is_local() {
+ // Compute the symbol search index.
+ // This primes the cache for `ide_db::symbol_index::world_symbols()`.
+ //
+ // We do this for workspace crates only (members of local_roots), because doing it
+ // for all dependencies could be *very* unnecessarily slow in a large project.
+ //
+ // FIXME: We should do it unconditionally if the configuration is set to default to
+ // searching dependencies (rust-analyzer.workspace.symbol.search.scope), but we
+ // would need to pipe that configuration information down here.
+ let modules = hir::Crate::from(crate_id).modules(db);
+ module_symbols_total += modules.len();
+ for module in modules {
+ symbols_work_sender.send(module).ok();
+ }
+ }
}
+ ParallelPrimeCacheWorkerProgress::EndCrateImportMap => crate_import_maps_done += 1,
+ ParallelPrimeCacheWorkerProgress::EndModuleSymbols => module_symbols_done += 1,
ParallelPrimeCacheWorkerProgress::Cancelled(cancelled) => {
// Cancelled::throw should probably be public
std::panic::resume_unwind(Box::new(cancelled));
}
- };
-
- let progress = ParallelPrimeCachesProgress {
- crates_currently_indexing: crates_currently_indexing.values().cloned().collect(),
- crates_done,
- crates_total,
- work_type: "Populating symbols",
- };
-
- cb(progress);
+ }
}
}
+
+fn crate_name(db: &RootDatabase, krate: Crate) -> Symbol {
+ krate
+ .extra_data(db)
+ .display_name
+ .as_deref()
+ .cloned()
+ .unwrap_or_else(|| Symbol::integer(salsa::plumbing::AsId::as_id(&krate).as_u32() as usize))
+}
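
The reworked `parallel_prime_caches` replaces the deleted `topologic_sort` helper with a plain dependency counter: each crate starts with the number of its unfinished dependencies (`to_be_done_deps`), and when a def map finishes, the counters of its reverse dependencies (`reverse_deps`) are decremented; any crate that reaches zero becomes ready. The sketch below is not part of the patch and uses plain `HashMap`s and string names instead of `Crate` IDs, channels, and salsa queries; it only illustrates the counting scheme.

```rust
use std::collections::{HashMap, VecDeque};

/// Standalone illustration of the dependency-counting scheduler: a crate's
/// def map becomes ready once all of its dependencies' def maps are done.
fn ready_order(deps: &HashMap<&'static str, Vec<&'static str>>) -> Vec<&'static str> {
    // Number of not-yet-finished dependencies per crate.
    let mut to_be_done: HashMap<_, _> =
        deps.iter().map(|(&krate, ds)| (krate, ds.len())).collect();
    // Reverse edges: who is waiting on whom.
    let mut reverse_deps: HashMap<_, Vec<_>> =
        deps.keys().map(|&k| (k, Vec::new())).collect();
    for (&krate, ds) in deps {
        for &dep in ds {
            reverse_deps.get_mut(dep).unwrap().push(krate);
        }
    }

    // Seed the queue with crates that have no pending dependencies.
    let mut queue: VecDeque<_> =
        to_be_done.iter().filter(|&(_, &n)| n == 0).map(|(&k, _)| k).collect();
    let mut order = Vec::new();
    while let Some(krate) = queue.pop_front() {
        // "Compute the def map" for `krate`, then unblock its reverse dependencies.
        order.push(krate);
        for &waiting in &reverse_deps[krate] {
            let n = to_be_done.get_mut(waiting).unwrap();
            *n -= 1;
            if *n == 0 {
                queue.push_back(waiting);
            }
        }
    }
    order
}

fn main() {
    // app depends on lib, lib depends on core.
    let deps =
        HashMap::from([("core", vec![]), ("lib", vec!["core"]), ("app", vec!["lib"])]);
    let order = ready_order(&deps);
    assert_eq!(order, ["core", "lib", "app"]);
    println!("{order:?}");
}
```

In the patch itself, the "queue" is the `def_map_work_sender` channel and the decrement happens in the `EndCrateDefMap` progress handler, with import-map and symbol work fanned out as crates complete.
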
diff --git a/crates/ide-db/src/prime_caches/topologic_sort.rs b/crates/ide-db/src/prime_caches/topologic_sort.rs
deleted file mode 100644
index c8a0386..0000000
--- a/crates/ide-db/src/prime_caches/topologic_sort.rs
+++ /dev/null
@@ -1,104 +0,0 @@
-//! helper data structure to schedule work for parallel prime caches.
-use std::{collections::VecDeque, hash::Hash};
-
-use crate::FxHashMap;
-
-pub(crate) struct TopologicSortIterBuilder<T> {
- nodes: FxHashMap<T, Entry<T>>,
-}
-
-// this implementation has different bounds on T than would be implied by #[derive(Default)]
-impl<T> Default for TopologicSortIterBuilder<T>
-where
- T: Copy + Eq + PartialEq + Hash,
-{
- fn default() -> Self {
- Self { nodes: Default::default() }
- }
-}
-
-impl<T> TopologicSortIterBuilder<T>
-where
- T: Copy + Eq + PartialEq + Hash,
-{
- fn get_or_create_entry(&mut self, item: T) -> &mut Entry<T> {
- self.nodes.entry(item).or_default()
- }
-
- pub(crate) fn add(&mut self, item: T, predecessors: impl IntoIterator<Item = T>) {
- let mut num_predecessors = 0;
-
- for predecessor in predecessors.into_iter() {
- self.get_or_create_entry(predecessor).successors.push(item);
- num_predecessors += 1;
- }
-
- let entry = self.get_or_create_entry(item);
- entry.num_predecessors += num_predecessors;
- }
-
- pub(crate) fn build(self) -> TopologicalSortIter<T> {
- let ready = self
- .nodes
- .iter()
- .filter_map(
- |(item, entry)| if entry.num_predecessors == 0 { Some(*item) } else { None },
- )
- .collect();
-
- TopologicalSortIter { nodes: self.nodes, ready }
- }
-}
-
-pub(crate) struct TopologicalSortIter<T> {
- ready: VecDeque<T>,
- nodes: FxHashMap<T, Entry<T>>,
-}
-
-impl<T> TopologicalSortIter<T>
-where
- T: Copy + Eq + PartialEq + Hash,
-{
- pub(crate) fn builder() -> TopologicSortIterBuilder<T> {
- TopologicSortIterBuilder::default()
- }
-
- pub(crate) fn pending(&self) -> usize {
- self.nodes.len()
- }
-
- pub(crate) fn mark_done(&mut self, item: T) {
- let entry = self.nodes.remove(&item).expect("invariant: unknown item marked as done");
-
- for successor in entry.successors {
- let succ_entry = self
- .nodes
- .get_mut(&successor)
- .expect("invariant: unknown successor referenced by entry");
-
- succ_entry.num_predecessors -= 1;
- if succ_entry.num_predecessors == 0 {
- self.ready.push_back(successor);
- }
- }
- }
-}
-
-impl<T> Iterator for TopologicalSortIter<T> {
- type Item = T;
-
- fn next(&mut self) -> Option<Self::Item> {
- self.ready.pop_front()
- }
-}
-
-struct Entry<T> {
- successors: Vec<T>,
- num_predecessors: usize,
-}
-
-impl<T> Default for Entry<T> {
- fn default() -> Self {
- Self { successors: Default::default(), num_predecessors: 0 }
- }
-}
diff --git a/crates/ide-db/src/rename.rs b/crates/ide-db/src/rename.rs
index b8119e1..fa2a46a 100644
--- a/crates/ide-db/src/rename.rs
+++ b/crates/ide-db/src/rename.rs
@@ -390,11 +390,6 @@
let mut edited_ranges = Vec::new();
for &FileReference { range, ref name, .. } in references {
let name_range = name.text_range();
- if name_range.len() != range.len() {
- // This usage comes from a different token kind that was downmapped to a NameLike in a macro
- // Renaming this will most likely break things syntax-wise
- continue;
- }
let has_emitted_edit = match name {
// if the ranges differ then the node is inside a macro call, we can't really attempt
// to make special rewrites like shorthand syntax and such, so just rename the node in
diff --git a/crates/ide-db/src/search.rs b/crates/ide-db/src/search.rs
index 30be5bc..d4ab759 100644
--- a/crates/ide-db/src/search.rs
+++ b/crates/ide-db/src/search.rs
@@ -524,6 +524,7 @@
fn find_nodes<'b>(
sema: &'b Semantics<'_, RootDatabase>,
name: &str,
+ file_id: EditionedFileId,
node: &syntax::SyntaxNode,
offset: TextSize,
) -> impl Iterator<Item = SyntaxNode> + 'b {
@@ -534,7 +535,7 @@
})
.into_iter()
.flat_map(move |token| {
- if sema.might_be_inside_macro_call(&token) {
+ if sema.is_inside_macro_call(InFile::new(file_id.into(), &token)) {
sema.descend_into_macros_exact(token)
} else {
<_>::from([token])
@@ -654,11 +655,14 @@
let tree = LazyCell::new(move || sema.parse(file_id).syntax().clone());
for offset in FindUsages::match_indices(&file_text, &finder, search_range) {
- let usages =
- FindUsages::find_nodes(sema, &current_to_process, &tree, offset)
- .filter(|it| {
- matches!(it.kind(), SyntaxKind::NAME | SyntaxKind::NAME_REF)
- });
+ let usages = FindUsages::find_nodes(
+ sema,
+ &current_to_process,
+ file_id,
+ &tree,
+ offset,
+ )
+ .filter(|it| matches!(it.kind(), SyntaxKind::NAME | SyntaxKind::NAME_REF));
for usage in usages {
if let Some(alias) = usage.parent().and_then(|it| {
let path = ast::PathSegment::cast(it)?.parent_path();
@@ -813,7 +817,7 @@
let tree = LazyCell::new(move || this.sema.parse(file_id).syntax().clone());
for offset in FindUsages::match_indices(&file_text, finder, search_range) {
- let usages = FindUsages::find_nodes(this.sema, name, &tree, offset)
+ let usages = FindUsages::find_nodes(this.sema, name, file_id, &tree, offset)
.filter_map(ast::NameRef::cast);
for usage in usages {
let found_usage = usage
@@ -970,8 +974,8 @@
return;
}
- for name in
- Self::find_nodes(sema, name, &tree, offset).filter_map(ast::NameLike::cast)
+ for name in Self::find_nodes(sema, name, file_id, &tree, offset)
+ .filter_map(ast::NameLike::cast)
{
if match name {
ast::NameLike::NameRef(name_ref) => self.found_name_ref(&name_ref, sink),
@@ -985,8 +989,8 @@
// Search for occurrences of the `Self` referring to our type
if let Some((self_ty, finder)) = &include_self_kw_refs {
for offset in Self::match_indices(&text, finder, search_range) {
- for name_ref in
- Self::find_nodes(sema, "Self", &tree, offset).filter_map(ast::NameRef::cast)
+ for name_ref in Self::find_nodes(sema, "Self", file_id, &tree, offset)
+ .filter_map(ast::NameRef::cast)
{
if self.found_self_ty_name_ref(self_ty, &name_ref, sink) {
return;
@@ -1010,7 +1014,7 @@
let tree = LazyCell::new(move || sema.parse(file_id).syntax().clone());
for offset in Self::match_indices(&text, finder, search_range) {
- for name_ref in Self::find_nodes(sema, "super", &tree, offset)
+ for name_ref in Self::find_nodes(sema, "super", file_id, &tree, offset)
.filter_map(ast::NameRef::cast)
{
if self.found_name_ref(&name_ref, sink) {
@@ -1020,7 +1024,7 @@
}
if let Some(finder) = &is_crate_root {
for offset in Self::match_indices(&text, finder, search_range) {
- for name_ref in Self::find_nodes(sema, "crate", &tree, offset)
+ for name_ref in Self::find_nodes(sema, "crate", file_id, &tree, offset)
.filter_map(ast::NameRef::cast)
{
if self.found_name_ref(&name_ref, sink) {
@@ -1064,8 +1068,8 @@
let finder = &Finder::new("self");
for offset in Self::match_indices(&text, finder, search_range) {
- for name_ref in
- Self::find_nodes(sema, "self", &tree, offset).filter_map(ast::NameRef::cast)
+ for name_ref in Self::find_nodes(sema, "self", file_id, &tree, offset)
+ .filter_map(ast::NameRef::cast)
{
if self.found_self_module_name_ref(&name_ref, sink) {
return;
diff --git a/crates/ide-diagnostics/src/handlers/await_outside_of_async.rs b/crates/ide-diagnostics/src/handlers/await_outside_of_async.rs
index 92ca7a7..2a7b009 100644
--- a/crates/ide-diagnostics/src/handlers/await_outside_of_async.rs
+++ b/crates/ide-diagnostics/src/handlers/await_outside_of_async.rs
@@ -14,6 +14,7 @@
format!("`await` is used inside {}, which is not an `async` context", d.location),
display_range,
)
+ .stable()
}
#[cfg(test)]
diff --git a/crates/ide-diagnostics/src/handlers/bad_rtn.rs b/crates/ide-diagnostics/src/handlers/bad_rtn.rs
index 9ed85f9..ae42a88 100644
--- a/crates/ide-diagnostics/src/handlers/bad_rtn.rs
+++ b/crates/ide-diagnostics/src/handlers/bad_rtn.rs
@@ -12,6 +12,7 @@
"return type notation not allowed in this position yet",
d.rtn.map(Into::into),
)
+ .stable()
}
#[cfg(test)]
diff --git a/crates/ide-diagnostics/src/handlers/break_outside_of_loop.rs b/crates/ide-diagnostics/src/handlers/break_outside_of_loop.rs
index c25b0a7..cbcaab6 100644
--- a/crates/ide-diagnostics/src/handlers/break_outside_of_loop.rs
+++ b/crates/ide-diagnostics/src/handlers/break_outside_of_loop.rs
@@ -19,6 +19,7 @@
message,
d.expr.map(|it| it.into()),
)
+ .stable()
}
#[cfg(test)]
diff --git a/crates/ide-diagnostics/src/handlers/elided_lifetimes_in_path.rs b/crates/ide-diagnostics/src/handlers/elided_lifetimes_in_path.rs
index 438dd2f..b284d9b 100644
--- a/crates/ide-diagnostics/src/handlers/elided_lifetimes_in_path.rs
+++ b/crates/ide-diagnostics/src/handlers/elided_lifetimes_in_path.rs
@@ -15,7 +15,6 @@
"implicit elided lifetime not allowed here",
d.generics_or_segment.map(Into::into),
)
- .experimental()
} else {
Diagnostic::new_with_syntax_node_ptr(
ctx,
@@ -23,7 +22,6 @@
"hidden lifetime parameters in types are deprecated",
d.generics_or_segment.map(Into::into),
)
- .experimental()
}
}
diff --git a/crates/ide-diagnostics/src/handlers/expected_function.rs b/crates/ide-diagnostics/src/handlers/expected_function.rs
index a6da0fd..7d2ac37 100644
--- a/crates/ide-diagnostics/src/handlers/expected_function.rs
+++ b/crates/ide-diagnostics/src/handlers/expected_function.rs
@@ -15,7 +15,6 @@
format!("expected function, found {}", d.found.display(ctx.sema.db, ctx.display_target)),
d.call.map(|it| it.into()),
)
- .experimental()
}
#[cfg(test)]
diff --git a/crates/ide-diagnostics/src/handlers/generic_args_prohibited.rs b/crates/ide-diagnostics/src/handlers/generic_args_prohibited.rs
index b617c09..9ae6f01 100644
--- a/crates/ide-diagnostics/src/handlers/generic_args_prohibited.rs
+++ b/crates/ide-diagnostics/src/handlers/generic_args_prohibited.rs
@@ -21,6 +21,7 @@
describe_reason(d.reason),
d.args.map(Into::into),
)
+ .stable()
.with_fixes(fixes(ctx, d))
}
diff --git a/crates/ide-diagnostics/src/handlers/inactive_code.rs b/crates/ide-diagnostics/src/handlers/inactive_code.rs
index 47e1c84..8611ef6 100644
--- a/crates/ide-diagnostics/src/handlers/inactive_code.rs
+++ b/crates/ide-diagnostics/src/handlers/inactive_code.rs
@@ -33,6 +33,7 @@
message,
ctx.sema.diagnostics_display_range(d.node),
)
+ .stable()
.with_unused(true);
Some(res)
}
diff --git a/crates/ide-diagnostics/src/handlers/incoherent_impl.rs b/crates/ide-diagnostics/src/handlers/incoherent_impl.rs
index 0b9a2ec..a0c364b 100644
--- a/crates/ide-diagnostics/src/handlers/incoherent_impl.rs
+++ b/crates/ide-diagnostics/src/handlers/incoherent_impl.rs
@@ -19,6 +19,7 @@
"cannot define inherent `impl` for foreign type".to_owned(),
display_range,
)
+ .stable()
}
#[cfg(test)]
diff --git a/crates/ide-diagnostics/src/handlers/incorrect_case.rs b/crates/ide-diagnostics/src/handlers/incorrect_case.rs
index 289a076..38f10c7 100644
--- a/crates/ide-diagnostics/src/handlers/incorrect_case.rs
+++ b/crates/ide-diagnostics/src/handlers/incorrect_case.rs
@@ -29,6 +29,7 @@
),
InFile::new(d.file, d.ident.into()),
)
+ .stable()
.with_fixes(fixes(ctx, d))
}
diff --git a/crates/ide-diagnostics/src/handlers/incorrect_generics_len.rs b/crates/ide-diagnostics/src/handlers/incorrect_generics_len.rs
index 17c7f75..06f3575 100644
--- a/crates/ide-diagnostics/src/handlers/incorrect_generics_len.rs
+++ b/crates/ide-diagnostics/src/handlers/incorrect_generics_len.rs
@@ -28,7 +28,6 @@
message,
d.generics_or_segment.map(Into::into),
)
- .experimental()
}
#[cfg(test)]
diff --git a/crates/ide-diagnostics/src/handlers/incorrect_generics_order.rs b/crates/ide-diagnostics/src/handlers/incorrect_generics_order.rs
index 84496df..b71586d 100644
--- a/crates/ide-diagnostics/src/handlers/incorrect_generics_order.rs
+++ b/crates/ide-diagnostics/src/handlers/incorrect_generics_order.rs
@@ -28,6 +28,7 @@
message,
d.provided_arg.map(Into::into),
)
+ .stable()
}
#[cfg(test)]
diff --git a/crates/ide-diagnostics/src/handlers/invalid_cast.rs b/crates/ide-diagnostics/src/handlers/invalid_cast.rs
index d72b210..7a6e98f 100644
--- a/crates/ide-diagnostics/src/handlers/invalid_cast.rs
+++ b/crates/ide-diagnostics/src/handlers/invalid_cast.rs
@@ -100,7 +100,7 @@
// "cannot cast to a pointer of an unknown kind".to_owned(),
// ),
};
- Diagnostic::new(code, message, display_range)
+ Diagnostic::new(code, message, display_range).stable()
}
// Diagnostic: cast-to-unsized
@@ -113,6 +113,7 @@
format_ty!(ctx, "cast to unsized type: `{}`", d.cast_ty),
display_range,
)
+ .stable()
}
#[cfg(test)]
diff --git a/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs b/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs
index ab0f513..8b708f2 100644
--- a/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs
+++ b/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs
@@ -15,6 +15,7 @@
"`derive` may only be applied to `struct`s, `enum`s and `union`s",
display_range,
)
+ .stable()
}
#[cfg(test)]
diff --git a/crates/ide-diagnostics/src/handlers/macro_error.rs b/crates/ide-diagnostics/src/handlers/macro_error.rs
index a2648a1..546512a 100644
--- a/crates/ide-diagnostics/src/handlers/macro_error.rs
+++ b/crates/ide-diagnostics/src/handlers/macro_error.rs
@@ -19,6 +19,7 @@
d.message.clone(),
display_range,
)
+ .stable()
}
// Diagnostic: macro-def-error
@@ -33,6 +34,7 @@
d.message.clone(),
display_range,
)
+ .stable()
}
#[cfg(test)]
diff --git a/crates/ide-diagnostics/src/handlers/malformed_derive.rs b/crates/ide-diagnostics/src/handlers/malformed_derive.rs
index 0e47fff..701b30b 100644
--- a/crates/ide-diagnostics/src/handlers/malformed_derive.rs
+++ b/crates/ide-diagnostics/src/handlers/malformed_derive.rs
@@ -14,6 +14,7 @@
"malformed derive input, derive attributes are of the form `#[derive(Derive1, Derive2, ...)]`",
display_range,
)
+ .stable()
}
#[cfg(test)]
diff --git a/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs b/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs
index 63fd9b4..25c1e63 100644
--- a/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs
+++ b/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs
@@ -26,6 +26,7 @@
message,
invalid_args_range(ctx, d.expr_or_pat, d.expected, d.found),
)
+ .stable()
}
// Diagnostic: mismatched-arg-count
@@ -42,6 +43,7 @@
message,
invalid_args_range(ctx, d.call_expr, d.expected, d.found),
)
+ .stable()
}
fn invalid_args_range(
diff --git a/crates/ide-diagnostics/src/handlers/missing_fields.rs b/crates/ide-diagnostics/src/handlers/missing_fields.rs
index a354d12..2b76efb 100644
--- a/crates/ide-diagnostics/src/handlers/missing_fields.rs
+++ b/crates/ide-diagnostics/src/handlers/missing_fields.rs
@@ -47,6 +47,7 @@
);
Diagnostic::new_with_syntax_node_ptr(ctx, DiagnosticCode::RustcHardError("E0063"), message, ptr)
+ .stable()
.with_fixes(fixes(ctx, d))
}
diff --git a/crates/ide-diagnostics/src/handlers/missing_lifetime.rs b/crates/ide-diagnostics/src/handlers/missing_lifetime.rs
index 8cdbb63..76b3074 100644
--- a/crates/ide-diagnostics/src/handlers/missing_lifetime.rs
+++ b/crates/ide-diagnostics/src/handlers/missing_lifetime.rs
@@ -13,7 +13,6 @@
"missing lifetime specifier",
d.generics_or_segment.map(Into::into),
)
- .experimental()
}
#[cfg(test)]
diff --git a/crates/ide-diagnostics/src/handlers/missing_match_arms.rs b/crates/ide-diagnostics/src/handlers/missing_match_arms.rs
index d3d3c3a..1fc96b7 100644
--- a/crates/ide-diagnostics/src/handlers/missing_match_arms.rs
+++ b/crates/ide-diagnostics/src/handlers/missing_match_arms.rs
@@ -13,6 +13,7 @@
format!("missing match arm: {}", d.uncovered_patterns),
d.scrutinee_expr.map(Into::into),
)
+ .stable()
}
#[cfg(test)]
diff --git a/crates/ide-diagnostics/src/handlers/missing_unsafe.rs b/crates/ide-diagnostics/src/handlers/missing_unsafe.rs
index 3c36b45..6bd5417 100644
--- a/crates/ide-diagnostics/src/handlers/missing_unsafe.rs
+++ b/crates/ide-diagnostics/src/handlers/missing_unsafe.rs
@@ -23,6 +23,7 @@
format!("{operation} is unsafe and requires an unsafe function or block"),
d.node.map(|it| it.into()),
)
+ .stable()
.with_fixes(fixes(ctx, d))
}
@@ -893,4 +894,68 @@
"#,
);
}
+
+ #[test]
+ fn asm_label() {
+ check_diagnostics(
+ r#"
+//- minicore: asm
+fn foo() {
+ unsafe {
+ core::arch::asm!(
+ "jmp {}",
+ label {
+ let p = 0xDEADBEAF as *mut u8;
+ *p = 3;
+ // ^^ error: dereference of raw pointer is unsafe and requires an unsafe function or block
+ },
+ );
+ }
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn regression_19823() {
+ check_diagnostics(
+ r#"
+pub trait FooTrait {
+ unsafe fn method1();
+ unsafe fn method2();
+}
+
+unsafe fn some_unsafe_fn() {}
+
+macro_rules! impl_foo {
+ () => {
+ unsafe fn method1() {
+ some_unsafe_fn();
+ }
+ unsafe fn method2() {
+ some_unsafe_fn();
+ }
+ };
+}
+
+pub struct S1;
+#[allow(unsafe_op_in_unsafe_fn)]
+impl FooTrait for S1 {
+ unsafe fn method1() {
+ some_unsafe_fn();
+ }
+
+ unsafe fn method2() {
+ some_unsafe_fn();
+ }
+}
+
+pub struct S2;
+#[allow(unsafe_op_in_unsafe_fn)]
+impl FooTrait for S2 {
+ impl_foo!();
+}
+ "#,
+ );
+ }
}
diff --git a/crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs b/crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs
index 7802713..01cf5e8 100644
--- a/crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs
+++ b/crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs
@@ -11,7 +11,7 @@
format!("cannot move `{}` out of reference", d.ty.display(ctx.sema.db, ctx.display_target)),
d.span,
)
- .experimental() // spans are broken, and I'm not sure how precise we can detect copy types
+ // spans are broken, and I'm not sure how precisely we can detect copy types
}
#[cfg(test)]
diff --git a/crates/ide-diagnostics/src/handlers/mutability_errors.rs b/crates/ide-diagnostics/src/handlers/mutability_errors.rs
index 5d25f2c..8831efa 100644
--- a/crates/ide-diagnostics/src/handlers/mutability_errors.rs
+++ b/crates/ide-diagnostics/src/handlers/mutability_errors.rs
@@ -55,6 +55,7 @@
),
span,
)
+ .stable()
.with_fixes(fixes),
)
}
@@ -94,7 +95,7 @@
"variable does not need to be mutable",
ast,
)
- .experimental() // Not supporting `#[allow(unused_mut)]` in proc macros leads to false positive.
+ // Not supporting `#[allow(unused_mut)]` in proc macros leads to false positives, hence not stable.
.with_fixes(fixes),
)
}
diff --git a/crates/ide-diagnostics/src/handlers/no_such_field.rs b/crates/ide-diagnostics/src/handlers/no_such_field.rs
index fa3347a..84fb467 100644
--- a/crates/ide-diagnostics/src/handlers/no_such_field.rs
+++ b/crates/ide-diagnostics/src/handlers/no_such_field.rs
@@ -22,6 +22,7 @@
"field is private",
node,
)
+ .stable()
} else {
Diagnostic::new_with_syntax_node_ptr(
ctx,
@@ -32,6 +33,7 @@
"no such field",
node,
)
+ .stable()
.with_fixes(fixes(ctx, d))
}
}
diff --git a/crates/ide-diagnostics/src/handlers/non_exhaustive_let.rs b/crates/ide-diagnostics/src/handlers/non_exhaustive_let.rs
index ff1eeb0..f20b6de 100644
--- a/crates/ide-diagnostics/src/handlers/non_exhaustive_let.rs
+++ b/crates/ide-diagnostics/src/handlers/non_exhaustive_let.rs
@@ -14,6 +14,7 @@
format!("non-exhaustive pattern: {}", d.uncovered_patterns),
d.pat.map(Into::into),
)
+ .stable()
}
#[cfg(test)]
@@ -105,4 +106,29 @@
"#,
);
}
+
+ #[test]
+ fn empty_patterns_normalize() {
+ check_diagnostics(
+ r#"
+enum Infallible {}
+
+trait Foo {
+ type Assoc;
+}
+enum Enum<T: Foo> {
+ A,
+ B(T::Assoc),
+}
+
+impl Foo for () {
+ type Assoc = Infallible;
+}
+
+fn foo(v: Enum<()>) {
+ let Enum::A = v;
+}
+ "#,
+ );
+ }
}
diff --git a/crates/ide-diagnostics/src/handlers/parenthesized_generic_args_without_fn_trait.rs b/crates/ide-diagnostics/src/handlers/parenthesized_generic_args_without_fn_trait.rs
index ccf5172..68f2b19 100644
--- a/crates/ide-diagnostics/src/handlers/parenthesized_generic_args_without_fn_trait.rs
+++ b/crates/ide-diagnostics/src/handlers/parenthesized_generic_args_without_fn_trait.rs
@@ -14,6 +14,7 @@
"parenthesized type parameters may only be used with a `Fn` trait",
d.args.map(Into::into),
)
+ .stable()
}
#[cfg(test)]
diff --git a/crates/ide-diagnostics/src/handlers/private_assoc_item.rs b/crates/ide-diagnostics/src/handlers/private_assoc_item.rs
index fe32c59..6d33ae0 100644
--- a/crates/ide-diagnostics/src/handlers/private_assoc_item.rs
+++ b/crates/ide-diagnostics/src/handlers/private_assoc_item.rs
@@ -28,6 +28,7 @@
),
d.expr_or_pat.map(Into::into),
)
+ .stable()
}
#[cfg(test)]
diff --git a/crates/ide-diagnostics/src/handlers/private_field.rs b/crates/ide-diagnostics/src/handlers/private_field.rs
index 237a9b8..5b4273a 100644
--- a/crates/ide-diagnostics/src/handlers/private_field.rs
+++ b/crates/ide-diagnostics/src/handlers/private_field.rs
@@ -15,6 +15,7 @@
),
d.expr.map(|it| it.into()),
)
+ .stable()
}
#[cfg(test)]
diff --git a/crates/ide-diagnostics/src/handlers/remove_trailing_return.rs b/crates/ide-diagnostics/src/handlers/remove_trailing_return.rs
index 6b78645..dec7be8 100644
--- a/crates/ide-diagnostics/src/handlers/remove_trailing_return.rs
+++ b/crates/ide-diagnostics/src/handlers/remove_trailing_return.rs
@@ -31,6 +31,7 @@
"replace return <expr>; with <expr>",
display_range,
)
+ .stable()
.with_fixes(fixes(ctx, d)),
)
}
diff --git a/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs b/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs
index 8d717b9..7dc5b5b 100644
--- a/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs
+++ b/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs
@@ -36,7 +36,6 @@
"remove unnecessary else block",
display_range,
)
- .experimental()
.with_fixes(fixes(ctx, d)),
)
}
diff --git a/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs b/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs
index 6b335c5..37ce5f5 100644
--- a/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs
+++ b/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs
@@ -21,6 +21,7 @@
"replace filter_map(..).next() with find_map(..)",
InFile::new(d.file, d.next_expr.into()),
)
+ .stable()
.with_fixes(fixes(ctx, d))
}
diff --git a/crates/ide-diagnostics/src/handlers/trait_impl_incorrect_safety.rs b/crates/ide-diagnostics/src/handlers/trait_impl_incorrect_safety.rs
index 19ee1ca..dd142db 100644
--- a/crates/ide-diagnostics/src/handlers/trait_impl_incorrect_safety.rs
+++ b/crates/ide-diagnostics/src/handlers/trait_impl_incorrect_safety.rs
@@ -33,6 +33,7 @@
},
),
)
+ .stable()
}
#[cfg(test)]
diff --git a/crates/ide-diagnostics/src/handlers/trait_impl_missing_assoc_item.rs b/crates/ide-diagnostics/src/handlers/trait_impl_missing_assoc_item.rs
index 2d7d78f..fa7ba90 100644
--- a/crates/ide-diagnostics/src/handlers/trait_impl_missing_assoc_item.rs
+++ b/crates/ide-diagnostics/src/handlers/trait_impl_missing_assoc_item.rs
@@ -29,6 +29,7 @@
&|impl_| impl_.trait_().map(|t| t.syntax().text_range()),
),
)
+ .stable()
}
#[cfg(test)]
diff --git a/crates/ide-diagnostics/src/handlers/trait_impl_orphan.rs b/crates/ide-diagnostics/src/handlers/trait_impl_orphan.rs
index 35dc9b0..96911d4 100644
--- a/crates/ide-diagnostics/src/handlers/trait_impl_orphan.rs
+++ b/crates/ide-diagnostics/src/handlers/trait_impl_orphan.rs
@@ -16,8 +16,6 @@
.to_owned(),
InFile::new(d.file_id, d.impl_.into()),
)
- // Not yet checked for false positives
- .experimental()
}
#[cfg(test)]
diff --git a/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs b/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs
index d5c4bcf..4327b12 100644
--- a/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs
+++ b/crates/ide-diagnostics/src/handlers/trait_impl_redundant_assoc_item.rs
@@ -61,6 +61,7 @@
format!("{redundant_item_name} is not a member of trait `{trait_name}`"),
ide_db::FileRange { file_id: file_id.file_id(ctx.sema.db), range },
)
+ .stable()
.with_fixes(quickfix_for_redundant_assoc_item(
ctx,
d,
diff --git a/crates/ide-diagnostics/src/handlers/type_mismatch.rs b/crates/ide-diagnostics/src/handlers/type_mismatch.rs
index 500c5de..076df1a 100644
--- a/crates/ide-diagnostics/src/handlers/type_mismatch.rs
+++ b/crates/ide-diagnostics/src/handlers/type_mismatch.rs
@@ -53,8 +53,8 @@
display_range,
)
.with_fixes(fixes(ctx, d));
- if diag.fixes.is_none() {
- diag.experimental = true;
+ if diag.fixes.is_some() {
+ diag.experimental = false;
}
diag
}
@@ -1243,4 +1243,18 @@
"#,
);
}
+
+ #[test]
+ fn regression_19844() {
+ check_diagnostics(
+ r#"
+fn main() {
+ struct S {}
+ enum E { V() }
+ let E::V() = &S {};
+ // ^^^^^^ error: expected S, found E
+}
+"#,
+ );
+ }
}
diff --git a/crates/ide-diagnostics/src/handlers/typed_hole.rs b/crates/ide-diagnostics/src/handlers/typed_hole.rs
index a933f1b..1915a88 100644
--- a/crates/ide-diagnostics/src/handlers/typed_hole.rs
+++ b/crates/ide-diagnostics/src/handlers/typed_hole.rs
@@ -37,6 +37,7 @@
};
Diagnostic::new(DiagnosticCode::RustcHardError("typed-hole"), message, display_range)
+ .stable()
.with_fixes(fixes)
}
diff --git a/crates/ide-diagnostics/src/handlers/undeclared_label.rs b/crates/ide-diagnostics/src/handlers/undeclared_label.rs
index d16bfb8..f81d343 100644
--- a/crates/ide-diagnostics/src/handlers/undeclared_label.rs
+++ b/crates/ide-diagnostics/src/handlers/undeclared_label.rs
@@ -12,6 +12,7 @@
format!("use of undeclared label `{}`", name.display(ctx.sema.db, ctx.edition)),
d.node.map(|it| it.into()),
)
+ .stable()
}
#[cfg(test)]
diff --git a/crates/ide-diagnostics/src/handlers/unimplemented_builtin_macro.rs b/crates/ide-diagnostics/src/handlers/unimplemented_builtin_macro.rs
index 06f176f..5627393 100644
--- a/crates/ide-diagnostics/src/handlers/unimplemented_builtin_macro.rs
+++ b/crates/ide-diagnostics/src/handlers/unimplemented_builtin_macro.rs
@@ -13,4 +13,5 @@
"unimplemented built-in macro".to_owned(),
d.node,
)
+ .stable()
}
diff --git a/crates/ide-diagnostics/src/handlers/unlinked_file.rs b/crates/ide-diagnostics/src/handlers/unlinked_file.rs
index 47fa305..af9126c 100644
--- a/crates/ide-diagnostics/src/handlers/unlinked_file.rs
+++ b/crates/ide-diagnostics/src/handlers/unlinked_file.rs
@@ -2,7 +2,8 @@
use std::iter;
-use hir::{DefMap, InFile, ModuleSource, db::DefDatabase};
+use hir::crate_def_map;
+use hir::{DefMap, InFile, ModuleSource};
use ide_db::base_db::RootQueryDb;
use ide_db::text_edit::TextEdit;
use ide_db::{
@@ -101,7 +102,8 @@
// check crate roots, i.e. main.rs, lib.rs, ...
let relevant_crates = db.relevant_crates(file_id);
'crates: for &krate in &*relevant_crates {
- let crate_def_map = ctx.sema.db.crate_def_map(krate);
+ // FIXME: This shouldn't need to access the crate def map directly
+ let crate_def_map = crate_def_map(ctx.sema.db, krate);
let root_module = &crate_def_map[DefMap::ROOT];
let Some(root_file_id) = root_module.origin.file_id() else { continue };
@@ -156,7 +158,7 @@
stack.pop();
let relevant_crates = db.relevant_crates(parent_id);
'crates: for &krate in relevant_crates.iter() {
- let crate_def_map = ctx.sema.db.crate_def_map(krate);
+ let crate_def_map = crate_def_map(ctx.sema.db, krate);
let Some((_, module)) = crate_def_map.modules().find(|(_, module)| {
module.origin.file_id().map(|file_id| file_id.file_id(ctx.sema.db)) == Some(parent_id)
&& !module.origin.is_inline()
diff --git a/crates/ide-diagnostics/src/handlers/unreachable_label.rs b/crates/ide-diagnostics/src/handlers/unreachable_label.rs
index bdff241..0c9e0d6 100644
--- a/crates/ide-diagnostics/src/handlers/unreachable_label.rs
+++ b/crates/ide-diagnostics/src/handlers/unreachable_label.rs
@@ -12,6 +12,7 @@
format!("use of unreachable label `{}`", name.display(ctx.sema.db, ctx.edition)),
d.node.map(|it| it.into()),
)
+ .stable()
}
#[cfg(test)]
diff --git a/crates/ide-diagnostics/src/handlers/unresolved_assoc_item.rs b/crates/ide-diagnostics/src/handlers/unresolved_assoc_item.rs
index 614057a..4ae528b 100644
--- a/crates/ide-diagnostics/src/handlers/unresolved_assoc_item.rs
+++ b/crates/ide-diagnostics/src/handlers/unresolved_assoc_item.rs
@@ -13,7 +13,6 @@
"no such associated item",
d.expr_or_pat.map(Into::into),
)
- .experimental()
}
#[cfg(test)]
diff --git a/crates/ide-diagnostics/src/handlers/unresolved_extern_crate.rs b/crates/ide-diagnostics/src/handlers/unresolved_extern_crate.rs
index 4cd73d4..7c3eacf 100644
--- a/crates/ide-diagnostics/src/handlers/unresolved_extern_crate.rs
+++ b/crates/ide-diagnostics/src/handlers/unresolved_extern_crate.rs
@@ -13,6 +13,7 @@
"unresolved extern crate",
d.decl.map(|it| it.into()),
)
+ .stable()
}
#[cfg(test)]
diff --git a/crates/ide-diagnostics/src/handlers/unresolved_field.rs b/crates/ide-diagnostics/src/handlers/unresolved_field.rs
index a4f4813..0649c97 100644
--- a/crates/ide-diagnostics/src/handlers/unresolved_field.rs
+++ b/crates/ide-diagnostics/src/handlers/unresolved_field.rs
@@ -52,7 +52,6 @@
}),
)
.with_fixes(fixes(ctx, d))
- .experimental()
}
fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedField) -> Option<Vec<Assist>> {
diff --git a/crates/ide-diagnostics/src/handlers/unresolved_ident.rs b/crates/ide-diagnostics/src/handlers/unresolved_ident.rs
index 4f64dab..801023d 100644
--- a/crates/ide-diagnostics/src/handlers/unresolved_ident.rs
+++ b/crates/ide-diagnostics/src/handlers/unresolved_ident.rs
@@ -13,7 +13,6 @@
range.range = in_node_range + range.range.start();
}
Diagnostic::new(DiagnosticCode::RustcHardError("E0425"), "no such value in this scope", range)
- .experimental()
}
#[cfg(test)]
diff --git a/crates/ide-diagnostics/src/handlers/unresolved_import.rs b/crates/ide-diagnostics/src/handlers/unresolved_import.rs
index 67c7e76..0da535d 100644
--- a/crates/ide-diagnostics/src/handlers/unresolved_import.rs
+++ b/crates/ide-diagnostics/src/handlers/unresolved_import.rs
@@ -18,7 +18,6 @@
// - `cfg_if!`-generated code in libstd (we don't load the sysroot correctly)
// - `core::arch` (we don't handle `#[path = "../<path>"]` correctly)
// - proc macros and/or proc macro generated code
- .experimental()
}
#[cfg(test)]
diff --git a/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs b/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs
index 0d1c977..a87b8c4 100644
--- a/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs
+++ b/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs
@@ -16,7 +16,6 @@
format!("unresolved macro `{}{bang}`", d.path.display(ctx.sema.db, ctx.edition)),
display_range,
)
- .experimental()
}
#[cfg(test)]
diff --git a/crates/ide-diagnostics/src/handlers/unresolved_method.rs b/crates/ide-diagnostics/src/handlers/unresolved_method.rs
index 7f07009..00c2a8c 100644
--- a/crates/ide-diagnostics/src/handlers/unresolved_method.rs
+++ b/crates/ide-diagnostics/src/handlers/unresolved_method.rs
@@ -47,7 +47,6 @@
}),
)
.with_fixes(fixes(ctx, d))
- .experimental()
}
fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedMethodCall) -> Option<Vec<Assist>> {
diff --git a/crates/ide-diagnostics/src/handlers/unresolved_module.rs b/crates/ide-diagnostics/src/handlers/unresolved_module.rs
index 599cabe..1a409d7 100644
--- a/crates/ide-diagnostics/src/handlers/unresolved_module.rs
+++ b/crates/ide-diagnostics/src/handlers/unresolved_module.rs
@@ -28,6 +28,7 @@
},
d.decl.map(|it| it.into()),
)
+ .stable()
.with_fixes(fixes(ctx, d))
}
diff --git a/crates/ide-diagnostics/src/handlers/unused_variables.rs b/crates/ide-diagnostics/src/handlers/unused_variables.rs
index 77b1075..e6bbff0 100644
--- a/crates/ide-diagnostics/src/handlers/unused_variables.rs
+++ b/crates/ide-diagnostics/src/handlers/unused_variables.rs
@@ -50,8 +50,7 @@
ast.file_id.is_macro(),
ctx.edition,
)
- }))
- .experimental(),
+ })),
)
}
diff --git a/crates/ide-diagnostics/src/lib.rs b/crates/ide-diagnostics/src/lib.rs
index 607721d..72bd66d 100644
--- a/crates/ide-diagnostics/src/lib.rs
+++ b/crates/ide-diagnostics/src/lib.rs
@@ -83,12 +83,11 @@
#[cfg(test)]
mod tests;
-use std::{collections::hash_map, iter, sync::LazyLock};
+use std::{iter, sync::LazyLock};
use either::Either;
use hir::{
- Crate, DisplayTarget, HirFileId, InFile, Semantics, db::ExpandDatabase,
- diagnostics::AnyDiagnostic,
+ Crate, DisplayTarget, InFile, Semantics, db::ExpandDatabase, diagnostics::AnyDiagnostic,
};
use ide_db::{
EditionedFileId, FileId, FileRange, FxHashMap, FxHashSet, RootDatabase, Severity, SnippetCap,
@@ -182,7 +181,7 @@
DiagnosticCode::Ra(_, s) => s,
},
unused: false,
- experimental: false,
+ experimental: true,
fixes: None,
main_node: None,
}
@@ -198,8 +197,8 @@
.with_main_node(node)
}
- fn experimental(mut self) -> Diagnostic {
- self.experimental = true;
+ fn stable(mut self) -> Diagnostic {
+ self.experimental = false;
self
}
@@ -424,14 +423,11 @@
AnyDiagnostic::MacroExpansionParseError(d) => {
// FIXME: Point to the correct error span here, not just the macro-call name
res.extend(d.errors.iter().take(16).map(|err| {
- {
Diagnostic::new(
DiagnosticCode::SyntaxError,
format!("Syntax Error in Expansion: {err}"),
ctx.resolve_precise_location(&d.node.clone(), d.precise_location),
)
- }
- .experimental()
}));
continue;
},
@@ -485,12 +481,8 @@
Some(it) => it,
None => continue,
},
- AnyDiagnostic::GenericArgsProhibited(d) => {
- handlers::generic_args_prohibited::generic_args_prohibited(&ctx, &d)
- }
- AnyDiagnostic::ParenthesizedGenericArgsWithoutFnTrait(d) => {
- handlers::parenthesized_generic_args_without_fn_trait::parenthesized_generic_args_without_fn_trait(&ctx, &d)
- }
+ AnyDiagnostic::GenericArgsProhibited(d) => handlers::generic_args_prohibited::generic_args_prohibited(&ctx, &d),
+ AnyDiagnostic::ParenthesizedGenericArgsWithoutFnTrait(d) => handlers::parenthesized_generic_args_without_fn_trait::parenthesized_generic_args_without_fn_trait(&ctx, &d),
AnyDiagnostic::BadRtn(d) => handlers::bad_rtn::bad_rtn(&ctx, &d),
AnyDiagnostic::IncorrectGenericsLen(d) => handlers::incorrect_generics_len::incorrect_generics_len(&ctx, &d),
AnyDiagnostic::IncorrectGenericsOrder(d) => handlers::incorrect_generics_order::incorrect_generics_order(&ctx, &d),
@@ -520,13 +512,7 @@
// The edition isn't accurate (each diagnostics may have its own edition due to macros),
// but it's okay as it's only being used for error recovery.
- handle_lints(
- &ctx.sema,
- &mut FxHashMap::default(),
- &mut lints,
- &mut Vec::new(),
- editioned_file_id.edition(db),
- );
+ handle_lints(&ctx.sema, &mut lints, editioned_file_id.edition(db));
res.retain(|d| d.severity != Severity::Allow);
@@ -591,8 +577,6 @@
true
}
-// `__RA_EVERY_LINT` is a fake lint group to allow every lint in proc macros
-
struct BuiltLint {
lint: &'static Lint,
groups: Vec<&'static str>,
@@ -636,9 +620,7 @@
fn handle_lints(
sema: &Semantics<'_, RootDatabase>,
- cache: &mut FxHashMap<HirFileId, FxHashMap<SmolStr, SeverityAttr>>,
diagnostics: &mut [(InFile<SyntaxNode>, &mut Diagnostic)],
- cache_stack: &mut Vec<HirFileId>,
edition: Edition,
) {
for (node, diag) in diagnostics {
@@ -652,7 +634,8 @@
diag.severity = default_severity;
}
- let mut diag_severity = fill_lint_attrs(sema, node, cache, cache_stack, diag, edition);
+ let mut diag_severity =
+ lint_severity_at(sema, node, &lint_groups(&diag.code, edition), edition);
if let outline_diag_severity @ Some(_) =
find_outline_mod_lint_severity(sema, node, diag, edition)
@@ -705,155 +688,22 @@
result
}
-#[derive(Debug, Clone, Copy)]
-struct SeverityAttr {
- severity: Severity,
- /// This field counts how far we are from the main node. Bigger values mean more far.
- ///
- /// Note this isn't accurate: there can be gaps between values (created when merging severity maps).
- /// The important thing is that if an attr is closer to the main node, it will have smaller value.
- ///
- /// This is necessary even though we take care to never overwrite a value from deeper nesting
- /// because of lint groups. For example, in the following code:
- /// ```
- /// #[warn(non_snake_case)]
- /// mod foo {
- /// #[allow(nonstandard_style)]
- /// mod bar {}
- /// }
- /// ```
- /// We want to not warn on non snake case inside `bar`. If we are traversing this for the first
- /// time, everything will be fine, because we will set `diag_severity` on the first matching group
- /// and never overwrite it since then. But if `bar` is cached, the cache will contain both
- /// `#[warn(non_snake_case)]` and `#[allow(nonstandard_style)]`, and without this field, we have
- /// no way of differentiating between the two.
- depth: u32,
-}
-
-fn fill_lint_attrs(
+fn lint_severity_at(
sema: &Semantics<'_, RootDatabase>,
node: &InFile<SyntaxNode>,
- cache: &mut FxHashMap<HirFileId, FxHashMap<SmolStr, SeverityAttr>>,
- cache_stack: &mut Vec<HirFileId>,
- diag: &Diagnostic,
+ lint_groups: &LintGroups,
edition: Edition,
) -> Option<Severity> {
- let mut collected_lint_attrs = FxHashMap::<SmolStr, SeverityAttr>::default();
- let mut diag_severity = None;
-
- let mut ancestors = node.value.ancestors().peekable();
- let mut depth = 0;
- loop {
- let ancestor = ancestors.next().expect("we always return from top-level nodes");
- depth += 1;
-
- if ancestors.peek().is_none() {
- // We don't want to insert too many nodes into cache, but top level nodes (aka. outline modules
- // or macro expansions) need to touch the database so they seem like a good fit to cache.
-
- if let Some(cached) = cache.get_mut(&node.file_id) {
- // This node (and everything above it) is already cached; the attribute is either here or nowhere.
-
- // Workaround for the borrow checker.
- let cached = std::mem::take(cached);
-
- cached.iter().for_each(|(lint, severity)| {
- for item in &*cache_stack {
- let node_cache_entry = cache
- .get_mut(item)
- .expect("we always insert cached nodes into the cache map");
- let lint_cache_entry = node_cache_entry.entry(lint.clone());
- if let hash_map::Entry::Vacant(lint_cache_entry) = lint_cache_entry {
- // Do not overwrite existing lint attributes, as we go bottom to top and bottom attrs
- // overwrite top attrs.
- lint_cache_entry.insert(SeverityAttr {
- severity: severity.severity,
- depth: severity.depth + depth,
- });
- }
- }
- });
-
- let lints = lint_groups(&diag.code, edition);
- let all_matching_groups =
- lints.iter().filter_map(|lint_group| cached.get(lint_group));
- let cached_severity =
- all_matching_groups.min_by_key(|it| it.depth).map(|it| it.severity);
-
- cache.insert(node.file_id, cached);
-
- return diag_severity.or(cached_severity);
- }
-
- // Insert this node's descendants' attributes into any outline descendant, but not including this node.
- // This must come before inserting this node's own attributes to preserve order.
- collected_lint_attrs.drain().for_each(|(lint, severity)| {
- if diag_severity.is_none() && lint_groups(&diag.code, edition).contains(&lint) {
- diag_severity = Some(severity.severity);
- }
-
- for item in &*cache_stack {
- let node_cache_entry = cache
- .get_mut(item)
- .expect("we always insert cached nodes into the cache map");
- let lint_cache_entry = node_cache_entry.entry(lint.clone());
- if let hash_map::Entry::Vacant(lint_cache_entry) = lint_cache_entry {
- // Do not overwrite existing lint attributes, as we go bottom to top and bottom attrs
- // overwrite top attrs.
- lint_cache_entry.insert(severity);
- }
- }
- });
-
- cache_stack.push(node.file_id);
- cache.insert(node.file_id, FxHashMap::default());
-
- if let Some(ancestor) = ast::AnyHasAttrs::cast(ancestor) {
- // Insert this node's attributes into any outline descendant, including this node.
- lint_attrs(sema, ancestor, edition).for_each(|(lint, severity)| {
- if diag_severity.is_none() && lint_groups(&diag.code, edition).contains(&lint) {
- diag_severity = Some(severity);
- }
-
- for item in &*cache_stack {
- let node_cache_entry = cache
- .get_mut(item)
- .expect("we always insert cached nodes into the cache map");
- let lint_cache_entry = node_cache_entry.entry(lint.clone());
- if let hash_map::Entry::Vacant(lint_cache_entry) = lint_cache_entry {
- // Do not overwrite existing lint attributes, as we go bottom to top and bottom attrs
- // overwrite top attrs.
- lint_cache_entry.insert(SeverityAttr { severity, depth });
- }
- }
- });
- }
-
- let parent_node = sema.find_parent_file(node.file_id);
- if let Some(parent_node) = parent_node {
- let parent_severity =
- fill_lint_attrs(sema, &parent_node, cache, cache_stack, diag, edition);
- if diag_severity.is_none() {
- diag_severity = parent_severity;
- }
- }
- cache_stack.pop();
- return diag_severity;
- } else if let Some(ancestor) = ast::AnyHasAttrs::cast(ancestor) {
- lint_attrs(sema, ancestor, edition).for_each(|(lint, severity)| {
- if diag_severity.is_none() && lint_groups(&diag.code, edition).contains(&lint) {
- diag_severity = Some(severity);
- }
-
- let lint_cache_entry = collected_lint_attrs.entry(lint);
- if let hash_map::Entry::Vacant(lint_cache_entry) = lint_cache_entry {
- // Do not overwrite existing lint attributes, as we go bottom to top and bottom attrs
- // overwrite top attrs.
- lint_cache_entry.insert(SeverityAttr { severity, depth });
- }
- });
- }
- }
+ node.value
+ .ancestors()
+ .filter_map(ast::AnyHasAttrs::cast)
+ .find_map(|ancestor| {
+ lint_attrs(sema, ancestor, edition)
+ .find_map(|(lint, severity)| lint_groups.contains(&lint).then_some(severity))
+ })
+ .or_else(|| {
+ lint_severity_at(sema, &sema.find_parent_file(node.file_id)?, lint_groups, edition)
+ })
}
fn lint_attrs<'a>(
@@ -952,10 +802,6 @@
fn contains(&self, group: &str) -> bool {
self.groups.contains(&group) || (self.inside_warnings && group == "warnings")
}
-
- fn iter(&self) -> impl Iterator<Item = &'static str> {
- self.groups.iter().copied().chain(self.inside_warnings.then_some("warnings"))
- }
}
fn lint_groups(lint: &DiagnosticCode, edition: Edition) -> LintGroups {
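
For orientation, a minimal self-contained sketch of the lookup the new `lint_severity_at` performs above: walk outwards from the diagnostic's node, return the severity of the nearest lint attribute that names one of the diagnostic's lint groups, and otherwise fall back to the enclosing parent (modelled here as a plain parent pointer). `Node`, `Severity`, and the helper below are simplified stand-ins, not rust-analyzer's real types.

```rust
// Illustrative only: simplified stand-ins for rust-analyzer's syntax/HIR types.
#[derive(Clone, Copy, Debug, PartialEq)]
enum Severity {
    Allow,
    Warning,
}

struct Node {
    /// Lint attributes written directly on this node, e.g. `#[allow(non_snake_case)]`.
    lint_attrs: Vec<(String, Severity)>,
    /// Enclosing node (stands in for both syntax ancestors and the parent file).
    parent: Option<Box<Node>>,
}

/// Walk from the diagnostic's node outwards and return the severity of the first
/// attribute whose lint name is one of the diagnostic's lint groups; the closest
/// attribute wins, which is why no depth bookkeeping is needed anymore.
fn lint_severity_at(node: &Node, lint_groups: &[&str]) -> Option<Severity> {
    let mut current = Some(node);
    while let Some(n) = current {
        if let Some((_, severity)) =
            n.lint_attrs.iter().find(|(lint, _)| lint_groups.contains(&lint.as_str()))
        {
            return Some(*severity);
        }
        current = n.parent.as_deref();
    }
    None
}

fn main() {
    // #[warn(non_snake_case)] mod foo { #[allow(nonstandard_style)] mod bar { ... } }
    let foo = Node {
        lint_attrs: vec![("non_snake_case".into(), Severity::Warning)],
        parent: None,
    };
    let bar = Node {
        lint_attrs: vec![("nonstandard_style".into(), Severity::Allow)],
        parent: Some(Box::new(foo)),
    };
    // The nearer `allow` wins over the outer `warn`.
    assert_eq!(
        lint_severity_at(&bar, &["non_snake_case", "nonstandard_style"]),
        Some(Severity::Allow)
    );
}
```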
diff --git a/crates/ide-ssr/src/lib.rs b/crates/ide-ssr/src/lib.rs
index 339c199..43c56ac 100644
--- a/crates/ide-ssr/src/lib.rs
+++ b/crates/ide-ssr/src/lib.rs
@@ -287,7 +287,7 @@
if let Some(expanded) = self.sema.expand_macro_call(¯o_call) {
if let Some(tt) = macro_call.token_tree() {
self.output_debug_for_nodes_at_range(
- &expanded,
+ &expanded.value,
range,
&Some(self.sema.original_range(tt.syntax())),
out,
diff --git a/crates/ide-ssr/src/search.rs b/crates/ide-ssr/src/search.rs
index d89911f..9afbedb 100644
--- a/crates/ide-ssr/src/search.rs
+++ b/crates/ide-ssr/src/search.rs
@@ -194,7 +194,7 @@
// nodes that originated entirely from within the token tree of the macro call.
// i.e. we don't want to match something that came from the macro itself.
if let Some(range) = self.sema.original_range_opt(tt.syntax()) {
- self.slow_scan_node(&expanded, rule, &Some(range), matches_out);
+ self.slow_scan_node(&expanded.value, rule, &Some(range), matches_out);
}
}
}
diff --git a/crates/ide/src/doc_links.rs b/crates/ide/src/doc_links.rs
index f0247f3..2c98328 100644
--- a/crates/ide/src/doc_links.rs
+++ b/crates/ide/src/doc_links.rs
@@ -5,17 +5,21 @@
mod intra_doc_links;
+use std::ops::Range;
+
use pulldown_cmark::{BrokenLink, CowStr, Event, InlineStr, LinkType, Options, Parser, Tag};
use pulldown_cmark_to_cmark::{Options as CMarkOptions, cmark_resume_with_options};
use stdx::format_to;
use url::Url;
-use hir::{Adt, AsAssocItem, AssocItem, AssocItemContainer, HasAttrs, db::HirDatabase, sym};
+use hir::{
+ Adt, AsAssocItem, AssocItem, AssocItemContainer, AttrsWithOwner, HasAttrs, db::HirDatabase, sym,
+};
use ide_db::{
RootDatabase,
base_db::{CrateOrigin, LangCrateOrigin, ReleaseChannel, RootQueryDb},
defs::{Definition, NameClass, NameRefClass},
- documentation::{Documentation, HasDocs, docs_with_rangemap},
+ documentation::{DocsRangeMap, Documentation, HasDocs, docs_with_rangemap},
helpers::pick_best_token,
};
use syntax::{
@@ -46,11 +50,17 @@
Options::ENABLE_FOOTNOTES.union(Options::ENABLE_TABLES).union(Options::ENABLE_TASKLISTS);
/// Rewrite documentation links in markdown to point to an online host (e.g. docs.rs)
-pub(crate) fn rewrite_links(db: &RootDatabase, markdown: &str, definition: Definition) -> String {
+pub(crate) fn rewrite_links(
+ db: &RootDatabase,
+ markdown: &str,
+ definition: Definition,
+ range_map: Option<DocsRangeMap>,
+) -> String {
let mut cb = broken_link_clone_cb;
- let doc = Parser::new_with_broken_link_callback(markdown, MARKDOWN_OPTIONS, Some(&mut cb));
+ let doc = Parser::new_with_broken_link_callback(markdown, MARKDOWN_OPTIONS, Some(&mut cb))
+ .into_offset_iter();
- let doc = map_links(doc, |target, title| {
+ let doc = map_links(doc, |target, title, range| {
// This check is imperfect, there's some overlap between valid intra-doc links
// and valid URLs so we choose to be too eager to try to resolve what might be
// a URL.
@@ -60,7 +70,16 @@
// Two possibilities:
// * path-based links: `../../module/struct.MyStruct.html`
// * module-based links (AKA intra-doc links): `super::super::module::MyStruct`
- if let Some((target, title)) = rewrite_intra_doc_link(db, definition, target, title) {
+ let text_range =
+ TextRange::new(range.start.try_into().unwrap(), range.end.try_into().unwrap());
+ let is_inner_doc = range_map
+ .as_ref()
+ .and_then(|range_map| range_map.map(text_range))
+ .map(|(_, attr_id)| attr_id.is_inner_attr())
+ .unwrap_or(false);
+ if let Some((target, title)) =
+ rewrite_intra_doc_link(db, definition, target, title, is_inner_doc)
+ {
(None, target, title)
} else if let Some(target) = rewrite_url_link(db, definition, target) {
(Some(LinkType::Inline), target, title.to_owned())
@@ -195,22 +214,23 @@
def: Definition,
link: &str,
ns: Option<hir::Namespace>,
+ is_inner_doc: bool,
) -> Option<Definition> {
match def {
- Definition::Module(it) => it.resolve_doc_path(db, link, ns),
- Definition::Crate(it) => it.resolve_doc_path(db, link, ns),
- Definition::Function(it) => it.resolve_doc_path(db, link, ns),
- Definition::Adt(it) => it.resolve_doc_path(db, link, ns),
- Definition::Variant(it) => it.resolve_doc_path(db, link, ns),
- Definition::Const(it) => it.resolve_doc_path(db, link, ns),
- Definition::Static(it) => it.resolve_doc_path(db, link, ns),
- Definition::Trait(it) => it.resolve_doc_path(db, link, ns),
- Definition::TraitAlias(it) => it.resolve_doc_path(db, link, ns),
- Definition::TypeAlias(it) => it.resolve_doc_path(db, link, ns),
- Definition::Macro(it) => it.resolve_doc_path(db, link, ns),
- Definition::Field(it) => it.resolve_doc_path(db, link, ns),
- Definition::SelfType(it) => it.resolve_doc_path(db, link, ns),
- Definition::ExternCrateDecl(it) => it.resolve_doc_path(db, link, ns),
+ Definition::Module(it) => it.resolve_doc_path(db, link, ns, is_inner_doc),
+ Definition::Crate(it) => it.resolve_doc_path(db, link, ns, is_inner_doc),
+ Definition::Function(it) => it.resolve_doc_path(db, link, ns, is_inner_doc),
+ Definition::Adt(it) => it.resolve_doc_path(db, link, ns, is_inner_doc),
+ Definition::Variant(it) => it.resolve_doc_path(db, link, ns, is_inner_doc),
+ Definition::Const(it) => it.resolve_doc_path(db, link, ns, is_inner_doc),
+ Definition::Static(it) => it.resolve_doc_path(db, link, ns, is_inner_doc),
+ Definition::Trait(it) => it.resolve_doc_path(db, link, ns, is_inner_doc),
+ Definition::TraitAlias(it) => it.resolve_doc_path(db, link, ns, is_inner_doc),
+ Definition::TypeAlias(it) => it.resolve_doc_path(db, link, ns, is_inner_doc),
+ Definition::Macro(it) => it.resolve_doc_path(db, link, ns, is_inner_doc),
+ Definition::Field(it) => it.resolve_doc_path(db, link, ns, is_inner_doc),
+ Definition::SelfType(it) => it.resolve_doc_path(db, link, ns, is_inner_doc),
+ Definition::ExternCrateDecl(it) => it.resolve_doc_path(db, link, ns, is_inner_doc),
Definition::BuiltinAttr(_)
| Definition::BuiltinType(_)
| Definition::BuiltinLifetime(_)
@@ -289,31 +309,58 @@
let relative_comment_offset = offset - original_start - prefix_len;
sema.descend_into_macros(doc_token).into_iter().find_map(|t| {
- let (node, descended_prefix_len) = match_ast! {
+ let (node, descended_prefix_len, is_inner) = match_ast! {
match t {
- ast::Comment(comment) => (t.parent()?, TextSize::try_from(comment.prefix().len()).ok()?),
- ast::String(string) => (t.parent_ancestors().skip_while(|n| n.kind() != ATTR).nth(1)?, string.open_quote_text_range()?.len()),
+ ast::Comment(comment) => {
+ (t.parent()?, TextSize::try_from(comment.prefix().len()).ok()?, comment.is_inner())
+ },
+ ast::String(string) => {
+ let attr = t.parent_ancestors().find_map(ast::Attr::cast)?;
+ let attr_is_inner = attr.excl_token().map(|excl| excl.kind() == BANG).unwrap_or(false);
+ (attr.syntax().parent()?, string.open_quote_text_range()?.len(), attr_is_inner)
+ },
_ => return None,
}
};
let token_start = t.text_range().start();
let abs_in_expansion_offset = token_start + relative_comment_offset + descended_prefix_len;
-
- let (attributes, def) = doc_attributes(sema, &node)?;
+ let (attributes, def) = Self::doc_attributes(sema, &node, is_inner)?;
let (docs, doc_mapping) = docs_with_rangemap(sema.db, &attributes)?;
- let (in_expansion_range, link, ns) =
+ let (in_expansion_range, link, ns, is_inner) =
extract_definitions_from_docs(&docs).into_iter().find_map(|(range, link, ns)| {
- let mapped = doc_mapping.map(range)?;
- (mapped.value.contains(abs_in_expansion_offset)).then_some((mapped.value, link, ns))
+ let (mapped, idx) = doc_mapping.map(range)?;
+ (mapped.value.contains(abs_in_expansion_offset)).then_some((mapped.value, link, ns, idx.is_inner_attr()))
})?;
// get the relative range to the doc/attribute in the expansion
let in_expansion_relative_range = in_expansion_range - descended_prefix_len - token_start;
// Apply relative range to the original input comment
let absolute_range = in_expansion_relative_range + original_start + prefix_len;
- let def = resolve_doc_path_for_def(sema.db, def, &link, ns)?;
+ let def = resolve_doc_path_for_def(sema.db, def, &link, ns, is_inner)?;
cb(def, node, absolute_range)
})
}
+
+ /// When we hover an inner doc item, this finds the attached definition.
+ /// ```
+ /// // node == ITEM_LIST
+ /// // node.parent == EXPR_BLOCK
+ /// // node.parent().parent() == FN
+ /// fn f() {
+ /// //! [`S$0`]
+ /// }
+ /// ```
+ fn doc_attributes(
+ sema: &Semantics<'_, RootDatabase>,
+ node: &SyntaxNode,
+ is_inner_doc: bool,
+ ) -> Option<(AttrsWithOwner, Definition)> {
+ if is_inner_doc && node.kind() != SOURCE_FILE {
+ let parent = node.parent()?;
+ doc_attributes(sema, &parent).or(doc_attributes(sema, &parent.parent()?))
+ } else {
+ doc_attributes(sema, node)
+ }
+ }
}
fn broken_link_clone_cb(link: BrokenLink<'_>) -> Option<(CowStr<'_>, CowStr<'_>)> {
@@ -369,6 +416,7 @@
def: Definition,
target: &str,
title: &str,
+ is_inner_doc: bool,
) -> Option<(String, String)> {
let (link, ns) = parse_intra_doc_link(target);
@@ -377,7 +425,7 @@
None => (link, None),
};
- let resolved = resolve_doc_path_for_def(db, def, link, ns)?;
+ let resolved = resolve_doc_path_for_def(db, def, link, ns, is_inner_doc)?;
let mut url = get_doc_base_urls(db, resolved, None, None).0?;
let (_, file, frag) = filename_and_frag_for_def(db, resolved)?;
@@ -421,8 +469,8 @@
/// Rewrites a markdown document, applying 'callback' to each link.
fn map_links<'e>(
- events: impl Iterator<Item = Event<'e>>,
- callback: impl Fn(&str, &str) -> (Option<LinkType>, String, String),
+ events: impl Iterator<Item = (Event<'e>, Range<usize>)>,
+ callback: impl Fn(&str, &str, Range<usize>) -> (Option<LinkType>, String, String),
) -> impl Iterator<Item = Event<'e>> {
let mut in_link = false;
// holds the origin link target on start event and the rewritten one on end event
@@ -432,7 +480,7 @@
// `Shortcut` type parsed from Start/End tags doesn't make sense for url links
let mut end_link_type: Option<LinkType> = None;
- events.map(move |evt| match evt {
+ events.map(move |(evt, range)| match evt {
Event::Start(Tag::Link(link_type, ref target, _)) => {
in_link = true;
end_link_target = Some(target.clone());
@@ -449,7 +497,7 @@
}
Event::Text(s) if in_link => {
let (link_type, link_target_s, link_name) =
- callback(&end_link_target.take().unwrap(), &s);
+ callback(&end_link_target.take().unwrap(), &s, range);
end_link_target = Some(CowStr::Boxed(link_target_s.into()));
if !matches!(end_link_type, Some(LinkType::Autolink)) {
end_link_type = link_type;
@@ -458,7 +506,7 @@
}
Event::Code(s) if in_link => {
let (link_type, link_target_s, link_name) =
- callback(&end_link_target.take().unwrap(), &s);
+ callback(&end_link_target.take().unwrap(), &s, range);
end_link_target = Some(CowStr::Boxed(link_target_s.into()));
if !matches!(end_link_type, Some(LinkType::Autolink)) {
end_link_type = link_type;
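
The `is_inner_doc` computation above maps a markdown link's byte range back through the docs range map to the attribute that produced it, so intra-doc links written in inner docs resolve against the item they document rather than its parent. A hedged sketch of that idea with made-up types; `DocSegment` and `link_is_from_inner_doc` stand in for what `DocsRangeMap::map` plus `AttrId::is_inner_attr` provide:

```rust
use std::ops::Range;

/// Byte range that one doc attribute contributed to the concatenated markdown,
/// plus whether that attribute was an inner one (`//!` / `#![doc = ...]`).
#[derive(Clone, Copy)]
struct DocSegment {
    start: usize,
    end: usize,
    is_inner: bool,
}

fn link_is_from_inner_doc(segments: &[DocSegment], link_range: Range<usize>) -> bool {
    segments
        .iter()
        .find(|seg| seg.start <= link_range.start && link_range.end <= seg.end)
        .map(|seg| seg.is_inner)
        .unwrap_or(false)
}

fn main() {
    // "outer [`Foo`]\ninner [`Foo`]" where the second line came from an inner attribute.
    let segments = [
        DocSegment { start: 0, end: 14, is_inner: false },
        DocSegment { start: 14, end: 27, is_inner: true },
    ];
    assert!(!link_is_from_inner_doc(&segments, 6..13)); // outer link
    assert!(link_is_from_inner_doc(&segments, 20..27)); // inner link
}
```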
diff --git a/crates/ide/src/doc_links/tests.rs b/crates/ide/src/doc_links/tests.rs
index 91785be..6af156f 100644
--- a/crates/ide/src/doc_links/tests.rs
+++ b/crates/ide/src/doc_links/tests.rs
@@ -5,7 +5,7 @@
use ide_db::{
FilePosition, FileRange, RootDatabase,
defs::Definition,
- documentation::{Documentation, HasDocs},
+ documentation::{DocsRangeMap, Documentation, HasDocs},
};
use itertools::Itertools;
use syntax::{AstNode, SyntaxNode, ast, match_ast};
@@ -45,8 +45,8 @@
fn check_rewrite(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) {
let (analysis, position) = fixture::position(ra_fixture);
let sema = &Semantics::new(&analysis.db);
- let (cursor_def, docs) = def_under_cursor(sema, &position);
- let res = rewrite_links(sema.db, docs.as_str(), cursor_def);
+ let (cursor_def, docs, range) = def_under_cursor(sema, &position);
+ let res = rewrite_links(sema.db, docs.as_str(), cursor_def, Some(range));
expect.assert_eq(&res)
}
@@ -56,12 +56,14 @@
let (analysis, position, mut expected) = fixture::annotations(ra_fixture);
expected.sort_by_key(key_fn);
let sema = &Semantics::new(&analysis.db);
- let (cursor_def, docs) = def_under_cursor(sema, &position);
+ let (cursor_def, docs, range) = def_under_cursor(sema, &position);
let defs = extract_definitions_from_docs(&docs);
let actual: Vec<_> = defs
.into_iter()
- .flat_map(|(_, link, ns)| {
- let def = resolve_doc_path_for_def(sema.db, cursor_def, &link, ns)
+ .flat_map(|(text_range, link, ns)| {
+ let attr = range.map(text_range);
+ let is_inner_attr = attr.map(|(_file, attr)| attr.is_inner_attr()).unwrap_or(false);
+ let def = resolve_doc_path_for_def(sema.db, cursor_def, &link, ns, is_inner_attr)
.unwrap_or_else(|| panic!("Failed to resolve {link}"));
def.try_to_nav(sema.db).unwrap().into_iter().zip(iter::repeat(link))
})
@@ -78,7 +80,7 @@
fn def_under_cursor(
sema: &Semantics<'_, RootDatabase>,
position: &FilePosition,
-) -> (Definition, Documentation) {
+) -> (Definition, Documentation, DocsRangeMap) {
let (docs, def) = sema
.parse_guess_edition(position.file_id)
.syntax()
@@ -89,31 +91,31 @@
.find_map(|it| node_to_def(sema, &it))
.expect("no def found")
.unwrap();
- let docs = docs.expect("no docs found for cursor def");
- (def, docs)
+ let (docs, range) = docs.expect("no docs found for cursor def");
+ (def, docs, range)
}
fn node_to_def(
sema: &Semantics<'_, RootDatabase>,
node: &SyntaxNode,
-) -> Option<Option<(Option<Documentation>, Definition)>> {
+) -> Option<Option<(Option<(Documentation, DocsRangeMap)>, Definition)>> {
Some(match_ast! {
match node {
- ast::SourceFile(it) => sema.to_def(&it).map(|def| (def.docs(sema.db), Definition::Module(def))),
- ast::Module(it) => sema.to_def(&it).map(|def| (def.docs(sema.db), Definition::Module(def))),
- ast::Fn(it) => sema.to_def(&it).map(|def| (def.docs(sema.db), Definition::Function(def))),
- ast::Struct(it) => sema.to_def(&it).map(|def| (def.docs(sema.db), Definition::Adt(hir::Adt::Struct(def)))),
- ast::Union(it) => sema.to_def(&it).map(|def| (def.docs(sema.db), Definition::Adt(hir::Adt::Union(def)))),
- ast::Enum(it) => sema.to_def(&it).map(|def| (def.docs(sema.db), Definition::Adt(hir::Adt::Enum(def)))),
- ast::Variant(it) => sema.to_def(&it).map(|def| (def.docs(sema.db), Definition::Variant(def))),
- ast::Trait(it) => sema.to_def(&it).map(|def| (def.docs(sema.db), Definition::Trait(def))),
- ast::Static(it) => sema.to_def(&it).map(|def| (def.docs(sema.db), Definition::Static(def))),
- ast::Const(it) => sema.to_def(&it).map(|def| (def.docs(sema.db), Definition::Const(def))),
- ast::TypeAlias(it) => sema.to_def(&it).map(|def| (def.docs(sema.db), Definition::TypeAlias(def))),
- ast::Impl(it) => sema.to_def(&it).map(|def| (def.docs(sema.db), Definition::SelfType(def))),
- ast::RecordField(it) => sema.to_def(&it).map(|def| (def.docs(sema.db), Definition::Field(def))),
- ast::TupleField(it) => sema.to_def(&it).map(|def| (def.docs(sema.db), Definition::Field(def))),
- ast::Macro(it) => sema.to_def(&it).map(|def| (def.docs(sema.db), Definition::Macro(def))),
+ ast::SourceFile(it) => sema.to_def(&it).map(|def| (def.docs_with_rangemap(sema.db), Definition::Module(def))),
+ ast::Module(it) => sema.to_def(&it).map(|def| (def.docs_with_rangemap(sema.db), Definition::Module(def))),
+ ast::Fn(it) => sema.to_def(&it).map(|def| (def.docs_with_rangemap(sema.db), Definition::Function(def))),
+ ast::Struct(it) => sema.to_def(&it).map(|def| (def.docs_with_rangemap(sema.db), Definition::Adt(hir::Adt::Struct(def)))),
+ ast::Union(it) => sema.to_def(&it).map(|def| (def.docs_with_rangemap(sema.db), Definition::Adt(hir::Adt::Union(def)))),
+ ast::Enum(it) => sema.to_def(&it).map(|def| (def.docs_with_rangemap(sema.db), Definition::Adt(hir::Adt::Enum(def)))),
+ ast::Variant(it) => sema.to_def(&it).map(|def| (def.docs_with_rangemap(sema.db), Definition::Variant(def))),
+ ast::Trait(it) => sema.to_def(&it).map(|def| (def.docs_with_rangemap(sema.db), Definition::Trait(def))),
+ ast::Static(it) => sema.to_def(&it).map(|def| (def.docs_with_rangemap(sema.db), Definition::Static(def))),
+ ast::Const(it) => sema.to_def(&it).map(|def| (def.docs_with_rangemap(sema.db), Definition::Const(def))),
+ ast::TypeAlias(it) => sema.to_def(&it).map(|def| (def.docs_with_rangemap(sema.db), Definition::TypeAlias(def))),
+ ast::Impl(it) => sema.to_def(&it).map(|def| (def.docs_with_rangemap(sema.db), Definition::SelfType(def))),
+ ast::RecordField(it) => sema.to_def(&it).map(|def| (def.docs_with_rangemap(sema.db), Definition::Field(def))),
+ ast::TupleField(it) => sema.to_def(&it).map(|def| (def.docs_with_rangemap(sema.db), Definition::Field(def))),
+ ast::Macro(it) => sema.to_def(&it).map(|def| (def.docs_with_rangemap(sema.db), Definition::Macro(def))),
// ast::Use(it) => sema.to_def(&it).map(|def| (Box::new(it) as _, def.attrs(sema.db))),
_ => return None,
}
@@ -576,6 +578,40 @@
}
#[test]
+fn doc_links_module() {
+ check_doc_links(
+ r#"
+/// [`M`]
+/// [`M::f`]
+mod M$0 {
+ //^ M
+ #![doc = "inner_item[`S`]"]
+
+ pub fn f() {}
+ //^ M::f
+ pub struct S;
+ //^ S
+}
+"#,
+ );
+
+ check_doc_links(
+ r#"
+mod M$0 {
+ //^ super::M
+ //! [`super::M`]
+ //! [`super::M::f`]
+ //! [`super::M::S`]
+ pub fn f() {}
+ //^ super::M::f
+ pub struct S;
+ //^ super::M::S
+}
+"#,
+ );
+}
+
+#[test]
fn rewrite_html_root_url() {
check_rewrite(
r#"
@@ -691,6 +727,29 @@
}
#[test]
+fn rewrite_module() {
+ check_rewrite(
+ r#"
+//- /main.rs crate:foo
+/// [Foo]
+pub mod $0Foo{
+};
+"#,
+ expect"#]],
+ );
+
+ check_rewrite(
+ r#"
+//- /main.rs crate:foo
+pub mod $0Foo{
+ //! [super::Foo]
+};
+"#,
+ expect"#]],
+ );
+}
+
+#[test]
fn rewrite_intra_doc_link_to_associated_item() {
check_rewrite(
r#"
diff --git a/crates/ide/src/expand_macro.rs b/crates/ide/src/expand_macro.rs
index 241a702..7c39633 100644
--- a/crates/ide/src/expand_macro.rs
+++ b/crates/ide/src/expand_macro.rs
@@ -1,10 +1,10 @@
use hir::db::ExpandDatabase;
-use hir::{ExpandResult, InFile, Semantics};
+use hir::{ExpandResult, InFile, InRealFile, Semantics};
use ide_db::{
FileId, RootDatabase, base_db::Crate, helpers::pick_best_token,
syntax_helpers::prettify_macro_expansion,
};
-use span::{Edition, SpanMap, SyntaxContext, TextRange, TextSize};
+use span::{SpanMap, SyntaxContext, TextRange, TextSize};
use stdx::format_to;
use syntax::{AstNode, NodeOrToken, SyntaxKind, SyntaxNode, T, ast, ted};
@@ -26,8 +26,9 @@
// ![Expand Macro Recursively]
pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<ExpandedMacro> {
let sema = Semantics::new(db);
- let file = sema.parse_guess_edition(position.file_id);
- let krate = sema.file_to_module_def(position.file_id)?.krate().into();
+ let file_id = sema.attach_first_edition(position.file_id)?;
+ let file = sema.parse(file_id);
+ let krate = sema.file_to_module_def(file_id.file_id(db))?.krate().into();
let tok = pick_best_token(file.syntax().token_at_offset(position.offset), |kind| match kind {
SyntaxKind::IDENT => 1,
@@ -86,7 +87,10 @@
return derive;
}
- let mut anc = tok.parent_ancestors();
+ let mut anc = sema
+ .descend_token_into_include_expansion(InRealFile::new(file_id, tok))
+ .value
+ .parent_ancestors();
let mut span_map = SpanMap::empty();
let mut error = String::new();
let (name, expanded, kind) = loop {
@@ -95,14 +99,7 @@
if let Some(item) = ast::Item::cast(node.clone()) {
if let Some(def) = sema.resolve_attr_macro_call(&item) {
break (
- def.name(db)
- .display(
- db,
- sema.attach_first_edition(position.file_id)
- .map(|it| it.edition(db))
- .unwrap_or(Edition::CURRENT),
- )
- .to_string(),
+ def.name(db).display(db, file_id.edition(db)).to_string(),
expand_macro_recur(&sema, &item, &mut error, &mut span_map, TextSize::new(0))?,
SyntaxKind::MACRO_ITEMS,
);
@@ -146,10 +143,11 @@
offset_in_original_node: TextSize,
) -> Option<SyntaxNode> {
let ExpandResult { value: expanded, err } = match macro_call {
- item @ ast::Item::MacroCall(macro_call) => {
- sema.expand_attr_macro(item).or_else(|| sema.expand_allowed_builtins(macro_call))?
- }
- item => sema.expand_attr_macro(item)?,
+ item @ ast::Item::MacroCall(macro_call) => sema
+ .expand_attr_macro(item)
+ .map(|it| it.map(|it| it.value))
+ .or_else(|| sema.expand_allowed_builtins(macro_call))?,
+ item => sema.expand_attr_macro(item)?.map(|it| it.value),
};
let expanded = expanded.clone_for_update();
if let Some(err) = err {
@@ -718,4 +716,88 @@
"#]],
);
}
+
+ #[test]
+ fn assoc_call() {
+ check(
+ r#"
+macro_rules! mac {
+ () => { fn assoc() {} }
+}
+impl () {
+ mac$0!();
+}
+ "#,
+ expect![[r#"
+ mac!
+ fn assoc(){}"#]],
+ );
+ }
+
+ #[test]
+ fn eager() {
+ check(
+ r#"
+//- minicore: concat
+macro_rules! my_concat {
+ ($head:expr, $($tail:tt)*) => { concat!($head, $($tail)*) };
+}
+
+
+fn test() {
+ _ = my_concat!(
+ conc$0at!("<", ">"),
+ "hi",
+ );
+}
+ "#,
+ expect![[r#"
+ my_concat!
+ "<>hi""#]],
+ );
+ }
+
+ #[test]
+ fn in_included() {
+ check(
+ r#"
+//- minicore: include
+//- /main.rs crate:main
+include!("./included.rs");
+//- /included.rs
+macro_rules! foo {
+ () => { fn item() {} };
+}
+foo$0!();
+"#,
+ expect![[r#"
+ foo!
+ fn item(){}"#]],
+ );
+ }
+
+ #[test]
+ fn include() {
+ check(
+ r#"
+//- minicore: include
+//- /main.rs crate:main
+include$0!("./included.rs");
+//- /included.rs
+macro_rules! foo {
+ () => { fn item() {} };
+}
+foo();
+"#,
+ expect![[r#"
+ include!
+ macro_rules! foo {
+ () => {
+ fn item(){}
+
+ };
+ }
+ foo();"#]],
+ );
+ }
}
diff --git a/crates/ide/src/goto_definition.rs b/crates/ide/src/goto_definition.rs
index b894e85..c60ca35 100644
--- a/crates/ide/src/goto_definition.rs
+++ b/crates/ide/src/goto_definition.rs
@@ -1923,6 +1923,74 @@
}
#[test]
+ fn goto_def_for_intra_doc_link_outer_same_file() {
+ check(
+ r#"
+/// [`S$0`]
+mod m {
+ //! [`super::S`]
+}
+struct S;
+ //^
+ "#,
+ );
+
+ check(
+ r#"
+/// [`S$0`]
+mod m {}
+struct S;
+ //^
+ "#,
+ );
+
+ check(
+ r#"
+/// [`S$0`]
+fn f() {
+ //! [`S`]
+}
+struct S;
+ //^
+ "#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_intra_doc_link_inner_same_file() {
+ check(
+ r#"
+/// [`S`]
+mod m {
+ //! [`super::S$0`]
+}
+struct S;
+ //^
+ "#,
+ );
+
+ check(
+ r#"
+mod m {
+ //! [`super::S$0`]
+}
+struct S;
+ //^
+ "#,
+ );
+
+ check(
+ r#"
+fn f() {
+ //! [`S$0`]
+}
+struct S;
+ //^
+ "#,
+ );
+ }
+
+ #[test]
fn goto_def_for_intra_doc_link_inner() {
check(
r#"
diff --git a/crates/ide/src/highlight_related.rs b/crates/ide/src/highlight_related.rs
index 80624ee..520ba39 100644
--- a/crates/ide/src/highlight_related.rs
+++ b/crates/ide/src/highlight_related.rs
@@ -89,6 +89,9 @@
T![break] | T![loop] | T![while] | T![continue] if config.break_points => {
highlight_break_points(sema, token).remove(&file_id)
}
+ T![unsafe] if token.parent().and_then(ast::BlockExpr::cast).is_some() => {
+ highlight_unsafe_points(sema, token).remove(&file_id)
+ }
T![|] if config.closure_captures => {
highlight_closure_captures(sema, token, file_id, span_file_id.file_id())
}
@@ -650,7 +653,7 @@
expr.macro_call().and_then(|call| self.sema.expand_macro_call(&call))
{
match_ast! {
- match expanded {
+ match (expanded.value) {
ast::MacroStmts(it) => {
self.handle_expanded(it, cb);
},
@@ -706,6 +709,44 @@
}
}
+pub(crate) fn highlight_unsafe_points(
+ sema: &Semantics<'_, RootDatabase>,
+ token: SyntaxToken,
+) -> FxHashMap<EditionedFileId, Vec<HighlightedRange>> {
+ fn hl(
+ sema: &Semantics<'_, RootDatabase>,
+ unsafe_token: &SyntaxToken,
+ block_expr: Option<ast::BlockExpr>,
+ ) -> Option<FxHashMap<EditionedFileId, Vec<HighlightedRange>>> {
+ let mut highlights: FxHashMap<EditionedFileId, Vec<_>> = FxHashMap::default();
+
+ let mut push_to_highlights = |file_id, range| {
+ if let Some(FileRange { file_id, range }) = original_frange(sema.db, file_id, range) {
+ let hrange = HighlightedRange { category: ReferenceCategory::empty(), range };
+ highlights.entry(file_id).or_default().push(hrange);
+ }
+ };
+
+ // highlight unsafe keyword itself
+ let unsafe_token_file_id = sema.hir_file_for(&unsafe_token.parent()?);
+ push_to_highlights(unsafe_token_file_id, Some(unsafe_token.text_range()));
+
+ // highlight unsafe operations
+ if let Some(block) = block_expr {
+ if let Some(body) = sema.body_for(InFile::new(unsafe_token_file_id, block.syntax())) {
+ let unsafe_ops = sema.get_unsafe_ops(body);
+ for unsafe_op in unsafe_ops {
+ push_to_highlights(unsafe_op.file_id, Some(unsafe_op.value.text_range()));
+ }
+ }
+ }
+
+ Some(highlights)
+ }
+
+ hl(sema, &token, token.parent().and_then(ast::BlockExpr::cast)).unwrap_or_default()
+}
+
#[cfg(test)]
mod tests {
use itertools::Itertools;
@@ -755,6 +796,32 @@
}
#[test]
+ fn test_hl_unsafe_block() {
+ check(
+ r#"
+fn foo() {
+ unsafe fn this_is_unsafe_function() {}
+
+ unsa$0fe {
+ //^^^^^^
+ let raw_ptr = &42 as *const i32;
+ let val = *raw_ptr;
+ //^^^^^^^^
+
+ let mut_ptr = &mut 5 as *mut i32;
+ *mut_ptr = 10;
+ //^^^^^^^^
+
+ this_is_unsafe_function();
+ //^^^^^^^^^^^^^^^^^^^^^^^^^
+ }
+
+}
+"#,
+ );
+ }
+
+ #[test]
fn test_hl_tuple_fields() {
check(
r#"
diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs
index 075afce..8bb1c70 100644
--- a/crates/ide/src/hover.rs
+++ b/crates/ide/src/hover.rs
@@ -58,6 +58,7 @@
pub size: Option<MemoryLayoutHoverRenderKind>,
pub offset: Option<MemoryLayoutHoverRenderKind>,
pub alignment: Option<MemoryLayoutHoverRenderKind>,
+ pub padding: Option<MemoryLayoutHoverRenderKind>,
pub niches: bool,
}
@@ -456,7 +457,7 @@
let notable_traits = def_ty.map(|ty| notable_traits(db, &ty)).unwrap_or_default();
let subst_types = subst.map(|subst| subst.types(db));
- let markup = render::definition(
+ let (markup, range_map) = render::definition(
sema.db,
def,
famous_defs.as_ref(),
@@ -469,7 +470,7 @@
display_target,
);
HoverResult {
- markup: render::process_markup(sema.db, def, &markup, config),
+ markup: render::process_markup(sema.db, def, &markup, range_map, config),
actions: [
show_fn_references_action(sema.db, def),
show_implementations_action(sema.db, def),
diff --git a/crates/ide/src/hover/render.rs b/crates/ide/src/hover/render.rs
index 69b83f3..c24864a 100644
--- a/crates/ide/src/hover/render.rs
+++ b/crates/ide/src/hover/render.rs
@@ -11,7 +11,7 @@
use ide_db::{
RootDatabase,
defs::Definition,
- documentation::HasDocs,
+ documentation::{DocsRangeMap, HasDocs},
famous_defs::FamousDefs,
generated::lints::{CLIPPY_LINTS, DEFAULT_LINTS, FEATURES},
syntax_helpers::prettify_macro_expansion,
@@ -21,7 +21,7 @@
Float,
ieee::{Half as f16, Quad as f128},
};
-use span::Edition;
+use span::{Edition, TextSize};
use stdx::format_to;
use syntax::{AstNode, AstToken, Direction, SyntaxToken, T, algo, ast, match_ast};
@@ -276,13 +276,10 @@
keyword_hints(sema, token, parent, edition, display_target);
let doc_owner = find_std_module(&famous_defs, &keyword_mod, edition)?;
- let docs = doc_owner.docs(sema.db)?;
- let markup = process_markup(
- sema.db,
- Definition::Module(doc_owner),
- &markup(Some(docs.into()), description, None, None, String::new()),
- config,
- );
+ let (docs, range_map) = doc_owner.docs_with_rangemap(sema.db)?;
+ let (markup, range_map) =
+ markup(Some(docs.into()), Some(range_map), description, None, None, String::new());
+ let markup = process_markup(sema.db, Definition::Module(doc_owner), &markup, range_map, config);
Some(HoverResult { markup, actions })
}
@@ -371,11 +368,15 @@
db: &RootDatabase,
def: Definition,
markup: &Markup,
+ markup_range_map: Option<DocsRangeMap>,
config: &HoverConfig,
) -> Markup {
let markup = markup.as_str();
- let markup =
- if config.links_in_hover { rewrite_links(db, markup, def) } else { remove_links(markup) };
+ let markup = if config.links_in_hover {
+ rewrite_links(db, markup, def, markup_range_map)
+ } else {
+ remove_links(markup)
+ };
Markup::from(markup)
}
@@ -482,7 +483,7 @@
config: &HoverConfig,
edition: Edition,
display_target: DisplayTarget,
-) -> Markup {
+) -> (Markup, Option<DocsRangeMap>) {
let mod_path = definition_path(db, &def, edition);
let label = match def {
Definition::Trait(trait_) => trait_
@@ -518,7 +519,12 @@
}
_ => def.label(db, display_target),
};
- let docs = def.docs(db, famous_defs, display_target);
+ let (docs, range_map) =
+ if let Some((docs, doc_range)) = def.docs_with_rangemap(db, famous_defs, display_target) {
+ (Some(docs), doc_range)
+ } else {
+ (None, None)
+ };
let value = || match def {
Definition::Variant(it) => {
if !it.parent_enum(db).is_data_carrying(db) {
@@ -624,27 +630,57 @@
}
},
|_| None,
+ |_| None,
),
- Definition::Adt(it) => {
- render_memory_layout(config.memory_layout, || it.layout(db), |_| None, |_| None)
- }
+ Definition::Adt(it @ Adt::Struct(strukt)) => render_memory_layout(
+ config.memory_layout,
+ || it.layout(db),
+ |_| None,
+ |layout| {
+ let mut field_size =
+ |i: usize| Some(strukt.fields(db).get(i)?.layout(db).ok()?.size());
+ if strukt.repr(db).is_some_and(|it| it.inhibit_struct_field_reordering()) {
+ Some(("tail padding", layout.tail_padding(&mut field_size)?))
+ } else {
+ Some(("largest padding", layout.largest_padding(&mut field_size)?))
+ }
+ },
+ |_| None,
+ ),
+ Definition::Adt(it) => render_memory_layout(
+ config.memory_layout,
+ || it.layout(db),
+ |_| None,
+ |_| None,
+ |_| None,
+ ),
Definition::Variant(it) => render_memory_layout(
config.memory_layout,
|| it.layout(db),
|_| None,
+ |_| None,
|layout| layout.enum_tag_size(),
),
- Definition::TypeAlias(it) => {
- render_memory_layout(config.memory_layout, || it.ty(db).layout(db), |_| None, |_| None)
- }
- Definition::Local(it) => {
- render_memory_layout(config.memory_layout, || it.ty(db).layout(db), |_| None, |_| None)
- }
+ Definition::TypeAlias(it) => render_memory_layout(
+ config.memory_layout,
+ || it.ty(db).layout(db),
+ |_| None,
+ |_| None,
+ |_| None,
+ ),
+ Definition::Local(it) => render_memory_layout(
+ config.memory_layout,
+ || it.ty(db).layout(db),
+ |_| None,
+ |_| None,
+ |_| None,
+ ),
Definition::SelfType(it) => render_memory_layout(
config.memory_layout,
|| it.self_ty(db).layout(db),
|_| None,
|_| None,
+ |_| None,
),
_ => None,
};
@@ -807,6 +843,7 @@
markup(
docs.map(Into::into),
+ range_map,
desc,
extra.is_empty().not().then_some(extra),
mod_path,
@@ -1048,9 +1085,13 @@
if let Some(trait_) = c.fn_trait(sema.db).get_id(sema.db, original.krate(sema.db).into()) {
push_new_def(hir::Trait::from(trait_).into())
}
- if let Some(layout) =
- render_memory_layout(config.memory_layout, || original.layout(sema.db), |_| None, |_| None)
- {
+ if let Some(layout) = render_memory_layout(
+ config.memory_layout,
+ || original.layout(sema.db),
+ |_| None,
+ |_| None,
+ |_| None,
+ ) {
format_to!(markup, "\n___\n{layout}");
}
format_to!(markup, "{adjusted}\n\n## Captures\n{}", captures_rendered,);
@@ -1083,11 +1124,12 @@
fn markup(
docs: Option<String>,
+ range_map: Option<DocsRangeMap>,
rust: String,
extra: Option<String>,
mod_path: Option<String>,
subst_types: String,
-) -> Markup {
+) -> (Markup, Option<DocsRangeMap>) {
let mut buf = String::new();
if let Some(mod_path) = mod_path {
@@ -1106,9 +1148,15 @@
}
if let Some(doc) = docs {
- format_to!(buf, "\n___\n\n{}", doc);
+ format_to!(buf, "\n___\n\n");
+ let offset = TextSize::new(buf.len() as u32);
+ let buf_range_map = range_map.map(|range_map| range_map.shift_docstring_line_range(offset));
+ format_to!(buf, "{}", doc);
+
+ (buf.into(), buf_range_map)
+ } else {
+ (buf.into(), None)
}
- buf.into()
}
fn find_std_module(
@@ -1128,6 +1176,7 @@
config: Option<MemoryLayoutHoverConfig>,
layout: impl FnOnce() -> Result<Layout, LayoutError>,
offset: impl FnOnce(&Layout) -> Option<u64>,
+ padding: impl FnOnce(&Layout) -> Option<(&str, u64)>,
tag: impl FnOnce(&Layout) -> Option<usize>,
) -> Option<String> {
let config = config?;
@@ -1185,6 +1234,23 @@
}
}
+ if let Some(render) = config.padding {
+ if let Some((padding_name, padding)) = padding(&layout) {
+ format_to!(label, "{padding_name} = ");
+ match render {
+ MemoryLayoutHoverRenderKind::Decimal => format_to!(label, "{padding}"),
+ MemoryLayoutHoverRenderKind::Hexadecimal => format_to!(label, "{padding:#X}"),
+ MemoryLayoutHoverRenderKind::Both if padding >= 10 => {
+ format_to!(label, "{padding} ({padding:#X})")
+ }
+ MemoryLayoutHoverRenderKind::Both => {
+ format_to!(label, "{padding}")
+ }
+ }
+ format_to!(label, ", ");
+ }
+ }
+
if config.niches {
if let Some(niches) = layout.niches() {
if niches > 1024 {
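
The padding figures the hover now reports can be illustrated with plain arithmetic. Assumed semantics, consistent with the test expectations that follow: "largest padding" is the widest gap between consecutive fields in memory order (including the gap before the struct's end), while "tail padding", shown for structs whose `repr` forbids field reordering, is only that final gap. The function below is an illustrative stand-in, not the `Layout` API:

```rust
/// `fields` are `(offset, size)` pairs in bytes; returns `(largest_padding, tail_padding)`.
fn padding_figures(struct_size: u64, fields: &[(u64, u64)]) -> (u64, u64) {
    let mut by_offset: Vec<(u64, u64)> = fields.to_vec();
    by_offset.sort_by_key(|&(offset, _)| offset);

    let mut largest = 0;
    let mut prev_end = 0;
    for &(offset, size) in &by_offset {
        largest = largest.max(offset - prev_end); // gap before this field
        prev_end = offset + size;
    }
    let tail = struct_size - prev_end; // gap between the last field and the struct's end
    (largest.max(tail), tail)
}

fn main() {
    // #[repr(C)] struct Foo { x: bool, y: i64, z: u32 }: size 24, 7 bytes after `x`, 4 at the tail
    assert_eq!(padding_figures(24, &[(0, 1), (8, 8), (16, 4)]), (7, 4));
    // Default layout reorders to { y, z, x }: size 16, only the 3 tail bytes are padding
    assert_eq!(padding_figures(16, &[(12, 1), (0, 8), (8, 4)]), (3, 3));
}
```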
diff --git a/crates/ide/src/hover/tests.rs b/crates/ide/src/hover/tests.rs
index 7b7eef9..a281a49 100644
--- a/crates/ide/src/hover/tests.rs
+++ b/crates/ide/src/hover/tests.rs
@@ -12,6 +12,7 @@
size: Some(MemoryLayoutHoverRenderKind::Both),
offset: Some(MemoryLayoutHoverRenderKind::Both),
alignment: Some(MemoryLayoutHoverRenderKind::Both),
+ padding: Some(MemoryLayoutHoverRenderKind::Both),
niches: true,
}),
documentation: true,
@@ -933,7 +934,7 @@
---
- size = 4, align = 4, no Drop
+ size = 4, align = 4, largest padding = 0, no Drop
"#]],
);
}
@@ -959,7 +960,7 @@
---
- size = 4, align = 4, no Drop
+ size = 4, align = 4, largest padding = 0, no Drop
"#]],
);
check(
@@ -984,7 +985,7 @@
---
- size = 4, align = 4, no Drop
+ size = 4, align = 4, largest padding = 0, no Drop
"#]],
);
}
@@ -1013,7 +1014,7 @@
---
- size = 12 (0xC), align = 4, no Drop
+ size = 12 (0xC), align = 4, largest padding = 0, no Drop
"#]],
);
check_hover_fields_limit(
@@ -1036,7 +1037,7 @@
---
- size = 4, align = 4, no Drop
+ size = 4, align = 4, largest padding = 0, no Drop
"#]],
);
check_hover_fields_limit(
@@ -1062,7 +1063,7 @@
---
- size = 16 (0x10), align = 4, no Drop
+ size = 16 (0x10), align = 4, largest padding = 0, no Drop
"#]],
);
check_hover_fields_limit(
@@ -1083,7 +1084,7 @@
---
- size = 12 (0xC), align = 4, no Drop
+ size = 12 (0xC), align = 4, largest padding = 0, no Drop
"#]],
);
check_hover_fields_limit(
@@ -1104,7 +1105,7 @@
---
- size = 12 (0xC), align = 4, no Drop
+ size = 12 (0xC), align = 4, largest padding = 0, no Drop
"#]],
);
@@ -3114,7 +3115,7 @@
---
- size = 0, align = 1, no Drop
+ size = 0, align = 1, largest padding = 0, no Drop
"#]],
);
}
@@ -3148,6 +3149,111 @@
}
#[test]
+fn test_hover_layout_padding_info() {
+ check(
+ r#"struct $0Foo {
+ x: bool,
+ y: i64,
+ z: u32,
+ }"#,
+ expect![[r#"
+ *Foo*
+
+ ```rust
+ ra_test_fixture
+ ```
+
+ ```rust
+ struct Foo {
+ x: bool,
+ y: i64,
+ z: u32,
+ }
+ ```
+
+ ---
+
+ size = 16 (0x10), align = 8, largest padding = 3, niches = 254, no Drop
+ "#]],
+ );
+
+ check(
+ r#"#[repr(align(32))]
+ struct $0Foo {
+ x: bool,
+ y: i64,
+ z: u32,
+ }"#,
+ expect![[r#"
+ *Foo*
+
+ ```rust
+ ra_test_fixture
+ ```
+
+ ```rust
+ struct Foo {
+ x: bool,
+ y: i64,
+ z: u32,
+ }
+ ```
+
+ ---
+
+ size = 32 (0x20), align = 32 (0x20), largest padding = 19 (0x13), niches = 254, no Drop
+ "#]],
+ );
+
+ check(
+ r#"#[repr(C)]
+ struct $0Foo {
+ x: bool,
+ y: i64,
+ z: u32,
+ }"#,
+ expect![[r#"
+ *Foo*
+
+ ```rust
+ ra_test_fixture
+ ```
+
+ ```rust
+ struct Foo {
+ x: bool,
+ y: i64,
+ z: u32,
+ }
+ ```
+
+ ---
+
+ size = 24 (0x18), align = 8, tail padding = 4, niches = 254, no Drop
+ "#]],
+ );
+
+ check(
+ r#"struct $0Foo(i16, u128, u64)"#,
+ expect![[r#"
+ *Foo*
+
+ ```rust
+ ra_test_fixture
+ ```
+
+ ```rust
+ struct Foo(i16, u128, u64)
+ ```
+
+ ---
+
+ size = 32 (0x20), align = 8, largest padding = 6, no Drop
+ "#]],
+ );
+}
+
+#[test]
fn test_hover_no_memory_layout() {
check_hover_no_memory_layout(
r#"struct Foo { fiel$0d_a: u8, field_b: i32, field_c: i16 }"#,
@@ -7375,6 +7481,128 @@
}
#[test]
+fn hover_intra_inner_attr() {
+ check(
+ r#"
+/// outer comment for [`Foo`]
+#[doc = "Doc outer comment for [`Foo`]"]
+pub fn Foo() {
+ //! inner comment for [`Foo$0`]
+ #![doc = "Doc inner comment for [`Foo`]"]
+}
+"#,
+ expect![[r#"
+ *[`Foo`]*
+
+ ```rust
+ ra_test_fixture
+ ```
+
+ ```rust
+ pub fn Foo()
+ ```
+
+ ---
+
+ outer comment for [`Foo`](https://docs.rs/ra_test_fixture/*/ra_test_fixture/fn.Foo.html)
+ Doc outer comment for [`Foo`](https://docs.rs/ra_test_fixture/*/ra_test_fixture/fn.Foo.html)
+ inner comment for [`Foo`](https://docs.rs/ra_test_fixture/*/ra_test_fixture/fn.Foo.html)
+ Doc inner comment for [`Foo`](https://docs.rs/ra_test_fixture/*/ra_test_fixture/fn.Foo.html)
+ "#]],
+ );
+
+ check(
+ r#"
+/// outer comment for [`Foo`]
+#[doc = "Doc outer comment for [`Foo`]"]
+pub mod Foo {
+ //! inner comment for [`super::Foo$0`]
+ #![doc = "Doc inner comment for [`super::Foo`]"]
+}
+"#,
+ expect![[r#"
+ *[`super::Foo`]*
+
+ ```rust
+ ra_test_fixture
+ ```
+
+ ```rust
+ pub mod Foo
+ ```
+
+ ---
+
+ outer comment for [`Foo`](https://docs.rs/ra_test_fixture/*/ra_test_fixture/Foo/index.html)
+ Doc outer comment for [`Foo`](https://docs.rs/ra_test_fixture/*/ra_test_fixture/Foo/index.html)
+ inner comment for [`super::Foo`](https://docs.rs/ra_test_fixture/*/ra_test_fixture/Foo/index.html)
+ Doc inner comment for [`super::Foo`](https://docs.rs/ra_test_fixture/*/ra_test_fixture/Foo/index.html)
+ "#]],
+ );
+}
+
+#[test]
+fn hover_intra_outer_attr() {
+ check(
+ r#"
+/// outer comment for [`Foo$0`]
+#[doc = "Doc outer comment for [`Foo`]"]
+pub fn Foo() {
+ //! inner comment for [`Foo`]
+ #![doc = "Doc inner comment for [`Foo`]"]
+}
+"#,
+ expect![[r#"
+ *[`Foo`]*
+
+ ```rust
+ ra_test_fixture
+ ```
+
+ ```rust
+ pub fn Foo()
+ ```
+
+ ---
+
+ outer comment for [`Foo`](https://docs.rs/ra_test_fixture/*/ra_test_fixture/fn.Foo.html)
+ Doc outer comment for [`Foo`](https://docs.rs/ra_test_fixture/*/ra_test_fixture/fn.Foo.html)
+ inner comment for [`Foo`](https://docs.rs/ra_test_fixture/*/ra_test_fixture/fn.Foo.html)
+ Doc inner comment for [`Foo`](https://docs.rs/ra_test_fixture/*/ra_test_fixture/fn.Foo.html)
+ "#]],
+ );
+
+ check(
+ r#"
+/// outer comment for [`Foo$0`]
+#[doc = "Doc outer comment for [`Foo`]"]
+pub mod Foo {
+ //! inner comment for [`super::Foo`]
+ #![doc = "Doc inner comment for [`super::Foo`]"]
+}
+"#,
+ expect![[r#"
+ *[`Foo`]*
+
+ ```rust
+ ra_test_fixture
+ ```
+
+ ```rust
+ pub mod Foo
+ ```
+
+ ---
+
+ outer comment for [`Foo`](https://docs.rs/ra_test_fixture/*/ra_test_fixture/Foo/index.html)
+ Doc outer comment for [`Foo`](https://docs.rs/ra_test_fixture/*/ra_test_fixture/Foo/index.html)
+ inner comment for [`super::Foo`](https://docs.rs/ra_test_fixture/*/ra_test_fixture/Foo/index.html)
+ Doc inner comment for [`super::Foo`](https://docs.rs/ra_test_fixture/*/ra_test_fixture/Foo/index.html)
+ "#]],
+ );
+}
+
+#[test]
fn hover_intra_generics() {
check(
r#"
@@ -9076,7 +9304,7 @@
---
- size = 16 (0x10), align = 8, niches = 1, no Drop
+ size = 16 (0x10), align = 8, largest padding = 0, niches = 1, no Drop
"#]],
)
}
@@ -10437,7 +10665,7 @@
---
- size = 4, align = 4, needs Drop
+ size = 4, align = 4, largest padding = 0, needs Drop
"#]],
);
check(
diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs
index aa525a8..d649dff 100644
--- a/crates/ide/src/lib.rs
+++ b/crates/ide/src/lib.rs
@@ -62,7 +62,7 @@
use cfg::CfgOptions;
use fetch_crates::CrateInfo;
-use hir::{ChangeWithProcMacros, EditionedFileId, sym};
+use hir::{ChangeWithProcMacros, EditionedFileId, crate_def_map, sym};
use ide_db::{
FxHashMap, FxIndexSet, LineIndexDatabase,
base_db::{
@@ -627,7 +627,7 @@
/// Returns true if this crate has `no_std` or `no_core` specified.
pub fn is_crate_no_std(&self, crate_id: Crate) -> Cancellable<bool> {
- self.with_db(|db| hir::db::DefDatabase::crate_def_map(db, crate_id).is_no_std())
+ self.with_db(|db| crate_def_map(db, crate_id).is_no_std())
}
/// Returns the root file of the given crate.
diff --git a/crates/ide/src/parent_module.rs b/crates/ide/src/parent_module.rs
index 6dc01c4..50219ce 100644
--- a/crates/ide/src/parent_module.rs
+++ b/crates/ide/src/parent_module.rs
@@ -1,4 +1,4 @@
-use hir::{Semantics, db::DefDatabase};
+use hir::{Semantics, crate_def_map};
use ide_db::{
FileId, FilePosition, RootDatabase,
base_db::{Crate, RootQueryDb},
@@ -58,7 +58,7 @@
.iter()
.copied()
.filter(|&crate_id| {
- db.crate_def_map(crate_id).modules_for_file(db, file_id).next().is_some()
+ crate_def_map(db, crate_id).modules_for_file(db, file_id).next().is_some()
})
.sorted()
.collect()
diff --git a/crates/ide/src/syntax_highlighting/inject.rs b/crates/ide/src/syntax_highlighting/inject.rs
index 0998e14..7f5c2c1 100644
--- a/crates/ide/src/syntax_highlighting/inject.rs
+++ b/crates/ide/src/syntax_highlighting/inject.rs
@@ -129,11 +129,18 @@
extract_definitions_from_docs(&docs)
.into_iter()
.filter_map(|(range, link, ns)| {
- doc_mapping.map(range).filter(|mapping| mapping.file_id == src_file_id).and_then(
- |InFile { value: mapped_range, .. }| {
- Some(mapped_range).zip(resolve_doc_path_for_def(sema.db, def, &link, ns))
- },
- )
+ doc_mapping
+ .map(range)
+ .filter(|(mapping, _)| mapping.file_id == src_file_id)
+ .and_then(|(InFile { value: mapped_range, .. }, attr_id)| {
+ Some(mapped_range).zip(resolve_doc_path_for_def(
+ sema.db,
+ def,
+ &link,
+ ns,
+ attr_id.is_inner_attr(),
+ ))
+ })
})
.for_each(|(range, def)| {
hl.add(HlRange {
diff --git a/crates/intern/src/symbol/symbols.rs b/crates/intern/src/symbol/symbols.rs
index abde48d..fc922dd 100644
--- a/crates/intern/src/symbol/symbols.rs
+++ b/crates/intern/src/symbol/symbols.rs
@@ -164,7 +164,6 @@
completion,
compile_error,
concat_bytes,
- concat_idents,
concat,
const_format_args,
const_panic_fmt,
diff --git a/crates/load-cargo/src/lib.rs b/crates/load-cargo/src/lib.rs
index 2686a75..30e2d54 100644
--- a/crates/load-cargo/src/lib.rs
+++ b/crates/load-cargo/src/lib.rs
@@ -512,10 +512,6 @@
Err(err) => Err(ProcMacroExpansionError::System(err.to_string())),
}
}
-
- fn eq_dyn(&self, other: &dyn ProcMacroExpander) -> bool {
- other.as_any().downcast_ref::<Self>().is_some_and(|other| self == other)
- }
}
#[cfg(test)]
diff --git a/crates/mbe/src/tests.rs b/crates/mbe/src/tests.rs
index a5672e4..3369dff 100644
--- a/crates/mbe/src/tests.rs
+++ b/crates/mbe/src/tests.rs
@@ -356,3 +356,120 @@
;"#]],
);
}
+
+#[test]
+fn minus_belongs_to_literal() {
+ let decl = r#"
+(-1) => {-1};
+(- 2) => {- 2};
+(- 3.0) => {- 3.0};
+(@$lit:literal) => {$lit}
+"#;
+ let check = |args, expect| check(Edition::CURRENT, Edition::CURRENT, decl, args, expect);
+ check(
+ "-1",
+ expect![[r#"
+ SUBTREE $$ 1:0@0..2#ROOT2024 1:0@0..2#ROOT2024
+ PUNCH - [alone] 0:0@10..11#ROOT2024
+ LITERAL Integer 1 0:0@11..12#ROOT2024
+
+ -1"#]],
+ );
+ check(
+ "- 1",
+ expect![[r#"
+ SUBTREE $$ 1:0@0..3#ROOT2024 1:0@0..3#ROOT2024
+ PUNCH - [alone] 0:0@10..11#ROOT2024
+ LITERAL Integer 1 0:0@11..12#ROOT2024
+
+ -1"#]],
+ );
+ check(
+ "-2",
+ expect![[r#"
+ SUBTREE $$ 1:0@0..2#ROOT2024 1:0@0..2#ROOT2024
+ PUNCH - [alone] 0:0@25..26#ROOT2024
+ LITERAL Integer 2 0:0@27..28#ROOT2024
+
+ -2"#]],
+ );
+ check(
+ "- 2",
+ expect![[r#"
+ SUBTREE $$ 1:0@0..3#ROOT2024 1:0@0..3#ROOT2024
+ PUNCH - [alone] 0:0@25..26#ROOT2024
+ LITERAL Integer 2 0:0@27..28#ROOT2024
+
+ -2"#]],
+ );
+ check(
+ "-3.0",
+ expect![[r#"
+ SUBTREE $$ 1:0@0..4#ROOT2024 1:0@0..4#ROOT2024
+ PUNCH - [alone] 0:0@43..44#ROOT2024
+ LITERAL Float 3.0 0:0@45..48#ROOT2024
+
+ -3.0"#]],
+ );
+ check(
+ "- 3.0",
+ expect![[r#"
+ SUBTREE $$ 1:0@0..5#ROOT2024 1:0@0..5#ROOT2024
+ PUNCH - [alone] 0:0@43..44#ROOT2024
+ LITERAL Float 3.0 0:0@45..48#ROOT2024
+
+ -3.0"#]],
+ );
+ check(
+ "@1",
+ expect![[r#"
+ SUBTREE $$ 1:0@0..2#ROOT2024 1:0@0..2#ROOT2024
+ LITERAL Integer 1 1:0@1..2#ROOT2024
+
+ 1"#]],
+ );
+ check(
+ "@-1",
+ expect![[r#"
+ SUBTREE $$ 1:0@0..3#ROOT2024 1:0@0..3#ROOT2024
+ PUNCH - [alone] 1:0@1..2#ROOT2024
+ LITERAL Integer 1 1:0@2..3#ROOT2024
+
+ -1"#]],
+ );
+ check(
+ "@1.0",
+ expect![[r#"
+ SUBTREE $$ 1:0@0..4#ROOT2024 1:0@0..4#ROOT2024
+ LITERAL Float 1.0 1:0@1..4#ROOT2024
+
+ 1.0"#]],
+ );
+ check(
+ "@-1.0",
+ expect![[r#"
+ SUBTREE $$ 1:0@0..5#ROOT2024 1:0@0..5#ROOT2024
+ PUNCH - [alone] 1:0@1..2#ROOT2024
+ LITERAL Float 1.0 1:0@2..5#ROOT2024
+
+ -1.0"#]],
+ );
+ check(
+ "@--1.0",
+ expect![[r#"
+ ExpandError {
+ inner: (
+ 1:0@1..2#ROOT2024,
+ BindingError(
+ "expected literal",
+ ),
+ ),
+ }
+
+ SUBTREE $$ 1:0@0..6#ROOT2024 1:0@0..6#ROOT2024
+ PUNCH - [joint] 1:0@1..2#ROOT2024
+ PUNCH - [alone] 1:0@2..3#ROOT2024
+
+ --"#]],
+ );
+}
diff --git a/crates/parser/src/grammar/expressions/atom.rs b/crates/parser/src/grammar/expressions/atom.rs
index 5faf6fc..8cc332d 100644
--- a/crates/parser/src/grammar/expressions/atom.rs
+++ b/crates/parser/src/grammar/expressions/atom.rs
@@ -381,10 +381,14 @@
op.complete(p, ASM_REG_OPERAND);
op_n.complete(p, ASM_OPERAND_NAMED);
} else if p.eat_contextual_kw(T![label]) {
+ // test asm_label
+ // fn foo() {
+ // builtin#asm("", label {});
+ // }
dir_spec.abandon(p);
block_expr(p);
- op.complete(p, ASM_OPERAND_NAMED);
- op_n.complete(p, ASM_LABEL);
+ op.complete(p, ASM_LABEL);
+ op_n.complete(p, ASM_OPERAND_NAMED);
} else if p.eat(T![const]) {
dir_spec.abandon(p);
expr(p);
diff --git a/crates/parser/src/lexed_str.rs b/crates/parser/src/lexed_str.rs
index 0a5c16d..0fa9a26 100644
--- a/crates/parser/src/lexed_str.rs
+++ b/crates/parser/src/lexed_str.rs
@@ -179,7 +179,10 @@
COMMENT
}
- rustc_lexer::TokenKind::Frontmatter { has_invalid_preceding_whitespace, invalid_infostring } => {
+ rustc_lexer::TokenKind::Frontmatter {
+ has_invalid_preceding_whitespace,
+ invalid_infostring,
+ } => {
if *has_invalid_preceding_whitespace {
err = "invalid preceding whitespace for frontmatter opening"
} else if *invalid_infostring {
diff --git a/crates/parser/test_data/generated/runner.rs b/crates/parser/test_data/generated/runner.rs
index 24db947..030d8e0 100644
--- a/crates/parser/test_data/generated/runner.rs
+++ b/crates/parser/test_data/generated/runner.rs
@@ -21,6 +21,8 @@
#[test]
fn asm_expr() { run_and_expect_no_errors("test_data/parser/inline/ok/asm_expr.rs"); }
#[test]
+ fn asm_label() { run_and_expect_no_errors("test_data/parser/inline/ok/asm_label.rs"); }
+ #[test]
fn assoc_const_eq() {
run_and_expect_no_errors("test_data/parser/inline/ok/assoc_const_eq.rs");
}
diff --git a/crates/parser/test_data/parser/inline/ok/asm_label.rast b/crates/parser/test_data/parser/inline/ok/asm_label.rast
new file mode 100644
index 0000000..38999c9
--- /dev/null
+++ b/crates/parser/test_data/parser/inline/ok/asm_label.rast
@@ -0,0 +1,37 @@
+SOURCE_FILE
+ FN
+ FN_KW "fn"
+ WHITESPACE " "
+ NAME
+ IDENT "foo"
+ PARAM_LIST
+ L_PAREN "("
+ R_PAREN ")"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ WHITESPACE "\n "
+ EXPR_STMT
+ ASM_EXPR
+ BUILTIN_KW "builtin"
+ POUND "#"
+ ASM_KW "asm"
+ L_PAREN "("
+ LITERAL
+ STRING "\"\""
+ COMMA ","
+ WHITESPACE " "
+ ASM_OPERAND_NAMED
+ ASM_LABEL
+ LABEL_KW "label"
+ WHITESPACE " "
+ BLOCK_EXPR
+ STMT_LIST
+ L_CURLY "{"
+ R_CURLY "}"
+ R_PAREN ")"
+ SEMICOLON ";"
+ WHITESPACE "\n"
+ R_CURLY "}"
+ WHITESPACE "\n"
diff --git a/crates/parser/test_data/parser/inline/ok/asm_label.rs b/crates/parser/test_data/parser/inline/ok/asm_label.rs
new file mode 100644
index 0000000..996c1c8
--- /dev/null
+++ b/crates/parser/test_data/parser/inline/ok/asm_label.rs
@@ -0,0 +1,3 @@
+fn foo() {
+ builtin#asm("", label {});
+}
diff --git a/crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs b/crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs
index dfdbb4c..6820e4b 100644
--- a/crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs
+++ b/crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs
@@ -31,12 +31,17 @@
TokenTree::from(Literal::byte_string(b"byte_string")),
TokenTree::from(Literal::character('c')),
TokenTree::from(Literal::string("string")),
+ TokenTree::from(Literal::c_string(c"cstring")),
// as of 2022-07-21, there's no method on `Literal` to build a raw
// string or a raw byte string
TokenTree::from(Literal::f64_suffixed(3.14)),
+ TokenTree::from(Literal::f64_suffixed(-3.14)),
TokenTree::from(Literal::f64_unsuffixed(3.14)),
+ TokenTree::from(Literal::f64_unsuffixed(-3.14)),
TokenTree::from(Literal::i64_suffixed(123)),
+ TokenTree::from(Literal::i64_suffixed(-123)),
TokenTree::from(Literal::i64_unsuffixed(123)),
+ TokenTree::from(Literal::i64_unsuffixed(-123)),
];
TokenStream::from_iter(trees)
}
diff --git a/crates/proc-macro-srv/src/server_impl.rs b/crates/proc-macro-srv/src/server_impl.rs
index 3d99942..11dbd92 100644
--- a/crates/proc-macro-srv/src/server_impl.rs
+++ b/crates/proc-macro-srv/src/server_impl.rs
@@ -16,9 +16,8 @@
pub use token_stream::TokenStream;
pub mod rust_analyzer_span;
-// mod symbol;
pub mod token_id;
-// pub use symbol::*;
+
use tt::Spacing;
#[derive(Clone)]
diff --git a/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs b/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs
index 47555a5..e0c6e68 100644
--- a/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs
+++ b/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs
@@ -168,16 +168,38 @@
}
bridge::TokenTree::Literal(literal) => {
- let literal = tt::Literal {
- symbol: literal.symbol,
- suffix: literal.suffix,
- span: literal.span,
- kind: literal_kind_to_internal(literal.kind),
- };
+ let token_trees =
+ if let Some((_minus, symbol)) = literal.symbol.as_str().split_once('-') {
+ let punct = tt::Punct {
+ spacing: tt::Spacing::Alone,
+ span: literal.span,
+ char: '-' as char,
+ };
+ let leaf: tt::Leaf = tt::Leaf::from(punct);
+ let minus_tree = tt::TokenTree::from(leaf);
- let leaf: tt::Leaf = tt::Leaf::from(literal);
- let tree = tt::TokenTree::from(leaf);
- TokenStream { token_trees: vec![tree] }
+ let literal = tt::Literal {
+ symbol: Symbol::intern(symbol),
+ suffix: literal.suffix,
+ span: literal.span,
+ kind: literal_kind_to_internal(literal.kind),
+ };
+ let leaf: tt::Leaf = tt::Leaf::from(literal);
+ let tree = tt::TokenTree::from(leaf);
+ vec![minus_tree, tree]
+ } else {
+ let literal = tt::Literal {
+ symbol: literal.symbol,
+ suffix: literal.suffix,
+ span: literal.span,
+ kind: literal_kind_to_internal(literal.kind),
+ };
+
+ let leaf: tt::Leaf = tt::Leaf::from(literal);
+ let tree = tt::TokenTree::from(leaf);
+ vec![tree]
+ };
+ TokenStream { token_trees }
}
bridge::TokenTree::Punct(p) => {
@@ -236,7 +258,9 @@
&mut self,
stream: Self::TokenStream,
) -> Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>> {
- stream.into_bridge()
+ stream.into_bridge(&mut |first, second| {
+ server::Span::join(self, first, second).unwrap_or(first)
+ })
}
}
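
A hedged sketch of the split performed above when the bridge hands the server a literal whose symbol carries a leading minus (e.g. from `Literal::i64_suffixed(-123)`): emit a lone `-` punct followed by the positive literal. Plain strings stand in for the `tt` token types:

```rust
/// Split a `-`-prefixed literal symbol into a lone `-` punct followed by the
/// positive literal; anything else stays a single token.
fn split_negative_literal(symbol: &str) -> Vec<String> {
    match symbol.split_once('-') {
        Some((_, positive)) => vec!["-".to_string(), positive.to_string()],
        None => vec![symbol.to_string()],
    }
}

fn main() {
    assert_eq!(split_negative_literal("-3.14"), ["-", "3.14"]);
    assert_eq!(split_negative_literal("123"), ["123"]);
}
```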
diff --git a/crates/proc-macro-srv/src/server_impl/symbol.rs b/crates/proc-macro-srv/src/server_impl/symbol.rs
deleted file mode 100644
index 6863ce9..0000000
--- a/crates/proc-macro-srv/src/server_impl/symbol.rs
+++ /dev/null
@@ -1,47 +0,0 @@
-//! Symbol interner for proc-macro-srv
-
-use std::{cell::RefCell, collections::HashMap, thread::LocalKey};
-
-thread_local! {
- pub(crate) static SYMBOL_INTERNER: RefCell<SymbolInterner> = Default::default();
-}
-
-// ID for an interned symbol.
-#[derive(Hash, Eq, PartialEq, Copy, Clone)]
-pub struct Symbol(u32);
-
-pub(crate) type SymbolInternerRef = &'static LocalKey<RefCell<SymbolInterner>>;
-
-impl Symbol {
- pub(super) fn intern(interner: SymbolInternerRef, data: &str) -> Symbol {
- interner.with(|i| i.borrow_mut().intern(data))
- }
-
- pub(super) fn text(&self, interner: SymbolInternerRef) -> SmolStr {
- interner.with(|i| i.borrow().get(self).clone())
- }
-}
-
-#[derive(Default)]
-pub(crate) struct SymbolInterner {
- idents: HashMap<SmolStr, u32>,
- ident_data: Vec<SmolStr>,
-}
-
-impl SymbolInterner {
- fn intern(&mut self, data: &str) -> Symbol {
- if let Some(index) = self.idents.get(data) {
- return Symbol(*index);
- }
-
- let index = self.idents.len() as u32;
- let data = SmolStr::from(data);
- self.ident_data.push(data.clone());
- self.idents.insert(data, index);
- Symbol(index)
- }
-
- fn get(&self, sym: &Symbol) -> &SmolStr {
- &self.ident_data[sym.0 as usize]
- }
-}
diff --git a/crates/proc-macro-srv/src/server_impl/token_id.rs b/crates/proc-macro-srv/src/server_impl/token_id.rs
index c002be4..d55b269 100644
--- a/crates/proc-macro-srv/src/server_impl/token_id.rs
+++ b/crates/proc-macro-srv/src/server_impl/token_id.rs
@@ -153,16 +153,38 @@
}
bridge::TokenTree::Literal(literal) => {
- let literal = Literal {
- symbol: literal.symbol,
- suffix: literal.suffix,
- span: literal.span,
- kind: literal_kind_to_internal(literal.kind),
- };
+ let token_trees =
+ if let Some((_minus, symbol)) = literal.symbol.as_str().split_once('-') {
+ let punct = tt::Punct {
+ spacing: tt::Spacing::Alone,
+ span: literal.span,
+ char: '-' as char,
+ };
+ let leaf: tt::Leaf = tt::Leaf::from(punct);
+ let minus_tree = tt::TokenTree::from(leaf);
- let leaf = tt::Leaf::from(literal);
- let tree = TokenTree::from(leaf);
- TokenStream { token_trees: vec![tree] }
+ let literal = Literal {
+ symbol: Symbol::intern(symbol),
+ suffix: literal.suffix,
+ span: literal.span,
+ kind: literal_kind_to_internal(literal.kind),
+ };
+ let leaf: tt::Leaf = tt::Leaf::from(literal);
+ let tree = tt::TokenTree::from(leaf);
+ vec![minus_tree, tree]
+ } else {
+ let literal = Literal {
+ symbol: literal.symbol,
+ suffix: literal.suffix,
+ span: literal.span,
+ kind: literal_kind_to_internal(literal.kind),
+ };
+
+ let leaf: tt::Leaf = tt::Leaf::from(literal);
+ let tree = tt::TokenTree::from(leaf);
+ vec![tree]
+ };
+ TokenStream { token_trees }
}
bridge::TokenTree::Punct(p) => {
@@ -216,7 +238,8 @@
&mut self,
stream: Self::TokenStream,
) -> Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>> {
- stream.into_bridge()
+ // Can't join with `TokenId`.
+ stream.into_bridge(&mut |first, _second| first)
}
}
diff --git a/crates/proc-macro-srv/src/server_impl/token_stream.rs b/crates/proc-macro-srv/src/server_impl/token_stream.rs
index 4946a4f..c5019a5 100644
--- a/crates/proc-macro-srv/src/server_impl/token_stream.rs
+++ b/crates/proc-macro-srv/src/server_impl/token_stream.rs
@@ -56,7 +56,10 @@
self.token_trees.is_empty()
}
- pub(crate) fn into_bridge(self) -> Vec<bridge::TokenTree<Self, S, intern::Symbol>> {
+ pub(crate) fn into_bridge(
+ self,
+ join_spans: &mut dyn FnMut(S, S) -> S,
+ ) -> Vec<bridge::TokenTree<Self, S, intern::Symbol>> {
let mut result = Vec::new();
let mut iter = self.token_trees.into_iter();
while let Some(tree) = iter.next() {
@@ -68,6 +71,11 @@
span: ident.span,
}))
}
+ // Note that we do not have to reassemble our split `-` punct and literal into a single
+ // negative bridge literal here. As the proc-macro docs state:
+ // > Literals created from negative numbers might not survive round-trips through
+ // > TokenStream or strings and may be broken into two tokens (- and positive
+ // > literal).
tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => {
result.push(bridge::TokenTree::Literal(bridge::Literal {
span: lit.span,
@@ -93,7 +101,11 @@
token_trees: iter.by_ref().take(subtree.usize_len()).collect(),
})
},
- span: bridge::DelimSpan::from_single(subtree.delimiter.open),
+ span: bridge::DelimSpan {
+ open: subtree.delimiter.open,
+ close: subtree.delimiter.close,
+ entire: join_spans(subtree.delimiter.open, subtree.delimiter.close),
+ },
}))
}
}
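
For context, the change above forwards a bridge literal whose symbol starts with `-` as a standalone `-` punct followed by the positive literal, leaning on the proc-macro guarantee quoted in the comment. A minimal sketch of the same splitting idea, written against the `proc-macro2` crate rather than rust-analyzer's internal `tt` types (that substitution is purely for illustration):

// Sketch only: proc-macro2 stands in for the internal token types.
use proc_macro2::{Punct, Spacing, TokenStream, TokenTree};
use std::str::FromStr;

fn split_negative(text: &str) -> Vec<TokenTree> {
    match text.strip_prefix('-') {
        // Emit a lone `-` punct plus the positive remainder re-parsed as a literal.
        Some(rest) => vec![
            TokenTree::Punct(Punct::new('-', Spacing::Alone)),
            TokenStream::from_str(rest).unwrap().into_iter().next().unwrap(),
        ],
        None => vec![TokenStream::from_str(text).unwrap().into_iter().next().unwrap()],
    }
}

fn main() {
    // Prints two separate tokens, `-` and `123i64`, which is the shape the server now produces.
    for tree in split_negative("-123i64") {
        println!("{tree}");
    }
}
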
diff --git a/crates/proc-macro-srv/src/tests/mod.rs b/crates/proc-macro-srv/src/tests/mod.rs
index a81fea7..3a6ce63 100644
--- a/crates/proc-macro-srv/src/tests/mod.rs
+++ b/crates/proc-macro-srv/src/tests/mod.rs
@@ -11,8 +11,24 @@
assert_expand(
"DeriveEmpty",
r#"struct S;"#,
- expect!["SUBTREE $$ 1 1"],
- expect!["SUBTREE $$ 42:2@0..100#ROOT2024 42:2@0..100#ROOT2024"],
+ expect![[r#"
+ SUBTREE $$ 1 1
+ IDENT struct 1
+ IDENT S 1
+ PUNCH ; [alone] 1
+
+
+
+ SUBTREE $$ 1 1"#]],
+ expect![[r#"
+ SUBTREE $$ 42:2@0..100#ROOT2024 42:2@0..100#ROOT2024
+ IDENT struct 42:2@0..6#ROOT2024
+ IDENT S 42:2@7..8#ROOT2024
+ PUNCH ; [alone] 42:2@8..9#ROOT2024
+
+
+
+ SUBTREE $$ 42:2@0..100#ROOT2024 42:2@0..100#ROOT2024"#]],
);
}
@@ -23,6 +39,13 @@
r#"struct S;"#,
expect![[r#"
SUBTREE $$ 1 1
+ IDENT struct 1
+ IDENT S 1
+ PUNCH ; [alone] 1
+
+
+
+ SUBTREE $$ 1 1
IDENT compile_error 1
PUNCH ! [alone] 1
SUBTREE () 1 1
@@ -30,6 +53,13 @@
PUNCH ; [alone] 1"#]],
expect![[r#"
SUBTREE $$ 42:2@0..100#ROOT2024 42:2@0..100#ROOT2024
+ IDENT struct 42:2@0..6#ROOT2024
+ IDENT S 42:2@7..8#ROOT2024
+ PUNCH ; [alone] 42:2@8..9#ROOT2024
+
+
+
+ SUBTREE $$ 42:2@0..100#ROOT2024 42:2@0..100#ROOT2024
IDENT compile_error 42:2@0..100#ROOT2024
PUNCH ! [alone] 42:2@0..100#ROOT2024
SUBTREE () 42:2@0..100#ROOT2024 42:2@0..100#ROOT2024
@@ -51,6 +81,17 @@
PUNCH , [alone] 1
LITERAL Integer 1 1
PUNCH , [alone] 1
+ SUBTREE [] 1 1
+
+
+
+ SUBTREE $$ 1 1
+ IDENT ident 1
+ PUNCH , [alone] 1
+ LITERAL Integer 0 1
+ PUNCH , [alone] 1
+ LITERAL Integer 1 1
+ PUNCH , [alone] 1
SUBTREE [] 1 1"#]],
expect![[r#"
SUBTREE $$ 42:2@0..100#ROOT2024 42:2@0..100#ROOT2024
@@ -60,6 +101,17 @@
PUNCH , [alone] 42:2@8..9#ROOT2024
LITERAL Integer 1 42:2@10..11#ROOT2024
PUNCH , [alone] 42:2@11..12#ROOT2024
+ SUBTREE [] 42:2@13..14#ROOT2024 42:2@14..15#ROOT2024
+
+
+
+ SUBTREE $$ 42:2@0..100#ROOT2024 42:2@0..100#ROOT2024
+ IDENT ident 42:2@0..5#ROOT2024
+ PUNCH , [alone] 42:2@5..6#ROOT2024
+ LITERAL Integer 0 42:2@7..8#ROOT2024
+ PUNCH , [alone] 42:2@8..9#ROOT2024
+ LITERAL Integer 1 42:2@10..11#ROOT2024
+ PUNCH , [alone] 42:2@11..12#ROOT2024
SUBTREE [] 42:2@13..14#ROOT2024 42:2@14..15#ROOT2024"#]],
);
}
@@ -73,12 +125,26 @@
SUBTREE $$ 1 1
IDENT ident 1
PUNCH , [alone] 1
+ SUBTREE [] 1 1
+
+
+
+ SUBTREE $$ 1 1
+ IDENT ident 1
+ PUNCH , [alone] 1
SUBTREE [] 1 1"#]],
expect![[r#"
SUBTREE $$ 42:2@0..100#ROOT2024 42:2@0..100#ROOT2024
IDENT ident 42:2@0..5#ROOT2024
PUNCH , [alone] 42:2@5..6#ROOT2024
- SUBTREE [] 42:2@7..8#ROOT2024 42:2@7..8#ROOT2024"#]],
+ SUBTREE [] 42:2@7..8#ROOT2024 42:2@8..9#ROOT2024
+
+
+
+ SUBTREE $$ 42:2@0..100#ROOT2024 42:2@0..100#ROOT2024
+ IDENT ident 42:2@0..5#ROOT2024
+ PUNCH , [alone] 42:2@5..6#ROOT2024
+ SUBTREE [] 42:2@7..9#ROOT2024 42:2@7..9#ROOT2024"#]],
);
}
@@ -89,9 +155,19 @@
"r#async",
expect![[r#"
SUBTREE $$ 1 1
+ IDENT r#async 1
+
+
+
+ SUBTREE $$ 1 1
IDENT r#async 1"#]],
expect![[r#"
SUBTREE $$ 42:2@0..100#ROOT2024 42:2@0..100#ROOT2024
+ IDENT r#async 42:2@0..7#ROOT2024
+
+
+
+ SUBTREE $$ 42:2@0..100#ROOT2024 42:2@0..100#ROOT2024
IDENT r#async 42:2@0..7#ROOT2024"#]],
);
}
@@ -103,9 +179,21 @@
"foo bar",
expect![[r#"
SUBTREE $$ 1 1
+ IDENT foo 1
+ IDENT bar 1
+
+
+
+ SUBTREE $$ 1 1
IDENT r#joined 1"#]],
expect![[r#"
SUBTREE $$ 42:2@0..100#ROOT2024 42:2@0..100#ROOT2024
+ IDENT foo 42:2@0..3#ROOT2024
+ IDENT bar 42:2@8..11#ROOT2024
+
+
+
+ SUBTREE $$ 42:2@0..100#ROOT2024 42:2@0..100#ROOT2024
IDENT r#joined 42:2@0..11#ROOT2024"#]],
);
}
@@ -117,11 +205,25 @@
"set_def_site resolved_at_def_site start_span",
expect![[r#"
SUBTREE $$ 1 1
+ IDENT set_def_site 1
+ IDENT resolved_at_def_site 1
+ IDENT start_span 1
+
+
+
+ SUBTREE $$ 1 1
IDENT set_def_site 0
IDENT resolved_at_def_site 1
IDENT start_span 1"#]],
expect![[r#"
SUBTREE $$ 42:2@0..100#ROOT2024 42:2@0..100#ROOT2024
+ IDENT set_def_site 42:2@0..12#ROOT2024
+ IDENT resolved_at_def_site 42:2@13..33#ROOT2024
+ IDENT start_span 42:2@34..44#ROOT2024
+
+
+
+ SUBTREE $$ 42:2@0..100#ROOT2024 42:2@0..100#ROOT2024
IDENT set_def_site 41:1@0..150#ROOT2024
IDENT resolved_at_def_site 42:2@13..33#ROOT2024
IDENT start_span 42:2@34..34#ROOT2024"#]],
@@ -135,21 +237,47 @@
r#""#,
expect![[r#"
SUBTREE $$ 1 1
+
+
+
+ SUBTREE $$ 1 1
LITERAL ByteStr byte_string 1
LITERAL Char c 1
LITERAL Str string 1
+ LITERAL CStr cstring 1
+ LITERAL Float 3.14f64 1
+ PUNCH - [alone] 1
LITERAL Float 3.14f64 1
LITERAL Float 3.14 1
+ PUNCH - [alone] 1
+ LITERAL Float 3.14 1
LITERAL Integer 123i64 1
+ PUNCH - [alone] 1
+ LITERAL Integer 123i64 1
+ LITERAL Integer 123 1
+ PUNCH - [alone] 1
LITERAL Integer 123 1"#]],
expect![[r#"
SUBTREE $$ 42:2@0..100#ROOT2024 42:2@0..100#ROOT2024
+
+
+
+ SUBTREE $$ 42:2@0..100#ROOT2024 42:2@0..100#ROOT2024
LITERAL ByteStr byte_string 42:2@0..100#ROOT2024
LITERAL Char c 42:2@0..100#ROOT2024
LITERAL Str string 42:2@0..100#ROOT2024
+ LITERAL CStr cstring 42:2@0..100#ROOT2024
+ LITERAL Float 3.14f64 42:2@0..100#ROOT2024
+ PUNCH - [alone] 42:2@0..100#ROOT2024
LITERAL Float 3.14f64 42:2@0..100#ROOT2024
LITERAL Float 3.14 42:2@0..100#ROOT2024
+ PUNCH - [alone] 42:2@0..100#ROOT2024
+ LITERAL Float 3.14 42:2@0..100#ROOT2024
LITERAL Integer 123i64 42:2@0..100#ROOT2024
+ PUNCH - [alone] 42:2@0..100#ROOT2024
+ LITERAL Integer 123i64 42:2@0..100#ROOT2024
+ LITERAL Integer 123 42:2@0..100#ROOT2024
+ PUNCH - [alone] 42:2@0..100#ROOT2024
LITERAL Integer 123 42:2@0..100#ROOT2024"#]],
);
}
@@ -161,10 +289,18 @@
r#""#,
expect![[r#"
SUBTREE $$ 1 1
+
+
+
+ SUBTREE $$ 1 1
IDENT standard 1
IDENT r#raw 1"#]],
expect![[r#"
SUBTREE $$ 42:2@0..100#ROOT2024 42:2@0..100#ROOT2024
+
+
+
+ SUBTREE $$ 42:2@0..100#ROOT2024 42:2@0..100#ROOT2024
IDENT standard 42:2@0..100#ROOT2024
IDENT r#raw 42:2@0..100#ROOT2024"#]],
);
@@ -196,6 +332,30 @@
PUNCH , [alone] 1
LITERAL Byte b 1
PUNCH , [alone] 1
+ LITERAL CStr null 1
+
+
+
+ SUBTREE $$ 1 1
+ LITERAL Integer 1u16 1
+ PUNCH , [alone] 1
+ LITERAL Integer 2_u32 1
+ PUNCH , [alone] 1
+ PUNCH - [alone] 1
+ LITERAL Integer 4i64 1
+ PUNCH , [alone] 1
+ LITERAL Float 3.14f32 1
+ PUNCH , [alone] 1
+ LITERAL Str hello bridge 1
+ PUNCH , [alone] 1
+ LITERAL Str suffixedsuffix 1
+ PUNCH , [alone] 1
+ LITERAL StrRaw(2) raw 1
+ PUNCH , [alone] 1
+ LITERAL Char a 1
+ PUNCH , [alone] 1
+ LITERAL Byte b 1
+ PUNCH , [alone] 1
LITERAL CStr null 1"#]],
expect![[r#"
SUBTREE $$ 42:2@0..100#ROOT2024 42:2@0..100#ROOT2024
@@ -218,11 +378,99 @@
PUNCH , [alone] 42:2@78..79#ROOT2024
LITERAL Byte b 42:2@80..84#ROOT2024
PUNCH , [alone] 42:2@84..85#ROOT2024
+ LITERAL CStr null 42:2@86..93#ROOT2024
+
+
+
+ SUBTREE $$ 42:2@0..100#ROOT2024 42:2@0..100#ROOT2024
+ LITERAL Integer 1u16 42:2@0..4#ROOT2024
+ PUNCH , [alone] 42:2@4..5#ROOT2024
+ LITERAL Integer 2_u32 42:2@6..11#ROOT2024
+ PUNCH , [alone] 42:2@11..12#ROOT2024
+ PUNCH - [alone] 42:2@13..14#ROOT2024
+ LITERAL Integer 4i64 42:2@14..18#ROOT2024
+ PUNCH , [alone] 42:2@18..19#ROOT2024
+ LITERAL Float 3.14f32 42:2@20..27#ROOT2024
+ PUNCH , [alone] 42:2@27..28#ROOT2024
+ LITERAL Str hello bridge 42:2@29..43#ROOT2024
+ PUNCH , [alone] 42:2@43..44#ROOT2024
+ LITERAL Str suffixedsuffix 42:2@45..61#ROOT2024
+ PUNCH , [alone] 42:2@61..62#ROOT2024
+ LITERAL StrRaw(2) raw 42:2@63..73#ROOT2024
+ PUNCH , [alone] 42:2@73..74#ROOT2024
+ LITERAL Char a 42:2@75..78#ROOT2024
+ PUNCH , [alone] 42:2@78..79#ROOT2024
+ LITERAL Byte b 42:2@80..84#ROOT2024
+ PUNCH , [alone] 42:2@84..85#ROOT2024
LITERAL CStr null 42:2@86..93#ROOT2024"#]],
);
}
#[test]
+fn test_fn_like_macro_negative_literals() {
+ assert_expand(
+ "fn_like_clone_tokens",
+ r###"-1u16, - 2_u32, -3.14f32, - 2.7"###,
+ expect![[r#"
+ SUBTREE $$ 1 1
+ PUNCH - [alone] 1
+ LITERAL Integer 1u16 1
+ PUNCH , [alone] 1
+ PUNCH - [alone] 1
+ LITERAL Integer 2_u32 1
+ PUNCH , [alone] 1
+ PUNCH - [alone] 1
+ LITERAL Float 3.14f32 1
+ PUNCH , [alone] 1
+ PUNCH - [alone] 1
+ LITERAL Float 2.7 1
+
+
+
+ SUBTREE $$ 1 1
+ PUNCH - [alone] 1
+ LITERAL Integer 1u16 1
+ PUNCH , [alone] 1
+ PUNCH - [alone] 1
+ LITERAL Integer 2_u32 1
+ PUNCH , [alone] 1
+ PUNCH - [alone] 1
+ LITERAL Float 3.14f32 1
+ PUNCH , [alone] 1
+ PUNCH - [alone] 1
+ LITERAL Float 2.7 1"#]],
+ expect![[r#"
+ SUBTREE $$ 42:2@0..100#ROOT2024 42:2@0..100#ROOT2024
+ PUNCH - [alone] 42:2@0..1#ROOT2024
+ LITERAL Integer 1u16 42:2@1..5#ROOT2024
+ PUNCH , [alone] 42:2@5..6#ROOT2024
+ PUNCH - [alone] 42:2@7..8#ROOT2024
+ LITERAL Integer 2_u32 42:2@9..14#ROOT2024
+ PUNCH , [alone] 42:2@14..15#ROOT2024
+ PUNCH - [alone] 42:2@16..17#ROOT2024
+ LITERAL Float 3.14f32 42:2@17..24#ROOT2024
+ PUNCH , [alone] 42:2@24..25#ROOT2024
+ PUNCH - [alone] 42:2@26..27#ROOT2024
+ LITERAL Float 2.7 42:2@28..31#ROOT2024
+
+
+
+ SUBTREE $$ 42:2@0..100#ROOT2024 42:2@0..100#ROOT2024
+ PUNCH - [alone] 42:2@0..1#ROOT2024
+ LITERAL Integer 1u16 42:2@1..5#ROOT2024
+ PUNCH , [alone] 42:2@5..6#ROOT2024
+ PUNCH - [alone] 42:2@7..8#ROOT2024
+ LITERAL Integer 2_u32 42:2@9..14#ROOT2024
+ PUNCH , [alone] 42:2@14..15#ROOT2024
+ PUNCH - [alone] 42:2@16..17#ROOT2024
+ LITERAL Float 3.14f32 42:2@17..24#ROOT2024
+ PUNCH , [alone] 42:2@24..25#ROOT2024
+ PUNCH - [alone] 42:2@26..27#ROOT2024
+ LITERAL Float 2.7 42:2@28..31#ROOT2024"#]],
+ );
+}
+
+#[test]
fn test_attr_macro() {
// Corresponds to
// #[proc_macro_test::attr_error(some arguments)]
@@ -233,6 +481,15 @@
r#"some arguments"#,
expect![[r#"
SUBTREE $$ 1 1
+ IDENT mod 1
+ IDENT m 1
+ SUBTREE {} 1 1
+
+ SUBTREE $$ 1 1
+ IDENT some 1
+ IDENT arguments 1
+
+ SUBTREE $$ 1 1
IDENT compile_error 1
PUNCH ! [alone] 1
SUBTREE () 1 1
@@ -240,6 +497,15 @@
PUNCH ; [alone] 1"#]],
expect![[r#"
SUBTREE $$ 42:2@0..100#ROOT2024 42:2@0..100#ROOT2024
+ IDENT mod 42:2@0..3#ROOT2024
+ IDENT m 42:2@4..5#ROOT2024
+ SUBTREE {} 42:2@6..7#ROOT2024 42:2@7..8#ROOT2024
+
+ SUBTREE $$ 42:2@0..100#ROOT2024 42:2@0..100#ROOT2024
+ IDENT some 42:2@0..4#ROOT2024
+ IDENT arguments 42:2@5..14#ROOT2024
+
+ SUBTREE $$ 42:2@0..100#ROOT2024 42:2@0..100#ROOT2024
IDENT compile_error 42:2@0..100#ROOT2024
PUNCH ! [alone] 42:2@0..100#ROOT2024
SUBTREE () 42:2@0..100#ROOT2024 42:2@0..100#ROOT2024
diff --git a/crates/proc-macro-srv/src/tests/utils.rs b/crates/proc-macro-srv/src/tests/utils.rs
index a476a70..a0a45b2 100644
--- a/crates/proc-macro-srv/src/tests/utils.rs
+++ b/crates/proc-macro-srv/src/tests/utils.rs
@@ -32,9 +32,9 @@
macro_name: &str,
#[rust_analyzer::rust_fixture] ra_fixture: &str,
expect: Expect,
- expect_s: Expect,
+ expect_spanned: Expect,
) {
- assert_expand_impl(macro_name, ra_fixture, None, expect, expect_s);
+ assert_expand_impl(macro_name, ra_fixture, None, expect, expect_spanned);
}
pub fn assert_expand_attr(
@@ -42,9 +42,9 @@
#[rust_analyzer::rust_fixture] ra_fixture: &str,
attr_args: &str,
expect: Expect,
- expect_s: Expect,
+ expect_spanned: Expect,
) {
- assert_expand_impl(macro_name, ra_fixture, Some(attr_args), expect, expect_s);
+ assert_expand_impl(macro_name, ra_fixture, Some(attr_args), expect, expect_spanned);
}
fn assert_expand_impl(
@@ -52,7 +52,7 @@
input: &str,
attr: Option<&str>,
expect: Expect,
- expect_s: Expect,
+ expect_spanned: Expect,
) {
let path = proc_macro_test_dylib_path();
let expander = dylib::Expander::new(&path).unwrap();
@@ -60,20 +60,17 @@
let def_site = TokenId(0);
let call_site = TokenId(1);
let mixed_site = TokenId(2);
- let input_ts = parse_string(call_site, input);
+ let input_ts = parse_string(call_site, input).into_subtree(call_site);
let attr_ts = attr.map(|attr| parse_string(call_site, attr).into_subtree(call_site));
+ let input_ts_string = format!("{input_ts:?}");
+ let attr_ts_string = attr_ts.as_ref().map(|it| format!("{it:?}"));
- let res = expander
- .expand(
- macro_name,
- input_ts.into_subtree(call_site),
- attr_ts,
- def_site,
- call_site,
- mixed_site,
- )
- .unwrap();
- expect.assert_eq(&format!("{res:?}"));
+ let res =
+ expander.expand(macro_name, input_ts, attr_ts, def_site, call_site, mixed_site).unwrap();
+ expect.assert_eq(&format!(
+ "{input_ts_string}\n\n{}\n\n{res:?}",
+ attr_ts_string.unwrap_or_default()
+ ));
let def_site = Span {
range: TextRange::new(0.into(), 150.into()),
@@ -93,15 +90,17 @@
};
let mixed_site = call_site;
- let fixture = parse_string_spanned(call_site.anchor, call_site.ctx, input);
+ let fixture =
+ parse_string_spanned(call_site.anchor, call_site.ctx, input).into_subtree(call_site);
let attr = attr.map(|attr| {
parse_string_spanned(call_site.anchor, call_site.ctx, attr).into_subtree(call_site)
});
+ let fixture_string = format!("{fixture:?}");
+ let attr_string = attr.as_ref().map(|it| format!("{it:?}"));
- let res = expander
- .expand(macro_name, fixture.into_subtree(call_site), attr, def_site, call_site, mixed_site)
- .unwrap();
- expect_s.assert_eq(&format!("{res:#?}"));
+ let res = expander.expand(macro_name, fixture, attr, def_site, call_site, mixed_site).unwrap();
+ expect_spanned
+ .assert_eq(&format!("{fixture_string}\n\n{}\n\n{res:#?}", attr_string.unwrap_or_default()));
}
pub(crate) fn list() -> Vec<String> {
diff --git a/crates/project-model/src/env.rs b/crates/project-model/src/env.rs
index e7293b0..450def5 100644
--- a/crates/project-model/src/env.rs
+++ b/crates/project-model/src/env.rs
@@ -18,6 +18,7 @@
let manifest_dir = package.manifest.parent();
env.set("CARGO_MANIFEST_DIR", manifest_dir.as_str());
+ env.set("CARGO_MANIFEST_PATH", package.manifest.as_str());
env.set("CARGO_PKG_VERSION", package.version.to_string());
env.set("CARGO_PKG_VERSION_MAJOR", package.version.major.to_string());
diff --git a/crates/project-model/src/sysroot.rs b/crates/project-model/src/sysroot.rs
index c7c1b04..d4055d9 100644
--- a/crates/project-model/src/sysroot.rs
+++ b/crates/project-model/src/sysroot.rs
@@ -137,7 +137,12 @@
}
let mut cmd = toolchain::command(tool.prefer_proxy(), current_dir, envs);
- cmd.env("RUSTUP_TOOLCHAIN", AsRef::<std::path::Path>::as_ref(root));
+ if !envs.contains_key("RUSTUP_TOOLCHAIN")
+ && std::env::var_os("RUSTUP_TOOLCHAIN").is_none()
+ {
+ cmd.env("RUSTUP_TOOLCHAIN", AsRef::<std::path::Path>::as_ref(root));
+ }
+
cmd
}
_ => toolchain::command(tool.path(), current_dir, envs),
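
The guard above only injects `RUSTUP_TOOLCHAIN` when neither the configured extra environment nor the process environment already defines it, so user overrides take precedence. A self-contained sketch of that guard using plain `std` types (the `extra_env` map and `Command` here stand in for the project's wrappers):

use std::collections::HashMap;
use std::path::Path;
use std::process::Command;

/// Set RUSTUP_TOOLCHAIN on `cmd` only if the caller has not already provided it,
/// either via `extra_env` or in the process environment.
fn set_toolchain_if_unset(cmd: &mut Command, extra_env: &HashMap<String, String>, sysroot: &Path) {
    if !extra_env.contains_key("RUSTUP_TOOLCHAIN")
        && std::env::var_os("RUSTUP_TOOLCHAIN").is_none()
    {
        cmd.env("RUSTUP_TOOLCHAIN", sysroot);
    }
}

fn main() {
    let mut cmd = Command::new("cargo");
    set_toolchain_if_unset(&mut cmd, &HashMap::new(), Path::new("/opt/rust/sysroot"));
}
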
diff --git a/crates/project-model/test_data/output/cargo_hello_world_project_model.txt b/crates/project-model/test_data/output/cargo_hello_world_project_model.txt
index 4ef9d816..3722e2c 100644
--- a/crates/project-model/test_data/output/cargo_hello_world_project_model.txt
+++ b/crates/project-model/test_data/output/cargo_hello_world_project_model.txt
@@ -52,6 +52,7 @@
"CARGO": "$CARGO$",
"CARGO_CRATE_NAME": "hello_world",
"CARGO_MANIFEST_DIR": "$ROOT$hello-world",
+ "CARGO_MANIFEST_PATH": "$ROOT$hello-world/Cargo.toml",
"CARGO_PKG_AUTHORS": "",
"CARGO_PKG_DESCRIPTION": "",
"CARGO_PKG_HOMEPAGE": "",
@@ -136,6 +137,7 @@
"CARGO": "$CARGO$",
"CARGO_CRATE_NAME": "hello_world",
"CARGO_MANIFEST_DIR": "$ROOT$hello-world",
+ "CARGO_MANIFEST_PATH": "$ROOT$hello-world/Cargo.toml",
"CARGO_PKG_AUTHORS": "",
"CARGO_PKG_DESCRIPTION": "",
"CARGO_PKG_HOMEPAGE": "",
@@ -220,6 +222,7 @@
"CARGO": "$CARGO$",
"CARGO_CRATE_NAME": "an_example",
"CARGO_MANIFEST_DIR": "$ROOT$hello-world",
+ "CARGO_MANIFEST_PATH": "$ROOT$hello-world/Cargo.toml",
"CARGO_PKG_AUTHORS": "",
"CARGO_PKG_DESCRIPTION": "",
"CARGO_PKG_HOMEPAGE": "",
@@ -304,6 +307,7 @@
"CARGO": "$CARGO$",
"CARGO_CRATE_NAME": "it",
"CARGO_MANIFEST_DIR": "$ROOT$hello-world",
+ "CARGO_MANIFEST_PATH": "$ROOT$hello-world/Cargo.toml",
"CARGO_PKG_AUTHORS": "",
"CARGO_PKG_DESCRIPTION": "",
"CARGO_PKG_HOMEPAGE": "",
@@ -384,6 +388,7 @@
"CARGO": "$CARGO$",
"CARGO_CRATE_NAME": "libc",
"CARGO_MANIFEST_DIR": "$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98",
+ "CARGO_MANIFEST_PATH": "$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98/Cargo.toml",
"CARGO_PKG_AUTHORS": "The Rust Project Developers",
"CARGO_PKG_DESCRIPTION": "Raw FFI bindings to platform libraries like libc.\n",
"CARGO_PKG_HOMEPAGE": "https://github.com/rust-lang/libc",
diff --git a/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt b/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt
index 4ef9d816..3722e2c 100644
--- a/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt
+++ b/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt
@@ -52,6 +52,7 @@
"CARGO": "$CARGO$",
"CARGO_CRATE_NAME": "hello_world",
"CARGO_MANIFEST_DIR": "$ROOT$hello-world",
+ "CARGO_MANIFEST_PATH": "$ROOT$hello-world/Cargo.toml",
"CARGO_PKG_AUTHORS": "",
"CARGO_PKG_DESCRIPTION": "",
"CARGO_PKG_HOMEPAGE": "",
@@ -136,6 +137,7 @@
"CARGO": "$CARGO$",
"CARGO_CRATE_NAME": "hello_world",
"CARGO_MANIFEST_DIR": "$ROOT$hello-world",
+ "CARGO_MANIFEST_PATH": "$ROOT$hello-world/Cargo.toml",
"CARGO_PKG_AUTHORS": "",
"CARGO_PKG_DESCRIPTION": "",
"CARGO_PKG_HOMEPAGE": "",
@@ -220,6 +222,7 @@
"CARGO": "$CARGO$",
"CARGO_CRATE_NAME": "an_example",
"CARGO_MANIFEST_DIR": "$ROOT$hello-world",
+ "CARGO_MANIFEST_PATH": "$ROOT$hello-world/Cargo.toml",
"CARGO_PKG_AUTHORS": "",
"CARGO_PKG_DESCRIPTION": "",
"CARGO_PKG_HOMEPAGE": "",
@@ -304,6 +307,7 @@
"CARGO": "$CARGO$",
"CARGO_CRATE_NAME": "it",
"CARGO_MANIFEST_DIR": "$ROOT$hello-world",
+ "CARGO_MANIFEST_PATH": "$ROOT$hello-world/Cargo.toml",
"CARGO_PKG_AUTHORS": "",
"CARGO_PKG_DESCRIPTION": "",
"CARGO_PKG_HOMEPAGE": "",
@@ -384,6 +388,7 @@
"CARGO": "$CARGO$",
"CARGO_CRATE_NAME": "libc",
"CARGO_MANIFEST_DIR": "$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98",
+ "CARGO_MANIFEST_PATH": "$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98/Cargo.toml",
"CARGO_PKG_AUTHORS": "The Rust Project Developers",
"CARGO_PKG_DESCRIPTION": "Raw FFI bindings to platform libraries like libc.\n",
"CARGO_PKG_HOMEPAGE": "https://github.com/rust-lang/libc",
diff --git a/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt b/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt
index 52089d1..7b156ea 100644
--- a/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt
+++ b/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt
@@ -51,6 +51,7 @@
"CARGO": "$CARGO$",
"CARGO_CRATE_NAME": "hello_world",
"CARGO_MANIFEST_DIR": "$ROOT$hello-world",
+ "CARGO_MANIFEST_PATH": "$ROOT$hello-world/Cargo.toml",
"CARGO_PKG_AUTHORS": "",
"CARGO_PKG_DESCRIPTION": "",
"CARGO_PKG_HOMEPAGE": "",
@@ -134,6 +135,7 @@
"CARGO": "$CARGO$",
"CARGO_CRATE_NAME": "hello_world",
"CARGO_MANIFEST_DIR": "$ROOT$hello-world",
+ "CARGO_MANIFEST_PATH": "$ROOT$hello-world/Cargo.toml",
"CARGO_PKG_AUTHORS": "",
"CARGO_PKG_DESCRIPTION": "",
"CARGO_PKG_HOMEPAGE": "",
@@ -217,6 +219,7 @@
"CARGO": "$CARGO$",
"CARGO_CRATE_NAME": "an_example",
"CARGO_MANIFEST_DIR": "$ROOT$hello-world",
+ "CARGO_MANIFEST_PATH": "$ROOT$hello-world/Cargo.toml",
"CARGO_PKG_AUTHORS": "",
"CARGO_PKG_DESCRIPTION": "",
"CARGO_PKG_HOMEPAGE": "",
@@ -300,6 +303,7 @@
"CARGO": "$CARGO$",
"CARGO_CRATE_NAME": "it",
"CARGO_MANIFEST_DIR": "$ROOT$hello-world",
+ "CARGO_MANIFEST_PATH": "$ROOT$hello-world/Cargo.toml",
"CARGO_PKG_AUTHORS": "",
"CARGO_PKG_DESCRIPTION": "",
"CARGO_PKG_HOMEPAGE": "",
@@ -380,6 +384,7 @@
"CARGO": "$CARGO$",
"CARGO_CRATE_NAME": "libc",
"CARGO_MANIFEST_DIR": "$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98",
+ "CARGO_MANIFEST_PATH": "$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98/Cargo.toml",
"CARGO_PKG_AUTHORS": "The Rust Project Developers",
"CARGO_PKG_DESCRIPTION": "Raw FFI bindings to platform libraries like libc.\n",
"CARGO_PKG_HOMEPAGE": "https://github.com/rust-lang/libc",
diff --git a/crates/query-group-macro/tests/logger_db.rs b/crates/query-group-macro/tests/logger_db.rs
index bade0c2..71af63a 100644
--- a/crates/query-group-macro/tests/logger_db.rs
+++ b/crates/query-group-macro/tests/logger_db.rs
@@ -1,33 +1,41 @@
use std::sync::{Arc, Mutex};
#[salsa_macros::db]
-#[derive(Default, Clone)]
+#[derive(Clone)]
pub(crate) struct LoggerDb {
storage: salsa::Storage<Self>,
logger: Logger,
}
+impl Default for LoggerDb {
+ fn default() -> Self {
+ let logger = Logger::default();
+ Self {
+ storage: salsa::Storage::new(Some(Box::new({
+ let logger = logger.clone();
+ move |event| match event.kind {
+ salsa::EventKind::WillExecute { .. }
+ | salsa::EventKind::WillCheckCancellation
+ | salsa::EventKind::DidValidateMemoizedValue { .. }
+ | salsa::EventKind::WillDiscardStaleOutput { .. }
+ | salsa::EventKind::DidDiscard { .. } => {
+ logger.logs.lock().unwrap().push(format!("salsa_event({:?})", event.kind));
+ }
+ _ => {}
+ }
+ }))),
+ logger,
+ }
+ }
+}
+
#[derive(Default, Clone)]
struct Logger {
logs: Arc<Mutex<Vec<String>>>,
}
#[salsa_macros::db]
-impl salsa::Database for LoggerDb {
- fn salsa_event(&self, event: &dyn Fn() -> salsa::Event) {
- let event = event();
- match event.kind {
- salsa::EventKind::WillExecute { .. }
- | salsa::EventKind::WillCheckCancellation
- | salsa::EventKind::DidValidateMemoizedValue { .. }
- | salsa::EventKind::WillDiscardStaleOutput { .. }
- | salsa::EventKind::DidDiscard { .. } => {
- self.push_log(format!("salsa_event({:?})", event.kind));
- }
- _ => {}
- }
- }
-}
+impl salsa::Database for LoggerDb {}
impl LoggerDb {
/// Log an event from inside a tracked function.
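
The test database now wires its logger in as an event callback handed to `salsa::Storage::new` instead of overriding `salsa_event` on the `Database` impl. A stand-alone sketch of that construction-time callback shape; the `Storage` type here is a made-up stand-in, not salsa's real API:

use std::sync::{Arc, Mutex};

type EventCallback = Box<dyn Fn(&str) + Send + Sync>;

// Stand-in for a storage type that accepts an optional event callback at
// construction time (the real salsa API passes a richer event type).
struct Storage {
    on_event: Option<EventCallback>,
}

impl Storage {
    fn new(on_event: Option<EventCallback>) -> Self {
        Self { on_event }
    }

    fn do_work(&self) {
        if let Some(cb) = &self.on_event {
            cb("WillExecute");
        }
    }
}

#[derive(Default, Clone)]
struct Logger {
    logs: Arc<Mutex<Vec<String>>>,
}

fn main() {
    let logger = Logger::default();
    // Clone the logger into the closure so the storage owns its own handle to the logs.
    let storage = Storage::new(Some(Box::new({
        let logger = logger.clone();
        move |event: &str| logger.logs.lock().unwrap().push(format!("salsa_event({event})"))
    })));
    storage.do_work();
    println!("{:?}", logger.logs.lock().unwrap());
}
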
diff --git a/crates/rust-analyzer/src/cli/analysis_stats.rs b/crates/rust-analyzer/src/cli/analysis_stats.rs
index a1e4adf..12b393b 100644
--- a/crates/rust-analyzer/src/cli/analysis_stats.rs
+++ b/crates/rust-analyzer/src/cli/analysis_stats.rs
@@ -4,6 +4,7 @@
use std::{
env, fmt,
ops::AddAssign,
+ panic::{AssertUnwindSafe, catch_unwind},
time::{SystemTime, UNIX_EPOCH},
};
@@ -721,6 +722,7 @@
let mut num_pats_unknown = 0;
let mut num_pats_partially_unknown = 0;
let mut num_pat_type_mismatches = 0;
+ let mut panics = 0;
for &body_id in bodies {
let name = body_id.name(db).unwrap_or_else(Name::missing);
let module = body_id.module(db);
@@ -774,7 +776,20 @@
}
bar.set_message(msg);
let body = db.body(body_id.into());
- let inference_result = db.infer(body_id.into());
+ let inference_result = catch_unwind(AssertUnwindSafe(|| db.infer(body_id.into())));
+ let inference_result = match inference_result {
+ Ok(inference_result) => inference_result,
+ Err(p) => {
+ if let Some(s) = p.downcast_ref::<&str>() {
+ eprintln!("infer panicked for {}: {}", full_name(), s);
+ } else if let Some(s) = p.downcast_ref::<String>() {
+ eprintln!("infer panicked for {}: {}", full_name(), s);
+ }
+ panics += 1;
+ bar.inc(1);
+ continue;
+ }
+ };
// This query is LRU'd, so actually calling it will skew the timing results.
let sm = || db.body_with_source_map(body_id.into()).1;
@@ -1008,6 +1023,7 @@
percentage(num_pats_partially_unknown, num_pats),
num_pat_type_mismatches
);
+ eprintln!(" panics: {panics}");
eprintln!("{:<20} {}", "Inference:", inference_time);
report_metric("unknown type", num_exprs_unknown, "#");
report_metric("type mismatches", num_expr_type_mismatches, "#");
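
Wrapping inference in `catch_unwind` lets the stats run count and report a panicking body instead of aborting, with the panic payload downcast to `&str` or `String` to recover the message. A self-contained sketch of that pattern (the `flaky_infer` function is invented for illustration):

use std::panic::{AssertUnwindSafe, catch_unwind};

fn flaky_infer(n: u32) -> u32 {
    if n == 2 {
        panic!("inference failed on body {n}");
    }
    n * 10
}

fn main() {
    let mut panics = 0;
    for body in 0..4 {
        // AssertUnwindSafe: we promise not to observe broken invariants afterwards.
        match catch_unwind(AssertUnwindSafe(|| flaky_infer(body))) {
            Ok(result) => println!("body {body}: {result}"),
            Err(payload) => {
                // Panic payloads are usually `&str` or `String`; try both.
                let msg = payload
                    .downcast_ref::<&str>()
                    .map(|s| s.to_string())
                    .or_else(|| payload.downcast_ref::<String>().cloned())
                    .unwrap_or_else(|| "non-string panic payload".to_owned());
                eprintln!("infer panicked for body {body}: {msg}");
                panics += 1;
            }
        }
    }
    eprintln!("panics: {panics}");
}
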
diff --git a/crates/rust-analyzer/src/cli/flags.rs b/crates/rust-analyzer/src/cli/flags.rs
index 57f95d1..16f3512 100644
--- a/crates/rust-analyzer/src/cli/flags.rs
+++ b/crates/rust-analyzer/src/cli/flags.rs
@@ -150,8 +150,8 @@
optional --disable-proc-macros
/// Run the proc-macro-srv binary at the specified path.
optional --proc-macro-srv path: PathBuf
- /// Run cache priming in parallel.
- optional --parallel
+ /// The number of threads to use. Defaults to the number of physical cores.
+ optional --num-threads num_threads: usize
}
cmd ssr {
@@ -299,7 +299,7 @@
pub disable_build_scripts: bool,
pub disable_proc_macros: bool,
pub proc_macro_srv: Option<PathBuf>,
- pub parallel: bool,
+ pub num_threads: Option<usize>,
}
#[derive(Debug)]
diff --git a/crates/rust-analyzer/src/cli/prime_caches.rs b/crates/rust-analyzer/src/cli/prime_caches.rs
index 46fb701..467d8a5 100644
--- a/crates/rust-analyzer/src/cli/prime_caches.rs
+++ b/crates/rust-analyzer/src/cli/prime_caches.rs
@@ -52,7 +52,7 @@
elapsed.memory.allocated.megabytes() as u64
);
- let threads = if self.parallel { num_cpus::get() } else { 1 };
+ let threads = self.num_threads.unwrap_or_else(num_cpus::get_physical);
ide_db::prime_caches::parallel_prime_caches(&db, threads, &|_| ());
let elapsed = stop_watch.elapsed();
diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs
index 03e5b1f..d1ca8c1 100644
--- a/crates/rust-analyzer/src/config.rs
+++ b/crates/rust-analyzer/src/config.rs
@@ -149,6 +149,8 @@
hover_memoryLayout_niches: Option<bool> = Some(false),
/// How to render the offset information in a memory layout hover.
hover_memoryLayout_offset: Option<MemoryLayoutHoverRenderKindDef> = Some(MemoryLayoutHoverRenderKindDef::Hexadecimal),
+ /// How to render the padding information in a memory layout hover.
+ hover_memoryLayout_padding: Option<MemoryLayoutHoverRenderKindDef> = None,
/// How to render the size information in a memory layout hover.
hover_memoryLayout_size: Option<MemoryLayoutHoverRenderKindDef> = Some(MemoryLayoutHoverRenderKindDef::Both),
@@ -544,7 +546,7 @@
/// Whether to prefer import paths containing a `prelude` module.
imports_preferPrelude: bool = false,
/// The path structure for newly inserted paths to use.
- imports_prefix: ImportPrefixDef = ImportPrefixDef::Plain,
+ imports_prefix: ImportPrefixDef = ImportPrefixDef::ByCrate,
/// Whether to prefix external (including std, core) crate imports with `::`. e.g. "use ::std::io::Read;".
imports_prefixExternPrelude: bool = false,
}
@@ -1635,6 +1637,7 @@
size: self.hover_memoryLayout_size().map(mem_kind),
offset: self.hover_memoryLayout_offset().map(mem_kind),
alignment: self.hover_memoryLayout_alignment().map(mem_kind),
+ padding: self.hover_memoryLayout_padding().map(mem_kind),
niches: self.hover_memoryLayout_niches().unwrap_or_default(),
}),
documentation: self.hover_documentation_enable().to_owned(),
diff --git a/crates/rust-analyzer/src/flycheck.rs b/crates/rust-analyzer/src/flycheck.rs
index fc31243..0e41824 100644
--- a/crates/rust-analyzer/src/flycheck.rs
+++ b/crates/rust-analyzer/src/flycheck.rs
@@ -470,7 +470,11 @@
let mut cmd =
toolchain::command(Tool::Cargo.path(), &*self.root, &options.extra_env);
if let Some(sysroot_root) = &self.sysroot_root {
- cmd.env("RUSTUP_TOOLCHAIN", AsRef::<std::path::Path>::as_ref(sysroot_root));
+ if !options.extra_env.contains_key("RUSTUP_TOOLCHAIN")
+ && std::env::var_os("RUSTUP_TOOLCHAIN").is_none()
+ {
+ cmd.env("RUSTUP_TOOLCHAIN", AsRef::<std::path::Path>::as_ref(sysroot_root));
+ }
}
cmd.arg(command);
diff --git a/crates/rust-analyzer/src/global_state.rs b/crates/rust-analyzer/src/global_state.rs
index 3b3b9c8..a870232 100644
--- a/crates/rust-analyzer/src/global_state.rs
+++ b/crates/rust-analyzer/src/global_state.rs
@@ -3,7 +3,11 @@
//!
//! Each tick provides an immutable snapshot of the state as `WorldSnapshot`.
-use std::{ops::Not as _, time::Instant};
+use std::{
+ ops::Not as _,
+ panic::AssertUnwindSafe,
+ time::{Duration, Instant},
+};
use crossbeam_channel::{Receiver, Sender, unbounded};
use hir::ChangeWithProcMacros;
@@ -19,6 +23,7 @@
use proc_macro_api::ProcMacroClient;
use project_model::{ManifestPath, ProjectWorkspace, ProjectWorkspaceKind, WorkspaceBuildScripts};
use rustc_hash::{FxHashMap, FxHashSet};
+use stdx::thread;
use tracing::{Level, span, trace};
use triomphe::Arc;
use vfs::{AbsPathBuf, AnchoredPathBuf, ChangeKind, Vfs, VfsPath};
@@ -40,6 +45,7 @@
test_runner::{CargoTestHandle, CargoTestMessage},
};
+#[derive(Debug)]
pub(crate) struct FetchWorkspaceRequest {
pub(crate) path: Option<AbsPathBuf>,
pub(crate) force_crate_graph_reload: bool,
@@ -78,6 +84,7 @@
pub(crate) task_pool: Handle<TaskPool<Task>, Receiver<Task>>,
pub(crate) fmt_pool: Handle<TaskPool<Task>, Receiver<Task>>,
+ pub(crate) cancellation_pool: thread::Pool,
pub(crate) config: Arc<Config>,
pub(crate) config_errors: Option<ConfigErrors>,
@@ -114,6 +121,11 @@
pub(crate) discover_sender: Sender<discover::DiscoverProjectMessage>,
pub(crate) discover_receiver: Receiver<discover::DiscoverProjectMessage>,
+ // Debouncing channel for fetching the workspace: we want to delay the fetch until the VFS
+ // looks stable-ish (that is, we are not currently in the middle of a VCS operation like
+ // `git switch`)
+ pub(crate) fetch_ws_receiver: Option<(Receiver<Instant>, FetchWorkspaceRequest)>,
+
// VFS
pub(crate) loader: Handle<Box<dyn vfs::loader::Handle>, Receiver<vfs::loader::Message>>,
pub(crate) vfs: Arc<RwLock<(vfs::Vfs, FxHashMap<FileId, LineEndings>)>>,
@@ -210,6 +222,7 @@
let handle = TaskPool::new_with_threads(sender, 1);
Handle { handle, receiver }
};
+ let cancellation_pool = thread::Pool::new(1);
let task_queue = {
let (sender, receiver) = unbounded();
@@ -230,6 +243,7 @@
req_queue: ReqQueue::default(),
task_pool,
fmt_pool,
+ cancellation_pool,
loader,
config: Arc::new(config.clone()),
analysis_host,
@@ -264,6 +278,8 @@
discover_sender,
discover_receiver,
+ fetch_ws_receiver: None,
+
vfs: Arc::new(RwLock::new((vfs::Vfs::default(), Default::default()))),
vfs_config_version: 0,
vfs_progress_config_version: 0,
@@ -290,7 +306,6 @@
pub(crate) fn process_changes(&mut self) -> bool {
let _p = span!(Level::INFO, "GlobalState::process_changes").entered();
-
// We cannot directly resolve a change in a ratoml file to a format
// that can be used by the config module because config talks
// in `SourceRootId`s instead of `FileId`s and `FileId` -> `SourceRootId`
@@ -298,66 +313,75 @@
let mut modified_ratoml_files: FxHashMap<FileId, (ChangeKind, vfs::VfsPath)> =
FxHashMap::default();
- let (change, modified_rust_files, workspace_structure_change) = {
- let mut change = ChangeWithProcMacros::default();
- let mut guard = self.vfs.write();
- let changed_files = guard.0.take_changes();
- if changed_files.is_empty() {
- return false;
- }
+ let mut change = ChangeWithProcMacros::default();
+ let mut guard = self.vfs.write();
+ let changed_files = guard.0.take_changes();
+ if changed_files.is_empty() {
+ return false;
+ }
- // downgrade to read lock to allow more readers while we are normalizing text
- let guard = RwLockWriteGuard::downgrade_to_upgradable(guard);
- let vfs: &Vfs = &guard.0;
+ let (change, modified_rust_files, workspace_structure_change) =
+ self.cancellation_pool.scoped(|s| {
+ // Start cancellation in parallel; this kicks off LRU eviction, allowing us to do
+ // meaningful work while we wait.
+ let analysis_host = AssertUnwindSafe(&mut self.analysis_host);
+ s.spawn(thread::ThreadIntent::LatencySensitive, || {
+ { analysis_host }.0.request_cancellation()
+ });
- let mut workspace_structure_change = None;
- // A file was added or deleted
- let mut has_structure_changes = false;
- let mut bytes = vec![];
- let mut modified_rust_files = vec![];
- for file in changed_files.into_values() {
- let vfs_path = vfs.file_path(file.file_id);
- if let Some(("rust-analyzer", Some("toml"))) = vfs_path.name_and_extension() {
- // Remember ids to use them after `apply_changes`
- modified_ratoml_files.insert(file.file_id, (file.kind(), vfs_path.clone()));
- }
+ // downgrade to read lock to allow more readers while we are normalizing text
+ let guard = RwLockWriteGuard::downgrade_to_upgradable(guard);
+ let vfs: &Vfs = &guard.0;
- if let Some(path) = vfs_path.as_path() {
- has_structure_changes |= file.is_created_or_deleted();
-
- if file.is_modified() && path.extension() == Some("rs") {
- modified_rust_files.push(file.file_id);
+ let mut workspace_structure_change = None;
+ // A file was added or deleted
+ let mut has_structure_changes = false;
+ let mut bytes = vec![];
+ let mut modified_rust_files = vec![];
+ for file in changed_files.into_values() {
+ let vfs_path = vfs.file_path(file.file_id);
+ if let Some(("rust-analyzer", Some("toml"))) = vfs_path.name_and_extension() {
+ // Remember ids to use them after `apply_changes`
+ modified_ratoml_files.insert(file.file_id, (file.kind(), vfs_path.clone()));
}
- let additional_files = self
- .config
- .discover_workspace_config()
- .map(|cfg| {
- cfg.files_to_watch.iter().map(String::as_str).collect::<Vec<&str>>()
- })
- .unwrap_or_default();
+ if let Some(path) = vfs_path.as_path() {
+ has_structure_changes |= file.is_created_or_deleted();
- let path = path.to_path_buf();
- if file.is_created_or_deleted() {
- workspace_structure_change.get_or_insert((path, false)).1 |=
- self.crate_graph_file_dependencies.contains(vfs_path);
- } else if reload::should_refresh_for_change(
- &path,
- file.kind(),
- &additional_files,
- ) {
- trace!(?path, kind = ?file.kind(), "refreshing for a change");
- workspace_structure_change.get_or_insert((path.clone(), false));
+ if file.is_modified() && path.extension() == Some("rs") {
+ modified_rust_files.push(file.file_id);
+ }
+
+ let additional_files = self
+ .config
+ .discover_workspace_config()
+ .map(|cfg| {
+ cfg.files_to_watch.iter().map(String::as_str).collect::<Vec<&str>>()
+ })
+ .unwrap_or_default();
+
+ let path = path.to_path_buf();
+ if file.is_created_or_deleted() {
+ workspace_structure_change.get_or_insert((path, false)).1 |=
+ self.crate_graph_file_dependencies.contains(vfs_path);
+ } else if reload::should_refresh_for_change(
+ &path,
+ file.kind(),
+ &additional_files,
+ ) {
+ trace!(?path, kind = ?file.kind(), "refreshing for a change");
+ workspace_structure_change.get_or_insert((path.clone(), false));
+ }
}
- }
- // Clear native diagnostics when their file gets deleted
- if !file.exists() {
- self.diagnostics.clear_native_for(file.file_id);
- }
+ // Clear native diagnostics when their file gets deleted
+ if !file.exists() {
+ self.diagnostics.clear_native_for(file.file_id);
+ }
- let text =
- if let vfs::Change::Create(v, _) | vfs::Change::Modify(v, _) = file.change {
+ let text = if let vfs::Change::Create(v, _) | vfs::Change::Modify(v, _) =
+ file.change
+ {
String::from_utf8(v).ok().map(|text| {
// FIXME: Consider doing normalization in the `vfs` instead? That allows
// getting rid of some locking
@@ -367,29 +391,28 @@
} else {
None
};
- // delay `line_endings_map` changes until we are done normalizing the text
- // this allows delaying the re-acquisition of the write lock
- bytes.push((file.file_id, text));
- }
- let (vfs, line_endings_map) = &mut *RwLockUpgradableReadGuard::upgrade(guard);
- bytes.into_iter().for_each(|(file_id, text)| {
- let text = match text {
- None => None,
- Some((text, line_endings)) => {
- line_endings_map.insert(file_id, line_endings);
- Some(text)
- }
- };
- change.change_file(file_id, text);
+ // delay `line_endings_map` changes until we are done normalizing the text
+ // this allows delaying the re-acquisition of the write lock
+ bytes.push((file.file_id, text));
+ }
+ let (vfs, line_endings_map) = &mut *RwLockUpgradableReadGuard::upgrade(guard);
+ bytes.into_iter().for_each(|(file_id, text)| {
+ let text = match text {
+ None => None,
+ Some((text, line_endings)) => {
+ line_endings_map.insert(file_id, line_endings);
+ Some(text)
+ }
+ };
+ change.change_file(file_id, text);
+ });
+ if has_structure_changes {
+ let roots = self.source_root_config.partition(vfs);
+ change.set_roots(roots);
+ }
+ (change, modified_rust_files, workspace_structure_change)
});
- if has_structure_changes {
- let roots = self.source_root_config.partition(vfs);
- change.set_roots(roots);
- }
- (change, modified_rust_files, workspace_structure_change)
- };
- let _p = span!(Level::INFO, "GlobalState::process_changes/apply_change").entered();
self.analysis_host.apply_change(change);
if !modified_ratoml_files.is_empty()
|| !self.config.same_source_root_parent_map(&self.local_roots_parent_map)
@@ -508,11 +531,7 @@
if let Some((path, force_crate_graph_reload)) = workspace_structure_change {
let _p = span!(Level::INFO, "GlobalState::process_changes/ws_structure_change")
.entered();
-
- self.fetch_workspaces_queue.request_op(
- format!("workspace vfs file change: {path}"),
- FetchWorkspaceRequest { path: Some(path), force_crate_graph_reload },
- );
+ self.enqueue_workspace_fetch(path, force_crate_graph_reload);
}
}
@@ -660,6 +679,30 @@
None
})
}
+
+ fn enqueue_workspace_fetch(&mut self, path: AbsPathBuf, force_crate_graph_reload: bool) {
+ let already_requested = self.fetch_workspaces_queue.op_requested()
+ && !self.fetch_workspaces_queue.op_in_progress();
+ if self.fetch_ws_receiver.is_none() && already_requested {
+ // Don't queue up a new fetch request if we have already done so;
+ // otherwise we would re-fetch in quick succession, which is unnecessary.
+ // Note, though, that if a fetch is already in progress we *want* to re-queue,
+ // as the in-progress fetch might no longer contain the latest changes.
+ // FIXME: We should cancel the in-progress fetch here
+ return;
+ }
+
+ self.fetch_ws_receiver = Some((
+ crossbeam_channel::after(Duration::from_millis(100)),
+ FetchWorkspaceRequest { path: Some(path), force_crate_graph_reload },
+ ));
+ }
+
+ pub(crate) fn debounce_workspace_fetch(&mut self) {
+ if let Some((fetch_receiver, _)) = &mut self.fetch_ws_receiver {
+ *fetch_receiver = crossbeam_channel::after(Duration::from_millis(100));
+ }
+ }
}
impl Drop for GlobalState {
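
The debounce stores a `crossbeam_channel::after` timer next to the pending request and recreates it on every VFS event, while the main loop selects on that timer (or `never()` when nothing is pending), so the fetch only fires once the VFS has been quiet for roughly 100ms. A minimal stand-alone sketch of that shape with a simulated event source:

use crossbeam_channel::{after, never, select, unbounded, Receiver};
use std::time::{Duration, Instant};

fn main() {
    let (events_tx, events_rx) = unbounded::<&'static str>();

    // Simulated burst of VFS events, then silence.
    std::thread::spawn(move || {
        for _ in 0..5 {
            events_tx.send("file changed").unwrap();
            std::thread::sleep(Duration::from_millis(20));
        }
    });

    // `None` means no fetch is pending; `Some(timer)` means one is scheduled.
    let mut debounce: Option<Receiver<Instant>> = None;
    loop {
        select! {
            recv(events_rx) -> msg => match msg {
                Ok(_) => {
                    // Every new event pushes the deadline out again.
                    debounce = Some(after(Duration::from_millis(100)));
                }
                Err(_) => {
                    // Sender gone: flush any pending fetch, then stop.
                    if let Some(timer) = debounce.take() {
                        timer.recv().unwrap();
                        println!("VFS quiet for 100ms, fetching workspace now");
                    }
                    break;
                }
            },
            recv(debounce.as_ref().unwrap_or(&never())) -> _deadline => {
                println!("VFS quiet for 100ms, fetching workspace now");
                debounce = None;
            }
        }
    }
}
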
diff --git a/crates/rust-analyzer/src/integrated_benchmarks.rs b/crates/rust-analyzer/src/integrated_benchmarks.rs
index 49ebffa..84b7888 100644
--- a/crates/rust-analyzer/src/integrated_benchmarks.rs
+++ b/crates/rust-analyzer/src/integrated_benchmarks.rs
@@ -147,7 +147,7 @@
let _it = stdx::timeit("change");
let mut text = host.analysis().file_text(file_id).unwrap().to_string();
let completion_offset =
- patch(&mut text, "db.struct_data(self.id)", "sel;\ndb.struct_data(self.id)")
+ patch(&mut text, "db.struct_signature(self.id)", "sel;\ndb.struct_signature(self.id)")
+ "sel".len();
let mut change = ChangeWithProcMacros::default();
change.change_file(file_id, Some(text));
@@ -197,9 +197,11 @@
let completion_offset = {
let _it = stdx::timeit("change");
let mut text = host.analysis().file_text(file_id).unwrap().to_string();
- let completion_offset =
- patch(&mut text, "sel;\ndb.struct_data(self.id)", ";sel;\ndb.struct_data(self.id)")
- + ";sel".len();
+ let completion_offset = patch(
+ &mut text,
+ "sel;\ndb.struct_signature(self.id)",
+ ";sel;\ndb.struct_signature(self.id)",
+ ) + ";sel".len();
let mut change = ChangeWithProcMacros::default();
change.change_file(file_id, Some(text));
host.apply_change(change);
@@ -247,9 +249,11 @@
let completion_offset = {
let _it = stdx::timeit("change");
let mut text = host.analysis().file_text(file_id).unwrap().to_string();
- let completion_offset =
- patch(&mut text, "sel;\ndb.struct_data(self.id)", "self.;\ndb.struct_data(self.id)")
- + "self.".len();
+ let completion_offset = patch(
+ &mut text,
+ "sel;\ndb.struct_signature(self.id)",
+ "self.;\ndb.struct_signature(self.id)",
+ ) + "self.".len();
let mut change = ChangeWithProcMacros::default();
change.change_file(file_id, Some(text));
host.apply_change(change);
@@ -366,7 +370,7 @@
{
let _it = stdx::timeit("change");
let mut text = host.analysis().file_text(file_id).unwrap().to_string();
- patch(&mut text, "db.struct_data(self.id)", "();\ndb.struct_data(self.id)");
+ patch(&mut text, "db.struct_signature(self.id)", "();\ndb.struct_signature(self.id)");
let mut change = ChangeWithProcMacros::default();
change.change_file(file_id, Some(text));
host.apply_change(change);
diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs
index bd213ff..0c0438c 100644
--- a/crates/rust-analyzer/src/main_loop.rs
+++ b/crates/rust-analyzer/src/main_loop.rs
@@ -8,7 +8,7 @@
time::{Duration, Instant},
};
-use crossbeam_channel::{Receiver, select};
+use crossbeam_channel::{Receiver, never, select};
use ide_db::base_db::{SourceDatabase, VfsPath, salsa::Database as _};
use lsp_server::{Connection, Notification, Request};
use lsp_types::{TextDocumentIdentifier, notification::Notification as _};
@@ -71,6 +71,7 @@
Flycheck(FlycheckMessage),
TestResult(CargoTestMessage),
DiscoverProject(DiscoverProjectMessage),
+ FetchWorkspaces(FetchWorkspaceRequest),
}
impl fmt::Display for Event {
@@ -83,6 +84,7 @@
Event::QueuedTask(_) => write!(f, "Event::QueuedTask"),
Event::TestResult(_) => write!(f, "Event::TestResult"),
Event::DiscoverProject(_) => write!(f, "Event::DiscoverProject"),
+ Event::FetchWorkspaces(_) => write!(f, "Event::SwitchWorkspaces"),
}
}
}
@@ -150,6 +152,7 @@
}
_ => (),
}
+
match self {
Event::Lsp(it) => fmt::Debug::fmt(it, f),
Event::Task(it) => fmt::Debug::fmt(it, f),
@@ -158,6 +161,7 @@
Event::Flycheck(it) => fmt::Debug::fmt(it, f),
Event::TestResult(it) => fmt::Debug::fmt(it, f),
Event::DiscoverProject(it) => fmt::Debug::fmt(it, f),
+ Event::FetchWorkspaces(it) => fmt::Debug::fmt(it, f),
}
}
}
@@ -251,7 +255,7 @@
}
fn next_event(
- &self,
+ &mut self,
inbox: &Receiver<lsp_server::Message>,
) -> Result<Option<Event>, crossbeam_channel::RecvError> {
// Make sure we reply to formatting requests ASAP so the editor doesn't block
@@ -283,6 +287,10 @@
recv(self.discover_receiver) -> task =>
task.map(Event::DiscoverProject),
+
+ recv(self.fetch_ws_receiver.as_ref().map_or(&never(), |(chan, _)| chan)) -> _instant => {
+ Ok(Event::FetchWorkspaces(self.fetch_ws_receiver.take().unwrap().1))
+ },
}
.map(Some)
}
@@ -412,6 +420,9 @@
self.handle_discover_msg(message);
}
}
+ Event::FetchWorkspaces(req) => {
+ self.fetch_workspaces_queue.request_op("project structure change".to_owned(), req)
+ }
}
let event_handling_duration = loop_start.elapsed();
let (state_changed, memdocs_added_or_removed) = if self.vfs_done {
@@ -830,6 +841,7 @@
match message {
vfs::loader::Message::Changed { files } | vfs::loader::Message::Loaded { files } => {
let _p = tracing::info_span!("GlobalState::handle_vfs_msg{changed/load}").entered();
+ self.debounce_workspace_fetch();
let vfs = &mut self.vfs.write().0;
for (path, contents) in files {
let path = VfsPath::from(path);
diff --git a/crates/rust-analyzer/src/op_queue.rs b/crates/rust-analyzer/src/op_queue.rs
index 709d99b..7af5b48 100644
--- a/crates/rust-analyzer/src/op_queue.rs
+++ b/crates/rust-analyzer/src/op_queue.rs
@@ -36,7 +36,7 @@
}
}
-impl<Args, Output> OpQueue<Args, Output> {
+impl<Args: std::fmt::Debug, Output> OpQueue<Args, Output> {
/// Request an operation to start.
pub(crate) fn request_op(&mut self, reason: Cause, args: Args) {
self.op_requested = Some((reason, args));
diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs
index 55ed192..ae9e3e9 100644
--- a/crates/rust-analyzer/src/reload.rs
+++ b/crates/rust-analyzer/src/reload.rs
@@ -69,6 +69,7 @@
/// are ready to do semantic work.
pub(crate) fn is_quiescent(&self) -> bool {
self.vfs_done
+ && self.fetch_ws_receiver.is_none()
&& !self.fetch_workspaces_queue.op_in_progress()
&& !self.fetch_build_data_queue.op_in_progress()
&& !self.fetch_proc_macros_queue.op_in_progress()
@@ -659,6 +660,10 @@
.chain(
ws.sysroot
.root()
+ .filter(|_| {
+ !self.config.extra_env(None).contains_key("RUSTUP_TOOLCHAIN")
+ && std::env::var_os("RUSTUP_TOOLCHAIN").is_none()
+ })
.map(|it| ("RUSTUP_TOOLCHAIN".to_owned(), Some(it.to_string()))),
)
.collect(),
diff --git a/crates/stdx/Cargo.toml b/crates/stdx/Cargo.toml
index 7bda106..b37aded 100644
--- a/crates/stdx/Cargo.toml
+++ b/crates/stdx/Cargo.toml
@@ -17,6 +17,7 @@
crossbeam-channel.workspace = true
itertools.workspace = true
tracing.workspace = true
+crossbeam-utils = "0.8.21"
# Think twice before adding anything here
[target.'cfg(unix)'.dependencies]
diff --git a/crates/stdx/src/thread/pool.rs b/crates/stdx/src/thread/pool.rs
index a8de4db..8d76c5f 100644
--- a/crates/stdx/src/thread/pool.rs
+++ b/crates/stdx/src/thread/pool.rs
@@ -8,6 +8,7 @@
//! the threading utilities in [`crate::thread`].
use std::{
+ marker::PhantomData,
panic::{self, UnwindSafe},
sync::{
Arc,
@@ -16,8 +17,9 @@
};
use crossbeam_channel::{Receiver, Sender};
+use crossbeam_utils::sync::WaitGroup;
-use super::{Builder, JoinHandle, ThreadIntent};
+use crate::thread::{Builder, JoinHandle, ThreadIntent};
pub struct Pool {
// `_handles` is never read: the field is present
@@ -79,9 +81,6 @@
Self { _handles: handles.into_boxed_slice(), extant_tasks, job_sender }
}
- /// # Panics
- ///
- /// Panics if job panics
pub fn spawn<F>(&self, intent: ThreadIntent, f: F)
where
F: FnOnce() + Send + UnwindSafe + 'static,
@@ -97,6 +96,17 @@
self.job_sender.send(job).unwrap();
}
+ pub fn scoped<'pool, 'scope, F, R>(&'pool self, f: F) -> R
+ where
+ F: FnOnce(&Scope<'pool, 'scope>) -> R,
+ {
+ let wg = WaitGroup::new();
+ let scope = Scope { pool: self, wg, _marker: PhantomData };
+ let r = f(&scope);
+ scope.wg.wait();
+ r
+ }
+
#[must_use]
pub fn len(&self) -> usize {
self.extant_tasks.load(Ordering::SeqCst)
@@ -107,3 +117,36 @@
self.len() == 0
}
}
+
+pub struct Scope<'pool, 'scope> {
+ pool: &'pool Pool,
+ wg: WaitGroup,
+ _marker: PhantomData<fn(&'scope ()) -> &'scope ()>,
+}
+
+impl<'scope> Scope<'_, 'scope> {
+ pub fn spawn<F>(&self, intent: ThreadIntent, f: F)
+ where
+ F: 'scope + FnOnce() + Send + UnwindSafe,
+ {
+ let wg = self.wg.clone();
+ let f = Box::new(move || {
+ if cfg!(debug_assertions) {
+ intent.assert_is_used_on_current_thread();
+ }
+ f();
+ drop(wg);
+ });
+
+ let job = Job {
+ requested_intent: intent,
+ f: unsafe {
+ std::mem::transmute::<
+ Box<dyn 'scope + FnOnce() + Send + UnwindSafe>,
+ Box<dyn 'static + FnOnce() + Send + UnwindSafe>,
+ >(f)
+ },
+ };
+ self.pool.job_sender.send(job).unwrap();
+ }
+}
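
`Pool::scoped` blocks on a `WaitGroup` until every spawned job has dropped its clone of it, which is what lets the scope borrow non-`'static` data; the lifetime transmute is only there to smuggle the shortened closure type into the existing job queue. A sketch of the underlying `WaitGroup` mechanics using plain OS threads instead of the pool, so it stays self-contained:

use crossbeam_utils::sync::WaitGroup;
use std::sync::{Arc, Mutex};

fn main() {
    let results = Arc::new(Mutex::new(Vec::new()));
    let wg = WaitGroup::new();

    for i in 0..4 {
        // Each job gets its own clone of the wait group and drops it when done.
        let wg = wg.clone();
        let results = Arc::clone(&results);
        std::thread::spawn(move || {
            results.lock().unwrap().push(i * i);
            drop(wg);
        });
    }

    // Blocks until every clone has been dropped, i.e. all jobs finished.
    wg.wait();
    println!("{:?}", results.lock().unwrap());
}
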
diff --git a/crates/syntax/src/ast/edit_in_place.rs b/crates/syntax/src/ast/edit_in_place.rs
index da0bfd4..e60243f 100644
--- a/crates/syntax/src/ast/edit_in_place.rs
+++ b/crates/syntax/src/ast/edit_in_place.rs
@@ -109,6 +109,67 @@
}
}
+impl GenericParamsOwnerEdit for ast::TraitAlias {
+ fn get_or_create_generic_param_list(&self) -> ast::GenericParamList {
+ match self.generic_param_list() {
+ Some(it) => it,
+ None => {
+ let position = if let Some(name) = self.name() {
+ Position::after(name.syntax)
+ } else if let Some(trait_token) = self.trait_token() {
+ Position::after(trait_token)
+ } else {
+ Position::last_child_of(self.syntax())
+ };
+ create_generic_param_list(position)
+ }
+ }
+ }
+
+ fn get_or_create_where_clause(&self) -> ast::WhereClause {
+ if self.where_clause().is_none() {
+ let position = match self.semicolon_token() {
+ Some(tok) => Position::before(tok),
+ None => Position::last_child_of(self.syntax()),
+ };
+ create_where_clause(position);
+ }
+ self.where_clause().unwrap()
+ }
+}
+
+impl GenericParamsOwnerEdit for ast::TypeAlias {
+ fn get_or_create_generic_param_list(&self) -> ast::GenericParamList {
+ match self.generic_param_list() {
+ Some(it) => it,
+ None => {
+ let position = if let Some(name) = self.name() {
+ Position::after(name.syntax)
+ } else if let Some(trait_token) = self.type_token() {
+ Position::after(trait_token)
+ } else {
+ Position::last_child_of(self.syntax())
+ };
+ create_generic_param_list(position)
+ }
+ }
+ }
+
+ fn get_or_create_where_clause(&self) -> ast::WhereClause {
+ if self.where_clause().is_none() {
+ let position = match self.eq_token() {
+ Some(tok) => Position::before(tok),
+ None => match self.semicolon_token() {
+ Some(tok) => Position::before(tok),
+ None => Position::last_child_of(self.syntax()),
+ },
+ };
+ create_where_clause(position);
+ }
+ self.where_clause().unwrap()
+ }
+}
+
impl GenericParamsOwnerEdit for ast::Struct {
fn get_or_create_generic_param_list(&self) -> ast::GenericParamList {
match self.generic_param_list() {
diff --git a/crates/syntax/src/ast/make.rs b/crates/syntax/src/ast/make.rs
index 596f73e..fab4cb2 100644
--- a/crates/syntax/src/ast/make.rs
+++ b/crates/syntax/src/ast/make.rs
@@ -13,6 +13,7 @@
mod quote;
+use either::Either;
use itertools::Itertools;
use parser::{Edition, T};
use rowan::NodeOrToken;
@@ -881,7 +882,7 @@
}
pub fn where_pred(
- path: ast::Type,
+ path: Either<ast::Lifetime, ast::Type>,
bounds: impl IntoIterator<Item = ast::TypeBound>,
) -> ast::WherePred {
let bounds = bounds.into_iter().join(" + ");
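
Taking `Either<ast::Lifetime, ast::Type>` lets callers build where predicates that bound a lifetime (as in `'a: 'b`) as well as a type. A tiny sketch of dispatching on the `either` crate's `Either` in that spirit, with strings standing in for the AST nodes:

use either::Either;

fn render_where_pred(target: Either<&str, &str>, bounds: &[&str]) -> String {
    // Lifetimes and types render the same way here; the Either just records
    // which kind of predicate the caller asked for.
    let target = match target {
        Either::Left(lifetime) => lifetime,
        Either::Right(ty) => ty,
    };
    format!("{target}: {}", bounds.join(" + "))
}

fn main() {
    println!("{}", render_where_pred(Either::Left("'a"), &["'b"]));
    println!("{}", render_where_pred(Either::Right("T"), &["Clone", "Send"]));
}
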
diff --git a/crates/syntax/src/ast/syntax_factory/constructors.rs b/crates/syntax/src/ast/syntax_factory/constructors.rs
index 8dee396..429e51b 100644
--- a/crates/syntax/src/ast/syntax_factory/constructors.rs
+++ b/crates/syntax/src/ast/syntax_factory/constructors.rs
@@ -585,6 +585,18 @@
ast
}
+ pub fn expr_underscore(&self) -> ast::UnderscoreExpr {
+ let ast::Expr::UnderscoreExpr(ast) = make::ext::expr_underscore().clone_for_update() else {
+ unreachable!()
+ };
+
+ if let Some(mut mapping) = self.mappings() {
+ SyntaxMappingBuilder::new(ast.syntax().clone()).finish(&mut mapping);
+ }
+
+ ast
+ }
+
pub fn expr_if(
&self,
condition: ast::Expr,
diff --git a/crates/test-fixture/src/lib.rs b/crates/test-fixture/src/lib.rs
index f6ca5ab..96e1301 100644
--- a/crates/test-fixture/src/lib.rs
+++ b/crates/test-fixture/src/lib.rs
@@ -662,10 +662,6 @@
) -> Result<TopSubtree, ProcMacroExpansionError> {
Ok(subtree.clone())
}
-
- fn eq_dyn(&self, other: &dyn ProcMacroExpander) -> bool {
- other.as_any().type_id() == std::any::TypeId::of::<Self>()
- }
}
// Expands to a macro_rules! macro, for issue #18089.
@@ -697,10 +693,6 @@
#subtree
})
}
-
- fn eq_dyn(&self, other: &dyn ProcMacroExpander) -> bool {
- other.as_any().type_id() == std::any::TypeId::of::<Self>()
- }
}
// Pastes the attribute input as its output
@@ -721,10 +713,6 @@
.cloned()
.ok_or_else(|| ProcMacroExpansionError::Panic("Expected attribute input".into()))
}
-
- fn eq_dyn(&self, other: &dyn ProcMacroExpander) -> bool {
- other.as_any().type_id() == std::any::TypeId::of::<Self>()
- }
}
#[derive(Debug)]
@@ -756,10 +744,6 @@
top_subtree_delimiter_mut.close = def_site;
Ok(result)
}
-
- fn eq_dyn(&self, other: &dyn ProcMacroExpander) -> bool {
- other.as_any().type_id() == std::any::TypeId::of::<Self>()
- }
}
#[derive(Debug)]
@@ -791,10 +775,6 @@
traverse(&mut builder, input.iter());
Ok(builder.build())
}
-
- fn eq_dyn(&self, other: &dyn ProcMacroExpander) -> bool {
- other.as_any().type_id() == std::any::TypeId::of::<Self>()
- }
}
// Replaces every literal with an empty string literal and every identifier with its first letter,
@@ -835,10 +815,6 @@
}
}
}
-
- fn eq_dyn(&self, other: &dyn ProcMacroExpander) -> bool {
- other.as_any().type_id() == std::any::TypeId::of::<Self>()
- }
}
// Reads ident type within string quotes, for issue #17479.
@@ -864,10 +840,6 @@
#symbol()
})
}
-
- fn eq_dyn(&self, other: &dyn ProcMacroExpander) -> bool {
- other.as_any().type_id() == std::any::TypeId::of::<Self>()
- }
}
// Reads ident type within string quotes, for issue #17479.
@@ -919,10 +891,6 @@
}
})
}
-
- fn eq_dyn(&self, other: &dyn ProcMacroExpander) -> bool {
- other.as_any().type_id() == std::any::TypeId::of::<Self>()
- }
}
// Reads ident type within string quotes, for issue #17479.
@@ -950,8 +918,4 @@
}
Ok(subtree.clone())
}
-
- fn eq_dyn(&self, other: &dyn ProcMacroExpander) -> bool {
- other.as_any().type_id() == std::any::TypeId::of::<Self>()
- }
}
diff --git a/crates/tt/src/lib.rs b/crates/tt/src/lib.rs
index 1dbc07c..14574a6 100644
--- a/crates/tt/src/lib.rs
+++ b/crates/tt/src/lib.rs
@@ -817,6 +817,58 @@
}
}
+impl<S> Literal<S> {
+ pub fn display_no_minus(&self) -> impl fmt::Display {
+ struct NoMinus<'a, S>(&'a Literal<S>);
+ impl<S> fmt::Display for NoMinus<'_, S> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let symbol =
+ self.0.symbol.as_str().strip_prefix('-').unwrap_or(self.0.symbol.as_str());
+ match self.0.kind {
+ LitKind::Byte => write!(f, "b'{symbol}'"),
+ LitKind::Char => write!(f, "'{symbol}'"),
+ LitKind::Integer | LitKind::Float | LitKind::Err(_) => write!(f, "{symbol}"),
+ LitKind::Str => write!(f, "\"{symbol}\""),
+ LitKind::ByteStr => write!(f, "b\"{symbol}\""),
+ LitKind::CStr => write!(f, "c\"{symbol}\""),
+ LitKind::StrRaw(num_of_hashes) => {
+ let num_of_hashes = num_of_hashes as usize;
+ write!(
+ f,
+ r#"r{0:#<num_of_hashes$}"{text}"{0:#<num_of_hashes$}"#,
+ "",
+ text = symbol
+ )
+ }
+ LitKind::ByteStrRaw(num_of_hashes) => {
+ let num_of_hashes = num_of_hashes as usize;
+ write!(
+ f,
+ r#"br{0:#<num_of_hashes$}"{text}"{0:#<num_of_hashes$}"#,
+ "",
+ text = symbol
+ )
+ }
+ LitKind::CStrRaw(num_of_hashes) => {
+ let num_of_hashes = num_of_hashes as usize;
+ write!(
+ f,
+ r#"cr{0:#<num_of_hashes$}"{text}"{0:#<num_of_hashes$}"#,
+ "",
+ text = symbol
+ )
+ }
+ }?;
+ if let Some(suffix) = &self.0.suffix {
+ write!(f, "{suffix}")?;
+ }
+ Ok(())
+ }
+ }
+ NoMinus(self)
+ }
+}
+
impl<S> fmt::Display for Literal<S> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self.kind {
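
`display_no_minus` returns a private newtype that borrows the literal and implements `fmt::Display` with the alternative rendering, so callers can format it without allocating an intermediate `String`. A minimal stand-alone sketch of that adapter pattern on a made-up `Literal` type:

use std::fmt;

struct Literal {
    text: String,
}

impl Literal {
    /// Display adapter that renders the literal without a leading `-`.
    fn display_no_minus(&self) -> impl fmt::Display + '_ {
        struct NoMinus<'a>(&'a Literal);
        impl fmt::Display for NoMinus<'_> {
            fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                let text = self.0.text.strip_prefix('-').unwrap_or(&self.0.text);
                write!(f, "{text}")
            }
        }
        NoMinus(self)
    }
}

fn main() {
    let lit = Literal { text: "-123i64".to_owned() };
    // Prints `123i64`; the adapter itself never allocates.
    println!("{}", lit.display_no_minus());
}
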
diff --git a/docs/book/src/configuration_generated.md b/docs/book/src/configuration_generated.md
index 2ae73df..0e07dad 100644
--- a/docs/book/src/configuration_generated.md
+++ b/docs/book/src/configuration_generated.md
@@ -763,6 +763,13 @@
How to render the offset information in a memory layout hover.
+## rust-analyzer.hover.memoryLayout.padding {#hover.memoryLayout.padding}
+
+Default: `null`
+
+How to render the padding information in a memory layout hover.
+
+
## rust-analyzer.hover.memoryLayout.size {#hover.memoryLayout.size}
Default: `"both"`
@@ -835,7 +842,7 @@
## rust-analyzer.imports.prefix {#imports.prefix}
-Default: `"plain"`
+Default: `"crate"`
The path structure for newly inserted paths to use.
diff --git a/docs/book/src/other_editors.md b/docs/book/src/other_editors.md
index 1eac7dd..896df52 100644
--- a/docs/book/src/other_editors.md
+++ b/docs/book/src/other_editors.md
@@ -364,30 +364,6 @@
There are multiple rust-analyzer extensions for Visual Studio 2022 on
Windows:
-### rust-analyzer.vs
-
-(License: Creative Commons Attribution-NonCommercial-ShareAlike 4.0
-International)
-
-[Visual Studio
-Marketplace](https://marketplace.visualstudio.com/items?itemName=kitamstudios.RustAnalyzer)
-
-[GitHub](https://github.com/kitamstudios/rust-analyzer/)
-
-Support for Rust development in the Visual Studio IDE is enabled by the
-[rust-analyzer](https://marketplace.visualstudio.com/items?itemName=kitamstudios.RustAnalyzer)
-package. Either click on the download link or install from IDE’s
-extension manager. For now [Visual Studio
-2022](https://visualstudio.microsoft.com/downloads/) is required. All
-editions are supported viz. Community, Professional & Enterprise. The
-package aims to provide 0-friction installation and therefore comes
-loaded with most things required including rust-analyzer binary. If
-anything it needs is missing, appropriate errors / warnings will guide
-the user. E.g. cargo.exe needs to be in path and the package will tell
-you as much. This package is under rapid active development. So if you
-encounter any issues please file it at
-[rust-analyzer.vs](https://github.com/kitamstudios/rust-analyzer/).
-
### VS RustAnalyzer
(License: GPL)
diff --git a/editors/code/package-lock.json b/editors/code/package-lock.json
index 11a37c2..18fb097 100644
--- a/editors/code/package-lock.json
+++ b/editors/code/package-lock.json
@@ -5730,9 +5730,9 @@
"license": "MIT"
},
"node_modules/undici": {
- "version": "6.21.1",
- "resolved": "https://registry.npmjs.org/undici/-/undici-6.21.1.tgz",
- "integrity": "sha512-q/1rj5D0/zayJB2FraXdaWxbhWiNKDvu8naDT2dl1yTlvJp4BLtOcp2a5BvgGNQpYYJzau7tf1WgKv3b+7mqpQ==",
+ "version": "6.21.3",
+ "resolved": "https://registry.npmjs.org/undici/-/undici-6.21.3.tgz",
+ "integrity": "sha512-gBLkYIlEnSp8pFbT64yFgGE6UIB9tAkhukC23PmMDCe5Nd+cRqKxSjw5y54MK2AZMgZfJWMaNE4nYUHgi1XEOw==",
"dev": true,
"license": "MIT",
"engines": {
diff --git a/editors/code/package.json b/editors/code/package.json
index a282eea..c8c36cd 100644
--- a/editors/code/package.json
+++ b/editors/code/package.json
@@ -1782,6 +1782,33 @@
{
"title": "hover",
"properties": {
+ "rust-analyzer.hover.memoryLayout.padding": {
+ "markdownDescription": "How to render the padding information in a memory layout hover.",
+ "default": null,
+ "anyOf": [
+ {
+ "type": "null"
+ },
+ {
+ "type": "string",
+ "enum": [
+ "both",
+ "decimal",
+ "hexadecimal"
+ ],
+ "enumDescriptions": [
+ "Render as 12 (0xC)",
+ "Render as 12",
+ "Render as 0xC"
+ ]
+ }
+ ]
+ }
+ }
+ },
+ {
+ "title": "hover",
+ "properties": {
"rust-analyzer.hover.memoryLayout.size": {
"markdownDescription": "How to render the size information in a memory layout hover.",
"default": "both",
@@ -1927,7 +1954,7 @@
"properties": {
"rust-analyzer.imports.prefix": {
"markdownDescription": "The path structure for newly inserted paths to use.",
- "default": "plain",
+ "default": "crate",
"type": "string",
"enum": [
"plain",
diff --git a/rust-version b/rust-version
index 90e4d65..5b47d1b 100644
--- a/rust-version
+++ b/rust-version
@@ -1 +1 @@
-6e23095adf9209614a45f7f75fea36dad7b92afb
+a8e4c68dcb4dc1e48a0db294c5323cab0227fcb9
diff --git a/xtask/Cargo.toml b/xtask/Cargo.toml
index 6195de5..bb7d83c 100644
--- a/xtask/Cargo.toml
+++ b/xtask/Cargo.toml
@@ -14,7 +14,7 @@
xshell.workspace = true
xflags = "0.3.2"
time = { version = "0.3", default-features = false }
-zip = { version = "2.4", default-features = false, features = ["deflate-flate2", "flate2", "time"] }
+zip = { version = "3.0", default-features = false, features = ["deflate-flate2", "time"] }
stdx.workspace = true
proc-macro2 = "1.0.94"
quote = "1.0.40"