Merge pull request #19432 from ShoyuVanilla/issue-19431
fix: Yet another false positive invalid cast diagnostic
diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 645b596..fa08fc2 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -15,7 +15,6 @@
CARGO_NET_RETRY: 10
CI: 1
RUST_BACKTRACE: short
- RUSTFLAGS: "-D warnings"
RUSTUP_MAX_RETRIES: 10
jobs:
@@ -25,7 +24,6 @@
pull-requests: read
outputs:
typescript: ${{ steps.filter.outputs.typescript }}
- proc_macros: ${{ steps.filter.outputs.proc_macros }}
steps:
- uses: actions/checkout@v4
- uses: dorny/paths-filter@1441771bbfdd59dcd748680ee64ebd8faab1a242
@@ -34,17 +32,14 @@
filters: |
typescript:
- 'editors/code/**'
- proc_macros:
- - 'crates/tt/**'
- - 'crates/proc-macro-api/**'
- - 'crates/proc-macro-srv/**'
- - 'crates/proc-macro-srv-cli/**'
proc-macro-srv:
- needs: changes
- if: github.repository == 'rust-lang/rust-analyzer' && needs.changes.outputs.proc_macros == 'true'
+ if: github.repository == 'rust-lang/rust-analyzer'
name: proc-macro-srv
runs-on: ubuntu-latest
+ env:
+ RUSTFLAGS: "-D warnings"
+
steps:
- name: Checkout repository
uses: actions/checkout@v4
@@ -62,17 +57,18 @@
# We don't cache this job, as it will be invalidated every day due to nightly usage
- - name: Bump opt-level
- run: sed -i '/\[profile.dev]/a opt-level=1' Cargo.toml
-
- name: Test
run: cargo test --features sysroot-abi -p proc-macro-srv -p proc-macro-srv-cli -p proc-macro-api -- --quiet
+ - name: Check salsa dependency
+ run: "! (cargo tree -p proc-macro-srv-cli | grep -q salsa)"
+
rust:
if: github.repository == 'rust-lang/rust-analyzer'
name: Rust
runs-on: ${{ matrix.os }}
env:
+ RUSTFLAGS: "-Dwarnings"
CC: deny_c
strategy:
@@ -89,7 +85,7 @@
run: |
rustup update --no-self-update stable
rustup default stable
- rustup component add --toolchain stable rust-src
+ rustup component add --toolchain stable rust-src clippy
# We always use a nightly rustfmt, regardless of channel, because we need
# --file-lines.
rustup toolchain install nightly --profile minimal --component rustfmt
@@ -98,69 +94,34 @@
if: matrix.os == 'ubuntu-latest'
run: echo "::add-matcher::.github/rust.json"
- - name: Cache Dependencies
- uses: Swatinem/rust-cache@27b8ea9368cf428f0bfe41b0876b1a7e809d9844
- with:
- workspaces: |
- . -> target
- crates/proc-macro-srv/proc-macro-test/imp -> target
+ # - name: Cache Dependencies
+ # uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6
+ # with:
+ # workspaces: |
+ # . -> target
+ # ./crates/proc-macro-srv/proc-macro-test/imp -> target
- uses: taiki-e/install-action@nextest
- - name: Bump opt-level
- if: matrix.os == 'ubuntu-latest'
- run: sed -i '/\[profile.dev]/a opt-level=1' Cargo.toml
-
- name: Codegen checks (rust-analyzer)
+ if: matrix.os == 'ubuntu-latest'
run: cargo codegen --check
- name: Compile (tests)
- run: cargo test --no-run --locked
-
- # It's faster to `test` before `build` ¯\_(ツ)_/¯
- - name: Compile (rust-analyzer)
- if: matrix.os == 'ubuntu-latest'
- run: cargo build --quiet
+ run: cargo test --no-run
- name: Test
- if: matrix.os == 'ubuntu-latest' || matrix.os == 'windows-latest' || github.event_name == 'push'
run: cargo nextest run --no-fail-fast --hide-progress-bar --status-level fail
- - name: Switch to stable toolchain
- run: |
- rustup update --no-self-update stable
- rustup component add --toolchain stable rust-src clippy
- rustup default stable
-
- - name: Run analysis-stats on rust-analyzer
- if: matrix.os == 'ubuntu-latest'
- run: target/${{ matrix.target }}/debug/rust-analyzer analysis-stats .
-
- - name: Run analysis-stats on the rust standard libraries
- if: matrix.os == 'ubuntu-latest'
- env:
- RUSTC_BOOTSTRAP: 1
- run: target/${{ matrix.target }}/debug/rust-analyzer analysis-stats --with-deps --no-sysroot --no-test $(rustc --print sysroot)/lib/rustlib/src/rust/library/
-
- name: clippy
- if: matrix.os == 'windows-latest'
+ if: matrix.os == 'macos-latest'
run: cargo clippy --all-targets -- -D clippy::disallowed_macros -D clippy::dbg_macro -D clippy::todo -D clippy::print_stdout -D clippy::print_stderr
- - name: rustfmt
- if: matrix.os == 'ubuntu-latest'
- run: cargo fmt -- --check
-
- # Weird targets to catch non-portable code
- rust-cross:
+ analysis-stats:
if: github.repository == 'rust-lang/rust-analyzer'
- name: Rust Cross
runs-on: ubuntu-latest
-
env:
- targets: "powerpc-unknown-linux-gnu x86_64-unknown-linux-musl"
- # The rust-analyzer binary is not expected to compile on WASM, but the IDE
- # crate should
- targets_ide: "wasm32-unknown-unknown"
+ RUSTC_BOOTSTRAP: 1
steps:
- name: Checkout repository
@@ -169,19 +130,91 @@
- name: Install Rust toolchain
run: |
rustup update --no-self-update stable
- rustup target add ${{ env.targets }} ${{ env.targets_ide }}
+ rustup default stable
+ rustup component add rustfmt
- - name: Cache Dependencies
- uses: Swatinem/rust-cache@9bdad043e88c75890e36ad3bbc8d27f0090dd609
+ # - name: Cache Dependencies
+ # uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6
- - name: Check
+ - name: Bump opt-level
+ run: sed -i '/\[profile.dev]/a opt-level=1' Cargo.toml
+
+ - run: cargo build -p rust-analyzer
+
+ - name: ./rust-analyzer
+ run: ./target/debug/rust-analyzer analysis-stats . -q
+
+ - name: sysroot/lib/rustlib/src/rust/library/
+ run: ./target/debug/rust-analyzer analysis-stats --with-deps --no-sysroot --no-test $(rustc --print sysroot)/lib/rustlib/src/rust/library/ -q
+
+ rustfmt:
+ if: github.repository == 'rust-lang/rust-analyzer'
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v4
+
+ - name: Install Rust toolchain
run: |
- for target in ${{ env.targets }}; do
- cargo check --target=$target --all-targets
- done
- for target in ${{ env.targets_ide }}; do
- cargo check -p ide --target=$target --all-targets
- done
+ rustup update --no-self-update stable
+ rustup default stable
+ rustup component add rustfmt
+
+ - run: cargo fmt -- --check
+
+ miri:
+ if: github.repository == 'rust-lang/rust-analyzer'
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v4
+
+ - name: Install Rust toolchain
+ run: |
+ rustup update --no-self-update nightly
+ rustup default nightly
+ rustup component add miri
+
+ # - name: Cache Dependencies
+ # uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6
+
+ - run: cargo miri test -p intern
+
+ # Weird targets to catch non-portable code
+ rust-cross:
+ if: github.repository == 'rust-lang/rust-analyzer'
+ name: Rust Cross
+ runs-on: ubuntu-latest
+
+ strategy:
+ matrix:
+ target: [powerpc-unknown-linux-gnu, x86_64-unknown-linux-musl, wasm32-unknown-unknown]
+ include:
+ # The rust-analyzer binary is not expected to compile on WASM, but the IDE
+ # crate should
+ - target: wasm32-unknown-unknown
+ ide-only: true
+ env:
+ RUSTFLAGS: "-Dwarnings"
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v4
+
+ - name: Install Rust toolchain
+ run: |
+ rustup update --no-self-update stable
+ rustup target add ${{ matrix.target }}
+
+ # - name: Cache Dependencies
+ # uses: Swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6
+
+ - run: cargo check --target=${{ matrix.target }} --all-targets -p ide
+ if: ${{ matrix.ide-only }}
+ - run: cargo check --target=${{ matrix.target }} --all-targets
+ if: ${{ !matrix.ide-only }}
typescript:
needs: changes
@@ -263,7 +296,7 @@
run: typos
conclusion:
- needs: [rust, rust-cross, typescript, typo-check, proc-macro-srv]
+ needs: [rust, rust-cross, typescript, typo-check, proc-macro-srv, miri, rustfmt, analysis-stats]
# We need to ensure this job does *not* get skipped if its dependencies fail,
# because a skipped job is considered a success by GitHub. So we have to
# overwrite `if:`. We use `!cancelled()` to ensure the job does still not get run
diff --git a/Cargo.lock b/Cargo.lock
index 4310cfc..0d509f5 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -43,6 +43,9 @@
version = "1.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dde20b3d026af13f561bdd0f15edf01fc734f0dafcedbaf42bba506a9517f223"
+dependencies = [
+ "derive_arbitrary",
+]
[[package]]
name = "arrayvec"
@@ -66,7 +69,7 @@
"cfg-if",
"libc",
"miniz_oxide",
- "object 0.36.7",
+ "object",
"rustc-demangle",
"windows-targets 0.52.6",
]
@@ -127,12 +130,6 @@
]
[[package]]
-name = "byteorder"
-version = "1.5.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
-
-[[package]]
name = "camino"
version = "1.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -152,16 +149,16 @@
[[package]]
name = "cargo_metadata"
-version = "0.18.1"
+version = "0.19.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2d886547e41f740c616ae73108f6eb70afe6d940c7bc697cb30f13daec073037"
+checksum = "dd5eb614ed4c27c5d706420e4320fbe3216ab31fa1c33cd8246ac36dae4479ba"
dependencies = [
"camino",
"cargo-platform",
"semver",
"serde",
"serde_json",
- "thiserror",
+ "thiserror 2.0.12",
]
[[package]]
@@ -246,7 +243,7 @@
"chalk-ir",
"ena",
"indexmap",
- "itertools",
+ "itertools 0.12.1",
"petgraph",
"rustc-hash 1.1.0",
"tracing",
@@ -355,9 +352,9 @@
[[package]]
name = "deranged"
-version = "0.3.11"
+version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4"
+checksum = "9c9e6a11ca8224451684bc0d7d5a7adbf8f2fd6887261a1cfc3c0432f9d4068e"
dependencies = [
"powerfmt",
]
@@ -375,32 +372,32 @@
[[package]]
name = "directories"
-version = "5.0.1"
+version = "6.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9a49173b84e034382284f27f1af4dcbbd231ffa358c0fe316541a7337f376a35"
+checksum = "16f5094c54661b38d03bd7e50df373292118db60b585c08a411c6d840017fe7d"
dependencies = [
"dirs-sys",
]
[[package]]
name = "dirs"
-version = "5.0.1"
+version = "6.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "44c45a9d03d6676652bcb5e724c7e988de1acad23a711b5217ab9cbecbec2225"
+checksum = "c3e8aa94d75141228480295a7d0e7feb620b1a5ad9f12bc40be62411e38cce4e"
dependencies = [
"dirs-sys",
]
[[package]]
name = "dirs-sys"
-version = "0.4.1"
+version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "520f05a5cbd335fae5a99ff7a6ab8627577660ee5cfd6a94a6a929b52ff0321c"
+checksum = "e01a3366d27ee9890022452ee61b2b63a67e6f13f58900b651ff5665f0bb1fab"
dependencies = [
"libc",
"option-ext",
"redox_users",
- "windows-sys 0.48.0",
+ "windows-sys 0.59.0",
]
[[package]]
@@ -607,7 +604,7 @@
"hir-ty",
"indexmap",
"intern",
- "itertools",
+ "itertools 0.14.0",
"rustc-hash 2.1.1",
"smallvec",
"span",
@@ -637,7 +634,7 @@
"hir-expand",
"indexmap",
"intern",
- "itertools",
+ "itertools 0.14.0",
"la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"mbe",
"query-group-macro",
@@ -654,6 +651,7 @@
"test-fixture",
"test-utils",
"text-size",
+ "thin-vec",
"tracing",
"triomphe",
"tt",
@@ -669,7 +667,7 @@
"either",
"expect-test",
"intern",
- "itertools",
+ "itertools 0.14.0",
"mbe",
"parser",
"query-group-macro",
@@ -704,7 +702,7 @@
"hir-expand",
"indexmap",
"intern",
- "itertools",
+ "itertools 0.14.0",
"la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"oorandom",
"project-model",
@@ -872,7 +870,7 @@
"ide-db",
"ide-diagnostics",
"ide-ssr",
- "itertools",
+ "itertools 0.14.0",
"nohash-hasher",
"oorandom",
"profile",
@@ -900,7 +898,7 @@
"expect-test",
"hir",
"ide-db",
- "itertools",
+ "itertools 0.14.0",
"smallvec",
"stdx",
"syntax",
@@ -918,7 +916,7 @@
"expect-test",
"hir",
"ide-db",
- "itertools",
+ "itertools 0.14.0",
"smallvec",
"stdx",
"syntax",
@@ -941,7 +939,7 @@
"fst",
"hir",
"indexmap",
- "itertools",
+ "itertools 0.14.0",
"line-index 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
"memchr",
"nohash-hasher",
@@ -971,7 +969,7 @@
"expect-test",
"hir",
"ide-db",
- "itertools",
+ "itertools 0.14.0",
"paths",
"serde_json",
"stdx",
@@ -989,7 +987,7 @@
"expect-test",
"hir",
"ide-db",
- "itertools",
+ "itertools 0.14.0",
"parser",
"syntax",
"test-fixture",
@@ -1020,9 +1018,9 @@
[[package]]
name = "indexmap"
-version = "2.7.1"
+version = "2.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8c9c992b02b5b4c94ea26e32fe5bccb7aa7d9f390ab5c1221ff895bc7ea8b652"
+checksum = "3954d50fe15b02142bf25d3b8bdadb634ec3948f103d04ffe3031bc8fe9d7058"
dependencies = [
"equivalent",
"hashbrown 0.15.2",
@@ -1069,6 +1067,15 @@
]
[[package]]
+name = "itertools"
+version = "0.14.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285"
+dependencies = [
+ "either",
+]
+
+[[package]]
name = "itoa"
version = "1.0.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1076,9 +1083,9 @@
[[package]]
name = "jod-thread"
-version = "0.1.2"
+version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8b23360e99b8717f20aaa4598f5a6541efbe30630039fbc7706cf954a87947ae"
+checksum = "a037eddb7d28de1d0fc42411f501b53b75838d313908078d6698d064f3029b24"
[[package]]
name = "kqueue"
@@ -1118,9 +1125,9 @@
[[package]]
name = "libc"
-version = "0.2.170"
+version = "0.2.171"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "875b3680cb2f8f71bdcf9a30f38d48282f5d3c95cbf9b3fa57269bb5d5c06828"
+checksum = "c19937216e9d3aa9956d9bb8dfc0b0c8beb6058fc4f7a4dc4d850edf86a237d6"
[[package]]
name = "libloading"
@@ -1134,9 +1141,9 @@
[[package]]
name = "libmimalloc-sys"
-version = "0.1.39"
+version = "0.1.40"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "23aa6811d3bd4deb8a84dde645f943476d13b248d818edcf8ce0b2f37f036b44"
+checksum = "07d0e07885d6a754b9c7993f2625187ad694ee985d60f23355ff0e7077261502"
dependencies = [
"cc",
"libc",
@@ -1187,7 +1194,7 @@
"hir-expand",
"ide-db",
"intern",
- "itertools",
+ "itertools 0.14.0",
"proc-macro-api",
"project-model",
"span",
@@ -1302,9 +1309,9 @@
[[package]]
name = "memmap2"
-version = "0.5.10"
+version = "0.9.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "83faa42c0a078c393f6b29d5db232d8be22776a891f8f56e5284faee4a20b327"
+checksum = "fd3f7eed9d3848f8b98834af67102b720745c4ec028fcd0aa0239277e7de374f"
dependencies = [
"libc",
]
@@ -1320,9 +1327,9 @@
[[package]]
name = "mimalloc"
-version = "0.1.43"
+version = "0.1.44"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "68914350ae34959d83f732418d51e2427a794055d0b9529f48259ac07af65633"
+checksum = "99585191385958383e13f6b822e6b6d8d9cf928e7d286ceb092da92b43c87bc1"
dependencies = [
"libmimalloc-sys",
]
@@ -1446,15 +1453,6 @@
[[package]]
name = "object"
-version = "0.33.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d8dd6c0cdf9429bce006e1362bfce61fa1bfd8c898a643ed8d2b471934701d3d"
-dependencies = [
- "memchr",
-]
-
-[[package]]
-name = "object"
version = "0.36.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87"
@@ -1464,9 +1462,9 @@
[[package]]
name = "once_cell"
-version = "1.21.0"
+version = "1.21.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cde51589ab56b20a6f686b2c68f7a0bd6add753d697abf720d63f8db3ab7b1ad"
+checksum = "d75b0bedcc4fe52caa0e03d9f1151a323e4aa5e2d78ba3580400cd3c9e2bc4bc"
[[package]]
name = "oorandom"
@@ -1613,7 +1611,7 @@
"libc",
"libloading",
"memmap2",
- "object 0.33.0",
+ "object",
"paths",
"proc-macro-test",
"ra-ap-rustc_lexer",
@@ -1680,7 +1678,7 @@
"cfg",
"expect-test",
"intern",
- "itertools",
+ "itertools 0.14.0",
"la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"paths",
"rustc-hash 2.1.1",
@@ -1703,7 +1701,7 @@
dependencies = [
"once_cell",
"protobuf-support",
- "thiserror",
+ "thiserror 1.0.69",
]
[[package]]
@@ -1712,7 +1710,7 @@
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b088fd20b938a875ea00843b6faf48579462630015c3788d397ad6a786663252"
dependencies = [
- "thiserror",
+ "thiserror 1.0.69",
]
[[package]]
@@ -1748,9 +1746,9 @@
[[package]]
name = "quote"
-version = "1.0.39"
+version = "1.0.40"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c1f1914ce909e1658d9907913b4b91947430c7d9be598b15a1912935b8c04801"
+checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d"
dependencies = [
"proc-macro2",
]
@@ -1862,13 +1860,13 @@
[[package]]
name = "redox_users"
-version = "0.4.6"
+version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ba009ff324d1fc1b900bd1fdb31564febe58a8ccc8a6fdbb93b543d33b13ca43"
+checksum = "dd6f9d3d47bdd2ad6945c5015a226ec6155d0bcdfd8f7cd29f86b71f8de99d2b"
dependencies = [
"getrandom",
"libredox",
- "thiserror",
+ "thiserror 2.0.12",
]
[[package]]
@@ -1949,7 +1947,7 @@
"ide-ssr",
"indexmap",
"intern",
- "itertools",
+ "itertools 0.14.0",
"load-cargo",
"lsp-server 0.7.8 (registry+https://github.com/rust-lang/crates.io-index)",
"lsp-types",
@@ -2229,7 +2227,7 @@
dependencies = [
"backtrace",
"crossbeam-channel",
- "itertools",
+ "itertools 0.14.0",
"jod-thread",
"libc",
"miow",
@@ -2265,7 +2263,7 @@
dependencies = [
"either",
"expect-test",
- "itertools",
+ "itertools 0.14.0",
"parser",
"ra-ap-rustc_lexer",
"rayon",
@@ -2307,6 +2305,7 @@
"cfg",
"hir-expand",
"intern",
+ "paths",
"rustc-hash 2.1.1",
"span",
"stdx",
@@ -2334,12 +2333,27 @@
checksum = "f18aa187839b2bdb1ad2fa35ead8c4c2976b64e4363c386d45ac0f7ee85c9233"
[[package]]
+name = "thin-vec"
+version = "0.2.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "144f754d318415ac792f9d69fc87abbbfc043ce2ef041c60f16ad828f638717d"
+
+[[package]]
name = "thiserror"
version = "1.0.69"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52"
dependencies = [
- "thiserror-impl",
+ "thiserror-impl 1.0.69",
+]
+
+[[package]]
+name = "thiserror"
+version = "2.0.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "567b8a2dae586314f7be2a752ec7474332959c6460e02bde30d702a66d488708"
+dependencies = [
+ "thiserror-impl 2.0.12",
]
[[package]]
@@ -2354,6 +2368,17 @@
]
[[package]]
+name = "thiserror-impl"
+version = "2.0.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7f7cf42b4507d8ea322120659672cf1b9dbb93f8f2d4ecfd6e51350ff5b17a1d"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
name = "thread_local"
version = "1.1.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2396,9 +2421,9 @@
[[package]]
name = "time"
-version = "0.3.39"
+version = "0.3.40"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dad298b01a40a23aac4580b67e3dbedb7cc8402f3592d7f49469de2ea4aecdd8"
+checksum = "9d9c75b47bdff86fa3334a3db91356b8d7d86a9b839dab7d0bdc5c3d3a077618"
dependencies = [
"deranged",
"itoa",
@@ -2413,15 +2438,15 @@
[[package]]
name = "time-core"
-version = "0.1.3"
+version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "765c97a5b985b7c11d7bc27fa927dc4fe6af3a6dfb021d28deb60d3bf51e76ef"
+checksum = "c9e9a38711f559d9e3ce1cdb06dd7c5b8ea546bc90052da6d06bb76da74bb07c"
[[package]]
name = "time-macros"
-version = "0.2.20"
+version = "0.2.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e8093bc3e81c3bc5f7879de09619d06c9a5a5e45ca44dfeeb7225bae38005c5c"
+checksum = "29aa485584182073ed57fd5004aa09c371f021325014694e432313345865fd04"
dependencies = [
"num-conv",
"time-core",
@@ -2543,9 +2568,9 @@
[[package]]
name = "tracing-tree"
-version = "0.3.1"
+version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b56c62d2c80033cb36fae448730a2f2ef99410fe3ecbffc916681a32f6807dbe"
+checksum = "f459ca79f1b0d5f71c54ddfde6debfc59c8b6eeb46808ae492077f739dc7b49c"
dependencies = [
"nu-ansi-term 0.50.1",
"tracing-core",
@@ -3123,7 +3148,7 @@
"edition",
"either",
"flate2",
- "itertools",
+ "itertools 0.14.0",
"proc-macro2",
"quote",
"stdx",
@@ -3204,13 +3229,17 @@
[[package]]
name = "zip"
-version = "0.6.6"
+version = "2.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "760394e246e4c28189f19d488c058bf16f564016aefac5d32bb1f3b51d5e9261"
+checksum = "fabe6324e908f85a1c52063ce7aa26b68dcb7eb6dbc83a2d148403c9bc3eba50"
dependencies = [
- "byteorder",
+ "arbitrary",
"crc32fast",
"crossbeam-utils",
+ "displaydoc",
"flate2",
+ "indexmap",
+ "memchr",
+ "thiserror 2.0.12",
"time",
]
diff --git a/Cargo.toml b/Cargo.toml
index 03ecc8f..d9c57b4 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -85,7 +85,6 @@
vfs = { path = "./crates/vfs", version = "0.0.0" }
edition = { path = "./crates/edition", version = "0.0.0" }
-ra-ap-rustc_hashes = { version = "0.100", default-features = false }
ra-ap-rustc_lexer = { version = "0.100", default-features = false }
ra-ap-rustc_parse_format = { version = "0.100", default-features = false }
ra-ap-rustc_index = { version = "0.100", default-features = false }
@@ -97,60 +96,57 @@
# in-tree crates that are published separately and follow semver. See lib/README.md
line-index = { version = "0.1.2" }
la-arena = { version = "0.3.1" }
-lsp-server = { version = "0.7.6" }
+lsp-server = { version = "0.7.8" }
# non-local crates
-anyhow = "1.0.75"
-arrayvec = "0.7.4"
-bitflags = "2.4.1"
-cargo_metadata = "0.18.1"
-camino = "1.1.6"
+anyhow = "1.0.97"
+arrayvec = "0.7.6"
+bitflags = "2.9.0"
+cargo_metadata = "0.19.2"
+camino = "1.1.9"
chalk-solve = { version = "0.100.0", default-features = false }
chalk-ir = "0.100.0"
chalk-recursive = { version = "0.100.0", default-features = false }
chalk-derive = "0.100.0"
-crossbeam-channel = "0.5.8"
-dissimilar = "1.0.7"
+crossbeam-channel = "0.5.14"
+dissimilar = "1.0.10"
dot = "0.1.4"
-either = "1.9.0"
-expect-test = "1.4.0"
-hashbrown = { version = "0.14", features = [
- "inline-more",
-], default-features = false }
-indexmap = { version = "2.1.0", features = ["serde"] }
-itertools = "0.12.0"
-libc = "0.2.150"
-libloading = "0.8.0"
-memmap2 = "0.5.4"
+either = "1.15.0"
+expect-test = "1.5.1"
+indexmap = { version = "2.8.0", features = ["serde"] }
+itertools = "0.14.0"
+libc = "0.2.171"
+libloading = "0.8.6"
+memmap2 = "0.9.5"
nohash-hasher = "0.2.0"
-oorandom = "11.1.3"
-object = { version = "0.33.0", default-features = false, features = [
+oorandom = "11.1.5"
+object = { version = "0.36.7", default-features = false, features = [
"std",
"read_core",
"elf",
"macho",
"pe",
] }
-process-wrap = { version = "8.0.2", features = ["std"] }
+process-wrap = { version = "8.2.0", features = ["std"] }
pulldown-cmark-to-cmark = "10.0.4"
-pulldown-cmark = { version = "0.9.0", default-features = false }
-rayon = "1.8.0"
+pulldown-cmark = { version = "0.9.6", default-features = false }
+rayon = "1.10.0"
salsa = "0.19"
-rustc-hash = "2.0.0"
-semver = "1.0.14"
-serde = { version = "1.0.192" }
-serde_derive = { version = "1.0.192" }
-serde_json = "1.0.108"
-smallvec = { version = "1.10.0", features = [
+rustc-hash = "2.1.1"
+semver = "1.0.26"
+serde = { version = "1.0.219" }
+serde_derive = { version = "1.0.219" }
+serde_json = "1.0.140"
+smallvec = { version = "1.14.0", features = [
"const_new",
"union",
"const_generics",
] }
smol_str = "0.3.2"
text-size = "1.1.1"
-tracing = "0.1.40"
-tracing-tree = "0.3.0"
-tracing-subscriber = { version = "0.3.18", default-features = false, features = [
+tracing = "0.1.41"
+tracing-tree = "0.4.0"
+tracing-subscriber = { version = "0.3.19", default-features = false, features = [
"registry",
"fmt",
"local-time",
@@ -159,12 +155,16 @@
"tracing-log",
] }
triomphe = { version = "0.1.14", default-features = false, features = ["std"] }
-url = "2.3.1"
-xshell = "0.2.5"
+url = "2.5.4"
+xshell = "0.2.7"
# We need to freeze the version of the crate, as the raw-api feature is considered unstable
dashmap = { version = "=5.5.3", features = ["raw-api"] }
+# We need to freeze the version of the crate, as it needs to match dashmap
+hashbrown = { version = "=0.14.5", features = [
+ "inline-more",
+], default-features = false }
[workspace.lints.rust]
# remember to update RUSTFLAGS in ci.yml if you add something here
diff --git a/crates/base-db/src/change.rs b/crates/base-db/src/change.rs
index b5964ff..da2fb27 100644
--- a/crates/base-db/src/change.rs
+++ b/crates/base-db/src/change.rs
@@ -34,10 +34,6 @@
}
impl FileChange {
- pub fn new() -> Self {
- FileChange::default()
- }
-
pub fn set_roots(&mut self, roots: Vec<SourceRoot>) {
self.roots = Some(roots);
}
@@ -55,7 +51,7 @@
if let Some(roots) = self.roots {
for (idx, root) in roots.into_iter().enumerate() {
let root_id = SourceRootId(idx as u32);
- let durability = durability(&root);
+ let durability = source_root_durability(&root);
for file_id in root.iter() {
db.set_file_source_root_with_durability(file_id, root_id, durability);
}
@@ -68,7 +64,7 @@
let source_root_id = db.file_source_root(file_id);
let source_root = db.source_root(source_root_id.source_root_id(db));
- let durability = durability(&source_root.source_root(db));
+ let durability = file_text_durability(&source_root.source_root(db));
// XXX: can't actually remove the file, just reset the text
let text = text.unwrap_or_default();
db.set_file_text_with_durability(file_id, &text, durability)
@@ -81,6 +77,10 @@
}
}
-fn durability(source_root: &SourceRoot) -> Durability {
+fn source_root_durability(source_root: &SourceRoot) -> Durability {
+ if source_root.is_library { Durability::MEDIUM } else { Durability::LOW }
+}
+
+fn file_text_durability(source_root: &SourceRoot) -> Durability {
if source_root.is_library { Durability::HIGH } else { Durability::LOW }
}
diff --git a/crates/base-db/src/input.rs b/crates/base-db/src/input.rs
index 2fe4f68..cfc22b7 100644
--- a/crates/base-db/src/input.rs
+++ b/crates/base-db/src/input.rs
@@ -303,9 +303,11 @@
pub dependencies: Vec<Dependency<Id>>,
pub origin: CrateOrigin,
pub is_proc_macro: bool,
- /// The working directory to run proc-macros in. This is the workspace root of the cargo workspace
- /// for workspace members, the crate manifest dir otherwise.
- pub proc_macro_cwd: Option<AbsPathBuf>,
+ /// The working directory to run proc-macros invoked in the context of this crate.
+ /// This is the workspace root of the cargo workspace for workspace members, the crate manifest
+ /// dir otherwise.
+ // FIXME: This ought to be a `VfsPath` or something opaque.
+ pub proc_macro_cwd: Arc<AbsPathBuf>,
}
pub type CrateDataBuilder = CrateData<CrateBuilderId>;
@@ -425,7 +427,7 @@
mut env: Env,
origin: CrateOrigin,
is_proc_macro: bool,
- proc_macro_cwd: Option<AbsPathBuf>,
+ proc_macro_cwd: Arc<AbsPathBuf>,
ws_data: Arc<CrateWorkspaceData>,
) -> CrateBuilderId {
env.entries.shrink_to_fit();
@@ -491,7 +493,7 @@
if **old_all_crates != *all_crates {
db.set_all_crates_with_durability(
Arc::new(all_crates.into_boxed_slice()),
- Durability::HIGH,
+ Durability::MEDIUM,
);
}
@@ -549,30 +551,30 @@
Entry::Occupied(entry) => {
let old_crate = *entry.get();
if crate_data != *old_crate.data(db) {
- old_crate.set_data(db).with_durability(Durability::HIGH).to(crate_data);
+ old_crate.set_data(db).with_durability(Durability::MEDIUM).to(crate_data);
}
if krate.extra != *old_crate.extra_data(db) {
old_crate
.set_extra_data(db)
- .with_durability(Durability::HIGH)
+ .with_durability(Durability::MEDIUM)
.to(krate.extra.clone());
}
if krate.cfg_options != *old_crate.cfg_options(db) {
old_crate
.set_cfg_options(db)
- .with_durability(Durability::HIGH)
+ .with_durability(Durability::MEDIUM)
.to(krate.cfg_options.clone());
}
if krate.env != *old_crate.env(db) {
old_crate
.set_env(db)
- .with_durability(Durability::HIGH)
+ .with_durability(Durability::MEDIUM)
.to(krate.env.clone());
}
if krate.ws_data != *old_crate.workspace_data(db) {
old_crate
.set_workspace_data(db)
- .with_durability(Durability::HIGH)
+ .with_durability(Durability::MEDIUM)
.to(krate.ws_data.clone());
}
old_crate
@@ -585,7 +587,7 @@
krate.cfg_options.clone(),
krate.env.clone(),
)
- .durability(Durability::HIGH)
+ .durability(Durability::MEDIUM)
.new(db);
entry.insert(input);
input
@@ -861,6 +863,7 @@
#[cfg(test)]
mod tests {
use triomphe::Arc;
+ use vfs::AbsPathBuf;
use crate::{CrateWorkspaceData, DependencyBuilder};
@@ -883,7 +886,7 @@
Env::default(),
CrateOrigin::Local { repo: None, name: None },
false,
- None,
+ Arc::new(AbsPathBuf::assert_utf8(std::env::current_dir().unwrap())),
empty_ws_data(),
);
let crate2 = graph.add_crate_root(
@@ -896,7 +899,7 @@
Env::default(),
CrateOrigin::Local { repo: None, name: None },
false,
- None,
+ Arc::new(AbsPathBuf::assert_utf8(std::env::current_dir().unwrap())),
empty_ws_data(),
);
let crate3 = graph.add_crate_root(
@@ -909,7 +912,7 @@
Env::default(),
CrateOrigin::Local { repo: None, name: None },
false,
- None,
+ Arc::new(AbsPathBuf::assert_utf8(std::env::current_dir().unwrap())),
empty_ws_data(),
);
assert!(
@@ -942,7 +945,7 @@
Env::default(),
CrateOrigin::Local { repo: None, name: None },
false,
- None,
+ Arc::new(AbsPathBuf::assert_utf8(std::env::current_dir().unwrap())),
empty_ws_data(),
);
let crate2 = graph.add_crate_root(
@@ -955,7 +958,7 @@
Env::default(),
CrateOrigin::Local { repo: None, name: None },
false,
- None,
+ Arc::new(AbsPathBuf::assert_utf8(std::env::current_dir().unwrap())),
empty_ws_data(),
);
assert!(
@@ -983,7 +986,7 @@
Env::default(),
CrateOrigin::Local { repo: None, name: None },
false,
- None,
+ Arc::new(AbsPathBuf::assert_utf8(std::env::current_dir().unwrap())),
empty_ws_data(),
);
let crate2 = graph.add_crate_root(
@@ -996,7 +999,7 @@
Env::default(),
CrateOrigin::Local { repo: None, name: None },
false,
- None,
+ Arc::new(AbsPathBuf::assert_utf8(std::env::current_dir().unwrap())),
empty_ws_data(),
);
let crate3 = graph.add_crate_root(
@@ -1009,7 +1012,7 @@
Env::default(),
CrateOrigin::Local { repo: None, name: None },
false,
- None,
+ Arc::new(AbsPathBuf::assert_utf8(std::env::current_dir().unwrap())),
empty_ws_data(),
);
assert!(
@@ -1037,7 +1040,7 @@
Env::default(),
CrateOrigin::Local { repo: None, name: None },
false,
- None,
+ Arc::new(AbsPathBuf::assert_utf8(std::env::current_dir().unwrap())),
empty_ws_data(),
);
let crate2 = graph.add_crate_root(
@@ -1050,7 +1053,7 @@
Env::default(),
CrateOrigin::Local { repo: None, name: None },
false,
- None,
+ Arc::new(AbsPathBuf::assert_utf8(std::env::current_dir().unwrap())),
empty_ws_data(),
);
assert!(
diff --git a/crates/base-db/src/lib.rs b/crates/base-db/src/lib.rs
index e6059e9..9fbeace 100644
--- a/crates/base-db/src/lib.rs
+++ b/crates/base-db/src/lib.rs
@@ -64,8 +64,7 @@
}
pub fn set_file_text(&self, db: &mut dyn SourceDatabase, file_id: vfs::FileId, text: &str) {
- let files = Arc::clone(&self.files);
- match files.entry(file_id) {
+ match self.files.entry(file_id) {
Entry::Occupied(mut occupied) => {
occupied.get_mut().set_text(db).to(Arc::from(text));
}
@@ -83,8 +82,7 @@
text: &str,
durability: Durability,
) {
- let files = Arc::clone(&self.files);
- match files.entry(file_id) {
+ match self.files.entry(file_id) {
Entry::Occupied(mut occupied) => {
occupied.get_mut().set_text(db).with_durability(durability).to(Arc::from(text));
}
@@ -113,8 +111,7 @@
source_root: Arc<SourceRoot>,
durability: Durability,
) {
- let source_roots = Arc::clone(&self.source_roots);
- match source_roots.entry(source_root_id) {
+ match self.source_roots.entry(source_root_id) {
Entry::Occupied(mut occupied) => {
occupied.get_mut().set_source_root(db).with_durability(durability).to(source_root);
}
@@ -141,9 +138,7 @@
source_root_id: SourceRootId,
durability: Durability,
) {
- let file_source_roots = Arc::clone(&self.file_source_roots);
- // let db = self;
- match file_source_roots.entry(id) {
+ match self.file_source_roots.entry(id) {
Entry::Occupied(mut occupied) => {
occupied
.get_mut()
@@ -203,7 +198,8 @@
fn parse(&self, file_id: EditionedFileId) -> Parse<ast::SourceFile>;
/// Returns the set of errors obtained from parsing the file including validation errors.
- fn parse_errors(&self, file_id: EditionedFileId) -> Option<Arc<[SyntaxError]>>;
+ #[salsa::transparent]
+ fn parse_errors(&self, file_id: EditionedFileId) -> Option<&[SyntaxError]>;
#[salsa::transparent]
fn toolchain_channel(&self, krate: Crate) -> Option<ReleaseChannel>;
@@ -307,6 +303,19 @@
pub toolchain: Option<Version>,
}
+impl CrateWorkspaceData {
+ pub fn is_atleast_187(&self) -> bool {
+ const VERSION_187: Version = Version {
+ major: 1,
+ minor: 87,
+ patch: 0,
+ pre: Prerelease::EMPTY,
+ build: BuildMetadata::EMPTY,
+ };
+ self.toolchain.as_ref().map_or(false, |v| *v >= VERSION_187)
+ }
+}
+
fn toolchain_channel(db: &dyn RootQueryDb, krate: Crate) -> Option<ReleaseChannel> {
krate.workspace_data(db).toolchain.as_ref().and_then(|v| ReleaseChannel::from_str(&v.pre))
}
@@ -318,12 +327,16 @@
ast::SourceFile::parse(&text, edition)
}
-fn parse_errors(db: &dyn RootQueryDb, file_id: EditionedFileId) -> Option<Arc<[SyntaxError]>> {
- let errors = db.parse(file_id).errors();
- match &*errors {
- [] => None,
- [..] => Some(errors.into()),
+fn parse_errors(db: &dyn RootQueryDb, file_id: EditionedFileId) -> Option<&[SyntaxError]> {
+ #[salsa::tracked(return_ref)]
+ fn parse_errors(db: &dyn RootQueryDb, file_id: EditionedFileId) -> Option<Box<[SyntaxError]>> {
+ let errors = db.parse(file_id).errors();
+ match &*errors {
+ [] => None,
+ [..] => Some(errors.into()),
+ }
}
+ parse_errors(db, file_id).as_ref().map(|it| &**it)
}
fn source_root_crates(db: &dyn RootQueryDb, id: SourceRootId) -> Arc<[Crate]> {
diff --git a/crates/cfg/Cargo.toml b/crates/cfg/Cargo.toml
index e887368..d7764a1 100644
--- a/crates/cfg/Cargo.toml
+++ b/crates/cfg/Cargo.toml
@@ -20,13 +20,13 @@
intern.workspace = true
[dev-dependencies]
-expect-test = "1.4.1"
-oorandom = "11.1.3"
+expect-test = "1.5.1"
+oorandom = "11.1.5"
# We depend on both individually instead of using `features = ["derive"]` to microoptimize the
# build graph: if the feature was enabled, syn would be built early on in the graph if `smolstr`
# supports `arbitrary`. This way, we avoid feature unification.
-arbitrary = "1.3.2"
-derive_arbitrary = "1.3.2"
+arbitrary = "1.4.1"
+derive_arbitrary = "1.4.1"
# local deps
syntax-bridge.workspace = true
diff --git a/crates/hir-def/Cargo.toml b/crates/hir-def/Cargo.toml
index 628a78c..f97597f 100644
--- a/crates/hir-def/Cargo.toml
+++ b/crates/hir-def/Cargo.toml
@@ -14,7 +14,7 @@
[dependencies]
arrayvec.workspace = true
bitflags.workspace = true
-cov-mark = "2.0.0-pre.1"
+cov-mark = "2.0.0"
drop_bomb = "0.1.5"
either.workspace = true
fst = { version = "0.4.7", default-features = false }
@@ -25,7 +25,7 @@
tracing.workspace = true
smallvec.workspace = true
triomphe.workspace = true
-rustc_apfloat = "0.2.0"
+rustc_apfloat = "0.2.2"
text-size.workspace = true
salsa.workspace = true
query-group.workspace = true
@@ -43,6 +43,7 @@
cfg.workspace = true
tt.workspace = true
span.workspace = true
+thin-vec = "0.2.14"
[dev-dependencies]
expect-test.workspace = true
diff --git a/crates/hir-def/src/data.rs b/crates/hir-def/src/data.rs
index b251564..e1fe13f 100644
--- a/crates/hir-def/src/data.rs
+++ b/crates/hir-def/src/data.rs
@@ -12,7 +12,7 @@
ConstId, ExternCrateId, FunctionId, HasModule, ImplId, ItemContainerId, ItemLoc, Lookup,
Macro2Id, MacroRulesId, ProcMacroId, StaticId, TraitAliasId, TraitId, TypeAliasId,
db::DefDatabase,
- item_tree::{self, FnFlags, ModItem},
+ item_tree::{self, FnFlags, ModItem, StaticFlags},
nameres::proc_macro::{ProcMacroKind, parse_macro_name_and_helper_attrs},
path::ImportAlias,
type_ref::{TraitRef, TypeBound, TypeRefId, TypesMap},
@@ -27,9 +27,8 @@
pub visibility: RawVisibility,
pub abi: Option<Symbol>,
pub legacy_const_generics_indices: Option<Box<Box<[u32]>>>,
- pub rustc_allow_incoherent_impl: bool,
pub types_map: Arc<TypesMap>,
- flags: FnFlags,
+ pub flags: FnFlags,
}
impl FunctionData {
@@ -72,7 +71,9 @@
}
let attrs = item_tree.attrs(db, krate, ModItem::from(loc.id.value).into());
- let rustc_allow_incoherent_impl = attrs.by_key(&sym::rustc_allow_incoherent_impl).exists();
+ if attrs.by_key(&sym::rustc_allow_incoherent_impl).exists() {
+ flags |= FnFlags::RUSTC_ALLOW_INCOHERENT_IMPL;
+ }
if flags.contains(FnFlags::HAS_UNSAFE_KW)
&& attrs.by_key(&sym::rustc_deprecated_safe_2024).exists()
{
@@ -101,51 +102,65 @@
legacy_const_generics_indices: attrs.rustc_legacy_const_generics(),
types_map: func.types_map.clone(),
flags,
- rustc_allow_incoherent_impl,
})
}
+ #[inline]
pub fn has_body(&self) -> bool {
self.flags.contains(FnFlags::HAS_BODY)
}
/// True if the first param is `self`. This is relevant to decide whether this
/// can be called as a method.
+ #[inline]
pub fn has_self_param(&self) -> bool {
self.flags.contains(FnFlags::HAS_SELF_PARAM)
}
+ #[inline]
pub fn is_default(&self) -> bool {
self.flags.contains(FnFlags::HAS_DEFAULT_KW)
}
+ #[inline]
pub fn is_const(&self) -> bool {
self.flags.contains(FnFlags::HAS_CONST_KW)
}
+ #[inline]
pub fn is_async(&self) -> bool {
self.flags.contains(FnFlags::HAS_ASYNC_KW)
}
+ #[inline]
pub fn is_unsafe(&self) -> bool {
self.flags.contains(FnFlags::HAS_UNSAFE_KW)
}
+ #[inline]
pub fn is_deprecated_safe_2024(&self) -> bool {
self.flags.contains(FnFlags::DEPRECATED_SAFE_2024)
}
+ #[inline]
pub fn is_safe(&self) -> bool {
self.flags.contains(FnFlags::HAS_SAFE_KW)
}
+ #[inline]
pub fn is_varargs(&self) -> bool {
self.flags.contains(FnFlags::IS_VARARGS)
}
+ #[inline]
pub fn has_target_feature(&self) -> bool {
self.flags.contains(FnFlags::HAS_TARGET_FEATURE)
}
+
+ #[inline]
+ pub fn rustc_allow_incoherent_impl(&self) -> bool {
+ self.flags.contains(FnFlags::RUSTC_ALLOW_INCOHERENT_IMPL)
+ }
}
#[derive(Debug, Clone, PartialEq, Eq)]
@@ -153,15 +168,37 @@
pub name: Name,
pub type_ref: Option<TypeRefId>,
pub visibility: RawVisibility,
- pub is_extern: bool,
- pub rustc_has_incoherent_inherent_impls: bool,
- pub rustc_allow_incoherent_impl: bool,
+ pub flags: TypeAliasFlags,
/// Bounds restricting the type alias itself (eg. `type Ty: Bound;` in a trait or impl).
pub bounds: Box<[TypeBound]>,
pub types_map: Arc<TypesMap>,
}
+bitflags::bitflags! {
+ #[derive(Debug, Clone, PartialEq, Eq)]
+ pub struct TypeAliasFlags: u8 {
+ const IS_EXTERN = 1 << 0;
+ const RUSTC_HAS_INCOHERENT_INHERENT_IMPLS = 1 << 1;
+ const RUSTC_ALLOW_INCOHERENT_IMPL = 1 << 2;
+ }
+}
+
impl TypeAliasData {
+ #[inline]
+ pub fn is_extern(&self) -> bool {
+ self.flags.contains(TypeAliasFlags::IS_EXTERN)
+ }
+
+ #[inline]
+ pub fn rustc_has_incoherent_inherent_impls(&self) -> bool {
+ self.flags.contains(TypeAliasFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS)
+ }
+
+ #[inline]
+ pub fn rustc_allow_incoherent_impl(&self) -> bool {
+ self.flags.contains(TypeAliasFlags::RUSTC_ALLOW_INCOHERENT_IMPL)
+ }
+
pub(crate) fn type_alias_data_query(
db: &dyn DefDatabase,
typ: TypeAliasId,
@@ -180,17 +217,24 @@
loc.container.module(db).krate(),
ModItem::from(loc.id.value).into(),
);
- let rustc_has_incoherent_inherent_impls =
- attrs.by_key(&sym::rustc_has_incoherent_inherent_impls).exists();
- let rustc_allow_incoherent_impl = attrs.by_key(&sym::rustc_allow_incoherent_impl).exists();
+
+ let mut flags = TypeAliasFlags::empty();
+
+ if matches!(loc.container, ItemContainerId::ExternBlockId(_)) {
+ flags |= TypeAliasFlags::IS_EXTERN;
+ }
+ if attrs.by_key(&sym::rustc_has_incoherent_inherent_impls).exists() {
+ flags |= TypeAliasFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS;
+ }
+ if attrs.by_key(&sym::rustc_allow_incoherent_impl).exists() {
+ flags |= TypeAliasFlags::RUSTC_ALLOW_INCOHERENT_IMPL;
+ }
Arc::new(TypeAliasData {
name: typ.name.clone(),
type_ref: typ.type_ref,
visibility,
- is_extern: matches!(loc.container, ItemContainerId::ExternBlockId(_)),
- rustc_has_incoherent_inherent_impls,
- rustc_allow_incoherent_impl,
+ flags,
bounds: typ.bounds.clone(),
types_map: typ.types_map.clone(),
})
@@ -199,7 +243,7 @@
bitflags::bitflags! {
#[derive(Debug, Clone, Copy, Eq, PartialEq, Default)]
- pub struct TraitFlags: u8 {
+ pub struct TraitFlags: u16 {
const IS_AUTO = 1 << 0;
const IS_UNSAFE = 1 << 1;
const IS_FUNDAMENTAL = 1 << 2;
@@ -332,9 +376,9 @@
let loc = makro.lookup(db);
let item_tree = loc.id.item_tree(db);
let makro = &item_tree[loc.id.value];
+ let attrs = item_tree.attrs(db, loc.container.krate(), ModItem::from(loc.id.value).into());
- let helpers = item_tree
- .attrs(db, loc.container.krate(), ModItem::from(loc.id.value).into())
+ let helpers = attrs
.by_key(&sym::rustc_builtin_macro)
.tt_values()
.next()
@@ -362,11 +406,9 @@
let loc = makro.lookup(db);
let item_tree = loc.id.item_tree(db);
let makro = &item_tree[loc.id.value];
+ let attrs = item_tree.attrs(db, loc.container.krate(), ModItem::from(loc.id.value).into());
- let macro_export = item_tree
- .attrs(db, loc.container.krate(), ModItem::from(loc.id.value).into())
- .by_key(&sym::macro_export)
- .exists();
+ let macro_export = attrs.by_key(&sym::macro_export).exists();
Arc::new(MacroRulesData { name: makro.name.clone(), macro_export })
}
@@ -387,11 +429,9 @@
let loc = makro.lookup(db);
let item_tree = loc.id.item_tree(db);
let makro = &item_tree[loc.id.value];
+ let attrs = item_tree.attrs(db, loc.container.krate(), ModItem::from(loc.id.value).into());
- let (name, helpers) = if let Some(def) = item_tree
- .attrs(db, loc.container.krate(), ModItem::from(loc.id.value).into())
- .parse_proc_macro_decl(&makro.name)
- {
+ let (name, helpers) = if let Some(def) = attrs.parse_proc_macro_decl(&makro.name) {
(
def.name,
match def.kind {
@@ -404,6 +444,7 @@
stdx::never!("proc macro declaration is not a proc macro");
(makro.name.clone(), None)
};
+
Arc::new(ProcMacroData { name, helpers })
}
}
@@ -450,9 +491,16 @@
pub name: Option<Name>,
pub type_ref: TypeRefId,
pub visibility: RawVisibility,
- pub rustc_allow_incoherent_impl: bool,
- pub has_body: bool,
pub types_map: Arc<TypesMap>,
+ pub flags: ConstFlags,
+}
+
+bitflags::bitflags! {
+ #[derive(Debug, Clone, Copy, PartialEq, Eq)]
+ pub struct ConstFlags: u8 {
+ const RUSTC_ALLOW_INCOHERENT_IMPL = 1 << 0;
+ const HAS_BODY = 1 << 1;
+ }
}
impl ConstData {
@@ -465,21 +513,38 @@
} else {
item_tree[konst.visibility].clone()
};
+ let attrs = item_tree.attrs(
+ db,
+ loc.container.module(db).krate(),
+ ModItem::from(loc.id.value).into(),
+ );
- let rustc_allow_incoherent_impl = item_tree
- .attrs(db, loc.container.module(db).krate(), ModItem::from(loc.id.value).into())
- .by_key(&sym::rustc_allow_incoherent_impl)
- .exists();
+ let mut flags = ConstFlags::empty();
+ if attrs.by_key(&sym::rustc_allow_incoherent_impl).exists() {
+ flags |= ConstFlags::RUSTC_ALLOW_INCOHERENT_IMPL;
+ }
+ if konst.has_body {
+ flags |= ConstFlags::HAS_BODY;
+ }
Arc::new(ConstData {
name: konst.name.clone(),
type_ref: konst.type_ref,
visibility,
- rustc_allow_incoherent_impl,
- has_body: konst.has_body,
+ flags,
types_map: konst.types_map.clone(),
})
}
+
+ #[inline]
+ pub fn rustc_allow_incoherent_impl(&self) -> bool {
+ self.flags.contains(ConstFlags::RUSTC_ALLOW_INCOHERENT_IMPL)
+ }
+
+ #[inline]
+ pub fn has_body(&self) -> bool {
+ self.flags.contains(ConstFlags::HAS_BODY)
+ }
}
#[derive(Debug, Clone, PartialEq, Eq)]
@@ -487,11 +552,8 @@
pub name: Name,
pub type_ref: TypeRefId,
pub visibility: RawVisibility,
- pub mutable: bool,
- pub is_extern: bool,
- pub has_safe_kw: bool,
- pub has_unsafe_kw: bool,
pub types_map: Arc<TypesMap>,
+ pub flags: StaticFlags,
}
impl StaticData {
@@ -500,17 +562,39 @@
let item_tree = loc.id.item_tree(db);
let statik = &item_tree[loc.id.value];
+ let mut flags = statik.flags;
+ if matches!(loc.container, ItemContainerId::ExternBlockId(_)) {
+ flags |= StaticFlags::IS_EXTERN;
+ }
+
Arc::new(StaticData {
name: statik.name.clone(),
type_ref: statik.type_ref,
visibility: item_tree[statik.visibility].clone(),
- mutable: statik.mutable,
- is_extern: matches!(loc.container, ItemContainerId::ExternBlockId(_)),
- has_safe_kw: statik.has_safe_kw,
- has_unsafe_kw: statik.has_unsafe_kw,
+ flags,
types_map: statik.types_map.clone(),
})
}
+
+ #[inline]
+ pub fn is_extern(&self) -> bool {
+ self.flags.contains(StaticFlags::IS_EXTERN)
+ }
+
+ #[inline]
+ pub fn mutable(&self) -> bool {
+ self.flags.contains(StaticFlags::MUTABLE)
+ }
+
+ #[inline]
+ pub fn has_safe_kw(&self) -> bool {
+ self.flags.contains(StaticFlags::HAS_SAFE_KW)
+ }
+
+ #[inline]
+ pub fn has_unsafe_kw(&self) -> bool {
+ self.flags.contains(StaticFlags::HAS_UNSAFE_KW)
+ }
}
fn trait_vis(db: &dyn DefDatabase, trait_id: TraitId) -> RawVisibility {
diff --git a/crates/hir-def/src/data/adt.rs b/crates/hir-def/src/data/adt.rs
index 8ea7940..a54d766 100644
--- a/crates/hir-def/src/data/adt.rs
+++ b/crates/hir-def/src/data/adt.rs
@@ -171,6 +171,7 @@
pub name: Name,
pub type_ref: TypeRefId,
pub visibility: RawVisibility,
+ pub is_unsafe: bool,
}
fn repr_from_value(
@@ -225,6 +226,7 @@
let repr = repr_from_value(db, krate, &item_tree, ModItem::from(loc.id.value).into());
let attrs = item_tree.attrs(db, krate, ModItem::from(loc.id.value).into());
let mut flags = StructFlags::NO_FLAGS;
+
if attrs.by_key(&sym::rustc_has_incoherent_inherent_impls).exists() {
flags |= StructFlags::IS_RUSTC_HAS_INCOHERENT_INHERENT_IMPL;
}
@@ -289,10 +291,10 @@
let krate = loc.container.krate;
let item_tree = loc.id.item_tree(db);
let repr = repr_from_value(db, krate, &item_tree, ModItem::from(loc.id.value).into());
- let rustc_has_incoherent_inherent_impls = item_tree
- .attrs(db, loc.container.krate, ModItem::from(loc.id.value).into())
- .by_key(&sym::rustc_has_incoherent_inherent_impls)
- .exists();
+ let attrs = item_tree.attrs(db, loc.container.krate, ModItem::from(loc.id.value).into());
+
+ let rustc_has_incoherent_inherent_impls =
+ attrs.by_key(&sym::rustc_has_incoherent_inherent_impls).exists();
let enum_ = &item_tree[loc.id.value];
@@ -329,14 +331,14 @@
impl VariantData {
pub fn fields(&self) -> &Arena<FieldData> {
const EMPTY: &Arena<FieldData> = &Arena::new();
- match &self {
+ match self {
VariantData::Record { fields, .. } | VariantData::Tuple { fields, .. } => fields,
_ => EMPTY,
}
}
pub fn types_map(&self) -> &TypesMap {
- match &self {
+ match self {
VariantData::Record { types_map, .. } | VariantData::Tuple { types_map, .. } => {
types_map
}
@@ -405,5 +407,6 @@
name: field.name.clone(),
type_ref: field.type_ref,
visibility: item_tree[override_visibility.unwrap_or(field.visibility)].clone(),
+ is_unsafe: field.is_unsafe,
}
}
diff --git a/crates/hir-def/src/dyn_map.rs b/crates/hir-def/src/dyn_map.rs
index b17a2b0..eed1490 100644
--- a/crates/hir-def/src/dyn_map.rs
+++ b/crates/hir-def/src/dyn_map.rs
@@ -112,6 +112,10 @@
}
impl<K, V, P> Key<K, V, P> {
+ #[allow(
+ clippy::new_without_default,
+ reason = "this is a const fn, so it can't be default yet. See <https://github.com/rust-lang/rust/issues/63065>"
+ )]
pub(crate) const fn new() -> Key<K, V, P> {
Key { _phantom: PhantomData }
}
@@ -148,16 +152,11 @@
}
}
+#[derive(Default)]
pub struct DynMap {
pub(crate) map: Map,
}
-impl Default for DynMap {
- fn default() -> Self {
- DynMap { map: Map::new() }
- }
-}
-
#[repr(transparent)]
pub struct KeyMap<KEY> {
map: DynMap,
diff --git a/crates/hir-def/src/expr_store/lower.rs b/crates/hir-def/src/expr_store/lower.rs
index 86f31dc..1791a1c 100644
--- a/crates/hir-def/src/expr_store/lower.rs
+++ b/crates/hir-def/src/expr_store/lower.rs
@@ -1360,8 +1360,7 @@
else {
panic!("just expanded a macro, ExpansionSpanMap should be available");
};
- let old_span_map =
- mem::replace(&mut self.current_span_map, Some(new_span_map.clone()));
+ let old_span_map = self.current_span_map.replace(new_span_map.clone());
let id = collector(self, Some(expansion.tree()));
self.current_span_map = old_span_map;
self.ast_id_map = prev_ast_id_map;
@@ -2054,7 +2053,7 @@
f: ast::FormatArgsExpr,
syntax_ptr: AstPtr<ast::Expr>,
) -> ExprId {
- let mut args = FormatArgumentsCollector::new();
+ let mut args = FormatArgumentsCollector::default();
f.args().for_each(|arg| {
args.add(FormatArgument {
kind: match arg.name() {
@@ -2322,54 +2321,99 @@
zero_pad,
debug_hex,
} = &placeholder.format_options;
- let fill = self.alloc_expr_desugared(Expr::Literal(Literal::Char(fill.unwrap_or(' '))));
- let align = {
- let align = LangItem::FormatAlignment.ty_rel_path(
- self.db,
- self.krate,
- match alignment {
- Some(FormatAlignment::Left) => Name::new_symbol_root(sym::Left.clone()),
- Some(FormatAlignment::Right) => Name::new_symbol_root(sym::Right.clone()),
- Some(FormatAlignment::Center) => Name::new_symbol_root(sym::Center.clone()),
- None => Name::new_symbol_root(sym::Unknown.clone()),
- },
- );
- match align {
- Some(path) => self.alloc_expr_desugared(Expr::Path(path)),
- None => self.missing_expr(),
- }
- };
- // This needs to match `Flag` in library/core/src/fmt/rt.rs.
- let flags: u32 = ((sign == Some(FormatSign::Plus)) as u32)
- | (((sign == Some(FormatSign::Minus)) as u32) << 1)
- | ((alternate as u32) << 2)
- | ((zero_pad as u32) << 3)
- | (((debug_hex == Some(FormatDebugHex::Lower)) as u32) << 4)
- | (((debug_hex == Some(FormatDebugHex::Upper)) as u32) << 5);
- let flags = self.alloc_expr_desugared(Expr::Literal(Literal::Uint(
- flags as u128,
- Some(BuiltinUint::U32),
- )));
- let precision = self.make_count(precision, argmap);
- let width = self.make_count(width, argmap);
+ let precision_expr = self.make_count(precision, argmap);
+ let width_expr = self.make_count(width, argmap);
- let format_placeholder_new = {
- let format_placeholder_new = LangItem::FormatPlaceholder.ty_rel_path(
- self.db,
- self.krate,
- Name::new_symbol_root(sym::new.clone()),
- );
- match format_placeholder_new {
- Some(path) => self.alloc_expr_desugared(Expr::Path(path)),
- None => self.missing_expr(),
- }
- };
+ if self.krate.workspace_data(self.db).is_atleast_187() {
+ // These need to match the constants in library/core/src/fmt/rt.rs.
+ let align = match alignment {
+ Some(FormatAlignment::Left) => 0,
+ Some(FormatAlignment::Right) => 1,
+ Some(FormatAlignment::Center) => 2,
+ None => 3,
+ };
+ // This needs to match `Flag` in library/core/src/fmt/rt.rs.
+ let flags = fill.unwrap_or(' ') as u32
+ | ((sign == Some(FormatSign::Plus)) as u32) << 21
+ | ((sign == Some(FormatSign::Minus)) as u32) << 22
+ | (alternate as u32) << 23
+ | (zero_pad as u32) << 24
+ | ((debug_hex == Some(FormatDebugHex::Lower)) as u32) << 25
+ | ((debug_hex == Some(FormatDebugHex::Upper)) as u32) << 26
+ | (width.is_some() as u32) << 27
+ | (precision.is_some() as u32) << 28
+ | align << 29
+ | 1 << 31; // Highest bit always set.
+ let flags = self.alloc_expr_desugared(Expr::Literal(Literal::Uint(
+ flags as u128,
+ Some(BuiltinUint::U32),
+ )));
- self.alloc_expr_desugared(Expr::Call {
- callee: format_placeholder_new,
- args: Box::new([position, fill, align, flags, precision, width]),
- })
+ let position = RecordLitField {
+ name: Name::new_symbol_root(sym::position.clone()),
+ expr: position,
+ };
+ let flags =
+ RecordLitField { name: Name::new_symbol_root(sym::flags.clone()), expr: flags };
+ let precision = RecordLitField {
+ name: Name::new_symbol_root(sym::precision.clone()),
+ expr: precision_expr,
+ };
+ let width = RecordLitField {
+ name: Name::new_symbol_root(sym::width.clone()),
+ expr: width_expr,
+ };
+ self.alloc_expr_desugared(Expr::RecordLit {
+ path: LangItem::FormatPlaceholder.path(self.db, self.krate).map(Box::new),
+ fields: Box::new([position, flags, precision, width]),
+ spread: None,
+ })
+ } else {
+ let format_placeholder_new = {
+ let format_placeholder_new = LangItem::FormatPlaceholder.ty_rel_path(
+ self.db,
+ self.krate,
+ Name::new_symbol_root(sym::new.clone()),
+ );
+ match format_placeholder_new {
+ Some(path) => self.alloc_expr_desugared(Expr::Path(path)),
+ None => self.missing_expr(),
+ }
+ };
+ // This needs to match `Flag` in library/core/src/fmt/rt.rs.
+ let flags: u32 = ((sign == Some(FormatSign::Plus)) as u32)
+ | (((sign == Some(FormatSign::Minus)) as u32) << 1)
+ | ((alternate as u32) << 2)
+ | ((zero_pad as u32) << 3)
+ | (((debug_hex == Some(FormatDebugHex::Lower)) as u32) << 4)
+ | (((debug_hex == Some(FormatDebugHex::Upper)) as u32) << 5);
+ let flags = self.alloc_expr_desugared(Expr::Literal(Literal::Uint(
+ flags as u128,
+ Some(BuiltinUint::U32),
+ )));
+ let fill = self.alloc_expr_desugared(Expr::Literal(Literal::Char(fill.unwrap_or(' '))));
+ let align = {
+ let align = LangItem::FormatAlignment.ty_rel_path(
+ self.db,
+ self.krate,
+ match alignment {
+ Some(FormatAlignment::Left) => Name::new_symbol_root(sym::Left.clone()),
+ Some(FormatAlignment::Right) => Name::new_symbol_root(sym::Right.clone()),
+ Some(FormatAlignment::Center) => Name::new_symbol_root(sym::Center.clone()),
+ None => Name::new_symbol_root(sym::Unknown.clone()),
+ },
+ );
+ match align {
+ Some(path) => self.alloc_expr_desugared(Expr::Path(path)),
+ None => self.missing_expr(),
+ }
+ };
+ self.alloc_expr_desugared(Expr::Call {
+ callee: format_placeholder_new,
+ args: Box::new([position, fill, align, flags, precision_expr, width_expr]),
+ })
+ }
}
/// Generate a hir expression for a format_args Count.
@@ -2400,7 +2444,8 @@
Some(FormatCount::Literal(n)) => {
let args = self.alloc_expr_desugared(Expr::Literal(Literal::Uint(
*n as u128,
- Some(BuiltinUint::Usize),
+ // FIXME: Change this to Some(BuiltinUint::U16) once we drop support for toolchains < 1.88
+ None,
)));
let count_is = match LangItem::FormatCount.ty_rel_path(
self.db,
diff --git a/crates/hir-def/src/expr_store/tests.rs b/crates/hir-def/src/expr_store/tests.rs
index a6fcfaa..55b95eb 100644
--- a/crates/hir-def/src/expr_store/tests.rs
+++ b/crates/hir-def/src/expr_store/tests.rs
@@ -216,7 +216,7 @@
8u32,
builtin#lang(Count::Implied),
builtin#lang(Count::Is)(
- 2usize,
+ 2,
),
), builtin#lang(Placeholder::new)(
1usize,
diff --git a/crates/hir-def/src/generics.rs b/crates/hir-def/src/generics.rs
index ee02650..71fb253 100644
--- a/crates/hir-def/src/generics.rs
+++ b/crates/hir-def/src/generics.rs
@@ -12,11 +12,9 @@
};
use intern::sym;
use la_arena::{Arena, RawIdx};
-use stdx::{
- impl_from,
- thin_vec::{EmptyOptimizedThinVec, ThinVec},
-};
+use stdx::impl_from;
use syntax::ast::{self, HasGenericParams, HasName, HasTypeBounds};
+use thin_vec::ThinVec;
use triomphe::Arc;
use crate::{
@@ -331,26 +329,30 @@
params.clone()
} else {
Arc::new(GenericParams {
- type_or_consts: all_type_or_consts_enabled
- .then(|| params.type_or_consts.clone())
- .unwrap_or_else(|| {
+ type_or_consts: if all_type_or_consts_enabled {
+ params.type_or_consts.clone()
+ } else {
+ {
params
.type_or_consts
.iter()
.filter(|&(idx, _)| enabled(attr_owner_ct(idx)))
.map(|(_, param)| param.clone())
.collect()
- }),
- lifetimes: all_lifetimes_enabled
- .then(|| params.lifetimes.clone())
- .unwrap_or_else(|| {
+ }
+ },
+ lifetimes: if all_lifetimes_enabled {
+ params.lifetimes.clone()
+ } else {
+ {
params
.lifetimes
.iter()
.filter(|&(idx, _)| enabled(attr_owner_lt(idx)))
.map(|(_, param)| param.clone())
.collect()
- }),
+ }
+ },
where_predicates: params.where_predicates.clone(),
types_map: params.types_map.clone(),
})
@@ -753,12 +755,17 @@
) -> TypeRefId {
let result = match &from[type_ref] {
TypeRef::Fn(fn_) => {
- let params = fn_.params().iter().map(|(name, param_type)| {
+ let params = fn_.params.iter().map(|(name, param_type)| {
(name.clone(), copy_type_ref(*param_type, from, from_source_map, to, to_source_map))
});
- TypeRef::Fn(FnType::new(fn_.is_varargs(), fn_.is_unsafe(), fn_.abi().clone(), params))
+ TypeRef::Fn(Box::new(FnType {
+ params: params.collect(),
+ is_varargs: fn_.is_varargs,
+ is_unsafe: fn_.is_unsafe,
+ abi: fn_.abi.clone(),
+ }))
}
- TypeRef::Tuple(types) => TypeRef::Tuple(EmptyOptimizedThinVec::from_iter(
+ TypeRef::Tuple(types) => TypeRef::Tuple(ThinVec::from_iter(
types.iter().map(|&t| copy_type_ref(t, from, from_source_map, to, to_source_map)),
)),
&TypeRef::RawPtr(type_ref, mutbl) => TypeRef::RawPtr(
@@ -817,13 +824,17 @@
Path::BarePath(mod_path) => Path::BarePath(mod_path.clone()),
Path::Normal(path) => {
let type_anchor = path
- .type_anchor()
+ .type_anchor
.map(|type_ref| copy_type_ref(type_ref, from, from_source_map, to, to_source_map));
- let mod_path = path.mod_path().clone();
- let generic_args = path.generic_args().iter().map(|generic_args| {
+ let mod_path = path.mod_path.clone();
+ let generic_args = path.generic_args.iter().map(|generic_args| {
copy_generic_args(generic_args, from, from_source_map, to, to_source_map)
});
- Path::Normal(NormalPath::new(type_anchor, mod_path, generic_args))
+ Path::Normal(Box::new(NormalPath {
+ generic_args: generic_args.collect(),
+ type_anchor,
+ mod_path,
+ }))
}
Path::LangItem(lang_item, name) => Path::LangItem(*lang_item, name.clone()),
}
@@ -879,7 +890,7 @@
from_source_map: &'a TypesSourceMap,
to: &'a mut TypesMap,
to_source_map: &'a mut TypesSourceMap,
-) -> impl stdx::thin_vec::TrustedLen<Item = TypeBound> + 'a {
+) -> impl Iterator<Item = TypeBound> + 'a {
bounds.iter().map(|bound| copy_type_bound(bound, from, from_source_map, to, to_source_map))
}
diff --git a/crates/hir-def/src/hir/format_args.rs b/crates/hir-def/src/hir/format_args.rs
index 821ec56..ca51f49 100644
--- a/crates/hir-def/src/hir/format_args.rs
+++ b/crates/hir-def/src/hir/format_args.rs
@@ -460,10 +460,6 @@
}
}
- pub fn new() -> Self {
- Default::default()
- }
-
pub fn add(&mut self, arg: FormatArgument) -> usize {
let index = self.arguments.len();
if let Some(name) = arg.kind.ident() {
diff --git a/crates/hir-def/src/hir/type_ref.rs b/crates/hir-def/src/hir/type_ref.rs
index 7bb558d..fd50d2f 100644
--- a/crates/hir-def/src/hir/type_ref.rs
+++ b/crates/hir-def/src/hir/type_ref.rs
@@ -12,11 +12,11 @@
use intern::{Symbol, sym};
use la_arena::{Arena, ArenaMap, Idx};
use span::Edition;
-use stdx::thin_vec::{EmptyOptimizedThinVec, ThinVec, thin_vec_with_header_struct};
use syntax::{
AstPtr,
ast::{self, HasGenericArgs, HasName, IsString},
};
+use thin_vec::ThinVec;
use crate::{
SyntheticSyntax,
@@ -120,13 +120,12 @@
}
}
-thin_vec_with_header_struct! {
- pub new(pub(crate)) struct FnType, FnTypeHeader {
- pub params: [(Option<Name>, TypeRefId)],
- pub is_varargs: bool,
- pub is_unsafe: bool,
- pub abi: Option<Symbol>; ref,
- }
+#[derive(Clone, PartialEq, Eq, Hash, Debug)]
+pub struct FnType {
+ pub params: Box<[(Option<Name>, TypeRefId)]>,
+ pub is_varargs: bool,
+ pub is_unsafe: bool,
+ pub abi: Option<Symbol>,
}
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
@@ -148,14 +147,14 @@
pub enum TypeRef {
Never,
Placeholder,
- Tuple(EmptyOptimizedThinVec<TypeRefId>),
+ Tuple(ThinVec<TypeRefId>),
Path(Path),
RawPtr(TypeRefId, Mutability),
Reference(Box<RefType>),
Array(Box<ArrayType>),
Slice(TypeRefId),
/// A fn pointer. Last element of the vector is the return type.
- Fn(FnType),
+ Fn(Box<FnType>),
ImplTrait(ThinVec<TypeBound>),
DynTrait(ThinVec<TypeBound>),
Macro(AstId<ast::MacroCall>),
@@ -273,9 +272,9 @@
pub fn from_ast(ctx: &mut LowerCtx<'_>, node: ast::Type) -> TypeRefId {
let ty = match &node {
ast::Type::ParenType(inner) => return TypeRef::from_ast_opt(ctx, inner.ty()),
- ast::Type::TupleType(inner) => TypeRef::Tuple(EmptyOptimizedThinVec::from_iter(
- Vec::from_iter(inner.fields().map(|it| TypeRef::from_ast(ctx, it))),
- )),
+ ast::Type::TupleType(inner) => TypeRef::Tuple(ThinVec::from_iter(Vec::from_iter(
+ inner.fields().map(|it| TypeRef::from_ast(ctx, it)),
+ ))),
ast::Type::NeverType(..) => TypeRef::Never,
ast::Type::PathType(inner) => {
// FIXME: Use `Path::from_src`
@@ -342,7 +341,12 @@
let abi = inner.abi().map(lower_abi);
params.push((None, ret_ty));
- TypeRef::Fn(FnType::new(is_varargs, inner.unsafe_token().is_some(), abi, params))
+ TypeRef::Fn(Box::new(FnType {
+ params: params.into(),
+ is_varargs,
+ is_unsafe: inner.unsafe_token().is_some(),
+ abi,
+ }))
}
// for types are close enough for our purposes to the inner type for now...
ast::Type::ForType(inner) => return TypeRef::from_ast_opt(ctx, inner.ty()),
@@ -375,7 +379,7 @@
}
pub(crate) fn unit() -> TypeRef {
- TypeRef::Tuple(EmptyOptimizedThinVec::empty())
+ TypeRef::Tuple(ThinVec::new())
}
pub fn walk(this: TypeRefId, map: &TypesMap, f: &mut impl FnMut(&TypeRef)) {
@@ -386,7 +390,7 @@
f(type_ref);
match type_ref {
TypeRef::Fn(fn_) => {
- fn_.params().iter().for_each(|&(_, param_type)| go(param_type, f, map))
+ fn_.params.iter().for_each(|&(_, param_type)| go(param_type, f, map))
}
TypeRef::Tuple(types) => types.iter().for_each(|&t| go(t, f, map)),
TypeRef::RawPtr(type_ref, _) | TypeRef::Slice(type_ref) => go(*type_ref, f, map),
diff --git a/crates/hir-def/src/import_map.rs b/crates/hir-def/src/import_map.rs
index 717566f..b1622ea 100644
--- a/crates/hir-def/src/import_map.rs
+++ b/crates/hir-def/src/import_map.rs
@@ -9,11 +9,11 @@
use rustc_hash::FxHashSet;
use smallvec::SmallVec;
use span::Edition;
-use stdx::{TupleExt, format_to};
+use stdx::format_to;
use triomphe::Arc;
use crate::{
- AssocItemId, FxIndexMap, ModuleDefId, ModuleId, TraitId,
+ AssocItemId, AttrDefId, Complete, FxIndexMap, ModuleDefId, ModuleId, TraitId,
db::DefDatabase,
item_scope::{ImportOrExternCrate, ItemInNs},
nameres::DefMap,
@@ -31,6 +31,8 @@
pub is_doc_hidden: bool,
/// Whether this item is annotated with `#[unstable(..)]`.
pub is_unstable: bool,
+ /// The value of `#[rust_analyzer::completions(...)]`, if exists.
+ pub complete: Complete,
}
/// A map from publicly exported items to its name.
@@ -172,16 +174,22 @@
ItemInNs::Macros(id) => Some(id.into()),
}
};
- let (is_doc_hidden, is_unstable) = attr_id.map_or((false, false), |attr_id| {
- let attrs = db.attrs(attr_id);
- (attrs.has_doc_hidden(), attrs.is_unstable())
- });
+ let (is_doc_hidden, is_unstable, do_not_complete) = match attr_id {
+ None => (false, false, Complete::Yes),
+ Some(attr_id) => {
+ let attrs = db.attrs(attr_id);
+ let do_not_complete =
+ Complete::extract(matches!(attr_id, AttrDefId::TraitId(_)), &attrs);
+ (attrs.has_doc_hidden(), attrs.is_unstable(), do_not_complete)
+ }
+ };
let import_info = ImportInfo {
name: name.clone(),
container: module,
is_doc_hidden,
is_unstable,
+ complete: do_not_complete,
};
if let Some(ModuleDefId::TraitId(tr)) = item.as_module_def_id() {
@@ -235,12 +243,17 @@
ItemInNs::Values(module_def_id)
};
- let attrs = &db.attrs(item.into());
+ let attr_id = item.into();
+ let attrs = &db.attrs(attr_id);
+ let item_do_not_complete = Complete::extract(false, attrs);
+ let do_not_complete =
+ Complete::for_trait_item(trait_import_info.complete, item_do_not_complete);
let assoc_item_info = ImportInfo {
container: trait_import_info.container,
name: assoc_item_name.clone(),
is_doc_hidden: attrs.has_doc_hidden(),
is_unstable: attrs.is_unstable(),
+ complete: do_not_complete,
};
let (infos, _) =
@@ -398,7 +411,7 @@
db: &dyn DefDatabase,
krate: Crate,
query: &Query,
-) -> FxHashSet<ItemInNs> {
+) -> FxHashSet<(ItemInNs, Complete)> {
let _p = tracing::info_span!("search_dependencies", ?query).entered();
let import_maps: Vec<_> =
@@ -439,7 +452,7 @@
import_maps: &[Arc<ImportMap>],
mut stream: fst::map::Union<'_>,
query: &Query,
-) -> FxHashSet<ItemInNs> {
+) -> FxHashSet<(ItemInNs, Complete)> {
let mut res = FxHashSet::default();
while let Some((_, indexed_values)) = stream.next() {
for &IndexedValue { index: import_map_idx, value } in indexed_values {
@@ -459,8 +472,9 @@
})
.filter(|&(_, info)| {
query.search_mode.check(&query.query, query.case_sensitive, info.name.as_str())
- });
- res.extend(iter.map(TupleExt::head));
+ })
+ .map(|(item, import_info)| (item, import_info.complete));
+ res.extend(iter);
}
}
@@ -521,7 +535,7 @@
let actual = search_dependencies(db.upcast(), krate, &query)
.into_iter()
- .filter_map(|dependency| {
+ .filter_map(|(dependency, _)| {
let dependency_krate = dependency.krate(db.upcast())?;
let dependency_imports = db.import_map(dependency_krate);
diff --git a/crates/hir-def/src/item_scope.rs b/crates/hir-def/src/item_scope.rs
index 47ad020..a299cfb 100644
--- a/crates/hir-def/src/item_scope.rs
+++ b/crates/hir-def/src/item_scope.rs
@@ -358,7 +358,7 @@
}
/// Get a name from current module scope, legacy macros are not included
- pub(crate) fn get(&self, name: &Name) -> PerNs {
+ pub fn get(&self, name: &Name) -> PerNs {
PerNs {
types: self.types.get(name).copied(),
values: self.values.get(name).copied(),
diff --git a/crates/hir-def/src/item_tree.rs b/crates/hir-def/src/item_tree.rs
index ea87b0f..1cabb66 100644
--- a/crates/hir-def/src/item_tree.rs
+++ b/crates/hir-def/src/item_tree.rs
@@ -218,6 +218,22 @@
Attrs::filter(db, krate, self.raw_attrs(of).clone())
}
+ /// Returns a count of a few, expensive items.
+ ///
+ /// For more detail, see [`ItemTreeDataStats`].
+ pub fn item_tree_stats(&self) -> ItemTreeDataStats {
+ match self.data {
+ Some(ref data) => ItemTreeDataStats {
+ traits: data.traits.len(),
+ impls: data.impls.len(),
+ mods: data.mods.len(),
+ macro_calls: data.macro_calls.len(),
+ macro_rules: data.macro_rules.len(),
+ },
+ None => ItemTreeDataStats::default(),
+ }
+ }
+
pub fn pretty_print(&self, db: &dyn DefDatabase, edition: Edition) -> String {
pretty::print_item_tree(db, self, edition)
}
@@ -329,6 +345,15 @@
}
#[derive(Default, Debug, Eq, PartialEq)]
+pub struct ItemTreeDataStats {
+ pub traits: usize,
+ pub impls: usize,
+ pub mods: usize,
+ pub macro_calls: usize,
+ pub macro_rules: usize,
+}
+
+#[derive(Default, Debug, Eq, PartialEq)]
pub struct ItemTreeSourceMaps {
all_concatenated: Box<[TypesSourceMap]>,
structs_offset: u32,
@@ -937,7 +962,7 @@
bitflags::bitflags! {
#[derive(Debug, Clone, Copy, Eq, PartialEq, Default)]
- pub(crate) struct FnFlags: u16 {
+ pub struct FnFlags: u16 {
const HAS_SELF_PARAM = 1 << 0;
const HAS_BODY = 1 << 1;
const HAS_DEFAULT_KW = 1 << 2;
@@ -952,6 +977,7 @@
/// it if needed.
const HAS_TARGET_FEATURE = 1 << 8;
const DEPRECATED_SAFE_2024 = 1 << 9;
+ const RUSTC_ALLOW_INCOHERENT_IMPL = 1 << 10;
}
}
@@ -1007,6 +1033,7 @@
pub name: Name,
pub type_ref: TypeRefId,
pub visibility: RawVisibilityId,
+ pub is_unsafe: bool,
}
#[derive(Debug, Clone, Eq, PartialEq)]
@@ -1024,15 +1051,22 @@
pub struct Static {
pub name: Name,
pub visibility: RawVisibilityId,
- // TODO: use bitflags when we have more flags
- pub mutable: bool,
- pub has_safe_kw: bool,
- pub has_unsafe_kw: bool,
+ pub flags: StaticFlags,
pub type_ref: TypeRefId,
pub ast_id: FileAstId<ast::Static>,
pub types_map: Arc<TypesMap>,
}
+bitflags::bitflags! {
+ #[derive(Debug, Clone, Copy, PartialEq, Eq)]
+ pub struct StaticFlags: u8 {
+ const MUTABLE = 1 << 0;
+ const IS_EXTERN = 1 << 1;
+ const HAS_SAFE_KW = 1 << 2;
+ const HAS_UNSAFE_KW = 1 << 3;
+ }
+}
+
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Trait {
pub name: Name,
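The `item_tree_stats()` accessor added above only exposes raw counters. A minimal sketch of how a caller might aggregate them follows; the iterator, the import path, and the printed report are illustrative assumptions and not part of this patch:

// Hypothetical aggregation over several item trees; `ItemTree` and
// `ItemTreeDataStats` come from the hunk above, everything else is assumed.
use hir_def::item_tree::ItemTree;

fn report_item_tree_stats<'a>(trees: impl Iterator<Item = &'a ItemTree>) {
    let (mut traits, mut impls, mut macro_calls) = (0usize, 0usize, 0usize);
    for tree in trees {
        let stats = tree.item_tree_stats();
        traits += stats.traits;
        impls += stats.impls;
        macro_calls += stats.macro_calls;
    }
    eprintln!("traits: {traits}, impls: {impls}, macro calls: {macro_calls}");
}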
diff --git a/crates/hir-def/src/item_tree/lower.rs b/crates/hir-def/src/item_tree/lower.rs
index 776ee98..4a3deec 100644
--- a/crates/hir-def/src/item_tree/lower.rs
+++ b/crates/hir-def/src/item_tree/lower.rs
@@ -12,11 +12,11 @@
use la_arena::Arena;
use rustc_hash::FxHashMap;
use span::{AstIdMap, SyntaxContext};
-use stdx::thin_vec::ThinVec;
use syntax::{
AstNode,
ast::{self, HasModuleItem, HasName, HasTypeBounds, IsString},
};
+use thin_vec::ThinVec;
use triomphe::Arc;
use crate::{
@@ -29,8 +29,8 @@
GenericModItem, Idx, Impl, ImportAlias, Interned, ItemTree, ItemTreeData,
ItemTreeSourceMaps, ItemTreeSourceMapsBuilder, Macro2, MacroCall, MacroRules, Mod, ModItem,
ModKind, ModPath, Mutability, Name, Param, Path, Range, RawAttrs, RawIdx, RawVisibilityId,
- Static, Struct, StructKind, Trait, TraitAlias, TypeAlias, Union, Use, UseTree, UseTreeKind,
- Variant,
+ Static, StaticFlags, Struct, StructKind, Trait, TraitAlias, TypeAlias, Union, Use, UseTree,
+ UseTreeKind, Variant,
},
lower::LowerCtx,
path::AssociatedTypeBinding,
@@ -320,7 +320,7 @@
let visibility = self.lower_visibility(field);
let type_ref = TypeRef::from_ast_opt(body_ctx, field.ty());
- Field { name, type_ref, visibility }
+ Field { name, type_ref, visibility, is_unsafe: field.unsafe_token().is_some() }
}
fn lower_tuple_field(
@@ -332,7 +332,7 @@
let name = Name::new_tuple_field(idx);
let visibility = self.lower_visibility(field);
let type_ref = TypeRef::from_ast_opt(body_ctx, field.ty());
- Field { name, type_ref, visibility }
+ Field { name, type_ref, visibility, is_unsafe: false }
}
fn lower_union(&mut self, union: &ast::Union) -> Option<FileItemTreeId<Union>> {
@@ -620,22 +620,23 @@
let name = static_.name()?.as_name();
let type_ref = TypeRef::from_ast_opt(&mut body_ctx, static_.ty());
let visibility = self.lower_visibility(static_);
- let mutable = static_.mut_token().is_some();
- let has_safe_kw = static_.safe_token().is_some();
- let has_unsafe_kw = static_.unsafe_token().is_some();
+
+ let mut flags = StaticFlags::empty();
+ if static_.mut_token().is_some() {
+ flags |= StaticFlags::MUTABLE;
+ }
+ if static_.safe_token().is_some() {
+ flags |= StaticFlags::HAS_SAFE_KW;
+ }
+ if static_.unsafe_token().is_some() {
+ flags |= StaticFlags::HAS_UNSAFE_KW;
+ }
+
let ast_id = self.source_ast_id_map.ast_id(static_);
types_map.shrink_to_fit();
types_source_map.shrink_to_fit();
- let res = Static {
- name,
- visibility,
- mutable,
- type_ref,
- ast_id,
- has_safe_kw,
- has_unsafe_kw,
- types_map: Arc::new(types_map),
- };
+ let res =
+ Static { name, visibility, type_ref, ast_id, flags, types_map: Arc::new(types_map) };
self.source_maps.statics.push(types_source_map);
Some(id(self.data().statics.alloc(res)))
}
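Later hunks in this diff read the new flags through methods such as `mutable()`, `is_extern()` and `has_safe_kw()`. A plausible sketch of those accessors over `StaticFlags` is shown below; the method names are taken from the call sites further down, but the exact `impl` block (and whether it lives on the item-tree `Static` or on the lowered static data) is an assumption, not shown in this patch:

// Sketch only: convenience accessors mirroring the names used by the
// unsafe-check and decl-check hunks later in this diff.
impl Static {
    pub fn mutable(&self) -> bool {
        self.flags.contains(StaticFlags::MUTABLE)
    }
    pub fn is_extern(&self) -> bool {
        self.flags.contains(StaticFlags::IS_EXTERN)
    }
    pub fn has_safe_kw(&self) -> bool {
        self.flags.contains(StaticFlags::HAS_SAFE_KW)
    }
    pub fn has_unsafe_kw(&self) -> bool {
        self.flags.contains(StaticFlags::HAS_UNSAFE_KW)
    }
}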
diff --git a/crates/hir-def/src/item_tree/pretty.rs b/crates/hir-def/src/item_tree/pretty.rs
index e666b1e..b79b8a2 100644
--- a/crates/hir-def/src/item_tree/pretty.rs
+++ b/crates/hir-def/src/item_tree/pretty.rs
@@ -11,8 +11,8 @@
AttrOwner, Const, DefDatabase, Enum, ExternBlock, ExternCrate, Field, FieldParent,
FieldsShape, FileItemTreeId, FnFlags, Function, GenericModItem, GenericParams, Impl,
ItemTree, Macro2, MacroCall, MacroRules, Mod, ModItem, ModKind, Param, Path, RawAttrs,
- RawVisibilityId, Static, Struct, Trait, TraitAlias, TypeAlias, TypeBound, Union, Use,
- UseTree, UseTreeKind, Variant,
+ RawVisibilityId, Static, StaticFlags, Struct, Trait, TraitAlias, TypeAlias, TypeBound,
+ Union, Use, UseTree, UseTreeKind, Variant,
},
pretty::{print_path, print_type_bounds, print_type_ref},
type_ref::{TypeRefId, TypesMap},
@@ -135,12 +135,17 @@
self.whitespace();
w!(self, "{{");
self.indented(|this| {
- for (idx, Field { name, type_ref, visibility }) in fields.iter().enumerate() {
+ for (idx, Field { name, type_ref, visibility, is_unsafe }) in
+ fields.iter().enumerate()
+ {
this.print_attrs_of(
AttrOwner::Field(parent, Idx::from_raw(RawIdx::from(idx as u32))),
"\n",
);
this.print_visibility(*visibility);
+ if *is_unsafe {
+ w!(this, "unsafe ");
+ }
w!(this, "{}: ", name.display(self.db.upcast(), edition));
this.print_type_ref(*type_ref, map);
wln!(this, ",");
@@ -151,12 +156,17 @@
FieldsShape::Tuple => {
w!(self, "(");
self.indented(|this| {
- for (idx, Field { name, type_ref, visibility }) in fields.iter().enumerate() {
+ for (idx, Field { name, type_ref, visibility, is_unsafe }) in
+ fields.iter().enumerate()
+ {
this.print_attrs_of(
AttrOwner::Field(parent, Idx::from_raw(RawIdx::from(idx as u32))),
"\n",
);
this.print_visibility(*visibility);
+ if *is_unsafe {
+ w!(this, "unsafe ");
+ }
w!(this, "{}: ", name.display(self.db.upcast(), edition));
this.print_type_ref(*type_ref, map);
wln!(this, ",");
@@ -408,26 +418,18 @@
wln!(self, " = _;");
}
ModItem::Static(it) => {
- let Static {
- name,
- visibility,
- mutable,
- type_ref,
- ast_id,
- has_safe_kw,
- has_unsafe_kw,
- types_map,
- } = &self.tree[it];
+ let Static { name, visibility, type_ref, ast_id, types_map, flags } =
+ &self.tree[it];
self.print_ast_id(ast_id.erase());
self.print_visibility(*visibility);
- if *has_safe_kw {
+ if flags.contains(StaticFlags::HAS_SAFE_KW) {
w!(self, "safe ");
}
- if *has_unsafe_kw {
+ if flags.contains(StaticFlags::HAS_UNSAFE_KW) {
w!(self, "unsafe ");
}
w!(self, "static ");
- if *mutable {
+ if flags.contains(StaticFlags::MUTABLE) {
w!(self, "mut ");
}
w!(self, "{}: ", name.display(self.db.upcast(), self.edition));
diff --git a/crates/hir-def/src/lib.rs b/crates/hir-def/src/lib.rs
index fad0b56..ab897f4 100644
--- a/crates/hir-def/src/lib.rs
+++ b/crates/hir-def/src/lib.rs
@@ -18,9 +18,6 @@
#[cfg(feature = "in-rust-tree")]
extern crate rustc_abi;
-#[cfg(feature = "in-rust-tree")]
-extern crate rustc_hashes;
-
#[cfg(not(feature = "in-rust-tree"))]
extern crate ra_ap_rustc_abi as rustc_abi;
@@ -56,7 +53,7 @@
pub mod import_map;
pub mod visibility;
-use intern::Interned;
+use intern::{Interned, sym};
pub use rustc_abi as layout;
use triomphe::Arc;
@@ -89,6 +86,7 @@
pub use hir_expand::{Intern, Lookup, tt};
use crate::{
+ attr::Attrs,
builtin_type::BuiltinType,
data::adt::VariantData,
db::DefDatabase,
@@ -1505,3 +1503,81 @@
#[derive(Default, Debug, Eq, PartialEq, Clone, Copy)]
pub struct SyntheticSyntax;
+
+// Feature: Completions Attribute
+// Crate authors can opt their type out of completions in some cases.
+// This is done with the `#[rust_analyzer::completions(...)]` attribute.
+//
+// All completable things support `#[rust_analyzer::completions(ignore_flyimport)]`,
+// which causes the thing to get excluded from flyimport completion. It will still
+// be completed when in scope. This is analogous to the setting `rust-analyzer.completion.autoimport.exclude`
+// with `"type": "always"`.
+//
+// In addition, traits support two more modes: `#[rust_analyzer::completions(ignore_flyimport_methods)]`,
+// which means the trait itself may still be flyimported but its methods won't, and
+// `#[rust_analyzer::completions(ignore_methods)]`, which means the methods won't be completed even when
+// the trait is in scope (but the trait itself may still be completed). The methods will still be completed
+// on `dyn Trait`, `impl Trait` or where the trait is specified in bounds. These modes correspond to
+// the settings `rust-analyzer.completion.autoimport.exclude` with `"type": "methods"` and
+// `rust-analyzer.completion.excludeTraits`, respectively.
+//
+// Malformed attributes will be ignored without warnings.
+//
+// Note that users have no way to override this attribute, so be careful and only include things
+// users definitely do not want to be completed!
+
+/// `#[rust_analyzer::completions(...)]` options.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub enum Complete {
+ /// No `#[rust_analyzer::completions(...)]`.
+ Yes,
+ /// `#[rust_analyzer::completions(ignore_flyimport)]`.
+ IgnoreFlyimport,
+ /// `#[rust_analyzer::completions(ignore_flyimport_methods)]` (on a trait only).
+ IgnoreFlyimportMethods,
+ /// `#[rust_analyzer::completions(ignore_methods)]` (on a trait only).
+ IgnoreMethods,
+}
+
+impl Complete {
+ pub fn extract(is_trait: bool, attrs: &Attrs) -> Complete {
+ let mut do_not_complete = Complete::Yes;
+ for ra_attr in attrs.rust_analyzer_tool() {
+ let segments = ra_attr.path.segments();
+ if segments.len() != 2 {
+ continue;
+ }
+ let action = segments[1].symbol();
+ if *action == sym::completions {
+ match ra_attr.token_tree_value().map(|tt| tt.token_trees().flat_tokens()) {
+ Some([tt::TokenTree::Leaf(tt::Leaf::Ident(ident))]) => {
+ if ident.sym == sym::ignore_flyimport {
+ do_not_complete = Complete::IgnoreFlyimport;
+ } else if is_trait {
+ if ident.sym == sym::ignore_methods {
+ do_not_complete = Complete::IgnoreMethods;
+ } else if ident.sym == sym::ignore_flyimport_methods {
+ do_not_complete = Complete::IgnoreFlyimportMethods;
+ }
+ }
+ }
+ _ => {}
+ }
+ }
+ }
+ do_not_complete
+ }
+
+ #[inline]
+ pub fn for_trait_item(trait_attr: Complete, item_attr: Complete) -> Complete {
+ match (trait_attr, item_attr) {
+ (
+ Complete::IgnoreFlyimportMethods
+ | Complete::IgnoreFlyimport
+ | Complete::IgnoreMethods,
+ _,
+ ) => Complete::IgnoreFlyimport,
+ _ => item_attr,
+ }
+ }
+}
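To make the feature documentation above concrete, here is a hedged usage sketch from a crate author's point of view. The item names are invented, and whether the `rust_analyzer` tool namespace needs explicit registration for rustc to accept the attribute depends on the toolchain, so the two crate-level attributes are an assumption; rust-analyzer itself only reads the `#[rust_analyzer::completions(...)]` annotations:

// Assumption: on some toolchains the tool namespace must be registered
// (a nightly-only feature); drop these two lines if the tool is already known.
#![feature(register_tool)]
#![register_tool(rust_analyzer)]

// Never offered by flyimport, but still completed once it is in scope.
#[rust_analyzer::completions(ignore_flyimport)]
pub struct InternalOnly;

// The trait itself may still be flyimported, but its methods are not.
#[rust_analyzer::completions(ignore_flyimport_methods)]
pub trait Sealed {
    fn sealed_method(&self);
}

// Methods are not completed even when the trait is in scope; the trait name
// still completes, and `dyn`/`impl`/bound positions are unaffected.
#[rust_analyzer::completions(ignore_methods)]
pub trait LowLevel {
    fn low_level_method(&self);
}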
diff --git a/crates/hir-def/src/lower.rs b/crates/hir-def/src/lower.rs
index c0f6e1a..b3acfe4 100644
--- a/crates/hir-def/src/lower.rs
+++ b/crates/hir-def/src/lower.rs
@@ -3,8 +3,8 @@
use hir_expand::{AstId, HirFileId, InFile, span_map::SpanMap};
use span::{AstIdMap, AstIdNode, Edition, EditionedFileId, FileId, RealSpanMap};
-use stdx::thin_vec::ThinVec;
use syntax::ast;
+use thin_vec::ThinVec;
use triomphe::Arc;
use crate::{
diff --git a/crates/hir-def/src/macro_expansion_tests/mbe.rs b/crates/hir-def/src/macro_expansion_tests/mbe.rs
index ddf1a21..f990309 100644
--- a/crates/hir-def/src/macro_expansion_tests/mbe.rs
+++ b/crates/hir-def/src/macro_expansion_tests/mbe.rs
@@ -1979,3 +1979,51 @@
"#]],
);
}
+
+#[test]
+fn semicolon_does_not_glue() {
+ check(
+ r#"
+macro_rules! bug {
+ ($id: expr) => {
+ true
+ };
+ ($id: expr; $($attr: ident),*) => {
+ true
+ };
+ ($id: expr; $($attr: ident),*; $norm: expr) => {
+ true
+ };
+ ($id: expr; $($attr: ident),*;; $print: expr) => {
+ true
+ };
+ ($id: expr; $($attr: ident),*; $norm: expr; $print: expr) => {
+ true
+ };
+}
+
+let _ = bug!(a;;;test);
+ "#,
+ expect![[r#"
+macro_rules! bug {
+ ($id: expr) => {
+ true
+ };
+ ($id: expr; $($attr: ident),*) => {
+ true
+ };
+ ($id: expr; $($attr: ident),*; $norm: expr) => {
+ true
+ };
+ ($id: expr; $($attr: ident),*;; $print: expr) => {
+ true
+ };
+ ($id: expr; $($attr: ident),*; $norm: expr; $print: expr) => {
+ true
+ };
+}
+
+let _ = true;
+ "#]],
+ );
+}
diff --git a/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs b/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs
index 1bbed01..cb4fcd8 100644
--- a/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs
+++ b/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs
@@ -582,8 +582,8 @@
}
impl <A: Arbitrary> $crate::arbitrary::Arbitrary for Vec<A> {
- type Parameters = RangedParams1<A::Parameters>;
- type Strategy = VecStrategy<A::Strategy>;
+ type Parameters = RangedParams1<A::Parameters> ;
+ type Strategy = VecStrategy<A::Strategy> ;
fn arbitrary_with(args: Self::Parameters) -> Self::Strategy { {
let product_unpack![range, a] = args;
vec(any_with::<A>(a), range)
diff --git a/crates/hir-def/src/macro_expansion_tests/mod.rs b/crates/hir-def/src/macro_expansion_tests/mod.rs
index d9fbf4b..15eb5db 100644
--- a/crates/hir-def/src/macro_expansion_tests/mod.rs
+++ b/crates/hir-def/src/macro_expansion_tests/mod.rs
@@ -362,7 +362,7 @@
_: Span,
_: Span,
_: Span,
- _: Option<String>,
+ _: String,
) -> Result<TopSubtree, ProcMacroExpansionError> {
let (parse, _) = syntax_bridge::token_tree_to_syntax_node(
subtree,
diff --git a/crates/hir-def/src/nameres/tests/incremental.rs b/crates/hir-def/src/nameres/tests/incremental.rs
index 14653c6..d366318 100644
--- a/crates/hir-def/src/nameres/tests/incremental.rs
+++ b/crates/hir-def/src/nameres/tests/incremental.rs
@@ -52,6 +52,7 @@
{
// Add a dependency a -> b.
let mut new_crate_graph = CrateGraphBuilder::default();
+
let mut add_crate = |crate_name, root_file_idx: usize| {
new_crate_graph.add_crate_root(
files[root_file_idx].file_id(),
@@ -63,7 +64,13 @@
Env::default(),
CrateOrigin::Local { repo: None, name: Some(Symbol::intern(crate_name)) },
false,
- None,
+ Arc::new(
+ // FIXME: This is less than ideal
+ TryFrom::try_from(
+ &*std::env::current_dir().unwrap().as_path().to_string_lossy(),
+ )
+ .unwrap(),
+ ),
Arc::new(CrateWorkspaceData { data_layout: Err("".into()), toolchain: None }),
)
};
diff --git a/crates/hir-def/src/path.rs b/crates/hir-def/src/path.rs
index 1f7365f..7ef31d0 100644
--- a/crates/hir-def/src/path.rs
+++ b/crates/hir-def/src/path.rs
@@ -16,7 +16,6 @@
use hir_expand::name::Name;
use intern::Interned;
use span::Edition;
-use stdx::thin_vec::thin_vec_with_header_struct;
use syntax::ast;
pub use hir_expand::mod_path::{ModPath, PathKind, path};
@@ -58,7 +57,7 @@
/// this is not a problem since many more paths have generics than a type anchor).
BarePath(Interned<ModPath>),
/// `Path::Normal` will always have either generics or type anchor.
- Normal(NormalPath),
+ Normal(Box<NormalPath>),
/// A link to a lang item. It is used in desugaring of things like `it?`. We can show these
/// links via a normal path since they might be private and not accessible in the usage place.
LangItem(LangItemTarget, Option<Name>),
@@ -71,12 +70,11 @@
assert!(size_of::<Option<Path>>() == 16);
};
-thin_vec_with_header_struct! {
- pub new(pub(crate)) struct NormalPath, NormalPathHeader {
- pub generic_args: [Option<GenericArgs>],
- pub type_anchor: Option<TypeRefId>,
- pub mod_path: Interned<ModPath>; ref,
- }
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct NormalPath {
+ pub generic_args: Box<[Option<GenericArgs>]>,
+ pub type_anchor: Option<TypeRefId>,
+ pub mod_path: Interned<ModPath>,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -143,7 +141,11 @@
/// Converts a known mod path to `Path`.
pub fn from_known_path(path: ModPath, generic_args: Vec<Option<GenericArgs>>) -> Path {
- Path::Normal(NormalPath::new(None, Interned::new(path), generic_args))
+ Path::Normal(Box::new(NormalPath {
+ generic_args: generic_args.into_boxed_slice(),
+ type_anchor: None,
+ mod_path: Interned::new(path),
+ }))
}
/// Converts a known mod path to `Path`.
@@ -155,7 +157,7 @@
pub fn kind(&self) -> &PathKind {
match self {
Path::BarePath(mod_path) => &mod_path.kind,
- Path::Normal(path) => &path.mod_path().kind,
+ Path::Normal(path) => &path.mod_path.kind,
Path::LangItem(..) => &PathKind::Abs,
}
}
@@ -163,7 +165,7 @@
#[inline]
pub fn type_anchor(&self) -> Option<TypeRefId> {
match self {
- Path::Normal(path) => path.type_anchor(),
+ Path::Normal(path) => path.type_anchor,
Path::LangItem(..) | Path::BarePath(_) => None,
}
}
@@ -171,7 +173,7 @@
#[inline]
pub fn generic_args(&self) -> Option<&[Option<GenericArgs>]> {
match self {
- Path::Normal(path) => Some(path.generic_args()),
+ Path::Normal(path) => Some(&path.generic_args),
Path::LangItem(..) | Path::BarePath(_) => None,
}
}
@@ -182,8 +184,8 @@
PathSegments { segments: mod_path.segments(), generic_args: None }
}
Path::Normal(path) => PathSegments {
- segments: path.mod_path().segments(),
- generic_args: Some(path.generic_args()),
+ segments: path.mod_path.segments(),
+ generic_args: Some(&path.generic_args),
},
Path::LangItem(_, seg) => PathSegments { segments: seg.as_slice(), generic_args: None },
}
@@ -192,7 +194,7 @@
pub fn mod_path(&self) -> Option<&ModPath> {
match self {
Path::BarePath(mod_path) => Some(mod_path),
- Path::Normal(path) => Some(path.mod_path()),
+ Path::Normal(path) => Some(&path.mod_path),
Path::LangItem(..) => None,
}
}
@@ -209,12 +211,12 @@
))))
}
Path::Normal(path) => {
- let mod_path = path.mod_path();
+ let mod_path = &path.mod_path;
if mod_path.is_ident() {
return None;
}
- let type_anchor = path.type_anchor();
- let generic_args = path.generic_args();
+ let type_anchor = path.type_anchor;
+ let generic_args = &path.generic_args;
let qualifier_mod_path = Interned::new(ModPath::from_segments(
mod_path.kind,
mod_path.segments()[..mod_path.segments().len() - 1].iter().cloned(),
@@ -223,11 +225,11 @@
if type_anchor.is_none() && qualifier_generic_args.iter().all(|it| it.is_none()) {
Some(Path::BarePath(qualifier_mod_path))
} else {
- Some(Path::Normal(NormalPath::new(
+ Some(Path::Normal(Box::new(NormalPath {
type_anchor,
- qualifier_mod_path,
- qualifier_generic_args.iter().cloned(),
- )))
+ mod_path: qualifier_mod_path,
+ generic_args: qualifier_generic_args.iter().cloned().collect(),
+ })))
}
}
Path::LangItem(..) => None,
@@ -238,9 +240,9 @@
match self {
Path::BarePath(mod_path) => mod_path.is_Self(),
Path::Normal(path) => {
- path.type_anchor().is_none()
- && path.mod_path().is_Self()
- && path.generic_args().iter().all(|args| args.is_none())
+ path.type_anchor.is_none()
+ && path.mod_path.is_Self()
+ && path.generic_args.iter().all(|args| args.is_none())
}
Path::LangItem(..) => false,
}
diff --git a/crates/hir-def/src/path/lower.rs b/crates/hir-def/src/path/lower.rs
index c8269db..78f3ec0 100644
--- a/crates/hir-def/src/path/lower.rs
+++ b/crates/hir-def/src/path/lower.rs
@@ -9,8 +9,8 @@
name::{AsName, Name},
};
use intern::{Interned, sym};
-use stdx::thin_vec::EmptyOptimizedThinVec;
use syntax::ast::{self, AstNode, HasGenericArgs, HasTypeBounds};
+use thin_vec::ThinVec;
use crate::{
path::{
@@ -213,7 +213,11 @@
if type_anchor.is_none() && generic_args.is_empty() {
return Some(Path::BarePath(mod_path));
} else {
- return Some(Path::Normal(NormalPath::new(type_anchor, mod_path, generic_args)));
+ return Some(Path::Normal(Box::new(NormalPath {
+ generic_args: generic_args.into_boxed_slice(),
+ type_anchor,
+ mod_path,
+ })));
}
fn qualifier(path: &ast::Path) -> Option<ast::Path> {
@@ -344,7 +348,7 @@
param_types.push(type_ref);
}
let args = Box::new([GenericArg::Type(
- ctx.alloc_type_ref_desugared(TypeRef::Tuple(EmptyOptimizedThinVec::from_iter(param_types))),
+ ctx.alloc_type_ref_desugared(TypeRef::Tuple(ThinVec::from_iter(param_types))),
)]);
let bindings = if let Some(ret_type) = ret_type {
let type_ref = TypeRef::from_ast_opt(ctx, ret_type.ty());
diff --git a/crates/hir-def/src/pretty.rs b/crates/hir-def/src/pretty.rs
index c431b45..8d5f6b8 100644
--- a/crates/hir-def/src/pretty.rs
+++ b/crates/hir-def/src/pretty.rs
@@ -220,11 +220,11 @@
}
TypeRef::Fn(fn_) => {
let ((_, return_type), args) =
- fn_.params().split_last().expect("TypeRef::Fn is missing return type");
- if fn_.is_unsafe() {
+ fn_.params.split_last().expect("TypeRef::Fn is missing return type");
+ if fn_.is_unsafe {
write!(buf, "unsafe ")?;
}
- if let Some(abi) = fn_.abi() {
+ if let Some(abi) = &fn_.abi {
buf.write_str("extern ")?;
buf.write_str(abi.as_str())?;
buf.write_char(' ')?;
@@ -236,7 +236,7 @@
}
print_type_ref(db, *typeref, map, buf, edition)?;
}
- if fn_.is_varargs() {
+ if fn_.is_varargs {
if !args.is_empty() {
write!(buf, ", ")?;
}
diff --git a/crates/hir-def/src/resolver.rs b/crates/hir-def/src/resolver.rs
index 28ebaad..4f1be72 100644
--- a/crates/hir-def/src/resolver.rs
+++ b/crates/hir-def/src/resolver.rs
@@ -181,7 +181,7 @@
{
let path = match path {
Path::BarePath(mod_path) => mod_path,
- Path::Normal(it) => it.mod_path(),
+ Path::Normal(it) => &it.mod_path,
Path::LangItem(l, seg) => {
let type_ns = match *l {
LangItemTarget::Union(it) => TypeNs::AdtId(it.into()),
@@ -304,7 +304,7 @@
) -> Option<(ResolveValueResult, ResolvePathResultPrefixInfo)> {
let path = match path {
Path::BarePath(mod_path) => mod_path,
- Path::Normal(it) => it.mod_path(),
+ Path::Normal(it) => &it.mod_path,
Path::LangItem(l, None) => {
return Some((
ResolveValueResult::ValueNs(
diff --git a/crates/hir-expand/Cargo.toml b/crates/hir-expand/Cargo.toml
index 5884c0a..b83efca 100644
--- a/crates/hir-expand/Cargo.toml
+++ b/crates/hir-expand/Cargo.toml
@@ -12,7 +12,7 @@
[lib]
[dependencies]
-cov-mark = "2.0.0-pre.1"
+cov-mark = "2.0.0"
tracing.workspace = true
either.workspace = true
rustc-hash.workspace = true
@@ -35,7 +35,7 @@
syntax-bridge.workspace = true
[dev-dependencies]
-expect-test = "1.4.0"
+expect-test = "1.5.1"
[features]
in-rust-tree = ["syntax/in-rust-tree"]
diff --git a/crates/hir-expand/src/attrs.rs b/crates/hir-expand/src/attrs.rs
index 862e3c7..4331997 100644
--- a/crates/hir-expand/src/attrs.rs
+++ b/crates/hir-expand/src/attrs.rs
@@ -305,13 +305,12 @@
Some(Box::new(AttrInput::TokenTree(tt::TopSubtree::from_subtree(tree))))
}
(Some(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: '=', .. }))), _) => {
- let input = match input.flat_tokens().get(1) {
+ match input.flat_tokens().get(1) {
Some(tt::TokenTree::Leaf(tt::Leaf::Literal(lit))) => {
Some(Box::new(AttrInput::Literal(lit.clone())))
}
_ => None,
- };
- input
+ }
}
_ => None,
};
diff --git a/crates/hir-expand/src/builtin/derive_macro.rs b/crates/hir-expand/src/builtin/derive_macro.rs
index 2ad8cd8..428bed8 100644
--- a/crates/hir-expand/src/builtin/derive_macro.rs
+++ b/crates/hir-expand/src/builtin/derive_macro.rs
@@ -1314,15 +1314,15 @@
}
})
});
- let self_for_traits = make::path_from_segments(
+
+ make::path_from_segments(
[make::generic_ty_path_segment(
make::name_ref(&struct_name.text()),
self_params_for_traits,
)],
false,
)
- .clone_for_update();
- self_for_traits
+ .clone_for_update()
};
let mut span_map = span::SpanMap::empty();
diff --git a/crates/hir-expand/src/change.rs b/crates/hir-expand/src/change.rs
index 3f5b8fd..6873cb7 100644
--- a/crates/hir-expand/src/change.rs
+++ b/crates/hir-expand/src/change.rs
@@ -14,10 +14,6 @@
}
impl ChangeWithProcMacros {
- pub fn new() -> Self {
- Self::default()
- }
-
pub fn apply(self, db: &mut impl ExpandDatabase) {
let crates_id_map = self.source_change.apply(db);
if let Some(proc_macros) = self.proc_macros {
diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs
index d089d4c..9dc08dd 100644
--- a/crates/hir-expand/src/lib.rs
+++ b/crates/hir-expand/src/lib.rs
@@ -208,7 +208,8 @@
},
None => RenderedExpandError {
message: format!(
- "internal error: proc-macro map is missing error entry for crate {def_crate:?}"
+ "internal error: proc-macro map is missing error entry for crate {:?}",
+ def_crate
),
error: true,
kind: RenderedExpandError::GENERAL_KIND,
diff --git a/crates/hir-expand/src/proc_macro.rs b/crates/hir-expand/src/proc_macro.rs
index ceb6972..4920d90 100644
--- a/crates/hir-expand/src/proc_macro.rs
+++ b/crates/hir-expand/src/proc_macro.rs
@@ -41,7 +41,7 @@
def_site: Span,
call_site: Span,
mixed_site: Span,
- current_dir: Option<String>,
+ current_dir: String,
) -> Result<tt::TopSubtree, ProcMacroExpansionError>;
fn eq_dyn(&self, other: &dyn ProcMacroExpander) -> bool;
@@ -318,8 +318,8 @@
// Proc macros have access to the environment variables of the invoking crate.
let env = calling_crate.env(db);
- let current_dir =
- calling_crate.data(db).proc_macro_cwd.as_deref().map(ToString::to_string);
+ // FIXME: Can we avoid the string allocation here?
+ let current_dir = calling_crate.data(db).proc_macro_cwd.to_string();
match proc_macro.expander.expand(
tt,
diff --git a/crates/hir-ty/Cargo.toml b/crates/hir-ty/Cargo.toml
index 49d0c5e..69ad770 100644
--- a/crates/hir-ty/Cargo.toml
+++ b/crates/hir-ty/Cargo.toml
@@ -12,26 +12,26 @@
[lib]
[dependencies]
-cov-mark = "2.0.0-pre.1"
+cov-mark = "2.0.0"
itertools.workspace = true
arrayvec.workspace = true
bitflags.workspace = true
smallvec.workspace = true
-ena = "0.14.0"
+ena = "0.14.3"
either.workspace = true
-oorandom = "11.1.3"
+oorandom = "11.1.5"
tracing.workspace = true
rustc-hash.workspace = true
-scoped-tls = "1.0.0"
+scoped-tls = "1.0.1"
chalk-solve.workspace = true
chalk-ir.workspace = true
chalk-recursive.workspace = true
chalk-derive.workspace = true
la-arena.workspace = true
triomphe.workspace = true
-typed-arena = "2.0.1"
+typed-arena = "2.0.2"
indexmap.workspace = true
-rustc_apfloat = "0.2.0"
+rustc_apfloat = "0.2.2"
query-group.workspace = true
salsa.workspace = true
@@ -50,7 +50,7 @@
span.workspace = true
[dev-dependencies]
-expect-test = "1.4.0"
+expect-test = "1.5.1"
tracing.workspace = true
tracing-subscriber.workspace = true
tracing-tree.workspace = true
diff --git a/crates/hir-ty/src/chalk_db.rs b/crates/hir-ty/src/chalk_db.rs
index e0975b5..6aec56b 100644
--- a/crates/hir-ty/src/chalk_db.rs
+++ b/crates/hir-ty/src/chalk_db.rs
@@ -21,7 +21,7 @@
};
use crate::{
- AliasEq, AliasTy, BoundVar, DebruijnIndex, FnDefId, Interner, ProjectionTy, ProjectionTyExt,
+ AliasEq, AliasTy, BoundVar, DebruijnIndex, Interner, ProjectionTy, ProjectionTyExt,
QuantifiedWhereClause, Substitution, TraitRef, TraitRefExt, Ty, TyBuilder, TyExt, TyKind,
WhereClause,
db::{HirDatabase, InternedCoroutine},
@@ -53,7 +53,7 @@
impl chalk_solve::RustIrDatabase<Interner> for ChalkContext<'_> {
fn associated_ty_data(&self, id: AssocTypeId) -> Arc<AssociatedTyDatum> {
- self.db.associated_ty_data(id)
+ self.db.associated_ty_data(from_assoc_type_id(id))
}
fn trait_datum(&self, trait_id: TraitId) -> Arc<TraitDatum> {
self.db.trait_datum(self.krate, trait_id)
@@ -105,7 +105,7 @@
&self,
fn_def_id: chalk_ir::FnDefId<Interner>,
) -> Arc<rust_ir::FnDefDatum<Interner>> {
- self.db.fn_def_datum(fn_def_id)
+ self.db.fn_def_datum(from_chalk(self.db, fn_def_id))
}
fn impls_for_trait(
@@ -144,22 +144,21 @@
let id_to_chalk = |id: hir_def::ImplId| id.to_chalk(self.db);
let mut result = vec![];
- if fps.is_empty() {
- debug!("Unrestricted search for {:?} impls...", trait_);
- self.for_trait_impls(trait_, self_ty_fp, |impls| {
- result.extend(impls.for_trait(trait_).map(id_to_chalk));
- ControlFlow::Continue(())
- })
- } else {
- self.for_trait_impls(trait_, self_ty_fp, |impls| {
- result.extend(
- fps.iter().flat_map(move |fp| {
+ _ =
+ if fps.is_empty() {
+ debug!("Unrestricted search for {:?} impls...", trait_);
+ self.for_trait_impls(trait_, self_ty_fp, |impls| {
+ result.extend(impls.for_trait(trait_).map(id_to_chalk));
+ ControlFlow::Continue(())
+ })
+ } else {
+ self.for_trait_impls(trait_, self_ty_fp, |impls| {
+ result.extend(fps.iter().flat_map(move |fp| {
impls.for_trait_and_self_ty(trait_, *fp).map(id_to_chalk)
- }),
- );
- ControlFlow::Continue(())
- })
- };
+ }));
+ ControlFlow::Continue(())
+ })
+ };
debug!("impls_for_trait returned {} impls", result.len());
result
@@ -448,7 +447,7 @@
Arc::new(rust_ir::AdtSizeAlign::from_one_zst(false))
}
fn assoc_type_name(&self, assoc_ty_id: chalk_ir::AssocTypeId<Interner>) -> String {
- let id = self.db.associated_ty_data(assoc_ty_id).name;
+ let id = self.db.associated_ty_data(from_assoc_type_id(assoc_ty_id)).name;
self.db.type_alias_data(id).name.display(self.db.upcast(), self.edition()).to_string()
}
fn opaque_type_name(&self, opaque_ty_id: chalk_ir::OpaqueTyId<Interner>) -> String {
@@ -553,7 +552,7 @@
let block_impls = iter::successors(self.block, |&block_id| {
cov_mark::hit!(block_local_impls);
- self.db.block_def_map(block_id).parent().and_then(|module| module.containing_block())
+ block_id.loc(self.db).module.containing_block()
})
.inspect(|&block_id| {
// make sure we don't search the same block twice
@@ -584,11 +583,11 @@
&self,
fn_def_id: chalk_ir::FnDefId<Interner>,
) -> chalk_ir::Variances<Interner> {
- HirDatabase::fn_def_variance(*self, fn_def_id)
+ HirDatabase::fn_def_variance(*self, from_chalk(*self, fn_def_id))
}
fn adt_variance(&self, adt_id: chalk_ir::AdtId<Interner>) -> chalk_ir::Variances<Interner> {
- HirDatabase::adt_variance(*self, adt_id)
+ HirDatabase::adt_variance(*self, adt_id.0)
}
}
@@ -603,10 +602,9 @@
pub(crate) fn associated_ty_data_query(
db: &dyn HirDatabase,
- id: AssocTypeId,
+ type_alias: TypeAliasId,
) -> Arc<AssociatedTyDatum> {
- debug!("associated_ty_data {:?}", id);
- let type_alias: TypeAliasId = from_assoc_type_id(id);
+ debug!("associated_ty_data {:?}", type_alias);
let trait_ = match type_alias.lookup(db.upcast()).container {
ItemContainerId::TraitId(t) => t,
_ => panic!("associated type not in trait"),
@@ -657,7 +655,7 @@
let bound_data = rust_ir::AssociatedTyDatumBound { bounds, where_clauses: vec![] };
let datum = AssociatedTyDatum {
trait_id: to_chalk_trait_id(trait_),
- id,
+ id: to_assoc_type_id(type_alias),
name: type_alias,
binders: make_binders(db, &generic_params, bound_data),
};
@@ -924,8 +922,10 @@
Arc::new(value)
}
-pub(crate) fn fn_def_datum_query(db: &dyn HirDatabase, fn_def_id: FnDefId) -> Arc<FnDefDatum> {
- let callable_def: CallableDefId = from_chalk(db, fn_def_id);
+pub(crate) fn fn_def_datum_query(
+ db: &dyn HirDatabase,
+ callable_def: CallableDefId,
+) -> Arc<FnDefDatum> {
let generic_def = GenericDefId::from_callable(db.upcast(), callable_def);
let generic_params = generics(db.upcast(), generic_def);
let (sig, binders) = db.callable_item_signature(callable_def).into_value_and_skipped_binders();
@@ -944,7 +944,7 @@
where_clauses,
};
let datum = FnDefDatum {
- id: fn_def_id,
+ id: callable_def.to_chalk(db),
sig: chalk_ir::FnSig {
abi: sig.abi,
safety: chalk_ir::Safety::Safe,
@@ -955,8 +955,10 @@
Arc::new(datum)
}
-pub(crate) fn fn_def_variance_query(db: &dyn HirDatabase, fn_def_id: FnDefId) -> Variances {
- let callable_def: CallableDefId = from_chalk(db, fn_def_id);
+pub(crate) fn fn_def_variance_query(
+ db: &dyn HirDatabase,
+ callable_def: CallableDefId,
+) -> Variances {
Variances::from_iter(
Interner,
db.variances_of(GenericDefId::from_callable(db.upcast(), callable_def))
@@ -972,10 +974,7 @@
)
}
-pub(crate) fn adt_variance_query(
- db: &dyn HirDatabase,
- chalk_ir::AdtId(adt_id): AdtId,
-) -> Variances {
+pub(crate) fn adt_variance_query(db: &dyn HirDatabase, adt_id: hir_def::AdtId) -> Variances {
Variances::from_iter(
Interner,
db.variances_of(adt_id.into()).as_deref().unwrap_or_default().iter().map(|v| match v {
diff --git a/crates/hir-ty/src/db.rs b/crates/hir-ty/src/db.rs
index 6c61979..1bbffc3 100644
--- a/crates/hir-ty/src/db.rs
+++ b/crates/hir-ty/src/db.rs
@@ -17,8 +17,8 @@
use triomphe::Arc;
use crate::{
- Binders, ClosureId, Const, FnDefId, ImplTraitId, ImplTraits, InferenceResult, Interner,
- PolyFnSig, Substitution, TraitEnvironment, TraitRef, Ty, TyDefId, ValueTyDefId, chalk_db,
+ Binders, Const, ImplTraitId, ImplTraits, InferenceResult, Interner, PolyFnSig, Substitution,
+ TraitEnvironment, TraitRef, Ty, TyDefId, ValueTyDefId, chalk_db,
consteval::ConstEvalError,
drop::DropGlue,
dyn_compatibility::DynCompatibilityViolation,
@@ -39,8 +39,8 @@
#[salsa::cycle(crate::mir::mir_body_recover)]
fn mir_body(&self, def: DefWithBodyId) -> Result<Arc<MirBody>, MirLowerError>;
- #[salsa::invoke(crate::mir::mir_body_for_closure_query)]
- fn mir_body_for_closure(&self, def: ClosureId) -> Result<Arc<MirBody>, MirLowerError>;
+ #[salsa::invoke_actual(crate::mir::mir_body_for_closure_query)]
+ fn mir_body_for_closure(&self, def: InternedClosureId) -> Result<Arc<MirBody>, MirLowerError>;
#[salsa::invoke(crate::mir::monomorphized_mir_body_query)]
#[salsa::cycle(crate::mir::monomorphized_mir_body_recover)]
@@ -54,12 +54,12 @@
#[salsa::invoke(crate::mir::monomorphized_mir_body_for_closure_query)]
fn monomorphized_mir_body_for_closure(
&self,
- def: ClosureId,
+ def: InternedClosureId,
subst: Substitution,
env: Arc<TraitEnvironment>,
) -> Result<Arc<MirBody>, MirLowerError>;
- #[salsa::invoke(crate::mir::borrowck_query)]
+ #[salsa::invoke_actual(crate::mir::borrowck_query)]
#[salsa::lru(2024)]
fn borrowck(&self, def: DefWithBodyId) -> Result<Arc<[BorrowckResult]>, MirLowerError>;
@@ -110,9 +110,10 @@
fn dyn_compatibility_of_trait(&self, trait_: TraitId) -> Option<DynCompatibilityViolation>;
#[salsa::invoke(crate::lower::ty_query)]
- #[salsa::cycle(crate::lower::ty_recover)]
+ #[salsa::transparent]
fn ty(&self, def: TyDefId) -> Binders<Ty>;
+ #[salsa::cycle(crate::lower::type_for_type_alias_with_diagnostics_query_recover)]
#[salsa::invoke_actual(crate::lower::type_for_type_alias_with_diagnostics_query)]
fn type_for_type_alias_with_diagnostics(&self, def: TypeAliasId) -> (Binders<Ty>, Diagnostics);
@@ -244,11 +245,8 @@
#[salsa::interned]
fn intern_coroutine(&self, id: InternedCoroutine) -> InternedCoroutineId;
- #[salsa::invoke(chalk_db::associated_ty_data_query)]
- fn associated_ty_data(
- &self,
- id: chalk_db::AssocTypeId,
- ) -> sync::Arc<chalk_db::AssociatedTyDatum>;
+ #[salsa::invoke_actual(chalk_db::associated_ty_data_query)]
+ fn associated_ty_data(&self, id: TypeAliasId) -> sync::Arc<chalk_db::AssociatedTyDatum>;
#[salsa::invoke(chalk_db::trait_datum_query)]
fn trait_datum(
@@ -264,14 +262,14 @@
fn impl_datum(&self, krate: Crate, impl_id: chalk_db::ImplId)
-> sync::Arc<chalk_db::ImplDatum>;
- #[salsa::invoke(chalk_db::fn_def_datum_query)]
- fn fn_def_datum(&self, fn_def_id: FnDefId) -> sync::Arc<chalk_db::FnDefDatum>;
+ #[salsa::invoke_actual(chalk_db::fn_def_datum_query)]
+ fn fn_def_datum(&self, fn_def_id: CallableDefId) -> sync::Arc<chalk_db::FnDefDatum>;
- #[salsa::invoke(chalk_db::fn_def_variance_query)]
- fn fn_def_variance(&self, fn_def_id: FnDefId) -> chalk_db::Variances;
+ #[salsa::invoke_actual(chalk_db::fn_def_variance_query)]
+ fn fn_def_variance(&self, fn_def_id: CallableDefId) -> chalk_db::Variances;
- #[salsa::invoke(chalk_db::adt_variance_query)]
- fn adt_variance(&self, adt_id: chalk_db::AdtId) -> chalk_db::Variances;
+ #[salsa::invoke_actual(chalk_db::adt_variance_query)]
+ fn adt_variance(&self, adt_id: AdtId) -> chalk_db::Variances;
#[salsa::invoke_actual(crate::variance::variances_of)]
#[salsa::cycle(crate::variance::variances_of_cycle)]
diff --git a/crates/hir-ty/src/diagnostics/decl_check.rs b/crates/hir-ty/src/diagnostics/decl_check.rs
index ce0ffb1..5fb8e8e 100644
--- a/crates/hir-ty/src/diagnostics/decl_check.rs
+++ b/crates/hir-ty/src/diagnostics/decl_check.rs
@@ -558,7 +558,7 @@
fn validate_static(&mut self, static_id: StaticId) {
let data = self.db.static_data(static_id);
- if data.is_extern {
+ if data.is_extern() {
cov_mark::hit!(extern_static_incorrect_case_ignored);
return;
}
diff --git a/crates/hir-ty/src/diagnostics/match_check.rs b/crates/hir-ty/src/diagnostics/match_check.rs
index 1c58485..3542f9b 100644
--- a/crates/hir-ty/src/diagnostics/match_check.rs
+++ b/crates/hir-ty/src/diagnostics/match_check.rs
@@ -242,7 +242,7 @@
ty: &Ty,
subpatterns: Vec<FieldPat>,
) -> PatKind {
- let kind = match self.infer.variant_resolution_for_pat(pat) {
+ match self.infer.variant_resolution_for_pat(pat) {
Some(variant_id) => {
if let VariantId::EnumVariantId(enum_variant) = variant_id {
let substs = match ty.kind(Interner) {
@@ -266,8 +266,7 @@
self.errors.push(PatternError::UnresolvedVariant);
PatKind::Wild
}
- };
- kind
+ }
}
fn lower_path(&mut self, pat: PatId, _path: &hir_def::path::Path) -> Pat {
diff --git a/crates/hir-ty/src/diagnostics/unsafe_check.rs b/crates/hir-ty/src/diagnostics/unsafe_check.rs
index 562a9aa..b4fe417 100644
--- a/crates/hir-ty/src/diagnostics/unsafe_check.rs
+++ b/crates/hir-ty/src/diagnostics/unsafe_check.rs
@@ -362,9 +362,9 @@
self.resolver.resolve_path_in_value_ns(self.db.upcast(), path, hygiene);
if let Some(ResolveValueResult::ValueNs(ValueNs::StaticId(id), _)) = value_or_partial {
let static_data = self.db.static_data(id);
- if static_data.mutable {
+ if static_data.mutable() {
self.on_unsafe_op(node, UnsafetyReason::MutableStatic);
- } else if static_data.is_extern && !static_data.has_safe_kw {
+ } else if static_data.is_extern() && !static_data.has_safe_kw() {
self.on_unsafe_op(node, UnsafetyReason::ExternStatic);
}
}
diff --git a/crates/hir-ty/src/display.rs b/crates/hir-ty/src/display.rs
index d72b195..52ed052 100644
--- a/crates/hir-ty/src/display.rs
+++ b/crates/hir-ty/src/display.rs
@@ -2128,16 +2128,16 @@
write!(f, "]")?;
}
TypeRef::Fn(fn_) => {
- if fn_.is_unsafe() {
+ if fn_.is_unsafe {
write!(f, "unsafe ")?;
}
- if let Some(abi) = fn_.abi() {
+ if let Some(abi) = &fn_.abi {
f.write_str("extern \"")?;
f.write_str(abi.as_str())?;
f.write_str("\" ")?;
}
write!(f, "fn(")?;
- if let Some(((_, return_type), function_parameters)) = fn_.params().split_last() {
+ if let Some(((_, return_type), function_parameters)) = fn_.params.split_last() {
for index in 0..function_parameters.len() {
let (param_name, param_type) = &function_parameters[index];
if let Some(name) = param_name {
@@ -2150,8 +2150,8 @@
write!(f, ", ")?;
}
}
- if fn_.is_varargs() {
- write!(f, "{}...", if fn_.params().len() == 1 { "" } else { ", " })?;
+ if fn_.is_varargs {
+ write!(f, "{}...", if fn_.params.len() == 1 { "" } else { ", " })?;
}
write!(f, ")")?;
match &types_map[*return_type] {
diff --git a/crates/hir-ty/src/drop.rs b/crates/hir-ty/src/drop.rs
index 5484a39..6bfd9e9 100644
--- a/crates/hir-ty/src/drop.rs
+++ b/crates/hir-ty/src/drop.rs
@@ -32,8 +32,8 @@
},
None => db.trait_impls_in_crate(module.krate()),
};
- let result = impls.for_trait_and_self_ty(drop_trait, TyFingerprint::Adt(adt)).next().is_some();
- result
+
+ impls.for_trait_and_self_ty(drop_trait, TyFingerprint::Adt(adt)).next().is_some()
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
diff --git a/crates/hir-ty/src/dyn_compatibility.rs b/crates/hir-ty/src/dyn_compatibility.rs
index cb186c4..d4cb76b 100644
--- a/crates/hir-ty/src/dyn_compatibility.rs
+++ b/crates/hir-ty/src/dyn_compatibility.rs
@@ -22,7 +22,7 @@
from_assoc_type_id, from_chalk_trait_id,
generics::{generics, trait_self_param_idx},
lower::callable_item_sig,
- to_assoc_type_id, to_chalk_trait_id,
+ to_chalk_trait_id,
utils::elaborate_clause_supertraits,
};
@@ -115,7 +115,7 @@
trait_: TraitId,
) -> Option<DynCompatibilityViolation> {
let mut res = None;
- dyn_compatibility_of_trait_with_callback(db, trait_, &mut |osv| {
+ _ = dyn_compatibility_of_trait_with_callback(db, trait_, &mut |osv| {
res = Some(osv);
ControlFlow::Break(())
});
@@ -171,8 +171,7 @@
.iter()
.filter_map(|(_, it)| match *it {
AssocItemId::TypeAliasId(id) => {
- let assoc_ty_id = to_assoc_type_id(id);
- let assoc_ty_data = db.associated_ty_data(assoc_ty_id);
+ let assoc_ty_data = db.associated_ty_data(id);
Some(assoc_ty_data)
}
_ => None,
@@ -592,7 +591,7 @@
let ret = sig.skip_binders().ret();
let mut visitor = OpaqueTypeCollector(FxHashSet::default());
- ret.visit_with(visitor.as_dyn(), DebruijnIndex::INNERMOST);
+ _ = ret.visit_with(visitor.as_dyn(), DebruijnIndex::INNERMOST);
// Since we haven't implemented RPITIT in proper way like rustc yet,
// just check whether `ret` contains RPIT for now
diff --git a/crates/hir-ty/src/dyn_compatibility/tests.rs b/crates/hir-ty/src/dyn_compatibility/tests.rs
index 3060b61..618fc73 100644
--- a/crates/hir-ty/src/dyn_compatibility/tests.rs
+++ b/crates/hir-ty/src/dyn_compatibility/tests.rs
@@ -53,7 +53,7 @@
continue;
};
let mut osvs = FxHashSet::default();
- dyn_compatibility_with_callback(&db, trait_id, &mut |osv| {
+ _ = dyn_compatibility_with_callback(&db, trait_id, &mut |osv| {
osvs.insert(match osv {
DynCompatibilityViolation::SizedSelf => SizedSelf,
DynCompatibilityViolation::SelfReferential => SelfReferential,
diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs
index 7388d2a..0448ecd 100644
--- a/crates/hir-ty/src/infer.rs
+++ b/crates/hir-ty/src/infer.rs
@@ -1143,7 +1143,7 @@
non_assocs: FxHashMap::default(),
};
for ty in tait_candidates {
- ty.visit_with(collector.as_dyn(), DebruijnIndex::INNERMOST);
+ _ = ty.visit_with(collector.as_dyn(), DebruijnIndex::INNERMOST);
}
// Non-assoc TAITs can be define-used everywhere as long as they are
@@ -1534,10 +1534,6 @@
None => return (self.err_ty(), None),
}
};
- let Some(mod_path) = path.mod_path() else {
- never!("resolver should always resolve lang item paths");
- return (self.err_ty(), None);
- };
return match resolution {
TypeNs::AdtId(AdtId::StructId(strukt)) => {
let substs = path_ctx.substs_from_path(strukt.into(), true);
@@ -1567,6 +1563,10 @@
let Some(remaining_idx) = unresolved else {
drop(ctx);
+ let Some(mod_path) = path.mod_path() else {
+ never!("resolver should always resolve lang item paths");
+ return (self.err_ty(), None);
+ };
return self.resolve_variant_on_alias(ty, None, mod_path);
};
@@ -1630,6 +1630,10 @@
(ty, variant)
}
TypeNs::TypeAliasId(it) => {
+ let Some(mod_path) = path.mod_path() else {
+ never!("resolver should always resolve lang item paths");
+ return (self.err_ty(), None);
+ };
let substs = path_ctx.substs_from_path_segment(it.into(), true, None);
drop(ctx);
let ty = self.db.ty(it.into());
diff --git a/crates/hir-ty/src/infer/closure.rs b/crates/hir-ty/src/infer/closure.rs
index 69de555..201f85f 100644
--- a/crates/hir-ty/src/infer/closure.rs
+++ b/crates/hir-ty/src/infer/closure.rs
@@ -31,7 +31,7 @@
DynTyExt, FnAbi, FnPointer, FnSig, Interner, OpaqueTy, ProjectionTyExt, Substitution, Ty,
TyExt, WhereClause,
db::{HirDatabase, InternedClosure},
- error_lifetime, from_chalk_trait_id, from_placeholder_idx,
+ error_lifetime, from_assoc_type_id, from_chalk_trait_id, from_placeholder_idx,
generics::Generics,
infer::coerce::CoerceNever,
make_binders,
@@ -153,7 +153,8 @@
if let WhereClause::AliasEq(AliasEq { alias: AliasTy::Projection(projection), ty }) =
bound.skip_binders()
{
- let assoc_data = self.db.associated_ty_data(projection.associated_ty_id);
+ let assoc_data =
+ self.db.associated_ty_data(from_assoc_type_id(projection.associated_ty_id));
if !fn_traits.contains(&assoc_data.trait_id) {
return None;
}
@@ -223,7 +224,7 @@
kind: MutBorrowKind::Default | MutBorrowKind::TwoPhasedBorrow,
}) = current_capture
{
- if self.projections[len..].iter().any(|it| *it == ProjectionElem::Deref) {
+ if self.projections[len..].contains(&ProjectionElem::Deref) {
current_capture =
CaptureKind::ByRef(BorrowKind::Mut { kind: MutBorrowKind::ClosureCapture });
}
@@ -517,10 +518,9 @@
return None;
}
let hygiene = self.body.expr_or_pat_path_hygiene(id);
- let result = self
- .resolver
- .resolve_path_in_value_ns_fully(self.db.upcast(), path, hygiene)
- .and_then(|result| match result {
+
+ self.resolver.resolve_path_in_value_ns_fully(self.db.upcast(), path, hygiene).and_then(
+ |result| match result {
ValueNs::LocalBinding(binding) => {
let mir_span = match id {
ExprOrPatId::ExprId(id) => MirSpan::ExprId(id),
@@ -530,8 +530,8 @@
Some(HirPlace { local: binding, projections: Vec::new() })
}
_ => None,
- });
- result
+ },
+ )
}
/// Changes `current_capture_span_stack` to contain the stack of spans for this expr.
diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs
index c5a6c21..815a3d1 100644
--- a/crates/hir-ty/src/infer/expr.rs
+++ b/crates/hir-ty/src/infer/expr.rs
@@ -198,7 +198,7 @@
match &self.body[expr] {
// Lang item paths cannot currently be local variables or statics.
Expr::Path(Path::LangItem(_, _)) => false,
- Expr::Path(Path::Normal(path)) => path.type_anchor().is_none(),
+ Expr::Path(Path::Normal(path)) => path.type_anchor.is_none(),
Expr::Path(path) => self
.resolver
.resolve_path_in_value_ns_fully(
@@ -472,8 +472,7 @@
let prev_diverges = mem::replace(&mut self.diverges, Diverges::Maybe);
let prev_closure = mem::replace(&mut self.current_closure, id);
let prev_ret_ty = mem::replace(&mut self.return_ty, ret_ty.clone());
- let prev_ret_coercion =
- mem::replace(&mut self.return_coercion, Some(CoerceMany::new(ret_ty)));
+ let prev_ret_coercion = self.return_coercion.replace(CoerceMany::new(ret_ty));
let prev_resume_yield_tys =
mem::replace(&mut self.resume_yield_tys, resume_yield_tys);
@@ -1168,8 +1167,7 @@
let ret_ty = self.table.new_type_var();
let prev_diverges = mem::replace(&mut self.diverges, Diverges::Maybe);
let prev_ret_ty = mem::replace(&mut self.return_ty, ret_ty.clone());
- let prev_ret_coercion =
- mem::replace(&mut self.return_coercion, Some(CoerceMany::new(ret_ty.clone())));
+ let prev_ret_coercion = self.return_coercion.replace(CoerceMany::new(ret_ty.clone()));
// FIXME: We should handle async blocks like we handle closures
let expected = &Expectation::has_type(ret_ty);
diff --git a/crates/hir-ty/src/infer/unify.rs b/crates/hir-ty/src/infer/unify.rs
index 67796b9..d0c9b23 100644
--- a/crates/hir-ty/src/infer/unify.rs
+++ b/crates/hir-ty/src/infer/unify.rs
@@ -1094,7 +1094,8 @@
.assert_ty_ref(Interner)
.clone();
}
- let result = if let Some(known_ty) = self.table.var_unification_table.probe_var(var) {
+
+ if let Some(known_ty) = self.table.var_unification_table.probe_var(var) {
// known_ty may contain other variables that are known by now
self.var_stack.push(var);
let result = known_ty.fold_with(self, outer_binder);
@@ -1105,8 +1106,7 @@
(self.fallback)(var, VariableKind::Ty(kind), default, outer_binder)
.assert_ty_ref(Interner)
.clone()
- };
- result
+ }
}
fn fold_inference_const(
diff --git a/crates/hir-ty/src/lib.rs b/crates/hir-ty/src/lib.rs
index 8292e80..7ca32c7 100644
--- a/crates/hir-ty/src/lib.rs
+++ b/crates/hir-ty/src/lib.rs
@@ -1013,7 +1013,7 @@
T: ?Sized + TypeVisitable<Interner>,
{
let mut collector = PlaceholderCollector { db, placeholders: FxHashSet::default() };
- value.visit_with(&mut collector, DebruijnIndex::INNERMOST);
+ _ = value.visit_with(&mut collector, DebruijnIndex::INNERMOST);
collector.placeholders.into_iter().collect()
}
diff --git a/crates/hir-ty/src/lower.rs b/crates/hir-ty/src/lower.rs
index e5f3c4c..5238a65 100644
--- a/crates/hir-ty/src/lower.rs
+++ b/crates/hir-ty/src/lower.rs
@@ -318,15 +318,15 @@
let substs = self.with_shifted_in(DebruijnIndex::ONE, |ctx| {
Substitution::from_iter(
Interner,
- fn_.params().iter().map(|&(_, tr)| ctx.lower_ty(tr)),
+ fn_.params.iter().map(|&(_, tr)| ctx.lower_ty(tr)),
)
});
TyKind::Function(FnPointer {
num_binders: 0, // FIXME lower `for<'a> fn()` correctly
sig: FnSig {
- abi: fn_.abi().as_ref().map_or(FnAbi::Rust, FnAbi::from_symbol),
- safety: if fn_.is_unsafe() { Safety::Unsafe } else { Safety::Safe },
- variadic: fn_.is_varargs(),
+ abi: fn_.abi.as_ref().map_or(FnAbi::Rust, FnAbi::from_symbol),
+ safety: if fn_.is_unsafe { Safety::Unsafe } else { Safety::Safe },
+ variadic: fn_.is_varargs,
},
substitution: FnSubst(substs),
})
@@ -1560,6 +1560,20 @@
}
}
+#[salsa::tracked(recovery_fn = type_for_adt_recovery)]
+fn type_for_adt_tracked(db: &dyn HirDatabase, adt: AdtId) -> Binders<Ty> {
+ type_for_adt(db, adt)
+}
+
+pub(crate) fn type_for_adt_recovery(
+ db: &dyn HirDatabase,
+ _cycle: &salsa::Cycle,
+ adt: AdtId,
+) -> Binders<Ty> {
+ let generics = generics(db.upcast(), adt.into());
+ make_binders(db, &generics, TyKind::Error.intern(Interner))
+}
+
fn type_for_adt(db: &dyn HirDatabase, adt: AdtId) -> Binders<Ty> {
let generics = generics(db.upcast(), adt.into());
let subst = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST);
@@ -1577,7 +1591,7 @@
let mut ctx = TyLoweringContext::new(db, &resolver, &type_alias_data.types_map, t.into())
.with_impl_trait_mode(ImplTraitLoweringMode::Opaque)
.with_type_param_mode(ParamLoweringMode::Variable);
- let inner = if type_alias_data.is_extern {
+ let inner = if type_alias_data.is_extern() {
TyKind::Foreign(crate::to_foreign_def_id(t)).intern(Interner)
} else {
type_alias_data
@@ -1589,6 +1603,15 @@
(make_binders(db, &generics, inner), create_diagnostics(ctx.diagnostics))
}
+pub(crate) fn type_for_type_alias_with_diagnostics_query_recover(
+ db: &dyn HirDatabase,
+ _cycle: &salsa::Cycle,
+ adt: TypeAliasId,
+) -> (Binders<Ty>, Diagnostics) {
+ let generics = generics(db.upcast(), adt.into());
+ (make_binders(db, &generics, TyKind::Error.intern(Interner)), None)
+}
+
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum TyDefId {
BuiltinType(BuiltinType),
@@ -1628,25 +1651,11 @@
pub(crate) fn ty_query(db: &dyn HirDatabase, def: TyDefId) -> Binders<Ty> {
match def {
TyDefId::BuiltinType(it) => Binders::empty(Interner, TyBuilder::builtin(it)),
- TyDefId::AdtId(it) => type_for_adt(db, it),
+ TyDefId::AdtId(it) => type_for_adt_tracked(db, it),
TyDefId::TypeAliasId(it) => db.type_for_type_alias_with_diagnostics(it).0,
}
}
-pub(crate) fn ty_recover(
- db: &dyn HirDatabase,
- _cycle: &salsa::Cycle,
- _: HirDatabaseData,
- def: TyDefId,
-) -> Binders<Ty> {
- let generics = match def {
- TyDefId::BuiltinType(_) => return Binders::empty(Interner, TyKind::Error.intern(Interner)),
- TyDefId::AdtId(it) => generics(db.upcast(), it.into()),
- TyDefId::TypeAliasId(it) => generics(db.upcast(), it.into()),
- };
- make_binders(db, &generics, TyKind::Error.intern(Interner))
-}
-
pub(crate) fn value_ty_query(db: &dyn HirDatabase, def: ValueTyDefId) -> Option<Binders<Ty>> {
match def {
ValueTyDefId::FunctionId(it) => Some(type_for_fn(db, it)),
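A number of hunks in this and later files turn field reads such as `static_data.mutable` and `const_data(it).has_body` into method calls (`mutable()`, `has_body()`). The PR does not show the underlying data layout here; one plausible, purely illustrative shape for such accessors packs the booleans into a `bitflags` field and exposes getters (type and flag names below are made up):

```rust
use bitflags::bitflags;

bitflags! {
    struct StaticFlags: u8 {
        const MUTABLE   = 1 << 0;
        const IS_EXTERN = 1 << 1;
    }
}

// Hypothetical data type; not the actual hir-def structs.
struct StaticData {
    name: String,
    flags: StaticFlags,
}

impl StaticData {
    fn mutable(&self) -> bool {
        self.flags.contains(StaticFlags::MUTABLE)
    }
    fn is_extern(&self) -> bool {
        self.flags.contains(StaticFlags::IS_EXTERN)
    }
}

fn main() {
    let s = StaticData { name: "FOO".into(), flags: StaticFlags::MUTABLE };
    assert!(s.mutable() && !s.is_extern());
    println!("static {}{}", if s.mutable() { "mut " } else { "" }, s.name);
}
```

Callers only see the getters, which is why the diff can change the representation while the call sites merely gain parentheses.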
diff --git a/crates/hir-ty/src/method_resolution.rs b/crates/hir-ty/src/method_resolution.rs
index d887013..1076bc2 100644
--- a/crates/hir-ty/src/method_resolution.rs
+++ b/crates/hir-ty/src/method_resolution.rs
@@ -401,7 +401,7 @@
}
&TyKind::Foreign(id) => {
let alias = from_foreign_def_id(id);
- Some(if db.type_alias_data(alias).rustc_has_incoherent_inherent_impls {
+ Some(if db.type_alias_data(alias).rustc_has_incoherent_inherent_impls() {
db.incoherent_inherent_impl_crates(cur_crate, TyFingerprint::ForeignType(id))
} else {
smallvec![alias.module(db.upcast()).krate()]
@@ -585,7 +585,7 @@
mut callback: impl FnMut(ReceiverAdjustments, AssocItemId, bool) -> Option<T>,
) -> Option<T> {
let mut slot = None;
- iterate_method_candidates_dyn(
+ _ = iterate_method_candidates_dyn(
ty,
db,
env,
@@ -843,9 +843,11 @@
rustc_has_incoherent_inherent_impls
&& !items.items.is_empty()
&& items.items.iter().all(|&(_, assoc)| match assoc {
- AssocItemId::FunctionId(it) => db.function_data(it).rustc_allow_incoherent_impl,
- AssocItemId::ConstId(it) => db.const_data(it).rustc_allow_incoherent_impl,
- AssocItemId::TypeAliasId(it) => db.type_alias_data(it).rustc_allow_incoherent_impl,
+ AssocItemId::FunctionId(it) => db.function_data(it).rustc_allow_incoherent_impl(),
+ AssocItemId::ConstId(it) => db.const_data(it).rustc_allow_incoherent_impl(),
+ AssocItemId::TypeAliasId(it) => {
+ db.type_alias_data(it).rustc_allow_incoherent_impl()
+ }
})
}
}
@@ -874,24 +876,32 @@
return true;
}
- let unwrap_fundamental = |ty: Ty| match ty.kind(Interner) {
- TyKind::Ref(_, _, referenced) => referenced.clone(),
- &TyKind::Adt(AdtId(hir_def::AdtId::StructId(s)), ref subs) => {
- let struct_data = db.struct_data(s);
- if struct_data.flags.contains(StructFlags::IS_FUNDAMENTAL) {
- let next = subs.type_parameters(Interner).next();
- match next {
- Some(ty) => ty,
- None => ty,
+ let unwrap_fundamental = |mut ty: Ty| {
+ // Unwrap all layers of fundamental types with a loop.
+ loop {
+ match ty.kind(Interner) {
+ TyKind::Ref(_, _, referenced) => ty = referenced.clone(),
+ &TyKind::Adt(AdtId(hir_def::AdtId::StructId(s)), ref subs) => {
+ let struct_data = db.struct_data(s);
+ if struct_data.flags.contains(StructFlags::IS_FUNDAMENTAL) {
+ let next = subs.type_parameters(Interner).next();
+ match next {
+ Some(it) => ty = it,
+ None => break ty,
+ }
+ } else {
+ break ty;
+ }
}
- } else {
- ty
+ _ => break ty,
}
}
- _ => ty,
};
// - At least one of the types `T0..=Tn`` must be a local type. Let `Ti`` be the first such type.
- let is_not_orphan = trait_ref.substitution.type_parameters(Interner).any(|ty| {
+
+ // FIXME: param coverage
+ // - No uncovered type parameters `P1..=Pn` may appear in `T0..Ti`` (excluding `Ti`)
+ trait_ref.substitution.type_parameters(Interner).any(|ty| {
match unwrap_fundamental(ty).kind(Interner) {
&TyKind::Adt(AdtId(id), _) => is_local(id.module(db.upcast()).krate()),
TyKind::Error => true,
@@ -900,10 +910,7 @@
}),
_ => false,
}
- });
- // FIXME: param coverage
- // - No uncovered type parameters `P1..=Pn` may appear in `T0..Ti`` (excluding `Ti`)
- is_not_orphan
+ })
}
pub fn iterate_path_candidates(
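The `unwrap_fundamental` rewrite above replaces a single-layer unwrap with a loop that peels every layer and uses `break` with a value. A standalone sketch of the same control-flow shape, using toy types rather than the hir-ty ones:

```rust
#[derive(Debug, PartialEq)]
enum Wrapper {
    Ref(Box<Wrapper>),         // stand-in for `&T`
    Fundamental(Box<Wrapper>), // stand-in for a #[fundamental] ADT layer
    Leaf(&'static str),
}

// Keep unwrapping until no fundamental layer remains; the loop's
// `break value` becomes the function's result.
fn unwrap_layers(mut w: Wrapper) -> Wrapper {
    loop {
        match w {
            Wrapper::Ref(inner) | Wrapper::Fundamental(inner) => w = *inner,
            other => break other,
        }
    }
}

fn main() {
    let v = Wrapper::Ref(Box::new(Wrapper::Fundamental(Box::new(Wrapper::Leaf("i32")))));
    assert_eq!(unwrap_layers(v), Wrapper::Leaf("i32"));
}
```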
diff --git a/crates/hir-ty/src/mir/borrowck.rs b/crates/hir-ty/src/mir/borrowck.rs
index d9938fc..85bf850 100644
--- a/crates/hir-ty/src/mir/borrowck.rs
+++ b/crates/hir-ty/src/mir/borrowck.rs
@@ -71,7 +71,7 @@
c: ClosureId,
cb: &mut impl FnMut(Arc<MirBody>),
) -> Result<(), MirLowerError> {
- match db.mir_body_for_closure(c) {
+ match db.mir_body_for_closure(c.into()) {
Ok(body) => {
cb(body.clone());
body.closures.iter().try_for_each(|&it| for_closure(db, it, cb))
diff --git a/crates/hir-ty/src/mir/eval.rs b/crates/hir-ty/src/mir/eval.rs
index ee412dd..a2e6093 100644
--- a/crates/hir-ty/src/mir/eval.rs
+++ b/crates/hir-ty/src/mir/eval.rs
@@ -2452,7 +2452,7 @@
let mir_body = self
.db
.monomorphized_mir_body_for_closure(
- closure,
+ closure.into(),
generic_args.clone(),
self.trait_env.clone(),
)
@@ -2756,7 +2756,7 @@
return Ok(*o);
};
let static_data = self.db.static_data(st);
- let result = if !static_data.is_extern {
+ let result = if !static_data.is_extern() {
let konst = self.db.const_eval_static(st).map_err(|e| {
MirEvalError::ConstEvalError(static_data.name.as_str().to_owned(), Box::new(e))
})?;
diff --git a/crates/hir-ty/src/mir/lower.rs b/crates/hir-ty/src/mir/lower.rs
index 95c93b5..03456fe 100644
--- a/crates/hir-ty/src/mir/lower.rs
+++ b/crates/hir-ty/src/mir/lower.rs
@@ -29,7 +29,7 @@
use crate::{
Adjust, Adjustment, AutoBorrow, CallableDefId, TyBuilder, TyExt,
consteval::ConstEvalError,
- db::{HirDatabase, InternedClosure},
+ db::{HirDatabase, InternedClosure, InternedClosureId},
display::{DisplayTarget, HirDisplay, hir_display_with_types_map},
error_lifetime,
generics::generics,
@@ -38,12 +38,12 @@
layout::LayoutError,
mapping::ToChalk,
mir::{
- AggregateKind, Arena, BasicBlock, BasicBlockId, BinOp, BorrowKind, CastKind, ClosureId,
- ConstScalar, Either, Expr, FieldId, Idx, InferenceResult, Interner, Local, LocalId,
- MemoryMap, MirBody, MirSpan, Mutability, Operand, Place, PlaceElem, PointerCast,
- ProjectionElem, ProjectionStore, RawIdx, Rvalue, Statement, StatementKind, Substitution,
- SwitchTargets, Terminator, TerminatorKind, TupleFieldId, Ty, UnOp, VariantId,
- intern_const_scalar, return_slot,
+ AggregateKind, Arena, BasicBlock, BasicBlockId, BinOp, BorrowKind, CastKind, ConstScalar,
+ Either, Expr, FieldId, Idx, InferenceResult, Interner, Local, LocalId, MemoryMap, MirBody,
+ MirSpan, Mutability, Operand, Place, PlaceElem, PointerCast, ProjectionElem,
+ ProjectionStore, RawIdx, Rvalue, Statement, StatementKind, Substitution, SwitchTargets,
+ Terminator, TerminatorKind, TupleFieldId, Ty, UnOp, VariantId, intern_const_scalar,
+ return_slot,
},
static_lifetime,
traits::FnTrait,
@@ -1635,10 +1635,12 @@
f: impl FnOnce(&mut MirLowerCtx<'_>, BasicBlockId) -> Result<()>,
) -> Result<Option<BasicBlockId>> {
let begin = self.new_basic_block();
- let prev = mem::replace(
- &mut self.current_loop_blocks,
- Some(LoopBlocks { begin, end: None, place, drop_scope_index: self.drop_scopes.len() }),
- );
+ let prev = self.current_loop_blocks.replace(LoopBlocks {
+ begin,
+ end: None,
+ place,
+ drop_scope_index: self.drop_scopes.len(),
+ });
let prev_label = if let Some(label) = label {
// We should generate the end now, to make sure that it wouldn't change later. It is
// bad as we may emit end (unnecessary unreachable block) for unterminating loop, but
@@ -2015,9 +2017,9 @@
pub fn mir_body_for_closure_query(
db: &dyn HirDatabase,
- closure: ClosureId,
+ closure: InternedClosureId,
) -> Result<Arc<MirBody>> {
- let InternedClosure(owner, expr) = db.lookup_intern_closure(closure.into());
+ let InternedClosure(owner, expr) = db.lookup_intern_closure(closure);
let body = db.body(owner);
let infer = db.infer(owner);
let Expr::Closure { args, body: root, .. } = &body[expr] else {
@@ -2026,7 +2028,7 @@
let TyKind::Closure(_, substs) = &infer[expr].kind(Interner) else {
implementation_error!("closure expression is not closure");
};
- let (captures, kind) = infer.closure_info(&closure);
+ let (captures, kind) = infer.closure_info(&closure.into());
let mut ctx = MirLowerCtx::new(db, owner, &body, &infer);
// 0 is return local
ctx.result.locals.alloc(Local { ty: infer[*root].clone() });
diff --git a/crates/hir-ty/src/mir/monomorphization.rs b/crates/hir-ty/src/mir/monomorphization.rs
index c733c7e..ee7e4ce 100644
--- a/crates/hir-ty/src/mir/monomorphization.rs
+++ b/crates/hir-ty/src/mir/monomorphization.rs
@@ -17,9 +17,9 @@
use triomphe::Arc;
use crate::{
- ClosureId, Const, Interner, ProjectionTy, Substitution, TraitEnvironment, Ty, TyKind,
+ Const, Interner, ProjectionTy, Substitution, TraitEnvironment, Ty, TyKind,
consteval::{intern_const_scalar, unknown_const},
- db::{HirDatabase, HirDatabaseData, InternedClosure},
+ db::{HirDatabase, HirDatabaseData, InternedClosure, InternedClosureId},
from_placeholder_idx,
generics::{Generics, generics},
infer::normalize,
@@ -326,11 +326,11 @@
pub fn monomorphized_mir_body_for_closure_query(
db: &dyn HirDatabase,
- closure: ClosureId,
+ closure: InternedClosureId,
subst: Substitution,
trait_env: Arc<crate::TraitEnvironment>,
) -> Result<Arc<MirBody>, MirLowerError> {
- let InternedClosure(owner, _) = db.lookup_intern_closure(closure.into());
+ let InternedClosure(owner, _) = db.lookup_intern_closure(closure);
let generics = owner.as_generic_def_id(db.upcast()).map(|g_def| generics(db.upcast(), g_def));
let filler = &mut Filler { db, subst: &subst, trait_env, generics, owner };
let body = db.mir_body_for_closure(closure)?;
diff --git a/crates/hir-ty/src/mir/pretty.rs b/crates/hir-ty/src/mir/pretty.rs
index d9c0000..30fe45b 100644
--- a/crates/hir-ty/src/mir/pretty.rs
+++ b/crates/hir-ty/src/mir/pretty.rs
@@ -154,7 +154,7 @@
}
fn for_closure(&mut self, closure: ClosureId) {
- let body = match self.db.mir_body_for_closure(closure) {
+ let body = match self.db.mir_body_for_closure(closure.into()) {
Ok(it) => it,
Err(e) => {
wln!(self, "// error in {closure:?}: {e:?}");
diff --git a/crates/hir/src/attrs.rs b/crates/hir/src/attrs.rs
index 7063702..e71b51b 100644
--- a/crates/hir/src/attrs.rs
+++ b/crates/hir/src/attrs.rs
@@ -260,7 +260,7 @@
// attributes here. Use path resolution directly instead.
//
// FIXME: resolve type aliases (which are not yielded by iterate_path_candidates)
- method_resolution::iterate_path_candidates(
+ _ = method_resolution::iterate_path_candidates(
&canonical,
db,
environment,
diff --git a/crates/hir/src/diagnostics.rs b/crates/hir/src/diagnostics.rs
index afa1f6d..651ec15 100644
--- a/crates/hir/src/diagnostics.rs
+++ b/crates/hir/src/diagnostics.rs
@@ -570,10 +570,17 @@
source_map: &hir_def::expr_store::BodySourceMap,
) -> Option<AnyDiagnostic> {
let expr_syntax = |expr| {
- source_map.expr_syntax(expr).inspect_err(|_| stdx::never!("synthetic syntax")).ok()
+ source_map
+ .expr_syntax(expr)
+ .inspect_err(|_| stdx::never!("inference diagnostic in desugared expr"))
+ .ok()
};
- let pat_syntax =
- |pat| source_map.pat_syntax(pat).inspect_err(|_| stdx::never!("synthetic syntax")).ok();
+ let pat_syntax = |pat| {
+ source_map
+ .pat_syntax(pat)
+ .inspect_err(|_| stdx::never!("inference diagnostic in desugared pattern"))
+ .ok()
+ };
let expr_or_pat_syntax = |id| match id {
ExprOrPatId::ExprId(expr) => expr_syntax(expr),
ExprOrPatId::PatId(pat) => pat_syntax(pat),
diff --git a/crates/hir/src/display.rs b/crates/hir/src/display.rs
index ec34fd8..673c336 100644
--- a/crates/hir/src/display.rs
+++ b/crates/hir/src/display.rs
@@ -739,7 +739,7 @@
write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
let data = f.db.static_data(self.id);
f.write_str("static ")?;
- if data.mutable {
+ if data.mutable() {
f.write_str("mut ")?;
}
write!(f, "{}: ", data.name.display(f.db.upcast(), f.edition()))?;
diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs
index 1e58c07..870967e 100644
--- a/crates/hir/src/lib.rs
+++ b/crates/hir/src/lib.rs
@@ -114,6 +114,7 @@
pub use {
cfg::{CfgAtom, CfgExpr, CfgOptions},
hir_def::{
+ Complete,
ImportPathConfig,
attr::{AttrSourceMap, Attrs, AttrsWithOwner},
data::adt::StructKind,
@@ -254,14 +255,17 @@
self,
db: &dyn DefDatabase,
query: import_map::Query,
- ) -> impl Iterator<Item = Either<ModuleDef, Macro>> {
+ ) -> impl Iterator<Item = (Either<ModuleDef, Macro>, Complete)> {
let _p = tracing::info_span!("query_external_importables").entered();
- import_map::search_dependencies(db, self.into(), &query).into_iter().map(|item| {
- match ItemInNs::from(item) {
- ItemInNs::Types(mod_id) | ItemInNs::Values(mod_id) => Either::Left(mod_id),
- ItemInNs::Macros(mac_id) => Either::Right(mac_id),
- }
- })
+ import_map::search_dependencies(db, self.into(), &query).into_iter().map(
+ |(item, do_not_complete)| {
+ let item = match ItemInNs::from(item) {
+ ItemInNs::Types(mod_id) | ItemInNs::Values(mod_id) => Either::Left(mod_id),
+ ItemInNs::Macros(mac_id) => Either::Right(mac_id),
+ };
+ (item, do_not_complete)
+ },
+ )
}
pub fn all(db: &dyn HirDatabase) -> Vec<Crate> {
@@ -290,15 +294,13 @@
}
fn core(db: &dyn HirDatabase) -> Option<Crate> {
- let result = db
- .all_crates()
+ db.all_crates()
.iter()
.copied()
.find(|&krate| {
matches!(krate.data(db).origin, CrateOrigin::Lang(LangCrateOrigin::Core))
})
- .map(Crate::from);
- result
+ .map(Crate::from)
}
}
@@ -811,7 +813,7 @@
let items = &db.trait_items(trait_.into()).items;
let required_items = items.iter().filter(|&(_, assoc)| match *assoc {
AssocItemId::FunctionId(it) => !db.function_data(it).has_body(),
- AssocItemId::ConstId(id) => !db.const_data(id).has_body,
+ AssocItemId::ConstId(id) => !db.const_data(id).has_body(),
AssocItemId::TypeAliasId(it) => db.type_alias_data(it).type_ref.is_none(),
});
impl_assoc_items_scratch.extend(db.impl_items(impl_def.id).items.iter().cloned());
@@ -1221,15 +1223,15 @@
.nth(derive_attr_index.ast_index())
.and_then(|x| Either::left(x.1))?;
let token_tree = derive_attr.meta()?.token_tree()?;
- let group_by = token_tree
+ let chunk_by = token_tree
.syntax()
.children_with_tokens()
.filter_map(|elem| match elem {
syntax::NodeOrToken::Token(tok) => Some(tok),
_ => None,
})
- .group_by(|t| t.kind() == T![,]);
- let (_, mut group) = group_by
+ .chunk_by(|t| t.kind() == T![,]);
+ let (_, mut group) = chunk_by
.into_iter()
.filter(|&(comma, _)| !comma)
.nth(*derive_index as usize)?;
@@ -2812,7 +2814,7 @@
}
pub fn is_mut(self, db: &dyn HirDatabase) -> bool {
- db.static_data(self.id).mutable
+ db.static_data(self.id).mutable()
}
pub fn value(self, db: &dyn HirDatabase) -> Option<ast::Expr> {
@@ -2918,10 +2920,14 @@
db: &dyn HirDatabase,
) -> Option<Vec<DynCompatibilityViolation>> {
let mut violations = vec![];
- hir_ty::dyn_compatibility::dyn_compatibility_with_callback(db, self.id, &mut |violation| {
- violations.push(violation);
- ControlFlow::Continue(())
- });
+ _ = hir_ty::dyn_compatibility::dyn_compatibility_with_callback(
+ db,
+ self.id,
+ &mut |violation| {
+ violations.push(violation);
+ ControlFlow::Continue(())
+ },
+ );
violations.is_empty().not().then_some(violations)
}
@@ -2932,6 +2938,11 @@
.map(|it| it.as_ref().clone().into_boxed_slice())
.unwrap_or_default()
}
+
+ /// `#[rust_analyzer::completions(...)]` mode.
+ pub fn complete(self, db: &dyn HirDatabase) -> Complete {
+ Complete::extract(true, &self.attrs(db))
+ }
}
impl HasVisibility for Trait {
@@ -5505,7 +5516,7 @@
.generic_def()
.map_or_else(|| TraitEnvironment::empty(krate.id), |d| db.trait_environment(d));
- method_resolution::iterate_method_candidates_dyn(
+ _ = method_resolution::iterate_method_candidates_dyn(
&canonical,
db,
environment,
@@ -5592,7 +5603,7 @@
.generic_def()
.map_or_else(|| TraitEnvironment::empty(krate.id), |d| db.trait_environment(d));
- method_resolution::iterate_path_candidates(
+ _ = method_resolution::iterate_path_candidates(
&canonical,
db,
environment,
@@ -6359,3 +6370,48 @@
self(item)
}
}
+
+pub fn resolve_absolute_path<'a, I: Iterator<Item = Symbol> + Clone + 'a>(
+ db: &'a dyn HirDatabase,
+ mut segments: I,
+) -> impl Iterator<Item = ItemInNs> + use<'a, I> {
+ segments
+ .next()
+ .into_iter()
+ .flat_map(move |crate_name| {
+ db.all_crates()
+ .iter()
+ .filter(|&krate| {
+ krate
+ .extra_data(db)
+ .display_name
+ .as_ref()
+ .is_some_and(|name| *name.crate_name().symbol() == crate_name)
+ })
+ .filter_map(|&krate| {
+ let segments = segments.clone();
+ let mut def_map = db.crate_def_map(krate);
+ let mut module = &def_map[DefMap::ROOT];
+ let mut segments = segments.with_position().peekable();
+ while let Some((_, segment)) = segments.next_if(|&(position, _)| {
+ !matches!(position, itertools::Position::Last | itertools::Position::Only)
+ }) {
+ let res = module
+ .scope
+ .get(&Name::new_symbol_root(segment))
+ .take_types()
+ .and_then(|res| match res {
+ ModuleDefId::ModuleId(it) => Some(it),
+ _ => None,
+ })?;
+ def_map = res.def_map(db.upcast());
+ module = &def_map[res.local_id];
+ }
+ let (_, item_name) = segments.next()?;
+ let res = module.scope.get(&Name::new_symbol_root(item_name));
+ Some(res.iter_items().map(|(item, _)| item.into()))
+ })
+ .collect::<Vec<_>>()
+ })
+ .flatten()
+}
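The new `resolve_absolute_path` walks every segment except the last as a module lookup and then resolves the final segment in that module's scope, using `with_position` plus `Peekable::next_if` to split the iterator without collecting it. A toy reimplementation of just that splitting trick over plain strings (assumes itertools 0.11+, where `with_position` yields `(Position, item)` tuples):

```rust
use itertools::{Itertools, Position};

fn split_path<'a>(segments: impl Iterator<Item = &'a str>) -> (Vec<&'a str>, Option<&'a str>) {
    let mut segments = segments.with_position().peekable();
    let mut modules = Vec::new();
    // Consume segments as long as they are not the last (or only) one.
    while let Some((_, seg)) =
        segments.next_if(|&(pos, _)| !matches!(pos, Position::Last | Position::Only))
    {
        modules.push(seg);
    }
    // Whatever remains is the item name itself.
    let item = segments.next().map(|(_, seg)| seg);
    (modules, item)
}

fn main() {
    let (modules, item) = split_path(["alloc", "vec", "Vec"].into_iter());
    assert_eq!(modules, ["alloc", "vec"]);
    assert_eq!(item, Some("Vec"));
}
```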
diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs
index bb5c7c0..ba5ceef 100644
--- a/crates/hir/src/source_analyzer.rs
+++ b/crates/hir/src/source_analyzer.rs
@@ -929,7 +929,7 @@
// FIXME: Multiple derives can have the same helper
let name_ref = name_ref.as_name();
for (macro_id, mut helpers) in
- helpers.iter().group_by(|(_, macro_id, ..)| macro_id).into_iter()
+ helpers.iter().chunk_by(|(_, macro_id, ..)| macro_id).into_iter()
{
if let Some(idx) = helpers.position(|(name, ..)| *name == name_ref)
{
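Newer itertools releases rename `group_by` to `chunk_by` (the old name is deprecated); the behaviour is unchanged: consecutive items whose keys compare equal land in the same group, which is what the hunks here and in `remove_dbg.rs` / `attribute.rs` rely on. A minimal sketch:

```rust
use itertools::Itertools;

fn main() {
    let data = [1, 1, 2, 2, 2, 1];
    // was: data.iter().group_by(|&&x| x)
    let chunks = data.iter().chunk_by(|&&x| x);
    let runs: Vec<(i32, usize)> = chunks
        .into_iter()
        .map(|(key, group)| (key, group.count()))
        .collect();
    // Consecutive runs, not global groups: the trailing 1 is its own chunk.
    assert_eq!(runs, [(1, 2), (2, 3), (1, 1)]);
}
```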
diff --git a/crates/hir/src/symbols.rs b/crates/hir/src/symbols.rs
index ae70f6f..679f775 100644
--- a/crates/hir/src/symbols.rs
+++ b/crates/hir/src/symbols.rs
@@ -2,7 +2,7 @@
use either::Either;
use hir_def::{
- AdtId, AssocItemId, DefWithBodyId, ExternCrateId, HasModule, ImplId, Lookup, MacroId,
+ AdtId, AssocItemId, Complete, DefWithBodyId, ExternCrateId, HasModule, ImplId, Lookup, MacroId,
ModuleDefId, ModuleId, TraitId,
db::DefDatabase,
item_scope::{ImportId, ImportOrExternCrate, ImportOrGlob},
@@ -34,6 +34,7 @@
/// Whether this symbol is a doc alias for the original symbol.
pub is_alias: bool,
pub is_assoc: bool,
+ pub do_not_complete: Complete,
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
@@ -122,35 +123,43 @@
match def {
ModuleDefId::ModuleId(id) => this.push_module(id, name),
ModuleDefId::FunctionId(id) => {
- this.push_decl(id, name, false);
+ this.push_decl(id, name, false, None);
this.collect_from_body(id, Some(name.clone()));
}
- ModuleDefId::AdtId(AdtId::StructId(id)) => this.push_decl(id, name, false),
- ModuleDefId::AdtId(AdtId::EnumId(id)) => this.push_decl(id, name, false),
- ModuleDefId::AdtId(AdtId::UnionId(id)) => this.push_decl(id, name, false),
+ ModuleDefId::AdtId(AdtId::StructId(id)) => {
+ this.push_decl(id, name, false, None);
+ }
+ ModuleDefId::AdtId(AdtId::EnumId(id)) => {
+ this.push_decl(id, name, false, None);
+ }
+ ModuleDefId::AdtId(AdtId::UnionId(id)) => {
+ this.push_decl(id, name, false, None);
+ }
ModuleDefId::ConstId(id) => {
- this.push_decl(id, name, false);
+ this.push_decl(id, name, false, None);
this.collect_from_body(id, Some(name.clone()));
}
ModuleDefId::StaticId(id) => {
- this.push_decl(id, name, false);
+ this.push_decl(id, name, false, None);
this.collect_from_body(id, Some(name.clone()));
}
ModuleDefId::TraitId(id) => {
- this.push_decl(id, name, false);
- this.collect_from_trait(id);
+ let trait_do_not_complete = this.push_decl(id, name, false, None);
+ this.collect_from_trait(id, trait_do_not_complete);
}
ModuleDefId::TraitAliasId(id) => {
- this.push_decl(id, name, false);
+ this.push_decl(id, name, false, None);
}
ModuleDefId::TypeAliasId(id) => {
- this.push_decl(id, name, false);
+ this.push_decl(id, name, false, None);
}
- ModuleDefId::MacroId(id) => match id {
- MacroId::Macro2Id(id) => this.push_decl(id, name, false),
- MacroId::MacroRulesId(id) => this.push_decl(id, name, false),
- MacroId::ProcMacroId(id) => this.push_decl(id, name, false),
- },
+ ModuleDefId::MacroId(id) => {
+ match id {
+ MacroId::Macro2Id(id) => this.push_decl(id, name, false, None),
+ MacroId::MacroRulesId(id) => this.push_decl(id, name, false, None),
+ MacroId::ProcMacroId(id) => this.push_decl(id, name, false, None),
+ };
+ }
// Don't index these.
ModuleDefId::BuiltinType(_) => {}
ModuleDefId::EnumVariantId(_) => {}
@@ -194,6 +203,7 @@
loc: dec_loc,
is_alias: false,
is_assoc: false,
+ do_not_complete: Complete::Yes,
});
};
@@ -223,6 +233,7 @@
loc: dec_loc,
is_alias: false,
is_assoc: false,
+ do_not_complete: Complete::Yes,
});
};
@@ -281,10 +292,10 @@
for &id in id {
if id.module(self.db.upcast()) == module_id {
match id {
- MacroId::Macro2Id(id) => self.push_decl(id, name, false),
- MacroId::MacroRulesId(id) => self.push_decl(id, name, false),
- MacroId::ProcMacroId(id) => self.push_decl(id, name, false),
- }
+ MacroId::Macro2Id(id) => self.push_decl(id, name, false, None),
+ MacroId::MacroRulesId(id) => self.push_decl(id, name, false, None),
+ MacroId::ProcMacroId(id) => self.push_decl(id, name, false, None),
+ };
}
}
}
@@ -314,16 +325,16 @@
);
self.with_container_name(impl_name, |s| {
for &(ref name, assoc_item_id) in &self.db.impl_items(impl_id).items {
- s.push_assoc_item(assoc_item_id, name)
+ s.push_assoc_item(assoc_item_id, name, None)
}
})
}
- fn collect_from_trait(&mut self, trait_id: TraitId) {
+ fn collect_from_trait(&mut self, trait_id: TraitId, trait_do_not_complete: Complete) {
let trait_data = self.db.trait_data(trait_id);
self.with_container_name(Some(trait_data.name.as_str().into()), |s| {
for &(ref name, assoc_item_id) in &self.db.trait_items(trait_id).items {
- s.push_assoc_item(assoc_item_id, name);
+ s.push_assoc_item(assoc_item_id, name, Some(trait_do_not_complete));
}
});
}
@@ -338,15 +349,26 @@
}
}
- fn push_assoc_item(&mut self, assoc_item_id: AssocItemId, name: &Name) {
+ fn push_assoc_item(
+ &mut self,
+ assoc_item_id: AssocItemId,
+ name: &Name,
+ trait_do_not_complete: Option<Complete>,
+ ) {
match assoc_item_id {
- AssocItemId::FunctionId(id) => self.push_decl(id, name, true),
- AssocItemId::ConstId(id) => self.push_decl(id, name, true),
- AssocItemId::TypeAliasId(id) => self.push_decl(id, name, true),
- }
+ AssocItemId::FunctionId(id) => self.push_decl(id, name, true, trait_do_not_complete),
+ AssocItemId::ConstId(id) => self.push_decl(id, name, true, trait_do_not_complete),
+ AssocItemId::TypeAliasId(id) => self.push_decl(id, name, true, trait_do_not_complete),
+ };
}
- fn push_decl<L>(&mut self, id: L, name: &Name, is_assoc: bool)
+ fn push_decl<L>(
+ &mut self,
+ id: L,
+ name: &Name,
+ is_assoc: bool,
+ trait_do_not_complete: Option<Complete>,
+ ) -> Complete
where
L: Lookup<Database = dyn DefDatabase> + Into<ModuleDefId>,
<L as Lookup>::Data: HasSource,
@@ -354,7 +376,7 @@
{
let loc = id.lookup(self.db.upcast());
let source = loc.source(self.db.upcast());
- let Some(name_node) = source.value.name() else { return };
+ let Some(name_node) = source.value.name() else { return Complete::Yes };
let def = ModuleDef::from(id.into());
let dec_loc = DeclarationLocation {
hir_file_id: source.file_id,
@@ -362,7 +384,14 @@
name_ptr: AstPtr::new(&name_node).wrap_left(),
};
+ let mut do_not_complete = Complete::Yes;
+
if let Some(attrs) = def.attrs(self.db) {
+ do_not_complete = Complete::extract(matches!(def, ModuleDef::Trait(_)), &attrs);
+ if let Some(trait_do_not_complete) = trait_do_not_complete {
+ do_not_complete = Complete::for_trait_item(trait_do_not_complete, do_not_complete);
+ }
+
for alias in attrs.doc_aliases() {
self.symbols.insert(FileSymbol {
name: alias.clone(),
@@ -371,6 +400,7 @@
container_name: self.current_container_name.clone(),
is_alias: true,
is_assoc,
+ do_not_complete,
});
}
}
@@ -382,7 +412,10 @@
loc: dec_loc,
is_alias: false,
is_assoc,
+ do_not_complete,
});
+
+ do_not_complete
}
fn push_module(&mut self, module_id: ModuleId, name: &Name) {
@@ -399,7 +432,10 @@
let def = ModuleDef::Module(module_id.into());
+ let mut do_not_complete = Complete::Yes;
if let Some(attrs) = def.attrs(self.db) {
+ do_not_complete = Complete::extract(matches!(def, ModuleDef::Trait(_)), &attrs);
+
for alias in attrs.doc_aliases() {
self.symbols.insert(FileSymbol {
name: alias.clone(),
@@ -408,6 +444,7 @@
container_name: self.current_container_name.clone(),
is_alias: true,
is_assoc: false,
+ do_not_complete,
});
}
}
@@ -419,6 +456,7 @@
loc: dec_loc,
is_alias: false,
is_assoc: false,
+ do_not_complete,
});
}
}
diff --git a/crates/ide-assists/Cargo.toml b/crates/ide-assists/Cargo.toml
index 3768c22..53af980 100644
--- a/crates/ide-assists/Cargo.toml
+++ b/crates/ide-assists/Cargo.toml
@@ -12,7 +12,7 @@
[lib]
[dependencies]
-cov-mark = "2.0.0-pre.1"
+cov-mark = "2.0.0"
itertools.workspace = true
either.workspace = true
@@ -26,7 +26,7 @@
hir.workspace = true
[dev-dependencies]
-expect-test = "1.4.0"
+expect-test = "1.5.1"
# local deps
test-utils.workspace = true
diff --git a/crates/ide-assists/src/handlers/add_braces.rs b/crates/ide-assists/src/handlers/add_braces.rs
index d8db7cc..b6e3b0a 100644
--- a/crates/ide-assists/src/handlers/add_braces.rs
+++ b/crates/ide-assists/src/handlers/add_braces.rs
@@ -39,7 +39,7 @@
},
expr.syntax().text_range(),
|builder| {
- let make = SyntaxFactory::new();
+ let make = SyntaxFactory::with_mappings();
let mut editor = builder.make_editor(expr.syntax());
let block_expr = make.block_expr(None, Some(expr.clone()));
diff --git a/crates/ide-assists/src/handlers/add_missing_match_arms.rs b/crates/ide-assists/src/handlers/add_missing_match_arms.rs
index 4cabf4b..777e40e 100644
--- a/crates/ide-assists/src/handlers/add_missing_match_arms.rs
+++ b/crates/ide-assists/src/handlers/add_missing_match_arms.rs
@@ -77,7 +77,7 @@
let cfg = ctx.config.import_path_config();
- let make = SyntaxFactory::new();
+ let make = SyntaxFactory::with_mappings();
let module = ctx.sema.scope(expr.syntax())?.module();
let (mut missing_pats, is_non_exhaustive, has_hidden_variants): (
@@ -467,7 +467,7 @@
let fields = var.fields(db);
let pat: ast::Pat = match var.kind(db) {
hir::StructKind::Tuple => {
- let mut name_generator = suggest_name::NameGenerator::new();
+ let mut name_generator = suggest_name::NameGenerator::default();
let pats = fields.into_iter().map(|f| {
let name = name_generator.for_type(&f.ty(db), db, edition);
match name {
diff --git a/crates/ide-assists/src/handlers/add_turbo_fish.rs b/crates/ide-assists/src/handlers/add_turbo_fish.rs
index 4901962..245aa3a 100644
--- a/crates/ide-assists/src/handlers/add_turbo_fish.rs
+++ b/crates/ide-assists/src/handlers/add_turbo_fish.rs
@@ -141,7 +141,7 @@
|builder| {
builder.trigger_parameter_hints();
- let make = SyntaxFactory::new();
+ let make = SyntaxFactory::with_mappings();
let mut editor = match &turbofish_target {
Either::Left(it) => builder.make_editor(it.syntax()),
Either::Right(it) => builder.make_editor(it.syntax()),
diff --git a/crates/ide-assists/src/handlers/apply_demorgan.rs b/crates/ide-assists/src/handlers/apply_demorgan.rs
index daab992..e03b4ab 100644
--- a/crates/ide-assists/src/handlers/apply_demorgan.rs
+++ b/crates/ide-assists/src/handlers/apply_demorgan.rs
@@ -64,7 +64,7 @@
_ => return None,
};
- let make = SyntaxFactory::new();
+ let make = SyntaxFactory::with_mappings();
let demorganed = bin_expr.clone_subtree();
let mut editor = SyntaxEditor::new(demorganed.syntax().clone());
@@ -111,7 +111,7 @@
"Apply De Morgan's law",
op_range,
|builder| {
- let make = SyntaxFactory::new();
+ let make = SyntaxFactory::with_mappings();
let paren_expr = bin_expr.syntax().parent().and_then(ast::ParenExpr::cast);
let neg_expr = paren_expr
.clone()
@@ -194,7 +194,7 @@
label,
op_range,
|builder| {
- let make = SyntaxFactory::new();
+ let make = SyntaxFactory::with_mappings();
let mut editor = builder.make_editor(method_call.syntax());
// replace the method name
let new_name = match name.text().as_str() {
diff --git a/crates/ide-assists/src/handlers/convert_bool_then.rs b/crates/ide-assists/src/handlers/convert_bool_then.rs
index f3210a6..ba5488e 100644
--- a/crates/ide-assists/src/handlers/convert_bool_then.rs
+++ b/crates/ide-assists/src/handlers/convert_bool_then.rs
@@ -98,7 +98,7 @@
let closure_body = ast::Expr::cast(edit.new_root().clone()).unwrap();
let mut editor = builder.make_editor(expr.syntax());
- let make = SyntaxFactory::new();
+ let make = SyntaxFactory::with_mappings();
let closure_body = match closure_body {
ast::Expr::BlockExpr(block) => unwrap_trivial_block(block),
e => e,
@@ -216,7 +216,7 @@
let closure_body = ast::BlockExpr::cast(edit.new_root().clone()).unwrap();
let mut editor = builder.make_editor(mcall.syntax());
- let make = SyntaxFactory::new();
+ let make = SyntaxFactory::with_mappings();
let cond = match &receiver {
ast::Expr::ParenExpr(expr) => expr.expr().unwrap_or(receiver),
diff --git a/crates/ide-assists/src/handlers/convert_for_to_while_let.rs b/crates/ide-assists/src/handlers/convert_for_to_while_let.rs
index 801a57b..51b16ca 100644
--- a/crates/ide-assists/src/handlers/convert_for_to_while_let.rs
+++ b/crates/ide-assists/src/handlers/convert_for_to_while_let.rs
@@ -51,7 +51,7 @@
"Replace this for loop with `while let`",
for_loop.syntax().text_range(),
|builder| {
- let make = SyntaxFactory::new();
+ let make = SyntaxFactory::with_mappings();
let mut editor = builder.make_editor(for_loop.syntax());
let (iterable, method) = if impls_core_iter(&ctx.sema, &iterable) {
diff --git a/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs b/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs
index 1212bb7..54699a9 100644
--- a/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs
+++ b/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs
@@ -79,11 +79,11 @@
Some((NodeOrToken::Token(t), rest)) if t.kind() == WHITESPACE => rest,
_ => arg,
};
- let arg = match arg.split_last() {
+
+ match arg.split_last() {
Some((NodeOrToken::Token(t), rest)) if t.kind() == WHITESPACE => rest,
_ => arg,
- };
- arg
+ }
});
args.collect()
diff --git a/crates/ide-assists/src/handlers/extract_function.rs b/crates/ide-assists/src/handlers/extract_function.rs
index abc0698..5a6a7ed 100644
--- a/crates/ide-assists/src/handlers/extract_function.rs
+++ b/crates/ide-assists/src/handlers/extract_function.rs
@@ -751,7 +751,7 @@
ast::Stmt::Item(_) => (),
ast::Stmt::LetStmt(stmt) => {
if let Some(pat) = stmt.pat() {
- walk_pat(&pat, &mut |pat| {
+ _ = walk_pat(&pat, &mut |pat| {
cb(pat);
std::ops::ControlFlow::<(), ()>::Continue(())
});
diff --git a/crates/ide-assists/src/handlers/extract_variable.rs b/crates/ide-assists/src/handlers/extract_variable.rs
index 95ea8b3..f44f4ba 100644
--- a/crates/ide-assists/src/handlers/extract_variable.rs
+++ b/crates/ide-assists/src/handlers/extract_variable.rs
@@ -170,7 +170,7 @@
|edit| {
let (var_name, expr_replace) = kind.get_name_and_expr(ctx, &to_extract);
- let make = SyntaxFactory::new();
+ let make = SyntaxFactory::with_mappings();
let mut editor = edit.make_editor(&expr_replace);
let pat_name = make.name(&var_name);
diff --git a/crates/ide-assists/src/handlers/flip_binexpr.rs b/crates/ide-assists/src/handlers/flip_binexpr.rs
index 94d7b73..2ac9fd8 100644
--- a/crates/ide-assists/src/handlers/flip_binexpr.rs
+++ b/crates/ide-assists/src/handlers/flip_binexpr.rs
@@ -48,7 +48,7 @@
op_token.text_range(),
|builder| {
let mut editor = builder.make_editor(&expr.syntax().parent().unwrap());
- let make = SyntaxFactory::new();
+ let make = SyntaxFactory::with_mappings();
if let FlipAction::FlipAndReplaceOp(binary_op) = action {
editor.replace(op_token, make.token(binary_op))
};
diff --git a/crates/ide-assists/src/handlers/flip_comma.rs b/crates/ide-assists/src/handlers/flip_comma.rs
index 25e514b..7045e4b 100644
--- a/crates/ide-assists/src/handlers/flip_comma.rs
+++ b/crates/ide-assists/src/handlers/flip_comma.rs
@@ -101,7 +101,7 @@
]
.concat();
- let make = SyntaxFactory::new();
+ let make = SyntaxFactory::with_mappings();
let new_token_tree = make.token_tree(tree.left_delimiter_token().unwrap().kind(), result);
(new_token_tree, make.finish_with_mappings())
}
diff --git a/crates/ide-assists/src/handlers/generate_enum_variant.rs b/crates/ide-assists/src/handlers/generate_enum_variant.rs
index 8a20a2d..4c45412 100644
--- a/crates/ide-assists/src/handlers/generate_enum_variant.rs
+++ b/crates/ide-assists/src/handlers/generate_enum_variant.rs
@@ -60,7 +60,7 @@
acc.add(AssistId::generate("generate_enum_variant"), "Generate variant", target, |builder| {
let mut editor = builder.make_editor(enum_node.syntax());
- let make = SyntaxFactory::new();
+ let make = SyntaxFactory::with_mappings();
let field_list = parent.make_field_list(ctx, &make);
let variant = make.variant(None, make.name(&name_ref.text()), field_list, None);
if let Some(it) = enum_node.variant_list() {
diff --git a/crates/ide-assists/src/handlers/inline_local_variable.rs b/crates/ide-assists/src/handlers/inline_local_variable.rs
index f1a3f72..297a53a 100644
--- a/crates/ide-assists/src/handlers/inline_local_variable.rs
+++ b/crates/ide-assists/src/handlers/inline_local_variable.rs
@@ -91,7 +91,7 @@
}
}
- let make = SyntaxFactory::new();
+ let make = SyntaxFactory::with_mappings();
for (name, should_wrap) in wrap_in_parens {
let replacement = if should_wrap {
diff --git a/crates/ide-assists/src/handlers/introduce_named_type_parameter.rs b/crates/ide-assists/src/handlers/introduce_named_type_parameter.rs
index 9c39a7a..37dc92b 100644
--- a/crates/ide-assists/src/handlers/introduce_named_type_parameter.rs
+++ b/crates/ide-assists/src/handlers/introduce_named_type_parameter.rs
@@ -24,7 +24,7 @@
let fn_ = param.syntax().ancestors().nth(2).and_then(ast::Fn::cast)?;
let type_bound_list = impl_trait_type.type_bound_list()?;
- let make = SyntaxFactory::new();
+ let make = SyntaxFactory::with_mappings();
let target = fn_.syntax().text_range();
acc.add(
AssistId::refactor_rewrite("introduce_named_type_parameter"),
diff --git a/crates/ide-assists/src/handlers/remove_dbg.rs b/crates/ide-assists/src/handlers/remove_dbg.rs
index 809ef6d..f2767a3 100644
--- a/crates/ide-assists/src/handlers/remove_dbg.rs
+++ b/crates/ide-assists/src/handlers/remove_dbg.rs
@@ -74,7 +74,7 @@
}
let mac_input = tt.syntax().children_with_tokens().skip(1).take_while(|it| *it != r_delim);
- let input_expressions = mac_input.group_by(|tok| tok.kind() == T![,]);
+ let input_expressions = mac_input.chunk_by(|tok| tok.kind() == T![,]);
let input_expressions = input_expressions
.into_iter()
.filter_map(|(is_sep, group)| (!is_sep).then_some(group))
diff --git a/crates/ide-assists/src/handlers/remove_parentheses.rs b/crates/ide-assists/src/handlers/remove_parentheses.rs
index f5b3e00..9349c53 100644
--- a/crates/ide-assists/src/handlers/remove_parentheses.rs
+++ b/crates/ide-assists/src/handlers/remove_parentheses.rs
@@ -54,7 +54,7 @@
None => false,
};
if need_to_add_ws {
- let make = SyntaxFactory::new();
+ let make = SyntaxFactory::with_mappings();
editor.insert(Position::before(parens.syntax()), make.whitespace(" "));
editor.add_mappings(make.finish_with_mappings());
}
diff --git a/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs b/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs
index 691ae93..feeea88 100644
--- a/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs
+++ b/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs
@@ -78,7 +78,7 @@
NameToImport::exact_case_sensitive(path.segments().last()?.to_string()),
items_locator::AssocSearchMode::Exclude,
)
- .filter_map(|item| match item.into_module_def() {
+ .filter_map(|(item, _)| match item.into_module_def() {
ModuleDef::Trait(trait_) => Some(trait_),
_ => None,
})
diff --git a/crates/ide-assists/src/handlers/replace_if_let_with_match.rs b/crates/ide-assists/src/handlers/replace_if_let_with_match.rs
index 734bd17..2b356a1 100644
--- a/crates/ide-assists/src/handlers/replace_if_let_with_match.rs
+++ b/crates/ide-assists/src/handlers/replace_if_let_with_match.rs
@@ -105,7 +105,7 @@
format!("Replace if{let_} with match"),
available_range,
move |builder| {
- let make = SyntaxFactory::new();
+ let make = SyntaxFactory::with_mappings();
let match_expr = {
let else_arm = make_else_arm(ctx, &make, else_block, &cond_bodies);
let make_match_arm = |(pat, body): (_, ast::BlockExpr)| {
@@ -253,7 +253,7 @@
format!("Replace match with if{let_}"),
match_expr.syntax().text_range(),
move |builder| {
- let make = SyntaxFactory::new();
+ let make = SyntaxFactory::with_mappings();
let make_block_expr = |expr: ast::Expr| {
// Blocks with modifiers (unsafe, async, etc.) are parsed as BlockExpr, but are
// formatted without enclosing braces. If we encounter such block exprs,
diff --git a/crates/ide-assists/src/handlers/replace_let_with_if_let.rs b/crates/ide-assists/src/handlers/replace_let_with_if_let.rs
index a2dcbf9..c92a494 100644
--- a/crates/ide-assists/src/handlers/replace_let_with_if_let.rs
+++ b/crates/ide-assists/src/handlers/replace_let_with_if_let.rs
@@ -43,21 +43,33 @@
target,
|builder| {
let mut editor = builder.make_editor(let_stmt.syntax());
- let make = SyntaxFactory::new();
+ let make = SyntaxFactory::with_mappings();
let ty = ctx.sema.type_of_expr(&init);
- let happy_variant = ty
- .and_then(|ty| TryEnum::from_ty(&ctx.sema, &ty.adjusted()))
- .map(|it| it.happy_case());
- let pat = match happy_variant {
- None => original_pat,
- Some(var_name) => {
- make.tuple_struct_pat(make.ident_path(var_name), [original_pat]).into()
+ let pat = if let_stmt.let_else().is_some() {
+ // Do not add the wrapper type that implements `Try`,
+ // since the statement already wraps the pattern.
+ original_pat
+ } else {
+ let happy_variant = ty
+ .and_then(|ty| TryEnum::from_ty(&ctx.sema, &ty.adjusted()))
+ .map(|it| it.happy_case());
+ match happy_variant {
+ None => original_pat,
+ Some(var_name) => {
+ make.tuple_struct_pat(make.ident_path(var_name), [original_pat]).into()
+ }
}
};
let block = make.block_expr([], None);
block.indent(IndentLevel::from_node(let_stmt.syntax()));
- let if_expr = make.expr_if(make.expr_let(pat, init).into(), block, None);
+ let if_expr = make.expr_if(
+ make.expr_let(pat, init).into(),
+ block,
+ let_stmt
+ .let_else()
+ .and_then(|let_else| let_else.block_expr().map(ast::ElseBranch::from)),
+ );
let if_stmt = make.expr_stmt(if_expr.into());
editor.replace(let_stmt.syntax(), if_stmt.syntax());
@@ -94,4 +106,25 @@
",
)
}
+
+ #[test]
+ fn replace_let_else() {
+ check_assist(
+ replace_let_with_if_let,
+ r"
+//- minicore: option
+fn main() {
+ let a = Some(1);
+ $0let Some(_) = a else { unreachable!() };
+}
+ ",
+ r"
+fn main() {
+ let a = Some(1);
+ if let Some(_) = a {
+ } else { unreachable!() }
+}
+ ",
+ )
+ }
}
diff --git a/crates/ide-assists/src/handlers/toggle_ignore.rs b/crates/ide-assists/src/handlers/toggle_ignore.rs
index 7a29928..386625b 100644
--- a/crates/ide-assists/src/handlers/toggle_ignore.rs
+++ b/crates/ide-assists/src/handlers/toggle_ignore.rs
@@ -30,13 +30,13 @@
match has_ignore_attribute(&func) {
None => acc.add(
- AssistId::none("toggle_ignore"),
+ AssistId::refactor("toggle_ignore"),
"Ignore this test",
attr.syntax().text_range(),
|builder| builder.insert(attr.syntax().text_range().end(), "\n#[ignore]"),
),
Some(ignore_attr) => acc.add(
- AssistId::none("toggle_ignore"),
+ AssistId::refactor("toggle_ignore"),
"Re-enable this test",
ignore_attr.syntax().text_range(),
|builder| {
diff --git a/crates/ide-assists/src/handlers/unwrap_return_type.rs b/crates/ide-assists/src/handlers/unwrap_return_type.rs
index 8804fea..1c4c373 100644
--- a/crates/ide-assists/src/handlers/unwrap_return_type.rs
+++ b/crates/ide-assists/src/handlers/unwrap_return_type.rs
@@ -67,7 +67,7 @@
acc.add(kind.assist_id(), kind.label(), type_ref.syntax().text_range(), |builder| {
let mut editor = builder.make_editor(&parent);
- let make = SyntaxFactory::new();
+ let make = SyntaxFactory::with_mappings();
let mut exprs_to_unwrap = Vec::new();
let tail_cb = &mut |e: &_| tail_cb_impl(&mut exprs_to_unwrap, e);
diff --git a/crates/ide-assists/src/handlers/wrap_return_type.rs b/crates/ide-assists/src/handlers/wrap_return_type.rs
index a044068..64251ed 100644
--- a/crates/ide-assists/src/handlers/wrap_return_type.rs
+++ b/crates/ide-assists/src/handlers/wrap_return_type.rs
@@ -77,7 +77,7 @@
type_ref.syntax().text_range(),
|builder| {
let mut editor = builder.make_editor(&parent);
- let make = SyntaxFactory::new();
+ let make = SyntaxFactory::with_mappings();
let alias = wrapper_alias(ctx, &make, &core_wrapper, type_ref, kind.symbol());
let new_return_ty = alias.unwrap_or_else(|| match kind {
WrapperKind::Option => make.ty_option(type_ref.clone()),
diff --git a/crates/ide-completion/Cargo.toml b/crates/ide-completion/Cargo.toml
index 68cc7a0..94c01e3 100644
--- a/crates/ide-completion/Cargo.toml
+++ b/crates/ide-completion/Cargo.toml
@@ -12,7 +12,7 @@
[lib]
[dependencies]
-cov-mark = "2.0.0-pre.1"
+cov-mark = "2.0.0"
itertools.workspace = true
tracing.workspace = true
@@ -29,7 +29,7 @@
hir.workspace = true
[dev-dependencies]
-expect-test = "1.4.0"
+expect-test = "1.5.1"
# local deps
test-utils.workspace = true
diff --git a/crates/ide-completion/src/completions/attribute.rs b/crates/ide-completion/src/completions/attribute.rs
index fb7df8c..3c195f8 100644
--- a/crates/ide-completion/src/completions/attribute.rs
+++ b/crates/ide-completion/src/completions/attribute.rs
@@ -380,7 +380,7 @@
.children_with_tokens()
.skip(1)
.take_while(|it| it.as_token() != Some(&r_paren));
- let input_expressions = tokens.group_by(|tok| tok.kind() == T![,]);
+ let input_expressions = tokens.chunk_by(|tok| tok.kind() == T![,]);
Some(
input_expressions
.into_iter()
diff --git a/crates/ide-completion/src/completions/dot.rs b/crates/ide-completion/src/completions/dot.rs
index dea983b..4f21136 100644
--- a/crates/ide-completion/src/completions/dot.rs
+++ b/crates/ide-completion/src/completions/dot.rs
@@ -2,7 +2,7 @@
use std::ops::ControlFlow;
-use hir::{HasContainer, ItemContainer, MethodCandidateCallback, Name};
+use hir::{Complete, HasContainer, ItemContainer, MethodCandidateCallback, Name};
use ide_db::FxHashSet;
use syntax::SmolStr;
@@ -259,7 +259,9 @@
// This needs to come before the `seen_methods` test, so that if we see the same method twice,
// once as inherent and once not, we will include it.
if let ItemContainer::Trait(trait_) = func.container(self.ctx.db) {
- if self.ctx.exclude_traits.contains(&trait_) {
+ if self.ctx.exclude_traits.contains(&trait_)
+ || trait_.complete(self.ctx.db) == Complete::IgnoreMethods
+ {
return ControlFlow::Continue(());
}
}
diff --git a/crates/ide-completion/src/completions/expr.rs b/crates/ide-completion/src/completions/expr.rs
index 7219a5f..0494d42 100644
--- a/crates/ide-completion/src/completions/expr.rs
+++ b/crates/ide-completion/src/completions/expr.rs
@@ -2,7 +2,7 @@
use std::ops::ControlFlow;
-use hir::{Name, PathCandidateCallback, ScopeDef, sym};
+use hir::{Complete, Name, PathCandidateCallback, ScopeDef, sym};
use ide_db::FxHashSet;
use syntax::ast;
@@ -33,10 +33,10 @@
fn on_trait_item(&mut self, item: hir::AssocItem) -> ControlFlow<()> {
// The excluded check needs to come before the `seen` test, so that if we see the same method twice,
// once as inherent and once not, we will include it.
- if item
- .container_trait(self.ctx.db)
- .is_none_or(|trait_| !self.ctx.exclude_traits.contains(&trait_))
- && self.seen.insert(item)
+ if item.container_trait(self.ctx.db).is_none_or(|trait_| {
+ !self.ctx.exclude_traits.contains(&trait_)
+ && trait_.complete(self.ctx.db) != Complete::IgnoreMethods
+ }) && self.seen.insert(item)
{
(self.add_assoc_item)(self.acc, item);
}
@@ -104,7 +104,9 @@
.iter()
.copied()
.map(hir::Trait::from)
- .filter(|it| !ctx.exclude_traits.contains(it))
+ .filter(|it| {
+ !ctx.exclude_traits.contains(it) && it.complete(ctx.db) != Complete::IgnoreMethods
+ })
.flat_map(|it| it.items(ctx.sema.db))
.for_each(|item| add_assoc_item(acc, item)),
Qualified::TypeAnchor { trait_: Some(trait_), .. } => {
diff --git a/crates/ide-completion/src/completions/flyimport.rs b/crates/ide-completion/src/completions/flyimport.rs
index b3ba076..a747561 100644
--- a/crates/ide-completion/src/completions/flyimport.rs
+++ b/crates/ide-completion/src/completions/flyimport.rs
@@ -268,19 +268,7 @@
&& !ctx.is_item_hidden(original_item)
&& ctx.check_stability(original_item.attrs(ctx.db).as_deref())
})
- .filter(|import| {
- let def = import.item_to_import.into_module_def();
- if let Some(&kind) = ctx.exclude_flyimport.get(&def) {
- if kind == AutoImportExclusionType::Always {
- return false;
- }
- let method_imported = import.item_to_import != import.original_item;
- if method_imported {
- return false;
- }
- }
- true
- })
+ .filter(|import| filter_excluded_flyimport(ctx, import))
.sorted_by(|a, b| {
let key = |import_path| {
(
@@ -366,24 +354,7 @@
!ctx.is_item_hidden(&import.item_to_import)
&& !ctx.is_item_hidden(&import.original_item)
})
- .filter(|import| {
- let def = import.item_to_import.into_module_def();
- if let Some(&kind) = ctx.exclude_flyimport.get(&def) {
- if kind == AutoImportExclusionType::Always {
- return false;
- }
- let method_imported = import.item_to_import != import.original_item;
- if method_imported {
- return false;
- }
- }
-
- if let ModuleDef::Trait(_) = import.item_to_import.into_module_def() {
- !ctx.exclude_flyimport.contains_key(&def)
- } else {
- true
- }
- })
+ .filter(|import| filter_excluded_flyimport(ctx, import))
.sorted_by(|a, b| {
let key = |import_path| {
(
@@ -401,6 +372,28 @@
Some(())
}
+fn filter_excluded_flyimport(ctx: &CompletionContext<'_>, import: &LocatedImport) -> bool {
+ let def = import.item_to_import.into_module_def();
+ let is_exclude_flyimport = ctx.exclude_flyimport.get(&def).copied();
+
+ if matches!(is_exclude_flyimport, Some(AutoImportExclusionType::Always))
+ || !import.complete_in_flyimport.0
+ {
+ return false;
+ }
+ let method_imported = import.item_to_import != import.original_item;
+ if method_imported
+ && (is_exclude_flyimport.is_some()
+ || ctx.exclude_flyimport.contains_key(&import.original_item.into_module_def()))
+ {
+ // If this is a method, exclude it either if it was excluded itself (which may not be caught above,
+ // because `item_to_import` is the trait), or if its trait was excluded. We don't need to check
+ // the attributes here, since they pass from trait to methods on import map construction.
+ return false;
+ }
+ true
+}
+
fn import_name(ctx: &CompletionContext<'_>) -> String {
let token_kind = ctx.token.kind();
diff --git a/crates/ide-completion/src/completions/item_list.rs b/crates/ide-completion/src/completions/item_list.rs
index 4ae00cc..58e7f58 100644
--- a/crates/ide-completion/src/completions/item_list.rs
+++ b/crates/ide-completion/src/completions/item_list.rs
@@ -114,6 +114,7 @@
add_keyword("trait", "trait $1 {\n $0\n}");
if no_vis_qualifiers {
add_keyword("impl", "impl $1 {\n $0\n}");
+ add_keyword("impl for", "impl $1 for $2 {\n $0\n}");
}
}
@@ -144,6 +145,7 @@
add_keyword("use", "use $0");
if no_vis_qualifiers {
add_keyword("impl", "impl $1 {\n $0\n}");
+ add_keyword("impl for", "impl $1 for $2 {\n $0\n}");
}
}
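The new `impl for` keyword completion inserts the snippet `impl $1 for $2 {\n    $0\n}`, i.e. trait and type as tab stops followed by the body. Filled in, the result might look like this (trait and type chosen arbitrarily for illustration):

```rust
use std::fmt;

struct Celsius(f64);

// What a user would end up with after accepting `impl for` and
// completing the two tab stops.
impl fmt::Display for Celsius {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}°C", self.0)
    }
}

fn main() {
    println!("{}", Celsius(21.5));
}
```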
diff --git a/crates/ide-completion/src/completions/keyword.rs b/crates/ide-completion/src/completions/keyword.rs
index 14b0d54..0397424 100644
--- a/crates/ide-completion/src/completions/keyword.rs
+++ b/crates/ide-completion/src/completions/keyword.rs
@@ -56,6 +56,7 @@
kw extern
kw fn
kw impl
+ kw impl for
kw trait
"#]],
);
diff --git a/crates/ide-completion/src/completions/pattern.rs b/crates/ide-completion/src/completions/pattern.rs
index 59ef94d..ea3511d 100644
--- a/crates/ide-completion/src/completions/pattern.rs
+++ b/crates/ide-completion/src/completions/pattern.rs
@@ -48,7 +48,7 @@
// Suggest name only in let-stmt and fn param
if pattern_ctx.should_suggest_name {
- let mut name_generator = suggest_name::NameGenerator::new();
+ let mut name_generator = suggest_name::NameGenerator::default();
if let Some(suggested) = ctx
.expected_type
.as_ref()
diff --git a/crates/ide-completion/src/context.rs b/crates/ide-completion/src/context.rs
index 3043796..fd25ee0 100644
--- a/crates/ide-completion/src/context.rs
+++ b/crates/ide-completion/src/context.rs
@@ -8,8 +8,8 @@
use base_db::{RootQueryDb as _, salsa::AsDynDatabase};
use hir::{
- DisplayTarget, HasAttrs, Local, ModPath, ModuleDef, ModuleSource, Name, PathResolution,
- ScopeDef, Semantics, SemanticsScope, Symbol, Type, TypeInfo,
+ DisplayTarget, HasAttrs, Local, ModuleDef, ModuleSource, Name, PathResolution, ScopeDef,
+ Semantics, SemanticsScope, Symbol, Type, TypeInfo,
};
use ide_db::{
FilePosition, FxHashMap, FxHashSet, RootDatabase, famous_defs::FamousDefs,
@@ -796,15 +796,12 @@
.exclude_traits
.iter()
.filter_map(|path| {
- scope
- .resolve_mod_path(&ModPath::from_segments(
- hir::PathKind::Plain,
- path.split("::").map(Symbol::intern).map(Name::new_symbol_root),
- ))
- .find_map(|it| match it {
+ hir::resolve_absolute_path(db, path.split("::").map(Symbol::intern)).find_map(
+ |it| match it {
hir::ItemInNs::Types(ModuleDef::Trait(t)) => Some(t),
_ => None,
- })
+ },
+ )
})
.collect();
@@ -812,11 +809,7 @@
.exclude_flyimport
.iter()
.flat_map(|(path, kind)| {
- scope
- .resolve_mod_path(&ModPath::from_segments(
- hir::PathKind::Plain,
- path.split("::").map(Symbol::intern).map(Name::new_symbol_root),
- ))
+ hir::resolve_absolute_path(db, path.split("::").map(Symbol::intern))
.map(|it| (it.into_module_def(), *kind))
})
.collect();
diff --git a/crates/ide-completion/src/render.rs b/crates/ide-completion/src/render.rs
index 82c1266..b0adbad 100644
--- a/crates/ide-completion/src/render.rs
+++ b/crates/ide-completion/src/render.rs
@@ -334,7 +334,7 @@
continue;
};
- item.add_import(LocatedImport::new(path, trait_item, trait_item));
+ item.add_import(LocatedImport::new_no_completion(path, trait_item, trait_item));
}
Some(item)
diff --git a/crates/ide-completion/src/render/pattern.rs b/crates/ide-completion/src/render/pattern.rs
index 93e6c52..8332cd0 100644
--- a/crates/ide-completion/src/render/pattern.rs
+++ b/crates/ide-completion/src/render/pattern.rs
@@ -64,11 +64,11 @@
),
None => {
let name = local_name.unwrap_or_else(|| variant.name(ctx.db()));
- let it = (
+
+ (
name.as_str().to_smolstr(),
name.display(ctx.db(), ctx.completion.edition).to_smolstr(),
- );
- it
+ )
}
};
diff --git a/crates/ide-completion/src/snippet.rs b/crates/ide-completion/src/snippet.rs
index 07f33a8..9dc0c02 100644
--- a/crates/ide-completion/src/snippet.rs
+++ b/crates/ide-completion/src/snippet.rs
@@ -174,7 +174,7 @@
ctx.config.insert_use.prefix_kind,
import_cfg,
)?;
- Some((path.len() > 1).then(|| LocatedImport::new(path.clone(), item, item)))
+ Some((path.len() > 1).then(|| LocatedImport::new_no_completion(path.clone(), item, item)))
};
let mut res = Vec::with_capacity(requires.len());
for import in requires {
diff --git a/crates/ide-completion/src/tests/expression.rs b/crates/ide-completion/src/tests/expression.rs
index 22d42ba..b30ac43 100644
--- a/crates/ide-completion/src/tests/expression.rs
+++ b/crates/ide-completion/src/tests/expression.rs
@@ -171,6 +171,7 @@
kw if
kw if let
kw impl
+ kw impl for
kw let
kw letm
kw loop
@@ -249,6 +250,7 @@
kw if
kw if let
kw impl
+ kw impl for
kw let
kw letm
kw loop
@@ -300,6 +302,7 @@
kw if
kw if let
kw impl
+ kw impl for
kw let
kw letm
kw loop
@@ -375,6 +378,7 @@
kw if
kw if let
kw impl
+ kw impl for
kw let
kw letm
kw loop
@@ -961,6 +965,7 @@
kw if
kw if let
kw impl
+ kw impl for
kw let
kw letm
kw loop
@@ -1003,6 +1008,7 @@
kw if
kw if let
kw impl
+ kw impl for
kw let
kw letm
kw loop
@@ -1095,6 +1101,7 @@
kw if
kw if let
kw impl
+ kw impl for
kw let
kw letm
kw loop
@@ -1137,6 +1144,7 @@
kw if
kw if let
kw impl
+ kw impl for
kw let
kw letm
kw loop
@@ -1179,6 +1187,7 @@
kw if
kw if let
kw impl
+ kw impl for
kw let
kw letm
kw loop
@@ -1231,6 +1240,7 @@
kw if
kw if let
kw impl
+ kw impl for
kw let
kw letm
kw loop
@@ -1285,6 +1295,7 @@
kw if
kw if let
kw impl
+ kw impl for
kw let
kw letm
kw loop
@@ -1529,6 +1540,7 @@
kw if
kw if let
kw impl
+ kw impl for
kw let
kw letm
kw loop
@@ -1555,7 +1567,10 @@
#[test]
fn excluded_trait_method_is_excluded() {
check_with_config(
- CompletionConfig { exclude_traits: &["test::ExcludedTrait".to_owned()], ..TEST_CONFIG },
+ CompletionConfig {
+ exclude_traits: &["ra_test_fixture::ExcludedTrait".to_owned()],
+ ..TEST_CONFIG
+ },
r#"
trait ExcludedTrait {
fn foo(&self) {}
@@ -1575,23 +1590,20 @@
}
"#,
expect![[r#"
- me bar() (as ExcludedTrait) fn(&self)
- me baz() (as ExcludedTrait) fn(&self)
- me foo() (as ExcludedTrait) fn(&self)
- me inherent() fn(&self)
- sn box Box::new(expr)
- sn call function(expr)
- sn const const {}
- sn dbg dbg!(expr)
- sn dbgr dbg!(&expr)
- sn deref *expr
- sn let let
- sn letm let mut
- sn match match expr {}
- sn ref &expr
- sn refm &mut expr
- sn return return expr
- sn unsafe unsafe {}
+ me inherent() fn(&self)
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn const const {}
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn deref *expr
+ sn let let
+ sn letm let mut
+ sn match match expr {}
+ sn ref &expr
+ sn refm &mut expr
+ sn return return expr
+ sn unsafe unsafe {}
"#]],
);
}
@@ -1599,7 +1611,10 @@
#[test]
fn excluded_trait_not_excluded_when_inherent() {
check_with_config(
- CompletionConfig { exclude_traits: &["test::ExcludedTrait".to_owned()], ..TEST_CONFIG },
+ CompletionConfig {
+ exclude_traits: &["ra_test_fixture::ExcludedTrait".to_owned()],
+ ..TEST_CONFIG
+ },
r#"
trait ExcludedTrait {
fn foo(&self) {}
@@ -1633,7 +1648,10 @@
"#]],
);
check_with_config(
- CompletionConfig { exclude_traits: &["test::ExcludedTrait".to_owned()], ..TEST_CONFIG },
+ CompletionConfig {
+ exclude_traits: &["ra_test_fixture::ExcludedTrait".to_owned()],
+ ..TEST_CONFIG
+ },
r#"
trait ExcludedTrait {
fn foo(&self) {}
@@ -1667,7 +1685,10 @@
"#]],
);
check_with_config(
- CompletionConfig { exclude_traits: &["test::ExcludedTrait".to_owned()], ..TEST_CONFIG },
+ CompletionConfig {
+ exclude_traits: &["ra_test_fixture::ExcludedTrait".to_owned()],
+ ..TEST_CONFIG
+ },
r#"
trait ExcludedTrait {
fn foo(&self) {}
@@ -1706,7 +1727,7 @@
fn excluded_trait_method_is_excluded_from_flyimport() {
check_with_config(
CompletionConfig {
- exclude_traits: &["test::module2::ExcludedTrait".to_owned()],
+ exclude_traits: &["ra_test_fixture::module2::ExcludedTrait".to_owned()],
..TEST_CONFIG
},
r#"
@@ -1730,23 +1751,20 @@
}
"#,
expect![[r#"
- me bar() (use module2::ExcludedTrait) fn(&self)
- me baz() (use module2::ExcludedTrait) fn(&self)
- me foo() (use module2::ExcludedTrait) fn(&self)
- me inherent() fn(&self)
- sn box Box::new(expr)
- sn call function(expr)
- sn const const {}
- sn dbg dbg!(expr)
- sn dbgr dbg!(&expr)
- sn deref *expr
- sn let let
- sn letm let mut
- sn match match expr {}
- sn ref &expr
- sn refm &mut expr
- sn return return expr
- sn unsafe unsafe {}
+ me inherent() fn(&self)
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn const const {}
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn deref *expr
+ sn let let
+ sn letm let mut
+ sn match match expr {}
+ sn ref &expr
+ sn refm &mut expr
+ sn return return expr
+ sn unsafe unsafe {}
"#]],
);
}
@@ -1756,7 +1774,7 @@
check_with_config(
CompletionConfig {
exclude_flyimport: vec![(
- "test::module2::ExcludedTrait".to_owned(),
+ "ra_test_fixture::module2::ExcludedTrait".to_owned(),
AutoImportExclusionType::Methods,
)],
..TEST_CONFIG
@@ -1782,23 +1800,20 @@
}
"#,
expect![[r#"
- me bar() (use module2::ExcludedTrait) fn(&self)
- me baz() (use module2::ExcludedTrait) fn(&self)
- me foo() (use module2::ExcludedTrait) fn(&self)
- me inherent() fn(&self)
- sn box Box::new(expr)
- sn call function(expr)
- sn const const {}
- sn dbg dbg!(expr)
- sn dbgr dbg!(&expr)
- sn deref *expr
- sn let let
- sn letm let mut
- sn match match expr {}
- sn ref &expr
- sn refm &mut expr
- sn return return expr
- sn unsafe unsafe {}
+ me inherent() fn(&self)
+ sn box Box::new(expr)
+ sn call function(expr)
+ sn const const {}
+ sn dbg dbg!(expr)
+ sn dbgr dbg!(&expr)
+ sn deref *expr
+ sn let let
+ sn letm let mut
+ sn match match expr {}
+ sn ref &expr
+ sn refm &mut expr
+ sn return return expr
+ sn unsafe unsafe {}
"#]],
);
}
@@ -1806,7 +1821,10 @@
#[test]
fn excluded_trait_method_is_excluded_from_path_completion() {
check_with_config(
- CompletionConfig { exclude_traits: &["test::ExcludedTrait".to_owned()], ..TEST_CONFIG },
+ CompletionConfig {
+ exclude_traits: &["ra_test_fixture::ExcludedTrait".to_owned()],
+ ..TEST_CONFIG
+ },
r#"
pub trait ExcludedTrait {
fn foo(&self) {}
@@ -1826,10 +1844,7 @@
}
"#,
expect![[r#"
- me bar(…) (as ExcludedTrait) fn(&self)
- me baz(…) (as ExcludedTrait) fn(&self)
- me foo(…) (as ExcludedTrait) fn(&self)
- me inherent(…) fn(&self)
+ me inherent(…) fn(&self)
"#]],
);
}
@@ -1837,7 +1852,10 @@
#[test]
fn excluded_trait_method_is_not_excluded_when_trait_is_specified() {
check_with_config(
- CompletionConfig { exclude_traits: &["test::ExcludedTrait".to_owned()], ..TEST_CONFIG },
+ CompletionConfig {
+ exclude_traits: &["ra_test_fixture::ExcludedTrait".to_owned()],
+ ..TEST_CONFIG
+ },
r#"
pub trait ExcludedTrait {
fn foo(&self) {}
@@ -1863,7 +1881,10 @@
"#]],
);
check_with_config(
- CompletionConfig { exclude_traits: &["test::ExcludedTrait".to_owned()], ..TEST_CONFIG },
+ CompletionConfig {
+ exclude_traits: &["ra_test_fixture::ExcludedTrait".to_owned()],
+ ..TEST_CONFIG
+ },
r#"
pub trait ExcludedTrait {
fn foo(&self) {}
@@ -1893,7 +1914,10 @@
#[test]
fn excluded_trait_not_excluded_when_inherent_path() {
check_with_config(
- CompletionConfig { exclude_traits: &["test::ExcludedTrait".to_owned()], ..TEST_CONFIG },
+ CompletionConfig {
+ exclude_traits: &["ra_test_fixture::ExcludedTrait".to_owned()],
+ ..TEST_CONFIG
+ },
r#"
trait ExcludedTrait {
fn foo(&self) {}
@@ -1914,7 +1938,10 @@
"#]],
);
check_with_config(
- CompletionConfig { exclude_traits: &["test::ExcludedTrait".to_owned()], ..TEST_CONFIG },
+ CompletionConfig {
+ exclude_traits: &["ra_test_fixture::ExcludedTrait".to_owned()],
+ ..TEST_CONFIG
+ },
r#"
trait ExcludedTrait {
fn foo(&self) {}
@@ -1986,6 +2013,7 @@
kw if
kw if let
kw impl
+ kw impl for
kw let
kw letm
kw loop
@@ -2058,6 +2086,7 @@
kw if
kw if let
kw impl
+ kw impl for
kw let
kw letm
kw loop
diff --git a/crates/ide-completion/src/tests/flyimport.rs b/crates/ide-completion/src/tests/flyimport.rs
index 8bba44c..27c91bc 100644
--- a/crates/ide-completion/src/tests/flyimport.rs
+++ b/crates/ide-completion/src/tests/flyimport.rs
@@ -1810,9 +1810,10 @@
#[test]
fn excluded_trait_item_included_when_exact_match() {
+ // FIXME: This does not work, we need to change the code.
check_with_config(
CompletionConfig {
- exclude_traits: &["test::module2::ExcludedTrait".to_owned()],
+ exclude_traits: &["ra_test_fixture::module2::ExcludedTrait".to_owned()],
..TEST_CONFIG
},
r#"
@@ -1830,8 +1831,120 @@
true.foo$0
}
"#,
+ expect![""],
+ );
+}
+
+#[test]
+fn excluded_via_attr() {
+ check(
+ r#"
+mod module2 {
+ #[rust_analyzer::completions(ignore_flyimport)]
+ pub trait ExcludedTrait {
+ fn foo(&self) {}
+ fn bar(&self) {}
+ fn baz(&self) {}
+ }
+
+ impl<T> ExcludedTrait for T {}
+}
+
+fn foo() {
+ true.$0
+}
+ "#,
+ expect![""],
+ );
+ check(
+ r#"
+mod module2 {
+ #[rust_analyzer::completions(ignore_flyimport_methods)]
+ pub trait ExcludedTrait {
+ fn foo(&self) {}
+ fn bar(&self) {}
+ fn baz(&self) {}
+ }
+
+ impl<T> ExcludedTrait for T {}
+}
+
+fn foo() {
+ true.$0
+}
+ "#,
+ expect![""],
+ );
+ check(
+ r#"
+mod module2 {
+ #[rust_analyzer::completions(ignore_methods)]
+ pub trait ExcludedTrait {
+ fn foo(&self) {}
+ fn bar(&self) {}
+ fn baz(&self) {}
+ }
+
+ impl<T> ExcludedTrait for T {}
+}
+
+fn foo() {
+ true.$0
+}
+ "#,
+ expect![""],
+ );
+ check(
+ r#"
+mod module2 {
+ #[rust_analyzer::completions(ignore_flyimport)]
+ pub trait ExcludedTrait {
+ fn foo(&self) {}
+ fn bar(&self) {}
+ fn baz(&self) {}
+ }
+
+ impl<T> ExcludedTrait for T {}
+}
+
+fn foo() {
+ ExcludedTrait$0
+}
+ "#,
+ expect![""],
+ );
+ check(
+ r#"
+mod module2 {
+ #[rust_analyzer::completions(ignore_methods)]
+ pub trait ExcludedTrait {
+ fn foo(&self) {}
+ fn bar(&self) {}
+ fn baz(&self) {}
+ }
+
+ impl<T> ExcludedTrait for T {}
+}
+
+fn foo() {
+ ExcludedTrait$0
+}
+ "#,
expect![[r#"
- me foo() (use module2::ExcludedTrait) fn(&self)
+ tt ExcludedTrait (use module2::ExcludedTrait)
"#]],
);
+ check(
+ r#"
+mod module2 {
+ #[rust_analyzer::completions(ignore_flyimport)]
+ pub struct Foo {}
+}
+
+fn foo() {
+ Foo$0
+}
+ "#,
+ expect![""],
+ );
}
diff --git a/crates/ide-completion/src/tests/item.rs b/crates/ide-completion/src/tests/item.rs
index be2c37d..5568903 100644
--- a/crates/ide-completion/src/tests/item.rs
+++ b/crates/ide-completion/src/tests/item.rs
@@ -284,6 +284,7 @@
kw if
kw if let
kw impl
+ kw impl for
kw let
kw letm
kw loop
diff --git a/crates/ide-completion/src/tests/item_list.rs b/crates/ide-completion/src/tests/item_list.rs
index 841c421..fcdf10c 100644
--- a/crates/ide-completion/src/tests/item_list.rs
+++ b/crates/ide-completion/src/tests/item_list.rs
@@ -16,6 +16,7 @@
kw extern
kw fn
kw impl
+ kw impl for
kw mod
kw pub
kw pub(crate)
@@ -50,6 +51,7 @@
kw extern
kw fn
kw impl
+ kw impl for
kw mod
kw pub
kw pub(crate)
@@ -83,6 +85,7 @@
kw extern
kw fn
kw impl
+ kw impl for
kw mod
kw pub
kw pub(crate)
@@ -122,6 +125,7 @@
kw extern
kw fn
kw impl
+ kw impl for
kw trait
"#]],
);
@@ -385,6 +389,7 @@
kw extern
kw fn
kw impl
+ kw impl for
kw mod
kw pub
kw pub(crate)
diff --git a/crates/ide-completion/src/tests/special.rs b/crates/ide-completion/src/tests/special.rs
index 70caeac..15518e9 100644
--- a/crates/ide-completion/src/tests/special.rs
+++ b/crates/ide-completion/src/tests/special.rs
@@ -1008,6 +1008,7 @@
kw if
kw if let
kw impl
+ kw impl for
kw let
kw letm
kw loop
@@ -1059,6 +1060,7 @@
kw if
kw if let
kw impl
+ kw impl for
kw let
kw letm
kw loop
@@ -1184,6 +1186,7 @@
kw if
kw if let
kw impl
+ kw impl for
kw let
kw letm
kw loop
@@ -1441,6 +1444,7 @@
kw if
kw if let
kw impl
+ kw impl for
kw let
kw letm
kw loop
diff --git a/crates/ide-db/Cargo.toml b/crates/ide-db/Cargo.toml
index 485a720..f1d6b60 100644
--- a/crates/ide-db/Cargo.toml
+++ b/crates/ide-db/Cargo.toml
@@ -12,7 +12,7 @@
[lib]
[dependencies]
-cov-mark = "2.0.0-pre.1"
+cov-mark = "2.0.0"
crossbeam-channel.workspace = true
tracing.workspace = true
rayon.workspace = true
@@ -22,7 +22,7 @@
itertools.workspace = true
arrayvec.workspace = true
indexmap.workspace = true
-memchr = "2.6.4"
+memchr = "2.7.4"
salsa.workspace = true
query-group.workspace = true
triomphe.workspace = true
@@ -44,7 +44,7 @@
line-index.workspace = true
[dev-dependencies]
-expect-test = "1.4.0"
+expect-test = "1.5.1"
# local deps
test-utils.workspace = true
diff --git a/crates/ide-db/src/apply_change.rs b/crates/ide-db/src/apply_change.rs
index 36745b0..008b6fd 100644
--- a/crates/ide-db/src/apply_change.rs
+++ b/crates/ide-db/src/apply_change.rs
@@ -29,8 +29,8 @@
local_roots.insert(root_id);
}
}
- self.set_local_roots_with_durability(Arc::new(local_roots), Durability::HIGH);
- self.set_library_roots_with_durability(Arc::new(library_roots), Durability::HIGH);
+ self.set_local_roots_with_durability(Arc::new(local_roots), Durability::MEDIUM);
+ self.set_library_roots_with_durability(Arc::new(library_roots), Durability::MEDIUM);
}
change.apply(self);
}
diff --git a/crates/ide-db/src/assists.rs b/crates/ide-db/src/assists.rs
index 9ff3e10..90ae4a3 100644
--- a/crates/ide-db/src/assists.rs
+++ b/crates/ide-db/src/assists.rs
@@ -43,9 +43,6 @@
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum AssistKind {
- // FIXME: does the None variant make sense? Probably not.
- None,
-
QuickFix,
Generate,
Refactor,
@@ -61,7 +58,7 @@
}
match self {
- AssistKind::None | AssistKind::Generate => true,
+ AssistKind::Generate => true,
AssistKind::Refactor => matches!(
other,
AssistKind::RefactorExtract
@@ -74,7 +71,6 @@
pub fn name(&self) -> &str {
match self {
- AssistKind::None => "None",
AssistKind::QuickFix => "QuickFix",
AssistKind::Generate => "Generate",
AssistKind::Refactor => "Refactor",
@@ -90,7 +86,6 @@
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
- "None" => Ok(AssistKind::None),
"QuickFix" => Ok(AssistKind::QuickFix),
"Generate" => Ok(AssistKind::Generate),
"Refactor" => Ok(AssistKind::Refactor),
@@ -108,10 +103,6 @@
pub struct AssistId(pub &'static str, pub AssistKind, pub Option<usize>);
impl AssistId {
- pub fn none(id: &'static str) -> AssistId {
- AssistId(id, AssistKind::None, None)
- }
-
pub fn quick_fix(id: &'static str) -> AssistId {
AssistId(id, AssistKind::QuickFix, None)
}
diff --git a/crates/ide-db/src/imports/import_assets.rs b/crates/ide-db/src/imports/import_assets.rs
index 650c957..ac592df 100644
--- a/crates/ide-db/src/imports/import_assets.rs
+++ b/crates/ide-db/src/imports/import_assets.rs
@@ -3,8 +3,8 @@
use std::ops::ControlFlow;
use hir::{
- AsAssocItem, AssocItem, AssocItemContainer, Crate, HasCrate, ImportPathConfig, ItemInNs,
- ModPath, Module, ModuleDef, Name, PathResolution, PrefixKind, ScopeDef, Semantics,
+ AsAssocItem, AssocItem, AssocItemContainer, Complete, Crate, HasCrate, ImportPathConfig,
+ ItemInNs, ModPath, Module, ModuleDef, Name, PathResolution, PrefixKind, ScopeDef, Semantics,
SemanticsScope, Trait, TyFingerprint, Type, db::HirDatabase,
};
use itertools::Itertools;
@@ -183,6 +183,9 @@
}
}
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct CompleteInFlyimport(pub bool);
+
/// An import (not necessary the only one) that corresponds a certain given [`PathImportCandidate`].
/// (the structure is not entirely correct, since there can be situations requiring two imports, see FIXME below for the details)
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
@@ -198,11 +201,31 @@
/// the original item is the associated constant, but the import has to be a trait that
/// defines this constant.
pub original_item: ItemInNs,
+ /// The value of `#[rust_analyzer::completions(...)]`, if existing.
+ pub complete_in_flyimport: CompleteInFlyimport,
}
impl LocatedImport {
- pub fn new(import_path: ModPath, item_to_import: ItemInNs, original_item: ItemInNs) -> Self {
- Self { import_path, item_to_import, original_item }
+ pub fn new(
+ import_path: ModPath,
+ item_to_import: ItemInNs,
+ original_item: ItemInNs,
+ complete_in_flyimport: CompleteInFlyimport,
+ ) -> Self {
+ Self { import_path, item_to_import, original_item, complete_in_flyimport }
+ }
+
+ pub fn new_no_completion(
+ import_path: ModPath,
+ item_to_import: ItemInNs,
+ original_item: ItemInNs,
+ ) -> Self {
+ Self {
+ import_path,
+ item_to_import,
+ original_item,
+ complete_in_flyimport: CompleteInFlyimport(true),
+ }
}
}
@@ -351,12 +374,17 @@
// see also an ignored test under FIXME comment in the qualify_path.rs module
AssocSearchMode::Exclude,
)
- .filter_map(|item| {
+ .filter_map(|(item, do_not_complete)| {
if !scope_filter(item) {
return None;
}
let mod_path = mod_path(item)?;
- Some(LocatedImport::new(mod_path, item, item))
+ Some(LocatedImport::new(
+ mod_path,
+ item,
+ item,
+ CompleteInFlyimport(do_not_complete != Complete::IgnoreFlyimport),
+ ))
})
.take(DEFAULT_QUERY_SEARCH_LIMIT)
.collect()
@@ -371,7 +399,7 @@
NameToImport::Exact(first_qsegment.as_str().to_owned(), true),
AssocSearchMode::Exclude,
)
- .filter_map(|item| {
+ .filter_map(|(item, do_not_complete)| {
// we found imports for `first_qsegment`, now we need to filter these imports by whether
// they result in resolving the rest of the path successfully
validate_resolvable(
@@ -382,6 +410,7 @@
&path_candidate.name,
item,
qualifier_rest,
+ CompleteInFlyimport(do_not_complete != Complete::IgnoreFlyimport),
)
})
.take(DEFAULT_QUERY_SEARCH_LIMIT)
@@ -399,6 +428,7 @@
candidate: &NameToImport,
resolved_qualifier: ItemInNs,
unresolved_qualifier: &[Name],
+ complete_in_flyimport: CompleteInFlyimport,
) -> Option<LocatedImport> {
let _p = tracing::info_span!("ImportAssets::import_for_item").entered();
@@ -434,7 +464,14 @@
false => ControlFlow::Continue(()),
},
)
- .map(|item| LocatedImport::new(import_path_candidate, resolved_qualifier, item));
+ .map(|item| {
+ LocatedImport::new(
+ import_path_candidate,
+ resolved_qualifier,
+ item,
+ complete_in_flyimport,
+ )
+ });
}
// FIXME
ModuleDef::Trait(_) => return None,
@@ -472,6 +509,7 @@
import_path_candidate.clone(),
resolved_qualifier,
assoc_to_item(assoc),
+ complete_in_flyimport,
))
})
}
@@ -510,15 +548,15 @@
let env_traits = trait_candidate.receiver_ty.env_traits(db);
let related_traits = inherent_traits.chain(env_traits).collect::<FxHashSet<_>>();
- let mut required_assoc_items = FxHashSet::default();
+ let mut required_assoc_items = FxHashMap::default();
let mut trait_candidates: FxHashSet<_> = items_locator::items_with_name(
db,
current_crate,
trait_candidate.assoc_item_name.clone(),
AssocSearchMode::AssocItemsOnly,
)
- .filter_map(|input| item_as_assoc(db, input))
- .filter_map(|assoc| {
+ .filter_map(|(input, do_not_complete)| Some((item_as_assoc(db, input)?, do_not_complete)))
+ .filter_map(|(assoc, do_not_complete)| {
if !trait_assoc_item && matches!(assoc, AssocItem::Const(_) | AssocItem::TypeAlias(_)) {
return None;
}
@@ -527,7 +565,8 @@
if related_traits.contains(&assoc_item_trait) {
return None;
}
- required_assoc_items.insert(assoc);
+ required_assoc_items
+ .insert(assoc, CompleteInFlyimport(do_not_complete != Complete::IgnoreFlyimport));
Some(assoc_item_trait.into())
})
.collect();
@@ -599,7 +638,7 @@
None,
None,
|assoc| {
- if required_assoc_items.contains(&assoc) {
+ if let Some(&complete_in_flyimport) = required_assoc_items.get(&assoc) {
let located_trait = assoc.container_trait(db).filter(|&it| scope_filter(it))?;
let trait_item = ItemInNs::from(ModuleDef::from(located_trait));
let import_path = trait_import_paths
@@ -610,6 +649,7 @@
import_path,
trait_item,
assoc_to_item(assoc),
+ complete_in_flyimport,
));
}
None::<()>
@@ -624,7 +664,7 @@
None,
|function| {
let assoc = function.as_assoc_item(db)?;
- if required_assoc_items.contains(&assoc) {
+ if let Some(&complete_in_flyimport) = required_assoc_items.get(&assoc) {
let located_trait = assoc.container_trait(db).filter(|&it| scope_filter(it))?;
let trait_item = ItemInNs::from(ModuleDef::from(located_trait));
let import_path = trait_import_paths
@@ -635,6 +675,7 @@
import_path,
trait_item,
assoc_to_item(assoc),
+ complete_in_flyimport,
));
}
None::<()>
diff --git a/crates/ide-db/src/items_locator.rs b/crates/ide-db/src/items_locator.rs
index 7a543d6..e938525 100644
--- a/crates/ide-db/src/items_locator.rs
+++ b/crates/ide-db/src/items_locator.rs
@@ -5,7 +5,7 @@
use std::ops::ControlFlow;
use either::Either;
-use hir::{Crate, ItemInNs, Module, import_map};
+use hir::{Complete, Crate, ItemInNs, Module, import_map};
use crate::{
RootDatabase,
@@ -25,7 +25,7 @@
krate: Crate,
name: NameToImport,
assoc_item_search: AssocSearchMode,
-) -> impl Iterator<Item = ItemInNs> {
+) -> impl Iterator<Item = (ItemInNs, Complete)> {
let _p = tracing::info_span!("items_with_name", name = name.text(), assoc_item_search = ?assoc_item_search, crate = ?krate.display_name(db).map(|name| name.to_string()))
.entered();
@@ -123,26 +123,29 @@
krate: Crate,
local_query: symbol_index::Query,
external_query: import_map::Query,
-) -> impl Iterator<Item = ItemInNs> {
+) -> impl Iterator<Item = (ItemInNs, Complete)> {
let _p = tracing::info_span!("find_items").entered();
// NOTE: `external_query` includes `assoc_item_search`, so we don't need to
// filter on our own.
- let external_importables =
- krate.query_external_importables(db, external_query).map(|external_importable| {
- match external_importable {
+ let external_importables = krate.query_external_importables(db, external_query).map(
+ |(external_importable, do_not_complete)| {
+ let external_importable = match external_importable {
Either::Left(module_def) => ItemInNs::from(module_def),
Either::Right(macro_def) => ItemInNs::from(macro_def),
- }
- });
+ };
+ (external_importable, do_not_complete)
+ },
+ );
// Query the local crate using the symbol index.
let mut local_results = Vec::new();
local_query.search(&symbol_index::crate_symbols(db, krate), |local_candidate| {
- local_results.push(match local_candidate.def {
+ let def = match local_candidate.def {
hir::ModuleDef::Macro(macro_def) => ItemInNs::Macros(macro_def),
def => ItemInNs::from(def),
- });
+ };
+ local_results.push((def, local_candidate.do_not_complete));
ControlFlow::<()>::Continue(())
});
local_results.into_iter().chain(external_importables)
diff --git a/crates/ide-db/src/lib.rs b/crates/ide-db/src/lib.rs
index c6bd803..b5b4a9e 100644
--- a/crates/ide-db/src/lib.rs
+++ b/crates/ide-db/src/lib.rs
@@ -220,9 +220,9 @@
// This needs to be here otherwise `CrateGraphBuilder` will panic.
db.set_all_crates(Arc::new(Box::new([])));
CrateGraphBuilder::default().set_in_db(&mut db);
- db.set_proc_macros_with_durability(Default::default(), Durability::HIGH);
- db.set_local_roots_with_durability(Default::default(), Durability::HIGH);
- db.set_library_roots_with_durability(Default::default(), Durability::HIGH);
+ db.set_proc_macros_with_durability(Default::default(), Durability::MEDIUM);
+ db.set_local_roots_with_durability(Default::default(), Durability::MEDIUM);
+ db.set_library_roots_with_durability(Default::default(), Durability::MEDIUM);
db.set_expand_proc_attr_macros_with_durability(false, Durability::HIGH);
db.update_base_query_lru_capacities(lru_capacity);
db
diff --git a/crates/ide-db/src/prime_caches/topologic_sort.rs b/crates/ide-db/src/prime_caches/topologic_sort.rs
index 7353d71..c8a0386 100644
--- a/crates/ide-db/src/prime_caches/topologic_sort.rs
+++ b/crates/ide-db/src/prime_caches/topologic_sort.rs
@@ -7,14 +7,20 @@
nodes: FxHashMap<T, Entry<T>>,
}
+// this implementation has different bounds on T than would be implied by #[derive(Default)]
+impl<T> Default for TopologicSortIterBuilder<T>
+where
+ T: Copy + Eq + PartialEq + Hash,
+{
+ fn default() -> Self {
+ Self { nodes: Default::default() }
+ }
+}
+
impl<T> TopologicSortIterBuilder<T>
where
T: Copy + Eq + PartialEq + Hash,
{
- fn new() -> Self {
- Self { nodes: Default::default() }
- }
-
fn get_or_create_entry(&mut self, item: T) -> &mut Entry<T> {
self.nodes.entry(item).or_default()
}
@@ -54,7 +60,7 @@
T: Copy + Eq + PartialEq + Hash,
{
pub(crate) fn builder() -> TopologicSortIterBuilder<T> {
- TopologicSortIterBuilder::new()
+ TopologicSortIterBuilder::default()
}
pub(crate) fn pending(&self) -> usize {
diff --git a/crates/ide-db/src/source_change.rs b/crates/ide-db/src/source_change.rs
index 741dc6b..b1b58d6 100644
--- a/crates/ide-db/src/source_change.rs
+++ b/crates/ide-db/src/source_change.rs
@@ -469,7 +469,7 @@
}
fn add_snippet_annotation(&mut self, kind: AnnotationSnippet) -> SyntaxAnnotation {
- let annotation = SyntaxAnnotation::new();
+ let annotation = SyntaxAnnotation::default();
self.snippet_annotations.push((kind, annotation));
self.source_change.is_snippet = true;
annotation
diff --git a/crates/ide-db/src/syntax_helpers/node_ext.rs b/crates/ide-db/src/syntax_helpers/node_ext.rs
index f0aa3da..bdff64d 100644
--- a/crates/ide-db/src/syntax_helpers/node_ext.rs
+++ b/crates/ide-db/src/syntax_helpers/node_ext.rs
@@ -121,7 +121,7 @@
match ast::Stmt::cast(node.clone()) {
Some(ast::Stmt::LetStmt(l)) => {
if let Some(pat) = l.pat() {
- walk_pat(&pat, &mut |pat| {
+ _ = walk_pat(&pat, &mut |pat| {
cb(pat);
ControlFlow::<(), ()>::Continue(())
});
@@ -159,7 +159,7 @@
}
} else if let Some(pat) = ast::Pat::cast(node) {
preorder.skip_subtree();
- walk_pat(&pat, &mut |pat| {
+ _ = walk_pat(&pat, &mut |pat| {
cb(pat);
ControlFlow::<(), ()>::Continue(())
});
@@ -484,7 +484,7 @@
None => None,
Some(tok) => Some(tok),
});
- let input_expressions = tokens.group_by(|tok| tok.kind() == T![,]);
+ let input_expressions = tokens.chunk_by(|tok| tok.kind() == T![,]);
let paths = input_expressions
.into_iter()
.filter_map(|(is_sep, group)| (!is_sep).then_some(group))
diff --git a/crates/ide-db/src/syntax_helpers/suggest_name.rs b/crates/ide-db/src/syntax_helpers/suggest_name.rs
index 6801856..51ce9b4 100644
--- a/crates/ide-db/src/syntax_helpers/suggest_name.rs
+++ b/crates/ide-db/src/syntax_helpers/suggest_name.rs
@@ -96,21 +96,16 @@
}
impl NameGenerator {
- /// Create a new empty generator
- pub fn new() -> Self {
- Self { pool: FxHashMap::default() }
- }
-
/// Create a new generator with existing names. When suggesting a name, it will
/// avoid conflicts with existing names.
pub fn new_with_names<'a>(existing_names: impl Iterator<Item = &'a str>) -> Self {
- let mut generator = Self::new();
+ let mut generator = Self::default();
existing_names.for_each(|name| generator.insert(name));
generator
}
pub fn new_from_scope_locals(scope: Option<SemanticsScope<'_>>) -> Self {
- let mut generator = Self::new();
+ let mut generator = Self::default();
if let Some(scope) = scope {
scope.process_all_names(&mut |name, scope| {
if let hir::ScopeDef::Local(_) = scope {
@@ -471,7 +466,7 @@
frange.range,
"selection is not an expression(yet contained in one)"
);
- let name = NameGenerator::new().for_variable(&expr, &sema);
+ let name = NameGenerator::default().for_variable(&expr, &sema);
assert_eq!(&name, expected);
}
@@ -1118,7 +1113,7 @@
#[test]
fn conflicts_with_existing_names() {
- let mut generator = NameGenerator::new();
+ let mut generator = NameGenerator::default();
assert_eq!(generator.suggest_name("a"), "a");
assert_eq!(generator.suggest_name("a"), "a1");
assert_eq!(generator.suggest_name("a"), "a2");
diff --git a/crates/ide-db/src/test_data/test_doc_alias.txt b/crates/ide-db/src/test_data/test_doc_alias.txt
index ea50745..8e342ec 100644
--- a/crates/ide-db/src/test_data/test_doc_alias.txt
+++ b/crates/ide-db/src/test_data/test_doc_alias.txt
@@ -42,6 +42,7 @@
container_name: None,
is_alias: false,
is_assoc: false,
+ do_not_complete: Yes,
},
FileSymbol {
name: "Struct",
@@ -75,6 +76,7 @@
container_name: None,
is_alias: false,
is_assoc: false,
+ do_not_complete: Yes,
},
FileSymbol {
name: "mul1",
@@ -108,6 +110,7 @@
container_name: None,
is_alias: true,
is_assoc: false,
+ do_not_complete: Yes,
},
FileSymbol {
name: "mul2",
@@ -141,6 +144,7 @@
container_name: None,
is_alias: true,
is_assoc: false,
+ do_not_complete: Yes,
},
FileSymbol {
name: "s1",
@@ -174,6 +178,7 @@
container_name: None,
is_alias: true,
is_assoc: false,
+ do_not_complete: Yes,
},
FileSymbol {
name: "s1",
@@ -207,6 +212,7 @@
container_name: None,
is_alias: true,
is_assoc: false,
+ do_not_complete: Yes,
},
FileSymbol {
name: "s2",
@@ -240,6 +246,7 @@
container_name: None,
is_alias: true,
is_assoc: false,
+ do_not_complete: Yes,
},
],
),
diff --git a/crates/ide-db/src/test_data/test_symbol_index_collection.txt b/crates/ide-db/src/test_data/test_symbol_index_collection.txt
index d2d2426..6de25c0 100644
--- a/crates/ide-db/src/test_data/test_symbol_index_collection.txt
+++ b/crates/ide-db/src/test_data/test_symbol_index_collection.txt
@@ -40,6 +40,7 @@
container_name: None,
is_alias: false,
is_assoc: false,
+ do_not_complete: Yes,
},
FileSymbol {
name: "CONST",
@@ -71,6 +72,7 @@
container_name: None,
is_alias: false,
is_assoc: false,
+ do_not_complete: Yes,
},
FileSymbol {
name: "CONST_WITH_INNER",
@@ -102,6 +104,7 @@
container_name: None,
is_alias: false,
is_assoc: false,
+ do_not_complete: Yes,
},
FileSymbol {
name: "Enum",
@@ -135,6 +138,7 @@
container_name: None,
is_alias: false,
is_assoc: false,
+ do_not_complete: Yes,
},
FileSymbol {
name: "ItemLikeMacro",
@@ -168,6 +172,7 @@
container_name: None,
is_alias: false,
is_assoc: false,
+ do_not_complete: Yes,
},
FileSymbol {
name: "Macro",
@@ -201,6 +206,7 @@
container_name: None,
is_alias: false,
is_assoc: false,
+ do_not_complete: Yes,
},
FileSymbol {
name: "STATIC",
@@ -232,6 +238,7 @@
container_name: None,
is_alias: false,
is_assoc: false,
+ do_not_complete: Yes,
},
FileSymbol {
name: "Struct",
@@ -265,6 +272,7 @@
container_name: None,
is_alias: false,
is_assoc: false,
+ do_not_complete: Yes,
},
FileSymbol {
name: "StructFromMacro",
@@ -295,6 +303,7 @@
container_name: None,
is_alias: false,
is_assoc: false,
+ do_not_complete: Yes,
},
FileSymbol {
name: "StructInFn",
@@ -330,6 +339,7 @@
),
is_alias: false,
is_assoc: false,
+ do_not_complete: Yes,
},
FileSymbol {
name: "StructInNamedConst",
@@ -365,6 +375,7 @@
),
is_alias: false,
is_assoc: false,
+ do_not_complete: Yes,
},
FileSymbol {
name: "StructInUnnamedConst",
@@ -398,6 +409,7 @@
container_name: None,
is_alias: false,
is_assoc: false,
+ do_not_complete: Yes,
},
FileSymbol {
name: "StructT",
@@ -431,6 +443,7 @@
container_name: None,
is_alias: false,
is_assoc: false,
+ do_not_complete: Yes,
},
FileSymbol {
name: "Trait",
@@ -462,6 +475,7 @@
container_name: None,
is_alias: false,
is_assoc: false,
+ do_not_complete: Yes,
},
FileSymbol {
name: "Trait",
@@ -495,6 +509,7 @@
container_name: None,
is_alias: false,
is_assoc: false,
+ do_not_complete: Yes,
},
FileSymbol {
name: "Union",
@@ -528,6 +543,7 @@
container_name: None,
is_alias: false,
is_assoc: false,
+ do_not_complete: Yes,
},
FileSymbol {
name: "a_mod",
@@ -563,6 +579,7 @@
container_name: None,
is_alias: false,
is_assoc: false,
+ do_not_complete: Yes,
},
FileSymbol {
name: "b_mod",
@@ -598,6 +615,7 @@
container_name: None,
is_alias: false,
is_assoc: false,
+ do_not_complete: Yes,
},
FileSymbol {
name: "define_struct",
@@ -631,6 +649,7 @@
container_name: None,
is_alias: false,
is_assoc: false,
+ do_not_complete: Yes,
},
FileSymbol {
name: "generic_impl_fn",
@@ -664,6 +683,7 @@
),
is_alias: false,
is_assoc: true,
+ do_not_complete: Yes,
},
FileSymbol {
name: "impl_fn",
@@ -697,6 +717,7 @@
),
is_alias: false,
is_assoc: true,
+ do_not_complete: Yes,
},
FileSymbol {
name: "macro_rules_macro",
@@ -730,6 +751,7 @@
container_name: None,
is_alias: false,
is_assoc: false,
+ do_not_complete: Yes,
},
FileSymbol {
name: "main",
@@ -761,6 +783,7 @@
container_name: None,
is_alias: false,
is_assoc: false,
+ do_not_complete: Yes,
},
FileSymbol {
name: "really_define_struct",
@@ -794,6 +817,7 @@
container_name: None,
is_alias: false,
is_assoc: false,
+ do_not_complete: Yes,
},
FileSymbol {
name: "trait_fn",
@@ -827,6 +851,7 @@
),
is_alias: false,
is_assoc: true,
+ do_not_complete: Yes,
},
],
),
@@ -873,6 +898,7 @@
container_name: None,
is_alias: false,
is_assoc: false,
+ do_not_complete: Yes,
},
],
),
@@ -917,6 +943,7 @@
container_name: None,
is_alias: false,
is_assoc: false,
+ do_not_complete: Yes,
},
FileSymbol {
name: "IsThisJustATrait",
@@ -950,6 +977,7 @@
container_name: None,
is_alias: false,
is_assoc: false,
+ do_not_complete: Yes,
},
FileSymbol {
name: "StructInModB",
@@ -983,6 +1011,7 @@
container_name: None,
is_alias: false,
is_assoc: false,
+ do_not_complete: Yes,
},
FileSymbol {
name: "SuperItemLikeMacro",
@@ -1016,6 +1045,7 @@
container_name: None,
is_alias: false,
is_assoc: false,
+ do_not_complete: Yes,
},
FileSymbol {
name: "ThisStruct",
@@ -1049,6 +1079,7 @@
container_name: None,
is_alias: false,
is_assoc: false,
+ do_not_complete: Yes,
},
],
),
diff --git a/crates/ide-diagnostics/Cargo.toml b/crates/ide-diagnostics/Cargo.toml
index 483cb6d..96be51e 100644
--- a/crates/ide-diagnostics/Cargo.toml
+++ b/crates/ide-diagnostics/Cargo.toml
@@ -12,7 +12,7 @@
[lib]
[dependencies]
-cov-mark = "2.0.0-pre.1"
+cov-mark = "2.0.0"
either.workspace = true
itertools.workspace = true
serde_json.workspace = true
@@ -27,7 +27,7 @@
paths.workspace = true
[dev-dependencies]
-expect-test = "1.4.0"
+expect-test = "1.5.1"
# local deps
test-utils.workspace = true
diff --git a/crates/ide-diagnostics/src/handlers/trait_impl_orphan.rs b/crates/ide-diagnostics/src/handlers/trait_impl_orphan.rs
index 78a04e1..35dc9b0 100644
--- a/crates/ide-diagnostics/src/handlers/trait_impl_orphan.rs
+++ b/crates/ide-diagnostics/src/handlers/trait_impl_orphan.rs
@@ -104,4 +104,17 @@
"#,
);
}
+
+ #[test]
+ fn twice_fundamental() {
+ check_diagnostics(
+ r#"
+//- /foo.rs crate:foo
+pub trait Trait {}
+//- /bar.rs crate:bar deps:foo
+struct Foo;
+impl foo::Trait for &&Foo {}
+ "#,
+ );
+ }
}
diff --git a/crates/ide-diagnostics/src/handlers/type_mismatch.rs b/crates/ide-diagnostics/src/handlers/type_mismatch.rs
index 4ec8c74..591213d 100644
--- a/crates/ide-diagnostics/src/handlers/type_mismatch.rs
+++ b/crates/ide-diagnostics/src/handlers/type_mismatch.rs
@@ -231,7 +231,7 @@
.and_then(Either::<ast::ReturnExpr, ast::StmtList>::cast)?;
editor = builder.make_editor(parent.syntax());
- let make = SyntaxFactory::new();
+ let make = SyntaxFactory::with_mappings();
match parent {
Either::Left(ret_expr) => {
@@ -1042,19 +1042,6 @@
}
#[test]
- fn closure_mismatch_show_different_type() {
- check_diagnostics(
- r#"
-fn f() {
- let mut x = (|| 1, 2);
- x = (|| 3, 4);
- //^^^^ error: expected {closure#23552}, found {closure#23553}
-}
- "#,
- );
- }
-
- #[test]
fn type_mismatch_range_adjustment() {
cov_mark::check!(type_mismatch_range_adjustment);
check_diagnostics(
diff --git a/crates/ide-diagnostics/src/lib.rs b/crates/ide-diagnostics/src/lib.rs
index a8d9b67..e667d48 100644
--- a/crates/ide-diagnostics/src/lib.rs
+++ b/crates/ide-diagnostics/src/lib.rs
@@ -332,7 +332,6 @@
// [#3434] Only take first 128 errors to prevent slowing down editor/ide, the number 128 is chosen arbitrarily.
db.parse_errors(editioned_file_id_wrapper)
- .as_deref()
.into_iter()
.flatten()
.take(128)
@@ -409,8 +408,7 @@
// A bunch of parse errors in a file indicate some bigger structural parse changes in the
// file, so we skip semantic diagnostics so we can show these faster.
Some(m) => {
- if db.parse_errors(editioned_file_id_wrapper).as_deref().is_none_or(|es| es.len() < 16)
- {
+ if db.parse_errors(editioned_file_id_wrapper).is_none_or(|es| es.len() < 16) {
m.diagnostics(db, &mut diags, config.style_lints);
}
}
diff --git a/crates/ide-ssr/Cargo.toml b/crates/ide-ssr/Cargo.toml
index 39a71c1..1212fa9 100644
--- a/crates/ide-ssr/Cargo.toml
+++ b/crates/ide-ssr/Cargo.toml
@@ -12,7 +12,7 @@
[lib]
[dependencies]
-cov-mark = "2.0.0-pre.1"
+cov-mark = "2.0.0"
itertools.workspace = true
# local deps
@@ -22,7 +22,7 @@
syntax.workspace = true
[dev-dependencies]
-expect-test = "1.4.0"
+expect-test = "1.5.1"
triomphe.workspace = true
# local deps
diff --git a/crates/ide-ssr/src/matching.rs b/crates/ide-ssr/src/matching.rs
index 0776b8b..84a5943 100644
--- a/crates/ide-ssr/src/matching.rs
+++ b/crates/ide-ssr/src/matching.rs
@@ -629,7 +629,8 @@
let krate = self.sema.scope(expr.syntax()).map(|it| it.krate()).unwrap_or_else(|| {
hir::Crate::from(*self.sema.db.all_crates().last().expect("no crate graph present"))
});
- let res = code_type
+
+ code_type
.autoderef(self.sema.db)
.enumerate()
.find(|(_, deref_code_type)| pattern_type == deref_code_type)
@@ -642,8 +643,7 @@
pattern_type.display(self.sema.db, display_target),
code_type.display(self.sema.db, display_target)
)
- });
- res
+ })
}
fn get_placeholder_for_node(&self, node: &SyntaxNode) -> Option<&Placeholder> {
diff --git a/crates/ide/Cargo.toml b/crates/ide/Cargo.toml
index 9af56c4..1d19daf 100644
--- a/crates/ide/Cargo.toml
+++ b/crates/ide/Cargo.toml
@@ -12,7 +12,7 @@
[lib]
[dependencies]
-cov-mark = "2.0.0-pre.1"
+cov-mark = "2.0.0"
arrayvec.workspace = true
either.workspace = true
itertools.workspace = true
@@ -25,7 +25,7 @@
smallvec.workspace = true
triomphe.workspace = true
nohash-hasher.workspace = true
-rustc_apfloat = "0.2.0"
+rustc_apfloat = "0.2.2"
# local deps
cfg.workspace = true
@@ -46,7 +46,7 @@
toolchain.workspace = true
[dev-dependencies]
-expect-test = "1.4.0"
+expect-test = "1.5.1"
# local deps
test-utils.workspace = true
diff --git a/crates/ide/src/doc_links.rs b/crates/ide/src/doc_links.rs
index a01afd2..258d80e 100644
--- a/crates/ide/src/doc_links.rs
+++ b/crates/ide/src/doc_links.rs
@@ -627,7 +627,7 @@
return Some((def, file, Some(format!("variant.{}", ev.name(db).as_str()))));
}
Definition::Const(c) => {
- format!("const.{}.html", c.name(db)?.as_str())
+ format!("constant.{}.html", c.name(db)?.as_str())
}
Definition::Static(s) => {
format!("static.{}.html", s.name(db).as_str())
diff --git a/crates/ide/src/expand_macro.rs b/crates/ide/src/expand_macro.rs
index 4811f1f..7766897 100644
--- a/crates/ide/src/expand_macro.rs
+++ b/crates/ide/src/expand_macro.rs
@@ -677,4 +677,26 @@
crate::Foo;"#]],
);
}
+
+ #[test]
+ fn semi_glueing() {
+ check(
+ r#"
+macro_rules! __log_value {
+ ($key:ident :$capture:tt =) => {};
+}
+
+macro_rules! __log {
+ ($key:tt $(:$capture:tt)? $(= $value:expr)?; $($arg:tt)+) => {
+ __log_value!($key $(:$capture)* = $($value)*);
+ };
+}
+
+__log!(written:%; "Test"$0);
+ "#,
+ expect![[r#"
+ __log!
+ "#]],
+ );
+ }
}
diff --git a/crates/ide/src/goto_type_definition.rs b/crates/ide/src/goto_type_definition.rs
index bf48a6e..a3d1679 100644
--- a/crates/ide/src/goto_type_definition.rs
+++ b/crates/ide/src/goto_type_definition.rs
@@ -71,7 +71,7 @@
sema.descend_into_macros_no_opaque(token)
.into_iter()
.filter_map(|token| {
- let ty = sema
+ sema
.token_ancestors_with_macros(token)
// When `token` is within a macro call, we can't determine its type. Don't continue
// this traversal because otherwise we'll end up returning the type of *that* macro
@@ -103,8 +103,7 @@
};
Some(ty)
- });
- ty
+ })
})
.for_each(process_ty);
Some(RangeInfo::new(range, res))
diff --git a/crates/ide/src/inlay_hints/bind_pat.rs b/crates/ide/src/inlay_hints/bind_pat.rs
index 632893a..0718e5a 100644
--- a/crates/ide/src/inlay_hints/bind_pat.rs
+++ b/crates/ide/src/inlay_hints/bind_pat.rs
@@ -861,28 +861,6 @@
check_with_config(
InlayHintsConfig {
type_hints: true,
- closure_style: ClosureStyle::ClosureWithId,
- ..DISABLED_CONFIG
- },
- r#"
-//- minicore: fn
-fn main() {
- let x = || 2;
- //^ {closure#25600}
- let y = |t: i32| x() + t;
- //^ {closure#25601}
- let mut t = 5;
- //^ i32
- let z = |k: i32| { t += k; };
- //^ {closure#25602}
- let p = (y, z);
- //^ ({closure#25601}, {closure#25602})
-}
- "#,
- );
- check_with_config(
- InlayHintsConfig {
- type_hints: true,
closure_style: ClosureStyle::Hide,
..DISABLED_CONFIG
},
diff --git a/crates/ide/src/inlay_hints/closure_ret.rs b/crates/ide/src/inlay_hints/closure_ret.rs
index 0014c81..f9b21c6 100644
--- a/crates/ide/src/inlay_hints/closure_ret.rs
+++ b/crates/ide/src/inlay_hints/closure_ret.rs
@@ -35,8 +35,9 @@
let param_list = closure.param_list()?;
- let closure = sema.descend_node_into_attributes(closure).pop()?;
- let ty = sema.type_of_expr(&ast::Expr::ClosureExpr(closure.clone()))?.adjusted();
+ let resolve_parent = Some(closure.syntax().text_range());
+ let descended_closure = sema.descend_node_into_attributes(closure.clone()).pop()?;
+ let ty = sema.type_of_expr(&ast::Expr::ClosureExpr(descended_closure.clone()))?.adjusted();
let callable = ty.as_callable(sema.db)?;
let ty = callable.return_type();
if arrow.is_none() && ty.is_unit() {
@@ -52,7 +53,7 @@
ty_to_text_edit(
sema,
config,
- closure.syntax(),
+ descended_closure.syntax(),
&ty,
arrow
.as_ref()
@@ -70,7 +71,7 @@
let mut builder = TextEdit::builder();
let insert_pos = param_list.syntax().text_range().end();
- let rendered = match sema.scope(closure.syntax()).and_then(|scope| {
+ let rendered = match sema.scope(descended_closure.syntax()).and_then(|scope| {
ty.display_source_code(scope.db, scope.module().into(), false).ok()
}) {
Some(rendered) => rendered,
@@ -95,7 +96,7 @@
position: InlayHintPosition::After,
pad_left: false,
pad_right: false,
- resolve_parent: Some(closure.syntax().text_range()),
+ resolve_parent,
});
Some(())
}
diff --git a/crates/ide/src/inlay_hints/generic_param.rs b/crates/ide/src/inlay_hints/generic_param.rs
index fc1083f..730732d 100644
--- a/crates/ide/src/inlay_hints/generic_param.rs
+++ b/crates/ide/src/inlay_hints/generic_param.rs
@@ -1,4 +1,5 @@
//! Implementation of inlay hints for generic parameters.
+use either::Either;
use ide_db::{active_parameter::generic_def_for_node, famous_defs::FamousDefs};
use syntax::{
AstNode,
@@ -6,7 +7,8 @@
};
use crate::{
- InlayHint, InlayHintLabel, InlayHintsConfig, InlayKind, inlay_hints::GenericParameterHints,
+ InlayHint, InlayHintLabel, InlayHintsConfig, InlayKind,
+ inlay_hints::{GenericParameterHints, param_name},
};
use super::param_name::is_argument_similar_to_param_name;
@@ -62,8 +64,17 @@
let param_name = param.name(sema.db);
let should_hide = {
- let argument = get_string_representation(&arg)?;
- is_argument_similar_to_param_name(&argument, param_name.as_str())
+ let param_name = param_name.as_str();
+ get_segment_representation(&arg).map_or(false, |seg| match seg {
+ Either::Left(Either::Left(argument)) => {
+ is_argument_similar_to_param_name(&argument, param_name)
+ }
+ Either::Left(Either::Right(argument)) => argument
+ .segment()
+ .and_then(|it| it.name_ref())
+ .is_some_and(|it| it.text().eq_ignore_ascii_case(param_name)),
+ Either::Right(lifetime) => lifetime.text().eq_ignore_ascii_case(param_name),
+ })
};
if should_hide {
@@ -111,32 +122,34 @@
Some(())
}
-fn get_string_representation(arg: &ast::GenericArg) -> Option<String> {
+fn get_segment_representation(
+ arg: &ast::GenericArg,
+) -> Option<Either<Either<Vec<ast::NameRef>, ast::Path>, ast::Lifetime>> {
return match arg {
ast::GenericArg::AssocTypeArg(_) => None,
- ast::GenericArg::ConstArg(const_arg) => Some(const_arg.to_string()),
+ ast::GenericArg::ConstArg(const_arg) => {
+ param_name::get_segment_representation(&const_arg.expr()?).map(Either::Left)
+ }
ast::GenericArg::LifetimeArg(lifetime_arg) => {
let lifetime = lifetime_arg.lifetime()?;
- Some(lifetime.to_string())
+ Some(Either::Right(lifetime))
}
ast::GenericArg::TypeArg(type_arg) => {
let ty = type_arg.ty()?;
- Some(
- type_path_segment(&ty)
- .map_or_else(|| type_arg.to_string(), |segment| segment.to_string()),
- )
+ type_path(&ty).map(Either::Right).map(Either::Left)
}
};
- fn type_path_segment(ty: &ast::Type) -> Option<ast::PathSegment> {
+ fn type_path(ty: &ast::Type) -> Option<ast::Path> {
match ty {
- ast::Type::ArrayType(it) => type_path_segment(&it.ty()?),
- ast::Type::ForType(it) => type_path_segment(&it.ty()?),
- ast::Type::ParenType(it) => type_path_segment(&it.ty()?),
- ast::Type::PathType(path_type) => path_type.path()?.segment(),
- ast::Type::PtrType(it) => type_path_segment(&it.ty()?),
- ast::Type::RefType(it) => type_path_segment(&it.ty()?),
- ast::Type::SliceType(it) => type_path_segment(&it.ty()?),
+ ast::Type::ArrayType(it) => type_path(&it.ty()?),
+ ast::Type::ForType(it) => type_path(&it.ty()?),
+ ast::Type::ParenType(it) => type_path(&it.ty()?),
+ ast::Type::PathType(path_type) => path_type.path(),
+ ast::Type::PtrType(it) => type_path(&it.ty()?),
+ ast::Type::RefType(it) => type_path(&it.ty()?),
+ ast::Type::SliceType(it) => type_path(&it.ty()?),
+ ast::Type::MacroType(macro_type) => macro_type.macro_call()?.path(),
_ => None,
}
}
diff --git a/crates/ide/src/inlay_hints/param_name.rs b/crates/ide/src/inlay_hints/param_name.rs
index 44ea535..99c698c 100644
--- a/crates/ide/src/inlay_hints/param_name.rs
+++ b/crates/ide/src/inlay_hints/param_name.rs
@@ -4,16 +4,15 @@
//! _ = max(/*x*/4, /*y*/4);
//! ```
+use std::iter::zip;
+
use either::Either;
-use hir::{Callable, Semantics};
+use hir::Semantics;
use ide_db::{RootDatabase, famous_defs::FamousDefs};
use span::EditionedFileId;
use stdx::to_lower_snake_case;
-use syntax::{
- ToSmolStr,
- ast::{self, AstNode, HasArgList, HasName, UnaryOp},
-};
+use syntax::ast::{self, AstNode, HasArgList, HasName, UnaryOp};
use crate::{InlayHint, InlayHintLabel, InlayHintPosition, InlayHintsConfig, InlayKind};
@@ -29,6 +28,12 @@
}
let (callable, arg_list) = get_callable(sema, &expr)?;
+ let unary_function = callable.n_params() == 1;
+ let function_name = match callable.kind() {
+ hir::CallableKind::Function(function) => Some(function.name(sema.db)),
+ _ => None,
+ };
+ let function_name = function_name.as_ref().map(|it| it.as_str());
let hints = callable
.params()
.into_iter()
@@ -40,7 +45,13 @@
Some((p, param_name, arg, range))
})
.filter(|(_, param_name, arg, _)| {
- !should_hide_param_name_hint(sema, &callable, param_name.as_str(), arg)
+ !should_hide_param_name_hint(
+ sema,
+ unary_function,
+ function_name,
+ param_name.as_str(),
+ arg,
+ )
})
.map(|(param, param_name, _, hir::FileRange { range, .. })| {
let colon = if config.render_colons { ":" } else { "" };
@@ -94,9 +105,13 @@
}
}
+const INSIGNIFICANT_METHOD_NAMES: &[&str] = &["clone", "as_ref", "into"];
+const INSIGNIFICANT_PARAMETER_NAMES: &[&str] = &["predicate", "value", "pat", "rhs", "other"];
+
fn should_hide_param_name_hint(
sema: &Semantics<'_, RootDatabase>,
- callable: &hir::Callable,
+ unary_function: bool,
+ function_name: Option<&str>,
param_name: &str,
argument: &ast::Expr,
) -> bool {
@@ -114,95 +129,128 @@
return true;
}
- if matches!(argument, ast::Expr::PrefixExpr(prefix) if prefix.op_kind() == Some(UnaryOp::Not)) {
- return false;
+ if param_name.starts_with("ra_fixture") {
+ return true;
}
- let fn_name = match callable.kind() {
- hir::CallableKind::Function(it) => Some(it.name(sema.db).as_str().to_smolstr()),
- _ => None,
- };
- let fn_name = fn_name.as_deref();
- is_param_name_suffix_of_fn_name(param_name, callable, fn_name)
- || is_argument_expr_similar_to_param_name(argument, param_name)
- || param_name.starts_with("ra_fixture")
- || (callable.n_params() == 1 && is_obvious_param(param_name))
- || is_adt_constructor_similar_to_param_name(sema, argument, param_name)
+ if unary_function {
+ if let Some(function_name) = function_name {
+ if is_param_name_suffix_of_fn_name(param_name, function_name) {
+ return true;
+ }
+ }
+ if is_obvious_param(param_name) {
+ return true;
+ }
+ }
+
+ is_argument_expr_similar_to_param_name(sema, argument, param_name)
}
/// Hide the parameter name of a unary function if it is a `_` - prefixed suffix of the function's name, or equal.
///
/// `fn strip_suffix(suffix)` will be hidden.
/// `fn stripsuffix(suffix)` will not be hidden.
-fn is_param_name_suffix_of_fn_name(
- param_name: &str,
- callable: &Callable,
- fn_name: Option<&str>,
-) -> bool {
- match (callable.n_params(), fn_name) {
- (1, Some(function)) => {
- function == param_name
- || function
- .len()
- .checked_sub(param_name.len())
- .and_then(|at| function.is_char_boundary(at).then(|| function.split_at(at)))
- .is_some_and(|(prefix, suffix)| {
- suffix.eq_ignore_ascii_case(param_name) && prefix.ends_with('_')
- })
- }
- _ => false,
- }
+fn is_param_name_suffix_of_fn_name(param_name: &str, fn_name: &str) -> bool {
+ fn_name == param_name
+ || fn_name
+ .len()
+ .checked_sub(param_name.len())
+ .and_then(|at| fn_name.is_char_boundary(at).then(|| fn_name.split_at(at)))
+ .is_some_and(|(prefix, suffix)| {
+ suffix.eq_ignore_ascii_case(param_name) && prefix.ends_with('_')
+ })
}
-fn is_argument_expr_similar_to_param_name(argument: &ast::Expr, param_name: &str) -> bool {
- let argument = match get_string_representation(argument) {
- Some(argument) => argument,
- None => return false,
- };
- is_argument_similar_to_param_name(&argument, param_name)
+fn is_argument_expr_similar_to_param_name(
+ sema: &Semantics<'_, RootDatabase>,
+ argument: &ast::Expr,
+ param_name: &str,
+) -> bool {
+ match get_segment_representation(argument) {
+ Some(Either::Left(argument)) => is_argument_similar_to_param_name(&argument, param_name),
+ Some(Either::Right(path)) => {
+ path.segment()
+ .and_then(|it| it.name_ref())
+ .is_some_and(|name_ref| name_ref.text().eq_ignore_ascii_case(param_name))
+ || is_adt_constructor_similar_to_param_name(sema, &path, param_name)
+ }
+ None => false,
+ }
}
/// Check whether param_name and argument are the same or
/// whether param_name is a prefix/suffix of argument(split at `_`).
-pub(super) fn is_argument_similar_to_param_name(argument: &str, param_name: &str) -> bool {
- // std is honestly too panic happy...
- let str_split_at = |str: &str, at| str.is_char_boundary(at).then(|| argument.split_at(at));
+pub(super) fn is_argument_similar_to_param_name(
+ argument: &[ast::NameRef],
+ param_name: &str,
+) -> bool {
+ debug_assert!(!argument.is_empty());
+ debug_assert!(!param_name.is_empty());
+ let param_name = param_name.split('_');
+ let argument = argument.iter().flat_map(|it| it.text_non_mutable().split('_'));
- let param_name = param_name.trim_start_matches('_');
- let argument = argument.trim_start_matches('_');
-
- match str_split_at(argument, param_name.len()) {
- Some((prefix, rest)) if prefix.eq_ignore_ascii_case(param_name) => {
- return rest.is_empty() || rest.starts_with('_');
- }
- _ => (),
- }
- match argument.len().checked_sub(param_name.len()).and_then(|at| str_split_at(argument, at)) {
- Some((rest, suffix)) if param_name.eq_ignore_ascii_case(suffix) => {
- return rest.is_empty() || rest.ends_with('_');
- }
- _ => (),
- }
- false
+ let prefix_match = zip(argument.clone(), param_name.clone())
+ .all(|(arg, param)| arg.eq_ignore_ascii_case(param));
+ let postfix_match = || {
+ zip(argument.rev(), param_name.rev()).all(|(arg, param)| arg.eq_ignore_ascii_case(param))
+ };
+ prefix_match || postfix_match()
}
-fn get_string_representation(expr: &ast::Expr) -> Option<String> {
+pub(super) fn get_segment_representation(
+ expr: &ast::Expr,
+) -> Option<Either<Vec<ast::NameRef>, ast::Path>> {
match expr {
ast::Expr::MethodCallExpr(method_call_expr) => {
+ let receiver =
+ method_call_expr.receiver().and_then(|expr| get_segment_representation(&expr));
let name_ref = method_call_expr.name_ref()?;
- match name_ref.text().as_str() {
- "clone" | "as_ref" => method_call_expr.receiver().map(|rec| rec.to_string()),
- name_ref => Some(name_ref.to_owned()),
+ if INSIGNIFICANT_METHOD_NAMES.contains(&name_ref.text().as_str()) {
+ return receiver;
}
+ Some(Either::Left(match receiver {
+ Some(Either::Left(mut left)) => {
+ left.push(name_ref);
+ left
+ }
+ Some(Either::Right(_)) | None => vec![name_ref],
+ }))
}
- ast::Expr::MacroExpr(macro_expr) => {
- Some(macro_expr.macro_call()?.path()?.segment()?.to_string())
+ ast::Expr::FieldExpr(field_expr) => {
+ let expr = field_expr.expr().and_then(|expr| get_segment_representation(&expr));
+ let name_ref = field_expr.name_ref()?;
+ let res = match expr {
+ Some(Either::Left(mut left)) => {
+ left.push(name_ref);
+ left
+ }
+ Some(Either::Right(_)) | None => vec![name_ref],
+ };
+ Some(Either::Left(res))
}
- ast::Expr::FieldExpr(field_expr) => Some(field_expr.name_ref()?.to_string()),
- ast::Expr::PathExpr(path_expr) => Some(path_expr.path()?.segment()?.to_string()),
- ast::Expr::PrefixExpr(prefix_expr) => get_string_representation(&prefix_expr.expr()?),
- ast::Expr::RefExpr(ref_expr) => get_string_representation(&ref_expr.expr()?),
- ast::Expr::CastExpr(cast_expr) => get_string_representation(&cast_expr.expr()?),
+ // paths
+ ast::Expr::MacroExpr(macro_expr) => macro_expr.macro_call()?.path().map(Either::Right),
+ ast::Expr::RecordExpr(record_expr) => record_expr.path().map(Either::Right),
+ ast::Expr::PathExpr(path_expr) => {
+ let path = path_expr.path()?;
+ // single segment paths are likely locals
+ Some(match path.as_single_name_ref() {
+ None => Either::Right(path),
+ Some(name_ref) => Either::Left(vec![name_ref]),
+ })
+ }
+ ast::Expr::PrefixExpr(prefix_expr) if prefix_expr.op_kind() == Some(UnaryOp::Not) => None,
+ // recurse
+ ast::Expr::PrefixExpr(prefix_expr) => get_segment_representation(&prefix_expr.expr()?),
+ ast::Expr::RefExpr(ref_expr) => get_segment_representation(&ref_expr.expr()?),
+ ast::Expr::CastExpr(cast_expr) => get_segment_representation(&cast_expr.expr()?),
+ ast::Expr::CallExpr(call_expr) => get_segment_representation(&call_expr.expr()?),
+ ast::Expr::AwaitExpr(await_expr) => get_segment_representation(&await_expr.expr()?),
+ ast::Expr::IndexExpr(index_expr) => get_segment_representation(&index_expr.base()?),
+ ast::Expr::ParenExpr(paren_expr) => get_segment_representation(&paren_expr.expr()?),
+ ast::Expr::TryExpr(try_expr) => get_segment_representation(&try_expr.expr()?),
+ // ast::Expr::ClosureExpr(closure_expr) => todo!(),
_ => None,
}
}
@@ -210,30 +258,15 @@
fn is_obvious_param(param_name: &str) -> bool {
// avoid displaying hints for common functions like map, filter, etc.
// or other obvious words used in std
- let is_obvious_param_name =
- matches!(param_name, "predicate" | "value" | "pat" | "rhs" | "other");
- param_name.len() == 1 || is_obvious_param_name
+ param_name.len() == 1 || INSIGNIFICANT_PARAMETER_NAMES.contains(&param_name)
}
fn is_adt_constructor_similar_to_param_name(
sema: &Semantics<'_, RootDatabase>,
- argument: &ast::Expr,
+ path: &ast::Path,
param_name: &str,
) -> bool {
- let path = match argument {
- ast::Expr::CallExpr(c) => c.expr().and_then(|e| match e {
- ast::Expr::PathExpr(p) => p.path(),
- _ => None,
- }),
- ast::Expr::PathExpr(p) => p.path(),
- ast::Expr::RecordExpr(r) => r.path(),
- _ => return false,
- };
- let path = match path {
- Some(it) => it,
- None => return false,
- };
- (|| match sema.resolve_path(&path)? {
+ (|| match sema.resolve_path(path)? {
hir::PathResolution::Def(hir::ModuleDef::Adt(_)) => {
Some(to_lower_snake_case(&path.segment()?.name_ref()?.text()) == param_name)
}
@@ -501,6 +534,7 @@
fn foo(param: u32) {}
fn bar(param_eter: u32) {}
+fn baz(a_d_e: u32) {}
enum CompletionKind {
Keyword,
@@ -553,6 +587,14 @@
//^^^^^^^^^^^ param_eter
non_ident_pat((0, 0));
+
+ baz(a.d.e);
+ baz(a.dc.e);
+ // ^^^^^^ a_d_e
+ baz(ac.d.e);
+ // ^^^^^^ a_d_e
+ baz(a.d.ec);
+ // ^^^^^^ a_d_e
}"#,
);
}
diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs
index f85a7bf..13b161e 100644
--- a/crates/ide/src/lib.rs
+++ b/crates/ide/src/lib.rs
@@ -238,12 +238,18 @@
file_set.insert(file_id, VfsPath::new_virtual_path("/main.rs".to_owned()));
let source_root = SourceRoot::new_local(file_set);
- let mut change = ChangeWithProcMacros::new();
+ let mut change = ChangeWithProcMacros::default();
change.set_roots(vec![source_root]);
let mut crate_graph = CrateGraphBuilder::default();
// FIXME: cfg options
// Default to enable test for single file.
let mut cfg_options = CfgOptions::default();
+
+ // FIXME: This is less than ideal
+ let proc_macro_cwd = Arc::new(
+ TryFrom::try_from(&*std::env::current_dir().unwrap().as_path().to_string_lossy())
+ .unwrap(),
+ );
cfg_options.insert_atom(sym::test.clone());
crate_graph.add_crate_root(
file_id,
@@ -255,7 +261,7 @@
Env::default(),
CrateOrigin::Local { repo: None, name: None },
false,
- None,
+ proc_macro_cwd,
Arc::new(CrateWorkspaceData {
data_layout: Err("fixture has no layout".into()),
toolchain: None,
@@ -747,7 +753,7 @@
frange: FileRange,
) -> Cancellable<Vec<Assist>> {
let include_fixes = match &assist_config.allowed {
- Some(it) => it.iter().any(|&it| it == AssistKind::None || it == AssistKind::QuickFix),
+ Some(it) => it.contains(&AssistKind::QuickFix),
None => true,
};
diff --git a/crates/ide/src/static_index.rs b/crates/ide/src/static_index.rs
index 1244132..057d635 100644
--- a/crates/ide/src/static_index.rs
+++ b/crates/ide/src/static_index.rs
@@ -1,17 +1,17 @@
//! This module provides `StaticIndex` which is used for powering
//! read-only code browsers and emitting LSIF
+use arrayvec::ArrayVec;
use hir::{Crate, HirFileIdExt, Module, Semantics, db::HirDatabase};
use ide_db::{
FileId, FileRange, FxHashMap, FxHashSet, RootDatabase,
base_db::{RootQueryDb, SourceDatabase, VfsPath},
- defs::Definition,
+ defs::{Definition, IdentClass},
documentation::Documentation,
famous_defs::FamousDefs,
- helpers::get_definition,
};
use span::Edition;
-use syntax::{AstNode, SyntaxKind::*, SyntaxNode, T, TextRange};
+use syntax::{AstNode, SyntaxKind::*, SyntaxNode, SyntaxToken, T, TextRange};
use crate::navigation_target::UpmappingResult;
use crate::{
@@ -126,6 +126,22 @@
)
}
+// FIXME: This is a weird function
+fn get_definitions(
+ sema: &Semantics<'_, RootDatabase>,
+ token: SyntaxToken,
+) -> Option<ArrayVec<Definition, 2>> {
+ for token in sema.descend_into_macros_exact(token) {
+ let def = IdentClass::classify_token(sema, &token).map(IdentClass::definitions_no_ops);
+ if let Some(defs) = def {
+ if !defs.is_empty() {
+ return Some(defs);
+ }
+ }
+ }
+ None
+}
+
pub enum VendoredLibrariesConfig<'a> {
Included { workspace_root: &'a VfsPath },
Excluded,
@@ -257,11 +273,14 @@
for token in tokens {
let range = token.text_range();
let node = token.parent().unwrap();
- let def = match get_definition(&sema, token.clone()) {
- Some(it) => it,
+ match get_definitions(&sema, token.clone()) {
+ Some(it) => {
+ for i in it {
+ add_token(i, range, &node);
+ }
+ }
None => continue,
};
- add_token(def, range, &node);
}
self.files.push(result);
}
@@ -308,7 +327,7 @@
#[cfg(test)]
mod tests {
use crate::{StaticIndex, fixture};
- use ide_db::{FileRange, FxHashSet, base_db::VfsPath};
+ use ide_db::{FileRange, FxHashMap, FxHashSet, base_db::VfsPath};
use syntax::TextSize;
use super::VendoredLibrariesConfig;
@@ -363,6 +382,71 @@
}
}
+ #[track_caller]
+ fn check_references(
+ #[rust_analyzer::rust_fixture] ra_fixture: &str,
+ vendored_libs_config: VendoredLibrariesConfig<'_>,
+ ) {
+ let (analysis, ranges) = fixture::annotations_without_marker(ra_fixture);
+ let s = StaticIndex::compute(&analysis, vendored_libs_config);
+ let mut range_set: FxHashMap<_, i32> = ranges.iter().map(|it| (it.0, 0)).collect();
+
+ // Make sure that all references have at least one range. We use a HashMap instead of a
+ // HashSet so that we can have more than one reference at the same range.
+ for (_, t) in s.tokens.iter() {
+ for r in &t.references {
+ if r.is_definition {
+ continue;
+ }
+ if r.range.range.start() == TextSize::from(0) {
+ // ignore whole file range corresponding to module definition
+ continue;
+ }
+ match range_set.entry(r.range) {
+ std::collections::hash_map::Entry::Occupied(mut entry) => {
+ let count = entry.get_mut();
+ *count += 1;
+ }
+ std::collections::hash_map::Entry::Vacant(_) => {
+ panic!("additional reference {r:?}");
+ }
+ }
+ }
+ }
+ for (range, count) in range_set.iter() {
+ if *count == 0 {
+ panic!("unfound reference {range:?}");
+ }
+ }
+ }
+
+ #[test]
+ fn field_initialization() {
+ check_references(
+ r#"
+struct Point {
+ x: f64,
+ //^^^
+ y: f64,
+ //^^^
+}
+ fn foo() {
+ let x = 5.;
+ let y = 10.;
+ let mut p = Point { x, y };
+ //^^^^^ ^ ^
+ p.x = 9.;
+ //^ ^
+ p.y = 10.;
+ //^ ^
+ }
+"#,
+ VendoredLibrariesConfig::Included {
+ workspace_root: &VfsPath::new_virtual_path("/workspace".to_owned()),
+ },
+ );
+ }
+
#[test]
fn struct_and_enum() {
check_all_ranges(
@@ -387,6 +471,17 @@
workspace_root: &VfsPath::new_virtual_path("/workspace".to_owned()),
},
);
+
+ check_references(
+ r#"
+struct Foo;
+enum E { X(Foo) }
+ // ^^^
+"#,
+ VendoredLibrariesConfig::Included {
+ workspace_root: &VfsPath::new_virtual_path("/workspace".to_owned()),
+ },
+ );
}
#[test]
diff --git a/crates/ide/src/syntax_highlighting/highlight.rs b/crates/ide/src/syntax_highlighting/highlight.rs
index 6f51819..5914a8f 100644
--- a/crates/ide/src/syntax_highlighting/highlight.rs
+++ b/crates/ide/src/syntax_highlighting/highlight.rs
@@ -113,7 +113,8 @@
) -> Highlight {
let operator_parent = token.parent();
let parent_kind = operator_parent.as_ref().map_or(EOF, SyntaxNode::kind);
- let h = match (kind, parent_kind) {
+
+ match (kind, parent_kind) {
(T![?], TRY_EXPR) => HlTag::Operator(HlOperator::Other) | HlMod::ControlFlow,
(T![&], BIN_EXPR) => HlOperator::Bitwise.into(),
(T![&], REF_EXPR | REF_PAT) => HlTag::Operator(HlOperator::Other).into(),
@@ -240,8 +241,7 @@
_ => HlPunct::Other,
}
.into(),
- };
- h
+ }
}
fn keyword(token: SyntaxToken, kind: SyntaxKind) -> Highlight {
diff --git a/crates/ide/src/view_crate_graph.rs b/crates/ide/src/view_crate_graph.rs
index e878c9a..4696fef 100644
--- a/crates/ide/src/view_crate_graph.rs
+++ b/crates/ide/src/view_crate_graph.rs
@@ -1,4 +1,5 @@
use dot::{Id, LabelText};
+use ide_db::base_db::salsa::plumbing::AsId;
use ide_db::{
FxHashMap, RootDatabase,
base_db::{
@@ -78,7 +79,8 @@
}
fn node_id(&'a self, n: &Crate) -> Id<'a> {
- Id::new(format!("_{:?}", n)).unwrap()
+ let id = n.as_id().as_u32();
+ Id::new(format!("_{:?}", id)).unwrap()
}
fn node_shape(&'a self, _node: &Crate) -> Option<LabelText<'a>> {
diff --git a/crates/intern/Cargo.toml b/crates/intern/Cargo.toml
index 397eba0..9ff656c 100644
--- a/crates/intern/Cargo.toml
+++ b/crates/intern/Cargo.toml
@@ -13,7 +13,6 @@
[dependencies]
-# We need to freeze the version of the crate, as the raw-api feature is considered unstable
dashmap.workspace = true
hashbrown.workspace = true
rustc-hash.workspace = true
diff --git a/crates/intern/src/lib.rs b/crates/intern/src/lib.rs
index 6548bb1..e990490 100644
--- a/crates/intern/src/lib.rs
+++ b/crates/intern/src/lib.rs
@@ -177,7 +177,10 @@
map: OnceLock<InternMap<T>>,
}
-#[allow(clippy::new_without_default)] // this a const fn, so it can't be default
+#[allow(
+ clippy::new_without_default,
+ reason = "this a const fn, so it can't be default yet. See <https://github.com/rust-lang/rust/issues/63065>"
+)]
impl<T: ?Sized> InternStorage<T> {
pub const fn new() -> Self {
Self { map: OnceLock::new() }
diff --git a/crates/intern/src/symbol/symbols.rs b/crates/intern/src/symbol/symbols.rs
index cc9b3ef..a9ed185 100644
--- a/crates/intern/src/symbol/symbols.rs
+++ b/crates/intern/src/symbol/symbols.rs
@@ -161,6 +161,7 @@
bitxor_assign,
bitxor,
bool,
+ bootstrap,
box_free,
Box,
boxed,
@@ -521,4 +522,12 @@
win64,
array,
boxed_slice,
+ completions,
+ ignore_flyimport,
+ ignore_flyimport_methods,
+ ignore_methods,
+ position,
+ flags,
+ precision,
+ width,
}
diff --git a/crates/load-cargo/src/lib.rs b/crates/load-cargo/src/lib.rs
index 243619b..c50e63d 100644
--- a/crates/load-cargo/src/lib.rs
+++ b/crates/load-cargo/src/lib.rs
@@ -426,7 +426,7 @@
) -> RootDatabase {
let lru_cap = std::env::var("RA_LRU_CAP").ok().and_then(|it| it.parse::<u16>().ok());
let mut db = RootDatabase::new(lru_cap);
- let mut analysis_change = ChangeWithProcMacros::new();
+ let mut analysis_change = ChangeWithProcMacros::default();
db.enable_proc_attr_macros();
@@ -496,7 +496,7 @@
def_site: Span,
call_site: Span,
mixed_site: Span,
- current_dir: Option<String>,
+ current_dir: String,
) -> Result<tt::TopSubtree<Span>, ProcMacroExpansionError> {
match self.0.expand(
subtree.view(),
diff --git a/crates/mbe/Cargo.toml b/crates/mbe/Cargo.toml
index 6ee505e..f3ab093 100644
--- a/crates/mbe/Cargo.toml
+++ b/crates/mbe/Cargo.toml
@@ -12,7 +12,7 @@
[lib]
[dependencies]
-cov-mark = "2.0.0-pre.1"
+cov-mark = "2.0.0"
rustc-hash.workspace = true
smallvec.workspace = true
arrayvec.workspace = true
diff --git a/crates/mbe/src/expander/transcriber.rs b/crates/mbe/src/expander/transcriber.rs
index b1f542e..f3f9f29 100644
--- a/crates/mbe/src/expander/transcriber.rs
+++ b/crates/mbe/src/expander/transcriber.rs
@@ -389,8 +389,13 @@
match ctx.bindings.get_fragment(v, id, &mut ctx.nesting, marker) {
Ok(fragment) => {
match fragment {
- Fragment::Tokens(tt) => builder.extend_with_tt(tt.strip_invisible()),
- Fragment::TokensOwned(tt) => builder.extend_with_tt(tt.view().strip_invisible()),
+                            // rustc spacing is not like ours. Ours is like proc macros': it dictates how puncts will actually be joined.
+                            // rustc uses spacing mostly for pretty printing, so we have to deviate a bit from what rustc does here.
+                            // Basically, a metavariable can never be joined with whatever comes after it.
+ Fragment::Tokens(tt) => builder.extend_with_tt_alone(tt.strip_invisible()),
+ Fragment::TokensOwned(tt) => {
+ builder.extend_with_tt_alone(tt.view().strip_invisible())
+ }
Fragment::Expr(sub) => {
let sub = sub.strip_invisible();
let mut span = id;
@@ -402,7 +407,7 @@
if wrap_in_parens {
builder.open(tt::DelimiterKind::Parenthesis, span);
}
- builder.extend_with_tt(sub);
+ builder.extend_with_tt_alone(sub);
if wrap_in_parens {
builder.close(span);
}
diff --git a/crates/mbe/src/parser.rs b/crates/mbe/src/parser.rs
index 7be49cb..8a2f124 100644
--- a/crates/mbe/src/parser.rs
+++ b/crates/mbe/src/parser.rs
@@ -6,7 +6,10 @@
use arrayvec::ArrayVec;
use intern::{Symbol, sym};
use span::{Edition, Span, SyntaxContext};
-use tt::iter::{TtElement, TtIter};
+use tt::{
+ MAX_GLUED_PUNCT_LEN,
+ iter::{TtElement, TtIter},
+};
use crate::ParseError;
@@ -96,7 +99,7 @@
delimiter: tt::Delimiter<Span>,
},
Literal(tt::Literal<Span>),
- Punct(Box<ArrayVec<tt::Punct<Span>, 3>>),
+ Punct(Box<ArrayVec<tt::Punct<Span>, MAX_GLUED_PUNCT_LEN>>),
Ident(tt::Ident<Span>),
}
@@ -151,7 +154,7 @@
pub(crate) enum Separator {
Literal(tt::Literal<Span>),
Ident(tt::Ident<Span>),
- Puncts(ArrayVec<tt::Punct<Span>, 3>),
+ Puncts(ArrayVec<tt::Punct<Span>, MAX_GLUED_PUNCT_LEN>),
}
// Note that when we compare a Separator, we just care about its textual value.
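The ArrayVec capacity above was previously the hard-coded literal 3; the named MAX_GLUED_PUNCT_LEN constant records where that bound comes from. A small standalone illustration, assuming the constant keeps that value: the longest punctuation sequences Rust glues into one token are three characters.

    // Local stand-in for the constant the hunk above imports from the `tt` crate.
    const MAX_GLUED_PUNCT_LEN: usize = 3;

    fn main() {
        // `<<=`, `>>=`, `..=` and `...` are the longest glued puncts in Rust.
        for glued in ["<<=", ">>=", "..=", "..."] {
            assert!(glued.len() <= MAX_GLUED_PUNCT_LEN);
        }
    }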
diff --git a/crates/parser/Cargo.toml b/crates/parser/Cargo.toml
index a36a39d..a90edfc 100644
--- a/crates/parser/Cargo.toml
+++ b/crates/parser/Cargo.toml
@@ -19,7 +19,7 @@
edition.workspace = true
[dev-dependencies]
-expect-test = "1.4.0"
+expect-test = "1.5.1"
stdx.workspace = true
diff --git a/crates/parser/src/grammar/items/adt.rs b/crates/parser/src/grammar/items/adt.rs
index 9a16c9d..a375696 100644
--- a/crates/parser/src/grammar/items/adt.rs
+++ b/crates/parser/src/grammar/items/adt.rs
@@ -107,7 +107,7 @@
}
// test record_field_list
-// struct S { a: i32, b: f32 }
+// struct S { a: i32, b: f32, unsafe c: u8 }
pub(crate) fn record_field_list(p: &mut Parser<'_>) {
assert!(p.at(T!['{']));
let m = p.start();
@@ -131,6 +131,7 @@
// struct S { #[attr] f: f32 }
attributes::outer_attrs(p);
opt_visibility(p, false);
+ p.eat(T![unsafe]);
if p.at(IDENT) {
name(p);
p.expect(T![:]);
diff --git a/crates/parser/test_data/parser/inline/ok/record_field_list.rast b/crates/parser/test_data/parser/inline/ok/record_field_list.rast
index 065d7e7..07686f5 100644
--- a/crates/parser/test_data/parser/inline/ok/record_field_list.rast
+++ b/crates/parser/test_data/parser/inline/ok/record_field_list.rast
@@ -30,6 +30,20 @@
PATH_SEGMENT
NAME_REF
IDENT "f32"
+ COMMA ","
+ WHITESPACE " "
+ RECORD_FIELD
+ UNSAFE_KW "unsafe"
+ WHITESPACE " "
+ NAME
+ IDENT "c"
+ COLON ":"
+ WHITESPACE " "
+ PATH_TYPE
+ PATH
+ PATH_SEGMENT
+ NAME_REF
+ IDENT "u8"
WHITESPACE " "
R_CURLY "}"
WHITESPACE "\n"
diff --git a/crates/parser/test_data/parser/inline/ok/record_field_list.rs b/crates/parser/test_data/parser/inline/ok/record_field_list.rs
index a3bd778..1f4612f 100644
--- a/crates/parser/test_data/parser/inline/ok/record_field_list.rs
+++ b/crates/parser/test_data/parser/inline/ok/record_field_list.rs
@@ -1 +1 @@
-struct S { a: i32, b: f32 }
+struct S { a: i32, b: f32, unsafe c: u8 }
diff --git a/crates/proc-macro-api/src/lib.rs b/crates/proc-macro-api/src/lib.rs
index c692d5a..d67d605 100644
--- a/crates/proc-macro-api/src/lib.rs
+++ b/crates/proc-macro-api/src/lib.rs
@@ -170,7 +170,7 @@
def_site: Span,
call_site: Span,
mixed_site: Span,
- current_dir: Option<String>,
+ current_dir: String,
) -> Result<Result<tt::TopSubtree<Span>, PanicMessage>, ServerError> {
let version = self.process.version();
@@ -198,7 +198,7 @@
},
lib: self.dylib_path.to_path_buf().into(),
env,
- current_dir,
+ current_dir: Some(current_dir),
};
let response = self.process.send_task(Request::ExpandMacro(Box::new(task)))?;
diff --git a/crates/proc-macro-api/src/process.rs b/crates/proc-macro-api/src/process.rs
index 7be6686..27bf751 100644
--- a/crates/proc-macro-api/src/process.rs
+++ b/crates/proc-macro-api/src/process.rs
@@ -62,8 +62,7 @@
let mut srv = create_srv()?;
tracing::info!("sending proc-macro server version check");
match srv.version_check() {
- Ok(v) if v > CURRENT_API_VERSION => Err(io::Error::new(
- io::ErrorKind::Other,
+ Ok(v) if v > CURRENT_API_VERSION => Err(io::Error::other(
format!( "The version of the proc-macro server ({v}) in your Rust toolchain is newer than the version supported by your rust-analyzer ({CURRENT_API_VERSION}).
This will prevent proc-macro expansion from working. Please consider updating your rust-analyzer to ensure compatibility with your current toolchain."
),
@@ -82,7 +81,7 @@
Err(e) => {
tracing::info!(%e, "proc-macro version check failed");
Err(
- io::Error::new(io::ErrorKind::Other, format!("proc-macro server version check failed: {e}")),
+ io::Error::other(format!("proc-macro server version check failed: {e}")),
)
}
}
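Both hunks in this file replace io::Error::new(io::ErrorKind::Other, ..) with the shorter io::Error::other(..) constructor (stable since Rust 1.74). A minimal standalone sketch of the equivalence, not taken from the codebase:

    use std::io;

    fn main() {
        // Both constructions yield an error of kind `Other`; `other` is simply terser.
        let verbose = io::Error::new(io::ErrorKind::Other, "version check failed");
        let concise = io::Error::other("version check failed");
        assert_eq!(verbose.kind(), concise.kind());
    }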
diff --git a/crates/proc-macro-srv-cli/Cargo.toml b/crates/proc-macro-srv-cli/Cargo.toml
index 57a28b0..ab42102 100644
--- a/crates/proc-macro-srv-cli/Cargo.toml
+++ b/crates/proc-macro-srv-cli/Cargo.toml
@@ -8,6 +8,7 @@
edition.workspace = true
license.workspace = true
rust-version.workspace = true
+publish = false
[dependencies]
proc-macro-srv.workspace = true
diff --git a/crates/proc-macro-srv/proc-macro-test/Cargo.toml b/crates/proc-macro-srv/proc-macro-test/Cargo.toml
index 2a5bfdd..eddefb3 100644
--- a/crates/proc-macro-srv/proc-macro-test/Cargo.toml
+++ b/crates/proc-macro-srv/proc-macro-test/Cargo.toml
@@ -9,4 +9,4 @@
[lib]
[build-dependencies]
-cargo_metadata = "0.18.1"
+cargo_metadata = "0.19.2"
diff --git a/crates/proc-macro-srv/proc-macro-test/build.rs b/crates/proc-macro-srv/proc-macro-test/build.rs
index d3d58a6..b97569d 100644
--- a/crates/proc-macro-srv/proc-macro-test/build.rs
+++ b/crates/proc-macro-srv/proc-macro-test/build.rs
@@ -110,7 +110,7 @@
let mut artifact_path = None;
for message in Message::parse_stream(output.stdout.as_slice()) {
if let Message::CompilerArtifact(artifact) = message.unwrap() {
- if artifact.target.kind.contains(&"proc-macro".to_string())
+ if artifact.target.kind.contains(&cargo_metadata::TargetKind::ProcMacro)
&& (artifact.package_id.repr.starts_with(&repr)
|| artifact.package_id.repr == pkgid)
{
diff --git a/crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs b/crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs
index 749a776..dfdbb4c 100644
--- a/crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs
+++ b/crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs
@@ -90,7 +90,7 @@
#[proc_macro_derive(DeriveEmpty)]
pub fn derive_empty(_item: TokenStream) -> TokenStream {
- TokenStream::new()
+ TokenStream::default()
}
#[proc_macro_derive(DerivePanic)]
diff --git a/crates/proc-macro-srv/src/dylib.rs b/crates/proc-macro-srv/src/dylib.rs
index 245b064..c49159d 100644
--- a/crates/proc-macro-srv/src/dylib.rs
+++ b/crates/proc-macro-srv/src/dylib.rs
@@ -21,13 +21,32 @@
/// [here](https://github.com/fedochet/rust-proc-macro-panic-inside-panic-expample/issues/1)
///
/// It seems that on Windows that behaviour is default, so we do nothing in that case.
+///
+/// # Safety
+///
+/// The caller is responsible for ensuring that the path is a valid proc-macro library
#[cfg(windows)]
-fn load_library(file: &Utf8Path) -> Result<Library, libloading::Error> {
+unsafe fn load_library(file: &Utf8Path) -> Result<Library, libloading::Error> {
+    // SAFETY: The caller is responsible for ensuring that the path is a valid proc-macro library
unsafe { Library::new(file) }
}
+/// Loads dynamic library in platform dependent manner.
+///
+/// For unix, you have to use RTLD_DEEPBIND flag to escape problems described
+/// [here](https://github.com/fedochet/rust-proc-macro-panic-inside-panic-expample)
+/// and [here](https://github.com/rust-lang/rust/issues/60593).
+///
+/// Usage of RTLD_DEEPBIND
+/// [here](https://github.com/fedochet/rust-proc-macro-panic-inside-panic-expample/issues/1)
+///
+/// It seems that on Windows that behaviour is default, so we do nothing in that case.
+///
+/// # Safety
+///
+/// The caller is responsible for ensuring that the path is a valid proc-macro library
#[cfg(unix)]
-fn load_library(file: &Utf8Path) -> Result<Library, libloading::Error> {
+unsafe fn load_library(file: &Utf8Path) -> Result<Library, libloading::Error> {
// not defined by POSIX, different values on mips vs other targets
#[cfg(target_env = "gnu")]
use libc::RTLD_DEEPBIND;
@@ -39,6 +58,7 @@
#[cfg(not(target_env = "gnu"))]
const RTLD_DEEPBIND: std::os::raw::c_int = 0x0;
+    // SAFETY: The caller is responsible for ensuring that the path is a valid proc-macro library
unsafe { UnixLibrary::open(Some(file), RTLD_NOW | RTLD_DEEPBIND).map(|lib| lib.into()) }
}
@@ -84,26 +104,32 @@
impl ProcMacroLibrary {
fn open(path: &Utf8Path) -> Result<Self, LoadProcMacroDylibError> {
let file = fs::File::open(path)?;
+ #[allow(clippy::undocumented_unsafe_blocks)] // FIXME
let file = unsafe { memmap2::Mmap::map(&file) }?;
let obj = object::File::parse(&*file)
.map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;
let version_info = version::read_dylib_info(&obj)?;
+ if version_info.version_string != crate::RUSTC_VERSION_STRING {
+ return Err(LoadProcMacroDylibError::AbiMismatch(version_info.version_string));
+ }
+
let symbol_name =
find_registrar_symbol(&obj).map_err(invalid_data_err)?.ok_or_else(|| {
invalid_data_err(format!("Cannot find registrar symbol in file {path}"))
})?;
- let lib = load_library(path).map_err(invalid_data_err)?;
- let proc_macros = unsafe {
- // SAFETY: We extend the lifetime here to avoid referential borrow problems
- // We never reveal proc_macros to the outside and drop it before _lib
- std::mem::transmute::<&ProcMacros, &'static ProcMacros>(ProcMacros::from_lib(
- &lib,
- symbol_name,
- &version_info.version_string,
- )?)
- };
- Ok(ProcMacroLibrary { _lib: lib, proc_macros })
+ // SAFETY: We have verified the validity of the dylib as a proc-macro library
+ let lib = unsafe { load_library(path) }.map_err(invalid_data_err)?;
+ // SAFETY: We have verified the validity of the dylib as a proc-macro library
+        // The 'static lifetime is a lie: it's actually the lifetime of the library, but that's
+        // unavoidable due to self-referentiality.
+        // We make sure that the library is not dropped before the symbol is.
+ let proc_macros =
+ unsafe { lib.get::<&'static &'static ProcMacros>(symbol_name.as_bytes()) };
+ match proc_macros {
+ Ok(proc_macros) => Ok(ProcMacroLibrary { proc_macros: *proc_macros, _lib: lib }),
+ Err(e) => Err(e.into()),
+ }
}
}
diff --git a/crates/proc-macro-srv/src/dylib/version.rs b/crates/proc-macro-srv/src/dylib/version.rs
index 4e28aac..3b2551f 100644
--- a/crates/proc-macro-srv/src/dylib/version.rs
+++ b/crates/proc-macro-srv/src/dylib/version.rs
@@ -27,7 +27,7 @@
let mut items = ver_str.split_whitespace();
let tag = items.next().ok_or_else(|| err!("version format error"))?;
if tag != "rustc" {
- return Err(err!("version format error (No rustc tag)"));
+ return Err(err!("no rustc tag"));
}
let version_part = items.next().ok_or_else(|| err!("no version string"))?;
@@ -83,7 +83,7 @@
/// A proc macro crate binary's ".rustc" section has following byte layout:
/// * [b'r',b'u',b's',b't',0,0,0,5] is the first 8 bytes
/// * ff060000 734e6150 is followed, it's the snappy format magic bytes,
-/// means bytes from here(including this sequence) are compressed in
+/// means bytes from here (including this sequence) are compressed in
/// snappy compression format. Version info is inside here, so decompress
/// this.
///
@@ -110,7 +110,7 @@
));
}
let version = u32::from_be_bytes([dot_rustc[4], dot_rustc[5], dot_rustc[6], dot_rustc[7]]);
- // Last supported version is:
+ // Last version with breaking changes is:
// https://github.com/rust-lang/rust/commit/b94cfefc860715fb2adf72a6955423d384c69318
let (mut metadata_portion, bytes_before_version) = match version {
8 => {
@@ -118,7 +118,7 @@
let data_len = u32::from_be_bytes(len_bytes.try_into().unwrap()) as usize;
(&dot_rustc[12..data_len + 12], 13)
}
- 9 => {
+ 9 | 10 => {
let len_bytes = &dot_rustc[8..16];
let data_len = u64::from_le_bytes(len_bytes.try_into().unwrap()) as usize;
(&dot_rustc[16..data_len + 12], 17)
diff --git a/crates/proc-macro-srv/src/lib.rs b/crates/proc-macro-srv/src/lib.rs
index 2623b2d..223c5a5 100644
--- a/crates/proc-macro-srv/src/lib.rs
+++ b/crates/proc-macro-srv/src/lib.rs
@@ -15,7 +15,7 @@
#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
#![feature(proc_macro_internals, proc_macro_diagnostic, proc_macro_span)]
#![allow(unreachable_pub, internal_features, clippy::disallowed_types, clippy::print_stderr)]
-#![deny(deprecated_safe)]
+#![deny(deprecated_safe, clippy::undocumented_unsafe_blocks)]
extern crate proc_macro;
#[cfg(feature = "in-rust-tree")]
diff --git a/crates/proc-macro-srv/src/proc_macros.rs b/crates/proc-macro-srv/src/proc_macros.rs
index a5fa7f6..1853270 100644
--- a/crates/proc-macro-srv/src/proc_macros.rs
+++ b/crates/proc-macro-srv/src/proc_macros.rs
@@ -2,11 +2,7 @@
use proc_macro::bridge;
-use libloading::Library;
-
-use crate::{
- ProcMacroKind, ProcMacroSrvSpan, dylib::LoadProcMacroDylibError, server_impl::TopSubtree,
-};
+use crate::{ProcMacroKind, ProcMacroSrvSpan, server_impl::TopSubtree};
#[repr(transparent)]
pub(crate) struct ProcMacros([bridge::client::ProcMacro]);
@@ -18,28 +14,6 @@
}
impl ProcMacros {
- /// Load a new ABI.
- ///
- /// # Arguments
- ///
- /// *`lib` - The dynamic library containing the macro implementations
- /// *`symbol_name` - The symbol name the macros can be found attributes
- /// *`info` - RustCInfo about the compiler that was used to compile the
- /// macro crate. This is the information we use to figure out
- /// which ABI to return
- pub(crate) fn from_lib<'l>(
- lib: &'l Library,
- symbol_name: String,
- version_string: &str,
- ) -> Result<&'l ProcMacros, LoadProcMacroDylibError> {
- if version_string != crate::RUSTC_VERSION_STRING {
- return Err(LoadProcMacroDylibError::AbiMismatch(version_string.to_owned()));
- }
- unsafe { lib.get::<&'l &'l ProcMacros>(symbol_name.as_bytes()) }
- .map(|it| **it)
- .map_err(Into::into)
- }
-
pub(crate) fn expand<S: ProcMacroSrvSpan>(
&self,
macro_name: &str,
@@ -52,7 +26,7 @@
let parsed_body = crate::server_impl::TokenStream::with_subtree(macro_body);
let parsed_attributes = attributes
- .map_or_else(crate::server_impl::TokenStream::new, |attr| {
+ .map_or_else(crate::server_impl::TokenStream::default, |attr| {
crate::server_impl::TokenStream::with_subtree(attr)
});
diff --git a/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs b/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs
index 6254018..1d845da 100644
--- a/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs
+++ b/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs
@@ -212,7 +212,7 @@
base: Option<Self::TokenStream>,
trees: Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>>,
) -> Self::TokenStream {
- let mut builder = TokenStreamBuilder::new();
+ let mut builder = TokenStreamBuilder::default();
if let Some(base) = base {
builder.push(base);
}
@@ -227,7 +227,7 @@
base: Option<Self::TokenStream>,
streams: Vec<Self::TokenStream>,
) -> Self::TokenStream {
- let mut builder = TokenStreamBuilder::new();
+ let mut builder = TokenStreamBuilder::default();
if let Some(base) = base {
builder.push(base);
}
diff --git a/crates/proc-macro-srv/src/server_impl/token_id.rs b/crates/proc-macro-srv/src/server_impl/token_id.rs
index d0c7f23..ca9d329 100644
--- a/crates/proc-macro-srv/src/server_impl/token_id.rs
+++ b/crates/proc-macro-srv/src/server_impl/token_id.rs
@@ -190,7 +190,7 @@
base: Option<Self::TokenStream>,
trees: Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>>,
) -> Self::TokenStream {
- let mut builder = TokenStreamBuilder::new();
+ let mut builder = TokenStreamBuilder::default();
if let Some(base) = base {
builder.push(base);
}
@@ -205,7 +205,7 @@
base: Option<Self::TokenStream>,
streams: Vec<Self::TokenStream>,
) -> Self::TokenStream {
- let mut builder = TokenStreamBuilder::new();
+ let mut builder = TokenStreamBuilder::default();
if let Some(base) = base {
builder.push(base);
}
diff --git a/crates/proc-macro-srv/src/server_impl/token_stream.rs b/crates/proc-macro-srv/src/server_impl/token_stream.rs
index a3cf76d..4946a4f 100644
--- a/crates/proc-macro-srv/src/server_impl/token_stream.rs
+++ b/crates/proc-macro-srv/src/server_impl/token_stream.rs
@@ -9,6 +9,13 @@
pub(super) token_trees: Vec<tt::TokenTree<S>>,
}
+// #[derive(Default)] would mean that `S: Default`.
+impl<S> Default for TokenStream<S> {
+ fn default() -> Self {
+ Self { token_trees: Default::default() }
+ }
+}
+
impl<S: std::fmt::Debug + Copy> std::fmt::Debug for TokenStream<S> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("TokenStream")
@@ -17,17 +24,7 @@
}
}
-impl<S> Default for TokenStream<S> {
- fn default() -> Self {
- Self { token_trees: vec![] }
- }
-}
-
impl<S: Copy> TokenStream<S> {
- pub(crate) fn new() -> Self {
- TokenStream::default()
- }
-
pub(crate) fn with_subtree(subtree: TopSubtree<S>) -> Self {
let delimiter_kind = subtree.top_subtree().delimiter.kind;
let mut token_trees = subtree.0;
@@ -145,10 +142,6 @@
}
impl<S: Copy> TokenStreamBuilder<S> {
- pub(super) fn new() -> TokenStreamBuilder<S> {
- TokenStreamBuilder { acc: TokenStream::new() }
- }
-
pub(super) fn push(&mut self, stream: TokenStream<S>) {
self.acc.token_trees.extend(stream.token_trees)
}
@@ -157,3 +150,9 @@
self.acc
}
}
+
+impl<S: Copy> Default for TokenStreamBuilder<S> {
+ fn default() -> Self {
+ Self { acc: TokenStream::default() }
+ }
+}
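The comment in the hunk above explains why this Default impl is written by hand rather than derived. A standalone sketch of the general pattern, using assumed names: deriving Default on a generic container would require S: Default, a bound the empty container does not actually need.

    struct TokenBuf<S> {
        tokens: Vec<S>,
    }

    impl<S> Default for TokenBuf<S> {
        fn default() -> Self {
            // `Vec::new()` requires nothing of `S`, so neither does this impl.
            Self { tokens: Vec::new() }
        }
    }

    // A type that deliberately has no `Default` impl.
    struct Opaque;

    fn main() {
        // Compiles even though `Opaque: Default` does not hold.
        let buf: TokenBuf<Opaque> = TokenBuf::default();
        assert!(buf.tokens.is_empty());
    }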
diff --git a/crates/profile/Cargo.toml b/crates/profile/Cargo.toml
index 376737c..1fb1383 100644
--- a/crates/profile/Cargo.toml
+++ b/crates/profile/Cargo.toml
@@ -13,7 +13,7 @@
[dependencies]
cfg-if = "1.0.0"
-jemalloc-ctl = { version = "0.5.0", package = "tikv-jemalloc-ctl", optional = true }
+jemalloc-ctl = { version = "0.5.4", package = "tikv-jemalloc-ctl", optional = true }
[target.'cfg(all(target_os = "linux", not(target_env = "ohos")))'.dependencies]
perf-event = "=0.4.7"
diff --git a/crates/project-model/Cargo.toml b/crates/project-model/Cargo.toml
index 83def0e..64ea759 100644
--- a/crates/project-model/Cargo.toml
+++ b/crates/project-model/Cargo.toml
@@ -34,7 +34,7 @@
toolchain.workspace = true
[dev-dependencies]
-expect-test = "1.4.0"
+expect-test = "1.5.1"
[lints]
workspace = true
diff --git a/crates/project-model/src/build_dependencies.rs b/crates/project-model/src/build_dependencies.rs
index aa0099d..b26d19e 100644
--- a/crates/project-model/src/build_dependencies.rs
+++ b/crates/project-model/src/build_dependencies.rs
@@ -343,7 +343,8 @@
Message::CompilerArtifact(message) => {
with_output_for(&message.package_id.repr, &mut |name, data| {
progress(format!("building proc-macros: {name}"));
- if message.target.kind.iter().any(|k| k == "proc-macro") {
+ if message.target.kind.contains(&cargo_metadata::TargetKind::ProcMacro)
+ {
// Skip rmeta file
if let Some(filename) =
message.filenames.iter().find(|file| is_dylib(file))
diff --git a/crates/project-model/src/cargo_workspace.rs b/crates/project-model/src/cargo_workspace.rs
index 6a932d2..d304c97 100644
--- a/crates/project-model/src/cargo_workspace.rs
+++ b/crates/project-model/src/cargo_workspace.rs
@@ -108,7 +108,10 @@
pub invocation_strategy: InvocationStrategy,
/// Optional path to use instead of `target` when building
pub target_dir: Option<Utf8PathBuf>,
+ /// Gate `#[test]` behind `#[cfg(test)]`
pub set_test: bool,
+ /// Load the project without any dependencies
+ pub no_deps: bool,
}
pub type Package = Idx<PackageData>;
@@ -232,16 +235,20 @@
}
impl TargetKind {
- fn new(kinds: &[String]) -> TargetKind {
+ fn new(kinds: &[cargo_metadata::TargetKind]) -> TargetKind {
for kind in kinds {
- return match kind.as_str() {
- "bin" => TargetKind::Bin,
- "test" => TargetKind::Test,
- "bench" => TargetKind::Bench,
- "example" => TargetKind::Example,
- "custom-build" => TargetKind::BuildScript,
- "proc-macro" => TargetKind::Lib { is_proc_macro: true },
- _ if kind.contains("lib") => TargetKind::Lib { is_proc_macro: false },
+ return match kind {
+ cargo_metadata::TargetKind::Bin => TargetKind::Bin,
+ cargo_metadata::TargetKind::Test => TargetKind::Test,
+ cargo_metadata::TargetKind::Bench => TargetKind::Bench,
+ cargo_metadata::TargetKind::Example => TargetKind::Example,
+ cargo_metadata::TargetKind::CustomBuild => TargetKind::BuildScript,
+ cargo_metadata::TargetKind::ProcMacro => TargetKind::Lib { is_proc_macro: true },
+ cargo_metadata::TargetKind::Lib
+ | cargo_metadata::TargetKind::DyLib
+ | cargo_metadata::TargetKind::CDyLib
+ | cargo_metadata::TargetKind::StaticLib
+ | cargo_metadata::TargetKind::RLib => TargetKind::Lib { is_proc_macro: false },
_ => continue,
};
}
@@ -304,6 +311,7 @@
current_dir: &AbsPath,
config: &CargoMetadataConfig,
sysroot: &Sysroot,
+ no_deps: bool,
locked: bool,
progress: &dyn Fn(String),
) -> anyhow::Result<(cargo_metadata::Metadata, Option<anyhow::Error>)> {
@@ -312,8 +320,8 @@
current_dir,
config,
sysroot,
+ no_deps,
locked,
- false,
progress,
);
if let Ok((_, Some(ref e))) = res {
@@ -331,8 +339,8 @@
current_dir: &AbsPath,
config: &CargoMetadataConfig,
sysroot: &Sysroot,
- locked: bool,
no_deps: bool,
+ locked: bool,
progress: &dyn Fn(String),
) -> anyhow::Result<(cargo_metadata::Metadata, Option<anyhow::Error>)> {
let cargo = sysroot.tool(Tool::Cargo, current_dir);
@@ -476,7 +484,7 @@
cargo_metadata::Edition::E2015 => Edition::Edition2015,
cargo_metadata::Edition::E2018 => Edition::Edition2018,
cargo_metadata::Edition::E2021 => Edition::Edition2021,
- cargo_metadata::Edition::_E2024 => Edition::Edition2024,
+ cargo_metadata::Edition::E2024 => Edition::Edition2024,
_ => {
tracing::error!("Unsupported edition `{:?}`", edition);
Edition::CURRENT
diff --git a/crates/project-model/src/sysroot.rs b/crates/project-model/src/sysroot.rs
index 7e8db8d..6ed030a 100644
--- a/crates/project-model/src/sysroot.rs
+++ b/crates/project-model/src/sysroot.rs
@@ -300,6 +300,7 @@
rust_lib_src_dir,
&cargo_config,
self,
+ false,
// Make sure we never attempt to write to the sysroot
true,
&|_| (),
diff --git a/crates/project-model/src/workspace.rs b/crates/project-model/src/workspace.rs
index 95148bb..7a139ea 100644
--- a/crates/project-model/src/workspace.rs
+++ b/crates/project-model/src/workspace.rs
@@ -220,6 +220,7 @@
sysroot,
sysroot_src,
target,
+ no_deps,
..
} = config;
let mut sysroot = match (sysroot, sysroot_src) {
@@ -301,6 +302,7 @@
extra_env: extra_env.clone(),
},
&sysroot,
+ *no_deps,
false,
&|_| (),
) {
@@ -343,6 +345,7 @@
extra_env: extra_env.clone(),
},
&sysroot,
+ *no_deps,
false,
&|_| (),
)
@@ -511,6 +514,7 @@
extra_env: config.extra_env.clone(),
},
&sysroot,
+ config.no_deps,
false,
&|_| (),
)
@@ -988,6 +992,7 @@
);
let mut cfg_cache: FxHashMap<&str, Vec<CfgAtom>> = FxHashMap::default();
+ let project_root = Arc::new(project.project_root().to_path_buf());
let idx_to_crate_id: FxHashMap<CrateArrayIdx, _> = project
.crates()
@@ -1067,7 +1072,10 @@
CrateOrigin::Local { repo: None, name: None }
},
*is_proc_macro,
- proc_macro_cwd.clone(),
+ match proc_macro_cwd {
+ Some(path) => Arc::new(path.clone()),
+ None => project_root.clone(),
+ },
crate_ws_data.clone(),
);
debug!(
@@ -1139,6 +1147,7 @@
let mut pkg_crates = FxHashMap::default();
// Does any crate signal to rust-analyzer that they need the rustc_private crates?
let mut has_private = false;
+ let workspace_proc_macro_cwd = Arc::new(cargo.workspace_root().to_path_buf());
// Next, create crates for each package, target pair
for pkg in cargo.packages() {
@@ -1161,8 +1170,9 @@
let mut lib_tgt = None;
for &tgt in cargo[pkg].targets.iter() {
+ let pkg_data = &cargo[pkg];
if !matches!(cargo[tgt].kind, TargetKind::Lib { .. })
- && (!cargo[pkg].is_member || cargo.is_sysroot())
+ && (!pkg_data.is_member || cargo.is_sysroot())
{
// For non-workspace-members, Cargo does not resolve dev-dependencies, so we don't
// add any targets except the library target, since those will not work correctly if
@@ -1176,7 +1186,6 @@
let Some(file_id) = load(root) else { continue };
let build_data = build_scripts.get_output(pkg);
- let pkg_data = &cargo[pkg];
let crate_id = add_target_crate_root(
crate_graph,
proc_macros,
@@ -1203,6 +1212,11 @@
}
},
crate_ws_data.clone(),
+ if pkg_data.is_member {
+ workspace_proc_macro_cwd.clone()
+ } else {
+ Arc::new(pkg_data.manifest.parent().to_path_buf())
+ },
);
if let TargetKind::Lib { .. } = kind {
lib_tgt = Some((crate_id, name.clone()));
@@ -1364,7 +1378,7 @@
name: display_name.map(|n| n.canonical_name().to_owned()),
},
false,
- None,
+ Arc::new(detached_file.parent().to_path_buf()),
crate_ws_data,
);
@@ -1372,6 +1386,7 @@
(crate_graph, FxHashMap::default())
}
+// FIXME: There shouldn't really be a need for duplicating all of this?
fn handle_rustc_crates(
crate_graph: &mut CrateGraphBuilder,
proc_macros: &mut ProcMacroPaths,
@@ -1391,6 +1406,7 @@
// The root package of the rustc-dev component is rustc_driver, so we match that
let root_pkg =
rustc_workspace.packages().find(|&package| rustc_workspace[package].name == "rustc_driver");
+ let workspace_proc_macro_cwd = Arc::new(cargo.workspace_root().to_path_buf());
// The rustc workspace might be incomplete (such as if rustc-dev is not
// installed for the current toolchain) and `rustc_source` is set to discover.
if let Some(root_pkg) = root_pkg {
@@ -1404,14 +1420,15 @@
if rustc_pkg_crates.contains_key(&pkg) {
continue;
}
- for dep in &rustc_workspace[pkg].dependencies {
+ let pkg_data = &rustc_workspace[pkg];
+ for dep in &pkg_data.dependencies {
queue.push_back(dep.pkg);
}
let mut cfg_options = cfg_options.clone();
- override_cfg.apply(&mut cfg_options, &rustc_workspace[pkg].name);
+ override_cfg.apply(&mut cfg_options, &pkg_data.name);
- for &tgt in rustc_workspace[pkg].targets.iter() {
+ for &tgt in pkg_data.targets.iter() {
let kind @ TargetKind::Lib { is_proc_macro } = rustc_workspace[tgt].kind else {
continue;
};
@@ -1421,14 +1438,19 @@
crate_graph,
proc_macros,
rustc_workspace,
- &rustc_workspace[pkg],
+ pkg_data,
build_scripts.get_output(pkg).zip(Some(build_scripts.error().is_some())),
cfg_options.clone(),
file_id,
&rustc_workspace[tgt].name,
kind,
- CrateOrigin::Rustc { name: Symbol::intern(&rustc_workspace[pkg].name) },
+ CrateOrigin::Rustc { name: Symbol::intern(&pkg_data.name) },
crate_ws_data.clone(),
+ if pkg_data.is_member {
+ workspace_proc_macro_cwd.clone()
+ } else {
+ Arc::new(pkg_data.manifest.parent().to_path_buf())
+ },
);
pkg_to_lib_crate.insert(pkg, crate_id);
// Add dependencies on core / std / alloc for this crate
@@ -1490,6 +1512,7 @@
kind: TargetKind,
origin: CrateOrigin,
crate_ws_data: Arc<CrateWorkspaceData>,
+ proc_macro_cwd: Arc<AbsPathBuf>,
) -> CrateBuilderId {
let edition = pkg.edition;
let potential_cfg_options = if pkg.features.is_empty() {
@@ -1531,9 +1554,7 @@
env,
origin,
matches!(kind, TargetKind::Lib { is_proc_macro: true }),
- matches!(kind, TargetKind::Lib { is_proc_macro: true }).then(|| {
- if pkg.is_member { cargo.workspace_root() } else { pkg.manifest.parent() }.to_path_buf()
- }),
+ proc_macro_cwd,
crate_ws_data,
);
if let TargetKind::Lib { is_proc_macro: true } = kind {
@@ -1643,6 +1664,7 @@
vec![
CfgAtom::Flag(sym::debug_assertions.clone()),
CfgAtom::Flag(sym::miri.clone()),
+ CfgAtom::Flag(sym::bootstrap.clone()),
],
vec![CfgAtom::Flag(sym::test.clone())],
),
@@ -1706,7 +1728,7 @@
Env::default(),
CrateOrigin::Lang(LangCrateOrigin::from(&*stitched[krate].name)),
false,
- None,
+ Arc::new(stitched[krate].root.parent().to_path_buf()),
crate_ws_data.clone(),
);
Some((krate, crate_id))
diff --git a/crates/project-model/test_data/output/cargo_hello_world_project_model.txt b/crates/project-model/test_data/output/cargo_hello_world_project_model.txt
index d30d67e..4ef9d816 100644
--- a/crates/project-model/test_data/output/cargo_hello_world_project_model.txt
+++ b/crates/project-model/test_data/output/cargo_hello_world_project_model.txt
@@ -22,7 +22,9 @@
),
},
is_proc_macro: false,
- proc_macro_cwd: None,
+ proc_macro_cwd: AbsPathBuf(
+ "$ROOT$hello-world",
+ ),
},
extra: ExtraCrateData {
version: Some(
@@ -104,7 +106,9 @@
),
},
is_proc_macro: false,
- proc_macro_cwd: None,
+ proc_macro_cwd: AbsPathBuf(
+ "$ROOT$hello-world",
+ ),
},
extra: ExtraCrateData {
version: Some(
@@ -186,7 +190,9 @@
),
},
is_proc_macro: false,
- proc_macro_cwd: None,
+ proc_macro_cwd: AbsPathBuf(
+ "$ROOT$hello-world",
+ ),
},
extra: ExtraCrateData {
version: Some(
@@ -268,7 +274,9 @@
),
},
is_proc_macro: false,
- proc_macro_cwd: None,
+ proc_macro_cwd: AbsPathBuf(
+ "$ROOT$hello-world",
+ ),
},
extra: ExtraCrateData {
version: Some(
@@ -333,7 +341,9 @@
name: "libc",
},
is_proc_macro: false,
- proc_macro_cwd: None,
+ proc_macro_cwd: AbsPathBuf(
+ "$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98",
+ ),
},
extra: ExtraCrateData {
version: Some(
diff --git a/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt b/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt
index d30d67e..4ef9d816 100644
--- a/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt
+++ b/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt
@@ -22,7 +22,9 @@
),
},
is_proc_macro: false,
- proc_macro_cwd: None,
+ proc_macro_cwd: AbsPathBuf(
+ "$ROOT$hello-world",
+ ),
},
extra: ExtraCrateData {
version: Some(
@@ -104,7 +106,9 @@
),
},
is_proc_macro: false,
- proc_macro_cwd: None,
+ proc_macro_cwd: AbsPathBuf(
+ "$ROOT$hello-world",
+ ),
},
extra: ExtraCrateData {
version: Some(
@@ -186,7 +190,9 @@
),
},
is_proc_macro: false,
- proc_macro_cwd: None,
+ proc_macro_cwd: AbsPathBuf(
+ "$ROOT$hello-world",
+ ),
},
extra: ExtraCrateData {
version: Some(
@@ -268,7 +274,9 @@
),
},
is_proc_macro: false,
- proc_macro_cwd: None,
+ proc_macro_cwd: AbsPathBuf(
+ "$ROOT$hello-world",
+ ),
},
extra: ExtraCrateData {
version: Some(
@@ -333,7 +341,9 @@
name: "libc",
},
is_proc_macro: false,
- proc_macro_cwd: None,
+ proc_macro_cwd: AbsPathBuf(
+ "$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98",
+ ),
},
extra: ExtraCrateData {
version: Some(
diff --git a/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt b/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt
index 925233c..52089d1 100644
--- a/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt
+++ b/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt
@@ -22,7 +22,9 @@
),
},
is_proc_macro: false,
- proc_macro_cwd: None,
+ proc_macro_cwd: AbsPathBuf(
+ "$ROOT$hello-world",
+ ),
},
extra: ExtraCrateData {
version: Some(
@@ -103,7 +105,9 @@
),
},
is_proc_macro: false,
- proc_macro_cwd: None,
+ proc_macro_cwd: AbsPathBuf(
+ "$ROOT$hello-world",
+ ),
},
extra: ExtraCrateData {
version: Some(
@@ -184,7 +188,9 @@
),
},
is_proc_macro: false,
- proc_macro_cwd: None,
+ proc_macro_cwd: AbsPathBuf(
+ "$ROOT$hello-world",
+ ),
},
extra: ExtraCrateData {
version: Some(
@@ -265,7 +271,9 @@
),
},
is_proc_macro: false,
- proc_macro_cwd: None,
+ proc_macro_cwd: AbsPathBuf(
+ "$ROOT$hello-world",
+ ),
},
extra: ExtraCrateData {
version: Some(
@@ -329,7 +337,9 @@
name: "libc",
},
is_proc_macro: false,
- proc_macro_cwd: None,
+ proc_macro_cwd: AbsPathBuf(
+ "$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98",
+ ),
},
extra: ExtraCrateData {
version: Some(
diff --git a/crates/project-model/test_data/output/rust_project_cfg_groups.txt b/crates/project-model/test_data/output/rust_project_cfg_groups.txt
index 5f18d5e..98fe598 100644
--- a/crates/project-model/test_data/output/rust_project_cfg_groups.txt
+++ b/crates/project-model/test_data/output/rust_project_cfg_groups.txt
@@ -13,7 +13,9 @@
),
},
is_proc_macro: false,
- proc_macro_cwd: None,
+ proc_macro_cwd: AbsPathBuf(
+ "$ROOT$",
+ ),
},
extra: ExtraCrateData {
version: None,
@@ -61,7 +63,9 @@
),
},
is_proc_macro: false,
- proc_macro_cwd: None,
+ proc_macro_cwd: AbsPathBuf(
+ "$ROOT$",
+ ),
},
extra: ExtraCrateData {
version: None,
diff --git a/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt b/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt
index 5cb9992..0dc373b 100644
--- a/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt
+++ b/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt
@@ -13,7 +13,9 @@
),
},
is_proc_macro: false,
- proc_macro_cwd: None,
+ proc_macro_cwd: AbsPathBuf(
+ "$ROOT$",
+ ),
},
extra: ExtraCrateData {
version: None,
diff --git a/crates/query-group-macro/Cargo.toml b/crates/query-group-macro/Cargo.toml
index 116028b..8aeb262 100644
--- a/crates/query-group-macro/Cargo.toml
+++ b/crates/query-group-macro/Cargo.toml
@@ -15,8 +15,8 @@
[dependencies]
proc-macro2 = "1.0"
quote = "1.0"
-syn = { version = "2.0", features = ["full", "extra-traits"] }
+syn = { version = "2.0", features = ["full", "extra-traits", "visit-mut"] }
[dev-dependencies]
-expect-test = "1.5.0"
+expect-test = "1.5.1"
salsa.workspace = true
diff --git a/crates/rust-analyzer/Cargo.toml b/crates/rust-analyzer/Cargo.toml
index d470741..af7b4e6 100644
--- a/crates/rust-analyzer/Cargo.toml
+++ b/crates/rust-analyzer/Cargo.toml
@@ -22,34 +22,34 @@
anyhow.workspace = true
base64 = "0.22"
crossbeam-channel.workspace = true
-dirs = "5.0.1"
+dirs = "6.0.0"
dissimilar.workspace = true
ide-completion.workspace = true
indexmap.workspace = true
itertools.workspace = true
-scip = "0.5.1"
+scip = "0.5.2"
lsp-types = { version = "=0.95.0", features = ["proposed"] }
-parking_lot = "0.12.1"
-xflags = "0.3.0"
-oorandom = "11.1.3"
+parking_lot = "0.12.3"
+xflags = "0.3.2"
+oorandom = "11.1.5"
rayon.workspace = true
rustc-hash.workspace = true
serde_json = { workspace = true, features = ["preserve_order"] }
serde.workspace = true
serde_derive.workspace = true
tenthash = "1.0.0"
-num_cpus = "1.15.0"
-mimalloc = { version = "0.1.30", default-features = false, optional = true }
+num_cpus = "1.16.0"
+mimalloc = { version = "0.1.44", default-features = false, optional = true }
lsp-server.workspace = true
tracing.workspace = true
tracing-subscriber.workspace = true
tracing-tree.workspace = true
triomphe.workspace = true
-toml = "0.8.8"
+toml = "0.8.20"
nohash-hasher.workspace = true
-walkdir = "2.3.2"
+walkdir = "2.5.0"
semver.workspace = true
-memchr = "2.7.1"
+memchr = "2.7.4"
cargo_metadata.workspace = true
process-wrap.workspace = true
@@ -81,10 +81,10 @@
] }
[target.'cfg(not(target_env = "msvc"))'.dependencies]
-jemallocator = { version = "0.5.0", package = "tikv-jemallocator", optional = true }
+jemallocator = { version = "0.5.4", package = "tikv-jemallocator", optional = true }
[dev-dependencies]
-expect-test = "1.4.0"
+expect-test = "1.5.1"
xshell.workspace = true
test-utils.workspace = true
diff --git a/crates/rust-analyzer/src/cli/analysis_stats.rs b/crates/rust-analyzer/src/cli/analysis_stats.rs
index 2933100..66334e7 100644
--- a/crates/rust-analyzer/src/cli/analysis_stats.rs
+++ b/crates/rust-analyzer/src/cli/analysis_stats.rs
@@ -2,7 +2,8 @@
//! errors.
use std::{
- env,
+ env, fmt,
+ ops::AddAssign,
time::{SystemTime, UNIX_EPOCH},
};
@@ -118,29 +119,80 @@
}
let mut item_tree_sw = self.stop_watch();
- let mut num_item_trees = 0;
let source_roots = krates
.iter()
.cloned()
.map(|krate| db.file_source_root(krate.root_file(db)).source_root_id(db))
.unique();
+
+ let mut dep_loc = 0;
+ let mut workspace_loc = 0;
+ let mut dep_item_trees = 0;
+ let mut workspace_item_trees = 0;
+
+ let mut workspace_item_stats = PrettyItemStats::default();
+ let mut dep_item_stats = PrettyItemStats::default();
+
for source_root_id in source_roots {
let source_root = db.source_root(source_root_id).source_root(db);
- if !source_root.is_library || self.with_deps {
- for file_id in source_root.iter() {
- if let Some(p) = source_root.path_for_file(&file_id) {
- if let Some((_, Some("rs"))) = p.name_and_extension() {
- db.file_item_tree(EditionedFileId::current_edition(file_id).into());
- num_item_trees += 1;
+ for file_id in source_root.iter() {
+ if let Some(p) = source_root.path_for_file(&file_id) {
+ if let Some((_, Some("rs"))) = p.name_and_extension() {
+ // measure workspace/project code
+ if !source_root.is_library || self.with_deps {
+ let length = db.file_text(file_id).text(db).lines().count();
+ let item_stats = db
+ .file_item_tree(EditionedFileId::current_edition(file_id).into())
+ .item_tree_stats()
+ .into();
+
+ workspace_loc += length;
+ workspace_item_trees += 1;
+ workspace_item_stats += item_stats;
+ } else {
+ let length = db.file_text(file_id).text(db).lines().count();
+ let item_stats = db
+ .file_item_tree(EditionedFileId::current_edition(file_id).into())
+ .item_tree_stats()
+ .into();
+
+ dep_loc += length;
+ dep_item_trees += 1;
+ dep_item_stats += item_stats;
}
}
}
}
}
- eprintln!(" item trees: {num_item_trees}");
+ eprintln!(" item trees: {workspace_item_trees}");
let item_tree_time = item_tree_sw.elapsed();
+
+ eprintln!(
+ " dependency lines of code: {}, item trees: {}",
+ UsizeWithUnderscore(dep_loc),
+ UsizeWithUnderscore(dep_item_trees),
+ );
+ eprintln!(" dependency item stats: {}", dep_item_stats);
+
+ // FIXME(salsa-transition): bring back stats for ParseQuery (file size)
+        // and ParseMacroExpansionQuery (macro expansion "file") size once we make
+        // Salsa's memory usage tracking work with tracked functions.
+
+ // let mut total_file_size = Bytes::default();
+ // for e in ide_db::base_db::ParseQuery.in_db(db).entries::<Vec<_>>() {
+ // total_file_size += syntax_len(db.parse(e.key).syntax_node())
+ // }
+
+ // let mut total_macro_file_size = Bytes::default();
+ // for e in hir::db::ParseMacroExpansionQuery.in_db(db).entries::<Vec<_>>() {
+ // let val = db.parse_macro_expansion(e.key).value.0;
+ // total_macro_file_size += syntax_len(val.syntax_node())
+ // }
+ // eprintln!("source files: {total_file_size}, macro files: {total_macro_file_size}");
+
eprintln!("{:<20} {}", "Item Tree Collection:", item_tree_time);
report_metric("item tree time", item_tree_time.time.as_millis() as u64, "ms");
+ eprintln!(" Total Statistics:");
let mut crate_def_map_sw = self.stop_watch();
let mut num_crates = 0;
@@ -163,11 +215,16 @@
shuffle(&mut rng, &mut visit_queue);
}
- eprint!(" crates: {num_crates}");
+ eprint!(" crates: {num_crates}");
let mut num_decls = 0;
let mut bodies = Vec::new();
let mut adts = Vec::new();
let mut file_ids = Vec::new();
+
+ let mut num_traits = 0;
+ let mut num_macro_rules_macros = 0;
+ let mut num_proc_macros = 0;
+
while let Some(module) = visit_queue.pop() {
if visited_modules.insert(module) {
file_ids.extend(module.as_source_file_id(db));
@@ -189,6 +246,14 @@
bodies.push(DefWithBody::from(c));
}
ModuleDef::Static(s) => bodies.push(DefWithBody::from(s)),
+ ModuleDef::Trait(_) => num_traits += 1,
+ ModuleDef::Macro(m) => match m.kind(db) {
+ hir::MacroKind::Declarative => num_macro_rules_macros += 1,
+ hir::MacroKind::Derive
+ | hir::MacroKind::Attr
+ | hir::MacroKind::ProcMacro => num_proc_macros += 1,
+ _ => (),
+ },
_ => (),
};
}
@@ -217,6 +282,26 @@
.filter(|it| matches!(it, DefWithBody::Const(_) | DefWithBody::Static(_)))
.count(),
);
+
+ eprintln!(" Workspace:");
+ eprintln!(
+ " traits: {num_traits}, macro_rules macros: {num_macro_rules_macros}, proc_macros: {num_proc_macros}"
+ );
+ eprintln!(
+ " lines of code: {}, item trees: {}",
+ UsizeWithUnderscore(workspace_loc),
+ UsizeWithUnderscore(workspace_item_trees),
+ );
+ eprintln!(" usages: {}", workspace_item_stats);
+
+ eprintln!(" Dependencies:");
+ eprintln!(
+ " lines of code: {}, item trees: {}",
+ UsizeWithUnderscore(dep_loc),
+ UsizeWithUnderscore(dep_item_trees),
+ );
+ eprintln!(" declarations: {}", dep_item_stats);
+
let crate_def_map_time = crate_def_map_sw.elapsed();
eprintln!("{:<20} {}", "Item Collection:", crate_def_map_time);
report_metric("crate def map time", crate_def_map_time.time.as_millis() as u64, "ms");
@@ -264,24 +349,6 @@
}
report_metric("total memory", total_span.memory.allocated.megabytes() as u64, "MB");
- if self.source_stats {
- // FIXME(salsa-transition): bring back stats for ParseQuery (file size)
- // and ParseMacroExpansionQuery (mcaro expansion "file") size whenever we implement
- // Salsa's memory usage tracking works with tracked functions.
-
- // let mut total_file_size = Bytes::default();
- // for e in ide_db::base_db::ParseQuery.in_db(db).entries::<Vec<_>>() {
- // total_file_size += syntax_len(db.parse(e.key).syntax_node())
- // }
-
- // let mut total_macro_file_size = Bytes::default();
- // for e in hir::db::ParseMacroExpansionQuery.in_db(db).entries::<Vec<_>>() {
- // let val = db.parse_macro_expansion(e.key).value.0;
- // total_macro_file_size += syntax_len(val.syntax_node())
- // }
- // eprintln!("source files: {total_file_size}, macro files: {total_macro_file_size}");
- }
-
if verbosity.is_verbose() {
print_memory_usage(host, vfs);
}
@@ -1217,6 +1284,78 @@
(n * 100).checked_div(total).unwrap_or(100)
}
+#[derive(Default, Debug, Eq, PartialEq)]
+struct UsizeWithUnderscore(usize);
+
+impl fmt::Display for UsizeWithUnderscore {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let num_str = self.0.to_string();
+
+ if num_str.len() <= 3 {
+ return write!(f, "{}", num_str);
+ }
+
+ let mut result = String::new();
+
+ for (count, ch) in num_str.chars().rev().enumerate() {
+ if count > 0 && count % 3 == 0 {
+ result.push('_');
+ }
+ result.push(ch);
+ }
+
+ let result = result.chars().rev().collect::<String>();
+ write!(f, "{}", result)
+ }
+}
+
+impl std::ops::AddAssign for UsizeWithUnderscore {
+ fn add_assign(&mut self, other: UsizeWithUnderscore) {
+ self.0 += other.0;
+ }
+}
+
+#[derive(Default, Debug, Eq, PartialEq)]
+struct PrettyItemStats {
+ traits: UsizeWithUnderscore,
+ impls: UsizeWithUnderscore,
+ mods: UsizeWithUnderscore,
+ macro_calls: UsizeWithUnderscore,
+ macro_rules: UsizeWithUnderscore,
+}
+
+impl From<hir_def::item_tree::ItemTreeDataStats> for PrettyItemStats {
+ fn from(value: hir_def::item_tree::ItemTreeDataStats) -> Self {
+ Self {
+ traits: UsizeWithUnderscore(value.traits),
+ impls: UsizeWithUnderscore(value.impls),
+ mods: UsizeWithUnderscore(value.mods),
+ macro_calls: UsizeWithUnderscore(value.macro_calls),
+ macro_rules: UsizeWithUnderscore(value.macro_rules),
+ }
+ }
+}
+
+impl AddAssign for PrettyItemStats {
+ fn add_assign(&mut self, rhs: Self) {
+ self.traits += rhs.traits;
+ self.impls += rhs.impls;
+ self.mods += rhs.mods;
+ self.macro_calls += rhs.macro_calls;
+ self.macro_rules += rhs.macro_rules;
+ }
+}
+
+impl fmt::Display for PrettyItemStats {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(
+ f,
+ "traits: {}, impl: {}, mods: {}, macro calls: {}, macro rules: {}",
+ self.traits, self.impls, self.mods, self.macro_calls, self.macro_rules
+ )
+ }
+}
+
// FIXME(salsa-transition): bring this back whenever we implement
// Salsa's memory usage tracking to work with tracked functions.
// fn syntax_len(node: SyntaxNode) -> usize {
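For the UsizeWithUnderscore helper introduced above, a standalone sketch of its digit grouping; the function below mirrors the Display logic for illustration only and is not part of the diff.

    fn with_underscores(n: usize) -> String {
        let num_str = n.to_string();
        if num_str.len() <= 3 {
            return num_str;
        }
        // Walk the digits from the right, inserting `_` after every group of three.
        let mut result = String::new();
        for (count, ch) in num_str.chars().rev().enumerate() {
            if count > 0 && count % 3 == 0 {
                result.push('_');
            }
            result.push(ch);
        }
        result.chars().rev().collect()
    }

    fn main() {
        assert_eq!(with_underscores(950), "950");
        assert_eq!(with_underscores(12_345), "12_345");
        assert_eq!(with_underscores(1_234_567), "1_234_567");
    }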
diff --git a/crates/rust-analyzer/src/cli/flags.rs b/crates/rust-analyzer/src/cli/flags.rs
index ff24602..13075d4 100644
--- a/crates/rust-analyzer/src/cli/flags.rs
+++ b/crates/rust-analyzer/src/cli/flags.rs
@@ -62,8 +62,6 @@
optional --randomize
/// Run type inference in parallel.
optional --parallel
- /// Print the total length of all source and macro files (whitespace is not counted).
- optional --source-stats
/// Only analyze items matching this path.
optional -o, --only path: String
@@ -231,7 +229,6 @@
pub output: Option<OutputFormat>,
pub randomize: bool,
pub parallel: bool,
- pub source_stats: bool,
pub only: Option<String>,
pub with_deps: bool,
pub no_sysroot: bool,
diff --git a/crates/rust-analyzer/src/cli/progress_report.rs b/crates/rust-analyzer/src/cli/progress_report.rs
index 8b143da..c1b1d3f 100644
--- a/crates/rust-analyzer/src/cli/progress_report.rs
+++ b/crates/rust-analyzer/src/cli/progress_report.rs
@@ -79,8 +79,8 @@
// Backtrack to the first differing character
let mut output = String::new();
output += &'\x08'.to_string().repeat(self.text.len() - common_prefix_length);
- // Output new suffix
- output += &text[common_prefix_length..text.len()];
+        // Output the new suffix, using the chars() iterator to ensure Unicode compatibility
+ output.extend(text.chars().skip(common_prefix_length));
// If the new text is shorter than the old one: delete overlapping characters
if let Some(overlap_count) = self.text.len().checked_sub(text.len()) {
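The replacement above avoids byte-indexing the new text. A standalone sketch, with made-up helper names, of why iterating chars() is the safer way to take a suffix of potentially non-ASCII text:

    fn suffix_by_bytes(text: &str, prefix_len: usize) -> &str {
        // Panics if `prefix_len` does not land on a UTF-8 character boundary.
        &text[prefix_len..]
    }

    fn suffix_by_chars(text: &str, prefix_chars: usize) -> String {
        // Always produces well-formed UTF-8, whatever the character widths are.
        text.chars().skip(prefix_chars).collect()
    }

    fn main() {
        let text = "héllo";
        // `é` occupies two bytes, so byte index 2 falls inside it:
        // `suffix_by_bytes(text, 2)` would panic here.
        assert_eq!(suffix_by_chars(text, 2), "llo");
        // On pure ASCII the two approaches agree.
        assert_eq!(suffix_by_bytes("hello", 2), "llo");
    }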
diff --git a/crates/rust-analyzer/src/cli/rustc_tests.rs b/crates/rust-analyzer/src/cli/rustc_tests.rs
index 62cbe85..c042c26 100644
--- a/crates/rust-analyzer/src/cli/rustc_tests.rs
+++ b/crates/rust-analyzer/src/cli/rustc_tests.rs
@@ -139,7 +139,7 @@
FxHashMap::default()
};
let text = read_to_string(&p).unwrap();
- let mut change = ChangeWithProcMacros::new();
+ let mut change = ChangeWithProcMacros::default();
// Ignore unstable tests, since they move too fast and we do not intend to support all of them.
let mut ignore_test = text.contains("#![feature");
// Ignore test with extern crates, as this infra don't support them yet.
diff --git a/crates/rust-analyzer/src/cli/scip.rs b/crates/rust-analyzer/src/cli/scip.rs
index 1d27b05..d296788 100644
--- a/crates/rust-analyzer/src/cli/scip.rs
+++ b/crates/rust-analyzer/src/cli/scip.rs
@@ -128,7 +128,7 @@
};
// Generates symbols from token monikers.
- let mut symbol_generator = SymbolGenerator::new();
+ let mut symbol_generator = SymbolGenerator::default();
for StaticIndexedFile { file_id, tokens, .. } in si.files {
symbol_generator.clear_document_local_state();
@@ -417,16 +417,13 @@
is_inherent_impl: bool,
}
+#[derive(Default)]
struct SymbolGenerator {
token_to_symbols: FxHashMap<TokenId, Option<TokenSymbols>>,
local_count: usize,
}
impl SymbolGenerator {
- fn new() -> Self {
- SymbolGenerator { token_to_symbols: FxHashMap::default(), local_count: 0 }
- }
-
fn clear_document_local_state(&mut self) {
self.local_count = 0;
}
diff --git a/crates/rust-analyzer/src/command.rs b/crates/rust-analyzer/src/command.rs
index 81a20ee..0035d94 100644
--- a/crates/rust-analyzer/src/command.rs
+++ b/crates/rust-analyzer/src/command.rs
@@ -166,12 +166,9 @@
if read_at_least_one_message || exit_status.success() {
Ok(())
} else {
- Err(io::Error::new(
- io::ErrorKind::Other,
- format!(
- "Cargo watcher failed, the command produced no valid metadata (exit code: {exit_status:?}):\n{error}"
- ),
- ))
+ Err(io::Error::other(format!(
+ "Cargo watcher failed, the command produced no valid metadata (exit code: {exit_status:?}):\n{error}"
+ )))
}
}
}
diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs
index 351b2a6..aceacff 100644
--- a/crates/rust-analyzer/src/config.rs
+++ b/crates/rust-analyzer/src/config.rs
@@ -609,6 +609,9 @@
cargo_features: CargoFeaturesDef = CargoFeaturesDef::Selected(vec![]),
/// Whether to pass `--no-default-features` to cargo.
cargo_noDefaultFeatures: bool = false,
+ /// Whether to skip fetching dependencies. If set to "true", the analysis is performed
+ /// entirely offline, and Cargo metadata for dependencies is not fetched.
+ cargo_noDeps: bool = false,
/// Relative path to the sysroot, or "discover" to try to automatically find it via
/// "rustc --print sysroot".
///
@@ -2027,6 +2030,7 @@
extra_env: self.cargo_extraEnv(source_root).clone(),
target_dir: self.target_dir_from_config(source_root),
set_test: *self.cfg_setTest(source_root),
+ no_deps: *self.cargo_noDeps(source_root),
}
}
diff --git a/crates/rust-analyzer/src/diagnostics.rs b/crates/rust-analyzer/src/diagnostics.rs
index a09d96d..9b1463b 100644
--- a/crates/rust-analyzer/src/diagnostics.rs
+++ b/crates/rust-analyzer/src/diagnostics.rs
@@ -258,7 +258,7 @@
for (file_id, group) in odd_ones
.into_iter()
.sorted_by_key(|it| it.range.file_id)
- .group_by(|it| it.range.file_id)
+ .chunk_by(|it| it.range.file_id)
.into_iter()
{
if !subscriptions.contains(&file_id) {
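itertools renamed group_by to chunk_by; the behavior is unchanged. A minimal standalone sketch, assuming an itertools version that exposes chunk_by:

    use itertools::Itertools;

    fn main() {
        let data = [1, 1, 2, 2, 2, 3];
        let mut summary = Vec::new();
        // Consecutive equal keys land in the same chunk, exactly as with `group_by`.
        for (key, chunk) in &data.iter().chunk_by(|&&x| x) {
            summary.push((key, chunk.count()));
        }
        assert_eq!(summary, vec![(1, 2), (2, 3), (3, 1)]);
    }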
diff --git a/crates/rust-analyzer/src/global_state.rs b/crates/rust-analyzer/src/global_state.rs
index 6fc0ba9..820276e 100644
--- a/crates/rust-analyzer/src/global_state.rs
+++ b/crates/rust-analyzer/src/global_state.rs
@@ -299,7 +299,7 @@
FxHashMap::default();
let (change, modified_rust_files, workspace_structure_change) = {
- let mut change = ChangeWithProcMacros::new();
+ let mut change = ChangeWithProcMacros::default();
let mut guard = self.vfs.write();
let changed_files = guard.0.take_changes();
if changed_files.is_empty() {
diff --git a/crates/rust-analyzer/src/integrated_benchmarks.rs b/crates/rust-analyzer/src/integrated_benchmarks.rs
index 4ad028e..49ebffa 100644
--- a/crates/rust-analyzer/src/integrated_benchmarks.rs
+++ b/crates/rust-analyzer/src/integrated_benchmarks.rs
@@ -86,7 +86,7 @@
"self.data.cargo_buildScripts_rebuildOnSave",
"self. data. cargo_buildScripts_rebuildOnSave",
);
- let mut change = ChangeWithProcMacros::new();
+ let mut change = ChangeWithProcMacros::default();
change.change_file(file_id, Some(text));
host.apply_change(change);
}
@@ -149,7 +149,7 @@
let completion_offset =
patch(&mut text, "db.struct_data(self.id)", "sel;\ndb.struct_data(self.id)")
+ "sel".len();
- let mut change = ChangeWithProcMacros::new();
+ let mut change = ChangeWithProcMacros::default();
change.change_file(file_id, Some(text));
host.apply_change(change);
completion_offset
@@ -200,7 +200,7 @@
let completion_offset =
patch(&mut text, "sel;\ndb.struct_data(self.id)", ";sel;\ndb.struct_data(self.id)")
+ ";sel".len();
- let mut change = ChangeWithProcMacros::new();
+ let mut change = ChangeWithProcMacros::default();
change.change_file(file_id, Some(text));
host.apply_change(change);
completion_offset
@@ -250,7 +250,7 @@
let completion_offset =
patch(&mut text, "sel;\ndb.struct_data(self.id)", "self.;\ndb.struct_data(self.id)")
+ "self.".len();
- let mut change = ChangeWithProcMacros::new();
+ let mut change = ChangeWithProcMacros::default();
change.change_file(file_id, Some(text));
host.apply_change(change);
completion_offset
@@ -367,7 +367,7 @@
let _it = stdx::timeit("change");
let mut text = host.analysis().file_text(file_id).unwrap().to_string();
patch(&mut text, "db.struct_data(self.id)", "();\ndb.struct_data(self.id)");
- let mut change = ChangeWithProcMacros::new();
+ let mut change = ChangeWithProcMacros::default();
change.change_file(file_id, Some(text));
host.apply_change(change);
};
diff --git a/crates/rust-analyzer/src/lsp/from_proto.rs b/crates/rust-analyzer/src/lsp/from_proto.rs
index 37eff23..fb8a983 100644
--- a/crates/rust-analyzer/src/lsp/from_proto.rs
+++ b/crates/rust-analyzer/src/lsp/from_proto.rs
@@ -103,7 +103,6 @@
pub(crate) fn assist_kind(kind: lsp_types::CodeActionKind) -> Option<AssistKind> {
let assist_kind = match &kind {
- k if k == &lsp_types::CodeActionKind::EMPTY => AssistKind::None,
k if k == &lsp_types::CodeActionKind::QUICKFIX => AssistKind::QuickFix,
k if k == &lsp_types::CodeActionKind::REFACTOR => AssistKind::Refactor,
k if k == &lsp_types::CodeActionKind::REFACTOR_EXTRACT => AssistKind::RefactorExtract,
diff --git a/crates/rust-analyzer/src/lsp/to_proto.rs b/crates/rust-analyzer/src/lsp/to_proto.rs
index c30ee0f..4efe330 100644
--- a/crates/rust-analyzer/src/lsp/to_proto.rs
+++ b/crates/rust-analyzer/src/lsp/to_proto.rs
@@ -1477,7 +1477,7 @@
pub(crate) fn code_action_kind(kind: AssistKind) -> lsp_types::CodeActionKind {
match kind {
- AssistKind::None | AssistKind::Generate => lsp_types::CodeActionKind::EMPTY,
+ AssistKind::Generate => lsp_types::CodeActionKind::EMPTY,
AssistKind::QuickFix => lsp_types::CodeActionKind::QUICKFIX,
AssistKind::Refactor => lsp_types::CodeActionKind::REFACTOR,
AssistKind::RefactorExtract => lsp_types::CodeActionKind::REFACTOR_EXTRACT,
diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs
index 283b8f3..b73019b 100644
--- a/crates/rust-analyzer/src/reload.rs
+++ b/crates/rust-analyzer/src/reload.rs
@@ -413,35 +413,26 @@
.map(|res| res.as_ref().map_err(|e| e.to_string()))
.chain(iter::repeat_with(|| Err("proc-macro-srv is not running".into())));
for (client, paths) in proc_macro_clients.zip(paths) {
- paths
- .into_iter()
- .map(move |(crate_id, res)| {
- (
- crate_id,
- res.map_or_else(
- |e| Err((e, true)),
- |(crate_name, path)| {
- progress(path.to_string());
- client.as_ref().map_err(|it| (it.clone(), true)).and_then(
- |client| {
- load_proc_macro(
- client,
- &path,
- ignored_proc_macros
- .iter()
- .find_map(|(name, macros)| {
- eq_ignore_underscore(name, &crate_name)
- .then_some(&**macros)
- })
- .unwrap_or_default(),
- )
- },
- )
- },
- ),
- )
- })
- .for_each(|(krate, res)| builder.insert(krate, res));
+ for (crate_id, res) in paths.iter() {
+ let expansion_res = match client {
+ Ok(client) => match res {
+ Ok((crate_name, path)) => {
+ progress(path.to_string());
+ let ignored_proc_macros = ignored_proc_macros
+ .iter()
+ .find_map(|(name, macros)| {
+ eq_ignore_underscore(name, crate_name).then_some(&**macros)
+ })
+ .unwrap_or_default();
+
+ load_proc_macro(client, path, ignored_proc_macros)
+ }
+ Err(e) => Err((e.clone(), true)),
+ },
+ Err(ref e) => Err((e.clone(), true)),
+ };
+ builder.insert(*crate_id, expansion_res)
+ }
}
change.set_proc_macros(builder);
@@ -645,7 +636,7 @@
Config::user_config_dir_path().as_deref(),
);
- if (self.proc_macro_clients.is_empty() || !same_workspaces)
+ if (self.proc_macro_clients.len() < self.workspaces.len() || !same_workspaces)
&& self.config.expand_proc_macros()
{
info!("Spawning proc-macro servers");
@@ -739,7 +730,7 @@
ws_to_crate_graph(&self.workspaces, self.config.extra_env(None), load)
};
- let mut change = ChangeWithProcMacros::new();
+ let mut change = ChangeWithProcMacros::default();
if initial_build || !self.config.expand_proc_macros() {
if self.config.expand_proc_macros() {
change.set_proc_macros(
diff --git a/crates/span/Cargo.toml b/crates/span/Cargo.toml
index 3381dac..b3b401c 100644
--- a/crates/span/Cargo.toml
+++ b/crates/span/Cargo.toml
@@ -12,7 +12,7 @@
[dependencies]
la-arena.workspace = true
-salsa.workspace = true
+salsa = { workspace = true, optional = true }
rustc-hash.workspace = true
hashbrown.workspace = true
text-size.workspace = true
@@ -22,5 +22,8 @@
syntax.workspace = true
stdx.workspace = true
+[features]
+default = ["salsa"]
+
[lints]
workspace = true
diff --git a/crates/span/src/hygiene.rs b/crates/span/src/hygiene.rs
index 9ecd188..a2923cd 100644
--- a/crates/span/src/hygiene.rs
+++ b/crates/span/src/hygiene.rs
@@ -21,16 +21,19 @@
//! `ExpnData::call_site` in rustc, [`MacroCallLoc::call_site`] in rust-analyzer.
use std::fmt;
-use crate::{Edition, MacroCallId};
+use crate::Edition;
/// A syntax context describes a hierarchy tracking order of macro definitions.
+#[cfg(feature = "salsa")]
#[derive(Copy, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
pub struct SyntaxContext(
salsa::Id,
std::marker::PhantomData<&'static salsa::plumbing::interned::Value<SyntaxContext>>,
);
+#[cfg(feature = "salsa")]
const _: () = {
+ use crate::MacroCallId;
use salsa::plumbing as zalsa_;
use salsa::plumbing::interned as zalsa_struct_;
@@ -291,8 +294,6 @@
};
impl SyntaxContext {
- const MAX_ID: u32 = salsa::Id::MAX_U32 - 1;
-
pub fn is_root(self) -> bool {
(SyntaxContext::MAX_ID - Edition::LATEST as u32) <= self.into_u32()
&& self.into_u32() <= (SyntaxContext::MAX_ID - Edition::Edition2015 as u32)
@@ -308,20 +309,43 @@
/// The root context, which is the parent of all other contexts. All [`FileId`]s have this context.
pub const fn root(edition: Edition) -> Self {
let edition = edition as u32;
- SyntaxContext(
- salsa::Id::from_u32(SyntaxContext::MAX_ID - edition),
- std::marker::PhantomData,
- )
+ SyntaxContext::from_u32(SyntaxContext::MAX_ID - edition)
}
+}
- pub fn into_u32(self) -> u32 {
+#[cfg(feature = "salsa")]
+impl SyntaxContext {
+ const MAX_ID: u32 = salsa::Id::MAX_U32 - 1;
+
+ pub const fn into_u32(self) -> u32 {
self.0.as_u32()
}
- pub fn from_u32(u32: u32) -> Self {
+ pub const fn from_u32(u32: u32) -> Self {
Self(salsa::Id::from_u32(u32), std::marker::PhantomData)
}
}
+#[cfg(not(feature = "salsa"))]
+#[derive(Copy, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
+pub struct SyntaxContext(u32);
+
+#[allow(dead_code)]
+const SALSA_MAX_ID_MIRROR: u32 = u32::MAX - 0xFF;
+#[cfg(feature = "salsa")]
+const _: () = assert!(salsa::Id::MAX_U32 == SALSA_MAX_ID_MIRROR);
+
+#[cfg(not(feature = "salsa"))]
+impl SyntaxContext {
+ const MAX_ID: u32 = SALSA_MAX_ID_MIRROR - 1;
+
+ pub const fn into_u32(self) -> u32 {
+ self.0
+ }
+
+ pub const fn from_u32(u32: u32) -> Self {
+ Self(u32)
+ }
+}
/// A property of a macro expansion that determines how identifiers
/// produced by that expansion are resolved.
@@ -354,9 +378,9 @@
impl fmt::Display for SyntaxContext {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
if self.is_root() {
- write!(f, "ROOT{}", Edition::from_u32(SyntaxContext::MAX_ID - self.0.as_u32()).number())
+ write!(f, "ROOT{}", Edition::from_u32(SyntaxContext::MAX_ID - self.into_u32()).number())
} else {
- write!(f, "{}", self.0.as_u32())
+ write!(f, "{}", self.into_u32())
}
}
}
diff --git a/crates/span/src/lib.rs b/crates/span/src/lib.rs
index fbd1b25..f3f6d80 100644
--- a/crates/span/src/lib.rs
+++ b/crates/span/src/lib.rs
@@ -180,6 +180,22 @@
}
}
+#[cfg(not(feature = "salsa"))]
+mod salsa {
+ #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
+ pub(crate) struct Id(u32);
+
+ impl Id {
+ pub(crate) const fn from_u32(u32: u32) -> Self {
+ Self(u32)
+ }
+
+ pub(crate) const fn as_u32(self) -> u32 {
+ self.0
+ }
+ }
+}
+
/// Input to the analyzer is a set of files, where each file is identified by
/// `FileId` and contains source code. However, another source of source code in
/// Rust are macros: each macro can be thought of as producing a "temporary
@@ -201,12 +217,14 @@
#[derive(Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct HirFileId(salsa::Id);
+#[cfg(feature = "salsa")]
impl salsa::plumbing::AsId for HirFileId {
fn as_id(&self) -> salsa::Id {
self.0
}
}
+#[cfg(feature = "salsa")]
impl salsa::plumbing::FromId for HirFileId {
fn from_id(id: salsa::Id) -> Self {
HirFileId(id)
@@ -273,12 +291,14 @@
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct MacroCallId(salsa::Id);
+#[cfg(feature = "salsa")]
impl salsa::plumbing::AsId for MacroCallId {
fn as_id(&self) -> salsa::Id {
self.0
}
}
+#[cfg(feature = "salsa")]
impl salsa::plumbing::FromId for MacroCallId {
fn from_id(id: salsa::Id) -> Self {
MacroCallId(id)
diff --git a/crates/stdx/Cargo.toml b/crates/stdx/Cargo.toml
index f6a5ded..7bda106 100644
--- a/crates/stdx/Cargo.toml
+++ b/crates/stdx/Cargo.toml
@@ -12,8 +12,8 @@
[lib]
[dependencies]
-backtrace = { version = "0.3.67", optional = true }
-jod-thread = "0.1.2"
+backtrace = { version = "0.3.74", optional = true }
+jod-thread = "1.0.0"
crossbeam-channel.workspace = true
itertools.workspace = true
tracing.workspace = true
diff --git a/crates/stdx/src/anymap.rs b/crates/stdx/src/anymap.rs
index faf2e6c..f55698e 100644
--- a/crates/stdx/src/anymap.rs
+++ b/crates/stdx/src/anymap.rs
@@ -1,4 +1,5 @@
//! This file is a port of only the necessary features from <https://github.com/chris-morgan/anymap> version 1.0.0-beta.2 for use within rust-analyzer.
+//!
//! Copyright © 2014–2022 Chris Morgan.
//! COPYING: <https://github.com/chris-morgan/anymap/blob/master/COPYING>
//! Note that the license is changed from Blue Oak Model 1.0.0 or MIT or Apache-2.0 to MIT OR Apache-2.0
@@ -20,14 +21,14 @@
use core::hash::Hasher;
-/// A hasher designed to eke a little more speed out, given `TypeId`’s known characteristics.
+/// A hasher designed to eke a little more speed out, given `TypeId`'s known characteristics.
///
-/// Specifically, this is a no-op hasher that expects to be fed a u64’s worth of
+/// Specifically, this is a no-op hasher that expects to be fed a u64's worth of
/// randomly-distributed bits. It works well for `TypeId` (eliminating start-up time, so that my
-/// get_missing benchmark is ~30ns rather than ~900ns, and being a good deal faster after that, so
-/// that my insert_and_get_on_260_types benchmark is ~12μs instead of ~21.5μs), but will
+/// `get_missing` benchmark is ~30ns rather than ~900ns, and being a good deal faster after that, so
+/// that my `insert_and_get_on_260_types` benchmark is ~12μs instead of ~21.5μs), but will
/// panic in debug mode and always emit zeros in release mode for any other sorts of inputs, so
-/// yeah, don’t use it! 😀
+/// yeah, don't use it! 😀
#[derive(Default)]
pub struct TypeIdHasher {
value: u64,
@@ -36,9 +37,9 @@
impl Hasher for TypeIdHasher {
#[inline]
fn write(&mut self, bytes: &[u8]) {
- // This expects to receive exactly one 64-bit value, and there’s no realistic chance of
- // that changing, but I don’t want to depend on something that isn’t expressly part of the
- // contract for safety. But I’m OK with release builds putting everything in one bucket
+ // This expects to receive exactly one 64-bit value, and there's no realistic chance of
+ // that changing, but I don't want to depend on something that isn't expressly part of the
+ // contract for safety. But I'm OK with release builds putting everything in one bucket
// if it *did* change (and debug builds panicking).
debug_assert_eq!(bytes.len(), 8);
let _ = bytes.try_into().map(|array| self.value = u64::from_ne_bytes(array));
@@ -59,7 +60,7 @@
/// Raw access to the underlying `HashMap`.
///
/// This alias is provided for convenience because of the ugly third generic parameter.
-#[allow(clippy::disallowed_types)] // Uses a custom hasher
+#[expect(clippy::disallowed_types, reason = "Uses a custom hasher")]
pub type RawMap<A> = hash_map::HashMap<TypeId, Box<A>, BuildHasherDefault<TypeIdHasher>>;
/// A collection containing zero or one values for any given type and allowing convenient,
@@ -73,19 +74,20 @@
///
/// Cumulatively, there are thus six forms of map:
///
-/// - <code>[Map]<dyn [core::any::Any]></code>,
+/// - `[Map]<dyn [core::any::Any]>`,
/// also spelled [`AnyMap`] for convenience.
-/// - <code>[Map]<dyn [core::any::Any] + Send></code>
-/// - <code>[Map]<dyn [core::any::Any] + Send + Sync></code>
+/// - `[Map]<dyn [core::any::Any] + Send>`
+/// - `[Map]<dyn [core::any::Any] + Send + Sync>`
///
/// ## Example
///
-/// (Here using the [`AnyMap`] convenience alias; the first line could use
-/// <code>[anymap::Map][Map]::<[core::any::Any]>::new()</code> instead if desired.)
+/// (Here, the [`AnyMap`] convenience alias is used;
+/// the first line could use `[anymap::Map][Map]::<[core::any::Any]>::new()`
+/// instead if desired.)
///
/// ```
/// # use stdx::anymap;
-#[doc = "let mut data = anymap::AnyMap::new();"]
+/// let mut data = anymap::AnyMap::new();
/// assert_eq!(data.get(), None::<&i32>);
/// ```
///
@@ -95,29 +97,25 @@
raw: RawMap<A>,
}
-/// The most common type of `Map`: just using `Any`; <code>[Map]<dyn [Any]></code>.
+/// The most common type of `Map`: just using `Any`; `[Map]<dyn [Any]>`.
///
/// Why is this a separate type alias rather than a default value for `Map<A>`?
-/// `Map::new()` doesn’t seem to be happy to infer that it should go with the default
-/// value. It’s a bit sad, really. Ah well, I guess this approach will do.
+/// `Map::new()` doesn't seem to be happy to infer that it should go with the default
+/// value. It's a bit sad, really. Ah well, I guess this approach will do.
pub type AnyMap = Map<dyn Any>;
+
impl<A: ?Sized + Downcast> Default for Map<A> {
#[inline]
fn default() -> Map<A> {
- Map::new()
+ Map { raw: RawMap::with_hasher(Default::default()) }
}
}
impl<A: ?Sized + Downcast> Map<A> {
- /// Create an empty collection.
- #[inline]
- pub fn new() -> Map<A> {
- Map { raw: RawMap::with_hasher(Default::default()) }
- }
-
/// Returns a reference to the value stored in the collection for the type `T`,
/// if it exists.
#[inline]
+ #[must_use]
pub fn get<T: IntoBox<A>>(&self) -> Option<&T> {
self.raw.get(&TypeId::of::<T>()).map(|any| unsafe { any.downcast_ref_unchecked::<T>() })
}
@@ -137,30 +135,30 @@
}
/// A view into a single occupied location in an `Map`.
-pub struct OccupiedEntry<'a, A: ?Sized + Downcast, V: 'a> {
- inner: hash_map::OccupiedEntry<'a, TypeId, Box<A>>,
+pub struct OccupiedEntry<'map, A: ?Sized + Downcast, V: 'map> {
+ inner: hash_map::OccupiedEntry<'map, TypeId, Box<A>>,
type_: PhantomData<V>,
}
/// A view into a single empty location in an `Map`.
-pub struct VacantEntry<'a, A: ?Sized + Downcast, V: 'a> {
- inner: hash_map::VacantEntry<'a, TypeId, Box<A>>,
+pub struct VacantEntry<'map, A: ?Sized + Downcast, V: 'map> {
+ inner: hash_map::VacantEntry<'map, TypeId, Box<A>>,
type_: PhantomData<V>,
}
/// A view into a single location in an `Map`, which may be vacant or occupied.
-pub enum Entry<'a, A: ?Sized + Downcast, V> {
+pub enum Entry<'map, A: ?Sized + Downcast, V> {
/// An occupied Entry
- Occupied(OccupiedEntry<'a, A, V>),
+ Occupied(OccupiedEntry<'map, A, V>),
/// A vacant Entry
- Vacant(VacantEntry<'a, A, V>),
+ Vacant(VacantEntry<'map, A, V>),
}
-impl<'a, A: ?Sized + Downcast, V: IntoBox<A>> Entry<'a, A, V> {
+impl<'map, A: ?Sized + Downcast, V: IntoBox<A>> Entry<'map, A, V> {
/// Ensures a value is in the entry by inserting the result of the default function if
/// empty, and returns a mutable reference to the value in the entry.
#[inline]
- pub fn or_insert_with<F: FnOnce() -> V>(self, default: F) -> &'a mut V {
+ pub fn or_insert_with<F: FnOnce() -> V>(self, default: F) -> &'map mut V {
match self {
Entry::Occupied(inner) => inner.into_mut(),
Entry::Vacant(inner) => inner.insert(default()),
@@ -168,20 +166,21 @@
}
}
-impl<'a, A: ?Sized + Downcast, V: IntoBox<A>> OccupiedEntry<'a, A, V> {
- /// Converts the OccupiedEntry into a mutable reference to the value in the entry
+impl<'map, A: ?Sized + Downcast, V: IntoBox<A>> OccupiedEntry<'map, A, V> {
+ /// Converts the `OccupiedEntry` into a mutable reference to the value in the entry
/// with a lifetime bound to the collection itself
#[inline]
- pub fn into_mut(self) -> &'a mut V {
+ #[must_use]
+ pub fn into_mut(self) -> &'map mut V {
unsafe { self.inner.into_mut().downcast_mut_unchecked() }
}
}
-impl<'a, A: ?Sized + Downcast, V: IntoBox<A>> VacantEntry<'a, A, V> {
- /// Sets the value of the entry with the VacantEntry's key,
+impl<'map, A: ?Sized + Downcast, V: IntoBox<A>> VacantEntry<'map, A, V> {
+ /// Sets the value of the entry with the `VacantEntry`'s key,
/// and returns a mutable reference to it
#[inline]
- pub fn insert(self, value: V) -> &'a mut V {
+ pub fn insert(self, value: V) -> &'map mut V {
unsafe { self.inner.insert(value.into_box()).downcast_mut_unchecked() }
}
}
@@ -206,14 +205,13 @@
#[test]
fn type_id_hasher() {
use core::any::TypeId;
- use core::hash::Hash;
+ use core::hash::Hash as _;
fn verify_hashing_with(type_id: TypeId) {
let mut hasher = TypeIdHasher::default();
type_id.hash(&mut hasher);
- // SAFETY: u64 is valid for all bit patterns.
- let _ = hasher.finish();
+ _ = hasher.finish();
}
- // Pick a variety of types, just to demonstrate it’s all sane. Normal, zero-sized, unsized, &c.
+ // Pick a variety of types, just to demonstrate it's all sane. Normal, zero-sized, unsized, &c.
verify_hashing_with(TypeId::of::<usize>());
verify_hashing_with(TypeId::of::<()>());
verify_hashing_with(TypeId::of::<str>());
@@ -225,34 +223,34 @@
/// Methods for downcasting from an `Any`-like trait object.
///
/// This should only be implemented on trait objects for subtraits of `Any`, though you can
-/// implement it for other types and it’ll work fine, so long as your implementation is correct.
+/// implement it for other types and it'll work fine, so long as your implementation is correct.
pub trait Downcast {
/// Gets the `TypeId` of `self`.
fn type_id(&self) -> TypeId;
// Note the bound through these downcast methods is 'static, rather than the inexpressible
// concept of Self-but-as-a-trait (where Self is `dyn Trait`). This is sufficient, exceeding
- // TypeId’s requirements. Sure, you *can* do CloneAny.downcast_unchecked::<NotClone>() and the
- // type system won’t protect you, but that doesn’t introduce any unsafety: the method is
+ // TypeId's requirements. Sure, you *can* do CloneAny.downcast_unchecked::<NotClone>() and the
+ // type system won't protect you, but that doesn't introduce any unsafety: the method is
// already unsafe because you can specify the wrong type, and if this were exposing safe
// downcasting, CloneAny.downcast::<NotClone>() would just return an error, which is just as
// correct.
//
- // Now in theory we could also add T: ?Sized, but that doesn’t play nicely with the common
- // implementation, so I’m doing without it.
+ // Now in theory we could also add T: ?Sized, but that doesn't play nicely with the common
+ // implementation, so I'm doing without it.
/// Downcast from `&Any` to `&T`, without checking the type matches.
///
/// # Safety
///
- /// The caller must ensure that `T` matches the trait object, on pain of *undefined behaviour*.
+ /// The caller must ensure that `T` matches the trait object, on pain of *undefined behavior*.
unsafe fn downcast_ref_unchecked<T: 'static>(&self) -> &T;
/// Downcast from `&mut Any` to `&mut T`, without checking the type matches.
///
/// # Safety
///
- /// The caller must ensure that `T` matches the trait object, on pain of *undefined behaviour*.
+ /// The caller must ensure that `T` matches the trait object, on pain of *undefined behavior*.
unsafe fn downcast_mut_unchecked<T: 'static>(&mut self) -> &mut T;
}
@@ -272,12 +270,12 @@
#[inline]
unsafe fn downcast_ref_unchecked<T: 'static>(&self) -> &T {
- unsafe { &*(self as *const Self as *const T) }
+ unsafe { &*std::ptr::from_ref::<Self>(self).cast::<T>() }
}
#[inline]
unsafe fn downcast_mut_unchecked<T: 'static>(&mut self) -> &mut T {
- unsafe { &mut *(self as *mut Self as *mut T) }
+ unsafe { &mut *std::ptr::from_mut::<Self>(self).cast::<T>() }
}
}
diff --git a/crates/stdx/src/lib.rs b/crates/stdx/src/lib.rs
index ce56a81..9a292ea 100644
--- a/crates/stdx/src/lib.rs
+++ b/crates/stdx/src/lib.rs
@@ -12,13 +12,12 @@
pub mod panic_context;
pub mod process;
pub mod rand;
-pub mod thin_vec;
pub mod thread;
pub use itertools;
#[inline(always)]
-pub fn is_ci() -> bool {
+pub const fn is_ci() -> bool {
option_env!("CI").is_some()
}
@@ -27,14 +26,14 @@
}
#[must_use]
-#[allow(clippy::print_stderr)]
+#[expect(clippy::print_stderr, reason = "only visible to developers")]
pub fn timeit(label: &'static str) -> impl Drop {
let start = Instant::now();
- defer(move || eprintln!("{}: {:.2?}", label, start.elapsed()))
+ defer(move || eprintln!("{}: {}ns", label, start.elapsed().as_nanos()))
}
/// Prints backtrace to stderr, useful for debugging.
-#[allow(clippy::print_stderr)]
+#[expect(clippy::print_stderr, reason = "only visible to developers")]
pub fn print_backtrace() {
#[cfg(feature = "backtrace")]
eprintln!("{:?}", backtrace::Backtrace::new());
@@ -127,6 +126,7 @@
}
// Taken from rustc.
+#[must_use]
pub fn to_camel_case(ident: &str) -> String {
ident
.trim_matches('_')
@@ -157,7 +157,7 @@
camel_cased_component
})
- .fold((String::new(), None), |(acc, prev): (_, Option<String>), next| {
+ .fold((String::new(), None), |(mut acc, prev): (_, Option<String>), next| {
// separate two components with an underscore if their boundary cannot
// be distinguished using an uppercase/lowercase case distinction
let join = prev
@@ -167,16 +167,20 @@
Some(!char_has_case(l) && !char_has_case(f))
})
.unwrap_or(false);
- (acc + if join { "_" } else { "" } + &next, Some(next))
+ acc.push_str(if join { "_" } else { "" });
+ acc.push_str(&next);
+ (acc, Some(next))
})
.0
}
// Taken from rustc.
-pub fn char_has_case(c: char) -> bool {
+#[must_use]
+pub const fn char_has_case(c: char) -> bool {
c.is_lowercase() || c.is_uppercase()
}
+#[must_use]
pub fn is_upper_snake_case(s: &str) -> bool {
s.chars().all(|c| c.is_uppercase() || c == '_' || c.is_numeric())
}
@@ -189,6 +193,7 @@
*buf = buf.replace(from, to);
}
+#[must_use]
pub fn trim_indent(mut text: &str) -> String {
if text.starts_with('\n') {
text = &text[1..];
@@ -250,8 +255,8 @@
impl Drop for JodChild {
fn drop(&mut self) {
- let _ = self.0.kill();
- let _ = self.0.wait();
+ _ = self.0.kill();
+ _ = self.0.wait();
}
}
@@ -260,12 +265,11 @@
command.spawn().map(Self)
}
+ #[must_use]
+ #[cfg(not(target_arch = "wasm32"))]
pub fn into_inner(self) -> std::process::Child {
- if cfg!(target_arch = "wasm32") {
- panic!("no processes on wasm");
- }
// SAFETY: repr transparent, except on WASM
- unsafe { std::mem::transmute::<JodChild, std::process::Child>(self) }
+ unsafe { std::mem::transmute::<Self, std::process::Child>(self) }
}
}
diff --git a/crates/stdx/src/non_empty_vec.rs b/crates/stdx/src/non_empty_vec.rs
index 342194c..faa322d 100644
--- a/crates/stdx/src/non_empty_vec.rs
+++ b/crates/stdx/src/non_empty_vec.rs
@@ -8,8 +8,8 @@
impl<T> NonEmptyVec<T> {
#[inline]
- pub fn new(first: T) -> Self {
- NonEmptyVec { first, rest: Vec::new() }
+ pub const fn new(first: T) -> Self {
+ Self { first, rest: Vec::new() }
}
#[inline]
@@ -24,7 +24,7 @@
#[inline]
pub fn push(&mut self, value: T) {
- self.rest.push(value)
+ self.rest.push(value);
}
#[inline]
diff --git a/crates/stdx/src/panic_context.rs b/crates/stdx/src/panic_context.rs
index a35d50b..b220451 100644
--- a/crates/stdx/src/panic_context.rs
+++ b/crates/stdx/src/panic_context.rs
@@ -16,7 +16,7 @@
}
pub fn enter(frame: String) -> PanicContext {
- #[allow(clippy::print_stderr)]
+ #[expect(clippy::print_stderr, reason = "already panicking anyway")]
fn set_hook() {
let default_hook = panic::take_hook();
panic::set_hook(Box::new(move |panic_info| {
diff --git a/crates/stdx/src/process.rs b/crates/stdx/src/process.rs
index 3b3955c..2efeed4 100644
--- a/crates/stdx/src/process.rs
+++ b/crates/stdx/src/process.rs
@@ -54,6 +54,9 @@
Ok((stdout, stderr))
}
+/// # Panics
+///
+/// Panics if `cmd` is not configured with piped `stdout` and `stderr`.
pub fn spawn_with_streaming_output(
mut cmd: Command,
on_stdout_line: &mut dyn FnMut(&str),
diff --git a/crates/stdx/src/rand.rs b/crates/stdx/src/rand.rs
index 115a073..e028990 100644
--- a/crates/stdx/src/rand.rs
+++ b/crates/stdx/src/rand.rs
@@ -1,8 +1,7 @@
-//! We don't use `rand`, as that's too many things for us.
+//! We don't use `rand` because it brings in more than we need.
//!
-//! We currently use oorandom instead, but it's missing these two utilities.
-//! Perhaps we should switch to `fastrand`, or our own small PRNG, it's not like
-//! we need anything more complicated than xor-shift.
+//! `oorandom` is used instead, but it's missing these two utilities.
+//! Switching to `fastrand`, or to a small PRNG of our own, may be worthwhile since nothing more than xor-shift is needed.
pub fn shuffle<T>(slice: &mut [T], mut rand_index: impl FnMut(usize) -> usize) {
let mut remaining = slice.len() - 1;
diff --git a/crates/stdx/src/thin_vec.rs b/crates/stdx/src/thin_vec.rs
deleted file mode 100644
index 69d8ee7..0000000
--- a/crates/stdx/src/thin_vec.rs
+++ /dev/null
@@ -1,468 +0,0 @@
-use std::alloc::{Layout, dealloc, handle_alloc_error};
-use std::fmt;
-use std::hash::{Hash, Hasher};
-use std::marker::PhantomData;
-use std::ops::{Deref, DerefMut};
-use std::ptr::{NonNull, addr_of_mut, slice_from_raw_parts_mut};
-
-/// A type that is functionally equivalent to `(Header, Box<[Item]>)`,
-/// but all data is stored in one heap allocation and the pointer is thin,
-/// so the whole thing's size is like a pointer.
-pub struct ThinVecWithHeader<Header, Item> {
- /// INVARIANT: Points to a valid heap allocation that contains `ThinVecInner<Header>`,
- /// followed by (suitably aligned) `len` `Item`s.
- ptr: NonNull<ThinVecInner<Header>>,
- _marker: PhantomData<(Header, Box<[Item]>)>,
-}
-
-// SAFETY: We essentially own both the header and the items.
-unsafe impl<Header: Send, Item: Send> Send for ThinVecWithHeader<Header, Item> {}
-unsafe impl<Header: Sync, Item: Sync> Sync for ThinVecWithHeader<Header, Item> {}
-
-#[derive(Clone)]
-struct ThinVecInner<Header> {
- header: Header,
- len: usize,
-}
-
-impl<Header, Item> ThinVecWithHeader<Header, Item> {
- /// # Safety
- ///
- /// The iterator must produce `len` elements.
- #[inline]
- unsafe fn from_trusted_len_iter(
- header: Header,
- len: usize,
- items: impl Iterator<Item = Item>,
- ) -> Self {
- let (ptr, layout, items_offset) = Self::allocate(len);
-
- struct DeallocGuard(*mut u8, Layout);
- impl Drop for DeallocGuard {
- fn drop(&mut self) {
- // SAFETY: We allocated this above.
- unsafe {
- dealloc(self.0, self.1);
- }
- }
- }
- let _dealloc_guard = DeallocGuard(ptr.as_ptr().cast::<u8>(), layout);
-
- // INVARIANT: Between `0..1` there are only initialized items.
- struct ItemsGuard<Item>(*mut Item, *mut Item);
- impl<Item> Drop for ItemsGuard<Item> {
- fn drop(&mut self) {
- // SAFETY: Our invariant.
- unsafe {
- slice_from_raw_parts_mut(self.0, self.1.offset_from(self.0) as usize)
- .drop_in_place();
- }
- }
- }
-
- // SAFETY: We allocated enough space.
- let mut items_ptr = unsafe { ptr.as_ptr().byte_add(items_offset).cast::<Item>() };
- // INVARIANT: There are zero elements in this range.
- let mut items_guard = ItemsGuard(items_ptr, items_ptr);
- items.for_each(|item| {
- // SAFETY: Our precondition guarantee we won't get more than `len` items, and we allocated
- // enough space for `len` items.
- unsafe {
- items_ptr.write(item);
- items_ptr = items_ptr.add(1);
- }
- // INVARIANT: We just initialized this item.
- items_guard.1 = items_ptr;
- });
-
- // SAFETY: We allocated enough space.
- unsafe {
- ptr.write(ThinVecInner { header, len });
- }
-
- std::mem::forget(items_guard);
-
- std::mem::forget(_dealloc_guard);
-
- // INVARIANT: We allocated and initialized all fields correctly.
- Self { ptr, _marker: PhantomData }
- }
-
- #[inline]
- fn allocate(len: usize) -> (NonNull<ThinVecInner<Header>>, Layout, usize) {
- let (layout, items_offset) = Self::layout(len);
- // SAFETY: We always have `len`, so our allocation cannot be zero-sized.
- let ptr = unsafe { std::alloc::alloc(layout).cast::<ThinVecInner<Header>>() };
- let Some(ptr) = NonNull::<ThinVecInner<Header>>::new(ptr) else {
- handle_alloc_error(layout);
- };
- (ptr, layout, items_offset)
- }
-
- #[inline]
- #[allow(clippy::should_implement_trait)]
- pub fn from_iter<I>(header: Header, items: I) -> Self
- where
- I: IntoIterator,
- I::IntoIter: TrustedLen<Item = Item>,
- {
- let items = items.into_iter();
- // SAFETY: `TrustedLen` guarantees the iterator length is exact.
- unsafe { Self::from_trusted_len_iter(header, items.len(), items) }
- }
-
- #[inline]
- fn items_offset(&self) -> usize {
- // SAFETY: We `pad_to_align()` in `layout()`, so at most where accessing past the end of the allocation,
- // which is allowed.
- unsafe {
- Layout::new::<ThinVecInner<Header>>().extend(Layout::new::<Item>()).unwrap_unchecked().1
- }
- }
-
- #[inline]
- fn header_and_len(&self) -> &ThinVecInner<Header> {
- // SAFETY: By `ptr`'s invariant, it is correctly allocated and initialized.
- unsafe { &*self.ptr.as_ptr() }
- }
-
- #[inline]
- fn items_ptr(&self) -> *mut [Item] {
- let len = self.header_and_len().len;
- // SAFETY: `items_offset()` returns the correct offset of the items, where they are allocated.
- let ptr = unsafe { self.ptr.as_ptr().byte_add(self.items_offset()).cast::<Item>() };
- slice_from_raw_parts_mut(ptr, len)
- }
-
- #[inline]
- pub fn header(&self) -> &Header {
- &self.header_and_len().header
- }
-
- #[inline]
- pub fn header_mut(&mut self) -> &mut Header {
- // SAFETY: By `ptr`'s invariant, it is correctly allocated and initialized.
- unsafe { &mut *addr_of_mut!((*self.ptr.as_ptr()).header) }
- }
-
- #[inline]
- pub fn items(&self) -> &[Item] {
- // SAFETY: `items_ptr()` gives a valid pointer.
- unsafe { &*self.items_ptr() }
- }
-
- #[inline]
- pub fn items_mut(&mut self) -> &mut [Item] {
- // SAFETY: `items_ptr()` gives a valid pointer.
- unsafe { &mut *self.items_ptr() }
- }
-
- #[inline]
- pub fn len(&self) -> usize {
- self.header_and_len().len
- }
-
- #[inline]
- fn layout(len: usize) -> (Layout, usize) {
- let (layout, items_offset) = Layout::new::<ThinVecInner<Header>>()
- .extend(Layout::array::<Item>(len).expect("too big `ThinVec` requested"))
- .expect("too big `ThinVec` requested");
- let layout = layout.pad_to_align();
- (layout, items_offset)
- }
-}
-
-/// # Safety
-///
-/// The length reported must be exactly the number of items yielded.
-pub unsafe trait TrustedLen: ExactSizeIterator {}
-
-unsafe impl<T> TrustedLen for std::vec::IntoIter<T> {}
-unsafe impl<T> TrustedLen for std::slice::Iter<'_, T> {}
-unsafe impl<'a, T: Clone + 'a, I: TrustedLen<Item = &'a T>> TrustedLen for std::iter::Cloned<I> {}
-unsafe impl<T, I: TrustedLen, F: FnMut(I::Item) -> T> TrustedLen for std::iter::Map<I, F> {}
-unsafe impl<T> TrustedLen for std::vec::Drain<'_, T> {}
-unsafe impl<T, const N: usize> TrustedLen for std::array::IntoIter<T, N> {}
-
-impl<Header: Clone, Item: Clone> Clone for ThinVecWithHeader<Header, Item> {
- #[inline]
- fn clone(&self) -> Self {
- Self::from_iter(self.header().clone(), self.items().iter().cloned())
- }
-}
-
-impl<Header, Item> Drop for ThinVecWithHeader<Header, Item> {
- #[inline]
- fn drop(&mut self) {
- // This must come before we drop `header`, because after that we cannot make a reference to it in `len()`.
- let len = self.len();
-
- // SAFETY: The contents are allocated and initialized.
- unsafe {
- addr_of_mut!((*self.ptr.as_ptr()).header).drop_in_place();
- self.items_ptr().drop_in_place();
- }
-
- let (layout, _) = Self::layout(len);
- // SAFETY: This was allocated in `new()` with the same layout calculation.
- unsafe {
- dealloc(self.ptr.as_ptr().cast::<u8>(), layout);
- }
- }
-}
-
-impl<Header: fmt::Debug, Item: fmt::Debug> fmt::Debug for ThinVecWithHeader<Header, Item> {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- f.debug_struct("ThinVecWithHeader")
- .field("header", self.header())
- .field("items", &self.items())
- .finish()
- }
-}
-
-impl<Header: PartialEq, Item: PartialEq> PartialEq for ThinVecWithHeader<Header, Item> {
- #[inline]
- fn eq(&self, other: &Self) -> bool {
- self.header() == other.header() && self.items() == other.items()
- }
-}
-
-impl<Header: Eq, Item: Eq> Eq for ThinVecWithHeader<Header, Item> {}
-
-impl<Header: Hash, Item: Hash> Hash for ThinVecWithHeader<Header, Item> {
- #[inline]
- fn hash<H: Hasher>(&self, state: &mut H) {
- self.header().hash(state);
- self.items().hash(state);
- }
-}
-
-#[derive(Clone, PartialEq, Eq, Hash)]
-pub struct ThinVec<T>(ThinVecWithHeader<(), T>);
-
-impl<T> ThinVec<T> {
- #[inline]
- #[allow(clippy::should_implement_trait)]
- pub fn from_iter<I>(values: I) -> Self
- where
- I: IntoIterator,
- I::IntoIter: TrustedLen<Item = T>,
- {
- Self(ThinVecWithHeader::from_iter((), values))
- }
-
- #[inline]
- pub fn len(&self) -> usize {
- self.0.len()
- }
-
- #[inline]
- pub fn iter(&self) -> std::slice::Iter<'_, T> {
- (**self).iter()
- }
-
- #[inline]
- pub fn iter_mut(&mut self) -> std::slice::IterMut<'_, T> {
- (**self).iter_mut()
- }
-}
-
-impl<T> Deref for ThinVec<T> {
- type Target = [T];
-
- #[inline]
- fn deref(&self) -> &Self::Target {
- self.0.items()
- }
-}
-
-impl<T> DerefMut for ThinVec<T> {
- #[inline]
- fn deref_mut(&mut self) -> &mut Self::Target {
- self.0.items_mut()
- }
-}
-
-impl<'a, T> IntoIterator for &'a ThinVec<T> {
- type IntoIter = std::slice::Iter<'a, T>;
- type Item = &'a T;
-
- #[inline]
- fn into_iter(self) -> Self::IntoIter {
- self.iter()
- }
-}
-
-impl<'a, T> IntoIterator for &'a mut ThinVec<T> {
- type IntoIter = std::slice::IterMut<'a, T>;
- type Item = &'a mut T;
-
- #[inline]
- fn into_iter(self) -> Self::IntoIter {
- self.iter_mut()
- }
-}
-
-impl<T: fmt::Debug> fmt::Debug for ThinVec<T> {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- f.debug_list().entries(&**self).finish()
- }
-}
-
-/// A [`ThinVec`] that requires no allocation for the empty case.
-#[derive(Clone, PartialEq, Eq, Hash)]
-pub struct EmptyOptimizedThinVec<T>(Option<ThinVec<T>>);
-
-impl<T> EmptyOptimizedThinVec<T> {
- #[inline]
- #[allow(clippy::should_implement_trait)]
- pub fn from_iter<I>(values: I) -> Self
- where
- I: IntoIterator,
- I::IntoIter: TrustedLen<Item = T>,
- {
- let values = values.into_iter();
- if values.len() == 0 { Self::empty() } else { Self(Some(ThinVec::from_iter(values))) }
- }
-
- #[inline]
- pub fn empty() -> Self {
- Self(None)
- }
-
- #[inline]
- pub fn len(&self) -> usize {
- self.0.as_ref().map_or(0, ThinVec::len)
- }
-
- #[inline]
- pub fn iter(&self) -> std::slice::Iter<'_, T> {
- (**self).iter()
- }
-
- #[inline]
- pub fn iter_mut(&mut self) -> std::slice::IterMut<'_, T> {
- (**self).iter_mut()
- }
-}
-
-impl<T> Default for EmptyOptimizedThinVec<T> {
- #[inline]
- fn default() -> Self {
- Self::empty()
- }
-}
-
-impl<T> Deref for EmptyOptimizedThinVec<T> {
- type Target = [T];
-
- #[inline]
- fn deref(&self) -> &Self::Target {
- self.0.as_deref().unwrap_or_default()
- }
-}
-
-impl<T> DerefMut for EmptyOptimizedThinVec<T> {
- #[inline]
- fn deref_mut(&mut self) -> &mut Self::Target {
- self.0.as_deref_mut().unwrap_or_default()
- }
-}
-
-impl<'a, T> IntoIterator for &'a EmptyOptimizedThinVec<T> {
- type IntoIter = std::slice::Iter<'a, T>;
- type Item = &'a T;
-
- #[inline]
- fn into_iter(self) -> Self::IntoIter {
- self.iter()
- }
-}
-
-impl<'a, T> IntoIterator for &'a mut EmptyOptimizedThinVec<T> {
- type IntoIter = std::slice::IterMut<'a, T>;
- type Item = &'a mut T;
-
- #[inline]
- fn into_iter(self) -> Self::IntoIter {
- self.iter_mut()
- }
-}
-
-impl<T: fmt::Debug> fmt::Debug for EmptyOptimizedThinVec<T> {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- f.debug_list().entries(&**self).finish()
- }
-}
-
-/// Syntax:
-///
-/// ```ignore
-/// thin_vec_with_header_struct! {
-/// pub new(pub(crate)) struct MyCoolStruct, MyCoolStructHeader {
-/// pub(crate) variable_length: [Ty],
-/// pub field1: CopyTy,
-/// pub field2: NonCopyTy; ref,
-/// }
-/// }
-/// ```
-#[doc(hidden)]
-#[macro_export]
-macro_rules! thin_vec_with_header_struct_ {
- (@maybe_ref (ref) $($t:tt)*) => { &$($t)* };
- (@maybe_ref () $($t:tt)*) => { $($t)* };
- (
- $vis:vis new($new_vis:vis) struct $struct:ident, $header:ident {
- $items_vis:vis $items:ident : [$items_ty:ty],
- $( $header_var_vis:vis $header_var:ident : $header_var_ty:ty $(; $ref:ident)?, )+
- }
- ) => {
- #[derive(Debug, Clone, Eq, PartialEq, Hash)]
- struct $header {
- $( $header_var : $header_var_ty, )+
- }
-
- #[derive(Clone, Eq, PartialEq, Hash)]
- $vis struct $struct($crate::thin_vec::ThinVecWithHeader<$header, $items_ty>);
-
- impl $struct {
- #[inline]
- #[allow(unused)]
- $new_vis fn new<I>(
- $( $header_var: $header_var_ty, )+
- $items: I,
- ) -> Self
- where
- I: ::std::iter::IntoIterator,
- I::IntoIter: $crate::thin_vec::TrustedLen<Item = $items_ty>,
- {
- Self($crate::thin_vec::ThinVecWithHeader::from_iter(
- $header { $( $header_var, )+ },
- $items,
- ))
- }
-
- #[inline]
- $items_vis fn $items(&self) -> &[$items_ty] {
- self.0.items()
- }
-
- $(
- #[inline]
- $header_var_vis fn $header_var(&self) -> $crate::thin_vec_with_header_struct_!(@maybe_ref ($($ref)?) $header_var_ty) {
- $crate::thin_vec_with_header_struct_!(@maybe_ref ($($ref)?) self.0.header().$header_var)
- }
- )+
- }
-
- impl ::std::fmt::Debug for $struct {
- fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
- f.debug_struct(stringify!($struct))
- $( .field(stringify!($header_var), &self.$header_var()) )*
- .field(stringify!($items), &self.$items())
- .finish()
- }
- }
- };
-}
-pub use crate::thin_vec_with_header_struct_ as thin_vec_with_header_struct;
diff --git a/crates/stdx/src/thread.rs b/crates/stdx/src/thread.rs
index e577eb4..6c742fe 100644
--- a/crates/stdx/src/thread.rs
+++ b/crates/stdx/src/thread.rs
@@ -1,12 +1,12 @@
//! A utility module for working with threads that automatically joins threads upon drop
-//! and abstracts over operating system quality of service (QoS) APIs
+//! and abstracts over operating system quality of service (`QoS`) APIs
//! through the concept of a “thread intent”.
//!
//! The intent of a thread is frozen at thread creation time,
//! i.e. there is no API to change the intent of a thread once it has been spawned.
//!
//! As a system, rust-analyzer should have the property that
-//! old manual scheduling APIs are replaced entirely by QoS.
+//! old manual scheduling APIs are replaced entirely by `QoS`.
//! To maintain this invariant, we panic when it is clear that
//! old scheduling APIs have been used.
//!
@@ -23,10 +23,12 @@
pub use intent::ThreadIntent;
pub use pool::Pool;
+/// # Panics
+///
+/// Panics if the thread fails to spawn.
pub fn spawn<F, T>(intent: ThreadIntent, f: F) -> JoinHandle<T>
where
- F: FnOnce() -> T,
- F: Send + 'static,
+ F: (FnOnce() -> T) + Send + 'static,
T: Send + 'static,
{
Builder::new(intent).spawn(f).expect("failed to spawn thread")
@@ -39,26 +41,29 @@
}
impl Builder {
- pub fn new(intent: ThreadIntent) -> Builder {
- Builder { intent, inner: jod_thread::Builder::new(), allow_leak: false }
+ #[must_use]
+ pub fn new(intent: ThreadIntent) -> Self {
+ Self { intent, inner: jod_thread::Builder::new(), allow_leak: false }
}
- pub fn name(self, name: String) -> Builder {
- Builder { inner: self.inner.name(name), ..self }
+ #[must_use]
+ pub fn name(self, name: String) -> Self {
+ Self { inner: self.inner.name(name), ..self }
}
- pub fn stack_size(self, size: usize) -> Builder {
- Builder { inner: self.inner.stack_size(size), ..self }
+ #[must_use]
+ pub fn stack_size(self, size: usize) -> Self {
+ Self { inner: self.inner.stack_size(size), ..self }
}
- pub fn allow_leak(self, b: bool) -> Builder {
- Builder { allow_leak: b, ..self }
+ #[must_use]
+ pub fn allow_leak(self, allow_leak: bool) -> Self {
+ Self { allow_leak, ..self }
}
pub fn spawn<F, T>(self, f: F) -> std::io::Result<JoinHandle<T>>
where
- F: FnOnce() -> T,
- F: Send + 'static,
+ F: (FnOnce() -> T) + Send + 'static,
T: Send + 'static,
{
let inner_handle = self.inner.spawn(move || {
@@ -78,6 +83,10 @@
}
impl<T> JoinHandle<T> {
+ /// # Panics
+ ///
+ /// Panics if there is no thread to join.
+ #[must_use]
pub fn join(mut self) -> T {
self.inner.take().unwrap().join()
}
@@ -95,6 +104,7 @@
}
}
+#[expect(clippy::min_ident_chars, reason = "trait impl")]
impl<T> fmt::Debug for JoinHandle<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.pad("JoinHandle { .. }")
diff --git a/crates/stdx/src/thread/intent.rs b/crates/stdx/src/thread/intent.rs
index 7b65db3..1203bfc 100644
--- a/crates/stdx/src/thread/intent.rs
+++ b/crates/stdx/src/thread/intent.rs
@@ -1,9 +1,9 @@
-//! An opaque façade around platform-specific QoS APIs.
+//! An opaque façade around platform-specific `QoS` APIs.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
// Please maintain order from least to most priority for the derived `Ord` impl.
pub enum ThreadIntent {
- /// Any thread which does work that isn’t in the critical path of the user typing
+ /// Any thread which does work that isn't in the critical path of the user typing
/// (e.g. processing Go To Definition).
Worker,
@@ -34,6 +34,7 @@
const IS_QOS_AVAILABLE: bool = imp::IS_QOS_AVAILABLE;
+#[expect(clippy::semicolon_if_nothing_returned, reason = "thin wrapper")]
fn set_current_thread_qos_class(class: QoSClass) {
imp::set_current_thread_qos_class(class)
}
@@ -63,7 +64,7 @@
///
/// * **You do not care about how long it takes for work to finish.**
/// * **You do not care about work being deferred temporarily.**
- /// (e.g. if the device’s battery is in a critical state)
+ /// (e.g. if the device's battery is in a critical state)
///
/// Examples:
///
@@ -84,7 +85,7 @@
/// All other work is prioritized over background tasks.
Background,
- /// TLDR: tasks that don’t block using your app
+ /// TLDR: tasks that don't block using your app
///
/// Contract:
///
@@ -110,7 +111,7 @@
/// for tasks using this class.
///
/// This QoS class provides a balance between
- /// performance, responsiveness and efficiency.
+ /// performance, responsiveness, and efficiency.
Utility,
/// TLDR: tasks that block using your app
@@ -126,10 +127,10 @@
/// * in a video editor:
/// opening a saved project
/// * in a browser:
- /// loading a list of the user’s bookmarks and top sites
+ /// loading a list of the user's bookmarks and top sites
/// when a new tab is created
/// * in a collaborative word processor:
- /// running a search on the document’s content
+ /// running a search on the document's content
///
/// Use this QoS class for tasks which were initiated by the user
/// and block the usage of your app while they are in progress.
@@ -208,7 +209,7 @@
}
_ => {
- // `pthread_set_qos_class_self_np`’s documentation
+ // `pthread_set_qos_class_self_np`'s documentation
// does not mention any other errors.
unreachable!("`pthread_set_qos_class_self_np` returned unexpected error {errno}")
}
@@ -223,7 +224,7 @@
};
if code != 0 {
- // `pthread_get_qos_class_np`’s documentation states that
+ // `pthread_get_qos_class_np`'s documentation states that
// an error value is placed into errno if the return code is not zero.
// However, it never states what errors are possible.
// Inspecting the source[0] shows that, as of this writing, it always returns zero.
diff --git a/crates/stdx/src/thread/pool.rs b/crates/stdx/src/thread/pool.rs
index 0efff38..074cd74 100644
--- a/crates/stdx/src/thread/pool.rs
+++ b/crates/stdx/src/thread/pool.rs
@@ -38,7 +38,11 @@
}
impl Pool {
- pub fn new(threads: usize) -> Pool {
+ /// # Panics
+ ///
+ /// Panics if a job panics.
+ #[must_use]
+ pub fn new(threads: usize) -> Self {
const STACK_SIZE: usize = 8 * 1024 * 1024;
const INITIAL_INTENT: ThreadIntent = ThreadIntent::Worker;
@@ -63,7 +67,7 @@
}
extant_tasks.fetch_add(1, Ordering::SeqCst);
// discard the panic, we should've logged the backtrace already
- _ = panic::catch_unwind(job.f);
+ drop(panic::catch_unwind(job.f));
extant_tasks.fetch_sub(1, Ordering::SeqCst);
}
}
@@ -73,9 +77,12 @@
handles.push(handle);
}
- Pool { _handles: handles.into_boxed_slice(), extant_tasks, job_sender }
+ Self { _handles: handles.into_boxed_slice(), extant_tasks, job_sender }
}
+ /// # Panics
+ ///
+ /// Panics if a job panics.
pub fn spawn<F>(&self, intent: ThreadIntent, f: F)
where
F: FnOnce() + Send + UnwindSafe + 'static,
@@ -84,14 +91,20 @@
if cfg!(debug_assertions) {
intent.assert_is_used_on_current_thread();
}
- f()
+ f();
});
let job = Job { requested_intent: intent, f };
self.job_sender.send(job).unwrap();
}
+ #[must_use]
pub fn len(&self) -> usize {
self.extant_tasks.load(Ordering::SeqCst)
}
+
+ #[must_use]
+ pub fn is_empty(&self) -> bool {
+ self.len() == 0
+ }
}
diff --git a/crates/syntax/Cargo.toml b/crates/syntax/Cargo.toml
index 7db1a4e..2e90208 100644
--- a/crates/syntax/Cargo.toml
+++ b/crates/syntax/Cargo.toml
@@ -27,8 +27,8 @@
[dev-dependencies]
rayon.workspace = true
-expect-test = "1.4.0"
-rustc_apfloat = "0.2.0"
+expect-test = "1.5.1"
+rustc_apfloat = "0.2.2"
test-utils.workspace = true
diff --git a/crates/syntax/rust.ungram b/crates/syntax/rust.ungram
index 70a91af..673334b 100644
--- a/crates/syntax/rust.ungram
+++ b/crates/syntax/rust.ungram
@@ -240,7 +240,7 @@
'{' fields:(RecordField (',' RecordField)* ','?)? '}'
RecordField =
- Attr* Visibility?
+ Attr* Visibility? 'unsafe'?
Name ':' Type ('=' Expr)?
TupleFieldList =
diff --git a/crates/syntax/src/ast/generated/nodes.rs b/crates/syntax/src/ast/generated/nodes.rs
index 638b961..fd23cdc 100644
--- a/crates/syntax/src/ast/generated/nodes.rs
+++ b/crates/syntax/src/ast/generated/nodes.rs
@@ -1337,6 +1337,8 @@
pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) }
#[inline]
pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
+ #[inline]
+ pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) }
}
pub struct RecordFieldList {
pub(crate) syntax: SyntaxNode,
diff --git a/crates/syntax/src/ast/node_ext.rs b/crates/syntax/src/ast/node_ext.rs
index a5ac59d..b9ccd34 100644
--- a/crates/syntax/src/ast/node_ext.rs
+++ b/crates/syntax/src/ast/node_ext.rs
@@ -36,6 +36,16 @@
pub fn text(&self) -> TokenText<'_> {
text_of_first_token(self.syntax())
}
+ pub fn text_non_mutable(&self) -> &str {
+ fn first_token(green_ref: &GreenNodeData) -> &GreenTokenData {
+ green_ref.children().next().and_then(NodeOrToken::into_token).unwrap()
+ }
+
+ match self.syntax().green() {
+ Cow::Borrowed(green_ref) => first_token(green_ref).text(),
+ Cow::Owned(_) => unreachable!(),
+ }
+ }
pub fn as_tuple_field(&self) -> Option<usize> {
self.text().parse().ok()
diff --git a/crates/syntax/src/ast/syntax_factory.rs b/crates/syntax/src/ast/syntax_factory.rs
index 1c517ac..7142e4f 100644
--- a/crates/syntax/src/ast/syntax_factory.rs
+++ b/crates/syntax/src/ast/syntax_factory.rs
@@ -19,8 +19,8 @@
impl SyntaxFactory {
/// Creates a new [`SyntaxFactory`], generating mappings between input nodes and generated nodes.
- pub fn new() -> Self {
- Self { mappings: Some(RefCell::new(SyntaxMapping::new())) }
+ pub fn with_mappings() -> Self {
+ Self { mappings: Some(RefCell::new(SyntaxMapping::default())) }
}
/// Creates a [`SyntaxFactory`] without generating mappings.
diff --git a/crates/syntax/src/syntax_editor.rs b/crates/syntax/src/syntax_editor.rs
index 15515dd..473d9c0 100644
--- a/crates/syntax/src/syntax_editor.rs
+++ b/crates/syntax/src/syntax_editor.rs
@@ -33,7 +33,7 @@
impl SyntaxEditor {
/// Creates a syntax editor to start editing from `root`
pub fn new(root: SyntaxNode) -> Self {
- Self { root, changes: vec![], mappings: SyntaxMapping::new(), annotations: vec![] }
+ Self { root, changes: vec![], mappings: SyntaxMapping::default(), annotations: vec![] }
}
pub fn add_annotation(&mut self, element: impl Element, annotation: SyntaxAnnotation) {
@@ -151,9 +151,8 @@
#[repr(transparent)]
pub struct SyntaxAnnotation(NonZeroU32);
-impl SyntaxAnnotation {
- /// Creates a unique syntax annotation to attach data to.
- pub fn new() -> Self {
+impl Default for SyntaxAnnotation {
+ fn default() -> Self {
static COUNTER: AtomicU32 = AtomicU32::new(1);
// Only consistency within a thread matters, as SyntaxElements are !Send
@@ -163,12 +162,6 @@
}
}
-impl Default for SyntaxAnnotation {
- fn default() -> Self {
- Self::new()
- }
-}
-
/// Position describing where to insert elements
#[derive(Debug)]
pub struct Position {
@@ -411,12 +404,12 @@
let to_replace = root.syntax().descendants().find_map(ast::BinExpr::cast).unwrap();
let mut editor = SyntaxEditor::new(root.syntax().clone());
- let make = SyntaxFactory::new();
+ let make = SyntaxFactory::with_mappings();
let name = make::name("var_name");
let name_ref = make::name_ref("var_name").clone_for_update();
- let placeholder_snippet = SyntaxAnnotation::new();
+ let placeholder_snippet = SyntaxAnnotation::default();
editor.add_annotation(name.syntax(), placeholder_snippet);
editor.add_annotation(name_ref.syntax(), placeholder_snippet);
@@ -522,7 +515,7 @@
let second_let = root.syntax().descendants().find_map(ast::LetStmt::cast).unwrap();
let mut editor = SyntaxEditor::new(root.syntax().clone());
- let make = SyntaxFactory::new();
+ let make = SyntaxFactory::with_mappings();
let new_block_expr = make.block_expr([], Some(ast::Expr::BlockExpr(inner_block.clone())));
@@ -574,7 +567,7 @@
let inner_block = root.clone();
let mut editor = SyntaxEditor::new(root.syntax().clone());
- let make = SyntaxFactory::new();
+ let make = SyntaxFactory::with_mappings();
let new_block_expr = make.block_expr([], Some(ast::Expr::BlockExpr(inner_block.clone())));
diff --git a/crates/syntax/src/syntax_editor/mapping.rs b/crates/syntax/src/syntax_editor/mapping.rs
index f71925a..1eaef03 100644
--- a/crates/syntax/src/syntax_editor/mapping.rs
+++ b/crates/syntax/src/syntax_editor/mapping.rs
@@ -20,10 +20,6 @@
}
impl SyntaxMapping {
- pub fn new() -> Self {
- Self::default()
- }
-
/// Like [`SyntaxMapping::upmap_child`] but for syntax elements.
pub fn upmap_child_element(
&self,
diff --git a/crates/syntax/src/validation.rs b/crates/syntax/src/validation.rs
index 14df485..6397b04 100644
--- a/crates/syntax/src/validation.rs
+++ b/crates/syntax/src/validation.rs
@@ -4,6 +4,7 @@
mod block;
+use itertools::Itertools;
use rowan::Direction;
use rustc_lexer::unescape::{self, Mode, unescape_mixed, unescape_unicode};
@@ -37,7 +38,8 @@
ast::FnPtrType(it) => validate_trait_object_fn_ptr_ret_ty(it, errors),
ast::MacroRules(it) => validate_macro_rules(it, errors),
ast::LetExpr(it) => validate_let_expr(it, errors),
- ast::ImplTraitType(it) => validate_impl_object_ty(it, errors),
+ ast::DynTraitType(it) => errors.extend(validate_trait_object_ty(it)),
+ ast::ImplTraitType(it) => errors.extend(validate_impl_object_ty(it)),
_ => (),
}
}
@@ -316,58 +318,104 @@
}
fn validate_trait_object_ref_ty(ty: ast::RefType, errors: &mut Vec<SyntaxError>) {
- if let Some(ast::Type::DynTraitType(ty)) = ty.ty() {
- if let Some(err) = validate_trait_object_ty(ty) {
- errors.push(err);
+ match ty.ty() {
+ Some(ast::Type::DynTraitType(ty)) => {
+ if let Some(err) = validate_trait_object_ty_plus(ty) {
+ errors.push(err);
+ }
}
+ Some(ast::Type::ImplTraitType(ty)) => {
+ if let Some(err) = validate_impl_object_ty_plus(ty) {
+ errors.push(err);
+ }
+ }
+ _ => (),
}
}
fn validate_trait_object_ptr_ty(ty: ast::PtrType, errors: &mut Vec<SyntaxError>) {
- if let Some(ast::Type::DynTraitType(ty)) = ty.ty() {
- if let Some(err) = validate_trait_object_ty(ty) {
- errors.push(err);
+ match ty.ty() {
+ Some(ast::Type::DynTraitType(ty)) => {
+ if let Some(err) = validate_trait_object_ty_plus(ty) {
+ errors.push(err);
+ }
}
+ Some(ast::Type::ImplTraitType(ty)) => {
+ if let Some(err) = validate_impl_object_ty_plus(ty) {
+ errors.push(err);
+ }
+ }
+ _ => (),
}
}
fn validate_trait_object_fn_ptr_ret_ty(ty: ast::FnPtrType, errors: &mut Vec<SyntaxError>) {
- if let Some(ast::Type::DynTraitType(ty)) = ty.ret_type().and_then(|ty| ty.ty()) {
- if let Some(err) = validate_trait_object_ty(ty) {
- errors.push(err);
+ match ty.ret_type().and_then(|ty| ty.ty()) {
+ Some(ast::Type::DynTraitType(ty)) => {
+ if let Some(err) = validate_trait_object_ty_plus(ty) {
+ errors.push(err);
+ }
}
+ Some(ast::Type::ImplTraitType(ty)) => {
+ if let Some(err) = validate_impl_object_ty_plus(ty) {
+ errors.push(err);
+ }
+ }
+ _ => (),
}
}
fn validate_trait_object_ty(ty: ast::DynTraitType) -> Option<SyntaxError> {
let tbl = ty.type_bound_list()?;
- let bounds_count = tbl.bounds().count();
+ let no_bounds = tbl.bounds().filter_map(|it| it.ty()).next().is_none();
- match bounds_count {
- 0 => Some(SyntaxError::new(
+ match no_bounds {
+ true => Some(SyntaxError::new(
"At least one trait is required for an object type",
ty.syntax().text_range(),
)),
- _ if bounds_count > 1 => {
- let dyn_token = ty.dyn_token()?;
- let preceding_token =
- algo::skip_trivia_token(dyn_token.prev_token()?, Direction::Prev)?;
-
- if !matches!(preceding_token.kind(), T!['('] | T![<] | T![=]) {
- return Some(SyntaxError::new("ambiguous `+` in a type", ty.syntax().text_range()));
- }
- None
- }
- _ => None,
+ false => None,
}
}
-fn validate_impl_object_ty(ty: ast::ImplTraitType, errors: &mut Vec<SyntaxError>) {
- if ty.type_bound_list().map_or(0, |tbl| tbl.bounds().count()) == 0 {
- errors.push(SyntaxError::new(
- "At least one trait must be specified",
+fn validate_impl_object_ty(ty: ast::ImplTraitType) -> Option<SyntaxError> {
+ let tbl = ty.type_bound_list()?;
+ let no_bounds = tbl.bounds().filter_map(|it| it.ty()).next().is_none();
+
+ match no_bounds {
+ true => Some(SyntaxError::new(
+ "At least one trait is required for an object type",
ty.syntax().text_range(),
- ));
+ )),
+ false => None,
+ }
+}
+
+// FIXME: This is not a validation error; it is a context-dependent parse error
+fn validate_trait_object_ty_plus(ty: ast::DynTraitType) -> Option<SyntaxError> {
+ let dyn_token = ty.dyn_token()?;
+ let preceding_token = algo::skip_trivia_token(dyn_token.prev_token()?, Direction::Prev)?;
+ let tbl = ty.type_bound_list()?;
+ let more_than_one_bound = tbl.bounds().next_tuple::<(_, _)>().is_some();
+
+ if more_than_one_bound && !matches!(preceding_token.kind(), T!['('] | T![<] | T![=]) {
+ Some(SyntaxError::new("ambiguous `+` in a type", ty.syntax().text_range()))
+ } else {
+ None
+ }
+}
+
+// FIXME: This is not a validation error; it is a context-dependent parse error
+fn validate_impl_object_ty_plus(ty: ast::ImplTraitType) -> Option<SyntaxError> {
+ let dyn_token = ty.impl_token()?;
+ let preceding_token = algo::skip_trivia_token(dyn_token.prev_token()?, Direction::Prev)?;
+ let tbl = ty.type_bound_list()?;
+ let more_than_one_bound = tbl.bounds().next_tuple::<(_, _)>().is_some();
+
+ if more_than_one_bound && !matches!(preceding_token.kind(), T!['('] | T![<] | T![=]) {
+ Some(SyntaxError::new("ambiguous `+` in a type", ty.syntax().text_range()))
+ } else {
+ None
}
}
diff --git a/crates/syntax/test_data/parser/validation/0261_dangling_impl_undeclared_lifetime.rast b/crates/syntax/test_data/parser/validation/0261_dangling_impl_undeclared_lifetime.rast
new file mode 100644
index 0000000..bd71c61
--- /dev/null
+++ b/crates/syntax/test_data/parser/validation/0261_dangling_impl_undeclared_lifetime.rast
@@ -0,0 +1,38 @@
+SOURCE_FILE@0..28
+ FN@0..28
+ FN_KW@0..2 "fn"
+ WHITESPACE@2..3 " "
+ NAME@3..4
+ IDENT@3..4 "f"
+ PARAM_LIST@4..25
+ L_PAREN@4..5 "("
+ PARAM@5..24
+ WILDCARD_PAT@5..6
+ UNDERSCORE@5..6 "_"
+ COLON@6..7 ":"
+ WHITESPACE@7..8 " "
+ REF_TYPE@8..24
+ AMP@8..9 "&"
+ IMPL_TRAIT_TYPE@9..24
+ IMPL_KW@9..13 "impl"
+ WHITESPACE@13..14 " "
+ TYPE_BOUND_LIST@14..24
+ TYPE_BOUND@14..16
+ LIFETIME@14..16
+ LIFETIME_IDENT@14..16 "'a"
+ WHITESPACE@16..17 " "
+ PLUS@17..18 "+"
+ WHITESPACE@18..19 " "
+ TYPE_BOUND@19..24
+ PATH_TYPE@19..24
+ PATH@19..24
+ PATH_SEGMENT@19..24
+ NAME_REF@19..24
+ IDENT@19..24 "Sized"
+ R_PAREN@24..25 ")"
+ WHITESPACE@25..26 " "
+ BLOCK_EXPR@26..28
+ STMT_LIST@26..28
+ L_CURLY@26..27 "{"
+ R_CURLY@27..28 "}"
+error 9..24: ambiguous `+` in a type
diff --git a/crates/syntax/test_data/parser/validation/0261_dangling_impl_undeclared_lifetime.rs b/crates/syntax/test_data/parser/validation/0261_dangling_impl_undeclared_lifetime.rs
new file mode 100644
index 0000000..670a6f0
--- /dev/null
+++ b/crates/syntax/test_data/parser/validation/0261_dangling_impl_undeclared_lifetime.rs
@@ -0,0 +1 @@
+fn f(_: &impl 'a + Sized) {}
\ No newline at end of file
diff --git a/crates/syntax/test_data/parser/validation/dangling_impl.rast b/crates/syntax/test_data/parser/validation/dangling_impl.rast
index 2db07ae..c337ee8 100644
--- a/crates/syntax/test_data/parser/validation/dangling_impl.rast
+++ b/crates/syntax/test_data/parser/validation/dangling_impl.rast
@@ -20,4 +20,4 @@
STMT_LIST@14..16
L_CURLY@14..15 "{"
R_CURLY@15..16 "}"
-error 8..12: At least one trait must be specified
+error 8..12: At least one trait is required for an object type
diff --git a/crates/syntax/test_data/parser/validation/dangling_impl_reference.rast b/crates/syntax/test_data/parser/validation/dangling_impl_reference.rast
index dbe6535..cb73cb4 100644
--- a/crates/syntax/test_data/parser/validation/dangling_impl_reference.rast
+++ b/crates/syntax/test_data/parser/validation/dangling_impl_reference.rast
@@ -22,4 +22,4 @@
STMT_LIST@15..17
L_CURLY@15..16 "{"
R_CURLY@16..17 "}"
-error 9..13: At least one trait must be specified
+error 9..13: At least one trait is required for an object type
diff --git a/crates/syntax/test_data/parser/validation/impl_trait_lifetime_only.rast b/crates/syntax/test_data/parser/validation/impl_trait_lifetime_only.rast
new file mode 100644
index 0000000..ed938d6
--- /dev/null
+++ b/crates/syntax/test_data/parser/validation/impl_trait_lifetime_only.rast
@@ -0,0 +1,29 @@
+SOURCE_FILE@0..20
+ FN@0..20
+ FN_KW@0..2 "fn"
+ WHITESPACE@2..3 " "
+ NAME@3..4
+ IDENT@3..4 "f"
+ PARAM_LIST@4..17
+ L_PAREN@4..5 "("
+ PARAM@5..16
+ WILDCARD_PAT@5..6
+ UNDERSCORE@5..6 "_"
+ COLON@6..7 ":"
+ WHITESPACE@7..8 " "
+ REF_TYPE@8..16
+ AMP@8..9 "&"
+ IMPL_TRAIT_TYPE@9..16
+ IMPL_KW@9..13 "impl"
+ WHITESPACE@13..14 " "
+ TYPE_BOUND_LIST@14..16
+ TYPE_BOUND@14..16
+ LIFETIME@14..16
+ LIFETIME_IDENT@14..16 "'a"
+ R_PAREN@16..17 ")"
+ WHITESPACE@17..18 " "
+ BLOCK_EXPR@18..20
+ STMT_LIST@18..20
+ L_CURLY@18..19 "{"
+ R_CURLY@19..20 "}"
+error 9..16: At least one trait is required for an object type
diff --git a/crates/syntax/test_data/parser/validation/impl_trait_lifetime_only.rs b/crates/syntax/test_data/parser/validation/impl_trait_lifetime_only.rs
new file mode 100644
index 0000000..5b18a54
--- /dev/null
+++ b/crates/syntax/test_data/parser/validation/impl_trait_lifetime_only.rs
@@ -0,0 +1 @@
+fn f(_: &impl 'a) {}
\ No newline at end of file
diff --git a/crates/test-fixture/Cargo.toml b/crates/test-fixture/Cargo.toml
index 2547a02..353d4c3 100644
--- a/crates/test-fixture/Cargo.toml
+++ b/crates/test-fixture/Cargo.toml
@@ -19,6 +19,7 @@
stdx.workspace = true
intern.workspace = true
triomphe.workspace = true
+paths.workspace = true
[lints]
workspace = true
diff --git a/crates/test-fixture/src/lib.rs b/crates/test-fixture/src/lib.rs
index f0b85b4..059397b 100644
--- a/crates/test-fixture/src/lib.rs
+++ b/crates/test-fixture/src/lib.rs
@@ -19,6 +19,7 @@
tt::{Leaf, TokenTree, TopSubtree, TopSubtreeBuilder, TtElement, TtIter},
};
use intern::{Symbol, sym};
+use paths::AbsPathBuf;
use rustc_hash::FxHashMap;
use span::{Edition, EditionedFileId, FileId, Span};
use stdx::itertools::Itertools;
@@ -138,7 +139,7 @@
let channel = toolchain.as_deref().unwrap_or("stable");
Version::parse(&format!("1.76.0-{channel}")).unwrap()
});
- let mut source_change = FileChange::new();
+ let mut source_change = FileChange::default();
let mut files = Vec::new();
let mut crate_graph = CrateGraphBuilder::default();
@@ -162,6 +163,9 @@
let crate_ws_data =
Arc::new(CrateWorkspaceData { data_layout: target_data_layout, toolchain });
+ // FIXME: This is less than ideal
+ let proc_macro_cwd = Arc::new(AbsPathBuf::assert_utf8(std::env::current_dir().unwrap()));
+
for entry in fixture {
let mut range_or_offset = None;
let text = if entry.text.contains(CURSOR_MARKER) {
@@ -213,7 +217,7 @@
meta.env,
origin,
false,
- None,
+ proc_macro_cwd.clone(),
crate_ws_data.clone(),
);
let prev = crates.insert(crate_name.clone(), crate_id);
@@ -254,7 +258,7 @@
default_env,
CrateOrigin::Local { repo: None, name: None },
false,
- None,
+ proc_macro_cwd.clone(),
crate_ws_data.clone(),
);
} else {
@@ -296,7 +300,7 @@
)]),
CrateOrigin::Lang(LangCrateOrigin::Core),
false,
- None,
+ proc_macro_cwd.clone(),
crate_ws_data.clone(),
);
@@ -345,7 +349,7 @@
)]),
CrateOrigin::Local { repo: None, name: None },
true,
- None,
+ proc_macro_cwd.clone(),
crate_ws_data,
);
proc_macros.insert(proc_macros_crate, Ok(proc_macro));
@@ -650,7 +654,7 @@
_: Span,
_: Span,
_: Span,
- _: Option<String>,
+ _: String,
) -> Result<TopSubtree, ProcMacroExpansionError> {
Ok(subtree.clone())
}
@@ -672,7 +676,7 @@
_: Span,
call_site: Span,
_: Span,
- _: Option<String>,
+ _: String,
) -> Result<TopSubtree, ProcMacroExpansionError> {
let tt::TokenTree::Leaf(macro_name) = &subtree.0[2] else {
return Err(ProcMacroExpansionError::Panic("incorrect input".to_owned()));
@@ -707,7 +711,7 @@
_: Span,
_: Span,
_: Span,
- _: Option<String>,
+ _: String,
) -> Result<TopSubtree, ProcMacroExpansionError> {
attrs
.cloned()
@@ -730,7 +734,7 @@
def_site: Span,
_: Span,
_: Span,
- _: Option<String>,
+ _: String,
) -> Result<TopSubtree, ProcMacroExpansionError> {
// Input:
// ```
@@ -765,7 +769,7 @@
_: Span,
_: Span,
_: Span,
- _: Option<String>,
+ _: String,
) -> Result<TopSubtree, ProcMacroExpansionError> {
fn traverse(builder: &mut TopSubtreeBuilder, iter: TtIter<'_>) {
for tt in iter.collect_vec().into_iter().rev() {
@@ -803,7 +807,7 @@
_: Span,
_: Span,
_: Span,
- _: Option<String>,
+ _: String,
) -> Result<TopSubtree, ProcMacroExpansionError> {
let mut result = input.0.clone();
for it in &mut result {
@@ -845,7 +849,7 @@
_: Span,
_: Span,
_: Span,
- _: Option<String>,
+ _: String,
) -> Result<TopSubtree, ProcMacroExpansionError> {
let TokenTree::Leaf(Leaf::Literal(lit)) = &subtree.0[1] else {
return Err(ProcMacroExpansionError::Panic("incorrect Input".into()));
@@ -874,7 +878,7 @@
def_site: Span,
_: Span,
_: Span,
- _: Option<String>,
+ _: String,
) -> Result<TopSubtree, ProcMacroExpansionError> {
let span = subtree
.token_trees()
@@ -929,7 +933,7 @@
_: Span,
_: Span,
_: Span,
- _: Option<String>,
+ _: String,
) -> Result<TopSubtree, ProcMacroExpansionError> {
for tt in subtree.token_trees().flat_tokens() {
if let tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) = tt {
diff --git a/crates/test-utils/Cargo.toml b/crates/test-utils/Cargo.toml
index f25512b..c27e850 100644
--- a/crates/test-utils/Cargo.toml
+++ b/crates/test-utils/Cargo.toml
@@ -13,7 +13,7 @@
[dependencies]
# Avoid adding deps here, this crate is widely used in tests it should compile fast!
-dissimilar = "1.0.7"
+dissimilar = "1.0.10"
text-size.workspace = true
rustc-hash.workspace = true
diff --git a/crates/toolchain/Cargo.toml b/crates/toolchain/Cargo.toml
index 38daacd..315a3a2 100644
--- a/crates/toolchain/Cargo.toml
+++ b/crates/toolchain/Cargo.toml
@@ -12,7 +12,7 @@
[lib]
[dependencies]
-home = "0.5.4"
+home = "0.5.11"
camino.workspace = true
[lints]
diff --git a/crates/tt/src/iter.rs b/crates/tt/src/iter.rs
index 1d88218..0418c00 100644
--- a/crates/tt/src/iter.rs
+++ b/crates/tt/src/iter.rs
@@ -6,7 +6,7 @@
use arrayvec::ArrayVec;
use intern::sym;
-use crate::{Ident, Leaf, Punct, Spacing, Subtree, TokenTree, TokenTreesView};
+use crate::{Ident, Leaf, MAX_GLUED_PUNCT_LEN, Punct, Spacing, Subtree, TokenTree, TokenTreesView};
#[derive(Clone)]
pub struct TtIter<'a, S> {
@@ -111,7 +111,7 @@
///
/// This method currently may return a single quotation, which is part of lifetime ident and
/// conceptually not a punct in the context of mbe. Callers should handle this.
- pub fn expect_glued_punct(&mut self) -> Result<ArrayVec<Punct<S>, 3>, ()> {
+ pub fn expect_glued_punct(&mut self) -> Result<ArrayVec<Punct<S>, MAX_GLUED_PUNCT_LEN>, ()> {
let TtElement::Leaf(&Leaf::Punct(first)) = self.next().ok_or(())? else {
return Err(());
};
@@ -145,7 +145,6 @@
}
('-' | '!' | '*' | '/' | '&' | '%' | '^' | '+' | '<' | '=' | '>' | '|', '=', _)
| ('-' | '=' | '>', '>', _)
- | (_, _, Some(';'))
| ('<', '-', _)
| (':', ':', _)
| ('.', '.', _)
diff --git a/crates/tt/src/lib.rs b/crates/tt/src/lib.rs
index 1cfead5..36ccb67 100644
--- a/crates/tt/src/lib.rs
+++ b/crates/tt/src/lib.rs
@@ -22,6 +22,8 @@
pub use text_size::{TextRange, TextSize};
+pub const MAX_GLUED_PUNCT_LEN: usize = 3;
+
#[derive(Clone, PartialEq, Debug)]
pub struct Lit {
pub kind: LitKind,
@@ -243,6 +245,23 @@
self.token_trees.extend(tt.0.iter().cloned());
}
+ /// Like [`Self::extend_with_tt()`], but makes sure the new tokens will never be
+ /// joint with whatever comes after them.
+ pub fn extend_with_tt_alone(&mut self, tt: TokenTreesView<'_, S>) {
+ if let Some((last, before_last)) = tt.0.split_last() {
+ self.token_trees.reserve(tt.0.len());
+ self.token_trees.extend(before_last.iter().cloned());
+ let last = if let TokenTree::Leaf(Leaf::Punct(last)) = last {
+ let mut last = *last;
+ last.spacing = Spacing::Alone;
+ TokenTree::Leaf(Leaf::Punct(last))
+ } else {
+ last.clone()
+ };
+ self.token_trees.push(last);
+ }
+ }
+
pub fn expected_delimiters(&self) -> impl Iterator<Item = &Delimiter<S>> {
self.unclosed_subtree_indices.iter().rev().map(|&subtree_idx| {
let TokenTree::Subtree(subtree) = &self.token_trees[subtree_idx] else {
@@ -363,7 +382,8 @@
) -> impl Iterator<Item = TokenTreesView<'a, S>> {
let mut subtree_iter = self.iter();
let mut need_to_yield_even_if_empty = true;
- let result = std::iter::from_fn(move || {
+
+ std::iter::from_fn(move || {
if subtree_iter.is_empty() && !need_to_yield_even_if_empty {
return None;
};
@@ -379,8 +399,7 @@
result = subtree_iter.from_savepoint(savepoint);
}
Some(result)
- });
- result
+ })
}
}
diff --git a/crates/vfs-notify/Cargo.toml b/crates/vfs-notify/Cargo.toml
index 48b4d22..9b32ee1 100644
--- a/crates/vfs-notify/Cargo.toml
+++ b/crates/vfs-notify/Cargo.toml
@@ -13,7 +13,7 @@
[dependencies]
tracing.workspace = true
-walkdir = "2.3.2"
+walkdir = "2.5.0"
crossbeam-channel.workspace = true
notify = "8.0.0"
rayon = "1.10.0"
diff --git a/docs/book/src/configuration_generated.md b/docs/book/src/configuration_generated.md
index 0a612d2..860d824 100644
--- a/docs/book/src/configuration_generated.md
+++ b/docs/book/src/configuration_generated.md
@@ -130,6 +130,12 @@
Whether to pass `--no-default-features` to cargo.
+**rust-analyzer.cargo.noDeps** (default: false)
+
+ Whether to skip fetching dependencies. If set to "true", the analysis is performed
+entirely offline, and Cargo metadata for dependencies is not fetched.
+
+
**rust-analyzer.cargo.sysroot** (default: "discover")
Relative path to the sysroot, or "discover" to try to automatically find it via
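For reference, a minimal sketch of enabling the new `rust-analyzer.cargo.noDeps` option in a VS Code `settings.json`. The key, its boolean type, and the `false` default come from the hunks above; the surrounding settings file (and the fact that VS Code's `settings.json` tolerates comments) is assumed:

```json
{
    // Hypothetical user/workspace settings entry; rust-analyzer.cargo.noDeps is
    // the option documented above. Setting it to true makes rust-analyzer skip
    // fetching Cargo metadata for dependencies (fully offline analysis).
    "rust-analyzer.cargo.noDeps": true
}
```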
diff --git a/docs/book/src/contributing/README.md b/docs/book/src/contributing/README.md
index 9e1cdb0..05286b5 100644
--- a/docs/book/src/contributing/README.md
+++ b/docs/book/src/contributing/README.md
@@ -140,9 +140,10 @@
`--log-file <PATH>` CLI argument allows logging to file.
Setting the `RA_LOG_FILE=<PATH>` environment variable will also log to file, it will also override `--log-file`.
-To see stderr in the running VS Code instance, go to the "Output" tab of the panel and select `Rust Analyzer Client`.
+To see the server stderr output in the running VS Code instance, go to the "Output" tab of the panel
+and select `rust-analyzer Language Server`.
This shows `eprintln!` as well.
-Note that `stdout` is used for the actual protocol, so `println!` will break things.
+Note that `stdout` is used for LSP messages, so using `println!` (or anything else that writes to `stdout`) will break rust-analyzer!
To log all communication between the server and the client, there are two choices:
@@ -153,9 +154,11 @@
```
* You can log on the client side, by the `rust-analyzer: Toggle LSP Logs` command or enabling `"rust-analyzer.trace.server": "verbose"` workspace setting.
- These logs are shown in a separate tab in the output and could be used with LSP inspector.
+  These logs are shown in a separate tab named `rust-analyzer LSP Trace` in the output and can be used with the LSP inspector.
Kudos to [@DJMcNab](https://github.com/DJMcNab) for setting this awesome infra up!
+Finally, there are the logs of the VS Code extension itself, which go into the `rust-analyzer Extension` output tab.
+
There are also several VS Code commands which might be of interest:
* `rust-analyzer: Status` shows some memory-usage statistics.
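As a companion to the logging notes above, a minimal `.vscode/settings.json` sketch for client-side LSP tracing. The `"rust-analyzer.trace.server": "verbose"` key/value pair is quoted from the paragraph in this hunk; the file location is assumed:

```json
{
    // Enables verbose client-side tracing of LSP traffic, as described above;
    // the output appears in the "rust-analyzer LSP Trace" tab.
    "rust-analyzer.trace.server": "verbose"
}
```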
diff --git a/editors/code/language-configuration.json b/editors/code/language-configuration.json
index 6619d0c..aacd48b 100644
--- a/editors/code/language-configuration.json
+++ b/editors/code/language-configuration.json
@@ -14,9 +14,9 @@
["(", ")"]
],
"autoClosingPairs": [
- { "open": "{", "close": "}" },
- { "open": "[", "close": "]" },
- { "open": "(", "close": ")" },
+ { "open": "{", "close": "}", "notIn": ["string"] },
+ { "open": "[", "close": "]", "notIn": ["string"] },
+ { "open": "(", "close": ")", "notIn": ["string"] },
{ "open": "\"", "close": "\"", "notIn": ["string"] },
{ "open": "/*", "close": " */", "notIn": ["string"] },
{ "open": "`", "close": "`", "notIn": ["string"] },
diff --git a/editors/code/package.json b/editors/code/package.json
index 11732dd..a048862 100644
--- a/editors/code/package.json
+++ b/editors/code/package.json
@@ -606,11 +606,6 @@
"/rustc/<id>": "${env:USERPROFILE}/.rustup/toolchains/<toolchain-id>/lib/rustlib/src/rust"
}
},
- "rust-analyzer.debug.openDebugPane": {
- "markdownDescription": "Whether to open up the `Debug Panel` on debugging start.",
- "type": "boolean",
- "default": false
- },
"rust-analyzer.debug.buildBeforeRestart": {
"markdownDescription": "Whether to rebuild the project modules before debugging the same test again",
"type": "boolean",
@@ -899,6 +894,16 @@
{
"title": "cargo",
"properties": {
+ "rust-analyzer.cargo.noDeps": {
+ "markdownDescription": "Whether to skip fetching dependencies. If set to \"true\", the analysis is performed\nentirely offline, and Cargo metadata for dependencies is not fetched.",
+ "default": false,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "cargo",
+ "properties": {
"rust-analyzer.cargo.sysroot": {
"markdownDescription": "Relative path to the sysroot, or \"discover\" to try to automatically find it via\n\"rustc --print sysroot\".\n\nUnsetting this disables sysroot loading.\n\nThis option does not take effect until rust-analyzer is restarted.",
"default": "discover",
diff --git a/editors/code/src/config.ts b/editors/code/src/config.ts
index 896b3c1..9b8ac66 100644
--- a/editors/code/src/config.ts
+++ b/editors/code/src/config.ts
@@ -323,7 +323,6 @@
return {
engine: this.get<string>("debug.engine"),
engineSettings: this.get<object>("debug.engineSettings") ?? {},
- openDebugPane: this.get<boolean>("debug.openDebugPane"),
buildBeforeRestart: this.get<boolean>("debug.buildBeforeRestart"),
sourceFileMap: sourceFileMap,
};
diff --git a/editors/code/src/ctx.ts b/editors/code/src/ctx.ts
index 37a2ee2..1149523 100644
--- a/editors/code/src/ctx.ts
+++ b/editors/code/src/ctx.ts
@@ -190,11 +190,11 @@
}
if (!this.traceOutputChannel) {
- this.traceOutputChannel = new LazyOutputChannel("Rust Analyzer Language Server Trace");
+ this.traceOutputChannel = new LazyOutputChannel("rust-analyzer LSP Trace");
this.pushExtCleanup(this.traceOutputChannel);
}
if (!this.outputChannel) {
- this.outputChannel = vscode.window.createOutputChannel("Rust Analyzer Language Server");
+ this.outputChannel = vscode.window.createOutputChannel("rust-analyzer Language Server");
this.pushExtCleanup(this.outputChannel);
}
diff --git a/editors/code/src/debug.ts b/editors/code/src/debug.ts
index 72a9aab..a04a6db 100644
--- a/editors/code/src/debug.ts
+++ b/editors/code/src/debug.ts
@@ -6,11 +6,9 @@
import { Cargo } from "./toolchain";
import type { Ctx } from "./ctx";
import { createTaskFromRunnable, prepareEnv } from "./run";
-import { execute, isCargoRunnableArgs, unwrapUndefinable } from "./util";
+import { execute, isCargoRunnableArgs, unwrapUndefinable, log } from "./util";
import type { Config } from "./config";
-const debugOutput = vscode.window.createOutputChannel("Debug");
-
// Here we want to keep track on everything that's currently running
const activeDebugSessionIds: string[] = [];
@@ -56,15 +54,14 @@
if (-1 !== index) {
debugConfig = configurations[index];
message = " (from launch.json)";
- debugOutput.clear();
} else {
debugConfig = await getDebugConfiguration(ctx.config, runnable);
}
if (!debugConfig) return false;
- debugOutput.appendLine(`Launching debug configuration${message}:`);
- debugOutput.appendLine(JSON.stringify(debugConfig, null, 2));
+ log.debug(`Launching debug configuration${message}:`);
+ log.debug(JSON.stringify(debugConfig, null, 2));
return vscode.debug.startDebugging(undefined, debugConfig);
}
@@ -118,10 +115,6 @@
return;
}
- debugOutput.clear();
- if (config.debug.openDebugPane) {
- debugOutput.show(true);
- }
// folder exists or RA is not active.
const workspaceFolders = vscode.workspace.workspaceFolders!;
@@ -232,7 +225,7 @@
const commitHash = rx.exec(data)?.[1];
if (commitHash) {
const rustlib = path.normalize(sysroot + "/lib/rustlib/src/rust");
- return { source: rustlib, destination: rustlib };
+ return { source: "/rustc/" + commitHash, destination: rustlib };
}
}
@@ -321,7 +314,7 @@
runnableArgs: ra.CargoRunnableArgs,
env: Record<string, string>,
): Promise<string> {
- const cargo = new Cargo(runnableArgs.workspaceRoot || ".", debugOutput, env);
+ const cargo = new Cargo(runnableArgs.workspaceRoot || ".", env);
const executable = await cargo.executableFromArgs(runnableArgs);
// if we are here, there were no compilation errors.
diff --git a/editors/code/src/toolchain.ts b/editors/code/src/toolchain.ts
index bb06144..a859ce6 100644
--- a/editors/code/src/toolchain.ts
+++ b/editors/code/src/toolchain.ts
@@ -37,7 +37,6 @@
export class Cargo {
constructor(
readonly rootFolder: string,
- readonly output: vscode.OutputChannel,
readonly env: Record<string, string>,
) {}
@@ -93,14 +92,14 @@
});
}
} else if (message.reason === "compiler-message") {
- this.output.append(message.message.rendered);
+ log.info(message.message.rendered);
}
},
- (stderr) => this.output.append(stderr),
+ (stderr) => log.error(stderr),
env,
);
} catch (err) {
- this.output.show(true);
+ log.error(`Cargo invocation has failed: ${err}`);
throw new Error(`Cargo invocation has failed: ${err}`);
}
diff --git a/editors/code/src/util.ts b/editors/code/src/util.ts
index 93c7bf8..4b3a697 100644
--- a/editors/code/src/util.ts
+++ b/editors/code/src/util.ts
@@ -18,7 +18,7 @@
};
class Log {
- private readonly output = vscode.window.createOutputChannel("Rust Analyzer Client", {
+ private readonly output = vscode.window.createOutputChannel("rust-analyzer Extension", {
log: true,
});
diff --git a/lib/line-index/Cargo.toml b/lib/line-index/Cargo.toml
index f15c2e3..81cd364 100644
--- a/lib/line-index/Cargo.toml
+++ b/lib/line-index/Cargo.toml
@@ -11,7 +11,7 @@
nohash-hasher = "0.2.0"
[dev-dependencies]
-oorandom = "11.1.3"
+oorandom = "11.1.5"
[lints]
workspace = true
diff --git a/lib/lsp-server/Cargo.toml b/lib/lsp-server/Cargo.toml
index 39b9315..1dc6d3c 100644
--- a/lib/lsp-server/Cargo.toml
+++ b/lib/lsp-server/Cargo.toml
@@ -7,15 +7,15 @@
edition = "2024"
[dependencies]
-log = "0.4.17"
-serde_json = "1.0.108"
-serde = { version = "1.0.216" }
-serde_derive = { version = "1.0.216" }
+log = "0.4.26"
+serde_json = "1.0.140"
+serde = { version = "1.0.219" }
+serde_derive = { version = "1.0.219" }
crossbeam-channel.workspace = true
[dev-dependencies]
lsp-types = "=0.95"
-ctrlc = "3.4.1"
+ctrlc = "3.4.5"
[lints]
workspace = true
diff --git a/lib/lsp-server/src/stdio.rs b/lib/lsp-server/src/stdio.rs
index 4ca432d..c558b6c 100644
--- a/lib/lsp-server/src/stdio.rs
+++ b/lib/lsp-server/src/stdio.rs
@@ -40,7 +40,7 @@
debug!("sending message {:#?}", msg);
if let Err(e) = reader_sender.send(msg) {
- return Err(io::Error::new(io::ErrorKind::Other, e));
+ return Err(io::Error::other(e));
}
if is_exit {
diff --git a/xtask/Cargo.toml b/xtask/Cargo.toml
index 01a561d..6195de5 100644
--- a/xtask/Cargo.toml
+++ b/xtask/Cargo.toml
@@ -8,16 +8,16 @@
[dependencies]
anyhow.workspace = true
-directories = "5.0"
-flate2 = "1.0.24"
-write-json = "0.1.2"
+directories = "6.0"
+flate2 = "1.1.0"
+write-json = "0.1.4"
xshell.workspace = true
-xflags = "0.3.0"
+xflags = "0.3.2"
time = { version = "0.3", default-features = false }
-zip = { version = "0.6", default-features = false, features = ["deflate", "time"] }
+zip = { version = "2.4", default-features = false, features = ["deflate-flate2", "flate2", "time"] }
stdx.workspace = true
-proc-macro2 = "1.0.93"
-quote = "1.0.20"
+proc-macro2 = "1.0.94"
+quote = "1.0.40"
ungrammar = "1.16.1"
either.workspace = true
itertools.workspace = true
diff --git a/xtask/src/dist.rs b/xtask/src/dist.rs
index b436392..35ae796 100644
--- a/xtask/src/dist.rs
+++ b/xtask/src/dist.rs
@@ -8,7 +8,7 @@
use flate2::{Compression, write::GzEncoder};
use time::OffsetDateTime;
use xshell::{Shell, cmd};
-use zip::{DateTime, ZipWriter, write::FileOptions};
+use zip::{DateTime, ZipWriter, write::SimpleFileOptions};
use crate::{
date_iso,
@@ -125,7 +125,7 @@
let mut writer = ZipWriter::new(BufWriter::new(file));
writer.start_file(
src_path.file_name().unwrap().to_str().unwrap(),
- FileOptions::default()
+ SimpleFileOptions::default()
.last_modified_time(
DateTime::try_from(OffsetDateTime::from(std::fs::metadata(src_path)?.modified()?))
.unwrap(),
@@ -139,7 +139,7 @@
if let Some(symbols_path) = symbols_path {
writer.start_file(
symbols_path.file_name().unwrap().to_str().unwrap(),
- FileOptions::default()
+ SimpleFileOptions::default()
.last_modified_time(
DateTime::try_from(OffsetDateTime::from(
std::fs::metadata(src_path)?.modified()?,
diff --git a/xtask/src/publish/notes.rs b/xtask/src/publish/notes.rs
index 7245ce2..93592d4 100644
--- a/xtask/src/publish/notes.rs
+++ b/xtask/src/publish/notes.rs
@@ -85,7 +85,7 @@
}
fn process_list(&mut self) -> anyhow::Result<()> {
- let mut nesting = ListNesting::new();
+ let mut nesting = ListNesting::default();
while let Some(line) = self.iter.peek() {
let line = line.as_deref().map_err(|e| anyhow!("{e}"))?;
@@ -385,10 +385,6 @@
struct ListNesting(Vec<ListMarker>);
impl ListNesting {
- fn new() -> Self {
- Self(Vec::<ListMarker>::with_capacity(6))
- }
-
fn current(&mut self) -> Option<&ListMarker> {
self.0.last()
}
@@ -417,6 +413,12 @@
}
}
+impl Default for ListNesting {
+ fn default() -> Self {
+ Self(Vec::<ListMarker>::with_capacity(6))
+ }
+}
+
#[derive(Debug, PartialEq, Eq)]
enum ListMarker {
Asterisk(usize),
diff --git a/xtask/src/tidy.rs b/xtask/src/tidy.rs
index bf51760..343f76f 100644
--- a/xtask/src/tidy.rs
+++ b/xtask/src/tidy.rs
@@ -254,7 +254,7 @@
d.file_name()
.unwrap_or_default()
.to_str()
- .map(|f_n| file_names.iter().any(|name| *name == f_n))
+ .map(|f_n| file_names.contains(&f_n))
.unwrap_or(false)
}
}