Auto merge of #149079 - zachs18:clone_from_ref, r=Mark-Simulacrum
Add `Box::clone_from_ref` and similar under `feature(clone_from_ref)`
Tracking issue: https://github.com/rust-lang/rust/issues/149075
Accepted ACP: https://github.com/rust-lang/libs-team/issues/483
This PR implements `clone_from_ref` (and `try_*` and `_*in` variants), to get a `Box<T>`, `Arc<T>`, or `Rc<T>` by cloning from a `&T` where `T: CloneToUninit`.
The "Implement..." commits replace some ad-hoc conversions with `clone_from_ref` variants, which can be split out to a separate PR if desired.
This PR will conflict with https://github.com/rust-lang/rust/pull/148769 due to usage of `Layout::dangling` (which that PR is renaming to `dangling_ptr`), so they should not be rolled up together, and the one which merges later will need to be amended.
diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index e3384c9..5975272 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -16,6 +16,7 @@
CI: 1
RUST_BACKTRACE: short
RUSTUP_MAX_RETRIES: 10
+ RUSTFLAGS: "-D warnings -W unreachable-pub --cfg no_salsa_async_drops"
defaults:
run:
@@ -41,8 +42,6 @@
if: github.repository == 'rust-lang/rust-analyzer'
name: proc-macro-srv
runs-on: ubuntu-latest
- env:
- RUSTFLAGS: "-D warnings"
steps:
- name: Checkout repository
@@ -80,7 +79,6 @@
name: Rust
runs-on: ${{ matrix.os }}
env:
- RUSTFLAGS: "-Dwarnings"
CC: deny_c
strategy:
@@ -207,8 +205,6 @@
# crate should
- target: wasm32-unknown-unknown
ide-only: true
- env:
- RUSTFLAGS: "-Dwarnings"
steps:
- name: Checkout repository
diff --git a/.github/workflows/metrics.yaml b/.github/workflows/metrics.yaml
index dc2f432..860837d 100644
--- a/.github/workflows/metrics.yaml
+++ b/.github/workflows/metrics.yaml
@@ -7,7 +7,7 @@
env:
CARGO_INCREMENTAL: 0
CARGO_NET_RETRY: 10
- RUSTFLAGS: "-D warnings -W unreachable-pub"
+ RUSTFLAGS: "-D warnings -W unreachable-pub --cfg no_salsa_async_drops"
RUSTUP_MAX_RETRIES: 10
jobs:
diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml
index b6f430f..2891411 100644
--- a/.github/workflows/release.yaml
+++ b/.github/workflows/release.yaml
@@ -140,7 +140,7 @@
if: matrix.target == 'x86_64-unknown-linux-gnu'
env:
RUSTC_BOOTSTRAP: 1
- run: target/${{ matrix.target }}/release/rust-analyzer analysis-stats --with-deps $(rustc --print sysroot)/lib/rustlib/src/rust/library/std -q
+ run: target/${{ matrix.target }}/release/rust-analyzer analysis-stats --with-deps --no-sysroot --no-test $(rustc --print sysroot)/lib/rustlib/src/rust/library/std -q
- name: Upload artifacts
uses: actions/upload-artifact@v4
diff --git a/Cargo.lock b/Cargo.lock
index 5bc4d9f..4de8d09 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -187,69 +187,21 @@
[[package]]
name = "cargo-platform"
-version = "0.2.0"
+version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "84982c6c0ae343635a3a4ee6dedef965513735c8b183caa7289fa6e27399ebd4"
+checksum = "122ec45a44b270afd1402f351b782c676b173e3c3fb28d86ff7ebfb4d86a4ee4"
dependencies = [
"serde",
]
[[package]]
-name = "cargo-util-schemas"
-version = "0.2.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e63d2780ac94487eb9f1fea7b0d56300abc9eb488800854ca217f102f5caccca"
-dependencies = [
- "semver",
- "serde",
- "serde-untagged",
- "serde-value",
- "thiserror 1.0.69",
- "toml",
- "unicode-xid",
- "url",
-]
-
-[[package]]
-name = "cargo-util-schemas"
-version = "0.8.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7dc1a6f7b5651af85774ae5a34b4e8be397d9cf4bc063b7e6dbd99a841837830"
-dependencies = [
- "semver",
- "serde",
- "serde-untagged",
- "serde-value",
- "thiserror 2.0.16",
- "toml",
- "unicode-xid",
- "url",
-]
-
-[[package]]
name = "cargo_metadata"
-version = "0.20.0"
+version = "0.23.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4f7835cfc6135093070e95eb2b53e5d9b5c403dc3a6be6040ee026270aa82502"
+checksum = "981a6f317983eec002839b90fae7411a85621410ae591a9cab2ecf5cb5744873"
dependencies = [
"camino",
"cargo-platform",
- "cargo-util-schemas 0.2.0",
- "semver",
- "serde",
- "serde_json",
- "thiserror 2.0.16",
-]
-
-[[package]]
-name = "cargo_metadata"
-version = "0.21.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5cfca2aaa699835ba88faf58a06342a314a950d2b9686165e038286c30316868"
-dependencies = [
- "camino",
- "cargo-platform",
- "cargo-util-schemas 0.8.2",
"semver",
"serde",
"serde_json",
@@ -624,17 +576,6 @@
checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f"
[[package]]
-name = "erased-serde"
-version = "0.4.8"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "259d404d09818dec19332e31d94558aeb442fea04c817006456c24b5460bbd4b"
-dependencies = [
- "serde",
- "serde_core",
- "typeid",
-]
-
-[[package]]
name = "errno"
version = "0.3.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -831,7 +772,6 @@
"hir-def",
"hir-expand",
"hir-ty",
- "indexmap",
"intern",
"itertools 0.14.0",
"ra-ap-rustc_type_ir",
@@ -883,7 +823,6 @@
"syntax-bridge",
"test-fixture",
"test-utils",
- "text-size 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"thin-vec",
"tracing",
"triomphe",
@@ -923,7 +862,6 @@
dependencies = [
"arrayvec",
"base-db",
- "bitflags 2.9.4",
"cov-mark",
"either",
"ena",
@@ -949,7 +887,6 @@
"rustc_apfloat",
"salsa",
"salsa-macros",
- "scoped-tls",
"smallvec",
"span",
"stdx",
@@ -1144,7 +1081,6 @@
"expect-test",
"fst",
"hir",
- "indexmap",
"itertools 0.14.0",
"line-index 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
"macros",
@@ -1717,15 +1653,6 @@
checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d"
[[package]]
-name = "ordered-float"
-version = "2.10.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "68f19d67e5a2795c94e73e0bb1cc1a7edeb2e28efd39e2e1c9b7a40c1108b11c"
-dependencies = [
- "num-traits",
-]
-
-[[package]]
name = "parking_lot"
version = "0.12.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1893,6 +1820,7 @@
"indexmap",
"intern",
"paths",
+ "proc-macro-srv",
"rustc-hash 2.1.1",
"serde",
"serde_derive",
@@ -1917,9 +1845,7 @@
"proc-macro-test",
"ra-ap-rustc_lexer",
"span",
- "syntax-bridge",
"temp-dir",
- "tt",
]
[[package]]
@@ -1937,7 +1863,7 @@
name = "proc-macro-test"
version = "0.0.0"
dependencies = [
- "cargo_metadata 0.20.0",
+ "cargo_metadata",
]
[[package]]
@@ -1978,7 +1904,7 @@
dependencies = [
"anyhow",
"base-db",
- "cargo_metadata 0.21.0",
+ "cargo_metadata",
"cfg",
"expect-test",
"intern",
@@ -1993,6 +1919,7 @@
"span",
"stdx",
"temp-dir",
+ "toml",
"toolchain",
"tracing",
"triomphe",
@@ -2092,9 +2019,9 @@
[[package]]
name = "ra-ap-rustc_abi"
-version = "0.137.0"
+version = "0.139.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a4ce5c9ea794353e02beae390c4674f74ffb23a2ad9de763469fdcef5c1026ef"
+checksum = "ce480c45c05462cf6b700468118201b00132613a968a1849da5f7a555c0f1db9"
dependencies = [
"bitflags 2.9.4",
"ra-ap-rustc_hashes",
@@ -2104,24 +2031,24 @@
[[package]]
name = "ra-ap-rustc_ast_ir"
-version = "0.137.0"
+version = "0.139.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1696b77af9bbfe1fcc7a09c907561061c6ef4c8bd6d5f1675b927bc62d349103"
+checksum = "453da2376de406d740ca28412a31ae3d5a6039cd45698c1c2fb01b577dff64ae"
[[package]]
name = "ra-ap-rustc_hashes"
-version = "0.137.0"
+version = "0.139.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c055d8b0d8a592d8cf9547495189f52c1ee5c691d28df1628253a816214e8521"
+checksum = "bf411a55deaa3ea348594c8273fb2d1200265bf87b881b40c62b32f75caf8323"
dependencies = [
"rustc-stable-hash",
]
[[package]]
name = "ra-ap-rustc_index"
-version = "0.137.0"
+version = "0.139.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a08a03e3d4a452144b68f48130eda3a2894d4d79e99ddb44bdb4e0ab8c384e10"
+checksum = "1d0dd4cf1417ea8a809e9e7bf296c6ce6e05b75b043483872d1bd2951a08142c"
dependencies = [
"ra-ap-rustc_index_macros",
"smallvec",
@@ -2129,9 +2056,9 @@
[[package]]
name = "ra-ap-rustc_index_macros"
-version = "0.137.0"
+version = "0.139.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a1e0446b4d65a8ce19d8fd12826c4bf2365ffa4b8fe0ee94daf5968fe36e920c"
+checksum = "a1b0d218fb91f8969716a962142c722d88b3cd3fd1f7ef03093261bf37e85dfd"
dependencies = [
"proc-macro2",
"quote",
@@ -2140,9 +2067,9 @@
[[package]]
name = "ra-ap-rustc_lexer"
-version = "0.137.0"
+version = "0.139.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ac80365383a3c749f38af567fdcfaeff3fa6ea5df3846852abbce73e943921b9"
+checksum = "5ec7c26e92c44d5433b29cf661faf0027e263b70a411d0f28996bd67e3bdb57e"
dependencies = [
"memchr",
"unicode-properties",
@@ -2151,9 +2078,9 @@
[[package]]
name = "ra-ap-rustc_next_trait_solver"
-version = "0.137.0"
+version = "0.139.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a39b419d2d6f7fdec7e0981b7fb7d5beb5dda7140064f1199704ec9dadbb6f73"
+checksum = "029686fdbc8a058cf3d81ad157e1cdc81a37b9de0400289ccb86a62465484313"
dependencies = [
"derive-where",
"ra-ap-rustc_index",
@@ -2164,9 +2091,9 @@
[[package]]
name = "ra-ap-rustc_parse_format"
-version = "0.137.0"
+version = "0.139.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b743b0c8f795842e41b1720bbc5af6e896129fb9acf04e9785774bfb0dc5947c"
+checksum = "509d279f1e87acc33476da3fbd05a6054e9ffeb4427cb38ba01b9d2656aec268"
dependencies = [
"ra-ap-rustc_lexer",
"rustc-literal-escaper 0.0.5",
@@ -2174,9 +2101,9 @@
[[package]]
name = "ra-ap-rustc_pattern_analysis"
-version = "0.137.0"
+version = "0.139.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cf944dce80137195528f89a576f70153c2060a6f8ca49c3fa9f55f9da14ab937"
+checksum = "9bb2c9930854314b03bd7aab060a14bca6f194b76381a4c309e3905ec3a02bbc"
dependencies = [
"ra-ap-rustc_index",
"rustc-hash 2.1.1",
@@ -2187,9 +2114,9 @@
[[package]]
name = "ra-ap-rustc_type_ir"
-version = "0.137.0"
+version = "0.139.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1bfe2722b20bc889a9d7711bd3a1f4f7b082940491241615aa643c17e0deffec"
+checksum = "0e4a92a3e4dbdebb0d4c9caceb52eff45c4df784d21fb2da90dac50e218f95c0"
dependencies = [
"arrayvec",
"bitflags 2.9.4",
@@ -2207,9 +2134,9 @@
[[package]]
name = "ra-ap-rustc_type_ir_macros"
-version = "0.137.0"
+version = "0.139.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6fad1527df26aaa77367393fae86f42818b33e02b3737a19f3846d1c7671e7f9"
+checksum = "ca368eca2472367f2e6fdfb431c8342e99d848e4ce89cb20dd3b3bdcc43cbc28"
dependencies = [
"proc-macro2",
"quote",
@@ -2343,7 +2270,7 @@
dependencies = [
"anyhow",
"base64",
- "cargo_metadata 0.21.0",
+ "cargo_metadata",
"cfg",
"crossbeam-channel",
"dhat",
@@ -2358,14 +2285,12 @@
"ide-db",
"ide-ssr",
"indexmap",
- "intern",
"itertools 0.14.0",
"load-cargo",
"lsp-server 0.7.9 (registry+https://github.com/rust-lang/crates.io-index)",
"lsp-types",
"memchr",
"mimalloc",
- "nohash-hasher",
"num_cpus",
"oorandom",
"parking_lot",
@@ -2559,12 +2484,6 @@
]
[[package]]
-name = "scoped-tls"
-version = "1.0.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e1cf6437eb19a8f4a6cc0f7dca544973b0b78843adbfeb3683d1a94a0024a294"
-
-[[package]]
name = "scopeguard"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2591,28 +2510,6 @@
]
[[package]]
-name = "serde-untagged"
-version = "0.1.9"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f9faf48a4a2d2693be24c6289dbe26552776eb7737074e6722891fadbe6c5058"
-dependencies = [
- "erased-serde",
- "serde",
- "serde_core",
- "typeid",
-]
-
-[[package]]
-name = "serde-value"
-version = "0.7.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f3a1a3341211875ef120e117ea7fd5228530ae7e7036a779fdc9117be6b3282c"
-dependencies = [
- "ordered-float",
- "serde",
-]
-
-[[package]]
name = "serde_core"
version = "1.0.226"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2659,11 +2556,11 @@
[[package]]
name = "serde_spanned"
-version = "0.6.9"
+version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bf41e0cfaf7226dca15e8197172c295a782857fcb97fad1808a166870dee75a3"
+checksum = "e24345aa0fe688594e73770a5f6d1b216508b4f93484c0026d521acd30134392"
dependencies = [
- "serde",
+ "serde_core",
]
[[package]]
@@ -3042,44 +2939,42 @@
[[package]]
name = "toml"
-version = "0.8.23"
+version = "0.9.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dc1beb996b9d83529a9e75c17a1686767d148d70663143c7854d8b4a09ced362"
-dependencies = [
- "serde",
- "serde_spanned",
- "toml_datetime",
- "toml_edit",
-]
-
-[[package]]
-name = "toml_datetime"
-version = "0.6.11"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "22cddaf88f4fbc13c51aebbf5f8eceb5c7c5a9da2ac40a13519eb5b0a0e8f11c"
-dependencies = [
- "serde",
-]
-
-[[package]]
-name = "toml_edit"
-version = "0.22.27"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a"
+checksum = "f0dc8b1fb61449e27716ec0e1bdf0f6b8f3e8f6b05391e8497b8b6d7804ea6d8"
dependencies = [
"indexmap",
- "serde",
+ "serde_core",
"serde_spanned",
"toml_datetime",
- "toml_write",
+ "toml_parser",
+ "toml_writer",
"winnow",
]
[[package]]
-name = "toml_write"
-version = "0.1.2"
+name = "toml_datetime"
+version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5d99f8c9a7727884afe522e9bd5edbfc91a3312b36a77b5fb8926e4c31a41801"
+checksum = "f2cdb639ebbc97961c51720f858597f7f24c4fc295327923af55b74c3c724533"
+dependencies = [
+ "serde_core",
+]
+
+[[package]]
+name = "toml_parser"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c0cbe268d35bdb4bb5a56a2de88d0ad0eb70af5384a99d648cd4b3d04039800e"
+dependencies = [
+ "winnow",
+]
+
+[[package]]
+name = "toml_writer"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "df8b2b54733674ad286d16267dcfc7a71ed5c776e4ac7aa3c3e2561f7c637bf2"
[[package]]
name = "toolchain"
@@ -3181,12 +3076,6 @@
checksum = "6af6ae20167a9ece4bcb41af5b80f8a1f1df981f6391189ce00fd257af04126a"
[[package]]
-name = "typeid"
-version = "1.0.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bc7d623258602320d5c55d1bc22793b57daff0ec7efc270ea7d55ce1d5f5471c"
-
-[[package]]
name = "unarray"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -3654,9 +3543,6 @@
version = "0.7.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "21a0236b59786fed61e2a80582dd500fe61f18b5dca67a4a067d0bc9039339cf"
-dependencies = [
- "memchr",
-]
[[package]]
name = "wit-bindgen"
diff --git a/Cargo.toml b/Cargo.toml
index 6f5ea44..8ff7e0e 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -86,14 +86,14 @@
vfs = { path = "./crates/vfs", version = "0.0.0" }
edition = { path = "./crates/edition", version = "0.0.0" }
-ra-ap-rustc_lexer = { version = "0.137", default-features = false }
-ra-ap-rustc_parse_format = { version = "0.137", default-features = false }
-ra-ap-rustc_index = { version = "0.137", default-features = false }
-ra-ap-rustc_abi = { version = "0.137", default-features = false }
-ra-ap-rustc_pattern_analysis = { version = "0.137", default-features = false }
-ra-ap-rustc_ast_ir = { version = "0.137", default-features = false }
-ra-ap-rustc_type_ir = { version = "0.137", default-features = false }
-ra-ap-rustc_next_trait_solver = { version = "0.137", default-features = false }
+ra-ap-rustc_lexer = { version = "0.139", default-features = false }
+ra-ap-rustc_parse_format = { version = "0.139", default-features = false }
+ra-ap-rustc_index = { version = "0.139", default-features = false }
+ra-ap-rustc_abi = { version = "0.139", default-features = false }
+ra-ap-rustc_pattern_analysis = { version = "0.139", default-features = false }
+ra-ap-rustc_ast_ir = { version = "0.139", default-features = false }
+ra-ap-rustc_type_ir = { version = "0.139", default-features = false }
+ra-ap-rustc_next_trait_solver = { version = "0.139", default-features = false }
# local crates that aren't published to crates.io. These should not have versions.
@@ -106,7 +106,7 @@
anyhow = "1.0.98"
arrayvec = "0.7.6"
bitflags = "2.9.1"
-cargo_metadata = "0.21.0"
+cargo_metadata = "0.23.0"
camino = "1.1.10"
crossbeam-channel = "0.5.15"
dissimilar = "1.0.10"
@@ -134,10 +134,11 @@
rowan = "=0.15.15"
# Ideally we'd not enable the macros feature but unfortunately the `tracked` attribute does not work
# on impls without it
-salsa = { version = "0.24.0", default-features = true, features = [
+salsa = { version = "0.24.0", default-features = false, features = [
"rayon",
"salsa_unstable",
"macros",
+ "inventory",
] }
salsa-macros = "0.24.0"
semver = "1.0.26"
@@ -154,7 +155,8 @@
smol_str = "0.3.2"
temp-dir = "0.1.16"
text-size = "1.1.1"
-tracing = "0.1.41"
+toml = "0.9.8"
+tracing = { version = "0.1.41", default-features = false, features = ["std"] }
tracing-tree = "0.4.0"
tracing-subscriber = { version = "0.3.20", default-features = false, features = [
"registry",
@@ -182,7 +184,7 @@
elided_lifetimes_in_paths = "warn"
explicit_outlives_requirements = "warn"
unsafe_op_in_unsafe_fn = "warn"
-unexpected_cfgs = { level = "warn", check-cfg = ['cfg(bootstrap)'] }
+unexpected_cfgs = { level = "warn", check-cfg = ['cfg(bootstrap)', "cfg(no_salsa_async_drops)"] }
unused_extern_crates = "warn"
unused_lifetimes = "warn"
unreachable_pub = "warn"
diff --git a/crates/base-db/src/input.rs b/crates/base-db/src/input.rs
index cac7477..5149d2d 100644
--- a/crates/base-db/src/input.rs
+++ b/crates/base-db/src/input.rs
@@ -460,6 +460,61 @@
pub env: Env,
}
+impl Crate {
+ /// Returns an iterator over all transitive dependencies of the given crate,
+ /// including the crate itself.
+ ///
+ /// **Warning**: do not use this query in `hir-*` crates! It kills incrementality across crate metadata modifications.
+ pub fn transitive_deps(self, db: &dyn salsa::Database) -> Box<[Crate]> {
+ // There is a bit of duplication here and in `CrateGraphBuilder` in the same method, but it's not terrible
+ // and removing that is a bit difficult.
+ let mut worklist = vec![self];
+ let mut deps_seen = FxHashSet::default();
+ let mut deps = Vec::new();
+
+ while let Some(krate) = worklist.pop() {
+ if !deps_seen.insert(krate) {
+ continue;
+ }
+ deps.push(krate);
+
+ worklist.extend(krate.data(db).dependencies.iter().map(|dep| dep.crate_id));
+ }
+ deps.into_boxed_slice()
+ }
+
+ /// Returns all transitive reverse dependencies of the given crate,
+ /// including the crate itself.
+ ///
+ /// **Warning**: do not use this query in `hir-*` crates! It kills incrementality across crate metadata modifications.
+ pub fn transitive_rev_deps(self, db: &dyn RootQueryDb) -> Box<[Crate]> {
+ let mut worklist = vec![self];
+ let mut rev_deps = FxHashSet::default();
+ rev_deps.insert(self);
+
+ let mut inverted_graph = FxHashMap::<_, Vec<_>>::default();
+ db.all_crates().iter().for_each(|&krate| {
+ krate
+ .data(db)
+ .dependencies
+ .iter()
+ .for_each(|dep| inverted_graph.entry(dep.crate_id).or_default().push(krate))
+ });
+
+ while let Some(krate) = worklist.pop() {
+ if let Some(crate_rev_deps) = inverted_graph.get(&krate) {
+ crate_rev_deps
+ .iter()
+ .copied()
+ .filter(|&rev_dep| rev_deps.insert(rev_dep))
+ .for_each(|rev_dep| worklist.push(rev_dep));
+ }
+ }
+
+ rev_deps.into_iter().collect::<Box<_>>()
+ }
+}
+
/// The mapping from [`UniqueCrateData`] to their [`Crate`] input.
#[derive(Debug, Default)]
pub struct CratesMap(DashMap<UniqueCrateData, Crate, BuildHasherDefault<FxHasher>>);
@@ -802,33 +857,6 @@
}
}
-pub(crate) fn transitive_rev_deps(db: &dyn RootQueryDb, of: Crate) -> FxHashSet<Crate> {
- let mut worklist = vec![of];
- let mut rev_deps = FxHashSet::default();
- rev_deps.insert(of);
-
- let mut inverted_graph = FxHashMap::<_, Vec<_>>::default();
- db.all_crates().iter().for_each(|&krate| {
- krate
- .data(db)
- .dependencies
- .iter()
- .for_each(|dep| inverted_graph.entry(dep.crate_id).or_default().push(krate))
- });
-
- while let Some(krate) = worklist.pop() {
- if let Some(crate_rev_deps) = inverted_graph.get(&krate) {
- crate_rev_deps
- .iter()
- .copied()
- .filter(|&rev_dep| rev_deps.insert(rev_dep))
- .for_each(|rev_dep| worklist.push(rev_dep));
- }
- }
-
- rev_deps
-}
-
impl BuiltCrateData {
pub fn root_file_id(&self, db: &dyn salsa::Database) -> EditionedFileId {
EditionedFileId::new(db, self.root_file_id, self.edition)
@@ -867,6 +895,10 @@
pub fn insert(&mut self, k: impl Into<String>, v: impl Into<String>) -> Option<String> {
self.entries.insert(k.into(), v.into())
}
+
+ pub fn contains_key(&self, arg: &str) -> bool {
+ self.entries.contains_key(arg)
+ }
}
impl From<Env> for Vec<(String, String)> {
diff --git a/crates/base-db/src/lib.rs b/crates/base-db/src/lib.rs
index 0e411bc..3629a00 100644
--- a/crates/base-db/src/lib.rs
+++ b/crates/base-db/src/lib.rs
@@ -26,7 +26,7 @@
};
use dashmap::{DashMap, mapref::entry::Entry};
pub use query_group::{self};
-use rustc_hash::{FxHashSet, FxHasher};
+use rustc_hash::FxHasher;
use salsa::{Durability, Setter};
pub use semver::{BuildMetadata, Prerelease, Version, VersionReq};
use span::Edition;
@@ -256,38 +256,6 @@
/// **Warning**: do not use this query in `hir-*` crates! It kills incrementality across crate metadata modifications.
#[salsa::input]
fn all_crates(&self) -> Arc<Box<[Crate]>>;
-
- /// Returns an iterator over all transitive dependencies of the given crate,
- /// including the crate itself.
- ///
- /// **Warning**: do not use this query in `hir-*` crates! It kills incrementality across crate metadata modifications.
- #[salsa::transparent]
- fn transitive_deps(&self, crate_id: Crate) -> FxHashSet<Crate>;
-
- /// Returns all transitive reverse dependencies of the given crate,
- /// including the crate itself.
- ///
- /// **Warning**: do not use this query in `hir-*` crates! It kills incrementality across crate metadata modifications.
- #[salsa::invoke(input::transitive_rev_deps)]
- #[salsa::transparent]
- fn transitive_rev_deps(&self, of: Crate) -> FxHashSet<Crate>;
-}
-
-pub fn transitive_deps(db: &dyn SourceDatabase, crate_id: Crate) -> FxHashSet<Crate> {
- // There is a bit of duplication here and in `CrateGraphBuilder` in the same method, but it's not terrible
- // and removing that is a bit difficult.
- let mut worklist = vec![crate_id];
- let mut deps = FxHashSet::default();
-
- while let Some(krate) = worklist.pop() {
- if !deps.insert(krate) {
- continue;
- }
-
- worklist.extend(krate.data(db).dependencies.iter().map(|dep| dep.crate_id));
- }
-
- deps
}
#[salsa_macros::db]
diff --git a/crates/hir-def/Cargo.toml b/crates/hir-def/Cargo.toml
index abb4819..e174ca5 100644
--- a/crates/hir-def/Cargo.toml
+++ b/crates/hir-def/Cargo.toml
@@ -23,11 +23,10 @@
itertools.workspace = true
la-arena.workspace = true
rustc-hash.workspace = true
-tracing.workspace = true
+tracing = { workspace = true, features = ["attributes"] }
smallvec.workspace = true
triomphe.workspace = true
rustc_apfloat = "0.2.3"
-text-size.workspace = true
salsa.workspace = true
salsa-macros.workspace = true
query-group.workspace = true
diff --git a/crates/hir-def/src/db.rs b/crates/hir-def/src/db.rs
index 4e1d598..925a078 100644
--- a/crates/hir-def/src/db.rs
+++ b/crates/hir-def/src/db.rs
@@ -273,10 +273,9 @@
// endregion:visibilities
- #[salsa::invoke(crate::lang_item::notable_traits_in_deps)]
- fn notable_traits_in_deps(&self, krate: Crate) -> Arc<[Arc<[TraitId]>]>;
#[salsa::invoke(crate::lang_item::crate_notable_traits)]
- fn crate_notable_traits(&self, krate: Crate) -> Option<Arc<[TraitId]>>;
+ #[salsa::transparent]
+ fn crate_notable_traits(&self, krate: Crate) -> Option<&[TraitId]>;
#[salsa::invoke(crate_supports_no_std)]
fn crate_supports_no_std(&self, crate_id: Crate) -> bool;
diff --git a/crates/hir-def/src/expr_store/lower.rs b/crates/hir-def/src/expr_store/lower.rs
index 3794cb1..e3bfc5b 100644
--- a/crates/hir-def/src/expr_store/lower.rs
+++ b/crates/hir-def/src/expr_store/lower.rs
@@ -2409,7 +2409,11 @@
};
let start = range_part_lower(p.start());
let end = range_part_lower(p.end());
- Pat::Range { start, end }
+ // FIXME: Exclusive ended pattern range is stabilised
+ match p.op_kind() {
+ Some(range_type) => Pat::Range { start, end, range_type },
+ None => Pat::Missing,
+ }
}
};
let ptr = AstPtr::new(&pat);
diff --git a/crates/hir-def/src/expr_store/pretty.rs b/crates/hir-def/src/expr_store/pretty.rs
index 5b9da3c..4ba7093 100644
--- a/crates/hir-def/src/expr_store/pretty.rs
+++ b/crates/hir-def/src/expr_store/pretty.rs
@@ -9,7 +9,7 @@
use hir_expand::{Lookup, mod_path::PathKind};
use itertools::Itertools;
use span::Edition;
-use syntax::ast::HasName;
+use syntax::ast::{HasName, RangeOp};
use crate::{
AdtId, DefWithBodyId, GenericDefId, TypeParamId, VariantId,
@@ -510,7 +510,22 @@
}
fn print_expr(&mut self, expr: ExprId) {
+ self.print_expr_in(None, expr);
+ }
+
+ fn print_expr_in(&mut self, prec: Option<ast::prec::ExprPrecedence>, expr: ExprId) {
let expr = &self.store[expr];
+ let needs_parens = match (prec, expr.precedence()) {
+ (Some(ast::prec::ExprPrecedence::LOr), ast::prec::ExprPrecedence::LOr) => false,
+ (Some(ast::prec::ExprPrecedence::LAnd), ast::prec::ExprPrecedence::LAnd) => false,
+ (Some(parent), prec) => prec.needs_parentheses_in(parent),
+ (None, _) => false,
+ };
+ let prec = Some(expr.precedence());
+
+ if needs_parens {
+ w!(self, "(");
+ }
match expr {
Expr::Missing => w!(self, "�"),
@@ -544,7 +559,7 @@
w!(self, "let ");
self.print_pat(*pat);
w!(self, " = ");
- self.print_expr(*expr);
+ self.print_expr_in(prec, *expr);
}
Expr::Loop { body, label } => {
if let Some(lbl) = label {
@@ -554,7 +569,7 @@
self.print_expr(*body);
}
Expr::Call { callee, args } => {
- self.print_expr(*callee);
+ self.print_expr_in(prec, *callee);
w!(self, "(");
if !args.is_empty() {
self.indented(|p| {
@@ -567,7 +582,7 @@
w!(self, ")");
}
Expr::MethodCall { receiver, method_name, args, generic_args } => {
- self.print_expr(*receiver);
+ self.print_expr_in(prec, *receiver);
w!(self, ".{}", method_name.display(self.db, self.edition));
if let Some(args) = generic_args {
w!(self, "::<");
@@ -616,26 +631,26 @@
}
if let Some(expr) = expr {
self.whitespace();
- self.print_expr(*expr);
+ self.print_expr_in(prec, *expr);
}
}
Expr::Return { expr } => {
w!(self, "return");
if let Some(expr) = expr {
self.whitespace();
- self.print_expr(*expr);
+ self.print_expr_in(prec, *expr);
}
}
Expr::Become { expr } => {
w!(self, "become");
self.whitespace();
- self.print_expr(*expr);
+ self.print_expr_in(prec, *expr);
}
Expr::Yield { expr } => {
w!(self, "yield");
if let Some(expr) = expr {
self.whitespace();
- self.print_expr(*expr);
+ self.print_expr_in(prec, *expr);
}
}
Expr::Yeet { expr } => {
@@ -644,7 +659,7 @@
w!(self, "yeet");
if let Some(expr) = expr {
self.whitespace();
- self.print_expr(*expr);
+ self.print_expr_in(prec, *expr);
}
}
Expr::RecordLit { path, fields, spread } => {
@@ -670,15 +685,15 @@
w!(self, "}}");
}
Expr::Field { expr, name } => {
- self.print_expr(*expr);
+ self.print_expr_in(prec, *expr);
w!(self, ".{}", name.display(self.db, self.edition));
}
Expr::Await { expr } => {
- self.print_expr(*expr);
+ self.print_expr_in(prec, *expr);
w!(self, ".await");
}
Expr::Cast { expr, type_ref } => {
- self.print_expr(*expr);
+ self.print_expr_in(prec, *expr);
w!(self, " as ");
self.print_type_ref(*type_ref);
}
@@ -690,11 +705,11 @@
if mutability.is_mut() {
w!(self, "mut ");
}
- self.print_expr(*expr);
+ self.print_expr_in(prec, *expr);
}
Expr::Box { expr } => {
w!(self, "box ");
- self.print_expr(*expr);
+ self.print_expr_in(prec, *expr);
}
Expr::UnaryOp { expr, op } => {
let op = match op {
@@ -703,43 +718,32 @@
ast::UnaryOp::Neg => "-",
};
w!(self, "{}", op);
- self.print_expr(*expr);
+ self.print_expr_in(prec, *expr);
}
Expr::BinaryOp { lhs, rhs, op } => {
- let (bra, ket) = match op {
- None | Some(ast::BinaryOp::Assignment { .. }) => ("", ""),
- _ => ("(", ")"),
- };
- w!(self, "{}", bra);
- self.print_expr(*lhs);
- w!(self, "{} ", ket);
+ self.print_expr_in(prec, *lhs);
+ self.whitespace();
match op {
Some(op) => w!(self, "{}", op),
None => w!(self, "�"), // :)
}
- w!(self, " {}", bra);
- self.print_expr(*rhs);
- w!(self, "{}", ket);
+ self.whitespace();
+ self.print_expr_in(prec, *rhs);
}
Expr::Range { lhs, rhs, range_type } => {
if let Some(lhs) = lhs {
- w!(self, "(");
- self.print_expr(*lhs);
- w!(self, ") ");
+ self.print_expr_in(prec, *lhs);
}
- let range = match range_type {
- ast::RangeOp::Exclusive => "..",
- ast::RangeOp::Inclusive => "..=",
+ match range_type {
+ RangeOp::Exclusive => w!(self, ".."),
+ RangeOp::Inclusive => w!(self, "..="),
};
- w!(self, "{}", range);
if let Some(rhs) = rhs {
- w!(self, "(");
- self.print_expr(*rhs);
- w!(self, ") ");
+ self.print_expr_in(prec, *rhs);
}
}
Expr::Index { base, index } => {
- self.print_expr(*base);
+ self.print_expr_in(prec, *base);
w!(self, "[");
self.print_expr(*index);
w!(self, "]");
@@ -826,9 +830,13 @@
&Expr::Assignment { target, value } => {
self.print_pat(target);
w!(self, " = ");
- self.print_expr(value);
+ self.print_expr_in(prec, value);
}
}
+
+ if needs_parens {
+ w!(self, ")");
+ }
}
fn print_block(
@@ -857,6 +865,7 @@
}
fn print_pat(&mut self, pat: PatId) {
+ let prec = Some(ast::prec::ExprPrecedence::Shift);
let pat = &self.store[pat];
match pat {
@@ -928,13 +937,16 @@
});
w!(self, "}}");
}
- Pat::Range { start, end } => {
+ Pat::Range { start, end, range_type } => {
if let Some(start) = start {
- self.print_expr(*start);
+ self.print_expr_in(prec, *start);
}
- w!(self, "..=");
+ match range_type {
+ RangeOp::Inclusive => w!(self, "..="),
+ RangeOp::Exclusive => w!(self, ".."),
+ }
if let Some(end) = end {
- self.print_expr(*end);
+ self.print_expr_in(prec, *end);
}
}
Pat::Slice { prefix, slice, suffix } => {
@@ -954,7 +966,7 @@
w!(self, "]");
}
Pat::Path(path) => self.print_path(path),
- Pat::Lit(expr) => self.print_expr(*expr),
+ Pat::Lit(expr) => self.print_expr_in(prec, *expr),
Pat::Bind { id, subpat } => {
self.print_binding(*id);
if let Some(pat) = subpat {
@@ -996,7 +1008,7 @@
self.print_expr(*c);
}
Pat::Expr(expr) => {
- self.print_expr(*expr);
+ self.print_expr_in(prec, *expr);
}
}
}
@@ -1181,7 +1193,9 @@
pub(crate) fn print_generic_arg(&mut self, arg: &GenericArg) {
match arg {
GenericArg::Type(ty) => self.print_type_ref(*ty),
- GenericArg::Const(ConstRef { expr }) => self.print_expr(*expr),
+ GenericArg::Const(ConstRef { expr }) => {
+ self.print_expr_in(Some(ast::prec::ExprPrecedence::Unambiguous), *expr)
+ }
GenericArg::Lifetime(lt) => self.print_lifetime_ref(*lt),
}
}
diff --git a/crates/hir-def/src/expr_store/tests/body.rs b/crates/hir-def/src/expr_store/tests/body.rs
index c31428b..4a77556 100644
--- a/crates/hir-def/src/expr_store/tests/body.rs
+++ b/crates/hir-def/src/expr_store/tests/body.rs
@@ -159,7 +159,7 @@
expect![[r#"
fn main() {
match builtin#lang(into_iter)(
- (0) ..(10) ,
+ 0..10,
) {
mut <ra@gennew>11 => loop {
match builtin#lang(next)(
@@ -580,7 +580,7 @@
let MatchArm { pat, .. } = mtch_arms[1];
match body[pat] {
- Pat::Range { start, end } => {
+ Pat::Range { start, end, range_type: _ } => {
let hir_start = &body[start.unwrap()];
let hir_end = &body[end.unwrap()];
@@ -590,3 +590,30 @@
_ => {}
}
}
+
+#[test]
+fn print_hir_precedences() {
+ let (db, body, def) = lower(
+ r#"
+fn main() {
+ _ = &(1 - (2 - 3) + 4 * 5 * (6 + 7));
+ _ = 1 + 2 < 3 && true && 4 < 5 && (a || b || c) || d && e;
+ if let _ = 2 && true && let _ = 3 {}
+ break a && b || (return) || (return 2);
+ let r = &2;
+ let _ = &mut (*r as i32)
+}
+"#,
+ );
+
+ expect![[r#"
+ fn main() {
+ _ = &((1 - (2 - 3)) + (4 * 5) * (6 + 7));
+ _ = 1 + 2 < 3 && true && 4 < 5 && (a || b || c) || d && e;
+ if let _ = 2 && true && let _ = 3 {}
+ break a && b || (return) || (return 2);
+ let r = &2;
+ let _ = &mut (*r as i32);
+ }"#]]
+ .assert_eq(&body.pretty_print(&db, def, Edition::CURRENT))
+}
diff --git a/crates/hir-def/src/expr_store/tests/body/block.rs b/crates/hir-def/src/expr_store/tests/body/block.rs
index c770737..e8334cd 100644
--- a/crates/hir-def/src/expr_store/tests/body/block.rs
+++ b/crates/hir-def/src/expr_store/tests/body/block.rs
@@ -12,12 +12,12 @@
}
"#,
expect![[r#"
- block scope
- inner: v
+ (block scope)
+ - inner : value
crate
- inner: t
- outer: v
+ - inner : type
+ - outer : value
"#]],
);
}
@@ -37,16 +37,16 @@
}
"#,
expect![[r#"
- block scope
- CrateStruct: ti
- PlainStruct: ti vi
- SelfStruct: ti
- Struct: v
- SuperStruct: _
+ (block scope)
+ - CrateStruct : type (import)
+ - PlainStruct : type (import) value (import)
+ - SelfStruct : type (import)
+ - Struct : value
+ - SuperStruct : _
crate
- Struct: t
- outer: v
+ - Struct : type
+ - outer : value
"#]],
);
}
@@ -65,13 +65,13 @@
}
"#,
expect![[r#"
- block scope
- imported: ti vi
- name: v
+ (block scope)
+ - imported : type (import) value (import)
+ - name : value
crate
- name: t
- outer: v
+ - name : type
+ - outer : value
"#]],
);
}
@@ -91,17 +91,17 @@
}
"#,
expect![[r#"
- block scope
- inner1: ti
- inner2: v
- outer: vi
+ (block scope)
+ - inner1 : type (import)
+ - inner2 : value
+ - outer : value (import)
- block scope
- inner: v
- inner1: t
+ (block scope)
+ - inner : value
+ - inner1 : type
crate
- outer: v
+ - outer : value
"#]],
);
}
@@ -120,15 +120,15 @@
struct Struct {}
"#,
expect![[r#"
- block scope
- Struct: ti
+ (block scope)
+ - Struct : type (import)
crate
- Struct: t
- module: t
+ - Struct : type
+ - module : type
crate::module
- f: v
+ - f : value
"#]],
);
}
@@ -152,24 +152,24 @@
}
"#,
expect![[r#"
- block scope
- ResolveMe: ti
+ (block scope)
+ - ResolveMe : type (import)
- block scope
- m2: t
+ (block scope)
+ - m2 : type
- block scope::m2
- inner: v
+ (block scope)::m2
+ - inner : value
- block scope
- m: t
+ (block scope)
+ - m : type
- block scope::m
- ResolveMe: t
- middle: v
+ (block scope)::m
+ - ResolveMe : type
+ - middle : value
crate
- outer: v
+ - outer : value
"#]],
);
}
@@ -213,21 +213,21 @@
}
"#,
expect![[r#"
- block scope
- ResolveMe: ti
+ (block scope)
+ - ResolveMe : type (import)
- block scope
- h: v
+ (block scope)
+ - h : value
- block scope
- m: t
+ (block scope)
+ - m : type
- block scope::m
- ResolveMe: t
- g: v
+ (block scope)::m
+ - ResolveMe : type
+ - g : value
crate
- f: v
+ - f : value
"#]],
);
}
@@ -250,11 +250,12 @@
}
"#,
expect![[r#"
- block scope
- Hit: t
+ (block scope)
+ - Hit : type
crate
- f: v
+ - f : value
+ - (legacy) mark : macro!
"#]],
);
}
@@ -285,15 +286,15 @@
}
"#,
expect![[r#"
- block scope
- Hit: t
+ (block scope)
+ - Hit : type
- block scope
- nested: v
+ (block scope)
+ - nested : value
crate
- cov_mark: ti
- f: v
+ - cov_mark : type (import)
+ - f : value
"#]],
);
}
@@ -318,16 +319,18 @@
}
"#,
expect![[r#"
- block scope
- module: t
+ (block scope)
+ - module : type
- block scope::module
- BarWorks: t v
- FooWorks: t v
+ (block scope)::module
+ - BarWorks : type value
+ - FooWorks : type value
crate
- foo: m
- main: v
+ - foo : macro!
+ - main : value
+ - (legacy) bar : macro!
+ - (legacy) foo : macro!
"#]],
);
}
@@ -354,14 +357,15 @@
}
"#,
expect![[r#"
- block scope
- Def: t
+ (block scope)
+ - Def : type
crate
- module: t
+ - module : type
crate::module
- f: v
+ - f : value
+ - (legacy) m : macro!
"#]],
)
}
@@ -380,16 +384,16 @@
}
"#,
expect![[r#"
- block scope
- Struct: t
- module: t
+ (block scope)
+ - Struct : type
+ - module : type
- block scope::module
- Struct: _
+ (block scope)::module
+ - Struct : _
- crate
- main: v
- "#]],
+ crate
+ - main : value
+ "#]],
);
}
@@ -408,16 +412,16 @@
}
"#,
expect![[r#"
- block scope
- _: t
- Tr: t
+ (block scope)
+ - _ : type
+ - Tr : type
- crate
- m: t
+ crate
+ - m : type
- crate::m
- main: v
- "#]],
+ crate::m
+ - main : value
+ "#]],
);
}
@@ -444,11 +448,13 @@
}
"#,
expect![[r#"
- block scope
- bar: v
+ (block scope)
+ - bar : value
crate
- foo: v
+ - foo : value
+ - (legacy) declare : macro!
+ - (legacy) inner_declare : macro!
"#]],
)
}
@@ -467,16 +473,16 @@
}
"#,
expect![[r#"
- block scope
- name: _
- tests: t
+ (block scope)
+ - name : _
+ - tests : type
- block scope::tests
- name: _
- outer: vg
+ (block scope)::tests
+ - name : _
+ - outer : value (glob)
crate
- outer: v
+ - outer : value
"#]],
);
}
@@ -496,11 +502,12 @@
}
"#,
expect![[r#"
- block scope
- inner: v
+ (block scope)
+ - inner : value
crate
- foo: v
+ - foo : value
+ - (legacy) mac : macro!
"#]],
)
}
@@ -517,12 +524,13 @@
};
"#,
expect![[r#"
- block scope
- BAR: v
- FOO: v
+ (block scope)
+ - BAR : value
+ - FOO : value
crate
- f: v
+ - f : value
+ - (legacy) foo : macro!
"#]],
)
}
@@ -543,14 +551,14 @@
pub struct S;
"#,
expect![[r#"
- block scope
- f: t
+ (block scope)
+ - f : type
- block scope::f
- S: ti vi
+ (block scope)::f
+ - S : type (import) value (import)
crate
- main: v
+ - main : value
"#]],
)
}
@@ -573,18 +581,18 @@
pub const S;
"#,
expect![[r#"
- block scope
- S: ti vi
- inner: v
+ (block scope)
+ - S : type (import) value (import)
+ - inner : value
- block scope
- core: t
+ (block scope)
+ - core : type
- block scope::core
- S: t v
+ (block scope)::core
+ - S : type value
crate
- main: v
+ - main : value
"#]],
)
}
diff --git a/crates/hir-def/src/hir.rs b/crates/hir-def/src/hir.rs
index e70cd2c..66eade2 100644
--- a/crates/hir-def/src/hir.rs
+++ b/crates/hir-def/src/hir.rs
@@ -322,6 +322,72 @@
InlineAsm(InlineAsm),
}
+impl Expr {
+ pub fn precedence(&self) -> ast::prec::ExprPrecedence {
+ use ast::prec::ExprPrecedence;
+
+ match self {
+ Expr::Array(_)
+ | Expr::InlineAsm(_)
+ | Expr::Block { .. }
+ | Expr::Unsafe { .. }
+ | Expr::Const(_)
+ | Expr::Async { .. }
+ | Expr::If { .. }
+ | Expr::Literal(_)
+ | Expr::Loop { .. }
+ | Expr::Match { .. }
+ | Expr::Missing
+ | Expr::Path(_)
+ | Expr::RecordLit { .. }
+ | Expr::Tuple { .. }
+ | Expr::OffsetOf(_)
+ | Expr::Underscore => ExprPrecedence::Unambiguous,
+
+ Expr::Await { .. }
+ | Expr::Call { .. }
+ | Expr::Field { .. }
+ | Expr::Index { .. }
+ | Expr::MethodCall { .. } => ExprPrecedence::Postfix,
+
+ Expr::Box { .. } | Expr::Let { .. } | Expr::UnaryOp { .. } | Expr::Ref { .. } => {
+ ExprPrecedence::Prefix
+ }
+
+ Expr::Cast { .. } => ExprPrecedence::Cast,
+
+ Expr::BinaryOp { op, .. } => match op {
+ None => ExprPrecedence::Unambiguous,
+ Some(BinaryOp::LogicOp(LogicOp::Or)) => ExprPrecedence::LOr,
+ Some(BinaryOp::LogicOp(LogicOp::And)) => ExprPrecedence::LAnd,
+ Some(BinaryOp::CmpOp(_)) => ExprPrecedence::Compare,
+ Some(BinaryOp::Assignment { .. }) => ExprPrecedence::Assign,
+ Some(BinaryOp::ArithOp(arith_op)) => match arith_op {
+ ArithOp::Add | ArithOp::Sub => ExprPrecedence::Sum,
+ ArithOp::Mul | ArithOp::Div | ArithOp::Rem => ExprPrecedence::Product,
+ ArithOp::Shl | ArithOp::Shr => ExprPrecedence::Shift,
+ ArithOp::BitXor => ExprPrecedence::BitXor,
+ ArithOp::BitOr => ExprPrecedence::BitOr,
+ ArithOp::BitAnd => ExprPrecedence::BitAnd,
+ },
+ },
+
+ Expr::Assignment { .. } => ExprPrecedence::Assign,
+
+ Expr::Become { .. }
+ | Expr::Break { .. }
+ | Expr::Closure { .. }
+ | Expr::Return { .. }
+ | Expr::Yeet { .. }
+ | Expr::Yield { .. } => ExprPrecedence::Jump,
+
+ Expr::Continue { .. } => ExprPrecedence::Unambiguous,
+
+ Expr::Range { .. } => ExprPrecedence::Range,
+ }
+ }
+}
+
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct OffsetOf {
pub container: TypeRefId,
@@ -595,6 +661,7 @@
Range {
start: Option<ExprId>,
end: Option<ExprId>,
+ range_type: RangeOp,
},
Slice {
prefix: Box<[PatId]>,
diff --git a/crates/hir-def/src/item_scope.rs b/crates/hir-def/src/item_scope.rs
index 77ed664..51c42c9 100644
--- a/crates/hir-def/src/item_scope.rs
+++ b/crates/hir-def/src/item_scope.rs
@@ -1,10 +1,10 @@
//! Describes items defined or visible (ie, imported) in a certain scope.
//! This is shared between modules and blocks.
-use std::sync::LazyLock;
+use std::{fmt, sync::LazyLock};
use base_db::Crate;
-use hir_expand::{AstId, MacroCallId, attrs::AttrId, db::ExpandDatabase, name::Name};
+use hir_expand::{AstId, MacroCallId, attrs::AttrId, name::Name};
use indexmap::map::Entry;
use itertools::Itertools;
use la_arena::Idx;
@@ -19,6 +19,7 @@
AdtId, BuiltinType, ConstId, ExternBlockId, ExternCrateId, FxIndexMap, HasModule, ImplId,
LocalModuleId, Lookup, MacroId, ModuleDefId, ModuleId, TraitId, UseId,
db::DefDatabase,
+ nameres::MacroSubNs,
per_ns::{Item, MacrosItem, PerNs, TypesItem, ValuesItem},
visibility::Visibility,
};
@@ -735,40 +736,47 @@
}
}
- pub(crate) fn dump(&self, db: &dyn ExpandDatabase, buf: &mut String) {
+ pub(crate) fn dump(&self, db: &dyn DefDatabase, buf: &mut String) {
let mut entries: Vec<_> = self.resolutions().collect();
entries.sort_by_key(|(name, _)| name.clone());
+ let print_macro_sub_ns =
+ |buf: &mut String, macro_id: MacroId| match MacroSubNs::from_id(db, macro_id) {
+ MacroSubNs::Bang => buf.push('!'),
+ MacroSubNs::Attr => buf.push('#'),
+ };
+
for (name, def) in entries {
- format_to!(
- buf,
- "{}:",
- name.map_or("_".to_owned(), |name| name.display(db, Edition::LATEST).to_string())
- );
+ let display_name: &dyn fmt::Display = match &name {
+ Some(name) => &name.display(db, Edition::LATEST),
+ None => &"_",
+ };
+ format_to!(buf, "- {display_name} :");
if let Some(Item { import, .. }) = def.types {
- buf.push_str(" t");
+ buf.push_str(" type");
match import {
- Some(ImportOrExternCrate::Import(_)) => buf.push('i'),
- Some(ImportOrExternCrate::Glob(_)) => buf.push('g'),
- Some(ImportOrExternCrate::ExternCrate(_)) => buf.push('e'),
+ Some(ImportOrExternCrate::Import(_)) => buf.push_str(" (import)"),
+ Some(ImportOrExternCrate::Glob(_)) => buf.push_str(" (glob)"),
+ Some(ImportOrExternCrate::ExternCrate(_)) => buf.push_str(" (extern)"),
None => (),
}
}
if let Some(Item { import, .. }) = def.values {
- buf.push_str(" v");
+ buf.push_str(" value");
match import {
- Some(ImportOrGlob::Import(_)) => buf.push('i'),
- Some(ImportOrGlob::Glob(_)) => buf.push('g'),
+ Some(ImportOrGlob::Import(_)) => buf.push_str(" (import)"),
+ Some(ImportOrGlob::Glob(_)) => buf.push_str(" (glob)"),
None => (),
}
}
- if let Some(Item { import, .. }) = def.macros {
- buf.push_str(" m");
+ if let Some(Item { def: macro_id, import, .. }) = def.macros {
+ buf.push_str(" macro");
+ print_macro_sub_ns(buf, macro_id);
match import {
- Some(ImportOrExternCrate::Import(_)) => buf.push('i'),
- Some(ImportOrExternCrate::Glob(_)) => buf.push('g'),
- Some(ImportOrExternCrate::ExternCrate(_)) => buf.push('e'),
+ Some(ImportOrExternCrate::Import(_)) => buf.push_str(" (import)"),
+ Some(ImportOrExternCrate::Glob(_)) => buf.push_str(" (glob)"),
+ Some(ImportOrExternCrate::ExternCrate(_)) => buf.push_str(" (extern)"),
None => (),
}
}
@@ -778,6 +786,21 @@
buf.push('\n');
}
+
+ // Also dump legacy-textual-scope macros visible at the _end_ of the scope.
+ //
+ // For tests involving a cursor position, this might include macros that
+ // are _not_ visible at the cursor position.
+ let mut legacy_macros = self.legacy_macros().collect::<Vec<_>>();
+ legacy_macros.sort_by(|(a, _), (b, _)| Ord::cmp(a, b));
+ for (name, macros) in legacy_macros {
+ format_to!(buf, "- (legacy) {} :", name.display(db, Edition::LATEST));
+ for ¯o_id in macros {
+ buf.push_str(" macro");
+ print_macro_sub_ns(buf, macro_id);
+ }
+ buf.push('\n');
+ }
}
pub(crate) fn shrink_to_fit(&mut self) {
diff --git a/crates/hir-def/src/lang_item.rs b/crates/hir-def/src/lang_item.rs
index df0705b..91a90f6 100644
--- a/crates/hir-def/src/lang_item.rs
+++ b/crates/hir-def/src/lang_item.rs
@@ -5,7 +5,6 @@
use hir_expand::name::Name;
use intern::{Symbol, sym};
use rustc_hash::FxHashMap;
-use triomphe::Arc;
use crate::{
AdtId, AssocItemId, AttrDefId, Crate, EnumId, EnumVariantId, FunctionId, ImplId, ModuleDefId,
@@ -223,16 +222,8 @@
db.attrs(item).lang_item()
}
-pub(crate) fn notable_traits_in_deps(db: &dyn DefDatabase, krate: Crate) -> Arc<[Arc<[TraitId]>]> {
- let _p = tracing::info_span!("notable_traits_in_deps", ?krate).entered();
- Arc::from_iter(
- db.transitive_deps(krate).into_iter().filter_map(|krate| db.crate_notable_traits(krate)),
- )
-}
-
-pub(crate) fn crate_notable_traits(db: &dyn DefDatabase, krate: Crate) -> Option<Arc<[TraitId]>> {
- let _p = tracing::info_span!("crate_notable_traits", ?krate).entered();
-
+#[salsa::tracked(returns(as_deref))]
+pub(crate) fn crate_notable_traits(db: &dyn DefDatabase, krate: Crate) -> Option<Box<[TraitId]>> {
let mut traits = Vec::new();
let crate_def_map = crate_def_map(db, krate);
diff --git a/crates/hir-def/src/nameres.rs b/crates/hir-def/src/nameres.rs
index 7d5e627..f44187e 100644
--- a/crates/hir-def/src/nameres.rs
+++ b/crates/hir-def/src/nameres.rs
@@ -602,7 +602,7 @@
let mut arc;
let mut current_map = self;
while let Some(block) = current_map.block {
- go(&mut buf, db, current_map, "block scope", Self::ROOT);
+ go(&mut buf, db, current_map, "(block scope)", Self::ROOT);
buf.push('\n');
arc = block.parent.def_map(db, self.krate);
current_map = arc;
@@ -814,7 +814,7 @@
}
impl MacroSubNs {
- fn from_id(db: &dyn DefDatabase, macro_id: MacroId) -> Self {
+ pub(crate) fn from_id(db: &dyn DefDatabase, macro_id: MacroId) -> Self {
let expander = match macro_id {
MacroId::Macro2Id(it) => it.lookup(db).expander,
MacroId::MacroRulesId(it) => it.lookup(db).expander,
diff --git a/crates/hir-def/src/nameres/tests.rs b/crates/hir-def/src/nameres/tests.rs
index 4a7974c..23d60d5 100644
--- a/crates/hir-def/src/nameres/tests.rs
+++ b/crates/hir-def/src/nameres/tests.rs
@@ -61,22 +61,22 @@
"#,
expect![[r#"
crate
- E: _
- S: t v
- V: _
- foo: t
+ - E : _
+ - S : type value
+ - V : _
+ - foo : type
crate::foo
- bar: t
- f: v
+ - bar : type
+ - f : value
crate::foo::bar
- Baz: t v
- E: t
- EXT: v
- Ext: t
- U: t
- ext: v
+ - Baz : type value
+ - E : type
+ - EXT : value
+ - Ext : type
+ - U : type
+ - ext : value
"#]],
);
}
@@ -97,19 +97,19 @@
"#,
expect![[r#"
crate
- a: t
+ - a : type
crate::a
- A: v
- b: t
+ - A : value
+ - b : type
crate::a::b
- B: v
- c: t
+ - B : value
+ - c : type
crate::a::b::c
- A: vg
- b: tg
+ - A : value (glob)
+ - b : type (glob)
"#]],
);
}
@@ -125,10 +125,10 @@
"#,
expect![[r#"
crate
- m: t
+ - m : type
crate::m
- z: t v
+ - z : type value
crate::m::z
"#]],
@@ -151,8 +151,8 @@
"#,
expect![[r#"
crate
- S: t v
- foo: t
+ - S : type value
+ - foo : type
crate::foo
"#]],
@@ -172,11 +172,11 @@
"#,
expect![[r#"
crate
- Foo: ti vi
- foo: t
+ - Foo : type (import) value (import)
+ - foo : type
crate::foo
- Baz: t v
+ - Baz : type value
"#]],
);
}
@@ -198,16 +198,16 @@
"#,
expect![[r#"
crate
- Baz: ti vi
- Quux: ti
- foo: t
+ - Baz : type (import) value (import)
+ - Quux : type (import)
+ - foo : type
crate::foo
- bar: t
+ - bar : type
crate::foo::bar
- Baz: t v
- Quux: t
+ - Baz : type value
+ - Quux : type
"#]],
);
}
@@ -229,15 +229,15 @@
"#,
expect![[r#"
crate
- Baz: ti vi
- foo: t
+ - Baz : type (import) value (import)
+ - foo : type
crate::foo
- Baz: ti vi
- bar: t
+ - Baz : type (import) value (import)
+ - bar : type
crate::foo::bar
- Baz: t v
+ - Baz : type value
"#]],
);
}
@@ -261,8 +261,8 @@
"#,
expect![[r#"
crate
- Bar: tg vg
- Baz: tg vg
+ - Bar : type (glob) value (glob)
+ - Baz : type (glob) value (glob)
"#]],
);
}
@@ -277,8 +277,8 @@
"#,
expect![[r#"
crate
- E: t
- V: ti vi
+ - E : type
+ - V : type (import) value (import)
"#]],
);
}
@@ -303,15 +303,15 @@
"#,
expect![[r#"
crate
- bar: t
- foo: t
+ - bar : type
+ - foo : type
crate::bar
- Bar: t v
+ - Bar : type value
crate::foo
- Bar: _
- FromLib: ti vi
+ - Bar : _
+ - FromLib : type (import) value (import)
"#]],
);
}
@@ -332,14 +332,14 @@
"#,
expect![[r#"
crate
- Baz: ti
- foo: t
+ - Baz : type (import)
+ - foo : type
crate::foo
- bar: t
+ - bar : type
crate::foo::bar
- Baz: t v
+ - Baz : type value
"#]],
);
}
@@ -356,7 +356,7 @@
"#,
expect![[r#"
crate
- Baz: ti vi
+ - Baz : type (import) value (import)
"#]],
);
}
@@ -378,14 +378,14 @@
"#,
expect![[r#"
crate
- alloc: t
- alloc_crate: te
- sync: t
+ - alloc : type
+ - alloc_crate : type (extern)
+ - sync : type
crate::alloc
crate::sync
- Arc: ti vi
+ - Arc : type (import) value (import)
"#]],
);
}
@@ -426,12 +426,12 @@
"#,
expect![[r#"
crate
- Exported: tg vg
- PublicItem: tg vg
- allowed_reexport: tg
- exported: tg
- not_allowed_reexport1: _
- not_allowed_reexport2: _
+ - Exported : type (glob) value (glob)
+ - PublicItem : type (glob) value (glob)
+ - allowed_reexport : type (glob)
+ - exported : type (glob)
+ - not_allowed_reexport1 : _
+ - not_allowed_reexport2 : _
"#]],
);
}
@@ -453,14 +453,14 @@
"#,
expect![[r#"
crate
- alloc: t
- alloc_crate: te
- sync: t
+ - alloc : type
+ - alloc_crate : type (extern)
+ - sync : type
crate::alloc
crate::sync
- Arc: ti vi
+ - Arc : type (import) value (import)
"#]],
);
}
@@ -475,7 +475,7 @@
"#,
expect![[r#"
crate
- bla: te
+ - bla : type (extern)
"#]],
);
}
@@ -496,7 +496,7 @@
"#,
expect![[r#"
crate
- Baz: ti vi
+ - Baz : type (import) value (import)
"#]],
);
}
@@ -514,8 +514,8 @@
"#,
expect![[r#"
crate
- Bar: ti vi
- foo: v
+ - Bar : type (import) value (import)
+ - foo : value
"#]],
);
}
@@ -542,7 +542,7 @@
"#,
expect![[r#"
crate
- Rust: ti vi
+ - Rust : type (import) value (import)
"#]],
);
}
@@ -566,7 +566,7 @@
"#,
expect![[r#"
crate
- Rust2018: ti vi
+ - Rust2018 : type (import) value (import)
"#]],
);
check(
@@ -583,7 +583,7 @@
"#,
expect![[r#"
crate
- Rust2021: ti vi
+ - Rust2021 : type (import) value (import)
"#]],
);
}
@@ -612,8 +612,8 @@
"#,
expect![[r#"
crate
- Bar: ti vi
- Foo: ti vi
+ - Bar : type (import) value (import)
+ - Foo : type (import) value (import)
"#]],
);
}
@@ -639,9 +639,9 @@
"#,
expect![[r#"
crate
- Bar: ti vi
- Baz: _
- Foo: _
+ - Bar : type (import) value (import)
+ - Baz : _
+ - Foo : _
"#]],
);
}
@@ -667,9 +667,9 @@
"#,
expect![[r#"
crate
- Bar: _
- Baz: ti vi
- Foo: ti vi
+ - Bar : _
+ - Baz : type (import) value (import)
+ - Foo : type (import) value (import)
"#]],
);
}
@@ -692,15 +692,15 @@
"#,
expect![[r#"
crate
- T: ti vi
- a: t
- b: t
+ - T : type (import) value (import)
+ - a : type
+ - b : type
crate::a
- T: t vg
+ - T : type value (glob)
crate::b
- T: v
+ - T : value
"#]],
);
}
@@ -720,13 +720,13 @@
"#,
expect![[r#"
crate
- _: t
- _: t
- tr: t
+ - _ : type
+ - _ : type
+ - tr : type
crate::tr
- Tr: t
- Tr2: t
+ - Tr : type
+ - Tr2 : type
"#]],
);
}
@@ -748,17 +748,17 @@
"#,
expect![[r#"
crate
- _: t
- reex: t
- tr: t
+ - _ : type
+ - reex : type
+ - tr : type
crate::reex
- _: t
- _: t
+ - _ : type
+ - _ : type
crate::tr
- PrivTr: t
- PubTr: t
+ - PrivTr : type
+ - PubTr : type
"#]],
);
}
@@ -781,7 +781,7 @@
"#,
expect![[r#"
crate
- _: t
+ - _ : type
"#]],
);
}
@@ -800,12 +800,12 @@
"#,
expect![[r#"
crate
- m: t
+ - m : type
crate::m
- CONST: v
- Enum: t
- Struct: t v
+ - CONST : value
+ - Enum : type
+ - Struct : type value
"#]],
);
}
@@ -825,12 +825,12 @@
"#,
expect![[r#"
crate
- _: t
- Tr: t v
- tr: t
+ - _ : type
+ - Tr : type value
+ - tr : type
crate::tr
- Tr: t
+ - Tr : type
"#]],
);
}
@@ -864,9 +864,9 @@
"#,
expect![[r#"
crate
- bar: v
- baz: vi
- foo: ti
+ - bar : value
+ - baz : value (import)
+ - foo : type (import)
"#]],
);
}
@@ -885,11 +885,11 @@
"#,
expect![[r#"
crate
- S: ti
- m: t
+ - S : type (import)
+ - m : type
crate::m
- S: t v m
+ - S : type value macro!
"#]],
);
}
@@ -909,8 +909,8 @@
"#,
expect![[r#"
crate
- Settings: ti vi
- settings: vi
+ - Settings : type (import) value (import)
+ - settings : value (import)
"#]],
)
}
@@ -926,7 +926,7 @@
"#,
expect![[r#"
crate
- Struct: _
+ - Struct : _
"#]],
);
check(
@@ -939,8 +939,8 @@
"#,
expect![[r#"
crate
- Struct: ti vi
- dep: te
+ - Struct : type (import) value (import)
+ - dep : type (extern)
"#]],
);
}
@@ -964,18 +964,18 @@
"#,
expect![[r#"
crate
- other_module: t
- some_module: t
- unknown_func: vi
+ - other_module : type
+ - some_module : type
+ - unknown_func : value (import)
crate::other_module
- some_submodule: t
+ - some_submodule : type
crate::other_module::some_submodule
- unknown_func: vi
+ - unknown_func : value (import)
crate::some_module
- unknown_func: v
+ - unknown_func : value
"#]],
)
}
diff --git a/crates/hir-def/src/nameres/tests/globs.rs b/crates/hir-def/src/nameres/tests/globs.rs
index ddb9d4a..62887e2 100644
--- a/crates/hir-def/src/nameres/tests/globs.rs
+++ b/crates/hir-def/src/nameres/tests/globs.rs
@@ -18,18 +18,18 @@
"#,
expect![[r#"
crate
- Baz: tg vg
- Foo: tg vg
- bar: tg
- foo: t
+ - Baz : type (glob) value (glob)
+ - Foo : type (glob) value (glob)
+ - bar : type (glob)
+ - foo : type
crate::foo
- Baz: ti vi
- Foo: t v
- bar: t
+ - Baz : type (import) value (import)
+ - Foo : type value
+ - bar : type
crate::foo::bar
- Baz: t v
+ - Baz : type value
"#]],
);
}
@@ -53,20 +53,20 @@
"#,
expect![[r#"
crate
- Baz: tg vg
- Foo: tg vg
- bar: tg
- foo: t
+ - Baz : type (glob) value (glob)
+ - Foo : type (glob) value (glob)
+ - bar : type (glob)
+ - foo : type
crate::foo
- Baz: tg vg
- Foo: t v
- bar: t
+ - Baz : type (glob) value (glob)
+ - Foo : type value
+ - bar : type
crate::foo::bar
- Baz: t v
- Foo: tg vg
- bar: tg
+ - Baz : type value
+ - Foo : type (glob) value (glob)
+ - bar : type (glob)
"#]],
);
}
@@ -91,20 +91,20 @@
",
expect![[r#"
crate
- Baz: tg vg
- bar: tg
- foo: t
+ - Baz : type (glob) value (glob)
+ - bar : type (glob)
+ - foo : type
crate::foo
- Baz: tg vg
- PrivateStructFoo: t v
- bar: t
+ - Baz : type (glob) value (glob)
+ - PrivateStructFoo : type value
+ - bar : type
crate::foo::bar
- Baz: t v
- PrivateStructBar: t v
- PrivateStructFoo: tg vg
- bar: tg
+ - Baz : type value
+ - PrivateStructBar : type value
+ - PrivateStructFoo : type (glob) value (glob)
+ - bar : type (glob)
"#]],
);
}
@@ -130,19 +130,19 @@
",
expect![[r#"
crate
- Foo: tg
- PubCrateStruct: tg vg
- bar: tg
- foo: t
+ - Foo : type (glob)
+ - PubCrateStruct : type (glob) value (glob)
+ - bar : type (glob)
+ - foo : type
crate::foo
- Foo: t v
- bar: t
+ - Foo : type value
+ - bar : type
crate::foo::bar
- PrivateBar: t v
- PrivateBaz: t v
- PubCrateStruct: t v
+ - PrivateBar : type value
+ - PrivateBaz : type value
+ - PubCrateStruct : type value
"#]],
);
}
@@ -160,7 +160,7 @@
"#,
expect![[r#"
crate
- Baz: tg vg
+ - Baz : type (glob) value (glob)
"#]],
);
}
@@ -178,7 +178,7 @@
"#,
expect![[r#"
crate
- Baz: tg vg
+ - Baz : type (glob) value (glob)
"#]],
);
}
@@ -193,9 +193,9 @@
"#,
expect![[r#"
crate
- Bar: tg vg
- Baz: tg vg
- Foo: t
+ - Bar : type (glob) value (glob)
+ - Baz : type (glob) value (glob)
+ - Foo : type
"#]],
);
}
@@ -210,9 +210,9 @@
"#,
expect![[r#"
crate
- Bar: tg vg
- Baz: tg vg
- Foo: t
+ - Bar : type (glob) value (glob)
+ - Baz : type (glob) value (glob)
+ - Foo : type
"#]],
);
}
@@ -237,22 +237,22 @@
"#,
expect![[r#"
crate
- Bar: ti vi
- bar: t
- baz: ti
- foo: t
+ - Bar : type (import) value (import)
+ - bar : type
+ - baz : type (import)
+ - foo : type
crate::bar
- baz: t
+ - baz : type
crate::bar::baz
- Bar: t v
+ - Bar : type value
crate::foo
- baz: t
+ - baz : type
crate::foo::baz
- Foo: t v
+ - Foo : type value
"#]],
);
}
@@ -276,22 +276,22 @@
"#,
expect![[r#"
crate
- Bar: ti vi
- bar: t
- baz: ti
- foo: t
+ - Bar : type (import) value (import)
+ - bar : type
+ - baz : type (import)
+ - foo : type
crate::bar
- baz: t
+ - baz : type
crate::bar::baz
- Bar: t v
+ - Bar : type value
crate::foo
- baz: t
+ - baz : type
crate::foo::baz
- Foo: t v
+ - Foo : type value
"#]],
);
}
@@ -311,29 +311,29 @@
"#,
expect![[r#"
crate
- a: t
- b: t
- c: t
- d: t
+ - a : type
+ - b : type
+ - c : type
+ - d : type
crate::a
- foo: t
+ - foo : type
crate::a::foo
- X: t v
+ - X : type value
crate::b
- foo: ti
+ - foo : type (import)
crate::c
- foo: t
+ - foo : type
crate::c::foo
- Y: t v
+ - Y : type value
crate::d
- Y: ti vi
- foo: ti
+ - Y : type (import) value (import)
+ - foo : type (import)
"#]],
);
}
@@ -355,15 +355,15 @@
"#,
expect![[r#"
crate
- Event: ti
- event: t
+ - Event : type (import)
+ - event : type
crate::event
- Event: t vg
- serenity: t
+ - Event : type value (glob)
+ - serenity : type
crate::event::serenity
- Event: v
+ - Event : value
"#]],
);
}
@@ -388,27 +388,27 @@
"#,
expect![[r#"
crate
- Trait: tg
- defs: t
- function: vg
- makro: mg
- reexport: t
+ - Trait : type (glob)
+ - defs : type
+ - function : value (glob)
+ - makro : macro! (glob)
+ - reexport : type
crate::defs
- Trait: t
- function: v
- makro: m
+ - Trait : type
+ - function : value
+ - makro : macro!
crate::reexport
- Trait: tg
- function: vg
- inner: t
- makro: mg
+ - Trait : type (glob)
+ - function : value (glob)
+ - inner : type
+ - makro : macro! (glob)
crate::reexport::inner
- Trait: ti
- function: vi
- makro: mi
+ - Trait : type (import)
+ - function : value (import)
+ - makro : macro! (import)
"#]],
);
}
@@ -435,19 +435,19 @@
"#,
expect![[r#"
crate
- glob_target: t
- outer: t
+ - glob_target : type
+ - outer : type
crate::glob_target
- ShouldBePrivate: t v
+ - ShouldBePrivate : type value
crate::outer
- ShouldBePrivate: tg vg
- inner_superglob: t
+ - ShouldBePrivate : type (glob) value (glob)
+ - inner_superglob : type
crate::outer::inner_superglob
- ShouldBePrivate: tg vg
- inner_superglob: tg
+ - ShouldBePrivate : type (glob) value (glob)
+ - inner_superglob : type (glob)
"#]],
);
}
@@ -473,20 +473,20 @@
"#,
expect![[r#"
crate
- Placeholder: tg vg
- libs: t
- reexport_1: tg
- reexport_2: t
+ - Placeholder : type (glob) value (glob)
+ - libs : type
+ - reexport_1 : type (glob)
+ - reexport_2 : type
crate::libs
- Placeholder: t v
+ - Placeholder : type value
crate::reexport_2
- Placeholder: tg vg
- reexport_1: t
+ - Placeholder : type (glob) value (glob)
+ - reexport_1 : type
crate::reexport_2::reexport_1
- Placeholder: tg vg
+ - Placeholder : type (glob) value (glob)
"#]],
);
}
diff --git a/crates/hir-def/src/nameres/tests/macros.rs b/crates/hir-def/src/nameres/tests/macros.rs
index 3cba88e..43b6e12 100644
--- a/crates/hir-def/src/nameres/tests/macros.rs
+++ b/crates/hir-def/src/nameres/tests/macros.rs
@@ -23,12 +23,14 @@
"#,
expect![[r#"
crate
- Foo: t
- nested: t
+ - Foo : type
+ - nested : type
+ - (legacy) structs : macro!
crate::nested
- Bar: t
- Baz: t
+ - Bar : type
+ - Baz : type
+ - (legacy) structs : macro!
"#]],
);
}
@@ -53,20 +55,25 @@
"#,
expect![[r#"
crate
- m: t
- n1: t
+ - m : type
+ - n1 : type
+ - (legacy) m : macro!
crate::m
- n3: t
+ - n3 : type
+ - (legacy) m : macro!
crate::m::n3
- Y: t v
+ - Y : type value
+ - (legacy) m : macro!
crate::n1
- n2: t
+ - n2 : type
+ - (legacy) m : macro!
crate::n1::n2
- X: t v
+ - X : type value
+ - (legacy) m : macro!
"#]],
);
}
@@ -92,14 +99,14 @@
"#,
expect![[r#"
crate
- Bar: t
- Foo: t
- bar: t
+ - Bar : type
+ - Foo : type
+ - bar : type
crate::bar
- Bar: tg
- Foo: tg
- bar: tg
+ - Bar : type (glob)
+ - Foo : type (glob)
+ - bar : type (glob)
"#]],
);
}
@@ -125,14 +132,14 @@
"#,
expect![[r#"
crate
- Bar: t
- Foo: t
- bar: t
+ - Bar : type
+ - Foo : type
+ - bar : type
crate::bar
- Bar: tg
- Foo: tg
- bar: tg
+ - Bar : type (glob)
+ - Foo : type (glob)
+ - bar : type (glob)
"#]],
);
}
@@ -164,14 +171,14 @@
"#,
expect![[r#"
crate
- Bar: t
- Foo: t
- bar: t
+ - Bar : type
+ - Foo : type
+ - bar : type
crate::bar
- Bar: tg
- Foo: tg
- bar: tg
+ - Bar : type (glob)
+ - Foo : type (glob)
+ - bar : type (glob)
"#]],
);
}
@@ -206,9 +213,10 @@
"#,
expect![[r#"
crate
- Foo: t
- bar: mi
- foo: mi
+ - Foo : type
+ - bar : macro! (import)
+ - foo : macro! (import)
+ - (legacy) baz : macro!
"#]],
);
}
@@ -252,13 +260,13 @@
"#,
expect![[r#"
crate
- Bar: t v
- Foo: t v
- bar: t
- foo: te
+ - Bar : type value
+ - Foo : type value
+ - bar : type
+ - foo : type (extern)
crate::bar
- Baz: t v
+ - Baz : type value
"#]],
);
}
@@ -316,15 +324,15 @@
"#,
expect![[r#"
crate
- OkBar1: t v
- OkBar2: t v
- OkBar3: t v
- OkBaz1: t v
- OkBaz2: t v
- OkBaz3: t v
- all: te
- empty: te
- multiple: te
+ - OkBar1 : type value
+ - OkBar2 : type value
+ - OkBar3 : type value
+ - OkBaz1 : type value
+ - OkBaz2 : type value
+ - OkBaz3 : type value
+ - all : type (extern)
+ - empty : type (extern)
+ - multiple : type (extern)
"#]],
);
}
@@ -370,13 +378,13 @@
"#,
expect![[r#"
crate
- Bar: t v
- Foo: t v
- Out: t v
- bar: t
+ - Bar : type value
+ - Foo : type value
+ - Out : type value
+ - bar : type
crate::bar
- Baz: t v
+ - Baz : type value
"#]],
);
}
@@ -398,9 +406,10 @@
"#,
expect![[r#"
crate
- prelude: t
+ - prelude : type
crate::prelude
+ - (legacy) declare_mod : macro!
"#]],
);
}
@@ -419,7 +428,8 @@
"#,
expect![[r#"
crate
- S: t v
+ - S : type value
+ - (legacy) m : macro!
"#]],
);
// FIXME: should not expand. legacy macro scoping is not implemented.
@@ -499,40 +509,55 @@
"#,
expect![[r#"
crate
- NotFoundBefore: t v
- Ok: t v
- OkAfter: t v
- OkShadowStop: t v
- m1: t
- m2: t
- m3: t
- m5: t
- m7: t
- ok_double_macro_use_shadow: v
+ - NotFoundBefore : type value
+ - Ok : type value
+ - OkAfter : type value
+ - OkShadowStop : type value
+ - m1 : type
+ - m2 : type
+ - m3 : type
+ - m5 : type
+ - m7 : type
+ - ok_double_macro_use_shadow : value
+ - (legacy) baz : macro!
+ - (legacy) foo : macro! macro! macro!
crate::m1
+ - (legacy) bar : macro!
crate::m2
crate::m3
- OkAfterInside: t v
- OkMacroUse: t v
- OkMacroUseInner: t v
- m4: t
- m5: t
- ok_shadow: v
+ - OkAfterInside : type value
+ - OkMacroUse : type value
+ - OkMacroUseInner : type value
+ - m4 : type
+ - m5 : type
+ - ok_shadow : value
+ - (legacy) bar : macro! macro!
+ - (legacy) baz : macro!
+ - (legacy) foo : macro! macro! macro! macro!
crate::m3::m4
- ok_shadow_deep: v
+ - ok_shadow_deep : value
+ - (legacy) bar : macro!
+ - (legacy) foo : macro! macro!
crate::m3::m5
+ - (legacy) bar : macro!
+ - (legacy) baz : macro!
+ - (legacy) foo : macro! macro! macro!
crate::m5
- m6: t
+ - m6 : type
+ - (legacy) foo : macro! macro!
crate::m5::m6
+ - (legacy) foo : macro! macro!
crate::m7
+ - (legacy) baz : macro!
+ - (legacy) foo : macro! macro!
"#]],
);
// FIXME: should not see `NotFoundBefore`
@@ -555,9 +580,10 @@
"#,
expect![[r#"
crate
- bar: ti mi
- baz: ti v mi
- foo: t m
+ - bar : type (import) macro! (import)
+ - baz : type (import) value macro! (import)
+ - foo : type macro!
+ - (legacy) foo : macro!
"#]],
);
}
@@ -585,9 +611,9 @@
"#,
expect![[r#"
crate
- Alias: t v
- Direct: t v
- foo: te
+ - Alias : type value
+ - Direct : type value
+ - foo : type (extern)
"#]],
);
}
@@ -623,19 +649,22 @@
"#,
expect![[r#"
crate
- OkAliasCrate: t v
- OkAliasPlain: t v
- OkAliasSuper: t v
- OkCrate: t v
- OkPlain: t v
- bar: m
- m: t
+ - OkAliasCrate : type value
+ - OkAliasPlain : type value
+ - OkAliasSuper : type value
+ - OkCrate : type value
+ - OkPlain : type value
+ - bar : macro!
+ - m : type
+ - (legacy) foo : macro!
crate::m
- alias1: mi
- alias2: mi
- alias3: mi
- not_found: _
+ - alias1 : macro! (import)
+ - alias2 : macro! (import)
+ - alias3 : macro! (import)
+ - not_found : _
+ - (legacy) bar : macro!
+ - (legacy) foo : macro!
"#]],
);
}
@@ -686,14 +715,16 @@
"#,
expect![[r#"
crate
- Bar: ti vi
- Baz: ti vi
- Foo: t v
- FooSelf: ti vi
- foo: te
- m: t
+ - Bar : type (import) value (import)
+ - Baz : type (import) value (import)
+ - Foo : type value
+ - FooSelf : type (import) value (import)
+ - foo : type (extern)
+ - m : type
+ - (legacy) current : macro!
crate::m
+ - (legacy) current : macro!
"#]],
);
}
@@ -729,7 +760,7 @@
"#,
expect![[r#"
crate
- bar: ti vi
+ - bar : type (import) value (import)
"#]],
);
}
@@ -794,7 +825,7 @@
"#,
expect![[r#"
crate
- Clone: tg mg
+ - Clone : type (glob) macro# (glob)
"#]],
);
}
@@ -842,11 +873,11 @@
"#,
expect![[r#"
crate
- Foo: t v
- submod: t
+ - Foo : type value
+ - submod : type
crate::submod
- Bar: t v
+ - Bar : type value
"#]],
);
}
@@ -863,9 +894,9 @@
}
"#,
expect![[r#"
- crate
- f: v
- "#]],
+ crate
+ - f : value
+ "#]],
);
}
@@ -883,7 +914,8 @@
"#,
expect![[r#"
crate
- S: v
+ - S : value
+ - (legacy) m : macro!
"#]],
);
}
@@ -909,8 +941,8 @@
"#,
expect![[r#"
crate
- S: t v
- derive: m
+ - S : type value
+ - derive : macro#
"#]],
);
}
@@ -932,7 +964,7 @@
"#,
expect![[r#"
crate
- E: t
+ - E : type
"#]],
);
}
@@ -947,7 +979,7 @@
"#,
expect![[r#"
crate
- S: t v
+ - S : type value
"#]],
);
}
@@ -975,6 +1007,8 @@
"#,
expect![[r#"
crate
+ - (legacy) a : macro!
+ - (legacy) b : macro!
"#]],
);
}
@@ -995,7 +1029,9 @@
"#,
expect![[r#"
crate
- S: t
+ - S : type
+ - (legacy) indirect_macro : macro!
+ - (legacy) item : macro!
"#]],
);
}
@@ -1029,13 +1065,13 @@
"#,
expect![[r#"
crate
- AnotherTrait: m
- DummyTrait: m
- TokenStream: t v
- attribute_macro: v m
- derive_macro: v
- derive_macro_2: v
- function_like_macro: v m
+ - AnotherTrait : macro#
+ - DummyTrait : macro#
+ - TokenStream : type value
+ - attribute_macro : value macro#
+ - derive_macro : value
+ - derive_macro_2 : value
+ - function_like_macro : value macro!
"#]],
);
}
@@ -1075,9 +1111,9 @@
"#,
expect![[r#"
crate
- DummyTrait: mg
- attribute_macro: mg
- function_like_macro: mg
+ - DummyTrait : macro# (glob)
+ - attribute_macro : macro# (glob)
+ - function_like_macro : macro! (glob)
"#]],
);
}
@@ -1119,8 +1155,8 @@
"#,
expect![[r#"
crate
- Foo: t
- structs: m
+ - Foo : type
+ - structs : macro!
"#]],
);
}
@@ -1143,7 +1179,7 @@
"#,
expect![[r#"
crate
- S: t v
+ - S : type value
"#]],
)
}
@@ -1161,6 +1197,7 @@
"#,
expect![[r#"
crate
+ - (legacy) m : macro!
"#]],
)
}
@@ -1193,12 +1230,15 @@
struct B;
"#,
expect![[r#"
- crate
- A: t v
- B: t v
- inner_a: m
- inner_b: m
- "#]],
+ crate
+ - A : type value
+ - B : type value
+ - inner_a : macro!
+ - inner_b : macro!
+ - (legacy) include : macro!
+ - (legacy) inner_a : macro!
+ - (legacy) inner_b : macro!
+ "#]],
);
}
@@ -1227,8 +1267,11 @@
"#,
expect![[r#"
crate
- A: t v
- inner: m
+ - A : type value
+ - inner : macro!
+ - (legacy) include : macro!
+ - (legacy) inner : macro!
+ - (legacy) m : macro!
"#]],
);
// eager -> MBE -> $crate::mbe
@@ -1256,8 +1299,11 @@
"#,
expect![[r#"
crate
- A: t v
- inner: m
+ - A : type value
+ - inner : macro!
+ - (legacy) include : macro!
+ - (legacy) inner : macro!
+ - (legacy) n : macro!
"#]],
);
}
@@ -1292,20 +1338,20 @@
"#,
expect![[r#"
crate
- company_name: t
+ - company_name : type
crate::company_name
- network: t
+ - network : type
crate::company_name::network
- v1: t
+ - v1 : type
crate::company_name::network::v1
- IpAddress: t
- ip_address: t
+ - IpAddress : type
+ - ip_address : type
crate::company_name::network::v1::ip_address
- IpType: t
+ - IpType : type
"#]],
);
}
@@ -1338,20 +1384,20 @@
"#,
expect![[r#"
crate
- company_name: t
+ - company_name : type
crate::company_name
- network: t
+ - network : type
crate::company_name::network
- v1: t
+ - v1 : type
crate::company_name::network::v1
- IpAddress: t
- ip_address: t
+ - IpAddress : type
+ - ip_address : type
crate::company_name::network::v1::ip_address
- IpType: t
+ - IpType : type
"#]],
);
}
@@ -1392,30 +1438,38 @@
"#,
expect![[r#"
crate
- nested: t
+ - nested : type
+ - (legacy) include : macro!
crate::nested
- company_name: t
- different_company: t
- util: t
+ - company_name : type
+ - different_company : type
+ - util : type
+ - (legacy) include : macro!
crate::nested::company_name
- network: t
+ - network : type
+ - (legacy) include : macro!
crate::nested::company_name::network
- v1: t
+ - v1 : type
+ - (legacy) include : macro!
crate::nested::company_name::network::v1
- IpAddress: t
+ - IpAddress : type
+ - (legacy) include : macro!
crate::nested::different_company
- network: t
+ - network : type
+ - (legacy) include : macro!
crate::nested::different_company::network
- Url: t
+ - Url : type
+ - (legacy) include : macro!
crate::nested::util
- Helper: t
+ - Helper : type
+ - (legacy) include : macro!
"#]],
);
}
@@ -1500,11 +1554,11 @@
"#,
expect![[r#"
crate
- Ok: t v
- bar: m
- dep: te
- foo: m
- ok: v
+ - Ok : type value
+ - bar : macro!
+ - dep : type (extern)
+ - foo : macro!
+ - ok : value
"#]],
);
}
@@ -1533,11 +1587,13 @@
"#,
expect![[r#"
crate
- a: t
- lib: te
+ - a : type
+ - lib : type (extern)
+ - (legacy) foo : macro!
crate::a
- Ok: t v
+ - Ok : type value
+ - (legacy) foo : macro!
"#]],
);
}
@@ -1588,10 +1644,10 @@
"#,
expect![[r#"
crate
- Ok: t v
- bar: mi
- foo: mi
- ok: v
+ - Ok : type value
+ - bar : macro# (import)
+ - foo : macro# (import)
+ - ok : value
"#]],
);
}
diff --git a/crates/hir-def/src/nameres/tests/mod_resolution.rs b/crates/hir-def/src/nameres/tests/mod_resolution.rs
index 9c97e42..e54bcc2 100644
--- a/crates/hir-def/src/nameres/tests/mod_resolution.rs
+++ b/crates/hir-def/src/nameres/tests/mod_resolution.rs
@@ -18,8 +18,8 @@
",
expect![[r#"
crate
- Baz: _
- foo: t
+ - Baz : _
+ - foo : type
crate::foo
"#]],
@@ -41,13 +41,13 @@
"#,
expect![[r#"
crate
- n1: t
+ - n1 : type
crate::n1
- n2: t
+ - n2 : type
crate::n1::n2
- X: t v
+ - X : type value
"#]],
);
}
@@ -76,22 +76,22 @@
"#,
expect![[r#"
crate
- iter: t
- prelude: t
+ - iter : type
+ - prelude : type
crate::iter
- Iterator: ti
- traits: t
+ - Iterator : type (import)
+ - traits : type
crate::iter::traits
- Iterator: ti
- iterator: t
+ - Iterator : type (import)
+ - iterator : type
crate::iter::traits::iterator
- Iterator: t
+ - Iterator : type
crate::prelude
- Iterator: ti
+ - Iterator : type (import)
"#]],
);
}
@@ -109,11 +109,11 @@
"#,
expect![[r#"
crate
- Bar: ti vi
- foo: t
+ - Bar : type (import) value (import)
+ - foo : type
crate::foo
- Bar: t v
+ - Bar : type value
"#]],
);
}
@@ -139,19 +139,19 @@
"#,
expect![[r#"
crate
- Bar: ti vi
- r#async: t
+ - Bar : type (import) value (import)
+ - r#async : type
crate::r#async
- Bar: t v
- r#async: t
- foo: t
+ - Bar : type value
+ - r#async : type
+ - foo : type
crate::r#async::r#async
- Baz: t v
+ - Baz : type value
crate::r#async::foo
- Foo: t v
+ - Foo : type value
"#]],
);
}
@@ -176,19 +176,19 @@
"#,
expect![[r#"
crate
- Bar: ti vi
- Foo: ti vi
- r#async: t
+ - Bar : type (import) value (import)
+ - Foo : type (import) value (import)
+ - r#async : type
crate::r#async
- a: t
- r#async: t
+ - a : type
+ - r#async : type
crate::r#async::a
- Foo: t v
+ - Foo : type value
crate::r#async::r#async
- Bar: t v
+ - Bar : type value
"#]],
);
}
@@ -207,11 +207,11 @@
"#,
expect![[r#"
crate
- Bar: ti vi
- foo: t
+ - Bar : type (import) value (import)
+ - foo : type
crate::foo
- Bar: t v
+ - Bar : type value
"#]],
);
}
@@ -233,14 +233,14 @@
"#,
expect![[r#"
crate
- foo: t
+ - foo : type
crate::foo
- Baz: ti vi
- bar: t
+ - Baz : type (import) value (import)
+ - bar : type
crate::foo::bar
- Baz: t v
+ - Baz : type value
"#]],
);
}
@@ -262,14 +262,14 @@
"#,
expect![[r#"
crate
- foo: t
+ - foo : type
crate::foo
- Baz: ti vi
- bar: t
+ - Baz : type (import) value (import)
+ - bar : type
crate::foo::bar
- Baz: t v
+ - Baz : type value
"#]],
);
}
@@ -288,11 +288,11 @@
"#,
expect![[r#"
crate
- Baz: t v
- foo: t
+ - Baz : type value
+ - foo : type
crate::foo
- Baz: ti vi
+ - Baz : type (import) value (import)
"#]],
);
}
@@ -310,10 +310,10 @@
"#,
expect![[r#"
crate
- foo: t
+ - foo : type
crate::foo
- Baz: t v
+ - Baz : type value
"#]],
);
}
@@ -334,13 +334,13 @@
"#,
expect![[r#"
crate
- foo: t
+ - foo : type
crate::foo
- foo_bar: t
+ - foo_bar : type
crate::foo::foo_bar
- Baz: t v
+ - Baz : type value
"#]],
);
}
@@ -361,13 +361,13 @@
"#,
expect![[r#"
crate
- foo: t
+ - foo : type
crate::foo
- foo_bar: t
+ - foo_bar : type
crate::foo::foo_bar
- Baz: t v
+ - Baz : type value
"#]],
);
}
@@ -388,14 +388,14 @@
"#,
expect![[r#"
crate
- foo: t
+ - foo : type
crate::foo
- bar: t
+ - bar : type
crate::foo::bar
- Baz: t v
-"#]],
+ - Baz : type value
+ "#]],
);
}
@@ -412,10 +412,10 @@
"#,
expect![[r#"
crate
- foo: t
+ - foo : type
crate::foo
- Baz: t v
+ - Baz : type value
"#]],
);
}
@@ -433,10 +433,10 @@
"#,
expect![[r#"
crate
- foo: t
+ - foo : type
crate::foo
- Baz: t v
+ - Baz : type value
"#]],
);
}
@@ -454,13 +454,13 @@
"#,
expect![[r#"
crate
- foo: t
+ - foo : type
crate::foo
- bar: t
+ - bar : type
crate::foo::bar
- Baz: t v
+ - Baz : type value
"#]],
);
}
@@ -477,13 +477,13 @@
"#,
expect![[r#"
crate
- foo: t
+ - foo : type
crate::foo
- bar: t
+ - bar : type
crate::foo::bar
- Baz: t v
+ - Baz : type value
"#]],
);
}
@@ -501,13 +501,13 @@
"#,
expect![[r#"
crate
- foo: t
+ - foo : type
crate::foo
- bar: t
+ - bar : type
crate::foo::bar
- Baz: t v
+ - Baz : type value
"#]],
);
}
@@ -528,13 +528,13 @@
"#,
expect![[r#"
crate
- foo: t
+ - foo : type
crate::foo
- bar: t
+ - bar : type
crate::foo::bar
- Baz: t v
+ - Baz : type value
"#]],
);
}
@@ -555,13 +555,13 @@
"#,
expect![[r#"
crate
- foo: t
+ - foo : type
crate::foo
- bar: t
+ - bar : type
crate::foo::bar
- Baz: t v
+ - Baz : type value
"#]],
);
}
@@ -579,7 +579,7 @@
"#,
expect![[r#"
crate
- foo: t
+ - foo : type
crate::foo
"#]],
@@ -599,13 +599,13 @@
"#,
expect![[r#"
crate
- foo: t
+ - foo : type
crate::foo
- bar: t
+ - bar : type
crate::foo::bar
- Baz: t v
+ - Baz : type value
"#]],
);
}
@@ -626,14 +626,14 @@
"#,
expect![[r#"
crate
- Baz: ti vi
- foo: t
+ - Baz : type (import) value (import)
+ - foo : type
crate::foo
- bar: t
+ - bar : type
crate::foo::bar
- Baz: t v
+ - Baz : type value
"#]],
);
}
@@ -657,17 +657,17 @@
"#,
expect![[r#"
crate
- foo: t
+ - foo : type
crate::foo
- Baz: ti vi
- bar: t
+ - Baz : type (import) value (import)
+ - bar : type
crate::foo::bar
- baz: t
+ - baz : type
crate::foo::bar::baz
- Baz: t v
+ - Baz : type value
"#]],
);
}
@@ -691,17 +691,17 @@
"#,
expect![[r#"
crate
- foo: t
+ - foo : type
crate::foo
- Baz: ti vi
- bar: t
+ - Baz : type (import) value (import)
+ - bar : type
crate::foo::bar
- baz: t
+ - baz : type
crate::foo::bar::baz
- Baz: t v
+ - Baz : type value
"#]],
);
}
@@ -725,17 +725,17 @@
"#,
expect![[r#"
crate
- foo: t
+ - foo : type
crate::foo
- Baz: ti vi
- bar: t
+ - Baz : type (import) value (import)
+ - bar : type
crate::foo::bar
- baz: t
+ - baz : type
crate::foo::bar::baz
- Baz: t v
+ - Baz : type value
"#]],
);
}
@@ -756,13 +756,13 @@
"#,
expect![[r#"
crate
- module: t
+ - module : type
crate::module
- submod: t
+ - submod : type
crate::module::submod
- Baz: t v
+ - Baz : type value
"#]],
);
}
@@ -783,16 +783,16 @@
"#,
expect![[r#"
crate
- a: t
+ - a : type
crate::a
- b: t
+ - b : type
crate::a::b
- c: t
+ - c : type
crate::a::b::c
- X: t v
+ - X : type value
"#]],
);
}
@@ -814,16 +814,16 @@
"#,
expect![[r#"
crate
- a: t
+ - a : type
crate::a
- b: t
+ - b : type
crate::a::b
- c: t
+ - c : type
crate::a::b::c
- X: t v
+ - X : type value
"#]],
);
}
@@ -870,8 +870,8 @@
"#,
expect![[r#"
crate
- Hash: ti
- core: t
+ - Hash : type (import)
+ - core : type
crate::core
"#]],
@@ -921,16 +921,16 @@
"#,
expect![[r#"
crate
- NoAssoc: _
- const_based: _
- module: t
- new: _
- unresolved: _
+ - NoAssoc : _
+ - const_based : _
+ - module : type
+ - new : _
+ - unresolved : _
crate::module
- C: v
- Enum: t
- S: t v
+ - C : value
+ - Enum : type
+ - S : type value
"#]],
);
}
@@ -957,14 +957,15 @@
"#,
expect![[r#"
crate
- ASSOC_CONST: _
- AssocType: _
- MACRO_CONST: _
- method: _
- module: t
+ - ASSOC_CONST : _
+ - AssocType : _
+ - MACRO_CONST : _
+ - method : _
+ - module : type
crate::module
- Trait: t
+ - Trait : type
+ - (legacy) m : macro!
"#]],
);
check(
@@ -987,10 +988,11 @@
"#,
expect![[r#"
crate
- module: t
+ - module : type
crate::module
- Trait: t
+ - Trait : type
+ - (legacy) m : macro!
"#]],
);
}
@@ -1015,10 +1017,10 @@
"#,
expect![[r#"
crate
- ASSOC_CONST: _
- AssocType: _
- MACRO_CONST: _
- method: _
+ - ASSOC_CONST : _
+ - AssocType : _
+ - MACRO_CONST : _
+ - method : _
"#]],
);
check(
diff --git a/crates/hir-def/src/nameres/tests/primitives.rs b/crates/hir-def/src/nameres/tests/primitives.rs
index 271eb1c..8616902 100644
--- a/crates/hir-def/src/nameres/tests/primitives.rs
+++ b/crates/hir-def/src/nameres/tests/primitives.rs
@@ -13,11 +13,11 @@
"#,
expect![[r#"
crate
- foo: t
- int: ti
+ - foo : type
+ - int : type (import)
crate::foo
- int: ti
+ - int : type (import)
"#]],
);
}
diff --git a/crates/hir-def/src/resolver.rs b/crates/hir-def/src/resolver.rs
index abcf0a3..b5afbf3 100644
--- a/crates/hir-def/src/resolver.rs
+++ b/crates/hir-def/src/resolver.rs
@@ -708,6 +708,20 @@
self.item_scope_().0
}
+ #[inline]
+ pub fn top_level_def_map(&self) -> &'db DefMap {
+ self.module_scope.def_map
+ }
+
+ #[inline]
+ pub fn is_visible(&self, db: &dyn DefDatabase, visibility: Visibility) -> bool {
+ visibility.is_visible_from_def_map(
+ db,
+ self.module_scope.def_map,
+ self.module_scope.module_id,
+ )
+ }
+
pub fn generic_def(&self) -> Option<GenericDefId> {
self.scopes().find_map(|scope| match scope {
Scope::GenericParams { def, .. } => Some(*def),
diff --git a/crates/hir-ty/Cargo.toml b/crates/hir-ty/Cargo.toml
index 378a0f0..8adf95b 100644
--- a/crates/hir-ty/Cargo.toml
+++ b/crates/hir-ty/Cargo.toml
@@ -16,14 +16,12 @@
cov-mark = "2.0.0"
itertools.workspace = true
arrayvec.workspace = true
-bitflags.workspace = true
smallvec.workspace = true
ena = "0.14.3"
either.workspace = true
oorandom = "11.1.5"
-tracing.workspace = true
+tracing = { workspace = true, features = ["attributes"] }
rustc-hash.workspace = true
-scoped-tls = "1.0.1"
la-arena.workspace = true
triomphe.workspace = true
typed-arena = "2.0.2"
diff --git a/crates/hir-ty/src/autoderef.rs b/crates/hir-ty/src/autoderef.rs
index 392b0b0..d21108f 100644
--- a/crates/hir-ty/src/autoderef.rs
+++ b/crates/hir-ty/src/autoderef.rs
@@ -13,11 +13,11 @@
use crate::{
TraitEnvironment,
db::HirDatabase,
- infer::unify::InferenceTable,
+ infer::InferenceContext,
next_solver::{
- Canonical, TraitRef, Ty, TyKind,
+ Canonical, DbInterner, ParamEnv, TraitRef, Ty, TyKind, TypingMode,
infer::{
- InferOk,
+ DbInternerInferExt, InferCtxt,
traits::{Obligation, ObligationCause, PredicateObligations},
},
obligation_ctxt::ObligationCtxt,
@@ -38,14 +38,15 @@
env: Arc<TraitEnvironment<'db>>,
ty: Canonical<'db, Ty<'db>>,
) -> impl Iterator<Item = Ty<'db>> + use<'db> {
- let mut table = InferenceTable::new(db, env, None);
- let ty = table.instantiate_canonical(ty);
- let mut autoderef = Autoderef::new_no_tracking(&mut table, ty);
+ let interner = DbInterner::new_with(db, Some(env.krate), env.block);
+ let infcx = interner.infer_ctxt().build(TypingMode::PostAnalysis);
+ let (ty, _) = infcx.instantiate_canonical(&ty);
+ let autoderef = Autoderef::new(&infcx, &env, ty);
let mut v = Vec::new();
- while let Some((ty, _steps)) = autoderef.next() {
+ for (ty, _steps) in autoderef {
// `ty` may contain unresolved inference variables. Since there's no chance they would be
// resolved, just replace with fallback type.
- let resolved = autoderef.table.resolve_completely(ty);
+ let resolved = infcx.resolve_vars_if_possible(ty).replace_infer_with_error(interner);
// If the deref chain contains a cycle (e.g. `A` derefs to `B` and `B` derefs to `A`), we
// would revisit some already visited types. Stop here to avoid duplication.
@@ -105,13 +106,48 @@
trait_target: TypeAliasId,
}
+// We use a trait here and a generic implementation unfortunately, because sometimes (specifically
+// in place_op.rs), you need to have mutable access to the `InferenceContext` while the `Autoderef`
+// borrows it.
+pub(crate) trait AutoderefCtx<'db> {
+ fn infcx(&self) -> &InferCtxt<'db>;
+ fn env(&self) -> &TraitEnvironment<'db>;
+}
+
+pub(crate) struct DefaultAutoderefCtx<'a, 'db> {
+ infcx: &'a InferCtxt<'db>,
+ env: &'a TraitEnvironment<'db>,
+}
+impl<'db> AutoderefCtx<'db> for DefaultAutoderefCtx<'_, 'db> {
+ #[inline]
+ fn infcx(&self) -> &InferCtxt<'db> {
+ self.infcx
+ }
+ #[inline]
+ fn env(&self) -> &TraitEnvironment<'db> {
+ self.env
+ }
+}
+
+pub(crate) struct InferenceContextAutoderefCtx<'a, 'b, 'db>(&'a mut InferenceContext<'b, 'db>);
+impl<'db> AutoderefCtx<'db> for InferenceContextAutoderefCtx<'_, '_, 'db> {
+ #[inline]
+ fn infcx(&self) -> &InferCtxt<'db> {
+ &self.0.table.infer_ctxt
+ }
+ #[inline]
+ fn env(&self) -> &TraitEnvironment<'db> {
+ &self.0.table.trait_env
+ }
+}
+
/// Recursively dereference a type, considering both built-in
/// dereferences (`*`) and the `Deref` trait.
/// Although called `Autoderef` it can be configured to use the
/// `Receiver` trait instead of the `Deref` trait.
-pub(crate) struct Autoderef<'a, 'db, Steps = Vec<(Ty<'db>, AutoderefKind)>> {
+pub(crate) struct GeneralAutoderef<'db, Ctx, Steps = Vec<(Ty<'db>, AutoderefKind)>> {
// Meta infos:
- pub(crate) table: &'a mut InferenceTable<'db>,
+ ctx: Ctx,
traits: Option<AutoderefTraits>,
// Current state:
@@ -122,7 +158,16 @@
use_receiver_trait: bool,
}
-impl<'a, 'db, Steps: TrackAutoderefSteps<'db>> Iterator for Autoderef<'a, 'db, Steps> {
+pub(crate) type Autoderef<'a, 'db, Steps = Vec<(Ty<'db>, AutoderefKind)>> =
+ GeneralAutoderef<'db, DefaultAutoderefCtx<'a, 'db>, Steps>;
+pub(crate) type InferenceContextAutoderef<'a, 'b, 'db, Steps = Vec<(Ty<'db>, AutoderefKind)>> =
+ GeneralAutoderef<'db, InferenceContextAutoderefCtx<'a, 'b, 'db>, Steps>;
+
+impl<'db, Ctx, Steps> Iterator for GeneralAutoderef<'db, Ctx, Steps>
+where
+ Ctx: AutoderefCtx<'db>,
+ Steps: TrackAutoderefSteps<'db>,
+{
type Item = (Ty<'db>, usize);
fn next(&mut self) -> Option<Self::Item> {
@@ -148,26 +193,26 @@
// be better to skip this clause and use the Overloaded case only, since &T
// and &mut T implement Receiver. But built-in derefs apply equally to Receiver
// and Deref, and this has benefits for const and the emitted MIR.
- let (kind, new_ty) = if let Some(ty) =
- self.state.cur_ty.builtin_deref(self.table.db, self.include_raw_pointers)
- {
- debug_assert_eq!(ty, self.table.infer_ctxt.resolve_vars_if_possible(ty));
- // NOTE: we may still need to normalize the built-in deref in case
- // we have some type like `&<Ty as Trait>::Assoc`, since users of
- // autoderef expect this type to have been structurally normalized.
- if let TyKind::Alias(..) = ty.kind() {
- let (normalized_ty, obligations) = structurally_normalize_ty(self.table, ty)?;
- self.state.obligations.extend(obligations);
- (AutoderefKind::Builtin, normalized_ty)
+ let (kind, new_ty) =
+ if let Some(ty) = self.state.cur_ty.builtin_deref(self.include_raw_pointers) {
+ debug_assert_eq!(ty, self.infcx().resolve_vars_if_possible(ty));
+ // NOTE: we may still need to normalize the built-in deref in case
+ // we have some type like `&<Ty as Trait>::Assoc`, since users of
+ // autoderef expect this type to have been structurally normalized.
+ if let TyKind::Alias(..) = ty.kind() {
+ let (normalized_ty, obligations) =
+ structurally_normalize_ty(self.infcx(), self.env().env, ty)?;
+ self.state.obligations.extend(obligations);
+ (AutoderefKind::Builtin, normalized_ty)
+ } else {
+ (AutoderefKind::Builtin, ty)
+ }
+ } else if let Some(ty) = self.overloaded_deref_ty(self.state.cur_ty) {
+ // The overloaded deref check already normalizes the pointee type.
+ (AutoderefKind::Overloaded, ty)
} else {
- (AutoderefKind::Builtin, ty)
- }
- } else if let Some(ty) = self.overloaded_deref_ty(self.state.cur_ty) {
- // The overloaded deref check already normalizes the pointee type.
- (AutoderefKind::Overloaded, ty)
- } else {
- return None;
- };
+ return None;
+ };
self.state.steps.push(self.state.cur_ty, kind);
debug!(
@@ -183,34 +228,84 @@
}
impl<'a, 'db> Autoderef<'a, 'db> {
- pub(crate) fn new(table: &'a mut InferenceTable<'db>, base_ty: Ty<'db>) -> Self {
- Self::new_impl(table, base_ty)
+ #[inline]
+ pub(crate) fn new_with_tracking(
+ infcx: &'a InferCtxt<'db>,
+ env: &'a TraitEnvironment<'db>,
+ base_ty: Ty<'db>,
+ ) -> Self {
+ Self::new_impl(DefaultAutoderefCtx { infcx, env }, base_ty)
+ }
+}
+
+impl<'a, 'b, 'db> InferenceContextAutoderef<'a, 'b, 'db> {
+ #[inline]
+ pub(crate) fn new_from_inference_context(
+ ctx: &'a mut InferenceContext<'b, 'db>,
+ base_ty: Ty<'db>,
+ ) -> Self {
+ Self::new_impl(InferenceContextAutoderefCtx(ctx), base_ty)
+ }
+
+ #[inline]
+ pub(crate) fn ctx(&mut self) -> &mut InferenceContext<'b, 'db> {
+ self.ctx.0
}
}
impl<'a, 'db> Autoderef<'a, 'db, usize> {
- pub(crate) fn new_no_tracking(table: &'a mut InferenceTable<'db>, base_ty: Ty<'db>) -> Self {
- Self::new_impl(table, base_ty)
+ #[inline]
+ pub(crate) fn new(
+ infcx: &'a InferCtxt<'db>,
+ env: &'a TraitEnvironment<'db>,
+ base_ty: Ty<'db>,
+ ) -> Self {
+ Self::new_impl(DefaultAutoderefCtx { infcx, env }, base_ty)
}
}
-impl<'a, 'db, Steps: TrackAutoderefSteps<'db>> Autoderef<'a, 'db, Steps> {
- fn new_impl(table: &'a mut InferenceTable<'db>, base_ty: Ty<'db>) -> Self {
- Autoderef {
+impl<'db, Ctx, Steps> GeneralAutoderef<'db, Ctx, Steps>
+where
+ Ctx: AutoderefCtx<'db>,
+ Steps: TrackAutoderefSteps<'db>,
+{
+ #[inline]
+ fn new_impl(ctx: Ctx, base_ty: Ty<'db>) -> Self {
+ GeneralAutoderef {
state: AutoderefSnapshot {
steps: Steps::default(),
- cur_ty: table.infer_ctxt.resolve_vars_if_possible(base_ty),
+ cur_ty: ctx.infcx().resolve_vars_if_possible(base_ty),
obligations: PredicateObligations::new(),
at_start: true,
reached_recursion_limit: false,
},
- table,
+ ctx,
traits: None,
include_raw_pointers: false,
use_receiver_trait: false,
}
}
+ #[inline]
+ fn infcx(&self) -> &InferCtxt<'db> {
+ self.ctx.infcx()
+ }
+
+ #[inline]
+ fn env(&self) -> &TraitEnvironment<'db> {
+ self.ctx.env()
+ }
+
+ #[inline]
+ fn interner(&self) -> DbInterner<'db> {
+ self.infcx().interner
+ }
+
+ #[inline]
+ fn db(&self) -> &'db dyn HirDatabase {
+ self.interner().db
+ }
+
fn autoderef_traits(&mut self) -> Option<AutoderefTraits> {
match &mut self.traits {
Some(it) => Some(*it),
@@ -219,25 +314,23 @@
(|| {
Some(AutoderefTraits {
trait_: LangItem::Receiver
- .resolve_trait(self.table.db, self.table.trait_env.krate)?,
+ .resolve_trait(self.db(), self.env().krate)?,
trait_target: LangItem::ReceiverTarget
- .resolve_type_alias(self.table.db, self.table.trait_env.krate)?,
+ .resolve_type_alias(self.db(), self.env().krate)?,
})
})()
.or_else(|| {
Some(AutoderefTraits {
- trait_: LangItem::Deref
- .resolve_trait(self.table.db, self.table.trait_env.krate)?,
+ trait_: LangItem::Deref.resolve_trait(self.db(), self.env().krate)?,
trait_target: LangItem::DerefTarget
- .resolve_type_alias(self.table.db, self.table.trait_env.krate)?,
+ .resolve_type_alias(self.db(), self.env().krate)?,
})
})?
} else {
AutoderefTraits {
- trait_: LangItem::Deref
- .resolve_trait(self.table.db, self.table.trait_env.krate)?,
+ trait_: LangItem::Deref.resolve_trait(self.db(), self.env().krate)?,
trait_target: LangItem::DerefTarget
- .resolve_type_alias(self.table.db, self.table.trait_env.krate)?,
+ .resolve_type_alias(self.db(), self.env().krate)?,
}
};
Some(*self.traits.insert(traits))
@@ -247,31 +340,32 @@
fn overloaded_deref_ty(&mut self, ty: Ty<'db>) -> Option<Ty<'db>> {
debug!("overloaded_deref_ty({:?})", ty);
- let interner = self.table.interner();
+ let interner = self.interner();
// <ty as Deref>, or whatever the equivalent trait is that we've been asked to walk.
let AutoderefTraits { trait_, trait_target } = self.autoderef_traits()?;
let trait_ref = TraitRef::new(interner, trait_.into(), [ty]);
let obligation =
- Obligation::new(interner, ObligationCause::new(), self.table.trait_env.env, trait_ref);
+ Obligation::new(interner, ObligationCause::new(), self.env().env, trait_ref);
// We detect whether the self type implements `Deref` before trying to
// structurally normalize. We use `predicate_may_hold_opaque_types_jank`
// to support not-yet-defined opaque types. It will succeed for `impl Deref`
// but fail for `impl OtherTrait`.
- if !self.table.infer_ctxt.predicate_may_hold_opaque_types_jank(&obligation) {
+ if !self.infcx().predicate_may_hold_opaque_types_jank(&obligation) {
debug!("overloaded_deref_ty: cannot match obligation");
return None;
}
let (normalized_ty, obligations) = structurally_normalize_ty(
- self.table,
+ self.infcx(),
+ self.env().env,
Ty::new_projection(interner, trait_target.into(), [ty]),
)?;
debug!("overloaded_deref_ty({:?}) = ({:?}, {:?})", ty, normalized_ty, obligations);
self.state.obligations.extend(obligations);
- Some(self.table.infer_ctxt.resolve_vars_if_possible(normalized_ty))
+ Some(self.infcx().resolve_vars_if_possible(normalized_ty))
}
/// Returns the final type we ended up with, which may be an unresolved
@@ -292,7 +386,6 @@
&self.state.steps
}
- #[expect(dead_code)]
pub(crate) fn reached_recursion_limit(&self) -> bool {
self.state.reached_recursion_limit
}
@@ -316,12 +409,12 @@
}
fn structurally_normalize_ty<'db>(
- table: &InferenceTable<'db>,
+ infcx: &InferCtxt<'db>,
+ param_env: ParamEnv<'db>,
ty: Ty<'db>,
) -> Option<(Ty<'db>, PredicateObligations<'db>)> {
- let mut ocx = ObligationCtxt::new(&table.infer_ctxt);
- let Ok(normalized_ty) =
- ocx.structurally_normalize_ty(&ObligationCause::misc(), table.trait_env.env, ty)
+ let mut ocx = ObligationCtxt::new(infcx);
+ let Ok(normalized_ty) = ocx.structurally_normalize_ty(&ObligationCause::misc(), param_env, ty)
else {
// We shouldn't have errors here in the old solver, except for
// evaluate/fulfill mismatches, but that's not a reason for an ICE.
@@ -334,17 +427,3 @@
Some((normalized_ty, ocx.into_pending_obligations()))
}
-
-pub(crate) fn overloaded_deref_ty<'db>(
- table: &InferenceTable<'db>,
- ty: Ty<'db>,
-) -> Option<InferOk<'db, Ty<'db>>> {
- let interner = table.interner();
-
- let trait_target = LangItem::DerefTarget.resolve_type_alias(table.db, table.trait_env.krate)?;
-
- let (normalized_ty, obligations) =
- structurally_normalize_ty(table, Ty::new_projection(interner, trait_target.into(), [ty]))?;
-
- Some(InferOk { value: normalized_ty, obligations })
-}
diff --git a/crates/hir-ty/src/consteval.rs b/crates/hir-ty/src/consteval.rs
index 18ebe7d..61f29b4 100644
--- a/crates/hir-ty/src/consteval.rs
+++ b/crates/hir-ty/src/consteval.rs
@@ -5,7 +5,7 @@
use base_db::Crate;
use hir_def::{
- EnumVariantId, GeneralConstId, HasModule, StaticId,
+ ConstId, EnumVariantId, GeneralConstId, StaticId,
expr_store::Body,
hir::{Expr, ExprId},
type_ref::LiteralConstRef,
@@ -21,8 +21,8 @@
infer::InferenceContext,
mir::{MirEvalError, MirLowerError},
next_solver::{
- Const, ConstBytes, ConstKind, DbInterner, ErrorGuaranteed, GenericArg, GenericArgs,
- SolverDefId, Ty, ValueConst,
+ Const, ConstBytes, ConstKind, DbInterner, ErrorGuaranteed, GenericArg, GenericArgs, Ty,
+ ValueConst,
},
};
@@ -139,16 +139,17 @@
ConstKind::Infer(_) => None,
ConstKind::Bound(_, _) => None,
ConstKind::Placeholder(_) => None,
- ConstKind::Unevaluated(unevaluated_const) => {
- let c = match unevaluated_const.def {
- SolverDefId::ConstId(id) => GeneralConstId::ConstId(id),
- SolverDefId::StaticId(id) => GeneralConstId::StaticId(id),
- _ => unreachable!(),
- };
- let subst = unevaluated_const.args;
- let ec = db.const_eval(c, subst, None).ok()?;
- try_const_usize(db, ec)
- }
+ ConstKind::Unevaluated(unevaluated_const) => match unevaluated_const.def.0 {
+ GeneralConstId::ConstId(id) => {
+ let subst = unevaluated_const.args;
+ let ec = db.const_eval(id, subst, None).ok()?;
+ try_const_usize(db, ec)
+ }
+ GeneralConstId::StaticId(id) => {
+ let ec = db.const_eval_static(id).ok()?;
+ try_const_usize(db, ec)
+ }
+ },
ConstKind::Value(val) => Some(u128::from_le_bytes(pad16(&val.value.inner().memory, false))),
ConstKind::Error(_) => None,
ConstKind::Expr(_) => None,
@@ -161,16 +162,17 @@
ConstKind::Infer(_) => None,
ConstKind::Bound(_, _) => None,
ConstKind::Placeholder(_) => None,
- ConstKind::Unevaluated(unevaluated_const) => {
- let c = match unevaluated_const.def {
- SolverDefId::ConstId(id) => GeneralConstId::ConstId(id),
- SolverDefId::StaticId(id) => GeneralConstId::StaticId(id),
- _ => unreachable!(),
- };
- let subst = unevaluated_const.args;
- let ec = db.const_eval(c, subst, None).ok()?;
- try_const_isize(db, &ec)
- }
+ ConstKind::Unevaluated(unevaluated_const) => match unevaluated_const.def.0 {
+ GeneralConstId::ConstId(id) => {
+ let subst = unevaluated_const.args;
+ let ec = db.const_eval(id, subst, None).ok()?;
+ try_const_isize(db, &ec)
+ }
+ GeneralConstId::StaticId(id) => {
+ let ec = db.const_eval_static(id).ok()?;
+ try_const_isize(db, &ec)
+ }
+ },
ConstKind::Value(val) => Some(i128::from_le_bytes(pad16(&val.value.inner().memory, true))),
ConstKind::Error(_) => None,
ConstKind::Expr(_) => None,
@@ -254,7 +256,7 @@
pub(crate) fn const_eval_cycle_result<'db>(
_: &'db dyn HirDatabase,
- _: GeneralConstId,
+ _: ConstId,
_: GenericArgs<'db>,
_: Option<Arc<TraitEnvironment<'db>>>,
) -> Result<Const<'db>, ConstEvalError<'db>> {
@@ -277,19 +279,11 @@
pub(crate) fn const_eval_query<'db>(
db: &'db dyn HirDatabase,
- def: GeneralConstId,
+ def: ConstId,
subst: GenericArgs<'db>,
trait_env: Option<Arc<TraitEnvironment<'db>>>,
) -> Result<Const<'db>, ConstEvalError<'db>> {
- let body = match def {
- GeneralConstId::ConstId(c) => {
- db.monomorphized_mir_body(c.into(), subst, db.trait_environment(c.into()))?
- }
- GeneralConstId::StaticId(s) => {
- let krate = s.module(db).krate();
- db.monomorphized_mir_body(s.into(), subst, TraitEnvironment::empty(krate))?
- }
- };
+ let body = db.monomorphized_mir_body(def.into(), subst, db.trait_environment(def.into()))?;
let c = interpret_mir(db, body, false, trait_env)?.0?;
Ok(c)
}
diff --git a/crates/hir-ty/src/consteval/tests.rs b/crates/hir-ty/src/consteval/tests.rs
index f1aa06d..6095250 100644
--- a/crates/hir-ty/src/consteval/tests.rs
+++ b/crates/hir-ty/src/consteval/tests.rs
@@ -142,7 +142,7 @@
_ => None,
})
.expect("No const named GOAL found in the test");
- db.const_eval(const_id.into(), GenericArgs::new_from_iter(interner, []), None)
+ db.const_eval(const_id, GenericArgs::new_from_iter(interner, []), None)
}
#[test]
@@ -851,6 +851,7 @@
fn loops() {
check_number(
r#"
+ //- minicore: add, builtin_impls
const GOAL: u8 = {
let mut x = 0;
loop {
@@ -871,6 +872,7 @@
);
check_number(
r#"
+ //- minicore: add, builtin_impls
const GOAL: u8 = {
let mut x = 0;
loop {
@@ -885,6 +887,7 @@
);
check_number(
r#"
+ //- minicore: add, builtin_impls
const GOAL: u8 = {
'a: loop {
let x = 'b: loop {
@@ -907,7 +910,7 @@
);
check_number(
r#"
- //- minicore: add
+ //- minicore: add, builtin_impls
const GOAL: u8 = {
let mut x = 0;
'a: loop {
@@ -1277,7 +1280,7 @@
fn destructing_assignment() {
check_number(
r#"
- //- minicore: add
+ //- minicore: add, builtin_impls
const fn f(i: &mut u8) -> &mut u8 {
*i += 1;
i
@@ -1469,11 +1472,11 @@
fn options() {
check_number(
r#"
- //- minicore: option
+ //- minicore: option, add, builtin_impls
const GOAL: u8 = {
let x = Some(2);
match x {
- Some(y) => 2 * y,
+ Some(y) => 2 + y,
_ => 10,
}
};
@@ -1482,7 +1485,7 @@
);
check_number(
r#"
- //- minicore: option
+ //- minicore: option, add, builtin_impls
fn f(x: Option<Option<i32>>) -> i32 {
if let Some(y) = x && let Some(z) = y {
z
@@ -1498,11 +1501,11 @@
);
check_number(
r#"
- //- minicore: option
+ //- minicore: option, add, builtin_impls
const GOAL: u8 = {
let x = None;
match x {
- Some(y) => 2 * y,
+ Some(y) => 2 + y,
_ => 10,
}
};
diff --git a/crates/hir-ty/src/db.rs b/crates/hir-ty/src/db.rs
index 9b58abb..40e58aa 100644
--- a/crates/hir-ty/src/db.rs
+++ b/crates/hir-ty/src/db.rs
@@ -3,15 +3,12 @@
use base_db::{Crate, target::TargetLoadError};
use hir_def::{
- AdtId, BlockId, CallableDefId, ConstParamId, DefWithBodyId, EnumVariantId, FunctionId,
- GeneralConstId, GenericDefId, ImplId, LifetimeParamId, LocalFieldId, StaticId, TraitId,
- TypeAliasId, TypeOrConstParamId, VariantId, db::DefDatabase, hir::ExprId,
- layout::TargetDataLayout,
+ AdtId, CallableDefId, ConstId, ConstParamId, DefWithBodyId, EnumVariantId, FunctionId,
+ GenericDefId, ImplId, LifetimeParamId, LocalFieldId, StaticId, TraitId, TypeAliasId, VariantId,
+ db::DefDatabase, hir::ExprId, layout::TargetDataLayout,
};
-use hir_expand::name::Name;
use la_arena::ArenaMap;
use salsa::plumbing::AsId;
-use smallvec::SmallVec;
use triomphe::Arc;
use crate::{
@@ -19,8 +16,7 @@
consteval::ConstEvalError,
dyn_compatibility::DynCompatibilityViolation,
layout::{Layout, LayoutError},
- lower::{Diagnostics, GenericDefaults, GenericPredicates, ImplTraits},
- method_resolution::{InherentImpls, TraitImpls, TyFingerprint},
+ lower::{Diagnostics, GenericDefaults},
mir::{BorrowckResult, MirBody, MirLowerError},
next_solver::{Const, EarlyBinder, GenericArgs, PolyFnSig, TraitRef, Ty, VariancesOf},
};
@@ -33,6 +29,8 @@
// region:mir
 + // FIXME: Collapse `mir_body_for_closure` into `mir_body`
+ // and `monomorphized_mir_body_for_closure` into `monomorphized_mir_body`
#[salsa::invoke(crate::mir::mir_body_query)]
#[salsa::cycle(cycle_result = crate::mir::mir_body_cycle_result)]
fn mir_body<'db>(
@@ -74,7 +72,7 @@
#[salsa::cycle(cycle_result = crate::consteval::const_eval_cycle_result)]
fn const_eval<'db>(
&'db self,
- def: GeneralConstId,
+ def: ConstId,
subst: GenericArgs<'db>,
trait_env: Option<Arc<TraitEnvironment<'db>>>,
) -> Result<Const<'db>, ConstEvalError<'db>>;
@@ -190,43 +188,6 @@
def: CallableDefId,
) -> EarlyBinder<'db, PolyFnSig<'db>>;
- #[salsa::invoke(crate::lower::return_type_impl_traits)]
- fn return_type_impl_traits<'db>(
- &'db self,
- def: FunctionId,
- ) -> Option<Arc<EarlyBinder<'db, ImplTraits<'db>>>>;
-
- #[salsa::invoke(crate::lower::type_alias_impl_traits)]
- fn type_alias_impl_traits<'db>(
- &'db self,
- def: TypeAliasId,
- ) -> Option<Arc<EarlyBinder<'db, ImplTraits<'db>>>>;
-
- #[salsa::invoke(crate::lower::generic_predicates_without_parent_with_diagnostics_query)]
- fn generic_predicates_without_parent_with_diagnostics<'db>(
- &'db self,
- def: GenericDefId,
- ) -> (GenericPredicates<'db>, Diagnostics);
-
- #[salsa::invoke(crate::lower::generic_predicates_without_parent_query)]
- #[salsa::transparent]
- fn generic_predicates_without_parent<'db>(
- &'db self,
- def: GenericDefId,
- ) -> GenericPredicates<'db>;
-
- #[salsa::invoke(crate::lower::generic_predicates_for_param_query)]
- #[salsa::cycle(cycle_result = crate::lower::generic_predicates_for_param_cycle_result)]
- fn generic_predicates_for_param<'db>(
- &'db self,
- def: GenericDefId,
- param_id: TypeOrConstParamId,
- assoc_name: Option<Name>,
- ) -> GenericPredicates<'db>;
-
- #[salsa::invoke(crate::lower::generic_predicates_query)]
- fn generic_predicates<'db>(&'db self, def: GenericDefId) -> GenericPredicates<'db>;
-
#[salsa::invoke(crate::lower::trait_environment_for_body_query)]
#[salsa::transparent]
fn trait_environment_for_body<'db>(&'db self, def: DefWithBodyId)
@@ -249,32 +210,6 @@
#[salsa::transparent]
fn generic_defaults<'db>(&'db self, def: GenericDefId) -> GenericDefaults<'db>;
- #[salsa::invoke(InherentImpls::inherent_impls_in_crate_query)]
- fn inherent_impls_in_crate(&self, krate: Crate) -> Arc<InherentImpls>;
-
- #[salsa::invoke(InherentImpls::inherent_impls_in_block_query)]
- fn inherent_impls_in_block(&self, block: BlockId) -> Option<Arc<InherentImpls>>;
-
- /// Collects all crates in the dependency graph that have impls for the
- /// given fingerprint. This is only used for primitive types and types
- /// annotated with `rustc_has_incoherent_inherent_impls`; for other types
- /// we just look at the crate where the type is defined.
- #[salsa::invoke(crate::method_resolution::incoherent_inherent_impl_crates)]
- fn incoherent_inherent_impl_crates(
- &self,
- krate: Crate,
- fp: TyFingerprint,
- ) -> SmallVec<[Crate; 2]>;
-
- #[salsa::invoke(TraitImpls::trait_impls_in_crate_query)]
- fn trait_impls_in_crate(&self, krate: Crate) -> Arc<TraitImpls>;
-
- #[salsa::invoke(TraitImpls::trait_impls_in_block_query)]
- fn trait_impls_in_block(&self, block: BlockId) -> Option<Arc<TraitImpls>>;
-
- #[salsa::invoke(TraitImpls::trait_impls_in_deps_query)]
- fn trait_impls_in_deps(&self, krate: Crate) -> Arc<[Arc<TraitImpls>]>;
-
// Interned IDs for solver integration
#[salsa::interned]
fn intern_impl_trait_id(&self, id: ImplTraitId<'_>) -> InternedOpaqueTyId;
@@ -301,13 +236,6 @@
#[salsa_macros::interned(no_lifetime, debug, revisions = usize::MAX)]
#[derive(PartialOrd, Ord)]
-pub struct InternedTypeOrConstParamId {
- /// This stores the param and its index.
- pub loc: (TypeOrConstParamId, u32),
-}
-
-#[salsa_macros::interned(no_lifetime, debug, revisions = usize::MAX)]
-#[derive(PartialOrd, Ord)]
pub struct InternedLifetimeParamId {
/// This stores the param and its index.
pub loc: (LifetimeParamId, u32),
diff --git a/crates/hir-ty/src/diagnostics/decl_check.rs b/crates/hir-ty/src/diagnostics/decl_check.rs
index 0815e62..f6992df 100644
--- a/crates/hir-ty/src/diagnostics/decl_check.rs
+++ b/crates/hir-ty/src/diagnostics/decl_check.rs
@@ -563,6 +563,10 @@
cov_mark::hit!(extern_static_incorrect_case_ignored);
return;
}
+ if self.db.attrs(static_id.into()).by_key(sym::no_mangle).exists() {
+ cov_mark::hit!(no_mangle_static_incorrect_case_ignored);
+ return;
+ }
self.create_incorrect_case_diagnostic_for_item_name(
static_id,
diff --git a/crates/hir-ty/src/display.rs b/crates/hir-ty/src/display.rs
index 0a37966..6767bd0 100644
--- a/crates/hir-ty/src/display.rs
+++ b/crates/hir-ty/src/display.rs
@@ -10,8 +10,7 @@
use base_db::Crate;
use either::Either;
use hir_def::{
- FindPathConfig, GeneralConstId, GenericDefId, HasModule, LocalFieldId, Lookup, ModuleDefId,
- ModuleId, TraitId,
+ FindPathConfig, GenericDefId, HasModule, LocalFieldId, Lookup, ModuleDefId, ModuleId, TraitId,
db::DefDatabase,
expr_store::{ExpressionStore, path::Path},
find_path::{self, PrefixKind},
@@ -52,6 +51,7 @@
db::{HirDatabase, InternedClosure, InternedCoroutine},
generics::generics,
layout::Layout,
+ lower::GenericPredicates,
mir::pad16,
next_solver::{
AliasTy, Clause, ClauseKind, Const, ConstKind, DbInterner, EarlyBinder,
@@ -625,23 +625,20 @@
{
// FIXME: We shouldn't use `param.id`, it should be removed. We should know the
// `GenericDefId` from the formatted type (store it inside the `HirFormatter`).
- let bounds =
- f.db.generic_predicates(param.id.parent())
- .instantiate_identity()
- .into_iter()
- .flatten()
- .filter(|wc| {
- let ty = match wc.kind().skip_binder() {
- ClauseKind::Trait(tr) => tr.self_ty(),
- ClauseKind::TypeOutlives(t) => t.0,
- _ => return false,
- };
- let TyKind::Alias(AliasTyKind::Projection, a) = ty.kind() else {
- return false;
- };
- a == *alias
- })
- .collect::<Vec<_>>();
+ let bounds = GenericPredicates::query_all(f.db, param.id.parent())
+ .iter_identity_copied()
+ .filter(|wc| {
+ let ty = match wc.kind().skip_binder() {
+ ClauseKind::Trait(tr) => tr.self_ty(),
+ ClauseKind::TypeOutlives(t) => t.0,
+ _ => return false,
+ };
+ let TyKind::Alias(AliasTyKind::Projection, a) = ty.kind() else {
+ return false;
+ };
+ a == *alias
+ })
+ .collect::<Vec<_>>();
if !bounds.is_empty() {
return f.format_bounds_with(*alias, |f| {
write_bounds_like_dyn_trait_with_prefix(
@@ -702,11 +699,7 @@
const_bytes.ty,
),
ConstKind::Unevaluated(unev) => {
- let c = match unev.def {
- SolverDefId::ConstId(id) => GeneralConstId::ConstId(id),
- SolverDefId::StaticId(id) => GeneralConstId::StaticId(id),
- _ => unreachable!(),
- };
+ let c = unev.def.0;
write!(f, "{}", c.name(f.db))?;
hir_fmt_generics(f, unev.args.as_slice(), c.generic_def(f.db), None)?;
Ok(())
@@ -1122,13 +1115,8 @@
_ => unreachable!(),
};
let impl_trait_id = db.lookup_intern_impl_trait_id(opaque_ty_id);
- if let ImplTraitId::ReturnTypeImplTrait(func, idx) = impl_trait_id {
- let datas = db
- .return_type_impl_traits(func)
- .expect("impl trait id without data");
- let data = (*datas)
- .as_ref()
- .map_bound(|rpit| &rpit.impl_traits[idx].predicates);
+ if let ImplTraitId::ReturnTypeImplTrait(func, _) = impl_trait_id {
+ let data = impl_trait_id.predicates(db);
let bounds =
|| data.iter_instantiated_copied(f.interner, ty.args.as_slice());
let mut len = bounds().count();
@@ -1354,43 +1342,24 @@
));
}
let impl_trait_id = db.lookup_intern_impl_trait_id(opaque_ty_id);
- match impl_trait_id {
- ImplTraitId::ReturnTypeImplTrait(func, idx) => {
- let datas =
- db.return_type_impl_traits(func).expect("impl trait id without data");
- let data =
- (*datas).as_ref().map_bound(|rpit| &rpit.impl_traits[idx].predicates);
- let bounds = data
- .iter_instantiated_copied(interner, alias_ty.args.as_slice())
- .collect::<Vec<_>>();
- let krate = func.krate(db);
- write_bounds_like_dyn_trait_with_prefix(
- f,
- "impl",
- Either::Left(*self),
- &bounds,
- SizedByDefault::Sized { anchor: krate },
- )?;
+ let data = impl_trait_id.predicates(db);
+ let bounds = data
+ .iter_instantiated_copied(interner, alias_ty.args.as_slice())
+ .collect::<Vec<_>>();
+ let krate = match impl_trait_id {
+ ImplTraitId::ReturnTypeImplTrait(func, _) => {
+ func.krate(db)
// FIXME: it would maybe be good to distinguish this from the alias type (when debug printing), and to show the substitution
}
- ImplTraitId::TypeAliasImplTrait(alias, idx) => {
- let datas =
- db.type_alias_impl_traits(alias).expect("impl trait id without data");
- let data =
- (*datas).as_ref().map_bound(|rpit| &rpit.impl_traits[idx].predicates);
- let bounds = data
- .iter_instantiated_copied(interner, alias_ty.args.as_slice())
- .collect::<Vec<_>>();
- let krate = alias.krate(db);
- write_bounds_like_dyn_trait_with_prefix(
- f,
- "impl",
- Either::Left(*self),
- &bounds,
- SizedByDefault::Sized { anchor: krate },
- )?;
- }
- }
+ ImplTraitId::TypeAliasImplTrait(alias, _) => alias.krate(db),
+ };
+ write_bounds_like_dyn_trait_with_prefix(
+ f,
+ "impl",
+ Either::Left(*self),
+ &bounds,
+ SizedByDefault::Sized { anchor: krate },
+ )?;
}
TyKind::Closure(id, substs) => {
let id = id.0;
@@ -1541,11 +1510,8 @@
)?
}
TypeParamProvenance::ArgumentImplTrait => {
- let bounds = db
- .generic_predicates(param.id.parent())
- .instantiate_identity()
- .into_iter()
- .flatten()
+ let bounds = GenericPredicates::query_all(f.db, param.id.parent())
+ .iter_identity_copied()
.filter(|wc| match wc.kind().skip_binder() {
ClauseKind::Trait(tr) => tr.self_ty() == *self,
ClauseKind::Projection(proj) => proj.self_ty() == *self,
diff --git a/crates/hir-ty/src/drop.rs b/crates/hir-ty/src/drop.rs
index b09d1fb..aebb6de 100644
--- a/crates/hir-ty/src/drop.rs
+++ b/crates/hir-ty/src/drop.rs
@@ -9,9 +9,9 @@
use crate::{
TraitEnvironment, consteval,
db::HirDatabase,
- method_resolution::TyFingerprint,
+ method_resolution::TraitImpls,
next_solver::{
- Ty, TyKind,
+ SimplifiedType, Ty, TyKind,
infer::{InferCtxt, traits::ObligationCause},
obligation_ctxt::ObligationCtxt,
},
@@ -27,13 +27,13 @@
return false;
};
let impls = match module.containing_block() {
- Some(block) => match db.trait_impls_in_block(block) {
- Some(it) => it,
+ Some(block) => match TraitImpls::for_block(db, block) {
+ Some(it) => &**it,
None => return false,
},
- None => db.trait_impls_in_crate(module.krate()),
+ None => TraitImpls::for_crate(db, module.krate()),
};
- impls.for_trait_and_self_ty(drop_trait, TyFingerprint::Adt(adt)).next().is_some()
+ !impls.for_trait_and_self_ty(drop_trait, &SimplifiedType::Adt(adt.into())).is_empty()
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
diff --git a/crates/hir-ty/src/dyn_compatibility.rs b/crates/hir-ty/src/dyn_compatibility.rs
index 437141e..1bd5981 100644
--- a/crates/hir-ty/src/dyn_compatibility.rs
+++ b/crates/hir-ty/src/dyn_compatibility.rs
@@ -18,7 +18,7 @@
use crate::{
ImplTraitId,
db::{HirDatabase, InternedOpaqueTyId},
- lower::associated_ty_item_bounds,
+ lower::{GenericPredicates, associated_ty_item_bounds},
next_solver::{
Binder, Clause, Clauses, DbInterner, EarlyBinder, GenericArgs, Goal, ParamEnv, ParamTy,
SolverDefId, TraitPredicate, TraitRef, Ty, TypingMode, infer::DbInternerInferExt, mk_param,
@@ -136,11 +136,11 @@
};
let interner = DbInterner::new_with(db, Some(krate), None);
- let predicates = db.generic_predicates(def);
+ let predicates = GenericPredicates::query_explicit(db, def);
// FIXME: We should use `explicit_predicates_of` here, which hasn't been implemented to
// rust-analyzer yet
// https://github.com/rust-lang/rust/blob/ddaf12390d3ffb7d5ba74491a48f3cd528e5d777/compiler/rustc_hir_analysis/src/collect/predicates_of.rs#L490
- elaborate::elaborate(interner, predicates.iter().copied()).any(|pred| {
+ elaborate::elaborate(interner, predicates.iter_identity_copied()).any(|pred| {
match pred.kind().skip_binder() {
ClauseKind::Trait(trait_pred) => {
if sized == trait_pred.def_id().0
@@ -162,8 +162,8 @@
// but we don't have good way to render such locations.
// So, just return single boolean value for existence of such `Self` reference
fn predicates_reference_self(db: &dyn HirDatabase, trait_: TraitId) -> bool {
- db.generic_predicates(trait_.into())
- .iter()
+ GenericPredicates::query_explicit(db, trait_.into())
+ .iter_identity_copied()
.any(|pred| predicate_references_self(db, trait_, pred, AllowSelfProjection::No))
}
@@ -199,7 +199,7 @@
fn predicate_references_self<'db>(
db: &'db dyn HirDatabase,
trait_: TraitId,
- predicate: &Clause<'db>,
+ predicate: Clause<'db>,
allow_self_projection: AllowSelfProjection,
) -> bool {
match predicate.kind().skip_binder() {
@@ -363,8 +363,8 @@
cb(MethodViolationCode::UndispatchableReceiver)?;
}
- let predicates = &*db.generic_predicates_without_parent(func.into());
- for pred in predicates {
+ let predicates = GenericPredicates::query_own(db, func.into());
+ for pred in predicates.iter_identity_copied() {
let pred = pred.kind().skip_binder();
if matches!(pred, ClauseKind::TypeOutlives(_)) {
@@ -440,7 +440,7 @@
let unsized_receiver_ty = receiver_for_self_ty(interner, func, receiver_ty, unsized_self_ty);
let param_env = {
- let generic_predicates = &*db.generic_predicates(func.into());
+ let generic_predicates = GenericPredicates::query_all(db, func.into());
// Self: Unsize<U>
let unsize_predicate =
@@ -458,7 +458,7 @@
ParamEnv {
clauses: Clauses::new_from_iter(
interner,
- generic_predicates.iter().copied().chain([
+ generic_predicates.iter_identity_copied().chain([
unsize_predicate.upcast(interner),
trait_predicate.upcast(interner),
meta_sized_predicate.upcast(interner),
diff --git a/crates/hir-ty/src/generics.rs b/crates/hir-ty/src/generics.rs
index 26e03aa..5f02614 100644
--- a/crates/hir-ty/src/generics.rs
+++ b/crates/hir-ty/src/generics.rs
@@ -60,11 +60,6 @@
self.params.where_predicates().iter()
}
- pub(crate) fn has_no_predicates(&self) -> bool {
- self.params.has_no_predicates()
- && self.parent_generics.as_ref().is_none_or(|g| g.params.has_no_predicates())
- }
-
pub(crate) fn is_empty(&self) -> bool {
self.params.is_empty() && self.parent_generics.as_ref().is_none_or(|g| g.params.is_empty())
}
diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs
index 016edb2..02b8ab8 100644
--- a/crates/hir-ty/src/infer.rs
+++ b/crates/hir-ty/src/infer.rs
@@ -21,9 +21,11 @@
mod expr;
mod fallback;
mod mutability;
+mod op;
mod opaques;
mod pat;
mod path;
+mod place_op;
pub(crate) mod unify;
use std::{cell::OnceCell, convert::identity, iter, ops::Index};
@@ -45,12 +47,14 @@
use indexmap::IndexSet;
use intern::sym;
use la_arena::ArenaMap;
+use macros::{TypeFoldable, TypeVisitable};
use rustc_ast_ir::Mutability;
use rustc_hash::{FxHashMap, FxHashSet};
use rustc_type_ir::{
AliasTyKind, TypeFoldable,
inherent::{AdtDef, IntoKind, Region as _, SliceLike, Ty as _},
};
+use span::Edition;
use stdx::never;
use triomphe::Arc;
@@ -65,10 +69,13 @@
lower::{
ImplTraitIdx, ImplTraitLoweringMode, LifetimeElisionKind, diagnostics::TyLoweringDiagnostic,
},
+ method_resolution::{CandidateId, MethodResolutionUnstableFeatures},
mir::MirSpan,
next_solver::{
AliasTy, Const, DbInterner, ErrorGuaranteed, GenericArg, GenericArgs, Region, Ty, TyKind,
- Tys, abi::Safety, infer::traits::ObligationCause,
+ Tys,
+ abi::Safety,
+ infer::{InferCtxt, traits::ObligationCause},
},
traits::FnTrait,
utils::TargetFeatureIsSafeInTarget,
@@ -330,16 +337,21 @@
/// At some point, of course, `Box` should move out of the compiler, in which
/// case this is analogous to transforming a struct. E.g., Box<[i32; 4]> ->
/// Box<[i32]> is an `Adjust::Unsize` with the target `Box<[i32]>`.
-#[derive(Clone, Debug, PartialEq, Eq, Hash)]
+#[derive(Clone, Debug, PartialEq, Eq, Hash, TypeVisitable, TypeFoldable)]
pub struct Adjustment<'db> {
- pub kind: Adjust<'db>,
+ #[type_visitable(ignore)]
+ #[type_foldable(identity)]
+ pub kind: Adjust,
pub target: Ty<'db>,
}
impl<'db> Adjustment<'db> {
pub fn borrow(interner: DbInterner<'db>, m: Mutability, ty: Ty<'db>, lt: Region<'db>) -> Self {
let ty = Ty::new_ref(interner, lt, ty, m);
- Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(lt, m)), target: ty }
+ Adjustment {
+ kind: Adjust::Borrow(AutoBorrow::Ref(AutoBorrowMutability::new(m, AllowTwoPhase::No))),
+ target: ty,
+ }
}
}
@@ -357,20 +369,20 @@
/// capable mutable borrows.
/// See #49434 for tracking.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-pub(crate) enum AllowTwoPhase {
+pub enum AllowTwoPhase {
// FIXME: We should use this when appropriate.
Yes,
No,
}
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
-pub enum Adjust<'db> {
+pub enum Adjust {
/// Go from ! to any type.
NeverToAny,
/// Dereference once, producing a place.
Deref(Option<OverloadedDeref>),
/// Take the address and produce either a `&` or `*` pointer.
- Borrow(AutoBorrow<'db>),
+ Borrow(AutoBorrow),
Pointer(PointerCast),
}
@@ -381,18 +393,47 @@
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct OverloadedDeref(pub Option<Mutability>);
-#[derive(Clone, Debug, PartialEq, Eq, Hash)]
-pub enum AutoBorrow<'db> {
+#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
+pub enum AutoBorrowMutability {
+ Mut { allow_two_phase_borrow: AllowTwoPhase },
+ Not,
+}
+
+impl AutoBorrowMutability {
+ /// Creates an `AutoBorrowMutability` from a mutability and allowance of two phase borrows.
+ ///
+ /// Note that when `mutbl.is_not()`, `allow_two_phase_borrow` is ignored
+ pub fn new(mutbl: Mutability, allow_two_phase_borrow: AllowTwoPhase) -> Self {
+ match mutbl {
+ Mutability::Not => Self::Not,
+ Mutability::Mut => Self::Mut { allow_two_phase_borrow },
+ }
+ }
+}
+
+impl From<AutoBorrowMutability> for Mutability {
+ fn from(m: AutoBorrowMutability) -> Self {
+ match m {
+ AutoBorrowMutability::Mut { .. } => Mutability::Mut,
+ AutoBorrowMutability::Not => Mutability::Not,
+ }
+ }
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub enum AutoBorrow {
/// Converts from T to &T.
- Ref(Region<'db>, Mutability),
+ Ref(AutoBorrowMutability),
/// Converts from T to *T.
RawPtr(Mutability),
}
-impl<'db> AutoBorrow<'db> {
- fn mutability(&self) -> Mutability {
- let (AutoBorrow::Ref(_, m) | AutoBorrow::RawPtr(m)) = self;
- *m
+impl AutoBorrow {
+ fn mutability(self) -> Mutability {
+ match self {
+ AutoBorrow::Ref(mutbl) => mutbl.into(),
+ AutoBorrow::RawPtr(mutbl) => mutbl,
+ }
}
}
@@ -442,7 +483,7 @@
/// For each struct literal or pattern, records the variant it resolves to.
variant_resolutions: FxHashMap<ExprOrPatId, VariantId>,
/// For each associated item record what it resolves to
- assoc_resolutions: FxHashMap<ExprOrPatId, (AssocItemId, GenericArgs<'db>)>,
+ assoc_resolutions: FxHashMap<ExprOrPatId, (CandidateId, GenericArgs<'db>)>,
/// Whenever a tuple field expression access a tuple field, we allocate a tuple id in
/// [`InferenceContext`] and store the tuples substitution there. This map is the reverse of
/// that which allows us to resolve a [`TupleFieldId`]s type.
@@ -457,7 +498,7 @@
pub(crate) type_of_pat: ArenaMap<PatId, Ty<'db>>,
pub(crate) type_of_binding: ArenaMap<BindingId, Ty<'db>>,
pub(crate) type_of_opaque: FxHashMap<InternedOpaqueTyId, Ty<'db>>,
- type_mismatches: FxHashMap<ExprOrPatId, TypeMismatch<'db>>,
+ pub(crate) type_mismatches: FxHashMap<ExprOrPatId, TypeMismatch<'db>>,
/// Whether there are any type-mismatching errors in the result.
// FIXME: This isn't as useful as initially thought due to us falling back placeholders to
// `TyKind::Error`.
@@ -535,16 +576,16 @@
pub fn assoc_resolutions_for_expr(
&self,
id: ExprId,
- ) -> Option<(AssocItemId, GenericArgs<'db>)> {
+ ) -> Option<(CandidateId, GenericArgs<'db>)> {
self.assoc_resolutions.get(&id.into()).copied()
}
- pub fn assoc_resolutions_for_pat(&self, id: PatId) -> Option<(AssocItemId, GenericArgs<'db>)> {
+ pub fn assoc_resolutions_for_pat(&self, id: PatId) -> Option<(CandidateId, GenericArgs<'db>)> {
self.assoc_resolutions.get(&id.into()).copied()
}
pub fn assoc_resolutions_for_expr_or_pat(
&self,
id: ExprOrPatId,
- ) -> Option<(AssocItemId, GenericArgs<'db>)> {
+ ) -> Option<(CandidateId, GenericArgs<'db>)> {
match id {
ExprOrPatId::ExprId(id) => self.assoc_resolutions_for_expr(id),
ExprOrPatId::PatId(id) => self.assoc_resolutions_for_pat(id),
@@ -718,7 +759,6 @@
re_erased: Region<'db>,
empty_args: GenericArgs<'db>,
- empty_tys: Tys<'db>,
}
impl<'db> InternedStandardTypes<'db> {
@@ -754,7 +794,6 @@
re_erased: Region::new_erased(interner),
empty_args: GenericArgs::new_from_iter(interner, []),
- empty_tys: Tys::new_from_iter(interner, []),
}
}
}
@@ -769,8 +808,10 @@
/// and resolve the path via its methods. This will ensure proper error reporting.
pub(crate) resolver: Resolver<'db>,
target_features: OnceCell<(TargetFeatures, TargetFeatureIsSafeInTarget)>,
+ pub(crate) unstable_features: MethodResolutionUnstableFeatures,
+ pub(crate) edition: Edition,
pub(crate) generic_def: GenericDefId,
- table: unify::InferenceTable<'db>,
+ pub(crate) table: unify::InferenceTable<'db>,
/// The traits in scope, disregarding block modules. This is used for caching purposes.
traits_in_scope: FxHashSet<TraitId>,
pub(crate) result: InferenceResult<'db>,
@@ -873,6 +914,10 @@
return_ty: types.error, // set in collect_* calls
types,
target_features: OnceCell::new(),
+ unstable_features: MethodResolutionUnstableFeatures::from_def_map(
+ resolver.top_level_def_map(),
+ ),
+ edition: resolver.krate().data(db).edition,
table,
tuple_field_accesses_rev: Default::default(),
resume_yield_tys: None,
@@ -906,18 +951,15 @@
self.resolver.krate()
}
- fn target_features<'a>(
- db: &dyn HirDatabase,
- target_features: &'a OnceCell<(TargetFeatures, TargetFeatureIsSafeInTarget)>,
- owner: DefWithBodyId,
- krate: Crate,
- ) -> (&'a TargetFeatures, TargetFeatureIsSafeInTarget) {
- let (target_features, target_feature_is_safe) = target_features.get_or_init(|| {
- let target_features = match owner {
- DefWithBodyId::FunctionId(id) => TargetFeatures::from_attrs(&db.attrs(id.into())),
+ fn target_features(&self) -> (&TargetFeatures, TargetFeatureIsSafeInTarget) {
+ let (target_features, target_feature_is_safe) = self.target_features.get_or_init(|| {
+ let target_features = match self.owner {
+ DefWithBodyId::FunctionId(id) => {
+ TargetFeatures::from_attrs(&self.db.attrs(id.into()))
+ }
_ => TargetFeatures::default(),
};
- let target_feature_is_safe = match &krate.workspace_data(db).target {
+ let target_feature_is_safe = match &self.krate().workspace_data(self.db).target {
Ok(target) => crate::utils::target_feature_is_safe_in_target(target),
Err(_) => TargetFeatureIsSafeInTarget::No,
};
@@ -927,7 +969,7 @@
}
#[inline]
- pub(crate) fn set_tainted_by_errors(&mut self) {
+ fn set_tainted_by_errors(&mut self) {
self.result.has_errors = true;
}
@@ -1162,6 +1204,11 @@
self.table.interner()
}
+ #[inline]
+ pub(crate) fn infcx(&self) -> &InferCtxt<'db> {
+ &self.table.infer_ctxt
+ }
+
fn infer_body(&mut self) {
match self.return_coercion {
Some(_) => self.infer_return(self.body.body_expr),
@@ -1179,7 +1226,7 @@
self.result.type_of_expr.insert(expr, ty);
}
- fn write_expr_adj(&mut self, expr: ExprId, adjustments: Box<[Adjustment<'db>]>) {
+ pub(crate) fn write_expr_adj(&mut self, expr: ExprId, adjustments: Box<[Adjustment<'db>]>) {
if adjustments.is_empty() {
return;
}
@@ -1212,7 +1259,12 @@
self.result.pat_adjustments.entry(pat).or_default().extend(adjustments);
}
- fn write_method_resolution(&mut self, expr: ExprId, func: FunctionId, subst: GenericArgs<'db>) {
+ pub(crate) fn write_method_resolution(
+ &mut self,
+ expr: ExprId,
+ func: FunctionId,
+ subst: GenericArgs<'db>,
+ ) {
self.result.method_resolutions.insert(expr, (func, subst));
}
@@ -1223,7 +1275,7 @@
fn write_assoc_resolution(
&mut self,
id: ExprOrPatId,
- item: AssocItemId,
+ item: CandidateId,
subs: GenericArgs<'db>,
) {
self.result.assoc_resolutions.insert(id, (item, subs));
@@ -1237,7 +1289,7 @@
self.result.type_of_binding.insert(id, ty);
}
- fn push_diagnostic(&self, diagnostic: InferenceDiagnostic<'db>) {
+ pub(crate) fn push_diagnostic(&self, diagnostic: InferenceDiagnostic<'db>) {
self.diagnostics.push(diagnostic);
}
@@ -1284,7 +1336,7 @@
self.process_user_written_ty(ty)
}
- fn make_body_ty(&mut self, type_ref: TypeRefId) -> Ty<'db> {
+ pub(crate) fn make_body_ty(&mut self, type_ref: TypeRefId) -> Ty<'db> {
self.make_ty(
type_ref,
self.body,
@@ -1293,7 +1345,7 @@
)
}
- fn make_body_const(&mut self, const_ref: ConstRef, ty: Ty<'db>) -> Const<'db> {
+ pub(crate) fn make_body_const(&mut self, const_ref: ConstRef, ty: Ty<'db>) -> Const<'db> {
let const_ = self.with_ty_lowering(
self.body,
InferenceTyDiagnosticSource::Body,
@@ -1303,7 +1355,7 @@
self.insert_type_vars(const_)
}
- fn make_path_as_body_const(&mut self, path: &Path, ty: Ty<'db>) -> Const<'db> {
+ pub(crate) fn make_path_as_body_const(&mut self, path: &Path, ty: Ty<'db>) -> Const<'db> {
let const_ = self.with_ty_lowering(
self.body,
InferenceTyDiagnosticSource::Body,
@@ -1317,7 +1369,7 @@
self.types.error
}
- fn make_body_lifetime(&mut self, lifetime_ref: LifetimeRefId) -> Region<'db> {
+ pub(crate) fn make_body_lifetime(&mut self, lifetime_ref: LifetimeRefId) -> Region<'db> {
let lt = self.with_ty_lowering(
self.body,
InferenceTyDiagnosticSource::Body,
@@ -1399,19 +1451,13 @@
}
/// Whenever you lower a user-written type, you should call this.
- fn process_user_written_ty<T>(&mut self, ty: T) -> T
- where
- T: TypeFoldable<DbInterner<'db>>,
- {
+ fn process_user_written_ty(&mut self, ty: Ty<'db>) -> Ty<'db> {
self.table.process_user_written_ty(ty)
}
/// The difference of this method from `process_user_written_ty()` is that this method doesn't register a well-formed obligation,
/// while `process_user_written_ty()` should (but doesn't currently).
- fn process_remote_user_written_ty<T>(&mut self, ty: T) -> T
- where
- T: TypeFoldable<DbInterner<'db>>,
- {
+ fn process_remote_user_written_ty(&mut self, ty: Ty<'db>) -> Ty<'db> {
self.table.process_remote_user_written_ty(ty)
}
@@ -1427,18 +1473,72 @@
self.resolve_associated_type_with_params(inner_ty, assoc_ty, &[])
}
- fn demand_eqtype(&mut self, expected: Ty<'db>, actual: Ty<'db>) {
+ fn demand_eqtype(
+ &mut self,
+ id: ExprOrPatId,
+ expected: Ty<'db>,
+ actual: Ty<'db>,
+ ) -> Result<(), ()> {
+ let result = self.demand_eqtype_fixme_no_diag(expected, actual);
+ if result.is_err() {
+ self.result.type_mismatches.insert(id, TypeMismatch { expected, actual });
+ }
+ result
+ }
+
+ fn demand_eqtype_fixme_no_diag(
+ &mut self,
+ expected: Ty<'db>,
+ actual: Ty<'db>,
+ ) -> Result<(), ()> {
let result = self
.table
- .infer_ctxt
- .at(&ObligationCause::new(), self.table.trait_env.env)
+ .at(&ObligationCause::new())
.eq(expected, actual)
.map(|infer_ok| self.table.register_infer_ok(infer_ok));
+ result.map_err(drop)
+ }
+
+ fn demand_suptype(&mut self, expected: Ty<'db>, actual: Ty<'db>) {
+ let result = self
+ .table
+ .at(&ObligationCause::new())
+ .sup(expected, actual)
+ .map(|infer_ok| self.table.register_infer_ok(infer_ok));
if let Err(_err) = result {
// FIXME: Emit diagnostic.
}
}
+ fn demand_coerce(
+ &mut self,
+ expr: ExprId,
+ checked_ty: Ty<'db>,
+ expected: Ty<'db>,
+ allow_two_phase: AllowTwoPhase,
+ expr_is_read: ExprIsRead,
+ ) -> Ty<'db> {
+ let result = self.coerce(expr.into(), checked_ty, expected, allow_two_phase, expr_is_read);
+ if let Err(_err) = result {
+ // FIXME: Emit diagnostic.
+ }
+ result.unwrap_or(self.types.error)
+ }
+
+ fn expr_ty(&self, expr: ExprId) -> Ty<'db> {
+ self.result[expr]
+ }
+
+ fn expr_ty_after_adjustments(&self, e: ExprId) -> Ty<'db> {
+ let mut ty = None;
+ if let Some(it) = self.result.expr_adjustments.get(&e)
+ && let Some(it) = it.last()
+ {
+ ty = Some(it.target);
+ }
+ ty.unwrap_or_else(|| self.expr_ty(e))
+ }
+
fn resolve_associated_type_with_params(
&mut self,
inner_ty: Ty<'db>,
@@ -1596,9 +1696,7 @@
(ty, _) = path_ctx.lower_partly_resolved_path(resolution, true);
tried_resolving_once = true;
- ty = self.table.insert_type_vars(ty);
- ty = self.table.normalize_associated_types_in(ty);
- ty = self.table.structurally_resolve_type(ty);
+ ty = self.table.process_user_written_ty(ty);
if ty.is_ty_error() {
return (self.err_ty(), None);
}
@@ -1709,18 +1807,6 @@
trait_.trait_items(self.db).associated_type_by_name(&Name::new_symbol_root(sym::Output))
}
- fn resolve_lang_trait(&self, lang: LangItem) -> Option<TraitId> {
- self.resolve_lang_item(lang)?.as_trait()
- }
-
- fn resolve_ops_neg_output(&self) -> Option<TypeAliasId> {
- self.resolve_output_on(self.resolve_lang_trait(LangItem::Neg)?)
- }
-
- fn resolve_ops_not_output(&self) -> Option<TypeAliasId> {
- self.resolve_output_on(self.resolve_lang_trait(LangItem::Not)?)
- }
-
fn resolve_future_future_output(&self) -> Option<TypeAliasId> {
let ItemContainerId::TraitId(trait_) = self
.resolve_lang_item(LangItem::IntoFutureIntoFuture)?
@@ -1768,24 +1854,17 @@
Some(struct_.into())
}
- fn resolve_ops_index_output(&self) -> Option<TypeAliasId> {
- self.resolve_output_on(self.resolve_lang_trait(LangItem::Index)?)
- }
-
fn resolve_va_list(&self) -> Option<AdtId> {
let struct_ = self.resolve_lang_item(LangItem::VaList)?.as_struct()?;
Some(struct_.into())
}
- fn get_traits_in_scope<'a>(
- resolver: &Resolver<'db>,
- traits_in_scope: &'a FxHashSet<TraitId>,
- ) -> Either<FxHashSet<TraitId>, &'a FxHashSet<TraitId>> {
- let mut b_traits = resolver.traits_in_scope_from_block_scopes().peekable();
+ pub(crate) fn get_traits_in_scope(&self) -> Either<FxHashSet<TraitId>, &FxHashSet<TraitId>> {
+ let mut b_traits = self.resolver.traits_in_scope_from_block_scopes().peekable();
if b_traits.peek().is_some() {
- Either::Left(traits_in_scope.iter().copied().chain(b_traits).collect())
+ Either::Left(self.traits_in_scope.iter().copied().chain(b_traits).collect())
} else {
- Either::Right(traits_in_scope)
+ Either::Right(&self.traits_in_scope)
}
}
}
diff --git a/crates/hir-ty/src/infer/autoderef.rs b/crates/hir-ty/src/infer/autoderef.rs
index ba133aa..1af102a 100644
--- a/crates/hir-ty/src/infer/autoderef.rs
+++ b/crates/hir-ty/src/infer/autoderef.rs
@@ -6,7 +6,7 @@
use crate::{
Adjust, Adjustment, OverloadedDeref,
- autoderef::{Autoderef, AutoderefKind},
+ autoderef::{Autoderef, AutoderefCtx, AutoderefKind, GeneralAutoderef},
infer::unify::InferenceTable,
next_solver::{
Ty,
@@ -15,18 +15,16 @@
};
impl<'db> InferenceTable<'db> {
- pub(crate) fn autoderef(&mut self, base_ty: Ty<'db>) -> Autoderef<'_, 'db> {
- Autoderef::new(self, base_ty)
+ pub(crate) fn autoderef(&self, base_ty: Ty<'db>) -> Autoderef<'_, 'db, usize> {
+ Autoderef::new(&self.infer_ctxt, &self.trait_env, base_ty)
+ }
+
+ pub(crate) fn autoderef_with_tracking(&self, base_ty: Ty<'db>) -> Autoderef<'_, 'db> {
+ Autoderef::new_with_tracking(&self.infer_ctxt, &self.trait_env, base_ty)
}
}
-impl<'db> Autoderef<'_, 'db> {
- /// Returns the adjustment steps.
- pub(crate) fn adjust_steps(mut self) -> Vec<Adjustment<'db>> {
- let infer_ok = self.adjust_steps_as_infer_ok();
- self.table.register_infer_ok(infer_ok)
- }
-
+impl<'db, Ctx: AutoderefCtx<'db>> GeneralAutoderef<'db, Ctx> {
pub(crate) fn adjust_steps_as_infer_ok(&mut self) -> InferOk<'db, Vec<Adjustment<'db>>> {
let steps = self.steps();
if steps.is_empty() {
diff --git a/crates/hir-ty/src/infer/cast.rs b/crates/hir-ty/src/infer/cast.rs
index c128977..00a1dff 100644
--- a/crates/hir-ty/src/infer/cast.rs
+++ b/crates/hir-ty/src/infer/cast.rs
@@ -11,7 +11,7 @@
use crate::{
InferenceDiagnostic,
db::HirDatabase,
- infer::{AllowTwoPhase, InferenceContext, coerce::CoerceNever},
+ infer::{AllowTwoPhase, InferenceContext, expr::ExprIsRead},
next_solver::{BoundExistentialPredicates, DbInterner, ParamTy, Ty, TyKind},
};
@@ -110,8 +110,8 @@
&mut self,
ctx: &mut InferenceContext<'_, 'db>,
) -> Result<(), InferenceDiagnostic<'db>> {
- self.expr_ty = ctx.table.eagerly_normalize_and_resolve_shallow_in(self.expr_ty);
- self.cast_ty = ctx.table.eagerly_normalize_and_resolve_shallow_in(self.cast_ty);
+ self.expr_ty = ctx.table.try_structurally_resolve_type(self.expr_ty);
+ self.cast_ty = ctx.table.try_structurally_resolve_type(self.cast_ty);
// This should always come first so that we apply the coercion, which impacts infer vars.
if ctx
@@ -120,7 +120,7 @@
self.expr_ty,
self.cast_ty,
AllowTwoPhase::No,
- CoerceNever::Yes,
+ ExprIsRead::Yes,
)
.is_ok()
{
@@ -159,7 +159,7 @@
TyKind::FnDef(..) => {
let sig =
self.expr_ty.callable_sig(ctx.interner()).expect("FnDef had no sig");
- let sig = ctx.table.eagerly_normalize_and_resolve_shallow_in(sig);
+ let sig = ctx.table.normalize_associated_types_in(sig);
let fn_ptr = Ty::new_fn_ptr(ctx.interner(), sig);
if ctx
.coerce(
@@ -167,7 +167,7 @@
self.expr_ty,
fn_ptr,
AllowTwoPhase::No,
- CoerceNever::Yes,
+ ExprIsRead::Yes,
)
.is_ok()
{
@@ -191,7 +191,7 @@
},
// array-ptr-cast
CastTy::Ptr(t, m) => {
- let t = ctx.table.eagerly_normalize_and_resolve_shallow_in(t);
+ let t = ctx.table.try_structurally_resolve_type(t);
if !ctx.table.is_sized(t) {
return Err(CastError::IllegalCast);
}
@@ -248,7 +248,7 @@
self.expr_ty,
array_ptr_type,
AllowTwoPhase::No,
- CoerceNever::Yes,
+ ExprIsRead::Yes,
)
.is_ok()
{
@@ -263,7 +263,7 @@
// This is a less strict condition than rustc's `demand_eqtype`,
// but false negative is better than false positive
if ctx
- .coerce(self.source_expr.into(), ety, t_cast, AllowTwoPhase::No, CoerceNever::Yes)
+ .coerce(self.source_expr.into(), ety, t_cast, AllowTwoPhase::No, ExprIsRead::Yes)
.is_ok()
{
return Ok(());
@@ -375,7 +375,7 @@
ty: Ty<'db>,
ctx: &mut InferenceContext<'_, 'db>,
) -> Result<Option<PointerKind<'db>>, ()> {
- let ty = ctx.table.eagerly_normalize_and_resolve_shallow_in(ty);
+ let ty = ctx.table.try_structurally_resolve_type(ty);
if ctx.table.is_sized(ty) {
return Ok(Some(PointerKind::Thin));
diff --git a/crates/hir-ty/src/infer/closure.rs b/crates/hir-ty/src/infer/closure.rs
index 06f8307..54a06eb 100644
--- a/crates/hir-ty/src/infer/closure.rs
+++ b/crates/hir-ty/src/infer/closure.rs
@@ -31,7 +31,6 @@
BoundRegionConversionTime, InferOk, InferResult,
traits::{ObligationCause, PredicateObligations},
},
- util::explicit_item_bounds,
},
traits::FnTrait,
};
@@ -255,8 +254,10 @@
.deduce_closure_signature_from_predicates(
expected_ty,
closure_kind,
- explicit_item_bounds(self.interner(), def_id)
- .iter_instantiated(self.interner(), args)
+ def_id
+ .expect_opaque_ty()
+ .predicates(self.db)
+ .iter_instantiated_copied(self.interner(), args.as_slice())
.map(|clause| clause.as_predicate()),
),
TyKind::Dynamic(object_type, ..) => {
diff --git a/crates/hir-ty/src/infer/closure/analysis.rs b/crates/hir-ty/src/infer/closure/analysis.rs
index 763b145..944b359 100644
--- a/crates/hir-ty/src/infer/closure/analysis.rs
+++ b/crates/hir-ty/src/infer/closure/analysis.rs
@@ -11,11 +11,8 @@
Statement, UnaryOp,
},
item_tree::FieldsShape,
- lang_item::LangItem,
resolver::ValueNs,
};
-use hir_expand::name::Name;
-use intern::sym;
use rustc_ast_ir::Mutability;
use rustc_hash::{FxHashMap, FxHashSet};
use rustc_type_ir::inherent::{IntoKind, SliceLike, Ty as _};
@@ -351,10 +348,12 @@
return Some(place);
}
Expr::UnaryOp { expr, op: UnaryOp::Deref } => {
- if matches!(
- self.expr_ty_after_adjustments(*expr).kind(),
- TyKind::Ref(..) | TyKind::RawPtr(..)
- ) {
+ let is_builtin_deref = match self.expr_ty(*expr).kind() {
+ TyKind::Ref(..) | TyKind::RawPtr(..) => true,
+ TyKind::Adt(adt_def, _) if adt_def.is_box() => true,
+ _ => false,
+ };
+ if is_builtin_deref {
let mut place = self.place_of_expr(*expr)?;
self.current_capture_span_stack.push(MirSpan::ExprId(tgt_expr));
place.projections.push(ProjectionElem::Deref);
@@ -609,28 +608,19 @@
}
Expr::Field { expr, name: _ } => self.select_from_expr(*expr),
Expr::UnaryOp { expr, op: UnaryOp::Deref } => {
- if matches!(
- self.expr_ty_after_adjustments(*expr).kind(),
- TyKind::Ref(..) | TyKind::RawPtr(..)
- ) {
- self.select_from_expr(*expr);
- } else if let Some((f, _)) = self.result.method_resolution(tgt_expr) {
- let mutability = 'b: {
- if let Some(deref_trait) =
- self.resolve_lang_item(LangItem::DerefMut).and_then(|it| it.as_trait())
- && let Some(deref_fn) = deref_trait
- .trait_items(self.db)
- .method_by_name(&Name::new_symbol_root(sym::deref_mut))
- {
- break 'b deref_fn == f;
+ if self.result.method_resolution(tgt_expr).is_some() {
+ // Overloaded deref.
+ match self.expr_ty_after_adjustments(*expr).kind() {
+ TyKind::Ref(_, _, mutability) => {
+ let place = self.place_of_expr(*expr);
+ match mutability {
+ Mutability::Mut => self.mutate_expr(*expr, place),
+ Mutability::Not => self.ref_expr(*expr, place),
+ }
}
- false
- };
- let place = self.place_of_expr(*expr);
- if mutability {
- self.mutate_expr(*expr, place);
- } else {
- self.ref_expr(*expr, place);
+ // FIXME: Is this correct wrt. raw pointer derefs?
+ TyKind::RawPtr(..) => self.select_from_expr(*expr),
+ _ => never!("deref adjustments should include taking a mutable reference"),
}
} else {
self.select_from_expr(*expr);
@@ -806,20 +796,6 @@
self.body.walk_pats_shallow(p, |p| self.walk_pat_inner(p, update_result, for_mut));
}
- fn expr_ty(&self, expr: ExprId) -> Ty<'db> {
- self.result[expr]
- }
-
- fn expr_ty_after_adjustments(&self, e: ExprId) -> Ty<'db> {
- let mut ty = None;
- if let Some(it) = self.result.expr_adjustments.get(&e)
- && let Some(it) = it.last()
- {
- ty = Some(it.target);
- }
- ty.unwrap_or_else(|| self.expr_ty(e))
- }
-
fn is_upvar(&self, place: &HirPlace<'db>) -> bool {
if let Some(c) = self.current_closure {
let InternedClosure(_, root) = self.db.lookup_intern_closure(c);
diff --git a/crates/hir-ty/src/infer/coerce.rs b/crates/hir-ty/src/infer/coerce.rs
index 40de923..4acf964 100644
--- a/crates/hir-ty/src/infer/coerce.rs
+++ b/crates/hir-ty/src/infer/coerce.rs
@@ -44,7 +44,8 @@
use intern::sym;
use rustc_ast_ir::Mutability;
use rustc_type_ir::{
- BoundVar, TypeAndMut,
+ BoundVar, DebruijnIndex, TyVid, TypeAndMut, TypeFoldable, TypeFolder, TypeSuperFoldable,
+ TypeVisitableExt,
error::TypeError,
inherent::{Const as _, GenericArg as _, IntoKind, Safety, SliceLike, Ty as _},
};
@@ -56,13 +57,16 @@
Adjust, Adjustment, AutoBorrow, PointerCast, TargetFeatures, TraitEnvironment,
autoderef::Autoderef,
db::{HirDatabase, InternedClosureId},
- infer::{AllowTwoPhase, InferenceContext, TypeMismatch, unify::InferenceTable},
+ infer::{
+ AllowTwoPhase, AutoBorrowMutability, InferenceContext, TypeMismatch, expr::ExprIsRead,
+ },
next_solver::{
Binder, BoundConst, BoundRegion, BoundRegionKind, BoundTy, BoundTyKind, CallableIdWrapper,
Canonical, ClauseKind, CoercePredicate, Const, ConstKind, DbInterner, ErrorGuaranteed,
GenericArgs, PolyFnSig, PredicateKind, Region, RegionKind, TraitRef, Ty, TyKind,
+ TypingMode,
infer::{
- InferCtxt, InferOk, InferResult,
+ DbInternerInferExt, InferCtxt, InferOk, InferResult,
relate::RelateResult,
select::{ImplSource, SelectionError},
traits::{Obligation, ObligationCause, PredicateObligation, PredicateObligations},
@@ -72,10 +76,20 @@
utils::TargetFeatureIsSafeInTarget,
};
-struct Coerce<'a, 'b, 'db> {
- table: &'a mut InferenceTable<'db>,
- has_errors: &'a mut bool,
- target_features: &'a mut dyn FnMut() -> (&'b TargetFeatures, TargetFeatureIsSafeInTarget),
+trait CoerceDelegate<'db> {
+ fn infcx(&self) -> &InferCtxt<'db>;
+ fn env(&self) -> &TraitEnvironment<'db>;
+ fn target_features(&self) -> (&TargetFeatures, TargetFeatureIsSafeInTarget);
+
+ fn set_diverging(&mut self, diverging_ty: Ty<'db>);
+
+ fn set_tainted_by_errors(&mut self);
+
+ fn type_var_is_sized(&mut self, var: TyVid) -> bool;
+}
+
+struct Coerce<D> {
+ delegate: D,
use_lub: bool,
/// Determines whether or not allow_two_phase_borrow is set on any
/// autoref adjustments we create while coercing. We don't want to
@@ -109,43 +123,56 @@
Ok(InferOk { value: (adj, target), obligations })
}
-impl<'a, 'b, 'db> Coerce<'a, 'b, 'db> {
+impl<'db, D> Coerce<D>
+where
+ D: CoerceDelegate<'db>,
+{
#[inline]
fn set_tainted_by_errors(&mut self) {
- *self.has_errors = true;
+ self.delegate.set_tainted_by_errors();
+ }
+
+ #[inline]
+ fn infcx(&self) -> &InferCtxt<'db> {
+ self.delegate.infcx()
+ }
+
+ #[inline]
+ fn env(&self) -> &TraitEnvironment<'db> {
+ self.delegate.env()
}
#[inline]
fn interner(&self) -> DbInterner<'db> {
- self.table.interner()
+ self.infcx().interner
}
#[inline]
- fn infer_ctxt(&self) -> &InferCtxt<'db> {
- &self.table.infer_ctxt
+ fn db(&self) -> &'db dyn HirDatabase {
+ self.interner().db
}
pub(crate) fn commit_if_ok<T, E>(
&mut self,
f: impl FnOnce(&mut Self) -> Result<T, E>,
) -> Result<T, E> {
- let snapshot = self.table.snapshot();
+ let snapshot = self.infcx().start_snapshot();
let result = f(self);
match result {
Ok(_) => {}
Err(_) => {
- self.table.rollback_to(snapshot);
+ self.infcx().rollback_to(snapshot);
}
}
result
}
- fn unify_raw(&mut self, a: Ty<'db>, b: Ty<'db>) -> InferResult<'db, Ty<'db>> {
+ fn unify_raw(&self, a: Ty<'db>, b: Ty<'db>) -> InferResult<'db, Ty<'db>> {
debug!("unify(a: {:?}, b: {:?}, use_lub: {})", a, b, self.use_lub);
- self.commit_if_ok(|this| {
- let at = this.infer_ctxt().at(&this.cause, this.table.trait_env.env);
+ self.infcx().commit_if_ok(|_| {
+ let at = self.infcx().at(&self.cause, self.env().env);
- let res = if this.use_lub {
+ let res = if self.use_lub {
at.lub(b, a)
} else {
at.sup(b, a)
@@ -157,7 +184,7 @@
// Filter these cases out to make sure our coercion is more accurate.
match res {
Ok(InferOk { value, obligations }) => {
- let mut ocx = ObligationCtxt::new(this.infer_ctxt());
+ let mut ocx = ObligationCtxt::new(self.infcx());
ocx.register_obligations(obligations);
if ocx.try_evaluate_obligations().is_empty() {
Ok(InferOk { value, obligations: ocx.into_pending_obligations() })
@@ -182,7 +209,7 @@
a: Ty<'db>,
b: Ty<'db>,
adjustments: impl IntoIterator<Item = Adjustment<'db>>,
- final_adjustment: Adjust<'db>,
+ final_adjustment: Adjust,
) -> CoerceResult<'db> {
self.unify_raw(a, b).and_then(|InferOk { value: ty, obligations }| {
success(
@@ -199,15 +226,15 @@
#[instrument(skip(self))]
fn coerce(&mut self, a: Ty<'db>, b: Ty<'db>) -> CoerceResult<'db> {
// First, remove any resolved type variables (at the top level, at least):
- let a = self.table.shallow_resolve(a);
- let b = self.table.shallow_resolve(b);
+ let a = self.infcx().shallow_resolve(a);
+ let b = self.infcx().shallow_resolve(b);
debug!("Coerce.tys({:?} => {:?})", a, b);
// Coercing from `!` to any type is allowed:
if a.is_never() {
// If we're coercing into an inference var, mark it as possibly diverging.
if b.is_infer() {
- self.table.set_diverging(b);
+ self.delegate.set_diverging(b);
}
if self.coerce_never {
@@ -290,12 +317,12 @@
/// fall back to subtyping (`unify_and`).
fn coerce_from_inference_variable(&mut self, a: Ty<'db>, b: Ty<'db>) -> CoerceResult<'db> {
debug!("coerce_from_inference_variable(a={:?}, b={:?})", a, b);
- debug_assert!(a.is_infer() && self.table.shallow_resolve(a) == a);
- debug_assert!(self.table.shallow_resolve(b) == b);
+ debug_assert!(a.is_infer() && self.infcx().shallow_resolve(a) == a);
+ debug_assert!(self.infcx().shallow_resolve(b) == b);
if b.is_infer() {
// Two unresolved type variables: create a `Coerce` predicate.
- let target_ty = if self.use_lub { self.table.next_ty_var() } else { b };
+ let target_ty = if self.use_lub { self.infcx().next_ty_var() } else { b };
let mut obligations = PredicateObligations::with_capacity(2);
for &source_ty in &[a, b] {
@@ -303,7 +330,7 @@
obligations.push(Obligation::new(
self.interner(),
self.cause.clone(),
- self.table.trait_env.env,
+ self.env().env,
Binder::dummy(PredicateKind::Coerce(CoercePredicate {
a: source_ty,
b: target_ty,
@@ -335,8 +362,8 @@
mutbl_b: Mutability,
) -> CoerceResult<'db> {
debug!("coerce_borrowed_pointer(a={:?}, b={:?})", a, b);
- debug_assert!(self.table.shallow_resolve(a) == a);
- debug_assert!(self.table.shallow_resolve(b) == b);
+ debug_assert!(self.infcx().shallow_resolve(a) == a);
+ debug_assert!(self.infcx().shallow_resolve(b) == b);
// If we have a parameter of type `&M T_a` and the value
// provided is `expr`, we will be adding an implicit borrow,
@@ -355,10 +382,10 @@
let mut first_error = None;
let mut r_borrow_var = None;
- let mut autoderef = Autoderef::new(self.table, a);
+ let mut autoderef = Autoderef::new_with_tracking(self.infcx(), self.env(), a);
let mut found = None;
- while let Some((referent_ty, autoderefs)) = autoderef.next() {
+ for (referent_ty, autoderefs) in autoderef.by_ref() {
if autoderefs == 0 {
// Don't let this pass, otherwise it would cause
// &T to autoref to &&T.
@@ -442,28 +469,18 @@
} else {
if r_borrow_var.is_none() {
// create var lazily, at most once
- let r = autoderef.table.next_region_var();
+ let r = self.infcx().next_region_var();
r_borrow_var = Some(r); // [4] above
}
r_borrow_var.unwrap()
};
let derefd_ty_a = Ty::new_ref(
- autoderef.table.interner(),
+ self.interner(),
r,
referent_ty,
mutbl_b, // [1] above
);
- // We need to construct a new `Coerce` because of lifetimes.
- let mut coerce = Coerce {
- table: autoderef.table,
- has_errors: self.has_errors,
- target_features: self.target_features,
- use_lub: self.use_lub,
- allow_two_phase: self.allow_two_phase,
- coerce_never: self.coerce_never,
- cause: self.cause.clone(),
- };
- match coerce.unify_raw(derefd_ty_a, b) {
+ match self.unify_raw(derefd_ty_a, b) {
Ok(ok) => {
found = Some(ok);
break;
@@ -515,15 +532,9 @@
autoderef.adjust_steps_as_infer_ok();
obligations.extend(o);
- // Now apply the autoref. We have to extract the region out of
- // the final ref type we got.
- let TyKind::Ref(region, _, _) = ty.kind() else {
- panic!("expected a ref type, got {:?}", ty);
- };
- adjustments.push(Adjustment {
- kind: Adjust::Borrow(AutoBorrow::Ref(region, mutbl_b)),
- target: ty,
- });
+ // Now apply the autoref.
+ let mutbl = AutoBorrowMutability::new(mutbl_b, self.allow_two_phase);
+ adjustments.push(Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(mutbl)), target: ty });
debug!("coerce_borrowed_pointer: succeeded ty={:?} adjustments={:?}", ty, adjustments);
@@ -538,8 +549,8 @@
#[instrument(skip(self), level = "debug")]
fn coerce_unsized(&mut self, source: Ty<'db>, target: Ty<'db>) -> CoerceResult<'db> {
debug!(?source, ?target);
- debug_assert!(self.table.shallow_resolve(source) == source);
- debug_assert!(self.table.shallow_resolve(target) == target);
+ debug_assert!(self.infcx().shallow_resolve(source) == source);
+ debug_assert!(self.infcx().shallow_resolve(target) == target);
// We don't apply any coercions incase either the source or target
// aren't sufficiently well known but tend to instead just equate
@@ -602,8 +613,8 @@
}
let traits = (
- LangItem::Unsize.resolve_trait(self.table.db, self.table.trait_env.krate),
- LangItem::CoerceUnsized.resolve_trait(self.table.db, self.table.trait_env.krate),
+ LangItem::Unsize.resolve_trait(self.db(), self.env().krate),
+ LangItem::CoerceUnsized.resolve_trait(self.db(), self.env().krate),
);
let (Some(unsize_did), Some(coerce_unsized_did)) = traits else {
debug!("missing Unsize or CoerceUnsized traits");
@@ -620,18 +631,17 @@
(TyKind::Ref(_, ty_a, mutbl_a), TyKind::Ref(_, _, mutbl_b)) => {
coerce_mutbls(mutbl_a, mutbl_b)?;
- let r_borrow = self.table.next_region_var();
+ let r_borrow = self.infcx().next_region_var();
// We don't allow two-phase borrows here, at least for initial
// implementation. If it happens that this coercion is a function argument,
// the reborrow in coerce_borrowed_ptr will pick it up.
- // let mutbl = AutoBorrowMutability::new(mutbl_b, AllowTwoPhase::No);
- let mutbl = mutbl_b;
+ let mutbl = AutoBorrowMutability::new(mutbl_b, AllowTwoPhase::No);
Some((
Adjustment { kind: Adjust::Deref(None), target: ty_a },
Adjustment {
- kind: Adjust::Borrow(AutoBorrow::Ref(r_borrow, mutbl)),
+ kind: Adjust::Borrow(AutoBorrow::Ref(mutbl)),
target: Ty::new_ref(self.interner(), r_borrow, ty_a, mutbl_b),
},
))
@@ -655,7 +665,7 @@
// the `CoerceUnsized` target type and the expected type.
// We only have the latter, so we use an inference variable
// for the former and let type inference do the rest.
- let coerce_target = self.table.next_ty_var();
+ let coerce_target = self.infcx().next_ty_var();
let mut coercion = self.unify_and(
coerce_target,
@@ -677,7 +687,7 @@
let mut queue: SmallVec<[PredicateObligation<'db>; 4]> = smallvec![Obligation::new(
self.interner(),
cause,
- self.table.trait_env.env,
+ self.env().env,
TraitRef::new(
self.interner(),
coerce_unsized_did.into(),
@@ -694,14 +704,14 @@
Some(PredicateKind::Clause(ClauseKind::Trait(trait_pred)))
if traits.contains(&trait_pred.def_id().0) =>
{
- self.infer_ctxt().resolve_vars_if_possible(trait_pred)
+ self.infcx().resolve_vars_if_possible(trait_pred)
}
// Eagerly process alias-relate obligations in new trait solver,
// since these can be emitted in the process of solving trait goals,
// but we need to constrain vars before processing goals mentioning
// them.
Some(PredicateKind::AliasRelate(..)) => {
- let mut ocx = ObligationCtxt::new(self.infer_ctxt());
+ let mut ocx = ObligationCtxt::new(self.infcx());
ocx.register_obligation(obligation);
if !ocx.try_evaluate_obligations().is_empty() {
return Err(TypeError::Mismatch);
@@ -715,7 +725,7 @@
}
};
debug!("coerce_unsized resolve step: {:?}", trait_pred);
- match self.infer_ctxt().select(&obligation.with(self.interner(), trait_pred)) {
+ match self.infcx().select(&obligation.with(self.interner(), trait_pred)) {
// Uncertain or unimplemented.
Ok(None) => {
if trait_pred.def_id().0 == unsize_did {
@@ -724,7 +734,7 @@
debug!("coerce_unsized: ambiguous unsize case for {:?}", trait_pred);
match (self_ty.kind(), unsize_ty.kind()) {
(TyKind::Infer(rustc_type_ir::TyVar(v)), TyKind::Dynamic(..))
- if self.table.type_var_is_sized(v) =>
+ if self.delegate.type_var_is_sized(v) =>
{
debug!("coerce_unsized: have sized infer {:?}", v);
coercion.obligations.push(obligation);
@@ -794,9 +804,9 @@
&mut self,
fn_ty_a: PolyFnSig<'db>,
b: Ty<'db>,
- adjustment: Option<Adjust<'db>>,
+ adjustment: Option<Adjust>,
) -> CoerceResult<'db> {
- debug_assert!(self.table.shallow_resolve(b) == b);
+ debug_assert!(self.infcx().shallow_resolve(b) == b);
self.commit_if_ok(|this| {
if let TyKind::FnPtr(_, hdr_b) = b.kind()
@@ -825,15 +835,15 @@
fn coerce_from_fn_pointer(&mut self, fn_ty_a: PolyFnSig<'db>, b: Ty<'db>) -> CoerceResult<'db> {
debug!(?fn_ty_a, ?b, "coerce_from_fn_pointer");
- debug_assert!(self.table.shallow_resolve(b) == b);
+ debug_assert!(self.infcx().shallow_resolve(b) == b);
self.coerce_from_safe_fn(fn_ty_a, b, None)
}
fn coerce_from_fn_item(&mut self, a: Ty<'db>, b: Ty<'db>) -> CoerceResult<'db> {
debug!("coerce_from_fn_item(a={:?}, b={:?})", a, b);
- debug_assert!(self.table.shallow_resolve(a) == a);
- debug_assert!(self.table.shallow_resolve(b) == b);
+ debug_assert!(self.infcx().shallow_resolve(a) == a);
+ debug_assert!(self.infcx().shallow_resolve(b) == b);
match b.kind() {
TyKind::FnPtr(_, b_hdr) => {
@@ -841,11 +851,11 @@
if let TyKind::FnDef(def_id, _) = a.kind() {
// Intrinsics are not coercible to function pointers
if let CallableDefId::FunctionId(def_id) = def_id.0 {
- if FunctionSignature::is_intrinsic(self.table.db, def_id) {
+ if FunctionSignature::is_intrinsic(self.db(), def_id) {
return Err(TypeError::IntrinsicCast);
}
- let attrs = self.table.db.attrs(def_id.into());
+ let attrs = self.db().attrs(def_id.into());
if attrs.by_key(sym::rustc_force_inline).exists() {
return Err(TypeError::ForceInlineCast);
}
@@ -856,7 +866,7 @@
// Allow the coercion if the current function has all the features that would be
// needed to call the coercee safely.
let (target_features, target_feature_is_safe) =
- (self.target_features)();
+ self.delegate.target_features();
if target_feature_is_safe == TargetFeatureIsSafeInTarget::No
&& !target_features.enabled.is_superset(&fn_target_features.enabled)
{
@@ -887,8 +897,8 @@
args_a: GenericArgs<'db>,
b: Ty<'db>,
) -> CoerceResult<'db> {
- debug_assert!(self.table.shallow_resolve(a) == a);
- debug_assert!(self.table.shallow_resolve(b) == b);
+ debug_assert!(self.infcx().shallow_resolve(a) == a);
+ debug_assert!(self.infcx().shallow_resolve(b) == b);
match b.kind() {
// FIXME: We need to have an `upvars_mentioned()` query:
@@ -930,8 +940,8 @@
fn coerce_raw_ptr(&mut self, a: Ty<'db>, b: Ty<'db>, mutbl_b: Mutability) -> CoerceResult<'db> {
debug!("coerce_raw_ptr(a={:?}, b={:?})", a, b);
- debug_assert!(self.table.shallow_resolve(a) == a);
- debug_assert!(self.table.shallow_resolve(b) == b);
+ debug_assert!(self.infcx().shallow_resolve(a) == a);
+ debug_assert!(self.infcx().shallow_resolve(b) == b);
let (is_ref, mt_a) = match a.kind() {
TyKind::Ref(_, ty, mutbl) => (true, TypeAndMut::<DbInterner<'db>> { ty, mutbl }),
@@ -960,10 +970,36 @@
}
}
-#[derive(Debug, Clone, Copy, PartialEq, Eq)]
-pub(crate) enum CoerceNever {
- No,
- Yes,
+struct InferenceCoercionDelegate<'a, 'b, 'db>(&'a mut InferenceContext<'b, 'db>);
+
+impl<'db> CoerceDelegate<'db> for InferenceCoercionDelegate<'_, '_, 'db> {
+ #[inline]
+ fn infcx(&self) -> &InferCtxt<'db> {
+ &self.0.table.infer_ctxt
+ }
+ #[inline]
+ fn env(&self) -> &TraitEnvironment<'db> {
+ &self.0.table.trait_env
+ }
+ #[inline]
+ fn target_features(&self) -> (&TargetFeatures, TargetFeatureIsSafeInTarget) {
+ self.0.target_features()
+ }
+
+ #[inline]
+ fn set_diverging(&mut self, diverging_ty: Ty<'db>) {
+ self.0.table.set_diverging(diverging_ty);
+ }
+
+ #[inline]
+ fn set_tainted_by_errors(&mut self) {
+ self.0.set_tainted_by_errors();
+ }
+
+ #[inline]
+ fn type_var_is_sized(&mut self, var: TyVid) -> bool {
+ self.0.table.type_var_is_sized(var)
+ }
}
impl<'db> InferenceContext<'_, 'db> {
@@ -977,24 +1013,26 @@
expr_ty: Ty<'db>,
mut target: Ty<'db>,
allow_two_phase: AllowTwoPhase,
- coerce_never: CoerceNever,
+ expr_is_read: ExprIsRead,
) -> RelateResult<'db, Ty<'db>> {
let source = self.table.try_structurally_resolve_type(expr_ty);
target = self.table.try_structurally_resolve_type(target);
debug!("coercion::try({:?}: {:?} -> {:?})", expr, source, target);
let cause = ObligationCause::new();
- let krate = self.krate();
+ let coerce_never = match expr {
+ ExprOrPatId::ExprId(idx) => {
+ self.expr_guaranteed_to_constitute_read_for_never(idx, expr_is_read)
+ }
+ // `PatId` is passed for `PatKind::Path`.
+ ExprOrPatId::PatId(_) => false,
+ };
let mut coerce = Coerce {
- table: &mut self.table,
- has_errors: &mut self.result.has_errors,
+ delegate: InferenceCoercionDelegate(self),
cause,
allow_two_phase,
- coerce_never: matches!(coerce_never, CoerceNever::Yes),
+ coerce_never,
use_lub: false,
- target_features: &mut || {
- Self::target_features(self.db, &self.target_features, self.owner, krate)
- },
};
let ok = coerce.commit_if_ok(|coerce| coerce.coerce(source, target))?;
@@ -1157,23 +1195,18 @@
//
// NOTE: we set `coerce_never` to `true` here because coercion LUBs only
// operate on values and not places, so a never coercion is valid.
- let krate = self.krate();
let mut coerce = Coerce {
- table: &mut self.table,
- has_errors: &mut self.result.has_errors,
+ delegate: InferenceCoercionDelegate(self),
cause: ObligationCause::new(),
allow_two_phase: AllowTwoPhase::No,
coerce_never: true,
use_lub: true,
- target_features: &mut || {
- Self::target_features(self.db, &self.target_features, self.owner, krate)
- },
};
// First try to coerce the new expression to the type of the previous ones,
// but only if the new expression has no coercion already applied to it.
let mut first_error = None;
- if !self.result.expr_adjustments.contains_key(&new) {
+ if !coerce.delegate.0.result.expr_adjustments.contains_key(&new) {
let result = coerce.commit_if_ok(|coerce| coerce.coerce(new_ty, prev_ty));
match result {
Ok(ok) => {
@@ -1335,8 +1368,9 @@
cause: &ObligationCause,
expression: ExprId,
expression_ty: Ty<'db>,
+ expr_is_read: ExprIsRead,
) {
- self.coerce_inner(icx, cause, expression, expression_ty, false, false)
+ self.coerce_inner(icx, cause, expression, expression_ty, false, false, expr_is_read)
}
/// Indicates that one of the inputs is a "forced unit". This
@@ -1357,8 +1391,17 @@
expr: ExprId,
cause: &ObligationCause,
label_unit_as_expected: bool,
+ expr_is_read: ExprIsRead,
) {
- self.coerce_inner(icx, cause, expr, icx.types.unit, true, label_unit_as_expected)
+ self.coerce_inner(
+ icx,
+ cause,
+ expr,
+ icx.types.unit,
+ true,
+ label_unit_as_expected,
+ expr_is_read,
+ )
}
/// The inner coercion "engine". If `expression` is `None`, this
@@ -1372,6 +1415,7 @@
mut expression_ty: Ty<'db>,
force_unit: bool,
label_expression_as_expected: bool,
+ expr_is_read: ExprIsRead,
) {
// Incorporate whatever type inference information we have
// until now; in principle we might also want to process
@@ -1408,7 +1452,7 @@
expression_ty,
self.expected_ty,
AllowTwoPhase::No,
- CoerceNever::Yes,
+ expr_is_read,
)
} else {
match self.expressions {
@@ -1504,88 +1548,170 @@
coerce(db, env, tys).is_ok()
}
+struct HirCoercionDelegate<'a, 'db> {
+ infcx: &'a InferCtxt<'db>,
+ env: &'a TraitEnvironment<'db>,
+ target_features: &'a TargetFeatures,
+}
+
+impl<'db> CoerceDelegate<'db> for HirCoercionDelegate<'_, 'db> {
+ #[inline]
+ fn infcx(&self) -> &InferCtxt<'db> {
+ self.infcx
+ }
+ #[inline]
+ fn env(&self) -> &TraitEnvironment<'db> {
+ self.env
+ }
+ fn target_features(&self) -> (&TargetFeatures, TargetFeatureIsSafeInTarget) {
+ (self.target_features, TargetFeatureIsSafeInTarget::No)
+ }
+ fn set_diverging(&mut self, _diverging_ty: Ty<'db>) {}
+ fn set_tainted_by_errors(&mut self) {}
+ fn type_var_is_sized(&mut self, _var: TyVid) -> bool {
+ false
+ }
+}
+
fn coerce<'db>(
db: &'db dyn HirDatabase,
env: Arc<TraitEnvironment<'db>>,
tys: &Canonical<'db, (Ty<'db>, Ty<'db>)>,
) -> Result<(Vec<Adjustment<'db>>, Ty<'db>), TypeError<DbInterner<'db>>> {
- let mut table = InferenceTable::new(db, env, None);
- let interner = table.interner();
- let ((ty1_with_vars, ty2_with_vars), vars) = table.infer_ctxt.instantiate_canonical(tys);
+ let interner = DbInterner::new_with(db, Some(env.krate), env.block);
+ let infcx = interner.infer_ctxt().build(TypingMode::PostAnalysis);
+ let ((ty1_with_vars, ty2_with_vars), vars) = infcx.instantiate_canonical(tys);
let cause = ObligationCause::new();
// FIXME: Target features.
let target_features = TargetFeatures::default();
let mut coerce = Coerce {
- table: &mut table,
- has_errors: &mut false,
+ delegate: HirCoercionDelegate {
+ infcx: &infcx,
+ env: &env,
+ target_features: &target_features,
+ },
cause,
allow_two_phase: AllowTwoPhase::No,
coerce_never: true,
use_lub: false,
- target_features: &mut || (&target_features, TargetFeatureIsSafeInTarget::No),
};
- let InferOk { value: (adjustments, ty), obligations } =
- coerce.coerce(ty1_with_vars, ty2_with_vars)?;
- table.register_predicates(obligations);
+ let infer_ok = coerce.coerce(ty1_with_vars, ty2_with_vars)?;
+ let mut ocx = ObligationCtxt::new(&infcx);
+ let (adjustments, ty) = ocx.register_infer_ok_obligations(infer_ok);
+ _ = ocx.try_evaluate_obligations();
+ let (adjustments, ty) = infcx.resolve_vars_if_possible((adjustments, ty));
// default any type vars that weren't unified back to their original bound vars
// (kind of hacky)
- let mut fallback_ty = |debruijn, infer| {
- let var = vars.var_values.iter().position(|arg| {
- arg.as_type().is_some_and(|ty| match ty.kind() {
- TyKind::Infer(it) => infer == it,
- _ => false,
- })
- });
- var.map_or_else(
- || Ty::new_error(interner, ErrorGuaranteed),
- |i| {
- Ty::new_bound(
- interner,
- debruijn,
- BoundTy { kind: BoundTyKind::Anon, var: BoundVar::from_usize(i) },
+
+ struct Resolver<'db> {
+ interner: DbInterner<'db>,
+ debruijn: DebruijnIndex,
+ var_values: GenericArgs<'db>,
+ }
+
+ impl<'db> TypeFolder<DbInterner<'db>> for Resolver<'db> {
+ fn cx(&self) -> DbInterner<'db> {
+ self.interner
+ }
+
+ fn fold_binder<T>(&mut self, t: Binder<'db, T>) -> Binder<'db, T>
+ where
+ T: TypeFoldable<DbInterner<'db>>,
+ {
+ self.debruijn.shift_in(1);
+ let result = t.super_fold_with(self);
+ self.debruijn.shift_out(1);
+ result
+ }
+
+ fn fold_ty(&mut self, t: Ty<'db>) -> Ty<'db> {
+ if !t.has_infer() {
+ return t;
+ }
+
+ if let TyKind::Infer(infer) = t.kind() {
+ let var = self.var_values.iter().position(|arg| {
+ arg.as_type().is_some_and(|ty| match ty.kind() {
+ TyKind::Infer(it) => infer == it,
+ _ => false,
+ })
+ });
+ var.map_or_else(
+ || Ty::new_error(self.interner, ErrorGuaranteed),
+ |i| {
+ Ty::new_bound(
+ self.interner,
+ self.debruijn,
+ BoundTy { kind: BoundTyKind::Anon, var: BoundVar::from_usize(i) },
+ )
+ },
)
- },
- )
- };
- let mut fallback_const = |debruijn, infer| {
- let var = vars.var_values.iter().position(|arg| {
- arg.as_const().is_some_and(|ty| match ty.kind() {
- ConstKind::Infer(it) => infer == it,
- _ => false,
- })
- });
- var.map_or_else(
- || Const::new_error(interner, ErrorGuaranteed),
- |i| Const::new_bound(interner, debruijn, BoundConst { var: BoundVar::from_usize(i) }),
- )
- };
- let mut fallback_region = |debruijn, infer| {
- let var = vars.var_values.iter().position(|arg| {
- arg.as_region().is_some_and(|ty| match ty.kind() {
- RegionKind::ReVar(it) => infer == it,
- _ => false,
- })
- });
- var.map_or_else(
- || Region::error(interner),
- |i| {
- Region::new_bound(
- interner,
- debruijn,
- BoundRegion { kind: BoundRegionKind::Anon, var: BoundVar::from_usize(i) },
+ } else {
+ t.super_fold_with(self)
+ }
+ }
+
+ fn fold_const(&mut self, c: Const<'db>) -> Const<'db> {
+ if !c.has_infer() {
+ return c;
+ }
+
+ if let ConstKind::Infer(infer) = c.kind() {
+ let var = self.var_values.iter().position(|arg| {
+ arg.as_const().is_some_and(|ty| match ty.kind() {
+ ConstKind::Infer(it) => infer == it,
+ _ => false,
+ })
+ });
+ var.map_or_else(
+ || Const::new_error(self.interner, ErrorGuaranteed),
+ |i| {
+ Const::new_bound(
+ self.interner,
+ self.debruijn,
+ BoundConst { var: BoundVar::from_usize(i) },
+ )
+ },
)
- },
- )
- };
- // FIXME also map the types in the adjustments
- // FIXME: We don't fallback correctly since this is done on `InferenceContext` and we only have `InferenceTable`.
- let ty = table.resolve_with_fallback(
- ty,
- &mut fallback_ty,
- &mut fallback_const,
- &mut fallback_region,
- );
+ } else {
+ c.super_fold_with(self)
+ }
+ }
+
+ fn fold_region(&mut self, r: Region<'db>) -> Region<'db> {
+ if let RegionKind::ReVar(infer) = r.kind() {
+ let var = self.var_values.iter().position(|arg| {
+ arg.as_region().is_some_and(|ty| match ty.kind() {
+ RegionKind::ReVar(it) => infer == it,
+ _ => false,
+ })
+ });
+ var.map_or_else(
+ || Region::error(self.interner),
+ |i| {
+ Region::new_bound(
+ self.interner,
+ self.debruijn,
+ BoundRegion {
+ kind: BoundRegionKind::Anon,
+ var: BoundVar::from_usize(i),
+ },
+ )
+ },
+ )
+ } else {
+ r
+ }
+ }
+ }
+
+ // FIXME: We don't fallback correctly since this is done on `InferenceContext` and we only have `InferCtxt`.
+ let (adjustments, ty) = (adjustments, ty).fold_with(&mut Resolver {
+ interner,
+ debruijn: DebruijnIndex::ZERO,
+ var_values: vars.var_values,
+ });
Ok((adjustments, ty))
}
diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs
index b7ab109..7487660 100644
--- a/crates/hir-ty/src/infer/expr.rs
+++ b/crates/hir-ty/src/infer/expr.rs
@@ -3,19 +3,18 @@
use std::{iter::repeat_with, mem};
use either::Either;
-use hir_def::hir::ClosureKind;
use hir_def::{
- BlockId, FieldId, GenericDefId, GenericParamId, ItemContainerId, Lookup, TupleFieldId, TupleId,
- expr_store::path::{GenericArg as HirGenericArg, GenericArgs as HirGenericArgs, Path},
+ BlockId, FieldId, GenericDefId, ItemContainerId, Lookup, TupleFieldId, TupleId,
+ expr_store::path::{GenericArgs as HirGenericArgs, Path},
hir::{
- ArithOp, Array, AsmOperand, AsmOptions, BinaryOp, Expr, ExprId, ExprOrPatId, LabelId,
- Literal, Pat, PatId, Statement, UnaryOp, generics::GenericParamDataRef,
+ Array, AsmOperand, AsmOptions, BinaryOp, BindingAnnotation, Expr, ExprId, ExprOrPatId,
+ LabelId, Literal, Pat, PatId, Statement, UnaryOp,
},
lang_item::{LangItem, LangItemTarget},
resolver::ValueNs,
};
+use hir_def::{FunctionId, hir::ClosureKind};
use hir_expand::name::Name;
-use intern::sym;
use rustc_ast_ir::Mutability;
use rustc_type_ir::{
CoroutineArgs, CoroutineArgsParts, InferTy, Interner,
@@ -25,32 +24,25 @@
use tracing::debug;
use crate::{
- Adjust, Adjustment, AutoBorrow, CallableDefId, DeclContext, DeclOrigin,
- IncorrectGenericsLenKind, Rawness, TraitEnvironment,
- autoderef::overloaded_deref_ty,
+ Adjust, Adjustment, CallableDefId, DeclContext, DeclOrigin, Rawness, TraitEnvironment,
+ autoderef::InferenceContextAutoderef,
consteval,
db::InternedCoroutine,
generics::generics,
infer::{
- AllowTwoPhase, BreakableKind,
- coerce::{CoerceMany, CoerceNever},
- find_continuable,
+ AllowTwoPhase, BreakableKind, coerce::CoerceMany, find_continuable,
pat::contains_explicit_ref_binding,
},
- lang_items::lang_items_for_bin_op,
- lower::{
- LifetimeElisionKind, lower_mutability,
- path::{GenericArgsLowerer, TypeLikeConst, substs_from_args_and_bindings},
- },
- method_resolution::{self, VisibleFromModule},
+ lower::{GenericPredicates, lower_mutability},
+ method_resolution::{self, CandidateId, MethodCallee, MethodError},
next_solver::{
- Const, DbInterner, ErrorGuaranteed, GenericArg, GenericArgs, TraitRef, Ty, TyKind,
- TypeError,
+ ErrorGuaranteed, FnSig, GenericArgs, TraitRef, Ty, TyKind, TypeError,
infer::{
- InferOk,
+ BoundRegionConversionTime, InferOk,
traits::{Obligation, ObligationCause},
},
obligation_ctxt::ObligationCtxt,
+ util::clauses_as_obligations,
},
traits::FnTrait,
};
@@ -103,12 +95,7 @@
) -> Ty<'db> {
let ty = self.infer_expr_inner(expr, expected, is_read);
if let Some(target) = expected.only_has_type(&mut self.table) {
- let coerce_never = if self.expr_guaranteed_to_constitute_read_for_never(expr, is_read) {
- CoerceNever::Yes
- } else {
- CoerceNever::No
- };
- match self.coerce(expr.into(), ty, target, AllowTwoPhase::No, coerce_never) {
+ match self.coerce(expr.into(), ty, target, AllowTwoPhase::No, is_read) {
Ok(res) => res,
Err(_) => {
self.result
@@ -203,6 +190,23 @@
}
}
+ /// Checks if the pattern contains any `ref` or `ref mut` bindings, and if
+ /// yes whether it contains mutable or just immutable ones.
+ //
+ // FIXME(tschottdorf): this is problematic as the HIR is being scraped, but
+ // ref bindings may be implicit after #42640 (default match binding modes). See issue #44848.
+ fn contains_explicit_ref_binding(&self, pat: PatId) -> bool {
+ if let Pat::Bind { id, .. } = self.body[pat]
+ && matches!(self.body[id].mode, BindingAnnotation::Ref | BindingAnnotation::RefMut)
+ {
+ return true;
+ }
+
+ let mut result = false;
+ self.body.walk_pats_shallow(pat, |pat| result |= self.contains_explicit_ref_binding(pat));
+ result
+ }
+
fn is_syntactic_place_expr(&self, expr: ExprId) -> bool {
match &self.body[expr] {
// Lang item paths cannot currently be local variables or statics.
@@ -250,6 +254,15 @@
}
}
+ #[expect(clippy::needless_return)]
+ pub(crate) fn check_lhs_assignable(&self, lhs: ExprId) {
+ if self.is_syntactic_place_expr(lhs) {
+ return;
+ }
+
+ // FIXME: Emit diagnostic.
+ }
+
fn infer_expr_coerce_never(
&mut self,
expr: ExprId,
@@ -269,7 +282,7 @@
}
if let Some(target) = expected.only_has_type(&mut self.table) {
- self.coerce(expr.into(), ty, target, AllowTwoPhase::No, CoerceNever::Yes)
+ self.coerce(expr.into(), ty, target, AllowTwoPhase::No, ExprIsRead::Yes)
.expect("never-to-any coercion should always succeed")
} else {
ty
@@ -320,16 +333,28 @@
expected.coercion_target_type(&mut self.table),
&coercion_sites,
);
- coerce.coerce(self, &ObligationCause::new(), then_branch, then_ty);
+ coerce.coerce(self, &ObligationCause::new(), then_branch, then_ty, ExprIsRead::Yes);
match else_branch {
Some(else_branch) => {
let else_ty = self.infer_expr_inner(else_branch, expected, ExprIsRead::Yes);
let else_diverges = mem::replace(&mut self.diverges, Diverges::Maybe);
- coerce.coerce(self, &ObligationCause::new(), else_branch, else_ty);
+ coerce.coerce(
+ self,
+ &ObligationCause::new(),
+ else_branch,
+ else_ty,
+ ExprIsRead::Yes,
+ );
self.diverges = condition_diverges | then_diverges & else_diverges;
}
None => {
- coerce.coerce_forced_unit(self, tgt_expr, &ObligationCause::new(), true);
+ coerce.coerce_forced_unit(
+ self,
+ tgt_expr,
+ &ObligationCause::new(),
+ true,
+ ExprIsRead::Yes,
+ );
self.diverges = condition_diverges;
}
}
@@ -407,12 +432,20 @@
expected,
),
Expr::Match { expr, arms } => {
- let scrutinee_is_read = arms
- .iter()
- .all(|arm| self.pat_guaranteed_to_constitute_read_for_never(arm.pat));
+ let mut scrutinee_is_read = true;
+ let mut contains_ref_bindings = false;
+ for arm in arms {
+ scrutinee_is_read &= self.pat_guaranteed_to_constitute_read_for_never(arm.pat);
+ contains_ref_bindings |= self.contains_explicit_ref_binding(arm.pat);
+ }
let scrutinee_is_read =
if scrutinee_is_read { ExprIsRead::Yes } else { ExprIsRead::No };
- let input_ty = self.infer_expr(*expr, &Expectation::none(), scrutinee_is_read);
+ let input_ty = self.demand_scrutinee_type(
+ *expr,
+ contains_ref_bindings,
+ arms.is_empty(),
+ scrutinee_is_read,
+ );
if arms.is_empty() {
self.diverges = Diverges::Always;
@@ -421,7 +454,6 @@
let matchee_diverges = mem::replace(&mut self.diverges, Diverges::Maybe);
let mut all_arms_diverge = Diverges::Always;
for arm in arms.iter() {
- let input_ty = self.table.structurally_resolve_type(input_ty);
self.infer_top_pat(arm.pat, input_ty, None);
}
@@ -447,7 +479,13 @@
let arm_ty = self.infer_expr_inner(arm.expr, &expected, ExprIsRead::Yes);
all_arms_diverge &= self.diverges;
- coerce.coerce(self, &ObligationCause::new(), arm.expr, arm_ty);
+ coerce.coerce(
+ self,
+ &ObligationCause::new(),
+ arm.expr,
+ arm_ty,
+ ExprIsRead::Yes,
+ );
}
self.diverges = matchee_diverges | all_arms_diverge;
@@ -499,6 +537,7 @@
&ObligationCause::new(),
expr.unwrap_or(tgt_expr),
val_ty,
+ ExprIsRead::Yes,
);
// Avoiding borrowck
@@ -536,7 +575,7 @@
unit,
yield_ty,
AllowTwoPhase::No,
- CoerceNever::Yes,
+ ExprIsRead::Yes,
);
}
resume_ty
@@ -662,70 +701,13 @@
}
}
&Expr::Box { expr } => self.infer_expr_box(expr, expected),
- Expr::UnaryOp { expr, op } => {
- let inner_ty = self.infer_expr_inner(*expr, &Expectation::none(), ExprIsRead::Yes);
- let inner_ty = self.table.try_structurally_resolve_type(inner_ty);
- // FIXME: Note down method resolution her
- match op {
- UnaryOp::Deref => {
- if let Some(deref_trait) = self.resolve_lang_trait(LangItem::Deref)
- && let Some(deref_fn) = deref_trait
- .trait_items(self.db)
- .method_by_name(&Name::new_symbol_root(sym::deref))
- {
- // FIXME: this is wrong in multiple ways, subst is empty, and we emit it even for builtin deref (note that
- // the mutability is not wrong, and will be fixed in `self.infer_mut`).
- self.write_method_resolution(tgt_expr, deref_fn, self.types.empty_args);
- }
- if let Some(derefed) = inner_ty.builtin_deref(self.db, true) {
- self.table.try_structurally_resolve_type(derefed)
- } else {
- let infer_ok = overloaded_deref_ty(&self.table, inner_ty);
- match infer_ok {
- Some(infer_ok) => self.table.register_infer_ok(infer_ok),
- None => self.err_ty(),
- }
- }
- }
- UnaryOp::Neg => {
- match inner_ty.kind() {
- // Fast path for builtins
- TyKind::Int(_)
- | TyKind::Uint(_)
- | TyKind::Float(_)
- | TyKind::Infer(InferTy::IntVar(_) | InferTy::FloatVar(_)) => inner_ty,
- // Otherwise we resolve via the std::ops::Neg trait
- _ => self
- .resolve_associated_type(inner_ty, self.resolve_ops_neg_output()),
- }
- }
- UnaryOp::Not => {
- match inner_ty.kind() {
- // Fast path for builtins
- TyKind::Bool
- | TyKind::Int(_)
- | TyKind::Uint(_)
- | TyKind::Float(_)
- | TyKind::Infer(InferTy::IntVar(_) | InferTy::FloatVar(_)) => inner_ty,
- // Otherwise we resolve via the std::ops::Not trait
- _ => self
- .resolve_associated_type(inner_ty, self.resolve_ops_not_output()),
- }
- }
- }
- }
+ Expr::UnaryOp { expr, op } => self.infer_unop_expr(*op, *expr, expected, tgt_expr),
Expr::BinaryOp { lhs, rhs, op } => match op {
- Some(BinaryOp::LogicOp(_)) => {
- let bool_ty = self.types.bool;
- self.infer_expr_coerce(*lhs, &Expectation::HasType(bool_ty), ExprIsRead::Yes);
- let lhs_diverges = self.diverges;
- self.infer_expr_coerce(*rhs, &Expectation::HasType(bool_ty), ExprIsRead::Yes);
- // Depending on the LHS' value, the RHS can never execute.
- self.diverges = lhs_diverges;
- bool_ty
+ Some(BinaryOp::Assignment { op: Some(op) }) => {
+ self.infer_assign_op_expr(tgt_expr, *op, *lhs, *rhs)
}
- Some(op) => self.infer_overloadable_binop(*lhs, *op, *rhs, tgt_expr),
- _ => self.err_ty(),
+ Some(op) => self.infer_binop_expr(tgt_expr, *op, *lhs, *rhs),
+ None => self.err_ty(),
},
&Expr::Assignment { target, value } => {
// In ordinary (non-destructuring) assignments, the type of
@@ -790,8 +772,7 @@
Expr::Range { lhs, rhs, range_type } => {
let lhs_ty =
lhs.map(|e| self.infer_expr_inner(e, &Expectation::none(), ExprIsRead::Yes));
- let rhs_expect =
- lhs_ty.as_ref().map_or_else(Expectation::none, |ty| Expectation::has_type(*ty));
+ let rhs_expect = lhs_ty.map_or_else(Expectation::none, Expectation::has_type);
let rhs_ty = rhs.map(|e| self.infer_expr(e, &rhs_expect, ExprIsRead::Yes));
let single_arg_adt = |adt, ty: Ty<'db>| {
Ty::new_adt(
@@ -833,55 +814,39 @@
}
}
Expr::Index { base, index } => {
- let base_ty = self.infer_expr_inner(*base, &Expectation::none(), ExprIsRead::Yes);
- let index_ty = self.infer_expr(*index, &Expectation::none(), ExprIsRead::Yes);
+ let base_t = self.infer_expr_no_expect(*base, ExprIsRead::Yes);
+ let idx_t = self.infer_expr_no_expect(*index, ExprIsRead::Yes);
- if let Some(index_trait) = self.resolve_lang_trait(LangItem::Index) {
- let canonicalized = self.canonicalize(base_ty);
- let receiver_adjustments = method_resolution::resolve_indexing_op(
- &mut self.table,
- canonicalized,
- index_trait,
- );
- let (self_ty, mut adj) = receiver_adjustments
- .map_or((self.err_ty(), Vec::new()), |adj| {
- adj.apply(&mut self.table, base_ty)
- });
-
- // mutability will be fixed up in `InferenceContext::infer_mut`;
- adj.push(Adjustment::borrow(
- self.interner(),
- Mutability::Not,
- self_ty,
- self.table.next_region_var(),
- ));
- self.write_expr_adj(*base, adj.into_boxed_slice());
- if let Some(func) = index_trait
- .trait_items(self.db)
- .method_by_name(&Name::new_symbol_root(sym::index))
- {
- let subst = GenericArgs::new_from_iter(
- self.interner(),
- [self_ty.into(), index_ty.into()],
+ let base_t = self.table.structurally_resolve_type(base_t);
+ match self.lookup_indexing(tgt_expr, *base, base_t, idx_t) {
+ Some((trait_index_ty, trait_element_ty)) => {
+ // two-phase not needed because index_ty is never mutable
+ self.demand_coerce(
+ *index,
+ idx_t,
+ trait_index_ty,
+ AllowTwoPhase::No,
+ ExprIsRead::Yes,
);
- self.write_method_resolution(tgt_expr, func, subst);
+ self.table.select_obligations_where_possible();
+ trait_element_ty
}
- let assoc = self.resolve_ops_index_output();
- self.resolve_associated_type_with_params(self_ty, assoc, &[index_ty.into()])
- } else {
- self.err_ty()
+ // FIXME: Report an error.
+ None => self.types.error,
}
}
Expr::Tuple { exprs, .. } => {
- let mut tys =
- match expected.only_has_type(&mut self.table).as_ref().map(|t| t.kind()) {
- Some(TyKind::Tuple(substs)) => substs
- .iter()
- .chain(repeat_with(|| self.table.next_ty_var()))
- .take(exprs.len())
- .collect::<Vec<_>>(),
- _ => (0..exprs.len()).map(|_| self.table.next_ty_var()).collect(),
- };
+ let mut tys = match expected
+ .only_has_type(&mut self.table)
+ .map(|t| self.table.try_structurally_resolve_type(t).kind())
+ {
+ Some(TyKind::Tuple(substs)) => substs
+ .iter()
+ .chain(repeat_with(|| self.table.next_ty_var()))
+ .take(exprs.len())
+ .collect::<Vec<_>>(),
+ _ => (0..exprs.len()).map(|_| self.table.next_ty_var()).collect(),
+ };
for (expr, ty) in exprs.iter().zip(tys.iter_mut()) {
*ty =
@@ -1014,7 +979,7 @@
ty,
fnptr_ty,
AllowTwoPhase::No,
- CoerceNever::Yes,
+ ExprIsRead::Yes,
);
}
TyKind::Ref(_, base_ty, mutbl) => {
@@ -1024,7 +989,7 @@
ty,
ptr_ty,
AllowTwoPhase::No,
- CoerceNever::Yes,
+ ExprIsRead::Yes,
);
}
_ => {}
@@ -1073,6 +1038,77 @@
ty
}
+ fn demand_scrutinee_type(
+ &mut self,
+ scrut: ExprId,
+ contains_ref_bindings: bool,
+ no_arms: bool,
+ scrutinee_is_read: ExprIsRead,
+ ) -> Ty<'db> {
+ // Not entirely obvious: if matches may create ref bindings, we want to
+ // use the *precise* type of the scrutinee, *not* some supertype, as
+ // the "scrutinee type" (issue #23116).
+ //
+ // arielb1 [writes here in this comment thread][c] that there
+ // is certainly *some* potential danger, e.g., for an example
+ // like:
+ //
+ // [c]: https://github.com/rust-lang/rust/pull/43399#discussion_r130223956
+ //
+ // ```
+ // let Foo(x) = f()[0];
+ // ```
+ //
+ // Then if the pattern matches by reference, we want to match
+ // `f()[0]` as a lexpr, so we can't allow it to be
+ // coerced. But if the pattern matches by value, `f()[0]` is
+ // still syntactically a lexpr, but we *do* want to allow
+ // coercions.
+ //
+ // However, *likely* we are ok with allowing coercions to
+ // happen if there are no explicit ref mut patterns - all
+ // implicit ref mut patterns must occur behind a reference, so
+ // they will have the "correct" variance and lifetime.
+ //
+ // This does mean that the following pattern would be legal:
+ //
+ // ```
+ // struct Foo(Bar);
+ // struct Bar(u32);
+ // impl Deref for Foo {
+ // type Target = Bar;
+ // fn deref(&self) -> &Bar { &self.0 }
+ // }
+ // impl DerefMut for Foo {
+ // fn deref_mut(&mut self) -> &mut Bar { &mut self.0 }
+ // }
+ // fn foo(x: &mut Foo) {
+ // {
+ // let Bar(z): &mut Bar = x;
+ // *z = 42;
+ // }
+ // assert_eq!(x.0.0, 42);
+ // }
+ // ```
+ //
+ // FIXME(tschottdorf): don't call contains_explicit_ref_binding, which
+ // is problematic as the HIR is being scraped, but ref bindings may be
+ // implicit after #42640. We need to make sure that pat_adjustments
+ // (once introduced) is populated by the time we get here.
+ //
+ // See #44848.
+ if contains_ref_bindings || no_arms {
+ self.infer_expr_no_expect(scrut, scrutinee_is_read)
+ } else {
+ // ...but otherwise we want to use any supertype of the
+ // scrutinee. This is sort of a workaround, see note (*) in
+ // `check_pat` for some details.
+ let scrut_ty = self.table.next_ty_var();
+ self.infer_expr_coerce_never(scrut, &Expectation::HasType(scrut_ty), scrutinee_is_read);
+ scrut_ty
+ }
+ }
+
fn infer_expr_path(&mut self, path: &Path, id: ExprOrPatId, scope_id: ExprId) -> Ty<'db> {
let g = self.resolver.update_to_inner_scope(self.db, self.owner, scope_id);
let ty = match self.infer_path(path, id) {
@@ -1089,6 +1125,47 @@
ty
}
+ fn infer_unop_expr(
+ &mut self,
+ unop: UnaryOp,
+ oprnd: ExprId,
+ expected: &Expectation<'db>,
+ expr: ExprId,
+ ) -> Ty<'db> {
+ let expected_inner = match unop {
+ UnaryOp::Not | UnaryOp::Neg => expected,
+ UnaryOp::Deref => &Expectation::None,
+ };
+ let mut oprnd_t = self.infer_expr_inner(oprnd, expected_inner, ExprIsRead::Yes);
+
+ oprnd_t = self.table.structurally_resolve_type(oprnd_t);
+ match unop {
+ UnaryOp::Deref => {
+ if let Some(ty) = self.lookup_derefing(expr, oprnd, oprnd_t) {
+ oprnd_t = ty;
+ } else {
+ // FIXME: Report an error.
+ oprnd_t = self.types.error;
+ }
+ }
+ UnaryOp::Not => {
+ let result = self.infer_user_unop(expr, oprnd_t, unop);
+ // If it's builtin, we can reuse the type, this helps inference.
+ if !(oprnd_t.is_integral() || oprnd_t.kind() == TyKind::Bool) {
+ oprnd_t = result;
+ }
+ }
+ UnaryOp::Neg => {
+ let result = self.infer_user_unop(expr, oprnd_t, unop);
+ // If it's builtin, we can reuse the type, this helps inference.
+ if !oprnd_t.is_numeric() {
+ oprnd_t = result;
+ }
+ }
+ }
+ oprnd_t
+ }
+
fn infer_async_block(
&mut self,
tgt_expr: ExprId,
@@ -1106,8 +1183,7 @@
let (_, inner_ty) = self.with_breakable_ctx(BreakableKind::Border, None, None, |this| {
let ty = this.infer_block(tgt_expr, *id, statements, *tail, None, expected);
if let Some(target) = expected.only_has_type(&mut this.table) {
- match this.coerce(tgt_expr.into(), ty, target, AllowTwoPhase::No, CoerceNever::Yes)
- {
+ match this.coerce(tgt_expr.into(), ty, target, AllowTwoPhase::No, ExprIsRead::Yes) {
Ok(res) => res,
Err(_) => {
this.result
@@ -1220,7 +1296,10 @@
}
fn infer_expr_array(&mut self, array: &Array, expected: &Expectation<'db>) -> Ty<'db> {
- let elem_ty = match expected.to_option(&mut self.table).as_ref().map(|t| t.kind()) {
+ let elem_ty = match expected
+ .to_option(&mut self.table)
+ .map(|t| self.table.try_structurally_resolve_type(t).kind())
+ {
Some(TyKind::Array(st, _) | TyKind::Slice(st)) => st,
_ => self.table.next_ty_var(),
};
@@ -1236,7 +1315,13 @@
let mut coerce = CoerceMany::with_coercion_sites(elem_ty, elements);
for &expr in elements.iter() {
let cur_elem_ty = self.infer_expr_inner(expr, &expected, ExprIsRead::Yes);
- coerce.coerce(self, &ObligationCause::new(), expr, cur_elem_ty);
+ coerce.coerce(
+ self,
+ &ObligationCause::new(),
+ expr,
+ cur_elem_ty,
+ ExprIsRead::Yes,
+ );
}
(
coerce.complete(self),
@@ -1250,14 +1335,18 @@
ExprIsRead::Yes,
);
let usize = self.types.usize;
- match self.body[repeat] {
+ let len = match self.body[repeat] {
Expr::Underscore => {
self.write_expr_ty(repeat, usize);
+ self.table.next_const_var()
}
- _ => _ = self.infer_expr(repeat, &Expectation::HasType(usize), ExprIsRead::Yes),
- }
+ _ => {
+ self.infer_expr(repeat, &Expectation::HasType(usize), ExprIsRead::Yes);
+ consteval::eval_to_const(repeat, self)
+ }
+ };
- (elem_ty, consteval::eval_to_const(repeat, self))
+ (elem_ty, len)
}
};
// Try to evaluate unevaluated constant, and insert variable if is not possible.
@@ -1274,7 +1363,7 @@
let return_expr_ty =
self.infer_expr_inner(expr, &Expectation::HasType(ret_ty), ExprIsRead::Yes);
let mut coerce_many = self.return_coercion.take().unwrap();
- coerce_many.coerce(self, &ObligationCause::new(), expr, return_expr_ty);
+ coerce_many.coerce(self, &ObligationCause::new(), expr, return_expr_ty, ExprIsRead::Yes);
self.return_coercion = Some(coerce_many);
}
@@ -1285,7 +1374,13 @@
self.infer_return(expr);
} else {
let mut coerce = self.return_coercion.take().unwrap();
- coerce.coerce_forced_unit(self, ret, &ObligationCause::new(), true);
+ coerce.coerce_forced_unit(
+ self,
+ ret,
+ &ObligationCause::new(),
+ true,
+ ExprIsRead::Yes,
+ );
self.return_coercion = Some(coerce);
}
}
@@ -1350,103 +1445,6 @@
}
}
- fn infer_overloadable_binop(
- &mut self,
- lhs: ExprId,
- op: BinaryOp,
- rhs: ExprId,
- tgt_expr: ExprId,
- ) -> Ty<'db> {
- let lhs_expectation = Expectation::none();
- let is_read = if matches!(op, BinaryOp::Assignment { .. }) {
- ExprIsRead::Yes
- } else {
- ExprIsRead::No
- };
- let lhs_ty = self.infer_expr(lhs, &lhs_expectation, is_read);
- let rhs_ty = self.table.next_ty_var();
-
- let trait_func = lang_items_for_bin_op(op).and_then(|(name, lang_item)| {
- let trait_id = self.resolve_lang_item(lang_item)?.as_trait()?;
- let func = trait_id.trait_items(self.db).method_by_name(&name)?;
- Some((trait_id, func))
- });
- let func = match trait_func {
- Some((_, it)) => it,
- None => {
- // HACK: `rhs_ty` is a general inference variable with no clue at all at this
- // point. Passing `lhs_ty` as both operands just to check if `lhs_ty` is a builtin
- // type applicable to `op`.
- let ret_ty = if self.is_builtin_binop(lhs_ty, lhs_ty, op) {
- // Assume both operands are builtin so we can continue inference. No guarantee
- // on the correctness, rustc would complain as necessary lang items don't seem
- // to exist anyway.
- self.enforce_builtin_binop_types(lhs_ty, rhs_ty, op)
- } else {
- self.err_ty()
- };
-
- self.infer_expr_coerce(rhs, &Expectation::has_type(rhs_ty), ExprIsRead::Yes);
-
- return ret_ty;
- }
- };
-
- // HACK: We can use this substitution for the function because the function itself doesn't
- // have its own generic parameters.
- let args = GenericArgs::new_from_iter(self.interner(), [lhs_ty.into(), rhs_ty.into()]);
-
- self.write_method_resolution(tgt_expr, func, args);
-
- let method_ty = self.db.value_ty(func.into()).unwrap().instantiate(self.interner(), args);
- self.register_obligations_for_call(method_ty);
-
- self.infer_expr_coerce(rhs, &Expectation::has_type(rhs_ty), ExprIsRead::Yes);
-
- let ret_ty = match method_ty.callable_sig(self.interner()) {
- Some(sig) => {
- let sig = sig.skip_binder();
- let p_left = sig.inputs_and_output.as_slice()[0];
- if matches!(op, BinaryOp::CmpOp(..) | BinaryOp::Assignment { .. })
- && let TyKind::Ref(lt, _, mtbl) = p_left.kind()
- {
- self.write_expr_adj(
- lhs,
- Box::new([Adjustment {
- kind: Adjust::Borrow(AutoBorrow::Ref(lt, mtbl)),
- target: p_left,
- }]),
- );
- }
- let p_right = sig.inputs_and_output.as_slice()[1];
- if matches!(op, BinaryOp::CmpOp(..))
- && let TyKind::Ref(lt, _, mtbl) = p_right.kind()
- {
- self.write_expr_adj(
- rhs,
- Box::new([Adjustment {
- kind: Adjust::Borrow(AutoBorrow::Ref(lt, mtbl)),
- target: p_right,
- }]),
- );
- }
- sig.output()
- }
- None => self.err_ty(),
- };
-
- let ret_ty = self.process_remote_user_written_ty(ret_ty);
-
- if self.is_builtin_binop(lhs_ty, rhs_ty, op) {
- // use knowledge of built-in binary ops, which can sometimes help inference
- let builtin_ret = self.enforce_builtin_binop_types(lhs_ty, rhs_ty, op);
- self.unify(builtin_ret, ret_ty);
- builtin_ret
- } else {
- ret_ty
- }
- }
-
fn infer_block(
&mut self,
expr: ExprId,
@@ -1548,20 +1546,13 @@
// we don't even make an attempt at coercion
this.table.new_maybe_never_var()
} else if let Some(t) = expected.only_has_type(&mut this.table) {
- let coerce_never = if this
- .expr_guaranteed_to_constitute_read_for_never(expr, ExprIsRead::Yes)
- {
- CoerceNever::Yes
- } else {
- CoerceNever::No
- };
if this
.coerce(
expr.into(),
this.types.unit,
t,
AllowTwoPhase::No,
- coerce_never,
+ ExprIsRead::Yes,
)
.is_err()
{
@@ -1591,7 +1582,7 @@
name: &Name,
) -> Option<(Ty<'db>, Either<FieldId, TupleFieldId>, Vec<Adjustment<'db>>, bool)> {
let interner = self.interner();
- let mut autoderef = self.table.autoderef(receiver_ty);
+ let mut autoderef = self.table.autoderef_with_tracking(receiver_ty);
let mut private_field = None;
let res = autoderef.by_ref().find_map(|(derefed_ty, _)| {
let (field_id, parameters) = match derefed_ty.kind() {
@@ -1640,14 +1631,16 @@
Some(match res {
Some((field_id, ty)) => {
- let adjustments = autoderef.adjust_steps();
+ let adjustments =
+ self.table.register_infer_ok(autoderef.adjust_steps_as_infer_ok());
let ty = self.process_remote_user_written_ty(ty);
(ty, field_id, adjustments, true)
}
None => {
let (field_id, subst) = private_field?;
- let adjustments = autoderef.adjust_steps();
+ let adjustments =
+ self.table.register_infer_ok(autoderef.adjust_steps_as_infer_ok());
let ty = self.db.field_types(field_id.parent)[field_id.local_id]
.instantiate(self.interner(), subst);
let ty = self.process_remote_user_written_ty(ty);
@@ -1666,6 +1659,7 @@
) -> Ty<'db> {
// Field projections don't constitute reads.
let receiver_ty = self.infer_expr_inner(receiver, &Expectation::none(), ExprIsRead::No);
+ let receiver_ty = self.table.structurally_resolve_type(receiver_ty);
if name.is_missing() {
// Bail out early, don't even try to look up field. Also, we don't issue an unresolved
@@ -1689,46 +1683,39 @@
None => {
// no field found, lets attempt to resolve it like a function so that IDE things
// work out while people are typing
- let canonicalized_receiver = self.canonicalize(receiver_ty);
- let resolved = method_resolution::lookup_method(
- &canonicalized_receiver,
- &mut self.table,
- Self::get_traits_in_scope(&self.resolver, &self.traits_in_scope)
- .as_ref()
- .left_or_else(|&it| it),
- VisibleFromModule::Filter(self.resolver.module()),
- name,
+ let resolved = self.lookup_method_including_private(
+ receiver_ty,
+ name.clone(),
+ None,
+ receiver,
+ tgt_expr,
);
self.push_diagnostic(InferenceDiagnostic::UnresolvedField {
expr: tgt_expr,
receiver: receiver_ty,
name: name.clone(),
- method_with_same_name_exists: resolved.is_some(),
+ method_with_same_name_exists: resolved.is_ok(),
});
match resolved {
- Some((adjust, func, _)) => {
- let (ty, adjustments) = adjust.apply(&mut self.table, receiver_ty);
- let args = self.substs_for_method_call(tgt_expr, func.into(), None);
- self.write_expr_adj(receiver, adjustments.into_boxed_slice());
- self.write_method_resolution(tgt_expr, func, args);
-
- self.check_method_call(
- tgt_expr,
- &[],
- self.db
- .value_ty(func.into())
- .unwrap()
- .instantiate(self.interner(), args),
- ty,
- expected,
- )
+ Ok((func, _is_visible)) => {
+ self.check_method_call(tgt_expr, &[], func.sig, receiver_ty, expected)
}
- None => self.err_ty(),
+ Err(_) => self.err_ty(),
}
}
}
}
+ fn instantiate_erroneous_method(&mut self, def_id: FunctionId) -> MethodCallee<'db> {
+ // FIXME: Using fresh infer vars for the method args isn't optimal,
+ // we can do better by going through the full probe/confirm machinery.
+ let args = self.table.fresh_args_for_item(def_id.into());
+ let sig = self.db.callable_item_signature(def_id.into()).instantiate(self.interner(), args);
+ let sig =
+ self.infcx().instantiate_binder_with_fresh_vars(BoundRegionConversionTime::FnCall, sig);
+ MethodCallee { def_id, args, sig }
+ }
+
fn infer_call(
&mut self,
tgt_expr: ExprId,
@@ -1737,13 +1724,14 @@
expected: &Expectation<'db>,
) -> Ty<'db> {
let callee_ty = self.infer_expr(callee, &Expectation::none(), ExprIsRead::Yes);
+ let callee_ty = self.table.try_structurally_resolve_type(callee_ty);
let interner = self.interner();
- let mut derefs = self.table.autoderef(callee_ty);
+ let mut derefs = InferenceContextAutoderef::new_from_inference_context(self, callee_ty);
let (res, derefed_callee) = loop {
let Some((callee_deref_ty, _)) = derefs.next() else {
break (None, callee_ty);
};
- if let Some(res) = derefs.table.callable_sig(callee_deref_ty, args.len()) {
+ if let Some(res) = derefs.ctx().table.callable_sig(callee_deref_ty, args.len()) {
break (Some(res), callee_deref_ty);
}
};
@@ -1753,7 +1741,8 @@
|| res.is_none();
let (param_tys, ret_ty) = match res {
Some((func, params, ret_ty)) => {
- let mut adjustments = derefs.adjust_steps();
+ let infer_ok = derefs.adjust_steps_as_infer_ok();
+ let mut adjustments = self.table.register_infer_ok(infer_ok);
if let Some(fn_x) = func {
self.write_fn_trait_method_resolution(
fn_x,
@@ -1819,7 +1808,7 @@
indices_to_skip,
is_varargs,
);
- self.table.normalize_associated_types_in(ret_ty)
+ ret_ty
}
fn infer_method_call(
@@ -1834,56 +1823,26 @@
let receiver_ty = self.infer_expr_inner(receiver, &Expectation::none(), ExprIsRead::Yes);
let receiver_ty = self.table.try_structurally_resolve_type(receiver_ty);
- if matches!(receiver_ty.kind(), TyKind::Error(_) | TyKind::Infer(InferTy::TyVar(_))) {
- // Don't probe on error type, or on a fully unresolved infer var.
- // FIXME: Emit an error if we're probing on an infer var (type annotations needed).
- for &arg in args {
- // Make sure we infer and record the arguments.
- self.infer_expr_no_expect(arg, ExprIsRead::Yes);
- }
- return receiver_ty;
- }
-
- let canonicalized_receiver = self.canonicalize(receiver_ty);
-
- let resolved = method_resolution::lookup_method(
- &canonicalized_receiver,
- &mut self.table,
- Self::get_traits_in_scope(&self.resolver, &self.traits_in_scope)
- .as_ref()
- .left_or_else(|&it| it),
- VisibleFromModule::Filter(self.resolver.module()),
- method_name,
+ let resolved = self.lookup_method_including_private(
+ receiver_ty,
+ method_name.clone(),
+ generic_args,
+ receiver,
+ tgt_expr,
);
match resolved {
- Some((adjust, func, visible)) => {
+ Ok((func, visible)) => {
if !visible {
self.push_diagnostic(InferenceDiagnostic::PrivateAssocItem {
id: tgt_expr.into(),
- item: func.into(),
+ item: func.def_id.into(),
})
}
-
- let (ty, adjustments) = adjust.apply(&mut self.table, receiver_ty);
- self.write_expr_adj(receiver, adjustments.into_boxed_slice());
-
- let gen_args = self.substs_for_method_call(tgt_expr, func.into(), generic_args);
- self.write_method_resolution(tgt_expr, func, gen_args);
- let interner = DbInterner::new_with(self.db, None, None);
- self.check_method_call(
- tgt_expr,
- args,
- self.db
- .value_ty(func.into())
- .expect("we have a function def")
- .instantiate(interner, gen_args),
- ty,
- expected,
- )
+ self.check_method_call(tgt_expr, args, func.sig, receiver_ty, expected)
}
// Failed to resolve, report diagnostic and try to resolve as call to field access or
// assoc function
- None => {
+ Err(_) => {
let field_with_same_name_exists = match self.lookup_field(receiver_ty, method_name)
{
Some((ty, field_id, adjustments, _public)) => {
@@ -1894,65 +1853,65 @@
None => None,
};
- let assoc_func_with_same_name = method_resolution::iterate_method_candidates(
- &canonicalized_receiver,
- &mut self.table,
- Self::get_traits_in_scope(&self.resolver, &self.traits_in_scope)
- .as_ref()
- .left_or_else(|&it| it),
- VisibleFromModule::Filter(self.resolver.module()),
- Some(method_name),
- method_resolution::LookupMode::Path,
- |_ty, item, visible| match item {
- hir_def::AssocItemId::FunctionId(function_id) if visible => {
- Some(function_id)
- }
- _ => None,
- },
- );
+ let assoc_func_with_same_name = self.with_method_resolution(|ctx| {
+ if !matches!(
+ receiver_ty.kind(),
+ TyKind::Infer(InferTy::TyVar(_)) | TyKind::Error(_)
+ ) {
+ ctx.probe_for_name(
+ method_resolution::Mode::Path,
+ method_name.clone(),
+ receiver_ty,
+ )
+ } else {
+ Err(MethodError::ErrorReported)
+ }
+ });
+ let assoc_func_with_same_name = match assoc_func_with_same_name {
+ Ok(method_resolution::Pick {
+ item: CandidateId::FunctionId(def_id), ..
+ })
+ | Err(MethodError::PrivateMatch(method_resolution::Pick {
+ item: CandidateId::FunctionId(def_id),
+ ..
+ })) => Some(self.instantiate_erroneous_method(def_id)),
+ _ => None,
+ };
self.push_diagnostic(InferenceDiagnostic::UnresolvedMethodCall {
expr: tgt_expr,
receiver: receiver_ty,
name: method_name.clone(),
field_with_same_name: field_with_same_name_exists,
- assoc_func_with_same_name,
+ assoc_func_with_same_name: assoc_func_with_same_name.map(|it| it.def_id),
});
let recovered = match assoc_func_with_same_name {
- Some(f) => {
- let args = self.substs_for_method_call(tgt_expr, f.into(), generic_args);
- let interner = DbInterner::new_with(self.db, None, None);
- let f = self
- .db
- .value_ty(f.into())
- .expect("we have a function def")
- .instantiate(interner, args);
- let sig = f.callable_sig(self.interner()).expect("we have a function def");
- Some((f, sig, true))
- }
+ Some(it) => Some((
+ Ty::new_fn_def(
+ self.interner(),
+ CallableDefId::FunctionId(it.def_id).into(),
+ it.args,
+ ),
+ it.sig,
+ true,
+ )),
None => field_with_same_name_exists.and_then(|field_ty| {
let callable_sig = field_ty.callable_sig(self.interner())?;
- Some((field_ty, callable_sig, false))
+ Some((field_ty, callable_sig.skip_binder(), false))
}),
};
match recovered {
- Some((callee_ty, sig, strip_first)) => {
- let sig = sig.skip_binder();
- self.check_call(
- tgt_expr,
- args,
- callee_ty,
- sig.inputs_and_output
- .inputs()
- .get(strip_first as usize..)
- .unwrap_or(&[]),
- sig.output(),
- &[],
- true,
- expected,
- )
- }
+ Some((callee_ty, sig, strip_first)) => self.check_call(
+ tgt_expr,
+ args,
+ callee_ty,
+ sig.inputs_and_output.inputs().get(strip_first as usize..).unwrap_or(&[]),
+ sig.output(),
+ &[],
+ true,
+ expected,
+ ),
None => {
for &arg in args.iter() {
self.infer_expr_no_expect(arg, ExprIsRead::Yes);
@@ -1968,38 +1927,20 @@
&mut self,
tgt_expr: ExprId,
args: &[ExprId],
- method_ty: Ty<'db>,
+ sig: FnSig<'db>,
receiver_ty: Ty<'db>,
expected: &Expectation<'db>,
) -> Ty<'db> {
- self.register_obligations_for_call(method_ty);
- let ((formal_receiver_ty, param_tys), ret_ty, is_varargs) =
- match method_ty.callable_sig(self.interner()) {
- Some(sig) => {
- let sig = sig.skip_binder();
- (
- if !sig.inputs_and_output.inputs().is_empty() {
- (
- sig.inputs_and_output.as_slice()[0],
- sig.inputs_and_output.inputs()[1..].to_vec(),
- )
- } else {
- (self.types.error, Vec::new())
- },
- sig.output(),
- sig.c_variadic,
- )
- }
- None => {
- let formal_receiver_ty = self.table.next_ty_var();
- let ret_ty = self.table.next_ty_var();
- ((formal_receiver_ty, Vec::new()), ret_ty, true)
- }
- };
+ let (formal_receiver_ty, param_tys) = if !sig.inputs_and_output.inputs().is_empty() {
+ (sig.inputs_and_output.as_slice()[0], &sig.inputs_and_output.inputs()[1..])
+ } else {
+ (self.types.error, &[] as _)
+ };
+ let ret_ty = sig.output();
self.table.unify(formal_receiver_ty, receiver_ty);
- self.check_call_arguments(tgt_expr, ¶m_tys, ret_ty, expected, args, &[], is_varargs);
- self.table.normalize_associated_types_in(ret_ty)
+ self.check_call_arguments(tgt_expr, param_tys, ret_ty, expected, args, &[], sig.c_variadic);
+ ret_ty
}
/// Generic function that factors out common logic from function calls,
@@ -2110,20 +2051,13 @@
// fulfillment error to be more accurate.
let coerced_ty = this.table.resolve_vars_with_obligations(coerced_ty);
- let coerce_never = if this
- .expr_guaranteed_to_constitute_read_for_never(provided_arg, ExprIsRead::Yes)
- {
- CoerceNever::Yes
- } else {
- CoerceNever::No
- };
let coerce_error = this
.coerce(
provided_arg.into(),
checked_ty,
coerced_ty,
AllowTwoPhase::Yes,
- coerce_never,
+ ExprIsRead::Yes,
)
.err();
if coerce_error.is_some() {
@@ -2204,144 +2138,19 @@
if !args_count_matches {}
}
- fn substs_for_method_call(
- &mut self,
- expr: ExprId,
- def: GenericDefId,
- generic_args: Option<&HirGenericArgs>,
- ) -> GenericArgs<'db> {
- struct LowererCtx<'a, 'b, 'db> {
- ctx: &'a mut InferenceContext<'b, 'db>,
- expr: ExprId,
- }
-
- impl<'db> GenericArgsLowerer<'db> for LowererCtx<'_, '_, 'db> {
- fn report_len_mismatch(
- &mut self,
- def: GenericDefId,
- provided_count: u32,
- expected_count: u32,
- kind: IncorrectGenericsLenKind,
- ) {
- self.ctx.push_diagnostic(InferenceDiagnostic::MethodCallIncorrectGenericsLen {
- expr: self.expr,
- provided_count,
- expected_count,
- kind,
- def,
- });
- }
-
- fn report_arg_mismatch(
- &mut self,
- param_id: GenericParamId,
- arg_idx: u32,
- has_self_arg: bool,
- ) {
- self.ctx.push_diagnostic(InferenceDiagnostic::MethodCallIncorrectGenericsOrder {
- expr: self.expr,
- param_id,
- arg_idx,
- has_self_arg,
- });
- }
-
- fn provided_kind(
- &mut self,
- param_id: GenericParamId,
- param: GenericParamDataRef<'_>,
- arg: &HirGenericArg,
- ) -> GenericArg<'db> {
- match (param, arg) {
- (
- GenericParamDataRef::LifetimeParamData(_),
- HirGenericArg::Lifetime(lifetime),
- ) => self.ctx.make_body_lifetime(*lifetime).into(),
- (GenericParamDataRef::TypeParamData(_), HirGenericArg::Type(type_ref)) => {
- self.ctx.make_body_ty(*type_ref).into()
- }
- (GenericParamDataRef::ConstParamData(_), HirGenericArg::Const(konst)) => {
- let GenericParamId::ConstParamId(const_id) = param_id else {
- unreachable!("non-const param ID for const param");
- };
- let const_ty = self.ctx.db.const_param_ty_ns(const_id);
- self.ctx.make_body_const(*konst, const_ty).into()
- }
- _ => unreachable!("unmatching param kinds were passed to `provided_kind()`"),
- }
- }
-
- fn provided_type_like_const(
- &mut self,
- const_ty: Ty<'db>,
- arg: TypeLikeConst<'_>,
- ) -> Const<'db> {
- match arg {
- TypeLikeConst::Path(path) => self.ctx.make_path_as_body_const(path, const_ty),
- TypeLikeConst::Infer => self.ctx.table.next_const_var(),
- }
- }
-
- fn inferred_kind(
- &mut self,
- _def: GenericDefId,
- param_id: GenericParamId,
- _param: GenericParamDataRef<'_>,
- _infer_args: bool,
- _preceding_args: &[GenericArg<'db>],
- ) -> GenericArg<'db> {
- // Always create an inference var, even when `infer_args == false`. This helps with diagnostics,
- // and I think it's also required in the presence of `impl Trait` (that must be inferred).
- self.ctx.table.next_var_for_param(param_id)
- }
-
- fn parent_arg(&mut self, param_id: GenericParamId) -> GenericArg<'db> {
- self.ctx.table.next_var_for_param(param_id)
- }
-
- fn report_elided_lifetimes_in_path(
- &mut self,
- _def: GenericDefId,
- _expected_count: u32,
- _hard_error: bool,
- ) {
- unreachable!("we set `LifetimeElisionKind::Infer`")
- }
-
- fn report_elision_failure(&mut self, _def: GenericDefId, _expected_count: u32) {
- unreachable!("we set `LifetimeElisionKind::Infer`")
- }
-
- fn report_missing_lifetime(&mut self, _def: GenericDefId, _expected_count: u32) {
- unreachable!("we set `LifetimeElisionKind::Infer`")
- }
- }
-
- substs_from_args_and_bindings(
- self.db,
- self.body,
- generic_args,
- def,
- true,
- LifetimeElisionKind::Infer,
- false,
- None,
- &mut LowererCtx { ctx: self, expr },
- )
- }
-
fn register_obligations_for_call(&mut self, callable_ty: Ty<'db>) {
let callable_ty = self.table.try_structurally_resolve_type(callable_ty);
if let TyKind::FnDef(fn_def, parameters) = callable_ty.kind() {
- let generic_predicates =
- self.db.generic_predicates(GenericDefId::from_callable(self.db, fn_def.0));
- if let Some(predicates) = generic_predicates.instantiate(self.interner(), parameters) {
- let interner = self.interner();
- let param_env = self.table.trait_env.env;
- self.table.register_predicates(predicates.map(|predicate| {
- Obligation::new(interner, ObligationCause::new(), param_env, predicate)
- }));
- }
+ let generic_predicates = GenericPredicates::query_all(
+ self.db,
+ GenericDefId::from_callable(self.db, fn_def.0),
+ );
+ let param_env = self.table.trait_env.env;
+ self.table.register_predicates(clauses_as_obligations(
+ generic_predicates.iter_instantiated_copied(self.interner(), parameters.as_slice()),
+ ObligationCause::new(),
+ param_env,
+ ));
// add obligation for trait implementation, if this is a trait method
match fn_def.0 {
CallableDefId::FunctionId(f) => {
@@ -2410,122 +2219,6 @@
indices
}
- /// Dereferences a single level of immutable referencing.
- fn deref_ty_if_possible(&mut self, ty: Ty<'db>) -> Ty<'db> {
- let ty = self.table.try_structurally_resolve_type(ty);
- match ty.kind() {
- TyKind::Ref(_, inner, Mutability::Not) => {
- self.table.try_structurally_resolve_type(inner)
- }
- _ => ty,
- }
- }
-
- /// Enforces expectations on lhs type and rhs type depending on the operator and returns the
- /// output type of the binary op.
- fn enforce_builtin_binop_types(&mut self, lhs: Ty<'db>, rhs: Ty<'db>, op: BinaryOp) -> Ty<'db> {
- // Special-case a single layer of referencing, so that things like `5.0 + &6.0f32` work (See rust-lang/rust#57447).
- let lhs = self.deref_ty_if_possible(lhs);
- let rhs = self.deref_ty_if_possible(rhs);
-
- let (op, is_assign) = match op {
- BinaryOp::Assignment { op: Some(inner) } => (BinaryOp::ArithOp(inner), true),
- _ => (op, false),
- };
-
- let output_ty = match op {
- BinaryOp::LogicOp(_) => {
- let bool_ = self.types.bool;
- self.unify(lhs, bool_);
- self.unify(rhs, bool_);
- bool_
- }
-
- BinaryOp::ArithOp(ArithOp::Shl | ArithOp::Shr) => {
- // result type is same as LHS always
- lhs
- }
-
- BinaryOp::ArithOp(_) => {
- // LHS, RHS, and result will have the same type
- self.unify(lhs, rhs);
- lhs
- }
-
- BinaryOp::CmpOp(_) => {
- // LHS and RHS will have the same type
- self.unify(lhs, rhs);
- self.types.bool
- }
-
- BinaryOp::Assignment { op: None } => {
- stdx::never!("Simple assignment operator is not binary op.");
- lhs
- }
-
- BinaryOp::Assignment { .. } => unreachable!("handled above"),
- };
-
- if is_assign { self.types.unit } else { output_ty }
- }
-
- fn is_builtin_binop(&mut self, lhs: Ty<'db>, rhs: Ty<'db>, op: BinaryOp) -> bool {
- // Special-case a single layer of referencing, so that things like `5.0 + &6.0f32` work (See rust-lang/rust#57447).
- let lhs = self.deref_ty_if_possible(lhs);
- let rhs = self.deref_ty_if_possible(rhs);
-
- let op = match op {
- BinaryOp::Assignment { op: Some(inner) } => BinaryOp::ArithOp(inner),
- _ => op,
- };
-
- match op {
- BinaryOp::LogicOp(_) => true,
-
- BinaryOp::ArithOp(ArithOp::Shl | ArithOp::Shr) => {
- lhs.is_integral() && rhs.is_integral()
- }
-
- BinaryOp::ArithOp(
- ArithOp::Add | ArithOp::Sub | ArithOp::Mul | ArithOp::Div | ArithOp::Rem,
- ) => {
- lhs.is_integral() && rhs.is_integral()
- || lhs.is_floating_point() && rhs.is_floating_point()
- }
-
- BinaryOp::ArithOp(ArithOp::BitAnd | ArithOp::BitOr | ArithOp::BitXor) => {
- lhs.is_integral() && rhs.is_integral()
- || lhs.is_floating_point() && rhs.is_floating_point()
- || matches!((lhs.kind(), rhs.kind()), (TyKind::Bool, TyKind::Bool))
- }
-
- BinaryOp::CmpOp(_) => {
- let is_scalar = |kind| {
- matches!(
- kind,
- TyKind::Bool
- | TyKind::Char
- | TyKind::Int(_)
- | TyKind::Uint(_)
- | TyKind::Float(_)
- | TyKind::FnDef(..)
- | TyKind::FnPtr(..)
- | TyKind::RawPtr(..)
- | TyKind::Infer(InferTy::IntVar(_) | InferTy::FloatVar(_))
- )
- };
- is_scalar(lhs.kind()) && is_scalar(rhs.kind())
- }
-
- BinaryOp::Assignment { op: None } => {
- stdx::never!("Simple assignment operator is not binary op.");
- false
- }
-
- BinaryOp::Assignment { .. } => unreachable!("handled above"),
- }
- }
-
pub(super) fn with_breakable_ctx<T>(
&mut self,
kind: BreakableKind,
diff --git a/crates/hir-ty/src/infer/fallback.rs b/crates/hir-ty/src/infer/fallback.rs
index b1c9146..d0ce8cb 100644
--- a/crates/hir-ty/src/infer/fallback.rs
+++ b/crates/hir-ty/src/infer/fallback.rs
@@ -160,7 +160,7 @@
};
debug!("fallback_if_possible(ty={:?}): defaulting to `{:?}`", ty, fallback);
- self.demand_eqtype(ty, fallback);
+ _ = self.demand_eqtype_fixme_no_diag(ty, fallback);
true
}
diff --git a/crates/hir-ty/src/infer/mutability.rs b/crates/hir-ty/src/infer/mutability.rs
index 71a9c94..a257547 100644
--- a/crates/hir-ty/src/infer/mutability.rs
+++ b/crates/hir-ty/src/infer/mutability.rs
@@ -1,25 +1,15 @@
//! Finds if an expression is an immutable context or a mutable context, which is used in selecting
//! between `Deref` and `DerefMut` or `Index` and `IndexMut` or similar.
-use hir_def::{
- hir::{
- Array, AsmOperand, BinaryOp, BindingAnnotation, Expr, ExprId, Pat, PatId, Statement,
- UnaryOp,
- },
- lang_item::LangItem,
+use hir_def::hir::{
+ Array, AsmOperand, BinaryOp, BindingAnnotation, Expr, ExprId, Pat, PatId, Statement, UnaryOp,
};
-use hir_expand::name::Name;
-use intern::sym;
use rustc_ast_ir::Mutability;
-use rustc_type_ir::inherent::IntoKind;
-use crate::next_solver::infer::traits::{Obligation, ObligationCause};
-use crate::next_solver::{GenericArgs, TraitRef};
use crate::{
- Adjust, Adjustment, AutoBorrow, OverloadedDeref,
- infer::{Expectation, InferenceContext, expr::ExprIsRead},
+ Adjust, AutoBorrow, OverloadedDeref,
+ infer::{InferenceContext, place_op::PlaceOp},
lower::lower_mutability,
- next_solver::TyKind,
};
impl<'db> InferenceContext<'_, 'db> {
@@ -28,13 +18,33 @@
}
fn infer_mut_expr(&mut self, tgt_expr: ExprId, mut mutability: Mutability) {
+ let krate = self.krate();
if let Some(adjustments) = self.result.expr_adjustments.get_mut(&tgt_expr) {
- for adj in adjustments.iter_mut().rev() {
+ let mut adjustments = adjustments.iter_mut().rev().peekable();
+ while let Some(adj) = adjustments.next() {
match &mut adj.kind {
Adjust::NeverToAny | Adjust::Deref(None) | Adjust::Pointer(_) => (),
- Adjust::Deref(Some(d)) => *d = OverloadedDeref(Some(mutability)),
+ Adjust::Deref(Some(d)) => {
+ if mutability == Mutability::Mut {
+ let source_ty = match adjustments.peek() {
+ Some(prev_adj) => prev_adj.target,
+ None => self.result.type_of_expr[tgt_expr],
+ };
+ if let Some(infer_ok) = Self::try_mutable_overloaded_place_op(
+ &self.table,
+ krate,
+ source_ty,
+ None,
+ PlaceOp::Deref,
+ ) {
+ self.table.register_predicates(infer_ok.obligations);
+ }
+ *d = OverloadedDeref(Some(mutability));
+ }
+ }
Adjust::Borrow(b) => match b {
- AutoBorrow::Ref(_, m) | AutoBorrow::RawPtr(m) => mutability = *m,
+ AutoBorrow::Ref(m) => mutability = (*m).into(),
+ AutoBorrow::RawPtr(m) => mutability = *m,
},
}
}
@@ -128,75 +138,15 @@
self.infer_mut_not_expr_iter(fields.iter().map(|it| it.expr).chain(*spread))
}
&Expr::Index { base, index } => {
- if mutability == Mutability::Mut
- && let Some((f, _)) = self.result.method_resolutions.get_mut(&tgt_expr)
- && let Some(index_trait) =
- LangItem::IndexMut.resolve_trait(self.db, self.table.trait_env.krate)
- && let Some(index_fn) = index_trait
- .trait_items(self.db)
- .method_by_name(&Name::new_symbol_root(sym::index_mut))
- {
- *f = index_fn;
- let mut base_ty = None;
- let base_adjustments =
- self.result.expr_adjustments.get_mut(&base).and_then(|it| it.last_mut());
- if let Some(Adjustment {
- kind: Adjust::Borrow(AutoBorrow::Ref(_, mutability)),
- target,
- }) = base_adjustments
- {
- if let TyKind::Ref(_, ty, _) = target.kind() {
- base_ty = Some(ty);
- }
- *mutability = Mutability::Mut;
- }
-
- // Apply `IndexMut` obligation for non-assignee expr
- if let Some(base_ty) = base_ty {
- let index_ty = if let Some(ty) = self.result.type_of_expr.get(index) {
- *ty
- } else {
- self.infer_expr(index, &Expectation::none(), ExprIsRead::Yes)
- };
- let trait_ref = TraitRef::new(
- self.interner(),
- index_trait.into(),
- GenericArgs::new_from_iter(
- self.interner(),
- [base_ty.into(), index_ty.into()],
- ),
- );
- self.table.register_predicate(Obligation::new(
- self.interner(),
- ObligationCause::new(),
- self.table.trait_env.env,
- trait_ref,
- ));
- }
+ if mutability == Mutability::Mut {
+ self.convert_place_op_to_mutable(PlaceOp::Index, tgt_expr, base, Some(index));
}
self.infer_mut_expr(base, mutability);
self.infer_mut_expr(index, Mutability::Not);
}
Expr::UnaryOp { expr, op: UnaryOp::Deref } => {
- let mut mutability = mutability;
- if let Some((f, _)) = self.result.method_resolutions.get_mut(&tgt_expr)
- && mutability == Mutability::Mut
- && let Some(deref_trait) =
- LangItem::DerefMut.resolve_trait(self.db, self.table.trait_env.krate)
- {
- let ty = self.result.type_of_expr.get(*expr);
- let is_mut_ptr = ty.is_some_and(|ty| {
- let ty = self.table.shallow_resolve(*ty);
- matches!(ty.kind(), TyKind::RawPtr(_, Mutability::Mut))
- });
- if is_mut_ptr {
- mutability = Mutability::Not;
- } else if let Some(deref_fn) = deref_trait
- .trait_items(self.db)
- .method_by_name(&Name::new_symbol_root(sym::deref_mut))
- {
- *f = deref_fn;
- }
+ if mutability == Mutability::Mut {
+ self.convert_place_op_to_mutable(PlaceOp::Deref, tgt_expr, *expr, None);
}
self.infer_mut_expr(*expr, mutability);
}
diff --git a/crates/hir-ty/src/infer/op.rs b/crates/hir-ty/src/infer/op.rs
new file mode 100644
index 0000000..88319a8
--- /dev/null
+++ b/crates/hir-ty/src/infer/op.rs
@@ -0,0 +1,468 @@
+//! Inference of binary and unary operators.
+
+use std::collections::hash_map;
+
+use hir_def::{GenericParamId, TraitId, hir::ExprId, lang_item::LangItem};
+use intern::{Symbol, sym};
+use rustc_ast_ir::Mutability;
+use rustc_type_ir::inherent::{IntoKind, Ty as _};
+use syntax::ast::{ArithOp, BinaryOp, UnaryOp};
+use tracing::debug;
+
+use crate::{
+ Adjust, Adjustment, AutoBorrow,
+ infer::{AllowTwoPhase, AutoBorrowMutability, Expectation, InferenceContext, expr::ExprIsRead},
+ method_resolution::{MethodCallee, TreatNotYetDefinedOpaques},
+ next_solver::{
+ GenericArgs, TraitRef, Ty, TyKind,
+ fulfill::NextSolverError,
+ infer::traits::{Obligation, ObligationCause},
+ obligation_ctxt::ObligationCtxt,
+ },
+};
+
+impl<'a, 'db> InferenceContext<'a, 'db> {
+ /// Checks a `a <op>= b`
+ pub(crate) fn infer_assign_op_expr(
+ &mut self,
+ expr: ExprId,
+ op: ArithOp,
+ lhs: ExprId,
+ rhs: ExprId,
+ ) -> Ty<'db> {
+ let (lhs_ty, rhs_ty, return_ty) =
+ self.infer_overloaded_binop(expr, lhs, rhs, BinaryOp::Assignment { op: Some(op) });
+
+ let category = BinOpCategory::from(op);
+ let ty = if !lhs_ty.is_ty_var()
+ && !rhs_ty.is_ty_var()
+ && is_builtin_binop(lhs_ty, rhs_ty, category)
+ {
+ self.enforce_builtin_binop_types(lhs_ty, rhs_ty, category);
+ self.types.unit
+ } else {
+ return_ty
+ };
+
+ self.check_lhs_assignable(lhs);
+
+ ty
+ }
+
+ /// Checks a potentially overloaded binary operator.
+ pub(crate) fn infer_binop_expr(
+ &mut self,
+ expr: ExprId,
+ op: BinaryOp,
+ lhs_expr: ExprId,
+ rhs_expr: ExprId,
+ ) -> Ty<'db> {
+ debug!(
+ "check_binop(expr.hir_id={:?}, expr={:?}, op={:?}, lhs_expr={:?}, rhs_expr={:?})",
+ expr, expr, op, lhs_expr, rhs_expr
+ );
+
+ match op {
+ BinaryOp::LogicOp(_) => {
+ // && and || are a simple case.
+ self.infer_expr_coerce(
+ lhs_expr,
+ &Expectation::HasType(self.types.bool),
+ ExprIsRead::Yes,
+ );
+ let lhs_diverges = self.diverges;
+ self.infer_expr_coerce(
+ rhs_expr,
+ &Expectation::HasType(self.types.bool),
+ ExprIsRead::Yes,
+ );
+
+ // Depending on the LHS' value, the RHS can never execute.
+ self.diverges = lhs_diverges;
+
+ self.types.bool
+ }
+ _ => {
+ // Otherwise, we always treat operators as if they are
+ // overloaded. This is the way to be most flexible w/r/t
+ // types that get inferred.
+ let (lhs_ty, rhs_ty, return_ty) =
+ self.infer_overloaded_binop(expr, lhs_expr, rhs_expr, op);
+
+ // Supply type inference hints if relevant. Probably these
+ // hints should be enforced during select as part of the
+ // `consider_unification_despite_ambiguity` routine, but this
+ // is more convenient for now.
+ //
+ // The basic idea is to help type inference by taking
+ // advantage of things we know about how the impls for
+ // scalar types are arranged. This is important in a
+ // scenario like `1_u32 << 2`, because it lets us quickly
+ // deduce that the result type should be `u32`, even
+ // though we don't know yet what type 2 has and hence
+ // can't pin this down to a specific impl.
+ let category = BinOpCategory::from(op);
+ if !lhs_ty.is_ty_var()
+ && !rhs_ty.is_ty_var()
+ && is_builtin_binop(lhs_ty, rhs_ty, category)
+ {
+ let builtin_return_ty =
+ self.enforce_builtin_binop_types(lhs_ty, rhs_ty, category);
+ _ = self.demand_eqtype(expr.into(), builtin_return_ty, return_ty);
+ builtin_return_ty
+ } else {
+ return_ty
+ }
+ }
+ }
+ }
+
+ fn enforce_builtin_binop_types(
+ &mut self,
+ lhs_ty: Ty<'db>,
+ rhs_ty: Ty<'db>,
+ category: BinOpCategory,
+ ) -> Ty<'db> {
+ debug_assert!(is_builtin_binop(lhs_ty, rhs_ty, category));
+
+ // Special-case a single layer of referencing, so that things like `5.0 + &6.0f32` work.
+ // (See https://github.com/rust-lang/rust/issues/57447.)
+ let (lhs_ty, rhs_ty) = (deref_ty_if_possible(lhs_ty), deref_ty_if_possible(rhs_ty));
+
+ match category {
+ BinOpCategory::Shortcircuit => {
+ self.demand_suptype(self.types.bool, lhs_ty);
+ self.demand_suptype(self.types.bool, rhs_ty);
+ self.types.bool
+ }
+
+ BinOpCategory::Shift => {
+ // result type is same as LHS always
+ lhs_ty
+ }
+
+ BinOpCategory::Math | BinOpCategory::Bitwise => {
+ // both LHS and RHS and result will have the same type
+ self.demand_suptype(lhs_ty, rhs_ty);
+ lhs_ty
+ }
+
+ BinOpCategory::Comparison => {
+ // both LHS and RHS and result will have the same type
+ self.demand_suptype(lhs_ty, rhs_ty);
+ self.types.bool
+ }
+ }
+ }
+
+ fn infer_overloaded_binop(
+ &mut self,
+ expr: ExprId,
+ lhs_expr: ExprId,
+ rhs_expr: ExprId,
+ op: BinaryOp,
+ ) -> (Ty<'db>, Ty<'db>, Ty<'db>) {
+ debug!("infer_overloaded_binop(expr.hir_id={:?}, op={:?})", expr, op);
+
+ let lhs_ty = match op {
+ BinaryOp::Assignment { .. } => {
+ // rust-lang/rust#52126: We have to use strict
+ // equivalence on the LHS of an assign-op like `+=`;
+ // overwritten or mutably-borrowed places cannot be
+ // coerced to a supertype.
+ self.infer_expr_no_expect(lhs_expr, ExprIsRead::Yes)
+ }
+ _ => {
+ // Find a suitable supertype of the LHS expression's type, by coercing to
+ // a type variable, to pass as the `Self` to the trait, avoiding invariant
+ // trait matching creating lifetime constraints that are too strict.
+ // e.g., adding `&'a T` and `&'b T`, given `&'x T: Add<&'x T>`, will result
+ // in `&'a T <: &'x T` and `&'b T <: &'x T`, instead of `'a = 'b = 'x`.
+ let lhs_ty = self.infer_expr_no_expect(lhs_expr, ExprIsRead::No);
+ let fresh_var = self.table.next_ty_var();
+ self.demand_coerce(lhs_expr, lhs_ty, fresh_var, AllowTwoPhase::No, ExprIsRead::No)
+ }
+ };
+ let lhs_ty = self.table.resolve_vars_with_obligations(lhs_ty);
+
+ // N.B., as we have not yet type-checked the RHS, we don't have the
+ // type at hand. Make a variable to represent it. The whole reason
+ // for this indirection is so that, below, we can check the expr
+ // using this variable as the expected type, which sometimes lets
+ // us do better coercions than we would be able to do otherwise,
+ // particularly for things like `String + &String`.
+ let rhs_ty_var = self.table.next_ty_var();
+ let result = self.lookup_op_method(
+ lhs_ty,
+ Some((rhs_expr, rhs_ty_var)),
+ self.lang_item_for_bin_op(op),
+ );
+
+ // see `NB` above
+ let rhs_ty =
+ self.infer_expr_coerce(rhs_expr, &Expectation::HasType(rhs_ty_var), ExprIsRead::No);
+ let rhs_ty = self.table.resolve_vars_with_obligations(rhs_ty);
+
+ let return_ty = match result {
+ Ok(method) => {
+ let by_ref_binop = !is_op_by_value(op);
+ if (matches!(op, BinaryOp::Assignment { .. }) || by_ref_binop)
+ && let TyKind::Ref(_, _, mutbl) =
+ method.sig.inputs_and_output.inputs()[0].kind()
+ {
+ let mutbl = AutoBorrowMutability::new(mutbl, AllowTwoPhase::Yes);
+ let autoref = Adjustment {
+ kind: Adjust::Borrow(AutoBorrow::Ref(mutbl)),
+ target: method.sig.inputs_and_output.inputs()[0],
+ };
+ self.write_expr_adj(lhs_expr, Box::new([autoref]));
+ }
+ if by_ref_binop
+ && let TyKind::Ref(_, _, mutbl) =
+ method.sig.inputs_and_output.inputs()[1].kind()
+ {
+ // Allow two-phase borrows for binops in initial deployment
+ // since they desugar to methods
+ let mutbl = AutoBorrowMutability::new(mutbl, AllowTwoPhase::Yes);
+
+ let autoref = Adjustment {
+ kind: Adjust::Borrow(AutoBorrow::Ref(mutbl)),
+ target: method.sig.inputs_and_output.inputs()[1],
+ };
+ // HACK(eddyb) Bypass checks due to reborrows being in
+ // some cases applied on the RHS, on top of which we need
+ // to autoref, which is not allowed by write_expr_adj.
+ // self.write_expr_adj(rhs_expr, Box::new([autoref]));
+ match self.result.expr_adjustments.entry(rhs_expr) {
+ hash_map::Entry::Occupied(mut entry) => {
+ let mut adjustments = Vec::from(std::mem::take(entry.get_mut()));
+ adjustments.reserve_exact(1);
+ adjustments.push(autoref);
+ entry.insert(adjustments.into_boxed_slice());
+ }
+ hash_map::Entry::Vacant(entry) => {
+ entry.insert(Box::new([autoref]));
+ }
+ };
+ }
+ self.write_method_resolution(expr, method.def_id, method.args);
+
+ method.sig.output()
+ }
+ Err(_errors) => {
+ // FIXME: Report diagnostic.
+ self.types.error
+ }
+ };
+
+ (lhs_ty, rhs_ty, return_ty)
+ }
+
+ pub(crate) fn infer_user_unop(
+ &mut self,
+ ex: ExprId,
+ operand_ty: Ty<'db>,
+ op: UnaryOp,
+ ) -> Ty<'db> {
+ match self.lookup_op_method(operand_ty, None, self.lang_item_for_unop(op)) {
+ Ok(method) => {
+ self.write_method_resolution(ex, method.def_id, method.args);
+ method.sig.output()
+ }
+ Err(_errors) => {
+ // FIXME: Report diagnostic.
+ self.types.error
+ }
+ }
+ }
+
+ fn lookup_op_method(
+ &mut self,
+ lhs_ty: Ty<'db>,
+ opt_rhs: Option<(ExprId, Ty<'db>)>,
+ (opname, trait_did): (Symbol, Option<TraitId>),
+ ) -> Result<MethodCallee<'db>, Vec<NextSolverError<'db>>> {
+ let Some(trait_did) = trait_did else {
+ // Bail if the operator trait is not defined.
+ return Err(vec![]);
+ };
+
+ debug!(
+ "lookup_op_method(lhs_ty={:?}, opname={:?}, trait_did={:?})",
+ lhs_ty, opname, trait_did
+ );
+
+ let opt_rhs_ty = opt_rhs.map(|it| it.1);
+ let cause = ObligationCause::new();
+
+ // We don't consider any other candidates if this lookup fails
+ // so we can freely treat opaque types as inference variables here
+ // to allow more code to compile.
+ let treat_opaques = TreatNotYetDefinedOpaques::AsInfer;
+ let method = self.table.lookup_method_for_operator(
+ cause.clone(),
+ opname,
+ trait_did,
+ lhs_ty,
+ opt_rhs_ty,
+ treat_opaques,
+ );
+ match method {
+ Some(ok) => {
+ let method = self.table.register_infer_ok(ok);
+ self.table.select_obligations_where_possible();
+ Ok(method)
+ }
+ None => {
+ // Guide inference for the RHS expression if it's provided --
+ // this will allow us to give better error reporting, at the expense
+ // of making some error messages a bit more specific.
+ if let Some((rhs_expr, rhs_ty)) = opt_rhs
+ && rhs_ty.is_ty_var()
+ {
+ self.infer_expr_coerce(rhs_expr, &Expectation::HasType(rhs_ty), ExprIsRead::No);
+ }
+
+ // Construct an obligation `self_ty : Trait<input_tys>`
+ let args = GenericArgs::for_item(
+ self.interner(),
+ trait_did.into(),
+ |param_idx, param_id, _| match param_id {
+ GenericParamId::LifetimeParamId(_) | GenericParamId::ConstParamId(_) => {
+ unreachable!("did not expect operand trait to have lifetime/const args")
+ }
+ GenericParamId::TypeParamId(_) => {
+ if param_idx == 0 {
+ lhs_ty.into()
+ } else {
+ opt_rhs_ty.expect("expected RHS for binop").into()
+ }
+ }
+ },
+ );
+ let obligation = Obligation::new(
+ self.interner(),
+ cause,
+ self.table.trait_env.env,
+ TraitRef::new_from_args(self.interner(), trait_did.into(), args),
+ );
+ let mut ocx = ObligationCtxt::new(self.infcx());
+ ocx.register_obligation(obligation);
+ Err(ocx.evaluate_obligations_error_on_ambiguity())
+ }
+ }
+ }
+
+ fn lang_item_for_bin_op(&self, op: BinaryOp) -> (Symbol, Option<TraitId>) {
+ let (method_name, trait_lang_item) =
+ crate::lang_items::lang_items_for_bin_op(op).expect("invalid operator provided");
+ (method_name, trait_lang_item.resolve_trait(self.db, self.krate()))
+ }
+
+ fn lang_item_for_unop(&self, op: UnaryOp) -> (Symbol, Option<TraitId>) {
+ let (method_name, trait_lang_item) = match op {
+ UnaryOp::Not => (sym::not, LangItem::Not),
+ UnaryOp::Neg => (sym::neg, LangItem::Neg),
+ UnaryOp::Deref => panic!("Deref is not overloadable"),
+ };
+ (method_name, trait_lang_item.resolve_trait(self.db, self.krate()))
+ }
+}
+
+// Binary operator categories. These categories summarize the behavior
+// with respect to the builtin operations supported.
+#[derive(Clone, Copy)]
+enum BinOpCategory {
+ /// &&, || -- cannot be overridden
+ Shortcircuit,
+
+ /// <<, >> -- when shifting a single integer, rhs can be any
+ /// integer type. For simd, types must match.
+ Shift,
+
+ /// +, -, etc -- takes equal types, produces same type as input,
+ /// applicable to ints/floats/simd
+ Math,
+
+ /// &, |, ^ -- takes equal types, produces same type as input,
+ /// applicable to ints/floats/simd/bool
+ Bitwise,
+
+ /// ==, !=, etc -- takes equal types, produces bools, except for simd,
+ /// which produce the input type
+ Comparison,
+}
+
+impl From<BinaryOp> for BinOpCategory {
+ fn from(op: BinaryOp) -> BinOpCategory {
+ match op {
+ BinaryOp::LogicOp(_) => BinOpCategory::Shortcircuit,
+ BinaryOp::ArithOp(op) | BinaryOp::Assignment { op: Some(op) } => op.into(),
+ BinaryOp::CmpOp(_) => BinOpCategory::Comparison,
+ BinaryOp::Assignment { op: None } => unreachable!(
+ "assignment is lowered into `Expr::Assignment`, not into `Expr::BinaryOp`"
+ ),
+ }
+ }
+}
+
+impl From<ArithOp> for BinOpCategory {
+ fn from(op: ArithOp) -> BinOpCategory {
+ use ArithOp::*;
+ match op {
+ Shl | Shr => BinOpCategory::Shift,
+ Add | Sub | Mul | Div | Rem => BinOpCategory::Math,
+ BitXor | BitAnd | BitOr => BinOpCategory::Bitwise,
+ }
+ }
+}
+
+/// Returns `true` if the binary operator takes its arguments by value.
+fn is_op_by_value(op: BinaryOp) -> bool {
+ !matches!(op, BinaryOp::CmpOp(_))
+}
+
+/// Dereferences a single level of immutable referencing.
+fn deref_ty_if_possible(ty: Ty<'_>) -> Ty<'_> {
+ match ty.kind() {
+ TyKind::Ref(_, ty, Mutability::Not) => ty,
+ _ => ty,
+ }
+}
+
+/// Returns `true` if this is a built-in arithmetic operation (e.g.,
+/// u32 + u32, i16x4 == i16x4) and false if these types would have to be
+/// overloaded to be legal. There are two reasons that we distinguish
+/// builtin operations from overloaded ones (vs trying to drive
+/// everything uniformly through the trait system and intrinsics or
+/// something like that):
+///
+/// 1. Builtin operations can trivially be evaluated in constants.
+/// 2. For comparison operators applied to SIMD types the result is
+/// not of type `bool`. For example, `i16x4 == i16x4` yields a
+/// type like `i16x4`. This means that the overloaded trait
+/// `PartialEq` is not applicable.
+///
+/// Reason #2 is the killer. I tried for a while to always use
+/// overloaded logic and just check the types in constants/codegen after
+/// the fact, and it worked fine, except for SIMD types. -nmatsakis
+fn is_builtin_binop<'db>(lhs: Ty<'db>, rhs: Ty<'db>, category: BinOpCategory) -> bool {
+ // Special-case a single layer of referencing, so that things like `5.0 + &6.0f32` work.
+ // (See https://github.com/rust-lang/rust/issues/57447.)
+ let (lhs, rhs) = (deref_ty_if_possible(lhs), deref_ty_if_possible(rhs));
+
+ match category {
+ BinOpCategory::Shortcircuit => true,
+ BinOpCategory::Shift => lhs.is_integral() && rhs.is_integral(),
+ BinOpCategory::Math => {
+ lhs.is_integral() && rhs.is_integral()
+ || lhs.is_floating_point() && rhs.is_floating_point()
+ }
+ BinOpCategory::Bitwise => {
+ lhs.is_integral() && rhs.is_integral()
+ || lhs.is_floating_point() && rhs.is_floating_point()
+ || lhs.is_bool() && rhs.is_bool()
+ }
+ BinOpCategory::Comparison => lhs.is_scalar() && rhs.is_scalar(),
+ }
+}
diff --git a/crates/hir-ty/src/infer/opaques.rs b/crates/hir-ty/src/infer/opaques.rs
index f7719f5..ba4b53a 100644
--- a/crates/hir-ty/src/infer/opaques.rs
+++ b/crates/hir-ty/src/infer/opaques.rs
@@ -109,7 +109,7 @@
let expected =
EarlyBinder::bind(ty.ty).instantiate(interner, opaque_type_key.args);
- self.demand_eqtype(expected, hidden_type.ty);
+ _ = self.demand_eqtype_fixme_no_diag(expected, hidden_type.ty);
}
self.result.type_of_opaque.insert(def_id, ty.ty);
diff --git a/crates/hir-ty/src/infer/pat.rs b/crates/hir-ty/src/infer/pat.rs
index 8019844..ece2bdc 100644
--- a/crates/hir-ty/src/infer/pat.rs
+++ b/crates/hir-ty/src/infer/pat.rs
@@ -1,6 +1,6 @@
//! Type inference for patterns.
-use std::iter::repeat_with;
+use std::{cmp, iter};
use hir_def::{
HasModule,
@@ -16,11 +16,10 @@
DeclContext, DeclOrigin, InferenceDiagnostic,
consteval::{self, try_const_usize, usize_const},
infer::{
- AllowTwoPhase, BindingMode, Expectation, InferenceContext, TypeMismatch,
- coerce::CoerceNever, expr::ExprIsRead,
+ AllowTwoPhase, BindingMode, Expectation, InferenceContext, TypeMismatch, expr::ExprIsRead,
},
lower::lower_mutability,
- next_solver::{GenericArgs, Ty, TyKind},
+ next_solver::{GenericArgs, Ty, TyKind, Tys, infer::traits::ObligationCause},
};
impl<'db> InferenceContext<'_, 'db> {
@@ -184,42 +183,61 @@
/// Ellipses found in the original pattern or expression must be filtered out.
pub(super) fn infer_tuple_pat_like(
&mut self,
+ pat: PatId,
expected: Ty<'db>,
default_bm: BindingMode,
ellipsis: Option<u32>,
- subs: &[PatId],
+ elements: &[PatId],
decl: Option<DeclContext>,
) -> Ty<'db> {
- let expected = self.table.structurally_resolve_type(expected);
- let expectations = match expected.kind() {
- TyKind::Tuple(parameters) => parameters,
- _ => self.types.empty_tys,
- };
-
- let ((pre, post), n_uncovered_patterns) = match ellipsis {
- Some(idx) => {
- (subs.split_at(idx as usize), expectations.len().saturating_sub(subs.len()))
+ let mut expected_len = elements.len();
+ if ellipsis.is_some() {
+ // Require known type only when `..` is present.
+ if let TyKind::Tuple(tys) = self.table.structurally_resolve_type(expected).kind() {
+ expected_len = tys.len();
}
- None => ((subs, &[][..]), 0),
+ }
+ let max_len = cmp::max(expected_len, elements.len());
+
+ let element_tys_iter = (0..max_len).map(|_| self.table.next_ty_var());
+ let element_tys = Tys::new_from_iter(self.interner(), element_tys_iter);
+ let pat_ty = Ty::new(self.interner(), TyKind::Tuple(element_tys));
+ if self.demand_eqtype(pat.into(), expected, pat_ty).is_err()
+ && let TyKind::Tuple(expected) = expected.kind()
+ {
+ // Equate expected type with the infer vars, for better diagnostics.
+ for (expected, elem_ty) in iter::zip(expected, element_tys) {
+ _ = self
+ .table
+ .at(&ObligationCause::dummy())
+ .eq(expected, elem_ty)
+ .map(|infer_ok| self.table.register_infer_ok(infer_ok));
+ }
+ }
+ let (before_ellipsis, after_ellipsis) = match ellipsis {
+ Some(ellipsis) => {
+ let element_tys = element_tys.as_slice();
+ // Don't check patterns twice.
+ let from_end_start = cmp::max(
+ element_tys.len().saturating_sub(elements.len() - ellipsis as usize),
+ ellipsis as usize,
+ );
+ (
+ element_tys.get(..ellipsis as usize).unwrap_or(element_tys),
+ element_tys.get(from_end_start..).unwrap_or_default(),
+ )
+ }
+ None => (element_tys.as_slice(), &[][..]),
};
- let mut expectations_iter =
- expectations.iter().chain(repeat_with(|| self.table.next_ty_var()));
-
- let mut inner_tys = Vec::with_capacity(n_uncovered_patterns + subs.len());
-
- inner_tys.extend(expectations_iter.by_ref().take(n_uncovered_patterns + subs.len()));
-
- // Process pre
- for (ty, pat) in inner_tys.iter_mut().zip(pre) {
- *ty = self.infer_pat(*pat, *ty, default_bm, decl);
+ for (&elem, &elem_ty) in iter::zip(elements, before_ellipsis.iter().chain(after_ellipsis)) {
+ self.infer_pat(elem, elem_ty, default_bm, decl);
}
-
- // Process post
- for (ty, pat) in inner_tys.iter_mut().skip(pre.len() + n_uncovered_patterns).zip(post) {
- *ty = self.infer_pat(*pat, *ty, default_bm, decl);
+ if let Some(uncovered) = elements.get(element_tys.len()..) {
+ for &elem in uncovered {
+ self.infer_pat(elem, self.types.error, default_bm, decl);
+ }
}
-
- Ty::new_tup_from_iter(self.interner(), inner_tys.into_iter())
+ pat_ty
}
/// The resolver needs to be updated to the surrounding expression when inside assignment
@@ -273,7 +291,7 @@
let ty = match &self.body[pat] {
Pat::Tuple { args, ellipsis } => {
- self.infer_tuple_pat_like(expected, default_bm, *ellipsis, args, decl)
+ self.infer_tuple_pat_like(pat, expected, default_bm, *ellipsis, args, decl)
}
Pat::Or(pats) => {
for pat in pats.iter() {
@@ -306,7 +324,7 @@
expected,
ty_inserted_vars,
AllowTwoPhase::No,
- CoerceNever::Yes,
+ ExprIsRead::No,
) {
Ok(coerced_ty) => {
self.write_pat_ty(pat, coerced_ty);
@@ -331,8 +349,15 @@
self.infer_slice_pat(expected, prefix, *slice, suffix, default_bm, decl)
}
Pat::Wild => expected,
- Pat::Range { .. } => {
- // FIXME: do some checks here.
+ Pat::Range { start, end, range_type: _ } => {
+ if let Some(start) = *start {
+ let start_ty = self.infer_expr(start, &Expectation::None, ExprIsRead::Yes);
+ _ = self.demand_eqtype(start.into(), expected, start_ty);
+ }
+ if let Some(end) = *end {
+ let end_ty = self.infer_expr(end, &Expectation::None, ExprIsRead::Yes);
+ _ = self.demand_eqtype(end.into(), expected, end_ty);
+ }
expected
}
&Pat::Lit(expr) => {
@@ -357,7 +382,7 @@
GenericArgs::fill_with_defaults(
self.interner(),
box_adt.into(),
- std::iter::once(inner_ty.into()).chain(alloc_ty.map(Into::into)),
+ iter::once(inner_ty.into()).chain(alloc_ty.map(Into::into)),
|_, id, _| self.table.next_var_for_param(id),
),
)
@@ -374,16 +399,17 @@
Pat::Expr(expr) => {
let old_inside_assign = std::mem::replace(&mut self.inside_assignment, false);
// LHS of assignment doesn't constitute reads.
+ let expr_is_read = ExprIsRead::No;
let result =
- self.infer_expr_coerce(*expr, &Expectation::has_type(expected), ExprIsRead::No);
+ self.infer_expr_coerce(*expr, &Expectation::has_type(expected), expr_is_read);
// We are returning early to avoid the unifiability check below.
let lhs_ty = self.insert_type_vars_shallow(result);
let ty = match self.coerce(
- pat.into(),
+ (*expr).into(),
expected,
lhs_ty,
AllowTwoPhase::No,
- CoerceNever::Yes,
+ expr_is_read,
) {
Ok(ty) => ty,
Err(_) => {
@@ -416,7 +442,7 @@
.result
.pat_adjustments
.get(&pat)
- .and_then(|it| it.first())
+ .and_then(|it| it.last())
.unwrap_or(&self.result.type_of_pat[pat])
}
@@ -469,9 +495,9 @@
let bound_ty = match mode {
BindingMode::Ref(mutability) => {
let inner_lt = self.table.next_region_var();
- Ty::new_ref(self.interner(), inner_lt, inner_ty, mutability)
+ Ty::new_ref(self.interner(), inner_lt, expected, mutability)
}
- BindingMode::Move => inner_ty,
+ BindingMode::Move => expected,
};
self.write_pat_ty(pat, inner_ty);
self.write_binding_ty(binding, bound_ty);
diff --git a/crates/hir-ty/src/infer/path.rs b/crates/hir-ty/src/infer/path.rs
index 9ade842..6e3d158 100644
--- a/crates/hir-ty/src/infer/path.rs
+++ b/crates/hir-ty/src/infer/path.rs
@@ -13,11 +13,12 @@
InferenceDiagnostic, ValueTyDefId,
generics::generics,
infer::diagnostics::InferenceTyLoweringContext as TyLoweringContext,
- lower::LifetimeElisionKind,
- method_resolution::{self, VisibleFromModule},
+ lower::{GenericPredicates, LifetimeElisionKind},
+ method_resolution::{self, CandidateId, MethodError},
next_solver::{
GenericArg, GenericArgs, TraitRef, Ty,
infer::traits::{Obligation, ObligationCause},
+ util::clauses_as_obligations,
},
};
@@ -31,7 +32,7 @@
}
ValuePathResolution::NonGeneric(ty) => return Some(ty),
};
- let args = self.process_remote_user_written_ty(substs);
+ let args = self.insert_type_vars(substs);
self.add_required_obligations_for_value_path(generic_def, args);
@@ -221,14 +222,14 @@
def: GenericDefId,
subst: GenericArgs<'db>,
) {
- let predicates = self.db.generic_predicates(def);
let interner = self.interner();
+ let predicates = GenericPredicates::query_all(self.db, def);
let param_env = self.table.trait_env.env;
- if let Some(predicates) = predicates.instantiate(self.interner(), subst) {
- self.table.register_predicates(predicates.map(|predicate| {
- Obligation::new(interner, ObligationCause::new(), param_env, predicate)
- }));
- }
+ self.table.register_predicates(clauses_as_obligations(
+ predicates.iter_instantiated_copied(interner, subst.as_slice()),
+ ObligationCause::new(),
+ param_env,
+ ));
// We need to add `Self: Trait` obligation when `def` is a trait assoc item.
let container = match def {
@@ -265,7 +266,7 @@
match item {
AssocItemId::FunctionId(func) => {
if segment.name == &self.db.function_signature(func).name {
- Some(AssocItemId::FunctionId(func))
+ Some(CandidateId::FunctionId(func))
} else {
None
}
@@ -273,7 +274,7 @@
AssocItemId::ConstId(konst) => {
if self.db.const_signature(konst).name.as_ref() == Some(segment.name) {
- Some(AssocItemId::ConstId(konst))
+ Some(CandidateId::ConstId(konst))
} else {
None
}
@@ -282,9 +283,8 @@
}
})?;
let def = match item {
- AssocItemId::FunctionId(f) => ValueNs::FunctionId(f),
- AssocItemId::ConstId(c) => ValueNs::ConstId(c),
- AssocItemId::TypeAliasId(_) => unreachable!(),
+ CandidateId::FunctionId(f) => ValueNs::FunctionId(f),
+ CandidateId::ConstId(c) => ValueNs::ConstId(c),
};
self.write_assoc_resolution(id, item, trait_ref.args);
@@ -305,39 +305,23 @@
return Some(result);
}
- let canonical_ty = self.canonicalize(ty);
-
- let mut not_visible = None;
- let res = method_resolution::iterate_method_candidates(
- &canonical_ty,
- &mut self.table,
- Self::get_traits_in_scope(&self.resolver, &self.traits_in_scope)
- .as_ref()
- .left_or_else(|&it| it),
- VisibleFromModule::Filter(self.resolver.module()),
- Some(name),
- method_resolution::LookupMode::Path,
- |_ty, item, visible| {
- if visible {
- Some((item, true))
- } else {
- if not_visible.is_none() {
- not_visible = Some((item, false));
- }
- None
+ let res = self.with_method_resolution(|ctx| {
+ ctx.probe_for_name(method_resolution::Mode::Path, name.clone(), ty)
+ });
+ let (item, visible) = match res {
+ Ok(res) => (res.item, true),
+ Err(error) => match error {
+ MethodError::PrivateMatch(candidate_id) => (candidate_id.item, false),
+ _ => {
+ self.push_diagnostic(InferenceDiagnostic::UnresolvedAssocItem { id });
+ return None;
}
},
- );
- let res = res.or(not_visible);
- if res.is_none() {
- self.push_diagnostic(InferenceDiagnostic::UnresolvedAssocItem { id });
- }
- let (item, visible) = res?;
+ };
let (def, container) = match item {
- AssocItemId::FunctionId(f) => (ValueNs::FunctionId(f), f.lookup(self.db).container),
- AssocItemId::ConstId(c) => (ValueNs::ConstId(c), c.lookup(self.db).container),
- AssocItemId::TypeAliasId(_) => unreachable!(),
+ CandidateId::FunctionId(f) => (ValueNs::FunctionId(f), f.lookup(self.db).container),
+ CandidateId::ConstId(c) => (ValueNs::ConstId(c), c.lookup(self.db).container),
};
let substs = match container {
ItemContainerId::ImplId(impl_id) => {
@@ -372,6 +356,10 @@
self.write_assoc_resolution(id, item, substs);
if !visible {
+ let item = match item {
+ CandidateId::FunctionId(it) => it.into(),
+ CandidateId::ConstId(it) => it.into(),
+ };
self.push_diagnostic(InferenceDiagnostic::PrivateAssocItem { id, item });
}
Some((def, substs))
diff --git a/crates/hir-ty/src/infer/place_op.rs b/crates/hir-ty/src/infer/place_op.rs
new file mode 100644
index 0000000..50018bb
--- /dev/null
+++ b/crates/hir-ty/src/infer/place_op.rs
@@ -0,0 +1,329 @@
+//! Inference of *place operators*: deref and indexing (operators that create places, as opposed to values).
+
+use base_db::Crate;
+use hir_def::{hir::ExprId, lang_item::LangItem};
+use intern::sym;
+use rustc_ast_ir::Mutability;
+use rustc_type_ir::inherent::{IntoKind, Ty as _};
+use tracing::debug;
+
+use crate::{
+ Adjust, Adjustment, AutoBorrow, PointerCast,
+ autoderef::InferenceContextAutoderef,
+ infer::{AllowTwoPhase, AutoBorrowMutability, InferenceContext, unify::InferenceTable},
+ method_resolution::{MethodCallee, TreatNotYetDefinedOpaques},
+ next_solver::{
+ ClauseKind, Ty, TyKind,
+ infer::{
+ InferOk,
+ traits::{Obligation, ObligationCause},
+ },
+ },
+};
+
+#[derive(Debug, Copy, Clone)]
+pub(super) enum PlaceOp {
+ Deref,
+ Index,
+}
+
+impl<'a, 'db> InferenceContext<'a, 'db> {
+ pub(super) fn try_overloaded_deref(
+ &self,
+ base_ty: Ty<'db>,
+ ) -> Option<InferOk<'db, MethodCallee<'db>>> {
+ self.try_overloaded_place_op(base_ty, None, PlaceOp::Deref)
+ }
+
+ /// For the overloaded place expressions (`*x`, `x[3]`), the trait
+ /// returns a type of `&T`, but the actual type we assign to the
+ /// *expression* is `T`. So this function just peels off the return
+ /// type by one layer to yield `T`.
+ fn make_overloaded_place_return_type(&self, method: MethodCallee<'db>) -> Ty<'db> {
+ // extract method return type, which will be &T;
+ let ret_ty = method.sig.output();
+
+ // method returns &T, but the type as visible to user is T, so deref
+ ret_ty.builtin_deref(true).unwrap()
+ }
+
+ /// Type-check `*oprnd_expr` with `oprnd_expr` type-checked already.
+ pub(super) fn lookup_derefing(
+ &mut self,
+ expr: ExprId,
+ oprnd_expr: ExprId,
+ oprnd_ty: Ty<'db>,
+ ) -> Option<Ty<'db>> {
+ if let Some(ty) = oprnd_ty.builtin_deref(true) {
+ return Some(ty);
+ }
+
+ let ok = self.try_overloaded_deref(oprnd_ty)?;
+ let method = self.table.register_infer_ok(ok);
+ if let TyKind::Ref(_, _, Mutability::Not) = method.sig.inputs_and_output.inputs()[0].kind()
+ {
+ self.write_expr_adj(
+ oprnd_expr,
+ Box::new([Adjustment {
+ kind: Adjust::Borrow(AutoBorrow::Ref(AutoBorrowMutability::Not)),
+ target: method.sig.inputs_and_output.inputs()[0],
+ }]),
+ );
+ } else {
+ panic!("input to deref is not a ref?");
+ }
+ let ty = self.make_overloaded_place_return_type(method);
+ self.write_method_resolution(expr, method.def_id, method.args);
+ Some(ty)
+ }
+
+ /// Type-check `*base_expr[index_expr]` with `base_expr` and `index_expr` type-checked already.
+ pub(super) fn lookup_indexing(
+ &mut self,
+ expr: ExprId,
+ base_expr: ExprId,
+ base_ty: Ty<'db>,
+ idx_ty: Ty<'db>,
+ ) -> Option<(/*index type*/ Ty<'db>, /*element type*/ Ty<'db>)> {
+ // FIXME(#18741) -- this is almost but not quite the same as the
+ // autoderef that normal method probing does. They could likely be
+ // consolidated.
+
+ let mut autoderef = InferenceContextAutoderef::new_from_inference_context(self, base_ty);
+ let mut result = None;
+ while result.is_none() && autoderef.next().is_some() {
+ result = Self::try_index_step(expr, base_expr, &mut autoderef, idx_ty);
+ }
+ result
+ }
+
+ /// To type-check `base_expr[index_expr]`, we progressively autoderef
+ /// (and otherwise adjust) `base_expr`, looking for a type which either
+ /// supports builtin indexing or overloaded indexing.
+ /// This loop implements one step in that search; the autoderef loop
+ /// is implemented by `lookup_indexing`.
+ fn try_index_step(
+ expr: ExprId,
+ base_expr: ExprId,
+ autoderef: &mut InferenceContextAutoderef<'_, 'a, 'db>,
+ index_ty: Ty<'db>,
+ ) -> Option<(/*index type*/ Ty<'db>, /*element type*/ Ty<'db>)> {
+ let ty = autoderef.final_ty();
+ let adjusted_ty = autoderef.ctx().table.structurally_resolve_type(ty);
+ debug!(
+ "try_index_step(expr={:?}, base_expr={:?}, adjusted_ty={:?}, \
+ index_ty={:?})",
+ expr, base_expr, adjusted_ty, index_ty
+ );
+
+ for unsize in [false, true] {
+ let mut self_ty = adjusted_ty;
+ if unsize {
+ // We only unsize arrays here.
+ if let TyKind::Array(element_ty, ct) = adjusted_ty.kind() {
+ let ctx = autoderef.ctx();
+ ctx.table.register_predicate(Obligation::new(
+ ctx.interner(),
+ ObligationCause::new(),
+ ctx.table.trait_env.env,
+ ClauseKind::ConstArgHasType(ct, ctx.types.usize),
+ ));
+ self_ty = Ty::new_slice(ctx.interner(), element_ty);
+ } else {
+ continue;
+ }
+ }
+
+ // If some lookup succeeds, write callee into table and extract index/element
+ // type from the method signature.
+ // Create a fresh inference variable for the index type to unify against.
+ let input_ty = autoderef.ctx().table.next_ty_var();
+ let method =
+ autoderef.ctx().try_overloaded_place_op(self_ty, Some(input_ty), PlaceOp::Index);
+
+ if let Some(result) = method {
+ debug!("try_index_step: success, using overloaded indexing");
+ let method = autoderef.ctx().table.register_infer_ok(result);
+
+ let infer_ok = autoderef.adjust_steps_as_infer_ok();
+ let mut adjustments = autoderef.ctx().table.register_infer_ok(infer_ok);
+ if let TyKind::Ref(region, _, Mutability::Not) =
+ method.sig.inputs_and_output.inputs()[0].kind()
+ {
+ adjustments.push(Adjustment {
+ kind: Adjust::Borrow(AutoBorrow::Ref(AutoBorrowMutability::Not)),
+ target: Ty::new_imm_ref(autoderef.ctx().interner(), region, adjusted_ty),
+ });
+ } else {
+ panic!("input to index is not a ref?");
+ }
+ if unsize {
+ adjustments.push(Adjustment {
+ kind: Adjust::Pointer(PointerCast::Unsize),
+ target: method.sig.inputs_and_output.inputs()[0],
+ });
+ }
+ autoderef.ctx().write_expr_adj(base_expr, adjustments.into_boxed_slice());
+
+ autoderef.ctx().write_method_resolution(expr, method.def_id, method.args);
+
+ return Some((input_ty, autoderef.ctx().make_overloaded_place_return_type(method)));
+ }
+ }
+
+ None
+ }
+
+ /// Try to resolve an overloaded place op. We only deal with the immutable
+ /// variant here (Deref/Index). In some contexts we would need the mutable
+ /// variant (DerefMut/IndexMut); those would be later converted by
+ /// `convert_place_derefs_to_mutable`.
+ pub(super) fn try_overloaded_place_op(
+ &self,
+ base_ty: Ty<'db>,
+ opt_rhs_ty: Option<Ty<'db>>,
+ op: PlaceOp,
+ ) -> Option<InferOk<'db, MethodCallee<'db>>> {
+ debug!("try_overloaded_place_op({:?},{:?})", base_ty, op);
+
+ let (Some(imm_tr), imm_op) = (match op {
+ PlaceOp::Deref => (LangItem::Deref.resolve_trait(self.db, self.krate()), sym::deref),
+ PlaceOp::Index => (LangItem::Index.resolve_trait(self.db, self.krate()), sym::index),
+ }) else {
+ // Bail if `Deref` or `Index` isn't defined.
+ return None;
+ };
+
+ // FIXME(trait-system-refactor-initiative#231): we may want to treat
+ // opaque types as rigid here to support `impl Deref<Target = impl Index<usize>>`.
+ let treat_opaques = TreatNotYetDefinedOpaques::AsInfer;
+ self.table.lookup_method_for_operator(
+ ObligationCause::new(),
+ imm_op,
+ imm_tr,
+ base_ty,
+ opt_rhs_ty,
+ treat_opaques,
+ )
+ }
+
+ pub(super) fn try_mutable_overloaded_place_op(
+ table: &InferenceTable<'db>,
+ krate: Crate,
+ base_ty: Ty<'db>,
+ opt_rhs_ty: Option<Ty<'db>>,
+ op: PlaceOp,
+ ) -> Option<InferOk<'db, MethodCallee<'db>>> {
+ debug!("try_mutable_overloaded_place_op({:?},{:?})", base_ty, op);
+
+ let (Some(mut_tr), mut_op) = (match op {
+ PlaceOp::Deref => (LangItem::DerefMut.resolve_trait(table.db, krate), sym::deref_mut),
+ PlaceOp::Index => (LangItem::IndexMut.resolve_trait(table.db, krate), sym::index_mut),
+ }) else {
+ // Bail if `DerefMut` or `IndexMut` isn't defined.
+ return None;
+ };
+
+ // We have to replace the operator with the mutable variant for the
+ // program to compile, so we don't really have a choice here and want
+ // to just try using `DerefMut` even if it's not in the item bounds
+ // of the opaque.
+ let treat_opaques = TreatNotYetDefinedOpaques::AsInfer;
+ table.lookup_method_for_operator(
+ ObligationCause::new(),
+ mut_op,
+ mut_tr,
+ base_ty,
+ opt_rhs_ty,
+ treat_opaques,
+ )
+ }
+
+ pub(super) fn convert_place_op_to_mutable(
+ &mut self,
+ op: PlaceOp,
+ expr: ExprId,
+ base_expr: ExprId,
+ index_expr: Option<ExprId>,
+ ) {
+ debug!("convert_place_op_to_mutable({:?}, {:?}, {:?})", op, expr, base_expr);
+ if !self.result.method_resolutions.contains_key(&expr) {
+ debug!("convert_place_op_to_mutable - builtin, nothing to do");
+ return;
+ }
+
+ // Need to deref because overloaded place ops take self by-reference.
+ let base_ty = self
+ .expr_ty_after_adjustments(base_expr)
+ .builtin_deref(false)
+ .expect("place op takes something that is not a ref");
+
+ let arg_ty = match op {
+ PlaceOp::Deref => None,
+ PlaceOp::Index => {
+ // We would need to recover the `T` used when we resolve `<_ as Index<T>>::index`
+ // in try_index_step. This is the arg at index 1.
+ //
+ // FIXME: rustc does not use the type of `index_expr` with the following explanation.
+ //
+ // Note: we should *not* use `expr_ty` of index_expr here because autoderef
+ // during coercions can cause type of index_expr to differ from `T` (#72002).
+ // We also could not use `expr_ty_adjusted` of index_expr because reborrowing
+ // during coercions can also cause type of index_expr to differ from `T`,
+ // which can potentially cause regionck failure (#74933).
+ Some(self.expr_ty_after_adjustments(
+ index_expr.expect("`PlaceOp::Index` should have `index_expr`"),
+ ))
+ }
+ };
+ let method =
+ Self::try_mutable_overloaded_place_op(&self.table, self.krate(), base_ty, arg_ty, op);
+ let method = match method {
+ Some(ok) => self.table.register_infer_ok(ok),
+ // Couldn't find the mutable variant of the place op, keep the
+ // current, immutable version.
+ None => return,
+ };
+ debug!("convert_place_op_to_mutable: method={:?}", method);
+ self.result.method_resolutions.insert(expr, (method.def_id, method.args));
+
+ let TyKind::Ref(region, _, Mutability::Mut) =
+ method.sig.inputs_and_output.inputs()[0].kind()
+ else {
+ panic!("input to mutable place op is not a mut ref?");
+ };
+
+ // Convert the autoref in the base expr to mutable with the correct
+ // region and mutability.
+ let base_expr_ty = self.expr_ty(base_expr);
+ let interner = self.interner();
+ if let Some(adjustments) = self.result.expr_adjustments.get_mut(&base_expr) {
+ let mut source = base_expr_ty;
+ for adjustment in &mut adjustments[..] {
+ if let Adjust::Borrow(AutoBorrow::Ref(..)) = adjustment.kind {
+ debug!("convert_place_op_to_mutable: converting autoref {:?}", adjustment);
+ let mutbl = AutoBorrowMutability::Mut {
+ // Deref/indexing can be desugared to a method call,
+ // so maybe we could use two-phase here.
+ // See the documentation of AllowTwoPhase for why that's
+ // not the case today.
+ allow_two_phase_borrow: AllowTwoPhase::No,
+ };
+ adjustment.kind = Adjust::Borrow(AutoBorrow::Ref(mutbl));
+ adjustment.target = Ty::new_ref(interner, region, source, mutbl.into());
+ }
+ source = adjustment.target;
+ }
+
+ // If we have an autoref followed by unsizing at the end, fix the unsize target.
+ if let [
+ ..,
+ Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(..)), .. },
+ Adjustment { kind: Adjust::Pointer(PointerCast::Unsize), ref mut target },
+ ] = adjustments[..]
+ {
+ *target = method.sig.inputs_and_output.inputs()[0];
+ }
+ }
+ }
+}
diff --git a/crates/hir-ty/src/infer/unify.rs b/crates/hir-ty/src/infer/unify.rs
index 59e8d84..0b56649 100644
--- a/crates/hir-ty/src/infer/unify.rs
+++ b/crates/hir-ty/src/infer/unify.rs
@@ -7,10 +7,9 @@
use intern::sym;
use rustc_hash::FxHashSet;
use rustc_type_ir::{
- DebruijnIndex, InferConst, InferTy, RegionVid, TyVid, TypeFoldable, TypeFolder,
- TypeSuperFoldable, TypeVisitableExt, UpcastFrom,
- inherent::{Const as _, IntoKind, Ty as _},
- solve::{Certainty, GoalSource},
+ TyVid, TypeFoldable, TypeVisitableExt, UpcastFrom,
+ inherent::{Const as _, GenericArg as _, IntoKind, SliceLike, Ty as _},
+ solve::Certainty,
};
use smallvec::SmallVec;
use triomphe::Arc;
@@ -18,15 +17,14 @@
use crate::{
TraitEnvironment,
db::HirDatabase,
- infer::InferenceContext,
next_solver::{
- self, AliasTy, Binder, Canonical, ClauseKind, Const, ConstKind, DbInterner,
- ErrorGuaranteed, GenericArg, GenericArgs, Predicate, PredicateKind, Region, RegionKind,
- SolverDefId, TraitRef, Ty, TyKind, TypingMode,
+ AliasTy, Canonical, ClauseKind, Const, DbInterner, ErrorGuaranteed, GenericArg,
+ GenericArgs, Goal, Predicate, PredicateKind, Region, SolverDefId, Term, TraitRef, Ty,
+ TyKind, TypingMode,
fulfill::{FulfillmentCtxt, NextSolverError},
infer::{
DbInternerInferExt, InferCtxt, InferOk, InferResult,
- at::ToTrace,
+ at::{At, ToTrace},
snapshot::CombinedSnapshot,
traits::{Obligation, ObligationCause, PredicateObligation},
},
@@ -38,15 +36,6 @@
},
};
-impl<'db> InferenceContext<'_, 'db> {
- pub(super) fn canonicalize<T>(&mut self, t: T) -> rustc_type_ir::Canonical<DbInterner<'db>, T>
- where
- T: rustc_type_ir::TypeFoldable<DbInterner<'db>>,
- {
- self.table.canonicalize(t)
- }
-}
-
struct NestedObligationsForSelfTy<'a, 'db> {
ctx: &'a InferenceTable<'db>,
self_ty: TyVid,
@@ -73,21 +62,7 @@
let db = self.ctx.interner();
let goal = inspect_goal.goal();
- if self.ctx.predicate_has_self_ty(goal.predicate, self.self_ty)
- // We do not push the instantiated forms of goals as it would cause any
- // aliases referencing bound vars to go from having escaping bound vars to
- // being able to be normalized to an inference variable.
- //
- // This is mostly just a hack as arbitrary nested goals could still contain
- // such aliases while having a different `GoalSource`. Closure signature inference
- // however can't really handle *every* higher ranked `Fn` goal also being present
- // in the form of `?c: Fn<(<?x as Trait<'!a>>::Assoc)`.
- //
- // This also just better matches the behaviour of the old solver where we do not
- // encounter instantiated forms of goals, only nested goals that referred to bound
- // vars from instantiated goals.
- && !matches!(inspect_goal.source(), GoalSource::InstantiateHigherRanked)
- {
+ if self.ctx.predicate_has_self_ty(goal.predicate, self.self_ty) {
self.obligations_for_self_ty.push(Obligation::new(
db,
self.root_cause.clone(),
@@ -292,21 +267,7 @@
T: TypeFoldable<DbInterner<'db>> + Clone,
{
let ty = self.resolve_vars_with_obligations(ty);
- self.infer_ctxt
- .at(&ObligationCause::new(), self.trait_env.env)
- .deeply_normalize(ty.clone())
- .unwrap_or(ty)
- }
-
- /// Works almost same as [`Self::normalize_associated_types_in`], but this also resolves shallow
- /// the inference variables
- pub(crate) fn eagerly_normalize_and_resolve_shallow_in<T>(&mut self, ty: T) -> T
- where
- T: TypeFoldable<DbInterner<'db>>,
- {
- let ty = self.resolve_vars_with_obligations(ty);
- let ty = self.normalize_associated_types_in(ty);
- self.resolve_vars_with_obligations(ty)
+ self.at(&ObligationCause::new()).deeply_normalize(ty.clone()).unwrap_or(ty)
}
pub(crate) fn normalize_alias_ty(&mut self, alias: Ty<'db>) -> Ty<'db> {
@@ -316,19 +277,19 @@
.unwrap_or(alias)
}
- pub(crate) fn next_ty_var(&mut self) -> Ty<'db> {
+ pub(crate) fn next_ty_var(&self) -> Ty<'db> {
self.infer_ctxt.next_ty_var()
}
- pub(crate) fn next_const_var(&mut self) -> Const<'db> {
+ pub(crate) fn next_const_var(&self) -> Const<'db> {
self.infer_ctxt.next_const_var()
}
- pub(crate) fn next_int_var(&mut self) -> Ty<'db> {
+ pub(crate) fn next_int_var(&self) -> Ty<'db> {
self.infer_ctxt.next_int_var()
}
- pub(crate) fn next_float_var(&mut self) -> Ty<'db> {
+ pub(crate) fn next_float_var(&self) -> Ty<'db> {
self.infer_ctxt.next_float_var()
}
@@ -338,101 +299,12 @@
var
}
- pub(crate) fn next_region_var(&mut self) -> Region<'db> {
+ pub(crate) fn next_region_var(&self) -> Region<'db> {
self.infer_ctxt.next_region_var()
}
- pub(crate) fn next_var_for_param(&mut self, id: GenericParamId) -> GenericArg<'db> {
- match id {
- GenericParamId::TypeParamId(_) => self.next_ty_var().into(),
- GenericParamId::ConstParamId(_) => self.next_const_var().into(),
- GenericParamId::LifetimeParamId(_) => self.next_region_var().into(),
- }
- }
-
- pub(crate) fn resolve_with_fallback<T>(
- &mut self,
- t: T,
- fallback_ty: &mut dyn FnMut(DebruijnIndex, InferTy) -> Ty<'db>,
- fallback_const: &mut dyn FnMut(DebruijnIndex, InferConst) -> Const<'db>,
- fallback_region: &mut dyn FnMut(DebruijnIndex, RegionVid) -> Region<'db>,
- ) -> T
- where
- T: TypeFoldable<DbInterner<'db>>,
- {
- struct Resolver<'a, 'db> {
- table: &'a mut InferenceTable<'db>,
- binder: DebruijnIndex,
- fallback_ty: &'a mut dyn FnMut(DebruijnIndex, InferTy) -> Ty<'db>,
- fallback_const: &'a mut dyn FnMut(DebruijnIndex, InferConst) -> Const<'db>,
- fallback_region: &'a mut dyn FnMut(DebruijnIndex, RegionVid) -> Region<'db>,
- }
-
- impl<'db> TypeFolder<DbInterner<'db>> for Resolver<'_, 'db> {
- fn cx(&self) -> DbInterner<'db> {
- self.table.interner()
- }
-
- fn fold_binder<T>(&mut self, t: Binder<'db, T>) -> Binder<'db, T>
- where
- T: TypeFoldable<DbInterner<'db>>,
- {
- self.binder.shift_in(1);
- let result = t.super_fold_with(self);
- self.binder.shift_out(1);
- result
- }
-
- fn fold_ty(&mut self, t: Ty<'db>) -> Ty<'db> {
- if !t.has_infer() {
- return t;
- }
-
- if let TyKind::Infer(infer) = t.kind() {
- (self.fallback_ty)(self.binder, infer)
- } else {
- t.super_fold_with(self)
- }
- }
-
- fn fold_const(&mut self, c: Const<'db>) -> Const<'db> {
- if !c.has_infer() {
- return c;
- }
-
- if let ConstKind::Infer(infer) = c.kind() {
- (self.fallback_const)(self.binder, infer)
- } else {
- c.super_fold_with(self)
- }
- }
-
- fn fold_region(&mut self, r: Region<'db>) -> Region<'db> {
- if let RegionKind::ReVar(infer) = r.kind() {
- (self.fallback_region)(self.binder, infer)
- } else {
- r
- }
- }
- }
-
- t.fold_with(&mut Resolver {
- table: self,
- binder: DebruijnIndex::ZERO,
- fallback_ty,
- fallback_const,
- fallback_region,
- })
- }
-
- pub(crate) fn instantiate_canonical<T>(
- &mut self,
- canonical: rustc_type_ir::Canonical<DbInterner<'db>, T>,
- ) -> T
- where
- T: rustc_type_ir::TypeFoldable<DbInterner<'db>>,
- {
- self.infer_ctxt.instantiate_canonical(&canonical).0
+ pub(crate) fn next_var_for_param(&self, id: GenericParamId) -> GenericArg<'db> {
+ self.infer_ctxt.next_var_for_param(id)
}
pub(crate) fn resolve_completely<T>(&mut self, value: T) -> T
@@ -456,7 +328,11 @@
/// Unify two relatable values (e.g. `Ty`) and return new trait goals arising from it, so the
/// caller needs to deal with them.
pub(crate) fn try_unify<T: ToTrace<'db>>(&mut self, t1: T, t2: T) -> InferResult<'db, ()> {
- self.infer_ctxt.at(&ObligationCause::new(), self.trait_env.env).eq(t1, t2)
+ self.at(&ObligationCause::new()).eq(t1, t2)
+ }
+
+ pub(crate) fn at<'a>(&'a self, cause: &'a ObligationCause) -> At<'a, 'db> {
+ self.infer_ctxt.at(cause, self.trait_env.env)
}
pub(crate) fn shallow_resolve(&self, ty: Ty<'db>) -> Ty<'db> {
@@ -486,15 +362,6 @@
self.infer_ctxt.fresh_args_for_item(def)
}
- /// Like `fresh_args_for_item()`, but first uses the args from `first`.
- pub(crate) fn fill_rest_fresh_args(
- &self,
- def_id: SolverDefId,
- first: impl IntoIterator<Item = GenericArg<'db>>,
- ) -> GenericArgs<'db> {
- self.infer_ctxt.fill_rest_fresh_args(def_id, first)
- }
-
/// Try to resolve `ty` to a structural type, normalizing aliases.
///
/// In case there is still ambiguity, the returned type may be an inference
@@ -535,17 +402,6 @@
self.fulfillment_cx = snapshot.obligations;
}
- #[tracing::instrument(skip_all)]
- pub(crate) fn run_in_snapshot<T>(
- &mut self,
- f: impl FnOnce(&mut InferenceTable<'db>) -> T,
- ) -> T {
- let snapshot = self.snapshot();
- let result = f(self);
- self.rollback_to(snapshot);
- result
- }
-
pub(crate) fn commit_if_ok<T, E>(
&mut self,
f: impl FnOnce(&mut InferenceTable<'db>) -> Result<T, E>,
@@ -566,22 +422,19 @@
/// choice (during e.g. method resolution or deref).
#[tracing::instrument(level = "debug", skip(self))]
pub(crate) fn try_obligation(&mut self, predicate: Predicate<'db>) -> NextTraitSolveResult {
- let goal = next_solver::Goal { param_env: self.trait_env.env, predicate };
+ let goal = Goal { param_env: self.trait_env.env, predicate };
let canonicalized = self.canonicalize(goal);
next_trait_solve_canonical_in_ctxt(&self.infer_ctxt, canonicalized)
}
pub(crate) fn register_obligation(&mut self, predicate: Predicate<'db>) {
- let goal = next_solver::Goal { param_env: self.trait_env.env, predicate };
+ let goal = Goal { param_env: self.trait_env.env, predicate };
self.register_obligation_in_env(goal)
}
#[tracing::instrument(level = "debug", skip(self))]
- fn register_obligation_in_env(
- &mut self,
- goal: next_solver::Goal<'db, next_solver::Predicate<'db>>,
- ) {
+ fn register_obligation_in_env(&mut self, goal: Goal<'db, Predicate<'db>>) {
let result = next_trait_solve_in_ctxt(&self.infer_ctxt, goal);
tracing::debug!(?result);
match result {
@@ -619,7 +472,7 @@
self.fulfillment_cx.register_predicate_obligation(&self.infer_ctxt, obligation);
}
- pub(super) fn register_predicates<I>(&mut self, obligations: I)
+ pub(crate) fn register_predicates<I>(&mut self, obligations: I)
where
I: IntoIterator<Item = PredicateObligation<'db>>,
{
@@ -628,6 +481,23 @@
});
}
+ /// Registers an obligation to check later, during regionck, that `term` is well-formed.
+ pub(crate) fn register_wf_obligation(&mut self, term: Term<'db>, cause: ObligationCause) {
+ self.register_predicate(Obligation::new(
+ self.interner(),
+ cause,
+ self.trait_env.env,
+ ClauseKind::WellFormed(term),
+ ));
+ }
+
+ /// Registers obligations that all `args` are well-formed.
+ pub(crate) fn add_wf_bounds(&mut self, args: GenericArgs<'db>) {
+ for term in args.iter().filter_map(|it| it.as_term()) {
+ self.register_wf_obligation(term, ObligationCause::new());
+ }
+ }
+
pub(crate) fn callable_sig(
&mut self,
ty: Ty<'db>,
@@ -714,26 +584,20 @@
}
/// Whenever you lower a user-written type, you should call this.
- pub(crate) fn process_user_written_ty<T>(&mut self, ty: T) -> T
- where
- T: TypeFoldable<DbInterner<'db>>,
- {
+ pub(crate) fn process_user_written_ty(&mut self, ty: Ty<'db>) -> Ty<'db> {
self.process_remote_user_written_ty(ty)
// FIXME: Register a well-formed obligation.
}
/// The difference of this method from `process_user_written_ty()` is that this method doesn't register a well-formed obligation,
/// while `process_user_written_ty()` should (but doesn't currently).
- pub(crate) fn process_remote_user_written_ty<T>(&mut self, ty: T) -> T
- where
- T: TypeFoldable<DbInterner<'db>>,
- {
+ pub(crate) fn process_remote_user_written_ty(&mut self, ty: Ty<'db>) -> Ty<'db> {
let ty = self.insert_type_vars(ty);
// See https://github.com/rust-lang/rust/blob/cdb45c87e2cd43495379f7e867e3cc15dcee9f93/compiler/rustc_hir_typeck/src/fn_ctxt/mod.rs#L487-L495:
// Even though the new solver only lazily normalizes usually, here we eagerly normalize so that not everything needs
// to normalize before inspecting the `TyKind`.
// FIXME(next-solver): We should not deeply normalize here, only shallowly.
- self.normalize_associated_types_in(ty)
+ self.try_structurally_resolve_type(ty)
}
/// Replaces ConstScalar::Unknown by a new type var, so we can maybe still infer it.
@@ -762,7 +626,7 @@
}
let mut ty = ty;
- ty = self.eagerly_normalize_and_resolve_shallow_in(ty);
+ ty = self.try_structurally_resolve_type(ty);
if let Some(sized) = short_circuit_trivial_tys(ty) {
return sized;
}
@@ -784,7 +648,7 @@
// Structs can have DST as its last field and such cases are not handled
// as unsized by the chalk, so we do this manually.
ty = last_field_ty;
- ty = self.eagerly_normalize_and_resolve_shallow_in(ty);
+ ty = self.try_structurally_resolve_type(ty);
if let Some(sized) = short_circuit_trivial_tys(ty) {
return sized;
}
@@ -852,7 +716,7 @@
{
let value = if self.should_normalize {
let cause = ObligationCause::new();
- let at = self.ctx.infer_ctxt.at(&cause, self.ctx.trait_env.env);
+ let at = self.ctx.at(&cause);
let universes = vec![None; outer_exclusive_binder(value).as_usize()];
match deeply_normalize_with_skipped_universes_and_ambiguous_coroutine_goals(
at, value, universes,
diff --git a/crates/hir-ty/src/lang_items.rs b/crates/hir-ty/src/lang_items.rs
index 3ef7f50..d0d0aa7 100644
--- a/crates/hir-ty/src/lang_items.rs
+++ b/crates/hir-ty/src/lang_items.rs
@@ -1,8 +1,7 @@
//! Functions to detect special lang items
use hir_def::{AdtId, lang_item::LangItem, signatures::StructFlags};
-use hir_expand::name::Name;
-use intern::sym;
+use intern::{Symbol, sym};
use crate::db::HirDatabase;
@@ -11,48 +10,48 @@
db.struct_signature(id).flags.contains(StructFlags::IS_BOX)
}
-pub fn lang_items_for_bin_op(op: syntax::ast::BinaryOp) -> Option<(Name, LangItem)> {
+pub fn lang_items_for_bin_op(op: syntax::ast::BinaryOp) -> Option<(Symbol, LangItem)> {
use syntax::ast::{ArithOp, BinaryOp, CmpOp, Ordering};
Some(match op {
BinaryOp::LogicOp(_) => return None,
BinaryOp::ArithOp(aop) => match aop {
- ArithOp::Add => (Name::new_symbol_root(sym::add), LangItem::Add),
- ArithOp::Mul => (Name::new_symbol_root(sym::mul), LangItem::Mul),
- ArithOp::Sub => (Name::new_symbol_root(sym::sub), LangItem::Sub),
- ArithOp::Div => (Name::new_symbol_root(sym::div), LangItem::Div),
- ArithOp::Rem => (Name::new_symbol_root(sym::rem), LangItem::Rem),
- ArithOp::Shl => (Name::new_symbol_root(sym::shl), LangItem::Shl),
- ArithOp::Shr => (Name::new_symbol_root(sym::shr), LangItem::Shr),
- ArithOp::BitXor => (Name::new_symbol_root(sym::bitxor), LangItem::BitXor),
- ArithOp::BitOr => (Name::new_symbol_root(sym::bitor), LangItem::BitOr),
- ArithOp::BitAnd => (Name::new_symbol_root(sym::bitand), LangItem::BitAnd),
+ ArithOp::Add => (sym::add, LangItem::Add),
+ ArithOp::Mul => (sym::mul, LangItem::Mul),
+ ArithOp::Sub => (sym::sub, LangItem::Sub),
+ ArithOp::Div => (sym::div, LangItem::Div),
+ ArithOp::Rem => (sym::rem, LangItem::Rem),
+ ArithOp::Shl => (sym::shl, LangItem::Shl),
+ ArithOp::Shr => (sym::shr, LangItem::Shr),
+ ArithOp::BitXor => (sym::bitxor, LangItem::BitXor),
+ ArithOp::BitOr => (sym::bitor, LangItem::BitOr),
+ ArithOp::BitAnd => (sym::bitand, LangItem::BitAnd),
},
BinaryOp::Assignment { op: Some(aop) } => match aop {
- ArithOp::Add => (Name::new_symbol_root(sym::add_assign), LangItem::AddAssign),
- ArithOp::Mul => (Name::new_symbol_root(sym::mul_assign), LangItem::MulAssign),
- ArithOp::Sub => (Name::new_symbol_root(sym::sub_assign), LangItem::SubAssign),
- ArithOp::Div => (Name::new_symbol_root(sym::div_assign), LangItem::DivAssign),
- ArithOp::Rem => (Name::new_symbol_root(sym::rem_assign), LangItem::RemAssign),
- ArithOp::Shl => (Name::new_symbol_root(sym::shl_assign), LangItem::ShlAssign),
- ArithOp::Shr => (Name::new_symbol_root(sym::shr_assign), LangItem::ShrAssign),
- ArithOp::BitXor => (Name::new_symbol_root(sym::bitxor_assign), LangItem::BitXorAssign),
- ArithOp::BitOr => (Name::new_symbol_root(sym::bitor_assign), LangItem::BitOrAssign),
- ArithOp::BitAnd => (Name::new_symbol_root(sym::bitand_assign), LangItem::BitAndAssign),
+ ArithOp::Add => (sym::add_assign, LangItem::AddAssign),
+ ArithOp::Mul => (sym::mul_assign, LangItem::MulAssign),
+ ArithOp::Sub => (sym::sub_assign, LangItem::SubAssign),
+ ArithOp::Div => (sym::div_assign, LangItem::DivAssign),
+ ArithOp::Rem => (sym::rem_assign, LangItem::RemAssign),
+ ArithOp::Shl => (sym::shl_assign, LangItem::ShlAssign),
+ ArithOp::Shr => (sym::shr_assign, LangItem::ShrAssign),
+ ArithOp::BitXor => (sym::bitxor_assign, LangItem::BitXorAssign),
+ ArithOp::BitOr => (sym::bitor_assign, LangItem::BitOrAssign),
+ ArithOp::BitAnd => (sym::bitand_assign, LangItem::BitAndAssign),
},
BinaryOp::CmpOp(cop) => match cop {
- CmpOp::Eq { negated: false } => (Name::new_symbol_root(sym::eq), LangItem::PartialEq),
- CmpOp::Eq { negated: true } => (Name::new_symbol_root(sym::ne), LangItem::PartialEq),
+ CmpOp::Eq { negated: false } => (sym::eq, LangItem::PartialEq),
+ CmpOp::Eq { negated: true } => (sym::ne, LangItem::PartialEq),
CmpOp::Ord { ordering: Ordering::Less, strict: false } => {
- (Name::new_symbol_root(sym::le), LangItem::PartialOrd)
+ (sym::le, LangItem::PartialOrd)
}
CmpOp::Ord { ordering: Ordering::Less, strict: true } => {
- (Name::new_symbol_root(sym::lt), LangItem::PartialOrd)
+ (sym::lt, LangItem::PartialOrd)
}
CmpOp::Ord { ordering: Ordering::Greater, strict: false } => {
- (Name::new_symbol_root(sym::ge), LangItem::PartialOrd)
+ (sym::ge, LangItem::PartialOrd)
}
CmpOp::Ord { ordering: Ordering::Greater, strict: true } => {
- (Name::new_symbol_root(sym::gt), LangItem::PartialOrd)
+ (sym::gt, LangItem::PartialOrd)
}
},
BinaryOp::Assignment { op: None } => return None,
diff --git a/crates/hir-ty/src/lib.rs b/crates/hir-ty/src/lib.rs
index fdacc1d..b29c7d2 100644
--- a/crates/hir-ty/src/lib.rs
+++ b/crates/hir-ty/src/lib.rs
@@ -89,13 +89,12 @@
could_coerce, could_unify, could_unify_deeply,
};
pub use lower::{
- LifetimeElisionKind, TyDefId, TyLoweringContext, ValueTyDefId,
+ GenericPredicates, ImplTraits, LifetimeElisionKind, TyDefId, TyLoweringContext, ValueTyDefId,
associated_type_shorthand_candidates, diagnostics::*,
};
-pub use method_resolution::check_orphan_rules;
pub use next_solver::interner::{attach_db, attach_db_allow_change, with_attached_db};
pub use target_feature::TargetFeatures;
-pub use traits::TraitEnvironment;
+pub use traits::{TraitEnvironment, check_orphan_rules};
pub use utils::{
TargetFeatureIsSafeInTarget, Unsafety, all_super_traits, direct_super_traits,
is_fn_unsafe_to_call, target_feature_is_safe_in_target,
diff --git a/crates/hir-ty/src/lower.rs b/crates/hir-ty/src/lower.rs
index a181ae0..3f187d2 100644
--- a/crates/hir-ty/src/lower.rs
+++ b/crates/hir-ty/src/lower.rs
@@ -8,19 +8,16 @@
pub(crate) mod diagnostics;
pub(crate) mod path;
-use std::{
- cell::OnceCell,
- iter, mem,
- ops::{self, Deref, Not as _},
-};
+use std::{cell::OnceCell, iter, mem};
+use arrayvec::ArrayVec;
use base_db::Crate;
use either::Either;
use hir_def::{
AdtId, AssocItemId, CallableDefId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId,
- FunctionId, GenericDefId, GenericParamId, HasModule, ImplId, ItemContainerId, LifetimeParamId,
- LocalFieldId, Lookup, StaticId, StructId, TypeAliasId, TypeOrConstParamId, TypeParamId,
- UnionId, VariantId,
+ FunctionId, GeneralConstId, GenericDefId, GenericParamId, HasModule, ImplId, ItemContainerId,
+ LifetimeParamId, LocalFieldId, Lookup, StaticId, StructId, TypeAliasId, TypeOrConstParamId,
+ TypeParamId, UnionId, VariantId,
builtin_type::BuiltinType,
expr_store::{ExpressionStore, HygieneId, path::Path},
hir::generics::{
@@ -45,7 +42,7 @@
AliasTyKind, BoundVarIndexKind, ConstKind, DebruijnIndex, ExistentialPredicate,
ExistentialProjection, ExistentialTraitRef, FnSig, OutlivesPredicate,
TyKind::{self},
- TypeVisitableExt,
+ TypeVisitableExt, Upcast,
inherent::{GenericArg as _, GenericArgs as _, IntoKind as _, Region as _, SliceLike, Ty as _},
};
use salsa::plumbing::AsId;
@@ -56,13 +53,13 @@
use crate::{
FnAbi, ImplTraitId, TraitEnvironment, TyLoweringDiagnostic, TyLoweringDiagnosticKind,
consteval::intern_const_ref,
- db::HirDatabase,
+ db::{HirDatabase, InternedOpaqueTyId},
generics::{Generics, generics, trait_self_param_idx},
next_solver::{
- AliasTy, Binder, BoundExistentialPredicates, Clause, Clauses, Const, DbInterner,
- EarlyBinder, EarlyParamRegion, ErrorGuaranteed, GenericArg, GenericArgs, ParamConst,
- ParamEnv, PolyFnSig, Predicate, Region, SolverDefId, TraitPredicate, TraitRef, Ty, Tys,
- UnevaluatedConst, abi::Safety,
+ AliasTy, Binder, BoundExistentialPredicates, Clause, ClauseKind, Clauses, Const,
+ DbInterner, EarlyBinder, EarlyParamRegion, ErrorGuaranteed, GenericArg, GenericArgs,
+ ParamConst, ParamEnv, PolyFnSig, Predicate, Region, SolverDefId, TraitPredicate, TraitRef,
+ Ty, Tys, UnevaluatedConst, abi::Safety,
},
};
@@ -75,7 +72,7 @@
#[derive(PartialEq, Eq, Debug, Hash)]
pub struct ImplTrait<'db> {
- pub(crate) predicates: Vec<Clause<'db>>,
+ pub(crate) predicates: Box<[Clause<'db>]>,
}
pub type ImplTraitIdx<'db> = Idx<ImplTrait<'db>>;
@@ -338,7 +335,7 @@
Some(Const::new(
self.interner,
rustc_type_ir::ConstKind::Unevaluated(UnevaluatedConst::new(
- SolverDefId::ConstId(c),
+ GeneralConstId::ConstId(c).into(),
args,
)),
))
@@ -473,7 +470,7 @@
let idx = self
.impl_trait_mode
.opaque_type_data
- .alloc(ImplTrait { predicates: Vec::default() });
+ .alloc(ImplTrait { predicates: Box::default() });
let impl_trait_id = origin.either(
|f| ImplTraitId::ReturnTypeImplTrait(f, idx),
@@ -916,8 +913,7 @@
});
predicates.extend(sized_clause);
}
- predicates.shrink_to_fit();
- predicates
+ predicates.into_boxed_slice()
});
ImplTrait { predicates }
}
@@ -982,50 +978,89 @@
Some((trait_ref, create_diagnostics(ctx.diagnostics)))
}
-pub(crate) fn return_type_impl_traits<'db>(
- db: &'db dyn HirDatabase,
- def: hir_def::FunctionId,
-) -> Option<Arc<EarlyBinder<'db, ImplTraits<'db>>>> {
- // FIXME unify with fn_sig_for_fn instead of doing lowering twice, maybe
- let data = db.function_signature(def);
- let resolver = def.resolver(db);
- let mut ctx_ret =
- TyLoweringContext::new(db, &resolver, &data.store, def.into(), LifetimeElisionKind::Infer)
- .with_impl_trait_mode(ImplTraitLoweringMode::Opaque);
- if let Some(ret_type) = data.ret_type {
- let _ret = ctx_ret.lower_ty(ret_type);
- }
- let return_type_impl_traits =
- ImplTraits { impl_traits: ctx_ret.impl_trait_mode.opaque_type_data };
- if return_type_impl_traits.impl_traits.is_empty() {
- None
- } else {
- Some(Arc::new(EarlyBinder::bind(return_type_impl_traits)))
+impl<'db> ImplTraitId<'db> {
+ #[inline]
+ pub fn predicates(self, db: &'db dyn HirDatabase) -> EarlyBinder<'db, &'db [Clause<'db>]> {
+ let (impl_traits, idx) = match self {
+ ImplTraitId::ReturnTypeImplTrait(owner, idx) => {
+ (ImplTraits::return_type_impl_traits(db, owner), idx)
+ }
+ ImplTraitId::TypeAliasImplTrait(owner, idx) => {
+ (ImplTraits::type_alias_impl_traits(db, owner), idx)
+ }
+ };
+ impl_traits
+ .as_deref()
+ .expect("owner should have opaque type")
+ .as_ref()
+ .map_bound(|it| &*it.impl_traits[idx].predicates)
}
}
-pub(crate) fn type_alias_impl_traits<'db>(
- db: &'db dyn HirDatabase,
- def: hir_def::TypeAliasId,
-) -> Option<Arc<EarlyBinder<'db, ImplTraits<'db>>>> {
- let data = db.type_alias_signature(def);
- let resolver = def.resolver(db);
- let mut ctx = TyLoweringContext::new(
- db,
- &resolver,
- &data.store,
- def.into(),
- LifetimeElisionKind::AnonymousReportError,
- )
- .with_impl_trait_mode(ImplTraitLoweringMode::Opaque);
- if let Some(type_ref) = data.ty {
- let _ty = ctx.lower_ty(type_ref);
+impl InternedOpaqueTyId {
+ #[inline]
+ pub fn predicates<'db>(self, db: &'db dyn HirDatabase) -> EarlyBinder<'db, &'db [Clause<'db>]> {
+ self.loc(db).predicates(db)
}
- let type_alias_impl_traits = ImplTraits { impl_traits: ctx.impl_trait_mode.opaque_type_data };
- if type_alias_impl_traits.impl_traits.is_empty() {
- None
- } else {
- Some(Arc::new(EarlyBinder::bind(type_alias_impl_traits)))
+}
+
+#[salsa::tracked]
+impl<'db> ImplTraits<'db> {
+ #[salsa::tracked(returns(ref), unsafe(non_update_return_type))]
+ pub(crate) fn return_type_impl_traits(
+ db: &'db dyn HirDatabase,
+ def: hir_def::FunctionId,
+ ) -> Option<Box<EarlyBinder<'db, ImplTraits<'db>>>> {
+ // FIXME unify with fn_sig_for_fn instead of doing lowering twice, maybe
+ let data = db.function_signature(def);
+ let resolver = def.resolver(db);
+ let mut ctx_ret = TyLoweringContext::new(
+ db,
+ &resolver,
+ &data.store,
+ def.into(),
+ LifetimeElisionKind::Infer,
+ )
+ .with_impl_trait_mode(ImplTraitLoweringMode::Opaque);
+ if let Some(ret_type) = data.ret_type {
+ let _ret = ctx_ret.lower_ty(ret_type);
+ }
+ let mut return_type_impl_traits =
+ ImplTraits { impl_traits: ctx_ret.impl_trait_mode.opaque_type_data };
+ if return_type_impl_traits.impl_traits.is_empty() {
+ None
+ } else {
+ return_type_impl_traits.impl_traits.shrink_to_fit();
+ Some(Box::new(EarlyBinder::bind(return_type_impl_traits)))
+ }
+ }
+
+ #[salsa::tracked(returns(ref), unsafe(non_update_return_type))]
+ pub(crate) fn type_alias_impl_traits(
+ db: &'db dyn HirDatabase,
+ def: hir_def::TypeAliasId,
+ ) -> Option<Box<EarlyBinder<'db, ImplTraits<'db>>>> {
+ let data = db.type_alias_signature(def);
+ let resolver = def.resolver(db);
+ let mut ctx = TyLoweringContext::new(
+ db,
+ &resolver,
+ &data.store,
+ def.into(),
+ LifetimeElisionKind::AnonymousReportError,
+ )
+ .with_impl_trait_mode(ImplTraitLoweringMode::Opaque);
+ if let Some(type_ref) = data.ty {
+ let _ty = ctx.lower_ty(type_ref);
+ }
+ let mut type_alias_impl_traits =
+ ImplTraits { impl_traits: ctx.impl_trait_mode.opaque_type_data };
+ if type_alias_impl_traits.impl_traits.is_empty() {
+ None
+ } else {
+ type_alias_impl_traits.impl_traits.shrink_to_fit();
+ Some(Box::new(EarlyBinder::bind(type_alias_impl_traits)))
+ }
}
}
@@ -1331,12 +1366,13 @@
/// following bounds are disallowed: `T: Foo<U::Item>, U: Foo<T::Item>`, but
/// these are fine: `T: Foo<U::Item>, U: Foo<()>`.
#[tracing::instrument(skip(db), ret)]
-pub(crate) fn generic_predicates_for_param_query<'db>(
+#[salsa::tracked(returns(ref), unsafe(non_update_return_type), cycle_result = generic_predicates_for_param_cycle_result)]
+pub(crate) fn generic_predicates_for_param<'db>(
db: &'db dyn HirDatabase,
def: GenericDefId,
param_id: TypeOrConstParamId,
assoc_name: Option<Name>,
-) -> GenericPredicates<'db> {
+) -> EarlyBinder<'db, Box<[Clause<'db>]>> {
let generics = generics(db, def);
let interner = DbInterner::new_with(db, None, None);
let resolver = def.resolver(db);
@@ -1436,44 +1472,140 @@
predicates.extend(implicitly_sized_predicates);
};
}
- GenericPredicates(predicates.is_empty().not().then(|| predicates.into()))
+ EarlyBinder::bind(predicates.into_boxed_slice())
}
-pub(crate) fn generic_predicates_for_param_cycle_result(
- _db: &dyn HirDatabase,
+pub(crate) fn generic_predicates_for_param_cycle_result<'db>(
+ _db: &'db dyn HirDatabase,
_def: GenericDefId,
_param_id: TypeOrConstParamId,
_assoc_name: Option<Name>,
-) -> GenericPredicates<'_> {
- GenericPredicates(None)
+) -> EarlyBinder<'db, Box<[Clause<'db>]>> {
+ EarlyBinder::bind(Box::new([]))
+}
+
+#[inline]
+pub(crate) fn type_alias_bounds<'db>(
+ db: &'db dyn HirDatabase,
+ type_alias: TypeAliasId,
+) -> EarlyBinder<'db, &'db [Clause<'db>]> {
+ type_alias_bounds_with_diagnostics(db, type_alias).0.as_ref().map_bound(|it| &**it)
+}
+
+#[salsa::tracked(returns(ref), unsafe(non_update_return_type))]
+pub fn type_alias_bounds_with_diagnostics<'db>(
+ db: &'db dyn HirDatabase,
+ type_alias: TypeAliasId,
+) -> (EarlyBinder<'db, Box<[Clause<'db>]>>, Diagnostics) {
+ let type_alias_data = db.type_alias_signature(type_alias);
+ let resolver = hir_def::resolver::HasResolver::resolver(type_alias, db);
+ let mut ctx = TyLoweringContext::new(
+ db,
+ &resolver,
+ &type_alias_data.store,
+ type_alias.into(),
+ LifetimeElisionKind::AnonymousReportError,
+ );
+ let interner = ctx.interner;
+ let def_id = type_alias.into();
+
+ let item_args = GenericArgs::identity_for_item(interner, def_id);
+ let interner_ty = Ty::new_projection_from_args(interner, def_id, item_args);
+
+ let mut bounds = Vec::new();
+ for bound in &type_alias_data.bounds {
+ ctx.lower_type_bound(bound, interner_ty, false).for_each(|pred| {
+ bounds.push(pred);
+ });
+ }
+
+ if !ctx.unsized_types.contains(&interner_ty) {
+ let sized_trait = LangItem::Sized
+ .resolve_trait(ctx.db, interner.krate.expect("Must have interner.krate"));
+ if let Some(sized_trait) = sized_trait {
+ let trait_ref = TraitRef::new_from_args(
+ interner,
+ sized_trait.into(),
+ GenericArgs::new_from_iter(interner, [interner_ty.into()]),
+ );
+ bounds.push(trait_ref.upcast(interner));
+ };
+ }
+
+ (EarlyBinder::bind(bounds.into_boxed_slice()), create_diagnostics(ctx.diagnostics))
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub struct GenericPredicates<'db>(Option<Arc<[Clause<'db>]>>);
+pub struct GenericPredicates<'db> {
+ // The order is the following: first, if `parent_is_trait == true`, comes the implicit trait predicate for the
+ // parent. Then come the explicit predicates for the parent, then the explicit predicates for the child,
+ // then the implicit trait predicate for the child, if `is_trait` is `true`.
+ predicates: EarlyBinder<'db, Box<[Clause<'db>]>>,
+ own_predicates_start: u32,
+ is_trait: bool,
+ parent_is_trait: bool,
+}
+#[salsa::tracked]
impl<'db> GenericPredicates<'db> {
- #[inline]
- pub fn instantiate(
- &self,
- interner: DbInterner<'db>,
- args: GenericArgs<'db>,
- ) -> Option<impl Iterator<Item = Clause<'db>>> {
- self.0
- .as_ref()
- .map(|it| EarlyBinder::bind(it.iter().copied()).iter_instantiated(interner, args))
- }
-
- #[inline]
- pub fn instantiate_identity(&self) -> Option<impl Iterator<Item = Clause<'db>>> {
- self.0.as_ref().map(|it| it.iter().copied())
+ /// Resolve the where clause(s) of an item with generics.
+ ///
+ /// Diagnostics are computed only for this item's predicates, not for parents.
+ #[salsa::tracked(returns(ref), unsafe(non_update_return_type))]
+ pub fn query_with_diagnostics(
+ db: &'db dyn HirDatabase,
+ def: GenericDefId,
+ ) -> (GenericPredicates<'db>, Diagnostics) {
+ generic_predicates_filtered_by(db, def, PredicateFilter::All, |_| true)
}
}
-impl<'db> ops::Deref for GenericPredicates<'db> {
- type Target = [Clause<'db>];
+impl<'db> GenericPredicates<'db> {
+ #[inline]
+ pub fn query(db: &'db dyn HirDatabase, def: GenericDefId) -> &'db GenericPredicates<'db> {
+ &Self::query_with_diagnostics(db, def).0
+ }
- fn deref(&self) -> &Self::Target {
- self.0.as_deref().unwrap_or(&[])
+ #[inline]
+ pub fn query_all(
+ db: &'db dyn HirDatabase,
+ def: GenericDefId,
+ ) -> EarlyBinder<'db, &'db [Clause<'db>]> {
+ Self::query(db, def).all_predicates()
+ }
+
+ #[inline]
+ pub fn query_own(
+ db: &'db dyn HirDatabase,
+ def: GenericDefId,
+ ) -> EarlyBinder<'db, &'db [Clause<'db>]> {
+ Self::query(db, def).own_predicates()
+ }
+
+ #[inline]
+ pub fn query_explicit(
+ db: &'db dyn HirDatabase,
+ def: GenericDefId,
+ ) -> EarlyBinder<'db, &'db [Clause<'db>]> {
+ Self::query(db, def).explicit_predicates()
+ }
+
+ #[inline]
+ pub fn all_predicates(&self) -> EarlyBinder<'db, &[Clause<'db>]> {
+ self.predicates.as_ref().map_bound(|it| &**it)
+ }
+
+ #[inline]
+ pub fn own_predicates(&self) -> EarlyBinder<'db, &[Clause<'db>]> {
+ self.predicates.as_ref().map_bound(|it| &it[self.own_predicates_start as usize..])
+ }
+
+ /// Returns the predicates, minus the implicit `Self: Trait` predicate for a trait.
+ #[inline]
+ pub fn explicit_predicates(&self) -> EarlyBinder<'db, &[Clause<'db>]> {
+ self.predicates.as_ref().map_bound(|it| {
+ &it[usize::from(self.parent_is_trait)..it.len() - usize::from(self.is_trait)]
+ })
}
}
@@ -1492,136 +1624,31 @@
db: &'db dyn HirDatabase,
def: GenericDefId,
) -> Arc<TraitEnvironment<'db>> {
- let generics = generics(db, def);
- if generics.has_no_predicates() && generics.is_empty() {
- return TraitEnvironment::empty(def.krate(db));
- }
-
- let resolver = def.resolver(db);
- let mut ctx = TyLoweringContext::new(
- db,
- &resolver,
- generics.store(),
- def,
- LifetimeElisionKind::AnonymousReportError,
- );
- let mut traits_in_scope = Vec::new();
- let mut clauses = Vec::new();
- for maybe_parent_generics in
- std::iter::successors(Some(&generics), |generics| generics.parent_generics())
- {
- ctx.store = maybe_parent_generics.store();
- for pred in maybe_parent_generics.where_predicates() {
- for pred in ctx.lower_where_predicate(pred, false, &generics, PredicateFilter::All) {
- if let rustc_type_ir::ClauseKind::Trait(tr) = pred.kind().skip_binder() {
- traits_in_scope.push((tr.self_ty(), tr.def_id().0));
- }
- clauses.push(pred);
- }
- }
- }
-
- if let Some(trait_id) = def.assoc_trait_container(db) {
- // add `Self: Trait<T1, T2, ...>` to the environment in trait
- // function default implementations (and speculative code
- // inside consts or type aliases)
- cov_mark::hit!(trait_self_implements_self);
- let trait_ref = TraitRef::identity(ctx.interner, trait_id.into());
- let clause = Clause(Predicate::new(
- ctx.interner,
- Binder::dummy(rustc_type_ir::PredicateKind::Clause(rustc_type_ir::ClauseKind::Trait(
- TraitPredicate { trait_ref, polarity: rustc_type_ir::PredicatePolarity::Positive },
- ))),
- ));
- clauses.push(clause);
- }
-
- let explicitly_unsized_tys = ctx.unsized_types;
-
- let sized_trait = LangItem::Sized.resolve_trait(db, resolver.krate());
- if let Some(sized_trait) = sized_trait {
- let (mut generics, mut def_id) =
- (crate::next_solver::generics::generics(db, def.into()), def);
- loop {
- let self_idx = trait_self_param_idx(db, def_id);
- for (idx, p) in generics.own_params.iter().enumerate() {
- if let Some(self_idx) = self_idx
- && p.index() as usize == self_idx
- {
- continue;
- }
- let GenericParamId::TypeParamId(param_id) = p.id else {
- continue;
- };
- let idx = idx as u32 + generics.parent_count as u32;
- let param_ty = Ty::new_param(ctx.interner, param_id, idx);
- if explicitly_unsized_tys.contains(¶m_ty) {
- continue;
- }
- let trait_ref = TraitRef::new_from_args(
- ctx.interner,
- sized_trait.into(),
- GenericArgs::new_from_iter(ctx.interner, [param_ty.into()]),
- );
- let clause = Clause(Predicate::new(
- ctx.interner,
- Binder::dummy(rustc_type_ir::PredicateKind::Clause(
- rustc_type_ir::ClauseKind::Trait(TraitPredicate {
- trait_ref,
- polarity: rustc_type_ir::PredicatePolarity::Positive,
- }),
- )),
- ));
- clauses.push(clause);
- }
-
- if let Some(g) = generics.parent {
- generics = crate::next_solver::generics::generics(db, g.into());
- def_id = g;
- } else {
- break;
- }
- }
- }
-
- let clauses = rustc_type_ir::elaborate::elaborate(ctx.interner, clauses);
- let clauses = Clauses::new_from_iter(ctx.interner, clauses);
+ let module = def.module(db);
+ let interner = DbInterner::new_with(db, Some(module.krate()), module.containing_block());
+ let predicates = GenericPredicates::query_all(db, def);
+ let traits_in_scope = predicates
+ .iter_identity_copied()
+ .filter_map(|pred| match pred.kind().skip_binder() {
+ ClauseKind::Trait(tr) => Some((tr.self_ty(), tr.def_id().0)),
+ _ => None,
+ })
+ .collect();
+ let clauses = rustc_type_ir::elaborate::elaborate(interner, predicates.iter_identity_copied());
+ let clauses = Clauses::new_from_iter(interner, clauses);
let env = ParamEnv { clauses };
- TraitEnvironment::new(resolver.krate(), None, traits_in_scope.into_boxed_slice(), env)
+ // FIXME: We should normalize projections here, like rustc does.
+
+ TraitEnvironment::new(module.krate(), module.containing_block(), traits_in_scope, env)
}
-#[derive(Copy, Clone, Debug)]
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub(crate) enum PredicateFilter {
SelfTrait,
All,
}
-/// Resolve the where clause(s) of an item with generics.
-#[tracing::instrument(skip(db))]
-pub(crate) fn generic_predicates_query<'db>(
- db: &'db dyn HirDatabase,
- def: GenericDefId,
-) -> GenericPredicates<'db> {
- generic_predicates_filtered_by(db, def, PredicateFilter::All, |_| true).0
-}
-
-pub(crate) fn generic_predicates_without_parent_query<'db>(
- db: &'db dyn HirDatabase,
- def: GenericDefId,
-) -> GenericPredicates<'db> {
- generic_predicates_filtered_by(db, def, PredicateFilter::All, |d| d == def).0
-}
-
-/// Resolve the where clause(s) of an item with generics,
-/// except the ones inherited from the parent
-pub(crate) fn generic_predicates_without_parent_with_diagnostics_query<'db>(
- db: &'db dyn HirDatabase,
- def: GenericDefId,
-) -> (GenericPredicates<'db>, Diagnostics) {
- generic_predicates_filtered_by(db, def, PredicateFilter::All, |d| d == def)
-}
-
/// Resolve the where clause(s) of an item with generics,
/// with a given filter
#[tracing::instrument(skip(db, filter), ret)]
@@ -1644,15 +1671,35 @@
def,
LifetimeElisionKind::AnonymousReportError,
);
+ let sized_trait = LangItem::Sized.resolve_trait(db, resolver.krate());
let mut predicates = Vec::new();
- for maybe_parent_generics in
+ let all_generics =
std::iter::successors(Some(&generics), |generics| generics.parent_generics())
- {
- ctx.store = maybe_parent_generics.store();
- for pred in maybe_parent_generics.where_predicates() {
- tracing::debug!(?pred);
- if filter(maybe_parent_generics.def()) {
+ .collect::<ArrayVec<_, 2>>();
+ let mut is_trait = false;
+ let mut parent_is_trait = false;
+ if all_generics.len() > 1 {
+ add_implicit_trait_predicate(
+ interner,
+ all_generics.last().unwrap().def(),
+ predicate_filter,
+ &mut predicates,
+ &mut parent_is_trait,
+ );
+ }
+ // We need to lower parent predicates first - see the comment below the lowering of implicit `Sized`
+ // predicates for why.
+ let mut own_predicates_start = 0;
+ for &maybe_parent_generics in all_generics.iter().rev() {
+ let current_def_predicates_start = predicates.len();
+ // Collect only diagnostics from the child, not including parents.
+ ctx.diagnostics.clear();
+
+ if filter(maybe_parent_generics.def()) {
+ ctx.store = maybe_parent_generics.store();
+ for pred in maybe_parent_generics.where_predicates() {
+ tracing::debug!(?pred);
predicates.extend(ctx.lower_where_predicate(
pred,
false,
@@ -1660,66 +1707,132 @@
predicate_filter,
));
}
+
+ push_const_arg_has_type_predicates(db, &mut predicates, maybe_parent_generics);
+
+ if let Some(sized_trait) = sized_trait {
+ let mut add_sized_clause = |param_idx, param_id, param_data| {
+ let (
+ GenericParamId::TypeParamId(param_id),
+ GenericParamDataRef::TypeParamData(param_data),
+ ) = (param_id, param_data)
+ else {
+ return;
+ };
+
+ if param_data.provenance == TypeParamProvenance::TraitSelf {
+ return;
+ }
+
+ let param_ty = Ty::new_param(interner, param_id, param_idx);
+ if ctx.unsized_types.contains(¶m_ty) {
+ return;
+ }
+ let trait_ref = TraitRef::new_from_args(
+ interner,
+ sized_trait.into(),
+ GenericArgs::new_from_iter(interner, [param_ty.into()]),
+ );
+ let clause = Clause(Predicate::new(
+ interner,
+ Binder::dummy(rustc_type_ir::PredicateKind::Clause(
+ rustc_type_ir::ClauseKind::Trait(TraitPredicate {
+ trait_ref,
+ polarity: rustc_type_ir::PredicatePolarity::Positive,
+ }),
+ )),
+ ));
+ predicates.push(clause);
+ };
+ let parent_params_len = maybe_parent_generics.len_parent();
+ maybe_parent_generics.iter_self().enumerate().for_each(
+ |(param_idx, (param_id, param_data))| {
+ add_sized_clause(
+ (param_idx + parent_params_len) as u32,
+ param_id,
+ param_data,
+ );
+ },
+ );
+ }
+
+ // We do not clear `ctx.unsized_types`, as the `?Sized` clause of a child (e.g. an associated type) can
+ // be declared on the parent (e.g. the trait). It is nevertheless fine to register the implicit `Sized`
+ // predicates before lowering the child, as a child cannot define a `?Sized` predicate for its parent.
+ // But we do have to lower the parent first.
+ }
+
+ if maybe_parent_generics.def() == def {
+ own_predicates_start = current_def_predicates_start as u32;
}
}
- let explicitly_unsized_tys = ctx.unsized_types;
+ add_implicit_trait_predicate(interner, def, predicate_filter, &mut predicates, &mut is_trait);
- let sized_trait = LangItem::Sized.resolve_trait(db, resolver.krate());
- if let Some(sized_trait) = sized_trait {
- let mut add_sized_clause = |param_idx, param_id, param_data| {
- let (
- GenericParamId::TypeParamId(param_id),
- GenericParamDataRef::TypeParamData(param_data),
- ) = (param_id, param_data)
- else {
- return;
- };
+ let diagnostics = create_diagnostics(ctx.diagnostics);
+ let predicates = GenericPredicates {
+ own_predicates_start,
+ is_trait,
+ parent_is_trait,
+ predicates: EarlyBinder::bind(predicates.into_boxed_slice()),
+ };
+ return (predicates, diagnostics);
- if param_data.provenance == TypeParamProvenance::TraitSelf {
- return;
- }
-
- let param_ty = Ty::new_param(interner, param_id, param_idx);
- if explicitly_unsized_tys.contains(¶m_ty) {
- return;
- }
- let trait_ref = TraitRef::new_from_args(
- interner,
- sized_trait.into(),
- GenericArgs::new_from_iter(interner, [param_ty.into()]),
- );
- let clause = Clause(Predicate::new(
- interner,
- Binder::dummy(rustc_type_ir::PredicateKind::Clause(
- rustc_type_ir::ClauseKind::Trait(TraitPredicate {
- trait_ref,
- polarity: rustc_type_ir::PredicatePolarity::Positive,
- }),
- )),
- ));
- predicates.push(clause);
- };
- if generics.parent_generics().is_some_and(|parent| filter(parent.def())) {
- generics.iter_parent().enumerate().for_each(|(param_idx, (param_id, param_data))| {
- add_sized_clause(param_idx as u32, param_id, param_data);
- });
- }
- if filter(def) {
- let parent_params_len = generics.len_parent();
- generics.iter_self().enumerate().for_each(|(param_idx, (param_id, param_data))| {
- add_sized_clause((param_idx + parent_params_len) as u32, param_id, param_data);
- });
+ fn add_implicit_trait_predicate<'db>(
+ interner: DbInterner<'db>,
+ def: GenericDefId,
+ predicate_filter: PredicateFilter,
+ predicates: &mut Vec<Clause<'db>>,
+ set_is_trait: &mut bool,
+ ) {
+ // For traits, add `Self: Trait` predicate. This is
+ // not part of the predicates that a user writes, but it
+ // is something that one must prove in order to invoke a
+ // method or project an associated type.
+ //
+ // In the chalk setup, this predicate is not part of the
+ // "predicates" for a trait item. But it is useful in
+ // rustc because if you directly (e.g.) invoke a trait
+ // method like `Trait::method(...)`, you must naturally
+ // prove that the trait applies to the types that were
+ // used, and adding the predicate into this list ensures
+ // that this is done.
+ if let GenericDefId::TraitId(def_id) = def
+ && predicate_filter == PredicateFilter::All
+ {
+ *set_is_trait = true;
+ predicates.push(TraitRef::identity(interner, def_id.into()).upcast(interner));
}
}
+}
- // FIXME: rustc gathers more predicates by recursing through resulting trait predicates.
- // See https://github.com/rust-lang/rust/blob/76c5ed2847cdb26ef2822a3a165d710f6b772217/compiler/rustc_hir_analysis/src/collect/predicates_of.rs#L689-L715
+fn push_const_arg_has_type_predicates<'db>(
+ db: &'db dyn HirDatabase,
+ predicates: &mut Vec<Clause<'db>>,
+ generics: &Generics,
+) {
+ let interner = DbInterner::new_with(db, None, None);
+ let const_params_offset = generics.len_parent() + generics.len_lifetimes_self();
+ for (param_index, (param_idx, param_data)) in generics.iter_self_type_or_consts().enumerate() {
+ if !matches!(param_data, TypeOrConstParamData::ConstParamData(_)) {
+ continue;
+ }
- (
- GenericPredicates(predicates.is_empty().not().then(|| predicates.into())),
- create_diagnostics(ctx.diagnostics),
- )
+ let param_id = ConstParamId::from_unchecked(TypeOrConstParamId {
+ parent: generics.def(),
+ local_id: param_idx,
+ });
+ predicates.push(Clause(
+ ClauseKind::ConstArgHasType(
+ Const::new_param(
+ interner,
+ ParamConst { id: param_id, index: (param_index + const_params_offset) as u32 },
+ ),
+ db.const_param_ty_ns(param_id),
+ )
+ .upcast(interner),
+ ));
+ }
}
/// Generate implicit `: Sized` predicates for all generics that has no `?Sized` bound.
@@ -2112,7 +2225,8 @@
|pred| pred != def && pred == GenericDefId::TraitId(trait_ref.def_id.0),
)
.0
- .deref()
+ .predicates
+ .instantiate_identity()
{
tracing::debug!(?pred);
let sup_trait_ref = match pred.kind().skip_binder() {
@@ -2158,10 +2272,11 @@
}
let predicates =
- db.generic_predicates_for_param(def, param_id.into(), assoc_name.clone());
+ generic_predicates_for_param(db, def, param_id.into(), assoc_name.clone());
predicates
- .iter()
- .find_map(|pred| match (*pred).kind().skip_binder() {
+ .as_ref()
+ .iter_identity_copied()
+ .find_map(|pred| match pred.kind().skip_binder() {
rustc_type_ir::ClauseKind::Trait(trait_predicate) => Some(trait_predicate),
_ => None,
})
diff --git a/crates/hir-ty/src/lower/path.rs b/crates/hir-ty/src/lower/path.rs
index 9ba0da6..6d3ce74 100644
--- a/crates/hir-ty/src/lower/path.rs
+++ b/crates/hir-ty/src/lower/path.rs
@@ -774,7 +774,7 @@
}
}
- fn parent_arg(&mut self, param_id: GenericParamId) -> GenericArg<'db> {
+ fn parent_arg(&mut self, _param_idx: u32, param_id: GenericParamId) -> GenericArg<'db> {
match param_id {
GenericParamId::TypeParamId(_) => {
Ty::new_error(self.ctx.ctx.interner, ErrorGuaranteed).into()
@@ -992,7 +992,7 @@
preceding_args: &[GenericArg<'db>],
) -> GenericArg<'db>;
- fn parent_arg(&mut self, param_id: GenericParamId) -> GenericArg<'db>;
+ fn parent_arg(&mut self, param_idx: u32, param_id: GenericParamId) -> GenericArg<'db>;
}
/// Returns true if there was an error.
@@ -1129,7 +1129,9 @@
let mut substs = Vec::with_capacity(def_generics.len());
- substs.extend(def_generics.iter_parent_id().map(|id| ctx.parent_arg(id)));
+ substs.extend(
+ def_generics.iter_parent_id().enumerate().map(|(idx, id)| ctx.parent_arg(idx as u32, id)),
+ );
let mut args = args_slice.iter().enumerate().peekable();
let mut params = def_generics.iter_self().peekable();
diff --git a/crates/hir-ty/src/method_resolution.rs b/crates/hir-ty/src/method_resolution.rs
index 1e30897..59299f2 100644
--- a/crates/hir-ty/src/method_resolution.rs
+++ b/crates/hir-ty/src/method_resolution.rs
@@ -2,683 +2,349 @@
//! For details about how this works in rustc, see the method lookup page in the
//! [rustc guide](https://rust-lang.github.io/rustc-guide/method-lookup.html)
//! and the corresponding code mostly in rustc_hir_analysis/check/method/probe.rs.
-use std::ops::ControlFlow;
+
+mod confirm;
+mod probe;
+
+use either::Either;
+use hir_expand::name::Name;
+use span::Edition;
+use tracing::{debug, instrument};
use base_db::Crate;
use hir_def::{
- AdtId, AssocItemId, BlockId, ConstId, FunctionId, HasModule, ImplId, ItemContainerId, Lookup,
- ModuleId, TraitId, TypeAliasId,
+ AssocItemId, BlockId, ConstId, FunctionId, GenericParamId, HasModule, ImplId, ItemContainerId,
+ ModuleId, TraitId,
+ expr_store::path::GenericArgs as HirGenericArgs,
+ hir::ExprId,
nameres::{DefMap, block_def_map, crate_def_map},
- signatures::{ConstFlags, EnumFlags, FnFlags, StructFlags, TraitFlags, TypeAliasFlags},
+ resolver::Resolver,
};
-use hir_expand::name::Name;
-use intern::sym;
-use rustc_ast_ir::Mutability;
+use intern::{Symbol, sym};
use rustc_hash::{FxHashMap, FxHashSet};
use rustc_type_ir::{
- FloatTy, IntTy, TypeVisitableExt, UintTy,
- inherent::{
- AdtDef, BoundExistentialPredicates, GenericArgs as _, IntoKind, SliceLike, Ty as _,
- },
+ TypeVisitableExt,
+ fast_reject::{TreatParams, simplify_type},
+ inherent::{BoundExistentialPredicates, IntoKind, SliceLike},
};
-use smallvec::{SmallVec, smallvec};
-use stdx::never;
+use stdx::impl_from;
use triomphe::Arc;
use crate::{
- TraitEnvironment,
- autoderef::{self, AutoderefKind},
+ TraitEnvironment, all_super_traits,
db::HirDatabase,
- infer::{Adjust, Adjustment, OverloadedDeref, PointerCast, unify::InferenceTable},
- lang_items::is_box,
+ infer::{InferenceContext, unify::InferenceTable},
+ lower::GenericPredicates,
next_solver::{
- Canonical, DbInterner, ErrorGuaranteed, GenericArgs, Goal, Predicate, Region, SolverDefId,
- TraitRef, Ty, TyKind, TypingMode,
+ Binder, ClauseKind, DbInterner, FnSig, GenericArgs, PredicateKind, SimplifiedType,
+ SolverDefId, TraitRef, Ty, TyKind, TypingMode,
infer::{
- DbInternerInferExt, InferCtxt,
+ BoundRegionConversionTime, DbInternerInferExt, InferCtxt, InferOk,
select::ImplSource,
- traits::{Obligation, ObligationCause, PredicateObligation},
+ traits::{Obligation, ObligationCause, PredicateObligations},
},
obligation_ctxt::ObligationCtxt,
+ util::clauses_as_obligations,
},
- traits::next_trait_solve_canonical_in_ctxt,
- utils::all_super_traits,
};
-/// This is used as a key for indexing impls.
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
-pub enum TyFingerprint {
- // These are lang item impls:
- Str,
- Slice,
- Array,
- Never,
- Ref(Mutability),
- RawPtr(Mutability),
- Bool,
- Char,
- Int(IntTy),
- Uint(UintTy),
- Float(FloatTy),
- // These can have user-defined impls:
- Adt(hir_def::AdtId),
- Dyn(TraitId),
- ForeignType(TypeAliasId),
- // These only exist for trait impls
- Unit,
- Unnameable,
- Function(u32),
-}
-
-impl TyFingerprint {
- /// Creates a TyFingerprint for looking up an inherent impl. Only certain
- /// types can have inherent impls: if we have some `struct S`, we can have
- /// an `impl S`, but not `impl &S`. Hence, this will return `None` for
- /// reference types and such.
- pub fn for_inherent_impl<'db>(ty: Ty<'db>) -> Option<TyFingerprint> {
- let fp = match ty.kind() {
- TyKind::Str => TyFingerprint::Str,
- TyKind::Never => TyFingerprint::Never,
- TyKind::Slice(..) => TyFingerprint::Slice,
- TyKind::Array(..) => TyFingerprint::Array,
- TyKind::Bool => TyFingerprint::Bool,
- TyKind::Char => TyFingerprint::Char,
- TyKind::Int(int) => TyFingerprint::Int(int),
- TyKind::Uint(int) => TyFingerprint::Uint(int),
- TyKind::Float(float) => TyFingerprint::Float(float),
- TyKind::Adt(adt_def, _) => TyFingerprint::Adt(adt_def.def_id().0),
- TyKind::RawPtr(_, mutability) => TyFingerprint::RawPtr(mutability),
- TyKind::Foreign(alias_id, ..) => TyFingerprint::ForeignType(alias_id.0),
- TyKind::Dynamic(bounds, _) => {
- bounds.principal_def_id().map(|trait_| TyFingerprint::Dyn(trait_.0))?
- }
- _ => return None,
- };
- Some(fp)
- }
-
- /// Creates a TyFingerprint for looking up a trait impl.
- pub fn for_trait_impl<'db>(ty: Ty<'db>) -> Option<TyFingerprint> {
- let fp = match ty.kind() {
- TyKind::Str => TyFingerprint::Str,
- TyKind::Never => TyFingerprint::Never,
- TyKind::Slice(..) => TyFingerprint::Slice,
- TyKind::Array(..) => TyFingerprint::Array,
- TyKind::Bool => TyFingerprint::Bool,
- TyKind::Char => TyFingerprint::Char,
- TyKind::Int(int) => TyFingerprint::Int(int),
- TyKind::Uint(int) => TyFingerprint::Uint(int),
- TyKind::Float(float) => TyFingerprint::Float(float),
- TyKind::Adt(adt_def, _) => TyFingerprint::Adt(adt_def.def_id().0),
- TyKind::RawPtr(_, mutability) => TyFingerprint::RawPtr(mutability),
- TyKind::Foreign(alias_id, ..) => TyFingerprint::ForeignType(alias_id.0),
- TyKind::Dynamic(bounds, _) => {
- bounds.principal_def_id().map(|trait_| TyFingerprint::Dyn(trait_.0))?
- }
- TyKind::Ref(_, _, mutability) => TyFingerprint::Ref(mutability),
- TyKind::Tuple(subst) => {
- let first_ty = subst.as_slice().first();
- match first_ty {
- Some(ty) => return TyFingerprint::for_trait_impl(*ty),
- None => TyFingerprint::Unit,
- }
- }
- // FIXME(next-solver): Putting `Alias` here is *probably* incorrect, AFAIK it should return `None`. But this breaks
- // flyimport, which uses an incorrect but fast method resolution algorithm. Therefore we put it here,
- // because this function is only called by flyimport, and anyway we should get rid of `TyFingerprint`
- // and switch to `rustc_type_ir`'s `SimplifiedType`.
- TyKind::Alias(..)
- | TyKind::FnDef(_, _)
- | TyKind::Closure(_, _)
- | TyKind::Coroutine(..)
- | TyKind::CoroutineClosure(..)
- | TyKind::CoroutineWitness(..) => TyFingerprint::Unnameable,
- TyKind::FnPtr(sig, _) => {
- TyFingerprint::Function(sig.skip_binder().inputs_and_output.inner().len() as u32)
- }
- TyKind::Param(_)
- | TyKind::Bound(..)
- | TyKind::Placeholder(..)
- | TyKind::Infer(_)
- | TyKind::Error(_)
- | TyKind::Pat(..)
- | TyKind::UnsafeBinder(..) => return None,
- };
- Some(fp)
- }
-}
-
-pub(crate) const ALL_INT_FPS: [TyFingerprint; 12] = [
- TyFingerprint::Int(IntTy::I8),
- TyFingerprint::Int(IntTy::I16),
- TyFingerprint::Int(IntTy::I32),
- TyFingerprint::Int(IntTy::I64),
- TyFingerprint::Int(IntTy::I128),
- TyFingerprint::Int(IntTy::Isize),
- TyFingerprint::Uint(UintTy::U8),
- TyFingerprint::Uint(UintTy::U16),
- TyFingerprint::Uint(UintTy::U32),
- TyFingerprint::Uint(UintTy::U64),
- TyFingerprint::Uint(UintTy::U128),
- TyFingerprint::Uint(UintTy::Usize),
-];
-
-pub(crate) const ALL_FLOAT_FPS: [TyFingerprint; 4] = [
- TyFingerprint::Float(FloatTy::F16),
- TyFingerprint::Float(FloatTy::F32),
- TyFingerprint::Float(FloatTy::F64),
- TyFingerprint::Float(FloatTy::F128),
-];
-
-type TraitFpMap = FxHashMap<TraitId, FxHashMap<Option<TyFingerprint>, Box<[ImplId]>>>;
-type TraitFpMapCollector = FxHashMap<TraitId, FxHashMap<Option<TyFingerprint>, Vec<ImplId>>>;
-
-/// Trait impls defined or available in some crate.
-#[derive(Debug, Eq, PartialEq)]
-pub struct TraitImpls {
- // If the `Option<TyFingerprint>` is `None`, the impl may apply to any self type.
- map: TraitFpMap,
-}
-
-impl TraitImpls {
- pub(crate) fn trait_impls_in_crate_query(db: &dyn HirDatabase, krate: Crate) -> Arc<Self> {
- let _p = tracing::info_span!("trait_impls_in_crate_query", ?krate).entered();
- let mut impls = FxHashMap::default();
-
- Self::collect_def_map(db, &mut impls, crate_def_map(db, krate));
-
- Arc::new(Self::finish(impls))
- }
-
- pub(crate) fn trait_impls_in_block_query(
- db: &dyn HirDatabase,
- block: BlockId,
- ) -> Option<Arc<Self>> {
- let _p = tracing::info_span!("trait_impls_in_block_query").entered();
- let mut impls = FxHashMap::default();
-
- Self::collect_def_map(db, &mut impls, block_def_map(db, block));
-
- if impls.is_empty() { None } else { Some(Arc::new(Self::finish(impls))) }
- }
-
- pub(crate) fn trait_impls_in_deps_query(
- db: &dyn HirDatabase,
- krate: Crate,
- ) -> Arc<[Arc<Self>]> {
- let _p = tracing::info_span!("trait_impls_in_deps_query", ?krate).entered();
- Arc::from_iter(
- db.transitive_deps(krate).into_iter().map(|krate| db.trait_impls_in_crate(krate)),
- )
- }
-
- fn finish(map: TraitFpMapCollector) -> TraitImpls {
- TraitImpls {
- map: map
- .into_iter()
- .map(|(k, v)| (k, v.into_iter().map(|(k, v)| (k, v.into_boxed_slice())).collect()))
- .collect(),
- }
- }
-
- fn collect_def_map(db: &dyn HirDatabase, map: &mut TraitFpMapCollector, def_map: &DefMap) {
- for (_module_id, module_data) in def_map.modules() {
- for impl_id in module_data.scope.impls() {
- // Reservation impls should be ignored during trait resolution, so we never need
- // them during type analysis. See rust-lang/rust#64631 for details.
- //
- // FIXME: Reservation impls should be considered during coherence checks. If we are
- // (ever) to implement coherence checks, this filtering should be done by the trait
- // solver.
- if db.attrs(impl_id.into()).by_key(sym::rustc_reservation_impl).exists() {
- continue;
- }
- let target_trait = match db.impl_trait(impl_id) {
- Some(tr) => tr.skip_binder().def_id.0,
- None => continue,
- };
- let self_ty = db.impl_self_ty(impl_id);
- let self_ty_fp = TyFingerprint::for_trait_impl(self_ty.instantiate_identity());
- map.entry(target_trait).or_default().entry(self_ty_fp).or_default().push(impl_id);
- }
-
- // To better support custom derives, collect impls in all unnamed const items.
- // const _: () = { ... };
- for konst in module_data.scope.unnamed_consts() {
- let body = db.body(konst.into());
- for (_, block_def_map) in body.blocks(db) {
- Self::collect_def_map(db, map, block_def_map);
- }
- }
- }
- }
-
- /// Queries all trait impls for the given type.
- pub fn for_self_ty_without_blanket_impls(
- &self,
- fp: TyFingerprint,
- ) -> impl Iterator<Item = ImplId> + '_ {
- self.map
- .values()
- .flat_map(move |impls| impls.get(&Some(fp)).into_iter())
- .flat_map(|it| it.iter().copied())
- }
-
- /// Queries all impls of the given trait.
- pub fn for_trait(&self, trait_: TraitId) -> impl Iterator<Item = ImplId> + '_ {
- self.map
- .get(&trait_)
- .into_iter()
- .flat_map(|map| map.values().flat_map(|v| v.iter().copied()))
- }
-
- /// Queries all impls of `trait_` that may apply to `self_ty`.
- pub fn for_trait_and_self_ty(
- &self,
- trait_: TraitId,
- self_ty: TyFingerprint,
- ) -> impl Iterator<Item = ImplId> + '_ {
- self.map
- .get(&trait_)
- .into_iter()
- .flat_map(move |map| map.get(&Some(self_ty)).into_iter().chain(map.get(&None)))
- .flat_map(|v| v.iter().copied())
- }
-
- /// Queries whether `self_ty` has potentially applicable implementations of `trait_`.
- pub fn has_impls_for_trait_and_self_ty(&self, trait_: TraitId, self_ty: TyFingerprint) -> bool {
- self.for_trait_and_self_ty(trait_, self_ty).next().is_some()
- }
-
- pub fn all_impls(&self) -> impl Iterator<Item = ImplId> + '_ {
- self.map.values().flat_map(|map| map.values().flat_map(|v| v.iter().copied()))
- }
-}
-
-/// Inherent impls defined in some crate.
-///
-/// Inherent impls can only be defined in the crate that also defines the self type of the impl
-/// (note that some primitives are considered to be defined by both libcore and liballoc).
-///
-/// This makes inherent impl lookup easier than trait impl lookup since we only have to consider a
-/// single crate.
-#[derive(Debug, Eq, PartialEq)]
-pub struct InherentImpls {
- map: FxHashMap<TyFingerprint, Vec<ImplId>>,
- invalid_impls: Vec<ImplId>,
-}
-
-impl InherentImpls {
- pub(crate) fn inherent_impls_in_crate_query(db: &dyn HirDatabase, krate: Crate) -> Arc<Self> {
- let _p = tracing::info_span!("inherent_impls_in_crate_query", ?krate).entered();
- let mut impls = Self { map: FxHashMap::default(), invalid_impls: Vec::default() };
-
- let crate_def_map = crate_def_map(db, krate);
- impls.collect_def_map(db, crate_def_map);
- impls.shrink_to_fit();
-
- Arc::new(impls)
- }
-
- pub(crate) fn inherent_impls_in_block_query(
- db: &dyn HirDatabase,
- block: BlockId,
- ) -> Option<Arc<Self>> {
- let _p = tracing::info_span!("inherent_impls_in_block_query").entered();
- let mut impls = Self { map: FxHashMap::default(), invalid_impls: Vec::default() };
-
- let block_def_map = block_def_map(db, block);
- impls.collect_def_map(db, block_def_map);
- impls.shrink_to_fit();
-
- if impls.map.is_empty() && impls.invalid_impls.is_empty() {
- None
- } else {
- Some(Arc::new(impls))
- }
- }
-
- fn shrink_to_fit(&mut self) {
- self.map.values_mut().for_each(Vec::shrink_to_fit);
- self.map.shrink_to_fit();
- }
-
- fn collect_def_map(&mut self, db: &dyn HirDatabase, def_map: &DefMap) {
- for (_module_id, module_data) in def_map.modules() {
- for impl_id in module_data.scope.impls() {
- let data = db.impl_signature(impl_id);
- if data.target_trait.is_some() {
- continue;
- }
-
- let self_ty = db.impl_self_ty(impl_id);
- let self_ty = self_ty.instantiate_identity();
-
- match is_inherent_impl_coherent(db, def_map, impl_id, self_ty) {
- true => {
- // `fp` should only be `None` in error cases (either erroneous code or incomplete name resolution)
- if let Some(fp) = TyFingerprint::for_inherent_impl(self_ty) {
- self.map.entry(fp).or_default().push(impl_id);
- }
- }
- false => self.invalid_impls.push(impl_id),
- }
- }
-
- // To better support custom derives, collect impls in all unnamed const items.
- // const _: () = { ... };
- for konst in module_data.scope.unnamed_consts() {
- let body = db.body(konst.into());
- for (_, block_def_map) in body.blocks(db) {
- self.collect_def_map(db, block_def_map);
- }
- }
- }
- }
-
- pub fn for_self_ty<'db>(&self, self_ty: Ty<'db>) -> &[ImplId] {
- match TyFingerprint::for_inherent_impl(self_ty) {
- Some(fp) => self.map.get(&fp).map(|vec| vec.as_ref()).unwrap_or(&[]),
- None => &[],
- }
- }
-
- pub fn all_impls(&self) -> impl Iterator<Item = ImplId> + '_ {
- self.map.values().flat_map(|v| v.iter().copied())
- }
-
- pub fn invalid_impls(&self) -> &[ImplId] {
- &self.invalid_impls
- }
-}
-
-pub(crate) fn incoherent_inherent_impl_crates(
- db: &dyn HirDatabase,
- krate: Crate,
- fp: TyFingerprint,
-) -> SmallVec<[Crate; 2]> {
- let _p = tracing::info_span!("incoherent_inherent_impl_crates").entered();
- let mut res = SmallVec::new();
-
- // should pass crate for finger print and do reverse deps
-
- for krate in db.transitive_deps(krate) {
- let impls = db.inherent_impls_in_crate(krate);
- if impls.map.get(&fp).is_some_and(|v| !v.is_empty()) {
- res.push(krate);
- }
- }
-
- res
-}
-
-pub fn def_crates<'db>(
- db: &'db dyn HirDatabase,
- ty: Ty<'db>,
- cur_crate: Crate,
-) -> Option<SmallVec<[Crate; 2]>> {
- match ty.kind() {
- TyKind::Adt(adt_def, _) => {
- let def_id = adt_def.def_id().0;
- let rustc_has_incoherent_inherent_impls = match def_id {
- hir_def::AdtId::StructId(id) => db
- .struct_signature(id)
- .flags
- .contains(StructFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS),
- hir_def::AdtId::UnionId(id) => db
- .union_signature(id)
- .flags
- .contains(StructFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS),
- hir_def::AdtId::EnumId(id) => db
- .enum_signature(id)
- .flags
- .contains(EnumFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS),
- };
- Some(if rustc_has_incoherent_inherent_impls {
- db.incoherent_inherent_impl_crates(cur_crate, TyFingerprint::Adt(def_id))
- } else {
- smallvec![def_id.module(db).krate()]
- })
- }
- TyKind::Foreign(alias) => {
- let alias = alias.0;
- Some(
- if db
- .type_alias_signature(alias)
- .flags
- .contains(TypeAliasFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPL)
- {
- db.incoherent_inherent_impl_crates(cur_crate, TyFingerprint::ForeignType(alias))
- } else {
- smallvec![alias.module(db).krate()]
- },
- )
- }
- TyKind::Dynamic(bounds, _) => {
- let trait_id = bounds.principal_def_id()?.0;
- Some(
- if db
- .trait_signature(trait_id)
- .flags
- .contains(TraitFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS)
- {
- db.incoherent_inherent_impl_crates(cur_crate, TyFingerprint::Dyn(trait_id))
- } else {
- smallvec![trait_id.module(db).krate()]
- },
- )
- }
- // for primitives, there may be impls in various places (core and alloc
- // mostly). We just check the whole crate graph for crates with impls
- // (cached behind a query).
- TyKind::Bool
- | TyKind::Char
- | TyKind::Int(_)
- | TyKind::Uint(_)
- | TyKind::Float(_)
- | TyKind::Str
- | TyKind::Slice(_)
- | TyKind::Array(..)
- | TyKind::RawPtr(..) => Some(db.incoherent_inherent_impl_crates(
- cur_crate,
- TyFingerprint::for_inherent_impl(ty).expect("fingerprint for primitive"),
- )),
- _ => None,
- }
-}
-
-/// Look up the method with the given name.
-pub(crate) fn lookup_method<'db>(
- ty: &Canonical<'db, Ty<'db>>,
- table: &mut InferenceTable<'db>,
- traits_in_scope: &FxHashSet<TraitId>,
- visible_from_module: VisibleFromModule,
- name: &Name,
-) -> Option<(ReceiverAdjustments, FunctionId, bool)> {
- let mut not_visible = None;
- let res = iterate_method_candidates(
- ty,
- table,
- traits_in_scope,
- visible_from_module,
- Some(name),
- LookupMode::MethodCall,
- |adjustments, f, visible| match f {
- AssocItemId::FunctionId(f) if visible => Some((adjustments, f, true)),
- AssocItemId::FunctionId(f) if not_visible.is_none() => {
- not_visible = Some((adjustments, f, false));
- None
- }
- _ => None,
- },
- );
- res.or(not_visible)
-}
-
-/// Whether we're looking up a dotted method call (like `v.len()`) or a path
-/// (like `Vec::new`).
-#[derive(Copy, Clone, Debug, PartialEq, Eq)]
-pub enum LookupMode {
- /// Looking up a method call like `v.len()`: We only consider candidates
- /// that have a `self` parameter, and do autoderef.
- MethodCall,
- /// Looking up a path like `Vec::new` or `Vec::default`: We consider all
- /// candidates including associated constants, but don't do autoderef.
- Path,
-}
-
-#[derive(Clone, Copy)]
-pub enum VisibleFromModule {
- /// Filter for results that are visible from the given module
- Filter(ModuleId),
- /// Include impls from the given block.
- IncludeBlock(BlockId),
- /// Do nothing special in regards visibility
- None,
-}
-
-impl From<Option<ModuleId>> for VisibleFromModule {
- fn from(module: Option<ModuleId>) -> Self {
- match module {
- Some(module) => Self::Filter(module),
- None => Self::None,
- }
- }
-}
-
-impl From<Option<BlockId>> for VisibleFromModule {
- fn from(block: Option<BlockId>) -> Self {
- match block {
- Some(block) => Self::IncludeBlock(block),
- None => Self::None,
- }
- }
-}
+pub use self::probe::{
+ Candidate, CandidateKind, CandidateStep, CandidateWithPrivate, Mode, Pick, PickKind,
+};
#[derive(Debug, Clone)]
-pub enum AutorefOrPtrAdjustment {
- Autoref(Mutability),
- ToConstPtr,
+pub struct MethodResolutionUnstableFeatures {
+ arbitrary_self_types: bool,
+ arbitrary_self_types_pointers: bool,
+ supertrait_item_shadowing: bool,
}
-#[derive(Debug, Clone, Default)]
-pub struct ReceiverAdjustments {
- autoref: Option<AutorefOrPtrAdjustment>,
- autoderefs: usize,
- unsize_array: bool,
-}
-
-impl ReceiverAdjustments {
- pub(crate) fn apply<'db>(
- &self,
- table: &mut InferenceTable<'db>,
- mut ty: Ty<'db>,
- ) -> (Ty<'db>, Vec<Adjustment<'db>>) {
- let mut adjust = Vec::new();
- let mut autoderef = table.autoderef(ty);
- autoderef.next();
- for _ in 0..self.autoderefs {
- match autoderef.next() {
- None => {
- never!("autoderef not possible for {:?}", ty);
- ty = Ty::new_error(table.interner(), ErrorGuaranteed);
- break;
- }
- Some((new_ty, _)) => {
- ty = new_ty;
- let mutbl = match self.autoref {
- Some(AutorefOrPtrAdjustment::Autoref(m)) => Some(m),
- Some(AutorefOrPtrAdjustment::ToConstPtr) => Some(Mutability::Not),
- // FIXME should we know the mutability here, when autoref is `None`?
- None => None,
- };
- adjust.push(Adjustment {
- kind: Adjust::Deref(match autoderef.steps().last().unwrap().1 {
- AutoderefKind::Overloaded => Some(OverloadedDeref(mutbl)),
- AutoderefKind::Builtin => None,
- }),
- target: ty,
- });
- }
- }
+impl MethodResolutionUnstableFeatures {
+ pub fn from_def_map(def_map: &DefMap) -> Self {
+ Self {
+ arbitrary_self_types: def_map.is_unstable_feature_enabled(&sym::arbitrary_self_types),
+ arbitrary_self_types_pointers: def_map
+ .is_unstable_feature_enabled(&sym::arbitrary_self_types_pointers),
+ supertrait_item_shadowing: def_map
+ .is_unstable_feature_enabled(&sym::supertrait_item_shadowing),
}
- if let Some(autoref) = &self.autoref {
- let lt = table.next_region_var();
- match autoref {
- AutorefOrPtrAdjustment::Autoref(m) => {
- let a = Adjustment::borrow(table.interner(), *m, ty, lt);
- ty = a.target;
- adjust.push(a);
+ }
+}
+
+pub struct MethodResolutionContext<'a, 'db> {
+ pub infcx: &'a InferCtxt<'db>,
+ pub resolver: &'a Resolver<'db>,
+ pub env: &'a TraitEnvironment<'db>,
+ pub traits_in_scope: &'a FxHashSet<TraitId>,
+ pub edition: Edition,
+ pub unstable_features: &'a MethodResolutionUnstableFeatures,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum CandidateId {
+ FunctionId(FunctionId),
+ ConstId(ConstId),
+}
+impl_from!(FunctionId, ConstId for CandidateId);
+
+impl CandidateId {
+ fn container(self, db: &dyn HirDatabase) -> ItemContainerId {
+ match self {
+ CandidateId::FunctionId(id) => id.loc(db).container,
+ CandidateId::ConstId(id) => id.loc(db).container,
+ }
+ }
+}
+
+#[derive(Clone, Copy, Debug)]
+pub(crate) struct MethodCallee<'db> {
+ /// Impl method ID, for inherent methods, or trait method ID, otherwise.
+ pub def_id: FunctionId,
+ pub args: GenericArgs<'db>,
+
+ /// Instantiated method signature, i.e., it has been
+ /// instantiated, normalized, and has had late-bound
+ /// lifetimes replaced with inference variables.
+ pub sig: FnSig<'db>,
+}
+
+#[derive(Debug)]
+pub enum MethodError<'db> {
+ /// Did not find an applicable method.
+ NoMatch,
+
+ /// Multiple methods might apply.
+ Ambiguity(Vec<CandidateSource>),
+
+ /// Found an applicable method, but it is not visible.
+ PrivateMatch(Pick<'db>),
+
+ /// Found a `Self: Sized` bound where `Self` is a trait object.
+ IllegalSizedBound { candidates: Vec<FunctionId>, needs_mut: bool },
+
+ /// Error has already been emitted, no need to emit another one.
+ ErrorReported,
+}
+
+// A pared down enum describing just the places from which a method
+// candidate can arise. Used for error reporting only.
+#[derive(Copy, Clone, Debug, Eq, PartialEq)]
+pub enum CandidateSource {
+ Impl(ImplId),
+ Trait(TraitId),
+}
+
+impl<'a, 'db> InferenceContext<'a, 'db> {
+ /// Performs method lookup. If lookup is successful, it will return the callee
+ /// and store an appropriate adjustment for the self-expr. In some cases it may
+ /// report an error (e.g., invoking the `drop` method).
+ #[instrument(level = "debug", skip(self))]
+ pub(crate) fn lookup_method_including_private(
+ &mut self,
+ self_ty: Ty<'db>,
+ name: Name,
+ generic_args: Option<&HirGenericArgs>,
+ receiver: ExprId,
+ call_expr: ExprId,
+ ) -> Result<(MethodCallee<'db>, bool), MethodError<'db>> {
+ let (pick, is_visible) = match self.lookup_probe(name, self_ty) {
+ Ok(it) => (it, true),
+ Err(MethodError::PrivateMatch(it)) => {
+ // FIXME: Report error.
+ (it, false)
+ }
+ Err(err) => return Err(err),
+ };
+
+ let result = self.confirm_method(&pick, self_ty, call_expr, generic_args);
+ debug!("result = {:?}", result);
+
+ if result.illegal_sized_bound {
+ // FIXME: Report an error.
+ }
+
+ self.write_expr_adj(receiver, result.adjustments);
+ self.write_method_resolution(call_expr, result.callee.def_id, result.callee.args);
+
+ Ok((result.callee, is_visible))
+ }
+
+ #[instrument(level = "debug", skip(self))]
+ pub(crate) fn lookup_probe(
+ &self,
+ method_name: Name,
+ self_ty: Ty<'db>,
+ ) -> probe::PickResult<'db> {
+ self.with_method_resolution(|ctx| {
+ let pick = ctx.probe_for_name(probe::Mode::MethodCall, method_name, self_ty)?;
+ Ok(pick)
+ })
+ }
+
+ pub(crate) fn with_method_resolution<R>(
+ &self,
+ f: impl FnOnce(&MethodResolutionContext<'_, 'db>) -> R,
+ ) -> R {
+ let traits_in_scope = self.get_traits_in_scope();
+ let traits_in_scope = match &traits_in_scope {
+ Either::Left(it) => it,
+ Either::Right(it) => *it,
+ };
+ let ctx = MethodResolutionContext {
+ infcx: &self.table.infer_ctxt,
+ resolver: &self.resolver,
+ env: &self.table.trait_env,
+ traits_in_scope,
+ edition: self.edition,
+ unstable_features: &self.unstable_features,
+ };
+ f(&ctx)
+ }
+}
+
+/// Used by [FnCtxt::lookup_method_for_operator] with `-Znext-solver`.
+///
+/// With `AsRigid` we error on `impl Opaque: NotInItemBounds` while
+/// `AsInfer` just treats it as ambiguous and succeeds. This is necessary
+/// as we want [FnCtxt::check_expr_call] to treat not-yet-defined opaque
+/// types as rigid to support `impl Deref<Target = impl FnOnce()>` and
+/// `Box<impl FnOnce()>`.
+///
+/// We only want to treat opaque types as rigid if we need to eagerly choose
+/// between multiple candidates. We otherwise treat them as ordinary inference
+/// variable to avoid rejecting otherwise correct code.
+#[derive(Debug)]
+#[expect(dead_code)]
+pub(super) enum TreatNotYetDefinedOpaques {
+ AsInfer,
+ AsRigid,
+}
+
+impl<'db> InferenceTable<'db> {
+ /// `lookup_method_in_trait` is used for overloaded operators.
+ /// It does a very narrow slice of what the normal probe/confirm path does.
+ /// In particular, it doesn't really do any probing: it simply constructs
+ /// an obligation for a particular trait with the given self type and checks
+ /// whether that trait is implemented.
+ #[instrument(level = "debug", skip(self))]
+ pub(super) fn lookup_method_for_operator(
+ &self,
+ cause: ObligationCause,
+ method_name: Symbol,
+ trait_def_id: TraitId,
+ self_ty: Ty<'db>,
+ opt_rhs_ty: Option<Ty<'db>>,
+ treat_opaques: TreatNotYetDefinedOpaques,
+ ) -> Option<InferOk<'db, MethodCallee<'db>>> {
+ // Construct a trait-reference `self_ty : Trait<input_tys>`
+ let args = GenericArgs::for_item(
+ self.interner(),
+ trait_def_id.into(),
+ |param_idx, param_id, _| match param_id {
+ GenericParamId::LifetimeParamId(_) | GenericParamId::ConstParamId(_) => {
+ unreachable!("did not expect operator trait to have lifetime/const")
}
- AutorefOrPtrAdjustment::ToConstPtr => {
- if let TyKind::RawPtr(pointee, Mutability::Mut) = ty.kind() {
- let a = Adjustment {
- kind: Adjust::Pointer(PointerCast::MutToConstPointer),
- target: Ty::new_ptr(table.interner(), pointee, Mutability::Not),
- };
- ty = a.target;
- adjust.push(a);
+ GenericParamId::TypeParamId(_) => {
+ if param_idx == 0 {
+ self_ty.into()
+ } else if let Some(rhs_ty) = opt_rhs_ty {
+ assert_eq!(param_idx, 1, "did not expect >1 param on operator trait");
+ rhs_ty.into()
} else {
- never!("`ToConstPtr` target is not a raw mutable pointer");
+ // FIXME: We should stop passing `None` for the failure case
+ // when probing for call exprs. I.e. `opt_rhs_ty` should always
+ // be set when it needs to be.
+ self.next_var_for_param(param_id)
}
}
- };
- }
- if self.unsize_array {
- ty = 'it: {
- if let TyKind::Ref(l, inner, m) = ty.kind()
- && let TyKind::Array(inner, _) = inner.kind()
- {
- break 'it Ty::new_ref(
- table.interner(),
- l,
- Ty::new_slice(table.interner(), inner),
- m,
- );
- }
- // FIXME: report diagnostic if array unsizing happens without indirection.
- ty
- };
- adjust.push(Adjustment { kind: Adjust::Pointer(PointerCast::Unsize), target: ty });
- }
- (ty, adjust)
- }
+ },
+ );
- fn with_autoref(&self, a: AutorefOrPtrAdjustment) -> ReceiverAdjustments {
- Self { autoref: Some(a), ..*self }
- }
-}
+ let obligation = Obligation::new(
+ self.interner(),
+ cause,
+ self.trait_env.env,
+ TraitRef::new_from_args(self.interner(), trait_def_id.into(), args),
+ );
-// This would be nicer if it just returned an iterator, but that runs into
-// lifetime problems, because we need to borrow temp `CrateImplDefs`.
-// FIXME add a context type here?
-pub(crate) fn iterate_method_candidates<'db, T>(
- ty: &Canonical<'db, Ty<'db>>,
- table: &mut InferenceTable<'db>,
- traits_in_scope: &FxHashSet<TraitId>,
- visible_from_module: VisibleFromModule,
- name: Option<&Name>,
- mode: LookupMode,
- mut callback: impl FnMut(ReceiverAdjustments, AssocItemId, bool) -> Option<T>,
-) -> Option<T> {
- let mut slot = None;
- _ = iterate_method_candidates_dyn_impl(
- ty,
- table,
- traits_in_scope,
- visible_from_module,
- name,
- mode,
- &mut |adj, item, visible| {
- assert!(slot.is_none());
- if let Some(it) = callback(adj, item, visible) {
- slot = Some(it);
- return ControlFlow::Break(());
+ // Now we want to know if this can be matched
+ let matches_trait = match treat_opaques {
+ TreatNotYetDefinedOpaques::AsInfer => self.infer_ctxt.predicate_may_hold(&obligation),
+ TreatNotYetDefinedOpaques::AsRigid => {
+ self.infer_ctxt.predicate_may_hold_opaque_types_jank(&obligation)
}
- ControlFlow::Continue(())
- },
- );
- slot
+ };
+
+ if !matches_trait {
+ debug!("--> Cannot match obligation");
+ // Cannot be matched, no such method resolution is possible.
+ return None;
+ }
+
+ // Trait must have a method named `m_name` and it should not have
+ // type parameters or early-bound regions.
+ let interner = self.interner();
+ // We use `Ident::with_dummy_span` since no built-in operator methods have
+ // any macro-specific hygiene, so the span's context doesn't really matter.
+ let Some(method_item) =
+ trait_def_id.trait_items(self.db).method_by_name(&Name::new_symbol_root(method_name))
+ else {
+ panic!("expected associated item for operator trait")
+ };
+
+ let def_id = method_item;
+
+ debug!("lookup_in_trait_adjusted: method_item={:?}", method_item);
+ let mut obligations = PredicateObligations::new();
+
+ // Instantiate late-bound regions and instantiate the trait
+ // parameters into the method type to get the actual method type.
+ //
+ // N.B., instantiate late-bound regions before normalizing the
+ // function signature so that normalization does not need to deal
+ // with bound regions.
+ let fn_sig =
+ self.db.callable_item_signature(method_item.into()).instantiate(interner, args);
+ let fn_sig = self
+ .infer_ctxt
+ .instantiate_binder_with_fresh_vars(BoundRegionConversionTime::FnCall, fn_sig);
+
+ // Register obligations for the parameters. This will include the
+ // `Self` parameter, which in turn has a bound of the main trait,
+ // so this also effectively registers `obligation` as well. (We
+ // used to register `obligation` explicitly, but that resulted in
+ // double error messages being reported.)
+ //
+ // Note that as the method comes from a trait, it should not have
+ // any late-bound regions appearing in its bounds.
+ let bounds = GenericPredicates::query_all(self.db, method_item.into());
+ let bounds = clauses_as_obligations(
+ bounds.iter_instantiated_copied(interner, args.as_slice()),
+ ObligationCause::new(),
+ self.trait_env.env,
+ );
+
+ obligations.extend(bounds);
+
+ // Also add an obligation for the method type being well-formed.
+ debug!(
+ "lookup_method_in_trait: matched method fn_sig={:?} obligation={:?}",
+ fn_sig, obligation
+ );
+ for ty in fn_sig.inputs_and_output {
+ obligations.push(Obligation::new(
+ interner,
+ obligation.cause.clone(),
+ self.trait_env.env,
+ Binder::dummy(PredicateKind::Clause(ClauseKind::WellFormed(ty.into()))),
+ ));
+ }
+
+ let callee = MethodCallee { def_id, args, sig: fn_sig };
+ debug!("callee = {:?}", callee);
+
+ Some(InferOk { obligations, value: callee })
+ }
}
pub fn lookup_impl_const<'db>(
@@ -690,7 +356,7 @@
let interner = infcx.interner;
let db = interner.db;
- let trait_id = match const_id.lookup(db).container {
+ let trait_id = match const_id.loc(db).container {
ItemContainerId::TraitId(id) => id,
_ => return (const_id, subs),
};
@@ -719,7 +385,7 @@
) -> Option<usize> {
let db = interner.db;
- let ItemContainerId::TraitId(trait_id) = func.lookup(db).container else {
+ let ItemContainerId::TraitId(trait_id) = func.loc(db).container else {
return None;
};
let trait_params = db.generic_params(trait_id.into()).len();
@@ -755,7 +421,7 @@
let interner = DbInterner::new_with(db, Some(env.krate), env.block);
let infcx = interner.infer_ctxt().build(TypingMode::PostAnalysis);
- let ItemContainerId::TraitId(trait_id) = func.lookup(db).container else {
+ let ItemContainerId::TraitId(trait_id) = func.loc(db).container else {
return (func, fn_subst);
};
let trait_params = db.generic_params(trait_id.into()).len();
@@ -833,981 +499,337 @@
}
}
-fn is_inherent_impl_coherent<'db>(
- db: &'db dyn HirDatabase,
- def_map: &DefMap,
- impl_id: ImplId,
- self_ty: Ty<'db>,
-) -> bool {
- let self_ty = self_ty.kind();
- let impl_allowed = match self_ty {
- TyKind::Tuple(_)
- | TyKind::FnDef(_, _)
- | TyKind::Array(_, _)
- | TyKind::Never
- | TyKind::RawPtr(_, _)
- | TyKind::Ref(_, _, _)
- | TyKind::Slice(_)
- | TyKind::Str
- | TyKind::Bool
- | TyKind::Char
- | TyKind::Int(_)
- | TyKind::Uint(_)
- | TyKind::Float(_) => def_map.is_rustc_coherence_is_core(),
+#[salsa::tracked(returns(ref))]
+fn crates_containing_incoherent_inherent_impls(db: &dyn HirDatabase) -> Box<[Crate]> {
+ // We assume that only sysroot crates contain `#[rustc_has_incoherent_inherent_impls]`
+ // impls, since this is an internal feature and only std uses it.
+ db.all_crates().iter().copied().filter(|krate| krate.data(db).origin.is_lang()).collect()
+}
- TyKind::Adt(adt_def, _) => adt_def.def_id().0.module(db).krate() == def_map.krate(),
- TyKind::Dynamic(it, _) => it
- .principal_def_id()
- .is_some_and(|trait_id| trait_id.0.module(db).krate() == def_map.krate()),
-
+pub fn incoherent_inherent_impls(db: &dyn HirDatabase, self_ty: SimplifiedType) -> &[ImplId] {
+ let has_incoherent_impls = match self_ty.def() {
+ Some(def_id) => match def_id.try_into() {
+ Ok(def_id) => {
+ db.attrs(def_id).by_key(sym::rustc_has_incoherent_inherent_impls).exists()
+ }
+ Err(()) => true,
+ },
_ => true,
};
- impl_allowed || {
- let rustc_has_incoherent_inherent_impls = match self_ty {
- TyKind::Tuple(_)
- | TyKind::FnDef(_, _)
- | TyKind::Array(_, _)
- | TyKind::Never
- | TyKind::RawPtr(_, _)
- | TyKind::Ref(_, _, _)
- | TyKind::Slice(_)
- | TyKind::Str
- | TyKind::Bool
- | TyKind::Char
- | TyKind::Int(_)
- | TyKind::Uint(_)
- | TyKind::Float(_) => true,
+ return if !has_incoherent_impls {
+ &[]
+ } else {
+ incoherent_inherent_impls_query(db, (), self_ty)
+ };
- TyKind::Adt(adt_def, _) => match adt_def.def_id().0 {
- hir_def::AdtId::StructId(id) => db
- .struct_signature(id)
- .flags
- .contains(StructFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS),
- hir_def::AdtId::UnionId(id) => db
- .union_signature(id)
- .flags
- .contains(StructFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS),
- hir_def::AdtId::EnumId(it) => db
- .enum_signature(it)
- .flags
- .contains(EnumFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS),
- },
- TyKind::Dynamic(it, _) => it.principal_def_id().is_some_and(|trait_id| {
- db.trait_signature(trait_id.0)
- .flags
- .contains(TraitFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS)
- }),
+ #[salsa::tracked(returns(ref))]
+ fn incoherent_inherent_impls_query(
+ db: &dyn HirDatabase,
+ _force_query_input_to_be_interned: (),
+ self_ty: SimplifiedType,
+ ) -> Box<[ImplId]> {
+ let _p = tracing::info_span!("incoherent_inherent_impl_crates").entered();
- _ => false,
- };
- let items = impl_id.impl_items(db);
- rustc_has_incoherent_inherent_impls
- && !items.items.is_empty()
- && items.items.iter().all(|&(_, assoc)| match assoc {
- AssocItemId::FunctionId(it) => {
- db.function_signature(it).flags.contains(FnFlags::RUSTC_ALLOW_INCOHERENT_IMPL)
- }
- AssocItemId::ConstId(it) => {
- db.const_signature(it).flags.contains(ConstFlags::RUSTC_ALLOW_INCOHERENT_IMPL)
- }
- AssocItemId::TypeAliasId(it) => db
- .type_alias_signature(it)
- .flags
- .contains(TypeAliasFlags::RUSTC_ALLOW_INCOHERENT_IMPL),
- })
+ let mut result = Vec::new();
+ for &krate in crates_containing_incoherent_inherent_impls(db) {
+ let impls = InherentImpls::for_crate(db, krate);
+ result.extend_from_slice(impls.for_self_ty(&self_ty));
+ }
+ result.into_boxed_slice()
}
}
-/// Checks whether the impl satisfies the orphan rules.
-///
-/// Given `impl<P1..=Pn> Trait<T1..=Tn> for T0`, an `impl`` is valid only if at least one of the following is true:
-/// - Trait is a local trait
-/// - All of
-/// - At least one of the types `T0..=Tn`` must be a local type. Let `Ti`` be the first such type.
-/// - No uncovered type parameters `P1..=Pn` may appear in `T0..Ti`` (excluding `Ti`)
-pub fn check_orphan_rules<'db>(db: &'db dyn HirDatabase, impl_: ImplId) -> bool {
- let Some(impl_trait) = db.impl_trait(impl_) else {
- // not a trait impl
- return true;
- };
+pub fn simplified_type_module(db: &dyn HirDatabase, ty: &SimplifiedType) -> Option<ModuleId> {
+ match ty.def()? {
+ SolverDefId::AdtId(id) => Some(id.module(db)),
+ SolverDefId::TypeAliasId(id) => Some(id.module(db)),
+ SolverDefId::TraitId(id) => Some(id.module(db)),
+ _ => None,
+ }
+}
- let local_crate = impl_.lookup(db).container.krate();
- let is_local = |tgt_crate| tgt_crate == local_crate;
+#[derive(Debug, PartialEq, Eq)]
+pub struct InherentImpls {
+ map: FxHashMap<SimplifiedType, Box<[ImplId]>>,
+}
- let trait_ref = impl_trait.instantiate_identity();
- let trait_id = trait_ref.def_id.0;
- if is_local(trait_id.module(db).krate()) {
- // trait to be implemented is local
- return true;
+#[salsa::tracked]
+impl InherentImpls {
+ #[salsa::tracked(returns(ref))]
+ pub fn for_crate(db: &dyn HirDatabase, krate: Crate) -> Self {
+ let _p = tracing::info_span!("inherent_impls_in_crate_query", ?krate).entered();
+
+ let crate_def_map = crate_def_map(db, krate);
+
+ Self::collect_def_map(db, crate_def_map)
}
- let unwrap_fundamental = |mut ty: Ty<'db>| {
- // Unwrap all layers of fundamental types with a loop.
- loop {
- match ty.kind() {
- TyKind::Ref(_, referenced, _) => ty = referenced,
- TyKind::Adt(adt_def, subs) => {
- let AdtId::StructId(s) = adt_def.def_id().0 else {
- break ty;
+ #[salsa::tracked(returns(ref))]
+ pub fn for_block(db: &dyn HirDatabase, block: BlockId) -> Option<Box<Self>> {
+ let _p = tracing::info_span!("inherent_impls_in_block_query").entered();
+
+ let block_def_map = block_def_map(db, block);
+ let result = Self::collect_def_map(db, block_def_map);
+ if result.map.is_empty() { None } else { Some(Box::new(result)) }
+ }
+}
+
+impl InherentImpls {
+ fn collect_def_map(db: &dyn HirDatabase, def_map: &DefMap) -> Self {
+ let mut map = FxHashMap::default();
+ collect(db, def_map, &mut map);
+ let mut map = map
+ .into_iter()
+ .map(|(self_ty, impls)| (self_ty, impls.into_boxed_slice()))
+ .collect::<FxHashMap<_, _>>();
+ map.shrink_to_fit();
+ return Self { map };
+
+ fn collect(
+ db: &dyn HirDatabase,
+ def_map: &DefMap,
+ map: &mut FxHashMap<SimplifiedType, Vec<ImplId>>,
+ ) {
+ for (_module_id, module_data) in def_map.modules() {
+ for impl_id in module_data.scope.impls() {
+ let data = db.impl_signature(impl_id);
+ if data.target_trait.is_some() {
+ continue;
+ }
+
+ let interner = DbInterner::new_with(db, None, None);
+ let self_ty = db.impl_self_ty(impl_id);
+ let self_ty = self_ty.instantiate_identity();
+ if let Some(self_ty) =
+ simplify_type(interner, self_ty, TreatParams::InstantiateWithInfer)
+ {
+ map.entry(self_ty).or_default().push(impl_id);
+ }
+ }
+
+ // To better support custom derives, collect impls in all unnamed const items.
+ // const _: () = { ... };
+ for konst in module_data.scope.unnamed_consts() {
+ let body = db.body(konst.into());
+ for (_, block_def_map) in body.blocks(db) {
+ collect(db, block_def_map, map);
+ }
+ }
+ }
+ }
+ }
+
+ pub fn for_self_ty(&self, self_ty: &SimplifiedType) -> &[ImplId] {
+ self.map.get(self_ty).map(|it| &**it).unwrap_or_default()
+ }
+
+ pub fn for_each_crate_and_block(
+ db: &dyn HirDatabase,
+ krate: Crate,
+ block: Option<BlockId>,
+ for_each: &mut dyn FnMut(&InherentImpls),
+ ) {
+ let blocks = std::iter::successors(block, |block| block.loc(db).module.containing_block());
+ blocks.filter_map(|block| Self::for_block(db, block).as_deref()).for_each(&mut *for_each);
+ for_each(Self::for_crate(db, krate));
+ }
+}
+
+#[derive(Debug, PartialEq)]
+struct OneTraitImpls {
+ non_blanket_impls: FxHashMap<SimplifiedType, Box<[ImplId]>>,
+ blanket_impls: Box<[ImplId]>,
+}
+
+#[derive(Default)]
+struct OneTraitImplsBuilder {
+ non_blanket_impls: FxHashMap<SimplifiedType, Vec<ImplId>>,
+ blanket_impls: Vec<ImplId>,
+}
+
+impl OneTraitImplsBuilder {
+ fn finish(self) -> OneTraitImpls {
+ let mut non_blanket_impls = self
+ .non_blanket_impls
+ .into_iter()
+ .map(|(self_ty, impls)| (self_ty, impls.into_boxed_slice()))
+ .collect::<FxHashMap<_, _>>();
+ non_blanket_impls.shrink_to_fit();
+ let blanket_impls = self.blanket_impls.into_boxed_slice();
+ OneTraitImpls { non_blanket_impls, blanket_impls }
+ }
+}
+
+#[derive(Debug, PartialEq)]
+pub struct TraitImpls {
+ map: FxHashMap<TraitId, OneTraitImpls>,
+}
+
+#[salsa::tracked]
+impl TraitImpls {
+ #[salsa::tracked(returns(ref))]
+ pub fn for_crate(db: &dyn HirDatabase, krate: Crate) -> Arc<Self> {
+ let _p = tracing::info_span!("inherent_impls_in_crate_query", ?krate).entered();
+
+ let crate_def_map = crate_def_map(db, krate);
+ let result = Self::collect_def_map(db, crate_def_map);
+ Arc::new(result)
+ }
+
+ #[salsa::tracked(returns(ref))]
+ pub fn for_block(db: &dyn HirDatabase, block: BlockId) -> Option<Box<Self>> {
+ let _p = tracing::info_span!("inherent_impls_in_block_query").entered();
+
+ let block_def_map = block_def_map(db, block);
+ let result = Self::collect_def_map(db, block_def_map);
+ if result.map.is_empty() { None } else { Some(Box::new(result)) }
+ }
+
+ #[salsa::tracked(returns(ref))]
+ pub fn for_crate_and_deps(db: &dyn HirDatabase, krate: Crate) -> Box<[Arc<Self>]> {
+ krate.transitive_deps(db).iter().map(|&dep| Self::for_crate(db, dep).clone()).collect()
+ }
+}
+
+impl TraitImpls {
+ fn collect_def_map(db: &dyn HirDatabase, def_map: &DefMap) -> Self {
+ let mut map = FxHashMap::default();
+ collect(db, def_map, &mut map);
+ let mut map = map
+ .into_iter()
+ .map(|(trait_id, trait_map)| (trait_id, trait_map.finish()))
+ .collect::<FxHashMap<_, _>>();
+ map.shrink_to_fit();
+ return Self { map };
+
+ fn collect(
+ db: &dyn HirDatabase,
+ def_map: &DefMap,
+ map: &mut FxHashMap<TraitId, OneTraitImplsBuilder>,
+ ) {
+ for (_module_id, module_data) in def_map.modules() {
+ for impl_id in module_data.scope.impls() {
+ // Reservation impls should be ignored during trait resolution, so we never need
+ // them during type analysis. See rust-lang/rust#64631 for details.
+ //
+ // FIXME: Reservation impls should be considered during coherence checks. If we are
+ // (ever) to implement coherence checks, this filtering should be done by the trait
+ // solver.
+ if db.attrs(impl_id.into()).by_key(sym::rustc_reservation_impl).exists() {
+ continue;
+ }
+ let trait_ref = match db.impl_trait(impl_id) {
+ Some(tr) => tr.instantiate_identity(),
+ None => continue,
};
- let struct_signature = db.struct_signature(s);
- if struct_signature.flags.contains(StructFlags::FUNDAMENTAL) {
- let next = subs.types().next();
- match next {
- Some(it) => ty = it,
- None => break ty,
+ let self_ty = trait_ref.self_ty();
+ let interner = DbInterner::new_with(db, None, None);
+ let entry = map.entry(trait_ref.def_id.0).or_default();
+ match simplify_type(interner, self_ty, TreatParams::InstantiateWithInfer) {
+ Some(self_ty) => {
+ entry.non_blanket_impls.entry(self_ty).or_default().push(impl_id)
}
- } else {
- break ty;
+ None => entry.blanket_impls.push(impl_id),
}
}
- _ => break ty,
+
+ // To better support custom derives, collect impls in all unnamed const items.
+ // const _: () = { ... };
+ for konst in module_data.scope.unnamed_consts() {
+ let body = db.body(konst.into());
+ for (_, block_def_map) in body.blocks(db) {
+ collect(db, block_def_map, map);
+ }
+ }
}
}
- };
- // - At least one of the types `T0..=Tn`` must be a local type. Let `Ti`` be the first such type.
+ }
- // FIXME: param coverage
- // - No uncovered type parameters `P1..=Pn` may appear in `T0..Ti`` (excluding `Ti`)
- let is_not_orphan = trait_ref.args.types().any(|ty| match unwrap_fundamental(ty).kind() {
- TyKind::Adt(adt_def, _) => is_local(adt_def.def_id().0.module(db).krate()),
- TyKind::Error(_) => true,
- TyKind::Dynamic(it, _) => {
- it.principal_def_id().is_some_and(|trait_id| is_local(trait_id.0.module(db).krate()))
+ pub fn blanket_impls(&self, for_trait: TraitId) -> &[ImplId] {
+ self.map.get(&for_trait).map(|it| &*it.blanket_impls).unwrap_or_default()
+ }
+
+ /// Queries whether `self_ty` has potentially applicable implementations of `trait_`.
+ pub fn has_impls_for_trait_and_self_ty(
+ &self,
+ trait_: TraitId,
+ self_ty: &SimplifiedType,
+ ) -> bool {
+ self.map.get(&trait_).is_some_and(|trait_impls| {
+ trait_impls.non_blanket_impls.contains_key(self_ty)
+ || !trait_impls.blanket_impls.is_empty()
+ })
+ }
+
+ pub fn for_trait_and_self_ty(&self, trait_: TraitId, self_ty: &SimplifiedType) -> &[ImplId] {
+ self.map
+ .get(&trait_)
+ .and_then(|map| map.non_blanket_impls.get(self_ty))
+ .map(|it| &**it)
+ .unwrap_or_default()
+ }
+
+ pub fn for_trait(&self, trait_: TraitId, mut callback: impl FnMut(&[ImplId])) {
+ if let Some(impls) = self.map.get(&trait_) {
+ callback(&impls.blanket_impls);
+ for impls in impls.non_blanket_impls.values() {
+ callback(impls);
+ }
}
- _ => false,
- });
- #[allow(clippy::let_and_return)]
- is_not_orphan
-}
+ }
-/// To be used from `hir` only.
-pub fn iterate_path_candidates<'db>(
- ty: &Canonical<'db, Ty<'db>>,
- db: &'db dyn HirDatabase,
- env: Arc<TraitEnvironment<'db>>,
- traits_in_scope: &FxHashSet<TraitId>,
- visible_from_module: VisibleFromModule,
- name: Option<&Name>,
- callback: &mut dyn MethodCandidateCallback,
-) -> ControlFlow<()> {
- iterate_method_candidates_dyn(
- ty,
- db,
- env,
- traits_in_scope,
- visible_from_module,
- name,
- LookupMode::Path,
- // the adjustments are not relevant for path lookup
- callback,
- )
-}
+ pub fn for_self_ty(&self, self_ty: &SimplifiedType, mut callback: impl FnMut(&[ImplId])) {
+ for for_trait in self.map.values() {
+ if let Some(for_ty) = for_trait.non_blanket_impls.get(self_ty) {
+ callback(for_ty);
+ }
+ }
+ }
-/// To be used from `hir` only.
-pub fn iterate_method_candidates_dyn<'db>(
- ty: &Canonical<'db, Ty<'db>>,
- db: &'db dyn HirDatabase,
- env: Arc<TraitEnvironment<'db>>,
- traits_in_scope: &FxHashSet<TraitId>,
- visible_from_module: VisibleFromModule,
- name: Option<&Name>,
- mode: LookupMode,
- callback: &mut dyn MethodCandidateCallback,
-) -> ControlFlow<()> {
- iterate_method_candidates_dyn_impl(
- ty,
- &mut InferenceTable::new(db, env, None),
- traits_in_scope,
- visible_from_module,
- name,
- mode,
- callback,
- )
-}
+ pub fn for_each_crate_and_block(
+ db: &dyn HirDatabase,
+ krate: Crate,
+ block: Option<BlockId>,
+ for_each: &mut dyn FnMut(&TraitImpls),
+ ) {
+ let blocks = std::iter::successors(block, |block| block.loc(db).module.containing_block());
+ blocks.filter_map(|block| Self::for_block(db, block).as_deref()).for_each(&mut *for_each);
+ Self::for_crate_and_deps(db, krate).iter().map(|it| &**it).for_each(for_each);
+ }
-fn iterate_method_candidates_dyn_impl<'db>(
- ty: &Canonical<'db, Ty<'db>>,
- table: &mut InferenceTable<'db>,
- traits_in_scope: &FxHashSet<TraitId>,
- visible_from_module: VisibleFromModule,
- name: Option<&Name>,
- mode: LookupMode,
- callback: &mut dyn MethodCandidateCallback,
-) -> ControlFlow<()> {
- let _p = tracing::info_span!(
- "iterate_method_candidates_dyn",
- ?mode,
- ?name,
- traits_in_scope_len = traits_in_scope.len()
- )
- .entered();
+ /// Like [`Self::for_each_crate_and_block()`], but takes into account two blocks, one for a trait and one for a self type.
+ pub fn for_each_crate_and_block_trait_and_type(
+ db: &dyn HirDatabase,
+ krate: Crate,
+ type_block: Option<BlockId>,
+ trait_block: Option<BlockId>,
+ for_each: &mut dyn FnMut(&TraitImpls),
+ ) {
+ let in_self_and_deps = TraitImpls::for_crate_and_deps(db, krate);
+ in_self_and_deps.iter().for_each(|impls| for_each(impls));
- match mode {
- LookupMode::MethodCall => {
- // For method calls, rust first does any number of autoderef, and
- // then one autoref (i.e. when the method takes &self or &mut self).
- // Note that when we've got a receiver like &S, even if the method
- // we find in the end takes &self, we still do the autoderef step
- // (just as rustc does an autoderef and then autoref again).
-
- // We have to be careful about the order we're looking at candidates
- // in here. Consider the case where we're resolving `it.clone()`
- // where `it: &Vec<_>`. This resolves to the clone method with self
- // type `Vec<_>`, *not* `&_`. I.e. we need to consider methods where
- // the receiver type exactly matches before cases where we have to
- // do autoref. But in the autoderef steps, the `&_` self type comes
- // up *before* the `Vec<_>` self type.
- //
- // On the other hand, we don't want to just pick any by-value method
- // before any by-autoref method; it's just that we need to consider
- // the methods by autoderef order of *receiver types*, not *self
- // types*.
-
- table.run_in_snapshot(|table| {
- let ty = table.instantiate_canonical(*ty);
- let deref_chain = autoderef_method_receiver(table, ty);
-
- deref_chain.into_iter().try_for_each(|(receiver_ty, adj)| {
- iterate_method_candidates_with_autoref(
- table,
- receiver_ty,
- adj,
- traits_in_scope,
- visible_from_module,
- name,
- callback,
- )
+ // We must not provide duplicate impls to the solver. Therefore we work with the following strategy:
+ // start from each block, and walk ancestors until you meet the other block. If they never meet,
+ // that means there can't be duplicate impls; if they meet, we stop the search of the deeper block.
+ // This breaks when they are equal (both will stop immediately), therefore we handle this case
+ // specifically.
+ let blocks_iter = |block: Option<BlockId>| {
+ std::iter::successors(block, |block| block.loc(db).module.containing_block())
+ };
+ let for_each_block = |current_block: Option<BlockId>, other_block: Option<BlockId>| {
+ blocks_iter(current_block)
+ .take_while(move |&block| {
+ other_block.is_none_or(|other_block| other_block != block)
})
- })
- }
- LookupMode::Path => {
- // No autoderef for path lookups
- iterate_method_candidates_for_self_ty(
- ty,
- table,
- traits_in_scope,
- visible_from_module,
- name,
- callback,
- )
- }
- }
-}
-
-#[tracing::instrument(skip_all, fields(name = ?name))]
-fn iterate_method_candidates_with_autoref<'db>(
- table: &mut InferenceTable<'db>,
- receiver_ty: Canonical<'db, Ty<'db>>,
- first_adjustment: ReceiverAdjustments,
- traits_in_scope: &FxHashSet<TraitId>,
- visible_from_module: VisibleFromModule,
- name: Option<&Name>,
- callback: &mut dyn MethodCandidateCallback,
-) -> ControlFlow<()> {
- let interner = table.interner();
-
- let mut iterate_method_candidates_by_receiver = move |receiver_ty, first_adjustment| {
- iterate_method_candidates_by_receiver(
- table,
- receiver_ty,
- first_adjustment,
- traits_in_scope,
- visible_from_module,
- name,
- callback,
- )
- };
-
- let mut maybe_reborrowed = first_adjustment.clone();
- if let TyKind::Ref(_, _, m) = receiver_ty.value.kind() {
- // Prefer reborrow of references to move
- maybe_reborrowed.autoref = Some(AutorefOrPtrAdjustment::Autoref(m));
- maybe_reborrowed.autoderefs += 1;
- }
-
- iterate_method_candidates_by_receiver(receiver_ty, maybe_reborrowed)?;
-
- let refed = Canonical {
- max_universe: receiver_ty.max_universe,
- variables: receiver_ty.variables,
- value: Ty::new_ref(interner, Region::error(interner), receiver_ty.value, Mutability::Not),
- };
-
- iterate_method_candidates_by_receiver(
- refed,
- first_adjustment.with_autoref(AutorefOrPtrAdjustment::Autoref(Mutability::Not)),
- )?;
-
- let ref_muted = Canonical {
- max_universe: receiver_ty.max_universe,
- variables: receiver_ty.variables,
- value: Ty::new_ref(interner, Region::error(interner), receiver_ty.value, Mutability::Mut),
- };
-
- iterate_method_candidates_by_receiver(
- ref_muted,
- first_adjustment.with_autoref(AutorefOrPtrAdjustment::Autoref(Mutability::Mut)),
- )?;
-
- if let TyKind::RawPtr(ty, Mutability::Mut) = receiver_ty.value.kind() {
- let const_ptr_ty = rustc_type_ir::Canonical {
- max_universe: rustc_type_ir::UniverseIndex::ZERO,
- value: Ty::new_ptr(interner, ty, Mutability::Not),
- variables: receiver_ty.variables,
+ .filter_map(move |block| TraitImpls::for_block(db, block).as_deref())
};
- iterate_method_candidates_by_receiver(
- const_ptr_ty,
- first_adjustment.with_autoref(AutorefOrPtrAdjustment::ToConstPtr),
- )?;
- }
-
- ControlFlow::Continue(())
-}
-
-pub trait MethodCandidateCallback {
- fn on_inherent_method(
- &mut self,
- adjustments: ReceiverAdjustments,
- item: AssocItemId,
- is_visible: bool,
- ) -> ControlFlow<()>;
-
- fn on_trait_method(
- &mut self,
- adjustments: ReceiverAdjustments,
- item: AssocItemId,
- is_visible: bool,
- ) -> ControlFlow<()>;
-}
-
-impl<F> MethodCandidateCallback for F
-where
- F: FnMut(ReceiverAdjustments, AssocItemId, bool) -> ControlFlow<()>,
-{
- fn on_inherent_method(
- &mut self,
- adjustments: ReceiverAdjustments,
- item: AssocItemId,
- is_visible: bool,
- ) -> ControlFlow<()> {
- self(adjustments, item, is_visible)
- }
-
- fn on_trait_method(
- &mut self,
- adjustments: ReceiverAdjustments,
- item: AssocItemId,
- is_visible: bool,
- ) -> ControlFlow<()> {
- self(adjustments, item, is_visible)
- }
-}
-
-#[tracing::instrument(skip_all, fields(name = ?name))]
-fn iterate_method_candidates_by_receiver<'db>(
- table: &mut InferenceTable<'db>,
- receiver_ty: Canonical<'db, Ty<'db>>,
- receiver_adjustments: ReceiverAdjustments,
- traits_in_scope: &FxHashSet<TraitId>,
- visible_from_module: VisibleFromModule,
- name: Option<&Name>,
- callback: &mut dyn MethodCandidateCallback,
-) -> ControlFlow<()> {
- let receiver_ty = table.instantiate_canonical(receiver_ty);
- // We're looking for methods with *receiver* type receiver_ty. These could
- // be found in any of the derefs of receiver_ty, so we have to go through
- // that, including raw derefs.
- table.run_in_snapshot(|table| {
- let mut autoderef = autoderef::Autoderef::new_no_tracking(table, receiver_ty)
- .include_raw_pointers()
- .use_receiver_trait();
- while let Some((self_ty, _)) = autoderef.next() {
- iterate_inherent_methods(
- self_ty,
- autoderef.table,
- name,
- Some(receiver_ty),
- Some(receiver_adjustments.clone()),
- visible_from_module,
- LookupMode::MethodCall,
- &mut |adjustments, item, is_visible| {
- callback.on_inherent_method(adjustments, item, is_visible)
- },
- )?
- }
- ControlFlow::Continue(())
- })?;
- table.run_in_snapshot(|table| {
- let mut autoderef = autoderef::Autoderef::new_no_tracking(table, receiver_ty)
- .include_raw_pointers()
- .use_receiver_trait();
- while let Some((self_ty, _)) = autoderef.next() {
- if matches!(self_ty.kind(), TyKind::Infer(rustc_type_ir::TyVar(_))) {
- // don't try to resolve methods on unknown types
- return ControlFlow::Continue(());
- }
-
- iterate_trait_method_candidates(
- self_ty,
- autoderef.table,
- traits_in_scope,
- name,
- Some(receiver_ty),
- Some(receiver_adjustments.clone()),
- LookupMode::MethodCall,
- &mut |adjustments, item, is_visible| {
- callback.on_trait_method(adjustments, item, is_visible)
- },
- )?
- }
- ControlFlow::Continue(())
- })
-}
-
-#[tracing::instrument(skip_all, fields(name = ?name))]
-fn iterate_method_candidates_for_self_ty<'db>(
- self_ty: &Canonical<'db, Ty<'db>>,
- table: &mut InferenceTable<'db>,
- traits_in_scope: &FxHashSet<TraitId>,
- visible_from_module: VisibleFromModule,
- name: Option<&Name>,
- callback: &mut dyn MethodCandidateCallback,
-) -> ControlFlow<()> {
- table.run_in_snapshot(|table| {
- let self_ty = table.instantiate_canonical(*self_ty);
- iterate_inherent_methods(
- self_ty,
- table,
- name,
- None,
- None,
- visible_from_module,
- LookupMode::Path,
- &mut |adjustments, item, is_visible| {
- callback.on_inherent_method(adjustments, item, is_visible)
- },
- )?;
- iterate_trait_method_candidates(
- self_ty,
- table,
- traits_in_scope,
- name,
- None,
- None,
- LookupMode::Path,
- &mut |adjustments, item, is_visible| {
- callback.on_trait_method(adjustments, item, is_visible)
- },
- )
- })
-}
-
-#[tracing::instrument(skip_all, fields(name = ?name, visible_from_module, receiver_ty))]
-fn iterate_trait_method_candidates<'db>(
- self_ty: Ty<'db>,
- table: &mut InferenceTable<'db>,
- traits_in_scope: &FxHashSet<TraitId>,
- name: Option<&Name>,
- receiver_ty: Option<Ty<'db>>,
- receiver_adjustments: Option<ReceiverAdjustments>,
- mode: LookupMode,
- callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId, bool) -> ControlFlow<()>,
-) -> ControlFlow<()> {
- let db = table.db;
-
- let canonical_self_ty = table.canonicalize(self_ty);
- let krate = table.trait_env.krate;
-
- 'traits: for &t in traits_in_scope {
- let data = db.trait_signature(t);
-
- // Traits annotated with `#[rustc_skip_during_method_dispatch]` are skipped during
- // method resolution, if the receiver is an array, and we're compiling for editions before
- // 2021.
- // This is to make `[a].into_iter()` not break code with the new `IntoIterator` impl for
- // arrays.
- if data.flags.contains(TraitFlags::SKIP_ARRAY_DURING_METHOD_DISPATCH)
- && matches!(self_ty.kind(), TyKind::Array(..))
- {
- // FIXME: this should really be using the edition of the method name's span, in case it
- // comes from a macro
- if !krate.data(db).edition.at_least_2021() {
- continue;
- }
- }
- if data.flags.contains(TraitFlags::SKIP_BOXED_SLICE_DURING_METHOD_DISPATCH)
- && matches!(
- self_ty.kind(), TyKind::Adt(adt_def, subst)
- if is_box(table.db, adt_def.def_id().0)
- && matches!(subst.type_at(0).kind(), TyKind::Slice(..))
- )
- {
- // FIXME: this should really be using the edition of the method name's span, in case it
- // comes from a macro
- if !krate.data(db).edition.at_least_2024() {
- continue;
- }
- }
-
- // we'll be lazy about checking whether the type implements the
- // trait, but if we find out it doesn't, we'll skip the rest of the
- // iteration
- let mut known_implemented = false;
- for &(_, item) in t.trait_items(db).items.iter() {
- // Don't pass a `visible_from_module` down to `is_valid_candidate`,
- // since only inherent methods should be included into visibility checking.
- let visible = match is_valid_trait_method_candidate(
- table,
- t,
- name,
- receiver_ty,
- item,
- self_ty,
- mode,
- ) {
- IsValidCandidate::Yes => true,
- IsValidCandidate::NotVisible => false,
- IsValidCandidate::No => continue,
- };
- if !known_implemented {
- let goal = generic_implements_goal_ns(table, t, canonical_self_ty);
- if next_trait_solve_canonical_in_ctxt(&table.infer_ctxt, goal).no_solution() {
- continue 'traits;
- }
- }
- known_implemented = true;
- callback(receiver_adjustments.clone().unwrap_or_default(), item, visible)?;
- }
- }
- ControlFlow::Continue(())
-}
-
-#[tracing::instrument(skip_all, fields(name = ?name, visible_from_module, receiver_ty))]
-fn iterate_inherent_methods<'db>(
- self_ty: Ty<'db>,
- table: &mut InferenceTable<'db>,
- name: Option<&Name>,
- receiver_ty: Option<Ty<'db>>,
- receiver_adjustments: Option<ReceiverAdjustments>,
- visible_from_module: VisibleFromModule,
- mode: LookupMode,
- callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId, bool) -> ControlFlow<()>,
-) -> ControlFlow<()> {
- let db = table.db;
- let env = table.trait_env.clone();
-
- // For trait object types and placeholder types with trait bounds, the methods of the trait and
- // its super traits are considered inherent methods. This matters because these methods have
- // higher priority than the other traits' methods, which would be considered in
- // `iterate_trait_method_candidates()` only after this function.
- match self_ty.kind() {
- TyKind::Param(_) => {
- let env = table.trait_env.clone();
- let traits =
- env.traits_in_scope_from_clauses(self_ty).flat_map(|t| all_super_traits(db, t));
- iterate_inherent_trait_methods(
- self_ty,
- table,
- name,
- receiver_ty,
- receiver_adjustments.clone(),
- callback,
- traits,
- mode,
- )?;
- }
- TyKind::Dynamic(bounds, _) => {
- if let Some(principal_trait) = bounds.principal_def_id() {
- let traits = all_super_traits(db, principal_trait.0);
- iterate_inherent_trait_methods(
- self_ty,
- table,
- name,
- receiver_ty,
- receiver_adjustments.clone(),
- callback,
- traits.into_iter(),
- mode,
- )?;
- }
- }
- _ => {}
- }
-
- let def_crates = match def_crates(db, self_ty, env.krate) {
- Some(k) => k,
- None => return ControlFlow::Continue(()),
- };
-
- let (module, mut block) = match visible_from_module {
- VisibleFromModule::Filter(module) => (Some(module), module.containing_block()),
- VisibleFromModule::IncludeBlock(block) => (None, Some(block)),
- VisibleFromModule::None => (None, None),
- };
-
- while let Some(block_id) = block {
- if let Some(impls) = db.inherent_impls_in_block(block_id) {
- impls_for_self_ty(
- &impls,
- self_ty,
- table,
- name,
- receiver_ty,
- receiver_adjustments.clone(),
- module,
- callback,
- )?;
- }
-
- block = block_def_map(db, block_id).parent().and_then(|module| module.containing_block());
- }
-
- for krate in def_crates {
- let impls = db.inherent_impls_in_crate(krate);
- impls_for_self_ty(
- &impls,
- self_ty,
- table,
- name,
- receiver_ty,
- receiver_adjustments.clone(),
- module,
- callback,
- )?;
- }
- return ControlFlow::Continue(());
-
- #[tracing::instrument(skip_all, fields(name = ?name, visible_from_module, receiver_ty))]
- fn iterate_inherent_trait_methods<'db>(
- self_ty: Ty<'db>,
- table: &mut InferenceTable<'db>,
- name: Option<&Name>,
- receiver_ty: Option<Ty<'db>>,
- receiver_adjustments: Option<ReceiverAdjustments>,
- callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId, bool) -> ControlFlow<()>,
- traits: impl Iterator<Item = TraitId>,
- mode: LookupMode,
- ) -> ControlFlow<()> {
- let db = table.db;
- for t in traits {
- let data = t.trait_items(db);
- for &(_, item) in data.items.iter() {
- // We don't pass `visible_from_module` as all trait items should be visible.
- let visible = match is_valid_trait_method_candidate(
- table,
- t,
- name,
- receiver_ty,
- item,
- self_ty,
- mode,
- ) {
- IsValidCandidate::Yes => true,
- IsValidCandidate::NotVisible => false,
- IsValidCandidate::No => continue,
- };
- callback(receiver_adjustments.clone().unwrap_or_default(), item, visible)?;
- }
- }
- ControlFlow::Continue(())
- }
-
- #[tracing::instrument(skip_all, fields(name = ?name, visible_from_module, receiver_ty))]
- fn impls_for_self_ty<'db>(
- impls: &InherentImpls,
- self_ty: Ty<'db>,
- table: &mut InferenceTable<'db>,
- name: Option<&Name>,
- receiver_ty: Option<Ty<'db>>,
- receiver_adjustments: Option<ReceiverAdjustments>,
- visible_from_module: Option<ModuleId>,
- callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId, bool) -> ControlFlow<()>,
- ) -> ControlFlow<()> {
- for &impl_id in impls.for_self_ty(self_ty) {
- for &(ref item_name, item) in impl_id.impl_items(table.db).items.iter() {
- let visible = match is_valid_impl_method_candidate(
- table,
- self_ty,
- receiver_ty,
- visible_from_module,
- name,
- impl_id,
- item,
- item_name,
- ) {
- IsValidCandidate::Yes => true,
- IsValidCandidate::NotVisible => false,
- IsValidCandidate::No => continue,
- };
- callback(receiver_adjustments.clone().unwrap_or_default(), item, visible)?;
- }
- }
- ControlFlow::Continue(())
- }
-}
-
-/// Returns the receiver type for the index trait call.
-pub(crate) fn resolve_indexing_op<'db>(
- table: &mut InferenceTable<'db>,
- ty: Canonical<'db, Ty<'db>>,
- index_trait: TraitId,
-) -> Option<ReceiverAdjustments> {
- let ty = table.instantiate_canonical(ty);
- let deref_chain = autoderef_method_receiver(table, ty);
- for (ty, adj) in deref_chain {
- let goal = generic_implements_goal_ns(table, index_trait, ty);
- if !next_trait_solve_canonical_in_ctxt(&table.infer_ctxt, goal).no_solution() {
- return Some(adj);
- }
- }
- None
-}
-
-// FIXME: Replace this with a `Try` impl once stable
-macro_rules! check_that {
- ($cond:expr) => {
- if !$cond {
- return IsValidCandidate::No;
- }
- };
-}
-
-#[derive(Debug)]
-enum IsValidCandidate {
- Yes,
- No,
- NotVisible,
-}
-
-#[tracing::instrument(skip_all, fields(name))]
-fn is_valid_impl_method_candidate<'db>(
- table: &mut InferenceTable<'db>,
- self_ty: Ty<'db>,
- receiver_ty: Option<Ty<'db>>,
- visible_from_module: Option<ModuleId>,
- name: Option<&Name>,
- impl_id: ImplId,
- item: AssocItemId,
- item_name: &Name,
-) -> IsValidCandidate {
- match item {
- AssocItemId::FunctionId(f) => is_valid_impl_fn_candidate(
- table,
- impl_id,
- f,
- name,
- receiver_ty,
- self_ty,
- visible_from_module,
- item_name,
- ),
- AssocItemId::ConstId(c) => {
- let db = table.db;
- check_that!(receiver_ty.is_none());
- check_that!(name.is_none_or(|n| n == item_name));
-
- if let Some(from_module) = visible_from_module
- && !db.assoc_visibility(c.into()).is_visible_from(db, from_module)
- {
- cov_mark::hit!(const_candidate_not_visible);
- return IsValidCandidate::NotVisible;
- }
- let self_ty_matches = table.run_in_snapshot(|table| {
- let impl_args = table.fresh_args_for_item(impl_id.into());
- let expected_self_ty =
- db.impl_self_ty(impl_id).instantiate(table.interner(), impl_args);
- table.unify(expected_self_ty, self_ty)
- });
- if !self_ty_matches {
- cov_mark::hit!(const_candidate_self_type_mismatch);
- return IsValidCandidate::No;
- }
- IsValidCandidate::Yes
- }
- _ => IsValidCandidate::No,
- }
-}
-
-/// Checks whether a given `AssocItemId` is applicable for `receiver_ty`.
-#[tracing::instrument(skip_all, fields(name))]
-fn is_valid_trait_method_candidate<'db>(
- table: &mut InferenceTable<'db>,
- trait_id: TraitId,
- name: Option<&Name>,
- receiver_ty: Option<Ty<'db>>,
- item: AssocItemId,
- self_ty: Ty<'db>,
- mode: LookupMode,
-) -> IsValidCandidate {
- let db = table.db;
- match item {
- AssocItemId::FunctionId(fn_id) => {
- let data = db.function_signature(fn_id);
-
- check_that!(name.is_none_or(|n| n == &data.name));
-
- table.run_in_snapshot(|table| {
- let impl_subst = table.fresh_args_for_item(trait_id.into());
- let expect_self_ty = impl_subst.type_at(0);
-
- check_that!(table.unify(expect_self_ty, self_ty));
-
- if let Some(receiver_ty) = receiver_ty {
- check_that!(data.has_self_param());
-
- let args = table.fill_rest_fresh_args(fn_id.into(), impl_subst);
-
- let sig = db.callable_item_signature(fn_id.into());
- let expected_receiver = sig
- .map_bound(|s| s.skip_binder().inputs_and_output.as_slice()[0])
- .instantiate(table.interner(), args);
-
- // FIXME: Clean up this mess with some context struct like rustc's `ProbeContext`
- let variance = match mode {
- LookupMode::MethodCall => rustc_type_ir::Variance::Covariant,
- LookupMode::Path => rustc_type_ir::Variance::Invariant,
- };
- let res = table
- .infer_ctxt
- .at(&ObligationCause::dummy(), table.trait_env.env)
- .relate(expected_receiver, variance, receiver_ty);
- let Ok(infer_ok) = res else {
- return IsValidCandidate::No;
- };
-
- if !infer_ok.obligations.is_empty() {
- let mut ctxt = ObligationCtxt::new(&table.infer_ctxt);
- ctxt.register_obligations(infer_ok.into_obligations());
- // FIXME: Are we doing this correctly? Probably better to follow rustc more closely.
- check_that!(ctxt.try_evaluate_obligations().is_empty());
- }
-
- check_that!(table.unify(receiver_ty, expected_receiver));
- }
-
- IsValidCandidate::Yes
- })
- }
- AssocItemId::ConstId(c) => {
- check_that!(receiver_ty.is_none());
- check_that!(name.is_none_or(|n| db.const_signature(c).name.as_ref() == Some(n)));
-
- IsValidCandidate::Yes
- }
- _ => IsValidCandidate::No,
- }
-}
-
-#[tracing::instrument(skip_all, fields(name))]
-fn is_valid_impl_fn_candidate<'db>(
- table: &mut InferenceTable<'db>,
- impl_id: ImplId,
- fn_id: FunctionId,
- name: Option<&Name>,
- receiver_ty: Option<Ty<'db>>,
- self_ty: Ty<'db>,
- visible_from_module: Option<ModuleId>,
- item_name: &Name,
-) -> IsValidCandidate {
- check_that!(name.is_none_or(|n| n == item_name));
-
- let db = table.db;
- let data = db.function_signature(fn_id);
-
- if let Some(from_module) = visible_from_module
- && !db.assoc_visibility(fn_id.into()).is_visible_from(db, from_module)
- {
- cov_mark::hit!(autoderef_candidate_not_visible);
- return IsValidCandidate::NotVisible;
- }
- table.run_in_snapshot(|table| {
- let _p = tracing::info_span!("subst_for_def").entered();
- let impl_subst = table.infer_ctxt.fresh_args_for_item(impl_id.into());
- let expect_self_ty = db.impl_self_ty(impl_id).instantiate(table.interner(), &impl_subst);
-
- check_that!(table.unify(expect_self_ty, self_ty));
-
- if let Some(receiver_ty) = receiver_ty {
- let _p = tracing::info_span!("check_receiver_ty").entered();
- check_that!(data.has_self_param());
-
- let args = table.infer_ctxt.fresh_args_for_item(fn_id.into());
-
- let sig = db.callable_item_signature(fn_id.into());
- let expected_receiver = sig
- .map_bound(|s| s.skip_binder().inputs_and_output.as_slice()[0])
- .instantiate(table.interner(), args);
-
- check_that!(table.unify(receiver_ty, expected_receiver));
- }
-
- // We need to consider the bounds on the impl to distinguish functions of the same name
- // for a type.
- let predicates = db.generic_predicates(impl_id.into());
- let Some(predicates) = predicates.instantiate(table.interner(), impl_subst) else {
- return IsValidCandidate::Yes;
- };
-
- let mut ctxt = ObligationCtxt::new(&table.infer_ctxt);
-
- ctxt.register_obligations(predicates.into_iter().map(|p| {
- PredicateObligation::new(
- table.interner(),
- ObligationCause::new(),
- table.trait_env.env,
- p.0,
- )
- }));
-
- if ctxt.try_evaluate_obligations().is_empty() {
- IsValidCandidate::Yes
+ if trait_block == type_block {
+ blocks_iter(trait_block)
+ .filter_map(|block| TraitImpls::for_block(db, block).as_deref())
+ .for_each(for_each);
} else {
- IsValidCandidate::No
+ for_each_block(trait_block, type_block).for_each(&mut *for_each);
+ for_each_block(type_block, trait_block).for_each(for_each);
}
- })
-}
-
-/// This creates Substs for a trait with the given Self type and type variables
-/// for all other parameters, to query the trait solver with it.
-#[tracing::instrument(skip_all)]
-fn generic_implements_goal_ns<'db>(
- table: &mut InferenceTable<'db>,
- trait_: TraitId,
- self_ty: Canonical<'db, Ty<'db>>,
-) -> Canonical<'db, Goal<'db, Predicate<'db>>> {
- let args = table.infer_ctxt.fresh_args_for_item(SolverDefId::TraitId(trait_));
- let self_ty = table.instantiate_canonical(self_ty);
- let trait_ref =
- rustc_type_ir::TraitRef::new_from_args(table.infer_ctxt.interner, trait_.into(), args)
- .with_replaced_self_ty(table.infer_ctxt.interner, self_ty);
- let goal = Goal::new(table.infer_ctxt.interner, table.trait_env.env, trait_ref);
-
- table.canonicalize(goal)
-}
-
-fn autoderef_method_receiver<'db>(
- table: &mut InferenceTable<'db>,
- ty: Ty<'db>,
-) -> Vec<(Canonical<'db, Ty<'db>>, ReceiverAdjustments)> {
- let interner = table.interner();
- let mut deref_chain = Vec::new();
- let mut autoderef = autoderef::Autoderef::new_no_tracking(table, ty).use_receiver_trait();
- while let Some((ty, derefs)) = autoderef.next() {
- deref_chain.push((
- autoderef.table.canonicalize(ty),
- ReceiverAdjustments { autoref: None, autoderefs: derefs, unsize_array: false },
- ));
}
- // As a last step, we can do array unsizing (that's the only unsizing that rustc does for method receivers!)
- if let Some((rustc_type_ir::Array(parameters, _), variables, max_universe, adj)) =
- deref_chain.last().map(|d| (d.0.value.kind(), d.0.variables, d.0.max_universe, d.1.clone()))
- {
- let unsized_ty = Ty::new_slice(interner, parameters);
- deref_chain.push((
- Canonical { max_universe, value: unsized_ty, variables },
- ReceiverAdjustments { unsize_array: true, ..adj.clone() },
- ));
- }
- deref_chain
}
diff --git a/crates/hir-ty/src/method_resolution/confirm.rs b/crates/hir-ty/src/method_resolution/confirm.rs
new file mode 100644
index 0000000..9e8791e
--- /dev/null
+++ b/crates/hir-ty/src/method_resolution/confirm.rs
@@ -0,0 +1,616 @@
+//! Confirmation step of method selection, meaning ensuring the selected candidate
+//! is valid and registering all obligations.
+
+use hir_def::{
+ FunctionId, GenericDefId, GenericParamId, ItemContainerId, TraitId,
+ expr_store::path::{GenericArg as HirGenericArg, GenericArgs as HirGenericArgs},
+ hir::{ExprId, generics::GenericParamDataRef},
+ lang_item::LangItem,
+};
+use rustc_type_ir::{
+ TypeFoldable,
+ elaborate::elaborate,
+ inherent::{BoundExistentialPredicates, IntoKind, SliceLike, Ty as _},
+};
+use tracing::debug;
+
+use crate::{
+ Adjust, Adjustment, AutoBorrow, IncorrectGenericsLenKind, InferenceDiagnostic,
+ LifetimeElisionKind, PointerCast,
+ db::HirDatabase,
+ infer::{AllowTwoPhase, AutoBorrowMutability, InferenceContext, TypeMismatch},
+ lower::{
+ GenericPredicates,
+ path::{GenericArgsLowerer, TypeLikeConst, substs_from_args_and_bindings},
+ },
+ method_resolution::{CandidateId, MethodCallee, probe},
+ next_solver::{
+ Binder, Clause, ClauseKind, Const, DbInterner, EarlyParamRegion, ErrorGuaranteed, FnSig,
+ GenericArg, GenericArgs, ParamConst, PolyExistentialTraitRef, PolyTraitRef, Region,
+ TraitRef, Ty, TyKind,
+ infer::{
+ BoundRegionConversionTime, InferCtxt,
+ traits::{ObligationCause, PredicateObligation},
+ },
+ util::{clauses_as_obligations, upcast_choices},
+ },
+};
+
+struct ConfirmContext<'a, 'b, 'db> {
+ ctx: &'a mut InferenceContext<'b, 'db>,
+ candidate: FunctionId,
+ expr: ExprId,
+}
+
+#[derive(Debug)]
+pub(crate) struct ConfirmResult<'db> {
+ pub(crate) callee: MethodCallee<'db>,
+ pub(crate) illegal_sized_bound: bool,
+ pub(crate) adjustments: Box<[Adjustment<'db>]>,
+}
+
+impl<'a, 'db> InferenceContext<'a, 'db> {
+ pub(crate) fn confirm_method(
+ &mut self,
+ pick: &probe::Pick<'db>,
+ unadjusted_self_ty: Ty<'db>,
+ expr: ExprId,
+ generic_args: Option<&HirGenericArgs>,
+ ) -> ConfirmResult<'db> {
+ debug!(
+ "confirm(unadjusted_self_ty={:?}, pick={:?}, generic_args={:?})",
+ unadjusted_self_ty, pick, generic_args,
+ );
+
+ let CandidateId::FunctionId(candidate) = pick.item else {
+ panic!("confirmation is only done for method calls, not path lookups");
+ };
+ let mut confirm_cx = ConfirmContext::new(self, candidate, expr);
+ confirm_cx.confirm(unadjusted_self_ty, pick, generic_args)
+ }
+}
+
+impl<'a, 'b, 'db> ConfirmContext<'a, 'b, 'db> {
+ fn new(
+ ctx: &'a mut InferenceContext<'b, 'db>,
+ candidate: FunctionId,
+ expr: ExprId,
+ ) -> ConfirmContext<'a, 'b, 'db> {
+ ConfirmContext { ctx, candidate, expr }
+ }
+
+ #[inline]
+ fn db(&self) -> &'db dyn HirDatabase {
+ self.ctx.table.infer_ctxt.interner.db
+ }
+
+ #[inline]
+ fn interner(&self) -> DbInterner<'db> {
+ self.ctx.table.infer_ctxt.interner
+ }
+
+ #[inline]
+ fn infcx(&self) -> &InferCtxt<'db> {
+ &self.ctx.table.infer_ctxt
+ }
+
+ fn confirm(
+ &mut self,
+ unadjusted_self_ty: Ty<'db>,
+ pick: &probe::Pick<'db>,
+ generic_args: Option<&HirGenericArgs>,
+ ) -> ConfirmResult<'db> {
+ // Adjust the self expression the user provided and obtain the adjusted type.
+ let (self_ty, adjustments) = self.adjust_self_ty(unadjusted_self_ty, pick);
+
+ // Create generic args for the method's type parameters.
+ let rcvr_args = self.fresh_receiver_args(self_ty, pick);
+ let all_args = self.instantiate_method_args(generic_args, rcvr_args);
+
+ debug!("rcvr_args={rcvr_args:?}, all_args={all_args:?}");
+
+ // Create the final signature for the method, replacing late-bound regions.
+ let (method_sig, method_predicates) =
+ self.instantiate_method_sig(pick, all_args.as_slice());
+
+ // If there is a `Self: Sized` bound and `Self` is a trait object, it is possible that
+ // something which derefs to `Self` actually implements the trait and the caller
+ // wanted to make a static dispatch on it but forgot to import the trait.
+ // See test `tests/ui/issues/issue-35976.rs`.
+ //
+ // In that case, we'll error anyway, but we'll also re-run the search with all traits
+ // in scope, and if we find another method which can be used, we'll output an
+ // appropriate hint suggesting to import the trait.
+ let filler_args = GenericArgs::fill_rest(
+ self.interner(),
+ self.candidate.into(),
+ rcvr_args,
+ |index, id, _| match id {
+ GenericParamId::TypeParamId(id) => Ty::new_param(self.interner(), id, index).into(),
+ GenericParamId::ConstParamId(id) => {
+ Const::new_param(self.interner(), ParamConst { id, index }).into()
+ }
+ GenericParamId::LifetimeParamId(id) => {
+ Region::new_early_param(self.interner(), EarlyParamRegion { id, index }).into()
+ }
+ },
+ );
+ let illegal_sized_bound = self.predicates_require_illegal_sized_bound(
+ GenericPredicates::query_all(self.db(), self.candidate.into())
+ .iter_instantiated_copied(self.interner(), filler_args.as_slice()),
+ );
+
+ // Unify the (adjusted) self type with what the method expects.
+ //
+ // SUBTLE: if we want good error messages, because of "guessing" while matching
+ // traits, no trait system method can be called before this point because they
+ // could alter our Self-type, except for normalizing the receiver from the
+ // signature (which is also done during probing).
+ let method_sig_rcvr = method_sig.inputs().as_slice()[0];
+ debug!(
+ "confirm: self_ty={:?} method_sig_rcvr={:?} method_sig={:?}",
+ self_ty, method_sig_rcvr, method_sig
+ );
+ self.unify_receivers(self_ty, method_sig_rcvr, pick);
+
+ // Make sure nobody calls `drop()` explicitly.
+ self.check_for_illegal_method_calls();
+
+ // Lint when an item is shadowing a supertrait item.
+ self.lint_shadowed_supertrait_items(pick);
+
+ // Add any trait/regions obligations specified on the method's type parameters.
+ // We won't add these if we encountered an illegal sized bound, so that we can use
+ // a custom error in that case.
+ if !illegal_sized_bound {
+ self.add_obligations(method_sig, all_args, method_predicates);
+ }
+
+ // Create the final `MethodCallee`.
+ let callee = MethodCallee { def_id: self.candidate, args: all_args, sig: method_sig };
+ ConfirmResult { callee, illegal_sized_bound, adjustments }
+ }
+
+ ///////////////////////////////////////////////////////////////////////////
+ // ADJUSTMENTS
+
+ fn adjust_self_ty(
+ &mut self,
+ unadjusted_self_ty: Ty<'db>,
+ pick: &probe::Pick<'db>,
+ ) -> (Ty<'db>, Box<[Adjustment<'db>]>) {
+ // Commit the autoderefs by calling `autoderef` again, but this
+ // time writing the results into the various typeck results.
+ let mut autoderef = self.ctx.table.autoderef_with_tracking(unadjusted_self_ty);
+ let Some((mut target, n)) = autoderef.nth(pick.autoderefs) else {
+ return (Ty::new_error(self.interner(), ErrorGuaranteed), Box::new([]));
+ };
+ assert_eq!(n, pick.autoderefs);
+
+ let mut adjustments =
+ self.ctx.table.register_infer_ok(autoderef.adjust_steps_as_infer_ok());
+ match pick.autoref_or_ptr_adjustment {
+ Some(probe::AutorefOrPtrAdjustment::Autoref { mutbl, unsize }) => {
+ let region = self.infcx().next_region_var();
+ // Type we're wrapping in a reference, used later for unsizing
+ let base_ty = target;
+
+ target = Ty::new_ref(self.interner(), region, target, mutbl);
+
+ // Method call receivers are the primary use case
+ // for two-phase borrows.
+ let mutbl = AutoBorrowMutability::new(mutbl, AllowTwoPhase::Yes);
+
+ adjustments
+ .push(Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(mutbl)), target });
+
+ if unsize {
+ let unsized_ty = if let TyKind::Array(elem_ty, _) = base_ty.kind() {
+ Ty::new_slice(self.interner(), elem_ty)
+ } else {
+ panic!(
+ "AutorefOrPtrAdjustment's unsize flag should only be set for array ty, found {:?}",
+ base_ty
+ )
+ };
+ target = Ty::new_ref(self.interner(), region, unsized_ty, mutbl.into());
+ adjustments
+ .push(Adjustment { kind: Adjust::Pointer(PointerCast::Unsize), target });
+ }
+ }
+ Some(probe::AutorefOrPtrAdjustment::ToConstPtr) => {
+ target = match target.kind() {
+ TyKind::RawPtr(ty, mutbl) => {
+ assert!(mutbl.is_mut());
+ Ty::new_imm_ptr(self.interner(), ty)
+ }
+ other => panic!("Cannot adjust receiver type {other:?} to const ptr"),
+ };
+
+ adjustments.push(Adjustment {
+ kind: Adjust::Pointer(PointerCast::MutToConstPointer),
+ target,
+ });
+ }
+ None => {}
+ }
+
+ (target, adjustments.into_boxed_slice())
+ }
+
+ /// Returns a set of generic parameters for the method *receiver* where all type and region
+ /// parameters are instantiated with fresh variables. These generic parameters do not include any
+ /// parameters declared on the method itself.
+ ///
+ /// Note that these generic parameters may include late-bound regions from the impl level. If so,
+ /// these are instantiated later in the `instantiate_method_sig` routine.
+ fn fresh_receiver_args(
+ &mut self,
+ self_ty: Ty<'db>,
+ pick: &probe::Pick<'db>,
+ ) -> GenericArgs<'db> {
+ match pick.kind {
+ probe::InherentImplPick(impl_def_id) => {
+ self.infcx().fresh_args_for_item(impl_def_id.into())
+ }
+
+ probe::ObjectPick(trait_def_id) => {
+ // If the trait is not object safe (specifically, we care about when
+ // the receiver is not valid), then there's a chance that we will not
+ // actually be able to recover the object by derefing the receiver like
+ // we should if it were valid.
+ if self.db().dyn_compatibility_of_trait(trait_def_id).is_some() {
+ return GenericArgs::error_for_item(self.interner(), trait_def_id.into());
+ }
+
+ self.extract_existential_trait_ref(self_ty, |this, object_ty, principal| {
+ // The object data has no entry for the Self
+ // Type. For the purposes of this method call, we
+ // instantiate the object type itself. This
+ // wouldn't be a sound instantiation in all cases,
+ // since each instance of the object type is a
+ // different existential and hence could match
+ // distinct types (e.g., if `Self` appeared as an
+ // argument type), but those cases have already
+ // been ruled out when we deemed the trait to be
+ // "dyn-compatible".
+ let original_poly_trait_ref =
+ principal.with_self_ty(this.interner(), object_ty);
+ let upcast_poly_trait_ref = this.upcast(original_poly_trait_ref, trait_def_id);
+ let upcast_trait_ref =
+ this.instantiate_binder_with_fresh_vars(upcast_poly_trait_ref);
+ debug!(
+ "original_poly_trait_ref={:?} upcast_trait_ref={:?} target_trait={:?}",
+ original_poly_trait_ref, upcast_trait_ref, trait_def_id
+ );
+ upcast_trait_ref.args
+ })
+ }
+
+ probe::TraitPick(trait_def_id) => {
+ // Make a trait reference `$0 : Trait<$1...$n>`
+ // consisting entirely of type variables. Later on in
+ // the process we will unify the transformed-self-type
+ // of the method with the actual type in order to
+ // unify some of these variables.
+ self.infcx().fresh_args_for_item(trait_def_id.into())
+ }
+
+ probe::WhereClausePick(poly_trait_ref) => {
+ // Where clauses can have bound regions in them. We need to instantiate
+ // those to convert from a poly-trait-ref to a trait-ref.
+ self.instantiate_binder_with_fresh_vars(poly_trait_ref).args
+ }
+ }
+ }
+
+ fn extract_existential_trait_ref<R, F>(&self, self_ty: Ty<'db>, mut closure: F) -> R
+ where
+ F: FnMut(&ConfirmContext<'a, 'b, 'db>, Ty<'db>, PolyExistentialTraitRef<'db>) -> R,
+ {
+ // If we specified that this is an object method, then the
+ // self-type ought to be something that can be dereferenced to
+ // yield an object-type (e.g., `&Object` or `Box<Object>`
+ // etc).
+
+ let mut autoderef = self.ctx.table.autoderef(self_ty);
+
+ // We don't need to gate this behind arbitrary self types
+ // per se, but it does make things a bit more gated.
+ if self.ctx.unstable_features.arbitrary_self_types
+ || self.ctx.unstable_features.arbitrary_self_types_pointers
+ {
+ autoderef = autoderef.use_receiver_trait();
+ }
+
+ autoderef
+ .include_raw_pointers()
+ .find_map(|(ty, _)| match ty.kind() {
+ TyKind::Dynamic(data, ..) => Some(closure(
+ self,
+ ty,
+ data.principal().expect("calling trait method on empty object?"),
+ )),
+ _ => None,
+ })
+ .unwrap_or_else(|| {
+ panic!("self-type `{:?}` for ObjectPick never dereferenced to an object", self_ty)
+ })
+ }
+
+ fn instantiate_method_args(
+ &mut self,
+ generic_args: Option<&HirGenericArgs>,
+ parent_args: GenericArgs<'db>,
+ ) -> GenericArgs<'db> {
+ struct LowererCtx<'a, 'b, 'db> {
+ ctx: &'a mut InferenceContext<'b, 'db>,
+ expr: ExprId,
+ parent_args: &'a [GenericArg<'db>],
+ }
+
+ impl<'db> GenericArgsLowerer<'db> for LowererCtx<'_, '_, 'db> {
+ fn report_len_mismatch(
+ &mut self,
+ def: GenericDefId,
+ provided_count: u32,
+ expected_count: u32,
+ kind: IncorrectGenericsLenKind,
+ ) {
+ self.ctx.push_diagnostic(InferenceDiagnostic::MethodCallIncorrectGenericsLen {
+ expr: self.expr,
+ provided_count,
+ expected_count,
+ kind,
+ def,
+ });
+ }
+
+ fn report_arg_mismatch(
+ &mut self,
+ param_id: GenericParamId,
+ arg_idx: u32,
+ has_self_arg: bool,
+ ) {
+ self.ctx.push_diagnostic(InferenceDiagnostic::MethodCallIncorrectGenericsOrder {
+ expr: self.expr,
+ param_id,
+ arg_idx,
+ has_self_arg,
+ });
+ }
+
+ fn provided_kind(
+ &mut self,
+ param_id: GenericParamId,
+ param: GenericParamDataRef<'_>,
+ arg: &HirGenericArg,
+ ) -> GenericArg<'db> {
+ match (param, arg) {
+ (
+ GenericParamDataRef::LifetimeParamData(_),
+ HirGenericArg::Lifetime(lifetime),
+ ) => self.ctx.make_body_lifetime(*lifetime).into(),
+ (GenericParamDataRef::TypeParamData(_), HirGenericArg::Type(type_ref)) => {
+ self.ctx.make_body_ty(*type_ref).into()
+ }
+ (GenericParamDataRef::ConstParamData(_), HirGenericArg::Const(konst)) => {
+ let GenericParamId::ConstParamId(const_id) = param_id else {
+ unreachable!("non-const param ID for const param");
+ };
+ let const_ty = self.ctx.db.const_param_ty_ns(const_id);
+ self.ctx.make_body_const(*konst, const_ty).into()
+ }
+ _ => unreachable!("unmatching param kinds were passed to `provided_kind()`"),
+ }
+ }
+
+ fn provided_type_like_const(
+ &mut self,
+ const_ty: Ty<'db>,
+ arg: TypeLikeConst<'_>,
+ ) -> Const<'db> {
+ match arg {
+ TypeLikeConst::Path(path) => self.ctx.make_path_as_body_const(path, const_ty),
+ TypeLikeConst::Infer => self.ctx.table.next_const_var(),
+ }
+ }
+
+ fn inferred_kind(
+ &mut self,
+ _def: GenericDefId,
+ param_id: GenericParamId,
+ _param: GenericParamDataRef<'_>,
+ _infer_args: bool,
+ _preceding_args: &[GenericArg<'db>],
+ ) -> GenericArg<'db> {
+ // Always create an inference var, even when `infer_args == false`. This helps with diagnostics,
+ // and I think it's also required in the presence of `impl Trait` (that must be inferred).
+ self.ctx.table.next_var_for_param(param_id)
+ }
+
+ fn parent_arg(&mut self, param_idx: u32, _param_id: GenericParamId) -> GenericArg<'db> {
+ self.parent_args[param_idx as usize]
+ }
+
+ fn report_elided_lifetimes_in_path(
+ &mut self,
+ _def: GenericDefId,
+ _expected_count: u32,
+ _hard_error: bool,
+ ) {
+ unreachable!("we set `LifetimeElisionKind::Infer`")
+ }
+
+ fn report_elision_failure(&mut self, _def: GenericDefId, _expected_count: u32) {
+ unreachable!("we set `LifetimeElisionKind::Infer`")
+ }
+
+ fn report_missing_lifetime(&mut self, _def: GenericDefId, _expected_count: u32) {
+ unreachable!("we set `LifetimeElisionKind::Infer`")
+ }
+ }
+
+ substs_from_args_and_bindings(
+ self.db(),
+ self.ctx.body,
+ generic_args,
+ self.candidate.into(),
+ true,
+ LifetimeElisionKind::Infer,
+ false,
+ None,
+ &mut LowererCtx { ctx: self.ctx, expr: self.expr, parent_args: parent_args.as_slice() },
+ )
+ }
+
+ fn unify_receivers(
+ &mut self,
+ self_ty: Ty<'db>,
+ method_self_ty: Ty<'db>,
+ pick: &probe::Pick<'db>,
+ ) {
+ debug!(
+ "unify_receivers: self_ty={:?} method_self_ty={:?} pick={:?}",
+ self_ty, method_self_ty, pick
+ );
+ let cause = ObligationCause::new();
+ match self.ctx.table.at(&cause).sup(method_self_ty, self_ty) {
+ Ok(infer_ok) => {
+ self.ctx.table.register_infer_ok(infer_ok);
+ }
+ Err(_) => {
+ if self.ctx.unstable_features.arbitrary_self_types {
+ self.ctx.result.type_mismatches.insert(
+ self.expr.into(),
+ TypeMismatch { expected: method_self_ty, actual: self_ty },
+ );
+ }
+ }
+ }
+ }
+
+ // NOTE: this returns the *unnormalized* predicates and method sig. Because of
+ // inference guessing, the predicates and method signature can't be normalized
+ // until we unify the `Self` type.
+ fn instantiate_method_sig<'c>(
+ &mut self,
+ pick: &probe::Pick<'db>,
+ all_args: &'c [GenericArg<'db>],
+ ) -> (FnSig<'db>, impl Iterator<Item = PredicateObligation<'db>> + use<'c, 'db>) {
+ debug!("instantiate_method_sig(pick={:?}, all_args={:?})", pick, all_args);
+
+ // Instantiate the bounds on the method with the
+ // type/early-bound-regions instantiations performed. There can
+ // be no late-bound regions appearing here.
+ let def_id = self.candidate;
+ let method_predicates = clauses_as_obligations(
+ GenericPredicates::query_all(self.db(), def_id.into())
+ .iter_instantiated_copied(self.interner(), all_args),
+ ObligationCause::new(),
+ self.ctx.table.trait_env.env,
+ );
+
+ let sig =
+ self.db().callable_item_signature(def_id.into()).instantiate(self.interner(), all_args);
+ debug!("type scheme instantiated, sig={:?}", sig);
+
+ let sig = self.instantiate_binder_with_fresh_vars(sig);
+ debug!("late-bound lifetimes from method instantiated, sig={:?}", sig);
+
+ (sig, method_predicates)
+ }
+
+ fn add_obligations(
+ &mut self,
+ sig: FnSig<'db>,
+ all_args: GenericArgs<'db>,
+ method_predicates: impl Iterator<Item = PredicateObligation<'db>>,
+ ) {
+ debug!("add_obligations: sig={:?} all_args={:?}", sig, all_args);
+
+ self.ctx.table.register_predicates(method_predicates);
+
+ // this is a projection from a trait reference, so we have to
+ // make sure that the trait reference inputs are well-formed.
+ self.ctx.table.add_wf_bounds(all_args);
+
+ // the function type must also be well-formed (this is not
+ // implied by the args being well-formed because of inherent
+ // impls and late-bound regions - see issue #28609).
+ for ty in sig.inputs_and_output {
+ self.ctx.table.register_wf_obligation(ty.into(), ObligationCause::new());
+ }
+ }
+
+ ///////////////////////////////////////////////////////////////////////////
+ // MISCELLANY
+
+ fn predicates_require_illegal_sized_bound(
+ &self,
+ predicates: impl Iterator<Item = Clause<'db>>,
+ ) -> bool {
+ let Some(sized_def_id) =
+ LangItem::Sized.resolve_trait(self.db(), self.ctx.resolver.krate())
+ else {
+ return false;
+ };
+
+ elaborate(self.interner(), predicates)
+ // We don't care about regions here.
+ .filter_map(|pred| match pred.kind().skip_binder() {
+ ClauseKind::Trait(trait_pred) if trait_pred.def_id().0 == sized_def_id => {
+ Some(trait_pred)
+ }
+ _ => None,
+ })
+ .any(|trait_pred| matches!(trait_pred.self_ty().kind(), TyKind::Dynamic(..)))
+ }
+
+ fn check_for_illegal_method_calls(&self) {
+ // Disallow calls to the method `drop` defined in the `Drop` trait.
+ if let ItemContainerId::TraitId(trait_def_id) = self.candidate.loc(self.db()).container
+ && LangItem::Drop
+ .resolve_trait(self.db(), self.ctx.resolver.krate())
+ .is_some_and(|drop_trait| drop_trait == trait_def_id)
+ {
+ // FIXME: Report an error.
+ }
+ }
+
+ #[expect(clippy::needless_return)]
+ fn lint_shadowed_supertrait_items(&self, pick: &probe::Pick<'_>) {
+ if pick.shadowed_candidates.is_empty() {
+ return;
+ }
+
+ // FIXME: Emit the lint.
+ }
+
+ fn upcast(
+ &self,
+ source_trait_ref: PolyTraitRef<'db>,
+ target_trait_def_id: TraitId,
+ ) -> PolyTraitRef<'db> {
+ let upcast_trait_refs =
+ upcast_choices(self.interner(), source_trait_ref, target_trait_def_id);
+
+ // must be exactly one trait ref or we'd get an ambig error etc
+ if let &[upcast_trait_ref] = upcast_trait_refs.as_slice() {
+ upcast_trait_ref
+ } else {
+ Binder::dummy(TraitRef::new_from_args(
+ self.interner(),
+ target_trait_def_id.into(),
+ GenericArgs::error_for_item(self.interner(), target_trait_def_id.into()),
+ ))
+ }
+ }
+
+ fn instantiate_binder_with_fresh_vars<T>(&self, value: Binder<'db, T>) -> T
+ where
+ T: TypeFoldable<DbInterner<'db>> + Copy,
+ {
+ self.infcx().instantiate_binder_with_fresh_vars(BoundRegionConversionTime::FnCall, value)
+ }
+}
diff --git a/crates/hir-ty/src/method_resolution/probe.rs b/crates/hir-ty/src/method_resolution/probe.rs
new file mode 100644
index 0000000..adc144c
--- /dev/null
+++ b/crates/hir-ty/src/method_resolution/probe.rs
@@ -0,0 +1,2077 @@
+//! Candidate assembly and selection in method resolution - where we enumerate all candidates
+//! and choose the best one (or, in some IDE scenarios, just enumerate them all).
+
+use std::{cell::RefCell, convert::Infallible, ops::ControlFlow};
+
+use hir_def::{
+ AssocItemId, FunctionId, GenericParamId, ImplId, ItemContainerId, TraitId,
+ signatures::TraitFlags,
+};
+use hir_expand::name::Name;
+use rustc_ast_ir::Mutability;
+use rustc_hash::{FxHashMap, FxHashSet};
+use rustc_type_ir::{
+ InferTy, TypeVisitableExt, Upcast, Variance,
+ elaborate::{self, supertrait_def_ids},
+ fast_reject::{DeepRejectCtxt, TreatParams, simplify_type},
+ inherent::{AdtDef as _, BoundExistentialPredicates as _, IntoKind, SliceLike, Ty as _},
+};
+use smallvec::{SmallVec, smallvec};
+use tracing::{debug, instrument};
+
+use self::CandidateKind::*;
+pub(super) use self::PickKind::*;
+use crate::{
+ autoderef::Autoderef,
+ db::HirDatabase,
+ lower::GenericPredicates,
+ method_resolution::{
+ CandidateId, CandidateSource, InherentImpls, MethodError, MethodResolutionContext,
+ incoherent_inherent_impls, simplified_type_module,
+ },
+ next_solver::{
+ Binder, Canonical, ClauseKind, DbInterner, FnSig, GenericArg, GenericArgs, Goal, ParamEnv,
+ PolyTraitRef, Predicate, Region, SimplifiedType, TraitRef, Ty, TyKind,
+ infer::{
+ BoundRegionConversionTime, InferCtxt, InferOk,
+ canonical::{QueryResponse, canonicalizer::OriginalQueryValues},
+ select::{ImplSource, Selection, SelectionResult},
+ traits::{Obligation, ObligationCause, PredicateObligation},
+ },
+ obligation_ctxt::ObligationCtxt,
+ util::clauses_as_obligations,
+ },
+};
+
+/// Mutable state for one method-resolution probe: candidates assembled from
+/// the autoderef `steps`, later filtered/picked according to `Choice`.
+struct ProbeContext<'a, 'db, Choice> {
+ /// The enclosing method-resolution context (infcx, resolver, env, ...).
+ ctx: &'a MethodResolutionContext<'a, 'db>,
+ /// Whether we resolve a `.method()` call or a `Type::item` path.
+ mode: Mode,
+
+ /// This is the OriginalQueryValues for the steps queries
+ /// that are answered in steps.
+ orig_steps_var_values: &'a OriginalQueryValues<'db>,
+ steps: &'a [CandidateStep<'db>],
+
+ /// Candidates from inherent impls and from object/where-clause bounds.
+ inherent_candidates: Vec<Candidate<'db>>,
+ /// Candidates from traits in scope.
+ extension_candidates: Vec<Candidate<'db>>,
+ /// Impls already visited, so we don't assemble duplicates.
+ impl_dups: FxHashSet<ImplId>,
+
+ /// List of potential private candidates. Will be trimmed to ones that
+ /// actually apply and then the result inserted into `private_candidate`
+ private_candidates: Vec<Candidate<'db>>,
+
+ /// Collects near misses when the candidate functions are missing a `self` keyword and is only
+ /// used for error reporting
+ static_candidates: Vec<CandidateSource>,
+
+ /// Strategy object: pick one best candidate by name, or enumerate all.
+ choice: Choice,
+}
+
+/// A candidate paired with whether it passed the visibility check during
+/// assembly (used by `probe_all`, which also reports private candidates).
+#[derive(Debug)]
+pub struct CandidateWithPrivate<'db> {
+ /// The assembled candidate.
+ pub candidate: Candidate<'db>,
+ /// `true` if the item is visible from the resolution site.
+ pub is_visible: bool,
+}
+
+/// An assembled method candidate: the associated item plus where it came from.
+#[derive(Debug, Clone)]
+pub struct Candidate<'db> {
+ /// The associated function or const under consideration.
+ pub item: CandidateId,
+ /// The source of the candidate (inherent impl, object bound, trait, where clause).
+ pub kind: CandidateKind<'db>,
+}
+
+/// Where a `Candidate` was found during assembly.
+#[derive(Debug, Clone)]
+pub enum CandidateKind<'db> {
+ /// From an inherent `impl`; `receiver_steps` records how far along the
+ /// receiver chain the matching self type was.
+ InherentImplCandidate { impl_def_id: ImplId, receiver_steps: usize },
+ /// From the principal trait (or a supertrait) of a `dyn Trait` receiver.
+ ObjectCandidate(PolyTraitRef<'db>),
+ /// From a trait in scope.
+ TraitCandidate(PolyTraitRef<'db>),
+ /// From a where-clause bound on a type-parameter receiver.
+ WhereClauseCandidate(PolyTraitRef<'db>),
+}
+
+/// Outcome of probing a single candidate against a receiver type.
+#[derive(Debug, PartialEq, Eq, Copy, Clone)]
+enum ProbeResult {
+ NoMatch,
+ Match,
+}
+
+/// When adjusting a receiver we often want to do one of
+///
+/// - Add a `&` (or `&mut`), converting the receiver from `T` to `&T` (or `&mut T`)
+/// - If the receiver has type `*mut T`, convert it to `*const T`
+///
+/// This type tells us which one to do.
+///
+/// Note that in principle we could do both at the same time. For example, when the receiver has
+/// type `T`, we could autoref it to `&T`, then convert to `*const T`. Or, when it has type `*mut
+/// T`, we could convert it to `*const T`, then autoref to `&*const T`. However, currently we do
+/// (at most) one of these. Either the receiver has type `T` and we convert it to `&T` (or with
+/// `mut`), or it has type `*mut T` and we convert it to `*const T`.
+#[derive(Debug, PartialEq, Copy, Clone)]
+pub enum AutorefOrPtrAdjustment {
+ /// Receiver has type `T`, add `&` or `&mut` (if `T` is `mut`), and maybe also "unsize" it.
+ /// Unsizing is used to convert a `[T; N]` to `[T]`, which only makes sense when autorefing.
+ Autoref {
+ mutbl: Mutability,
+
+ /// Indicates that the source expression should be "unsized" to a target type.
+ /// This is special-cased for just arrays unsizing to slices.
+ unsize: bool,
+ },
+ /// Receiver has type `*mut T`, convert to `*const T`
+ ToConstPtr,
+}
+
+impl AutorefOrPtrAdjustment {
+ /// Whether this adjustment includes an array-to-slice unsize step.
+ /// `ToConstPtr` never unsizes.
+ fn get_unsize(&self) -> bool {
+ match self {
+ AutorefOrPtrAdjustment::Autoref { mutbl: _, unsize } => *unsize,
+ AutorefOrPtrAdjustment::ToConstPtr => false,
+ }
+ }
+}
+
+/// Criteria to apply when searching for a given Pick. This is used during
+/// the search for potentially shadowed methods to ensure we don't search
+/// more candidates than strictly necessary.
+#[derive(Debug)]
+struct PickConstraintsForShadowed {
+ /// Number of autoderefs the (potentially shadowed) pick performed.
+ autoderefs: usize,
+ /// The pick's position along the `Receiver` chain, if it came from an
+ /// inherent impl (`None` otherwise).
+ receiver_steps: Option<usize>,
+ /// The picked item itself; an item never shadows itself.
+ def_id: CandidateId,
+}
+
+impl PickConstraintsForShadowed {
+ /// A candidate can only shadow this pick if it needed the same number of
+ /// autoderefs.
+ fn may_shadow_based_on_autoderefs(&self, autoderefs: usize) -> bool {
+ autoderefs == self.autoderefs
+ }
+
+ /// Whether `candidate` could shadow the pick described by these constraints.
+ fn candidate_may_shadow(&self, candidate: &Candidate<'_>) -> bool {
+ // An item never shadows itself
+ candidate.item != self.def_id
+ // and we're only concerned about inherent impls doing the shadowing.
+ // Shadowing can only occur if the shadowed is further along
+ // the Receiver dereferencing chain than the shadower.
+ && match candidate.kind {
+ CandidateKind::InherentImplCandidate { receiver_steps, .. } => match self.receiver_steps {
+ Some(shadowed_receiver_steps) => receiver_steps > shadowed_receiver_steps,
+ _ => false
+ },
+ _ => false
+ }
+ }
+}
+
+/// The result of a successful method probe: the chosen item plus the receiver
+/// adjustments (autoderefs, autoref or pointer cast) needed to call it.
+#[derive(Debug, Clone)]
+pub struct Pick<'db> {
+ /// The associated item that was picked.
+ pub item: CandidateId,
+ /// Which kind of source the winning candidate came from.
+ pub kind: PickKind<'db>,
+
+ /// Indicates that the source expression should be autoderef'd N times
+ /// ```ignore (not-rust)
+ /// A = expr | *expr | **expr | ...
+ /// ```
+ pub autoderefs: usize,
+
+ /// Indicates that we want to add an autoref (and maybe also unsize it), or if the receiver is
+ /// `*mut T`, convert it to `*const T`.
+ pub autoref_or_ptr_adjustment: Option<AutorefOrPtrAdjustment>,
+ /// The receiver type the pick was made against.
+ pub self_ty: Ty<'db>,
+
+ /// Number of jumps along the `Receiver::Target` chain we followed
+ /// to identify this method. Used only for deshadowing errors.
+ /// Only applies for inherent impls.
+ pub receiver_steps: Option<usize>,
+
+ /// Candidates that were shadowed by supertraits.
+ pub shadowed_candidates: Vec<CandidateId>,
+}
+
+/// Which kind of candidate a `Pick` was made from; mirrors `CandidateKind`.
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum PickKind<'db> {
+ /// Picked from an inherent impl.
+ InherentImplPick(ImplId),
+ /// Picked from a `dyn Trait` object bound.
+ ObjectPick(TraitId),
+ /// Picked from a trait in scope.
+ TraitPick(TraitId),
+ /// Picked from a where-clause bound.
+ WhereClausePick(
+ // Trait
+ PolyTraitRef<'db>,
+ ),
+}
+
+/// Result of `probe_for_name`: the winning `Pick`, or why resolution failed.
+pub(crate) type PickResult<'db> = Result<Pick<'db>, MethodError<'db>>;
+
+/// The syntactic form of the lookup, which changes autoderef behavior and
+/// whether static (no-`self`) items are considered.
+#[derive(PartialEq, Eq, Copy, Clone, Debug)]
+pub enum Mode {
+ /// An expression of the form `receiver.method_name(...)`.
+ /// Autoderefs are performed on `receiver`, lookup is done based on the
+ /// `self` argument of the method, and static methods aren't considered.
+ MethodCall,
+ /// An expression of the form `Type::item` or `<T>::item`.
+ /// No autoderefs are performed, lookup is done based on the type each
+ /// implementation is for, and static methods are included.
+ Path,
+}
+
+/// One step along the receiver's autoderef chain, stored as a canonicalized
+/// query response so it can be re-instantiated inside later probe snapshots.
+#[derive(Debug, Clone)]
+pub struct CandidateStep<'db> {
+ /// The receiver type at this step, canonicalized.
+ pub self_ty: Canonical<'db, QueryResponse<'db, Ty<'db>>>,
+ /// `true` if `self_ty` is an inference var sub-unified with an opaque's
+ /// hidden type (see `method_autoderef_steps`).
+ pub self_ty_is_opaque: bool,
+ /// Number of dereferences performed to reach this step.
+ pub autoderefs: usize,
+ /// `true` if the type results from a dereference of a raw pointer.
+ /// when assembling candidates, we include these steps, but not when
+ /// picking methods. This so that if we have `foo: *const Foo` and `Foo` has methods
+ /// `fn by_raw_ptr(self: *const Self)` and `fn by_ref(&self)`, then
+ /// `foo.by_raw_ptr()` will work and `foo.by_ref()` won't.
+ pub from_unsafe_deref: bool,
+ /// `true` for the extra `[T; N]` -> `[T]` step appended after autoderef.
+ pub unsize: bool,
+ /// We will generate CandidateSteps which are reachable via a chain
+ /// of following `Receiver`. The first 'n' of those will be reachable
+ /// by following a chain of 'Deref' instead (since there's a blanket
+ /// implementation of Receiver for Deref).
+ /// We use the entire set of steps when identifying method candidates
+ /// (e.g. identifying relevant `impl` blocks) but only those that are
+ /// reachable via Deref when examining what the receiver type can
+ /// be converted into by autodereffing.
+ pub reachable_via_deref: bool,
+}
+
+/// Output of `method_autoderef_steps`.
+#[derive(Clone, Debug)]
+struct MethodAutoderefStepsResult<'db> {
+ /// The valid autoderef steps that could be found by following a chain
+ /// of `Receiver<Target=T>` or `Deref<Target=T>` trait implementations.
+ pub steps: SmallVec<[CandidateStep<'db>; 3]>,
+ /// If Some(T), a type autoderef reported an error on.
+ pub opt_bad_ty: Option<MethodAutoderefBadTy<'db>>,
+ /// If `true`, `steps` has been truncated due to reaching the
+ /// recursion limit.
+ pub reached_recursion_limit: bool,
+}
+
+/// An inference-variable or error type that terminated the autoderef chain.
+#[derive(Debug, Clone)]
+struct MethodAutoderefBadTy<'db> {
+ /// `true` if a raw pointer was dereferenced somewhere along the chain.
+ pub reached_raw_pointer: bool,
+ /// The offending final type, canonicalized.
+ pub ty: Canonical<'db, QueryResponse<'db, Ty<'db>>>,
+}
+
+impl<'a, 'db> MethodResolutionContext<'a, 'db> {
+ #[instrument(level = "debug", skip(self))]
+ pub fn probe_for_name(&self, mode: Mode, item_name: Name, self_ty: Ty<'db>) -> PickResult<'db> {
+ self.probe_op(mode, self_ty, ProbeForNameChoice { private_candidate: None, item_name })
+ }
+
+ #[instrument(level = "debug", skip(self))]
+ pub fn probe_all(
+ &self,
+ mode: Mode,
+ self_ty: Ty<'db>,
+ ) -> impl Iterator<Item = CandidateWithPrivate<'db>> {
+ self.probe_op(mode, self_ty, ProbeAllChoice::new()).candidates.into_inner().into_values()
+ }
+
+ /// Shared driver behind `probe_for_name`/`probe_all`: canonicalizes
+ /// `self_ty`, computes the autoderef steps (trivial in `Mode::Path`),
+ /// bails out if autoderef ended on an unresolved/error type, then runs
+ /// candidate assembly and `Choice::choose` inside a probe snapshot so no
+ /// inference side effects leak out.
+ fn probe_op<Choice: ProbeChoice<'db>>(
+ &self,
+ mode: Mode,
+ self_ty: Ty<'db>,
+ choice: Choice,
+ ) -> Choice::FinalChoice {
+ let mut orig_values = OriginalQueryValues::default();
+ let query_input = self.infcx.canonicalize_query(self_ty, &mut orig_values);
+ let steps = match mode {
+ Mode::MethodCall => self.method_autoderef_steps(&query_input),
+ Mode::Path => self.infcx.probe(|_| {
+ // Mode::Path - the deref steps is "trivial". This turns
+ // our CanonicalQuery into a "trivial" QueryResponse. This
+ // is a bit inefficient, but I don't think that writing
+ // special handling for this "trivial case" is a good idea.
+
+ let infcx = self.infcx;
+ let (self_ty, var_values) = infcx.instantiate_canonical(&query_input);
+ debug!(?self_ty, ?query_input, "probe_op: Mode::Path");
+ MethodAutoderefStepsResult {
+ steps: smallvec![CandidateStep {
+ self_ty: self
+ .infcx
+ .make_query_response_ignoring_pending_obligations(var_values, self_ty),
+ self_ty_is_opaque: false,
+ autoderefs: 0,
+ from_unsafe_deref: false,
+ unsize: false,
+ reachable_via_deref: true,
+ }],
+ opt_bad_ty: None,
+ reached_recursion_limit: false,
+ }
+ }),
+ };
+
+ if steps.reached_recursion_limit {
+ // FIXME: Report an error.
+ }
+
+ // If we encountered an `_` type or an error type during autoderef, this is
+ // ambiguous.
+ if let Some(bad_ty) = &steps.opt_bad_ty {
+ if bad_ty.reached_raw_pointer
+ && !self.unstable_features.arbitrary_self_types_pointers
+ && self.edition.at_least_2018()
+ {
+ // this case used to be allowed by the compiler,
+ // so we do a future-compat lint here for the 2015 edition
+ // (see https://github.com/rust-lang/rust/issues/46906)
+ // FIXME: Emit the lint.
+ // self.tcx.node_span_lint(
+ // lint::builtin::TYVAR_BEHIND_RAW_POINTER,
+ // scope_expr_id,
+ // span,
+ // |lint| {
+ // lint.primary_message("type annotations needed");
+ // },
+ // );
+ } else {
+ // Ended up encountering a type variable when doing autoderef,
+ // but it may not be a type variable after processing obligations
+ // in our local `FnCtxt`, so don't call `structurally_resolve_type`.
+ let ty = &bad_ty.ty;
+ let ty = self
+ .infcx
+ .instantiate_query_response_and_region_obligations(
+ &ObligationCause::new(),
+ self.env.env,
+ &orig_values,
+ ty,
+ )
+ .unwrap_or_else(|_| panic!("instantiating {:?} failed?", ty));
+ let ty = self.infcx.resolve_vars_if_possible(ty.value);
+ match ty.kind() {
+ TyKind::Infer(InferTy::TyVar(_)) => {
+ // FIXME: Report "type annotations needed" error.
+ }
+ TyKind::Error(_) => {}
+ _ => panic!("unexpected bad final type in method autoderef"),
+ };
+ return Choice::final_choice_from_err(MethodError::ErrorReported);
+ }
+ }
+
+ debug!("ProbeContext: steps for self_ty={:?} are {:?}", self_ty, steps);
+
+ // this creates one big transaction so that all type variables etc
+ // that we create during the probe process are removed later
+ self.infcx.probe(|_| {
+ let mut probe_cx = ProbeContext::new(self, mode, &orig_values, &steps.steps, choice);
+
+ probe_cx.assemble_inherent_candidates();
+ probe_cx.assemble_extension_candidates_for_traits_in_scope();
+ Choice::choose(probe_cx)
+ })
+ }
+
+ /// Computes the chain of autoderef steps for `self_ty` by following
+ /// `Receiver`/`Deref` impls (which chain depends on the
+ /// `arbitrary_self_types` features), appending a final array-to-slice
+ /// unsize step when the chain ends in `[T; N]`. Runs inside a probe so
+ /// inference state is unaffected.
+ fn method_autoderef_steps(
+ &self,
+ self_ty: &Canonical<'db, Ty<'db>>,
+ ) -> MethodAutoderefStepsResult<'db> {
+ self.infcx.probe(|_| {
+ debug!("method_autoderef_steps({:?})", self_ty);
+
+ // We accept not-yet-defined opaque types in the autoderef
+ // chain to support recursive calls. We do error if the final
+ // infer var is not an opaque.
+ let infcx = self.infcx;
+ let (self_ty, inference_vars) = infcx.instantiate_canonical(self_ty);
+ let self_ty_is_opaque = |ty: Ty<'_>| {
+ if let TyKind::Infer(InferTy::TyVar(vid)) = ty.kind() {
+ infcx.has_opaques_with_sub_unified_hidden_type(vid)
+ } else {
+ false
+ }
+ };
+
+ // If arbitrary self types is not enabled, we follow the chain of
+ // `Deref<Target=T>`. If arbitrary self types is enabled, we instead
+ // follow the chain of `Receiver<Target=T>`, but we also record whether
+ // such types are reachable by following the (potentially shorter)
+ // chain of `Deref<Target=T>`. We will use the first list when finding
+ // potentially relevant function implementations (e.g. relevant impl blocks)
+ // but the second list when determining types that the receiver may be
+ // converted to, in order to find out which of those methods might actually
+ // be callable.
+ let mut autoderef_via_deref =
+ Autoderef::new(infcx, self.env, self_ty).include_raw_pointers();
+
+ let mut reached_raw_pointer = false;
+ let arbitrary_self_types_enabled = self.unstable_features.arbitrary_self_types
+ || self.unstable_features.arbitrary_self_types_pointers;
+ let (mut steps, reached_recursion_limit) = if arbitrary_self_types_enabled {
+ // Zip the (longer) Receiver chain with `true` for the prefix that
+ // the (shorter) Deref chain also reaches, `false` afterwards.
+ let reachable_via_deref =
+ autoderef_via_deref.by_ref().map(|_| true).chain(std::iter::repeat(false));
+
+ let mut autoderef_via_receiver = Autoderef::new(infcx, self.env, self_ty)
+ .include_raw_pointers()
+ .use_receiver_trait();
+ let steps = autoderef_via_receiver
+ .by_ref()
+ .zip(reachable_via_deref)
+ .map(|((ty, d), reachable_via_deref)| {
+ let step = CandidateStep {
+ self_ty: infcx.make_query_response_ignoring_pending_obligations(
+ inference_vars,
+ ty,
+ ),
+ self_ty_is_opaque: self_ty_is_opaque(ty),
+ autoderefs: d,
+ from_unsafe_deref: reached_raw_pointer,
+ unsize: false,
+ reachable_via_deref,
+ };
+ if ty.is_raw_ptr() {
+ // all the subsequent steps will be from_unsafe_deref
+ reached_raw_pointer = true;
+ }
+ step
+ })
+ .collect::<SmallVec<[_; _]>>();
+ (steps, autoderef_via_receiver.reached_recursion_limit())
+ } else {
+ let steps = autoderef_via_deref
+ .by_ref()
+ .map(|(ty, d)| {
+ let step = CandidateStep {
+ self_ty: infcx.make_query_response_ignoring_pending_obligations(
+ inference_vars,
+ ty,
+ ),
+ self_ty_is_opaque: self_ty_is_opaque(ty),
+ autoderefs: d,
+ from_unsafe_deref: reached_raw_pointer,
+ unsize: false,
+ reachable_via_deref: true,
+ };
+ if ty.is_raw_ptr() {
+ // all the subsequent steps will be from_unsafe_deref
+ reached_raw_pointer = true;
+ }
+ step
+ })
+ .collect();
+ (steps, autoderef_via_deref.reached_recursion_limit())
+ };
+ let final_ty = autoderef_via_deref.final_ty();
+ let opt_bad_ty = match final_ty.kind() {
+ // A plain unresolved type variable (not an opaque's hidden type)
+ // ends the chain ambiguously.
+ TyKind::Infer(InferTy::TyVar(_)) if !self_ty_is_opaque(final_ty) => {
+ Some(MethodAutoderefBadTy {
+ reached_raw_pointer,
+ ty: infcx.make_query_response_ignoring_pending_obligations(
+ inference_vars,
+ final_ty,
+ ),
+ })
+ }
+ TyKind::Error(_) => Some(MethodAutoderefBadTy {
+ reached_raw_pointer,
+ ty: infcx
+ .make_query_response_ignoring_pending_obligations(inference_vars, final_ty),
+ }),
+ TyKind::Array(elem_ty, _) => {
+ let autoderefs = steps.iter().filter(|s| s.reachable_via_deref).count() - 1;
+ steps.push(CandidateStep {
+ self_ty: infcx.make_query_response_ignoring_pending_obligations(
+ inference_vars,
+ Ty::new_slice(infcx.interner, elem_ty),
+ ),
+ self_ty_is_opaque: false,
+ autoderefs,
+ // this could be from an unsafe deref if we had
+ // a *mut/const [T; N]
+ from_unsafe_deref: reached_raw_pointer,
+ unsize: true,
+ reachable_via_deref: true, // this is always the final type from
+ // autoderef_via_deref
+ });
+
+ None
+ }
+ _ => None,
+ };
+
+ debug!("method_autoderef_steps: steps={:?} opt_bad_ty={:?}", steps, opt_bad_ty);
+ MethodAutoderefStepsResult { steps, opt_bad_ty, reached_recursion_limit }
+ })
+ }
+}
+
+/// Strategy parameter for `ProbeContext`: `ProbeForNameChoice` picks a single
+/// best candidate by name, while `ProbeAllChoice` collects every candidate.
+trait ProbeChoice<'db>: Sized {
+ /// Intermediate result; `ControlFlow::Break(Choice)` short-circuits the search.
+ type Choice;
+ /// The value returned from the whole probe.
+ type FinalChoice;
+
+ /// Finds the method with the appropriate name (or return type, as the case may be).
+ // The length of the returned iterator is nearly always 0 or 1 and this
+ // method is fairly hot.
+ fn with_impl_or_trait_item<'a>(
+ this: &mut ProbeContext<'a, 'db, Self>,
+ items: &[(Name, AssocItemId)],
+ callback: impl FnMut(&mut ProbeContext<'a, 'db, Self>, CandidateId),
+ );
+
+ /// Processes the applicable candidates for one receiver type.
+ fn consider_candidates(
+ this: &ProbeContext<'_, 'db, Self>,
+ self_ty: Ty<'db>,
+ candidates: Vec<&Candidate<'db>>,
+ ) -> ControlFlow<Self::Choice>;
+
+ /// Processes candidates that failed the visibility check.
+ fn consider_private_candidates(
+ this: &mut ProbeContext<'_, 'db, Self>,
+ self_ty: Ty<'db>,
+ instantiate_self_ty_obligations: &[PredicateObligation<'db>],
+ );
+
+ /// Applies `f` to the pick carried by `choice`, if there is one.
+ fn map_choice_pick(
+ choice: Self::Choice,
+ f: impl FnOnce(Pick<'db>) -> Pick<'db>,
+ ) -> Self::Choice;
+
+ /// Checks whether a by-value pick shadows an autoref'd method.
+ fn check_by_value_method_shadowing(
+ this: &mut ProbeContext<'_, 'db, Self>,
+ by_value_pick: &Self::Choice,
+ step: &CandidateStep<'db>,
+ self_ty: Ty<'db>,
+ instantiate_self_ty_obligations: &[PredicateObligation<'db>],
+ ) -> ControlFlow<Self::Choice>;
+
+ /// Checks whether an autoref'd pick shadows another autoref'd method.
+ fn check_autorefed_method_shadowing(
+ this: &mut ProbeContext<'_, 'db, Self>,
+ autoref_pick: &Self::Choice,
+ step: &CandidateStep<'db>,
+ self_ty: Ty<'db>,
+ instantiate_self_ty_obligations: &[PredicateObligation<'db>],
+ ) -> ControlFlow<Self::Choice>;
+
+ /// Converts a method-resolution error into the final result.
+ fn final_choice_from_err(err: MethodError<'db>) -> Self::FinalChoice;
+
+ /// Runs the pick phase and produces the final result.
+ fn choose(this: ProbeContext<'_, 'db, Self>) -> Self::FinalChoice;
+}
+
+/// `ProbeChoice` strategy that resolves a single item by name (`probe_for_name`).
+#[derive(Debug)]
+struct ProbeForNameChoice<'db> {
+ /// The name being resolved.
+ item_name: Name,
+
+ /// Some(candidate) if there is a private candidate
+ private_candidate: Option<Pick<'db>>,
+}
+
+impl<'db> ProbeChoice<'db> for ProbeForNameChoice<'db> {
+ type Choice = PickResult<'db>;
+ type FinalChoice = PickResult<'db>;
+
+ // Finds at most one item matching `item_name`; in `MethodCall` mode only
+ // functions qualify, while `Path` mode also admits consts.
+ fn with_impl_or_trait_item<'a>(
+ this: &mut ProbeContext<'a, 'db, Self>,
+ items: &[(Name, AssocItemId)],
+ mut callback: impl FnMut(&mut ProbeContext<'a, 'db, Self>, CandidateId),
+ ) {
+ let item = items
+ .iter()
+ .filter_map(|(name, id)| {
+ let id = match *id {
+ AssocItemId::FunctionId(id) => id.into(),
+ AssocItemId::ConstId(id) => id.into(),
+ // Type aliases are never method/path-value candidates.
+ AssocItemId::TypeAliasId(_) => return None,
+ };
+ Some((name, id))
+ })
+ .find(|(name, _)| **name == this.choice.item_name)
+ .map(|(_, id)| id)
+ .filter(|id| this.mode == Mode::Path || matches!(id, CandidateId::FunctionId(_)));
+ if let Some(item) = item {
+ callback(this, item);
+ }
+ }
+
+ // Resolves multiple applicable candidates to a single pick if possible,
+ // otherwise reports ambiguity; continues the search when no candidate applies.
+ fn consider_candidates(
+ this: &ProbeContext<'_, 'db, Self>,
+ self_ty: Ty<'db>,
+ mut applicable_candidates: Vec<&Candidate<'db>>,
+ ) -> ControlFlow<Self::Choice> {
+ if applicable_candidates.len() > 1
+ && let Some(pick) =
+ this.collapse_candidates_to_trait_pick(self_ty, &applicable_candidates)
+ {
+ return ControlFlow::Break(Ok(pick));
+ }
+
+ if applicable_candidates.len() > 1 {
+ // We collapse to a subtrait pick *after* filtering unstable candidates
+ // to make sure we don't prefer a unstable subtrait method over a stable
+ // supertrait method.
+ if this.ctx.unstable_features.supertrait_item_shadowing
+ && let Some(pick) =
+ this.collapse_candidates_to_subtrait_pick(self_ty, &applicable_candidates)
+ {
+ return ControlFlow::Break(Ok(pick));
+ }
+
+ let sources =
+ applicable_candidates.iter().map(|p| this.candidate_source(p, self_ty)).collect();
+ return ControlFlow::Break(Err(MethodError::Ambiguity(sources)));
+ }
+
+ match applicable_candidates.pop() {
+ Some(probe) => ControlFlow::Break(Ok(probe.to_unadjusted_pick(self_ty))),
+ None => ControlFlow::Continue(()),
+ }
+ }
+
+ // Remembers the first private pick (for diagnostics); note this calls the
+ // inherent `ProbeContext::consider_candidates` helper, not the trait method.
+ fn consider_private_candidates(
+ this: &mut ProbeContext<'_, 'db, Self>,
+ self_ty: Ty<'db>,
+ instantiate_self_ty_obligations: &[PredicateObligation<'db>],
+ ) {
+ if this.choice.private_candidate.is_none()
+ && let ControlFlow::Break(Ok(pick)) = this.consider_candidates(
+ self_ty,
+ instantiate_self_ty_obligations,
+ &this.private_candidates,
+ None,
+ )
+ {
+ this.choice.private_candidate = Some(pick);
+ }
+ }
+
+ fn map_choice_pick(
+ choice: Self::Choice,
+ f: impl FnOnce(Pick<'db>) -> Pick<'db>,
+ ) -> Self::Choice {
+ choice.map(f)
+ }
+
+ // A by-value inherent pick must not shadow a `&self`/`&mut self` method
+ // further along the receiver chain.
+ fn check_by_value_method_shadowing(
+ this: &mut ProbeContext<'_, 'db, Self>,
+ by_value_pick: &Self::Choice,
+ step: &CandidateStep<'db>,
+ self_ty: Ty<'db>,
+ instantiate_self_ty_obligations: &[PredicateObligation<'db>],
+ ) -> ControlFlow<Self::Choice> {
+ if let Ok(by_value_pick) = by_value_pick
+ && matches!(by_value_pick.kind, PickKind::InherentImplPick(_))
+ {
+ for mutbl in [Mutability::Not, Mutability::Mut] {
+ if let Err(e) = this.check_for_shadowed_autorefd_method(
+ by_value_pick,
+ step,
+ self_ty,
+ instantiate_self_ty_obligations,
+ mutbl,
+ ) {
+ return ControlFlow::Break(Err(e));
+ }
+ }
+ }
+ ControlFlow::Continue(())
+ }
+
+ // A `&self` inherent pick must not shadow a `&mut self` method further
+ // along the receiver chain.
+ fn check_autorefed_method_shadowing(
+ this: &mut ProbeContext<'_, 'db, Self>,
+ autoref_pick: &Self::Choice,
+ step: &CandidateStep<'db>,
+ self_ty: Ty<'db>,
+ instantiate_self_ty_obligations: &[PredicateObligation<'db>],
+ ) -> ControlFlow<Self::Choice> {
+ if let Ok(autoref_pick) = autoref_pick.as_ref() {
+ // Check we're not shadowing others
+ if matches!(autoref_pick.kind, PickKind::InherentImplPick(_))
+ && let Err(e) = this.check_for_shadowed_autorefd_method(
+ autoref_pick,
+ step,
+ self_ty,
+ instantiate_self_ty_obligations,
+ Mutability::Mut,
+ )
+ {
+ return ControlFlow::Break(Err(e));
+ }
+ }
+ ControlFlow::Continue(())
+ }
+
+ fn final_choice_from_err(err: MethodError<'db>) -> Self::FinalChoice {
+ Err(err)
+ }
+
+ fn choose(this: ProbeContext<'_, 'db, Self>) -> Self::FinalChoice {
+ this.pick()
+ }
+}
+
+/// `ProbeChoice` strategy that collects every candidate (`probe_all`).
+#[derive(Debug)]
+struct ProbeAllChoice<'db> {
+ /// All candidates seen so far, deduplicated by item id.
+ /// `RefCell` because `consider_candidates` only gets `&ProbeContext`.
+ candidates: RefCell<FxHashMap<CandidateId, CandidateWithPrivate<'db>>>,
+ /// Flag toggled off while private candidates are being considered, so
+ /// entries get the right `is_visible` value.
+ considering_visible_candidates: bool,
+}
+
+impl ProbeAllChoice<'_> {
+ /// Creates an empty collector, starting in "visible candidates" mode.
+ fn new() -> Self {
+ Self { candidates: RefCell::default(), considering_visible_candidates: true }
+ }
+}
+
+impl<'db> ProbeChoice<'db> for ProbeAllChoice<'db> {
+ // `Infallible`: enumeration never short-circuits.
+ type Choice = Infallible;
+ type FinalChoice = Self;
+
+ // Unlike `ProbeForNameChoice`, forwards every mode-relevant item.
+ fn with_impl_or_trait_item<'a>(
+ this: &mut ProbeContext<'a, 'db, Self>,
+ items: &[(Name, AssocItemId)],
+ mut callback: impl FnMut(&mut ProbeContext<'a, 'db, Self>, CandidateId),
+ ) {
+ let mode = this.mode;
+ items
+ .iter()
+ .filter_map(|(_, id)| is_relevant_kind_for_mode(mode, *id))
+ .for_each(|id| callback(this, id));
+ }
+
+ // Records every candidate; always continues the search.
+ fn consider_candidates(
+ this: &ProbeContext<'_, 'db, Self>,
+ _self_ty: Ty<'db>,
+ candidates: Vec<&Candidate<'db>>,
+ ) -> ControlFlow<Self::Choice> {
+ let is_visible = this.choice.considering_visible_candidates;
+ let mut all_candidates = this.choice.candidates.borrow_mut();
+ for candidate in candidates {
+ // We should not override existing entries, because inherent methods of trait objects (from the principal)
+ // are also visited as trait methods, and we want to consider them inherent.
+ all_candidates
+ .entry(candidate.item)
+ .or_insert(CandidateWithPrivate { candidate: candidate.clone(), is_visible });
+ }
+ ControlFlow::Continue(())
+ }
+
+ // Records private candidates too, temporarily flipping the visibility
+ // flag. Calls the inherent `ProbeContext::consider_candidates` helper.
+ fn consider_private_candidates(
+ this: &mut ProbeContext<'_, 'db, Self>,
+ self_ty: Ty<'db>,
+ instantiate_self_ty_obligations: &[PredicateObligation<'db>],
+ ) {
+ this.choice.considering_visible_candidates = false;
+ let ControlFlow::Continue(()) = this.consider_candidates(
+ self_ty,
+ instantiate_self_ty_obligations,
+ &this.private_candidates,
+ None,
+ );
+ this.choice.considering_visible_candidates = true;
+ }
+
+ // No pick is ever produced, so there is nothing to map.
+ fn map_choice_pick(
+ choice: Self::Choice,
+ _f: impl FnOnce(Pick<'db>) -> Pick<'db>,
+ ) -> Self::Choice {
+ choice
+ }
+
+ // Shadowing checks are irrelevant when enumerating everything.
+ fn check_by_value_method_shadowing(
+ _this: &mut ProbeContext<'_, 'db, Self>,
+ _by_value_pick: &Self::Choice,
+ _step: &CandidateStep<'db>,
+ _self_ty: Ty<'db>,
+ _instantiate_self_ty_obligations: &[PredicateObligation<'db>],
+ ) -> ControlFlow<Self::Choice> {
+ ControlFlow::Continue(())
+ }
+
+ fn check_autorefed_method_shadowing(
+ _this: &mut ProbeContext<'_, 'db, Self>,
+ _autoref_pick: &Self::Choice,
+ _step: &CandidateStep<'db>,
+ _self_ty: Ty<'db>,
+ _instantiate_self_ty_obligations: &[PredicateObligation<'db>],
+ ) -> ControlFlow<Self::Choice> {
+ ControlFlow::Continue(())
+ }
+
+ // Errors yield an empty candidate set.
+ fn final_choice_from_err(_err: MethodError<'db>) -> Self::FinalChoice {
+ Self::new()
+ }
+
+ fn choose(mut this: ProbeContext<'_, 'db, Self>) -> Self::FinalChoice {
+ let ControlFlow::Continue(()) = this.pick_all_method();
+ this.choice
+ }
+}
+
+impl<'a, 'db, Choice: ProbeChoice<'db>> ProbeContext<'a, 'db, Choice> {
+ /// Creates a fresh probe context over the precomputed autoderef `steps`,
+ /// with empty candidate lists.
+ fn new(
+ ctx: &'a MethodResolutionContext<'a, 'db>,
+ mode: Mode,
+ orig_steps_var_values: &'a OriginalQueryValues<'db>,
+ steps: &'a [CandidateStep<'db>],
+ choice: Choice,
+ ) -> ProbeContext<'a, 'db, Choice> {
+ ProbeContext {
+ ctx,
+ mode,
+ inherent_candidates: Vec::new(),
+ extension_candidates: Vec::new(),
+ impl_dups: FxHashSet::default(),
+ orig_steps_var_values,
+ steps,
+ private_candidates: Vec::new(),
+ static_candidates: Vec::new(),
+ choice,
+ }
+ }
+
+ /// Shorthand for the underlying database.
+ #[inline]
+ fn db(&self) -> &'db dyn HirDatabase {
+ self.ctx.infcx.interner.db
+ }
+
+ /// Shorthand for the interner.
+ #[inline]
+ fn interner(&self) -> DbInterner<'db> {
+ self.ctx.infcx.interner
+ }
+
+ /// Shorthand for the inference context.
+ #[inline]
+ fn infcx(&self) -> &'a InferCtxt<'db> {
+ self.ctx.infcx
+ }
+
+ /// Shorthand for the parameter environment.
+ #[inline]
+ fn param_env(&self) -> ParamEnv<'db> {
+ self.ctx.env.env
+ }
+
+ /// When we're looking up a method by path (UFCS), we relate the receiver
+ /// types invariantly. When we are looking up a method by the `.` operator,
+ /// we relate them covariantly.
+ fn variance(&self) -> Variance {
+ match self.mode {
+ Mode::MethodCall => Variance::Covariant,
+ Mode::Path => Variance::Invariant,
+ }
+ }
+
+ ///////////////////////////////////////////////////////////////////////////
+ // CANDIDATE ASSEMBLY
+
+ /// Files `candidate` into the inherent, extension, or private list.
+ /// Only inherent candidates get a visibility check; extension candidates
+ /// are reachable via a trait in scope and are always considered accessible.
+ fn push_candidate(&mut self, candidate: Candidate<'db>, is_inherent: bool) {
+ let is_accessible = if is_inherent {
+ let candidate_id = match candidate.item {
+ CandidateId::FunctionId(id) => id.into(),
+ CandidateId::ConstId(id) => id.into(),
+ };
+ let visibility = self.db().assoc_visibility(candidate_id);
+ self.ctx.resolver.is_visible(self.db(), visibility)
+ } else {
+ true
+ };
+ if is_accessible {
+ if is_inherent {
+ self.inherent_candidates.push(candidate);
+ } else {
+ self.extension_candidates.push(candidate);
+ }
+ } else {
+ self.private_candidates.push(candidate);
+ }
+ }
+
+ /// Assembles inherent candidates for every step of the autoderef chain.
+ fn assemble_inherent_candidates(&mut self) {
+ for step in self.steps.iter() {
+ self.assemble_probe(&step.self_ty, step.autoderefs);
+ }
+ }
+
+ #[instrument(level = "debug", skip(self))]
+ fn assemble_probe(
+ &mut self,
+ self_ty: &Canonical<'db, QueryResponse<'db, Ty<'db>>>,
+ receiver_steps: usize,
+ ) {
+ let raw_self_ty = self_ty.value.value;
+ match raw_self_ty.kind() {
+ TyKind::Dynamic(data, ..) => {
+ if let Some(p) = data.principal() {
+ // Subtle: we can't use `instantiate_query_response` here: using it will
+ // commit to all of the type equalities assumed by inference going through
+ // autoderef (see the `method-probe-no-guessing` test).
+ //
+ // However, in this code, it is OK if we end up with an object type that is
+ // "more general" than the object type that we are evaluating. For *every*
+ // object type `MY_OBJECT`, a function call that goes through a trait-ref
+ // of the form `<MY_OBJECT as SuperTraitOf(MY_OBJECT)>::func` is a valid
+ // `ObjectCandidate`, and it should be discoverable "exactly" through one
+ // of the iterations in the autoderef loop, so there is no problem with it
+ // being discoverable in another one of these iterations.
+ //
+ // Using `instantiate_canonical` on our
+ // `Canonical<QueryResponse<Ty<'db>>>` and then *throwing away* the
+ // `CanonicalVarValues` will exactly give us such a generalization - it
+ // will still match the original object type, but it won't pollute our
+ // type variables in any form, so just do that!
+ let (QueryResponse { value: generalized_self_ty, .. }, _ignored_var_values) =
+ self.infcx().instantiate_canonical(self_ty);
+
+ self.assemble_inherent_candidates_from_object(generalized_self_ty);
+ self.assemble_inherent_impl_candidates_for_type(
+ &SimplifiedType::Trait(p.def_id().0.into()),
+ receiver_steps,
+ );
+ self.assemble_inherent_candidates_for_incoherent_ty(
+ raw_self_ty,
+ receiver_steps,
+ );
+ }
+ }
+ TyKind::Adt(def, _) => {
+ let def_id = def.def_id().0;
+ self.assemble_inherent_impl_candidates_for_type(
+ &SimplifiedType::Adt(def_id.into()),
+ receiver_steps,
+ );
+ self.assemble_inherent_candidates_for_incoherent_ty(raw_self_ty, receiver_steps);
+ }
+ TyKind::Foreign(did) => {
+ self.assemble_inherent_impl_candidates_for_type(
+ &SimplifiedType::Foreign(did.0.into()),
+ receiver_steps,
+ );
+ self.assemble_inherent_candidates_for_incoherent_ty(raw_self_ty, receiver_steps);
+ }
+ TyKind::Param(_) => {
+ self.assemble_inherent_candidates_from_param(raw_self_ty);
+ }
+ TyKind::Bool
+ | TyKind::Char
+ | TyKind::Int(_)
+ | TyKind::Uint(_)
+ | TyKind::Float(_)
+ | TyKind::Str
+ | TyKind::Array(..)
+ | TyKind::Slice(_)
+ | TyKind::RawPtr(_, _)
+ | TyKind::Ref(..)
+ | TyKind::Never
+ | TyKind::Tuple(..) => {
+ self.assemble_inherent_candidates_for_incoherent_ty(raw_self_ty, receiver_steps)
+ }
+ _ => {}
+ }
+ }
+
+ /// Assembles candidates from "incoherent" inherent impls, i.e. impls of
+ /// `self_ty` that live outside the type's defining crate (e.g. via
+ /// `rustc_has_incoherent_inherent_impls`-style lookups).
+ ///
+ /// Panics if `self_ty` cannot be simplified — callers only pass types
+ /// whose kind is known to simplify.
+ fn assemble_inherent_candidates_for_incoherent_ty(
+ &mut self,
+ self_ty: Ty<'db>,
+ receiver_steps: usize,
+ ) {
+ let Some(simp) = simplify_type(self.interner(), self_ty, TreatParams::InstantiateWithInfer)
+ else {
+ panic!("unexpected incoherent type: {:?}", self_ty)
+ };
+ for &impl_def_id in incoherent_inherent_impls(self.db(), simp) {
+ self.assemble_inherent_impl_probe(impl_def_id, receiver_steps);
+ }
+ }
+
+ /// Assembles candidates from the inherent impls of `self_ty` found in its
+ /// defining crate (and enclosing block scopes, if any).
+ fn assemble_inherent_impl_candidates_for_type(
+ &mut self,
+ self_ty: &SimplifiedType,
+ receiver_steps: usize,
+ ) {
+ let Some(module) = simplified_type_module(self.db(), self_ty) else {
+ return;
+ };
+ InherentImpls::for_each_crate_and_block(
+ self.db(),
+ module.krate(),
+ module.containing_block(),
+ &mut |impls| {
+ for &impl_def_id in impls.for_self_ty(self_ty) {
+ self.assemble_inherent_impl_probe(impl_def_id, receiver_steps);
+ }
+ },
+ );
+ }
+
+ #[instrument(level = "debug", skip(self))]
+ fn assemble_inherent_impl_probe(&mut self, impl_def_id: ImplId, receiver_steps: usize) {
+ if !self.impl_dups.insert(impl_def_id) {
+ return; // already visited
+ }
+
+ self.with_impl_item(impl_def_id, |this, item| {
+ if !this.has_applicable_self(item) {
+ // No receiver declared. Not a candidate.
+ this.record_static_candidate(CandidateSource::Impl(impl_def_id));
+ return;
+ }
+ this.push_candidate(
+ Candidate { item, kind: InherentImplCandidate { impl_def_id, receiver_steps } },
+ true,
+ );
+ });
+ }
+
+ /// Assembles `ObjectCandidate`s for a `dyn Trait` receiver from the principal
+ /// trait and all of its supertraits.
+ ///
+ /// Panics if `self_ty` is not a trait object with a principal — callers only
+ /// invoke this for `TyKind::Dynamic` self types.
+ #[instrument(level = "debug", skip(self))]
+ fn assemble_inherent_candidates_from_object(&mut self, self_ty: Ty<'db>) {
+ let principal = match self_ty.kind() {
+ TyKind::Dynamic(data, ..) => Some(data),
+ _ => None,
+ }
+ .and_then(|data| data.principal())
+ .unwrap_or_else(|| {
+ panic!("non-object {:?} in assemble_inherent_candidates_from_object", self_ty)
+ });
+
+ // It is illegal to invoke a method on a trait instance that refers to
+ // the `Self` type. A [`DynCompatibilityViolation::SupertraitSelf`] error
+ // will be reported by `dyn_compatibility.rs` if the method refers to the
+ // `Self` type anywhere other than the receiver. Here, we use an
+ // instantiation that replaces `Self` with the object type itself. Hence,
+ // a `&self` method will wind up with an argument type like `&dyn Trait`.
+ let trait_ref = principal.with_self_ty(self.interner(), self_ty);
+ self.assemble_candidates_for_bounds(
+ elaborate::supertraits(self.interner(), trait_ref),
+ |this, new_trait_ref, item| {
+ this.push_candidate(Candidate { item, kind: ObjectCandidate(new_trait_ref) }, true);
+ },
+ );
+ }
+
+ /// Assembles `WhereClauseCandidate`s for a type-parameter receiver from the
+ /// trait bounds in the parameter environment whose self type may unify with
+ /// `param_ty`.
+ #[instrument(level = "debug", skip(self))]
+ fn assemble_inherent_candidates_from_param(&mut self, param_ty: Ty<'db>) {
+ debug_assert!(matches!(param_ty.kind(), TyKind::Param(_)));
+
+ let interner = self.interner();
+
+ // We use `DeepRejectCtxt` here which may return false positive on where clauses
+ // with alias self types. We need to later on reject these as inherent candidates
+ // in `consider_probe`.
+ let bounds = self.param_env().clauses.iter().filter_map(|predicate| {
+ let bound_predicate = predicate.kind();
+ match bound_predicate.skip_binder() {
+ ClauseKind::Trait(trait_predicate) => DeepRejectCtxt::relate_rigid_rigid(interner)
+ .types_may_unify(param_ty, trait_predicate.trait_ref.self_ty())
+ .then(|| bound_predicate.rebind(trait_predicate.trait_ref)),
+ // Only trait clauses can supply method candidates; everything else
+ // is irrelevant to method resolution.
+ ClauseKind::RegionOutlives(_)
+ | ClauseKind::TypeOutlives(_)
+ | ClauseKind::Projection(_)
+ | ClauseKind::ConstArgHasType(_, _)
+ | ClauseKind::WellFormed(_)
+ | ClauseKind::ConstEvaluatable(_)
+ | ClauseKind::UnstableFeature(_)
+ | ClauseKind::HostEffect(..) => None,
+ }
+ });
+
+ self.assemble_candidates_for_bounds(bounds, |this, poly_trait_ref, item| {
+ this.push_candidate(
+ Candidate { item, kind: WhereClauseCandidate(poly_trait_ref) },
+ true,
+ );
+ });
+ }
+
+ /// Does a search through a list of bounds, using a callback to actually
+ /// create the candidates.
+ ///
+ /// Items without an applicable `self` are recorded as static candidates
+ /// for diagnostics instead of being passed to `mk_cand`.
+ fn assemble_candidates_for_bounds<F>(
+ &mut self,
+ bounds: impl Iterator<Item = PolyTraitRef<'db>>,
+ mut mk_cand: F,
+ ) where
+ F: for<'b> FnMut(&mut ProbeContext<'b, 'db, Choice>, PolyTraitRef<'db>, CandidateId),
+ {
+ for bound_trait_ref in bounds {
+ debug!("elaborate_bounds(bound_trait_ref={:?})", bound_trait_ref);
+ self.with_trait_item(bound_trait_ref.def_id().0, |this, item| {
+ if !this.has_applicable_self(item) {
+ this.record_static_candidate(CandidateSource::Trait(
+ bound_trait_ref.def_id().0,
+ ));
+ } else {
+ mk_cand(this, bound_trait_ref, item);
+ }
+ });
+ }
+ }
+
+ /// Assembles extension (trait-method) candidates from every trait that is
+ /// in scope at the call site.
+ #[instrument(level = "debug", skip(self))]
+ fn assemble_extension_candidates_for_traits_in_scope(&mut self) {
+ for &trait_did in self.ctx.traits_in_scope {
+ self.assemble_extension_candidates_for_trait(trait_did);
+ }
+ }
+
+ /// Assembles `TraitCandidate`s for every applicable item of `trait_def_id`,
+ /// instantiating the trait with fresh inference variables.
+ #[instrument(level = "debug", skip(self))]
+ fn assemble_extension_candidates_for_trait(&mut self, trait_def_id: TraitId) {
+ let trait_args = self.infcx().fresh_args_for_item(trait_def_id.into());
+ let trait_ref = TraitRef::new_from_args(self.interner(), trait_def_id.into(), trait_args);
+
+ self.with_trait_item(trait_def_id, |this, item| {
+ // Check whether `trait_def_id` defines a method with suitable name.
+ if !this.has_applicable_self(item) {
+ debug!("method has inapplicable self");
+ this.record_static_candidate(CandidateSource::Trait(trait_def_id));
+ return;
+ }
+ this.push_candidate(
+ Candidate { item, kind: TraitCandidate(Binder::dummy(trait_ref)) },
+ false,
+ );
+ });
+ }
+}
+
+///////////////////////////////////////////////////////////////////////////
+// THE ACTUAL SEARCH
+impl<'a, 'db> ProbeContext<'a, 'db, ProbeForNameChoice<'db>> {
+ /// Runs the actual pick search, falling back to diagnostic errors
+ /// (private-match or no-match) when the search yields nothing.
+ #[instrument(level = "debug", skip(self))]
+ fn pick(mut self) -> PickResult<'db> {
+ if let Some(r) = self.pick_core() {
+ return r;
+ }
+
+ debug!("pick: actual search failed, assemble diagnostics");
+
+ if let Some(candidate) = self.choice.private_candidate {
+ return Err(MethodError::PrivateMatch(candidate));
+ }
+
+ Err(MethodError::NoMatch)
+ }
+
+ /// Core of the search: returns `Some` as soon as any autoderef step
+ /// produced a pick result, `None` if the whole chain was exhausted.
+ fn pick_core(&mut self) -> Option<PickResult<'db>> {
+ self.pick_all_method().break_value()
+ }
+
+ /// Check for cases where arbitrary self types allows shadowing
+ /// of methods that might be a compatibility break. Specifically,
+ /// we have something like:
+ /// ```ignore (illustrative)
+ /// struct A;
+ /// impl A {
+ /// fn foo(self: &NonNull<A>) {}
+ /// // note this is by reference
+ /// }
+ /// ```
+ /// then we've come along and added this method to `NonNull`:
+ /// ```ignore (illustrative)
+ /// fn foo(self) // note this is by value
+ /// ```
+ /// Report an error in this case.
+ fn check_for_shadowed_autorefd_method(
+ &mut self,
+ possible_shadower: &Pick<'db>,
+ step: &CandidateStep<'db>,
+ self_ty: Ty<'db>,
+ instantiate_self_ty_obligations: &[PredicateObligation<'db>],
+ mutbl: Mutability,
+ ) -> Result<(), MethodError<'db>> {
+ // The errors emitted by this function are part of
+ // the arbitrary self types work, and should not impact
+ // other users.
+ if !self.ctx.unstable_features.arbitrary_self_types
+ && !self.ctx.unstable_features.arbitrary_self_types_pointers
+ {
+ return Ok(());
+ }
+
+ // Set criteria for how we find methods possibly shadowed by 'possible_shadower'
+ let pick_constraints = PickConstraintsForShadowed {
+ // It's the same `self` type...
+ autoderefs: possible_shadower.autoderefs,
+ // ... but the method was found in an impl block determined
+ // by searching further along the Receiver chain than the other,
+ // showing that it's a smart pointer type causing the problem...
+ receiver_steps: possible_shadower.receiver_steps,
+ // ... and they don't end up pointing to the same item in the
+ // first place (could happen with things like blanket impls for T)
+ def_id: possible_shadower.item,
+ };
+ // A note on the autoderefs above. Within pick_by_value_method, an extra
+ // autoderef may be applied in order to reborrow a reference with
+ // a different lifetime. That seems as though it would break the
+ // logic of these constraints, since the number of autoderefs could
+ // no longer be used to identify the fundamental type of the receiver.
+ // However, this extra autoderef is applied only to by-value calls
+ // where the receiver is already a reference. So this situation would
+ // only occur in cases where the shadowing looks like this:
+ // ```
+ // struct A;
+ // impl A {
+ // fn foo(self: &&NonNull<A>) {}
+ // // note this is by DOUBLE reference
+ // }
+ // ```
+ // then we've come along and added this method to `NonNull`:
+ // ```
+ // fn foo(&self) // note this is by single reference
+ // ```
+ // and the call is:
+ // ```
+ // let foo = NonNull<Foo>;
+ // let bar = &foo;
+ // bar.foo();
+ // ```
+ // In these circumstances, the logic is wrong, and we wouldn't spot
+ // the shadowing, because the autoderef-based maths wouldn't line up.
+ // This is a niche case and we can live without generating an error
+ // in the case of such shadowing.
+ let potentially_shadowed_pick = self.pick_autorefd_method(
+ step,
+ self_ty,
+ instantiate_self_ty_obligations,
+ mutbl,
+ Some(&pick_constraints),
+ );
+ // Look for actual pairs of shadower/shadowed which are
+ // the sort of shadowing case we want to avoid. Specifically...
+ if let ControlFlow::Break(Ok(possible_shadowed)) = &potentially_shadowed_pick {
+ let sources = [possible_shadower, possible_shadowed]
+ .into_iter()
+ .map(|p| self.candidate_source_from_pick(p))
+ .collect();
+ return Err(MethodError::Ambiguity(sources));
+ }
+ Ok(())
+ }
+}
+
+impl<'a, 'db, Choice: ProbeChoice<'db>> ProbeContext<'a, 'db, Choice> {
+ /// Walks every reachable autoderef step and, for each, tries picks in
+ /// priority order: by value, by `&`, by `&mut`, then `*mut -> *const`.
+ /// Short-circuits (`ControlFlow::Break`) on the first successful pick,
+ /// running the choice-specific shadowing checks along the way.
+ fn pick_all_method(&mut self) -> ControlFlow<Choice::Choice> {
+ self.steps
+ .iter()
+ // At this point we're considering the types to which the receiver can be converted,
+ // so we want to follow the `Deref` chain not the `Receiver` chain. Filter out
+ // steps which can only be reached by following the (longer) `Receiver` chain.
+ .filter(|step| step.reachable_via_deref)
+ .filter(|step| {
+ debug!("pick_all_method: step={:?}", step);
+ // skip types that are from a type error or that would require dereferencing
+ // a raw pointer
+ !step.self_ty.value.value.references_non_lt_error() && !step.from_unsafe_deref
+ })
+ .try_for_each(|step| {
+ let InferOk { value: self_ty, obligations: instantiate_self_ty_obligations } = self
+ .infcx()
+ .instantiate_query_response_and_region_obligations(
+ &ObligationCause::new(),
+ self.param_env(),
+ self.orig_steps_var_values,
+ &step.self_ty,
+ )
+ .unwrap_or_else(|_| panic!("{:?} was applicable but now isn't?", step.self_ty));
+
+ let by_value_pick =
+ self.pick_by_value_method(step, self_ty, &instantiate_self_ty_obligations);
+
+ // Check for shadowing of a by-reference method by a by-value method (see comments on check_for_shadowing)
+ if let ControlFlow::Break(by_value_pick) = by_value_pick {
+ Choice::check_by_value_method_shadowing(
+ self,
+ &by_value_pick,
+ step,
+ self_ty,
+ &instantiate_self_ty_obligations,
+ )?;
+ return ControlFlow::Break(by_value_pick);
+ }
+
+ let autoref_pick = self.pick_autorefd_method(
+ step,
+ self_ty,
+ &instantiate_self_ty_obligations,
+ Mutability::Not,
+ None,
+ );
+ // Check for shadowing of a by-mut-ref method by a by-reference method (see comments on check_for_shadowing)
+ if let ControlFlow::Break(autoref_pick) = autoref_pick {
+ Choice::check_autorefed_method_shadowing(
+ self,
+ &autoref_pick,
+ step,
+ self_ty,
+ &instantiate_self_ty_obligations,
+ )?;
+ return ControlFlow::Break(autoref_pick);
+ }
+
+ // Note that no shadowing errors are produced from here on,
+ // as we consider const ptr methods.
+ // We allow new methods that take *mut T to shadow
+ // methods which took *const T, so there is no entry in
+ // this list for the results of `pick_const_ptr_method`.
+ // The reason is that the standard pointer cast method
+ // (on a mutable pointer) always already shadows the
+ // cast method (on a const pointer). So, if we added
+ // `pick_const_ptr_method` to this method, the anti-
+ // shadowing algorithm would always complain about
+ // the conflict between *const::cast and *mut::cast.
+ // In practice therefore this does constrain us:
+ // we cannot add new
+ // self: *mut Self
+ // methods to types such as NonNull or anything else
+ // which implements Receiver, because this might in future
+ // shadow existing methods taking
+ // self: *const NonNull<Self>
+ // in the pointee. In practice, methods taking raw pointers
+ // are rare, and it seems that it should be easily possible
+ // to avoid such compatibility breaks.
+ // We also don't check for reborrowed pin methods which
+ // may be shadowed; these also seem unlikely to occur.
+ self.pick_autorefd_method(
+ step,
+ self_ty,
+ &instantiate_self_ty_obligations,
+ Mutability::Mut,
+ None,
+ )?;
+ self.pick_const_ptr_method(step, self_ty, &instantiate_self_ty_obligations)
+ })
+ }
+
+ /// For each type `T` in the step list, this attempts to find a method where
+ /// the (transformed) self type is exactly `T`. We do however do one
+ /// transformation on the adjustment: if we are passing a region pointer in,
+ /// we will potentially *reborrow* it to a shorter lifetime. This allows us
+ /// to transparently pass `&mut` pointers, in particular, without consuming
+ /// them for their entire lifetime.
+ fn pick_by_value_method(
+ &mut self,
+ step: &CandidateStep<'db>,
+ self_ty: Ty<'db>,
+ instantiate_self_ty_obligations: &[PredicateObligation<'db>],
+ ) -> ControlFlow<Choice::Choice> {
+ if step.unsize {
+ return ControlFlow::Continue(());
+ }
+
+ self.pick_method(self_ty, instantiate_self_ty_obligations, None).map_break(|r| {
+ Choice::map_choice_pick(r, |mut pick| {
+ pick.autoderefs = step.autoderefs;
+
+ match step.self_ty.value.value.kind() {
+ // Insert a `&*` or `&mut *` if this is a reference type:
+ TyKind::Ref(_, _, mutbl) => {
+ pick.autoderefs += 1;
+ pick.autoref_or_ptr_adjustment = Some(AutorefOrPtrAdjustment::Autoref {
+ mutbl,
+ unsize: pick.autoref_or_ptr_adjustment.is_some_and(|a| a.get_unsize()),
+ })
+ }
+
+ _ => (),
+ }
+
+ pick
+ })
+ })
+ }
+
+ /// Attempts to pick a method whose self type is `&self_ty` or `&mut self_ty`
+ /// (per `mutbl`), using an erased region for the autoref. The optional
+ /// `pick_constraints` restrict the search to potential shadowing victims
+ /// (see `check_for_shadowed_autorefd_method`).
+ fn pick_autorefd_method(
+ &mut self,
+ step: &CandidateStep<'db>,
+ self_ty: Ty<'db>,
+ instantiate_self_ty_obligations: &[PredicateObligation<'db>],
+ mutbl: Mutability,
+ pick_constraints: Option<&PickConstraintsForShadowed>,
+ ) -> ControlFlow<Choice::Choice> {
+ let interner = self.interner();
+
+ if let Some(pick_constraints) = pick_constraints
+ && !pick_constraints.may_shadow_based_on_autoderefs(step.autoderefs)
+ {
+ return ControlFlow::Continue(());
+ }
+
+ // In general, during probing we erase regions.
+ let region = Region::new_erased(interner);
+
+ let autoref_ty = Ty::new_ref(interner, region, self_ty, mutbl);
+ self.pick_method(autoref_ty, instantiate_self_ty_obligations, pick_constraints).map_break(
+ |r| {
+ Choice::map_choice_pick(r, |mut pick| {
+ pick.autoderefs = step.autoderefs;
+ pick.autoref_or_ptr_adjustment =
+ Some(AutorefOrPtrAdjustment::Autoref { mutbl, unsize: step.unsize });
+ pick
+ })
+ },
+ )
+ }
+
+ /// If `self_ty` is `*mut T` then this picks `*const T` methods. The reason why we have a
+ /// special case for this is because going from `*mut T` to `*const T` with autoderefs and
+ /// autorefs would require dereferencing the pointer, which is not safe.
+ fn pick_const_ptr_method(
+ &mut self,
+ step: &CandidateStep<'db>,
+ self_ty: Ty<'db>,
+ instantiate_self_ty_obligations: &[PredicateObligation<'db>],
+ ) -> ControlFlow<Choice::Choice> {
+ // Don't convert an unsized reference to ptr
+ if step.unsize {
+ return ControlFlow::Continue(());
+ }
+
+ let TyKind::RawPtr(ty, Mutability::Mut) = self_ty.kind() else {
+ return ControlFlow::Continue(());
+ };
+
+ let const_ptr_ty = Ty::new_ptr(self.interner(), ty, Mutability::Not);
+ self.pick_method(const_ptr_ty, instantiate_self_ty_obligations, None).map_break(|r| {
+ Choice::map_choice_pick(r, |mut pick| {
+ pick.autoderefs = step.autoderefs;
+ pick.autoref_or_ptr_adjustment = Some(AutorefOrPtrAdjustment::ToConstPtr);
+ pick
+ })
+ })
+ }
+
+ /// Searches inherent candidates first, then extension candidates, for a
+ /// method applicable to `self_ty`, finally letting the `Choice` consider
+ /// private candidates (used for diagnostics).
+ fn pick_method(
+ &mut self,
+ self_ty: Ty<'db>,
+ instantiate_self_ty_obligations: &[PredicateObligation<'db>],
+ pick_constraints: Option<&PickConstraintsForShadowed>,
+ ) -> ControlFlow<Choice::Choice> {
+ debug!("pick_method(self_ty={:?})", self_ty);
+
+ for (kind, candidates) in
+ [("inherent", &self.inherent_candidates), ("extension", &self.extension_candidates)]
+ {
+ debug!("searching {} candidates", kind);
+ self.consider_candidates(
+ self_ty,
+ instantiate_self_ty_obligations,
+ candidates,
+ pick_constraints,
+ )?;
+ }
+
+ Choice::consider_private_candidates(self, self_ty, instantiate_self_ty_obligations);
+
+ ControlFlow::Continue(())
+ }
+
+ /// Filters `candidates` down to those that pass the shadowing constraints
+ /// and `consider_probe`, then delegates the final decision among the
+ /// applicable ones to the `Choice`.
+ fn consider_candidates(
+ &self,
+ self_ty: Ty<'db>,
+ instantiate_self_ty_obligations: &[PredicateObligation<'db>],
+ candidates: &[Candidate<'db>],
+ pick_constraints: Option<&PickConstraintsForShadowed>,
+ ) -> ControlFlow<Choice::Choice> {
+ let applicable_candidates: Vec<_> = candidates
+ .iter()
+ .filter(|candidate| {
+ pick_constraints
+ .map(|pick_constraints| pick_constraints.candidate_may_shadow(candidate))
+ .unwrap_or(true)
+ })
+ .filter(|probe| {
+ self.consider_probe(self_ty, instantiate_self_ty_obligations, probe)
+ != ProbeResult::NoMatch
+ })
+ .collect();
+
+ debug!("applicable_candidates: {:?}", applicable_candidates);
+
+ Choice::consider_candidates(self, self_ty, applicable_candidates)
+ }
+
+ /// Runs trait selection on `trait_ref` as a fresh obligation; used only to
+ /// attribute a candidate to a specific impl for diagnostics.
+ fn select_trait_candidate(
+ &self,
+ trait_ref: TraitRef<'db>,
+ ) -> SelectionResult<'db, Selection<'db>> {
+ let obligation =
+ Obligation::new(self.interner(), ObligationCause::new(), self.param_env(), trait_ref);
+ self.infcx().select(&obligation)
+ }
+
+ /// Used for ambiguous method call error reporting. Uses probing that throws away the result internally,
+ /// so do not use to make a decision that may lead to a successful compilation.
+ fn candidate_source(&self, candidate: &Candidate<'db>, self_ty: Ty<'db>) -> CandidateSource {
+ match candidate.kind {
+ InherentImplCandidate { impl_def_id, .. } => CandidateSource::Impl(impl_def_id),
+ ObjectCandidate(trait_ref) | WhereClauseCandidate(trait_ref) => {
+ CandidateSource::Trait(trait_ref.def_id().0)
+ }
+ TraitCandidate(trait_ref) => self.infcx().probe(|_| {
+ let trait_ref = self.infcx().instantiate_binder_with_fresh_vars(
+ BoundRegionConversionTime::FnCall,
+ trait_ref,
+ );
+ let (xform_self_ty, _) = self.xform_self_ty(
+ candidate.item,
+ trait_ref.self_ty(),
+ trait_ref.args.as_slice(),
+ );
+ // Guide the trait selection to show impls that have methods whose type matches
+ // up with the `self` parameter of the method.
+ let _ = self
+ .infcx()
+ .at(&ObligationCause::dummy(), self.param_env())
+ .sup(xform_self_ty, self_ty);
+ match self.select_trait_candidate(trait_ref) {
+ Ok(Some(ImplSource::UserDefined(ref impl_data))) => {
+ // If only a single impl matches, make the error message point
+ // to that impl.
+ CandidateSource::Impl(impl_data.impl_def_id)
+ }
+ _ => CandidateSource::Trait(trait_ref.def_id.0),
+ }
+ }),
+ }
+ }
+
+ /// Maps an already-made `Pick` back to the impl or trait it came from,
+ /// for diagnostics.
+ fn candidate_source_from_pick(&self, pick: &Pick<'db>) -> CandidateSource {
+ match pick.kind {
+ InherentImplPick(impl_) => CandidateSource::Impl(impl_),
+ ObjectPick(trait_) | TraitPick(trait_) => CandidateSource::Trait(trait_),
+ WhereClausePick(trait_ref) => CandidateSource::Trait(trait_ref.skip_binder().def_id.0),
+ }
+ }
+
+ /// Checks whether a single candidate is applicable to `self_ty`, inside an
+ /// inference probe: relates the candidate's transformed self type against
+ /// `self_ty`, registers the candidate's obligations, and evaluates them.
+ /// Returns `ProbeResult::NoMatch` if anything fails.
+ #[instrument(level = "debug", skip(self), ret)]
+ fn consider_probe(
+ &self,
+ self_ty: Ty<'db>,
+ instantiate_self_ty_obligations: &[PredicateObligation<'db>],
+ probe: &Candidate<'db>,
+ ) -> ProbeResult {
+ self.infcx().probe(|_| {
+ let mut result = ProbeResult::Match;
+ let cause = &ObligationCause::new();
+ let mut ocx = ObligationCtxt::new(self.infcx());
+
+ // Subtle: we're not *really* instantiating the current self type while
+ // probing, but instead fully recompute the autoderef steps once we've got
+ // a final `Pick`. We can't nicely handle these obligations outside of a probe.
+ //
+ // We simply handle them for each candidate here for now. That's kinda scuffed
+ // and ideally we just put them into the `FnCtxt` right away. We need to consider
+ // them to deal with defining uses in `method_autoderef_steps`.
+ ocx.register_obligations(instantiate_self_ty_obligations.iter().cloned());
+ let errors = ocx.try_evaluate_obligations();
+ if !errors.is_empty() {
+ unreachable!("unexpected autoderef error {errors:?}");
+ }
+
+ let mut trait_predicate = None;
+ let (xform_self_ty, xform_ret_ty);
+
+ match probe.kind {
+ InherentImplCandidate { impl_def_id, .. } => {
+ let impl_args = self.infcx().fresh_args_for_item(impl_def_id.into());
+ let impl_ty =
+ self.db().impl_self_ty(impl_def_id).instantiate(self.interner(), impl_args);
+ (xform_self_ty, xform_ret_ty) =
+ self.xform_self_ty(probe.item, impl_ty, impl_args.as_slice());
+ match ocx.relate(
+ cause,
+ self.param_env(),
+ self.variance(),
+ self_ty,
+ xform_self_ty,
+ ) {
+ Ok(()) => {}
+ Err(err) => {
+ debug!("--> cannot relate self-types {:?}", err);
+ return ProbeResult::NoMatch;
+ }
+ }
+ // Check whether the impl imposes obligations we have to worry about.
+ let impl_bounds = GenericPredicates::query_all(self.db(), impl_def_id.into());
+ let impl_bounds = clauses_as_obligations(
+ impl_bounds.iter_instantiated_copied(self.interner(), impl_args.as_slice()),
+ ObligationCause::new(),
+ self.param_env(),
+ );
+ // Convert the bounds into obligations.
+ ocx.register_obligations(impl_bounds);
+ }
+ TraitCandidate(poly_trait_ref) => {
+ // Some trait methods are excluded for arrays before 2021.
+ // (`array.into_iter()` wants a slice iterator for compatibility.)
+ if self_ty.is_array() && !self.ctx.edition.at_least_2021() {
+ let trait_signature = self.db().trait_signature(poly_trait_ref.def_id().0);
+ if trait_signature
+ .flags
+ .contains(TraitFlags::SKIP_ARRAY_DURING_METHOD_DISPATCH)
+ {
+ return ProbeResult::NoMatch;
+ }
+ }
+
+ // Some trait methods are excluded for boxed slices before 2024.
+ // (`boxed_slice.into_iter()` wants a slice iterator for compatibility.)
+ if self_ty.boxed_ty().is_some_and(Ty::is_slice)
+ && !self.ctx.edition.at_least_2024()
+ {
+ let trait_signature = self.db().trait_signature(poly_trait_ref.def_id().0);
+ if trait_signature
+ .flags
+ .contains(TraitFlags::SKIP_BOXED_SLICE_DURING_METHOD_DISPATCH)
+ {
+ return ProbeResult::NoMatch;
+ }
+ }
+
+ let trait_ref = self.infcx().instantiate_binder_with_fresh_vars(
+ BoundRegionConversionTime::FnCall,
+ poly_trait_ref,
+ );
+ (xform_self_ty, xform_ret_ty) = self.xform_self_ty(
+ probe.item,
+ trait_ref.self_ty(),
+ trait_ref.args.as_slice(),
+ );
+ match ocx.relate(
+ cause,
+ self.param_env(),
+ self.variance(),
+ self_ty,
+ xform_self_ty,
+ ) {
+ Ok(()) => {}
+ Err(err) => {
+ debug!("--> cannot relate self-types {:?}", err);
+ return ProbeResult::NoMatch;
+ }
+ }
+ let obligation = Obligation::new(
+ self.interner(),
+ cause.clone(),
+ self.param_env(),
+ Binder::dummy(trait_ref),
+ );
+
+ // We only need this hack to deal with fatal overflow in the old solver.
+ ocx.register_obligation(obligation);
+
+ trait_predicate = Some(trait_ref.upcast(self.interner()));
+ }
+ ObjectCandidate(poly_trait_ref) | WhereClauseCandidate(poly_trait_ref) => {
+ let trait_ref = self.infcx().instantiate_binder_with_fresh_vars(
+ BoundRegionConversionTime::FnCall,
+ poly_trait_ref,
+ );
+ (xform_self_ty, xform_ret_ty) = self.xform_self_ty(
+ probe.item,
+ trait_ref.self_ty(),
+ trait_ref.args.as_slice(),
+ );
+
+ if matches!(probe.kind, WhereClauseCandidate(_)) {
+ // `WhereClauseCandidate` requires that the self type is a param,
+ // because it has special behavior with candidate preference as an
+ // inherent pick.
+ match ocx.structurally_normalize_ty(
+ cause,
+ self.param_env(),
+ trait_ref.self_ty(),
+ ) {
+ Ok(ty) => {
+ if !matches!(ty.kind(), TyKind::Param(_)) {
+ debug!("--> not a param ty: {xform_self_ty:?}");
+ return ProbeResult::NoMatch;
+ }
+ }
+ Err(errors) => {
+ debug!("--> cannot relate self-types {:?}", errors);
+ return ProbeResult::NoMatch;
+ }
+ }
+ }
+
+ match ocx.relate(
+ cause,
+ self.param_env(),
+ self.variance(),
+ self_ty,
+ xform_self_ty,
+ ) {
+ Ok(()) => {}
+ Err(err) => {
+ debug!("--> cannot relate self-types {:?}", err);
+ return ProbeResult::NoMatch;
+ }
+ }
+ }
+ }
+
+ // See <https://github.com/rust-lang/trait-system-refactor-initiative/issues/134>.
+ //
+ // In the new solver, check the well-formedness of the return type.
+ // This emulates, in a way, the predicates that fall out of
+ // normalizing the return type in the old solver.
+ //
+ // FIXME(-Znext-solver): We alternatively could check the predicates of
+ // the method itself hold, but we intentionally do not do this in the old
+ // solver b/c of cycles, and doing it in the new solver would be stronger.
+ // This should be fixed in the future, since it likely leads to much better
+ // method winnowing.
+ if let Some(xform_ret_ty) = xform_ret_ty {
+ ocx.register_obligation(Obligation::new(
+ self.interner(),
+ cause.clone(),
+ self.param_env(),
+ ClauseKind::WellFormed(xform_ret_ty.into()),
+ ));
+ }
+
+ if !ocx.try_evaluate_obligations().is_empty() {
+ result = ProbeResult::NoMatch;
+ }
+
+ if self.should_reject_candidate_due_to_opaque_treated_as_rigid(trait_predicate) {
+ result = ProbeResult::NoMatch;
+ }
+
+ // FIXME: Need to leak-check here.
+ // if let Err(_) = self.leak_check(outer_universe, Some(snapshot)) {
+ // result = ProbeResult::NoMatch;
+ // }
+
+ result
+ })
+ }
+
+ /// Trait candidates for not-yet-defined opaque types are somewhat hacky.
+ ///
+ /// We want to only accept trait methods if they would hold even if the
+ /// opaque types were rigid. To handle this, we both check that for trait
+ /// candidates the goal would hold even when treating opaques as rigid,
+ /// see [OpaqueTypesJank](rustc_trait_selection::solve::OpaqueTypesJank).
+ ///
+ /// We also check that all opaque types encountered as self types in the
+ /// autoderef chain don't get constrained when applying the candidate.
+ /// Importantly, this also handles calling methods taking `&self` on
+ /// `impl Trait` to reject the "by-self" candidate.
+ ///
+ /// This needs to happen at the end of `consider_probe` as we need to take
+ /// all the constraints from that into account.
+ #[instrument(level = "debug", skip(self), ret)]
+ fn should_reject_candidate_due_to_opaque_treated_as_rigid(
+ &self,
+ trait_predicate: Option<Predicate<'db>>,
+ ) -> bool {
+ // This function is somewhat hacky and doesn't perfectly do what we want it to.
+ // It's not soundness critical and we should be able to freely improve this
+ // in the future.
+ //
+ // Some concrete edge cases include the fact that `goal_may_hold_opaque_types_jank`
+ // also fails if there are any constrained opaques which are never used as a self
+ // type. We also allow where-bounds which are currently ambiguous but end up
+ // constraining an opaque later on.
+
+ // Check whether the trait candidate would not be applicable if the
+ // opaque type were rigid.
+ if let Some(predicate) = trait_predicate {
+ let goal = Goal { param_env: self.param_env(), predicate };
+ if !self.infcx().goal_may_hold_opaque_types_jank(goal) {
+ return true;
+ }
+ }
+
+ // Check whether any opaque types in the autoderef chain have been
+ // constrained.
+ for step in self.steps {
+ if step.self_ty_is_opaque {
+ debug!(?step.autoderefs, ?step.self_ty, "self_type_is_opaque");
+ let constrained_opaque = self.infcx().probe(|_| {
+ // If we fail to instantiate the self type of this
+ // step, this part of the deref-chain is no longer
+ // reachable. In this case we don't care about opaque
+ // types there.
+ let Ok(ok) = self.infcx().instantiate_query_response_and_region_obligations(
+ &ObligationCause::new(),
+ self.param_env(),
+ self.orig_steps_var_values,
+ &step.self_ty,
+ ) else {
+ debug!("failed to instantiate self_ty");
+ return false;
+ };
+ let mut ocx = ObligationCtxt::new(self.infcx());
+ let self_ty = ocx.register_infer_ok_obligations(ok);
+ if !ocx.try_evaluate_obligations().is_empty() {
+ debug!("failed to prove instantiate self_ty obligations");
+ return false;
+ }
+
+ // If the step's self type resolved to anything other than an
+ // unresolved inference variable, the opaque got constrained.
+ !self.infcx().resolve_vars_if_possible(self_ty).is_ty_var()
+ });
+ if constrained_opaque {
+ debug!("opaque type has been constrained");
+ return true;
+ }
+ }
+ }
+
+ false
+ }
+
+ /// Sometimes we get in a situation where we have multiple probes that are all impls of the
+ /// same trait, but we don't know which impl to use. In this case, since in all cases the
+ /// external interface of the method can be determined from the trait, it's ok not to decide.
+ /// We can basically just collapse all of the probes for various impls into one where-clause
+ /// probe. This will result in a pending obligation so when more type-info is available we can
+ /// make the final decision.
+ ///
+ /// Example (`tests/ui/methods/method-two-trait-defer-resolution-1.rs`):
+ ///
+ /// ```ignore (illustrative)
+ /// trait Foo { ... }
+ /// impl Foo for Vec<i32> { ... }
+ /// impl Foo for Vec<usize> { ... }
+ /// ```
+ ///
+ /// Now imagine the receiver is `Vec<_>`. It doesn't really matter at this time which impl we
+ /// use, so it's ok to just commit to "using the method from the trait Foo".
+ fn collapse_candidates_to_trait_pick(
+ &self,
+ self_ty: Ty<'db>,
+ probes: &[&Candidate<'db>],
+ ) -> Option<Pick<'db>> {
+ // Do all probes correspond to the same trait?
+ let ItemContainerId::TraitId(container) = probes[0].item.container(self.db()) else {
+ return None;
+ };
+ for p in &probes[1..] {
+ let ItemContainerId::TraitId(p_container) = p.item.container(self.db()) else {
+ return None;
+ };
+ if p_container != container {
+ return None;
+ }
+ }
+
+ // FIXME: check the return type here somehow.
+ // If so, just use this trait and call it a day.
+ Some(Pick {
+ item: probes[0].item,
+ kind: TraitPick(container),
+ autoderefs: 0,
+ autoref_or_ptr_adjustment: None,
+ self_ty,
+ receiver_steps: None,
+ shadowed_candidates: vec![],
+ })
+ }
+
+ /// Much like `collapse_candidates_to_trait_pick`, this method allows us to collapse
+ /// multiple conflicting picks if there is one pick whose trait container is a subtrait
+ /// of the trait containers of all of the other picks.
+ ///
+ /// This implements RFC #3624.
+ fn collapse_candidates_to_subtrait_pick(
+ &self,
+ self_ty: Ty<'db>,
+ probes: &[&Candidate<'db>],
+ ) -> Option<Pick<'db>> {
+ let mut child_candidate = probes[0];
+ let ItemContainerId::TraitId(mut child_trait) = child_candidate.item.container(self.db())
+ else {
+ return None;
+ };
+ let mut supertraits: FxHashSet<_> =
+ supertrait_def_ids(self.interner(), child_trait.into()).collect();
+
+ let mut remaining_candidates: Vec<_> = probes[1..].to_vec();
+ while !remaining_candidates.is_empty() {
+ let mut made_progress = false;
+ let mut next_round = vec![];
+
+ for remaining_candidate in remaining_candidates {
+ let ItemContainerId::TraitId(remaining_trait) =
+ remaining_candidate.item.container(self.db())
+ else {
+ return None;
+ };
+ if supertraits.contains(&remaining_trait.into()) {
+ made_progress = true;
+ continue;
+ }
+
+ // This pick is not a supertrait of the `child_pick`.
+ // Check if it's a subtrait of the `child_pick`, instead.
+ // If it is, then it must have been a subtrait of every
+ // other pick we've eliminated at this point. It will
+ // take over at this point.
+ let remaining_trait_supertraits: FxHashSet<_> =
+ supertrait_def_ids(self.interner(), remaining_trait.into()).collect();
+ if remaining_trait_supertraits.contains(&child_trait.into()) {
+ child_candidate = remaining_candidate;
+ child_trait = remaining_trait;
+ supertraits = remaining_trait_supertraits;
+ made_progress = true;
+ continue;
+ }
+
+ // `child_pick` is not a supertrait of this pick.
+ // Don't bail here, since we may be comparing two supertraits
+ // of a common subtrait. These two supertraits won't be related
+ // at all, but we will pick them up next round when we find their
+ // child as we continue iterating in this round.
+ next_round.push(remaining_candidate);
+ }
+
+ if made_progress {
+ // If we've made progress, iterate again.
+ remaining_candidates = next_round;
+ } else {
+ // Otherwise, we must have at least two candidates which
+ // are not related to each other at all.
+ return None;
+ }
+ }
+
+ Some(Pick {
+ item: child_candidate.item,
+ kind: TraitPick(child_trait),
+ autoderefs: 0,
+ autoref_or_ptr_adjustment: None,
+ self_ty,
+ shadowed_candidates: probes
+ .iter()
+ .map(|c| c.item)
+ .filter(|item| *item != child_candidate.item)
+ .collect(),
+ receiver_steps: None,
+ })
+ }
+
+ ///////////////////////////////////////////////////////////////////////////
+ // MISCELLANY
+
+ /// Returns whether `item` can be a candidate in the current mode:
+ /// in `MethodCall` mode a function must declare a `self` parameter;
+ /// in `Path` mode every item is applicable.
+ fn has_applicable_self(&self, item: CandidateId) -> bool {
+ // "Fast track" -- check for usage of sugar when in method call
+ // mode.
+ //
+ // In Path mode (i.e., resolving a value like `T::next`), consider any
+ // associated value (i.e., methods, constants).
+ match item {
+ CandidateId::FunctionId(id) if self.mode == Mode::MethodCall => {
+ self.db().function_signature(id).has_self_param()
+ }
+ _ => true,
+ }
+ // FIXME -- check for types that deref to `Self`,
+ // like `Rc<Self>` and so on.
+ //
+ // Note also that the current code will break if this type
+ // includes any of the type parameters defined on the method
+ // -- but this could be overcome.
+ }
+
+ /// Records an item that matched by name but had no applicable `self`,
+ /// for use in diagnostics.
+ fn record_static_candidate(&mut self, source: CandidateSource) {
+ self.static_candidates.push(source);
+ }
+
+ /// Computes the "transformed" self type (and return type, if any) of
+ /// `item`: for a method in `MethodCall` mode, the instantiated type of its
+ /// first parameter; otherwise the impl's self type unchanged.
+ #[instrument(level = "debug", skip(self))]
+ fn xform_self_ty(
+ &self,
+ item: CandidateId,
+ impl_ty: Ty<'db>,
+ args: &[GenericArg<'db>],
+ ) -> (Ty<'db>, Option<Ty<'db>>) {
+ if let CandidateId::FunctionId(item) = item
+ && self.mode == Mode::MethodCall
+ {
+ let sig = self.xform_method_sig(item, args);
+ (sig.inputs().as_slice()[0], Some(sig.output()))
+ } else {
+ (impl_ty, None)
+ }
+ }
+
+ /// Instantiates `method`'s signature with `args`, creating fresh inference
+ /// variables (and erased regions) for any of the method's own generic
+ /// parameters that `args` does not cover.
+ #[instrument(level = "debug", skip(self))]
+ fn xform_method_sig(&self, method: FunctionId, args: &[GenericArg<'db>]) -> FnSig<'db> {
+ let fn_sig = self.db().callable_item_signature(method.into());
+ debug!(?fn_sig);
+
+ assert!(!args.has_escaping_bound_vars());
+
+ // It is possible for type parameters or early-bound lifetimes
+ // to appear in the signature of `self`. The generic parameters
+ // we are given do not include type/lifetime parameters for the
+ // method yet. So create fresh variables here for those too,
+ // if there are any.
+ let generics = self.db().generic_params(method.into());
+
+ let xform_fn_sig = if generics.is_empty() {
+ fn_sig.instantiate(self.interner(), args)
+ } else {
+ let args = GenericArgs::for_item(
+ self.interner(),
+ method.into(),
+ |param_index, param_id, _| {
+ let i = param_index as usize;
+ if i < args.len() {
+ args[i]
+ } else {
+ match param_id {
+ GenericParamId::LifetimeParamId(_) => {
+ // In general, during probe we erase regions.
+ Region::new_erased(self.interner()).into()
+ }
+ GenericParamId::TypeParamId(_) => self.infcx().next_ty_var().into(),
+ GenericParamId::ConstParamId(_) => self.infcx().next_const_var().into(),
+ }
+ }
+ },
+ );
+ fn_sig.instantiate(self.interner(), args)
+ };
+
+ self.interner().instantiate_bound_regions_with_erased(xform_fn_sig)
+ }
+
+ /// Invokes `callback` for each relevant item of the given impl, as
+ /// filtered by the `Choice`.
+ fn with_impl_item(&mut self, def_id: ImplId, callback: impl FnMut(&mut Self, CandidateId)) {
+ Choice::with_impl_or_trait_item(self, &def_id.impl_items(self.db()).items, callback)
+ }
+
+ /// Invokes `callback` for each relevant item of the given trait, as
+ /// filtered by the `Choice`.
+ fn with_trait_item(&mut self, def_id: TraitId, callback: impl FnMut(&mut Self, CandidateId)) {
+ Choice::with_impl_or_trait_item(self, &def_id.trait_items(self.db()).items, callback)
+ }
+}
+
+/// Determine if the given associated item type is relevant in the current context.
+fn is_relevant_kind_for_mode(mode: Mode, kind: AssocItemId) -> Option<CandidateId> {
+ Some(match (mode, kind) {
+ (Mode::MethodCall, AssocItemId::FunctionId(id)) => id.into(),
+ (Mode::Path, AssocItemId::ConstId(id)) => id.into(),
+ (Mode::Path, AssocItemId::FunctionId(id)) => id.into(),
+ _ => return None,
+ })
+}
+
+impl<'db> Candidate<'db> {
+ /// Converts this candidate into a `Pick` with no autoderefs or
+ /// autoref/pointer adjustments applied; callers fill those in afterwards.
+ fn to_unadjusted_pick(&self, self_ty: Ty<'db>) -> Pick<'db> {
+ Pick {
+ item: self.item,
+ kind: match self.kind {
+ InherentImplCandidate { impl_def_id, .. } => InherentImplPick(impl_def_id),
+ ObjectCandidate(trait_ref) => ObjectPick(trait_ref.skip_binder().def_id.0),
+ TraitCandidate(trait_ref) => TraitPick(trait_ref.skip_binder().def_id.0),
+ WhereClauseCandidate(trait_ref) => {
+ // Only trait derived from where-clauses should
+ // appear here, so they should not contain any
+ // inference variables or other artifacts. This
+ // means they are safe to put into the
+ // `WhereClausePick`.
+ assert!(
+ !trait_ref.skip_binder().args.has_infer()
+ && !trait_ref.skip_binder().args.has_placeholders()
+ );
+
+ WhereClausePick(trait_ref)
+ }
+ },
+ autoderefs: 0,
+ autoref_or_ptr_adjustment: None,
+ self_ty,
+ receiver_steps: match self.kind {
+ InherentImplCandidate { receiver_steps, .. } => Some(receiver_steps),
+ _ => None,
+ },
+ shadowed_candidates: vec![],
+ }
+ }
+}
diff --git a/crates/hir-ty/src/mir.rs b/crates/hir-ty/src/mir.rs
index 7aebe17..b5b691d 100644
--- a/crates/hir-ty/src/mir.rs
+++ b/crates/hir-ty/src/mir.rs
@@ -12,7 +12,7 @@
use la_arena::{Arena, ArenaMap, Idx, RawIdx};
use rustc_ast_ir::Mutability;
use rustc_hash::FxHashMap;
-use rustc_type_ir::inherent::{AdtDef, GenericArgs as _, IntoKind, SliceLike, Ty as _};
+use rustc_type_ir::inherent::{GenericArgs as _, IntoKind, SliceLike, Ty as _};
use smallvec::{SmallVec, smallvec};
use stdx::{impl_from, never};
@@ -22,7 +22,6 @@
db::{HirDatabase, InternedClosureId},
display::{DisplayTarget, HirDisplay},
infer::PointerCast,
- lang_items::is_box,
next_solver::{
Const, DbInterner, ErrorGuaranteed, GenericArgs, ParamEnv, Ty, TyKind,
infer::{InferCtxt, traits::ObligationCause},
@@ -185,7 +184,7 @@
match self {
ProjectionElem::Deref => match base.kind() {
TyKind::RawPtr(inner, _) | TyKind::Ref(_, inner, _) => inner,
- TyKind::Adt(adt_def, subst) if is_box(db, adt_def.def_id().0) => subst.type_at(0),
+ TyKind::Adt(adt_def, subst) if adt_def.is_box() => subst.type_at(0),
_ => {
never!(
"Overloaded deref on type {} is not a projection",
diff --git a/crates/hir-ty/src/mir/eval.rs b/crates/hir-ty/src/mir/eval.rs
index 6e62bcb..da15ca6 100644
--- a/crates/hir-ty/src/mir/eval.rs
+++ b/crates/hir-ty/src/mir/eval.rs
@@ -42,8 +42,8 @@
layout::{Layout, LayoutError, RustcEnumVariantIdx},
method_resolution::{is_dyn_method, lookup_impl_const},
next_solver::{
- Const, ConstBytes, ConstKind, DbInterner, ErrorGuaranteed, GenericArgs, Region,
- SolverDefId, Ty, TyKind, TypingMode, UnevaluatedConst, ValueConst,
+ Const, ConstBytes, ConstKind, DbInterner, ErrorGuaranteed, GenericArgs, Region, Ty, TyKind,
+ TypingMode, UnevaluatedConst, ValueConst,
infer::{DbInternerInferExt, InferCtxt, traits::ObligationCause},
obligation_ctxt::ObligationCtxt,
},
@@ -1917,24 +1917,28 @@
let value = match konst.kind() {
ConstKind::Value(value) => value,
ConstKind::Unevaluated(UnevaluatedConst { def: const_id, args: subst }) => 'b: {
- let mut const_id = match const_id {
- SolverDefId::ConstId(it) => GeneralConstId::from(it),
- SolverDefId::StaticId(it) => it.into(),
- _ => unreachable!("unevaluated consts should be consts or statics"),
- };
+ let mut id = const_id.0;
let mut subst = subst;
- if let hir_def::GeneralConstId::ConstId(c) = const_id {
+ if let hir_def::GeneralConstId::ConstId(c) = id {
let (c, s) = lookup_impl_const(&self.infcx, self.trait_env.clone(), c, subst);
- const_id = hir_def::GeneralConstId::ConstId(c);
+ id = hir_def::GeneralConstId::ConstId(c);
subst = s;
}
- result_owner = self
- .db
- .const_eval(const_id, subst, Some(self.trait_env.clone()))
- .map_err(|e| {
- let name = const_id.name(self.db);
- MirEvalError::ConstEvalError(name, Box::new(e))
- })?;
+ result_owner = match id {
+ GeneralConstId::ConstId(const_id) => self
+ .db
+ .const_eval(const_id, subst, Some(self.trait_env.clone()))
+ .map_err(|e| {
+ let name = id.name(self.db);
+ MirEvalError::ConstEvalError(name, Box::new(e))
+ })?,
+ GeneralConstId::StaticId(static_id) => {
+ self.db.const_eval_static(static_id).map_err(|e| {
+ let name = id.name(self.db);
+ MirEvalError::ConstEvalError(name, Box::new(e))
+ })?
+ }
+ };
if let ConstKind::Value(value) = result_owner.kind() {
break 'b value;
}
diff --git a/crates/hir-ty/src/mir/eval/tests.rs b/crates/hir-ty/src/mir/eval/tests.rs
index f242115..88acd49 100644
--- a/crates/hir-ty/src/mir/eval/tests.rs
+++ b/crates/hir-ty/src/mir/eval/tests.rs
@@ -544,7 +544,7 @@
fn for_loop() {
check_pass(
r#"
-//- minicore: iterator, add
+//- minicore: iterator, add, builtin_impls
fn should_not_reach() {
_ // FIXME: replace this function with panic when that works
}
@@ -706,7 +706,7 @@
fn closure_state() {
check_pass(
r#"
-//- minicore: fn, add, copy
+//- minicore: fn, add, copy, builtin_impls
fn should_not_reach() {
_ // FIXME: replace this function with panic when that works
}
diff --git a/crates/hir-ty/src/mir/lower.rs b/crates/hir-ty/src/mir/lower.rs
index 1439c43..7f457ca 100644
--- a/crates/hir-ty/src/mir/lower.rs
+++ b/crates/hir-ty/src/mir/lower.rs
@@ -33,6 +33,7 @@
infer::{CaptureKind, CapturedItem, TypeMismatch, cast::CastTy},
inhabitedness::is_ty_uninhabited_from,
layout::LayoutError,
+ method_resolution::CandidateId,
mir::{
AggregateKind, Arena, BasicBlock, BasicBlockId, BinOp, BorrowKind, CastKind, Either, Expr,
FieldId, GenericArgs, Idx, InferenceResult, Local, LocalId, MemoryMap, MirBody, MirSpan,
@@ -388,15 +389,15 @@
);
Ok(Some(current))
}
- Adjust::Borrow(AutoBorrow::Ref(_, m) | AutoBorrow::RawPtr(m)) => {
- let Some((p, current)) =
- self.lower_expr_as_place_with_adjust(current, expr_id, true, rest)?
- else {
- return Ok(None);
- };
- let bk = BorrowKind::from_rustc(*m);
- self.push_assignment(current, place, Rvalue::Ref(bk, p), expr_id.into());
- Ok(Some(current))
+ Adjust::Borrow(AutoBorrow::Ref(m)) => self.lower_expr_to_place_with_borrow_adjust(
+ expr_id,
+ place,
+ current,
+ rest,
+ (*m).into(),
+ ),
+ Adjust::Borrow(AutoBorrow::RawPtr(m)) => {
+ self.lower_expr_to_place_with_borrow_adjust(expr_id, place, current, rest, *m)
}
Adjust::Pointer(cast) => {
let Some((p, current)) =
@@ -421,6 +422,24 @@
}
}
+ fn lower_expr_to_place_with_borrow_adjust(
+ &mut self,
+ expr_id: ExprId,
+ place: Place<'db>,
+ current: BasicBlockId<'db>,
+ rest: &[Adjustment<'db>],
+ m: Mutability,
+ ) -> Result<'db, Option<BasicBlockId<'db>>> {
+ let Some((p, current)) =
+ self.lower_expr_as_place_with_adjust(current, expr_id, true, rest)?
+ else {
+ return Ok(None);
+ };
+ let bk = BorrowKind::from_rustc(m);
+ self.push_assignment(current, place, Rvalue::Ref(bk, p), expr_id.into());
+ Ok(Some(current))
+ }
+
fn lower_expr_to_place(
&mut self,
expr_id: ExprId,
@@ -460,18 +479,14 @@
let pr =
if let Some((assoc, subst)) = self.infer.assoc_resolutions_for_expr(expr_id) {
match assoc {
- hir_def::AssocItemId::ConstId(c) => {
+ CandidateId::ConstId(c) => {
self.lower_const(c.into(), current, place, subst, expr_id.into())?;
return Ok(Some(current));
}
- hir_def::AssocItemId::FunctionId(_) => {
+ CandidateId::FunctionId(_) => {
// FnDefs are zero sized, no action is needed.
return Ok(Some(current));
}
- hir_def::AssocItemId::TypeAliasId(_) => {
- // FIXME: If it is unreachable, use proper error instead of `not_supported`.
- not_supported!("associated functions and types")
- }
}
} else if let Some(variant) = self.infer.variant_resolution_for_expr(expr_id) {
match variant {
@@ -1517,10 +1532,20 @@
UnevaluatedConst { def: const_id.into(), args: subst },
)
} else {
- let name = const_id.name(self.db);
- self.db
- .const_eval(const_id, subst, None)
- .map_err(|e| MirLowerError::ConstEvalError(name.into(), Box::new(e)))?
+ match const_id {
+ id @ GeneralConstId::ConstId(const_id) => {
+ self.db.const_eval(const_id, subst, None).map_err(|e| {
+ let name = id.name(self.db);
+ MirLowerError::ConstEvalError(name.into(), Box::new(e))
+ })?
+ }
+ GeneralConstId::StaticId(static_id) => {
+ self.db.const_eval_static(static_id).map_err(|e| {
+ let name = const_id.name(self.db);
+ MirLowerError::ConstEvalError(name.into(), Box::new(e))
+ })?
+ }
+ }
};
let ty = self
.db
diff --git a/crates/hir-ty/src/mir/lower/as_place.rs b/crates/hir-ty/src/mir/lower/as_place.rs
index 52f1412..bceafae 100644
--- a/crates/hir-ty/src/mir/lower/as_place.rs
+++ b/crates/hir-ty/src/mir/lower/as_place.rs
@@ -193,7 +193,7 @@
return self.lower_overloaded_deref(
current,
p,
- self.expr_ty_after_adjustments(*expr),
+ self.expr_ty_without_adjust(*expr),
self.expr_ty_without_adjust(expr_id),
expr_id.into(),
'b: {
diff --git a/crates/hir-ty/src/mir/lower/pattern_matching.rs b/crates/hir-ty/src/mir/lower/pattern_matching.rs
index b1b86ab..c3a4814 100644
--- a/crates/hir-ty/src/mir/lower/pattern_matching.rs
+++ b/crates/hir-ty/src/mir/lower/pattern_matching.rs
@@ -1,9 +1,8 @@
//! MIR lowering for patterns
-use hir_def::{AssocItemId, hir::ExprId, signatures::VariantFields};
+use hir_def::{hir::ExprId, signatures::VariantFields};
use rustc_type_ir::inherent::{IntoKind, SliceLike, Ty as _};
-use crate::next_solver::GenericArgs;
use crate::{
BindingMode,
mir::{
@@ -16,6 +15,7 @@
},
},
};
+use crate::{method_resolution::CandidateId, next_solver::GenericArgs};
macro_rules! not_supported {
($x: expr) => {
@@ -207,7 +207,7 @@
mode,
)?
}
- Pat::Range { start, end } => {
+ Pat::Range { start, end, range_type: _ } => {
let mut add_check = |l: &ExprId, binop| -> Result<'db, ()> {
let lv = self.lower_literal_or_const_to_operand(self.infer[pattern], l)?;
let else_target = *current_else.get_or_insert_with(|| self.new_basic_block());
@@ -393,7 +393,7 @@
}
let (c, subst) = 'b: {
if let Some(x) = self.infer.assoc_resolutions_for_pat(pattern)
- && let AssocItemId::ConstId(c) = x.0
+ && let CandidateId::ConstId(c) = x.0
{
break 'b (c, x.1);
}
diff --git a/crates/hir-ty/src/next_solver/def_id.rs b/crates/hir-ty/src/next_solver/def_id.rs
index 77f2106..b6167b4 100644
--- a/crates/hir-ty/src/next_solver/def_id.rs
+++ b/crates/hir-ty/src/next_solver/def_id.rs
@@ -1,8 +1,9 @@
//! Definition of `SolverDefId`
use hir_def::{
- AdtId, CallableDefId, ConstId, DefWithBodyId, EnumId, EnumVariantId, FunctionId,
- GeneralConstId, GenericDefId, ImplId, StaticId, StructId, TraitId, TypeAliasId, UnionId,
+ AdtId, AttrDefId, CallableDefId, ConstId, DefWithBodyId, EnumId, EnumVariantId, FunctionId,
+ GeneralConstId, GenericDefId, HasModule, ImplId, ModuleId, StaticId, StructId, TraitId,
+ TypeAliasId, UnionId, db::DefDatabase,
};
use rustc_type_ir::inherent;
use stdx::impl_from;
@@ -154,6 +155,28 @@
}
}
+impl TryFrom<SolverDefId> for AttrDefId {
+ type Error = ();
+ #[inline]
+ fn try_from(value: SolverDefId) -> Result<Self, Self::Error> {
+ match value {
+ SolverDefId::AdtId(it) => Ok(it.into()),
+ SolverDefId::ConstId(it) => Ok(it.into()),
+ SolverDefId::FunctionId(it) => Ok(it.into()),
+ SolverDefId::ImplId(it) => Ok(it.into()),
+ SolverDefId::StaticId(it) => Ok(it.into()),
+ SolverDefId::TraitId(it) => Ok(it.into()),
+ SolverDefId::TypeAliasId(it) => Ok(it.into()),
+ SolverDefId::EnumVariantId(it) => Ok(it.into()),
+ SolverDefId::Ctor(Ctor::Struct(it)) => Ok(it.into()),
+ SolverDefId::Ctor(Ctor::Enum(it)) => Ok(it.into()),
+ SolverDefId::InternedClosureId(_)
+ | SolverDefId::InternedCoroutineId(_)
+ | SolverDefId::InternedOpaqueTyId(_) => Err(()),
+ }
+ }
+}
+
impl TryFrom<SolverDefId> for DefWithBodyId {
type Error = ();
@@ -218,6 +241,28 @@
}
}
+impl HasModule for SolverDefId {
+ fn module(&self, db: &dyn DefDatabase) -> ModuleId {
+ match *self {
+ SolverDefId::AdtId(id) => id.module(db),
+ SolverDefId::ConstId(id) => id.module(db),
+ SolverDefId::FunctionId(id) => id.module(db),
+ SolverDefId::ImplId(id) => id.module(db),
+ SolverDefId::StaticId(id) => id.module(db),
+ SolverDefId::TraitId(id) => id.module(db),
+ SolverDefId::TypeAliasId(id) => id.module(db),
+ SolverDefId::InternedClosureId(id) => id.loc(db).0.module(db),
+ SolverDefId::InternedCoroutineId(id) => id.loc(db).0.module(db),
+ SolverDefId::InternedOpaqueTyId(id) => match id.loc(db) {
+ crate::ImplTraitId::ReturnTypeImplTrait(owner, _) => owner.module(db),
+ crate::ImplTraitId::TypeAliasImplTrait(owner, _) => owner.module(db),
+ },
+ SolverDefId::Ctor(Ctor::Enum(id)) | SolverDefId::EnumVariantId(id) => id.module(db),
+ SolverDefId::Ctor(Ctor::Struct(id)) => id.module(db),
+ }
+ }
+}
+
impl<'db> inherent::DefId<DbInterner<'db>> for SolverDefId {
fn as_local(self) -> Option<SolverDefId> {
Some(self)
@@ -290,6 +335,55 @@
declare_id_wrapper!(ImplIdWrapper, ImplId);
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
+pub struct GeneralConstIdWrapper(pub GeneralConstId);
+
+impl std::fmt::Debug for GeneralConstIdWrapper {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Debug::fmt(&self.0, f)
+ }
+}
+impl From<GeneralConstIdWrapper> for GeneralConstId {
+ #[inline]
+ fn from(value: GeneralConstIdWrapper) -> GeneralConstId {
+ value.0
+ }
+}
+impl From<GeneralConstId> for GeneralConstIdWrapper {
+ #[inline]
+ fn from(value: GeneralConstId) -> GeneralConstIdWrapper {
+ Self(value)
+ }
+}
+impl From<GeneralConstIdWrapper> for SolverDefId {
+ #[inline]
+ fn from(value: GeneralConstIdWrapper) -> SolverDefId {
+ match value.0 {
+ GeneralConstId::ConstId(id) => SolverDefId::ConstId(id),
+ GeneralConstId::StaticId(id) => SolverDefId::StaticId(id),
+ }
+ }
+}
+impl TryFrom<SolverDefId> for GeneralConstIdWrapper {
+ type Error = ();
+ #[inline]
+ fn try_from(value: SolverDefId) -> Result<Self, Self::Error> {
+ match value {
+ SolverDefId::ConstId(it) => Ok(Self(it.into())),
+ SolverDefId::StaticId(it) => Ok(Self(it.into())),
+ _ => Err(()),
+ }
+ }
+}
+impl<'db> inherent::DefId<DbInterner<'db>> for GeneralConstIdWrapper {
+ fn as_local(self) -> Option<SolverDefId> {
+ Some(self.into())
+ }
+ fn is_local(self) -> bool {
+ true
+ }
+}
+
+#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct CallableIdWrapper(pub CallableDefId);
impl std::fmt::Debug for CallableIdWrapper {
diff --git a/crates/hir-ty/src/next_solver/fold.rs b/crates/hir-ty/src/next_solver/fold.rs
index f776b6e..7836419 100644
--- a/crates/hir-ty/src/next_solver/fold.rs
+++ b/crates/hir-ty/src/next_solver/fold.rs
@@ -5,7 +5,7 @@
TypeVisitableExt, inherent::IntoKind,
};
-use crate::next_solver::BoundConst;
+use crate::next_solver::{BoundConst, FxIndexMap};
use super::{
Binder, BoundRegion, BoundTy, Const, ConstKind, DbInterner, Predicate, Region, Ty, TyKind,
@@ -158,3 +158,65 @@
t.fold_with(&mut Folder { interner, callback })
}
+
+impl<'db> DbInterner<'db> {
+ /// Replaces all regions bound by the given `Binder` with the
+ /// results returned by the closure; the closure is expected to
+ /// return a free region (relative to this binder), and hence the
+ /// binder is removed in the return type. The closure is invoked
+ /// once for each unique `BoundRegion`; multiple references to the
+ /// same `BoundRegion` will reuse the previous result. A map is
+ /// returned at the end with each bound region and the free region
+ /// that replaced it.
+ ///
+ /// # Panics
+ ///
+ /// This method only replaces late bound regions. Any types or
+ /// constants bound by `value` will cause an ICE.
+ pub fn instantiate_bound_regions<T, F>(
+ self,
+ value: Binder<'db, T>,
+ mut fld_r: F,
+ ) -> (T, FxIndexMap<BoundRegion, Region<'db>>)
+ where
+ F: FnMut(BoundRegion) -> Region<'db>,
+ T: TypeFoldable<DbInterner<'db>>,
+ {
+ let mut region_map = FxIndexMap::default();
+ let real_fld_r = |br: BoundRegion| *region_map.entry(br).or_insert_with(|| fld_r(br));
+ let value = self.instantiate_bound_regions_uncached(value, real_fld_r);
+ (value, region_map)
+ }
+
+ pub fn instantiate_bound_regions_uncached<T, F>(
+ self,
+ value: Binder<'db, T>,
+ mut replace_regions: F,
+ ) -> T
+ where
+ F: FnMut(BoundRegion) -> Region<'db>,
+ T: TypeFoldable<DbInterner<'db>>,
+ {
+ let value = value.skip_binder();
+ if !value.has_escaping_bound_vars() {
+ value
+ } else {
+ let delegate = FnMutDelegate {
+ regions: &mut replace_regions,
+ types: &mut |b| panic!("unexpected bound ty in binder: {b:?}"),
+ consts: &mut |b| panic!("unexpected bound ct in binder: {b:?}"),
+ };
+ let mut replacer = BoundVarReplacer::new(self, delegate);
+ value.fold_with(&mut replacer)
+ }
+ }
+
+ /// Replaces any late-bound regions bound in `value` with `'erased`. Useful in codegen but also
+ /// method lookup and a few other places where precise region relationships are not required.
+ pub fn instantiate_bound_regions_with_erased<T>(self, value: Binder<'db, T>) -> T
+ where
+ T: TypeFoldable<DbInterner<'db>>,
+ {
+ self.instantiate_bound_regions(value, |_| Region::new_erased(self)).0
+ }
+}
diff --git a/crates/hir-ty/src/next_solver/fulfill.rs b/crates/hir-ty/src/next_solver/fulfill.rs
index 7783075..40cc84e 100644
--- a/crates/hir-ty/src/next_solver/fulfill.rs
+++ b/crates/hir-ty/src/next_solver/fulfill.rs
@@ -2,7 +2,7 @@
mod errors;
-use std::{mem, ops::ControlFlow};
+use std::ops::ControlFlow;
use rustc_hash::FxHashSet;
use rustc_next_trait_solver::{
@@ -77,14 +77,12 @@
obligations
}
- fn drain_pending(
- &mut self,
- cond: impl Fn(&PredicateObligation<'db>) -> bool,
- ) -> PendingObligations<'db> {
- let (not_stalled, pending) =
- mem::take(&mut self.pending).into_iter().partition(|(o, _)| cond(o));
- self.pending = pending;
- not_stalled
+ fn drain_pending<'this, 'cond>(
+ &'this mut self,
+ cond: impl 'cond + Fn(&PredicateObligation<'db>) -> bool,
+ ) -> impl Iterator<Item = (PredicateObligation<'db>, Option<GoalStalledOn<DbInterner<'db>>>)>
+ {
+ self.pending.extract_if(.., move |(o, _)| cond(o))
}
fn on_fulfillment_overflow(&mut self, infcx: &InferCtxt<'db>) {
@@ -165,9 +163,11 @@
// to not put the obligations queue in `InferenceTable`'s snapshots.
// assert_eq!(self.usable_in_snapshot, infcx.num_open_snapshots());
let mut errors = Vec::new();
+ let mut obligations = Vec::new();
loop {
let mut any_changed = false;
- for (mut obligation, stalled_on) in self.obligations.drain_pending(|_| true) {
+ obligations.extend(self.obligations.drain_pending(|_| true));
+ for (mut obligation, stalled_on) in obligations.drain(..) {
if obligation.recursion_depth >= infcx.interner.recursion_limit() {
self.obligations.on_fulfillment_overflow(infcx);
// Only return true errors that we have accumulated while processing.
@@ -269,7 +269,6 @@
.is_break()
})
})
- .into_iter()
.map(|(o, _)| o)
.collect()
}
diff --git a/crates/hir-ty/src/next_solver/fulfill/errors.rs b/crates/hir-ty/src/next_solver/fulfill/errors.rs
index 82dbf94..8495af4 100644
--- a/crates/hir-ty/src/next_solver/fulfill/errors.rs
+++ b/crates/hir-ty/src/next_solver/fulfill/errors.rs
@@ -98,7 +98,7 @@
PredicateKind::Clause(ClauseKind::ConstArgHasType(ct, expected_ty)) => {
let ct_ty = match ct.kind() {
ConstKind::Unevaluated(uv) => {
- infcx.interner.type_of(uv.def).instantiate(infcx.interner, uv.args)
+ infcx.interner.type_of(uv.def.into()).instantiate(infcx.interner, uv.args)
}
ConstKind::Param(param_ct) => param_ct.find_const_ty_from_env(obligation.param_env),
ConstKind::Value(cv) => cv.ty,
@@ -286,7 +286,6 @@
nested_goal.source(),
GoalSource::ImplWhereBound
| GoalSource::AliasBoundConstCondition
- | GoalSource::InstantiateHigherRanked
| GoalSource::AliasWellFormed
) && nested_goal.result().is_err()
})
@@ -555,8 +554,6 @@
ChildMode::Host(_parent_host_pred),
GoalSource::ImplWhereBound | GoalSource::AliasBoundConstCondition,
) => make_obligation(),
- // Skip over a higher-ranked predicate.
- (_, GoalSource::InstantiateHigherRanked) => self.obligation.clone(),
(ChildMode::PassThrough, _)
| (_, GoalSource::AliasWellFormed | GoalSource::AliasBoundConstCondition) => {
make_obligation()
@@ -620,7 +617,7 @@
}
mod wf {
- use hir_def::ItemContainerId;
+ use hir_def::{GeneralConstId, ItemContainerId};
use rustc_type_ir::inherent::{
AdtDef, BoundExistentialPredicates, GenericArgs as _, IntoKind, SliceLike, Term as _,
Ty as _,
@@ -1054,14 +1051,14 @@
predicate,
));
- if let SolverDefId::ConstId(uv_def) = uv.def
+ if let GeneralConstId::ConstId(uv_def) = uv.def.0
&& let ItemContainerId::ImplId(impl_) =
uv_def.loc(self.interner().db).container
&& self.interner().db.impl_signature(impl_).target_trait.is_none()
{
return; // Subtree is handled by above function
} else {
- let obligations = self.nominal_obligations(uv.def, uv.args);
+ let obligations = self.nominal_obligations(uv.def.into(), uv.args);
self.out.extend(obligations);
}
}
diff --git a/crates/hir-ty/src/next_solver/generic_arg.rs b/crates/hir-ty/src/next_solver/generic_arg.rs
index dedd6a1..2205cba 100644
--- a/crates/hir-ty/src/next_solver/generic_arg.rs
+++ b/crates/hir-ty/src/next_solver/generic_arg.rs
@@ -17,7 +17,7 @@
generics::Generics,
};
-#[derive(Copy, Clone, PartialEq, Eq, Hash, TypeVisitable, TypeFoldable)]
+#[derive(Copy, Clone, PartialEq, Eq, Hash, TypeVisitable, TypeFoldable, salsa::Supertype)]
pub enum GenericArg<'db> {
Ty(Ty<'db>),
Lifetime(Region<'db>),
@@ -196,6 +196,11 @@
{
let defs = interner.generics_of(def_id);
let count = defs.count();
+
+ if count == 0 {
+ return Default::default();
+ }
+
let mut args = SmallVec::with_capacity(count);
Self::fill_item(&mut args, interner, defs, &mut mk_kind);
interner.mk_args(&args)
diff --git a/crates/hir-ty/src/next_solver/infer/canonical/canonicalizer.rs b/crates/hir-ty/src/next_solver/infer/canonical/canonicalizer.rs
index 7995545..1029a7f 100644
--- a/crates/hir-ty/src/next_solver/infer/canonical/canonicalizer.rs
+++ b/crates/hir-ty/src/next_solver/infer/canonical/canonicalizer.rs
@@ -10,8 +10,8 @@
use rustc_type_ir::InferTy::{self, FloatVar, IntVar, TyVar};
use rustc_type_ir::inherent::{Const as _, IntoKind as _, Region as _, SliceLike, Ty as _};
use rustc_type_ir::{
- BoundVar, BoundVarIndexKind, CanonicalQueryInput, DebruijnIndex, Flags, InferConst, RegionKind,
- TyVid, TypeFlags, TypeFoldable, TypeFolder, TypeSuperFoldable, TypeVisitableExt, UniverseIndex,
+ BoundVar, BoundVarIndexKind, DebruijnIndex, Flags, InferConst, RegionKind, TyVid, TypeFlags,
+ TypeFoldable, TypeFolder, TypeSuperFoldable, TypeVisitableExt, UniverseIndex,
};
use smallvec::SmallVec;
use tracing::debug;
@@ -19,7 +19,7 @@
use crate::next_solver::infer::InferCtxt;
use crate::next_solver::{
Binder, Canonical, CanonicalVarKind, CanonicalVars, Const, ConstKind, DbInterner, GenericArg,
- ParamEnvAnd, Placeholder, Region, Ty, TyKind,
+ Placeholder, Region, Ty, TyKind,
};
/// When we canonicalize a value to form a query, we wind up replacing
@@ -66,33 +66,19 @@
/// [c]: https://rust-lang.github.io/chalk/book/canonical_queries/canonicalization.html#canonicalizing-the-query
pub fn canonicalize_query<V>(
&self,
- value: ParamEnvAnd<'db, V>,
+ value: V,
query_state: &mut OriginalQueryValues<'db>,
- ) -> CanonicalQueryInput<DbInterner<'db>, ParamEnvAnd<'db, V>>
+ ) -> Canonical<'db, V>
where
V: TypeFoldable<DbInterner<'db>>,
{
- let (param_env, value) = value.into_parts();
- // FIXME(#118965): We don't canonicalize the static lifetimes that appear in the
- // `param_env` because they are treated differently by trait selection.
- let canonical_param_env = Canonicalizer::canonicalize(
- param_env,
- self,
- self.interner,
- &CanonicalizeFreeRegionsOtherThanStatic,
- query_state,
- );
-
- let canonical = Canonicalizer::canonicalize_with_base(
- canonical_param_env,
+ Canonicalizer::canonicalize(
value,
self,
self.interner,
&CanonicalizeAllFreeRegions,
query_state,
)
- .unchecked_map(|(param_env, value)| ParamEnvAnd { param_env, value });
- CanonicalQueryInput { canonical, typing_mode: self.typing_mode() }
}
/// Canonicalizes a query *response* `V`. When we canonicalize a
@@ -285,26 +271,6 @@
}
}
-struct CanonicalizeFreeRegionsOtherThanStatic;
-
-impl CanonicalizeMode for CanonicalizeFreeRegionsOtherThanStatic {
- fn canonicalize_free_region<'db>(
- &self,
- canonicalizer: &mut Canonicalizer<'_, 'db>,
- r: Region<'db>,
- ) -> Region<'db> {
- if r.is_static() { r } else { canonicalizer.canonical_var_for_region_in_root_universe(r) }
- }
-
- fn any(&self) -> bool {
- true
- }
-
- fn preserve_universes(&self) -> bool {
- false
- }
-}
-
struct Canonicalizer<'cx, 'db> {
/// Set to `None` to disable the resolution of inference variables.
infcx: &'cx InferCtxt<'db>,
diff --git a/crates/hir-ty/src/next_solver/infer/canonical/instantiate.rs b/crates/hir-ty/src/next_solver/infer/canonical/instantiate.rs
index 6360291..13c620c 100644
--- a/crates/hir-ty/src/next_solver/infer/canonical/instantiate.rs
+++ b/crates/hir-ty/src/next_solver/infer/canonical/instantiate.rs
@@ -1,21 +1,31 @@
//! This module contains code to instantiate new values into a
-//! `Canonical<'tcx, T>`.
+//! `Canonical<'db, T>`.
//!
//! For an overview of what canonicalization is and how it fits into
//! rustc, check out the [chapter in the rustc dev guide][c].
//!
//! [c]: https://rust-lang.github.io/chalk/book/canonical_queries/canonicalization.html
+use std::{fmt::Debug, iter};
+
use crate::next_solver::{
- BoundConst, BoundRegion, BoundTy, Canonical, CanonicalVarValues, Clauses, Const, ConstKind,
- DbInterner, GenericArg, Predicate, Region, RegionKind, Ty, TyKind, fold::FnMutDelegate,
+ BoundConst, BoundRegion, BoundTy, Canonical, CanonicalVarKind, CanonicalVarValues, Clauses,
+ Const, ConstKind, DbInterner, GenericArg, ParamEnv, Predicate, Region, RegionKind, Ty, TyKind,
+ fold::FnMutDelegate,
+ infer::{
+ InferCtxt, InferOk, InferResult,
+ canonical::{QueryRegionConstraints, QueryResponse, canonicalizer::OriginalQueryValues},
+ traits::{ObligationCause, PredicateObligations},
+ },
};
use rustc_hash::FxHashMap;
+use rustc_index::{Idx as _, IndexVec};
use rustc_type_ir::{
- BoundVarIndexKind, GenericArgKind, TypeFlags, TypeFoldable, TypeFolder, TypeSuperFoldable,
- TypeVisitableExt,
+ BoundVar, BoundVarIndexKind, GenericArgKind, TypeFlags, TypeFoldable, TypeFolder,
+ TypeSuperFoldable, TypeVisitableExt, UniverseIndex,
inherent::{GenericArg as _, IntoKind, SliceLike},
};
+use tracing::{debug, instrument};
pub trait CanonicalExt<'db, V> {
fn instantiate(&self, tcx: DbInterner<'db>, var_values: &CanonicalVarValues<'db>) -> V
@@ -169,3 +179,331 @@
c.super_fold_with(self)
}
}
+
+impl<'db> InferCtxt<'db> {
+ /// A version of `make_canonicalized_query_response` that does
+ /// not pack in obligations, for contexts that want to drop
+ /// pending obligations instead of treating them as an ambiguity (e.g.
+ /// typeck "probing" contexts).
+ ///
+ /// If you DO want to keep track of pending obligations (which
+ /// include all region obligations, so this includes all cases
+ /// that care about regions) with this function, you have to
+ /// do it yourself, by e.g., having them be a part of the answer.
+ pub fn make_query_response_ignoring_pending_obligations<T>(
+ &self,
+ inference_vars: CanonicalVarValues<'db>,
+ answer: T,
+ ) -> Canonical<'db, QueryResponse<'db, T>>
+ where
+ T: TypeFoldable<DbInterner<'db>>,
+ {
+ // While we ignore region constraints and pending obligations,
+ // we do return constrained opaque types to avoid unconstrained
+ // inference variables in the response. This is important as we want
+ // to check that opaques in deref steps stay unconstrained.
+ //
+ // This doesn't handle the more general case for non-opaques as
+ // ambiguous `Projection` obligations have the same issue.
+ let opaque_types = self
+ .inner
+ .borrow_mut()
+ .opaque_type_storage
+ .iter_opaque_types()
+ .map(|(k, v)| (k, v.ty))
+ .collect();
+
+ self.canonicalize_response(QueryResponse {
+ var_values: inference_vars,
+ region_constraints: QueryRegionConstraints::default(),
+ opaque_types,
+ value: answer,
+ })
+ }
+
+ /// Given the (canonicalized) result to a canonical query,
+ /// instantiates the result so it can be used, plugging in the
+ /// values from the canonical query. (Note that the result may
+ /// have been ambiguous; you should check the certainty level of
+ /// the query before applying this function.)
+ ///
+ /// To get a good understanding of what is happening here, check
+ /// out the [chapter in the rustc dev guide][c].
+ ///
+ /// [c]: https://rust-lang.github.io/chalk/book/canonical_queries/canonicalization.html#processing-the-canonicalized-query-result
+ pub fn instantiate_query_response_and_region_obligations<R>(
+ &self,
+ cause: &ObligationCause,
+ param_env: ParamEnv<'db>,
+ original_values: &OriginalQueryValues<'db>,
+ query_response: &Canonical<'db, QueryResponse<'db, R>>,
+ ) -> InferResult<'db, R>
+ where
+ R: TypeFoldable<DbInterner<'db>>,
+ {
+ let InferOk { value: result_args, obligations } =
+ self.query_response_instantiation(cause, param_env, original_values, query_response)?;
+
+ for predicate in &query_response.value.region_constraints.outlives {
+ let predicate = instantiate_value(self.interner, &result_args, *predicate);
+ self.register_outlives_constraint(predicate);
+ }
+
+ for assumption in &query_response.value.region_constraints.assumptions {
+ let assumption = instantiate_value(self.interner, &result_args, *assumption);
+ self.register_region_assumption(assumption);
+ }
+
+ let user_result: R =
+ query_response
+ .instantiate_projected(self.interner, &result_args, |q_r| q_r.value.clone());
+
+ Ok(InferOk { value: user_result, obligations })
+ }
+
+ /// Given the original values and the (canonicalized) result from
+ /// computing a query, returns an instantiation that can be applied
+ /// to the query result to convert the result back into the
+ /// original namespace.
+ ///
+ /// The instantiation also comes accompanied with subobligations
+ /// that arose from unification; these might occur if (for
+ /// example) we are doing lazy normalization and the value
+ /// assigned to a type variable is unified with an unnormalized
+ /// projection.
+ fn query_response_instantiation<R>(
+ &self,
+ cause: &ObligationCause,
+ param_env: ParamEnv<'db>,
+ original_values: &OriginalQueryValues<'db>,
+ query_response: &Canonical<'db, QueryResponse<'db, R>>,
+ ) -> InferResult<'db, CanonicalVarValues<'db>>
+ where
+ R: Debug + TypeFoldable<DbInterner<'db>>,
+ {
+ debug!(
+ "query_response_instantiation(original_values={:#?}, query_response={:#?})",
+ original_values, query_response,
+ );
+
+ let mut value = self.query_response_instantiation_guess(
+ cause,
+ param_env,
+ original_values,
+ query_response,
+ )?;
+
+ value.obligations.extend(
+ self.unify_query_response_instantiation_guess(
+ cause,
+ param_env,
+ original_values,
+ &value.value,
+ query_response,
+ )?
+ .into_obligations(),
+ );
+
+ Ok(value)
+ }
+
+ /// Given the original values and the (canonicalized) result from
+ /// computing a query, returns a **guess** at an instantiation that
+ /// can be applied to the query result to convert the result back
+ /// into the original namespace. This is called a **guess**
+ /// because it uses a quick heuristic to find the values for each
+ /// canonical variable; if that quick heuristic fails, then we
+ /// will instantiate fresh inference variables for each canonical
+ /// variable instead. Therefore, the result of this method must be
+ /// properly unified
+ #[instrument(level = "debug", skip(self, param_env))]
+ fn query_response_instantiation_guess<R>(
+ &self,
+ cause: &ObligationCause,
+ param_env: ParamEnv<'db>,
+ original_values: &OriginalQueryValues<'db>,
+ query_response: &Canonical<'db, QueryResponse<'db, R>>,
+ ) -> InferResult<'db, CanonicalVarValues<'db>>
+ where
+ R: Debug + TypeFoldable<DbInterner<'db>>,
+ {
+ // For each new universe created in the query result that did
+ // not appear in the original query, create a local
+ // superuniverse.
+ let mut universe_map = original_values.universe_map.clone();
+ let num_universes_in_query = original_values.universe_map.len();
+ let num_universes_in_response = query_response.max_universe.as_usize() + 1;
+ for _ in num_universes_in_query..num_universes_in_response {
+ universe_map.push(self.create_next_universe());
+ }
+ assert!(!universe_map.is_empty()); // always have the root universe
+ assert_eq!(universe_map[UniverseIndex::ROOT.as_usize()], UniverseIndex::ROOT);
+
+ // Every canonical query result includes values for each of
+ // the inputs to the query. Therefore, we begin by unifying
+ // these values with the original inputs that were
+ // canonicalized.
+ let result_values = &query_response.value.var_values;
+ assert_eq!(original_values.var_values.len(), result_values.len());
+
+ // Quickly try to find initial values for the canonical
+ // variables in the result in terms of the query. We do this
+ // by iterating down the values that the query gave to each of
+ // the canonical inputs. If we find that one of those values
+ // is directly equal to one of the canonical variables in the
+ // result, then we can type the corresponding value from the
+ // input. See the example above.
+ let mut opt_values: IndexVec<BoundVar, Option<GenericArg<'db>>> =
+ IndexVec::from_elem_n(None, query_response.variables.len());
+
+ for (original_value, result_value) in iter::zip(&original_values.var_values, result_values)
+ {
+ match result_value.kind() {
+ GenericArgKind::Type(result_value) => {
+ // We disable the instantiation guess for inference variables
+ // and only use it for placeholders. We need to handle the
+ // `sub_root` of type inference variables which would make this
+ // more involved. They are also a lot rarer than region variables.
+ if let TyKind::Bound(index_kind, b) = result_value.kind()
+ && !matches!(
+ query_response.variables.as_slice()[b.var.as_usize()],
+ CanonicalVarKind::Ty { .. }
+ )
+ {
+ // We only allow a `Canonical` index in generic parameters.
+ assert!(matches!(index_kind, BoundVarIndexKind::Canonical));
+ opt_values[b.var] = Some(*original_value);
+ }
+ }
+ GenericArgKind::Lifetime(result_value) => {
+ if let RegionKind::ReBound(index_kind, b) = result_value.kind() {
+ // We only allow a `Canonical` index in generic parameters.
+ assert!(matches!(index_kind, BoundVarIndexKind::Canonical));
+ opt_values[b.var] = Some(*original_value);
+ }
+ }
+ GenericArgKind::Const(result_value) => {
+ if let ConstKind::Bound(index_kind, b) = result_value.kind() {
+ // We only allow a `Canonical` index in generic parameters.
+ assert!(matches!(index_kind, BoundVarIndexKind::Canonical));
+ opt_values[b.var] = Some(*original_value);
+ }
+ }
+ }
+ }
+
+ // Create result arguments: if we found a value for a
+ // given variable in the loop above, use that. Otherwise, use
+ // a fresh inference variable.
+ let interner = self.interner;
+ let variables = query_response.variables;
+ let var_values =
+ CanonicalVarValues::instantiate(interner, variables, |var_values, kind| {
+ if kind.universe() != UniverseIndex::ROOT {
+ // A variable from inside a binder of the query. While ideally these shouldn't
+ // exist at all, we have to deal with them for now.
+ self.instantiate_canonical_var(kind, var_values, |u| universe_map[u.as_usize()])
+ } else if kind.is_existential() {
+ match opt_values[BoundVar::new(var_values.len())] {
+ Some(k) => k,
+ None => self.instantiate_canonical_var(kind, var_values, |u| {
+ universe_map[u.as_usize()]
+ }),
+ }
+ } else {
+ // For placeholders which were already part of the input, we simply map this
+ // universal bound variable back to the placeholder of the input.
+ opt_values[BoundVar::new(var_values.len())]
+ .expect("expected placeholder to be unified with itself during response")
+ }
+ });
+
+ let mut obligations = PredicateObligations::new();
+
+ // Carry all newly resolved opaque types to the caller's scope
+ for &(a, b) in &query_response.value.opaque_types {
+ let a = instantiate_value(self.interner, &var_values, a);
+ let b = instantiate_value(self.interner, &var_values, b);
+ debug!(?a, ?b, "constrain opaque type");
+ // We use equate here instead of, for example, just registering the
+ // opaque type's hidden value directly, because the hidden type may have been an inference
+ // variable that got constrained to the opaque type itself. In that case we want to equate
+ // the generic args of the opaque with the generic params of its hidden type version.
+ obligations.extend(
+ self.at(cause, param_env)
+ .eq(Ty::new_opaque(self.interner, a.def_id, a.args), b)?
+ .obligations,
+ );
+ }
+
+ Ok(InferOk { value: var_values, obligations })
+ }
+
+ /// Given a "guess" at the values for the canonical variables in
+ /// the input, try to unify with the *actual* values found in the
+ /// query result. Often, but not always, this is a no-op, because
+ /// we already found the mapping in the "guessing" step.
+ ///
+ /// See also: [`Self::query_response_instantiation_guess`]
+ fn unify_query_response_instantiation_guess<R>(
+ &self,
+ cause: &ObligationCause,
+ param_env: ParamEnv<'db>,
+ original_values: &OriginalQueryValues<'db>,
+ result_args: &CanonicalVarValues<'db>,
+ query_response: &Canonical<'db, QueryResponse<'db, R>>,
+ ) -> InferResult<'db, ()>
+ where
+ R: Debug + TypeFoldable<DbInterner<'db>>,
+ {
+ // A closure that yields the result value for the given
+ // canonical variable; this is taken from
+ // `query_response.var_values` after applying the instantiation
+ // by `result_args`.
+ let instantiated_query_response = |index: BoundVar| -> GenericArg<'db> {
+ query_response
+ .instantiate_projected(self.interner, result_args, |v| v.var_values[index])
+ };
+
+ // Unify the original value for each variable with the value
+ // taken from `query_response` (after applying `result_args`).
+ self.unify_canonical_vars(cause, param_env, original_values, instantiated_query_response)
+ }
+
+ /// Given two sets of values for the same set of canonical variables, unify them.
+ /// The second set is produced lazily by supplying indices from the first set.
+ fn unify_canonical_vars(
+ &self,
+ cause: &ObligationCause,
+ param_env: ParamEnv<'db>,
+ variables1: &OriginalQueryValues<'db>,
+ variables2: impl Fn(BoundVar) -> GenericArg<'db>,
+ ) -> InferResult<'db, ()> {
+ let mut obligations = PredicateObligations::new();
+ for (index, value1) in variables1.var_values.iter().enumerate() {
+ let value2 = variables2(BoundVar::new(index));
+
+ match (value1.kind(), value2.kind()) {
+ (GenericArgKind::Type(v1), GenericArgKind::Type(v2)) => {
+ obligations.extend(self.at(cause, param_env).eq(v1, v2)?.into_obligations());
+ }
+ (GenericArgKind::Lifetime(re1), GenericArgKind::Lifetime(re2))
+ if re1.is_erased() && re2.is_erased() =>
+ {
+ // no action needed
+ }
+ (GenericArgKind::Lifetime(v1), GenericArgKind::Lifetime(v2)) => {
+ self.inner.borrow_mut().unwrap_region_constraints().make_eqregion(v1, v2);
+ }
+ (GenericArgKind::Const(v1), GenericArgKind::Const(v2)) => {
+ let ok = self.at(cause, param_env).eq(v1, v2)?;
+ obligations.extend(ok.into_obligations());
+ }
+ _ => {
+ panic!("kind mismatch, cannot unify {:?} and {:?}", value1, value2,);
+ }
+ }
+ }
+ Ok(InferOk { value: (), obligations })
+ }
+}
diff --git a/crates/hir-ty/src/next_solver/infer/canonical/mod.rs b/crates/hir-ty/src/next_solver/infer/canonical/mod.rs
index b3bd0a4..a0420a5 100644
--- a/crates/hir-ty/src/next_solver/infer/canonical/mod.rs
+++ b/crates/hir-ty/src/next_solver/infer/canonical/mod.rs
@@ -22,10 +22,12 @@
//! [c]: https://rust-lang.github.io/chalk/book/canonical_queries/canonicalization.html
use crate::next_solver::{
- Canonical, CanonicalVarValues, Const, DbInterner, GenericArg, PlaceholderConst,
- PlaceholderRegion, PlaceholderTy, Region, Ty, TyKind, infer::InferCtxt,
+ ArgOutlivesPredicate, Canonical, CanonicalVarValues, Const, DbInterner, GenericArg,
+ OpaqueTypeKey, PlaceholderConst, PlaceholderRegion, PlaceholderTy, Region, Ty, TyKind,
+ infer::InferCtxt,
};
use instantiate::CanonicalExt;
+use macros::{TypeFoldable, TypeVisitable};
use rustc_index::IndexVec;
use rustc_type_ir::inherent::IntoKind;
use rustc_type_ir::{CanonicalVarKind, InferTy, TypeFoldable, UniverseIndex, inherent::Ty as _};
@@ -135,3 +137,22 @@
}
}
}
+
+/// After we execute a query with a canonicalized key, we get back a
+/// `Canonical<QueryResponse<..>>`. You can use
+/// `instantiate_query_result` to access the data in this result.
+#[derive(Clone, Debug, TypeVisitable, TypeFoldable)]
+pub struct QueryResponse<'db, R> {
+ pub var_values: CanonicalVarValues<'db>,
+ pub region_constraints: QueryRegionConstraints<'db>,
+ pub opaque_types: Vec<(OpaqueTypeKey<'db>, Ty<'db>)>,
+ pub value: R,
+}
+
+#[derive(Clone, Debug, Default, PartialEq, Eq, Hash, TypeVisitable, TypeFoldable)]
+pub struct QueryRegionConstraints<'db> {
+ pub outlives: Vec<QueryOutlivesConstraint<'db>>,
+ pub assumptions: Vec<ArgOutlivesPredicate<'db>>,
+}
+
+pub type QueryOutlivesConstraint<'tcx> = ArgOutlivesPredicate<'tcx>;
diff --git a/crates/hir-ty/src/next_solver/infer/mod.rs b/crates/hir-ty/src/next_solver/infer/mod.rs
index 7b8f52b..fcce04f 100644
--- a/crates/hir-ty/src/next_solver/infer/mod.rs
+++ b/crates/hir-ty/src/next_solver/infer/mod.rs
@@ -29,9 +29,10 @@
use unify_key::{ConstVariableOrigin, ConstVariableValue, ConstVidKey};
use crate::next_solver::{
- BoundConst, BoundRegion, BoundTy, BoundVarKind, Goal, SolverContext,
+ ArgOutlivesPredicate, BoundConst, BoundRegion, BoundTy, BoundVarKind, Goal, Predicate,
+ SolverContext,
fold::BoundVarReplacerDelegate,
- infer::{select::EvaluationResult, traits::PredicateObligation},
+ infer::{at::ToTrace, select::EvaluationResult, traits::PredicateObligation},
obligation_ctxt::ObligationCtxt,
};
@@ -47,6 +48,7 @@
pub mod canonical;
mod context;
pub mod opaque_types;
+mod outlives;
pub mod region_constraints;
pub mod relate;
pub mod resolve;
@@ -141,7 +143,14 @@
/// for each body-id in this map, which will process the
/// obligations within. This is expected to be done 'late enough'
/// that all type inference variables have been bound and so forth.
- pub(crate) region_obligations: Vec<RegionObligation<'db>>,
+ pub(crate) region_obligations: Vec<TypeOutlivesConstraint<'db>>,
+
+ /// The outlives bounds that we assume must hold about placeholders that
+ /// come from instantiating the binder of coroutine-witnesses. These bounds
+ /// are deduced from the well-formedness of the witness's types, and are
+ /// necessary because of the way we anonymize the regions in a coroutine,
+ /// which may cause types to no longer be considered well-formed.
+ region_assumptions: Vec<ArgOutlivesPredicate<'db>>,
/// Caches for opaque type inference.
pub(crate) opaque_type_storage: OpaqueTypeStorage<'db>,
@@ -158,12 +167,13 @@
float_unification_storage: Default::default(),
region_constraint_storage: Some(Default::default()),
region_obligations: vec![],
+ region_assumptions: Default::default(),
opaque_type_storage: Default::default(),
}
}
#[inline]
- pub fn region_obligations(&self) -> &[RegionObligation<'db>] {
+ pub fn region_obligations(&self) -> &[TypeOutlivesConstraint<'db>] {
&self.region_obligations
}
@@ -318,7 +328,7 @@
/// See the `region_obligations` field for more information.
#[derive(Clone, Debug)]
-pub struct RegionObligation<'db> {
+pub struct TypeOutlivesConstraint<'db> {
pub sub_region: Region<'db>,
pub sup_type: Ty<'db>,
}
@@ -387,6 +397,12 @@
self.typing_mode
}
+ /// Evaluates whether the predicate can be satisfied (by any means)
+ /// in the given `ParamEnv`.
+ pub fn predicate_may_hold(&self, obligation: &PredicateObligation<'db>) -> bool {
+ self.evaluate_obligation(obligation).may_apply()
+ }
+
/// See the comment on [OpaqueTypesJank](crate::solve::OpaqueTypesJank)
/// for more details.
pub fn predicate_may_hold_opaque_types_jank(
@@ -507,6 +523,22 @@
})
}
+ pub fn can_eq<T: ToTrace<'db>>(&self, param_env: ParamEnv<'db>, a: T, b: T) -> bool {
+ self.probe(|_| {
+ let mut ocx = ObligationCtxt::new(self);
+ let Ok(()) = ocx.eq(&ObligationCause::dummy(), param_env, a, b) else {
+ return false;
+ };
+ ocx.try_evaluate_obligations().is_empty()
+ })
+ }
+
+ /// See the comment on [OpaqueTypesJank](crate::solve::OpaqueTypesJank)
+ /// for more details.
+ pub fn goal_may_hold_opaque_types_jank(&self, goal: Goal<'db, Predicate<'db>>) -> bool {
+ <&SolverContext<'db>>::from(self).root_goal_may_hold_opaque_types_jank(goal)
+ }
+
pub fn type_is_copy_modulo_regions(&self, param_env: ParamEnv<'db>, ty: Ty<'db>) -> bool {
let ty = self.resolve_vars_if_possible(ty);
@@ -632,6 +664,14 @@
self.inner.borrow_mut().type_variables().num_vars()
}
+ pub fn next_var_for_param(&self, id: GenericParamId) -> GenericArg<'db> {
+ match id {
+ GenericParamId::TypeParamId(_) => self.next_ty_var().into(),
+ GenericParamId::ConstParamId(_) => self.next_const_var().into(),
+ GenericParamId::LifetimeParamId(_) => self.next_region_var().into(),
+ }
+ }
+
pub fn next_ty_var(&self) -> Ty<'db> {
self.next_ty_var_with_origin(TypeVariableOrigin { param_def_id: None })
}
@@ -846,6 +886,22 @@
self.inner.borrow_mut().opaque_type_storage.iter_opaque_types().collect()
}
+ pub fn has_opaques_with_sub_unified_hidden_type(&self, ty_vid: TyVid) -> bool {
+ let ty_sub_vid = self.sub_unification_table_root_var(ty_vid);
+ let inner = &mut *self.inner.borrow_mut();
+ let mut type_variables = inner.type_variable_storage.with_log(&mut inner.undo_log);
+ inner.opaque_type_storage.iter_opaque_types().any(|(_, hidden_ty)| {
+ if let TyKind::Infer(InferTy::TyVar(hidden_vid)) = hidden_ty.ty.kind() {
+ let opaque_sub_vid = type_variables.sub_unification_table_root_var(hidden_vid);
+ if opaque_sub_vid == ty_sub_vid {
+ return true;
+ }
+ }
+
+ false
+ })
+ }
+
#[inline(always)]
pub fn can_define_opaque_ty(&self, id: impl Into<SolverDefId>) -> bool {
match self.typing_mode_unchecked() {
diff --git a/crates/hir-ty/src/next_solver/infer/outlives/mod.rs b/crates/hir-ty/src/next_solver/infer/outlives/mod.rs
new file mode 100644
index 0000000..321c4b8
--- /dev/null
+++ b/crates/hir-ty/src/next_solver/infer/outlives/mod.rs
@@ -0,0 +1 @@
+mod obligations;
diff --git a/crates/hir-ty/src/next_solver/infer/outlives/obligations.rs b/crates/hir-ty/src/next_solver/infer/outlives/obligations.rs
new file mode 100644
index 0000000..befb200
--- /dev/null
+++ b/crates/hir-ty/src/next_solver/infer/outlives/obligations.rs
@@ -0,0 +1,68 @@
+use ena::undo_log::UndoLogs;
+use rustc_type_ir::{OutlivesPredicate, TypeVisitableExt};
+use tracing::{debug, instrument};
+
+use crate::next_solver::{
+ ArgOutlivesPredicate, GenericArg, Region, RegionOutlivesPredicate, Ty,
+ infer::{InferCtxt, TypeOutlivesConstraint, snapshot::undo_log::UndoLog},
+};
+
+impl<'db> InferCtxt<'db> {
+ pub fn register_outlives_constraint(
+ &self,
+ OutlivesPredicate(arg, r2): ArgOutlivesPredicate<'db>,
+ ) {
+ match arg {
+ GenericArg::Lifetime(r1) => {
+ self.register_region_outlives_constraint(OutlivesPredicate(r1, r2));
+ }
+ GenericArg::Ty(ty1) => {
+ self.register_type_outlives_constraint(ty1, r2);
+ }
+ GenericArg::Const(_) => unreachable!(),
+ }
+ }
+
+ pub fn register_region_outlives_constraint(
+ &self,
+ OutlivesPredicate(r_a, r_b): RegionOutlivesPredicate<'db>,
+ ) {
+ // `'a: 'b` ==> `'b <= 'a`
+ self.sub_regions(r_b, r_a);
+ }
+
+ /// Registers that the given region obligation must be resolved
+ /// from within the scope of `body_id`. These regions are enqueued
+ /// and later processed by regionck, when full type information is
+ /// available (see `region_obligations` field for more
+ /// information).
+ #[instrument(level = "debug", skip(self))]
+ pub fn register_type_outlives_constraint_inner(&self, obligation: TypeOutlivesConstraint<'db>) {
+ let mut inner = self.inner.borrow_mut();
+ inner.undo_log.push(UndoLog::PushTypeOutlivesConstraint);
+ inner.region_obligations.push(obligation);
+ }
+
+ pub fn register_type_outlives_constraint(&self, sup_type: Ty<'db>, sub_region: Region<'db>) {
+ // `is_global` means the type has no params, infer, placeholder, or non-`'static`
+ // free regions. If the type has none of these things, then we can skip registering
+ // this outlives obligation since it has no components which affect lifetime
+ // checking in an interesting way.
+ if sup_type.is_global() {
+ return;
+ }
+
+ debug!(?sup_type, ?sub_region);
+
+ self.register_type_outlives_constraint_inner(TypeOutlivesConstraint {
+ sup_type,
+ sub_region,
+ });
+ }
+
+ pub fn register_region_assumption(&self, assumption: ArgOutlivesPredicate<'db>) {
+ let mut inner = self.inner.borrow_mut();
+ inner.undo_log.push(UndoLog::PushRegionAssumption);
+ inner.region_assumptions.push(assumption);
+ }
+}
diff --git a/crates/hir-ty/src/next_solver/infer/snapshot/undo_log.rs b/crates/hir-ty/src/next_solver/infer/snapshot/undo_log.rs
index c8ec8da..f246af1 100644
--- a/crates/hir-ty/src/next_solver/infer/snapshot/undo_log.rs
+++ b/crates/hir-ty/src/next_solver/infer/snapshot/undo_log.rs
@@ -28,8 +28,8 @@
FloatUnificationTable(sv::UndoLog<ut::Delegate<FloatVid>>),
RegionConstraintCollector(region_constraints::UndoLog<'db>),
RegionUnificationTable(sv::UndoLog<ut::Delegate<RegionVidKey<'db>>>),
- #[expect(dead_code, reason = "this is used in rustc")]
- PushRegionObligation,
+ PushTypeOutlivesConstraint,
+ PushRegionAssumption,
}
macro_rules! impl_from {
@@ -75,8 +75,13 @@
UndoLog::RegionUnificationTable(undo) => {
self.region_constraint_storage.as_mut().unwrap().unification_table.reverse(undo)
}
- UndoLog::PushRegionObligation => {
- self.region_obligations.pop();
+ UndoLog::PushTypeOutlivesConstraint => {
+ let popped = self.region_obligations.pop();
+ assert!(popped.is_some(), "pushed region constraint but could not pop it");
+ }
+ UndoLog::PushRegionAssumption => {
+ let popped = self.region_assumptions.pop();
+ assert!(popped.is_some(), "pushed region assumption but could not pop it");
}
}
}
diff --git a/crates/hir-ty/src/next_solver/interner.rs b/crates/hir-ty/src/next_solver/interner.rs
index b18e08b..a3c984f 100644
--- a/crates/hir-ty/src/next_solver/interner.rs
+++ b/crates/hir-ty/src/next_solver/interner.rs
@@ -1,14 +1,15 @@
//! Things related to the Interner in the next-trait-solver.
-use std::{fmt, ops::ControlFlow};
+use std::fmt;
+use rustc_ast_ir::{FloatTy, IntTy, UintTy};
pub use tls_cache::clear_tls_solver_cache;
pub use tls_db::{attach_db, attach_db_allow_change, with_attached_db};
use base_db::Crate;
use hir_def::{
- AdtId, AttrDefId, BlockId, CallableDefId, DefWithBodyId, EnumVariantId, ItemContainerId,
- StructId, UnionId, VariantId,
+ AdtId, AttrDefId, BlockId, CallableDefId, DefWithBodyId, EnumVariantId, HasModule,
+ ItemContainerId, StructId, UnionId, VariantId,
lang_item::LangItem,
signatures::{FieldData, FnFlags, ImplFlags, StructFlags, TraitFlags},
};
@@ -19,9 +20,10 @@
use rustc_type_ir::{
AliasTermKind, AliasTyKind, BoundVar, CollectAndApply, CoroutineWitnessTypes, DebruijnIndex,
EarlyBinder, FlagComputation, Flags, GenericArgKind, ImplPolarity, InferTy, Interner, TraitRef,
- TypeVisitableExt, UniverseIndex, Upcast, Variance,
+ TypeFlags, TypeVisitableExt, UniverseIndex, Upcast, Variance,
elaborate::elaborate,
error::TypeError,
+ fast_reject,
inherent::{self, GenericsOf, IntoKind, SliceLike as _, Span as _, Ty as _},
lang_items::{SolverAdtLangItem, SolverLangItem, SolverTraitLangItem},
solve::SizedTraitKind,
@@ -30,12 +32,13 @@
use crate::{
FnAbi,
db::{HirDatabase, InternedCoroutine, InternedCoroutineId},
- method_resolution::{ALL_FLOAT_FPS, ALL_INT_FPS, TyFingerprint},
+ lower::GenericPredicates,
+ method_resolution::TraitImpls,
next_solver::{
AdtIdWrapper, BoundConst, CallableIdWrapper, CanonicalVarKind, ClosureIdWrapper,
- CoroutineIdWrapper, Ctor, FnSig, FxIndexMap, ImplIdWrapper, OpaqueTypeKey,
- RegionAssumptions, SolverContext, SolverDefIds, TraitIdWrapper, TypeAliasIdWrapper,
- util::{ContainsTypeErrors, explicit_item_bounds, for_trait_impls},
+ CoroutineIdWrapper, Ctor, FnSig, FxIndexMap, GeneralConstIdWrapper, ImplIdWrapper,
+ OpaqueTypeKey, RegionAssumptions, SimplifiedType, SolverContext, SolverDefIds,
+ TraitIdWrapper, TypeAliasIdWrapper, util::explicit_item_bounds,
},
};
@@ -211,6 +214,10 @@
}
impl<'db> $name<'db> {
+ pub fn empty(interner: DbInterner<'db>) -> Self {
+ $name::new_(interner.db(), smallvec::SmallVec::new())
+ }
+
pub fn new_from_iter(
interner: DbInterner<'db>,
data: impl IntoIterator<Item = $ty<'db>>,
@@ -583,6 +590,10 @@
self.inner().flags.is_enum
}
+ pub fn is_box(&self) -> bool {
+ self.inner().flags.is_box
+ }
+
#[inline]
pub fn repr(self) -> ReprOptions {
self.inner().repr
@@ -759,7 +770,7 @@
}
impl<'db> Flags for Pattern<'db> {
- fn flags(&self) -> rustc_type_ir::TypeFlags {
+ fn flags(&self) -> TypeFlags {
match self.inner() {
PatternKind::Range { start, end } => {
FlagComputation::for_const_kind(&start.kind()).flags
@@ -772,6 +783,7 @@
}
flags
}
+ PatternKind::NotNull => TypeFlags::empty(),
}
}
@@ -787,6 +799,7 @@
}
idx
}
+ PatternKind::NotNull => rustc_type_ir::INNERMOST,
}
}
}
@@ -824,7 +837,10 @@
)?;
Ok(Pattern::new(tcx, PatternKind::Or(pats)))
}
- (PatternKind::Range { .. } | PatternKind::Or(_), _) => Err(TypeError::Mismatch),
+ (PatternKind::NotNull, PatternKind::NotNull) => Ok(a),
+ (PatternKind::Range { .. } | PatternKind::Or(_) | PatternKind::NotNull, _) => {
+ Err(TypeError::Mismatch)
+ }
}
}
}
@@ -867,6 +883,7 @@
type CoroutineId = CoroutineIdWrapper;
type AdtId = AdtIdWrapper;
type ImplId = ImplIdWrapper;
+ type UnevaluatedConstId = GeneralConstIdWrapper;
type Span = Span;
type GenericArgs = GenericArgs<'db>;
@@ -1264,27 +1281,21 @@
})
}
- #[tracing::instrument(skip(self), ret)]
+ #[tracing::instrument(skip(self))]
fn item_bounds(
self,
def_id: Self::DefId,
) -> EarlyBinder<Self, impl IntoIterator<Item = Self::Clause>> {
- explicit_item_bounds(self, def_id).map_bound(|bounds| {
- Clauses::new_from_iter(self, elaborate(self, bounds).collect::<Vec<_>>())
- })
+ explicit_item_bounds(self, def_id).map_bound(|bounds| elaborate(self, bounds))
}
- #[tracing::instrument(skip(self), ret)]
+ #[tracing::instrument(skip(self))]
fn item_self_bounds(
self,
def_id: Self::DefId,
) -> EarlyBinder<Self, impl IntoIterator<Item = Self::Clause>> {
- explicit_item_bounds(self, def_id).map_bound(|bounds| {
- Clauses::new_from_iter(
- self,
- elaborate(self, bounds).filter_only_self().collect::<Vec<_>>(),
- )
- })
+ explicit_item_bounds(self, def_id)
+ .map_bound(|bounds| elaborate(self, bounds).filter_only_self())
}
fn item_non_self_bounds(
@@ -1309,9 +1320,8 @@
self,
def_id: Self::DefId,
) -> EarlyBinder<Self, impl IntoIterator<Item = Self::Clause>> {
- let predicates = self.db().generic_predicates(def_id.try_into().unwrap());
- let predicates: Vec<_> = predicates.iter().cloned().collect();
- EarlyBinder::bind(predicates.into_iter())
+ GenericPredicates::query_all(self.db, def_id.try_into().unwrap())
+ .map_bound(|it| it.iter().copied())
}
#[tracing::instrument(level = "debug", skip(self), ret)]
@@ -1319,9 +1329,8 @@
self,
def_id: Self::DefId,
) -> EarlyBinder<Self, impl IntoIterator<Item = Self::Clause>> {
- let predicates = self.db().generic_predicates_without_parent(def_id.try_into().unwrap());
- let predicates: Vec<_> = predicates.iter().cloned().collect();
- EarlyBinder::bind(predicates.into_iter())
+ GenericPredicates::query_own(self.db, def_id.try_into().unwrap())
+ .map_bound(|it| it.iter().copied())
}
#[tracing::instrument(skip(self), ret)]
@@ -1334,23 +1343,21 @@
_ => false,
};
- let predicates: Vec<(Clause<'db>, Span)> = self
- .db()
- .generic_predicates(def_id.0.into())
- .iter()
- .filter(|p| match p.kind().skip_binder() {
- // rustc has the following assertion:
- // https://github.com/rust-lang/rust/blob/52618eb338609df44978b0ca4451ab7941fd1c7a/compiler/rustc_hir_analysis/src/hir_ty_lowering/bounds.rs#L525-L608
- rustc_type_ir::ClauseKind::Trait(it) => is_self(it.self_ty()),
- rustc_type_ir::ClauseKind::TypeOutlives(it) => is_self(it.0),
- rustc_type_ir::ClauseKind::Projection(it) => is_self(it.self_ty()),
- rustc_type_ir::ClauseKind::HostEffect(it) => is_self(it.self_ty()),
- _ => false,
- })
- .cloned()
- .map(|p| (p, Span::dummy()))
- .collect();
- EarlyBinder::bind(predicates)
+ GenericPredicates::query_explicit(self.db, def_id.0.into()).map_bound(move |predicates| {
+ predicates
+ .iter()
+ .copied()
+ .filter(move |p| match p.kind().skip_binder() {
+ // rustc has the following assertion:
+ // https://github.com/rust-lang/rust/blob/52618eb338609df44978b0ca4451ab7941fd1c7a/compiler/rustc_hir_analysis/src/hir_ty_lowering/bounds.rs#L525-L608
+ ClauseKind::Trait(it) => is_self(it.self_ty()),
+ ClauseKind::TypeOutlives(it) => is_self(it.0),
+ ClauseKind::Projection(it) => is_self(it.self_ty()),
+ ClauseKind::HostEffect(it) => is_self(it.self_ty()),
+ _ => false,
+ })
+ .map(|p| (p, Span::dummy()))
+ })
}
#[tracing::instrument(skip(self), ret)]
@@ -1368,25 +1375,25 @@
}
}
- let predicates: Vec<(Clause<'db>, Span)> = self
- .db()
- .generic_predicates(def_id.try_into().unwrap())
- .iter()
- .filter(|p| match p.kind().skip_binder() {
- rustc_type_ir::ClauseKind::Trait(it) => is_self_or_assoc(it.self_ty()),
- rustc_type_ir::ClauseKind::TypeOutlives(it) => is_self_or_assoc(it.0),
- rustc_type_ir::ClauseKind::Projection(it) => is_self_or_assoc(it.self_ty()),
- rustc_type_ir::ClauseKind::HostEffect(it) => is_self_or_assoc(it.self_ty()),
- // FIXME: Not sure is this correct to allow other clauses but we might replace
- // `generic_predicates_ns` query here with something closer to rustc's
- // `implied_bounds_with_filter`, which is more granular lowering than this
- // "lower at once and then filter" implementation.
- _ => true,
- })
- .cloned()
- .map(|p| (p, Span::dummy()))
- .collect();
- EarlyBinder::bind(predicates)
+ GenericPredicates::query_explicit(self.db, def_id.try_into().unwrap()).map_bound(
+ |predicates| {
+ predicates
+ .iter()
+ .copied()
+ .filter(|p| match p.kind().skip_binder() {
+ ClauseKind::Trait(it) => is_self_or_assoc(it.self_ty()),
+ ClauseKind::TypeOutlives(it) => is_self_or_assoc(it.0),
+ ClauseKind::Projection(it) => is_self_or_assoc(it.self_ty()),
+ ClauseKind::HostEffect(it) => is_self_or_assoc(it.self_ty()),
+ // FIXME: Not sure if it is correct to allow other clauses but we might replace
+ // `generic_predicates_ns` query here with something closer to rustc's
+ // `implied_bounds_with_filter`, which is more granular lowering than this
+ // "lower at once and then filter" implementation.
+ _ => true,
+ })
+ .map(|p| (p, Span::dummy()))
+ },
+ )
}
fn impl_super_outlives(
@@ -1396,15 +1403,12 @@
let trait_ref = self.db().impl_trait(impl_id.0).expect("expected an impl of trait");
trait_ref.map_bound(|trait_ref| {
let clause: Clause<'_> = trait_ref.upcast(self);
- Clauses::new_from_iter(
- self,
- rustc_type_ir::elaborate::elaborate(self, [clause]).filter(|clause| {
- matches!(
- clause.kind().skip_binder(),
- ClauseKind::TypeOutlives(_) | ClauseKind::RegionOutlives(_)
- )
- }),
- )
+ elaborate(self, [clause]).filter(|clause| {
+ matches!(
+ clause.kind().skip_binder(),
+ ClauseKind::TypeOutlives(_) | ClauseKind::RegionOutlives(_)
+ )
+ })
})
}
@@ -1599,89 +1603,161 @@
)
}
- fn associated_type_def_ids(self, def_id: Self::DefId) -> impl IntoIterator<Item = Self::DefId> {
- let trait_ = match def_id {
- SolverDefId::TraitId(id) => id,
- _ => unreachable!(),
- };
- trait_.trait_items(self.db()).associated_types().map(|id| id.into())
+ fn associated_type_def_ids(
+ self,
+ def_id: Self::TraitId,
+ ) -> impl IntoIterator<Item = Self::DefId> {
+ def_id.0.trait_items(self.db()).associated_types().map(|id| id.into())
}
fn for_each_relevant_impl(
self,
- trait_: Self::TraitId,
+ trait_def_id: Self::TraitId,
self_ty: Self::Ty,
mut f: impl FnMut(Self::ImplId),
) {
- let trait_ = trait_.0;
- let self_ty_fp = TyFingerprint::for_trait_impl(self_ty);
- let fps: &[TyFingerprint] = match self_ty.kind() {
- TyKind::Infer(InferTy::IntVar(..)) => &ALL_INT_FPS,
- TyKind::Infer(InferTy::FloatVar(..)) => &ALL_FLOAT_FPS,
- _ => self_ty_fp.as_slice(),
+ let krate = self.krate.expect("trait solving requires setting `DbInterner::krate`");
+ let trait_block = trait_def_id.0.loc(self.db).container.containing_block();
+ let mut consider_impls_for_simplified_type = |simp: SimplifiedType| {
+ let type_block = simp.def().and_then(|def_id| {
+ let module = match def_id {
+ SolverDefId::AdtId(AdtId::StructId(id)) => id.module(self.db),
+ SolverDefId::AdtId(AdtId::EnumId(id)) => id.module(self.db),
+ SolverDefId::AdtId(AdtId::UnionId(id)) => id.module(self.db),
+ SolverDefId::TraitId(id) => id.module(self.db),
+ SolverDefId::TypeAliasId(id) => id.module(self.db),
+ SolverDefId::ConstId(_)
+ | SolverDefId::FunctionId(_)
+ | SolverDefId::ImplId(_)
+ | SolverDefId::StaticId(_)
+ | SolverDefId::InternedClosureId(_)
+ | SolverDefId::InternedCoroutineId(_)
+ | SolverDefId::InternedOpaqueTyId(_)
+ | SolverDefId::EnumVariantId(_)
+ | SolverDefId::Ctor(_) => return None,
+ };
+ module.containing_block()
+ });
+ TraitImpls::for_each_crate_and_block_trait_and_type(
+ self.db,
+ krate,
+ type_block,
+ trait_block,
+ &mut |impls| {
+ for &impl_ in impls.for_trait_and_self_ty(trait_def_id.0, &simp) {
+ f(impl_.into());
+ }
+ },
+ );
};
- if fps.is_empty() {
- _ = for_trait_impls(
- self.db(),
- self.krate.expect("Must have self.krate"),
- self.block,
- trait_,
- self_ty_fp,
- |impls| {
- for i in impls.for_trait(trait_) {
- use rustc_type_ir::TypeVisitable;
- let contains_errors = self.db().impl_trait(i).map_or(false, |b| {
- b.skip_binder().visit_with(&mut ContainsTypeErrors).is_break()
- });
- if contains_errors {
- continue;
- }
+ match self_ty.kind() {
+ TyKind::Bool
+ | TyKind::Char
+ | TyKind::Int(_)
+ | TyKind::Uint(_)
+ | TyKind::Float(_)
+ | TyKind::Adt(_, _)
+ | TyKind::Foreign(_)
+ | TyKind::Str
+ | TyKind::Array(_, _)
+ | TyKind::Pat(_, _)
+ | TyKind::Slice(_)
+ | TyKind::RawPtr(_, _)
+ | TyKind::Ref(_, _, _)
+ | TyKind::FnDef(_, _)
+ | TyKind::FnPtr(..)
+ | TyKind::Dynamic(_, _)
+ | TyKind::Closure(..)
+ | TyKind::CoroutineClosure(..)
+ | TyKind::Coroutine(_, _)
+ | TyKind::Never
+ | TyKind::Tuple(_)
+ | TyKind::UnsafeBinder(_) => {
+ let simp =
+ fast_reject::simplify_type(self, self_ty, fast_reject::TreatParams::AsRigid)
+ .unwrap();
+ consider_impls_for_simplified_type(simp);
+ }
- f(i.into());
- }
- ControlFlow::Continue(())
- },
- );
- } else {
- _ = for_trait_impls(
- self.db(),
- self.krate.expect("Must have self.krate"),
- self.block,
- trait_,
- self_ty_fp,
- |impls| {
- for fp in fps {
- for i in impls.for_trait_and_self_ty(trait_, *fp) {
- use rustc_type_ir::TypeVisitable;
- let contains_errors = self.db().impl_trait(i).map_or(false, |b| {
- b.skip_binder().visit_with(&mut ContainsTypeErrors).is_break()
- });
- if contains_errors {
- continue;
- }
+ // HACK: For integer and float variables we have to manually look at all impls
+ // which have some integer or float as a self type.
+ TyKind::Infer(InferTy::IntVar(_)) => {
+ use IntTy::*;
+ use UintTy::*;
+ // This causes a compiler error if any new integer kinds are added.
+ let (I8 | I16 | I32 | I64 | I128 | Isize): IntTy;
+ let (U8 | U16 | U32 | U64 | U128 | Usize): UintTy;
+ let possible_integers = [
+ // signed integers
+ SimplifiedType::Int(I8),
+ SimplifiedType::Int(I16),
+ SimplifiedType::Int(I32),
+ SimplifiedType::Int(I64),
+ SimplifiedType::Int(I128),
+ SimplifiedType::Int(Isize),
+ // unsigned integers
+ SimplifiedType::Uint(U8),
+ SimplifiedType::Uint(U16),
+ SimplifiedType::Uint(U32),
+ SimplifiedType::Uint(U64),
+ SimplifiedType::Uint(U128),
+ SimplifiedType::Uint(Usize),
+ ];
+ for simp in possible_integers {
+ consider_impls_for_simplified_type(simp);
+ }
+ }
- f(i.into());
- }
- }
- ControlFlow::Continue(())
- },
- );
+ TyKind::Infer(InferTy::FloatVar(_)) => {
+ // This causes a compiler error if any new float kinds are added.
+ let (FloatTy::F16 | FloatTy::F32 | FloatTy::F64 | FloatTy::F128);
+ let possible_floats = [
+ SimplifiedType::Float(FloatTy::F16),
+ SimplifiedType::Float(FloatTy::F32),
+ SimplifiedType::Float(FloatTy::F64),
+ SimplifiedType::Float(FloatTy::F128),
+ ];
+
+ for simp in possible_floats {
+ consider_impls_for_simplified_type(simp);
+ }
+ }
+
+ // The only traits applying to aliases and placeholders are blanket impls.
+ //
+ // Impls which apply to an alias after normalization are handled by
+ // `assemble_candidates_after_normalizing_self_ty`.
+ TyKind::Alias(_, _) | TyKind::Placeholder(..) | TyKind::Error(_) => (),
+
+ // FIXME: These should ideally not exist as a self type. It would be nice for
+ // the builtin auto trait impls of coroutines to instead directly recurse
+ // into the witness.
+ TyKind::CoroutineWitness(..) => (),
+
+ // These variants should not exist as a self type.
+ TyKind::Infer(
+ InferTy::TyVar(_)
+ | InferTy::FreshTy(_)
+ | InferTy::FreshIntTy(_)
+ | InferTy::FreshFloatTy(_),
+ )
+ | TyKind::Param(_)
+ | TyKind::Bound(_, _) => panic!("unexpected self type: {self_ty:?}"),
}
+
+ self.for_each_blanket_impl(trait_def_id, f)
}
fn for_each_blanket_impl(self, trait_def_id: Self::TraitId, mut f: impl FnMut(Self::ImplId)) {
let Some(krate) = self.krate else { return };
+ let block = trait_def_id.0.loc(self.db).container.containing_block();
- for impls in self.db.trait_impls_in_deps(krate).iter() {
- for impl_id in impls.for_trait(trait_def_id.0) {
- let impl_data = self.db.impl_signature(impl_id);
- let self_ty_ref = &impl_data.store[impl_data.self_ty];
- if matches!(self_ty_ref, hir_def::type_ref::TypeRef::TypeParam(_)) {
- f(impl_id.into());
- }
+ TraitImpls::for_each_crate_and_block(self.db, krate, block, &mut |impls| {
+ for &impl_ in impls.blanket_impls(trait_def_id.0) {
+ f(impl_.into());
}
- }
+ });
}
fn has_item_definition(self, _def_id: Self::DefId) -> bool {
@@ -2145,6 +2221,7 @@
CoroutineIdWrapper,
AdtIdWrapper,
ImplIdWrapper,
+ GeneralConstIdWrapper,
Pattern<'db>,
Safety,
FnAbi,
diff --git a/crates/hir-ty/src/next_solver/predicate.rs b/crates/hir-ty/src/next_solver/predicate.rs
index 3438b75..7cc3af7 100644
--- a/crates/hir-ty/src/next_solver/predicate.rs
+++ b/crates/hir-ty/src/next_solver/predicate.rs
@@ -13,7 +13,7 @@
};
use smallvec::SmallVec;
-use crate::next_solver::{InternedWrapperNoDebug, TraitIdWrapper};
+use crate::next_solver::{GenericArg, InternedWrapperNoDebug, TraitIdWrapper};
use super::{Binder, BoundVarKinds, DbInterner, Region, Ty, interned_vec_db};
@@ -43,6 +43,7 @@
pub type PolyTraitRef<'db> = Binder<'db, TraitRef<'db>>;
pub type PolyExistentialTraitRef<'db> = Binder<'db, ExistentialTraitRef<'db>>;
pub type PolyExistentialProjection<'db> = Binder<'db, ExistentialProjection<'db>>;
+pub type ArgOutlivesPredicate<'db> = OutlivesPredicate<'db, GenericArg<'db>>;
/// Compares via an ordering that will not change if modules are reordered or other changes are
/// made to the tree. In particular, this ordering is preserved across incremental compilations.
diff --git a/crates/hir-ty/src/next_solver/region.rs b/crates/hir-ty/src/next_solver/region.rs
index b5f0e6d..19f3c38 100644
--- a/crates/hir-ty/src/next_solver/region.rs
+++ b/crates/hir-ty/src/next_solver/region.rs
@@ -79,6 +79,10 @@
matches!(self.inner(), RegionKind::ReStatic)
}
+ pub fn is_erased(&self) -> bool {
+ matches!(self.inner(), RegionKind::ReErased)
+ }
+
pub fn is_var(&self) -> bool {
matches!(self.inner(), RegionKind::ReVar(_))
}
diff --git a/crates/hir-ty/src/next_solver/solver.rs b/crates/hir-ty/src/next_solver/solver.rs
index 7b96b40..b5ed770 100644
--- a/crates/hir-ty/src/next_solver/solver.rs
+++ b/crates/hir-ty/src/next_solver/solver.rs
@@ -11,13 +11,10 @@
};
use tracing::debug;
-use crate::{
- ImplTraitId,
- next_solver::{
- AliasTy, CanonicalVarKind, Clause, ClauseKind, CoercePredicate, GenericArgs, ImplIdWrapper,
- ParamEnv, Predicate, PredicateKind, SubtypePredicate, Ty, TyKind, fold::fold_tys,
- util::sizedness_fast_path,
- },
+use crate::next_solver::{
+ AliasTy, CanonicalVarKind, Clause, ClauseKind, CoercePredicate, GenericArgs, ImplIdWrapper,
+ ParamEnv, Predicate, PredicateKind, SubtypePredicate, Ty, TyKind, fold::fold_tys,
+ util::sizedness_fast_path,
};
use super::{
@@ -163,20 +160,7 @@
})
};
- let db = interner.db;
- let (opaques_table, opaque_idx) = match opaque_id.loc(db) {
- ImplTraitId::ReturnTypeImplTrait(func, opaque_idx) => {
- (db.return_type_impl_traits(func), opaque_idx)
- }
- ImplTraitId::TypeAliasImplTrait(type_alias, opaque_idx) => {
- (db.type_alias_impl_traits(type_alias), opaque_idx)
- }
- };
- let item_bounds = opaques_table
- .as_deref()
- .unwrap()
- .as_ref()
- .map_bound(|table| &table.impl_traits[opaque_idx].predicates);
+ let item_bounds = opaque_id.predicates(interner.db);
for predicate in item_bounds.iter_instantiated_copied(interner, args.as_slice()) {
let predicate = replace_opaques_in(predicate);
@@ -249,14 +233,17 @@
_param_env: ParamEnv<'db>,
uv: rustc_type_ir::UnevaluatedConst<Self::Interner>,
) -> Option<<Self::Interner as rustc_type_ir::Interner>::Const> {
- let c = match uv.def {
- SolverDefId::ConstId(c) => GeneralConstId::ConstId(c),
- SolverDefId::StaticId(c) => GeneralConstId::StaticId(c),
- _ => unreachable!(),
- };
- let subst = uv.args;
- let ec = self.cx().db.const_eval(c, subst, None).ok()?;
- Some(ec)
+ match uv.def.0 {
+ GeneralConstId::ConstId(c) => {
+ let subst = uv.args;
+ let ec = self.cx().db.const_eval(c, subst, None).ok()?;
+ Some(ec)
+ }
+ GeneralConstId::StaticId(c) => {
+ let ec = self.cx().db.const_eval_static(c).ok()?;
+ Some(ec)
+ }
+ }
}
fn compute_goal_fast_path(
diff --git a/crates/hir-ty/src/next_solver/ty.rs b/crates/hir-ty/src/next_solver/ty.rs
index b8406fe..58849ce 100644
--- a/crates/hir-ty/src/next_solver/ty.rs
+++ b/crates/hir-ty/src/next_solver/ty.rs
@@ -25,8 +25,8 @@
};
use crate::{
- ImplTraitId,
db::{HirDatabase, InternedCoroutine},
+ lower::GenericPredicates,
next_solver::{
AdtDef, AliasTy, Binder, CallableIdWrapper, Clause, ClauseKind, ClosureIdWrapper, Const,
CoroutineIdWrapper, FnSig, GenericArg, PolyFnSig, Region, TraitRef, TypeAliasIdWrapper,
@@ -41,6 +41,7 @@
util::{FloatExt, IntegerExt},
};
+pub type SimplifiedType = rustc_type_ir::fast_reject::SimplifiedType<SolverDefId>;
pub type TyKind<'db> = rustc_type_ir::TyKind<DbInterner<'db>>;
pub type FnHeader<'db> = rustc_type_ir::FnHeader<DbInterner<'db>>;
@@ -127,6 +128,22 @@
Ty::new_tup(interner, &[])
}
+ pub fn new_imm_ptr(interner: DbInterner<'db>, ty: Ty<'db>) -> Self {
+ Ty::new_ptr(interner, ty, Mutability::Not)
+ }
+
+ pub fn new_imm_ref(interner: DbInterner<'db>, region: Region<'db>, ty: Ty<'db>) -> Self {
+ Ty::new_ref(interner, region, ty, Mutability::Not)
+ }
+
+ pub fn new_opaque(
+ interner: DbInterner<'db>,
+ def_id: SolverDefId,
+ args: GenericArgs<'db>,
+ ) -> Self {
+ Ty::new_alias(interner, AliasTyKind::Opaque, AliasTy::new_from_args(interner, def_id, args))
+ }
+
/// Returns the `Size` for primitive types (bool, uint, int, char, float).
pub fn primitive_size(self, interner: DbInterner<'db>) -> Size {
match self.kind() {
@@ -327,11 +344,40 @@
}
#[inline]
+ pub fn is_bool(self) -> bool {
+ matches!(self.kind(), TyKind::Bool)
+ }
+
+ /// A scalar type is one that denotes an atomic datum, with no sub-components.
+ /// (A RawPtr is scalar because it represents a non-managed pointer, so its
+ /// contents are abstract to rustc.)
+ #[inline]
+ pub fn is_scalar(self) -> bool {
+ matches!(
+ self.kind(),
+ TyKind::Bool
+ | TyKind::Char
+ | TyKind::Int(_)
+ | TyKind::Float(_)
+ | TyKind::Uint(_)
+ | TyKind::FnDef(..)
+ | TyKind::FnPtr(..)
+ | TyKind::RawPtr(_, _)
+ | TyKind::Infer(InferTy::IntVar(_) | InferTy::FloatVar(_))
+ )
+ }
+
+ #[inline]
pub fn is_infer(self) -> bool {
matches!(self.kind(), TyKind::Infer(..))
}
#[inline]
+ pub fn is_numeric(self) -> bool {
+ self.is_integral() || self.is_floating_point()
+ }
+
+ #[inline]
pub fn is_str(self) -> bool {
matches!(self.kind(), TyKind::Str)
}
@@ -346,10 +392,27 @@
matches!(self.kind(), TyKind::RawPtr(..))
}
+ #[inline]
+ pub fn is_array(self) -> bool {
+ matches!(self.kind(), TyKind::Array(..))
+ }
+
+ #[inline]
+ pub fn is_slice(self) -> bool {
+ matches!(self.kind(), TyKind::Slice(..))
+ }
+
pub fn is_union(self) -> bool {
self.as_adt().is_some_and(|(adt, _)| matches!(adt, AdtId::UnionId(_)))
}
+ pub fn boxed_ty(self) -> Option<Ty<'db>> {
+ match self.kind() {
+ TyKind::Adt(adt_def, args) if adt_def.is_box() => Some(args.type_at(0)),
+ _ => None,
+ }
+ }
+
#[inline]
pub fn as_adt(self) -> Option<(AdtId, GenericArgs<'db>)> {
match self.kind() {
@@ -378,11 +441,9 @@
///
/// The parameter `explicit` indicates if this is an *explicit* dereference.
/// Some types -- notably raw ptrs -- can only be dereferenced explicitly.
- pub fn builtin_deref(self, db: &dyn HirDatabase, explicit: bool) -> Option<Ty<'db>> {
+ pub fn builtin_deref(self, explicit: bool) -> Option<Ty<'db>> {
match self.kind() {
- TyKind::Adt(adt, substs) if crate::lang_items::is_box(db, adt.def_id().0) => {
- Some(substs.as_slice()[0].expect_ty())
- }
+ TyKind::Adt(adt, substs) if adt.is_box() => Some(substs.as_slice()[0].expect_ty()),
TyKind::Ref(_, ty, _) => Some(ty),
TyKind::RawPtr(ty, _) if explicit => Some(ty),
_ => None,
@@ -562,26 +623,14 @@
let interner = DbInterner::new_with(db, None, None);
match self.kind() {
- TyKind::Alias(AliasTyKind::Opaque, opaque_ty) => {
- match db.lookup_intern_impl_trait_id(opaque_ty.def_id.expect_opaque_ty()) {
- ImplTraitId::ReturnTypeImplTrait(func, idx) => {
- db.return_type_impl_traits(func).map(|it| {
- let data =
- (*it).as_ref().map_bound(|rpit| &rpit.impl_traits[idx].predicates);
- data.iter_instantiated_copied(interner, opaque_ty.args.as_slice())
- .collect()
- })
- }
- ImplTraitId::TypeAliasImplTrait(alias, idx) => {
- db.type_alias_impl_traits(alias).map(|it| {
- let data =
- (*it).as_ref().map_bound(|rpit| &rpit.impl_traits[idx].predicates);
- data.iter_instantiated_copied(interner, opaque_ty.args.as_slice())
- .collect()
- })
- }
- }
- }
+ TyKind::Alias(AliasTyKind::Opaque, opaque_ty) => Some(
+ opaque_ty
+ .def_id
+ .expect_opaque_ty()
+ .predicates(db)
+ .iter_instantiated_copied(interner, opaque_ty.args.as_slice())
+ .collect(),
+ ),
TyKind::Param(param) => {
// FIXME: We shouldn't use `param.id` here.
let generic_params = db.generic_params(param.id.parent());
@@ -589,11 +638,8 @@
match param_data {
TypeOrConstParamData::TypeParamData(p) => match p.provenance {
TypeParamProvenance::ArgumentImplTrait => {
- let predicates = db
- .generic_predicates(param.id.parent())
- .instantiate_identity()
- .into_iter()
- .flatten()
+ let predicates = GenericPredicates::query_all(db, param.id.parent())
+ .iter_identity_copied()
.filter(|wc| match wc.kind().skip_binder() {
ClauseKind::Trait(tr) => tr.self_ty() == self,
ClauseKind::Projection(pred) => pred.self_ty() == self,
diff --git a/crates/hir-ty/src/next_solver/util.rs b/crates/hir-ty/src/next_solver/util.rs
index d113f76..972c8e2 100644
--- a/crates/hir-ty/src/next_solver/util.rs
+++ b/crates/hir-ty/src/next_solver/util.rs
@@ -1,39 +1,30 @@
//! Various utilities for the next-trait-solver.
-use std::{
- iter,
- ops::{self, ControlFlow},
-};
+use std::ops::ControlFlow;
-use base_db::Crate;
-use hir_def::{BlockId, HasModule, lang_item::LangItem};
-use la_arena::Idx;
+use hir_def::TraitId;
use rustc_abi::{Float, HasDataLayout, Integer, IntegerType, Primitive, ReprOptions};
use rustc_type_ir::{
ConstKind, CoroutineArgs, DebruijnIndex, FloatTy, INNERMOST, IntTy, Interner,
PredicatePolarity, RegionKind, TypeFoldable, TypeFolder, TypeSuperFoldable, TypeSuperVisitable,
- TypeVisitableExt, TypeVisitor, UintTy, UniverseIndex,
- inherent::{
- AdtDef, GenericArg as _, GenericArgs as _, IntoKind, ParamEnv as _, SliceLike, Ty as _,
- },
+ TypeVisitableExt, TypeVisitor, UintTy, UniverseIndex, elaborate,
+ inherent::{AdtDef, GenericArg as _, IntoKind, ParamEnv as _, SliceLike, Ty as _},
lang_items::SolverTraitLangItem,
solve::SizedTraitKind,
};
-use crate::{
- db::HirDatabase,
- lower::{LifetimeElisionKind, TyLoweringContext},
- method_resolution::{TraitImpls, TyFingerprint},
- next_solver::{
- BoundConst, FxIndexMap, ParamEnv, Placeholder, PlaceholderConst, PlaceholderRegion,
- infer::InferCtxt,
+use crate::next_solver::{
+ BoundConst, FxIndexMap, ParamEnv, Placeholder, PlaceholderConst, PlaceholderRegion,
+ PolyTraitRef,
+ infer::{
+ InferCtxt,
+ traits::{Obligation, ObligationCause, PredicateObligation},
},
};
use super::{
- Binder, BoundRegion, BoundTy, Clause, ClauseKind, Clauses, Const, DbInterner, EarlyBinder,
- GenericArgs, Predicate, PredicateKind, Region, SolverDefId, TraitPredicate, TraitRef, Ty,
- TyKind,
+ Binder, BoundRegion, BoundTy, Clause, ClauseKind, Const, DbInterner, EarlyBinder, GenericArgs,
+ Predicate, PredicateKind, Region, SolverDefId, Ty, TyKind,
fold::{BoundVarReplacer, FnMutDelegate},
};
@@ -388,54 +379,6 @@
}
}
-pub(crate) fn for_trait_impls(
- db: &dyn HirDatabase,
- krate: Crate,
- block: Option<BlockId>,
- trait_id: hir_def::TraitId,
- self_ty_fp: Option<TyFingerprint>,
- mut f: impl FnMut(&TraitImpls) -> ControlFlow<()>,
-) -> ControlFlow<()> {
- // Note: Since we're using `impls_for_trait` and `impl_provided_for`,
- // only impls where the trait can be resolved should ever reach Chalk.
- // `impl_datum` relies on that and will panic if the trait can't be resolved.
- let in_self_and_deps = db.trait_impls_in_deps(krate);
- let trait_module = trait_id.module(db);
- let type_module = match self_ty_fp {
- Some(TyFingerprint::Adt(adt_id)) => Some(adt_id.module(db)),
- Some(TyFingerprint::ForeignType(type_id)) => Some(type_id.module(db)),
- Some(TyFingerprint::Dyn(trait_id)) => Some(trait_id.module(db)),
- _ => None,
- };
-
- let mut def_blocks =
- [trait_module.containing_block(), type_module.and_then(|it| it.containing_block())];
-
- let block_impls = iter::successors(block, |&block_id| {
- cov_mark::hit!(block_local_impls);
- block_id.loc(db).module.containing_block()
- })
- .inspect(|&block_id| {
- // make sure we don't search the same block twice
- def_blocks.iter_mut().for_each(|block| {
- if *block == Some(block_id) {
- *block = None;
- }
- });
- })
- .filter_map(|block_id| db.trait_impls_in_block(block_id));
- for it in in_self_and_deps.iter().map(ops::Deref::deref) {
- f(it)?;
- }
- for it in block_impls {
- f(&it)?;
- }
- for it in def_blocks.into_iter().flatten().filter_map(|it| db.trait_impls_in_block(it)) {
- f(&it)?;
- }
- ControlFlow::Continue(())
-}
-
// FIXME(next-trait-solver): uplift
pub fn sizedness_constraint_for_ty<'db>(
interner: DbInterner<'db>,
@@ -507,79 +450,14 @@
pub fn explicit_item_bounds<'db>(
interner: DbInterner<'db>,
def_id: SolverDefId,
-) -> EarlyBinder<'db, Clauses<'db>> {
+) -> EarlyBinder<'db, impl DoubleEndedIterator<Item = Clause<'db>> + ExactSizeIterator> {
let db = interner.db();
- match def_id {
- SolverDefId::TypeAliasId(type_alias) => {
- // Lower bounds -- we could/should maybe move this to a separate query in `lower`
- let type_alias_data = db.type_alias_signature(type_alias);
- let resolver = hir_def::resolver::HasResolver::resolver(type_alias, db);
- let mut ctx = TyLoweringContext::new(
- db,
- &resolver,
- &type_alias_data.store,
- type_alias.into(),
- LifetimeElisionKind::AnonymousReportError,
- );
-
- let item_args = GenericArgs::identity_for_item(interner, def_id);
- let interner_ty = Ty::new_projection_from_args(interner, def_id, item_args);
-
- let mut bounds = Vec::new();
- for bound in &type_alias_data.bounds {
- ctx.lower_type_bound(bound, interner_ty, false).for_each(|pred| {
- bounds.push(pred);
- });
- }
-
- if !ctx.unsized_types.contains(&interner_ty) {
- let sized_trait = LangItem::Sized
- .resolve_trait(ctx.db, interner.krate.expect("Must have interner.krate"));
- let sized_bound = sized_trait.map(|trait_id| {
- let trait_ref = TraitRef::new_from_args(
- interner,
- trait_id.into(),
- GenericArgs::new_from_iter(interner, [interner_ty.into()]),
- );
- Clause(Predicate::new(
- interner,
- Binder::dummy(rustc_type_ir::PredicateKind::Clause(
- rustc_type_ir::ClauseKind::Trait(TraitPredicate {
- trait_ref,
- polarity: rustc_type_ir::PredicatePolarity::Positive,
- }),
- )),
- ))
- });
- bounds.extend(sized_bound);
- bounds.shrink_to_fit();
- }
-
- rustc_type_ir::EarlyBinder::bind(Clauses::new_from_iter(interner, bounds))
- }
- SolverDefId::InternedOpaqueTyId(id) => {
- let full_id = db.lookup_intern_impl_trait_id(id);
- match full_id {
- crate::ImplTraitId::ReturnTypeImplTrait(func, idx) => {
- let datas = db
- .return_type_impl_traits(func)
- .expect("impl trait id without impl traits");
- let datas = (*datas).as_ref().skip_binder();
- let data = &datas.impl_traits[Idx::from_raw(idx.into_raw())];
- EarlyBinder::bind(Clauses::new_from_iter(interner, data.predicates.clone()))
- }
- crate::ImplTraitId::TypeAliasImplTrait(alias, idx) => {
- let datas = db
- .type_alias_impl_traits(alias)
- .expect("impl trait id without impl traits");
- let datas = (*datas).as_ref().skip_binder();
- let data = &datas.impl_traits[Idx::from_raw(idx.into_raw())];
- EarlyBinder::bind(Clauses::new_from_iter(interner, data.predicates.clone()))
- }
- }
- }
+ let clauses = match def_id {
+ SolverDefId::TypeAliasId(type_alias) => crate::lower::type_alias_bounds(db, type_alias),
+ SolverDefId::InternedOpaqueTyId(id) => id.predicates(db),
_ => panic!("Unexpected GenericDefId"),
- }
+ };
+ clauses.map_bound(|clauses| clauses.iter().copied())
}
pub struct ContainsTypeErrors;
@@ -792,3 +670,34 @@
false
}
+
+/// Casts a trait reference into a reference to one of its super
+/// traits; returns `None` if `target_trait_def_id` is not a
+/// supertrait.
+pub(crate) fn upcast_choices<'db>(
+ interner: DbInterner<'db>,
+ source_trait_ref: PolyTraitRef<'db>,
+ target_trait_def_id: TraitId,
+) -> Vec<PolyTraitRef<'db>> {
+ if source_trait_ref.def_id().0 == target_trait_def_id {
+ return vec![source_trait_ref]; // Shortcut the most common case.
+ }
+
+ elaborate::supertraits(interner, source_trait_ref)
+ .filter(|r| r.def_id().0 == target_trait_def_id)
+ .collect()
+}
+
+#[inline]
+pub(crate) fn clauses_as_obligations<'db>(
+ clauses: impl IntoIterator<Item = Clause<'db>>,
+ cause: ObligationCause,
+ param_env: ParamEnv<'db>,
+) -> impl Iterator<Item = PredicateObligation<'db>> {
+ clauses.into_iter().map(move |clause| Obligation {
+ cause: cause.clone(),
+ param_env,
+ predicate: clause.as_predicate(),
+ recursion_depth: 0,
+ })
+}
diff --git a/crates/hir-ty/src/opaques.rs b/crates/hir-ty/src/opaques.rs
index 8531f24..acf532c 100644
--- a/crates/hir-ty/src/opaques.rs
+++ b/crates/hir-ty/src/opaques.rs
@@ -7,7 +7,6 @@
use la_arena::ArenaMap;
use rustc_type_ir::inherent::Ty as _;
use syntax::ast;
-use triomphe::Arc;
use crate::{
ImplTraitId,
@@ -29,7 +28,7 @@
// A function may define its own RPITs.
extend_with_opaques(
db,
- db.return_type_impl_traits(func),
+ ImplTraits::return_type_impl_traits(db, func),
|opaque_idx| ImplTraitId::ReturnTypeImplTrait(func, opaque_idx),
result,
);
@@ -38,7 +37,7 @@
let extend_with_taits = |type_alias| {
extend_with_opaques(
db,
- db.type_alias_impl_traits(type_alias),
+ ImplTraits::type_alias_impl_traits(db, type_alias),
|opaque_idx| ImplTraitId::TypeAliasImplTrait(type_alias, opaque_idx),
result,
);
@@ -75,12 +74,12 @@
fn extend_with_opaques<'db>(
db: &'db dyn HirDatabase,
- opaques: Option<Arc<EarlyBinder<'db, ImplTraits<'db>>>>,
+ opaques: &Option<Box<EarlyBinder<'db, ImplTraits<'db>>>>,
mut make_impl_trait: impl FnMut(ImplTraitIdx<'db>) -> ImplTraitId<'db>,
result: &mut Vec<SolverDefId>,
) {
if let Some(opaques) = opaques {
- for (opaque_idx, _) in (*opaques).as_ref().skip_binder().impl_traits.iter() {
+ for (opaque_idx, _) in (**opaques).as_ref().skip_binder().impl_traits.iter() {
let opaque_id = InternedOpaqueTyId::new(db, make_impl_trait(opaque_idx));
result.push(opaque_id.into());
}
@@ -109,6 +108,14 @@
db: &'db dyn HirDatabase,
type_alias: TypeAliasId,
) -> ArenaMap<ImplTraitIdx<'db>, EarlyBinder<'db, Ty<'db>>> {
+ // Call this first, to not perform redundant work if there are no TAITs.
+ let Some(taits_count) = ImplTraits::type_alias_impl_traits(db, type_alias)
+ .as_deref()
+ .map(|taits| taits.as_ref().skip_binder().impl_traits.len())
+ else {
+ return ArenaMap::new();
+ };
+
let loc = type_alias.loc(db);
let module = loc.module(db);
let interner = DbInterner::new_with(db, Some(module.krate()), module.containing_block());
@@ -119,10 +126,6 @@
let defining_bodies = tait_defining_bodies(db, &loc);
- let taits_count = db
- .type_alias_impl_traits(type_alias)
- .map_or(0, |taits| (*taits).as_ref().skip_binder().impl_traits.len());
-
let mut result = ArenaMap::with_capacity(taits_count);
for defining_body in defining_bodies {
let infer = db.infer(defining_body);
diff --git a/crates/hir-ty/src/specialization.rs b/crates/hir-ty/src/specialization.rs
index f4ee4de..304679d 100644
--- a/crates/hir-ty/src/specialization.rs
+++ b/crates/hir-ty/src/specialization.rs
@@ -2,17 +2,17 @@
use hir_def::{ImplId, nameres::crate_def_map};
use intern::sym;
+use rustc_type_ir::inherent::SliceLike;
use tracing::debug;
use crate::{
db::HirDatabase,
+ lower::GenericPredicates,
next_solver::{
DbInterner, TypingMode,
- infer::{
- DbInternerInferExt,
- traits::{Obligation, ObligationCause},
- },
+ infer::{DbInternerInferExt, traits::ObligationCause},
obligation_ctxt::ObligationCtxt,
+ util::clauses_as_obligations,
},
};
@@ -102,14 +102,12 @@
// Now check that the source trait ref satisfies all the where clauses of the target impl.
// This is not just for correctness; we also need this to constrain any params that may
// only be referenced via projection predicates.
- if let Some(predicates) =
- db.generic_predicates(parent_impl_def_id.into()).instantiate(interner, parent_args)
- {
- ocx.register_obligations(
- predicates
- .map(|predicate| Obligation::new(interner, cause.clone(), param_env, predicate)),
- );
- }
+ ocx.register_obligations(clauses_as_obligations(
+ GenericPredicates::query_all(db, parent_impl_def_id.into())
+ .iter_instantiated_copied(interner, parent_args.as_slice()),
+ cause.clone(),
+ param_env,
+ ));
let errors = ocx.evaluate_obligations_error_on_ambiguity();
if !errors.is_empty() {
diff --git a/crates/hir-ty/src/tests/coercion.rs b/crates/hir-ty/src/tests/coercion.rs
index 5a53db4..36630ab 100644
--- a/crates/hir-ty/src/tests/coercion.rs
+++ b/crates/hir-ty/src/tests/coercion.rs
@@ -49,7 +49,7 @@
//- minicore: coerce_unsized
fn test() {
let x: &[isize] = &[1];
- // ^^^^ adjustments: Deref(None), Borrow(Ref('?2, Not)), Pointer(Unsize)
+ // ^^^^ adjustments: Deref(None), Borrow(Ref(Not)), Pointer(Unsize)
let x: *const [isize] = &[1];
// ^^^^ adjustments: Deref(None), Borrow(RawPtr(Not)), Pointer(Unsize)
}
@@ -88,6 +88,47 @@
}
#[test]
+fn unsized_from_keeps_type_info() {
+ check_types(
+ r#"
+//- minicore: coerce_unsized, from
+use core::{marker::Unsize, ops::CoerceUnsized};
+
+struct MyBox<T: ?Sized> {
+ ptr: *const T,
+}
+
+impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<MyBox<U>> for MyBox<T> {}
+
+struct MyRc<T: ?Sized> {
+ ptr: *const T,
+}
+
+impl<T: ?Sized> core::convert::From<MyBox<T>> for MyRc<T> {
+ fn from(_: MyBox<T>) -> MyRc<T> {
+ loop {}
+ }
+}
+
+fn make_box() -> MyBox<[i32; 2]> {
+ loop {}
+}
+
+fn take<T: ?Sized>(value: MyRc<T>) -> MyRc<T> {
+ value
+}
+
+fn test() {
+ let boxed: MyBox<[i32]> = make_box();
+ let rc = MyRc::from(boxed);
+ //^^ MyRc<[i32]>
+ let _: MyRc<[i32]> = take(rc);
+}
+"#,
+ );
+}
+
+#[test]
fn if_coerce() {
check_no_mismatches(
r#"
@@ -96,7 +137,7 @@
fn test() {
let x = if true {
foo(&[1])
- // ^^^^ adjustments: Deref(None), Borrow(Ref('?1, Not)), Pointer(Unsize)
+ // ^^^^ adjustments: Deref(None), Borrow(Ref(Not)), Pointer(Unsize)
} else {
&[1]
};
@@ -148,7 +189,7 @@
fn test(i: i32) {
let x = match i {
2 => foo(&[2]),
- // ^^^^ adjustments: Deref(None), Borrow(Ref('?1, Not)), Pointer(Unsize)
+ // ^^^^ adjustments: Deref(None), Borrow(Ref(Not)), Pointer(Unsize)
1 => &[1],
_ => &[3],
};
@@ -268,7 +309,7 @@
fn returns_string() -> String { loop {} }
fn test() {
takes_ref_str(&{ returns_string() });
- // ^^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(None), Deref(Some(OverloadedDeref(Some(Not)))), Borrow(Ref('{region error}, Not))
+ // ^^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(None), Deref(Some(OverloadedDeref(Some(Not)))), Borrow(Ref(Not))
}
"#,
);
@@ -833,11 +874,11 @@
fn main() {
let a: V<&dyn Tr>;
(a,) = V { t: &S };
- //^^^^expected V<&'? S>, got (V<&'? (dyn Tr + 'static)>,)
+ //^^^^expected V<&'? S>, got (V<&'? (dyn Tr + '?)>,)
let mut a: V<&dyn Tr> = V { t: &S };
(a,) = V { t: &S };
- //^^^^expected V<&'? S>, got (V<&'? (dyn Tr + 'static)>,)
+ //^^^^expected V<&'? S>, got (V<&'? (dyn Tr + '?)>,)
}
"#,
);
@@ -854,8 +895,8 @@
}
fn test() {
Struct == Struct;
- // ^^^^^^ adjustments: Borrow(Ref('{region error}, Not))
- // ^^^^^^ adjustments: Borrow(Ref('{region error}, Not))
+ // ^^^^^^ adjustments: Borrow(Ref(Not))
+ // ^^^^^^ adjustments: Borrow(Ref(Not))
}",
);
}
@@ -871,7 +912,7 @@
}
fn test() {
Struct += Struct;
- // ^^^^^^ adjustments: Borrow(Ref('{region error}, Mut))
+ // ^^^^^^ adjustments: Borrow(Ref(Mut { allow_two_phase_borrow: Yes }))
// ^^^^^^ adjustments:
}",
);
@@ -885,7 +926,7 @@
fn test() {
let x = [1, 2, 3];
x[2] = 6;
- // ^ adjustments: Borrow(Ref('?0, Mut))
+ // ^ adjustments: Borrow(Ref(Mut { allow_two_phase_borrow: No }))
}
",
);
@@ -910,11 +951,11 @@
}
fn test() {
Struct[0];
- // ^^^^^^ adjustments: Borrow(Ref('?0, Not))
+ // ^^^^^^ adjustments: Borrow(Ref(Not))
StructMut[0];
- // ^^^^^^^^^ adjustments: Borrow(Ref('?1, Not))
+ // ^^^^^^^^^ adjustments: Borrow(Ref(Not))
&mut StructMut[0];
- // ^^^^^^^^^ adjustments: Borrow(Ref('?2, Mut))
+ // ^^^^^^^^^ adjustments: Borrow(Ref(Mut { allow_two_phase_borrow: No }))
}",
);
}
diff --git a/crates/hir-ty/src/tests/incremental.rs b/crates/hir-ty/src/tests/incremental.rs
index 14ec161..e98e5e4 100644
--- a/crates/hir-ty/src/tests/incremental.rs
+++ b/crates/hir-ty/src/tests/incremental.rs
@@ -4,7 +4,7 @@
use salsa::EventKind;
use test_fixture::WithFixture;
-use crate::{db::HirDatabase, test_db::TestDB};
+use crate::{db::HirDatabase, method_resolution::TraitImpls, test_db::TestDB};
use super::visit_module;
@@ -44,10 +44,12 @@
"body_shim",
"body_with_source_map_shim",
"trait_environment_shim",
- "return_type_impl_traits_shim",
- "expr_scopes_shim",
+ "GenericPredicates < 'db >::query_with_diagnostics_",
"lang_item",
"crate_lang_items",
+ "ImplTraits < 'db >::return_type_impl_traits_",
+ "expr_scopes_shim",
+ "lang_item",
]
"#]],
);
@@ -131,19 +133,22 @@
"body_shim",
"body_with_source_map_shim",
"trait_environment_shim",
- "return_type_impl_traits_shim",
- "expr_scopes_shim",
+ "GenericPredicates < 'db >::query_with_diagnostics_",
"lang_item",
"crate_lang_items",
"attrs_shim",
"attrs_shim",
+ "ImplTraits < 'db >::return_type_impl_traits_",
+ "expr_scopes_shim",
+ "lang_item",
"infer_shim",
"function_signature_shim",
"function_signature_with_source_map_shim",
"body_shim",
"body_with_source_map_shim",
"trait_environment_shim",
- "return_type_impl_traits_shim",
+ "GenericPredicates < 'db >::query_with_diagnostics_",
+ "ImplTraits < 'db >::return_type_impl_traits_",
"expr_scopes_shim",
"infer_shim",
"function_signature_shim",
@@ -151,7 +156,8 @@
"body_shim",
"body_with_source_map_shim",
"trait_environment_shim",
- "return_type_impl_traits_shim",
+ "GenericPredicates < 'db >::query_with_diagnostics_",
+ "ImplTraits < 'db >::return_type_impl_traits_",
"expr_scopes_shim",
]
"#]],
@@ -230,9 +236,9 @@
|| {
let module = db.module_for_file(pos.file_id.file_id(&db));
let _crate_def_map = module.def_map(&db);
- db.trait_impls_in_crate(module.krate());
+ TraitImpls::for_crate(&db, module.krate());
},
- &[("trait_impls_in_crate_shim", 1)],
+ &[("TraitImpls::for_crate_", 1)],
expect_test::expect![[r#"
[
"source_root_crates_shim",
@@ -241,7 +247,7 @@
"ast_id_map_shim",
"parse_shim",
"real_span_map_shim",
- "trait_impls_in_crate_shim",
+ "TraitImpls::for_crate_",
]
"#]],
);
@@ -267,9 +273,9 @@
|| {
let module = db.module_for_file(pos.file_id.file_id(&db));
let _crate_def_map = module.def_map(&db);
- db.trait_impls_in_crate(module.krate());
+ TraitImpls::for_crate(&db, module.krate());
},
- &[("trait_impls_in_crate_shim", 1)],
+ &[("TraitImpls::for_crate_", 1)],
expect_test::expect![[r#"
[
"parse_shim",
@@ -277,7 +283,7 @@
"file_item_tree_query",
"real_span_map_shim",
"crate_local_def_map",
- "trait_impls_in_crate_shim",
+ "TraitImpls::for_crate_",
]
"#]],
);
@@ -302,9 +308,9 @@
|| {
let module = db.module_for_file(pos.file_id.file_id(&db));
let _crate_def_map = module.def_map(&db);
- db.trait_impls_in_crate(module.krate());
+ TraitImpls::for_crate(&db, module.krate());
},
- &[("trait_impls_in_crate_shim", 1)],
+ &[("TraitImpls::for_crate_", 1)],
expect_test::expect![[r#"
[
"source_root_crates_shim",
@@ -313,7 +319,7 @@
"ast_id_map_shim",
"parse_shim",
"real_span_map_shim",
- "trait_impls_in_crate_shim",
+ "TraitImpls::for_crate_",
]
"#]],
);
@@ -340,9 +346,9 @@
|| {
let module = db.module_for_file(pos.file_id.file_id(&db));
let _crate_def_map = module.def_map(&db);
- db.trait_impls_in_crate(module.krate());
+ TraitImpls::for_crate(&db, module.krate());
},
- &[("trait_impls_in_crate_shim", 1)],
+ &[("TraitImpls::for_crate_", 1)],
expect_test::expect![[r#"
[
"parse_shim",
@@ -350,7 +356,7 @@
"file_item_tree_query",
"real_span_map_shim",
"crate_local_def_map",
- "trait_impls_in_crate_shim",
+ "TraitImpls::for_crate_",
]
"#]],
);
@@ -375,9 +381,9 @@
|| {
let module = db.module_for_file(pos.file_id.file_id(&db));
let _crate_def_map = module.def_map(&db);
- db.trait_impls_in_crate(module.krate());
+ TraitImpls::for_crate(&db, module.krate());
},
- &[("trait_impls_in_crate_shim", 1)],
+ &[("TraitImpls::for_crate_", 1)],
expect_test::expect![[r#"
[
"source_root_crates_shim",
@@ -386,7 +392,7 @@
"ast_id_map_shim",
"parse_shim",
"real_span_map_shim",
- "trait_impls_in_crate_shim",
+ "TraitImpls::for_crate_",
]
"#]],
);
@@ -410,9 +416,9 @@
|| {
let module = db.module_for_file(pos.file_id.file_id(&db));
let _crate_def_map = module.def_map(&db);
- db.trait_impls_in_crate(module.krate());
+ TraitImpls::for_crate(&db, module.krate());
},
- &[("trait_impls_in_crate_shim", 1)],
+ &[("TraitImpls::for_crate_", 1)],
expect_test::expect![[r#"
[
"parse_shim",
@@ -420,7 +426,7 @@
"file_item_tree_query",
"real_span_map_shim",
"crate_local_def_map",
- "trait_impls_in_crate_shim",
+ "TraitImpls::for_crate_",
]
"#]],
);
@@ -449,9 +455,9 @@
|| {
let module = db.module_for_file(pos.file_id.file_id(&db));
let _crate_def_map = module.def_map(&db);
- db.trait_impls_in_crate(module.krate());
+ TraitImpls::for_crate(&db, module.krate());
},
- &[("trait_impls_in_crate_shim", 1)],
+ &[("TraitImpls::for_crate_", 1)],
expect_test::expect![[r#"
[
"source_root_crates_shim",
@@ -460,7 +466,7 @@
"ast_id_map_shim",
"parse_shim",
"real_span_map_shim",
- "trait_impls_in_crate_shim",
+ "TraitImpls::for_crate_",
]
"#]],
);
@@ -492,9 +498,9 @@
|| {
let module = db.module_for_file(pos.file_id.file_id(&db));
let _crate_def_map = module.def_map(&db);
- db.trait_impls_in_crate(module.krate());
+ TraitImpls::for_crate(&db, module.krate());
},
- &[("trait_impls_in_crate_shim", 1)],
+ &[("TraitImpls::for_crate_", 1)],
expect_test::expect![[r#"
[
"parse_shim",
@@ -502,7 +508,7 @@
"file_item_tree_query",
"real_span_map_shim",
"crate_local_def_map",
- "trait_impls_in_crate_shim",
+ "TraitImpls::for_crate_",
"attrs_shim",
"impl_trait_with_diagnostics_shim",
"impl_signature_shim",
@@ -581,37 +587,37 @@
"body_shim",
"body_with_source_map_shim",
"trait_environment_shim",
+ "GenericPredicates < 'db >::query_with_diagnostics_",
"lang_item",
"crate_lang_items",
"attrs_shim",
"attrs_shim",
- "generic_predicates_shim",
- "return_type_impl_traits_shim",
+ "GenericPredicates < 'db >::query_with_diagnostics_",
+ "ImplTraits < 'db >::return_type_impl_traits_",
"infer_shim",
"function_signature_shim",
"function_signature_with_source_map_shim",
"trait_environment_shim",
- "return_type_impl_traits_shim",
+ "GenericPredicates < 'db >::query_with_diagnostics_",
+ "ImplTraits < 'db >::return_type_impl_traits_",
"expr_scopes_shim",
"struct_signature_shim",
"struct_signature_with_source_map_shim",
- "generic_predicates_shim",
+ "GenericPredicates < 'db >::query_with_diagnostics_",
"value_ty_shim",
"VariantFields::firewall_",
"VariantFields::query_",
"lang_item",
- "lang_item",
- "inherent_impls_in_crate_shim",
+ "InherentImpls::for_crate_",
"impl_signature_shim",
"impl_signature_with_source_map_shim",
"callable_item_signature_shim",
- "trait_impls_in_deps_shim",
- "trait_impls_in_crate_shim",
+ "TraitImpls::for_crate_and_deps_",
+ "TraitImpls::for_crate_",
"impl_trait_with_diagnostics_shim",
"impl_self_ty_with_diagnostics_shim",
- "generic_predicates_shim",
- "value_ty_shim",
- "generic_predicates_shim",
+ "GenericPredicates < 'db >::query_with_diagnostics_",
+ "lang_item",
]
"#]],
);
@@ -678,29 +684,29 @@
"function_signature_shim",
"body_with_source_map_shim",
"body_shim",
- "trait_environment_shim",
+ "GenericPredicates < 'db >::query_with_diagnostics_",
"crate_lang_items",
"attrs_shim",
"attrs_shim",
"attrs_shim",
- "generic_predicates_shim",
- "return_type_impl_traits_shim",
+ "GenericPredicates < 'db >::query_with_diagnostics_",
+ "ImplTraits < 'db >::return_type_impl_traits_",
"infer_shim",
"function_signature_with_source_map_shim",
- "return_type_impl_traits_shim",
+ "GenericPredicates < 'db >::query_with_diagnostics_",
+ "ImplTraits < 'db >::return_type_impl_traits_",
"expr_scopes_shim",
"struct_signature_with_source_map_shim",
- "generic_predicates_shim",
+ "GenericPredicates < 'db >::query_with_diagnostics_",
"VariantFields::query_",
- "inherent_impls_in_crate_shim",
+ "InherentImpls::for_crate_",
"impl_signature_with_source_map_shim",
"impl_signature_shim",
"callable_item_signature_shim",
- "trait_impls_in_crate_shim",
+ "TraitImpls::for_crate_",
"impl_trait_with_diagnostics_shim",
"impl_self_ty_with_diagnostics_shim",
- "generic_predicates_shim",
- "generic_predicates_shim",
+ "GenericPredicates < 'db >::query_with_diagnostics_",
]
"#]],
);
diff --git a/crates/hir-ty/src/tests/method_resolution.rs b/crates/hir-ty/src/tests/method_resolution.rs
index b0afd60..274d33a 100644
--- a/crates/hir-ty/src/tests/method_resolution.rs
+++ b/crates/hir-ty/src/tests/method_resolution.rs
@@ -8,6 +8,7 @@
fn infer_slice_method() {
check_types(
r#"
+//- /core.rs crate:core
impl<T> [T] {
#[rustc_allow_incoherent_impl]
fn foo(&self) -> T {
@@ -27,13 +28,13 @@
fn cross_crate_primitive_method() {
check_types(
r#"
-//- /main.rs crate:main deps:other_crate
+//- /main.rs crate:main deps:core
fn test() {
let x = 1f32;
x.foo();
} //^^^^^^^ f32
-//- /lib.rs crate:other_crate
+//- /lib.rs crate:core
mod foo {
impl f32 {
#[rustc_allow_incoherent_impl]
@@ -48,6 +49,7 @@
fn infer_array_inherent_impl() {
check_types(
r#"
+//- /core.rs crate:core
impl<T, const N: usize> [T; N] {
#[rustc_allow_incoherent_impl]
fn foo(&self) -> T {
@@ -981,7 +983,6 @@
#[test]
fn method_resolution_overloaded_const() {
- cov_mark::check!(const_candidate_self_type_mismatch);
check_types(
r#"
struct Wrapper<T>(T);
@@ -1376,7 +1377,6 @@
#[test]
fn autoderef_visibility_method() {
- cov_mark::check!(autoderef_candidate_not_visible);
check(
r#"
//- minicore: receiver
@@ -1415,7 +1415,6 @@
#[test]
fn trait_vs_private_inherent_const() {
- cov_mark::check!(const_candidate_not_visible);
check(
r#"
mod a {
@@ -1505,6 +1504,7 @@
fn resolve_const_generic_array_methods() {
check_types(
r#"
+//- /core.rs crate:core
#[lang = "array"]
impl<T, const N: usize> [T; N] {
#[rustc_allow_incoherent_impl]
@@ -1536,6 +1536,7 @@
fn resolve_const_generic_method() {
check_types(
r#"
+//- /core.rs crate:core
struct Const<const N: usize>;
#[lang = "array"]
@@ -1714,8 +1715,8 @@
95..103 'u32::foo': fn foo<u32>() -> u8
109..115 'S::foo': fn foo<S>() -> u8
121..127 'T::foo': fn foo<T>() -> u8
- 133..139 'U::foo': {unknown}
- 145..157 '<[u32]>::foo': {unknown}
+ 133..139 'U::foo': fn foo<U>() -> u8
+ 145..157 '<[u32]>::foo': fn foo<[u32]>() -> u8
"#]],
);
}
@@ -1869,6 +1870,7 @@
"#,
);
}
+
#[test]
fn receiver_adjustment_autoref() {
check(
@@ -1879,9 +1881,9 @@
}
fn test() {
Foo.foo();
- //^^^ adjustments: Borrow(Ref('?0, Not))
+ //^^^ adjustments: Borrow(Ref(Not))
(&Foo).foo();
- // ^^^^ adjustments: Deref(None), Borrow(Ref('?2, Not))
+ // ^^^^ adjustments: Deref(None), Borrow(Ref(Not))
}
"#,
);
@@ -1895,7 +1897,7 @@
fn test() {
let a = [1, 2, 3];
a.len();
-} //^ adjustments: Borrow(Ref('?0, Not)), Pointer(Unsize)
+} //^ adjustments: Borrow(Ref(Not)), Pointer(Unsize)
"#,
);
}
@@ -2036,6 +2038,7 @@
check(
r#"
//- minicore: error, send
+//- /std.rs crate:std
pub struct Box<T>(T);
use core::error::Error;
@@ -2108,7 +2111,7 @@
}
fn test() {
Box::new(Foo).foo();
- //^^^^^^^^^^^^^ adjustments: Deref(None), Borrow(Ref('?0, Not))
+ //^^^^^^^^^^^^^ adjustments: Deref(None), Borrow(Ref(Not))
}
"#,
);
@@ -2126,7 +2129,7 @@
use core::mem::ManuallyDrop;
fn test() {
ManuallyDrop::new(Foo).foo();
- //^^^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(Some(OverloadedDeref(Some(Not)))), Borrow(Ref('?0, Not))
+ //^^^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(Some(OverloadedDeref(Some(Not)))), Borrow(Ref(Not))
}
"#,
);
@@ -2176,6 +2179,8 @@
check(
r#"
//- minicore: receiver
+#![feature(arbitrary_self_types)]
+
use core::ops::Receiver;
struct Foo;
diff --git a/crates/hir-ty/src/tests/patterns.rs b/crates/hir-ty/src/tests/patterns.rs
index 607daad..c312b16 100644
--- a/crates/hir-ty/src/tests/patterns.rs
+++ b/crates/hir-ty/src/tests/patterns.rs
@@ -6,7 +6,7 @@
fn infer_pattern() {
check_infer(
r#"
- //- minicore: iterator
+ //- minicore: iterator, add, builtin_impls
fn test(x: &i32) {
let y = x;
let &z = x;
@@ -189,26 +189,45 @@
fn infer_range_pattern() {
check_infer_with_mismatches(
r#"
- fn test(x: &i32) {
- if let 1..76 = 2u32 {}
- if let 1..=76 = 2u32 {}
- }
+//- minicore: range
+fn test(x..y: &core::ops::Range<u32>) {
+ if let 1..76 = 2u32 {}
+ if let 1..=76 = 2u32 {}
+}
"#,
expect![[r#"
- 8..9 'x': &'? i32
- 17..75 '{ ...2 {} }': ()
- 23..45 'if let...u32 {}': ()
- 26..42 'let 1....= 2u32': bool
- 30..35 '1..76': u32
- 38..42 '2u32': u32
- 43..45 '{}': ()
- 50..73 'if let...u32 {}': ()
- 53..70 'let 1....= 2u32': bool
- 57..63 '1..=76': u32
- 66..70 '2u32': u32
- 71..73 '{}': ()
+ 8..9 'x': Range<u32>
+ 8..12 'x..y': Range<u32>
+ 11..12 'y': Range<u32>
+ 38..96 '{ ...2 {} }': ()
+ 44..66 'if let...u32 {}': ()
+ 47..63 'let 1....= 2u32': bool
+ 51..52 '1': u32
+ 51..56 '1..76': u32
+ 54..56 '76': u32
+ 59..63 '2u32': u32
+ 64..66 '{}': ()
+ 71..94 'if let...u32 {}': ()
+ 74..91 'let 1....= 2u32': bool
+ 78..79 '1': u32
+ 78..84 '1..=76': u32
+ 82..84 '76': u32
+ 87..91 '2u32': u32
+ 92..94 '{}': ()
"#]],
);
+ check_no_mismatches(
+ r#"
+//- minicore: range
+fn main() {
+ let byte: u8 = 0u8;
+ let b = match byte {
+ b'0'..=b'9' => true,
+ _ => false,
+ };
+}
+ "#,
+ );
}
#[test]
@@ -1259,3 +1278,22 @@
"#,
);
}
+
+#[test]
+fn destructuring_assign_ref() {
+ check_no_mismatches(
+ r#"
+struct Foo;
+
+fn foo() -> (&'static Foo, u32) {
+ (&Foo, 0)
+}
+
+fn bar() {
+ let ext: &Foo;
+ let v;
+ (ext, v) = foo();
+}
+ "#,
+ );
+}
diff --git a/crates/hir-ty/src/tests/regression.rs b/crates/hir-ty/src/tests/regression.rs
index 75d3203..f03f8d7 100644
--- a/crates/hir-ty/src/tests/regression.rs
+++ b/crates/hir-ty/src/tests/regression.rs
@@ -292,7 +292,7 @@
149..156 'content': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown}
181..188 'content': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown}
191..313 'if ICE... }': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown}
- 194..231 'ICE_RE..._VALUE': bool
+ 194..231 'ICE_RE..._VALUE': {unknown}
194..247 'ICE_RE...&name)': bool
241..246 '&name': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown}
242..246 'name': &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? &'? {unknown}
@@ -629,7 +629,7 @@
65..69 'self': Self
267..271 'self': Self
466..470 'self': SelectStatement<F, S, D, W, O, LOf, {unknown}, {unknown}>
- 488..522 '{ ... }': <SelectStatement<F, S, D, W, O, LOf, {unknown}, {unknown}> as BoxedDsl<DB>>::Output
+ 488..522 '{ ... }': {unknown}
498..502 'self': SelectStatement<F, S, D, W, O, LOf, {unknown}, {unknown}>
498..508 'self.order': O
498..515 'self.o...into()': dyn QueryFragment<DB> + 'static
@@ -725,7 +725,7 @@
138..146 'bar(key)': impl Future<Output = <K as Foo<R>>::Bar>
142..145 'key': &'? K
162..165 'key': &'? K
- 224..227 '{ }': impl Future<Output = <K as Foo<R>>::Bar>
+ 224..227 '{ }': ()
"#]],
);
}
@@ -2522,3 +2522,43 @@
"#,
);
}
+
+#[test]
+fn issue_9881_super_trait_blanket_impl() {
+ check_types(
+ r#"
+pub trait TryStream: Stream {
+ fn try_poll_next(&self) {}
+}
+
+pub trait Stream {
+ type Item;
+ fn poll_next(&self) {}
+}
+
+trait StreamAlias: Stream<Item = ()> {}
+
+impl<S: Stream<Item = ()>> TryStream for S {}
+
+impl<S: Stream<Item = ()>> StreamAlias for S {}
+
+struct StreamImpl;
+
+impl Stream for StreamImpl {
+ type Item = ();
+}
+
+fn foo() -> impl StreamAlias {
+ StreamImpl
+}
+
+fn main() {
+ let alias = foo();
+ let _: () = alias.try_poll_next();
+ // ^ ()
+ let _: () = alias.poll_next();
+ // ^ ()
+}
+ "#,
+ );
+}
diff --git a/crates/hir-ty/src/tests/regression/new_solver.rs b/crates/hir-ty/src/tests/regression/new_solver.rs
index 90c81d1..5c1f85c 100644
--- a/crates/hir-ty/src/tests/regression/new_solver.rs
+++ b/crates/hir-ty/src/tests/regression/new_solver.rs
@@ -180,7 +180,7 @@
"#,
expect![[r#"
150..154 'self': &'a Grid
- 174..181 '{ }': <&'a Grid as IntoIterator>::IntoIter
+ 174..181 '{ }': ()
"#]],
);
}
@@ -414,7 +414,7 @@
244..246 '_x': {unknown}
249..257 'to_bytes': fn to_bytes() -> [u8; _]
249..259 'to_bytes()': [u8; _]
- 249..268 'to_byt..._vec()': Vec<<[u8; _] as Foo>::Item>
+ 249..268 'to_byt..._vec()': {unknown}
"#]],
);
}
@@ -552,3 +552,144 @@
"#]],
);
}
+
+#[test]
+fn regression_19957() {
+ // This test documents issue #19957: async-trait patterns incorrectly produce
+ // type mismatches between Pin<Box<dyn Future>> and Pin<Box<impl Future>>.
+ check_no_mismatches(
+ r#"
+//- minicore: future, pin, result, error, send, coerce_unsized, dispatch_from_dyn
+use core::{future::Future, pin::Pin};
+
+#[lang = "owned_box"]
+pub struct Box<T: ?Sized> {
+ inner: *mut T,
+}
+
+impl<T> Box<T> {
+ fn pin(value: T) -> Pin<Box<T>> {
+ // Implementation details don't matter here for type checking
+ loop {}
+ }
+}
+
+impl<T: ?Sized + core::marker::Unsize<U>, U: ?Sized> core::ops::CoerceUnsized<Box<U>> for Box<T> {}
+
+impl<T: ?Sized + core::ops::DispatchFromDyn<U>, U: ?Sized> core::ops::DispatchFromDyn<Box<U>> for Box<T> {}
+
+pub struct ExampleData {
+ pub id: i32,
+}
+
+// Simulates what #[async_trait] expands to
+pub trait SimpleModel {
+ fn save<'life0, 'async_trait>(
+ &'life0 self,
+ ) -> Pin<Box<dyn Future<Output = i32> + Send + 'async_trait>>
+ where
+ 'life0: 'async_trait,
+ Self: 'async_trait;
+}
+
+impl SimpleModel for ExampleData {
+ fn save<'life0, 'async_trait>(
+ &'life0 self,
+ ) -> Pin<Box<dyn Future<Output = i32> + Send + 'async_trait>>
+ where
+ 'life0: 'async_trait,
+ Self: 'async_trait,
+ {
+ // Body creates Pin<Box<impl Future>>, which should coerce to Pin<Box<dyn Future>>
+ Box::pin(async move { self.id })
+ }
+}
+"#,
+ )
+}
+
+#[test]
+fn regression_20975() {
+ check_infer(
+ r#"
+//- minicore: future, iterators, range
+use core::future::Future;
+
+struct Foo<T>(T);
+
+trait X {}
+
+impl X for i32 {}
+impl X for i64 {}
+
+impl<T: X> Iterator for Foo<T> {
+ type Item = T;
+ fn next(&mut self) -> Option<Self::Item> {
+ self.next_spec()
+ }
+}
+
+trait Bar {
+ type Item;
+
+ fn next_spec(&mut self) -> Option<Self::Item>;
+}
+
+impl<T: X> Bar for Foo<T> {
+ type Item = T;
+
+ fn next_spec(&mut self) -> Option<Self::Item> {
+ None
+ }
+}
+
+struct JoinAll<F>
+where
+ F: Future,
+{
+ f: F,
+}
+
+fn join_all<I>(iter: I) -> JoinAll<<I as IntoIterator>::Item>
+where
+ I: IntoIterator,
+ <I as IntoIterator>::Item: Future,
+{
+ loop {}
+}
+
+fn main() {
+ let x = Foo(42).filter_map(|_| Some(async {}));
+ join_all(x);
+}
+"#,
+ expect![[r#"
+ 164..168 'self': &'? mut Foo<T>
+ 192..224 '{ ... }': Option<T>
+ 202..206 'self': &'? mut Foo<T>
+ 202..218 'self.n...spec()': Option<T>
+ 278..282 'self': &'? mut Self
+ 380..384 'self': &'? mut Foo<T>
+ 408..428 '{ ... }': Option<T>
+ 418..422 'None': Option<T>
+ 501..505 'iter': I
+ 614..629 '{ loop {} }': JoinAll<impl Future>
+ 620..627 'loop {}': !
+ 625..627 '{}': ()
+ 641..713 '{ ...(x); }': ()
+ 651..652 'x': FilterMap<Foo<i32>, impl FnMut(i32) -> Option<impl Future<Output = ()>>>
+ 655..658 'Foo': fn Foo<i32>(i32) -> Foo<i32>
+ 655..662 'Foo(42)': Foo<i32>
+ 655..693 'Foo(42...c {}))': FilterMap<Foo<i32>, impl FnMut(i32) -> Option<impl Future<Output = ()>>>
+ 659..661 '42': i32
+ 674..692 '|_| So...nc {})': impl FnMut(i32) -> Option<impl Future<Output = ()>>
+ 675..676 '_': i32
+ 678..682 'Some': fn Some<impl Future<Output = ()>>(impl Future<Output = ()>) -> Option<impl Future<Output = ()>>
+ 678..692 'Some(async {})': Option<impl Future<Output = ()>>
+ 683..691 'async {}': impl Future<Output = ()>
+ 699..707 'join_all': fn join_all<FilterMap<Foo<i32>, impl FnMut(i32) -> Option<impl Future<Output = ()>>>>(FilterMap<Foo<i32>, impl FnMut(i32) -> Option<impl Future<Output = ()>>>) -> JoinAll<<FilterMap<Foo<i32>, impl FnMut(i32) -> Option<impl Future<Output = ()>>> as IntoIterator>::Item>
+ 699..710 'join_all(x)': JoinAll<impl Future<Output = ()>>
+ 708..709 'x': FilterMap<Foo<i32>, impl FnMut(i32) -> Option<impl Future<Output = ()>>>
+ "#]],
+ );
+}
diff --git a/crates/hir-ty/src/tests/simple.rs b/crates/hir-ty/src/tests/simple.rs
index c2392b3..2e107b2 100644
--- a/crates/hir-ty/src/tests/simple.rs
+++ b/crates/hir-ty/src/tests/simple.rs
@@ -645,10 +645,13 @@
fn infer_binary_op() {
check_infer(
r#"
+//- minicore: add, builtin_impls
fn f(x: bool) -> i32 {
0i32
}
+const CONST_2: isize = 0;
+
fn test() -> bool {
let x = a && b;
let y = true || false;
@@ -658,8 +661,9 @@
let h = minus_forty <= CONST_2;
let c = f(z || y) + 5;
let d = b;
- let g = minus_forty ^= i;
+ let g = minus_forty += i;
let ten: usize = 10;
+ let some_num = 0usize;
let ten_is_eleven = ten == some_num;
ten < 3
@@ -669,53 +673,56 @@
5..6 'x': bool
21..33 '{ 0i32 }': i32
27..31 '0i32': i32
- 53..369 '{ ... < 3 }': bool
- 63..64 'x': bool
- 67..68 'a': bool
- 67..73 'a && b': bool
- 72..73 'b': bool
- 83..84 'y': bool
- 87..91 'true': bool
- 87..100 'true || false': bool
- 95..100 'false': bool
- 110..111 'z': bool
- 114..115 'x': bool
- 114..120 'x == y': bool
- 119..120 'y': bool
- 130..131 't': bool
- 134..135 'x': bool
- 134..140 'x != y': bool
- 139..140 'y': bool
- 150..161 'minus_forty': isize
- 171..179 '-40isize': isize
- 172..179 '40isize': isize
- 189..190 'h': bool
- 193..204 'minus_forty': isize
- 193..215 'minus_...ONST_2': bool
- 208..215 'CONST_2': isize
- 225..226 'c': i32
- 229..230 'f': fn f(bool) -> i32
- 229..238 'f(z || y)': i32
- 229..242 'f(z || y) + 5': i32
- 231..232 'z': bool
- 231..237 'z || y': bool
- 236..237 'y': bool
- 241..242 '5': i32
- 252..253 'd': {unknown}
- 256..257 'b': {unknown}
- 267..268 'g': ()
- 271..282 'minus_forty': isize
- 271..287 'minus_...y ^= i': ()
- 286..287 'i': isize
- 297..300 'ten': usize
- 310..312 '10': usize
- 322..335 'ten_is_eleven': bool
- 338..341 'ten': usize
- 338..353 'ten == some_num': bool
- 345..353 'some_num': usize
- 360..363 'ten': usize
- 360..367 'ten < 3': bool
- 366..367 '3': usize
+ 58..59 '0': isize
+ 80..423 '{ ... < 3 }': bool
+ 90..91 'x': bool
+ 94..95 'a': bool
+ 94..100 'a && b': bool
+ 99..100 'b': bool
+ 110..111 'y': bool
+ 114..118 'true': bool
+ 114..127 'true || false': bool
+ 122..127 'false': bool
+ 137..138 'z': bool
+ 141..142 'x': bool
+ 141..147 'x == y': bool
+ 146..147 'y': bool
+ 157..158 't': bool
+ 161..162 'x': bool
+ 161..167 'x != y': bool
+ 166..167 'y': bool
+ 177..188 'minus_forty': isize
+ 198..206 '-40isize': isize
+ 199..206 '40isize': isize
+ 216..217 'h': bool
+ 220..231 'minus_forty': isize
+ 220..242 'minus_...ONST_2': bool
+ 235..242 'CONST_2': isize
+ 252..253 'c': i32
+ 256..257 'f': fn f(bool) -> i32
+ 256..265 'f(z || y)': i32
+ 256..269 'f(z || y) + 5': i32
+ 258..259 'z': bool
+ 258..264 'z || y': bool
+ 263..264 'y': bool
+ 268..269 '5': i32
+ 279..280 'd': {unknown}
+ 283..284 'b': {unknown}
+ 294..295 'g': ()
+ 298..309 'minus_forty': isize
+ 298..314 'minus_...y += i': ()
+ 313..314 'i': isize
+ 324..327 'ten': usize
+ 337..339 '10': usize
+ 349..357 'some_num': usize
+ 360..366 '0usize': usize
+ 376..389 'ten_is_eleven': bool
+ 392..395 'ten': usize
+ 392..407 'ten == some_num': bool
+ 399..407 'some_num': usize
+ 414..417 'ten': usize
+ 414..421 'ten < 3': bool
+ 420..421 '3': usize
"#]],
);
}
@@ -1071,6 +1078,7 @@
fn infer_inherent_method_str() {
check_infer(
r#"
+//- /core.rs crate:core
#![rustc_coherence_is_core]
#[lang = "str"]
impl str {
@@ -2691,6 +2699,7 @@
fn box_into_vec() {
check_infer(
r#"
+//- /core.rs crate:core
#[lang = "sized"]
pub trait Sized {}
@@ -3934,3 +3943,16 @@
"#]],
);
}
+
+#[test]
+fn infer_array_size() {
+ check_no_mismatches(
+ r#"
+fn foo(a: [u8; 3]) {}
+
+fn bar() {
+ foo([0; _]);
+}
+ "#,
+ );
+}
diff --git a/crates/hir-ty/src/tests/traits.rs b/crates/hir-ty/src/tests/traits.rs
index c0e4393..eb4ae5e 100644
--- a/crates/hir-ty/src/tests/traits.rs
+++ b/crates/hir-ty/src/tests/traits.rs
@@ -1,4 +1,3 @@
-use cov_mark::check;
use expect_test::expect;
use crate::tests::infer_with_mismatches;
@@ -278,11 +277,11 @@
fn infer_ops_neg() {
check_types(
r#"
-//- /main.rs crate:main deps:std
+//- minicore:unary_ops
struct Bar;
struct Foo;
-impl std::ops::Neg for Bar {
+impl core::ops::Neg for Bar {
type Output = Foo;
}
@@ -291,15 +290,6 @@
let b = -a;
b;
} //^ Foo
-
-//- /std.rs crate:std
-#[prelude_import] use ops::*;
-mod ops {
- #[lang = "neg"]
- pub trait Neg {
- type Output;
- }
-}
"#,
);
}
@@ -308,11 +298,11 @@
fn infer_ops_not() {
check_types(
r#"
-//- /main.rs crate:main deps:std
+//- minicore:unary_ops
struct Bar;
struct Foo;
-impl std::ops::Not for Bar {
+impl core::ops::Not for Bar {
type Output = Foo;
}
@@ -321,15 +311,6 @@
let b = !a;
b;
} //^ Foo
-
-//- /std.rs crate:std
-#[prelude_import] use ops::*;
-mod ops {
- #[lang = "not"]
- pub trait Not {
- type Output;
- }
-}
"#,
);
}
@@ -368,7 +349,6 @@
#[test]
fn trait_default_method_self_bound_implements_trait() {
- cov_mark::check!(trait_self_implements_self);
check(
r#"
trait Trait {
@@ -1211,7 +1191,7 @@
expect![[r#"
29..33 'self': &'? Self
54..58 'self': &'? Self
- 98..100 '{}': impl Trait<u64>
+ 98..100 '{}': ()
110..111 'x': impl Trait<u64>
130..131 'y': &'? impl Trait<u64>
151..268 '{ ...2(); }': ()
@@ -2982,13 +2962,13 @@
140..146 'IsCopy': IsCopy
140..153 'IsCopy.test()': bool
159..166 'NotCopy': NotCopy
- 159..173 'NotCopy.test()': {unknown}
+ 159..173 'NotCopy.test()': bool
179..195 '(IsCop...sCopy)': (IsCopy, IsCopy)
179..202 '(IsCop...test()': bool
180..186 'IsCopy': IsCopy
188..194 'IsCopy': IsCopy
208..225 '(IsCop...tCopy)': (IsCopy, NotCopy)
- 208..232 '(IsCop...test()': {unknown}
+ 208..232 '(IsCop...test()': bool
209..215 'IsCopy': IsCopy
217..224 'NotCopy': NotCopy
"#]],
@@ -3081,7 +3061,7 @@
79..194 '{ ...ized }': ()
85..88 '1u8': u8
85..95 '1u8.test()': bool
- 101..116 '(*"foo").test()': {unknown}
+ 101..116 '(*"foo").test()': bool
102..108 '*"foo"': str
103..108 '"foo"': &'static str
135..145 '(1u8, 1u8)': (u8, u8)
@@ -3089,7 +3069,7 @@
136..139 '1u8': u8
141..144 '1u8': u8
158..171 '(1u8, *"foo")': (u8, str)
- 158..178 '(1u8, ...test()': {unknown}
+ 158..178 '(1u8, ...test()': bool
159..162 '1u8': u8
164..170 '*"foo"': str
165..170 '"foo"': &'static str
@@ -3944,7 +3924,6 @@
#[test]
fn foreign_trait_with_local_trait_impl() {
- check!(block_local_impls);
check(
r#"
mod module {
@@ -3955,15 +3934,16 @@
}
fn f() {
+ struct Foo;
use module::T;
- impl T for usize {
+ impl T for Foo {
const C: usize = 0;
fn f(&self) {}
}
- 0usize.f();
- //^^^^^^^^^^ type: ()
- usize::C;
- //^^^^^^^^type: usize
+ Foo.f();
+ //^^^^^^^ type: ()
+ Foo::C;
+ //^^^^^^ type: usize
}
"#,
);
@@ -4023,7 +4003,7 @@
212..295 '{ ...ZED; }': ()
218..239 'F::Exp..._SIZED': Yes
245..266 'F::Imp..._SIZED': Yes
- 272..292 'F::Rel..._SIZED': {unknown}
+ 272..292 'F::Rel..._SIZED': Yes
"#]],
);
}
@@ -4274,7 +4254,7 @@
127..128 'v': &'? (dyn Trait<Assoc<i32> = &'a i32> + 'static)
164..195 '{ ...f(); }': ()
170..171 'v': &'? (dyn Trait<Assoc<i32> = &'a i32> + 'static)
- 170..184 'v.get::<i32>()': <dyn Trait<Assoc<i32> = &'a i32> + 'static as Trait>::Assoc<i32>
+ 170..184 'v.get::<i32>()': <{unknown} as Trait>::Assoc<i32>
170..192 'v.get:...eref()': {unknown}
"#]],
);
@@ -5051,3 +5031,28 @@
"#]],
);
}
+
+#[test]
+fn implicit_sized_bound_on_param() {
+ check(
+ r#"
+//- minicore: sized
+struct PBox<T, A>(T, A);
+
+impl<T, A> PBox<T, A> {
+ fn token_with(self) {}
+}
+
+trait MoveMessage {
+ fn token<A>(self, alloc: A)
+ where
+ Self: Sized,
+ {
+ let b = PBox::<Self, A>(self, alloc);
+ b.token_with();
+ // ^^^^^^^^^^^^^^ type: ()
+ }
+}
+ "#,
+ );
+}
diff --git a/crates/hir-ty/src/traits.rs b/crates/hir-ty/src/traits.rs
index 00c8eb7..2055c31 100644
--- a/crates/hir-ty/src/traits.rs
+++ b/crates/hir-ty/src/traits.rs
@@ -4,13 +4,18 @@
use std::hash::Hash;
use base_db::Crate;
-use hir_def::{BlockId, TraitId, lang_item::LangItem};
+use hir_def::{
+ AdtId, AssocItemId, BlockId, HasModule, ImplId, Lookup, TraitId,
+ lang_item::LangItem,
+ nameres::DefMap,
+ signatures::{ConstFlags, EnumFlags, FnFlags, StructFlags, TraitFlags, TypeAliasFlags},
+};
use hir_expand::name::Name;
use intern::sym;
use rustc_next_trait_solver::solve::{HasChanged, SolverDelegateEvalExt};
use rustc_type_ir::{
TypingMode,
- inherent::{IntoKind, Span as _},
+ inherent::{AdtDef, BoundExistentialPredicates, IntoKind, Span as _},
solve::Certainty,
};
use triomphe::Arc;
@@ -263,3 +268,147 @@
let result = crate::traits::next_trait_solve_in_ctxt(&infcx, goal);
matches!(result, Ok((_, Certainty::Yes)))
}
+
+pub fn is_inherent_impl_coherent(db: &dyn HirDatabase, def_map: &DefMap, impl_id: ImplId) -> bool {
+ let self_ty = db.impl_self_ty(impl_id).instantiate_identity();
+ let self_ty = self_ty.kind();
+ let impl_allowed = match self_ty {
+ TyKind::Tuple(_)
+ | TyKind::FnDef(_, _)
+ | TyKind::Array(_, _)
+ | TyKind::Never
+ | TyKind::RawPtr(_, _)
+ | TyKind::Ref(_, _, _)
+ | TyKind::Slice(_)
+ | TyKind::Str
+ | TyKind::Bool
+ | TyKind::Char
+ | TyKind::Int(_)
+ | TyKind::Uint(_)
+ | TyKind::Float(_) => def_map.is_rustc_coherence_is_core(),
+
+ TyKind::Adt(adt_def, _) => adt_def.def_id().0.module(db).krate() == def_map.krate(),
+ TyKind::Dynamic(it, _) => it
+ .principal_def_id()
+ .is_some_and(|trait_id| trait_id.0.module(db).krate() == def_map.krate()),
+
+ _ => true,
+ };
+ impl_allowed || {
+ let rustc_has_incoherent_inherent_impls = match self_ty {
+ TyKind::Tuple(_)
+ | TyKind::FnDef(_, _)
+ | TyKind::Array(_, _)
+ | TyKind::Never
+ | TyKind::RawPtr(_, _)
+ | TyKind::Ref(_, _, _)
+ | TyKind::Slice(_)
+ | TyKind::Str
+ | TyKind::Bool
+ | TyKind::Char
+ | TyKind::Int(_)
+ | TyKind::Uint(_)
+ | TyKind::Float(_) => true,
+
+ TyKind::Adt(adt_def, _) => match adt_def.def_id().0 {
+ hir_def::AdtId::StructId(id) => db
+ .struct_signature(id)
+ .flags
+ .contains(StructFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS),
+ hir_def::AdtId::UnionId(id) => db
+ .union_signature(id)
+ .flags
+ .contains(StructFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS),
+ hir_def::AdtId::EnumId(it) => db
+ .enum_signature(it)
+ .flags
+ .contains(EnumFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS),
+ },
+ TyKind::Dynamic(it, _) => it.principal_def_id().is_some_and(|trait_id| {
+ db.trait_signature(trait_id.0)
+ .flags
+ .contains(TraitFlags::RUSTC_HAS_INCOHERENT_INHERENT_IMPLS)
+ }),
+
+ _ => false,
+ };
+ let items = impl_id.impl_items(db);
+ rustc_has_incoherent_inherent_impls
+ && !items.items.is_empty()
+ && items.items.iter().all(|&(_, assoc)| match assoc {
+ AssocItemId::FunctionId(it) => {
+ db.function_signature(it).flags.contains(FnFlags::RUSTC_ALLOW_INCOHERENT_IMPL)
+ }
+ AssocItemId::ConstId(it) => {
+ db.const_signature(it).flags.contains(ConstFlags::RUSTC_ALLOW_INCOHERENT_IMPL)
+ }
+ AssocItemId::TypeAliasId(it) => db
+ .type_alias_signature(it)
+ .flags
+ .contains(TypeAliasFlags::RUSTC_ALLOW_INCOHERENT_IMPL),
+ })
+ }
+}
+
+/// Checks whether the impl satisfies the orphan rules.
+///
+/// Given `impl<P1..=Pn> Trait<T1..=Tn> for T0`, an `impl` is valid only if at least one of the following is true:
+/// - Trait is a local trait
+/// - All of
+/// - At least one of the types `T0..=Tn` must be a local type. Let `Ti` be the first such type.
+/// - No uncovered type parameters `P1..=Pn` may appear in `T0..Ti` (excluding `Ti`)
+pub fn check_orphan_rules<'db>(db: &'db dyn HirDatabase, impl_: ImplId) -> bool {
+ let Some(impl_trait) = db.impl_trait(impl_) else {
+ // not a trait impl
+ return true;
+ };
+
+ let local_crate = impl_.lookup(db).container.krate();
+ let is_local = |tgt_crate| tgt_crate == local_crate;
+
+ let trait_ref = impl_trait.instantiate_identity();
+ let trait_id = trait_ref.def_id.0;
+ if is_local(trait_id.module(db).krate()) {
+ // trait to be implemented is local
+ return true;
+ }
+
+ let unwrap_fundamental = |mut ty: Ty<'db>| {
+ // Unwrap all layers of fundamental types with a loop.
+ loop {
+ match ty.kind() {
+ TyKind::Ref(_, referenced, _) => ty = referenced,
+ TyKind::Adt(adt_def, subs) => {
+ let AdtId::StructId(s) = adt_def.def_id().0 else {
+ break ty;
+ };
+ let struct_signature = db.struct_signature(s);
+ if struct_signature.flags.contains(StructFlags::FUNDAMENTAL) {
+ let next = subs.types().next();
+ match next {
+ Some(it) => ty = it,
+ None => break ty,
+ }
+ } else {
+ break ty;
+ }
+ }
+ _ => break ty,
+ }
+ }
+ };
+ // - At least one of the types `T0..=Tn` must be a local type. Let `Ti` be the first such type.
+
+ // FIXME: param coverage
+ // - No uncovered type parameters `P1..=Pn` may appear in `T0..Ti` (excluding `Ti`)
+ let is_not_orphan = trait_ref.args.types().any(|ty| match unwrap_fundamental(ty).kind() {
+ TyKind::Adt(adt_def, _) => is_local(adt_def.def_id().0.module(db).krate()),
+ TyKind::Error(_) => true,
+ TyKind::Dynamic(it, _) => {
+ it.principal_def_id().is_some_and(|trait_id| is_local(trait_id.0.module(db).krate()))
+ }
+ _ => false,
+ });
+ #[allow(clippy::let_and_return)]
+ is_not_orphan
+}
diff --git a/crates/hir/Cargo.toml b/crates/hir/Cargo.toml
index dfa3938..24b2bd9 100644
--- a/crates/hir/Cargo.toml
+++ b/crates/hir/Cargo.toml
@@ -18,9 +18,8 @@
arrayvec.workspace = true
itertools.workspace = true
smallvec.workspace = true
-tracing.workspace = true
+tracing = { workspace = true, features = ["attributes"] }
triomphe.workspace = true
-indexmap.workspace = true
ra-ap-rustc_type_ir.workspace = true
diff --git a/crates/hir/src/attrs.rs b/crates/hir/src/attrs.rs
index 147f1b8..cfc4080 100644
--- a/crates/hir/src/attrs.rs
+++ b/crates/hir/src/attrs.rs
@@ -1,7 +1,5 @@
//! Attributes & documentation for hir types.
-use std::ops::ControlFlow;
-
use hir_def::{
AssocItemId, AttrDefId, ModuleDefId,
attr::AttrsWithOwner,
@@ -14,7 +12,13 @@
mod_path::{ModPath, PathKind},
name::Name,
};
-use hir_ty::{db::HirDatabase, method_resolution};
+use hir_ty::{
+ db::HirDatabase,
+ method_resolution::{
+ self, CandidateId, MethodError, MethodResolutionContext, MethodResolutionUnstableFeatures,
+ },
+ next_solver::{DbInterner, TypingMode, infer::DbInternerInferExt},
+};
use crate::{
Adt, AsAssocItem, AssocItem, BuiltinType, Const, ConstParam, DocLinkDef, Enum, ExternCrateDecl,
@@ -242,7 +246,7 @@
name: &Name,
ns: Option<Namespace>,
) -> Option<DocLinkDef> {
- ty.iterate_assoc_items(db, ty.krate(db), move |assoc_item| {
+ ty.iterate_assoc_items(db, move |assoc_item| {
if assoc_item.name(db)? != *name {
return None;
}
@@ -257,37 +261,39 @@
name: &Name,
ns: Option<Namespace>,
) -> Option<DocLinkDef> {
- let canonical = ty.canonical(db);
let krate = ty.krate(db);
let environment = resolver
.generic_def()
.map_or_else(|| crate::TraitEnvironment::empty(krate.id), |d| db.trait_environment(d));
let traits_in_scope = resolver.traits_in_scope(db);
- let mut result = None;
-
// `ty.iterate_path_candidates()` require a scope, which is not available when resolving
// attributes here. Use path resolution directly instead.
//
// FIXME: resolve type aliases (which are not yielded by iterate_path_candidates)
- _ = method_resolution::iterate_path_candidates(
- &canonical,
- db,
- environment,
- &traits_in_scope,
- method_resolution::VisibleFromModule::None,
- Some(name),
- &mut |_, assoc_item_id: AssocItemId, _| {
- // If two traits in scope define the same item, Rustdoc links to no specific trait (for
- // instance, given two methods `a`, Rustdoc simply links to `method.a` with no
- // disambiguation) so we just pick the first one we find as well.
- result = as_module_def_if_namespace_matches(assoc_item_id.into(), ns);
-
- if result.is_some() { ControlFlow::Break(()) } else { ControlFlow::Continue(()) }
- },
- );
-
- result
+ let interner = DbInterner::new_with(db, Some(environment.krate), environment.block);
+ let infcx = interner.infer_ctxt().build(TypingMode::PostAnalysis);
+ let unstable_features =
+ MethodResolutionUnstableFeatures::from_def_map(resolver.top_level_def_map());
+ let ctx = MethodResolutionContext {
+ infcx: &infcx,
+ resolver: &resolver,
+ env: &environment,
+ traits_in_scope: &traits_in_scope,
+ edition: krate.edition(db),
+ unstable_features: &unstable_features,
+ };
+ let resolution = ctx.probe_for_name(method_resolution::Mode::Path, name.clone(), ty.ty);
+ let resolution = match resolution {
+ Ok(resolution) => resolution.item,
+ Err(MethodError::PrivateMatch(resolution)) => resolution.item,
+ _ => return None,
+ };
+ let resolution = match resolution {
+ CandidateId::FunctionId(id) => AssocItem::Function(id.into()),
+ CandidateId::ConstId(id) => AssocItem::Const(id.into()),
+ };
+ as_module_def_if_namespace_matches(resolution, ns)
}
fn resolve_field(
diff --git a/crates/hir/src/display.rs b/crates/hir/src/display.rs
index d61c2ec..c215438 100644
--- a/crates/hir/src/display.rs
+++ b/crates/hir/src/display.rs
@@ -11,6 +11,7 @@
type_ref::{TypeBound, TypeRef, TypeRefId},
};
use hir_ty::{
+ GenericPredicates,
db::HirDatabase,
display::{
HirDisplay, HirDisplayError, HirDisplayWithExpressionStore, HirFormatter, SizedByDefault,
@@ -484,11 +485,9 @@
let param_data = ¶ms[self.id.local_id()];
let krate = self.id.parent().krate(f.db).id;
let ty = self.ty(f.db).ty;
- let predicates = f.db.generic_predicates(self.id.parent());
+ let predicates = GenericPredicates::query_all(f.db, self.id.parent());
let predicates = predicates
- .instantiate_identity()
- .into_iter()
- .flatten()
+ .iter_identity_copied()
.filter(|wc| match wc.kind().skip_binder() {
ClauseKind::Trait(tr) => tr.self_ty() == ty,
ClauseKind::Projection(proj) => proj.self_ty() == ty,
diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs
index bb1741a..5400003 100644
--- a/crates/hir/src/lib.rs
+++ b/crates/hir/src/lib.rs
@@ -75,26 +75,28 @@
proc_macro::ProcMacroKind,
};
use hir_ty::{
- TraitEnvironment, TyDefId, TyLoweringDiagnostic, ValueTyDefId, all_super_traits, autoderef,
- check_orphan_rules,
+ GenericPredicates, TraitEnvironment, TyDefId, TyLoweringDiagnostic, ValueTyDefId,
+ all_super_traits, autoderef, check_orphan_rules,
consteval::try_const_usize,
db::{InternedClosureId, InternedCoroutineId},
diagnostics::BodyValidationDiagnostic,
direct_super_traits, known_const_to_ast,
layout::{Layout as TyLayout, RustcEnumVariantIdx, RustcFieldIdx, TagEncoding},
- method_resolution,
+ method_resolution::{
+ self, InherentImpls, MethodResolutionContext, MethodResolutionUnstableFeatures,
+ },
mir::{MutBorrowKind, interpret_mir},
next_solver::{
- AliasTy, Canonical, ClauseKind, ConstKind, DbInterner, ErrorGuaranteed, GenericArg,
- GenericArgs, PolyFnSig, Region, SolverDefId, Ty, TyKind, TypingMode,
+ AliasTy, ClauseKind, ConstKind, DbInterner, ErrorGuaranteed, GenericArg, GenericArgs,
+ PolyFnSig, Region, SolverDefId, Ty, TyKind, TypingMode,
infer::{DbInternerInferExt, InferCtxt},
},
- traits::{self, FnTrait, structurally_normalize_ty},
+ traits::{self, FnTrait, is_inherent_impl_coherent, structurally_normalize_ty},
};
use itertools::Itertools;
use rustc_hash::FxHashSet;
use rustc_type_ir::{
- AliasTyKind, TypeSuperVisitable, TypeVisitable, TypeVisitor,
+ AliasTyKind, TypeSuperVisitable, TypeVisitable, TypeVisitor, fast_reject,
inherent::{AdtDef, GenericArgs as _, IntoKind, SliceLike, Term as _, Ty as _},
};
use smallvec::SmallVec;
@@ -136,7 +138,7 @@
attr::{AttrSourceMap, Attrs, AttrsWithOwner},
find_path::PrefixKind,
import_map,
- lang_item::LangItem,
+ lang_item::{LangItem, crate_lang_items},
nameres::{DefMap, ModuleSource, crate_def_map},
per_ns::Namespace,
type_ref::{Mutability, TypeRef},
@@ -171,11 +173,12 @@
drop::DropGlue,
dyn_compatibility::{DynCompatibilityViolation, MethodViolationCode},
layout::LayoutError,
- method_resolution::TyFingerprint,
mir::{MirEvalError, MirLowerError},
next_solver::abi::Safety,
next_solver::clear_tls_solver_cache,
},
+ // FIXME: These are needed for import assets, properly encapsulate them.
+ hir_ty::{method_resolution::TraitImpls, next_solver::SimplifiedType},
intern::{Symbol, sym},
};
@@ -244,7 +247,15 @@
self,
db: &dyn HirDatabase,
) -> impl Iterator<Item = Crate> {
- db.transitive_rev_deps(self.id).into_iter().map(|id| Crate { id })
+ self.id.transitive_rev_deps(db).into_iter().map(|id| Crate { id })
+ }
+
+ pub fn notable_traits_in_deps(self, db: &dyn HirDatabase) -> impl Iterator<Item = &TraitId> {
+ self.id
+ .transitive_deps(db)
+ .into_iter()
+ .filter_map(|krate| db.crate_notable_traits(krate))
+ .flatten()
}
pub fn root_module(self) -> Module {
@@ -754,8 +765,6 @@
}
self.legacy_macros(db).into_iter().for_each(|m| emit_macro_def_diagnostics(db, acc, m));
- let inherent_impls = db.inherent_impls_in_crate(self.id.krate());
-
let interner = DbInterner::new_with(db, Some(self.id.krate()), self.id.containing_block());
let infcx = interner.infer_ctxt().build(TypingMode::non_body_analysis());
@@ -784,7 +793,9 @@
emit_def_diagnostic(db, acc, diag, edition);
}
- if inherent_impls.invalid_impls().contains(&impl_def.id) {
+ if impl_signature.target_trait.is_none()
+ && !is_inherent_impl_coherent(db, def_map, impl_def.id)
+ {
acc.push(IncoherentImpl { impl_: ast_id_map.get(loc.id.value), file_id }.into())
}
@@ -2795,7 +2806,7 @@
pub fn eval(self, db: &dyn HirDatabase) -> Result<EvaluatedConst<'_>, ConstEvalError<'_>> {
let interner = DbInterner::new_with(db, None, None);
let ty = db.value_ty(self.id.into()).unwrap().instantiate_identity();
- db.const_eval(self.id.into(), GenericArgs::new_from_iter(interner, []), None)
+ db.const_eval(self.id, GenericArgs::new_from_iter(interner, []), None)
.map(|it| EvaluatedConst { const_: it, def: self.id.into(), ty })
}
}
@@ -2874,10 +2885,12 @@
/// Evaluate the static initializer.
pub fn eval(self, db: &dyn HirDatabase) -> Result<EvaluatedConst<'_>, ConstEvalError<'_>> {
- let interner = DbInterner::new_with(db, None, None);
let ty = db.value_ty(self.id.into()).unwrap().instantiate_identity();
- db.const_eval(self.id.into(), GenericArgs::new_from_iter(interner, []), None)
- .map(|it| EvaluatedConst { const_: it, def: self.id.into(), ty })
+ db.const_eval_static(self.id).map(|it| EvaluatedConst {
+ const_: it,
+ def: self.id.into(),
+ ty,
+ })
}
}
@@ -3350,6 +3363,15 @@
TypeAlias(TypeAlias),
}
+impl From<method_resolution::CandidateId> for AssocItem {
+ fn from(value: method_resolution::CandidateId) -> Self {
+ match value {
+ method_resolution::CandidateId::FunctionId(id) => AssocItem::Function(Function { id }),
+ method_resolution::CandidateId::ConstId(id) => AssocItem::Const(Const { id }),
+ }
+ }
+}
+
#[derive(Debug, Clone)]
pub enum AssocItemContainer {
Trait(Trait),
@@ -3701,7 +3723,7 @@
push_ty_diagnostics(
db,
acc,
- db.generic_predicates_without_parent_with_diagnostics(def).1,
+ GenericPredicates::query_with_diagnostics(db, def).1.clone(),
&source_map,
);
for (param_id, param) in generics.iter_type_or_consts() {
@@ -4196,10 +4218,13 @@
/// parameter, not additional bounds that might be added e.g. by a method if
/// the parameter comes from an impl!
pub fn trait_bounds(self, db: &dyn HirDatabase) -> Vec<Trait> {
- db.generic_predicates_for_param(self.id.parent(), self.id.into(), None)
- .iter()
+ let self_ty = self.ty(db).ty;
+ GenericPredicates::query_explicit(db, self.id.parent())
+ .iter_identity_copied()
.filter_map(|pred| match &pred.kind().skip_binder() {
- ClauseKind::Trait(trait_ref) => Some(Trait::from(trait_ref.def_id().0)),
+ ClauseKind::Trait(trait_ref) if trait_ref.self_ty() == self_ty => {
+ Some(Trait::from(trait_ref.def_id().0))
+ }
_ => None,
})
.collect()
@@ -4361,90 +4386,81 @@
impl Impl {
pub fn all_in_crate(db: &dyn HirDatabase, krate: Crate) -> Vec<Impl> {
- let inherent = db.inherent_impls_in_crate(krate.id);
- let trait_ = db.trait_impls_in_crate(krate.id);
+ let mut result = Vec::new();
+ extend_with_def_map(db, crate_def_map(db, krate.id), &mut result);
+ return result;
- inherent.all_impls().chain(trait_.all_impls()).map(Self::from).collect()
+ fn extend_with_def_map(db: &dyn HirDatabase, def_map: &DefMap, result: &mut Vec<Impl>) {
+ for (_, module) in def_map.modules() {
+ result.extend(module.scope.impls().map(Impl::from));
+
+ for unnamed_const in module.scope.unnamed_consts() {
+ for (_, block_def_map) in db.body(unnamed_const.into()).blocks(db) {
+ extend_with_def_map(db, block_def_map, result);
+ }
+ }
+ }
+ }
}
pub fn all_in_module(db: &dyn HirDatabase, module: Module) -> Vec<Impl> {
module.id.def_map(db)[module.id.local_id].scope.impls().map(Into::into).collect()
}
+ /// **Note:** This is an **approximation** that strives to give the *human-perceived notion* of an "impl for type",
+ /// **not** answer the technical question "what are all impls applying to this type". In particular, it excludes
+ /// blanket impls, and only does a shallow type constructor check. In fact, this should've probably been on `Adt`
+ /// etc., and not on `Type`. If you would want to create a precise list of all impls applying to a type,
+ /// you would need to include blanket impls, and try to prove to predicates for each candidate.
pub fn all_for_type<'db>(db: &'db dyn HirDatabase, Type { ty, env }: Type<'db>) -> Vec<Impl> {
- let def_crates = match method_resolution::def_crates(db, ty, env.krate) {
- Some(def_crates) => def_crates,
- None => return Vec::new(),
+ let mut result = Vec::new();
+ let interner = DbInterner::new_with(db, Some(env.krate), env.block);
+ let Some(simplified_ty) =
+ fast_reject::simplify_type(interner, ty, fast_reject::TreatParams::AsRigid)
+ else {
+ return Vec::new();
};
-
- let filter = |impl_def: &Impl| {
- let self_ty = impl_def.self_ty(db);
- let rref = self_ty.remove_ref();
- ty.equals_ctor(rref.as_ref().map_or(self_ty.ty, |it| it.ty))
- };
-
- let fp = TyFingerprint::for_inherent_impl(ty);
- let fp = match fp {
- Some(fp) => fp,
- None => return Vec::new(),
- };
-
- let mut all = Vec::new();
- def_crates.iter().for_each(|&id| {
- all.extend(
- db.inherent_impls_in_crate(id)
- .for_self_ty(ty)
- .iter()
- .cloned()
- .map(Self::from)
- .filter(filter),
- )
- });
-
- for id in def_crates
- .iter()
- .flat_map(|&id| Crate { id }.transitive_reverse_dependencies(db))
- .map(|Crate { id }| id)
- {
- all.extend(
- db.trait_impls_in_crate(id)
- .for_self_ty_without_blanket_impls(fp)
- .map(Self::from)
- .filter(filter),
+ let mut extend_with_impls =
+ |impls: &[ImplId]| result.extend(impls.iter().copied().map(Impl::from));
+ extend_with_impls(method_resolution::incoherent_inherent_impls(db, simplified_ty));
+ if let Some(module) = method_resolution::simplified_type_module(db, &simplified_ty) {
+ InherentImpls::for_each_crate_and_block(
+ db,
+ module.krate(),
+ module.containing_block(),
+ &mut |impls| extend_with_impls(impls.for_self_ty(&simplified_ty)),
);
- }
-
- if let Some(block) = ty.as_adt().and_then(|(def, _)| def.module(db).containing_block()) {
- if let Some(inherent_impls) = db.inherent_impls_in_block(block) {
- all.extend(
- inherent_impls.for_self_ty(ty).iter().cloned().map(Self::from).filter(filter),
- );
+ std::iter::successors(module.containing_block(), |block| {
+ block.loc(db).module.containing_block()
+ })
+ .filter_map(|block| TraitImpls::for_block(db, block).as_deref())
+ .for_each(|impls| impls.for_self_ty(&simplified_ty, &mut extend_with_impls));
+ for &krate in &**db.all_crates() {
+ TraitImpls::for_crate(db, krate)
+ .for_self_ty(&simplified_ty, &mut extend_with_impls);
}
- if let Some(trait_impls) = db.trait_impls_in_block(block) {
- all.extend(
- trait_impls
- .for_self_ty_without_blanket_impls(fp)
- .map(Self::from)
- .filter(filter),
- );
+ } else {
+ for &krate in &**db.all_crates() {
+ TraitImpls::for_crate(db, krate)
+ .for_self_ty(&simplified_ty, &mut extend_with_impls);
}
}
-
- all
+ result
}
pub fn all_for_trait(db: &dyn HirDatabase, trait_: Trait) -> Vec<Impl> {
- let module = trait_.module(db);
- let krate = module.krate();
+ let module = trait_.module(db).id;
let mut all = Vec::new();
- for Crate { id } in krate.transitive_reverse_dependencies(db) {
- let impls = db.trait_impls_in_crate(id);
- all.extend(impls.for_trait(trait_.id).map(Self::from))
+ let mut handle_impls = |impls: &TraitImpls| {
+ impls.for_trait(trait_.id, |impls| all.extend(impls.iter().copied().map(Impl::from)));
+ };
+ for krate in module.krate().transitive_rev_deps(db) {
+ handle_impls(TraitImpls::for_crate(db, krate));
}
- if let Some(block) = module.id.containing_block()
- && let Some(trait_impls) = db.trait_impls_in_block(block)
+ if let Some(block) = module.containing_block()
+ && let Some(impls) = TraitImpls::for_block(db, block)
{
- all.extend(trait_impls.for_trait(trait_.id).map(Self::from));
+ handle_impls(impls);
}
all
}
@@ -5265,13 +5281,12 @@
}
}
- pub fn fingerprint_for_trait_impl(&self) -> Option<TyFingerprint> {
- TyFingerprint::for_trait_impl(self.ty)
- }
-
- pub(crate) fn canonical(&self, db: &'db dyn HirDatabase) -> Canonical<'db, Ty<'db>> {
- let interner = DbInterner::new_with(db, None, None);
- hir_ty::replace_errors_with_variables(interner, &self.ty)
+ pub fn fingerprint_for_trait_impl(&self) -> Option<SimplifiedType> {
+ fast_reject::simplify_type(
+ DbInterner::conjure(),
+ self.ty,
+ fast_reject::TreatParams::AsRigid,
+ )
}
/// Returns types that this type dereferences to (including this type itself). The returned
@@ -5295,11 +5310,10 @@
pub fn iterate_assoc_items<T>(
&self,
db: &'db dyn HirDatabase,
- krate: Crate,
mut callback: impl FnMut(AssocItem) -> Option<T>,
) -> Option<T> {
let mut slot = None;
- self.iterate_assoc_items_dyn(db, krate, &mut |assoc_item_id| {
+ self.iterate_assoc_items_dyn(db, &mut |assoc_item_id| {
slot = callback(assoc_item_id.into());
slot.is_some()
});
@@ -5309,24 +5323,36 @@
fn iterate_assoc_items_dyn(
&self,
db: &'db dyn HirDatabase,
- krate: Crate,
callback: &mut dyn FnMut(AssocItemId) -> bool,
) {
- let ty_ns = self.ty;
- let def_crates = match method_resolution::def_crates(db, ty_ns, krate.id) {
- Some(it) => it,
- None => return,
- };
- for krate in def_crates {
- let impls = db.inherent_impls_in_crate(krate);
-
- for impl_def in impls.for_self_ty(ty_ns) {
+ let mut handle_impls = |impls: &[ImplId]| {
+ for &impl_def in impls {
for &(_, item) in impl_def.impl_items(db).items.iter() {
if callback(item) {
return;
}
}
}
+ };
+
+ let interner = DbInterner::new_with(db, None, None);
+ let Some(simplified_type) =
+ fast_reject::simplify_type(interner, self.ty, fast_reject::TreatParams::AsRigid)
+ else {
+ return;
+ };
+
+ handle_impls(method_resolution::incoherent_inherent_impls(db, simplified_type));
+
+ if let Some(module) = method_resolution::simplified_type_module(db, &simplified_type) {
+ InherentImpls::for_each_crate_and_block(
+ db,
+ module.krate(),
+ module.containing_block(),
+ &mut |impls| {
+ handle_impls(impls.for_self_ty(&simplified_type));
+ },
+ );
}
}
@@ -5417,26 +5443,20 @@
db: &'db dyn HirDatabase,
scope: &SemanticsScope<'_>,
traits_in_scope: &FxHashSet<TraitId>,
- with_local_impls: Option<Module>,
name: Option<&Name>,
mut callback: impl FnMut(Function) -> Option<T>,
) -> Option<T> {
let _p = tracing::info_span!("iterate_method_candidates_with_traits").entered();
let mut slot = None;
- self.iterate_method_candidates_split_inherent(
- db,
- scope,
- traits_in_scope,
- with_local_impls,
- name,
- |f| match callback(f) {
+ self.iterate_method_candidates_split_inherent(db, scope, traits_in_scope, name, |f| {
+ match callback(f) {
it @ Some(_) => {
slot = it;
ControlFlow::Break(())
}
None => ControlFlow::Continue(()),
- },
- );
+ }
+ });
slot
}
@@ -5444,7 +5464,6 @@
&self,
db: &'db dyn HirDatabase,
scope: &SemanticsScope<'_>,
- with_local_impls: Option<Module>,
name: Option<&Name>,
callback: impl FnMut(Function) -> Option<T>,
) -> Option<T> {
@@ -5452,12 +5471,37 @@
db,
scope,
&scope.visible_traits().0,
- with_local_impls,
name,
callback,
)
}
+ fn with_method_resolution<R>(
+ &self,
+ db: &'db dyn HirDatabase,
+ resolver: &Resolver<'db>,
+ traits_in_scope: &FxHashSet<TraitId>,
+ f: impl FnOnce(&MethodResolutionContext<'_, 'db>) -> R,
+ ) -> R {
+ let module = resolver.module();
+ let interner = DbInterner::new_with(db, Some(module.krate()), module.containing_block());
+ let infcx = interner.infer_ctxt().build(TypingMode::PostAnalysis);
+ let unstable_features =
+ MethodResolutionUnstableFeatures::from_def_map(resolver.top_level_def_map());
+ let environment = resolver
+ .generic_def()
+ .map_or_else(|| TraitEnvironment::empty(module.krate()), |d| db.trait_environment(d));
+ let ctx = MethodResolutionContext {
+ infcx: &infcx,
+ resolver,
+ env: &environment,
+ traits_in_scope,
+ edition: resolver.krate().data(db).edition,
+ unstable_features: &unstable_features,
+ };
+ f(&ctx)
+ }
+
/// Allows you to treat inherent and non-inherent methods differently.
///
/// Note that inherent methods may actually be trait methods! For example, in `dyn Trait`, the trait's methods
@@ -5467,67 +5511,77 @@
db: &'db dyn HirDatabase,
scope: &SemanticsScope<'_>,
traits_in_scope: &FxHashSet<TraitId>,
- with_local_impls: Option<Module>,
name: Option<&Name>,
- callback: impl MethodCandidateCallback,
+ mut callback: impl MethodCandidateCallback,
) {
- struct Callback<T>(T);
-
- impl<T: MethodCandidateCallback> method_resolution::MethodCandidateCallback for Callback<T> {
- fn on_inherent_method(
- &mut self,
- _adjustments: method_resolution::ReceiverAdjustments,
- item: AssocItemId,
- _is_visible: bool,
- ) -> ControlFlow<()> {
- if let AssocItemId::FunctionId(func) = item {
- self.0.on_inherent_method(func.into())
- } else {
- ControlFlow::Continue(())
- }
- }
-
- fn on_trait_method(
- &mut self,
- _adjustments: method_resolution::ReceiverAdjustments,
- item: AssocItemId,
- _is_visible: bool,
- ) -> ControlFlow<()> {
- if let AssocItemId::FunctionId(func) = item {
- self.0.on_trait_method(func.into())
- } else {
- ControlFlow::Continue(())
- }
- }
- }
-
let _p = tracing::info_span!(
- "iterate_method_candidates_dyn",
- with_local_impls = traits_in_scope.len(),
+ "iterate_method_candidates_split_inherent",
traits_in_scope = traits_in_scope.len(),
?name,
)
.entered();
- let interner = DbInterner::new_with(db, None, None);
- // There should be no inference vars in types passed here
- let canonical = hir_ty::replace_errors_with_variables(interner, &self.ty);
- let krate = scope.krate();
- let environment = scope
- .resolver()
- .generic_def()
- .map_or_else(|| TraitEnvironment::empty(krate.id), |d| db.trait_environment(d));
+ self.with_method_resolution(db, scope.resolver(), traits_in_scope, |ctx| {
+ // There should be no inference vars in types passed here
+ let canonical = hir_ty::replace_errors_with_variables(ctx.infcx.interner, &self.ty);
+ let (self_ty, _) = ctx.infcx.instantiate_canonical(&canonical);
- _ = method_resolution::iterate_method_candidates_dyn(
- &canonical,
- db,
- environment,
- traits_in_scope,
- with_local_impls.and_then(|b| b.id.containing_block()).into(),
- name,
- method_resolution::LookupMode::MethodCall,
- &mut Callback(callback),
- );
+ match name {
+ Some(name) => {
+ match ctx.probe_for_name(
+ method_resolution::Mode::MethodCall,
+ name.clone(),
+ self_ty,
+ ) {
+ Ok(candidate)
+ | Err(method_resolution::MethodError::PrivateMatch(candidate)) => {
+ let method_resolution::CandidateId::FunctionId(id) = candidate.item
+ else {
+ unreachable!("`Mode::MethodCall` can only return functions");
+ };
+ let id = Function { id };
+ match candidate.kind {
+ method_resolution::PickKind::InherentImplPick(_)
+ | method_resolution::PickKind::ObjectPick(..)
+ | method_resolution::PickKind::WhereClausePick(..) => {
+ // Candidates from where clauses and trait objects are considered inherent.
+ _ = callback.on_inherent_method(id);
+ }
+ method_resolution::PickKind::TraitPick(..) => {
+ _ = callback.on_trait_method(id);
+ }
+ }
+ }
+ Err(_) => {}
+ };
+ }
+ None => {
+ _ = ctx.probe_all(method_resolution::Mode::MethodCall, self_ty).try_for_each(
+ |candidate| {
+ let method_resolution::CandidateId::FunctionId(id) =
+ candidate.candidate.item
+ else {
+ unreachable!("`Mode::MethodCall` can only return functions");
+ };
+ let id = Function { id };
+ match candidate.candidate.kind {
+ method_resolution::CandidateKind::InherentImplCandidate {
+ ..
+ }
+ | method_resolution::CandidateKind::ObjectCandidate(..)
+ | method_resolution::CandidateKind::WhereClauseCandidate(..) => {
+ // Candidates from where clauses and trait objects are considered inherent.
+ callback.on_inherent_method(id)
+ }
+ method_resolution::CandidateKind::TraitCandidate(..) => {
+ callback.on_trait_method(id)
+ }
+ }
+ },
+ );
+ }
+ }
+ })
}
#[tracing::instrument(skip_all, fields(name = ?name))]
@@ -5536,27 +5590,21 @@
db: &'db dyn HirDatabase,
scope: &SemanticsScope<'_>,
traits_in_scope: &FxHashSet<TraitId>,
- with_local_impls: Option<Module>,
name: Option<&Name>,
mut callback: impl FnMut(AssocItem) -> Option<T>,
) -> Option<T> {
let _p = tracing::info_span!("iterate_path_candidates").entered();
let mut slot = None;
- self.iterate_path_candidates_split_inherent(
- db,
- scope,
- traits_in_scope,
- with_local_impls,
- name,
- |item| match callback(item) {
+ self.iterate_path_candidates_split_inherent(db, scope, traits_in_scope, name, |item| {
+ match callback(item) {
it @ Some(_) => {
slot = it;
ControlFlow::Break(())
}
None => ControlFlow::Continue(()),
- },
- );
+ }
+ });
slot
}
@@ -5571,50 +5619,68 @@
db: &'db dyn HirDatabase,
scope: &SemanticsScope<'_>,
traits_in_scope: &FxHashSet<TraitId>,
- with_local_impls: Option<Module>,
name: Option<&Name>,
- callback: impl PathCandidateCallback,
+ mut callback: impl PathCandidateCallback,
) {
- struct Callback<T>(T);
+ let _p = tracing::info_span!(
+ "iterate_path_candidates_split_inherent",
+ traits_in_scope = traits_in_scope.len(),
+ ?name,
+ )
+ .entered();
- impl<T: PathCandidateCallback> method_resolution::MethodCandidateCallback for Callback<T> {
- fn on_inherent_method(
- &mut self,
- _adjustments: method_resolution::ReceiverAdjustments,
- item: AssocItemId,
- _is_visible: bool,
- ) -> ControlFlow<()> {
- self.0.on_inherent_item(item.into())
+ self.with_method_resolution(db, scope.resolver(), traits_in_scope, |ctx| {
+ // There should be no inference vars in types passed here
+ let canonical = hir_ty::replace_errors_with_variables(ctx.infcx.interner, &self.ty);
+ let (self_ty, _) = ctx.infcx.instantiate_canonical(&canonical);
+
+ match name {
+ Some(name) => {
+ match ctx.probe_for_name(
+ method_resolution::Mode::MethodCall,
+ name.clone(),
+ self_ty,
+ ) {
+ Ok(candidate)
+ | Err(method_resolution::MethodError::PrivateMatch(candidate)) => {
+ let id = candidate.item.into();
+ match candidate.kind {
+ method_resolution::PickKind::InherentImplPick(_)
+ | method_resolution::PickKind::ObjectPick(..)
+ | method_resolution::PickKind::WhereClausePick(..) => {
+ // Candidates from where clauses and trait objects are considered inherent.
+ _ = callback.on_inherent_item(id);
+ }
+ method_resolution::PickKind::TraitPick(..) => {
+ _ = callback.on_trait_item(id);
+ }
+ }
+ }
+ Err(_) => {}
+ };
+ }
+ None => {
+ _ = ctx.probe_all(method_resolution::Mode::Path, self_ty).try_for_each(
+ |candidate| {
+ let id = candidate.candidate.item.into();
+ match candidate.candidate.kind {
+ method_resolution::CandidateKind::InherentImplCandidate {
+ ..
+ }
+ | method_resolution::CandidateKind::ObjectCandidate(..)
+ | method_resolution::CandidateKind::WhereClauseCandidate(..) => {
+ // Candidates from where clauses and trait objects are considered inherent.
+ callback.on_inherent_item(id)
+ }
+ method_resolution::CandidateKind::TraitCandidate(..) => {
+ callback.on_trait_item(id)
+ }
+ }
+ },
+ );
+ }
}
-
- fn on_trait_method(
- &mut self,
- _adjustments: method_resolution::ReceiverAdjustments,
- item: AssocItemId,
- _is_visible: bool,
- ) -> ControlFlow<()> {
- self.0.on_trait_item(item.into())
- }
- }
-
- let interner = DbInterner::new_with(db, None, None);
- let canonical = hir_ty::replace_errors_with_variables(interner, &self.ty);
-
- let krate = scope.krate();
- let environment = scope
- .resolver()
- .generic_def()
- .map_or_else(|| TraitEnvironment::empty(krate.id), |d| db.trait_environment(d));
-
- _ = method_resolution::iterate_path_candidates(
- &canonical,
- db,
- environment,
- traits_in_scope,
- with_local_impls.and_then(|b| b.id.containing_block()).into(),
- name,
- &mut Callback(callback),
- );
+ })
}
pub fn as_adt(&self) -> Option<Adt> {
diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index ec43442..769cfd9 100644
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -1581,9 +1581,9 @@
hir_ty::Adjust::Borrow(hir_ty::AutoBorrow::RawPtr(m)) => {
Adjust::Borrow(AutoBorrow::RawPtr(mutability(m)))
}
- hir_ty::Adjust::Borrow(hir_ty::AutoBorrow::Ref(_, m)) => {
+ hir_ty::Adjust::Borrow(hir_ty::AutoBorrow::Ref(m)) => {
// FIXME: Handle lifetimes here
- Adjust::Borrow(AutoBorrow::Ref(mutability(m)))
+ Adjust::Borrow(AutoBorrow::Ref(mutability(m.into())))
}
hir_ty::Adjust::Pointer(pc) => Adjust::Pointer(pc),
};
diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs
index f994ed2..ae328a9 100644
--- a/crates/hir/src/source_analyzer.rs
+++ b/crates/hir/src/source_analyzer.rs
@@ -35,7 +35,7 @@
unsafe_operations,
},
lang_items::lang_items_for_bin_op,
- method_resolution,
+ method_resolution::{self, CandidateId},
next_solver::{
DbInterner, ErrorGuaranteed, GenericArgs, Ty, TyKind, TypingMode, infer::DbInternerInferExt,
},
@@ -651,8 +651,9 @@
let lhs = self.ty_of_expr(binop_expr.lhs()?)?;
let rhs = self.ty_of_expr(binop_expr.rhs()?)?;
- let (_op_trait, op_fn) = lang_items_for_bin_op(op)
- .and_then(|(name, lang_item)| self.lang_trait_fn(db, lang_item, &name))?;
+ let (_op_trait, op_fn) = lang_items_for_bin_op(op).and_then(|(name, lang_item)| {
+ self.lang_trait_fn(db, lang_item, &Name::new_symbol_root(name))
+ })?;
// HACK: subst for `index()` coincides with that for `Index` because `index()` itself
// doesn't have any generic parameters, so we skip building another subst for `index()`.
let interner = DbInterner::new_with(db, None, None);
@@ -861,7 +862,7 @@
let expr_id = self.expr_id(path_expr.into())?;
if let Some((assoc, subs)) = infer.assoc_resolutions_for_expr_or_pat(expr_id) {
let (assoc, subst) = match assoc {
- AssocItemId::FunctionId(f_in_trait) => {
+ CandidateId::FunctionId(f_in_trait) => {
match infer.type_of_expr_or_pat(expr_id) {
None => {
let subst = GenericSubstitution::new(
@@ -869,7 +870,7 @@
subs,
self.trait_environment(db),
);
- (assoc, subst)
+ (AssocItemId::from(f_in_trait), subst)
}
Some(func_ty) => {
if let TyKind::FnDef(_fn_def, subs) = func_ty.kind() {
@@ -889,12 +890,12 @@
subs,
self.trait_environment(db),
);
- (assoc, subst)
+ (f_in_trait.into(), subst)
}
}
}
}
- AssocItemId::ConstId(const_id) => {
+ CandidateId::ConstId(const_id) => {
let (konst, subst) =
self.resolve_impl_const_or_trait_def_with_subst(db, const_id, subs);
let subst = GenericSubstitution::new(
@@ -904,14 +905,6 @@
);
(konst.into(), subst)
}
- AssocItemId::TypeAliasId(type_alias) => (
- assoc,
- GenericSubstitution::new(
- type_alias.into(),
- subs,
- self.trait_environment(db),
- ),
- ),
};
return Some((PathResolution::Def(AssocItem::from(assoc).into()), Some(subst)));
@@ -927,7 +920,7 @@
if let Some((assoc, subs)) = infer.assoc_resolutions_for_expr_or_pat(expr_or_pat_id)
{
let (assoc, subst) = match assoc {
- AssocItemId::ConstId(const_id) => {
+ CandidateId::ConstId(const_id) => {
let (konst, subst) =
self.resolve_impl_const_or_trait_def_with_subst(db, const_id, subs);
let subst = GenericSubstitution::new(
@@ -935,12 +928,12 @@
subst,
self.trait_environment(db),
);
- (konst.into(), subst)
+ (AssocItemId::from(konst), subst)
}
- assoc => (
- assoc,
+ CandidateId::FunctionId(function_id) => (
+ function_id.into(),
GenericSubstitution::new(
- assoc.into(),
+ function_id.into(),
subs,
self.trait_environment(db),
),
diff --git a/crates/ide-assists/Cargo.toml b/crates/ide-assists/Cargo.toml
index 385b0e1..dcccf13f 100644
--- a/crates/ide-assists/Cargo.toml
+++ b/crates/ide-assists/Cargo.toml
@@ -18,7 +18,7 @@
itertools.workspace = true
either.workspace = true
smallvec.workspace = true
-tracing.workspace = true
+tracing = { workspace = true, features = ["attributes"] }
# local deps
stdx.workspace = true
diff --git a/crates/ide-assists/src/handlers/add_explicit_enum_discriminant.rs b/crates/ide-assists/src/handlers/add_explicit_enum_discriminant.rs
index 10b0879..7960373 100644
--- a/crates/ide-assists/src/handlers/add_explicit_enum_discriminant.rs
+++ b/crates/ide-assists/src/handlers/add_explicit_enum_discriminant.rs
@@ -1,8 +1,11 @@
use hir::Semantics;
use ide_db::{RootDatabase, assists::AssistId, source_change::SourceChangeBuilder};
-use syntax::{AstNode, ast};
+use syntax::{
+ AstNode,
+ ast::{self, Radix},
+};
-use crate::{AssistContext, Assists};
+use crate::{AssistContext, Assists, utils::add_group_separators};
// Assist: add_explicit_enum_discriminant
//
@@ -53,8 +56,9 @@
"Add explicit enum discriminants",
enum_node.syntax().text_range(),
|builder| {
+ let mut radix = Radix::Decimal;
for variant_node in variant_list.variants() {
- add_variant_discriminant(&ctx.sema, builder, &variant_node);
+ add_variant_discriminant(&ctx.sema, builder, &variant_node, &mut radix);
}
},
);
@@ -66,8 +70,10 @@
sema: &Semantics<'_, RootDatabase>,
builder: &mut SourceChangeBuilder,
variant_node: &ast::Variant,
+ radix: &mut Radix,
) {
- if variant_node.expr().is_some() {
+ if let Some(expr) = variant_node.expr() {
+ *radix = expr_radix(&expr).unwrap_or(*radix);
return;
}
@@ -80,7 +86,24 @@
let variant_range = variant_node.syntax().text_range();
- builder.insert(variant_range.end(), format!(" = {discriminant}"));
+ let (group_size, prefix, text) = match radix {
+ Radix::Binary => (4, "0b", format!("{discriminant:b}")),
+ Radix::Octal => (3, "0o", format!("{discriminant:o}")),
+ Radix::Decimal => (6, "", discriminant.to_string()),
+ Radix::Hexadecimal => (4, "0x", format!("{discriminant:x}")),
+ };
+ let pretty_num = add_group_separators(&text, group_size);
+ builder.insert(variant_range.end(), format!(" = {prefix}{pretty_num}"));
+}
+
+fn expr_radix(expr: &ast::Expr) -> Option<Radix> {
+ if let ast::Expr::Literal(lit) = expr
+ && let ast::LiteralKind::IntNumber(num) = lit.kind()
+ {
+ Some(num.radix())
+ } else {
+ None
+ }
}
#[cfg(test)]
@@ -172,9 +195,9 @@
#[repr(i64)]
enum TheEnum {
Foo = 1 << 63,
- Bar = -9223372036854775807,
+ Bar = -9_223372_036854_775807,
Baz = 0x7fff_ffff_ffff_fffe,
- Quux = 9223372036854775807,
+ Quux = 0x7fff_ffff_ffff_ffff,
}
"#,
);
diff --git a/crates/ide-assists/src/handlers/add_missing_impl_members.rs b/crates/ide-assists/src/handlers/add_missing_impl_members.rs
index 7e03eb3..636cbfe 100644
--- a/crates/ide-assists/src/handlers/add_missing_impl_members.rs
+++ b/crates/ide-assists/src/handlers/add_missing_impl_members.rs
@@ -2470,4 +2470,37 @@
}"#,
);
}
+
+ #[test]
+ fn test_parameter_names_matching_macros_not_qualified() {
+ // Parameter names that match macro names should not be qualified
+ check_assist(
+ add_missing_impl_members,
+ r#"
+//- /lib.rs crate:dep
+#[macro_export]
+macro_rules! my_macro {
+ () => {}
+}
+
+pub trait Foo {
+ fn foo(&self, my_macro: usize);
+}
+
+//- /main.rs crate:main deps:dep
+struct Bar;
+
+impl dep::Foo for Bar {$0}
+"#,
+ r#"
+struct Bar;
+
+impl dep::Foo for Bar {
+ fn foo(&self, my_macro: usize) {
+ ${0:todo!()}
+ }
+}
+"#,
+ );
+ }
}
diff --git a/crates/ide-assists/src/handlers/add_missing_match_arms.rs b/crates/ide-assists/src/handlers/add_missing_match_arms.rs
index 7843ab9..3eeff2a 100644
--- a/crates/ide-assists/src/handlers/add_missing_match_arms.rs
+++ b/crates/ide-assists/src/handlers/add_missing_match_arms.rs
@@ -67,9 +67,9 @@
}
.map(move |pat| (pat, has_guard))
})
- .map(|(pat, has_guard)| {
+ .filter_map(|(pat, has_guard)| {
has_catch_all_arm |= !has_guard && matches!(pat, Pat::WildcardPat(_));
- pat
+ (!has_guard).then_some(pat)
})
// Exclude top level wildcards so that they are expanded by this assist, retains status quo in #8129.
.filter(|pat| !matches!(pat, Pat::WildcardPat(_)))
@@ -998,7 +998,8 @@
A::Ds(_value) => { let x = 1; }
A::Es(B::Xs) => (),
A::As => ${1:todo!()},
- A::Cs => ${2:todo!()},$0
+ A::Bs => ${2:todo!()},
+ A::Cs => ${3:todo!()},$0
}
}
"#,
diff --git a/crates/ide-assists/src/handlers/convert_char_literal.rs b/crates/ide-assists/src/handlers/convert_char_literal.rs
new file mode 100644
index 0000000..0a50ba8
--- /dev/null
+++ b/crates/ide-assists/src/handlers/convert_char_literal.rs
@@ -0,0 +1,97 @@
+use syntax::{AstToken, ast};
+
+use crate::{AssistContext, AssistId, Assists, GroupLabel};
+
+// Assist: convert_char_literal
+//
+// Converts character literals between different representations. Currently supports normal character -> ASCII / Unicode escape.
+// ```
+// const _: char = 'a'$0;
+// ```
+// ->
+// ```
+// const _: char = '\x61';
+// ```
+pub(crate) fn convert_char_literal(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ if !ctx.has_empty_selection() {
+ return None;
+ }
+
+ let literal = ctx.find_node_at_offset::<ast::Literal>()?;
+ let literal = match literal.kind() {
+ ast::LiteralKind::Char(it) => it,
+ _ => return None,
+ };
+
+ let value = literal.value().ok()?;
+ let text = literal.syntax().text().to_owned();
+ let range = literal.syntax().text_range();
+ let group_id = GroupLabel("Convert char representation".into());
+
+ let mut add_assist = |converted: String| {
+ // Skip no-op assists (e.g. `'const C: char = '\\x61';'` already matches the ASCII form).
+ if converted == text {
+ return;
+ }
+ let label = format!("Convert {text} to {converted}");
+ acc.add_group(
+ &group_id,
+ AssistId::refactor_rewrite("convert_char_literal"),
+ label,
+ range,
+ |builder| builder.replace(range, converted),
+ );
+ };
+
+ if value.is_ascii() {
+ add_assist(format!("'\\x{:02x}'", value as u32));
+ }
+
+ add_assist(format!("'\\u{{{:x}}}'", value as u32));
+
+ Some(())
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::check_assist_by_label;
+
+ use super::convert_char_literal;
+
+ #[test]
+ fn ascii_char_to_ascii_and_unicode() {
+ let before = "const _: char = 'a'$0;";
+ check_assist_by_label(
+ convert_char_literal,
+ before,
+ "const _: char = '\\x61';",
+ "Convert 'a' to '\\x61'",
+ );
+ check_assist_by_label(
+ convert_char_literal,
+ before,
+ "const _: char = '\\u{61}';",
+ "Convert 'a' to '\\u{61}'",
+ );
+ }
+
+ #[test]
+ fn non_ascii_char_only_unicode() {
+ check_assist_by_label(
+ convert_char_literal,
+ "const _: char = '😀'$0;",
+ "const _: char = '\\u{1f600}';",
+ "Convert '😀' to '\\u{1f600}'",
+ );
+ }
+
+ #[test]
+ fn ascii_escape_can_convert_to_unicode() {
+ check_assist_by_label(
+ convert_char_literal,
+ "const _: char = '\\x61'$0;",
+ "const _: char = '\\u{61}';",
+ "Convert '\\x61' to '\\u{61}'",
+ );
+ }
+}
diff --git a/crates/ide-assists/src/handlers/convert_for_to_while_let.rs b/crates/ide-assists/src/handlers/convert_for_to_while_let.rs
index 2d6a59a..156286d 100644
--- a/crates/ide-assists/src/handlers/convert_for_to_while_let.rs
+++ b/crates/ide-assists/src/handlers/convert_for_to_while_let.rs
@@ -129,7 +129,7 @@
let iter_trait = FamousDefs(sema, krate).core_iter_Iterator()?;
let has_wanted_method = ty
- .iterate_method_candidates(sema.db, &scope, None, Some(&wanted_method), |func| {
+ .iterate_method_candidates(sema.db, &scope, Some(&wanted_method), |func| {
if func.ret_type(sema.db).impls_trait(sema.db, iter_trait, &[]) {
return Some(());
}
diff --git a/crates/ide-assists/src/handlers/convert_iter_for_each_to_for.rs b/crates/ide-assists/src/handlers/convert_iter_for_each_to_for.rs
index 3917ca1..c8a244b 100644
--- a/crates/ide-assists/src/handlers/convert_iter_for_each_to_for.rs
+++ b/crates/ide-assists/src/handlers/convert_iter_for_each_to_for.rs
@@ -165,7 +165,7 @@
let iter_trait = FamousDefs(sema, krate).core_iter_Iterator()?;
let has_wanted_method = ty
- .iterate_method_candidates(sema.db, &scope, None, Some(&wanted_method), |func| {
+ .iterate_method_candidates(sema.db, &scope, Some(&wanted_method), |func| {
if func.ret_type(sema.db).impls_trait(sema.db, iter_trait, &[]) {
return Some(());
}
diff --git a/crates/ide-assists/src/handlers/extract_function.rs b/crates/ide-assists/src/handlers/extract_function.rs
index 9a9adf2..44d020a 100644
--- a/crates/ide-assists/src/handlers/extract_function.rs
+++ b/crates/ide-assists/src/handlers/extract_function.rs
@@ -2060,7 +2060,7 @@
.filter_map(|reference| path_element_of_reference(syntax, reference))
.map(|expr| tm.make_mut(&expr));
- usages_for_param.push((param, usages.collect()));
+ usages_for_param.push((param, usages.unique().collect()));
}
let res = tm.make_syntax_mut(syntax);
@@ -4250,7 +4250,7 @@
check_assist(
extract_function,
r#"
-//- minicore: option
+//- minicore: option, add, builtin_impls
fn bar() -> Option<i32> { None }
fn foo() -> Option<()> {
let n = bar()?;
@@ -4314,7 +4314,7 @@
check_assist(
extract_function,
r#"
-//- minicore: result
+//- minicore: result, add, builtin_impls
fn foo() -> Result<(), i64> {
let n = 1;
$0let k = foo()?;
@@ -4345,7 +4345,7 @@
check_assist(
extract_function,
r#"
-//- minicore: option
+//- minicore: option, add, builtin_impls
fn foo() -> Option<()> {
let n = 1;
$0let k = foo()?;
@@ -4382,7 +4382,7 @@
check_assist(
extract_function,
r#"
-//- minicore: result
+//- minicore: result, add, builtin_impls
fn foo() -> Result<(), i64> {
let n = 1;
$0let k = foo()?;
@@ -4441,7 +4441,7 @@
check_assist(
extract_function,
r#"
-//- minicore: result
+//- minicore: result, add, builtin_impls
fn foo() -> Result<(), i64> {
let n = 1;
$0let k = foo()?;
@@ -6233,4 +6233,64 @@
cov_mark::check!(extract_function_in_braces_is_not_applicable);
check_assist_not_applicable(extract_function, r"fn foo(arr: &mut $0[$0i32]) {}");
}
+
+ #[test]
+ fn issue_20965_panic() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: fmt
+#[derive(Debug)]
+struct Foo(&'static str);
+
+impl Foo {
+ fn text(&self) -> &str { self.0 }
+}
+
+fn main() {
+ let s = Foo("");
+ $0print!("{}{}", s, s);$0
+ let _ = s.text() == "";
+}"#,
+ r#"
+#[derive(Debug)]
+struct Foo(&'static str);
+
+impl Foo {
+ fn text(&self) -> &str { self.0 }
+}
+
+fn main() {
+ let s = Foo("");
+ fun_name(&s);
+ let _ = s.text() == "";
+}
+
+fn $0fun_name(s: &Foo) {
+ *print!("{}{}", s, s);
+}"#,
+ );
+ }
+
+ #[test]
+ fn parameter_is_added_used_in_eq_expression_in_macro() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: fmt
+fn foo() {
+ let v = 123;
+ $0print!("{v:?}{}", v == 123);$0
+}"#,
+ r#"
+fn foo() {
+ let v = 123;
+ fun_name(v);
+}
+
+fn $0fun_name(v: i32) {
+ print!("{v:?}{}", v == 123);
+}"#,
+ );
+ }
}
diff --git a/crates/ide-assists/src/handlers/extract_variable.rs b/crates/ide-assists/src/handlers/extract_variable.rs
index da59626..7c60184 100644
--- a/crates/ide-assists/src/handlers/extract_variable.rs
+++ b/crates/ide-assists/src/handlers/extract_variable.rs
@@ -2189,7 +2189,7 @@
//- minicore: index
struct X;
-impl std::ops::Index<usize> for X {
+impl core::ops::Index<usize> for X {
type Output = i32;
fn index(&self) -> &Self::Output { 0 }
}
@@ -2204,7 +2204,7 @@
r#"
struct X;
-impl std::ops::Index<usize> for X {
+impl core::ops::Index<usize> for X {
type Output = i32;
fn index(&self) -> &Self::Output { 0 }
}
@@ -2214,8 +2214,8 @@
}
fn foo(s: &S) {
- let $0sub = &s.sub;
- sub[0];
+ let $0x = &s.sub;
+ x[0];
}"#,
"Extract into variable",
);
diff --git a/crates/ide-assists/src/handlers/generate_delegate_methods.rs b/crates/ide-assists/src/handlers/generate_delegate_methods.rs
index d8a2e03..b2e791a 100644
--- a/crates/ide-assists/src/handlers/generate_delegate_methods.rs
+++ b/crates/ide-assists/src/handlers/generate_delegate_methods.rs
@@ -1,4 +1,4 @@
-use hir::{HasCrate, HasVisibility};
+use hir::HasVisibility;
use ide_db::{FxHashSet, path_transform::PathTransform};
use syntax::{
ast::{
@@ -79,8 +79,7 @@
let mut seen_names = FxHashSet::default();
for ty in sema_field_ty.autoderef(ctx.db()) {
- let krate = ty.krate(ctx.db());
- ty.iterate_assoc_items(ctx.db(), krate, |item| {
+ ty.iterate_assoc_items(ctx.db(), |item| {
if let hir::AssocItem::Function(f) = item {
let name = f.name(ctx.db());
if f.self_param(ctx.db()).is_some()
diff --git a/crates/ide-assists/src/handlers/generate_from_impl_for_enum.rs b/crates/ide-assists/src/handlers/generate_from_impl_for_enum.rs
index d88b0f3..6a86823 100644
--- a/crates/ide-assists/src/handlers/generate_from_impl_for_enum.rs
+++ b/crates/ide-assists/src/handlers/generate_from_impl_for_enum.rs
@@ -2,7 +2,10 @@
use ide_db::{RootDatabase, famous_defs::FamousDefs};
use syntax::ast::{self, AstNode, HasName};
-use crate::{AssistContext, AssistId, Assists, utils::generate_trait_impl_text_intransitive};
+use crate::{
+ AssistContext, AssistId, Assists,
+ utils::{generate_trait_impl_text_intransitive, is_selected},
+};
// Assist: generate_from_impl_for_enum
//
@@ -26,8 +29,68 @@
ctx: &AssistContext<'_>,
) -> Option<()> {
let variant = ctx.find_node_at_offset::<ast::Variant>()?;
- let variant_name = variant.name()?;
- let enum_ = ast::Adt::Enum(variant.parent_enum());
+ let adt = ast::Adt::Enum(variant.parent_enum());
+ let variants = selected_variants(ctx, &variant)?;
+
+ let target = variant.syntax().text_range();
+ acc.add(
+ AssistId::generate("generate_from_impl_for_enum"),
+ "Generate `From` impl for this enum variant(s)",
+ target,
+ |edit| {
+ let start_offset = variant.parent_enum().syntax().text_range().end();
+ let from_impl = variants
+ .into_iter()
+ .map(|variant_info| {
+ let from_trait = format!("From<{}>", variant_info.ty);
+ let impl_code = generate_impl_code(variant_info);
+ generate_trait_impl_text_intransitive(&adt, &from_trait, &impl_code)
+ })
+ .collect::<String>();
+ edit.insert(start_offset, from_impl);
+ },
+ )
+}
+
+fn generate_impl_code(VariantInfo { name, field_name, ty }: VariantInfo) -> String {
+ if let Some(field) = field_name {
+ format!(
+ r#" fn from({field}: {ty}) -> Self {{
+ Self::{name} {{ {field} }}
+ }}"#
+ )
+ } else {
+ format!(
+ r#" fn from(v: {ty}) -> Self {{
+ Self::{name}(v)
+ }}"#
+ )
+ }
+}
+
+struct VariantInfo {
+ name: ast::Name,
+ field_name: Option<ast::Name>,
+ ty: ast::Type,
+}
+
+fn selected_variants(ctx: &AssistContext<'_>, variant: &ast::Variant) -> Option<Vec<VariantInfo>> {
+ variant
+ .parent_enum()
+ .variant_list()?
+ .variants()
+ .filter(|it| is_selected(it, ctx.selection_trimmed(), true))
+ .map(|variant| {
+ let (name, ty) = extract_variant_info(&ctx.sema, &variant)?;
+ Some(VariantInfo { name: variant.name()?, field_name: name, ty })
+ })
+ .collect()
+}
+
+fn extract_variant_info(
+ sema: &'_ hir::Semantics<'_, RootDatabase>,
+ variant: &ast::Variant,
+) -> Option<(Option<ast::Name>, ast::Type)> {
let (field_name, field_type) = match variant.kind() {
ast::StructKind::Tuple(field_list) => {
if field_list.fields().count() != 1 {
@@ -45,36 +108,11 @@
ast::StructKind::Unit => return None,
};
- if existing_from_impl(&ctx.sema, &variant).is_some() {
+ if existing_from_impl(sema, variant).is_some() {
cov_mark::hit!(test_add_from_impl_already_exists);
return None;
}
-
- let target = variant.syntax().text_range();
- acc.add(
- AssistId::generate("generate_from_impl_for_enum"),
- "Generate `From` impl for this enum variant",
- target,
- |edit| {
- let start_offset = variant.parent_enum().syntax().text_range().end();
- let from_trait = format!("From<{field_type}>");
- let impl_code = if let Some(name) = field_name {
- format!(
- r#" fn from({name}: {field_type}) -> Self {{
- Self::{variant_name} {{ {name} }}
- }}"#
- )
- } else {
- format!(
- r#" fn from(v: {field_type}) -> Self {{
- Self::{variant_name}(v)
- }}"#
- )
- };
- let from_impl = generate_trait_impl_text_intransitive(&enum_, &from_trait, &impl_code);
- edit.insert(start_offset, from_impl);
- },
- )
+ Some((field_name, field_type))
}
fn existing_from_impl(
@@ -123,6 +161,32 @@
);
}
+ #[test]
+ fn test_generate_from_impl_for_multiple_enum_variants() {
+ check_assist(
+ generate_from_impl_for_enum,
+ r#"
+//- minicore: from
+enum A { $0Foo(u32), Bar$0(i32) }
+"#,
+ r#"
+enum A { Foo(u32), Bar(i32) }
+
+impl From<u32> for A {
+ fn from(v: u32) -> Self {
+ Self::Foo(v)
+ }
+}
+
+impl From<i32> for A {
+ fn from(v: i32) -> Self {
+ Self::Bar(v)
+ }
+}
+"#,
+ );
+ }
+
// FIXME(next-solver): it would be nice to not be *required* to resolve the
// path in order to properly generate assists
#[test]
diff --git a/crates/ide-assists/src/handlers/generate_is_empty_from_len.rs b/crates/ide-assists/src/handlers/generate_is_empty_from_len.rs
index af9c493..f10b21b 100644
--- a/crates/ide-assists/src/handlers/generate_is_empty_from_len.rs
+++ b/crates/ide-assists/src/handlers/generate_is_empty_from_len.rs
@@ -95,7 +95,7 @@
let scope = ctx.sema.scope(impl_.syntax())?;
let ty = impl_def.self_ty(db);
- ty.iterate_method_candidates(db, &scope, None, Some(fn_name), Some)
+ ty.iterate_method_candidates(db, &scope, Some(fn_name), Some)
}
#[cfg(test)]
diff --git a/crates/ide-assists/src/handlers/move_const_to_impl.rs b/crates/ide-assists/src/handlers/move_const_to_impl.rs
index a645c8b..102d7e6 100644
--- a/crates/ide-assists/src/handlers/move_const_to_impl.rs
+++ b/crates/ide-assists/src/handlers/move_const_to_impl.rs
@@ -1,4 +1,4 @@
-use hir::{AsAssocItem, AssocItemContainer, FileRange, HasCrate, HasSource};
+use hir::{AsAssocItem, AssocItemContainer, FileRange, HasSource};
use ide_db::{assists::AssistId, defs::Definition, search::SearchScope};
use syntax::{
SyntaxKind,
@@ -70,7 +70,7 @@
let ty = impl_.self_ty(db);
// If there exists another associated item with the same name, skip the assist.
if ty
- .iterate_assoc_items(db, ty.krate(db), |assoc| {
+ .iterate_assoc_items(db, |assoc| {
// Type aliases wouldn't conflict due to different namespaces, but we're only checking
// the items in inherent impls, so we assume `assoc` is never type alias for the sake
// of brevity (inherent associated types exist in nightly Rust, but it's *very*
diff --git a/crates/ide-assists/src/handlers/number_representation.rs b/crates/ide-assists/src/handlers/number_representation.rs
index 1fe40f8..fac81ae 100644
--- a/crates/ide-assists/src/handlers/number_representation.rs
+++ b/crates/ide-assists/src/handlers/number_representation.rs
@@ -1,6 +1,6 @@
use syntax::{AstToken, ast, ast::Radix};
-use crate::{AssistContext, AssistId, Assists, GroupLabel};
+use crate::{AssistContext, AssistId, Assists, GroupLabel, utils::add_group_separators};
const MIN_NUMBER_OF_DIGITS_TO_FORMAT: usize = 5;
@@ -70,18 +70,6 @@
}
}
-fn add_group_separators(s: &str, group_size: usize) -> String {
- let mut chars = Vec::new();
- for (i, ch) in s.chars().filter(|&ch| ch != '_').rev().enumerate() {
- if i > 0 && i % group_size == 0 {
- chars.push('_');
- }
- chars.push(ch);
- }
-
- chars.into_iter().rev().collect()
-}
-
#[cfg(test)]
mod tests {
use crate::tests::{check_assist_by_label, check_assist_not_applicable, check_assist_target};
diff --git a/crates/ide-assists/src/handlers/remove_parentheses.rs b/crates/ide-assists/src/handlers/remove_parentheses.rs
index d514c1c..aa4d2bc 100644
--- a/crates/ide-assists/src/handlers/remove_parentheses.rs
+++ b/crates/ide-assists/src/handlers/remove_parentheses.rs
@@ -163,6 +163,31 @@
}
#[test]
+ fn remove_parens_prefix_with_ret_like_prefix() {
+ check_assist(remove_parentheses, r#"fn f() { !$0(return) }"#, r#"fn f() { !return }"#);
+ // `break`, `continue` behave the same under prefix operators
+ check_assist(remove_parentheses, r#"fn f() { !$0(break) }"#, r#"fn f() { !break }"#);
+ check_assist(remove_parentheses, r#"fn f() { !$0(continue) }"#, r#"fn f() { !continue }"#);
+ check_assist(
+ remove_parentheses,
+ r#"fn f() { !$0(return false) }"#,
+ r#"fn f() { !return false }"#,
+ );
+
+ // Binary operators should still allow removal unless a ret-like expression is immediately followed by `||` or `&&`.
+ check_assist(
+ remove_parentheses,
+ r#"fn f() { true || $0(return) }"#,
+ r#"fn f() { true || return }"#,
+ );
+ check_assist(
+ remove_parentheses,
+ r#"fn f() { cond && $0(return) }"#,
+ r#"fn f() { cond && return }"#,
+ );
+ }
+
+ #[test]
fn remove_parens_return_with_value_followed_by_block() {
check_assist(
remove_parentheses,
@@ -224,6 +249,79 @@
}
#[test]
+ fn remove_parens_return_in_unary_not() {
+ check_assist(
+ remove_parentheses,
+ r#"fn f() { cond && !$0(return) }"#,
+ r#"fn f() { cond && !return }"#,
+ );
+ check_assist(
+ remove_parentheses,
+ r#"fn f() { cond && !$0(return false) }"#,
+ r#"fn f() { cond && !return false }"#,
+ );
+ }
+
+ #[test]
+ fn remove_parens_return_in_disjunction_with_closure_risk() {
+ // `return` may only be blocked when it would form `return ||` or `return &&`
+ check_assist_not_applicable(
+ remove_parentheses,
+ r#"fn f() { let _x = true && $0(return) || true; }"#,
+ );
+ check_assist_not_applicable(
+ remove_parentheses,
+ r#"fn f() { let _x = true && !$0(return) || true; }"#,
+ );
+ check_assist_not_applicable(
+ remove_parentheses,
+ r#"fn f() { let _x = true && $0(return false) || true; }"#,
+ );
+ check_assist_not_applicable(
+ remove_parentheses,
+ r#"fn f() { let _x = true && !$0(return false) || true; }"#,
+ );
+ check_assist_not_applicable(
+ remove_parentheses,
+ r#"fn f() { let _x = true && $0(return) && true; }"#,
+ );
+ check_assist_not_applicable(
+ remove_parentheses,
+ r#"fn f() { let _x = true && !$0(return) && true; }"#,
+ );
+ check_assist_not_applicable(
+ remove_parentheses,
+ r#"fn f() { let _x = true && $0(return false) && true; }"#,
+ );
+ check_assist_not_applicable(
+ remove_parentheses,
+ r#"fn f() { let _x = true && !$0(return false) && true; }"#,
+ );
+ check_assist_not_applicable(
+ remove_parentheses,
+ r#"fn f() { let _x = $0(return) || true; }"#,
+ );
+ check_assist_not_applicable(
+ remove_parentheses,
+ r#"fn f() { let _x = $0(return) && true; }"#,
+ );
+ }
+
+ #[test]
+ fn remove_parens_return_in_disjunction_is_ok() {
+ check_assist(
+ remove_parentheses,
+ r#"fn f() { let _x = true || $0(return); }"#,
+ r#"fn f() { let _x = true || return; }"#,
+ );
+ check_assist(
+ remove_parentheses,
+ r#"fn f() { let _x = true && $0(return); }"#,
+ r#"fn f() { let _x = true && return; }"#,
+ );
+ }
+
+ #[test]
fn remove_parens_double_paren_stmt() {
check_assist(
remove_parentheses,
diff --git a/crates/ide-assists/src/handlers/replace_arith_op.rs b/crates/ide-assists/src/handlers/replace_arith_op.rs
index a3fb851..b686dc0 100644
--- a/crates/ide-assists/src/handlers/replace_arith_op.rs
+++ b/crates/ide-assists/src/handlers/replace_arith_op.rs
@@ -240,12 +240,12 @@
replace_arith_with_wrapping,
r#"
fn main() {
- let x = 1*x $0+ 2;
+ let x = 1*3 $0+ 2;
}
"#,
r#"
fn main() {
- let x = (1*x).wrapping_add(2);
+ let x = (1*3).wrapping_add(2);
}
"#,
)
diff --git a/crates/ide-assists/src/handlers/replace_method_eager_lazy.rs b/crates/ide-assists/src/handlers/replace_method_eager_lazy.rs
index 14161d9..c85ec73 100644
--- a/crates/ide-assists/src/handlers/replace_method_eager_lazy.rs
+++ b/crates/ide-assists/src/handlers/replace_method_eager_lazy.rs
@@ -35,17 +35,13 @@
let (_, receiver_ty) = callable.receiver_param(ctx.sema.db)?;
let n_params = callable.n_params() + 1;
- let method_name_lazy = format!(
- "{method_name}{}",
- if method_name.text().ends_with("or") { "_else" } else { "_with" }
- );
+ let method_name_lazy = lazy_method_name(&method_name.text());
receiver_ty.iterate_method_candidates_with_traits(
ctx.sema.db,
&scope,
&scope.visible_traits().0,
None,
- None,
|func| {
let valid = func.name(ctx.sema.db).as_str() == &*method_name_lazy
&& func.num_params(ctx.sema.db) == n_params
@@ -71,6 +67,18 @@
)
}
+fn lazy_method_name(name: &str) -> String {
+ if ends_is(name, "or") {
+ format!("{name}_else")
+ } else if ends_is(name, "and") {
+ format!("{name}_then")
+ } else if ends_is(name, "then_some") {
+ name.strip_suffix("_some").unwrap().to_owned()
+ } else {
+ format!("{name}_with")
+ }
+}
+
fn into_closure(param: &Expr) -> Expr {
(|| {
if let ast::Expr::CallExpr(call) = param {
@@ -118,16 +126,13 @@
}
let method_name_text = method_name.text();
- let method_name_eager = method_name_text
- .strip_suffix("_else")
- .or_else(|| method_name_text.strip_suffix("_with"))?;
+ let method_name_eager = eager_method_name(&method_name_text)?;
receiver_ty.iterate_method_candidates_with_traits(
ctx.sema.db,
&scope,
&scope.visible_traits().0,
None,
- None,
|func| {
let valid = func.name(ctx.sema.db).as_str() == method_name_eager
&& func.num_params(ctx.sema.db) == n_params;
@@ -158,6 +163,20 @@
.unwrap_or_else(|| make::expr_call(param.clone(), make::arg_list(Vec::new())).into())
}
+fn eager_method_name(name: &str) -> Option<&str> {
+ if name == "then" {
+ return Some("then_some");
+ }
+
+ name.strip_suffix("_else")
+ .or_else(|| name.strip_suffix("_then"))
+ .or_else(|| name.strip_suffix("_with"))
+}
+
+fn ends_is(name: &str, end: &str) -> bool {
+ name.strip_suffix(end).is_some_and(|s| s.is_empty() || s.ends_with('_'))
+}
+
#[cfg(test)]
mod tests {
use crate::tests::check_assist;
@@ -299,4 +318,84 @@
"#,
)
}
+
+ #[test]
+ fn replace_and_with_and_then() {
+ check_assist(
+ replace_with_lazy_method,
+ r#"
+//- minicore: option, fn
+fn foo() {
+ let foo = Some("foo");
+ return foo.and$0(Some("bar"));
+}
+"#,
+ r#"
+fn foo() {
+ let foo = Some("foo");
+ return foo.and_then(|| Some("bar"));
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn replace_and_then_with_and() {
+ check_assist(
+ replace_with_eager_method,
+ r#"
+//- minicore: option, fn
+fn foo() {
+ let foo = Some("foo");
+ return foo.and_then$0(|| Some("bar"));
+}
+"#,
+ r#"
+fn foo() {
+ let foo = Some("foo");
+ return foo.and(Some("bar"));
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn replace_then_some_with_then() {
+ check_assist(
+ replace_with_lazy_method,
+ r#"
+//- minicore: option, fn, bool_impl
+fn foo() {
+ let foo = true;
+ let x = foo.then_some$0(2);
+}
+"#,
+ r#"
+fn foo() {
+ let foo = true;
+ let x = foo.then(|| 2);
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn replace_then_with_then_some() {
+ check_assist(
+ replace_with_eager_method,
+ r#"
+//- minicore: option, fn, bool_impl
+fn foo() {
+ let foo = true;
+ let x = foo.then$0(|| 2);
+}
+"#,
+ r#"
+fn foo() {
+ let foo = true;
+ let x = foo.then_some(2);
+}
+"#,
+ )
+ }
}
diff --git a/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs b/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs
index f6526cd..e9f0e19 100644
--- a/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs
+++ b/crates/ide-assists/src/handlers/replace_qualified_name_with_use.rs
@@ -1,4 +1,4 @@
-use hir::AsAssocItem;
+use hir::{AsAssocItem, ModuleDef, PathResolution};
use ide_db::{
helpers::mod_path_to_ast,
imports::insert_use::{ImportScope, insert_use},
@@ -6,7 +6,8 @@
use syntax::{
AstNode, Edition, SyntaxNode,
ast::{self, HasGenericArgs, make},
- match_ast, ted,
+ match_ast,
+ syntax_editor::SyntaxEditor,
};
use crate::{AssistContext, AssistId, Assists};
@@ -30,26 +31,19 @@
acc: &mut Assists,
ctx: &AssistContext<'_>,
) -> Option<()> {
- let mut original_path: ast::Path = ctx.find_node_at_offset()?;
+ let original_path: ast::Path = ctx.find_node_at_offset()?;
// We don't want to mess with use statements
if original_path.syntax().ancestors().find_map(ast::UseTree::cast).is_some() {
cov_mark::hit!(not_applicable_in_use);
return None;
}
- if original_path.qualifier().is_none() {
- original_path = original_path.parent_path()?;
- }
+ let original_path = target_path(ctx, original_path)?;
- // only offer replacement for non assoc items
- match ctx.sema.resolve_path(&original_path)? {
- hir::PathResolution::Def(def) if def.as_assoc_item(ctx.sema.db).is_none() => (),
- _ => return None,
- }
// then search for an import for the first path segment of what we want to replace
// that way it is less likely that we import the item from a different location due re-exports
let module = match ctx.sema.resolve_path(&original_path.first_qualifier_or_self())? {
- hir::PathResolution::Def(module @ hir::ModuleDef::Module(_)) => module,
+ PathResolution::Def(module @ ModuleDef::Module(_)) => module,
_ => return None,
};
@@ -78,8 +72,10 @@
|builder| {
// Now that we've brought the name into scope, re-qualify all paths that could be
// affected (that is, all paths inside the node we added the `use` to).
- let scope = builder.make_import_scope_mut(scope);
- shorten_paths(scope.as_syntax_node(), &original_path);
+ let scope_node = scope.as_syntax_node();
+ let mut editor = builder.make_editor(scope_node);
+ shorten_paths(&mut editor, scope_node, &original_path);
+ builder.add_file_edits(ctx.vfs_file_id(), editor);
let path = drop_generic_args(&original_path);
let edition = ctx
.sema
@@ -92,23 +88,42 @@
Some(qualifier) => make::path_concat(qualifier, path),
None => path,
};
+ let scope = builder.make_import_scope_mut(scope);
insert_use(&scope, path, &ctx.config.insert_use);
},
)
}
+fn target_path(ctx: &AssistContext<'_>, mut original_path: ast::Path) -> Option<ast::Path> {
+ let on_first = original_path.qualifier().is_none();
+
+ if on_first {
+ original_path = original_path.top_path();
+ }
+
+ match ctx.sema.resolve_path(&original_path)? {
+ PathResolution::Def(ModuleDef::Variant(_)) if on_first => original_path.qualifier(),
+ PathResolution::Def(def) if def.as_assoc_item(ctx.db()).is_some() => {
+ on_first.then_some(original_path.qualifier()?)
+ }
+ _ => Some(original_path),
+ }
+}
+
fn drop_generic_args(path: &ast::Path) -> ast::Path {
- let path = path.clone_for_update();
+ let path = path.clone_subtree();
+ let mut editor = SyntaxEditor::new(path.syntax().clone());
if let Some(segment) = path.segment()
&& let Some(generic_args) = segment.generic_arg_list()
{
- ted::remove(generic_args.syntax());
+ editor.delete(generic_args.syntax());
}
- path
+
+ ast::Path::cast(editor.finish().new_root().clone()).unwrap()
}
/// Mutates `node` to shorten `path` in all descendants of `node`.
-fn shorten_paths(node: &SyntaxNode, path: &ast::Path) {
+fn shorten_paths(editor: &mut SyntaxEditor, node: &SyntaxNode, path: &ast::Path) {
for child in node.children() {
match_ast! {
match child {
@@ -118,26 +133,26 @@
// Don't descend into submodules, they don't have the same `use` items in scope.
// FIXME: This isn't true due to `super::*` imports?
ast::Module(_) => continue,
- ast::Path(p) => if maybe_replace_path(p.clone(), path.clone()).is_none() {
- shorten_paths(p.syntax(), path);
+ ast::Path(p) => if maybe_replace_path(editor, p.clone(), path.clone()).is_none() {
+ shorten_paths(editor, p.syntax(), path);
},
- _ => shorten_paths(&child, path),
+ _ => shorten_paths(editor, &child, path),
}
}
}
}
-fn maybe_replace_path(path: ast::Path, target: ast::Path) -> Option<()> {
+fn maybe_replace_path(editor: &mut SyntaxEditor, path: ast::Path, target: ast::Path) -> Option<()> {
if !path_eq_no_generics(path.clone(), target) {
return None;
}
// Shorten `path`, leaving only its last segment.
if let Some(parent) = path.qualifier() {
- ted::remove(parent.syntax());
+ editor.delete(parent.syntax());
}
if let Some(double_colon) = path.coloncolon_token() {
- ted::remove(&double_colon);
+ editor.delete(double_colon);
}
Some(())
@@ -270,12 +285,117 @@
}
",
r"
-use std::fmt;
+use std::fmt::Debug;
mod std { pub mod fmt { pub trait Debug {} } }
fn main() {
- fmt::Debug;
- let x: fmt::Debug = fmt::Debug;
+ Debug;
+ let x: Debug = Debug;
+}
+ ",
+ );
+ }
+
+ #[test]
+ fn assist_runs_on_first_segment_for_enum() {
+ check_assist(
+ replace_qualified_name_with_use,
+ r"
+mod std { pub mod option { pub enum Option<T> { Some(T), None } } }
+fn main() {
+ $0std::option::Option;
+ let x: std::option::Option<()> = std::option::Option::Some(());
+}
+ ",
+ r"
+use std::option::Option;
+
+mod std { pub mod option { pub enum Option<T> { Some(T), None } } }
+fn main() {
+ Option;
+ let x: Option<()> = Option::Some(());
+}
+ ",
+ );
+
+ check_assist(
+ replace_qualified_name_with_use,
+ r"
+mod std { pub mod option { pub enum Option<T> { Some(T), None } } }
+fn main() {
+ std::option::Option;
+ let x: std::option::Option<()> = $0std::option::Option::Some(());
+}
+ ",
+ r"
+use std::option::Option;
+
+mod std { pub mod option { pub enum Option<T> { Some(T), None } } }
+fn main() {
+ Option;
+ let x: Option<()> = Option::Some(());
+}
+ ",
+ );
+ }
+
+ #[test]
+ fn assist_runs_on_first_segment_for_assoc_type() {
+ check_assist(
+ replace_qualified_name_with_use,
+ r"
+mod foo { pub struct Foo; impl Foo { pub fn foo() {} } }
+fn main() {
+ $0foo::Foo::foo();
+}
+ ",
+ r"
+use foo::Foo;
+
+mod foo { pub struct Foo; impl Foo { pub fn foo() {} } }
+fn main() {
+ Foo::foo();
+}
+ ",
+ );
+ }
+
+ #[test]
+ fn assist_runs_on_enum_variant() {
+ check_assist(
+ replace_qualified_name_with_use,
+ r"
+mod std { pub mod option { pub enum Option<T> { Some(T), None } } }
+fn main() {
+ let x = std::option::Option::Some$0(());
+}
+ ",
+ r"
+use std::option::Option::Some;
+
+mod std { pub mod option { pub enum Option<T> { Some(T), None } } }
+fn main() {
+ let x = Some(());
+}
+ ",
+ );
+
+ check_assist(
+ replace_qualified_name_with_use,
+ r"
+mod std { pub mod option { pub enum Option<T> { Some(T), None } } }
+fn main() {
+ std::option::Option;
+ let x: std::option::Option<()> = $0std::option::Option::Some(());
+}
+ ",
+ r"
+use std::option::Option;
+
+mod std { pub mod option { pub enum Option<T> { Some(T), None } } }
+fn main() {
+ Option;
+ let x: Option<()> = Option::Some(());
}
",
);
diff --git a/crates/ide-assists/src/lib.rs b/crates/ide-assists/src/lib.rs
index 4b4aa94..ca46890 100644
--- a/crates/ide-assists/src/lib.rs
+++ b/crates/ide-assists/src/lib.rs
@@ -119,6 +119,7 @@
mod change_visibility;
mod convert_bool_then;
mod convert_bool_to_enum;
+ mod convert_char_literal;
mod convert_closure_to_fn;
mod convert_comment_block;
mod convert_comment_from_or_to_doc;
@@ -256,6 +257,7 @@
convert_bool_then::convert_bool_then_to_if,
convert_bool_then::convert_if_to_bool_then,
convert_bool_to_enum::convert_bool_to_enum,
+ convert_char_literal::convert_char_literal,
convert_closure_to_fn::convert_closure_to_fn,
convert_comment_block::convert_comment_block,
convert_comment_from_or_to_doc::convert_comment_from_or_to_doc,
diff --git a/crates/ide-assists/src/tests/generated.rs b/crates/ide-assists/src/tests/generated.rs
index 160b31a..7eef257 100644
--- a/crates/ide-assists/src/tests/generated.rs
+++ b/crates/ide-assists/src/tests/generated.rs
@@ -425,6 +425,19 @@
}
#[test]
+fn doctest_convert_char_literal() {
+ check_doc_test(
+ "convert_char_literal",
+ r#####"
+const _: char = 'a'$0;
+"#####,
+ r#####"
+const _: char = '\x61';
+"#####,
+ )
+}
+
+#[test]
fn doctest_convert_closure_to_fn() {
check_doc_test(
"convert_closure_to_fn",
diff --git a/crates/ide-assists/src/utils.rs b/crates/ide-assists/src/utils.rs
index fbdd066..a00af92 100644
--- a/crates/ide-assists/src/utils.rs
+++ b/crates/ide-assists/src/utils.rs
@@ -1080,6 +1080,18 @@
assert_eq!(Some("r"), string_prefix(r##"r#""#"##));
}
+pub(crate) fn add_group_separators(s: &str, group_size: usize) -> String {
+ let mut chars = Vec::new();
+ for (i, ch) in s.chars().filter(|&ch| ch != '_').rev().enumerate() {
+ if i > 0 && i % group_size == 0 && ch != '-' {
+ chars.push('_');
+ }
+ chars.push(ch);
+ }
+
+ chars.into_iter().rev().collect()
+}
+
/// Replaces the record expression, handling field shorthands including inside macros.
pub(crate) fn replace_record_field_expr(
ctx: &AssistContext<'_>,
@@ -1163,6 +1175,15 @@
}
}
+pub(crate) fn is_selected(
+ it: &impl AstNode,
+ selection: syntax::TextRange,
+ allow_empty: bool,
+) -> bool {
+ selection.intersect(it.syntax().text_range()).is_some_and(|it| !it.is_empty())
+ || allow_empty && it.syntax().text_range().contains_range(selection)
+}
+
pub fn is_body_const(sema: &Semantics<'_, RootDatabase>, expr: &ast::Expr) -> bool {
let mut is_const = true;
preorder_expr(expr, &mut |ev| {
diff --git a/crates/ide-completion/src/completions/attribute.rs b/crates/ide-completion/src/completions/attribute.rs
index 297ce33..20776f6 100644
--- a/crates/ide-completion/src/completions/attribute.rs
+++ b/crates/ide-completion/src/completions/attribute.rs
@@ -231,7 +231,7 @@
macro_rules! attrs {
// attributes applicable to all items
[@ { item $($tt:tt)* } {$($acc:tt)*}] => {
- attrs!(@ { $($tt)* } { $($acc)*, "deprecated", "doc", "dochidden", "docalias", "docinclude", "must_use", "no_mangle" })
+ attrs!(@ { $($tt)* } { $($acc)*, "deprecated", "doc", "dochidden", "docalias", "docinclude", "must_use", "no_mangle", "unsafe" })
};
// attributes applicable to all adts
[@ { adt $($tt:tt)* } {$($acc:tt)*}] => {
@@ -395,6 +395,7 @@
attr("track_caller", None, None),
attr("type_length_limit = …", Some("type_length_limit"), Some("type_length_limit = ${0:128}"))
.prefer_inner(),
+ attr("unsafe(…)", Some("unsafe"), Some("unsafe($0)")),
attr("used", None, None),
attr("warn(…)", Some("warn"), Some("warn(${0:lint})")),
attr(
diff --git a/crates/ide-completion/src/completions/attribute/cfg.rs b/crates/ide-completion/src/completions/attribute/cfg.rs
index b6739c9..1350e58 100644
--- a/crates/ide-completion/src/completions/attribute/cfg.rs
+++ b/crates/ide-completion/src/completions/attribute/cfg.rs
@@ -2,7 +2,7 @@
use ide_db::SymbolKind;
use itertools::Itertools;
-use syntax::{AstToken, Direction, NodeOrToken, SyntaxKind, algo, ast::Ident};
+use syntax::{AstToken, Direction, NodeOrToken, SmolStr, SyntaxKind, algo, ast::Ident};
use crate::{CompletionItem, completions::Completions, context::CompletionContext};
@@ -56,10 +56,15 @@
None => ctx
.krate
.potential_cfg(ctx.db)
- .get_cfg_keys()
- .unique()
- .map(|s| (s.as_str(), ""))
- .chain(CFG_CONDITION.iter().copied())
+ .into_iter()
+ .map(|x| match x {
+ hir::CfgAtom::Flag(key) => (key.as_str(), "".into()),
+ hir::CfgAtom::KeyValue { key, .. } => {
+ (key.as_str(), SmolStr::from_iter([key.as_str(), " = $0"]))
+ }
+ })
+ .chain(CFG_CONDITION.iter().map(|&(k, snip)| (k, SmolStr::new_static(snip))))
+ .unique_by(|&(s, _)| s)
.for_each(|(s, snippet)| {
let mut item = CompletionItem::new(
SymbolKind::BuiltinAttr,
diff --git a/crates/ide-completion/src/completions/dot.rs b/crates/ide-completion/src/completions/dot.rs
index 511b593..c9f4405 100644
--- a/crates/ide-completion/src/completions/dot.rs
+++ b/crates/ide-completion/src/completions/dot.rs
@@ -270,7 +270,6 @@
ctx.db,
&ctx.scope,
traits_in_scope,
- Some(ctx.module),
None,
Callback { ctx, f, seen_methods: FxHashSet::default() },
);
@@ -597,7 +596,6 @@
}
"#,
expect![[r#"
- me local_method() fn(&self)
me pub_module_method() fn(&self)
"#]],
);
@@ -1526,6 +1524,8 @@
check_no_kw(
r#"
//- minicore: receiver
+#![feature(arbitrary_self_types)]
+
use core::ops::Receiver;
struct Foo;
diff --git a/crates/ide-completion/src/completions/expr.rs b/crates/ide-completion/src/completions/expr.rs
index 5cae7bd..77734c5 100644
--- a/crates/ide-completion/src/completions/expr.rs
+++ b/crates/ide-completion/src/completions/expr.rs
@@ -126,13 +126,12 @@
ctx.db,
&ctx.scope,
&ctx.traits_in_scope(),
- Some(ctx.module),
None,
PathCallback { ctx, acc, add_assoc_item, seen: FxHashSet::default() },
);
// Iterate assoc types separately
- ty.iterate_assoc_items(ctx.db, ctx.krate, |item| {
+ ty.iterate_assoc_items(ctx.db, |item| {
if let hir::AssocItem::TypeAlias(ty) = item {
acc.add_type_alias(ctx, ty)
}
@@ -196,13 +195,12 @@
ctx.db,
&ctx.scope,
&ctx.traits_in_scope(),
- Some(ctx.module),
None,
PathCallback { ctx, acc, add_assoc_item, seen: FxHashSet::default() },
);
// Iterate assoc types separately
- ty.iterate_assoc_items(ctx.db, ctx.krate, |item| {
+ ty.iterate_assoc_items(ctx.db, |item| {
if let hir::AssocItem::TypeAlias(ty) = item {
acc.add_type_alias(ctx, ty)
}
@@ -232,7 +230,6 @@
ctx.db,
&ctx.scope,
&ctx.traits_in_scope(),
- Some(ctx.module),
None,
PathCallback { ctx, acc, add_assoc_item, seen: FxHashSet::default() },
);
@@ -355,7 +352,10 @@
if !in_block_expr {
add_keyword("unsafe", "unsafe {\n $0\n}");
- add_keyword("const", "const {\n $0\n}");
+ if !wants_const_token {
+ // Avoid having two `const` items in `&raw $0`
+ add_keyword("const", "const {\n $0\n}");
+ }
}
add_keyword("match", "match $1 {\n $0\n}");
add_keyword("while", "while $1 {\n $0\n}");
diff --git a/crates/ide-completion/src/completions/postfix.rs b/crates/ide-completion/src/completions/postfix.rs
index d62b471..ba1fe64 100644
--- a/crates/ide-completion/src/completions/postfix.rs
+++ b/crates/ide-completion/src/completions/postfix.rs
@@ -11,10 +11,11 @@
text_edit::TextEdit,
ty_filter::TryEnum,
};
+use itertools::Itertools;
use stdx::never;
use syntax::{
SyntaxKind::{EXPR_STMT, STMT_LIST},
- T, TextRange, TextSize,
+ T, TextRange, TextSize, ToSmolStr,
ast::{self, AstNode, AstToken},
match_ast,
};
@@ -360,10 +361,18 @@
resulting_element.syntax().parent().and_then(ast::RefExpr::cast)
{
found_ref_or_deref = true;
- let exclusive = parent_ref_element.mut_token().is_some();
+ let last_child_or_token = parent_ref_element.syntax().last_child_or_token();
+ prefix.insert_str(
+ 0,
+ parent_ref_element
+ .syntax()
+ .children_with_tokens()
+ .filter(|it| Some(it) != last_child_or_token.as_ref())
+ .format("")
+ .to_smolstr()
+ .as_str(),
+ );
resulting_element = ast::Expr::from(parent_ref_element);
-
- prefix.insert_str(0, if exclusive { "&mut " } else { "&" });
}
if !found_ref_or_deref {
@@ -1006,6 +1015,20 @@
);
check_edit_with_config(
+ CompletionConfig { snippets: vec![snippet.clone()], ..TEST_CONFIG },
+ "ok",
+ r#"fn main() { &raw mut 42.$0 }"#,
+ r#"fn main() { Ok(&raw mut 42) }"#,
+ );
+
+ check_edit_with_config(
+ CompletionConfig { snippets: vec![snippet.clone()], ..TEST_CONFIG },
+ "ok",
+ r#"fn main() { &raw const 42.$0 }"#,
+ r#"fn main() { Ok(&raw const 42) }"#,
+ );
+
+ check_edit_with_config(
CompletionConfig { snippets: vec![snippet], ..TEST_CONFIG },
"ok",
r#"
@@ -1032,6 +1055,55 @@
}
#[test]
+ fn postfix_custom_snippets_completion_for_reference_expr() {
+ // https://github.com/rust-lang/rust-analyzer/issues/21035
+ let snippet = Snippet::new(
+ &[],
+ &["group".into()],
+ &["(${receiver})".into()],
+ "",
+ &[],
+ crate::SnippetScope::Expr,
+ )
+ .unwrap();
+
+ check_edit_with_config(
+ CompletionConfig { snippets: vec![snippet.clone()], ..TEST_CONFIG },
+ "group",
+ r#"fn main() { &[1, 2, 3].g$0 }"#,
+ r#"fn main() { (&[1, 2, 3]) }"#,
+ );
+
+ check_edit_with_config(
+ CompletionConfig { snippets: vec![snippet.clone()], ..TEST_CONFIG },
+ "group",
+ r#"fn main() { &&foo(a, b, 1+1).$0 }"#,
+ r#"fn main() { (&&foo(a, b, 1+1)) }"#,
+ );
+
+ check_edit_with_config(
+ CompletionConfig { snippets: vec![snippet.clone()], ..TEST_CONFIG },
+ "group",
+ r#"fn main() { &mut Foo { a: 1, b: 2, c: 3 }.$0 }"#,
+ r#"fn main() { (&mut Foo { a: 1, b: 2, c: 3 }) }"#,
+ );
+
+ check_edit_with_config(
+ CompletionConfig { snippets: vec![snippet.clone()], ..TEST_CONFIG },
+ "group",
+ r#"fn main() { &raw mut Foo::new().$0 }"#,
+ r#"fn main() { (&raw mut Foo::new()) }"#,
+ );
+
+ check_edit_with_config(
+ CompletionConfig { snippets: vec![snippet.clone()], ..TEST_CONFIG },
+ "group",
+ r#"fn main() { &raw const Foo::bar::SOME_CONST.$0 }"#,
+ r#"fn main() { (&raw const Foo::bar::SOME_CONST) }"#,
+ );
+ }
+
+ #[test]
fn no_postfix_completions_in_if_block_that_has_an_else() {
check(
r#"
diff --git a/crates/ide-completion/src/completions/type.rs b/crates/ide-completion/src/completions/type.rs
index 3465b73..abcf9fc 100644
--- a/crates/ide-completion/src/completions/type.rs
+++ b/crates/ide-completion/src/completions/type.rs
@@ -67,7 +67,7 @@
});
// Iterate assoc types separately
- ty.iterate_assoc_items(ctx.db, ctx.krate, |item| {
+ ty.iterate_assoc_items(ctx.db, |item| {
if let hir::AssocItem::TypeAlias(ty) = item {
acc.add_type_alias(ctx, ty)
}
@@ -110,7 +110,7 @@
});
// Iterate assoc types separately
- ty.iterate_assoc_items(ctx.db, ctx.krate, |item| {
+ ty.iterate_assoc_items(ctx.db, |item| {
if let hir::AssocItem::TypeAlias(ty) = item {
acc.add_type_alias(ctx, ty)
}
diff --git a/crates/ide-completion/src/context.rs b/crates/ide-completion/src/context.rs
index 2f166b7..31a9a74 100644
--- a/crates/ide-completion/src/context.rs
+++ b/crates/ide-completion/src/context.rs
@@ -615,21 +615,14 @@
mut cb: impl FnMut(hir::AssocItem),
) {
let mut seen = FxHashSet::default();
- ty.iterate_path_candidates(
- self.db,
- &self.scope,
- &self.traits_in_scope(),
- Some(self.module),
- None,
- |item| {
- // We might iterate candidates of a trait multiple times here, so deduplicate
- // them.
- if seen.insert(item) {
- cb(item)
- }
- None::<()>
- },
- );
+ ty.iterate_path_candidates(self.db, &self.scope, &self.traits_in_scope(), None, |item| {
+ // We might iterate candidates of a trait multiple times here, so deduplicate
+ // them.
+ if seen.insert(item) {
+ cb(item)
+ }
+ None::<()>
+ });
}
/// A version of [`SemanticsScope::process_all_names`] that filters out `#[doc(hidden)]` items and
diff --git a/crates/ide-completion/src/context/analysis.rs b/crates/ide-completion/src/context/analysis.rs
index d6d3978..e4076fc 100644
--- a/crates/ide-completion/src/context/analysis.rs
+++ b/crates/ide-completion/src/context/analysis.rs
@@ -1679,12 +1679,16 @@
let mut param_ctx = None;
let mut missing_variants = vec![];
+ let is_pat_like = |kind| {
+ ast::Pat::can_cast(kind)
+ || ast::RecordPatField::can_cast(kind)
+ || ast::RecordPatFieldList::can_cast(kind)
+ };
- let (refutability, has_type_ascription) =
- pat
+ let (refutability, has_type_ascription) = pat
.syntax()
.ancestors()
- .find(|it| !ast::Pat::can_cast(it.kind()))
+ .find(|it| !is_pat_like(it.kind()))
.map_or((PatternRefutability::Irrefutable, false), |node| {
let refutability = match_ast! {
match node {
diff --git a/crates/ide-completion/src/tests/attribute.rs b/crates/ide-completion/src/tests/attribute.rs
index 2a62389..9ff490f 100644
--- a/crates/ide-completion/src/tests/attribute.rs
+++ b/crates/ide-completion/src/tests/attribute.rs
@@ -61,6 +61,7 @@
at target_feature(enable = "…")
at test
at track_caller
+ at unsafe(…)
at used
at warn(…)
md mac
@@ -95,6 +96,7 @@
at no_mangle
at non_exhaustive
at repr(…)
+ at unsafe(…)
at warn(…)
md proc_macros
kw crate::
@@ -173,6 +175,7 @@
at no_std
at recursion_limit = "…"
at type_length_limit = …
+ at unsafe(…)
at warn(…)
at windows_subsystem = "…"
kw crate::
@@ -201,6 +204,7 @@
at must_use
at no_mangle
at path = "…"
+ at unsafe(…)
at warn(…)
kw crate::
kw self::
@@ -224,6 +228,7 @@
at must_use
at no_implicit_prelude
at no_mangle
+ at unsafe(…)
at warn(…)
kw crate::
kw self::
@@ -252,6 +257,7 @@
at macro_use
at must_use
at no_mangle
+ at unsafe(…)
at warn(…)
kw crate::
kw self::
@@ -277,6 +283,7 @@
at forbid(…)
at must_use
at no_mangle
+ at unsafe(…)
at warn(…)
kw crate::
kw self::
@@ -303,6 +310,7 @@
at macro_use
at must_use
at no_mangle
+ at unsafe(…)
at warn(…)
kw crate::
kw self::
@@ -328,6 +336,7 @@
at forbid(…)
at must_use
at no_mangle
+ at unsafe(…)
at warn(…)
kw crate::
kw self::
@@ -353,6 +362,7 @@
at forbid(…)
at must_use
at no_mangle
+ at unsafe(…)
at warn(…)
kw crate::
kw self::
@@ -387,6 +397,7 @@
at no_mangle
at non_exhaustive
at repr(…)
+ at unsafe(…)
at warn(…)
md core
kw crate::
@@ -416,6 +427,7 @@
at no_mangle
at non_exhaustive
at repr(…)
+ at unsafe(…)
at warn(…)
kw crate::
kw self::
@@ -441,6 +453,7 @@
at forbid(…)
at must_use
at no_mangle
+ at unsafe(…)
at warn(…)
kw crate::
kw self::
@@ -470,6 +483,7 @@
at link_section = "…"
at must_use
at no_mangle
+ at unsafe(…)
at used
at warn(…)
kw crate::
@@ -497,6 +511,7 @@
at forbid(…)
at must_use
at no_mangle
+ at unsafe(…)
at warn(…)
kw crate::
kw self::
@@ -524,6 +539,7 @@
at forbid(…)
at must_use
at no_mangle
+ at unsafe(…)
at warn(…)
kw crate::
kw self::
@@ -545,6 +561,7 @@
at forbid(…)
at must_use
at no_mangle
+ at unsafe(…)
at warn(…)
kw crate::
kw self::
@@ -572,6 +589,7 @@
at forbid(…)
at must_use
at no_mangle
+ at unsafe(…)
at warn(…)
"#]],
);
@@ -592,12 +610,44 @@
at must_use
at no_mangle
at on_unimplemented
+ at unsafe(…)
at warn(…)
"#]],
);
}
#[test]
+fn attr_on_unsafe_attr() {
+ check(
+ r#"#[unsafe($0)] static FOO: () = ()"#,
+ expect![[r#"
+ at allow(…)
+ at cfg(…)
+ at cfg_attr(…)
+ at deny(…)
+ at deprecated
+ at doc = "…"
+ at doc = include_str!("…")
+ at doc(alias = "…")
+ at doc(hidden)
+ at expect(…)
+ at export_name = "…"
+ at forbid(…)
+ at global_allocator
+ at link_name = "…"
+ at link_section = "…"
+ at must_use
+ at no_mangle
+ at unsafe(…)
+ at used
+ at warn(…)
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
+
+#[test]
fn attr_diagnostic_on_unimplemented() {
check(
r#"#[diagnostic::on_unimplemented($0)] trait Foo {}"#,
@@ -643,6 +693,7 @@
at link
at must_use
at no_mangle
+ at unsafe(…)
at warn(…)
kw crate::
kw self::
@@ -665,6 +716,7 @@
at link
at must_use
at no_mangle
+ at unsafe(…)
at warn(…)
kw crate::
kw self::
@@ -723,6 +775,7 @@
at target_feature(enable = "…")
at test
at track_caller
+ at unsafe(…)
at warn(…)
kw crate::
kw self::
@@ -773,6 +826,7 @@
at target_feature(enable = "…")
at test
at track_caller
+ at unsafe(…)
at used
at warn(…)
kw crate::
@@ -929,6 +983,34 @@
}
#[test]
+ fn complete_key_attr() {
+ check_edit(
+ "test",
+ r#"
+//- /main.rs cfg:test,dbg=false,opt_level=2
+#[cfg($0)]
+"#,
+ r#"
+#[cfg(test)]
+"#,
+ );
+ }
+
+ #[test]
+ fn complete_key_value_attr() {
+ check_edit(
+ "opt_level",
+ r#"
+//- /main.rs cfg:test,dbg=false,opt_level=2
+#[cfg($0)]
+"#,
+ r#"
+#[cfg(opt_level = $0)]
+"#,
+ );
+ }
+
+ #[test]
fn cfg_target_endian() {
check(
r#"#[cfg(target_endian = $0"#,
diff --git a/crates/ide-completion/src/tests/expression.rs b/crates/ide-completion/src/tests/expression.rs
index 4033aa5..78f003dd 100644
--- a/crates/ide-completion/src/tests/expression.rs
+++ b/crates/ide-completion/src/tests/expression.rs
@@ -628,7 +628,6 @@
fn main() fn()
bt u32 u32
kw const
- kw const
kw crate::
kw false
kw for
@@ -3198,6 +3197,7 @@
fn ambiguous_float_literal() {
check(
r#"
+//- /core.rs crate:core
#![rustc_coherence_is_core]
impl i32 {
@@ -3232,6 +3232,7 @@
fn ambiguous_float_literal_in_ambiguous_method_call() {
check(
r#"
+//- /core.rs crate:core
#![rustc_coherence_is_core]
impl i32 {
diff --git a/crates/ide-completion/src/tests/pattern.rs b/crates/ide-completion/src/tests/pattern.rs
index 6eb0b81..a765fd12 100644
--- a/crates/ide-completion/src/tests/pattern.rs
+++ b/crates/ide-completion/src/tests/pattern.rs
@@ -133,6 +133,44 @@
}
#[test]
+fn refutable_in_record_pat_field() {
+ check(
+ r#"
+enum Bar { Value, Nil }
+struct Foo { x: Bar }
+fn foo(foo: Foo) { match foo { Foo { x: $0 } } }
+"#,
+ expect![[r#"
+ en Bar
+ st Foo
+ bn Foo {…} Foo { x$1 }$0
+ kw mut
+ kw ref
+ "#]],
+ );
+
+ check(
+ r#"
+enum Bar { Value, Nil }
+use Bar::*;
+struct Foo { x: Bar }
+fn foo(foo: Foo) { match foo { Foo { x: $0 } } }
+"#,
+ expect![[r#"
+ en Bar
+ st Foo
+ ev Nil
+ ev Value
+ bn Foo {…} Foo { x$1 }$0
+ bn Nil Nil$0
+ bn Value Value$0
+ kw mut
+ kw ref
+ "#]],
+ );
+}
+
+#[test]
fn irrefutable() {
check_with_base_items(
r#"
@@ -653,6 +691,7 @@
check(
r#"
+//- /core.rs crate:core
#![rustc_coherence_is_core]
#[lang = "u32"]
impl u32 {
diff --git a/crates/ide-db/Cargo.toml b/crates/ide-db/Cargo.toml
index b714816..f1f9d85 100644
--- a/crates/ide-db/Cargo.toml
+++ b/crates/ide-db/Cargo.toml
@@ -15,14 +15,13 @@
[dependencies]
cov-mark = "2.0.0"
crossbeam-channel.workspace = true
-tracing.workspace = true
+tracing = { workspace = true, features = ["attributes"] }
rayon.workspace = true
fst = { version = "0.4.7", default-features = false }
rustc-hash.workspace = true
either.workspace = true
itertools.workspace = true
arrayvec.workspace = true
-indexmap.workspace = true
memchr = "2.7.5"
salsa.workspace = true
salsa-macros.workspace = true
diff --git a/crates/ide-db/src/imports/import_assets.rs b/crates/ide-db/src/imports/import_assets.rs
index 50edfca..c49ade2 100644
--- a/crates/ide-db/src/imports/import_assets.rs
+++ b/crates/ide-db/src/imports/import_assets.rs
@@ -5,7 +5,7 @@
use hir::{
AsAssocItem, AssocItem, AssocItemContainer, Complete, Crate, FindPathConfig, HasCrate,
ItemInNs, ModPath, Module, ModuleDef, Name, PathResolution, PrefixKind, ScopeDef, Semantics,
- SemanticsScope, Trait, TyFingerprint, Type, db::HirDatabase,
+ SemanticsScope, Trait, Type,
};
use itertools::Itertools;
use rustc_hash::{FxHashMap, FxHashSet};
@@ -500,44 +500,37 @@
ModuleDef::Adt(adt) => adt.ty(db),
_ => return SmallVec::new(),
};
- ty.iterate_path_candidates::<Infallible>(
- db,
- scope,
- &FxHashSet::default(),
- None,
- None,
- |assoc| {
- // FIXME: Support extra trait imports
- if assoc.container_or_implemented_trait(db).is_some() {
- return None;
+ ty.iterate_path_candidates::<Infallible>(db, scope, &FxHashSet::default(), None, |assoc| {
+ // FIXME: Support extra trait imports
+ if assoc.container_or_implemented_trait(db).is_some() {
+ return None;
+ }
+ let name = assoc.name(db)?;
+ let is_match = match candidate {
+ NameToImport::Prefix(text, true) => name.as_str().starts_with(text),
+ NameToImport::Prefix(text, false) => {
+ name.as_str().chars().zip(text.chars()).all(|(name_char, candidate_char)| {
+ name_char.eq_ignore_ascii_case(&candidate_char)
+ })
}
- let name = assoc.name(db)?;
- let is_match = match candidate {
- NameToImport::Prefix(text, true) => name.as_str().starts_with(text),
- NameToImport::Prefix(text, false) => {
- name.as_str().chars().zip(text.chars()).all(|(name_char, candidate_char)| {
- name_char.eq_ignore_ascii_case(&candidate_char)
- })
- }
- NameToImport::Exact(text, true) => name.as_str() == text,
- NameToImport::Exact(text, false) => name.as_str().eq_ignore_ascii_case(text),
- NameToImport::Fuzzy(text, true) => text.chars().all(|c| name.as_str().contains(c)),
- NameToImport::Fuzzy(text, false) => text.chars().all(|c| {
- name.as_str().chars().any(|name_char| name_char.eq_ignore_ascii_case(&c))
- }),
- };
- if !is_match {
- return None;
- }
- result.push(LocatedImport::new(
- import_path_candidate.clone(),
- resolved_qualifier,
- assoc_to_item(assoc),
- complete_in_flyimport,
- ));
- None
- },
- );
+ NameToImport::Exact(text, true) => name.as_str() == text,
+ NameToImport::Exact(text, false) => name.as_str().eq_ignore_ascii_case(text),
+ NameToImport::Fuzzy(text, true) => text.chars().all(|c| name.as_str().contains(c)),
+ NameToImport::Fuzzy(text, false) => text
+ .chars()
+ .all(|c| name.as_str().chars().any(|name_char| name_char.eq_ignore_ascii_case(&c))),
+ };
+ if !is_match {
+ return None;
+ }
+ result.push(LocatedImport::new(
+ import_path_candidate.clone(),
+ resolved_qualifier,
+ assoc_to_item(assoc),
+ complete_in_flyimport,
+ ));
+ None
+ });
result
}
@@ -608,7 +601,6 @@
deref_chain
.into_iter()
.filter_map(|ty| Some((ty.krate(db).into(), ty.fingerprint_for_trait_impl()?)))
- .sorted()
.unique()
.collect::<Vec<_>>()
};
@@ -619,11 +611,11 @@
}
// in order to handle implied bounds through an associated type, keep all traits if any
- // type in the deref chain matches `TyFingerprint::Unnameable`. This fingerprint
+ // type in the deref chain matches `SimplifiedType::Placeholder`. This fingerprint
// won't be in `TraitImpls` anyways, as `TraitImpls` only contains actual implementations.
if !autoderef_method_receiver
.iter()
- .any(|(_, fingerprint)| matches!(fingerprint, TyFingerprint::Unnameable))
+ .any(|(_, fingerprint)| matches!(fingerprint, hir::SimplifiedType::Placeholder))
{
trait_candidates.retain(|&candidate_trait_id| {
// we care about the following cases:
@@ -635,17 +627,18 @@
// a. This is recursive for fundamental types
let defining_crate_for_trait = Trait::from(candidate_trait_id).krate(db);
- let trait_impls_in_crate = db.trait_impls_in_crate(defining_crate_for_trait.into());
+ let trait_impls_in_crate =
+ hir::TraitImpls::for_crate(db, defining_crate_for_trait.into());
let definitions_exist_in_trait_crate =
- autoderef_method_receiver.iter().any(|&(_, fingerprint)| {
+ autoderef_method_receiver.iter().any(|(_, fingerprint)| {
trait_impls_in_crate
.has_impls_for_trait_and_self_ty(candidate_trait_id, fingerprint)
});
// this is a closure for laziness: if `definitions_exist_in_trait_crate` is true,
// we can avoid a second db lookup.
let definitions_exist_in_receiver_crate = || {
- autoderef_method_receiver.iter().any(|&(krate, fingerprint)| {
- db.trait_impls_in_crate(krate)
+ autoderef_method_receiver.iter().any(|(krate, fingerprint)| {
+ hir::TraitImpls::for_crate(db, *krate)
.has_impls_for_trait_and_self_ty(candidate_trait_id, fingerprint)
})
};
@@ -663,7 +656,6 @@
scope,
&trait_candidates,
None,
- None,
|assoc| {
if let Some(&complete_in_flyimport) = required_assoc_items.get(&assoc) {
let located_trait = assoc.container_trait(db).filter(|&it| scope_filter(it))?;
@@ -688,7 +680,6 @@
scope,
&trait_candidates,
None,
- None,
|function| {
let assoc = function.as_assoc_item(db)?;
if let Some(&complete_in_flyimport) = required_assoc_items.get(&assoc) {
diff --git a/crates/ide-db/src/path_transform.rs b/crates/ide-db/src/path_transform.rs
index 4a27035..bc5958e 100644
--- a/crates/ide-db/src/path_transform.rs
+++ b/crates/ide-db/src/path_transform.rs
@@ -546,6 +546,13 @@
match resolution {
hir::PathResolution::Def(def) if def.as_assoc_item(self.source_scope.db).is_none() => {
+ // Macros cannot be used in pattern position, and identifiers that happen
+ // to have the same name as macros (like parameter names `vec`, `format`, etc.)
+ // are bindings, not references. Don't qualify them.
+ if matches!(def, hir::ModuleDef::Macro(_)) {
+ return None;
+ }
+
let cfg = FindPathConfig {
prefer_no_std: false,
prefer_prelude: true,
diff --git a/crates/ide-db/src/prime_caches.rs b/crates/ide-db/src/prime_caches.rs
index 1463fdb..e8f06a3 100644
--- a/crates/ide-db/src/prime_caches.rs
+++ b/crates/ide-db/src/prime_caches.rs
@@ -37,6 +37,7 @@
BeginCrateDefMap { crate_id: Crate, crate_name: Symbol },
EndCrateDefMap { crate_id: Crate },
EndCrateImportMap,
+ EndSema,
EndModuleSymbols,
Cancelled(Cancelled),
}
@@ -70,78 +71,112 @@
(reverse_deps, to_be_done_deps)
};
- let (def_map_work_sender, import_map_work_sender, symbols_work_sender, progress_receiver) = {
+ let (
+ def_map_work_sender,
+ import_map_work_sender,
+ symbols_work_sender,
+ sema_work_sender,
+ progress_receiver,
+ ) = {
let (progress_sender, progress_receiver) = crossbeam_channel::unbounded();
let (def_map_work_sender, def_map_work_receiver) = crossbeam_channel::unbounded();
let (import_map_work_sender, import_map_work_receiver) = crossbeam_channel::unbounded();
+ let (sema_work_sender, sema_work_receiver) = crossbeam_channel::unbounded();
let (symbols_work_sender, symbols_work_receiver) = crossbeam_channel::unbounded();
- let prime_caches_worker =
- move |db: RootDatabase| {
- let handle_def_map = |crate_id, crate_name| {
- progress_sender.send(ParallelPrimeCacheWorkerProgress::BeginCrateDefMap {
- crate_id,
- crate_name,
- })?;
+ let prime_caches_worker = move |db: RootDatabase| {
+ let handle_def_map = |crate_id, crate_name| {
+ progress_sender.send(ParallelPrimeCacheWorkerProgress::BeginCrateDefMap {
+ crate_id,
+ crate_name,
+ })?;
- let cancelled = Cancelled::catch(|| _ = hir::crate_def_map(&db, crate_id));
+ let cancelled = Cancelled::catch(|| {
+ _ = hir::crate_def_map(&db, crate_id);
+ });
- match cancelled {
- Ok(()) => progress_sender
- .send(ParallelPrimeCacheWorkerProgress::EndCrateDefMap { crate_id })?,
- Err(cancelled) => progress_sender
- .send(ParallelPrimeCacheWorkerProgress::Cancelled(cancelled))?,
- }
-
- Ok::<_, crossbeam_channel::SendError<_>>(())
- };
- let handle_import_map = |crate_id| {
- let cancelled = Cancelled::catch(|| _ = db.import_map(crate_id));
-
- match cancelled {
- Ok(()) => progress_sender
- .send(ParallelPrimeCacheWorkerProgress::EndCrateImportMap)?,
- Err(cancelled) => progress_sender
- .send(ParallelPrimeCacheWorkerProgress::Cancelled(cancelled))?,
- }
-
- Ok::<_, crossbeam_channel::SendError<_>>(())
- };
- let handle_symbols = |module| {
- let cancelled = Cancelled::catch(AssertUnwindSafe(|| {
- _ = SymbolIndex::module_symbols(&db, module)
- }));
-
- match cancelled {
- Ok(()) => progress_sender
- .send(ParallelPrimeCacheWorkerProgress::EndModuleSymbols)?,
- Err(cancelled) => progress_sender
- .send(ParallelPrimeCacheWorkerProgress::Cancelled(cancelled))?,
- }
-
- Ok::<_, crossbeam_channel::SendError<_>>(())
- };
-
- loop {
- db.unwind_if_revision_cancelled();
-
- // Biased because we want to prefer def maps.
- crossbeam_channel::select_biased! {
- recv(def_map_work_receiver) -> work => {
- let Ok((crate_id, crate_name)) = work else { break };
- handle_def_map(crate_id, crate_name)?;
- }
- recv(import_map_work_receiver) -> work => {
- let Ok(crate_id) = work else { break };
- handle_import_map(crate_id)?;
- }
- recv(symbols_work_receiver) -> work => {
- let Ok(module) = work else { break };
- handle_symbols(module)?;
- }
- }
+ match cancelled {
+ Ok(()) => progress_sender
+ .send(ParallelPrimeCacheWorkerProgress::EndCrateDefMap { crate_id })?,
+ Err(cancelled) => progress_sender
+ .send(ParallelPrimeCacheWorkerProgress::Cancelled(cancelled))?,
}
+
Ok::<_, crossbeam_channel::SendError<_>>(())
};
+ let handle_sema = |crate_id| {
+ let cancelled = Cancelled::catch(|| {
+ hir::attach_db(&db, || {
+ // method resolution is likely to hit all trait impls at some point
+ // we pre-populate it here as this will hit a lot of parses ...
+ _ = hir::TraitImpls::for_crate(&db, crate_id);
+ // we compute the lang items here as the work for them is also highly recursive and will be triggered by the module symbols query
+ // slowing down leaf crate analysis tremendously as we go back to being blocked on a single thread
+ _ = hir::crate_lang_items(&db, crate_id);
+ })
+ });
+
+ match cancelled {
+ Ok(()) => progress_sender.send(ParallelPrimeCacheWorkerProgress::EndSema)?,
+ Err(cancelled) => progress_sender
+ .send(ParallelPrimeCacheWorkerProgress::Cancelled(cancelled))?,
+ }
+
+ Ok::<_, crossbeam_channel::SendError<_>>(())
+ };
+ let handle_import_map = |crate_id| {
+ let cancelled = Cancelled::catch(|| _ = db.import_map(crate_id));
+
+ match cancelled {
+ Ok(()) => {
+ progress_sender.send(ParallelPrimeCacheWorkerProgress::EndCrateImportMap)?
+ }
+ Err(cancelled) => progress_sender
+ .send(ParallelPrimeCacheWorkerProgress::Cancelled(cancelled))?,
+ }
+
+ Ok::<_, crossbeam_channel::SendError<_>>(())
+ };
+ let handle_symbols = |module: hir::Module| {
+ let cancelled = Cancelled::catch(AssertUnwindSafe(|| {
+ _ = SymbolIndex::module_symbols(&db, module)
+ }));
+
+ match cancelled {
+ Ok(()) => {
+ progress_sender.send(ParallelPrimeCacheWorkerProgress::EndModuleSymbols)?
+ }
+ Err(cancelled) => progress_sender
+ .send(ParallelPrimeCacheWorkerProgress::Cancelled(cancelled))?,
+ }
+
+ Ok::<_, crossbeam_channel::SendError<_>>(())
+ };
+
+ loop {
+ db.unwind_if_revision_cancelled();
+
+ // Biased because we want to prefer def maps.
+ crossbeam_channel::select_biased! {
+ recv(def_map_work_receiver) -> work => {
+ let Ok((crate_id, crate_name)) = work else { break };
+ handle_def_map(crate_id, crate_name)?;
+ }
+ recv(sema_work_receiver) -> work => {
+ let Ok(crate_id) = work else { break };
+ handle_sema(crate_id)?;
+ }
+ recv(import_map_work_receiver) -> work => {
+ let Ok(crate_id) = work else { break };
+ handle_import_map(crate_id)?;
+ }
+ recv(symbols_work_receiver) -> work => {
+ let Ok(module) = work else { break };
+ handle_symbols(module)?;
+ }
+ }
+ }
+ Ok::<_, crossbeam_channel::SendError<_>>(())
+ };
for id in 0..num_worker_threads {
stdx::thread::Builder::new(
@@ -157,13 +192,20 @@
.expect("failed to spawn thread");
}
- (def_map_work_sender, import_map_work_sender, symbols_work_sender, progress_receiver)
+ (
+ def_map_work_sender,
+ import_map_work_sender,
+ symbols_work_sender,
+ sema_work_sender,
+ progress_receiver,
+ )
};
let crate_def_maps_total = db.all_crates().len();
let mut crate_def_maps_done = 0;
let (mut crate_import_maps_total, mut crate_import_maps_done) = (0usize, 0usize);
let (mut module_symbols_total, mut module_symbols_done) = (0usize, 0usize);
+ let (mut sema_total, mut sema_done) = (0usize, 0usize);
// an index map is used to preserve ordering so we can sort the progress report in order of
// "longest crate to index" first
@@ -182,6 +224,7 @@
while crate_def_maps_done < crate_def_maps_total
|| crate_import_maps_done < crate_import_maps_total
|| module_symbols_done < module_symbols_total
+ || sema_done < sema_total
{
db.unwind_if_revision_cancelled();
@@ -228,6 +271,7 @@
}
if crate_def_maps_done == crate_def_maps_total {
+ // Can we trigger lru-eviction once at this point to reduce peak memory usage?
cb(ParallelPrimeCachesProgress {
crates_currently_indexing: vec![],
crates_done: crate_def_maps_done,
@@ -236,6 +280,8 @@
});
}
+ sema_work_sender.send(crate_id).ok();
+ sema_total += 1;
let origin = &crate_id.data(db).origin;
if origin.is_lang() {
crate_import_maps_total += 1;
@@ -259,6 +305,7 @@
}
ParallelPrimeCacheWorkerProgress::EndCrateImportMap => crate_import_maps_done += 1,
ParallelPrimeCacheWorkerProgress::EndModuleSymbols => module_symbols_done += 1,
+ ParallelPrimeCacheWorkerProgress::EndSema => sema_done += 1,
ParallelPrimeCacheWorkerProgress::Cancelled(cancelled) => {
// Cancelled::throw should probably be public
std::panic::resume_unwind(Box::new(cancelled));
diff --git a/crates/ide-diagnostics/src/handlers/incorrect_case.rs b/crates/ide-diagnostics/src/handlers/incorrect_case.rs
index 519ff19..fdc426c 100644
--- a/crates/ide-diagnostics/src/handlers/incorrect_case.rs
+++ b/crates/ide-diagnostics/src/handlers/incorrect_case.rs
@@ -439,10 +439,27 @@
#[test]
fn ignores_no_mangle_items() {
cov_mark::check!(extern_func_no_mangle_ignored);
+ cov_mark::check!(no_mangle_static_incorrect_case_ignored);
check_diagnostics(
r#"
#[no_mangle]
extern "C" fn NonSnakeCaseName(some_var: u8) -> u8;
+#[no_mangle]
+static lower_case: () = ();
+ "#,
+ );
+ }
+
+ #[test]
+ fn ignores_unsafe_no_mangle_items() {
+ cov_mark::check!(extern_func_no_mangle_ignored);
+ cov_mark::check!(no_mangle_static_incorrect_case_ignored);
+ check_diagnostics(
+ r#"
+#[unsafe(no_mangle)]
+extern "C" fn NonSnakeCaseName(some_var: u8) -> u8;
+#[unsafe(no_mangle)]
+static lower_case: () = ();
"#,
);
}
diff --git a/crates/ide-diagnostics/src/handlers/missing_fields.rs b/crates/ide-diagnostics/src/handlers/missing_fields.rs
index 49f925e..ab7256d 100644
--- a/crates/ide-diagnostics/src/handlers/missing_fields.rs
+++ b/crates/ide-diagnostics/src/handlers/missing_fields.rs
@@ -226,7 +226,7 @@
// Look for a ::new() associated function
let has_new_func = ty
- .iterate_assoc_items(ctx.sema.db, krate, |assoc_item| {
+ .iterate_assoc_items(ctx.sema.db, |assoc_item| {
if let AssocItem::Function(func) = assoc_item
&& func.name(ctx.sema.db) == sym::new
&& func.assoc_fn_params(ctx.sema.db).is_empty()
diff --git a/crates/ide-diagnostics/src/handlers/missing_match_arms.rs b/crates/ide-diagnostics/src/handlers/missing_match_arms.rs
index 1fc96b7..d52fc73 100644
--- a/crates/ide-diagnostics/src/handlers/missing_match_arms.rs
+++ b/crates/ide-diagnostics/src/handlers/missing_match_arms.rs
@@ -390,7 +390,6 @@
#[test]
fn expr_diverges() {
- cov_mark::check_count!(validate_match_bailed_out, 2);
check_diagnostics(
r#"
enum Either { A, B }
@@ -401,6 +400,7 @@
Either::B => (),
}
match loop {} {
+ // ^^^^^^^ error: missing match arm: `B` not covered
Either::A => (),
}
match loop { break Either::A } {
diff --git a/crates/ide-diagnostics/src/handlers/mutability_errors.rs b/crates/ide-diagnostics/src/handlers/mutability_errors.rs
index eefa1ac..18280a4 100644
--- a/crates/ide-diagnostics/src/handlers/mutability_errors.rs
+++ b/crates/ide-diagnostics/src/handlers/mutability_errors.rs
@@ -806,7 +806,7 @@
_ = (x, y);
let x = Foo;
let y = &mut *x;
- //^^ 💡 error: cannot mutate immutable variable `x`
+ // ^ 💡 error: cannot mutate immutable variable `x`
_ = (x, y);
let x = Foo;
//^ 💡 warn: unused variable
@@ -815,13 +815,13 @@
//^^^^^^ 💡 error: cannot mutate immutable variable `x`
_ = (x, y);
let ref mut y = *x;
- //^^ 💡 error: cannot mutate immutable variable `x`
+ // ^ 💡 error: cannot mutate immutable variable `x`
_ = y;
let (ref mut y, _) = *x;
- //^^ 💡 error: cannot mutate immutable variable `x`
+ // ^ 💡 error: cannot mutate immutable variable `x`
_ = y;
match *x {
- //^^ 💡 error: cannot mutate immutable variable `x`
+ // ^ 💡 error: cannot mutate immutable variable `x`
(ref y, 5) => _ = y,
(_, ref mut y) => _ = y,
}
@@ -1130,7 +1130,7 @@
//^^^^^^^ 💡 error: cannot mutate immutable variable `x`
let x = Box::new(5);
let closure = || *x = 2;
- //^ 💡 error: cannot mutate immutable variable `x`
+ //^^^^^^ 💡 error: cannot mutate immutable variable `x`
_ = closure;
}
"#,
diff --git a/crates/ide-diagnostics/src/handlers/private_field.rs b/crates/ide-diagnostics/src/handlers/private_field.rs
index 69cd0d2..23f0460 100644
--- a/crates/ide-diagnostics/src/handlers/private_field.rs
+++ b/crates/ide-diagnostics/src/handlers/private_field.rs
@@ -1,6 +1,9 @@
use hir::{EditionedFileId, FileRange, HasCrate, HasSource, Semantics};
use ide_db::{RootDatabase, assists::Assist, source_change::SourceChange, text_edit::TextEdit};
-use syntax::{AstNode, TextRange, TextSize, ast::HasVisibility};
+use syntax::{
+ AstNode, TextRange,
+ ast::{HasName, HasVisibility},
+};
use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext, fix};
@@ -8,7 +11,6 @@
//
// This diagnostic is triggered if the accessed field is not visible from the current module.
pub(crate) fn private_field(ctx: &DiagnosticsContext<'_>, d: &hir::PrivateField) -> Diagnostic {
- // FIXME: add quickfix
Diagnostic::new_with_syntax_node_ptr(
ctx,
DiagnosticCode::RustcHardError("E0616"),
@@ -50,11 +52,19 @@
source.with_value(visibility.syntax()).original_file_range_opt(sema.db)?.0
}
None => {
- let (range, _) = source.syntax().original_file_range_opt(sema.db)?;
- FileRange {
- file_id: range.file_id,
- range: TextRange::at(range.range.start(), TextSize::new(0)),
- }
+ let (range, _) = source
+ .map(|it| {
+ Some(match it {
+ hir::FieldSource::Named(it) => {
+ it.unsafe_token().or(it.name()?.ident_token())?.text_range()
+ }
+ hir::FieldSource::Pos(it) => it.ty()?.syntax().text_range(),
+ })
+ })
+ .transpose()?
+ .original_node_file_range_opt(sema.db)?;
+
+ FileRange { file_id: range.file_id, range: TextRange::empty(range.range.start()) }
}
};
let source_change = SourceChange::from_text_edit(
@@ -229,4 +239,186 @@
"#,
);
}
+
+ #[test]
+ fn change_visibility_of_field_with_doc_comment() {
+ check_fix(
+ r#"
+pub mod foo {
+ pub struct Foo {
+ /// This is a doc comment
+ bar: u32,
+ }
+}
+
+fn main() {
+ let x = foo::Foo { bar: 0 };
+ x.bar$0;
+}
+ "#,
+ r#"
+pub mod foo {
+ pub struct Foo {
+ /// This is a doc comment
+ pub(crate) bar: u32,
+ }
+}
+
+fn main() {
+ let x = foo::Foo { bar: 0 };
+ x.bar;
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn change_visibility_of_field_with_line_comment() {
+ check_fix(
+ r#"
+pub mod foo {
+ pub struct Foo {
+ // This is a line comment
+ bar: u32,
+ }
+}
+
+fn main() {
+ let x = foo::Foo { bar: 0 };
+ x.bar$0;
+}
+ "#,
+ r#"
+pub mod foo {
+ pub struct Foo {
+ // This is a line comment
+ pub(crate) bar: u32,
+ }
+}
+
+fn main() {
+ let x = foo::Foo { bar: 0 };
+ x.bar;
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn change_visibility_of_field_with_multiple_doc_comments() {
+ check_fix(
+ r#"
+pub mod foo {
+ pub struct Foo {
+ /// First line
+ /// Second line
+ bar: u32,
+ }
+}
+
+fn main() {
+ let x = foo::Foo { bar: 0 };
+ x.bar$0;
+}
+ "#,
+ r#"
+pub mod foo {
+ pub struct Foo {
+ /// First line
+ /// Second line
+ pub(crate) bar: u32,
+ }
+}
+
+fn main() {
+ let x = foo::Foo { bar: 0 };
+ x.bar;
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn change_visibility_of_field_with_attr_and_comment() {
+ check_fix(
+ r#"
+mod foo {
+ pub struct Foo {
+ #[rustfmt::skip]
+ /// First line
+ /// Second line
+ bar: u32,
+ }
+}
+fn main() {
+ foo::Foo { $0bar: 42 };
+}
+ "#,
+ r#"
+mod foo {
+ pub struct Foo {
+ #[rustfmt::skip]
+ /// First line
+ /// Second line
+ pub(crate) bar: u32,
+ }
+}
+fn main() {
+ foo::Foo { bar: 42 };
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn change_visibility_of_field_with_macro() {
+ check_fix(
+ r#"
+macro_rules! allow_unused {
+ ($vis:vis $struct:ident $name:ident { $($fvis:vis $field:ident : $ty:ty,)* }) => {
+ $vis $struct $name {
+ $(
+ #[allow(unused)]
+ $fvis $field : $ty,
+ )*
+ }
+ };
+}
+mod foo {
+ allow_unused!(
+ pub struct Foo {
+ x: i32,
+ }
+ );
+}
+fn main() {
+ let foo = foo::Foo { x: 2 };
+ let _ = foo.$0x
+}
+ "#,
+ r#"
+macro_rules! allow_unused {
+ ($vis:vis $struct:ident $name:ident { $($fvis:vis $field:ident : $ty:ty,)* }) => {
+ $vis $struct $name {
+ $(
+ #[allow(unused)]
+ $fvis $field : $ty,
+ )*
+ }
+ };
+}
+mod foo {
+ allow_unused!(
+ pub struct Foo {
+ pub(crate) x: i32,
+ }
+ );
+}
+fn main() {
+ let foo = foo::Foo { x: 2 };
+ let _ = foo.x
+}
+ "#,
+ );
+ }
}
diff --git a/crates/ide-diagnostics/src/handlers/unresolved_assoc_item.rs b/crates/ide-diagnostics/src/handlers/unresolved_assoc_item.rs
index 4ae528b..f181021 100644
--- a/crates/ide-diagnostics/src/handlers/unresolved_assoc_item.rs
+++ b/crates/ide-diagnostics/src/handlers/unresolved_assoc_item.rs
@@ -49,4 +49,25 @@
"#,
);
}
+
+ #[test]
+ fn dyn_super_trait_assoc_type() {
+ check_diagnostics(
+ r#"
+//- minicore: future, send
+
+use core::{future::Future, marker::Send, pin::Pin};
+
+trait FusedFuture: Future {
+ fn is_terminated(&self) -> bool;
+}
+
+struct Box<T: ?Sized>(*const T);
+
+fn main() {
+ let _fut: Pin<Box<dyn FusedFuture<Output = ()> + Send>> = loop {};
+}
+"#,
+ );
+ }
}
diff --git a/crates/ide-ssr/src/resolving.rs b/crates/ide-ssr/src/resolving.rs
index a48c0f8..461de40 100644
--- a/crates/ide-ssr/src/resolving.rs
+++ b/crates/ide-ssr/src/resolving.rs
@@ -228,12 +228,10 @@
let resolved_qualifier = self.scope.speculative_resolve(&path.qualifier()?)?;
if let hir::PathResolution::Def(hir::ModuleDef::Adt(adt)) = resolved_qualifier {
let name = path.segment()?.name_ref()?;
- let module = self.scope.module();
adt.ty(self.scope.db).iterate_path_candidates(
self.scope.db,
&self.scope,
&self.scope.visible_traits().0,
- Some(module),
None,
|assoc_item| {
let item_name = assoc_item.name(self.scope.db)?;
diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs
index e1d18b0..fa4b4b6 100644
--- a/crates/ide/src/hover.rs
+++ b/crates/ide/src/hover.rs
@@ -8,7 +8,6 @@
use either::Either;
use hir::{
DisplayTarget, GenericDef, GenericSubstitution, HasCrate, HasSource, LangItem, Semantics,
- db::DefDatabase,
};
use ide_db::{
FileRange, FxIndexSet, MiniCore, Ranker, RootDatabase,
@@ -522,9 +521,8 @@
return Vec::new();
}
- db.notable_traits_in_deps(ty.krate(db).into())
- .iter()
- .flat_map(|it| &**it)
+ ty.krate(db)
+ .notable_traits_in_deps(db)
.filter_map(move |&trait_| {
let trait_ = trait_.into();
ty.impls_trait(db, trait_, &[]).then(|| {
diff --git a/crates/ide/src/hover/tests.rs b/crates/ide/src/hover/tests.rs
index 3a19531..071eacf 100644
--- a/crates/ide/src/hover/tests.rs
+++ b/crates/ide/src/hover/tests.rs
@@ -350,7 +350,7 @@
fn hover_closure() {
check(
r#"
-//- minicore: copy
+//- minicore: copy, add, builtin_impls
fn main() {
let x = 2;
let y = $0|z| x + z;
@@ -3280,7 +3280,7 @@
check_hover_no_memory_layout(
r#"
-//- minicore: copy
+//- minicore: copy, add, builtin_impls
fn main() {
let x = 2;
let y = $0|z| x + z;
diff --git a/crates/ide/src/inlay_hints.rs b/crates/ide/src/inlay_hints.rs
index 21550d5..2b4fe54 100644
--- a/crates/ide/src/inlay_hints.rs
+++ b/crates/ide/src/inlay_hints.rs
@@ -316,6 +316,7 @@
pub closure_capture_hints: bool,
pub binding_mode_hints: bool,
pub implicit_drop_hints: bool,
+ pub implied_dyn_trait_hints: bool,
pub lifetime_elision_hints: LifetimeElisionHints,
pub param_names_for_lifetime_elision_hints: bool,
pub hide_named_constructor_hints: bool,
@@ -907,6 +908,7 @@
closing_brace_hints_min_lines: None,
fields_to_resolve: InlayFieldsToResolve::empty(),
implicit_drop_hints: false,
+ implied_dyn_trait_hints: false,
range_exclusive_hints: false,
minicore: MiniCore::default(),
};
diff --git a/crates/ide/src/inlay_hints/adjustment.rs b/crates/ide/src/inlay_hints/adjustment.rs
index ebb0d57..b2584b6 100644
--- a/crates/ide/src/inlay_hints/adjustment.rs
+++ b/crates/ide/src/inlay_hints/adjustment.rs
@@ -352,7 +352,7 @@
check_with_config(
InlayHintsConfig { adjustment_hints: AdjustmentHints::Always, ..DISABLED_CONFIG },
r#"
-//- minicore: coerce_unsized, fn, eq, index, dispatch_from_dyn
+//- minicore: coerce_unsized, fn, eq, index, dispatch_from_dyn, builtin_impls
fn main() {
let _: u32 = loop {};
//^^^^^^^<never-to-any>
@@ -466,9 +466,8 @@
..DISABLED_CONFIG
},
r#"
-//- minicore: coerce_unsized, fn, eq, index, dispatch_from_dyn
+//- minicore: coerce_unsized, fn, eq, index, dispatch_from_dyn, builtin_impls
fn main() {
-
Struct.consume();
Struct.by_ref();
//^^^^^^.&
diff --git a/crates/ide/src/inlay_hints/implied_dyn_trait.rs b/crates/ide/src/inlay_hints/implied_dyn_trait.rs
index 562eb1e..4fbc88a 100644
--- a/crates/ide/src/inlay_hints/implied_dyn_trait.rs
+++ b/crates/ide/src/inlay_hints/implied_dyn_trait.rs
@@ -14,6 +14,10 @@
config: &InlayHintsConfig<'_>,
path: Either<ast::PathType, ast::DynTraitType>,
) -> Option<()> {
+ if !config.implied_dyn_trait_hints {
+ return None;
+ }
+
let parent = path.syntax().parent()?;
let range = match path {
Either::Left(path) => {
@@ -76,7 +80,14 @@
#[track_caller]
fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str) {
- check_with_config(InlayHintsConfig { sized_bound: true, ..DISABLED_CONFIG }, ra_fixture);
+ check_with_config(
+ InlayHintsConfig {
+ sized_bound: true,
+ implied_dyn_trait_hints: true,
+ ..DISABLED_CONFIG
+ },
+ ra_fixture,
+ );
}
#[test]
@@ -125,7 +136,7 @@
#[test]
fn edit() {
check_edit(
- DISABLED_CONFIG,
+ InlayHintsConfig { implied_dyn_trait_hints: true, ..DISABLED_CONFIG },
r#"
trait T {}
fn foo(
diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs
index 2609457..113cb83 100644
--- a/crates/ide/src/lib.rs
+++ b/crates/ide/src/lib.rs
@@ -642,7 +642,7 @@
/// Returns crates that this file belongs to.
pub fn transitive_rev_deps(&self, crate_id: Crate) -> Cancellable<Vec<Crate>> {
- self.with_db(|db| Vec::from_iter(db.transitive_rev_deps(crate_id)))
+ self.with_db(|db| Vec::from_iter(crate_id.transitive_rev_deps(db)))
}
/// Returns crates that this file *might* belong to.
diff --git a/crates/ide/src/static_index.rs b/crates/ide/src/static_index.rs
index e261928..052de0f 100644
--- a/crates/ide/src/static_index.rs
+++ b/crates/ide/src/static_index.rs
@@ -173,6 +173,7 @@
adjustment_hints_mode: AdjustmentHintsMode::Prefix,
adjustment_hints_hide_outside_unsafe: false,
implicit_drop_hints: false,
+ implied_dyn_trait_hints: false,
hide_named_constructor_hints: false,
hide_closure_initialization_hints: false,
hide_closure_parameter_hints: false,
diff --git a/crates/ide/src/syntax_highlighting/highlight.rs b/crates/ide/src/syntax_highlighting/highlight.rs
index 829d127..33df4a8 100644
--- a/crates/ide/src/syntax_highlighting/highlight.rs
+++ b/crates/ide/src/syntax_highlighting/highlight.rs
@@ -3,7 +3,7 @@
use std::ops::ControlFlow;
use either::Either;
-use hir::{AsAssocItem, HasVisibility, Semantics};
+use hir::{AsAssocItem, HasAttrs, HasVisibility, Semantics, sym};
use ide_db::{
FxHashMap, RootDatabase, SymbolKind,
defs::{Definition, IdentClass, NameClass, NameRefClass},
@@ -413,6 +413,10 @@
if is_from_builtin_crate {
h |= HlMod::DefaultLibrary;
}
+ let is_deprecated = resolved_krate.attrs(sema.db).by_key(sym::deprecated).exists();
+ if is_deprecated {
+ h |= HlMod::Deprecated;
+ }
h |= HlMod::CrateRoot;
h
}
@@ -483,20 +487,25 @@
is_ref: bool,
) -> Highlight {
let db = sema.db;
- let mut h = match def {
- Definition::Macro(m) => Highlight::new(HlTag::Symbol(m.kind(sema.db).into())),
- Definition::Field(_) | Definition::TupleField(_) => {
- Highlight::new(HlTag::Symbol(SymbolKind::Field))
+ let (mut h, attrs) = match def {
+ Definition::Macro(m) => {
+ (Highlight::new(HlTag::Symbol(m.kind(sema.db).into())), Some(m.attrs(sema.db)))
}
- Definition::Crate(_) => {
- Highlight::new(HlTag::Symbol(SymbolKind::Module)) | HlMod::CrateRoot
+ Definition::Field(field) => {
+ (Highlight::new(HlTag::Symbol(SymbolKind::Field)), Some(field.attrs(sema.db)))
}
+ Definition::TupleField(_) => (Highlight::new(HlTag::Symbol(SymbolKind::Field)), None),
+ Definition::Crate(krate) => (
+ Highlight::new(HlTag::Symbol(SymbolKind::Module)) | HlMod::CrateRoot,
+ Some(krate.attrs(sema.db)),
+ ),
Definition::Module(module) => {
let mut h = Highlight::new(HlTag::Symbol(SymbolKind::Module));
if module.is_crate_root() {
h |= HlMod::CrateRoot;
}
- h
+
+ (h, Some(module.attrs(sema.db)))
}
Definition::Function(func) => {
let mut h = Highlight::new(HlTag::Symbol(SymbolKind::Function));
@@ -544,7 +553,7 @@
h |= HlMod::Const;
}
- h
+ (h, Some(func.attrs(sema.db)))
}
Definition::Adt(adt) => {
let h = match adt {
@@ -553,9 +562,11 @@
hir::Adt::Union(_) => HlTag::Symbol(SymbolKind::Union),
};
- Highlight::new(h)
+ (Highlight::new(h), Some(adt.attrs(sema.db)))
}
- Definition::Variant(_) => Highlight::new(HlTag::Symbol(SymbolKind::Variant)),
+ Definition::Variant(variant) => {
+ (Highlight::new(HlTag::Symbol(SymbolKind::Variant)), Some(variant.attrs(sema.db)))
+ }
Definition::Const(konst) => {
let mut h = Highlight::new(HlTag::Symbol(SymbolKind::Const)) | HlMod::Const;
if let Some(item) = konst.as_assoc_item(db) {
@@ -573,9 +584,11 @@
}
}
- h
+ (h, Some(konst.attrs(sema.db)))
}
- Definition::Trait(_) => Highlight::new(HlTag::Symbol(SymbolKind::Trait)),
+ Definition::Trait(trait_) => {
+ (Highlight::new(HlTag::Symbol(SymbolKind::Trait)), Some(trait_.attrs(sema.db)))
+ }
Definition::TypeAlias(type_) => {
let mut h = Highlight::new(HlTag::Symbol(SymbolKind::TypeAlias));
@@ -594,10 +607,12 @@
}
}
- h
+ (h, Some(type_.attrs(sema.db)))
}
- Definition::BuiltinType(_) => Highlight::new(HlTag::BuiltinType),
- Definition::BuiltinLifetime(_) => Highlight::new(HlTag::Symbol(SymbolKind::LifetimeParam)),
+ Definition::BuiltinType(_) => (Highlight::new(HlTag::BuiltinType), None),
+ Definition::BuiltinLifetime(_) => {
+ (Highlight::new(HlTag::Symbol(SymbolKind::LifetimeParam)), None)
+ }
Definition::Static(s) => {
let mut h = Highlight::new(HlTag::Symbol(SymbolKind::Static));
@@ -608,18 +623,23 @@
}
}
- h
+ (h, Some(s.attrs(sema.db)))
}
- Definition::SelfType(_) => Highlight::new(HlTag::Symbol(SymbolKind::Impl)),
- Definition::GenericParam(it) => match it {
- hir::GenericParam::TypeParam(_) => Highlight::new(HlTag::Symbol(SymbolKind::TypeParam)),
- hir::GenericParam::ConstParam(_) => {
- Highlight::new(HlTag::Symbol(SymbolKind::ConstParam)) | HlMod::Const
- }
- hir::GenericParam::LifetimeParam(_) => {
- Highlight::new(HlTag::Symbol(SymbolKind::LifetimeParam))
- }
- },
+ Definition::SelfType(_) => (Highlight::new(HlTag::Symbol(SymbolKind::Impl)), None),
+ Definition::GenericParam(it) => (
+ match it {
+ hir::GenericParam::TypeParam(_) => {
+ Highlight::new(HlTag::Symbol(SymbolKind::TypeParam))
+ }
+ hir::GenericParam::ConstParam(_) => {
+ Highlight::new(HlTag::Symbol(SymbolKind::ConstParam)) | HlMod::Const
+ }
+ hir::GenericParam::LifetimeParam(_) => {
+ Highlight::new(HlTag::Symbol(SymbolKind::LifetimeParam))
+ }
+ },
+ None,
+ ),
Definition::Local(local) => {
let tag = if local.is_self(db) {
HlTag::Symbol(SymbolKind::SelfParam)
@@ -639,7 +659,7 @@
if ty.as_callable(db).is_some() || ty.impls_fnonce(db) {
h |= HlMod::Callable;
}
- h
+ (h, None)
}
Definition::ExternCrateDecl(extern_crate) => {
let mut highlight =
@@ -647,16 +667,20 @@
if extern_crate.alias(db).is_none() {
highlight |= HlMod::Library;
}
- highlight
+ (highlight, Some(extern_crate.attrs(sema.db)))
}
- Definition::Label(_) => Highlight::new(HlTag::Symbol(SymbolKind::Label)),
- Definition::BuiltinAttr(_) => Highlight::new(HlTag::Symbol(SymbolKind::BuiltinAttr)),
- Definition::ToolModule(_) => Highlight::new(HlTag::Symbol(SymbolKind::ToolModule)),
- Definition::DeriveHelper(_) => Highlight::new(HlTag::Symbol(SymbolKind::DeriveHelper)),
+ Definition::Label(_) => (Highlight::new(HlTag::Symbol(SymbolKind::Label)), None),
+ Definition::BuiltinAttr(_) => {
+ (Highlight::new(HlTag::Symbol(SymbolKind::BuiltinAttr)), None)
+ }
+ Definition::ToolModule(_) => (Highlight::new(HlTag::Symbol(SymbolKind::ToolModule)), None),
+ Definition::DeriveHelper(_) => {
+ (Highlight::new(HlTag::Symbol(SymbolKind::DeriveHelper)), None)
+ }
Definition::InlineAsmRegOrRegClass(_) => {
- Highlight::new(HlTag::Symbol(SymbolKind::InlineAsmRegOrRegClass))
+ (Highlight::new(HlTag::Symbol(SymbolKind::InlineAsmRegOrRegClass)), None)
}
- Definition::InlineAsmOperand(_) => Highlight::new(HlTag::Symbol(SymbolKind::Local)),
+ Definition::InlineAsmOperand(_) => (Highlight::new(HlTag::Symbol(SymbolKind::Local)), None),
};
let def_crate = def.krate(db);
@@ -676,6 +700,12 @@
h |= HlMod::DefaultLibrary;
}
+ if let Some(attrs) = attrs
+ && attrs.by_key(sym::deprecated).exists()
+ {
+ h |= HlMod::Deprecated;
+ }
+
h
}
@@ -721,6 +751,7 @@
let is_from_other_crate = krate.as_ref().map_or(false, |krate| def_crate != *krate);
let is_from_builtin_crate = def_crate.is_builtin(sema.db);
let is_public = func.visibility(sema.db) == hir::Visibility::Public;
+ let is_deprecated = func.attrs(sema.db).by_key(sym::deprecated).exists();
if is_from_other_crate {
h |= HlMod::Library;
@@ -732,6 +763,10 @@
h |= HlMod::DefaultLibrary;
}
+ if is_deprecated {
+ h |= HlMod::Deprecated;
+ }
+
if let Some(self_param) = func.self_param(sema.db) {
match self_param.access(sema.db) {
hir::Access::Shared => h |= HlMod::Reference,
diff --git a/crates/ide/src/syntax_highlighting/html.rs b/crates/ide/src/syntax_highlighting/html.rs
index 75e46b8..ff617b3 100644
--- a/crates/ide/src/syntax_highlighting/html.rs
+++ b/crates/ide/src/syntax_highlighting/html.rs
@@ -121,6 +121,7 @@
.reference { font-style: italic; font-weight: bold; }
.const { font-weight: bolder; }
.unsafe { color: #BC8383; }
+.deprecated { text-decoration: line-through; }
.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
diff --git a/crates/ide/src/syntax_highlighting/tags.rs b/crates/ide/src/syntax_highlighting/tags.rs
index 456a612..ca3c3e3 100644
--- a/crates/ide/src/syntax_highlighting/tags.rs
+++ b/crates/ide/src/syntax_highlighting/tags.rs
@@ -67,6 +67,8 @@
/// `foo` in `fn foo(x: i32)` is a definition, `foo` in `foo(90 + 2)` is
/// not.
Definition,
+ /// Used for things with the `#[deprecated]` attribute.
+ Deprecated,
/// Doc-strings like this one.
Documentation,
/// Highlighting injection like rust code in doc strings or ra_fixture.
@@ -224,6 +226,7 @@
HlMod::CrateRoot,
HlMod::DefaultLibrary,
HlMod::Definition,
+ HlMod::Deprecated,
HlMod::Documentation,
HlMod::Injected,
HlMod::IntraDocLink,
@@ -250,6 +253,7 @@
HlMod::CrateRoot => "crate_root",
HlMod::DefaultLibrary => "default_library",
HlMod::Definition => "declaration",
+ HlMod::Deprecated => "deprecated",
HlMod::Documentation => "documentation",
HlMod::Injected => "injected",
HlMod::IntraDocLink => "intra_doc_link",
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_asm.html b/crates/ide/src/syntax_highlighting/test_data/highlight_asm.html
index c8ffa9e..100fdd2 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_asm.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_asm.html
@@ -36,6 +36,7 @@
.reference { font-style: italic; font-weight: bold; }
.const { font-weight: bolder; }
.unsafe { color: #BC8383; }
+.deprecated { text-decoration: line-through; }
.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_assoc_functions.html b/crates/ide/src/syntax_highlighting/test_data/highlight_assoc_functions.html
index faace6e..b619138 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_assoc_functions.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_assoc_functions.html
@@ -36,6 +36,7 @@
.reference { font-style: italic; font-weight: bold; }
.const { font-weight: bolder; }
.unsafe { color: #BC8383; }
+.deprecated { text-decoration: line-through; }
.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_attributes.html b/crates/ide/src/syntax_highlighting/test_data/highlight_attributes.html
index d59f4ca..b151ff4 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_attributes.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_attributes.html
@@ -36,6 +36,7 @@
.reference { font-style: italic; font-weight: bold; }
.const { font-weight: bolder; }
.unsafe { color: #BC8383; }
+.deprecated { text-decoration: line-through; }
.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_block_mod_items.html b/crates/ide/src/syntax_highlighting/test_data/highlight_block_mod_items.html
index 711f534..e3daeef 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_block_mod_items.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_block_mod_items.html
@@ -36,6 +36,7 @@
.reference { font-style: italic; font-weight: bold; }
.const { font-weight: bolder; }
.unsafe { color: #BC8383; }
+.deprecated { text-decoration: line-through; }
.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_comments_disabled.html b/crates/ide/src/syntax_highlighting/test_data/highlight_comments_disabled.html
index 4607448..b532630 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_comments_disabled.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_comments_disabled.html
@@ -36,6 +36,7 @@
.reference { font-style: italic; font-weight: bold; }
.const { font-weight: bolder; }
.unsafe { color: #BC8383; }
+.deprecated { text-decoration: line-through; }
.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_const.html b/crates/ide/src/syntax_highlighting/test_data/highlight_const.html
index 9c7324e..5d89147 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_const.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_const.html
@@ -36,6 +36,7 @@
.reference { font-style: italic; font-weight: bold; }
.const { font-weight: bolder; }
.unsafe { color: #BC8383; }
+.deprecated { text-decoration: line-through; }
.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_crate_root.html b/crates/ide/src/syntax_highlighting/test_data/highlight_crate_root.html
index 4613c65..a6e6b16 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_crate_root.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_crate_root.html
@@ -36,6 +36,7 @@
.reference { font-style: italic; font-weight: bold; }
.const { font-weight: bolder; }
.unsafe { color: #BC8383; }
+.deprecated { text-decoration: line-through; }
.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_default_library.html b/crates/ide/src/syntax_highlighting/test_data/highlight_default_library.html
index b1b2c65..2f4a200 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_default_library.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_default_library.html
@@ -36,6 +36,7 @@
.reference { font-style: italic; font-weight: bold; }
.const { font-weight: bolder; }
.unsafe { color: #BC8383; }
+.deprecated { text-decoration: line-through; }
.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_deprecated.html b/crates/ide/src/syntax_highlighting/test_data/highlight_deprecated.html
new file mode 100644
index 0000000..41d3dff
--- /dev/null
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_deprecated.html
@@ -0,0 +1,73 @@
+
+<style>
+body { margin: 0; }
+pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padding: 0.4em; }
+
+.lifetime { color: #DFAF8F; font-style: italic; }
+.label { color: #DFAF8F; font-style: italic; }
+.comment { color: #7F9F7F; }
+.documentation { color: #629755; }
+.intra_doc_link { font-style: italic; }
+.injected { opacity: 0.65 ; }
+.struct, .enum { color: #7CB8BB; }
+.enum_variant { color: #BDE0F3; }
+.string_literal { color: #CC9393; }
+.field { color: #94BFF3; }
+.function { color: #93E0E3; }
+.parameter { color: #94BFF3; }
+.text { color: #DCDCCC; }
+.type { color: #7CB8BB; }
+.builtin_type { color: #8CD0D3; }
+.type_param { color: #DFAF8F; }
+.attribute { color: #94BFF3; }
+.numeric_literal { color: #BFEBBF; }
+.bool_literal { color: #BFE6EB; }
+.macro { color: #94BFF3; }
+.proc_macro { color: #94BFF3; text-decoration: underline; }
+.derive { color: #94BFF3; font-style: italic; }
+.module { color: #AFD8AF; }
+.value_param { color: #DCDCCC; }
+.variable { color: #DCDCCC; }
+.format_specifier { color: #CC696B; }
+.mutable { text-decoration: underline; }
+.escape_sequence { color: #94BFF3; }
+.keyword { color: #F0DFAF; font-weight: bold; }
+.control { font-style: italic; }
+.reference { font-style: italic; font-weight: bold; }
+.const { font-weight: bolder; }
+.unsafe { color: #BC8383; }
+.deprecated { text-decoration: line-through; }
+
+.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
+.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
+</style>
+<pre><code><span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">!</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute">deprecated</span><span class="attribute_bracket attribute">]</span>
+<span class="keyword">use</span> <span class="keyword crate_root deprecated public">crate</span> <span class="keyword">as</span> <span class="punctuation">_</span><span class="semicolon">;</span>
+<span class="keyword">extern</span> <span class="keyword">crate</span> <span class="module crate_root deprecated library">bar</span><span class="semicolon">;</span>
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute">deprecated</span><span class="attribute_bracket attribute">]</span>
+<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration deprecated public">macro_</span> <span class="brace">{</span>
+ <span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="operator">=</span><span class="operator">></span> <span class="brace">{</span><span class="brace">}</span><span class="semicolon">;</span>
+<span class="brace">}</span>
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute">deprecated</span><span class="attribute_bracket attribute">]</span>
+<span class="keyword">mod</span> <span class="module declaration deprecated">mod_</span> <span class="brace">{</span><span class="brace">}</span>
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute">deprecated</span><span class="attribute_bracket attribute">]</span>
+<span class="keyword">fn</span> <span class="function declaration deprecated">func</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute">deprecated</span><span class="attribute_bracket attribute">]</span>
+<span class="keyword">struct</span> <span class="struct declaration deprecated">Struct</span> <span class="brace">{</span>
+ <span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute">deprecated</span><span class="attribute_bracket attribute">]</span>
+ <span class="field declaration deprecated">field</span><span class="colon">:</span> <span class="builtin_type">u32</span>
+<span class="brace">}</span>
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute">deprecated</span><span class="attribute_bracket attribute">]</span>
+<span class="keyword">enum</span> <span class="enum declaration deprecated">Enum</span> <span class="brace">{</span>
+ <span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute">deprecated</span><span class="attribute_bracket attribute">]</span>
+ <span class="enum_variant declaration deprecated">Variant</span>
+<span class="brace">}</span>
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute">deprecated</span><span class="attribute_bracket attribute">]</span>
+<span class="keyword const">const</span> <span class="constant const declaration deprecated">CONST</span><span class="colon">:</span> <span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="operator">=</span> <span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute">deprecated</span><span class="attribute_bracket attribute">]</span>
+<span class="keyword">trait</span> <span class="trait declaration deprecated">Trait</span> <span class="brace">{</span><span class="brace">}</span>
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute">deprecated</span><span class="attribute_bracket attribute">]</span>
+<span class="keyword">type</span> <span class="type_alias declaration deprecated">Alias</span> <span class="operator">=</span> <span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute">deprecated</span><span class="attribute_bracket attribute">]</span>
+<span class="keyword">static</span> <span class="static declaration deprecated">STATIC</span><span class="colon">:</span> <span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="operator">=</span> <span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+</code></pre>
\ No newline at end of file
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html b/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html
index d00f279..e1c45e9 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html
@@ -36,6 +36,7 @@
.reference { font-style: italic; font-weight: bold; }
.const { font-weight: bolder; }
.unsafe { color: #BC8383; }
+.deprecated { text-decoration: line-through; }
.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_extern_crate.html b/crates/ide/src/syntax_highlighting/test_data/highlight_extern_crate.html
index 5399f83..3a45182 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_extern_crate.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_extern_crate.html
@@ -36,6 +36,7 @@
.reference { font-style: italic; font-weight: bold; }
.const { font-weight: bolder; }
.unsafe { color: #BC8383; }
+.deprecated { text-decoration: line-through; }
.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_general.html b/crates/ide/src/syntax_highlighting/test_data/highlight_general.html
index d058191..fd652f4 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_general.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_general.html
@@ -36,6 +36,7 @@
.reference { font-style: italic; font-weight: bold; }
.const { font-weight: bolder; }
.unsafe { color: #BC8383; }
+.deprecated { text-decoration: line-through; }
.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_injection.html b/crates/ide/src/syntax_highlighting/test_data/highlight_injection.html
index 579c6ce..22f3ba9 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_injection.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_injection.html
@@ -36,6 +36,7 @@
.reference { font-style: italic; font-weight: bold; }
.const { font-weight: bolder; }
.unsafe { color: #BC8383; }
+.deprecated { text-decoration: line-through; }
.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_injection_2.html b/crates/ide/src/syntax_highlighting/test_data/highlight_injection_2.html
index fc2d9a3..5a5d9bd 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_injection_2.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_injection_2.html
@@ -36,6 +36,7 @@
.reference { font-style: italic; font-weight: bold; }
.const { font-weight: bolder; }
.unsafe { color: #BC8383; }
+.deprecated { text-decoration: line-through; }
.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_issue_18089.html b/crates/ide/src/syntax_highlighting/test_data/highlight_issue_18089.html
index 5ef6446..b28818e 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_issue_18089.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_issue_18089.html
@@ -36,6 +36,7 @@
.reference { font-style: italic; font-weight: bold; }
.const { font-weight: bolder; }
.unsafe { color: #BC8383; }
+.deprecated { text-decoration: line-through; }
.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_issue_19357.html b/crates/ide/src/syntax_highlighting/test_data/highlight_issue_19357.html
index 36ed8c5..af27294 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_issue_19357.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_issue_19357.html
@@ -36,6 +36,7 @@
.reference { font-style: italic; font-weight: bold; }
.const { font-weight: bolder; }
.unsafe { color: #BC8383; }
+.deprecated { text-decoration: line-through; }
.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_2015.html b/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_2015.html
index 0407e68..d2a53b2 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_2015.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_2015.html
@@ -36,6 +36,7 @@
.reference { font-style: italic; font-weight: bold; }
.const { font-weight: bolder; }
.unsafe { color: #BC8383; }
+.deprecated { text-decoration: line-through; }
.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_2018.html b/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_2018.html
index f39d033..d309b47 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_2018.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_2018.html
@@ -36,6 +36,7 @@
.reference { font-style: italic; font-weight: bold; }
.const { font-weight: bolder; }
.unsafe { color: #BC8383; }
+.deprecated { text-decoration: line-through; }
.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_2021.html b/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_2021.html
index f39d033..d309b47 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_2021.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_2021.html
@@ -36,6 +36,7 @@
.reference { font-style: italic; font-weight: bold; }
.const { font-weight: bolder; }
.unsafe { color: #BC8383; }
+.deprecated { text-decoration: line-through; }
.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_2024.html b/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_2024.html
index 721185a..575c9a6 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_2024.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_2024.html
@@ -36,6 +36,7 @@
.reference { font-style: italic; font-weight: bold; }
.const { font-weight: bolder; }
.unsafe { color: #BC8383; }
+.deprecated { text-decoration: line-through; }
.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_macros.html b/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_macros.html
index b2c8205..caf66ac 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_macros.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_keywords_macros.html
@@ -36,6 +36,7 @@
.reference { font-style: italic; font-weight: bold; }
.const { font-weight: bolder; }
.unsafe { color: #BC8383; }
+.deprecated { text-decoration: line-through; }
.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_lifetimes.html b/crates/ide/src/syntax_highlighting/test_data/highlight_lifetimes.html
index 618ea21..b90c962 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_lifetimes.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_lifetimes.html
@@ -36,6 +36,7 @@
.reference { font-style: italic; font-weight: bold; }
.const { font-weight: bolder; }
.unsafe { color: #BC8383; }
+.deprecated { text-decoration: line-through; }
.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html b/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html
index c314594..b63d5ce 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html
@@ -36,6 +36,7 @@
.reference { font-style: italic; font-weight: bold; }
.const { font-weight: bolder; }
.unsafe { color: #BC8383; }
+.deprecated { text-decoration: line-through; }
.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_inline.html b/crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_inline.html
index 9996a87..8d8c713 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_inline.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_inline.html
@@ -36,6 +36,7 @@
.reference { font-style: italic; font-weight: bold; }
.const { font-weight: bolder; }
.unsafe { color: #BC8383; }
+.deprecated { text-decoration: line-through; }
.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_outline.html b/crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_outline.html
index dc9e1de..538f653 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_outline.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_module_docs_outline.html
@@ -36,6 +36,7 @@
.reference { font-style: italic; font-weight: bold; }
.const { font-weight: bolder; }
.unsafe { color: #BC8383; }
+.deprecated { text-decoration: line-through; }
.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_operators.html b/crates/ide/src/syntax_highlighting/test_data/highlight_operators.html
index cceb159..20b5065 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_operators.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_operators.html
@@ -36,6 +36,7 @@
.reference { font-style: italic; font-weight: bold; }
.const { font-weight: bolder; }
.unsafe { color: #BC8383; }
+.deprecated { text-decoration: line-through; }
.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_rainbow.html b/crates/ide/src/syntax_highlighting/test_data/highlight_rainbow.html
index e1a8d87..d5401e7 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_rainbow.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_rainbow.html
@@ -36,6 +36,7 @@
.reference { font-style: italic; font-weight: bold; }
.const { font-weight: bolder; }
.unsafe { color: #BC8383; }
+.deprecated { text-decoration: line-through; }
.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html b/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html
index 47ee2ad..1b05129 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html
@@ -36,6 +36,7 @@
.reference { font-style: italic; font-weight: bold; }
.const { font-weight: bolder; }
.unsafe { color: #BC8383; }
+.deprecated { text-decoration: line-through; }
.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_unsafe.html b/crates/ide/src/syntax_highlighting/test_data/highlight_unsafe.html
index 8339daf..93513f5 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_unsafe.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_unsafe.html
@@ -36,6 +36,7 @@
.reference { font-style: italic; font-weight: bold; }
.const { font-weight: bolder; }
.unsafe { color: #BC8383; }
+.deprecated { text-decoration: line-through; }
.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
diff --git a/crates/ide/src/syntax_highlighting/test_data/regression_20952.html b/crates/ide/src/syntax_highlighting/test_data/regression_20952.html
index 2c0250c..fad1b41 100644
--- a/crates/ide/src/syntax_highlighting/test_data/regression_20952.html
+++ b/crates/ide/src/syntax_highlighting/test_data/regression_20952.html
@@ -36,6 +36,7 @@
.reference { font-style: italic; font-weight: bold; }
.const { font-weight: bolder; }
.unsafe { color: #BC8383; }
+.deprecated { text-decoration: line-through; }
.invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; }
.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
diff --git a/crates/ide/src/syntax_highlighting/tests.rs b/crates/ide/src/syntax_highlighting/tests.rs
index 58c613e..b7510e3 100644
--- a/crates/ide/src/syntax_highlighting/tests.rs
+++ b/crates/ide/src/syntax_highlighting/tests.rs
@@ -1511,3 +1511,45 @@
false,
);
}
+
+#[test]
+fn test_deprecated_highlighting() {
+ check_highlighting(
+ r#"
+//- /foo.rs crate:foo deps:bar
+#![deprecated]
+use crate as _;
+extern crate bar;
+#[deprecated]
+macro_rules! macro_ {
+ () => {};
+}
+#[deprecated]
+mod mod_ {}
+#[deprecated]
+fn func() {}
+#[deprecated]
+struct Struct {
+ #[deprecated]
+ field: u32
+}
+#[deprecated]
+enum Enum {
+ #[deprecated]
+ Variant
+}
+#[deprecated]
+const CONST: () = ();
+#[deprecated]
+trait Trait {}
+#[deprecated]
+type Alias = ();
+#[deprecated]
+static STATIC: () = ();
+//- /bar.rs crate:bar
+#![deprecated]
+ "#,
+ expect_file!["./test_data/highlight_deprecated.html"],
+ false,
+ );
+}
diff --git a/crates/intern/src/symbol/symbols.rs b/crates/intern/src/symbol/symbols.rs
index 5a9d451..0624467 100644
--- a/crates/intern/src/symbol/symbols.rs
+++ b/crates/intern/src/symbol/symbols.rs
@@ -520,4 +520,7 @@
never_type_fallback,
specialization,
min_specialization,
+ arbitrary_self_types,
+ arbitrary_self_types_pointers,
+ supertrait_item_shadowing,
}
diff --git a/crates/parser/Cargo.toml b/crates/parser/Cargo.toml
index 8384d5b..2bdf8d7 100644
--- a/crates/parser/Cargo.toml
+++ b/crates/parser/Cargo.toml
@@ -16,7 +16,7 @@
drop_bomb = "0.1.5"
ra-ap-rustc_lexer.workspace = true
rustc-literal-escaper.workspace = true
-tracing = { workspace = true, optional = true }
+tracing.workspace = true
edition.workspace = true
winnow = { version = "0.7.13", default-features = false }
@@ -27,7 +27,7 @@
stdx.workspace = true
[features]
-default = ["tracing"]
+default = []
in-rust-tree = []
[lints]
diff --git a/crates/parser/src/grammar/expressions/atom.rs b/crates/parser/src/grammar/expressions/atom.rs
index cde62e0..ab18309 100644
--- a/crates/parser/src/grammar/expressions/atom.rs
+++ b/crates/parser/src/grammar/expressions/atom.rs
@@ -283,7 +283,7 @@
if p.eat(T![,]) {
while !p.at(EOF) && !p.at(T![')']) {
let m = p.start();
- if p.at(IDENT) && p.nth_at(1, T![=]) {
+ if p.at(IDENT) && p.nth_at(1, T![=]) && !p.nth_at(2, T![=]) {
name(p);
p.bump(T![=]);
}
diff --git a/crates/parser/src/syntax_kind/generated.rs b/crates/parser/src/syntax_kind/generated.rs
index 93e02a9..6a38044 100644
--- a/crates/parser/src/syntax_kind/generated.rs
+++ b/crates/parser/src/syntax_kind/generated.rs
@@ -1006,7 +1006,149 @@
}
}
#[macro_export]
-macro_rules ! T_ { [$] => { $ crate :: SyntaxKind :: DOLLAR } ; [;] => { $ crate :: SyntaxKind :: SEMICOLON } ; [,] => { $ crate :: SyntaxKind :: COMMA } ; ['('] => { $ crate :: SyntaxKind :: L_PAREN } ; [')'] => { $ crate :: SyntaxKind :: R_PAREN } ; ['{'] => { $ crate :: SyntaxKind :: L_CURLY } ; ['}'] => { $ crate :: SyntaxKind :: R_CURLY } ; ['['] => { $ crate :: SyntaxKind :: L_BRACK } ; [']'] => { $ crate :: SyntaxKind :: R_BRACK } ; [<] => { $ crate :: SyntaxKind :: L_ANGLE } ; [>] => { $ crate :: SyntaxKind :: R_ANGLE } ; [@] => { $ crate :: SyntaxKind :: AT } ; [#] => { $ crate :: SyntaxKind :: POUND } ; [~] => { $ crate :: SyntaxKind :: TILDE } ; [?] => { $ crate :: SyntaxKind :: QUESTION } ; [&] => { $ crate :: SyntaxKind :: AMP } ; [|] => { $ crate :: SyntaxKind :: PIPE } ; [+] => { $ crate :: SyntaxKind :: PLUS } ; [*] => { $ crate :: SyntaxKind :: STAR } ; [/] => { $ crate :: SyntaxKind :: SLASH } ; [^] => { $ crate :: SyntaxKind :: CARET } ; [%] => { $ crate :: SyntaxKind :: PERCENT } ; [_] => { $ crate :: SyntaxKind :: UNDERSCORE } ; [.] => { $ crate :: SyntaxKind :: DOT } ; [..] => { $ crate :: SyntaxKind :: DOT2 } ; [...] => { $ crate :: SyntaxKind :: DOT3 } ; [..=] => { $ crate :: SyntaxKind :: DOT2EQ } ; [:] => { $ crate :: SyntaxKind :: COLON } ; [::] => { $ crate :: SyntaxKind :: COLON2 } ; [=] => { $ crate :: SyntaxKind :: EQ } ; [==] => { $ crate :: SyntaxKind :: EQ2 } ; [=>] => { $ crate :: SyntaxKind :: FAT_ARROW } ; [!] 
=> { $ crate :: SyntaxKind :: BANG } ; [!=] => { $ crate :: SyntaxKind :: NEQ } ; [-] => { $ crate :: SyntaxKind :: MINUS } ; [->] => { $ crate :: SyntaxKind :: THIN_ARROW } ; [<=] => { $ crate :: SyntaxKind :: LTEQ } ; [>=] => { $ crate :: SyntaxKind :: GTEQ } ; [+=] => { $ crate :: SyntaxKind :: PLUSEQ } ; [-=] => { $ crate :: SyntaxKind :: MINUSEQ } ; [|=] => { $ crate :: SyntaxKind :: PIPEEQ } ; [&=] => { $ crate :: SyntaxKind :: AMPEQ } ; [^=] => { $ crate :: SyntaxKind :: CARETEQ } ; [/=] => { $ crate :: SyntaxKind :: SLASHEQ } ; [*=] => { $ crate :: SyntaxKind :: STAREQ } ; [%=] => { $ crate :: SyntaxKind :: PERCENTEQ } ; [&&] => { $ crate :: SyntaxKind :: AMP2 } ; [||] => { $ crate :: SyntaxKind :: PIPE2 } ; [<<] => { $ crate :: SyntaxKind :: SHL } ; [>>] => { $ crate :: SyntaxKind :: SHR } ; [<<=] => { $ crate :: SyntaxKind :: SHLEQ } ; [>>=] => { $ crate :: SyntaxKind :: SHREQ } ; [Self] => { $ crate :: SyntaxKind :: SELF_TYPE_KW } ; [abstract] => { $ crate :: SyntaxKind :: ABSTRACT_KW } ; [as] => { $ crate :: SyntaxKind :: AS_KW } ; [become] => { $ crate :: SyntaxKind :: BECOME_KW } ; [box] => { $ crate :: SyntaxKind :: BOX_KW } ; [break] => { $ crate :: SyntaxKind :: BREAK_KW } ; [const] => { $ crate :: SyntaxKind :: CONST_KW } ; [continue] => { $ crate :: SyntaxKind :: CONTINUE_KW } ; [crate] => { $ crate :: SyntaxKind :: CRATE_KW } ; [do] => { $ crate :: SyntaxKind :: DO_KW } ; [else] => { $ crate :: SyntaxKind :: ELSE_KW } ; [enum] => { $ crate :: SyntaxKind :: ENUM_KW } ; [extern] => { $ crate :: SyntaxKind :: EXTERN_KW } ; [false] => { $ crate :: SyntaxKind :: FALSE_KW } ; [final] => { $ crate :: SyntaxKind :: FINAL_KW } ; [fn] => { $ crate :: SyntaxKind :: FN_KW } ; [for] => { $ crate :: SyntaxKind :: FOR_KW } ; [if] => { $ crate :: SyntaxKind :: IF_KW } ; [impl] => { $ crate :: SyntaxKind :: IMPL_KW } ; [in] => { $ crate :: SyntaxKind :: IN_KW } ; [let] => { $ crate :: SyntaxKind :: LET_KW } ; [loop] => { $ crate :: SyntaxKind :: LOOP_KW } ; 
[macro] => { $ crate :: SyntaxKind :: MACRO_KW } ; [match] => { $ crate :: SyntaxKind :: MATCH_KW } ; [mod] => { $ crate :: SyntaxKind :: MOD_KW } ; [move] => { $ crate :: SyntaxKind :: MOVE_KW } ; [mut] => { $ crate :: SyntaxKind :: MUT_KW } ; [override] => { $ crate :: SyntaxKind :: OVERRIDE_KW } ; [priv] => { $ crate :: SyntaxKind :: PRIV_KW } ; [pub] => { $ crate :: SyntaxKind :: PUB_KW } ; [ref] => { $ crate :: SyntaxKind :: REF_KW } ; [return] => { $ crate :: SyntaxKind :: RETURN_KW } ; [self] => { $ crate :: SyntaxKind :: SELF_KW } ; [static] => { $ crate :: SyntaxKind :: STATIC_KW } ; [struct] => { $ crate :: SyntaxKind :: STRUCT_KW } ; [super] => { $ crate :: SyntaxKind :: SUPER_KW } ; [trait] => { $ crate :: SyntaxKind :: TRAIT_KW } ; [true] => { $ crate :: SyntaxKind :: TRUE_KW } ; [type] => { $ crate :: SyntaxKind :: TYPE_KW } ; [typeof] => { $ crate :: SyntaxKind :: TYPEOF_KW } ; [unsafe] => { $ crate :: SyntaxKind :: UNSAFE_KW } ; [unsized] => { $ crate :: SyntaxKind :: UNSIZED_KW } ; [use] => { $ crate :: SyntaxKind :: USE_KW } ; [virtual] => { $ crate :: SyntaxKind :: VIRTUAL_KW } ; [where] => { $ crate :: SyntaxKind :: WHERE_KW } ; [while] => { $ crate :: SyntaxKind :: WHILE_KW } ; [yield] => { $ crate :: SyntaxKind :: YIELD_KW } ; [asm] => { $ crate :: SyntaxKind :: ASM_KW } ; [att_syntax] => { $ crate :: SyntaxKind :: ATT_SYNTAX_KW } ; [auto] => { $ crate :: SyntaxKind :: AUTO_KW } ; [builtin] => { $ crate :: SyntaxKind :: BUILTIN_KW } ; [clobber_abi] => { $ crate :: SyntaxKind :: CLOBBER_ABI_KW } ; [default] => { $ crate :: SyntaxKind :: DEFAULT_KW } ; [dyn] => { $ crate :: SyntaxKind :: DYN_KW } ; [format_args] => { $ crate :: SyntaxKind :: FORMAT_ARGS_KW } ; [global_asm] => { $ crate :: SyntaxKind :: GLOBAL_ASM_KW } ; [inlateout] => { $ crate :: SyntaxKind :: INLATEOUT_KW } ; [inout] => { $ crate :: SyntaxKind :: INOUT_KW } ; [label] => { $ crate :: SyntaxKind :: LABEL_KW } ; [lateout] => { $ crate :: SyntaxKind :: LATEOUT_KW } ; [macro_rules] 
=> { $ crate :: SyntaxKind :: MACRO_RULES_KW } ; [may_unwind] => { $ crate :: SyntaxKind :: MAY_UNWIND_KW } ; [naked_asm] => { $ crate :: SyntaxKind :: NAKED_ASM_KW } ; [nomem] => { $ crate :: SyntaxKind :: NOMEM_KW } ; [noreturn] => { $ crate :: SyntaxKind :: NORETURN_KW } ; [nostack] => { $ crate :: SyntaxKind :: NOSTACK_KW } ; [offset_of] => { $ crate :: SyntaxKind :: OFFSET_OF_KW } ; [options] => { $ crate :: SyntaxKind :: OPTIONS_KW } ; [out] => { $ crate :: SyntaxKind :: OUT_KW } ; [preserves_flags] => { $ crate :: SyntaxKind :: PRESERVES_FLAGS_KW } ; [pure] => { $ crate :: SyntaxKind :: PURE_KW } ; [raw] => { $ crate :: SyntaxKind :: RAW_KW } ; [readonly] => { $ crate :: SyntaxKind :: READONLY_KW } ; [safe] => { $ crate :: SyntaxKind :: SAFE_KW } ; [sym] => { $ crate :: SyntaxKind :: SYM_KW } ; [union] => { $ crate :: SyntaxKind :: UNION_KW } ; [yeet] => { $ crate :: SyntaxKind :: YEET_KW } ; [async] => { $ crate :: SyntaxKind :: ASYNC_KW } ; [await] => { $ crate :: SyntaxKind :: AWAIT_KW } ; [dyn] => { $ crate :: SyntaxKind :: DYN_KW } ; [gen] => { $ crate :: SyntaxKind :: GEN_KW } ; [try] => { $ crate :: SyntaxKind :: TRY_KW } ; [lifetime_ident] => { $ crate :: SyntaxKind :: LIFETIME_IDENT } ; [int_number] => { $ crate :: SyntaxKind :: INT_NUMBER } ; [ident] => { $ crate :: SyntaxKind :: IDENT } ; [string] => { $ crate :: SyntaxKind :: STRING } ; [shebang] => { $ crate :: SyntaxKind :: SHEBANG } ; [frontmatter] => { $ crate :: SyntaxKind :: FRONTMATTER } ; }
+macro_rules ! T_ {
+ [$] => { $ crate :: SyntaxKind :: DOLLAR };
+ [;] => { $ crate :: SyntaxKind :: SEMICOLON };
+ [,] => { $ crate :: SyntaxKind :: COMMA };
+ ['('] => { $ crate :: SyntaxKind :: L_PAREN };
+ [')'] => { $ crate :: SyntaxKind :: R_PAREN };
+ ['{'] => { $ crate :: SyntaxKind :: L_CURLY };
+ ['}'] => { $ crate :: SyntaxKind :: R_CURLY };
+ ['['] => { $ crate :: SyntaxKind :: L_BRACK };
+ [']'] => { $ crate :: SyntaxKind :: R_BRACK };
+ [<] => { $ crate :: SyntaxKind :: L_ANGLE };
+ [>] => { $ crate :: SyntaxKind :: R_ANGLE };
+ [@] => { $ crate :: SyntaxKind :: AT };
+ [#] => { $ crate :: SyntaxKind :: POUND };
+ [~] => { $ crate :: SyntaxKind :: TILDE };
+ [?] => { $ crate :: SyntaxKind :: QUESTION };
+ [&] => { $ crate :: SyntaxKind :: AMP };
+ [|] => { $ crate :: SyntaxKind :: PIPE };
+ [+] => { $ crate :: SyntaxKind :: PLUS };
+ [*] => { $ crate :: SyntaxKind :: STAR };
+ [/] => { $ crate :: SyntaxKind :: SLASH };
+ [^] => { $ crate :: SyntaxKind :: CARET };
+ [%] => { $ crate :: SyntaxKind :: PERCENT };
+ [_] => { $ crate :: SyntaxKind :: UNDERSCORE };
+ [.] => { $ crate :: SyntaxKind :: DOT };
+ [..] => { $ crate :: SyntaxKind :: DOT2 };
+ [...] => { $ crate :: SyntaxKind :: DOT3 };
+ [..=] => { $ crate :: SyntaxKind :: DOT2EQ };
+ [:] => { $ crate :: SyntaxKind :: COLON };
+ [::] => { $ crate :: SyntaxKind :: COLON2 };
+ [=] => { $ crate :: SyntaxKind :: EQ };
+ [==] => { $ crate :: SyntaxKind :: EQ2 };
+ [=>] => { $ crate :: SyntaxKind :: FAT_ARROW };
+ [!] => { $ crate :: SyntaxKind :: BANG };
+ [!=] => { $ crate :: SyntaxKind :: NEQ };
+ [-] => { $ crate :: SyntaxKind :: MINUS };
+ [->] => { $ crate :: SyntaxKind :: THIN_ARROW };
+ [<=] => { $ crate :: SyntaxKind :: LTEQ };
+ [>=] => { $ crate :: SyntaxKind :: GTEQ };
+ [+=] => { $ crate :: SyntaxKind :: PLUSEQ };
+ [-=] => { $ crate :: SyntaxKind :: MINUSEQ };
+ [|=] => { $ crate :: SyntaxKind :: PIPEEQ };
+ [&=] => { $ crate :: SyntaxKind :: AMPEQ };
+ [^=] => { $ crate :: SyntaxKind :: CARETEQ };
+ [/=] => { $ crate :: SyntaxKind :: SLASHEQ };
+ [*=] => { $ crate :: SyntaxKind :: STAREQ };
+ [%=] => { $ crate :: SyntaxKind :: PERCENTEQ };
+ [&&] => { $ crate :: SyntaxKind :: AMP2 };
+ [||] => { $ crate :: SyntaxKind :: PIPE2 };
+ [<<] => { $ crate :: SyntaxKind :: SHL };
+ [>>] => { $ crate :: SyntaxKind :: SHR };
+ [<<=] => { $ crate :: SyntaxKind :: SHLEQ };
+ [>>=] => { $ crate :: SyntaxKind :: SHREQ };
+ [Self] => { $ crate :: SyntaxKind :: SELF_TYPE_KW };
+ [abstract] => { $ crate :: SyntaxKind :: ABSTRACT_KW };
+ [as] => { $ crate :: SyntaxKind :: AS_KW };
+ [become] => { $ crate :: SyntaxKind :: BECOME_KW };
+ [box] => { $ crate :: SyntaxKind :: BOX_KW };
+ [break] => { $ crate :: SyntaxKind :: BREAK_KW };
+ [const] => { $ crate :: SyntaxKind :: CONST_KW };
+ [continue] => { $ crate :: SyntaxKind :: CONTINUE_KW };
+ [crate] => { $ crate :: SyntaxKind :: CRATE_KW };
+ [do] => { $ crate :: SyntaxKind :: DO_KW };
+ [else] => { $ crate :: SyntaxKind :: ELSE_KW };
+ [enum] => { $ crate :: SyntaxKind :: ENUM_KW };
+ [extern] => { $ crate :: SyntaxKind :: EXTERN_KW };
+ [false] => { $ crate :: SyntaxKind :: FALSE_KW };
+ [final] => { $ crate :: SyntaxKind :: FINAL_KW };
+ [fn] => { $ crate :: SyntaxKind :: FN_KW };
+ [for] => { $ crate :: SyntaxKind :: FOR_KW };
+ [if] => { $ crate :: SyntaxKind :: IF_KW };
+ [impl] => { $ crate :: SyntaxKind :: IMPL_KW };
+ [in] => { $ crate :: SyntaxKind :: IN_KW };
+ [let] => { $ crate :: SyntaxKind :: LET_KW };
+ [loop] => { $ crate :: SyntaxKind :: LOOP_KW };
+ [macro] => { $ crate :: SyntaxKind :: MACRO_KW };
+ [match] => { $ crate :: SyntaxKind :: MATCH_KW };
+ [mod] => { $ crate :: SyntaxKind :: MOD_KW };
+ [move] => { $ crate :: SyntaxKind :: MOVE_KW };
+ [mut] => { $ crate :: SyntaxKind :: MUT_KW };
+ [override] => { $ crate :: SyntaxKind :: OVERRIDE_KW };
+ [priv] => { $ crate :: SyntaxKind :: PRIV_KW };
+ [pub] => { $ crate :: SyntaxKind :: PUB_KW };
+ [ref] => { $ crate :: SyntaxKind :: REF_KW };
+ [return] => { $ crate :: SyntaxKind :: RETURN_KW };
+ [self] => { $ crate :: SyntaxKind :: SELF_KW };
+ [static] => { $ crate :: SyntaxKind :: STATIC_KW };
+ [struct] => { $ crate :: SyntaxKind :: STRUCT_KW };
+ [super] => { $ crate :: SyntaxKind :: SUPER_KW };
+ [trait] => { $ crate :: SyntaxKind :: TRAIT_KW };
+ [true] => { $ crate :: SyntaxKind :: TRUE_KW };
+ [type] => { $ crate :: SyntaxKind :: TYPE_KW };
+ [typeof] => { $ crate :: SyntaxKind :: TYPEOF_KW };
+ [unsafe] => { $ crate :: SyntaxKind :: UNSAFE_KW };
+ [unsized] => { $ crate :: SyntaxKind :: UNSIZED_KW };
+ [use] => { $ crate :: SyntaxKind :: USE_KW };
+ [virtual] => { $ crate :: SyntaxKind :: VIRTUAL_KW };
+ [where] => { $ crate :: SyntaxKind :: WHERE_KW };
+ [while] => { $ crate :: SyntaxKind :: WHILE_KW };
+ [yield] => { $ crate :: SyntaxKind :: YIELD_KW };
+ [asm] => { $ crate :: SyntaxKind :: ASM_KW };
+ [att_syntax] => { $ crate :: SyntaxKind :: ATT_SYNTAX_KW };
+ [auto] => { $ crate :: SyntaxKind :: AUTO_KW };
+ [builtin] => { $ crate :: SyntaxKind :: BUILTIN_KW };
+ [clobber_abi] => { $ crate :: SyntaxKind :: CLOBBER_ABI_KW };
+ [default] => { $ crate :: SyntaxKind :: DEFAULT_KW };
+ [dyn] => { $ crate :: SyntaxKind :: DYN_KW };
+ [format_args] => { $ crate :: SyntaxKind :: FORMAT_ARGS_KW };
+ [global_asm] => { $ crate :: SyntaxKind :: GLOBAL_ASM_KW };
+ [inlateout] => { $ crate :: SyntaxKind :: INLATEOUT_KW };
+ [inout] => { $ crate :: SyntaxKind :: INOUT_KW };
+ [label] => { $ crate :: SyntaxKind :: LABEL_KW };
+ [lateout] => { $ crate :: SyntaxKind :: LATEOUT_KW };
+ [macro_rules] => { $ crate :: SyntaxKind :: MACRO_RULES_KW };
+ [may_unwind] => { $ crate :: SyntaxKind :: MAY_UNWIND_KW };
+ [naked_asm] => { $ crate :: SyntaxKind :: NAKED_ASM_KW };
+ [nomem] => { $ crate :: SyntaxKind :: NOMEM_KW };
+ [noreturn] => { $ crate :: SyntaxKind :: NORETURN_KW };
+ [nostack] => { $ crate :: SyntaxKind :: NOSTACK_KW };
+ [offset_of] => { $ crate :: SyntaxKind :: OFFSET_OF_KW };
+ [options] => { $ crate :: SyntaxKind :: OPTIONS_KW };
+ [out] => { $ crate :: SyntaxKind :: OUT_KW };
+ [preserves_flags] => { $ crate :: SyntaxKind :: PRESERVES_FLAGS_KW };
+ [pure] => { $ crate :: SyntaxKind :: PURE_KW };
+ [raw] => { $ crate :: SyntaxKind :: RAW_KW };
+ [readonly] => { $ crate :: SyntaxKind :: READONLY_KW };
+ [safe] => { $ crate :: SyntaxKind :: SAFE_KW };
+ [sym] => { $ crate :: SyntaxKind :: SYM_KW };
+ [union] => { $ crate :: SyntaxKind :: UNION_KW };
+ [yeet] => { $ crate :: SyntaxKind :: YEET_KW };
+ [async] => { $ crate :: SyntaxKind :: ASYNC_KW };
+ [await] => { $ crate :: SyntaxKind :: AWAIT_KW };
+ [dyn] => { $ crate :: SyntaxKind :: DYN_KW };
+ [gen] => { $ crate :: SyntaxKind :: GEN_KW };
+ [try] => { $ crate :: SyntaxKind :: TRY_KW };
+ [lifetime_ident] => { $ crate :: SyntaxKind :: LIFETIME_IDENT };
+ [int_number] => { $ crate :: SyntaxKind :: INT_NUMBER };
+ [ident] => { $ crate :: SyntaxKind :: IDENT };
+ [string] => { $ crate :: SyntaxKind :: STRING };
+ [shebang] => { $ crate :: SyntaxKind :: SHEBANG };
+ [frontmatter] => { $ crate :: SyntaxKind :: FRONTMATTER };
+}
+
impl ::core::marker::Copy for SyntaxKind {}
impl ::core::clone::Clone for SyntaxKind {
#[inline]
diff --git a/crates/proc-macro-api/Cargo.toml b/crates/proc-macro-api/Cargo.toml
index dac8e09..63745b9 100644
--- a/crates/proc-macro-api/Cargo.toml
+++ b/crates/proc-macro-api/Cargo.toml
@@ -24,10 +24,14 @@
paths = { workspace = true, features = ["serde1"] }
tt.workspace = true
stdx.workspace = true
+proc-macro-srv = {workspace = true, optional = true}
# span = {workspace = true, default-features = false} does not work
span = { path = "../span", version = "0.0.0", default-features = false}
intern.workspace = true
+[features]
+sysroot-abi = ["proc-macro-srv", "proc-macro-srv/sysroot-abi"]
+
[lints]
workspace = true
diff --git a/crates/proc-macro-api/src/legacy_protocol.rs b/crates/proc-macro-api/src/legacy_protocol.rs
index ee96b89..0a72052 100644
--- a/crates/proc-macro-api/src/legacy_protocol.rs
+++ b/crates/proc-macro-api/src/legacy_protocol.rs
@@ -95,9 +95,10 @@
let mixed_site = span_data_table.insert_full(mixed_site).0;
let task = ExpandMacro {
data: ExpandMacroData {
- macro_body: FlatTree::new(subtree, version, &mut span_data_table),
+ macro_body: FlatTree::from_subtree(subtree, version, &mut span_data_table),
macro_name: proc_macro.name.to_string(),
- attributes: attr.map(|subtree| FlatTree::new(subtree, version, &mut span_data_table)),
+ attributes: attr
+ .map(|subtree| FlatTree::from_subtree(subtree, version, &mut span_data_table)),
has_global_spans: ExpnGlobals {
serialize: version >= version::HAS_GLOBAL_SPANS,
def_site,
diff --git a/crates/proc-macro-api/src/legacy_protocol/msg.rs b/crates/proc-macro-api/src/legacy_protocol/msg.rs
index b795c45..487f50b 100644
--- a/crates/proc-macro-api/src/legacy_protocol/msg.rs
+++ b/crates/proc-macro-api/src/legacy_protocol/msg.rs
@@ -297,7 +297,7 @@
let mut span_data_table = Default::default();
let task = ExpandMacro {
data: ExpandMacroData {
- macro_body: FlatTree::new(tt.view(), v, &mut span_data_table),
+ macro_body: FlatTree::from_subtree(tt.view(), v, &mut span_data_table),
macro_name: Default::default(),
attributes: None,
has_global_spans: ExpnGlobals {
diff --git a/crates/proc-macro-api/src/legacy_protocol/msg/flat.rs b/crates/proc-macro-api/src/legacy_protocol/msg/flat.rs
index fb3542d..7f19506 100644
--- a/crates/proc-macro-api/src/legacy_protocol/msg/flat.rs
+++ b/crates/proc-macro-api/src/legacy_protocol/msg/flat.rs
@@ -8,8 +8,7 @@
//! about performance here a bit.
//!
//! So what this module does is dumping a `tt::TopSubtree` into a bunch of flat
-//! array of numbers. See the test in the parent module to get an example
-//! output.
+//! array of numbers.
//!
//! ```json
//! {
@@ -35,6 +34,9 @@
//! as we don't have bincode in Cargo.toml yet, lets stick with serde_json for
//! the time being.
+#[cfg(feature = "sysroot-abi")]
+use proc_macro_srv::TokenStream;
+
use std::collections::VecDeque;
use intern::Symbol;
@@ -120,12 +122,12 @@
}
impl FlatTree {
- pub fn new(
+ pub fn from_subtree(
subtree: tt::SubtreeView<'_, Span>,
version: u32,
span_data_table: &mut SpanDataIndexMap,
) -> FlatTree {
- let mut w = Writer::<Span> {
+ let mut w = Writer::<Span, _> {
string_table: FxHashMap::default(),
work: VecDeque::new(),
span_data_table,
@@ -138,48 +140,7 @@
text: Vec::new(),
version,
};
- w.write(subtree);
-
- FlatTree {
- subtree: if version >= ENCODE_CLOSE_SPAN_VERSION {
- write_vec(w.subtree, SubtreeRepr::write_with_close_span)
- } else {
- write_vec(w.subtree, SubtreeRepr::write)
- },
- literal: if version >= EXTENDED_LEAF_DATA {
- write_vec(w.literal, LiteralRepr::write_with_kind)
- } else {
- write_vec(w.literal, LiteralRepr::write)
- },
- punct: write_vec(w.punct, PunctRepr::write),
- ident: if version >= EXTENDED_LEAF_DATA {
- write_vec(w.ident, IdentRepr::write_with_rawness)
- } else {
- write_vec(w.ident, IdentRepr::write)
- },
- token_tree: w.token_tree,
- text: w.text,
- }
- }
-
- pub fn new_raw<T: SpanTransformer<Table = ()>>(
- subtree: tt::SubtreeView<'_, T::Span>,
- version: u32,
- ) -> FlatTree {
- let mut w = Writer::<T> {
- string_table: FxHashMap::default(),
- work: VecDeque::new(),
- span_data_table: &mut (),
-
- subtree: Vec::new(),
- literal: Vec::new(),
- punct: Vec::new(),
- ident: Vec::new(),
- token_tree: Vec::new(),
- text: Vec::new(),
- version,
- };
- w.write(subtree);
+ w.write_subtree(subtree);
FlatTree {
subtree: if version >= ENCODE_CLOSE_SPAN_VERSION {
@@ -230,13 +191,119 @@
span_data_table,
version,
}
- .read()
+ .read_subtree()
+ }
+}
+
+#[cfg(feature = "sysroot-abi")]
+impl FlatTree {
+ pub fn from_tokenstream(
+ tokenstream: proc_macro_srv::TokenStream<Span>,
+ version: u32,
+ call_site: Span,
+ span_data_table: &mut SpanDataIndexMap,
+ ) -> FlatTree {
+ let mut w = Writer::<Span, _> {
+ string_table: FxHashMap::default(),
+ work: VecDeque::new(),
+ span_data_table,
+
+ subtree: Vec::new(),
+ literal: Vec::new(),
+ punct: Vec::new(),
+ ident: Vec::new(),
+ token_tree: Vec::new(),
+ text: Vec::new(),
+ version,
+ };
+ let group = proc_macro_srv::Group {
+ delimiter: proc_macro_srv::Delimiter::None,
+ stream: Some(tokenstream),
+ span: proc_macro_srv::DelimSpan {
+ open: call_site,
+ close: call_site,
+ entire: call_site,
+ },
+ };
+ w.write_tokenstream(&group);
+
+ FlatTree {
+ subtree: if version >= ENCODE_CLOSE_SPAN_VERSION {
+ write_vec(w.subtree, SubtreeRepr::write_with_close_span)
+ } else {
+ write_vec(w.subtree, SubtreeRepr::write)
+ },
+ literal: if version >= EXTENDED_LEAF_DATA {
+ write_vec(w.literal, LiteralRepr::write_with_kind)
+ } else {
+ write_vec(w.literal, LiteralRepr::write)
+ },
+ punct: write_vec(w.punct, PunctRepr::write),
+ ident: if version >= EXTENDED_LEAF_DATA {
+ write_vec(w.ident, IdentRepr::write_with_rawness)
+ } else {
+ write_vec(w.ident, IdentRepr::write)
+ },
+ token_tree: w.token_tree,
+ text: w.text,
+ }
}
- pub fn to_subtree_unresolved<T: SpanTransformer<Table = ()>>(
+ pub fn from_tokenstream_raw<T: SpanTransformer<Table = ()>>(
+ tokenstream: proc_macro_srv::TokenStream<T::Span>,
+ call_site: T::Span,
+ version: u32,
+ ) -> FlatTree {
+ let mut w = Writer::<T, _> {
+ string_table: FxHashMap::default(),
+ work: VecDeque::new(),
+ span_data_table: &mut (),
+
+ subtree: Vec::new(),
+ literal: Vec::new(),
+ punct: Vec::new(),
+ ident: Vec::new(),
+ token_tree: Vec::new(),
+ text: Vec::new(),
+ version,
+ };
+ let group = proc_macro_srv::Group {
+ delimiter: proc_macro_srv::Delimiter::None,
+ stream: Some(tokenstream),
+ span: proc_macro_srv::DelimSpan {
+ open: call_site,
+ close: call_site,
+ entire: call_site,
+ },
+ };
+ w.write_tokenstream(&group);
+
+ FlatTree {
+ subtree: if version >= ENCODE_CLOSE_SPAN_VERSION {
+ write_vec(w.subtree, SubtreeRepr::write_with_close_span)
+ } else {
+ write_vec(w.subtree, SubtreeRepr::write)
+ },
+ literal: if version >= EXTENDED_LEAF_DATA {
+ write_vec(w.literal, LiteralRepr::write_with_kind)
+ } else {
+ write_vec(w.literal, LiteralRepr::write)
+ },
+ punct: write_vec(w.punct, PunctRepr::write),
+ ident: if version >= EXTENDED_LEAF_DATA {
+ write_vec(w.ident, IdentRepr::write_with_rawness)
+ } else {
+ write_vec(w.ident, IdentRepr::write)
+ },
+ token_tree: w.token_tree,
+ text: w.text,
+ }
+ }
+
+ pub fn to_tokenstream_unresolved<T: SpanTransformer<Table = ()>>(
self,
version: u32,
- ) -> tt::TopSubtree<T::Span> {
+ ) -> proc_macro_srv::TokenStream<T::Span> {
Reader::<T> {
subtree: if version >= ENCODE_CLOSE_SPAN_VERSION {
read_vec(self.subtree, SubtreeRepr::read_with_close_span)
@@ -259,7 +326,37 @@
span_data_table: &(),
version,
}
- .read()
+ .read_tokenstream()
+ }
+
+ pub fn to_tokenstream_resolved(
+ self,
+ version: u32,
+ span_data_table: &SpanDataIndexMap,
+ ) -> proc_macro_srv::TokenStream<Span> {
+ Reader::<Span> {
+ subtree: if version >= ENCODE_CLOSE_SPAN_VERSION {
+ read_vec(self.subtree, SubtreeRepr::read_with_close_span)
+ } else {
+ read_vec(self.subtree, SubtreeRepr::read)
+ },
+ literal: if version >= EXTENDED_LEAF_DATA {
+ read_vec(self.literal, LiteralRepr::read_with_kind)
+ } else {
+ read_vec(self.literal, LiteralRepr::read)
+ },
+ punct: read_vec(self.punct, PunctRepr::read),
+ ident: if version >= EXTENDED_LEAF_DATA {
+ read_vec(self.ident, IdentRepr::read_with_rawness)
+ } else {
+ read_vec(self.ident, IdentRepr::read)
+ },
+ token_tree: self.token_tree,
+ text: self.text,
+ span_data_table,
+ version,
+ }
+ .read_tokenstream()
}
}
@@ -391,8 +488,8 @@
}
}
-struct Writer<'a, 'span, S: SpanTransformer> {
- work: VecDeque<(usize, tt::iter::TtIter<'a, S::Span>)>,
+struct Writer<'a, 'span, S: SpanTransformer, W> {
+ work: VecDeque<(usize, W)>,
string_table: FxHashMap<std::borrow::Cow<'a, str>, u32>,
span_data_table: &'span mut S::Table,
version: u32,
@@ -405,8 +502,8 @@
text: Vec<String>,
}
-impl<'a, T: SpanTransformer> Writer<'a, '_, T> {
- fn write(&mut self, root: tt::SubtreeView<'a, T::Span>) {
+impl<'a, T: SpanTransformer> Writer<'a, '_, T, tt::iter::TtIter<'a, T::Span>> {
+ fn write_subtree(&mut self, root: tt::SubtreeView<'a, T::Span>) {
let subtree = root.top_subtree();
self.enqueue(subtree, root.iter());
while let Some((idx, subtree)) = self.work.pop_front() {
@@ -414,10 +511,6 @@
}
}
- fn token_id_of(&mut self, span: T::Span) -> SpanId {
- T::token_id_of(self.span_data_table, span)
- }
-
fn subtree(&mut self, idx: usize, subtree: tt::iter::TtIter<'a, T::Span>) {
let mut first_tt = self.token_tree.len();
let n_tt = subtree.clone().count(); // FIXME: `count()` walks over the entire iterator.
@@ -502,6 +595,12 @@
self.work.push_back((idx, contents));
idx as u32
}
+}
+
+impl<'a, T: SpanTransformer, U> Writer<'a, '_, T, U> {
+ fn token_id_of(&mut self, span: T::Span) -> SpanId {
+ T::token_id_of(self.span_data_table, span)
+ }
pub(crate) fn intern(&mut self, text: &'a str) -> u32 {
let table = &mut self.text;
@@ -522,6 +621,105 @@
}
}
+#[cfg(feature = "sysroot-abi")]
+impl<'a, T: SpanTransformer> Writer<'a, '_, T, &'a proc_macro_srv::Group<T::Span>> {
+ fn write_tokenstream(&mut self, root: &'a proc_macro_srv::Group<T::Span>) {
+ self.enqueue_group(root);
+
+ while let Some((idx, group)) = self.work.pop_front() {
+ self.group(idx, group);
+ }
+ }
+
+ fn group(&mut self, idx: usize, group: &'a proc_macro_srv::Group<T::Span>) {
+ let mut first_tt = self.token_tree.len();
+ let n_tt = group.stream.as_ref().map_or(0, |it| it.len());
+ self.token_tree.resize(first_tt + n_tt, !0);
+
+ self.subtree[idx].tt = [first_tt as u32, (first_tt + n_tt) as u32];
+
+ for tt in group.stream.iter().flat_map(|it| it.iter()) {
+ let idx_tag = match tt {
+ proc_macro_srv::TokenTree::Group(group) => {
+ let idx = self.enqueue_group(group);
+ idx << 2
+ }
+ proc_macro_srv::TokenTree::Literal(lit) => {
+ let idx = self.literal.len() as u32;
+ let id = self.token_id_of(lit.span);
+ let (text, suffix) = if self.version >= EXTENDED_LEAF_DATA {
+ (
+ self.intern(lit.symbol.as_str()),
+ lit.suffix.as_ref().map(|s| self.intern(s.as_str())).unwrap_or(!0),
+ )
+ } else {
+ (self.intern_owned(proc_macro_srv::literal_to_string(lit)), !0)
+ };
+ self.literal.push(LiteralRepr {
+ id,
+ text,
+ kind: u16::from_le_bytes(match lit.kind {
+ proc_macro_srv::LitKind::ErrWithGuar => [0, 0],
+ proc_macro_srv::LitKind::Byte => [1, 0],
+ proc_macro_srv::LitKind::Char => [2, 0],
+ proc_macro_srv::LitKind::Integer => [3, 0],
+ proc_macro_srv::LitKind::Float => [4, 0],
+ proc_macro_srv::LitKind::Str => [5, 0],
+ proc_macro_srv::LitKind::StrRaw(r) => [6, r],
+ proc_macro_srv::LitKind::ByteStr => [7, 0],
+ proc_macro_srv::LitKind::ByteStrRaw(r) => [8, r],
+ proc_macro_srv::LitKind::CStr => [9, 0],
+ proc_macro_srv::LitKind::CStrRaw(r) => [10, r],
+ }),
+ suffix,
+ });
+ (idx << 2) | 0b01
+ }
+ proc_macro_srv::TokenTree::Punct(punct) => {
+ let idx = self.punct.len() as u32;
+ let id = self.token_id_of(punct.span);
+ self.punct.push(PunctRepr {
+ char: punct.ch as char,
+ spacing: if punct.joint { tt::Spacing::Joint } else { tt::Spacing::Alone },
+ id,
+ });
+ (idx << 2) | 0b10
+ }
+ proc_macro_srv::TokenTree::Ident(ident) => {
+ let idx = self.ident.len() as u32;
+ let id = self.token_id_of(ident.span);
+ let text = if self.version >= EXTENDED_LEAF_DATA {
+ self.intern(ident.sym.as_str())
+ } else if ident.is_raw {
+ self.intern_owned(format!("r#{}", ident.sym.as_str(),))
+ } else {
+ self.intern(ident.sym.as_str())
+ };
+ self.ident.push(IdentRepr { id, text, is_raw: ident.is_raw });
+ (idx << 2) | 0b11
+ }
+ };
+ self.token_tree[first_tt] = idx_tag;
+ first_tt += 1;
+ }
+ }
+
+ fn enqueue_group(&mut self, group: &'a proc_macro_srv::Group<T::Span>) -> u32 {
+ let idx = self.subtree.len();
+ let open = self.token_id_of(group.span.open);
+ let close = self.token_id_of(group.span.close);
+ let delimiter_kind = match group.delimiter {
+ proc_macro_srv::Delimiter::Parenthesis => tt::DelimiterKind::Parenthesis,
+ proc_macro_srv::Delimiter::Brace => tt::DelimiterKind::Brace,
+ proc_macro_srv::Delimiter::Bracket => tt::DelimiterKind::Bracket,
+ proc_macro_srv::Delimiter::None => tt::DelimiterKind::Invisible,
+ };
+ self.subtree.push(SubtreeRepr { open, close, kind: delimiter_kind, tt: [!0, !0] });
+ self.work.push_back((idx, group));
+ idx as u32
+ }
+}
+
struct Reader<'span, S: SpanTransformer> {
version: u32,
subtree: Vec<SubtreeRepr>,
@@ -534,7 +732,7 @@
}
impl<T: SpanTransformer> Reader<'_, T> {
- pub(crate) fn read(self) -> tt::TopSubtree<T::Span> {
+ pub(crate) fn read_subtree(self) -> tt::TopSubtree<T::Span> {
let mut res: Vec<Option<(tt::Delimiter<T::Span>, Vec<tt::TokenTree<T::Span>>)>> =
vec![None; self.subtree.len()];
let read_span = |id| T::span_for_token_id(self.span_data_table, id);
@@ -641,3 +839,122 @@
tt::TopSubtree(res.into_boxed_slice())
}
}
+
+#[cfg(feature = "sysroot-abi")]
+impl<T: SpanTransformer> Reader<'_, T> {
+ pub(crate) fn read_tokenstream(self) -> proc_macro_srv::TokenStream<T::Span> {
+ let mut res: Vec<Option<proc_macro_srv::Group<T::Span>>> = vec![None; self.subtree.len()];
+ let read_span = |id| T::span_for_token_id(self.span_data_table, id);
+ for i in (0..self.subtree.len()).rev() {
+ let repr = &self.subtree[i];
+ let token_trees = &self.token_tree[repr.tt[0] as usize..repr.tt[1] as usize];
+
+ let stream = token_trees
+ .iter()
+ .copied()
+ .map(|idx_tag| {
+ let tag = idx_tag & 0b11;
+ let idx = (idx_tag >> 2) as usize;
+ match tag {
+ // XXX: we iterate subtrees in reverse to guarantee
+ // that this unwrap doesn't fire.
+ 0b00 => proc_macro_srv::TokenTree::Group(res[idx].take().unwrap()),
+ 0b01 => {
+ let repr = &self.literal[idx];
+ let text = self.text[repr.text as usize].as_str();
+ let span = read_span(repr.id);
+ proc_macro_srv::TokenTree::Literal(
+ if self.version >= EXTENDED_LEAF_DATA {
+ proc_macro_srv::Literal {
+ symbol: Symbol::intern(text),
+ span,
+ kind: match u16::to_le_bytes(repr.kind) {
+ [0, _] => proc_macro_srv::LitKind::ErrWithGuar,
+ [1, _] => proc_macro_srv::LitKind::Byte,
+ [2, _] => proc_macro_srv::LitKind::Char,
+ [3, _] => proc_macro_srv::LitKind::Integer,
+ [4, _] => proc_macro_srv::LitKind::Float,
+ [5, _] => proc_macro_srv::LitKind::Str,
+ [6, r] => proc_macro_srv::LitKind::StrRaw(r),
+ [7, _] => proc_macro_srv::LitKind::ByteStr,
+ [8, r] => proc_macro_srv::LitKind::ByteStrRaw(r),
+ [9, _] => proc_macro_srv::LitKind::CStr,
+ [10, r] => proc_macro_srv::LitKind::CStrRaw(r),
+ _ => unreachable!(),
+ },
+ suffix: if repr.suffix != !0 {
+ Some(Symbol::intern(
+ self.text[repr.suffix as usize].as_str(),
+ ))
+ } else {
+ None
+ },
+ }
+ } else {
+ proc_macro_srv::literal_from_str(text, span).unwrap_or_else(
+ |_| proc_macro_srv::Literal {
+ symbol: Symbol::intern("internal error"),
+ span,
+ kind: proc_macro_srv::LitKind::ErrWithGuar,
+ suffix: None,
+ },
+ )
+ },
+ )
+ }
+ 0b10 => {
+ let repr = &self.punct[idx];
+ proc_macro_srv::TokenTree::Punct(proc_macro_srv::Punct {
+ ch: repr.char as u8,
+ joint: repr.spacing == tt::Spacing::Joint,
+ span: read_span(repr.id),
+ })
+ }
+ 0b11 => {
+ let repr = &self.ident[idx];
+ let text = self.text[repr.text as usize].as_str();
+ let (is_raw, text) = if self.version >= EXTENDED_LEAF_DATA {
+ (
+ if repr.is_raw {
+ tt::IdentIsRaw::Yes
+ } else {
+ tt::IdentIsRaw::No
+ },
+ text,
+ )
+ } else {
+ tt::IdentIsRaw::split_from_symbol(text)
+ };
+ proc_macro_srv::TokenTree::Ident(proc_macro_srv::Ident {
+ sym: Symbol::intern(text),
+ span: read_span(repr.id),
+ is_raw: is_raw.yes(),
+ })
+ }
+ other => panic!("bad tag: {other}"),
+ }
+ })
+ .collect::<Vec<_>>();
+ let g = proc_macro_srv::Group {
+ delimiter: match repr.kind {
+ tt::DelimiterKind::Parenthesis => proc_macro_srv::Delimiter::Parenthesis,
+ tt::DelimiterKind::Brace => proc_macro_srv::Delimiter::Brace,
+ tt::DelimiterKind::Bracket => proc_macro_srv::Delimiter::Bracket,
+ tt::DelimiterKind::Invisible => proc_macro_srv::Delimiter::None,
+ },
+ stream: if stream.is_empty() { None } else { Some(TokenStream::new(stream)) },
+ span: proc_macro_srv::DelimSpan {
+ open: read_span(repr.open),
+ close: read_span(repr.close),
+                        // FIXME: the wire format has no distinct "entire" span; approximate it with the close span
+ entire: read_span(repr.close),
+ },
+ };
+ res[i] = Some(g);
+ }
+        // FIXME: verify the invariant that the root group always ends up at res[0] (subtrees are written root-first)
+ proc_macro_srv::TokenStream::new(vec![proc_macro_srv::TokenTree::Group(
+ res[0].take().unwrap(),
+ )])
+ }
+}
diff --git a/crates/proc-macro-api/src/lib.rs b/crates/proc-macro-api/src/lib.rs
index 97919b8..870d81f 100644
--- a/crates/proc-macro-api/src/lib.rs
+++ b/crates/proc-macro-api/src/lib.rs
@@ -5,6 +5,13 @@
//! is used to provide basic infrastructure for communication between two
//! processes: Client (RA itself), Server (the external program)
+#![cfg_attr(not(feature = "sysroot-abi"), allow(unused_crate_dependencies))]
+#![cfg_attr(
+ feature = "sysroot-abi",
+ feature(proc_macro_internals, proc_macro_diagnostic, proc_macro_span)
+)]
+#![allow(internal_features)]
+
pub mod legacy_protocol;
mod process;
diff --git a/crates/proc-macro-srv-cli/Cargo.toml b/crates/proc-macro-srv-cli/Cargo.toml
index 91e9e62..dd31e74 100644
--- a/crates/proc-macro-srv-cli/Cargo.toml
+++ b/crates/proc-macro-srv-cli/Cargo.toml
@@ -19,7 +19,7 @@
[features]
default = ["postcard"]
-sysroot-abi = ["proc-macro-srv/sysroot-abi"]
+sysroot-abi = ["proc-macro-srv/sysroot-abi", "proc-macro-api/sysroot-abi"]
in-rust-tree = ["proc-macro-srv/in-rust-tree", "sysroot-abi"]
postcard = ["dep:postcard"]
diff --git a/crates/proc-macro-srv-cli/build.rs b/crates/proc-macro-srv-cli/build.rs
index 12e7c8b..bcf639f 100644
--- a/crates/proc-macro-srv-cli/build.rs
+++ b/crates/proc-macro-srv-cli/build.rs
@@ -5,7 +5,6 @@
fn main() {
set_rerun();
set_commit_info();
- println!("cargo::rustc-check-cfg=cfg(rust_analyzer)");
}
fn set_rerun() {
diff --git a/crates/proc-macro-srv-cli/src/main.rs b/crates/proc-macro-srv-cli/src/main.rs
index 662d348..9d74fa6 100644
--- a/crates/proc-macro-srv-cli/src/main.rs
+++ b/crates/proc-macro-srv-cli/src/main.rs
@@ -9,10 +9,10 @@
mod version;
-#[cfg(any(feature = "sysroot-abi", rust_analyzer))]
+#[cfg(feature = "sysroot-abi")]
mod main_loop;
use clap::{Command, ValueEnum};
-#[cfg(any(feature = "sysroot-abi", rust_analyzer))]
+#[cfg(feature = "sysroot-abi")]
use main_loop::run;
fn main() -> std::io::Result<()> {
@@ -77,7 +77,7 @@
}
}
-#[cfg(not(any(feature = "sysroot-abi", rust_analyzer)))]
+#[cfg(not(feature = "sysroot-abi"))]
fn run(_: ProtocolFormat) -> std::io::Result<()> {
Err(std::io::Error::new(
std::io::ErrorKind::Unsupported,
diff --git a/crates/proc-macro-srv-cli/src/main_loop.rs b/crates/proc-macro-srv-cli/src/main_loop.rs
index 703bc96..5533107 100644
--- a/crates/proc-macro-srv-cli/src/main_loop.rs
+++ b/crates/proc-macro-srv-cli/src/main_loop.rs
@@ -91,9 +91,10 @@
let mixed_site = SpanId(mixed_site as u32);
let macro_body =
- macro_body.to_subtree_unresolved::<SpanTrans>(CURRENT_API_VERSION);
- let attributes = attributes
- .map(|it| it.to_subtree_unresolved::<SpanTrans>(CURRENT_API_VERSION));
+ macro_body.to_tokenstream_unresolved::<SpanTrans>(CURRENT_API_VERSION);
+ let attributes = attributes.map(|it| {
+ it.to_tokenstream_unresolved::<SpanTrans>(CURRENT_API_VERSION)
+ });
srv.expand(
lib,
@@ -107,8 +108,9 @@
mixed_site,
)
.map(|it| {
- msg::FlatTree::new_raw::<SpanTrans>(
- tt::SubtreeView::new(&it),
+ msg::FlatTree::from_tokenstream_raw::<SpanTrans>(
+ it,
+ call_site,
CURRENT_API_VERSION,
)
})
@@ -122,10 +124,10 @@
let call_site = span_data_table[call_site];
let mixed_site = span_data_table[mixed_site];
- let macro_body =
- macro_body.to_subtree_resolved(CURRENT_API_VERSION, &span_data_table);
+ let macro_body = macro_body
+ .to_tokenstream_resolved(CURRENT_API_VERSION, &span_data_table);
let attributes = attributes.map(|it| {
- it.to_subtree_resolved(CURRENT_API_VERSION, &span_data_table)
+ it.to_tokenstream_resolved(CURRENT_API_VERSION, &span_data_table)
});
srv.expand(
lib,
@@ -140,9 +142,10 @@
)
.map(|it| {
(
- msg::FlatTree::new(
- tt::SubtreeView::new(&it),
+ msg::FlatTree::from_tokenstream(
+ it,
CURRENT_API_VERSION,
+ call_site,
&mut span_data_table,
),
serialize_span_data_index_map(&span_data_table),
diff --git a/crates/proc-macro-srv/Cargo.toml b/crates/proc-macro-srv/Cargo.toml
index d037e71..3610171 100644
--- a/crates/proc-macro-srv/Cargo.toml
+++ b/crates/proc-macro-srv/Cargo.toml
@@ -18,8 +18,6 @@
memmap2.workspace = true
temp-dir.workspace = true
-tt.workspace = true
-syntax-bridge.workspace = true
paths.workspace = true
# span = {workspace = true, default-features = false} does not work
span = { path = "../span", version = "0.0.0", default-features = false}
@@ -38,8 +36,9 @@
proc-macro-test.path = "./proc-macro-test"
[features]
+default = []
sysroot-abi = []
-in-rust-tree = ["syntax-bridge/in-rust-tree", "tt/in-rust-tree", "sysroot-abi"]
+in-rust-tree = ["sysroot-abi"]
[lints]
workspace = true
diff --git a/crates/proc-macro-srv/build.rs b/crates/proc-macro-srv/build.rs
index 97c0c4b..4cf1820 100644
--- a/crates/proc-macro-srv/build.rs
+++ b/crates/proc-macro-srv/build.rs
@@ -4,8 +4,6 @@
use std::{env, process::Command};
fn main() {
- println!("cargo::rustc-check-cfg=cfg(rust_analyzer)");
-
let rustc = env::var("RUSTC").expect("proc-macro-srv's build script expects RUSTC to be set");
#[allow(clippy::disallowed_methods)]
let output = Command::new(rustc).arg("--version").output().expect("rustc --version must run");
diff --git a/crates/proc-macro-srv/proc-macro-test/Cargo.toml b/crates/proc-macro-srv/proc-macro-test/Cargo.toml
index bc04482..78630dd 100644
--- a/crates/proc-macro-srv/proc-macro-test/Cargo.toml
+++ b/crates/proc-macro-srv/proc-macro-test/Cargo.toml
@@ -10,4 +10,4 @@
doctest = false
[build-dependencies]
-cargo_metadata = "0.20.0"
+cargo_metadata = "0.23.0"
diff --git a/crates/proc-macro-srv/src/bridge.rs b/crates/proc-macro-srv/src/bridge.rs
new file mode 100644
index 0000000..fc063a0
--- /dev/null
+++ b/crates/proc-macro-srv/src/bridge.rs
@@ -0,0 +1,12 @@
+//! `proc_macro::bridge` newtypes.
+
+use proc_macro::bridge as pm_bridge;
+
+pub use pm_bridge::{DelimSpan, Diagnostic, ExpnGlobals, LitKind};
+
+pub type TokenTree<S> =
+ pm_bridge::TokenTree<crate::token_stream::TokenStream<S>, S, intern::Symbol>;
+pub type Literal<S> = pm_bridge::Literal<S, intern::Symbol>;
+pub type Group<S> = pm_bridge::Group<crate::token_stream::TokenStream<S>, S>;
+pub type Punct<S> = pm_bridge::Punct<S>;
+pub type Ident<S> = pm_bridge::Ident<S, intern::Symbol>;
diff --git a/crates/proc-macro-srv/src/dylib.rs b/crates/proc-macro-srv/src/dylib.rs
index c8513a1..0343319 100644
--- a/crates/proc-macro-srv/src/dylib.rs
+++ b/crates/proc-macro-srv/src/dylib.rs
@@ -13,7 +13,7 @@
use crate::{
PanicMessage, ProcMacroKind, ProcMacroSrvSpan, dylib::proc_macros::ProcMacros,
- server_impl::TopSubtree,
+ token_stream::TokenStream,
};
pub(crate) struct Expander {
@@ -40,18 +40,18 @@
pub(crate) fn expand<S: ProcMacroSrvSpan>(
&self,
macro_name: &str,
- macro_body: TopSubtree<S>,
- attributes: Option<TopSubtree<S>>,
+ macro_body: TokenStream<S>,
+ attribute: Option<TokenStream<S>>,
def_site: S,
call_site: S,
mixed_site: S,
- ) -> Result<TopSubtree<S>, PanicMessage>
+ ) -> Result<TokenStream<S>, PanicMessage>
where
<S::Server as bridge::server::Types>::TokenStream: Default,
{
self.inner
.proc_macros
- .expand(macro_name, macro_body, attributes, def_site, call_site, mixed_site)
+ .expand(macro_name, macro_body, attribute, def_site, call_site, mixed_site)
}
pub(crate) fn list_macros(&self) -> impl Iterator<Item = (&str, ProcMacroKind)> {
diff --git a/crates/proc-macro-srv/src/dylib/proc_macros.rs b/crates/proc-macro-srv/src/dylib/proc_macros.rs
index 9b5721e..c879c76 100644
--- a/crates/proc-macro-srv/src/dylib/proc_macros.rs
+++ b/crates/proc-macro-srv/src/dylib/proc_macros.rs
@@ -2,7 +2,7 @@
use proc_macro::bridge;
-use crate::{ProcMacroKind, ProcMacroSrvSpan, server_impl::TopSubtree};
+use crate::{ProcMacroKind, ProcMacroSrvSpan, token_stream::TokenStream};
#[repr(transparent)]
pub(crate) struct ProcMacros([bridge::client::ProcMacro]);
@@ -17,18 +17,13 @@
pub(crate) fn expand<S: ProcMacroSrvSpan>(
&self,
macro_name: &str,
- macro_body: TopSubtree<S>,
- attributes: Option<TopSubtree<S>>,
+ macro_body: TokenStream<S>,
+ attribute: Option<TokenStream<S>>,
def_site: S,
call_site: S,
mixed_site: S,
- ) -> Result<TopSubtree<S>, crate::PanicMessage> {
- let parsed_body = crate::server_impl::TokenStream::with_subtree(macro_body);
-
- let parsed_attributes = attributes
- .map_or_else(crate::server_impl::TokenStream::default, |attr| {
- crate::server_impl::TokenStream::with_subtree(attr)
- });
+ ) -> Result<TokenStream<S>, crate::PanicMessage> {
+ let parsed_attributes = attribute.unwrap_or_default();
for proc_macro in &self.0 {
match proc_macro {
@@ -38,35 +33,29 @@
let res = client.run(
&bridge::server::SameThread,
S::make_server(call_site, def_site, mixed_site),
- parsed_body,
+ macro_body,
cfg!(debug_assertions),
);
- return res
- .map(|it| it.into_subtree(call_site))
- .map_err(crate::PanicMessage::from);
+ return res.map_err(crate::PanicMessage::from);
}
bridge::client::ProcMacro::Bang { name, client } if *name == macro_name => {
let res = client.run(
&bridge::server::SameThread,
S::make_server(call_site, def_site, mixed_site),
- parsed_body,
+ macro_body,
cfg!(debug_assertions),
);
- return res
- .map(|it| it.into_subtree(call_site))
- .map_err(crate::PanicMessage::from);
+ return res.map_err(crate::PanicMessage::from);
}
bridge::client::ProcMacro::Attr { name, client } if *name == macro_name => {
let res = client.run(
&bridge::server::SameThread,
S::make_server(call_site, def_site, mixed_site),
parsed_attributes,
- parsed_body,
+ macro_body,
cfg!(debug_assertions),
);
- return res
- .map(|it| it.into_subtree(call_site))
- .map_err(crate::PanicMessage::from);
+ return res.map_err(crate::PanicMessage::from);
}
_ => continue,
}
diff --git a/crates/proc-macro-srv/src/dylib/version.rs b/crates/proc-macro-srv/src/dylib/version.rs
index 3b2551f..209693b 100644
--- a/crates/proc-macro-srv/src/dylib/version.rs
+++ b/crates/proc-macro-srv/src/dylib/version.rs
@@ -5,11 +5,14 @@
use object::read::{Object, ObjectSection};
#[derive(Debug)]
-#[allow(dead_code)]
pub struct RustCInfo {
+ #[allow(dead_code)]
pub version: (usize, usize, usize),
+ #[allow(dead_code)]
pub channel: String,
+ #[allow(dead_code)]
pub commit: Option<String>,
+ #[allow(dead_code)]
pub date: Option<String>,
// something like "rustc 1.58.1 (db9d1b20b 2022-01-20)"
pub version_string: String,
diff --git a/crates/proc-macro-srv/src/lib.rs b/crates/proc-macro-srv/src/lib.rs
index cb97882..aff4dc5 100644
--- a/crates/proc-macro-srv/src/lib.rs
+++ b/crates/proc-macro-srv/src/lib.rs
@@ -10,11 +10,16 @@
//! * By **copying** the whole rustc `lib_proc_macro` code, we are able to build this with `stable`
//! rustc rather than `unstable`. (Although in general ABI compatibility is still an issue)…
-#![cfg(any(feature = "sysroot-abi", rust_analyzer))]
-#![cfg_attr(not(feature = "sysroot-abi"), allow(unused_crate_dependencies))]
+#![cfg(feature = "sysroot-abi")]
#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
#![feature(proc_macro_internals, proc_macro_diagnostic, proc_macro_span)]
-#![allow(unreachable_pub, internal_features, clippy::disallowed_types, clippy::print_stderr)]
+#![allow(
+ unreachable_pub,
+ internal_features,
+ clippy::disallowed_types,
+ clippy::print_stderr,
+ unused_crate_dependencies
+)]
#![deny(deprecated_safe, clippy::undocumented_unsafe_blocks)]
extern crate proc_macro;
@@ -26,8 +31,10 @@
#[cfg(feature = "in-rust-tree")]
extern crate rustc_lexer;
+mod bridge;
mod dylib;
mod server_impl;
+mod token_stream;
use std::{
collections::{HashMap, hash_map::Entry},
@@ -43,10 +50,14 @@
use span::Span;
use temp_dir::TempDir;
-use crate::server_impl::TokenStream;
-
pub use crate::server_impl::token_id::SpanId;
+pub use proc_macro::Delimiter;
+
+pub use crate::bridge::*;
+pub use crate::server_impl::literal_from_str;
+pub use crate::token_stream::{TokenStream, literal_to_string};
+
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
pub enum ProcMacroKind {
CustomDerive,
@@ -81,12 +92,12 @@
env: &[(String, String)],
current_dir: Option<impl AsRef<Path>>,
macro_name: &str,
- macro_body: tt::TopSubtree<S>,
- attribute: Option<tt::TopSubtree<S>>,
+ macro_body: token_stream::TokenStream<S>,
+ attribute: Option<token_stream::TokenStream<S>>,
def_site: S,
call_site: S,
mixed_site: S,
- ) -> Result<Vec<tt::TokenTree<S>>, PanicMessage> {
+ ) -> Result<token_stream::TokenStream<S>, PanicMessage> {
let snapped_env = self.env;
let expander = self.expander(lib.as_ref()).map_err(|err| PanicMessage {
message: Some(format!("failed to load macro: {err}")),
@@ -102,15 +113,7 @@
.name(macro_name.to_owned())
.spawn_scoped(s, move || {
expander
- .expand(
- macro_name,
- server_impl::TopSubtree(macro_body.0.into_vec()),
- attribute.map(|it| server_impl::TopSubtree(it.0.into_vec())),
- def_site,
- call_site,
- mixed_site,
- )
- .map(|tt| tt.0)
+ .expand(macro_name, macro_body, attribute, def_site, call_site, mixed_site)
});
match thread.unwrap().join() {
Ok(res) => res,
@@ -157,8 +160,8 @@
}
}
-pub trait ProcMacroSrvSpan: Copy + Send {
- type Server: proc_macro::bridge::server::Server<TokenStream = TokenStream<Self>>;
+pub trait ProcMacroSrvSpan: Copy + Send + Sync {
+ type Server: proc_macro::bridge::server::Server<TokenStream = crate::token_stream::TokenStream<Self>>;
fn make_server(call_site: Self, def_site: Self, mixed_site: Self) -> Self::Server;
}
diff --git a/crates/proc-macro-srv/src/server_impl.rs b/crates/proc-macro-srv/src/server_impl.rs
index 32ad327..bacead1 100644
--- a/crates/proc-macro-srv/src/server_impl.rs
+++ b/crates/proc-macro-srv/src/server_impl.rs
@@ -5,122 +5,15 @@
//! we could provide any TokenStream implementation.
//! The original idea from fedochet is using proc-macro2 as backend,
//! we use tt instead for better integration with RA.
-//!
-//! FIXME: No span and source file information is implemented yet
-use std::fmt;
+pub(crate) mod rust_analyzer_span;
+pub(crate) mod token_id;
-use intern::Symbol;
-use proc_macro::bridge;
-
-mod token_stream;
-pub use token_stream::TokenStream;
-
-pub mod rust_analyzer_span;
-pub mod token_id;
-
-use tt::Spacing;
-
-#[derive(Clone)]
-pub(crate) struct TopSubtree<S>(pub(crate) Vec<tt::TokenTree<S>>);
-
-impl<S: Copy + fmt::Debug> fmt::Debug for TopSubtree<S> {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- fmt::Debug::fmt(&tt::TokenTreesView::new(&self.0), f)
- }
-}
-
-impl<S: Copy> TopSubtree<S> {
- pub(crate) fn top_subtree(&self) -> &tt::Subtree<S> {
- let tt::TokenTree::Subtree(subtree) = &self.0[0] else {
- unreachable!("the first token tree is always the top subtree");
- };
- subtree
- }
-
- pub(crate) fn from_bridge(group: bridge::Group<TokenStream<S>, S>) -> Self {
- let delimiter = delim_to_internal(group.delimiter, group.span);
- let mut tts =
- group.stream.map(|it| it.token_trees).unwrap_or_else(|| Vec::with_capacity(1));
- tts.insert(0, tt::TokenTree::Subtree(tt::Subtree { delimiter, len: tts.len() as u32 }));
- TopSubtree(tts)
- }
-}
-
-fn delim_to_internal<S>(d: proc_macro::Delimiter, span: bridge::DelimSpan<S>) -> tt::Delimiter<S> {
- let kind = match d {
- proc_macro::Delimiter::Parenthesis => tt::DelimiterKind::Parenthesis,
- proc_macro::Delimiter::Brace => tt::DelimiterKind::Brace,
- proc_macro::Delimiter::Bracket => tt::DelimiterKind::Bracket,
- proc_macro::Delimiter::None => tt::DelimiterKind::Invisible,
- };
- tt::Delimiter { open: span.open, close: span.close, kind }
-}
-
-fn delim_to_external<S>(d: tt::Delimiter<S>) -> proc_macro::Delimiter {
- match d.kind {
- tt::DelimiterKind::Parenthesis => proc_macro::Delimiter::Parenthesis,
- tt::DelimiterKind::Brace => proc_macro::Delimiter::Brace,
- tt::DelimiterKind::Bracket => proc_macro::Delimiter::Bracket,
- tt::DelimiterKind::Invisible => proc_macro::Delimiter::None,
- }
-}
-
-#[allow(unused)]
-fn spacing_to_internal(spacing: proc_macro::Spacing) -> Spacing {
- match spacing {
- proc_macro::Spacing::Alone => Spacing::Alone,
- proc_macro::Spacing::Joint => Spacing::Joint,
- }
-}
-
-#[allow(unused)]
-fn spacing_to_external(spacing: Spacing) -> proc_macro::Spacing {
- match spacing {
- Spacing::Alone | Spacing::JointHidden => proc_macro::Spacing::Alone,
- Spacing::Joint => proc_macro::Spacing::Joint,
- }
-}
-
-fn literal_kind_to_external(kind: tt::LitKind) -> bridge::LitKind {
- match kind {
- tt::LitKind::Byte => bridge::LitKind::Byte,
- tt::LitKind::Char => bridge::LitKind::Char,
- tt::LitKind::Integer => bridge::LitKind::Integer,
- tt::LitKind::Float => bridge::LitKind::Float,
- tt::LitKind::Str => bridge::LitKind::Str,
- tt::LitKind::StrRaw(r) => bridge::LitKind::StrRaw(r),
- tt::LitKind::ByteStr => bridge::LitKind::ByteStr,
- tt::LitKind::ByteStrRaw(r) => bridge::LitKind::ByteStrRaw(r),
- tt::LitKind::CStr => bridge::LitKind::CStr,
- tt::LitKind::CStrRaw(r) => bridge::LitKind::CStrRaw(r),
- tt::LitKind::Err(_) => bridge::LitKind::ErrWithGuar,
- }
-}
-
-fn literal_kind_to_internal(kind: bridge::LitKind) -> tt::LitKind {
- match kind {
- bridge::LitKind::Byte => tt::LitKind::Byte,
- bridge::LitKind::Char => tt::LitKind::Char,
- bridge::LitKind::Str => tt::LitKind::Str,
- bridge::LitKind::StrRaw(r) => tt::LitKind::StrRaw(r),
- bridge::LitKind::ByteStr => tt::LitKind::ByteStr,
- bridge::LitKind::ByteStrRaw(r) => tt::LitKind::ByteStrRaw(r),
- bridge::LitKind::CStr => tt::LitKind::CStr,
- bridge::LitKind::CStrRaw(r) => tt::LitKind::CStrRaw(r),
- bridge::LitKind::Integer => tt::LitKind::Integer,
- bridge::LitKind::Float => tt::LitKind::Float,
- bridge::LitKind::ErrWithGuar => tt::LitKind::Err(()),
- }
-}
-
-pub(super) fn literal_from_str<Span: Copy>(
+pub fn literal_from_str<Span: Copy>(
s: &str,
span: Span,
-) -> Result<bridge::Literal<Span, Symbol>, ()> {
- use proc_macro::bridge::LitKind;
+) -> Result<crate::bridge::Literal<Span>, ()> {
use rustc_lexer::{LiteralKind, Token, TokenKind};
-
let mut tokens = rustc_lexer::tokenize(s, rustc_lexer::FrontmatterAllowed::No);
let minus_or_lit = tokens.next().unwrap_or(Token { kind: TokenKind::Eof, len: 0 });
@@ -142,98 +35,5 @@
}
let TokenKind::Literal { kind, suffix_start } = lit.kind else { return Err(()) };
- let (kind, start_offset, end_offset) = match kind {
- LiteralKind::Int { .. } => (LitKind::Integer, 0, 0),
- LiteralKind::Float { .. } => (LitKind::Float, 0, 0),
- LiteralKind::Char { terminated } => (LitKind::Char, 1, terminated as usize),
- LiteralKind::Byte { terminated } => (LitKind::Byte, 2, terminated as usize),
- LiteralKind::Str { terminated } => (LitKind::Str, 1, terminated as usize),
- LiteralKind::ByteStr { terminated } => (LitKind::ByteStr, 2, terminated as usize),
- LiteralKind::CStr { terminated } => (LitKind::CStr, 2, terminated as usize),
- LiteralKind::RawStr { n_hashes } => (
- LitKind::StrRaw(n_hashes.unwrap_or_default()),
- 2 + n_hashes.unwrap_or_default() as usize,
- 1 + n_hashes.unwrap_or_default() as usize,
- ),
- LiteralKind::RawByteStr { n_hashes } => (
- LitKind::ByteStrRaw(n_hashes.unwrap_or_default()),
- 3 + n_hashes.unwrap_or_default() as usize,
- 1 + n_hashes.unwrap_or_default() as usize,
- ),
- LiteralKind::RawCStr { n_hashes } => (
- LitKind::CStrRaw(n_hashes.unwrap_or_default()),
- 3 + n_hashes.unwrap_or_default() as usize,
- 1 + n_hashes.unwrap_or_default() as usize,
- ),
- };
-
- let (lit, suffix) = s.split_at(suffix_start as usize);
- let lit = &lit[start_offset..lit.len() - end_offset];
- let suffix = match suffix {
- "" | "_" => None,
- suffix => Some(Symbol::intern(suffix)),
- };
-
- Ok(bridge::Literal { kind, symbol: Symbol::intern(lit), suffix, span })
-}
-
-pub(super) fn from_token_tree<Span: Copy>(
- tree: bridge::TokenTree<TokenStream<Span>, Span, Symbol>,
-) -> TokenStream<Span> {
- match tree {
- bridge::TokenTree::Group(group) => {
- let group = TopSubtree::from_bridge(group);
- TokenStream { token_trees: group.0 }
- }
-
- bridge::TokenTree::Ident(ident) => {
- let text = ident.sym;
- let ident: tt::Ident<Span> = tt::Ident {
- sym: text,
- span: ident.span,
- is_raw: if ident.is_raw { tt::IdentIsRaw::Yes } else { tt::IdentIsRaw::No },
- };
- let leaf = tt::Leaf::from(ident);
- let tree = tt::TokenTree::from(leaf);
- TokenStream { token_trees: vec![tree] }
- }
-
- bridge::TokenTree::Literal(literal) => {
- let mut token_trees = Vec::new();
- let mut symbol = literal.symbol;
- if matches!(
- literal.kind,
- proc_macro::bridge::LitKind::Integer | proc_macro::bridge::LitKind::Float
- ) && symbol.as_str().starts_with('-')
- {
- token_trees.push(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
- spacing: tt::Spacing::Alone,
- span: literal.span,
- char: '-',
- })));
- symbol = Symbol::intern(&symbol.as_str()[1..]);
- }
- let literal = tt::Literal {
- symbol,
- suffix: literal.suffix,
- span: literal.span,
- kind: literal_kind_to_internal(literal.kind),
- };
- let leaf: tt::Leaf<Span> = tt::Leaf::from(literal);
- let tree = tt::TokenTree::from(leaf);
- token_trees.push(tree);
- TokenStream { token_trees }
- }
-
- bridge::TokenTree::Punct(p) => {
- let punct = tt::Punct {
- char: p.ch as char,
- spacing: if p.joint { tt::Spacing::Joint } else { tt::Spacing::Alone },
- span: p.span,
- };
- let leaf = tt::Leaf::from(punct);
- let tree = tt::TokenTree::from(leaf);
- TokenStream { token_trees: vec![tree] }
- }
- }
+ Ok(crate::token_stream::literal_from_lexer(s, span, kind, suffix_start))
}
diff --git a/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs b/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs
index a1863ef..7c685c2d 100644
--- a/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs
+++ b/crates/proc-macro-srv/src/server_impl/rust_analyzer_span.rs
@@ -10,13 +10,13 @@
};
use intern::Symbol;
-use proc_macro::bridge::{self, server};
-use span::{FIXUP_ERASED_FILE_AST_ID_MARKER, Span};
-use tt::{TextRange, TextSize};
+use proc_macro::bridge::server;
+use span::{FIXUP_ERASED_FILE_AST_ID_MARKER, Span, TextRange, TextSize};
-use crate::server_impl::{from_token_tree, literal_from_str, token_stream::TokenStreamBuilder};
-
-type TokenStream = crate::server_impl::TokenStream<Span>;
+use crate::{
+ bridge::{Diagnostic, ExpnGlobals, Literal, TokenTree},
+ server_impl::literal_from_str,
+};
pub struct FreeFunctions;
@@ -32,7 +32,7 @@
impl server::Types for RaSpanServer {
type FreeFunctions = FreeFunctions;
- type TokenStream = TokenStream;
+ type TokenStream = crate::token_stream::TokenStream<Span>;
type Span = Span;
type Symbol = Symbol;
}
@@ -49,14 +49,11 @@
self.tracked_paths.insert(path.into());
}
- fn literal_from_str(
- &mut self,
- s: &str,
- ) -> Result<bridge::Literal<Self::Span, Self::Symbol>, ()> {
+ fn literal_from_str(&mut self, s: &str) -> Result<Literal<Self::Span>, ()> {
literal_from_str(s, self.call_site)
}
- fn emit_diagnostic(&mut self, _: bridge::Diagnostic<Self::Span>) {
+ fn emit_diagnostic(&mut self, _: Diagnostic<Self::Span>) {
// FIXME handle diagnostic
}
}
@@ -77,11 +74,9 @@
fn to_string(&mut self, stream: &Self::TokenStream) -> String {
stream.to_string()
}
- fn from_token_tree(
- &mut self,
- tree: bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>,
- ) -> Self::TokenStream {
- from_token_tree(tree)
+
+ fn from_token_tree(&mut self, tree: TokenTree<Self::Span>) -> Self::TokenStream {
+ Self::TokenStream::new(vec![tree])
}
fn expand_expr(&mut self, self_: &Self::TokenStream) -> Result<Self::TokenStream, ()> {
@@ -96,16 +91,17 @@
fn concat_trees(
&mut self,
base: Option<Self::TokenStream>,
- trees: Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>>,
+ trees: Vec<TokenTree<Self::Span>>,
) -> Self::TokenStream {
- let mut builder = TokenStreamBuilder::default();
- if let Some(base) = base {
- builder.push(base);
+ match base {
+ Some(mut base) => {
+ for tt in trees {
+ base.push_tree(tt);
+ }
+ base
+ }
+ None => Self::TokenStream::new(trees),
}
- for tree in trees {
- builder.push(self.from_token_tree(tree));
- }
- builder.build()
}
fn concat_streams(
@@ -113,23 +109,15 @@
base: Option<Self::TokenStream>,
streams: Vec<Self::TokenStream>,
) -> Self::TokenStream {
- let mut builder = TokenStreamBuilder::default();
- if let Some(base) = base {
- builder.push(base);
+ let mut stream = base.unwrap_or_default();
+ for s in streams {
+ stream.push_stream(s);
}
- for stream in streams {
- builder.push(stream);
- }
- builder.build()
+ stream
}
- fn into_trees(
- &mut self,
- stream: Self::TokenStream,
- ) -> Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>> {
- stream.into_bridge(&mut |first, second| {
- server::Span::join(self, first, second).unwrap_or(first)
- })
+ fn into_trees(&mut self, stream: Self::TokenStream) -> Vec<TokenTree<Self::Span>> {
+ (*stream.0).clone()
}
}
@@ -289,8 +277,8 @@
}
impl server::Server for RaSpanServer {
- fn globals(&mut self) -> bridge::ExpnGlobals<Self::Span> {
- bridge::ExpnGlobals {
+ fn globals(&mut self) -> ExpnGlobals<Self::Span> {
+ ExpnGlobals {
def_site: self.def_site,
call_site: self.call_site,
mixed_site: self.mixed_site,
@@ -305,97 +293,3 @@
f(symbol.as_str())
}
}
-
-#[cfg(test)]
-mod tests {
- use span::{EditionedFileId, FileId, SyntaxContext};
-
- use super::*;
-
- #[test]
- fn test_ra_server_to_string() {
- let span = Span {
- range: TextRange::empty(TextSize::new(0)),
- anchor: span::SpanAnchor {
- file_id: EditionedFileId::current_edition(FileId::from_raw(0)),
- ast_id: span::ROOT_ERASED_FILE_AST_ID,
- },
- ctx: SyntaxContext::root(span::Edition::CURRENT),
- };
- let s = TokenStream {
- token_trees: vec![
- tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
- sym: Symbol::intern("struct"),
- span,
- is_raw: tt::IdentIsRaw::No,
- })),
- tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
- sym: Symbol::intern("T"),
- span,
- is_raw: tt::IdentIsRaw::No,
- })),
- tt::TokenTree::Subtree(tt::Subtree {
- delimiter: tt::Delimiter {
- open: span,
- close: span,
- kind: tt::DelimiterKind::Brace,
- },
- len: 1,
- }),
- tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
- kind: tt::LitKind::Str,
- symbol: Symbol::intern("string"),
- suffix: None,
- span,
- })),
- ],
- };
-
- assert_eq!(s.to_string(), "struct T {\"string\"}");
- }
-
- #[test]
- fn test_ra_server_from_str() {
- let span = Span {
- range: TextRange::empty(TextSize::new(0)),
- anchor: span::SpanAnchor {
- file_id: EditionedFileId::current_edition(FileId::from_raw(0)),
- ast_id: span::ROOT_ERASED_FILE_AST_ID,
- },
- ctx: SyntaxContext::root(span::Edition::CURRENT),
- };
- let subtree_paren_a = vec![
- tt::TokenTree::Subtree(tt::Subtree {
- delimiter: tt::Delimiter {
- open: span,
- close: span,
- kind: tt::DelimiterKind::Parenthesis,
- },
- len: 1,
- }),
- tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
- is_raw: tt::IdentIsRaw::No,
- sym: Symbol::intern("a"),
- span,
- })),
- ];
-
- let t1 = TokenStream::from_str("(a)", span).unwrap();
- assert_eq!(t1.token_trees.len(), 2);
- assert!(t1.token_trees == subtree_paren_a);
-
- let t2 = TokenStream::from_str("(a);", span).unwrap();
- assert_eq!(t2.token_trees.len(), 3);
- assert!(t2.token_trees[0..2] == subtree_paren_a);
-
- let underscore = TokenStream::from_str("_", span).unwrap();
- assert!(
- underscore.token_trees[0]
- == tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
- sym: Symbol::intern("_"),
- span,
- is_raw: tt::IdentIsRaw::No,
- }))
- );
- }
-}
diff --git a/crates/proc-macro-srv/src/server_impl/token_id.rs b/crates/proc-macro-srv/src/server_impl/token_id.rs
index 91e70ea..3814320 100644
--- a/crates/proc-macro-srv/src/server_impl/token_id.rs
+++ b/crates/proc-macro-srv/src/server_impl/token_id.rs
@@ -3,9 +3,12 @@
use std::ops::{Bound, Range};
use intern::Symbol;
-use proc_macro::bridge::{self, server};
+use proc_macro::bridge::server;
-use crate::server_impl::{from_token_tree, literal_from_str, token_stream::TokenStreamBuilder};
+use crate::{
+ bridge::{Diagnostic, ExpnGlobals, Literal, TokenTree},
+ server_impl::literal_from_str,
+};
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct SpanId(pub u32);
@@ -17,7 +20,6 @@
}
type Span = SpanId;
-type TokenStream = crate::server_impl::TokenStream<Span>;
pub struct FreeFunctions;
@@ -29,7 +31,7 @@
impl server::Types for SpanIdServer {
type FreeFunctions = FreeFunctions;
- type TokenStream = TokenStream;
+ type TokenStream = crate::token_stream::TokenStream<Span>;
type Span = Span;
type Symbol = Symbol;
}
@@ -40,14 +42,11 @@
}
fn track_env_var(&mut self, _var: &str, _value: Option<&str>) {}
fn track_path(&mut self, _path: &str) {}
- fn literal_from_str(
- &mut self,
- s: &str,
- ) -> Result<bridge::Literal<Self::Span, Self::Symbol>, ()> {
+ fn literal_from_str(&mut self, s: &str) -> Result<Literal<Self::Span>, ()> {
literal_from_str(s, self.call_site)
}
- fn emit_diagnostic(&mut self, _: bridge::Diagnostic<Self::Span>) {}
+ fn emit_diagnostic(&mut self, _: Diagnostic<Self::Span>) {}
}
impl server::TokenStream for SpanIdServer {
@@ -66,11 +65,8 @@
fn to_string(&mut self, stream: &Self::TokenStream) -> String {
stream.to_string()
}
- fn from_token_tree(
- &mut self,
- tree: bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>,
- ) -> Self::TokenStream {
- from_token_tree(tree)
+ fn from_token_tree(&mut self, tree: TokenTree<Self::Span>) -> Self::TokenStream {
+ Self::TokenStream::new(vec![tree])
}
fn expand_expr(&mut self, self_: &Self::TokenStream) -> Result<Self::TokenStream, ()> {
@@ -80,16 +76,17 @@
fn concat_trees(
&mut self,
base: Option<Self::TokenStream>,
- trees: Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>>,
+ trees: Vec<TokenTree<Self::Span>>,
) -> Self::TokenStream {
- let mut builder = TokenStreamBuilder::default();
- if let Some(base) = base {
- builder.push(base);
+ match base {
+ Some(mut base) => {
+ for tt in trees {
+ base.push_tree(tt);
+ }
+ base
+ }
+ None => Self::TokenStream::new(trees),
}
- for tree in trees {
- builder.push(self.from_token_tree(tree));
- }
- builder.build()
}
fn concat_streams(
@@ -97,22 +94,15 @@
base: Option<Self::TokenStream>,
streams: Vec<Self::TokenStream>,
) -> Self::TokenStream {
- let mut builder = TokenStreamBuilder::default();
- if let Some(base) = base {
- builder.push(base);
+ let mut stream = base.unwrap_or_default();
+ for s in streams {
+ stream.push_stream(s);
}
- for stream in streams {
- builder.push(stream);
- }
- builder.build()
+ stream
}
- fn into_trees(
- &mut self,
- stream: Self::TokenStream,
- ) -> Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>> {
- // Can't join with `SpanId`.
- stream.into_bridge(&mut |first, _second| first)
+ fn into_trees(&mut self, stream: Self::TokenStream) -> Vec<TokenTree<Self::Span>> {
+ (*stream.0).clone()
}
}
@@ -191,8 +181,8 @@
}
impl server::Server for SpanIdServer {
- fn globals(&mut self) -> bridge::ExpnGlobals<Self::Span> {
- bridge::ExpnGlobals {
+ fn globals(&mut self) -> ExpnGlobals<Self::Span> {
+ ExpnGlobals {
def_site: self.def_site,
call_site: self.call_site,
mixed_site: self.mixed_site,
@@ -207,73 +197,3 @@
f(symbol.as_str())
}
}
-
-#[cfg(test)]
-mod tests {
- use super::*;
-
- #[test]
- fn test_ra_server_to_string() {
- let s = TokenStream {
- token_trees: vec![
- tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
- sym: Symbol::intern("struct"),
- span: SpanId(0),
- is_raw: tt::IdentIsRaw::No,
- })),
- tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
- sym: Symbol::intern("T"),
- span: SpanId(0),
- is_raw: tt::IdentIsRaw::No,
- })),
- tt::TokenTree::Subtree(tt::Subtree {
- delimiter: tt::Delimiter {
- open: SpanId(0),
- close: SpanId(0),
- kind: tt::DelimiterKind::Brace,
- },
- len: 0,
- }),
- ],
- };
-
- assert_eq!(s.to_string(), "struct T {}");
- }
-
- #[test]
- fn test_ra_server_from_str() {
- let subtree_paren_a = vec![
- tt::TokenTree::Subtree(tt::Subtree {
- delimiter: tt::Delimiter {
- open: SpanId(0),
- close: SpanId(0),
- kind: tt::DelimiterKind::Parenthesis,
- },
- len: 1,
- }),
- tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
- is_raw: tt::IdentIsRaw::No,
- sym: Symbol::intern("a"),
- span: SpanId(0),
- })),
- ];
-
- let t1 = TokenStream::from_str("(a)", SpanId(0)).unwrap();
- assert_eq!(t1.token_trees.len(), 2);
- assert!(t1.token_trees[0..2] == subtree_paren_a);
-
- let t2 = TokenStream::from_str("(a);", SpanId(0)).unwrap();
- assert_eq!(t2.token_trees.len(), 3);
- assert!(t2.token_trees[0..2] == subtree_paren_a);
-
- let underscore = TokenStream::from_str("_", SpanId(0)).unwrap();
- assert!(
- underscore.token_trees[0]
- == tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
- sym: Symbol::intern("_"),
- span: SpanId(0),
- is_raw: tt::IdentIsRaw::No,
- }))
- );
- }
-}
diff --git a/crates/proc-macro-srv/src/server_impl/token_stream.rs b/crates/proc-macro-srv/src/server_impl/token_stream.rs
deleted file mode 100644
index c5019a5..0000000
--- a/crates/proc-macro-srv/src/server_impl/token_stream.rs
+++ /dev/null
@@ -1,170 +0,0 @@
-//! TokenStream implementation used by sysroot ABI
-
-use proc_macro::bridge;
-
-use crate::server_impl::{TopSubtree, delim_to_external, literal_kind_to_external};
-
-#[derive(Clone)]
-pub struct TokenStream<S> {
- pub(super) token_trees: Vec<tt::TokenTree<S>>,
-}
-
-// #[derive(Default)] would mean that `S: Default`.
-impl<S> Default for TokenStream<S> {
- fn default() -> Self {
- Self { token_trees: Default::default() }
- }
-}
-
-impl<S: std::fmt::Debug + Copy> std::fmt::Debug for TokenStream<S> {
- fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
- f.debug_struct("TokenStream")
- .field("token_trees", &tt::TokenTreesView::new(&self.token_trees))
- .finish()
- }
-}
-
-impl<S: Copy> TokenStream<S> {
- pub(crate) fn with_subtree(subtree: TopSubtree<S>) -> Self {
- let delimiter_kind = subtree.top_subtree().delimiter.kind;
- let mut token_trees = subtree.0;
- if delimiter_kind == tt::DelimiterKind::Invisible {
- token_trees.remove(0);
- }
- TokenStream { token_trees }
- }
-
- pub(crate) fn into_subtree(mut self, call_site: S) -> TopSubtree<S>
- where
- S: Copy,
- {
- self.token_trees.insert(
- 0,
- tt::TokenTree::Subtree(tt::Subtree {
- delimiter: tt::Delimiter {
- open: call_site,
- close: call_site,
- kind: tt::DelimiterKind::Invisible,
- },
- len: self.token_trees.len() as u32,
- }),
- );
- TopSubtree(self.token_trees)
- }
-
- pub(super) fn is_empty(&self) -> bool {
- self.token_trees.is_empty()
- }
-
- pub(crate) fn into_bridge(
- self,
- join_spans: &mut dyn FnMut(S, S) -> S,
- ) -> Vec<bridge::TokenTree<Self, S, intern::Symbol>> {
- let mut result = Vec::new();
- let mut iter = self.token_trees.into_iter();
- while let Some(tree) = iter.next() {
- match tree {
- tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => {
- result.push(bridge::TokenTree::Ident(bridge::Ident {
- sym: ident.sym,
- is_raw: ident.is_raw.yes(),
- span: ident.span,
- }))
- }
- // Note, we do not have to assemble our `-` punct and literal split into a single
- // negative bridge literal here. As the proc-macro docs state
- // > Literals created from negative numbers might not survive round-trips through
- // > TokenStream or strings and may be broken into two tokens (- and positive
- // > literal).
- tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => {
- result.push(bridge::TokenTree::Literal(bridge::Literal {
- span: lit.span,
- kind: literal_kind_to_external(lit.kind),
- symbol: lit.symbol,
- suffix: lit.suffix,
- }))
- }
- tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => {
- result.push(bridge::TokenTree::Punct(bridge::Punct {
- ch: punct.char as u8,
- joint: punct.spacing == tt::Spacing::Joint,
- span: punct.span,
- }))
- }
- tt::TokenTree::Subtree(subtree) => {
- result.push(bridge::TokenTree::Group(bridge::Group {
- delimiter: delim_to_external(subtree.delimiter),
- stream: if subtree.len == 0 {
- None
- } else {
- Some(TokenStream {
- token_trees: iter.by_ref().take(subtree.usize_len()).collect(),
- })
- },
- span: bridge::DelimSpan {
- open: subtree.delimiter.open,
- close: subtree.delimiter.close,
- entire: join_spans(subtree.delimiter.open, subtree.delimiter.close),
- },
- }))
- }
- }
- }
- result
- }
-}
-
-pub(super) struct TokenStreamBuilder<S> {
- acc: TokenStream<S>,
-}
-
-/// pub(super)lic implementation details for the `TokenStream` type, such as iterators.
-pub(super) mod token_stream_impls {
-
- use core::fmt;
-
- use super::{TokenStream, TopSubtree};
-
- /// Attempts to break the string into tokens and parse those tokens into a token stream.
- /// May fail for a number of reasons, for example, if the string contains unbalanced delimiters
- /// or characters not existing in the language.
- /// All tokens in the parsed stream get `Span::call_site()` spans.
- ///
- /// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to
- /// change these errors into `LexError`s later.
- impl<S: Copy + fmt::Debug> TokenStream<S> {
- pub(crate) fn from_str(src: &str, call_site: S) -> Result<TokenStream<S>, String> {
- let subtree = syntax_bridge::parse_to_token_tree_static_span(
- span::Edition::CURRENT_FIXME,
- call_site,
- src,
- )
- .ok_or_else(|| format!("lexing error: {src}"))?;
-
- Ok(TokenStream::with_subtree(TopSubtree(subtree.0.into_vec())))
- }
- }
-
- #[allow(clippy::to_string_trait_impl)]
- impl<S> ToString for TokenStream<S> {
- fn to_string(&self) -> String {
- ::tt::pretty(&self.token_trees)
- }
- }
-}
-
-impl<S: Copy> TokenStreamBuilder<S> {
- pub(super) fn push(&mut self, stream: TokenStream<S>) {
- self.acc.token_trees.extend(stream.token_trees)
- }
-
- pub(super) fn build(self) -> TokenStream<S> {
- self.acc
- }
-}
-
-impl<S: Copy> Default for TokenStreamBuilder<S> {
- fn default() -> Self {
- Self { acc: TokenStream::default() }
- }
-}
diff --git a/crates/proc-macro-srv/src/tests/mod.rs b/crates/proc-macro-srv/src/tests/mod.rs
index d4f9976..1e2e8da 100644
--- a/crates/proc-macro-srv/src/tests/mod.rs
+++ b/crates/proc-macro-srv/src/tests/mod.rs
@@ -10,25 +10,45 @@
fn test_derive_empty() {
assert_expand(
"DeriveEmpty",
- r#"struct S;"#,
+ r#"struct S { field: &'r#lt fn(u32) -> &'a r#u32 }"#,
expect![[r#"
- SUBTREE $$ 1 1
- IDENT struct 1
- IDENT S 1
- PUNCH ; [alone] 1
-
-
-
- SUBTREE $$ 1 1"#]],
+ IDENT 1 struct
+ IDENT 1 S
+ GROUP {} 1 1 1
+ IDENT 1 field
+ PUNCT 1 : [alone]
+ PUNCT 1 & [joint]
+ PUNCT 1 ' [joint]
+ IDENT 1 r#lt
+ IDENT 1 fn
+ GROUP () 1 1 1
+ IDENT 1 u32
+ PUNCT 1 - [joint]
+ PUNCT 1 > [alone]
+ PUNCT 1 & [joint]
+ PUNCT 1 ' [joint]
+ IDENT 1 a
+ IDENT 1 r#u32
+ "#]],
expect![[r#"
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- IDENT struct 42:Root[0000, 0]@0..6#ROOT2024
- IDENT S 42:Root[0000, 0]@7..8#ROOT2024
- PUNCH ; [alone] 42:Root[0000, 0]@8..9#ROOT2024
-
-
-
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024"#]],
+ IDENT 42:Root[0000, 0]@0..6#ROOT2024 struct
+ IDENT 42:Root[0000, 0]@7..8#ROOT2024 S
+ GROUP {} 42:Root[0000, 0]@9..10#ROOT2024 42:Root[0000, 0]@46..47#ROOT2024 42:Root[0000, 0]@9..47#ROOT2024
+ IDENT 42:Root[0000, 0]@11..16#ROOT2024 field
+ PUNCT 42:Root[0000, 0]@16..17#ROOT2024 : [alone]
+ PUNCT 42:Root[0000, 0]@18..19#ROOT2024 & [joint]
+ PUNCT 42:Root[0000, 0]@22..23#ROOT2024 ' [joint]
+ IDENT 42:Root[0000, 0]@22..24#ROOT2024 r#lt
+ IDENT 42:Root[0000, 0]@25..27#ROOT2024 fn
+ GROUP () 42:Root[0000, 0]@27..28#ROOT2024 42:Root[0000, 0]@31..32#ROOT2024 42:Root[0000, 0]@27..32#ROOT2024
+ IDENT 42:Root[0000, 0]@28..31#ROOT2024 u32
+ PUNCT 42:Root[0000, 0]@33..34#ROOT2024 - [joint]
+ PUNCT 42:Root[0000, 0]@34..35#ROOT2024 > [alone]
+ PUNCT 42:Root[0000, 0]@36..37#ROOT2024 & [joint]
+ PUNCT 42:Root[0000, 0]@38..39#ROOT2024 ' [joint]
+ IDENT 42:Root[0000, 0]@38..39#ROOT2024 a
+ IDENT 42:Root[0000, 0]@42..45#ROOT2024 r#u32
+ "#]],
);
}
@@ -36,35 +56,37 @@
fn test_derive_error() {
assert_expand(
"DeriveError",
- r#"struct S;"#,
+ r#"struct S { field: u32 }"#,
expect![[r#"
- SUBTREE $$ 1 1
- IDENT struct 1
- IDENT S 1
- PUNCH ; [alone] 1
+ IDENT 1 struct
+ IDENT 1 S
+ GROUP {} 1 1 1
+ IDENT 1 field
+ PUNCT 1 : [alone]
+ IDENT 1 u32
-
- SUBTREE $$ 1 1
- IDENT compile_error 1
- PUNCH ! [alone] 1
- SUBTREE () 1 1
- LITERAL Str #[derive(DeriveError)] struct S ; 1
- PUNCH ; [alone] 1"#]],
+ IDENT 1 compile_error
+ PUNCT 1 ! [joint]
+ GROUP () 1 1 1
+ LITER 1 Str #[derive(DeriveError)] struct S {field 58 u32 }
+ PUNCT 1 ; [alone]
+ "#]],
expect![[r#"
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- IDENT struct 42:Root[0000, 0]@0..6#ROOT2024
- IDENT S 42:Root[0000, 0]@7..8#ROOT2024
- PUNCH ; [alone] 42:Root[0000, 0]@8..9#ROOT2024
+ IDENT 42:Root[0000, 0]@0..6#ROOT2024 struct
+ IDENT 42:Root[0000, 0]@7..8#ROOT2024 S
+ GROUP {} 42:Root[0000, 0]@9..10#ROOT2024 42:Root[0000, 0]@22..23#ROOT2024 42:Root[0000, 0]@9..23#ROOT2024
+ IDENT 42:Root[0000, 0]@11..16#ROOT2024 field
+ PUNCT 42:Root[0000, 0]@16..17#ROOT2024 : [alone]
+ IDENT 42:Root[0000, 0]@18..21#ROOT2024 u32
-
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- IDENT compile_error 42:Root[0000, 0]@0..100#ROOT2024
- PUNCH ! [alone] 42:Root[0000, 0]@0..100#ROOT2024
- SUBTREE () 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- LITERAL Str #[derive(DeriveError)] struct S ; 42:Root[0000, 0]@0..100#ROOT2024
- PUNCH ; [alone] 42:Root[0000, 0]@0..100#ROOT2024"#]],
+ IDENT 42:Root[0000, 0]@0..13#ROOT2024 compile_error
+ PUNCT 42:Root[0000, 0]@13..14#ROOT2024 ! [joint]
+ GROUP () 42:Root[0000, 0]@14..15#ROOT2024 42:Root[0000, 0]@64..65#ROOT2024 42:Root[0000, 0]@14..65#ROOT2024
+ LITER 42:Root[0000, 0]@15..64#ROOT2024 Str #[derive(DeriveError)] struct S {field 58 u32 }
+ PUNCT 42:Root[0000, 0]@65..66#ROOT2024 ; [alone]
+ "#]],
);
}
@@ -74,45 +96,41 @@
"fn_like_noop",
r#"ident, 0, 1, []"#,
expect![[r#"
- SUBTREE $$ 1 1
- IDENT ident 1
- PUNCH , [alone] 1
- LITERAL Integer 0 1
- PUNCH , [alone] 1
- LITERAL Integer 1 1
- PUNCH , [alone] 1
- SUBTREE [] 1 1
+ IDENT 1 ident
+ PUNCT 1 , [alone]
+ LITER 1 Integer 0
+ PUNCT 1 , [alone]
+ LITER 1 Integer 1
+ PUNCT 1 , [alone]
+ GROUP [] 1 1 1
-
- SUBTREE $$ 1 1
- IDENT ident 1
- PUNCH , [alone] 1
- LITERAL Integer 0 1
- PUNCH , [alone] 1
- LITERAL Integer 1 1
- PUNCH , [alone] 1
- SUBTREE [] 1 1"#]],
+ IDENT 1 ident
+ PUNCT 1 , [alone]
+ LITER 1 Integer 0
+ PUNCT 1 , [alone]
+ LITER 1 Integer 1
+ PUNCT 1 , [alone]
+ GROUP [] 1 1 1
+ "#]],
expect![[r#"
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- IDENT ident 42:Root[0000, 0]@0..5#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@5..6#ROOT2024
- LITERAL Integer 0 42:Root[0000, 0]@7..8#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@8..9#ROOT2024
- LITERAL Integer 1 42:Root[0000, 0]@10..11#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@11..12#ROOT2024
- SUBTREE [] 42:Root[0000, 0]@13..14#ROOT2024 42:Root[0000, 0]@14..15#ROOT2024
+ IDENT 42:Root[0000, 0]@0..5#ROOT2024 ident
+ PUNCT 42:Root[0000, 0]@5..6#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@7..8#ROOT2024 Integer 0
+ PUNCT 42:Root[0000, 0]@8..9#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@10..11#ROOT2024 Integer 1
+ PUNCT 42:Root[0000, 0]@11..12#ROOT2024 , [alone]
+ GROUP [] 42:Root[0000, 0]@13..14#ROOT2024 42:Root[0000, 0]@14..15#ROOT2024 42:Root[0000, 0]@13..15#ROOT2024
-
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- IDENT ident 42:Root[0000, 0]@0..5#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@5..6#ROOT2024
- LITERAL Integer 0 42:Root[0000, 0]@7..8#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@8..9#ROOT2024
- LITERAL Integer 1 42:Root[0000, 0]@10..11#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@11..12#ROOT2024
- SUBTREE [] 42:Root[0000, 0]@13..14#ROOT2024 42:Root[0000, 0]@14..15#ROOT2024"#]],
+ IDENT 42:Root[0000, 0]@0..5#ROOT2024 ident
+ PUNCT 42:Root[0000, 0]@5..6#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@7..8#ROOT2024 Integer 0
+ PUNCT 42:Root[0000, 0]@8..9#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@10..11#ROOT2024 Integer 1
+ PUNCT 42:Root[0000, 0]@11..12#ROOT2024 , [alone]
+ GROUP [] 42:Root[0000, 0]@13..14#ROOT2024 42:Root[0000, 0]@14..15#ROOT2024 42:Root[0000, 0]@13..15#ROOT2024
+ "#]],
);
}
@@ -122,29 +140,25 @@
"fn_like_clone_tokens",
r#"ident, []"#,
expect![[r#"
- SUBTREE $$ 1 1
- IDENT ident 1
- PUNCH , [alone] 1
- SUBTREE [] 1 1
+ IDENT 1 ident
+ PUNCT 1 , [alone]
+ GROUP [] 1 1 1
-
- SUBTREE $$ 1 1
- IDENT ident 1
- PUNCH , [alone] 1
- SUBTREE [] 1 1"#]],
+ IDENT 1 ident
+ PUNCT 1 , [alone]
+ GROUP [] 1 1 1
+ "#]],
expect![[r#"
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- IDENT ident 42:Root[0000, 0]@0..5#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@5..6#ROOT2024
- SUBTREE [] 42:Root[0000, 0]@7..8#ROOT2024 42:Root[0000, 0]@8..9#ROOT2024
+ IDENT 42:Root[0000, 0]@0..5#ROOT2024 ident
+ PUNCT 42:Root[0000, 0]@5..6#ROOT2024 , [alone]
+ GROUP [] 42:Root[0000, 0]@7..8#ROOT2024 42:Root[0000, 0]@8..9#ROOT2024 42:Root[0000, 0]@7..9#ROOT2024
-
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- IDENT ident 42:Root[0000, 0]@0..5#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@5..6#ROOT2024
- SUBTREE [] 42:Root[0000, 0]@7..9#ROOT2024 42:Root[0000, 0]@7..9#ROOT2024"#]],
+ IDENT 42:Root[0000, 0]@0..5#ROOT2024 ident
+ PUNCT 42:Root[0000, 0]@5..6#ROOT2024 , [alone]
+ GROUP [] 42:Root[0000, 0]@7..9#ROOT2024 42:Root[0000, 0]@7..9#ROOT2024 42:Root[0000, 0]@7..9#ROOT2024
+ "#]],
);
}
@@ -154,21 +168,17 @@
"fn_like_clone_tokens",
"r#async",
expect![[r#"
- SUBTREE $$ 1 1
- IDENT r#async 1
+ IDENT 1 r#async
-
- SUBTREE $$ 1 1
- IDENT r#async 1"#]],
+ IDENT 1 r#async
+ "#]],
expect![[r#"
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- IDENT r#async 42:Root[0000, 0]@0..7#ROOT2024
+ IDENT 42:Root[0000, 0]@2..7#ROOT2024 r#async
-
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- IDENT r#async 42:Root[0000, 0]@0..7#ROOT2024"#]],
+ IDENT 42:Root[0000, 0]@2..7#ROOT2024 r#async
+ "#]],
);
}
@@ -178,23 +188,19 @@
"fn_like_span_join",
"foo bar",
expect![[r#"
- SUBTREE $$ 1 1
- IDENT foo 1
- IDENT bar 1
+ IDENT 1 foo
+ IDENT 1 bar
-
- SUBTREE $$ 1 1
- IDENT r#joined 1"#]],
+ IDENT 1 r#joined
+ "#]],
expect![[r#"
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- IDENT foo 42:Root[0000, 0]@0..3#ROOT2024
- IDENT bar 42:Root[0000, 0]@8..11#ROOT2024
+ IDENT 42:Root[0000, 0]@0..3#ROOT2024 foo
+ IDENT 42:Root[0000, 0]@8..11#ROOT2024 bar
-
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- IDENT r#joined 42:Root[0000, 0]@0..11#ROOT2024"#]],
+ IDENT 42:Root[0000, 0]@0..11#ROOT2024 r#joined
+ "#]],
);
}
@@ -204,29 +210,25 @@
"fn_like_span_ops",
"set_def_site resolved_at_def_site start_span",
expect![[r#"
- SUBTREE $$ 1 1
- IDENT set_def_site 1
- IDENT resolved_at_def_site 1
- IDENT start_span 1
+ IDENT 1 set_def_site
+ IDENT 1 resolved_at_def_site
+ IDENT 1 start_span
-
- SUBTREE $$ 1 1
- IDENT set_def_site 0
- IDENT resolved_at_def_site 1
- IDENT start_span 1"#]],
+ IDENT 0 set_def_site
+ IDENT 1 resolved_at_def_site
+ IDENT 1 start_span
+ "#]],
expect![[r#"
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- IDENT set_def_site 42:Root[0000, 0]@0..12#ROOT2024
- IDENT resolved_at_def_site 42:Root[0000, 0]@13..33#ROOT2024
- IDENT start_span 42:Root[0000, 0]@34..44#ROOT2024
+ IDENT 42:Root[0000, 0]@0..12#ROOT2024 set_def_site
+ IDENT 42:Root[0000, 0]@13..33#ROOT2024 resolved_at_def_site
+ IDENT 42:Root[0000, 0]@34..44#ROOT2024 start_span
-
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- IDENT set_def_site 41:Root[0000, 0]@0..150#ROOT2024
- IDENT resolved_at_def_site 42:Root[0000, 0]@13..33#ROOT2024
- IDENT start_span 42:Root[0000, 0]@34..34#ROOT2024"#]],
+ IDENT 41:Root[0000, 0]@0..150#ROOT2024 set_def_site
+ IDENT 42:Root[0000, 0]@13..33#ROOT2024 resolved_at_def_site
+ IDENT 42:Root[0000, 0]@34..34#ROOT2024 start_span
+ "#]],
);
}
@@ -236,51 +238,39 @@
"fn_like_mk_literals",
r#""#,
expect![[r#"
- SUBTREE $$ 1 1
-
- SUBTREE $$ 1 1
- LITERAL ByteStr byte_string 1
- LITERAL Char c 1
- LITERAL Str string 1
- LITERAL Str -string 1
- LITERAL CStr cstring 1
- LITERAL Float 3.14f64 1
- PUNCH - [alone] 1
- LITERAL Float 3.14f64 1
- LITERAL Float 3.14 1
- PUNCH - [alone] 1
- LITERAL Float 3.14 1
- LITERAL Integer 123i64 1
- PUNCH - [alone] 1
- LITERAL Integer 123i64 1
- LITERAL Integer 123 1
- PUNCH - [alone] 1
- LITERAL Integer 123 1"#]],
+ LITER 1 ByteStr byte_string
+ LITER 1 Char c
+ LITER 1 Str string
+ LITER 1 Str -string
+ LITER 1 CStr cstring
+ LITER 1 Float 3.14f64
+ LITER 1 Float -3.14f64
+ LITER 1 Float 3.14
+ LITER 1 Float -3.14
+ LITER 1 Integer 123i64
+ LITER 1 Integer -123i64
+ LITER 1 Integer 123
+ LITER 1 Integer -123
+ "#]],
expect![[r#"
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
-
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- LITERAL ByteStr byte_string 42:Root[0000, 0]@0..100#ROOT2024
- LITERAL Char c 42:Root[0000, 0]@0..100#ROOT2024
- LITERAL Str string 42:Root[0000, 0]@0..100#ROOT2024
- LITERAL Str -string 42:Root[0000, 0]@0..100#ROOT2024
- LITERAL CStr cstring 42:Root[0000, 0]@0..100#ROOT2024
- LITERAL Float 3.14f64 42:Root[0000, 0]@0..100#ROOT2024
- PUNCH - [alone] 42:Root[0000, 0]@0..100#ROOT2024
- LITERAL Float 3.14f64 42:Root[0000, 0]@0..100#ROOT2024
- LITERAL Float 3.14 42:Root[0000, 0]@0..100#ROOT2024
- PUNCH - [alone] 42:Root[0000, 0]@0..100#ROOT2024
- LITERAL Float 3.14 42:Root[0000, 0]@0..100#ROOT2024
- LITERAL Integer 123i64 42:Root[0000, 0]@0..100#ROOT2024
- PUNCH - [alone] 42:Root[0000, 0]@0..100#ROOT2024
- LITERAL Integer 123i64 42:Root[0000, 0]@0..100#ROOT2024
- LITERAL Integer 123 42:Root[0000, 0]@0..100#ROOT2024
- PUNCH - [alone] 42:Root[0000, 0]@0..100#ROOT2024
- LITERAL Integer 123 42:Root[0000, 0]@0..100#ROOT2024"#]],
+ LITER 42:Root[0000, 0]@0..100#ROOT2024 ByteStr byte_string
+ LITER 42:Root[0000, 0]@0..100#ROOT2024 Char c
+ LITER 42:Root[0000, 0]@0..100#ROOT2024 Str string
+ LITER 42:Root[0000, 0]@0..100#ROOT2024 Str -string
+ LITER 42:Root[0000, 0]@0..100#ROOT2024 CStr cstring
+ LITER 42:Root[0000, 0]@0..100#ROOT2024 Float 3.14f64
+ LITER 42:Root[0000, 0]@0..100#ROOT2024 Float -3.14f64
+ LITER 42:Root[0000, 0]@0..100#ROOT2024 Float 3.14
+ LITER 42:Root[0000, 0]@0..100#ROOT2024 Float -3.14
+ LITER 42:Root[0000, 0]@0..100#ROOT2024 Integer 123i64
+ LITER 42:Root[0000, 0]@0..100#ROOT2024 Integer -123i64
+ LITER 42:Root[0000, 0]@0..100#ROOT2024 Integer 123
+ LITER 42:Root[0000, 0]@0..100#ROOT2024 Integer -123
+ "#]],
);
}
@@ -290,21 +280,17 @@
"fn_like_mk_idents",
r#""#,
expect![[r#"
- SUBTREE $$ 1 1
-
- SUBTREE $$ 1 1
- IDENT standard 1
- IDENT r#raw 1"#]],
+ IDENT 1 standard
+ IDENT 1 r#raw
+ "#]],
expect![[r#"
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
-
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- IDENT standard 42:Root[0000, 0]@0..100#ROOT2024
- IDENT r#raw 42:Root[0000, 0]@0..100#ROOT2024"#]],
+ IDENT 42:Root[0000, 0]@0..100#ROOT2024 standard
+ IDENT 42:Root[0000, 0]@0..100#ROOT2024 r#raw
+ "#]],
);
}
@@ -314,97 +300,93 @@
"fn_like_clone_tokens",
r###"1u16, 2_u32, -4i64, 3.14f32, "hello bridge", "suffixed"suffix, r##"raw"##, 'a', b'b', c"null""###,
expect![[r#"
- SUBTREE $$ 1 1
- LITERAL Integer 1u16 1
- PUNCH , [alone] 1
- LITERAL Integer 2_u32 1
- PUNCH , [alone] 1
- PUNCH - [alone] 1
- LITERAL Integer 4i64 1
- PUNCH , [alone] 1
- LITERAL Float 3.14f32 1
- PUNCH , [alone] 1
- LITERAL Str hello bridge 1
- PUNCH , [alone] 1
- LITERAL Err(()) "suffixed"suffix 1
- PUNCH , [alone] 1
- LITERAL StrRaw(2) raw 1
- PUNCH , [alone] 1
- LITERAL Char a 1
- PUNCH , [alone] 1
- LITERAL Byte b 1
- PUNCH , [alone] 1
- LITERAL CStr null 1
+ LITER 1 Integer 1u16
+ PUNCT 1 , [alone]
+ LITER 1 Integer 2_u32
+ PUNCT 1 , [alone]
+ PUNCT 1 - [alone]
+ LITER 1 Integer 4i64
+ PUNCT 1 , [alone]
+ LITER 1 Float 3.14f32
+ PUNCT 1 , [alone]
+ LITER 1 Str hello bridge
+ PUNCT 1 , [alone]
+ LITER 1 Str suffixedsuffix
+ PUNCT 1 , [alone]
+ LITER 1 StrRaw(2) raw
+ PUNCT 1 , [alone]
+ LITER 1 Char a
+ PUNCT 1 , [alone]
+ LITER 1 Byte b
+ PUNCT 1 , [alone]
+ LITER 1 CStr null
-
- SUBTREE $$ 1 1
- LITERAL Integer 1u16 1
- PUNCH , [alone] 1
- LITERAL Integer 2_u32 1
- PUNCH , [alone] 1
- PUNCH - [alone] 1
- LITERAL Integer 4i64 1
- PUNCH , [alone] 1
- LITERAL Float 3.14f32 1
- PUNCH , [alone] 1
- LITERAL Str hello bridge 1
- PUNCH , [alone] 1
- LITERAL Str suffixedsuffix 1
- PUNCH , [alone] 1
- LITERAL StrRaw(2) raw 1
- PUNCH , [alone] 1
- LITERAL Char a 1
- PUNCH , [alone] 1
- LITERAL Byte b 1
- PUNCH , [alone] 1
- LITERAL CStr null 1"#]],
+ LITER 1 Integer 1u16
+ PUNCT 1 , [alone]
+ LITER 1 Integer 2_u32
+ PUNCT 1 , [alone]
+ PUNCT 1 - [alone]
+ LITER 1 Integer 4i64
+ PUNCT 1 , [alone]
+ LITER 1 Float 3.14f32
+ PUNCT 1 , [alone]
+ LITER 1 Str hello bridge
+ PUNCT 1 , [alone]
+ LITER 1 Str suffixedsuffix
+ PUNCT 1 , [alone]
+ LITER 1 StrRaw(2) raw
+ PUNCT 1 , [alone]
+ LITER 1 Char a
+ PUNCT 1 , [alone]
+ LITER 1 Byte b
+ PUNCT 1 , [alone]
+ LITER 1 CStr null
+ "#]],
expect![[r#"
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- LITERAL Integer 1u16 42:Root[0000, 0]@0..4#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@4..5#ROOT2024
- LITERAL Integer 2_u32 42:Root[0000, 0]@6..11#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@11..12#ROOT2024
- PUNCH - [alone] 42:Root[0000, 0]@13..14#ROOT2024
- LITERAL Integer 4i64 42:Root[0000, 0]@14..18#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@18..19#ROOT2024
- LITERAL Float 3.14f32 42:Root[0000, 0]@20..27#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@27..28#ROOT2024
- LITERAL Str hello bridge 42:Root[0000, 0]@29..43#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@43..44#ROOT2024
- LITERAL Err(()) "suffixed"suffix 42:Root[0000, 0]@45..61#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@61..62#ROOT2024
- LITERAL StrRaw(2) raw 42:Root[0000, 0]@63..73#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@73..74#ROOT2024
- LITERAL Char a 42:Root[0000, 0]@75..78#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@78..79#ROOT2024
- LITERAL Byte b 42:Root[0000, 0]@80..84#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@84..85#ROOT2024
- LITERAL CStr null 42:Root[0000, 0]@86..93#ROOT2024
+ LITER 42:Root[0000, 0]@0..4#ROOT2024 Integer 1u16
+ PUNCT 42:Root[0000, 0]@4..5#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@6..11#ROOT2024 Integer 2_u32
+ PUNCT 42:Root[0000, 0]@11..12#ROOT2024 , [alone]
+ PUNCT 42:Root[0000, 0]@13..14#ROOT2024 - [alone]
+ LITER 42:Root[0000, 0]@14..18#ROOT2024 Integer 4i64
+ PUNCT 42:Root[0000, 0]@18..19#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@20..27#ROOT2024 Float 3.14f32
+ PUNCT 42:Root[0000, 0]@27..28#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@29..43#ROOT2024 Str hello bridge
+ PUNCT 42:Root[0000, 0]@43..44#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@45..61#ROOT2024 Str suffixedsuffix
+ PUNCT 42:Root[0000, 0]@61..62#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@63..73#ROOT2024 StrRaw(2) raw
+ PUNCT 42:Root[0000, 0]@73..74#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@75..78#ROOT2024 Char a
+ PUNCT 42:Root[0000, 0]@78..79#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@80..84#ROOT2024 Byte b
+ PUNCT 42:Root[0000, 0]@84..85#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@86..93#ROOT2024 CStr null
-
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- LITERAL Integer 1u16 42:Root[0000, 0]@0..4#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@4..5#ROOT2024
- LITERAL Integer 2_u32 42:Root[0000, 0]@6..11#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@11..12#ROOT2024
- PUNCH - [alone] 42:Root[0000, 0]@13..14#ROOT2024
- LITERAL Integer 4i64 42:Root[0000, 0]@14..18#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@18..19#ROOT2024
- LITERAL Float 3.14f32 42:Root[0000, 0]@20..27#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@27..28#ROOT2024
- LITERAL Str hello bridge 42:Root[0000, 0]@29..43#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@43..44#ROOT2024
- LITERAL Str suffixedsuffix 42:Root[0000, 0]@45..61#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@61..62#ROOT2024
- LITERAL StrRaw(2) raw 42:Root[0000, 0]@63..73#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@73..74#ROOT2024
- LITERAL Char a 42:Root[0000, 0]@75..78#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@78..79#ROOT2024
- LITERAL Byte b 42:Root[0000, 0]@80..84#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@84..85#ROOT2024
- LITERAL CStr null 42:Root[0000, 0]@86..93#ROOT2024"#]],
+ LITER 42:Root[0000, 0]@0..4#ROOT2024 Integer 1u16
+ PUNCT 42:Root[0000, 0]@4..5#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@6..11#ROOT2024 Integer 2_u32
+ PUNCT 42:Root[0000, 0]@11..12#ROOT2024 , [alone]
+ PUNCT 42:Root[0000, 0]@13..14#ROOT2024 - [alone]
+ LITER 42:Root[0000, 0]@14..18#ROOT2024 Integer 4i64
+ PUNCT 42:Root[0000, 0]@18..19#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@20..27#ROOT2024 Float 3.14f32
+ PUNCT 42:Root[0000, 0]@27..28#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@29..43#ROOT2024 Str hello bridge
+ PUNCT 42:Root[0000, 0]@43..44#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@45..61#ROOT2024 Str suffixedsuffix
+ PUNCT 42:Root[0000, 0]@61..62#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@63..73#ROOT2024 StrRaw(2) raw
+ PUNCT 42:Root[0000, 0]@73..74#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@75..78#ROOT2024 Char a
+ PUNCT 42:Root[0000, 0]@78..79#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@80..84#ROOT2024 Byte b
+ PUNCT 42:Root[0000, 0]@84..85#ROOT2024 , [alone]
+ LITER 42:Root[0000, 0]@86..93#ROOT2024 CStr null
+ "#]],
);
}
@@ -414,61 +396,57 @@
"fn_like_clone_tokens",
r###"-1u16, - 2_u32, -3.14f32, - 2.7"###,
expect![[r#"
- SUBTREE $$ 1 1
- PUNCH - [alone] 1
- LITERAL Integer 1u16 1
- PUNCH , [alone] 1
- PUNCH - [alone] 1
- LITERAL Integer 2_u32 1
- PUNCH , [alone] 1
- PUNCH - [alone] 1
- LITERAL Float 3.14f32 1
- PUNCH , [alone] 1
- PUNCH - [alone] 1
- LITERAL Float 2.7 1
+ PUNCT 1 - [alone]
+ LITER 1 Integer 1u16
+ PUNCT 1 , [alone]
+ PUNCT 1 - [alone]
+ LITER 1 Integer 2_u32
+ PUNCT 1 , [alone]
+ PUNCT 1 - [alone]
+ LITER 1 Float 3.14f32
+ PUNCT 1 , [alone]
+ PUNCT 1 - [alone]
+ LITER 1 Float 2.7
-
- SUBTREE $$ 1 1
- PUNCH - [alone] 1
- LITERAL Integer 1u16 1
- PUNCH , [alone] 1
- PUNCH - [alone] 1
- LITERAL Integer 2_u32 1
- PUNCH , [alone] 1
- PUNCH - [alone] 1
- LITERAL Float 3.14f32 1
- PUNCH , [alone] 1
- PUNCH - [alone] 1
- LITERAL Float 2.7 1"#]],
+ PUNCT 1 - [alone]
+ LITER 1 Integer 1u16
+ PUNCT 1 , [alone]
+ PUNCT 1 - [alone]
+ LITER 1 Integer 2_u32
+ PUNCT 1 , [alone]
+ PUNCT 1 - [alone]
+ LITER 1 Float 3.14f32
+ PUNCT 1 , [alone]
+ PUNCT 1 - [alone]
+ LITER 1 Float 2.7
+ "#]],
expect![[r#"
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- PUNCH - [alone] 42:Root[0000, 0]@0..1#ROOT2024
- LITERAL Integer 1u16 42:Root[0000, 0]@1..5#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@5..6#ROOT2024
- PUNCH - [alone] 42:Root[0000, 0]@7..8#ROOT2024
- LITERAL Integer 2_u32 42:Root[0000, 0]@9..14#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@14..15#ROOT2024
- PUNCH - [alone] 42:Root[0000, 0]@16..17#ROOT2024
- LITERAL Float 3.14f32 42:Root[0000, 0]@17..24#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@24..25#ROOT2024
- PUNCH - [alone] 42:Root[0000, 0]@26..27#ROOT2024
- LITERAL Float 2.7 42:Root[0000, 0]@28..31#ROOT2024
+ PUNCT 42:Root[0000, 0]@0..1#ROOT2024 - [alone]
+ LITER 42:Root[0000, 0]@1..5#ROOT2024 Integer 1u16
+ PUNCT 42:Root[0000, 0]@5..6#ROOT2024 , [alone]
+ PUNCT 42:Root[0000, 0]@7..8#ROOT2024 - [alone]
+ LITER 42:Root[0000, 0]@9..14#ROOT2024 Integer 2_u32
+ PUNCT 42:Root[0000, 0]@14..15#ROOT2024 , [alone]
+ PUNCT 42:Root[0000, 0]@16..17#ROOT2024 - [alone]
+ LITER 42:Root[0000, 0]@17..24#ROOT2024 Float 3.14f32
+ PUNCT 42:Root[0000, 0]@24..25#ROOT2024 , [alone]
+ PUNCT 42:Root[0000, 0]@26..27#ROOT2024 - [alone]
+ LITER 42:Root[0000, 0]@28..31#ROOT2024 Float 2.7
-
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- PUNCH - [alone] 42:Root[0000, 0]@0..1#ROOT2024
- LITERAL Integer 1u16 42:Root[0000, 0]@1..5#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@5..6#ROOT2024
- PUNCH - [alone] 42:Root[0000, 0]@7..8#ROOT2024
- LITERAL Integer 2_u32 42:Root[0000, 0]@9..14#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@14..15#ROOT2024
- PUNCH - [alone] 42:Root[0000, 0]@16..17#ROOT2024
- LITERAL Float 3.14f32 42:Root[0000, 0]@17..24#ROOT2024
- PUNCH , [alone] 42:Root[0000, 0]@24..25#ROOT2024
- PUNCH - [alone] 42:Root[0000, 0]@26..27#ROOT2024
- LITERAL Float 2.7 42:Root[0000, 0]@28..31#ROOT2024"#]],
+ PUNCT 42:Root[0000, 0]@0..1#ROOT2024 - [alone]
+ LITER 42:Root[0000, 0]@1..5#ROOT2024 Integer 1u16
+ PUNCT 42:Root[0000, 0]@5..6#ROOT2024 , [alone]
+ PUNCT 42:Root[0000, 0]@7..8#ROOT2024 - [alone]
+ LITER 42:Root[0000, 0]@9..14#ROOT2024 Integer 2_u32
+ PUNCT 42:Root[0000, 0]@14..15#ROOT2024 , [alone]
+ PUNCT 42:Root[0000, 0]@16..17#ROOT2024 - [alone]
+ LITER 42:Root[0000, 0]@17..24#ROOT2024 Float 3.14f32
+ PUNCT 42:Root[0000, 0]@24..25#ROOT2024 , [alone]
+ PUNCT 42:Root[0000, 0]@26..27#ROOT2024 - [alone]
+ LITER 42:Root[0000, 0]@28..31#ROOT2024 Float 2.7
+ "#]],
);
}
@@ -482,40 +460,64 @@
r#"mod m {}"#,
r#"some arguments"#,
expect![[r#"
- SUBTREE $$ 1 1
- IDENT mod 1
- IDENT m 1
- SUBTREE {} 1 1
+ IDENT 1 mod
+ IDENT 1 m
+ GROUP {} 1 1 1
- SUBTREE $$ 1 1
- IDENT some 1
- IDENT arguments 1
- SUBTREE $$ 1 1
- IDENT compile_error 1
- PUNCH ! [alone] 1
- SUBTREE () 1 1
- LITERAL Str #[attr_error(some arguments)] mod m {} 1
- PUNCH ; [alone] 1"#]],
+ IDENT 1 some
+ IDENT 1 arguments
+
+
+ IDENT 1 compile_error
+ PUNCT 1 ! [joint]
+ GROUP () 1 1 1
+ LITER 1 Str #[attr_error(some arguments )] mod m {}
+ PUNCT 1 ; [alone]
+ "#]],
expect![[r#"
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- IDENT mod 42:Root[0000, 0]@0..3#ROOT2024
- IDENT m 42:Root[0000, 0]@4..5#ROOT2024
- SUBTREE {} 42:Root[0000, 0]@6..7#ROOT2024 42:Root[0000, 0]@7..8#ROOT2024
+ IDENT 42:Root[0000, 0]@0..3#ROOT2024 mod
+ IDENT 42:Root[0000, 0]@4..5#ROOT2024 m
+ GROUP {} 42:Root[0000, 0]@6..7#ROOT2024 42:Root[0000, 0]@7..8#ROOT2024 42:Root[0000, 0]@6..8#ROOT2024
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- IDENT some 42:Root[0000, 0]@0..4#ROOT2024
- IDENT arguments 42:Root[0000, 0]@5..14#ROOT2024
- SUBTREE $$ 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- IDENT compile_error 42:Root[0000, 0]@0..100#ROOT2024
- PUNCH ! [alone] 42:Root[0000, 0]@0..100#ROOT2024
- SUBTREE () 42:Root[0000, 0]@0..100#ROOT2024 42:Root[0000, 0]@0..100#ROOT2024
- LITERAL Str #[attr_error(some arguments)] mod m {} 42:Root[0000, 0]@0..100#ROOT2024
- PUNCH ; [alone] 42:Root[0000, 0]@0..100#ROOT2024"#]],
+ IDENT 42:Root[0000, 0]@0..4#ROOT2024 some
+ IDENT 42:Root[0000, 0]@5..14#ROOT2024 arguments
+
+
+ IDENT 42:Root[0000, 0]@0..13#ROOT2024 compile_error
+ PUNCT 42:Root[0000, 0]@13..14#ROOT2024 ! [joint]
+ GROUP () 42:Root[0000, 0]@14..15#ROOT2024 42:Root[0000, 0]@56..57#ROOT2024 42:Root[0000, 0]@14..57#ROOT2024
+ LITER 42:Root[0000, 0]@15..56#ROOT2024 Str #[attr_error(some arguments )] mod m {}
+ PUNCT 42:Root[0000, 0]@57..58#ROOT2024 ; [alone]
+ "#]],
);
}
+#[test]
+#[should_panic = "called `Result::unwrap()` on an `Err` value: \"Mismatched token groups\""]
+fn test_broken_input_unclosed_delim() {
+ assert_expand("fn_like_clone_tokens", r###"{"###, expect![[]], expect![[]]);
+}
+
+#[test]
+#[should_panic = "called `Result::unwrap()` on an `Err` value: \"Unexpected '}'\""]
+fn test_broken_input_unopened_delim() {
+ assert_expand("fn_like_clone_tokens", r###"}"###, expect![[]], expect![[]]);
+}
+
+#[test]
+#[should_panic = "called `Result::unwrap()` on an `Err` value: \"Expected '}'\""]
+fn test_broken_input_mismatched_delim() {
+ assert_expand("fn_like_clone_tokens", r###"(}"###, expect![[]], expect![[]]);
+}
+
+#[test]
+#[should_panic = "called `Result::unwrap()` on an `Err` value: \"Invalid identifier: `🪟`\""]
+fn test_broken_input_unknown_token() {
+ assert_expand("fn_like_clone_tokens", r###"🪟"###, expect![[]], expect![[]]);
+}
+
/// Tests that we find and classify all proc macros correctly.
#[test]
fn list_test_macros() {
diff --git a/crates/proc-macro-srv/src/tests/utils.rs b/crates/proc-macro-srv/src/tests/utils.rs
index f5a76e3..1b12308 100644
--- a/crates/proc-macro-srv/src/tests/utils.rs
+++ b/crates/proc-macro-srv/src/tests/utils.rs
@@ -1,31 +1,25 @@
//! utils used in proc-macro tests
use expect_test::Expect;
-use span::{EditionedFileId, FileId, ROOT_ERASED_FILE_AST_ID, Span, SpanAnchor, SyntaxContext};
-use tt::TextRange;
+use span::{
+ EditionedFileId, FileId, ROOT_ERASED_FILE_AST_ID, Span, SpanAnchor, SyntaxContext, TextRange,
+};
-use crate::{EnvSnapshot, ProcMacroSrv, SpanId, dylib, proc_macro_test_dylib_path};
+use crate::{
+ EnvSnapshot, ProcMacroSrv, SpanId, dylib, proc_macro_test_dylib_path, token_stream::TokenStream,
+};
-fn parse_string(call_site: SpanId, src: &str) -> crate::server_impl::TokenStream<SpanId> {
- crate::server_impl::TokenStream::with_subtree(crate::server_impl::TopSubtree(
- syntax_bridge::parse_to_token_tree_static_span(span::Edition::CURRENT, call_site, src)
- .unwrap()
- .0
- .into_vec(),
- ))
+fn parse_string(call_site: SpanId, src: &str) -> TokenStream<SpanId> {
+ TokenStream::from_str(src, call_site).unwrap()
}
fn parse_string_spanned(
anchor: SpanAnchor,
call_site: SyntaxContext,
src: &str,
-) -> crate::server_impl::TokenStream<Span> {
- crate::server_impl::TokenStream::with_subtree(crate::server_impl::TopSubtree(
- syntax_bridge::parse_to_token_tree(span::Edition::CURRENT, anchor, call_site, src)
- .unwrap()
- .0
- .into_vec(),
- ))
+) -> TokenStream<Span> {
+ TokenStream::from_str(src, Span { range: TextRange::default(), anchor, ctx: call_site })
+ .unwrap()
}
pub fn assert_expand(
@@ -60,16 +54,18 @@
let def_site = SpanId(0);
let call_site = SpanId(1);
let mixed_site = SpanId(2);
- let input_ts = parse_string(call_site, input).into_subtree(call_site);
- let attr_ts = attr.map(|attr| parse_string(call_site, attr).into_subtree(call_site));
+ let input_ts = parse_string(call_site, input);
+ let attr_ts = attr.map(|attr| parse_string(call_site, attr));
let input_ts_string = format!("{input_ts:?}");
let attr_ts_string = attr_ts.as_ref().map(|it| format!("{it:?}"));
let res =
expander.expand(macro_name, input_ts, attr_ts, def_site, call_site, mixed_site).unwrap();
expect.assert_eq(&format!(
- "{input_ts_string}\n\n{}\n\n{res:?}",
- attr_ts_string.unwrap_or_default()
+ "{input_ts_string}{}{}{}",
+ if attr_ts_string.is_some() { "\n\n" } else { "" },
+ attr_ts_string.unwrap_or_default(),
+ if res.is_empty() { String::new() } else { format!("\n\n{res:?}") }
));
let def_site = Span {
@@ -90,17 +86,18 @@
};
let mixed_site = call_site;
- let fixture =
- parse_string_spanned(call_site.anchor, call_site.ctx, input).into_subtree(call_site);
- let attr = attr.map(|attr| {
- parse_string_spanned(call_site.anchor, call_site.ctx, attr).into_subtree(call_site)
- });
+ let fixture = parse_string_spanned(call_site.anchor, call_site.ctx, input);
+ let attr = attr.map(|attr| parse_string_spanned(call_site.anchor, call_site.ctx, attr));
let fixture_string = format!("{fixture:?}");
let attr_string = attr.as_ref().map(|it| format!("{it:?}"));
let res = expander.expand(macro_name, fixture, attr, def_site, call_site, mixed_site).unwrap();
- expect_spanned
- .assert_eq(&format!("{fixture_string}\n\n{}\n\n{res:#?}", attr_string.unwrap_or_default()));
+ expect_spanned.assert_eq(&format!(
+ "{fixture_string}{}{}{}",
+ if attr_string.is_some() { "\n\n" } else { "" },
+ attr_string.unwrap_or_default(),
+ if res.is_empty() { String::new() } else { format!("\n\n{res:?}") }
+ ));
}
pub(crate) fn list() -> Vec<String> {
diff --git a/crates/proc-macro-srv/src/token_stream.rs b/crates/proc-macro-srv/src/token_stream.rs
new file mode 100644
index 0000000..628d694
--- /dev/null
+++ b/crates/proc-macro-srv/src/token_stream.rs
@@ -0,0 +1,745 @@
+//! The proc-macro server token stream implementation.
+
+use core::fmt;
+use std::sync::Arc;
+
+use intern::Symbol;
+use proc_macro::Delimiter;
+use rustc_lexer::{DocStyle, LiteralKind};
+
+use crate::bridge::{DelimSpan, Group, Ident, LitKind, Literal, Punct, TokenTree};
+
+/// Trait for allowing tests to parse tokenstreams with dynamic span ranges
+pub(crate) trait SpanLike {
+ fn derive_ranged(&self, range: std::ops::Range<usize>) -> Self;
+}
+
+#[derive(Clone)]
+pub struct TokenStream<S>(pub(crate) Arc<Vec<TokenTree<S>>>);
+
+impl<S> Default for TokenStream<S> {
+ fn default() -> Self {
+ Self(Default::default())
+ }
+}
+
+impl<S> TokenStream<S> {
+ pub fn new(tts: Vec<TokenTree<S>>) -> TokenStream<S> {
+ TokenStream(Arc::new(tts))
+ }
+
+ pub fn is_empty(&self) -> bool {
+ self.0.is_empty()
+ }
+
+ pub fn len(&self) -> usize {
+ self.0.len()
+ }
+
+ pub fn iter(&self) -> TokenStreamIter<'_, S> {
+ TokenStreamIter::new(self)
+ }
+
+ pub(crate) fn from_str(s: &str, span: S) -> Result<Self, String>
+ where
+ S: SpanLike + Copy,
+ {
+ let mut groups = Vec::new();
+ groups.push((proc_macro::Delimiter::None, 0..0, vec![]));
+ let mut offset = 0;
+ let mut tokens = rustc_lexer::tokenize(s, rustc_lexer::FrontmatterAllowed::No).peekable();
+ while let Some(token) = tokens.next() {
+ let range = offset..offset + token.len as usize;
+ offset += token.len as usize;
+
+ let mut is_joint = || {
+ tokens.peek().is_some_and(|token| {
+ matches!(
+ token.kind,
+ rustc_lexer::TokenKind::RawLifetime
+ | rustc_lexer::TokenKind::GuardedStrPrefix
+ | rustc_lexer::TokenKind::Lifetime { .. }
+ | rustc_lexer::TokenKind::Semi
+ | rustc_lexer::TokenKind::Comma
+ | rustc_lexer::TokenKind::Dot
+ | rustc_lexer::TokenKind::OpenParen
+ | rustc_lexer::TokenKind::CloseParen
+ | rustc_lexer::TokenKind::OpenBrace
+ | rustc_lexer::TokenKind::CloseBrace
+ | rustc_lexer::TokenKind::OpenBracket
+ | rustc_lexer::TokenKind::CloseBracket
+ | rustc_lexer::TokenKind::At
+ | rustc_lexer::TokenKind::Pound
+ | rustc_lexer::TokenKind::Tilde
+ | rustc_lexer::TokenKind::Question
+ | rustc_lexer::TokenKind::Colon
+ | rustc_lexer::TokenKind::Dollar
+ | rustc_lexer::TokenKind::Eq
+ | rustc_lexer::TokenKind::Bang
+ | rustc_lexer::TokenKind::Lt
+ | rustc_lexer::TokenKind::Gt
+ | rustc_lexer::TokenKind::Minus
+ | rustc_lexer::TokenKind::And
+ | rustc_lexer::TokenKind::Or
+ | rustc_lexer::TokenKind::Plus
+ | rustc_lexer::TokenKind::Star
+ | rustc_lexer::TokenKind::Slash
+ | rustc_lexer::TokenKind::Percent
+ | rustc_lexer::TokenKind::Caret
+ )
+ })
+ };
+
+ let Some((open_delim, _, tokenstream)) = groups.last_mut() else {
+ return Err("Unbalanced delimiters".to_owned());
+ };
+ match token.kind {
+ rustc_lexer::TokenKind::OpenParen => {
+ groups.push((proc_macro::Delimiter::Parenthesis, range, vec![]))
+ }
+ rustc_lexer::TokenKind::CloseParen if *open_delim != Delimiter::Parenthesis => {
+ return if *open_delim == Delimiter::None {
+ Err("Unexpected ')'".to_owned())
+ } else {
+ Err("Expected ')'".to_owned())
+ };
+ }
+ rustc_lexer::TokenKind::CloseParen => {
+ let (delimiter, open_range, stream) = groups.pop().unwrap();
+ groups.last_mut().ok_or_else(|| "Unbalanced delimiters".to_owned())?.2.push(
+ TokenTree::Group(Group {
+ delimiter,
+ stream: if stream.is_empty() {
+ None
+ } else {
+ Some(TokenStream::new(stream))
+ },
+ span: DelimSpan {
+ entire: span.derive_ranged(open_range.start..range.end),
+ open: span.derive_ranged(open_range),
+ close: span.derive_ranged(range),
+ },
+ }),
+ );
+ }
+ rustc_lexer::TokenKind::OpenBrace => {
+ groups.push((proc_macro::Delimiter::Brace, range, vec![]))
+ }
+ rustc_lexer::TokenKind::CloseBrace if *open_delim != Delimiter::Brace => {
+ return if *open_delim == Delimiter::None {
+ Err("Unexpected '}'".to_owned())
+ } else {
+ Err("Expected '}'".to_owned())
+ };
+ }
+ rustc_lexer::TokenKind::CloseBrace => {
+ let (delimiter, open_range, stream) = groups.pop().unwrap();
+ groups.last_mut().ok_or_else(|| "Unbalanced delimiters".to_owned())?.2.push(
+ TokenTree::Group(Group {
+ delimiter,
+ stream: if stream.is_empty() {
+ None
+ } else {
+ Some(TokenStream::new(stream))
+ },
+ span: DelimSpan {
+ entire: span.derive_ranged(open_range.start..range.end),
+ open: span.derive_ranged(open_range),
+ close: span.derive_ranged(range),
+ },
+ }),
+ );
+ }
+ rustc_lexer::TokenKind::OpenBracket => {
+ groups.push((proc_macro::Delimiter::Bracket, range, vec![]))
+ }
+ rustc_lexer::TokenKind::CloseBracket if *open_delim != Delimiter::Bracket => {
+ return if *open_delim == Delimiter::None {
+ Err("Unexpected ']'".to_owned())
+ } else {
+ Err("Expected ']'".to_owned())
+ };
+ }
+ rustc_lexer::TokenKind::CloseBracket => {
+ let (delimiter, open_range, stream) = groups.pop().unwrap();
+ groups.last_mut().ok_or_else(|| "Unbalanced delimiters".to_owned())?.2.push(
+ TokenTree::Group(Group {
+ delimiter,
+ stream: if stream.is_empty() {
+ None
+ } else {
+ Some(TokenStream::new(stream))
+ },
+ span: DelimSpan {
+ entire: span.derive_ranged(open_range.start..range.end),
+ open: span.derive_ranged(open_range),
+ close: span.derive_ranged(range),
+ },
+ }),
+ );
+ }
+ rustc_lexer::TokenKind::LineComment { doc_style: None }
+ | rustc_lexer::TokenKind::BlockComment { doc_style: None, terminated: _ } => {
+ continue;
+ }
+ rustc_lexer::TokenKind::LineComment { doc_style: Some(doc_style) } => {
+ let text = &s[range.start + 3..range.end];
+ tokenstream.push(TokenTree::Punct(Punct { ch: b'#', joint: false, span }));
+ if doc_style == DocStyle::Inner {
+ tokenstream.push(TokenTree::Punct(Punct { ch: b'!', joint: false, span }));
+ }
+ tokenstream.push(TokenTree::Group(Group {
+ delimiter: Delimiter::Bracket,
+ stream: Some(TokenStream::new(vec![
+ TokenTree::Ident(Ident {
+ sym: Symbol::intern("doc"),
+ is_raw: false,
+ span,
+ }),
+ TokenTree::Punct(Punct { ch: b'=', joint: false, span }),
+ TokenTree::Literal(Literal {
+ kind: LitKind::Str,
+ symbol: Symbol::intern(&text.escape_debug().to_string()),
+ suffix: None,
+ span: span.derive_ranged(range),
+ }),
+ ])),
+ span: DelimSpan { open: span, close: span, entire: span },
+ }));
+ }
+ rustc_lexer::TokenKind::BlockComment { doc_style: Some(doc_style), terminated } => {
+ let text =
+ &s[range.start + 3..if terminated { range.end - 2 } else { range.end }];
+ tokenstream.push(TokenTree::Punct(Punct { ch: b'#', joint: false, span }));
+ if doc_style == DocStyle::Inner {
+ tokenstream.push(TokenTree::Punct(Punct { ch: b'!', joint: false, span }));
+ }
+ tokenstream.push(TokenTree::Group(Group {
+ delimiter: Delimiter::Bracket,
+ stream: Some(TokenStream::new(vec![
+ TokenTree::Ident(Ident {
+ sym: Symbol::intern("doc"),
+ is_raw: false,
+ span,
+ }),
+ TokenTree::Punct(Punct { ch: b'=', joint: false, span }),
+ TokenTree::Literal(Literal {
+ kind: LitKind::Str,
+ symbol: Symbol::intern(&text.escape_debug().to_string()),
+ suffix: None,
+ span: span.derive_ranged(range),
+ }),
+ ])),
+ span: DelimSpan { open: span, close: span, entire: span },
+ }));
+ }
+ rustc_lexer::TokenKind::Whitespace => continue,
+ rustc_lexer::TokenKind::Frontmatter { .. } => unreachable!(),
+ rustc_lexer::TokenKind::Unknown => {
+ return Err(format!("Unknown token: `{}`", &s[range]));
+ }
+ rustc_lexer::TokenKind::UnknownPrefix => {
+ return Err(format!("Unknown prefix: `{}`", &s[range]));
+ }
+ rustc_lexer::TokenKind::UnknownPrefixLifetime => {
+ return Err(format!("Unknown lifetime prefix: `{}`", &s[range]));
+ }
+ // FIXME: Error on edition >= 2024 ... I don't think the proc-macro server can currently
+ // fetch editions, and it's unclear whose edition applies here.
+ rustc_lexer::TokenKind::GuardedStrPrefix => {
+ tokenstream.push(TokenTree::Punct(Punct {
+ ch: s.as_bytes()[range.start],
+ joint: true,
+ span: span.derive_ranged(range.start..range.start + 1),
+ }));
+ tokenstream.push(TokenTree::Punct(Punct {
+ ch: s.as_bytes()[range.start + 1],
+ joint: is_joint(),
+ span: span.derive_ranged(range.start + 1..range.end),
+ }))
+ }
+ rustc_lexer::TokenKind::Ident => tokenstream.push(TokenTree::Ident(Ident {
+ sym: Symbol::intern(&s[range.clone()]),
+ is_raw: false,
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::InvalidIdent => {
+ return Err(format!("Invalid identifier: `{}`", &s[range]));
+ }
+ rustc_lexer::TokenKind::RawIdent => {
+ let range = range.start + 2..range.end;
+ tokenstream.push(TokenTree::Ident(Ident {
+ sym: Symbol::intern(&s[range.clone()]),
+ is_raw: true,
+ span: span.derive_ranged(range),
+ }))
+ }
+ rustc_lexer::TokenKind::Literal { kind, suffix_start } => {
+ tokenstream.push(TokenTree::Literal(literal_from_lexer(
+ &s[range.clone()],
+ span.derive_ranged(range),
+ kind,
+ suffix_start,
+ )))
+ }
+ rustc_lexer::TokenKind::RawLifetime => {
+ let range = range.start + 1 + 2..range.end;
+ tokenstream.push(TokenTree::Punct(Punct {
+ ch: b'\'',
+ joint: true,
+ span: span.derive_ranged(range.start..range.start + 1),
+ }));
+ tokenstream.push(TokenTree::Ident(Ident {
+ sym: Symbol::intern(&s[range.clone()]),
+ is_raw: true,
+ span: span.derive_ranged(range),
+ }))
+ }
+ rustc_lexer::TokenKind::Lifetime { starts_with_number } => {
+ if starts_with_number {
+ return Err("Lifetime cannot start with a number".to_owned());
+ }
+ let range = range.start + 1..range.end;
+ tokenstream.push(TokenTree::Punct(Punct {
+ ch: b'\'',
+ joint: true,
+ span: span.derive_ranged(range.start..range.start + 1),
+ }));
+ tokenstream.push(TokenTree::Ident(Ident {
+ sym: Symbol::intern(&s[range.clone()]),
+ is_raw: false,
+ span: span.derive_ranged(range),
+ }))
+ }
+ rustc_lexer::TokenKind::Semi => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b';',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::Comma => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b',',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::Dot => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b'.',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::At => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b'@',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::Pound => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b'#',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::Tilde => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b'~',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::Question => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b'?',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::Colon => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b':',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::Dollar => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b'$',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::Eq => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b'=',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::Bang => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b'!',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::Lt => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b'<',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::Gt => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b'>',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::Minus => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b'-',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::And => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b'&',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::Or => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b'|',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::Plus => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b'+',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::Star => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b'*',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::Slash => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b'/',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::Caret => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b'^',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::Percent => tokenstream.push(TokenTree::Punct(Punct {
+ ch: b'%',
+ joint: is_joint(),
+ span: span.derive_ranged(range),
+ })),
+ rustc_lexer::TokenKind::Eof => break,
+ }
+ }
+ if let Some((Delimiter::None, _, tokentrees)) = groups.pop()
+ && groups.is_empty()
+ {
+ Ok(TokenStream::new(tokentrees))
+ } else {
+ Err("Mismatched token groups".to_owned())
+ }
+ }
+}
+
+impl<S> fmt::Display for TokenStream<S> {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ for tt in self.0.iter() {
+ display_token_tree(tt, f)?;
+ }
+ Ok(())
+ }
+}
+
+fn display_token_tree<S>(tt: &TokenTree<S>, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ match tt {
+ TokenTree::Group(Group { delimiter, stream, span: _ }) => {
+ write!(
+ f,
+ "{}",
+ match delimiter {
+ proc_macro::Delimiter::Parenthesis => "(",
+ proc_macro::Delimiter::Brace => "{",
+ proc_macro::Delimiter::Bracket => "[",
+ proc_macro::Delimiter::None => "",
+ }
+ )?;
+ if let Some(stream) = stream {
+ write!(f, "{stream}")?;
+ }
+ write!(
+ f,
+ "{}",
+ match delimiter {
+ proc_macro::Delimiter::Parenthesis => ")",
+ proc_macro::Delimiter::Brace => "}",
+ proc_macro::Delimiter::Bracket => "]",
+ proc_macro::Delimiter::None => "",
+ }
+ )?;
+ }
+ TokenTree::Punct(Punct { ch, joint, span: _ }) => {
+ write!(f, "{ch}{}", if *joint { "" } else { " " })?
+ }
+ TokenTree::Ident(Ident { sym, is_raw, span: _ }) => {
+ if *is_raw {
+ write!(f, "r#")?;
+ }
+ write!(f, "{sym} ")?;
+ }
+ TokenTree::Literal(lit) => {
+ display_fmt_literal(lit, f)?;
+ let joint = match lit.kind {
+ LitKind::Str
+ | LitKind::StrRaw(_)
+ | LitKind::ByteStr
+ | LitKind::ByteStrRaw(_)
+ | LitKind::CStr
+ | LitKind::CStrRaw(_) => true,
+ _ => false,
+ };
+ if !joint {
+ write!(f, " ")?;
+ }
+ }
+ }
+ Ok(())
+}
+
+pub fn literal_to_string<S>(literal: &Literal<S>) -> String {
+ let mut buf = String::new();
+ display_fmt_literal(literal, &mut buf).unwrap();
+ buf
+}
+
+fn display_fmt_literal<S>(literal: &Literal<S>, f: &mut impl std::fmt::Write) -> fmt::Result {
+ match literal.kind {
+ LitKind::Byte => write!(f, "b'{}'", literal.symbol),
+ LitKind::Char => write!(f, "'{}'", literal.symbol),
+ LitKind::Integer | LitKind::Float | LitKind::ErrWithGuar => {
+ write!(f, "{}", literal.symbol)
+ }
+ LitKind::Str => write!(f, "\"{}\"", literal.symbol),
+ LitKind::ByteStr => write!(f, "b\"{}\"", literal.symbol),
+ LitKind::CStr => write!(f, "c\"{}\"", literal.symbol),
+ LitKind::StrRaw(num_of_hashes) => {
+ let num_of_hashes = num_of_hashes as usize;
+ write!(
+ f,
+ r#"r{0:#<num_of_hashes$}"{text}"{0:#<num_of_hashes$}"#,
+ "",
+ text = literal.symbol
+ )
+ }
+ LitKind::ByteStrRaw(num_of_hashes) => {
+ let num_of_hashes = num_of_hashes as usize;
+ write!(
+ f,
+ r#"br{0:#<num_of_hashes$}"{text}"{0:#<num_of_hashes$}"#,
+ "",
+ text = literal.symbol
+ )
+ }
+ LitKind::CStrRaw(num_of_hashes) => {
+ let num_of_hashes = num_of_hashes as usize;
+ write!(
+ f,
+ r#"cr{0:#<num_of_hashes$}"{text}"{0:#<num_of_hashes$}"#,
+ "",
+ text = literal.symbol
+ )
+ }
+ }?;
+ if let Some(suffix) = &literal.suffix {
+ write!(f, "{suffix}")?;
+ }
+ Ok(())
+}
+
+impl<S: fmt::Debug> fmt::Debug for TokenStream<S> {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ debug_token_stream(self, 0, f)
+ }
+}
+
+fn debug_token_stream<S: fmt::Debug>(
+ ts: &TokenStream<S>,
+ depth: usize,
+ f: &mut std::fmt::Formatter<'_>,
+) -> std::fmt::Result {
+ for tt in ts.0.iter() {
+ debug_token_tree(tt, depth, f)?;
+ }
+ Ok(())
+}
+
+fn debug_token_tree<S: fmt::Debug>(
+ tt: &TokenTree<S>,
+ depth: usize,
+ f: &mut std::fmt::Formatter<'_>,
+) -> std::fmt::Result {
+ write!(f, "{:indent$}", "", indent = depth * 2)?;
+ match tt {
+ TokenTree::Group(Group { delimiter, stream, span }) => {
+ writeln!(
+ f,
+ "GROUP {}{} {:#?} {:#?} {:#?}",
+ match delimiter {
+ proc_macro::Delimiter::Parenthesis => "(",
+ proc_macro::Delimiter::Brace => "{",
+ proc_macro::Delimiter::Bracket => "[",
+ proc_macro::Delimiter::None => "$",
+ },
+ match delimiter {
+ proc_macro::Delimiter::Parenthesis => ")",
+ proc_macro::Delimiter::Brace => "}",
+ proc_macro::Delimiter::Bracket => "]",
+ proc_macro::Delimiter::None => "$",
+ },
+ span.open,
+ span.close,
+ span.entire,
+ )?;
+ if let Some(stream) = stream {
+ debug_token_stream(stream, depth + 1, f)?;
+ }
+ return Ok(());
+ }
+ TokenTree::Punct(Punct { ch, joint, span }) => write!(
+ f,
+ "PUNCT {span:#?} {} {}",
+ *ch as char,
+ if *joint { "[joint]" } else { "[alone]" }
+ )?,
+ TokenTree::Ident(Ident { sym, is_raw, span }) => {
+ write!(f, "IDENT {span:#?} ")?;
+ if *is_raw {
+ write!(f, "r#")?;
+ }
+ write!(f, "{sym}")?;
+ }
+ TokenTree::Literal(Literal { kind, symbol, suffix, span }) => write!(
+ f,
+ "LITER {span:#?} {kind:?} {symbol}{}",
+ match suffix {
+ Some(suffix) => suffix.clone(),
+ None => Symbol::intern(""),
+ }
+ )?,
+ }
+ writeln!(f)
+}
+
+impl<S: Copy> TokenStream<S> {
+ /// Push `tt` onto the end of the stream, possibly gluing it to the last
+ /// token. Uses `make_mut` to maximize efficiency.
+ pub(crate) fn push_tree(&mut self, tt: TokenTree<S>) {
+ let vec_mut = Arc::make_mut(&mut self.0);
+ vec_mut.push(tt);
+ }
+
+ /// Push `stream` onto the end of the stream, possibly gluing the first
+ /// token tree to the last token. (No other token trees will be glued.)
+ /// Uses `make_mut` to maximize efficiency.
+ pub(crate) fn push_stream(&mut self, stream: TokenStream<S>) {
+ let vec_mut = Arc::make_mut(&mut self.0);
+
+ let stream_iter = stream.0.iter().cloned();
+
+ vec_mut.extend(stream_iter);
+ }
+}
+
+impl<S> FromIterator<TokenTree<S>> for TokenStream<S> {
+ fn from_iter<I: IntoIterator<Item = TokenTree<S>>>(iter: I) -> Self {
+ TokenStream::new(iter.into_iter().collect::<Vec<TokenTree<S>>>())
+ }
+}
+
+#[derive(Clone)]
+pub struct TokenStreamIter<'t, S> {
+ stream: &'t TokenStream<S>,
+ index: usize,
+}
+
+impl<'t, S> TokenStreamIter<'t, S> {
+ fn new(stream: &'t TokenStream<S>) -> Self {
+ TokenStreamIter { stream, index: 0 }
+ }
+}
+
+impl<'t, S> Iterator for TokenStreamIter<'t, S> {
+ type Item = &'t TokenTree<S>;
+
+ fn next(&mut self) -> Option<&'t TokenTree<S>> {
+ self.stream.0.get(self.index).map(|tree| {
+ self.index += 1;
+ tree
+ })
+ }
+}
+
+pub(super) fn literal_from_lexer<Span>(
+ s: &str,
+ span: Span,
+ kind: rustc_lexer::LiteralKind,
+ suffix_start: u32,
+) -> Literal<Span> {
+ let (kind, start_offset, end_offset) = match kind {
+ LiteralKind::Int { .. } => (LitKind::Integer, 0, 0),
+ LiteralKind::Float { .. } => (LitKind::Float, 0, 0),
+ LiteralKind::Char { terminated } => (LitKind::Char, 1, terminated as usize),
+ LiteralKind::Byte { terminated } => (LitKind::Byte, 2, terminated as usize),
+ LiteralKind::Str { terminated } => (LitKind::Str, 1, terminated as usize),
+ LiteralKind::ByteStr { terminated } => (LitKind::ByteStr, 2, terminated as usize),
+ LiteralKind::CStr { terminated } => (LitKind::CStr, 2, terminated as usize),
+ LiteralKind::RawStr { n_hashes } => (
+ LitKind::StrRaw(n_hashes.unwrap_or_default()),
+ 2 + n_hashes.unwrap_or_default() as usize,
+ 1 + n_hashes.unwrap_or_default() as usize,
+ ),
+ LiteralKind::RawByteStr { n_hashes } => (
+ LitKind::ByteStrRaw(n_hashes.unwrap_or_default()),
+ 3 + n_hashes.unwrap_or_default() as usize,
+ 1 + n_hashes.unwrap_or_default() as usize,
+ ),
+ LiteralKind::RawCStr { n_hashes } => (
+ LitKind::CStrRaw(n_hashes.unwrap_or_default()),
+ 3 + n_hashes.unwrap_or_default() as usize,
+ 1 + n_hashes.unwrap_or_default() as usize,
+ ),
+ };
+
+ let (lit, suffix) = s.split_at(suffix_start as usize);
+ let lit = &lit[start_offset..lit.len() - end_offset];
+ let suffix = match suffix {
+ "" | "_" => None,
+ suffix => Some(Symbol::intern(suffix)),
+ };
+
+ Literal { kind, symbol: Symbol::intern(lit), suffix, span }
+}
+
+impl SpanLike for crate::SpanId {
+ fn derive_ranged(&self, _: std::ops::Range<usize>) -> Self {
+ *self
+ }
+}
+
+impl SpanLike for () {
+ fn derive_ranged(&self, _: std::ops::Range<usize>) -> Self {
+ *self
+ }
+}
+
+impl SpanLike for crate::Span {
+ fn derive_ranged(&self, range: std::ops::Range<usize>) -> Self {
+ crate::Span {
+ range: span::TextRange::new(
+ span::TextSize::new(range.start as u32),
+ span::TextSize::new(range.end as u32),
+ ),
+ anchor: self.anchor,
+ ctx: self.ctx,
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn roundtrip() {
+ let token_stream = TokenStream::from_str("struct T {\"string\"}", ()).unwrap();
+ token_stream.to_string();
+ assert_eq!(token_stream.to_string(), "struct T {\"string\"}");
+ }
+}
diff --git a/crates/project-model/Cargo.toml b/crates/project-model/Cargo.toml
index 0dbb309..7e0b1f7 100644
--- a/crates/project-model/Cargo.toml
+++ b/crates/project-model/Cargo.toml
@@ -21,7 +21,8 @@
serde.workspace = true
serde_derive.workspace = true
temp-dir.workspace = true
-tracing.workspace = true
+toml.workspace = true
+tracing = { workspace = true, features = ["attributes"] }
triomphe.workspace = true
la-arena.workspace = true
itertools.workspace = true
diff --git a/crates/project-model/src/cargo_config_file.rs b/crates/project-model/src/cargo_config_file.rs
index a1e7ed0..5d6e5fd 100644
--- a/crates/project-model/src/cargo_config_file.rs
+++ b/crates/project-model/src/cargo_config_file.rs
@@ -1,37 +1,135 @@
-//! Read `.cargo/config.toml` as a JSON object
-use paths::{Utf8Path, Utf8PathBuf};
+//! Read `.cargo/config.toml` as a TOML table
+use paths::{AbsPath, Utf8Path, Utf8PathBuf};
use rustc_hash::FxHashMap;
+use toml::{
+ Spanned,
+ de::{DeTable, DeValue},
+};
use toolchain::Tool;
use crate::{ManifestPath, Sysroot, utf8_stdout};
-pub(crate) type CargoConfigFile = serde_json::Map<String, serde_json::Value>;
+#[derive(Clone)]
+pub struct CargoConfigFile(String);
-pub(crate) fn read(
- manifest: &ManifestPath,
- extra_env: &FxHashMap<String, Option<String>>,
- sysroot: &Sysroot,
-) -> Option<CargoConfigFile> {
- let mut cargo_config = sysroot.tool(Tool::Cargo, manifest.parent(), extra_env);
- cargo_config
- .args(["-Z", "unstable-options", "config", "get", "--format", "json"])
- .env("RUSTC_BOOTSTRAP", "1");
- if manifest.is_rust_manifest() {
- cargo_config.arg("-Zscript");
+impl CargoConfigFile {
+ pub(crate) fn load(
+ manifest: &ManifestPath,
+ extra_env: &FxHashMap<String, Option<String>>,
+ sysroot: &Sysroot,
+ ) -> Option<Self> {
+ let mut cargo_config = sysroot.tool(Tool::Cargo, manifest.parent(), extra_env);
+ cargo_config
+ .args(["-Z", "unstable-options", "config", "get", "--format", "toml", "--show-origin"])
+ .env("RUSTC_BOOTSTRAP", "1");
+ if manifest.is_rust_manifest() {
+ cargo_config.arg("-Zscript");
+ }
+
+ tracing::debug!("Discovering cargo config by {cargo_config:?}");
+ utf8_stdout(&mut cargo_config)
+ .inspect(|toml| {
+ tracing::debug!("Discovered cargo config: {toml:?}");
+ })
+ .inspect_err(|err| {
+ tracing::debug!("Failed to discover cargo config: {err:?}");
+ })
+ .ok()
+ .map(CargoConfigFile)
}
- tracing::debug!("Discovering cargo config by {:?}", cargo_config);
- let json: serde_json::Map<String, serde_json::Value> = utf8_stdout(&mut cargo_config)
- .inspect(|json| {
- tracing::debug!("Discovered cargo config: {:?}", json);
- })
- .inspect_err(|err| {
- tracing::debug!("Failed to discover cargo config: {:?}", err);
- })
- .ok()
- .and_then(|stdout| serde_json::from_str(&stdout).ok())?;
+ pub(crate) fn read<'a>(&'a self) -> Option<CargoConfigFileReader<'a>> {
+ CargoConfigFileReader::new(&self.0)
+ }
- Some(json)
+ #[cfg(test)]
+ pub(crate) fn from_string_for_test(s: String) -> Self {
+ CargoConfigFile(s)
+ }
+}
+
+pub(crate) struct CargoConfigFileReader<'a> {
+ toml_str: &'a str,
+ line_ends: Vec<usize>,
+ table: Spanned<DeTable<'a>>,
+}
+
+impl<'a> CargoConfigFileReader<'a> {
+ fn new(toml_str: &'a str) -> Option<Self> {
+ let toml = DeTable::parse(toml_str)
+ .inspect_err(|err| tracing::debug!("Failed to parse cargo config into toml: {err:?}"))
+ .ok()?;
+ let mut last_line_end = 0;
+ let line_ends = toml_str
+ .lines()
+ .map(|l| {
+ last_line_end += l.len() + 1;
+ last_line_end
+ })
+ .collect();
+
+ Some(CargoConfigFileReader { toml_str, table: toml, line_ends })
+ }
+
+ pub(crate) fn get_spanned(
+ &self,
+ accessor: impl IntoIterator<Item = &'a str>,
+ ) -> Option<&Spanned<DeValue<'a>>> {
+ let mut keys = accessor.into_iter();
+ let mut val = self.table.get_ref().get(keys.next()?)?;
+ for key in keys {
+ let DeValue::Table(map) = val.get_ref() else { return None };
+ val = map.get(key)?;
+ }
+ Some(val)
+ }
+
+ pub(crate) fn get(&self, accessor: impl IntoIterator<Item = &'a str>) -> Option<&DeValue<'a>> {
+ self.get_spanned(accessor).map(|it| it.as_ref())
+ }
+
+ pub(crate) fn get_origin_root(&self, spanned: &Spanned<DeValue<'a>>) -> Option<&AbsPath> {
+ let span = spanned.span();
+
+ for &line_end in &self.line_ends {
+ if line_end < span.end {
+ continue;
+ }
+
+ let after_span = &self.toml_str[span.end..line_end];
+
+ // table.key = "value" # /parent/.cargo/config.toml
+ // | |
+ // span.end line_end
+ let origin_path = after_span
+ .strip_prefix([',']) // strip trailing comma
+ .unwrap_or(after_span)
+ .trim_start()
+ .strip_prefix(['#'])
+ .and_then(|path| {
+ let path = path.trim();
+ if path.starts_with("environment variable")
+ || path.starts_with("--config cli option")
+ {
+ None
+ } else {
+ Some(path)
+ }
+ });
+
+ return origin_path.and_then(|path| {
+ <&Utf8Path>::from(path)
+ .try_into()
+ .ok()
+                    // Two levels up from the config file's path (config.toml -> .cargo -> parent).
+ // See https://doc.rust-lang.org/cargo/reference/config.html#config-relative-paths
+ .and_then(AbsPath::parent)
+ .and_then(AbsPath::parent)
+ });
+ }
+
+ None
+ }
}
pub(crate) fn make_lockfile_copy(
@@ -54,3 +152,74 @@
}
}
}
+
+#[test]
+fn cargo_config_file_reader_works() {
+ #[cfg(target_os = "windows")]
+ let root = "C://ROOT";
+
+ #[cfg(not(target_os = "windows"))]
+ let root = "/ROOT";
+
+ let toml = format!(
+ r##"
+alias.foo = "abc"
+alias.bar = "🙂" # {root}/home/.cargo/config.toml
+alias.sub-example = [
+ "sub", # {root}/foo/.cargo/config.toml
+ "example", # {root}/❤️💛💙/💝/.cargo/config.toml
+]
+build.rustflags = [
+ "--flag", # {root}/home/.cargo/config.toml
+ "env", # environment variable `CARGO_BUILD_RUSTFLAGS`
+ "cli", # --config cli option
+]
+env.CARGO_WORKSPACE_DIR.relative = true # {root}/home/.cargo/config.toml
+env.CARGO_WORKSPACE_DIR.value = "" # {root}/home/.cargo/config.toml
+"##
+ );
+
+ let reader = CargoConfigFileReader::new(&toml).unwrap();
+
+ let alias_foo = reader.get_spanned(["alias", "foo"]).unwrap();
+ assert_eq!(alias_foo.as_ref().as_str().unwrap(), "abc");
+ assert!(reader.get_origin_root(alias_foo).is_none());
+
+ let alias_bar = reader.get_spanned(["alias", "bar"]).unwrap();
+ assert_eq!(alias_bar.as_ref().as_str().unwrap(), "🙂");
+ assert_eq!(reader.get_origin_root(alias_bar).unwrap().as_str(), format!("{root}/home"));
+
+ let alias_sub_example = reader.get_spanned(["alias", "sub-example"]).unwrap();
+ assert!(reader.get_origin_root(alias_sub_example).is_none());
+ let alias_sub_example = alias_sub_example.as_ref().as_array().unwrap();
+
+ assert_eq!(alias_sub_example[0].get_ref().as_str().unwrap(), "sub");
+ assert_eq!(
+ reader.get_origin_root(&alias_sub_example[0]).unwrap().as_str(),
+ format!("{root}/foo")
+ );
+
+ assert_eq!(alias_sub_example[1].get_ref().as_str().unwrap(), "example");
+ assert_eq!(
+ reader.get_origin_root(&alias_sub_example[1]).unwrap().as_str(),
+ format!("{root}/❤️💛💙/💝")
+ );
+
+ let build_rustflags = reader.get(["build", "rustflags"]).unwrap().as_array().unwrap();
+ assert_eq!(
+ reader.get_origin_root(&build_rustflags[0]).unwrap().as_str(),
+ format!("{root}/home")
+ );
+ assert!(reader.get_origin_root(&build_rustflags[1]).is_none());
+ assert!(reader.get_origin_root(&build_rustflags[2]).is_none());
+
+ let env_cargo_workspace_dir =
+ reader.get(["env", "CARGO_WORKSPACE_DIR"]).unwrap().as_table().unwrap();
+ let env_relative = &env_cargo_workspace_dir["relative"];
+ assert!(env_relative.as_ref().as_bool().unwrap());
+ assert_eq!(reader.get_origin_root(env_relative).unwrap().as_str(), format!("{root}/home"));
+
+ let env_val = &env_cargo_workspace_dir["value"];
+ assert_eq!(env_val.as_ref().as_str().unwrap(), "");
+ assert_eq!(reader.get_origin_root(env_val).unwrap().as_str(), format!("{root}/home"));
+}
diff --git a/crates/project-model/src/env.rs b/crates/project-model/src/env.rs
index ae0458a..51c4479 100644
--- a/crates/project-model/src/env.rs
+++ b/crates/project-model/src/env.rs
@@ -2,9 +2,8 @@
use base_db::Env;
use paths::Utf8Path;
use rustc_hash::FxHashMap;
-use toolchain::Tool;
-use crate::{ManifestPath, PackageData, TargetKind, cargo_config_file::CargoConfigFile};
+use crate::{PackageData, TargetKind, cargo_config_file::CargoConfigFile};
/// Recreates the compile-time environment variables that Cargo sets.
///
@@ -48,8 +47,8 @@
);
}
-pub(crate) fn inject_cargo_env(env: &mut Env) {
- env.set("CARGO", Tool::Cargo.path().to_string());
+pub(crate) fn inject_cargo_env(env: &mut Env, cargo_path: &Utf8Path) {
+ env.set("CARGO", cargo_path.as_str());
}
pub(crate) fn inject_rustc_tool_env(env: &mut Env, cargo_name: &str, kind: TargetKind) {
@@ -62,46 +61,48 @@
}
pub(crate) fn cargo_config_env(
- manifest: &ManifestPath,
config: &Option<CargoConfigFile>,
extra_env: &FxHashMap<String, Option<String>>,
) -> Env {
+ use toml::de::*;
+
let mut env = Env::default();
env.extend(extra_env.iter().filter_map(|(k, v)| v.as_ref().map(|v| (k.clone(), v.clone()))));
- let Some(serde_json::Value::Object(env_json)) = config.as_ref().and_then(|c| c.get("env"))
- else {
+ let Some(config_reader) = config.as_ref().and_then(|c| c.read()) else {
+ return env;
+ };
+ let Some(env_toml) = config_reader.get(["env"]).and_then(|it| it.as_table()) else {
return env;
};
- // FIXME: The base here should be the parent of the `.cargo/config` file, not the manifest.
- // But cargo does not provide this information.
- let base = <_ as AsRef<Utf8Path>>::as_ref(manifest.parent());
-
- for (key, entry) in env_json {
- let value = match entry {
- serde_json::Value::String(s) => s.clone(),
- serde_json::Value::Object(entry) => {
+ for (key, entry) in env_toml {
+ let key = key.as_ref().as_ref();
+ let value = match entry.as_ref() {
+ DeValue::String(s) => String::from(s.clone()),
+ DeValue::Table(entry) => {
// Each entry MUST have a `value` key.
- let Some(value) = entry.get("value").and_then(|v| v.as_str()) else {
+ let Some(map) = entry.get("value").and_then(|v| v.as_ref().as_str()) else {
continue;
};
// If the entry already exists in the environment AND the `force` key is not set to
// true, then don't overwrite the value.
if extra_env.get(key).is_some_and(Option::is_some)
- && !entry.get("force").and_then(|v| v.as_bool()).unwrap_or(false)
+ && !entry.get("force").and_then(|v| v.as_ref().as_bool()).unwrap_or(false)
{
continue;
}
- if entry
- .get("relative")
- .and_then(|v| v.as_bool())
- .is_some_and(std::convert::identity)
- {
- base.join(value).to_string()
+ if let Some(base) = entry.get("relative").and_then(|v| {
+ if v.as_ref().as_bool().is_some_and(std::convert::identity) {
+ config_reader.get_origin_root(v)
+ } else {
+ None
+ }
+ }) {
+ base.join(map).to_string()
} else {
- value.to_owned()
+ map.to_owned()
}
}
_ => continue,
@@ -115,43 +116,30 @@
#[test]
fn parse_output_cargo_config_env_works() {
+ use itertools::Itertools;
+
+ let cwd = paths::AbsPathBuf::try_from(
+ paths::Utf8PathBuf::try_from(std::env::current_dir().unwrap()).unwrap(),
+ )
+ .unwrap();
+ let config_path = cwd.join(".cargo").join("config.toml");
let raw = r#"
-{
- "env": {
- "CARGO_WORKSPACE_DIR": {
- "relative": true,
- "value": ""
- },
- "INVALID": {
- "relative": "invalidbool",
- "value": "../relative"
- },
- "RELATIVE": {
- "relative": true,
- "value": "../relative"
- },
- "TEST": {
- "value": "test"
- },
- "FORCED": {
- "value": "test",
- "force": true
- },
- "UNFORCED": {
- "value": "test",
- "force": false
- },
- "OVERWRITTEN": {
- "value": "test"
- },
- "NOT_AN_OBJECT": "value"
- }
-}
+env.CARGO_WORKSPACE_DIR.relative = true
+env.CARGO_WORKSPACE_DIR.value = ""
+env.INVALID.relative = "invalidbool"
+env.INVALID.value = "../relative"
+env.RELATIVE.relative = true
+env.RELATIVE.value = "../relative"
+env.TEST.value = "test"
+env.FORCED.value = "test"
+env.FORCED.force = true
+env.UNFORCED.value = "test"
+env.UNFORCED.force = false
+env.OVERWRITTEN.value = "test"
+env.NOT_AN_OBJECT = "value"
"#;
- let config: CargoConfigFile = serde_json::from_str(raw).unwrap();
- let cwd = paths::Utf8PathBuf::try_from(std::env::current_dir().unwrap()).unwrap();
- let manifest = paths::AbsPathBuf::assert(cwd.join("Cargo.toml"));
- let manifest = ManifestPath::try_from(manifest).unwrap();
+ let raw = raw.lines().map(|l| format!("{l} # {config_path}")).join("\n");
+ let config = CargoConfigFile::from_string_for_test(raw);
let extra_env = [
("FORCED", Some("ignored")),
("UNFORCED", Some("newvalue")),
@@ -161,7 +149,7 @@
.iter()
.map(|(k, v)| (k.to_string(), v.map(ToString::to_string)))
.collect();
- let env = cargo_config_env(&manifest, &Some(config), &extra_env);
+ let env = cargo_config_env(&Some(config), &extra_env);
assert_eq!(env.get("CARGO_WORKSPACE_DIR").as_deref(), Some(cwd.join("").as_str()));
assert_eq!(env.get("RELATIVE").as_deref(), Some(cwd.join("../relative").as_str()));
assert_eq!(env.get("INVALID").as_deref(), Some("../relative"));
diff --git a/crates/project-model/src/sysroot.rs b/crates/project-model/src/sysroot.rs
index 920afe6..f244c97 100644
--- a/crates/project-model/src/sysroot.rs
+++ b/crates/project-model/src/sysroot.rs
@@ -8,6 +8,7 @@
use std::{env, fs, ops::Not, path::Path, process::Command};
use anyhow::{Result, format_err};
+use base_db::Env;
use itertools::Itertools;
use paths::{AbsPath, AbsPathBuf, Utf8PathBuf};
use rustc_hash::FxHashMap;
@@ -172,6 +173,36 @@
}
}
+ pub fn tool_path(&self, tool: Tool, current_dir: impl AsRef<Path>, envs: &Env) -> Utf8PathBuf {
+ match self.root() {
+ Some(root) => {
+ let mut cmd = toolchain::command(
+ Tool::Rustup.path(),
+ current_dir,
+ &envs
+ .into_iter()
+ .map(|(k, v)| (k.clone(), Some(v.clone())))
+ .collect::<FxHashMap<_, _>>(),
+ );
+ if !envs.contains_key("RUSTUP_TOOLCHAIN")
+ && std::env::var_os("RUSTUP_TOOLCHAIN").is_none()
+ {
+ cmd.env("RUSTUP_TOOLCHAIN", AsRef::<std::path::Path>::as_ref(root));
+ }
+
+ cmd.arg("which");
+ cmd.arg(tool.name());
+ (|| {
+ Some(Utf8PathBuf::from(
+ String::from_utf8(cmd.output().ok()?.stdout).ok()?.trim_end(),
+ ))
+ })()
+ .unwrap_or_else(|| Utf8PathBuf::from(tool.name()))
+ }
+ _ => tool.path(),
+ }
+ }
+
pub fn discover_proc_macro_srv(&self) -> Option<anyhow::Result<AbsPathBuf>> {
let root = self.root()?;
Some(
diff --git a/crates/project-model/src/toolchain_info/target_tuple.rs b/crates/project-model/src/toolchain_info/target_tuple.rs
index 9f12ede..12c64b5 100644
--- a/crates/project-model/src/toolchain_info/target_tuple.rs
+++ b/crates/project-model/src/toolchain_info/target_tuple.rs
@@ -53,7 +53,7 @@
}
fn cargo_config_build_target(config: &CargoConfigFile) -> Option<Vec<String>> {
- match parse_json_cargo_config_build_target(config) {
+ match parse_toml_cargo_config_build_target(config) {
Ok(v) => v,
Err(e) => {
tracing::debug!("Failed to discover cargo config build target {e:?}");
@@ -63,18 +63,44 @@
}
// Parses `"build.target = [target-tuple, target-tuple, ...]"` or `"build.target = "target-tuple"`
-fn parse_json_cargo_config_build_target(
+fn parse_toml_cargo_config_build_target(
config: &CargoConfigFile,
) -> anyhow::Result<Option<Vec<String>>> {
- let target = config.get("build").and_then(|v| v.as_object()).and_then(|m| m.get("target"));
- match target {
- Some(serde_json::Value::String(s)) => Ok(Some(vec![s.to_owned()])),
- Some(v) => serde_json::from_value(v.clone())
- .map(Option::Some)
- .context("Failed to parse `build.target` as an array of target"),
- // t`error: config value `build.target` is not set`, in which case we
- // don't wanna log the error
- None => Ok(None),
+ let Some(config_reader) = config.read() else {
+ return Ok(None);
+ };
+ let Some(target) = config_reader.get_spanned(["build", "target"]) else {
+ return Ok(None);
+ };
+
+ // if the target ends with `.json`, join it to the config file's parent dir.
+ // See https://github.com/rust-lang/cargo/blob/f7acf448fc127df9a77c52cc2bba027790ac4931/src/cargo/core/compiler/compile_kind.rs#L171-L192
+ let join_to_origin_if_json_path = |s: &str, spanned: &toml::Spanned<toml::de::DeValue<'_>>| {
+ if s.ends_with(".json") {
+ config_reader
+ .get_origin_root(spanned)
+ .map(|p| p.join(s).to_string())
+ .unwrap_or_else(|| s.to_owned())
+ } else {
+ s.to_owned()
+ }
+ };
+
+ let parse_err = "Failed to parse `build.target` as an array of target";
+
+ match target.as_ref() {
+ toml::de::DeValue::String(s) => {
+ Ok(Some(vec![join_to_origin_if_json_path(s.as_ref(), target)]))
+ }
+ toml::de::DeValue::Array(arr) => arr
+ .iter()
+ .map(|v| {
+ let s = v.as_ref().as_str().context(parse_err)?;
+ Ok(join_to_origin_if_json_path(s, v))
+ })
+ .collect::<anyhow::Result<_>>()
+ .map(Option::Some),
+ _ => Err(anyhow::anyhow!(parse_err)),
}
}
diff --git a/crates/project-model/src/workspace.rs b/crates/project-model/src/workspace.rs
index aa2e159..e02891e 100644
--- a/crates/project-model/src/workspace.rs
+++ b/crates/project-model/src/workspace.rs
@@ -2,6 +2,7 @@
//! metadata` or `rust-project.json`) into representation stored in the salsa
//! database -- `CrateGraph`.
+use std::thread::Builder;
use std::{collections::VecDeque, fmt, fs, iter, ops::Deref, sync, thread};
use anyhow::Context;
@@ -12,7 +13,7 @@
};
use cfg::{CfgAtom, CfgDiff, CfgOptions};
use intern::{Symbol, sym};
-use paths::{AbsPath, AbsPathBuf, Utf8PathBuf};
+use paths::{AbsPath, AbsPathBuf, Utf8Path, Utf8PathBuf};
use rustc_hash::{FxHashMap, FxHashSet};
use semver::Version;
use span::{Edition, FileId};
@@ -26,7 +27,7 @@
ProjectJson, ProjectManifest, RustSourceWorkspaceConfig, Sysroot, TargetData, TargetKind,
WorkspaceBuildScripts,
build_dependencies::{BuildScriptOutput, ProcMacroDylibPath},
- cargo_config_file,
+ cargo_config_file::CargoConfigFile,
cargo_workspace::{CargoMetadataConfig, DepKind, FetchMetadata, PackageData, RustLibSource},
env::{cargo_config_env, inject_cargo_env, inject_cargo_package_env, inject_rustc_tool_env},
project_json::{Crate, CrateArrayIdx},
@@ -267,7 +268,7 @@
tracing::info!(workspace = %cargo_toml, src_root = ?sysroot.rust_lib_src_root(), root = ?sysroot.root(), "Using sysroot");
progress("querying project metadata".to_owned());
- let config_file = cargo_config_file::read(cargo_toml, extra_env, &sysroot);
+ let config_file = CargoConfigFile::load(cargo_toml, extra_env, &sysroot);
let config_file_ = config_file.clone();
let toolchain_config = QueryConfig::Cargo(&sysroot, cargo_toml, &config_file_);
let targets =
@@ -301,31 +302,39 @@
// We can speed up loading a bit by spawning all of these processes in parallel (especially
// on systems were process spawning is delayed)
let join = thread::scope(|s| {
- let rustc_cfg = s.spawn(|| {
- rustc_cfg::get(toolchain_config, targets.first().map(Deref::deref), extra_env)
- });
- let target_data = s.spawn(|| {
- target_data::get(
- toolchain_config,
- targets.first().map(Deref::deref),
- extra_env,
- ).inspect_err(|e| {
- tracing::error!(%e, "failed fetching data layout for {cargo_toml:?} workspace")
+ let rustc_cfg = Builder::new()
+ .name("ProjectWorkspace::rustc_cfg".to_owned())
+ .spawn_scoped(s, || {
+ rustc_cfg::get(toolchain_config, targets.first().map(Deref::deref), extra_env)
})
- });
-
- let rustc_dir = s.spawn(|| {
- let rustc_dir = match rustc_source {
- Some(RustLibSource::Path(path)) => ManifestPath::try_from(path.clone())
- .map_err(|p| Some(format!("rustc source path is not absolute: {p}"))),
- Some(RustLibSource::Discover) => {
- sysroot.discover_rustc_src().ok_or_else(|| {
- Some("Failed to discover rustc source for sysroot.".to_owned())
+ .expect("failed to spawn thread");
+ let target_data = Builder::new()
+ .name("ProjectWorkspace::target_data".to_owned())
+ .spawn_scoped(s, || {
+ target_data::get(toolchain_config, targets.first().map(Deref::deref), extra_env)
+ .inspect_err(|e| {
+ tracing::error!(%e,
+ "failed fetching data layout for \
+ {cargo_toml:?} workspace"
+ )
})
- }
- None => Err(None),
- };
- rustc_dir.and_then(|rustc_dir| {
+ })
+ .expect("failed to spawn thread");
+
+ let rustc_dir = Builder::new()
+ .name("ProjectWorkspace::rustc_dir".to_owned())
+ .spawn_scoped(s, || {
+ let rustc_dir = match rustc_source {
+ Some(RustLibSource::Path(path)) => ManifestPath::try_from(path.clone())
+ .map_err(|p| Some(format!("rustc source path is not absolute: {p}"))),
+ Some(RustLibSource::Discover) => {
+ sysroot.discover_rustc_src().ok_or_else(|| {
+ Some("Failed to discover rustc source for sysroot.".to_owned())
+ })
+ }
+ None => Err(None),
+ };
+ rustc_dir.and_then(|rustc_dir| {
info!(workspace = %cargo_toml, rustc_dir = %rustc_dir, "Using rustc source");
match FetchMetadata::new(
&rustc_dir,
@@ -359,31 +368,41 @@
Err(e) => {
tracing::error!(
%e,
- "Failed to read Cargo metadata from rustc source at {rustc_dir}",
+ "Failed to read Cargo metadata from rustc source \
+ at {rustc_dir}",
);
Err(Some(format!(
- "Failed to read Cargo metadata from rustc source at {rustc_dir}: {e}"
+ "Failed to read Cargo metadata from rustc source \
+ at {rustc_dir}: {e}"
)))
}
}
})
- });
+ })
+ .expect("failed to spawn thread");
- let cargo_metadata = s.spawn(|| fetch_metadata.exec(false, progress));
- let loaded_sysroot = s.spawn(|| {
- sysroot.load_workspace(
- &RustSourceWorkspaceConfig::CargoMetadata(sysroot_metadata_config(
- config,
- workspace_dir,
- &targets,
- toolchain.clone(),
- )),
- config.no_deps,
- progress,
- )
- });
- let cargo_env =
- s.spawn(move || cargo_config_env(cargo_toml, &config_file, &config.extra_env));
+ let cargo_metadata = Builder::new()
+ .name("ProjectWorkspace::cargo_metadata".to_owned())
+ .spawn_scoped(s, || fetch_metadata.exec(false, progress))
+ .expect("failed to spawn thread");
+ let loaded_sysroot = Builder::new()
+ .name("ProjectWorkspace::loaded_sysroot".to_owned())
+ .spawn_scoped(s, || {
+ sysroot.load_workspace(
+ &RustSourceWorkspaceConfig::CargoMetadata(sysroot_metadata_config(
+ config,
+ &targets,
+ toolchain.clone(),
+ )),
+ config.no_deps,
+ progress,
+ )
+ })
+ .expect("failed to spawn thread");
+ let cargo_env = Builder::new()
+ .name("ProjectWorkspace::cargo_env".to_owned())
+ .spawn_scoped(s, move || cargo_config_env(&config_file, &config.extra_env))
+ .expect("failed to spawn thread");
thread::Result::Ok((
rustc_cfg.join()?,
target_data.join()?,
@@ -481,7 +500,6 @@
sysroot.load_workspace(
&RustSourceWorkspaceConfig::CargoMetadata(sysroot_metadata_config(
config,
- project_json.project_root(),
&targets,
toolchain.clone(),
)),
@@ -526,7 +544,7 @@
None => Sysroot::empty(),
};
- let config_file = cargo_config_file::read(detached_file, &config.extra_env, &sysroot);
+ let config_file = CargoConfigFile::load(detached_file, &config.extra_env, &sysroot);
let query_config = QueryConfig::Cargo(&sysroot, detached_file, &config_file);
let toolchain = version::get(query_config, &config.extra_env).ok().flatten();
let targets = target_tuple::get(query_config, config.target.as_deref(), &config.extra_env)
@@ -537,7 +555,6 @@
let loaded_sysroot = sysroot.load_workspace(
&RustSourceWorkspaceConfig::CargoMetadata(sysroot_metadata_config(
config,
- dir,
&targets,
toolchain.clone(),
)),
@@ -563,8 +580,7 @@
config.no_deps,
);
let cargo_script = fetch_metadata.exec(false, &|_| ()).ok().map(|(ws, error)| {
- let cargo_config_extra_env =
- cargo_config_env(detached_file, &config_file, &config.extra_env);
+ let cargo_config_extra_env = cargo_config_env(&config_file, &config.extra_env);
(
CargoWorkspace::new(ws, detached_file.clone(), cargo_config_extra_env, false),
WorkspaceBuildScripts::default(),
@@ -803,10 +819,16 @@
// [lib]
// path = "../../src/lib.rs"
// ```
+ //
+ // or
+ //
+ // ```toml
+ // [[bin]]
+ // path = "../bin_folder/main.rs"
+ // ```
let extra_targets = cargo[pkg]
.targets
.iter()
- .filter(|&&tgt| matches!(cargo[tgt].kind, TargetKind::Lib { .. }))
.filter_map(|&tgt| cargo[tgt].root.parent())
.map(|tgt| tgt.normalize().to_path_buf())
.filter(|path| !path.starts_with(&pkg_root));
@@ -822,6 +844,8 @@
exclude.push(pkg_root.join("examples"));
exclude.push(pkg_root.join("benches"));
}
+ include.sort();
+ include.dedup();
PackageRoot { is_local, include, exclude }
})
.chain(mk_sysroot())
@@ -858,10 +882,16 @@
// [lib]
// path = "../../src/lib.rs"
// ```
+ //
+ // or
+ //
+ // ```toml
+ // [[bin]]
+ // path = "../bin_folder/main.rs"
+ // ```
let extra_targets = cargo[pkg]
.targets
.iter()
- .filter(|&&tgt| matches!(cargo[tgt].kind, TargetKind::Lib { .. }))
.filter_map(|&tgt| cargo[tgt].root.parent())
.map(|tgt| tgt.normalize().to_path_buf())
.filter(|path| !path.starts_with(&pkg_root));
@@ -877,6 +907,8 @@
exclude.push(pkg_root.join("examples"));
exclude.push(pkg_root.join("benches"));
}
+ include.sort();
+ include.dedup();
PackageRoot { is_local, include, exclude }
})
}))
@@ -1194,6 +1226,7 @@
load,
crate_ws_data.clone(),
);
+ let cargo_path = sysroot.tool_path(Tool::Cargo, cargo.workspace_root(), cargo.env());
let cfg_options = CfgOptions::from_iter(rustc_cfg);
@@ -1268,6 +1301,7 @@
} else {
Arc::new(pkg_data.manifest.parent().to_path_buf())
},
+ &cargo_path,
);
if let TargetKind::Lib { .. } = kind {
lib_tgt = Some((crate_id, name.clone()));
@@ -1375,6 +1409,7 @@
},
// FIXME: This looks incorrect but I don't think this causes problems.
crate_ws_data,
+ &cargo_path,
);
}
}
@@ -1453,6 +1488,7 @@
override_cfg: &CfgOverrides,
build_scripts: &WorkspaceBuildScripts,
crate_ws_data: Arc<CrateWorkspaceData>,
+ cargo_path: &Utf8Path,
) {
let mut rustc_pkg_crates = FxHashMap::default();
// The root package of the rustc-dev component is rustc_driver, so we match that
@@ -1503,6 +1539,7 @@
} else {
Arc::new(pkg_data.manifest.parent().to_path_buf())
},
+ cargo_path,
);
pkg_to_lib_crate.insert(pkg, crate_id);
// Add dependencies on core / std / alloc for this crate
@@ -1560,11 +1597,12 @@
build_data: Option<(&BuildScriptOutput, bool)>,
cfg_options: CfgOptions,
file_id: FileId,
- cargo_name: &str,
+ cargo_crate_name: &str,
kind: TargetKind,
origin: CrateOrigin,
crate_ws_data: Arc<CrateWorkspaceData>,
proc_macro_cwd: Arc<AbsPathBuf>,
+ cargo_path: &Utf8Path,
) -> CrateBuilderId {
let edition = pkg.edition;
let potential_cfg_options = if pkg.features.is_empty() {
@@ -1591,8 +1629,8 @@
let mut env = cargo.env().clone();
inject_cargo_package_env(&mut env, pkg);
- inject_cargo_env(&mut env);
- inject_rustc_tool_env(&mut env, cargo_name, kind);
+ inject_cargo_env(&mut env, cargo_path);
+ inject_rustc_tool_env(&mut env, cargo_crate_name, kind);
if let Some(envs) = build_data.map(|(it, _)| &it.envs) {
env.extend_from_other(envs);
@@ -1600,7 +1638,7 @@
let crate_id = crate_graph.add_crate_root(
file_id,
edition,
- Some(CrateDisplayName::from_canonical_name(cargo_name)),
+ Some(CrateDisplayName::from_canonical_name(cargo_crate_name)),
Some(pkg.version.to_string()),
cfg_options,
potential_cfg_options,
@@ -1614,7 +1652,9 @@
let proc_macro = match build_data {
Some((BuildScriptOutput { proc_macro_dylib_path, .. }, has_errors)) => {
match proc_macro_dylib_path {
- ProcMacroDylibPath::Path(path) => Ok((cargo_name.to_owned(), path.clone())),
+ ProcMacroDylibPath::Path(path) => {
+ Ok((cargo_crate_name.to_owned(), path.clone()))
+ }
ProcMacroDylibPath::NotBuilt => Err(ProcMacroLoadingError::NotYetBuilt),
ProcMacroDylibPath::NotProcMacro | ProcMacroDylibPath::DylibNotFound
if has_errors =>
@@ -1867,29 +1907,12 @@
fn sysroot_metadata_config(
config: &CargoConfig,
- current_dir: &AbsPath,
targets: &[String],
toolchain_version: Option<Version>,
) -> CargoMetadataConfig {
- // We run `cargo metadata` on sysroot with sysroot dir as a working directory, but still pass
- // the `targets` from the cargo config evaluated from the workspace's `current_dir`.
- // So, we need to *canonicalize* those *might-be-relative-paths-to-custom-target-json-files*.
- //
- // See https://github.com/rust-lang/cargo/blob/f7acf448fc127df9a77c52cc2bba027790ac4931/src/cargo/core/compiler/compile_kind.rs#L171-L192
- let targets = targets
- .iter()
- .map(|target| {
- if target.ends_with(".json") {
- current_dir.join(target).to_string()
- } else {
- target.to_owned()
- }
- })
- .collect();
-
CargoMetadataConfig {
features: Default::default(),
- targets,
+ targets: targets.to_vec(),
extra_args: Default::default(),
extra_env: config.extra_env.clone(),
toolchain_version,
diff --git a/crates/rust-analyzer/Cargo.toml b/crates/rust-analyzer/Cargo.toml
index b9dfe1f..2e48c5a 100644
--- a/crates/rust-analyzer/Cargo.toml
+++ b/crates/rust-analyzer/Cargo.toml
@@ -47,8 +47,7 @@
tracing-subscriber.workspace = true
tracing-tree.workspace = true
triomphe.workspace = true
-toml = "0.8.23"
-nohash-hasher.workspace = true
+toml.workspace = true
walkdir = "2.5.0"
semver.workspace = true
memchr = "2.7.5"
@@ -61,7 +60,6 @@
hir-ty.workspace = true
hir.workspace = true
ide-db.workspace = true
-intern.workspace = true
# This should only be used in CLI
ide-ssr.workspace = true
ide.workspace = true
diff --git a/crates/rust-analyzer/build.rs b/crates/rust-analyzer/build.rs
index 0fd381d..cc7f112 100644
--- a/crates/rust-analyzer/build.rs
+++ b/crates/rust-analyzer/build.rs
@@ -5,7 +5,6 @@
fn main() {
set_rerun();
set_commit_info();
- println!("cargo::rustc-check-cfg=cfg(rust_analyzer)");
if option_env!("CFG_RELEASE").is_none() {
println!("cargo:rustc-env=POKE_RA_DEVS=1");
}
diff --git a/crates/rust-analyzer/src/bin/main.rs b/crates/rust-analyzer/src/bin/main.rs
index cc8db1b..44c442f 100644
--- a/crates/rust-analyzer/src/bin/main.rs
+++ b/crates/rust-analyzer/src/bin/main.rs
@@ -307,6 +307,11 @@
config.rediscover_workspaces();
}
+ rayon::ThreadPoolBuilder::new()
+ .thread_name(|ix| format!("RayonWorker{}", ix))
+ .build_global()
+ .unwrap();
+
// If the io_threads have an error, there's usually an error on the main
// loop too because the channels are closed. Ensure we report both errors.
match (rust_analyzer::main_loop(config, connection), io_threads.join()) {
diff --git a/crates/rust-analyzer/src/cli/analysis_stats.rs b/crates/rust-analyzer/src/cli/analysis_stats.rs
index 5e4a277..59a4de9 100644
--- a/crates/rust-analyzer/src/cli/analysis_stats.rs
+++ b/crates/rust-analyzer/src/cli/analysis_stats.rs
@@ -11,7 +11,7 @@
use cfg::{CfgAtom, CfgDiff};
use hir::{
Adt, AssocItem, Crate, DefWithBody, FindPathConfig, HasCrate, HasSource, HirDisplay, ModuleDef,
- Name,
+ Name, crate_lang_items,
db::{DefDatabase, ExpandDatabase, HirDatabase},
next_solver::{DbInterner, GenericArgs},
};
@@ -200,7 +200,7 @@
let mut num_crates = 0;
let mut visited_modules = FxHashSet::default();
let mut visit_queue = Vec::new();
- for krate in krates {
+ for &krate in &krates {
let module = krate.root_module();
let file_id = module.definition_source_file_id(db);
let file_id = file_id.original_file(db);
@@ -313,6 +313,10 @@
}
hir::attach_db(db, || {
+ if !self.skip_lang_items {
+ self.run_lang_items(db, &krates, verbosity);
+ }
+
if !self.skip_lowering {
self.run_body_lowering(db, &vfs, &bodies, verbosity);
}
@@ -1109,6 +1113,26 @@
report_metric("body lowering time", body_lowering_time.time.as_millis() as u64, "ms");
}
+ fn run_lang_items(&self, db: &RootDatabase, crates: &[Crate], verbosity: Verbosity) {
+ let mut bar = match verbosity {
+ Verbosity::Quiet | Verbosity::Spammy => ProgressReport::hidden(),
+ _ if self.output.is_some() => ProgressReport::hidden(),
+ _ => ProgressReport::new(crates.len()),
+ };
+
+ let mut sw = self.stop_watch();
+ bar.tick();
+ for &krate in crates {
+ crate_lang_items(db, krate.into());
+ bar.inc(1);
+ }
+
+ bar.finish_and_clear();
+ let time = sw.elapsed();
+ eprintln!("{:<20} {}", "Crate lang items:", time);
+ report_metric("crate lang items time", time.time.as_millis() as u64, "ms");
+ }
+
/// Invariant: `file_ids` must be sorted and deduped before passing into here
fn run_ide_things(
&self,
@@ -1186,6 +1210,7 @@
closure_capture_hints: true,
binding_mode_hints: true,
implicit_drop_hints: true,
+ implied_dyn_trait_hints: true,
lifetime_elision_hints: ide::LifetimeElisionHints::Always,
param_names_for_lifetime_elision_hints: true,
hide_named_constructor_hints: false,
diff --git a/crates/rust-analyzer/src/cli/flags.rs b/crates/rust-analyzer/src/cli/flags.rs
index 75030be..c522060 100644
--- a/crates/rust-analyzer/src/cli/flags.rs
+++ b/crates/rust-analyzer/src/cli/flags.rs
@@ -78,6 +78,8 @@
optional --disable-proc-macros
/// Run the proc-macro-srv binary at the specified path.
optional --proc-macro-srv path: PathBuf
+ /// Skip lang items fetching.
+ optional --skip-lang-items
/// Skip body lowering.
optional --skip-lowering
/// Skip type inference.
@@ -256,6 +258,7 @@
pub disable_proc_macros: bool,
pub proc_macro_srv: Option<PathBuf>,
pub skip_lowering: bool,
+ pub skip_lang_items: bool,
pub skip_inference: bool,
pub skip_mir_stats: bool,
pub skip_data_layout: bool,
diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs
index b8c4b9f..1b15d83 100644
--- a/crates/rust-analyzer/src/config.rs
+++ b/crates/rust-analyzer/src/config.rs
@@ -263,6 +263,9 @@
/// Show inlay hints for the implied type parameter `Sized` bound.
inlayHints_implicitSizedBoundHints_enable: bool = false,
+ /// Show inlay hints for the implied `dyn` keyword in trait object types.
+ inlayHints_impliedDynTraitHints_enable: bool = true,
+
/// Show inlay type hints for elided lifetimes in function signatures.
inlayHints_lifetimeElisionHints_enable: LifetimeElisionDef = LifetimeElisionDef::Never,
@@ -1983,6 +1986,7 @@
&client_capability_fields,
),
implicit_drop_hints: self.inlayHints_implicitDrops_enable().to_owned(),
+ implied_dyn_trait_hints: self.inlayHints_impliedDynTraitHints_enable().to_owned(),
range_exclusive_hints: self.inlayHints_rangeExclusiveHints_enable().to_owned(),
minicore,
}
diff --git a/crates/rust-analyzer/src/global_state.rs b/crates/rust-analyzer/src/global_state.rs
index f557dd5..91f7db7 100644
--- a/crates/rust-analyzer/src/global_state.rs
+++ b/crates/rust-analyzer/src/global_state.rs
@@ -781,6 +781,14 @@
pub(crate) fn target_spec_for_crate(&self, crate_id: Crate) -> Option<TargetSpec> {
let file_id = self.analysis.crate_root(crate_id).ok()?;
+ self.target_spec_for_file(file_id, crate_id)
+ }
+
+ pub(crate) fn target_spec_for_file(
+ &self,
+ file_id: FileId,
+ crate_id: Crate,
+ ) -> Option<TargetSpec> {
let path = self.vfs_read().file_path(file_id).clone();
let path = path.as_path()?;
diff --git a/crates/rust-analyzer/src/handlers/request.rs b/crates/rust-analyzer/src/handlers/request.rs
index 7e52673..d15b519 100644
--- a/crates/rust-analyzer/src/handlers/request.rs
+++ b/crates/rust-analyzer/src/handlers/request.rs
@@ -2348,21 +2348,9 @@
let file_id = try_default!(from_proto::file_id(snap, &text_document.uri)?);
let file = snap.analysis.file_text(file_id)?;
- // Determine the edition of the crate the file belongs to (if there's multiple, we pick the
- // highest edition).
- let Ok(editions) = snap
- .analysis
- .relevant_crates_for(file_id)?
- .into_iter()
- .map(|crate_id| snap.analysis.crate_edition(crate_id))
- .collect::<Result<Vec<_>, _>>()
- else {
- return Ok(None);
- };
- let edition = editions.iter().copied().max();
-
let line_index = snap.file_line_index(file_id)?;
let source_root_id = snap.analysis.source_root_id(file_id).ok();
+ let crates = snap.analysis.relevant_crates_for(file_id)?;
// try to chdir to the file so we can respect `rustfmt.toml`
// FIXME: use `rustfmt --config-path` once
@@ -2383,6 +2371,17 @@
let mut command = match snap.config.rustfmt(source_root_id) {
RustfmtConfig::Rustfmt { extra_args, enable_range_formatting } => {
+ // Determine the edition of the crate the file belongs to (if there's multiple, we pick the
+ // highest edition).
+ let Ok(editions) = crates
+ .iter()
+ .map(|&crate_id| snap.analysis.crate_edition(crate_id))
+ .collect::<Result<Vec<_>, _>>()
+ else {
+ return Ok(None);
+ };
+ let edition = editions.iter().copied().max();
+
// FIXME: Set RUSTUP_TOOLCHAIN
let mut cmd = toolchain::command(
toolchain::Tool::Rustfmt.path(),
@@ -2429,7 +2428,8 @@
}
RustfmtConfig::CustomCommand { command, args } => {
let cmd = Utf8PathBuf::from(&command);
- let target_spec = TargetSpec::for_file(snap, file_id)?;
+ let target_spec =
+ crates.first().and_then(|&crate_id| snap.target_spec_for_file(file_id, crate_id));
let extra_env = snap.config.extra_env(source_root_id);
let mut cmd = match target_spec {
Some(TargetSpec::Cargo(_)) => {
diff --git a/crates/rust-analyzer/src/lsp/semantic_tokens.rs b/crates/rust-analyzer/src/lsp/semantic_tokens.rs
index 828118a..9bfdea8 100644
--- a/crates/rust-analyzer/src/lsp/semantic_tokens.rs
+++ b/crates/rust-analyzer/src/lsp/semantic_tokens.rs
@@ -143,6 +143,7 @@
DECLARATION,
STATIC,
DEFAULT_LIBRARY,
+ DEPRECATED,
}
custom {
(ASSOCIATED, "associated"),
diff --git a/crates/rust-analyzer/src/lsp/to_proto.rs b/crates/rust-analyzer/src/lsp/to_proto.rs
index 995e6c4..e585c3f 100644
--- a/crates/rust-analyzer/src/lsp/to_proto.rs
+++ b/crates/rust-analyzer/src/lsp/to_proto.rs
@@ -882,6 +882,7 @@
HlMod::ControlFlow => mods::CONTROL_FLOW,
HlMod::CrateRoot => mods::CRATE_ROOT,
HlMod::DefaultLibrary => mods::DEFAULT_LIBRARY,
+ HlMod::Deprecated => mods::DEPRECATED,
HlMod::Definition => mods::DECLARATION,
HlMod::Documentation => mods::DOCUMENTATION,
HlMod::Injected => mods::INJECTED,
diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs
index c2b887c..7b339fa 100644
--- a/crates/rust-analyzer/src/main_loop.rs
+++ b/crates/rust-analyzer/src/main_loop.rs
@@ -341,20 +341,23 @@
self.handle_task(&mut prime_caches_progress, task);
}
+ let title = "Indexing";
+ let cancel_token = Some("rustAnalyzer/cachePriming".to_owned());
+
+ let mut last_report = None;
for progress in prime_caches_progress {
- let (state, message, fraction, title);
match progress {
PrimeCachesProgress::Begin => {
- state = Progress::Begin;
- message = None;
- fraction = 0.0;
- title = "Indexing";
+ self.report_progress(
+ title,
+ Progress::Begin,
+ None,
+ Some(0.0),
+ cancel_token.clone(),
+ );
}
PrimeCachesProgress::Report(report) => {
- state = Progress::Report;
- title = report.work_type;
-
- message = match &*report.crates_currently_indexing {
+ let message = match &*report.crates_currently_indexing {
[crate_name] => Some(format!(
"{}/{} ({})",
report.crates_done,
@@ -371,38 +374,66 @@
_ => None,
};
- fraction = Progress::fraction(report.crates_done, report.crates_total);
+ // Don't send too many notifications while batching; sending progress reports
+ // currently serializes notifications on the main thread, which slows us down
+ last_report = Some((
+ message,
+ Progress::fraction(report.crates_done, report.crates_total),
+ report.work_type,
+ ));
}
PrimeCachesProgress::End { cancelled } => {
- state = Progress::End;
- message = None;
- fraction = 1.0;
- title = "Indexing";
-
self.analysis_host.raw_database_mut().trigger_lru_eviction();
self.prime_caches_queue.op_completed(());
if cancelled {
self.prime_caches_queue
.request_op("restart after cancellation".to_owned(), ());
}
+ if let Some((message, fraction, title)) = last_report.take() {
+ self.report_progress(
+ title,
+ Progress::Report,
+ message,
+ Some(fraction),
+ cancel_token.clone(),
+ );
+ }
+ self.report_progress(
+ title,
+ Progress::End,
+ None,
+ Some(1.0),
+ cancel_token.clone(),
+ );
}
};
-
+ }
+ if let Some((message, fraction, title)) = last_report.take() {
self.report_progress(
title,
- state,
+ Progress::Report,
message,
Some(fraction),
- Some("rustAnalyzer/cachePriming".to_owned()),
+ cancel_token.clone(),
);
}
}
Event::Vfs(message) => {
let _p = tracing::info_span!("GlobalState::handle_event/vfs").entered();
- self.handle_vfs_msg(message);
+ let mut last_progress_report = None;
+ self.handle_vfs_msg(message, &mut last_progress_report);
// Coalesce many VFS event into a single loop turn
while let Ok(message) = self.loader.receiver.try_recv() {
- self.handle_vfs_msg(message);
+ self.handle_vfs_msg(message, &mut last_progress_report);
+ }
+ if let Some((message, fraction)) = last_progress_report {
+ self.report_progress(
+ "Roots Scanned",
+ Progress::Report,
+ Some(message),
+ Some(fraction),
+ None,
+ );
}
}
Event::Flycheck(message) => {
@@ -452,7 +483,11 @@
// Project has loaded properly, kick off initial flycheck
self.flycheck.iter().for_each(|flycheck| flycheck.restart_workspace(None));
}
- if self.config.prefill_caches() {
+ // Delay initial cache priming until proc macros are loaded, or we would load a bunch of garbage into salsa
+ let proc_macros_loaded = self.config.prefill_caches()
+ && !self.config.expand_proc_macros()
+ || self.fetch_proc_macros_queue.last_op_result().copied().unwrap_or(false);
+ if proc_macros_loaded {
self.prime_caches_queue.request_op("became quiescent".to_owned(), ());
}
}
@@ -846,7 +881,11 @@
}
}
- fn handle_vfs_msg(&mut self, message: vfs::loader::Message) {
+ fn handle_vfs_msg(
+ &mut self,
+ message: vfs::loader::Message,
+ last_progress_report: &mut Option<(String, f64)>,
+ ) {
let _p = tracing::info_span!("GlobalState::handle_vfs_msg").entered();
let is_changed = matches!(message, vfs::loader::Message::Changed { .. });
match message {
@@ -903,13 +942,41 @@
);
}
- self.report_progress(
- "Roots Scanned",
- state,
- Some(message),
- Some(Progress::fraction(n_done, n_total)),
- None,
- );
+ match state {
+ Progress::Begin => self.report_progress(
+ "Roots Scanned",
+ state,
+ Some(message),
+ Some(Progress::fraction(n_done, n_total)),
+ None,
+ ),
+ // Don't send too many notifications while batching; sending progress reports
+ // currently serializes notifications on the main thread, which slows us down
+ Progress::Report => {
+ if last_progress_report.is_none() {
+ self.report_progress(
+ "Roots Scanned",
+ state,
+ Some(message.clone()),
+ Some(Progress::fraction(n_done, n_total)),
+ None,
+ );
+ }
+
+ *last_progress_report =
+ Some((message, Progress::fraction(n_done, n_total)));
+ }
+ Progress::End => {
+ last_progress_report.take();
+ self.report_progress(
+ "Roots Scanned",
+ state,
+ Some(message),
+ Some(Progress::fraction(n_done, n_total)),
+ None,
+ )
+ }
+ }
}
}
}
diff --git a/crates/span/src/map.rs b/crates/span/src/map.rs
index bb09933..d14c497 100644
--- a/crates/span/src/map.rs
+++ b/crates/span/src/map.rs
@@ -156,6 +156,44 @@
}
}
+#[cfg(not(no_salsa_async_drops))]
+impl<S> Drop for SpanMap<S> {
+ fn drop(&mut self) {
+ struct SendPtr(*mut [()]);
+ unsafe impl Send for SendPtr {}
+ static SPAN_MAP_DROP_THREAD: std::sync::OnceLock<
+ std::sync::mpsc::Sender<(SendPtr, fn(SendPtr))>,
+ > = std::sync::OnceLock::new();
+ SPAN_MAP_DROP_THREAD
+ .get_or_init(|| {
+ let (sender, receiver) = std::sync::mpsc::channel::<(SendPtr, fn(SendPtr))>();
+ std::thread::Builder::new()
+ .name("SpanMapDropper".to_owned())
+ .spawn(move || receiver.iter().for_each(|(b, drop)| drop(b)))
+ .unwrap();
+ sender
+ })
+ .send((
+ unsafe {
+ SendPtr(std::mem::transmute::<*mut [(TextSize, SpanData<S>)], *mut [()]>(
+ Box::<[(TextSize, SpanData<S>)]>::into_raw(
+ std::mem::take(&mut self.spans).into_boxed_slice(),
+ ),
+ ))
+ },
+ |b: SendPtr| {
+ _ = unsafe {
+ Box::from_raw(std::mem::transmute::<
+ *mut [()],
+ *mut [(TextSize, SpanData<S>)],
+ >(b.0))
+ }
+ },
+ ))
+ .unwrap();
+ }
+}
+
#[derive(PartialEq, Eq, Hash, Debug)]
pub struct RealSpanMap {
file_id: EditionedFileId,
diff --git a/crates/syntax/src/ast/prec.rs b/crates/syntax/src/ast/prec.rs
index 1364adb..8c88224 100644
--- a/crates/syntax/src/ast/prec.rs
+++ b/crates/syntax/src/ast/prec.rs
@@ -3,14 +3,15 @@
use stdx::always;
use crate::{
- AstNode, SyntaxNode,
+ AstNode, Direction, SyntaxNode, T,
+ algo::skip_trivia_token,
ast::{self, BinaryOp, Expr, HasArgList, RangeItem},
match_ast,
};
#[derive(Debug, Clone, Copy, PartialEq, PartialOrd)]
pub enum ExprPrecedence {
- // return val, break val, yield val, closures
+ // return, break, continue, yield, yeet, become (with or without value)
Jump,
// = += -= *= /= %= &= |= ^= <<= >>=
Assign,
@@ -76,18 +77,12 @@
Some(_) => ExprPrecedence::Unambiguous,
},
- Expr::BreakExpr(e) if e.expr().is_some() => ExprPrecedence::Jump,
- Expr::BecomeExpr(e) if e.expr().is_some() => ExprPrecedence::Jump,
- Expr::ReturnExpr(e) if e.expr().is_some() => ExprPrecedence::Jump,
- Expr::YeetExpr(e) if e.expr().is_some() => ExprPrecedence::Jump,
- Expr::YieldExpr(e) if e.expr().is_some() => ExprPrecedence::Jump,
-
Expr::BreakExpr(_)
| Expr::BecomeExpr(_)
| Expr::ReturnExpr(_)
| Expr::YeetExpr(_)
| Expr::YieldExpr(_)
- | Expr::ContinueExpr(_) => ExprPrecedence::Unambiguous,
+ | Expr::ContinueExpr(_) => ExprPrecedence::Jump,
Expr::RangeExpr(..) => ExprPrecedence::Range,
@@ -226,6 +221,20 @@
return false;
}
+ // Keep parens when a ret-like expr is followed by `||` or `&&`.
+ // For `||`, removing parens could reparse as `<ret-like> || <closure>`.
+ // For `&&`, we avoid introducing `<ret-like> && <expr>` into a binary chain.
+
+ if self.precedence() == ExprPrecedence::Jump
+ && let Some(node) =
+ place_of.ancestors().find(|it| it.parent().is_none_or(|p| &p == parent.syntax()))
+ && let Some(next) =
+ node.last_token().and_then(|t| skip_trivia_token(t.next_token()?, Direction::Next))
+ && matches!(next.kind(), T![||] | T![&&])
+ {
+ return true;
+ }
+
if self.is_paren_like()
|| parent.is_paren_like()
|| self.is_prefix()
diff --git a/crates/syntax/src/lib.rs b/crates/syntax/src/lib.rs
index a3c19f7..7346b93 100644
--- a/crates/syntax/src/lib.rs
+++ b/crates/syntax/src/lib.rs
@@ -67,7 +67,7 @@
/// files.
#[derive(Debug, PartialEq, Eq)]
pub struct Parse<T> {
- green: GreenNode,
+ green: Option<GreenNode>,
errors: Option<Arc<[SyntaxError]>>,
_ty: PhantomData<fn() -> T>,
}
@@ -81,14 +81,14 @@
impl<T> Parse<T> {
fn new(green: GreenNode, errors: Vec<SyntaxError>) -> Parse<T> {
Parse {
- green,
+ green: Some(green),
errors: if errors.is_empty() { None } else { Some(errors.into()) },
_ty: PhantomData,
}
}
pub fn syntax_node(&self) -> SyntaxNode {
- SyntaxNode::new_root(self.green.clone())
+ SyntaxNode::new_root(self.green.as_ref().unwrap().clone())
}
pub fn errors(&self) -> Vec<SyntaxError> {
@@ -100,8 +100,10 @@
impl<T: AstNode> Parse<T> {
/// Converts this parse result into a parse result for an untyped syntax tree.
- pub fn to_syntax(self) -> Parse<SyntaxNode> {
- Parse { green: self.green, errors: self.errors, _ty: PhantomData }
+ pub fn to_syntax(mut self) -> Parse<SyntaxNode> {
+ let green = self.green.take();
+ let errors = self.errors.take();
+ Parse { green, errors, _ty: PhantomData }
}
/// Gets the parsed syntax tree as a typed ast node.
@@ -124,9 +126,9 @@
}
impl Parse<SyntaxNode> {
- pub fn cast<N: AstNode>(self) -> Option<Parse<N>> {
+ pub fn cast<N: AstNode>(mut self) -> Option<Parse<N>> {
if N::cast(self.syntax_node()).is_some() {
- Some(Parse { green: self.green, errors: self.errors, _ty: PhantomData })
+ Some(Parse { green: self.green.take(), errors: self.errors.take(), _ty: PhantomData })
} else {
None
}
@@ -162,7 +164,7 @@
edition,
)
.map(|(green_node, errors, _reparsed_range)| Parse {
- green: green_node,
+ green: Some(green_node),
errors: if errors.is_empty() { None } else { Some(errors.into()) },
_ty: PhantomData,
})
@@ -198,6 +200,28 @@
}
}
+#[cfg(not(no_salsa_async_drops))]
+impl<T> Drop for Parse<T> {
+ fn drop(&mut self) {
+ let Some(green) = self.green.take() else {
+ return;
+ };
+ static PARSE_DROP_THREAD: std::sync::OnceLock<std::sync::mpsc::Sender<GreenNode>> =
+ std::sync::OnceLock::new();
+ PARSE_DROP_THREAD
+ .get_or_init(|| {
+ let (sender, receiver) = std::sync::mpsc::channel::<GreenNode>();
+ std::thread::Builder::new()
+ .name("ParseNodeDropper".to_owned())
+ .spawn(move || receiver.iter().for_each(drop))
+ .unwrap();
+ sender
+ })
+ .send(green)
+ .unwrap();
+ }
+}
+
/// `SourceFile` represents a parse tree for a single Rust file.
pub use crate::ast::SourceFile;
diff --git a/crates/test-utils/src/lib.rs b/crates/test-utils/src/lib.rs
index d3afac8..62867fd 100644
--- a/crates/test-utils/src/lib.rs
+++ b/crates/test-utils/src/lib.rs
@@ -43,7 +43,7 @@
#[macro_export]
macro_rules! assert_eq_text {
($left:expr, $right:expr) => {
- assert_eq_text!($left, $right,)
+ $crate::assert_eq_text!($left, $right,)
};
($left:expr, $right:expr, $($tt:tt)*) => {{
let left = $left;
diff --git a/crates/test-utils/src/minicore.rs b/crates/test-utils/src/minicore.rs
index 696928b..0fe17e3 100644
--- a/crates/test-utils/src/minicore.rs
+++ b/crates/test-utils/src/minicore.rs
@@ -68,6 +68,7 @@
//! transmute:
//! try: infallible
//! tuple:
+//! unary_ops:
//! unpin: sized
//! unsize: sized
//! write: fmt
@@ -591,13 +592,13 @@
impl<T: PointeeSized> Deref for &T {
type Target = T;
fn deref(&self) -> &T {
- loop {}
+ *self
}
}
impl<T: PointeeSized> Deref for &mut T {
type Target = T;
fn deref(&self) -> &T {
- loop {}
+ *self
}
}
// region:deref_mut
@@ -1056,6 +1057,9 @@
type Output = $t;
fn add(self, other: $t) -> $t { self + other }
}
+ impl AddAssign for $t {
+ fn add_assign(&mut self, other: $t) { *self += other; }
+ }
)*)
}
@@ -1063,6 +1067,24 @@
// endregion:builtin_impls
// endregion:add
+ // region:unary_ops
+ #[lang = "not"]
+ pub const trait Not {
+ type Output;
+
+ #[must_use]
+ fn not(self) -> Self::Output;
+ }
+
+ #[lang = "neg"]
+ pub const trait Neg {
+ type Output;
+
+ #[must_use = "this returns the result of the operation, without modifying the original"]
+ fn neg(self) -> Self::Output;
+ }
+ // endregion:unary_ops
+
// region:coroutine
mod coroutine {
use crate::pin::Pin;
@@ -1118,6 +1140,12 @@
pub trait Eq: PartialEq<Self> + PointeeSized {}
+ // region:builtin_impls
+ impl PartialEq for () {
+ fn eq(&self, other: &()) -> bool { true }
+ }
+ // endregion:builtin_impls
+
// region:derive
#[rustc_builtin_macro]
pub macro PartialEq($item:item) {}
@@ -1490,6 +1518,12 @@
{
}
// endregion:dispatch_from_dyn
+ // region:coerce_unsized
+ impl<Ptr, U> crate::ops::CoerceUnsized<Pin<U>> for Pin<Ptr> where
+ Ptr: crate::ops::CoerceUnsized<U>
+ {
+ }
+ // endregion:coerce_unsized
}
// endregion:pin
@@ -1954,6 +1988,10 @@
// region:bool_impl
#[lang = "bool"]
impl bool {
+ pub fn then_some<T>(self, t: T) -> Option<T> {
+ if self { Some(t) } else { None }
+ }
+
pub fn then<T, F: FnOnce() -> T>(self, f: F) -> Option<T> {
if self { Some(f()) } else { None }
}
diff --git a/crates/tt/src/lib.rs b/crates/tt/src/lib.rs
index f9a547f..ea07522 100644
--- a/crates/tt/src/lib.rs
+++ b/crates/tt/src/lib.rs
@@ -826,58 +826,6 @@
}
}
-impl<S> Literal<S> {
- pub fn display_no_minus(&self) -> impl fmt::Display {
- struct NoMinus<'a, S>(&'a Literal<S>);
- impl<S> fmt::Display for NoMinus<'_, S> {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- let symbol =
- self.0.symbol.as_str().strip_prefix('-').unwrap_or(self.0.symbol.as_str());
- match self.0.kind {
- LitKind::Byte => write!(f, "b'{symbol}'"),
- LitKind::Char => write!(f, "'{symbol}'"),
- LitKind::Integer | LitKind::Float | LitKind::Err(_) => write!(f, "{symbol}"),
- LitKind::Str => write!(f, "\"{symbol}\""),
- LitKind::ByteStr => write!(f, "b\"{symbol}\""),
- LitKind::CStr => write!(f, "c\"{symbol}\""),
- LitKind::StrRaw(num_of_hashes) => {
- let num_of_hashes = num_of_hashes as usize;
- write!(
- f,
- r#"r{0:#<num_of_hashes$}"{text}"{0:#<num_of_hashes$}"#,
- "",
- text = symbol
- )
- }
- LitKind::ByteStrRaw(num_of_hashes) => {
- let num_of_hashes = num_of_hashes as usize;
- write!(
- f,
- r#"br{0:#<num_of_hashes$}"{text}"{0:#<num_of_hashes$}"#,
- "",
- text = symbol
- )
- }
- LitKind::CStrRaw(num_of_hashes) => {
- let num_of_hashes = num_of_hashes as usize;
- write!(
- f,
- r#"cr{0:#<num_of_hashes$}"{text}"{0:#<num_of_hashes$}"#,
- "",
- text = symbol
- )
- }
- }?;
- if let Some(suffix) = &self.0.suffix {
- write!(f, "{suffix}")?;
- }
- Ok(())
- }
- }
- NoMinus(self)
- }
-}
-
impl<S> fmt::Display for Literal<S> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self.kind {
diff --git a/docs/book/book.toml b/docs/book/book.toml
index edf11fa..c77eabd 100644
--- a/docs/book/book.toml
+++ b/docs/book/book.toml
@@ -1,7 +1,6 @@
[book]
authors = ["The rust-analyzer authors"]
language = "en"
-multilingual = false
src = "src"
title = "rust-analyzer"
diff --git a/docs/book/src/configuration_generated.md b/docs/book/src/configuration_generated.md
index 21e199c..f0da2bd 100644
--- a/docs/book/src/configuration_generated.md
+++ b/docs/book/src/configuration_generated.md
@@ -1033,6 +1033,13 @@
Show inlay hints for the implied type parameter `Sized` bound.
+## rust-analyzer.inlayHints.impliedDynTraitHints.enable {#inlayHints.impliedDynTraitHints.enable}
+
+Default: `true`
+
+Show inlay hints for the implied `dyn` keyword in trait object types.
+
+
## rust-analyzer.inlayHints.lifetimeElisionHints.enable {#inlayHints.lifetimeElisionHints.enable}
Default: `"never"`
diff --git a/editors/code/package-lock.json b/editors/code/package-lock.json
index 6dd4485..d49d19f 100644
--- a/editors/code/package-lock.json
+++ b/editors/code/package-lock.json
@@ -934,6 +934,29 @@
"url": "https://github.com/sponsors/nzakas"
}
},
+ "node_modules/@isaacs/balanced-match": {
+ "version": "4.0.1",
+ "resolved": "https://registry.npmjs.org/@isaacs/balanced-match/-/balanced-match-4.0.1.tgz",
+ "integrity": "sha512-yzMTt9lEb8Gv7zRioUilSglI0c0smZ9k5D65677DLWLtWJaXIS3CqcGyUFByYKlnUj6TkjLVs54fBl6+TiGQDQ==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": "20 || >=22"
+ }
+ },
+ "node_modules/@isaacs/brace-expansion": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/@isaacs/brace-expansion/-/brace-expansion-5.0.0.tgz",
+ "integrity": "sha512-ZT55BDLV0yv0RBm2czMiZ+SqCGO7AvmOM3G/w2xhVPH+te0aKgFjmBvGlL1dH+ql2tgGO3MVrbb3jCKyvpgnxA==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "@isaacs/balanced-match": "^4.0.1"
+ },
+ "engines": {
+ "node": "20 || >=22"
+ }
+ },
"node_modules/@isaacs/cliui": {
"version": "8.0.2",
"resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz",
@@ -1296,9 +1319,9 @@
"license": "MIT"
},
"node_modules/@textlint/linter-formatter/node_modules/js-yaml": {
- "version": "3.14.1",
- "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz",
- "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==",
+ "version": "3.14.2",
+ "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.2.tgz",
+ "integrity": "sha512-PMSmkqxr106Xa156c2M265Z+FTrPl+oxd/rgOQy2tijQeK5TxQ43psO1ZCwhVOSdnn+RzkzlRz/eY4BgJBYVpg==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -3725,13 +3748,13 @@
}
},
"node_modules/foreground-child": {
- "version": "3.3.0",
- "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.0.tgz",
- "integrity": "sha512-Ld2g8rrAyMYFXBhEqMz8ZAHBi4J4uS1i/CxGMDnjyFWddMXLVcDp051DZfu+t7+ab7Wv6SMqpWmyFIj5UbfFvg==",
+ "version": "3.3.1",
+ "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.1.tgz",
+ "integrity": "sha512-gIXjKqtFuWEgzFRJA9WCQeSJLZDjgJUOMCMzxtvFq/37KojM1BFGufqsCy0r4qSQmYLsZYMeyRqzIWOMup03sw==",
"dev": true,
"license": "ISC",
"dependencies": {
- "cross-spawn": "^7.0.0",
+ "cross-spawn": "^7.0.6",
"signal-exit": "^4.0.1"
},
"engines": {
@@ -3848,15 +3871,15 @@
"optional": true
},
"node_modules/glob": {
- "version": "11.0.1",
- "resolved": "https://registry.npmjs.org/glob/-/glob-11.0.1.tgz",
- "integrity": "sha512-zrQDm8XPnYEKawJScsnM0QzobJxlT/kHOOlRTio8IH/GrmxRE5fjllkzdaHclIuNjUQTJYH2xHNIGfdpJkDJUw==",
+ "version": "11.1.0",
+ "resolved": "https://registry.npmjs.org/glob/-/glob-11.1.0.tgz",
+ "integrity": "sha512-vuNwKSaKiqm7g0THUBu2x7ckSs3XJLXE+2ssL7/MfTGPLLcrJQ/4Uq1CjPTtO5cCIiRxqvN6Twy1qOwhL0Xjcw==",
"dev": true,
- "license": "ISC",
+ "license": "BlueOak-1.0.0",
"dependencies": {
- "foreground-child": "^3.1.0",
- "jackspeak": "^4.0.1",
- "minimatch": "^10.0.0",
+ "foreground-child": "^3.3.1",
+ "jackspeak": "^4.1.1",
+ "minimatch": "^10.1.1",
"minipass": "^7.1.2",
"package-json-from-dist": "^1.0.0",
"path-scurry": "^2.0.0"
@@ -3885,13 +3908,13 @@
}
},
"node_modules/glob/node_modules/minimatch": {
- "version": "10.0.1",
- "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.0.1.tgz",
- "integrity": "sha512-ethXTt3SGGR+95gudmqJ1eNhRO7eGEGIgYA9vnPatK4/etz2MEVDno5GMCibdMTuBMyElzIlgxMna3K94XDIDQ==",
+ "version": "10.1.1",
+ "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.1.1.tgz",
+ "integrity": "sha512-enIvLvRAFZYXJzkCYG5RKmPfrFArdLv+R+lbQ53BmIMLIry74bjKzX6iHAm8WYamJkhSSEabrWN5D97XnKObjQ==",
"dev": true,
- "license": "ISC",
+ "license": "BlueOak-1.0.0",
"dependencies": {
- "brace-expansion": "^2.0.1"
+ "@isaacs/brace-expansion": "^5.0.0"
},
"engines": {
"node": "20 || >=22"
@@ -4363,9 +4386,9 @@
}
},
"node_modules/jackspeak": {
- "version": "4.1.0",
- "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-4.1.0.tgz",
- "integrity": "sha512-9DDdhb5j6cpeitCbvLO7n7J4IxnbM6hoF6O1g4HQ5TfhvvKN8ywDM7668ZhMHRqVmxqhps/F6syWK2KcPxYlkw==",
+ "version": "4.1.1",
+ "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-4.1.1.tgz",
+ "integrity": "sha512-zptv57P3GpL+O0I7VdMJNBZCu+BPHVQUk55Ft8/QCJjTVxrnJHuVuX/0Bl2A6/+2oyR/ZMEuFKwmzqqZ/U5nPQ==",
"dev": true,
"license": "BlueOak-1.0.0",
"dependencies": {
@@ -4395,9 +4418,9 @@
"license": "MIT"
},
"node_modules/js-yaml": {
- "version": "4.1.0",
- "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz",
- "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==",
+ "version": "4.1.1",
+ "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.1.tgz",
+ "integrity": "sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -6976,9 +6999,9 @@
}
},
"node_modules/wrap-ansi/node_modules/ansi-styles": {
- "version": "6.2.1",
- "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz",
- "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==",
+ "version": "6.2.3",
+ "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.3.tgz",
+ "integrity": "sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg==",
"dev": true,
"license": "MIT",
"engines": {
diff --git a/editors/code/package.json b/editors/code/package.json
index 7db4986..4d1ae48 100644
--- a/editors/code/package.json
+++ b/editors/code/package.json
@@ -2346,6 +2346,16 @@
{
"title": "Inlay Hints",
"properties": {
+ "rust-analyzer.inlayHints.impliedDynTraitHints.enable": {
+ "markdownDescription": "Show inlay hints for the implied `dyn` keyword in trait object types.",
+ "default": true,
+ "type": "boolean"
+ }
+ }
+ },
+ {
+ "title": "Inlay Hints",
+ "properties": {
"rust-analyzer.inlayHints.lifetimeElisionHints.enable": {
"markdownDescription": "Show inlay type hints for elided lifetimes in function signatures.",
"default": "never",
diff --git a/editors/code/src/lsp_ext.ts b/editors/code/src/lsp_ext.ts
index 20952e9..643f61b 100644
--- a/editors/code/src/lsp_ext.ts
+++ b/editors/code/src/lsp_ext.ts
@@ -46,9 +46,6 @@
export const runFlycheck = new lc.NotificationType<{
textDocument: lc.TextDocumentIdentifier | null;
}>("rust-analyzer/runFlycheck");
-export const syntaxTree = new lc.RequestType<SyntaxTreeParams, string, void>(
- "rust-analyzer/syntaxTree",
-);
export const viewSyntaxTree = new lc.RequestType<ViewSyntaxTreeParams, string, void>(
"rust-analyzer/viewSyntaxTree",
);
diff --git a/lib/smol_str/CHANGELOG.md b/lib/smol_str/CHANGELOG.md
index fb65d88..b7da6d1 100644
--- a/lib/smol_str/CHANGELOG.md
+++ b/lib/smol_str/CHANGELOG.md
@@ -1,6 +1,7 @@
# Changelog
## Unreleased
+- Optimise `SmolStr::clone`: 4-5x speedup for inline strings, ~0.5x for heap strings (slowdown).
## 0.3.4 - 2025-10-23
diff --git a/lib/smol_str/src/gdb_smolstr_printer.py b/lib/smol_str/src/gdb_smolstr_printer.py
new file mode 100644
index 0000000..5f28ddd
--- /dev/null
+++ b/lib/smol_str/src/gdb_smolstr_printer.py
@@ -0,0 +1,147 @@
+# Pretty printer for smol_str::SmolStr
+#
+# Usage (any of these):
+# (gdb) source /path/to/gdb_smolstr_printer.py
+# or add to .gdbinit
+# python
+# import gdb
+# gdb.execute("source /path/to/gdb_smolstr_printer.py")
+# end
+#
+# After loading:
+# (gdb) info pretty-printer
+# ...
+# global pretty-printers:
+# smol_str
+# SmolStr
+#
+# Disable/enable:
+# (gdb) disable pretty-printer global smol_str SmolStr
+# (gdb) enable pretty-printer global smol_str SmolStr
+
+import gdb
+import gdb.printing
+import re
+
+SMOL_INLINE_SIZE_RE = re.compile(r".*::_V(\d+)$")
+
+
+def _read_utf8(mem):
+ try:
+ return mem.tobytes().decode("utf-8", errors="replace")
+ except Exception:
+ return repr(mem.tobytes())
+
+
+def _active_variant(enum_val):
+ """Return (variant_name, variant_value) for a Rust enum value using discriminant logic.
+ Assume layout: fields[0] is unnamed u8 discriminant; fields[1] is the active variant.
+ """
+ fields = enum_val.type.fields()
+ if len(fields) < 2:
+ return None, None
+ variant_field = fields[1]
+ return variant_field.name, enum_val[variant_field]
+
+
+class SmolStrProvider:
+ def __init__(self, val):
+ self.val = val
+
+ def to_string(self):
+ try:
+ repr_enum = self.val["__0"]
+ except Exception:
+ return "<SmolStr: missing __0>"
+
+ variant_name, variant_val = _active_variant(repr_enum)
+ if not variant_name:
+ return "<SmolStr: unknown variant>"
+
+ if variant_name == "Inline":
+ try:
+ inline_len_val = variant_val["len"]
+ m = SMOL_INLINE_SIZE_RE.match(str(inline_len_val))
+ if not m:
+ return "<SmolStr Inline: bad len>"
+ length = int(m.group(1))
+ buf = variant_val["buf"]
+ data = bytes(int(buf[i]) for i in range(length))
+ return data.decode("utf-8", errors="replace")
+ except Exception as e:
+ return f"<SmolStr Inline error: {e}>"
+
+ if variant_name == "Static":
+ try:
+ data_ptr = variant_val["data_ptr"]
+ length = int(variant_val["length"])
+ mem = gdb.selected_inferior().read_memory(int(data_ptr), length)
+ return _read_utf8(mem)
+ except Exception as e:
+ return f"<SmolStr Static error: {e}>"
+
+ if variant_name == "Heap":
+ try:
+ # variant_val is an Arc<str>
+ inner = variant_val["__0"]["ptr"]["pointer"]
+ # inner is a fat pointer to ArcInner<str>
+ data_ptr = inner["data_ptr"]
+ length = int(inner["length"])
+ # ArcInner layout:
+ # strong: Atomic<usize>, weak: Atomic<usize> | unsized tail 'data' bytes.
+ sizeof_AtomicUsize = gdb.lookup_type(
+ "core::sync::atomic::AtomicUsize"
+ ).sizeof
+ header_size = sizeof_AtomicUsize * 2 # strong + weak counters
+ data_arr = int(data_ptr) + header_size
+ mem = gdb.selected_inferior().read_memory(data_arr, length)
+ return _read_utf8(mem)
+ except Exception as e:
+ return f"<SmolStr Heap error: {e}>"
+
+ return f"<SmolStr: unhandled variant {variant_name}>"
+
+ def display_hint(self):
+ return "string"
+
+
+class SmolStrSubPrinter(gdb.printing.SubPrettyPrinter):
+ def __init__(self):
+ super(SmolStrSubPrinter, self).__init__("SmolStr")
+
+ def __call__(self, val):
+ if not self.enabled:
+ return None
+ try:
+ t = val.type.strip_typedefs()
+ if t.code == gdb.TYPE_CODE_STRUCT and t.name == "smol_str::SmolStr":
+ return SmolStrProvider(val)
+ except Exception:
+ pass
+ return None
+
+
+class SmolStrPrettyPrinter(gdb.printing.PrettyPrinter):
+ def __init__(self):
+ super(SmolStrPrettyPrinter, self).__init__("smol_str", [])
+ self.subprinters = []
+ self._sp = SmolStrSubPrinter()
+ self.subprinters.append(self._sp)
+
+ def __call__(self, val):
+ # Iterate subprinters (only one now, scalable for future)
+ for sp in self.subprinters:
+ pp = sp(val)
+ if pp is not None:
+ return pp
+ return None
+
+
+printer = SmolStrPrettyPrinter()
+
+
+def register_printers(objfile=None):
+ gdb.printing.register_pretty_printer(objfile, printer, replace=True)
+
+
+register_printers()
diff --git a/lib/smol_str/src/lib.rs b/lib/smol_str/src/lib.rs
index 31695b8..0d1f01a 100644
--- a/lib/smol_str/src/lib.rs
+++ b/lib/smol_str/src/lib.rs
@@ -1,5 +1,6 @@
#![cfg_attr(not(feature = "std"), no_std)]
#![cfg_attr(docsrs, feature(doc_cfg))]
+#![debugger_visualizer(gdb_script_file = "gdb_smolstr_printer.py")]
extern crate alloc;
@@ -104,11 +105,19 @@
impl Clone for SmolStr {
#[inline]
fn clone(&self) -> Self {
- if !self.is_heap_allocated() {
- // SAFETY: We verified that the payload of `Repr` is a POD
- return unsafe { core::ptr::read(self as *const SmolStr) };
+ // hint for faster inline / slower heap clones
+ #[cold]
+ #[inline(never)]
+ fn cold_clone(v: &SmolStr) -> SmolStr {
+ SmolStr(v.0.clone())
}
- Self(self.0.clone())
+
+ if self.is_heap_allocated() {
+ return cold_clone(self);
+ }
+
+ // SAFETY: We verified that the payload of `Repr` is a POD
+ unsafe { core::ptr::read(self as *const SmolStr) }
}
}
diff --git a/rust-version b/rust-version
index 0e89b4a..f545ef4 100644
--- a/rust-version
+++ b/rust-version
@@ -1 +1 @@
-c5dabe8cf798123087d094f06417f5a767ca73e8
+6159a44067ebce42b38f062cc7df267a1348e092
diff --git a/xtask/src/codegen/grammar.rs b/xtask/src/codegen/grammar.rs
index 9bd87a7..18f6c1f 100644
--- a/xtask/src/codegen/grammar.rs
+++ b/xtask/src/codegen/grammar.rs
@@ -706,7 +706,23 @@
}
};
- add_preamble(crate::flags::CodegenType::Grammar, reformat(ast.to_string()))
+ let result = add_preamble(crate::flags::CodegenType::Grammar, reformat(ast.to_string()));
+
+ if let Some(start) = result.find("macro_rules ! T_")
+ && let Some(macro_end) = result[start..].find("\nimpl ::core::marker::Copy")
+ {
+ let macro_section = &result[start..start + macro_end];
+ let formatted_macro = macro_section
+ .replace("T_ { [", "T_ {\n [")
+ .replace(" ; [", ";\n [")
+ .replace(" ; }", ";\n}")
+ .trim_end()
+ .to_owned()
+ + "\n";
+ return result.replace(macro_section, &formatted_macro);
+ }
+
+ result
}
fn to_upper_snake_case(s: &str) -> String {
diff --git a/xtask/src/install.rs b/xtask/src/install.rs
index 975e361..bddce0f 100644
--- a/xtask/src/install.rs
+++ b/xtask/src/install.rs
@@ -174,10 +174,17 @@
fn install_proc_macro_server(sh: &Shell, opts: ProcMacroServerOpt) -> anyhow::Result<()> {
let profile = if opts.dev_rel { "dev-rel" } else { "release" };
- cmd!(
+ let mut cmd = cmd!(
sh,
- "cargo +nightly install --path crates/proc-macro-srv-cli --profile={profile} --locked --force --features sysroot-abi"
- ).run()?;
+ "cargo install --path crates/proc-macro-srv-cli --profile={profile} --locked --force --features sysroot-abi"
+ );
+ if std::env::var_os("RUSTUP_TOOLCHAIN").is_none() {
+ cmd = cmd.env("RUSTUP_TOOLCHAIN", "nightly");
+ } else {
+ cmd = cmd.env("RUSTC_BOOTSTRAP", "1");
+ }
+
+ cmd.run()?;
Ok(())
}
diff --git a/xtask/src/tidy.rs b/xtask/src/tidy.rs
index ebfc7d0..0552850 100644
--- a/xtask/src/tidy.rs
+++ b/xtask/src/tidy.rs
@@ -194,6 +194,7 @@
"test-utils/src/fixture.rs",
// Generated code from lints contains doc tests in string literals.
"ide-db/src/generated/lints.rs",
+ "proc-macro-srv/src/tests/mod.rs",
];
if need_panic.iter().any(|p| path.ends_with(p)) {
return;